,original_method,input_method,target_block,tokens_in_method 5126,"def stream_edit(request, stream_id, response_format=""html""): ""Stream edit page"" user = request.user.profile stream = get_object_or_404(MessageStream, pk=stream_id) if not request.user.profile.has_permission(stream, mode=""w""): return user_denied( request, message=""You don't have access to this Stream"", response_format=response_format, ) if request.POST: if ""cancel"" not in request.POST: form = MessageStreamForm(user, request.POST, instance=stream) if form.is_valid(): stream = form.save() return HttpResponseRedirect( reverse(""messaging_stream_view"", args=[stream.id]) ) else: return HttpResponseRedirect( reverse(""messaging_stream_view"", args=[stream.id]) ) else: form = MessageStreamForm(user, instance=stream) context = _get_default_context(request) context.update({""form"": form, ""stream"": stream}) return render_to_response( ""messaging/stream_edit"", context, context_instance=RequestContext(request), response_format=response_format, )","def stream_edit ( request , stream_id , response_format = ""html"" ) : ""Stream edit page"" user = request . user . profile stream = get_object_or_404 ( MessageStream , pk = stream_id ) if not request . user . profile . has_permission ( stream , mode = ""w"" ) : return user_denied ( request , message = ""You don't have access to this Stream"" , response_format = response_format , ) if request . POST : form = MessageStreamForm ( user , request . POST , instance = stream ) if form . is_valid ( ) : stream = form . save ( ) return HttpResponseRedirect ( reverse ( ""messaging_stream_view"" , args = [ stream . id ] ) ) else : return HttpResponseRedirect ( reverse ( ""messaging_stream_view"" , args = [ stream . id ] ) ) else : form = MessageStreamForm ( user , instance = stream ) context = _get_default_context ( request ) context . update ( { ""form"" : form , ""stream"" : stream } ) return render_to_response ( ""messaging/stream_edit"" , context , context_instance = RequestContext ( request ) , response_format = response_format , )","if ""cancel"" not in request . POST :",352 10859,"def _read_and_parse_includes(self): # Map header-filename: (#include AST node, module). included_files = {} # Map declaration-name: AST node. forward_declarations = {} files_seen = {} for node in self.ast_list: if isinstance(node, ast.Include): if node.system: filename = node.filename else: module = self._get_module(node) filename = module.filename _, ext = os.path.splitext(filename) if ext.lower() != "".hxx"": included_files[filename] = node, module if is_cpp_file(filename): self._add_warning( ""should not #include C++ source file '{}'"".format(node.filename), node, ) if filename == self.filename: self._add_warning(""'{}' #includes itself"".format(node.filename), node) if filename in files_seen: include_node = files_seen[filename] line_num = get_line_number(self.metrics, include_node) self._add_warning( ""'{}' already #included on line {}"".format(node.filename, line_num), node, ) else: files_seen[filename] = node if isinstance(node, DECLARATION_TYPES) and node.is_declaration(): forward_declarations[node.full_name()] = node return included_files, forward_declarations ","def _read_and_parse_includes ( self ) : included_files = { } forward_declarations = { } files_seen = { } for node in self . ast_list : if node . system : filename = node . filename else : module = self . _get_module ( node ) filename = module . filename _ , ext = os . path . splitext ( filename ) if ext . 
lower ( ) != "".hxx"" : included_files [ filename ] = node , module if is_cpp_file ( filename ) : self . _add_warning ( ""should not #include C++ source file '{}'"" . format ( node . filename ) , node , ) if filename == self . filename : self . _add_warning ( ""'{}' #includes itself"" . format ( node . filename ) , node ) if filename in files_seen : include_node = files_seen [ filename ] line_num = get_line_number ( self . metrics , include_node ) self . _add_warning ( ""'{}' already #included on line {}"" . format ( node . filename , line_num ) , node , ) else : files_seen [ filename ] = node if isinstance ( node , DECLARATION_TYPES ) and node . is_declaration ( ) : forward_declarations [ node . full_name ( ) ] = node return included_files , forward_declarations","if isinstance ( node , ast . Include ) :",439 10615,"def _get_list_key(self, spaces, lines): key_list = [] parse_key = False key, desc, ptype = None, """", None param_spaces = 0 for line in lines: if len(line.strip()) == 0: continue curr_spaces = get_leading_spaces(line) if not param_spaces: param_spaces = len(curr_spaces) if len(curr_spaces) == param_spaces: if parse_key: key_list.append((key, desc, ptype)) if "":"" in line: elems = line.split("":"", 1) ptype = None key = elems[0].strip() # the param's type is near the key in parenthesis if ""("" in key and "")"" in key: tstart = key.index(""("") + 1 tend = key.index("")"") # the 'optional' keyword can follow the style after a comma if "","" in key: tend = key.index("","") ptype = key[tstart:tend].strip() key = key[: tstart - 1].strip() desc = elems[1].strip() parse_key = True else: if len(curr_spaces) > len(spaces): line = line.replace(spaces, """", 1) if desc: desc += ""\n"" desc += line else: if len(curr_spaces) > len(spaces): line = line.replace(spaces, """", 1) if desc: desc += ""\n"" desc += line if parse_key or desc: key_list.append((key, desc, ptype)) return key_list","def _get_list_key ( self , spaces , lines ) : key_list = [ ] parse_key = False key , desc , ptype = None , """" , None param_spaces = 0 for line in lines : continue curr_spaces = get_leading_spaces ( line ) if not param_spaces : param_spaces = len ( curr_spaces ) if len ( curr_spaces ) == param_spaces : if parse_key : key_list . append ( ( key , desc , ptype ) ) if "":"" in line : elems = line . split ( "":"" , 1 ) ptype = None key = elems [ 0 ] . strip ( ) if ""("" in key and "")"" in key : tstart = key . index ( ""("" ) + 1 tend = key . index ( "")"" ) if "","" in key : tend = key . index ( "","" ) ptype = key [ tstart : tend ] . strip ( ) key = key [ : tstart - 1 ] . strip ( ) desc = elems [ 1 ] . strip ( ) parse_key = True else : if len ( curr_spaces ) > len ( spaces ) : line = line . replace ( spaces , """" , 1 ) if desc : desc += ""\n"" desc += line else : if len ( curr_spaces ) > len ( spaces ) : line = line . replace ( spaces , """" , 1 ) if desc : desc += ""\n"" desc += line if parse_key or desc : key_list . append ( ( key , desc , ptype ) ) return key_list",if len ( line . 
strip ( ) ) == 0 :,510 17853,"def search_host(self, search_string): results = [] for host_entry in self.config_data: if host_entry.get(""type"") != ""entry"": continue if host_entry.get(""host"") == ""*"": continue searchable_information = host_entry.get(""host"") for key, value in six.iteritems(host_entry.get(""options"")): if isinstance(value, list): value = "" "".join(value) if isinstance(value, int): value = str(value) searchable_information += "" "" + value if search_string in searchable_information: results.append(host_entry) return results ","def search_host ( self , search_string ) : results = [ ] for host_entry in self . config_data : if host_entry . get ( ""type"" ) != ""entry"" : continue if host_entry . get ( ""host"" ) == ""*"" : continue searchable_information = host_entry . get ( ""host"" ) for key , value in six . iteritems ( host_entry . get ( ""options"" ) ) : if isinstance ( value , list ) : value = "" "" . join ( value ) value = str ( value ) searchable_information += "" "" + value if search_string in searchable_information : results . append ( host_entry ) return results","if isinstance ( value , int ) :",191 3922,"def pop(self, key: Union[str, Enum], default: Any = DEFAULT_VALUE_MARKER) -> Any: try: if self._get_flag(""readonly""): raise ReadonlyConfigError(""Cannot pop from read-only node"") if self._get_flag(""struct""): raise ConfigTypeError(""DictConfig in struct mode does not support pop"") if self._is_typed() and self._get_node_flag(""struct"") is not False: raise ConfigTypeError( f""{type_str(self._metadata.object_type)} (DictConfig) does not support pop"" ) key = self._validate_and_normalize_key(key) node = self._get_node(key=key, validate_access=False) if node is not None: value = self._resolve_with_default( key=key, value=node, default_value=default ) del self[key] return value else: if default is not DEFAULT_VALUE_MARKER: return default else: full = self._get_full_key(key=key) if full != key: raise ConfigKeyError(f""Key not found: '{key}' (path: '{full}')"") else: raise ConfigKeyError(f""Key not found: '{key}'"") except Exception as e: self._format_and_raise(key=key, value=None, cause=e) ","def pop ( self , key : Union [ str , Enum ] , default : Any = DEFAULT_VALUE_MARKER ) -> Any : try : if self . _get_flag ( ""readonly"" ) : raise ReadonlyConfigError ( ""Cannot pop from read-only node"" ) raise ConfigTypeError ( ""DictConfig in struct mode does not support pop"" ) if self . _is_typed ( ) and self . _get_node_flag ( ""struct"" ) is not False : raise ConfigTypeError ( f""{type_str(self._metadata.object_type)} (DictConfig) does not support pop"" ) key = self . _validate_and_normalize_key ( key ) node = self . _get_node ( key = key , validate_access = False ) if node is not None : value = self . _resolve_with_default ( key = key , value = node , default_value = default ) del self [ key ] return value else : if default is not DEFAULT_VALUE_MARKER : return default else : full = self . _get_full_key ( key = key ) if full != key : raise ConfigKeyError ( f""Key not found: '{key}' (path: '{full}')"" ) else : raise ConfigKeyError ( f""Key not found: '{key}'"" ) except Exception as e : self . _format_and_raise ( key = key , value = None , cause = e )","if self . 
_get_flag ( ""struct"" ) :",396 8997,"def _key(self, index): len_self = len(self) if index < 0: index += len_self if index < 0: raise IndexError(""deque index out of range"") elif index >= len_self: raise IndexError(""deque index out of range"") diff = len_self - index - 1 _cache_iterkeys = self._cache.iterkeys try: if index <= diff: iter_keys = _cache_iterkeys() key = next(islice(iter_keys, index, index + 1)) else: iter_keys = _cache_iterkeys(reverse=True) key = next(islice(iter_keys, diff, diff + 1)) except StopIteration: raise IndexError(""deque index out of range"") return key","def _key ( self , index ) : len_self = len ( self ) index += len_self raise IndexError ( ""deque index out of range"" ) elif index >= len_self : raise IndexError ( ""deque index out of range"" ) diff = len_self - index - 1 _cache_iterkeys = self . _cache . iterkeys try : if index <= diff : iter_keys = _cache_iterkeys ( ) key = next ( islice ( iter_keys , index , index + 1 ) ) else : iter_keys = _cache_iterkeys ( reverse = True ) key = next ( islice ( iter_keys , diff , diff + 1 ) ) except StopIteration : raise IndexError ( ""deque index out of range"" ) return key",if index < 0 :,210 15810,"def convert(src, dst): """"""Convert keys in pycls pretrained RegNet models to mmdet style."""""" # load caffe model regnet_model = torch.load(src) blobs = regnet_model[""model_state""] # convert to pytorch style state_dict = OrderedDict() converted_names = set() for key, weight in blobs.items(): if ""stem"" in key: convert_stem(key, weight, state_dict, converted_names) elif ""head"" in key: convert_head(key, weight, state_dict, converted_names) elif key.startswith(""s""): convert_reslayer(key, weight, state_dict, converted_names) # check if all layers are converted for key in blobs: if key not in converted_names: print(f""not converted: {key}"") # save checkpoint checkpoint = dict() checkpoint[""state_dict""] = state_dict torch.save(checkpoint, dst) ","def convert ( src , dst ) : """"""Convert keys in pycls pretrained RegNet models to mmdet style."""""" regnet_model = torch . load ( src ) blobs = regnet_model [ ""model_state"" ] state_dict = OrderedDict ( ) converted_names = set ( ) for key , weight in blobs . items ( ) : if ""stem"" in key : convert_stem ( key , weight , state_dict , converted_names ) convert_head ( key , weight , state_dict , converted_names ) elif key . startswith ( ""s"" ) : convert_reslayer ( key , weight , state_dict , converted_names ) for key in blobs : if key not in converted_names : print ( f""not converted: {key}"" ) checkpoint = dict ( ) checkpoint [ ""state_dict"" ] = state_dict torch . save ( checkpoint , dst )","elif ""head"" in key :",258 17333,"def run(self, args, **kwargs): # Filtering options if args.action: kwargs[""action""] = args.action if args.status: kwargs[""status""] = args.status if args.trigger_instance: kwargs[""trigger_instance""] = args.trigger_instance if not args.showall: # null is the magic string that translates to does not exist. 
kwargs[""parent""] = ""null"" if args.timestamp_gt: kwargs[""timestamp_gt""] = args.timestamp_gt if args.timestamp_lt: kwargs[""timestamp_lt""] = args.timestamp_lt if args.sort_order: if args.sort_order in [""asc"", ""ascending""]: kwargs[""sort_asc""] = True elif args.sort_order in [""desc"", ""descending""]: kwargs[""sort_desc""] = True # We only retrieve attributes which are needed to speed things up include_attributes = self._get_include_attributes(args=args) if include_attributes: kwargs[""include_attributes""] = "","".join(include_attributes) return self.manager.query_with_count(limit=args.last, **kwargs) ","def run ( self , args , ** kwargs ) : if args . action : kwargs [ ""action"" ] = args . action if args . status : kwargs [ ""status"" ] = args . status if args . trigger_instance : kwargs [ ""trigger_instance"" ] = args . trigger_instance if not args . showall : kwargs [ ""parent"" ] = ""null"" if args . timestamp_gt : kwargs [ ""timestamp_gt"" ] = args . timestamp_gt if args . timestamp_lt : kwargs [ ""timestamp_lt"" ] = args . timestamp_lt if args . sort_order : kwargs [ ""sort_asc"" ] = True elif args . sort_order in [ ""desc"" , ""descending"" ] : kwargs [ ""sort_desc"" ] = True include_attributes = self . _get_include_attributes ( args = args ) if include_attributes : kwargs [ ""include_attributes"" ] = "","" . join ( include_attributes ) return self . manager . query_with_count ( limit = args . last , ** kwargs )","if args . sort_order in [ ""asc"" , ""ascending"" ] :",304 22834,"def goToPrevMarkedHeadline(self, event=None): """"""Select the next marked node."""""" c = self p = c.p if not p: return p.moveToThreadBack() wrapped = False while 1: if p and p.isMarked(): break elif p: p.moveToThreadBack() elif wrapped: break else: wrapped = True p = c.rootPosition() if not p: g.blue(""done"") c.treeSelectHelper(p) # Sets focus.","def goToPrevMarkedHeadline ( self , event = None ) : """"""Select the next marked node."""""" c = self p = c . p if not p : return p . moveToThreadBack ( ) wrapped = False while 1 : break elif p : p . moveToThreadBack ( ) elif wrapped : break else : wrapped = True p = c . rootPosition ( ) if not p : g . blue ( ""done"" ) c . treeSelectHelper ( p )",if p and p . isMarked ( ) :,164 7429,"def read(self, iprot): if ( iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None ): fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 0: if ftype == TType.LIST: self.success = [] (_etype683, _size680) = iprot.readListBegin() for _i684 in xrange(_size680): _elem685 = iprot.readString() self.success.append(_elem685) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 1: if ftype == TType.STRUCT: self.o1 = MetaException() self.o1.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()","def read ( self , iprot ) : if ( iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None ) : fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) return iprot . readStructBegin ( ) while True : ( fname , ftype , fid ) = iprot . readFieldBegin ( ) if ftype == TType . 
STOP : break if fid == 0 : self . success = [ ] ( _etype683 , _size680 ) = iprot . readListBegin ( ) for _i684 in xrange ( _size680 ) : _elem685 = iprot . readString ( ) self . success . append ( _elem685 ) iprot . readListEnd ( ) else : iprot . skip ( ftype ) elif fid == 1 : if ftype == TType . STRUCT : self . o1 = MetaException ( ) self . o1 . read ( iprot ) else : iprot . skip ( ftype ) else : iprot . skip ( ftype ) iprot . readFieldEnd ( ) iprot . readStructEnd ( )",if ftype == TType . LIST :,411 19426,"def __iter__(self): n_samples = self.image_dataset._X_shape[0] worker_info = torch.utils.data.get_worker_info() if worker_info is None: first_sample = 0 last_sample = n_samples else: first_sample = worker_info.id * n_samples // worker_info.num_workers last_sample = (worker_info.id + 1) * n_samples // worker_info.num_workers for epoch in range(self.epochs): if self.deterministic: order = first_sample + np.arange(last_sample - first_sample) else: # Ensure that every worker will pick the same random order for each epoch. random = np.random.RandomState(epoch) order = random.permutation(n_samples)[first_sample:last_sample] if self.batch_size is None: for i in order: yield ( self.image_dataset._get_image(self.image_dataset._X, i), self.image_dataset._get_image(self.image_dataset._y, i), self.image_dataset._w[i], self.image_dataset._ids[i], ) else: for i in range(0, len(order), self.batch_size): indices = order[i : i + self.batch_size] yield ( self.image_dataset._get_image(self.image_dataset._X, indices), self.image_dataset._get_image(self.image_dataset._y, indices), self.image_dataset._w[indices], self.image_dataset._ids[indices], )","def __iter__ ( self ) : n_samples = self . image_dataset . _X_shape [ 0 ] worker_info = torch . utils . data . get_worker_info ( ) if worker_info is None : first_sample = 0 last_sample = n_samples else : first_sample = worker_info . id * n_samples // worker_info . num_workers last_sample = ( worker_info . id + 1 ) * n_samples // worker_info . num_workers for epoch in range ( self . epochs ) : order = first_sample + np . arange ( last_sample - first_sample ) else : random = np . random . RandomState ( epoch ) order = random . permutation ( n_samples ) [ first_sample : last_sample ] if self . batch_size is None : for i in order : yield ( self . image_dataset . _get_image ( self . image_dataset . _X , i ) , self . image_dataset . _get_image ( self . image_dataset . _y , i ) , self . image_dataset . _w [ i ] , self . image_dataset . _ids [ i ] , ) else : for i in range ( 0 , len ( order ) , self . batch_size ) : indices = order [ i : i + self . batch_size ] yield ( self . image_dataset . _get_image ( self . image_dataset . _X , indices ) , self . image_dataset . _get_image ( self . image_dataset . _y , indices ) , self . image_dataset . _w [ indices ] , self . image_dataset . _ids [ indices ] , )",if self . deterministic :,487 24553,"def on_leave(self, instance): """"""Called when the mouse cursor goes outside the button of stack."""""" if self.state == ""open"": for widget in self.children: if isinstance(widget, MDFloatingLabel) and self.hint_animation: Animation.cancel_all(widget) if self.data[instance.icon] == widget.text: Animation( _canvas_width=0, _padding_right=0, d=self.opening_time, t=self.opening_transition, ).start(instance) if self.hint_animation: Animation( opacity=0, d=0.1, t=self.opening_transition, ).start(widget) break","def on_leave ( self , instance ) : """"""Called when the mouse cursor goes outside the button of stack."""""" if self . 
state == ""open"" : for widget in self . children : if isinstance ( widget , MDFloatingLabel ) and self . hint_animation : Animation . cancel_all ( widget ) Animation ( _canvas_width = 0 , _padding_right = 0 , d = self . opening_time , t = self . opening_transition , ) . start ( instance ) if self . hint_animation : Animation ( opacity = 0 , d = 0.1 , t = self . opening_transition , ) . start ( widget ) break",if self . data [ instance . icon ] == widget . text :,265 25340,"def reset_init_only_vars( self, info: TypeInfo, attributes: List[DataclassAttribute] ) -> None: """"""Remove init-only vars from the class and reset init var declarations."""""" for attr in attributes: if attr.is_init_var: if attr.name in info.names: del info.names[attr.name] else: # Nodes of superclass InitVars not used in __init__ # cannot be reached. assert attr.is_init_var for stmt in info.defn.defs.body: if isinstance(stmt, AssignmentStmt) and stmt.unanalyzed_type: lvalue = stmt.lvalues[0] if isinstance(lvalue, NameExpr) and lvalue.name == attr.name: # Reset node so that another semantic analysis pass will # recreate a symbol node for this attribute. lvalue.node = None","def reset_init_only_vars ( self , info : TypeInfo , attributes : List [ DataclassAttribute ] ) -> None : """"""Remove init-only vars from the class and reset init var declarations."""""" for attr in attributes : if attr . name in info . names : del info . names [ attr . name ] else : assert attr . is_init_var for stmt in info . defn . defs . body : if isinstance ( stmt , AssignmentStmt ) and stmt . unanalyzed_type : lvalue = stmt . lvalues [ 0 ] if isinstance ( lvalue , NameExpr ) and lvalue . name == attr . name : lvalue . node = None",if attr . is_init_var :,252 23924,"def __call__(self, request): if ( request.path.startswith(get_script_prefix() + ""control"") and request.user.is_authenticated ): if is_hijacked(request): hijack_history = request.session.get(""hijack_history"", False) hijacker = get_object_or_404(User, pk=hijack_history[0]) ss = hijacker.get_active_staff_session( request.session.get(""hijacker_session"") ) if ss: ss.logs.create( url=request.path, method=request.method, impersonating=request.user ) else: ss = request.user.get_active_staff_session(request.session.session_key) if ss: ss.logs.create(url=request.path, method=request.method) response = self.get_response(request) return response","def __call__ ( self , request ) : if ( request . path . startswith ( get_script_prefix ( ) + ""control"" ) and request . user . is_authenticated ) : if is_hijacked ( request ) : hijack_history = request . session . get ( ""hijack_history"" , False ) hijacker = get_object_or_404 ( User , pk = hijack_history [ 0 ] ) ss = hijacker . get_active_staff_session ( request . session . get ( ""hijacker_session"" ) ) ss . logs . create ( url = request . path , method = request . method , impersonating = request . user ) else : ss = request . user . get_active_staff_session ( request . session . session_key ) ss . logs . create ( url = request . path , method = request . method ) response = self . 
get_response ( request ) return response",if ss :,273 7806,"def test_other_attributes(self): print_test_name(""TEST OTHER ATTRIBUTES"") correct = 0 props = {} for example in OTHER_PROP_EXAMPLES: original_schema = schema.parse(example.schema_string) round_trip_schema = schema.parse(str(original_schema)) self.assertEqual(original_schema.other_props, round_trip_schema.other_props) if original_schema.type == ""record"": field_props = 0 for f in original_schema.fields: if f.other_props: props.update(f.other_props) field_props += 1 self.assertEqual(field_props, len(original_schema.fields)) if original_schema.other_props: props.update(original_schema.other_props) correct += 1 for k in props: v = props[k] if k == ""cp_boolean"": self.assertEqual(type(v), bool) elif k == ""cp_int"": self.assertEqual(type(v), int) elif k == ""cp_object"": self.assertEqual(type(v), dict) elif k == ""cp_float"": self.assertEqual(type(v), float) elif k == ""cp_array"": self.assertEqual(type(v), list) self.assertEqual(correct, len(OTHER_PROP_EXAMPLES))","def test_other_attributes ( self ) : print_test_name ( ""TEST OTHER ATTRIBUTES"" ) correct = 0 props = { } for example in OTHER_PROP_EXAMPLES : original_schema = schema . parse ( example . schema_string ) round_trip_schema = schema . parse ( str ( original_schema ) ) self . assertEqual ( original_schema . other_props , round_trip_schema . other_props ) field_props = 0 for f in original_schema . fields : if f . other_props : props . update ( f . other_props ) field_props += 1 self . assertEqual ( field_props , len ( original_schema . fields ) ) if original_schema . other_props : props . update ( original_schema . other_props ) correct += 1 for k in props : v = props [ k ] if k == ""cp_boolean"" : self . assertEqual ( type ( v ) , bool ) elif k == ""cp_int"" : self . assertEqual ( type ( v ) , int ) elif k == ""cp_object"" : self . assertEqual ( type ( v ) , dict ) elif k == ""cp_float"" : self . assertEqual ( type ( v ) , float ) elif k == ""cp_array"" : self . assertEqual ( type ( v ) , list ) self . assertEqual ( correct , len ( OTHER_PROP_EXAMPLES ) )","if original_schema . type == ""record"" :",396 1520,"def test_no_unknown_state_fields_in_mp_events(): all_fields = ins.MediaPlayerStateIterator.fields.keys() ok = True for evname in ins.mp_events: if evname == ""version"": continue for name in ins.mp_events[evname][""update_names""]: if name not in all_fields: print( ""Error, in evname '%s' unknown field '%s' in 'update_names'"" % (evname, name) ) ok = False for name in ins.mp_events[evname][""other_fields""]: if name not in all_fields: print( ""Error, in evname '%s' unknown field '%s' in 'other_fields'"" % (evname, name) ) ok = False if ok: print(""test_no_unknown_state_fields_in_mp_events: passed"") ","def test_no_unknown_state_fields_in_mp_events ( ) : all_fields = ins . MediaPlayerStateIterator . fields . keys ( ) ok = True for evname in ins . mp_events : if evname == ""version"" : continue for name in ins . mp_events [ evname ] [ ""update_names"" ] : print ( ""Error, in evname '%s' unknown field '%s' in 'update_names'"" % ( evname , name ) ) ok = False for name in ins . 
mp_events [ evname ] [ ""other_fields"" ] : print ( ""Error, in evname '%s' unknown field '%s' in 'other_fields'"" % ( evname , name ) ) ok = False if ok : print ( ""test_no_unknown_state_fields_in_mp_events: passed"" )",if name not in all_fields :,277 4014,"def __call__(self, A, a, order=10, mu=0.1, s=0.5): # NE Enhancement via Spectral Propagation print(""Chebyshev Series -----------------"") if order == 1: return a node_number = a.shape[0] A = sp.eye(node_number) + A DA = preprocessing.normalize(A, norm=""l1"") L = sp.eye(node_number) - DA M = L - mu * sp.eye(node_number) Lx0 = a Lx1 = M.dot(a) Lx1 = 0.5 * M.dot(Lx1) - a conv = iv(0, s) * Lx0 conv -= 2 * iv(1, s) * Lx1 for i in range(2, order): Lx2 = M.dot(Lx1) Lx2 = (M.dot(Lx2) - 2 * Lx1) - Lx0 # Lx2 = 2*L.dot(Lx1) - Lx0 if i % 2 == 0: conv += 2 * iv(i, s) * Lx2 else: conv -= 2 * iv(i, s) * Lx2 Lx0 = Lx1 Lx1 = Lx2 del Lx2 mm = A.dot(a - conv) return mm","def __call__ ( self , A , a , order = 10 , mu = 0.1 , s = 0.5 ) : print ( ""Chebyshev Series -----------------"" ) if order == 1 : return a node_number = a . shape [ 0 ] A = sp . eye ( node_number ) + A DA = preprocessing . normalize ( A , norm = ""l1"" ) L = sp . eye ( node_number ) - DA M = L - mu * sp . eye ( node_number ) Lx0 = a Lx1 = M . dot ( a ) Lx1 = 0.5 * M . dot ( Lx1 ) - a conv = iv ( 0 , s ) * Lx0 conv -= 2 * iv ( 1 , s ) * Lx1 for i in range ( 2 , order ) : Lx2 = M . dot ( Lx1 ) Lx2 = ( M . dot ( Lx2 ) - 2 * Lx1 ) - Lx0 conv += 2 * iv ( i , s ) * Lx2 else : conv -= 2 * iv ( i , s ) * Lx2 Lx0 = Lx1 Lx1 = Lx2 del Lx2 mm = A . dot ( a - conv ) return mm",if i % 2 == 0 :,368 1613,"def parse_clusterflow_logs(self, f): """"""Parse Clusterflow logs"""""" module = None job_id = None pipeline_id = None for l in f[""f""]: # Get pipeline ID module_r = re.match(r""Module:\s+(.+)$"", l) if module_r: module = module_r.group(1) job_id_r = re.match(r""Job ID:\s+(.+)$"", l) if job_id_r: job_id = job_id_r.group(1) if module is not None: pipeline_r = re.match( r""(cf_.+)_"" + re.escape(module) + r""_\d+$"", job_id ) if pipeline_r: pipeline_id = pipeline_r.group(1) # Get commands that have been run if l.startswith(""###CFCMD""): if pipeline_id is None: pipeline_id = ""unknown"" if pipeline_id not in self.clusterflow_commands.keys(): self.clusterflow_commands[pipeline_id] = list() self.clusterflow_commands[pipeline_id].append(l[8:])","def parse_clusterflow_logs ( self , f ) : """"""Parse Clusterflow logs"""""" module = None job_id = None pipeline_id = None for l in f [ ""f"" ] : module_r = re . match ( r""Module:\s+(.+)$"" , l ) if module_r : module = module_r . group ( 1 ) job_id_r = re . match ( r""Job ID:\s+(.+)$"" , l ) if job_id_r : job_id = job_id_r . group ( 1 ) if module is not None : pipeline_r = re . match ( r""(cf_.+)_"" + re . escape ( module ) + r""_\d+$"" , job_id ) if pipeline_r : pipeline_id = pipeline_r . group ( 1 ) if l . startswith ( ""###CFCMD"" ) : pipeline_id = ""unknown"" if pipeline_id not in self . clusterflow_commands . keys ( ) : self . clusterflow_commands [ pipeline_id ] = list ( ) self . clusterflow_commands [ pipeline_id ] . 
append ( l [ 8 : ] )",if pipeline_id is None :,343 15841,"def check_other_queues(queue_counts_dict: Dict[str, int]) -> List[Dict[str, Any]]: """"""Do a simple queue size check for queues whose workers don't publish stats files."""""" results = [] for queue, count in queue_counts_dict.items(): if queue in normal_queues: continue if count > CRITICAL_COUNT_THRESHOLD_DEFAULT: results.append( dict(status=CRITICAL, name=queue, message=f""count critical: {count}"") ) elif count > WARN_COUNT_THRESHOLD_DEFAULT: results.append( dict(status=WARNING, name=queue, message=f""count warning: {count}"") ) else: results.append(dict(status=OK, name=queue, message="""")) return results ","def check_other_queues ( queue_counts_dict : Dict [ str , int ] ) -> List [ Dict [ str , Any ] ] : """"""Do a simple queue size check for queues whose workers don't publish stats files."""""" results = [ ] for queue , count in queue_counts_dict . items ( ) : if queue in normal_queues : continue results . append ( dict ( status = CRITICAL , name = queue , message = f""count critical: {count}"" ) ) elif count > WARN_COUNT_THRESHOLD_DEFAULT : results . append ( dict ( status = WARNING , name = queue , message = f""count warning: {count}"" ) ) else : results . append ( dict ( status = OK , name = queue , message = """" ) ) return results",if count > CRITICAL_COUNT_THRESHOLD_DEFAULT :,220 1755,"def handle(self): # Send a welcome message. self._send_textline(""* OK IMAP4rev1"") while 1: # Gather up input until we receive a line terminator or we timeout. # Accumulate read(1) because it's simpler to handle the differences # between naked sockets and SSL sockets. line = b"""" while 1: try: part = self.rfile.read(1) if part == b"""": # Naked sockets return empty strings.. return line += part except OSError: # ..but SSLSockets raise exceptions. return if line.endswith(b""\r\n""): break if verbose: print(""GOT: %r"" % line.strip()) if self.continuation: try: self.continuation.send(line) except StopIteration: self.continuation = None continue splitline = line.decode(""ASCII"").split() tag = splitline[0] cmd = splitline[1] args = splitline[2:] if hasattr(self, ""cmd_"" + cmd): continuation = getattr(self, ""cmd_"" + cmd)(tag, args) if continuation: self.continuation = continuation next(continuation) else: self._send_tagged(tag, ""BAD"", cmd + "" unknown"")","def handle ( self ) : self . _send_textline ( ""* OK IMAP4rev1"" ) while 1 : line = b"""" while 1 : try : part = self . rfile . read ( 1 ) if part == b"""" : return line += part except OSError : return if line . endswith ( b""\r\n"" ) : break if verbose : print ( ""GOT: %r"" % line . strip ( ) ) if self . continuation : try : self . continuation . send ( line ) except StopIteration : self . continuation = None continue splitline = line . decode ( ""ASCII"" ) . split ( ) tag = splitline [ 0 ] cmd = splitline [ 1 ] args = splitline [ 2 : ] if hasattr ( self , ""cmd_"" + cmd ) : continuation = getattr ( self , ""cmd_"" + cmd ) ( tag , args ) self . continuation = continuation next ( continuation ) else : self . _send_tagged ( tag , ""BAD"" , cmd + "" unknown"" )",if continuation :,400 3058,"def get_indexes(self, cursor, table_name): # This query retrieves each index on the given table, including the # first associated field name cursor.execute(self._get_indexes_query, [table_name, self.connection.schema_name]) indexes = {} for row in cursor.fetchall(): # row[1] (idx.indkey) is stored in the DB as an array. It comes out as # a string of space-separated integers. 
This designates the field # indexes (1-based) of the fields that have indexes on the table. # Here, we skip any indexes across multiple fields. if "" "" in row[1]: continue if row[0] not in indexes: indexes[row[0]] = {""primary_key"": False, ""unique"": False} # It's possible to have the unique and PK constraints in separate indexes. if row[3]: indexes[row[0]][""primary_key""] = True if row[2]: indexes[row[0]][""unique""] = True return indexes","def get_indexes ( self , cursor , table_name ) : cursor . execute ( self . _get_indexes_query , [ table_name , self . connection . schema_name ] ) indexes = { } for row in cursor . fetchall ( ) : if "" "" in row [ 1 ] : continue if row [ 0 ] not in indexes : indexes [ row [ 0 ] ] = { ""primary_key"" : False , ""unique"" : False } if row [ 3 ] : indexes [ row [ 0 ] ] [ ""primary_key"" ] = True indexes [ row [ 0 ] ] [ ""unique"" ] = True return indexes",if row [ 2 ] :,274 7177,"def _cache_key(ui, url=None, locale=None, additional_key_data=None): if url is None: url = request.base_url if locale is None: locale = g.locale.language if g.locale else ""en"" k = ""ui:{}:{}:{}"".format(ui, url, locale) if callable(additional_key_data): try: ak = additional_key_data() if ak: # we have some additional key components, let's attach them if not isinstance(ak, (list, tuple)): ak = [ak] k = ""{}:{}"".format(k, "":"".join(ak)) except Exception: _logger.exception( ""Error while trying to retrieve additional cache key parts for ui {}"".format( ui ) ) return k ","def _cache_key ( ui , url = None , locale = None , additional_key_data = None ) : if url is None : url = request . base_url if locale is None : locale = g . locale . language if g . locale else ""en"" k = ""ui:{}:{}:{}"" . format ( ui , url , locale ) if callable ( additional_key_data ) : try : ak = additional_key_data ( ) if not isinstance ( ak , ( list , tuple ) ) : ak = [ ak ] k = ""{}:{}"" . format ( k , "":"" . join ( ak ) ) except Exception : _logger . exception ( ""Error while trying to retrieve additional cache key parts for ui {}"" . format ( ui ) ) return k",if ak :,238 16186,"def _ArgumentListHasDictionaryEntry(self, token): """"""Check if the function argument list has a dictionary as an arg."""""" if _IsArgumentToFunction(token): while token: if token.value == ""{"": length = token.matching_bracket.total_length - token.total_length return length + self.stack[-2].indent > self.column_limit if token.ClosesScope(): break if token.OpensScope(): token = token.matching_bracket token = token.next_token return False","def _ArgumentListHasDictionaryEntry ( self , token ) : """"""Check if the function argument list has a dictionary as an arg."""""" if _IsArgumentToFunction ( token ) : while token : if token . value == ""{"" : length = token . matching_bracket . total_length - token . total_length return length + self . stack [ - 2 ] . indent > self . column_limit break if token . OpensScope ( ) : token = token . matching_bracket token = token . next_token return False",if token . 
ClosesScope ( ) :,153 13354,"def parse_escaped_hierarchical_category_name(category_name): """"""Parse a category name."""""" result = [] current = None index = 0 next_backslash = category_name.find(""\\"", index) next_slash = category_name.find(""/"", index) while index < len(category_name): if next_backslash == -1 and next_slash == -1: current = (current if current else """") + category_name[index:] index = len(category_name) elif next_slash >= 0 and (next_backslash == -1 or next_backslash > next_slash): result.append( (current if current else """") + category_name[index:next_slash] ) current = """" index = next_slash + 1 next_slash = category_name.find(""/"", index) else: if len(category_name) == next_backslash + 1: raise Exception( ""Unexpected '\\' in '{0}' at last position!"".format(category_name) ) esc_ch = category_name[next_backslash + 1] if esc_ch not in {""/"", ""\\""}: raise Exception( ""Unknown escape sequence '\\{0}' in '{1}'!"".format( esc_ch, category_name ) ) current = ( (current if current else """") + category_name[index:next_backslash] + esc_ch ) index = next_backslash + 2 next_backslash = category_name.find(""\\"", index) if esc_ch == ""/"": next_slash = category_name.find(""/"", index) if current is not None: result.append(current) return result","def parse_escaped_hierarchical_category_name ( category_name ) : """"""Parse a category name."""""" result = [ ] current = None index = 0 next_backslash = category_name . find ( ""\\"" , index ) next_slash = category_name . find ( ""/"" , index ) while index < len ( category_name ) : current = ( current if current else """" ) + category_name [ index : ] index = len ( category_name ) elif next_slash >= 0 and ( next_backslash == - 1 or next_backslash > next_slash ) : result . append ( ( current if current else """" ) + category_name [ index : next_slash ] ) current = """" index = next_slash + 1 next_slash = category_name . find ( ""/"" , index ) else : if len ( category_name ) == next_backslash + 1 : raise Exception ( ""Unexpected '\\' in '{0}' at last position!"" . format ( category_name ) ) esc_ch = category_name [ next_backslash + 1 ] if esc_ch not in { ""/"" , ""\\"" } : raise Exception ( ""Unknown escape sequence '\\{0}' in '{1}'!"" . format ( esc_ch , category_name ) ) current = ( ( current if current else """" ) + category_name [ index : next_backslash ] + esc_ch ) index = next_backslash + 2 next_backslash = category_name . find ( ""\\"" , index ) if esc_ch == ""/"" : next_slash = category_name . find ( ""/"" , index ) if current is not None : result . append ( current ) return result",if next_backslash == - 1 and next_slash == - 1 :,495 23681,"def addStudent(self, name, age): new_names = self.getNames() found = False for item in new_names: if item == (name, age): found = True break if not found: new_names.append((name, age)) new_names.sort() while len(self.children): self.remove(self.children[0]) # self.clear() self.addTitle() for student in new_names: sw = StudentWidget(student[0], student[1]) makeDraggable(sw) self.append(sw) self.setCellVerticalAlignment(sw, HasVerticalAlignment.ALIGN_TOP)","def addStudent ( self , name , age ) : new_names = self . getNames ( ) found = False for item in new_names : found = True break if not found : new_names . append ( ( name , age ) ) new_names . sort ( ) while len ( self . children ) : self . remove ( self . children [ 0 ] ) self . addTitle ( ) for student in new_names : sw = StudentWidget ( student [ 0 ] , student [ 1 ] ) makeDraggable ( sw ) self . append ( sw ) self . 
setCellVerticalAlignment ( sw , HasVerticalAlignment . ALIGN_TOP )","if item == ( name , age ) :",191 6262,"def mockup(self, records): provider = TransipProvider("""", """", """") _dns_entries = [] for record in records: if record._type in provider.SUPPORTS: entries_for = getattr(provider, ""_entries_for_{}"".format(record._type)) # Root records have '@' as name name = record.name if name == """": name = provider.ROOT_RECORD _dns_entries.extend(entries_for(name, record)) # NS is not supported as a DNS Entry, # so it should cover the if statement _dns_entries.append(DnsEntry(""@"", ""3600"", ""NS"", ""ns01.transip.nl."")) self.mockupEntries = _dns_entries","def mockup ( self , records ) : provider = TransipProvider ( """" , """" , """" ) _dns_entries = [ ] for record in records : entries_for = getattr ( provider , ""_entries_for_{}"" . format ( record . _type ) ) name = record . name if name == """" : name = provider . ROOT_RECORD _dns_entries . extend ( entries_for ( name , record ) ) _dns_entries . append ( DnsEntry ( ""@"" , ""3600"" , ""NS"" , ""ns01.transip.nl."" ) ) self . mockupEntries = _dns_entries",if record . _type in provider . SUPPORTS :,196 15643,"def _compare(d1, d2, skip_keys=None): """"""Compare two lists or dictionaries or array"""""" if type(d1) != type(d2): return False if isinstance(d1, dict): if set(d1) != set(d2): return False for key in d1: if skip_keys is not None and key in skip_keys: continue if not _compare(d1[key], d2[key], skip_keys=skip_keys): return False elif isinstance(d1, list): for i, _ in enumerate(d1): if not _compare(d1[i], d2[i], skip_keys=skip_keys): return False elif isinstance(d1, np.ndarray): if not np.array_equal(d1, d2): return False else: if d1 != d2: return False return True","def _compare ( d1 , d2 , skip_keys = None ) : """"""Compare two lists or dictionaries or array"""""" if type ( d1 ) != type ( d2 ) : return False if isinstance ( d1 , dict ) : return False for key in d1 : if skip_keys is not None and key in skip_keys : continue if not _compare ( d1 [ key ] , d2 [ key ] , skip_keys = skip_keys ) : return False elif isinstance ( d1 , list ) : for i , _ in enumerate ( d1 ) : if not _compare ( d1 [ i ] , d2 [ i ] , skip_keys = skip_keys ) : return False elif isinstance ( d1 , np . ndarray ) : if not np . array_equal ( d1 , d2 ) : return False else : if d1 != d2 : return False return True",if set ( d1 ) != set ( d2 ) :,261 11111,"def _get_families(self): families = [] for name, ext in self._get_family_dirs(): if ext is None: # is a directory family = self.get_resource( FileSystemPackageFamilyResource.key, location=self.location, name=name ) else: family = self.get_resource( FileSystemCombinedPackageFamilyResource.key, location=self.location, name=name, ext=ext, ) families.append(family) return families ","def _get_families ( self ) : families = [ ] for name , ext in self . _get_family_dirs ( ) : family = self . get_resource ( FileSystemPackageFamilyResource . key , location = self . location , name = name ) else : family = self . get_resource ( FileSystemCombinedPackageFamilyResource . key , location = self . location , name = name , ext = ext , ) families . append ( family ) return families",if ext is None :,157 1959,"def _module_repr_from_spec(spec): """"""Return the repr to use for the module."""""" # We mostly replicate _module_repr() using the spec attributes. 
name = ""?"" if spec.name is None else spec.name if spec.origin is None: if spec.loader is None: return """".format(name) else: return """".format(name, spec.loader) else: if spec.has_location: return """".format(name, spec.origin) else: return """".format(spec.name, spec.origin) ","def _module_repr_from_spec ( spec ) : """"""Return the repr to use for the module."""""" name = ""?"" if spec . name is None else spec . name if spec . origin is None : return """" . format ( name ) else : return """" . format ( name , spec . loader ) else : if spec . has_location : return """" . format ( name , spec . origin ) else : return """" . format ( spec . name , spec . origin )",if spec . loader is None :,180 14479,"def doDir(elem): for child in elem.childNodes: if not isinstance(child, minidom.Element): continue if child.tagName == ""Directory"": doDir(child) elif child.tagName == ""Component"": for grandchild in child.childNodes: if not isinstance(grandchild, minidom.Element): continue if grandchild.tagName != ""File"": continue files.add(grandchild.getAttribute(""Source"").replace(os.sep, ""/"")) ","def doDir ( elem ) : for child in elem . childNodes : if not isinstance ( child , minidom . Element ) : continue if child . tagName == ""Directory"" : doDir ( child ) elif child . tagName == ""Component"" : for grandchild in child . childNodes : continue if grandchild . tagName != ""File"" : continue files . add ( grandchild . getAttribute ( ""Source"" ) . replace ( os . sep , ""/"" ) )","if not isinstance ( grandchild , minidom . Element ) :",152 16950,"def test_row(self, row): for idx, test in self.patterns.items(): try: value = row[idx] except IndexError: value = """" result = test(value) if self.any_match: if result: return not self.inverse # True else: if not result: return self.inverse # False if self.any_match: return self.inverse # False else: return not self.inverse # True ","def test_row ( self , row ) : for idx , test in self . patterns . items ( ) : try : value = row [ idx ] except IndexError : value = """" result = test ( value ) if self . any_match : if result : return not self . inverse else : return self . inverse if self . any_match : return self . inverse else : return not self . inverse",if not result :,149 1714,"def _validate_scalar_extensions(self) -> List[str]: errors = [] for extension in [ x for x in self.extensions if isinstance(x, GraphQLScalarTypeExtension) ]: extended = self.type_definitions.get(extension.name) ext_errors = _validate_extension( extended, extension.name, GraphQLScalarType, ""SCALAR"" ) errors.extend(ext_errors) if not ext_errors: errors.extend(_validate_extension_directives(extension, extended, ""SCALAR"")) return errors ","def _validate_scalar_extensions ( self ) -> List [ str ] : errors = [ ] for extension in [ x for x in self . extensions if isinstance ( x , GraphQLScalarTypeExtension ) ] : extended = self . type_definitions . get ( extension . name ) ext_errors = _validate_extension ( extended , extension . name , GraphQLScalarType , ""SCALAR"" ) errors . extend ( ext_errors ) errors . extend ( _validate_extension_directives ( extension , extended , ""SCALAR"" ) ) return errors",if not ext_errors :,149 11775,"def call(monad, *args): for arg, name in izip(args, (""hour"", ""minute"", ""second"", ""microsecond"")): if not isinstance(arg, NumericMixin) or arg.type is not int: throw( TypeError, ""'%s' argument of time(...) function must be of 'int' type. 
Got: %r"" % (name, type2str(arg.type)), ) if not isinstance(arg, ConstMonad): throw(NotImplementedError) return ConstMonad.new(time(*tuple(arg.value for arg in args)))","def call ( monad , * args ) : for arg , name in izip ( args , ( ""hour"" , ""minute"" , ""second"" , ""microsecond"" ) ) : if not isinstance ( arg , NumericMixin ) or arg . type is not int : throw ( TypeError , ""'%s' argument of time(...) function must be of 'int' type. Got: %r"" % ( name , type2str ( arg . type ) ) , ) throw ( NotImplementedError ) return ConstMonad . new ( time ( * tuple ( arg . value for arg in args ) ) )","if not isinstance ( arg , ConstMonad ) :",157 24737,"def get_config(): try: config_str = config.get(""plugins"", ""equalizer_levels"", ""[]"") config_dict = ast.literal_eval(config_str) if isinstance(config_dict, list): print_w(""Converting old EQ config to new format."") config_dict = {""Current"": config_dict} if not isinstance(config_dict, dict): raise ValueError(""Saved config is of wrong type."") if not ""Current"" in config_dict.keys(): raise ValueError(""Saved config was malformed."") # Run through the values to check everything is of correct type. for key in config_dict.keys(): [float(s) for s in config_dict[key]] return config_dict except (config.Error, ValueError) as e: print_e(str(e)) return {""Current"": []} ","def get_config ( ) : try : config_str = config . get ( ""plugins"" , ""equalizer_levels"" , ""[]"" ) config_dict = ast . literal_eval ( config_str ) if isinstance ( config_dict , list ) : print_w ( ""Converting old EQ config to new format."" ) config_dict = { ""Current"" : config_dict } if not isinstance ( config_dict , dict ) : raise ValueError ( ""Saved config is of wrong type."" ) raise ValueError ( ""Saved config was malformed."" ) for key in config_dict . keys ( ) : [ float ( s ) for s in config_dict [ key ] ] return config_dict except ( config . Error , ValueError ) as e : print_e ( str ( e ) ) return { ""Current"" : [ ] }","if not ""Current"" in config_dict . keys ( ) :",231 25172,"def _parse(self, contents): entries = [] for line in contents.splitlines(): if not len(line.strip()): entries.append((""blank"", [line])) continue (head, tail) = chop_comment(line.strip(), ""#"") if not len(head): entries.append((""all_comment"", [line])) continue entries.append((""option"", [head.split(None), tail])) return entries ","def _parse ( self , contents ) : entries = [ ] for line in contents . splitlines ( ) : entries . append ( ( ""blank"" , [ line ] ) ) continue ( head , tail ) = chop_comment ( line . strip ( ) , ""#"" ) if not len ( head ) : entries . append ( ( ""all_comment"" , [ line ] ) ) continue entries . append ( ( ""option"" , [ head . split ( None ) , tail ] ) ) return entries",if not len ( line . strip ( ) ) :,121 14036,"def _brush_modified_cb(self, settings): """"""Updates the brush's base setting adjustments on brush changes"""""" for cname in settings: adj = self.brush_adjustment.get(cname, None) if adj is None: continue value = self.brush.get_base_value(cname) adj.set_value(value) ","def _brush_modified_cb ( self , settings ) : """"""Updates the brush's base setting adjustments on brush changes"""""" for cname in settings : adj = self . brush_adjustment . get ( cname , None ) continue value = self . brush . get_base_value ( cname ) adj . 
set_value ( value )",if adj is None :,92 324,"def upgrade(migrate_engine): print(__doc__) metadata.bind = migrate_engine liftoverjobs = dict() jobs = context.query(DeferredJob).filter_by(plugin=""LiftOverTransferPlugin"").all() for job in jobs: if job.params[""parentjob""] not in liftoverjobs: liftoverjobs[job.params[""parentjob""]] = [] liftoverjobs[job.params[""parentjob""]].append(job.id) for parent in liftoverjobs: lifts = liftoverjobs[parent] deferred = context.query(DeferredJob).filter_by(id=parent).first() deferred.params[""liftover""] = lifts context.flush() ","def upgrade ( migrate_engine ) : print ( __doc__ ) metadata . bind = migrate_engine liftoverjobs = dict ( ) jobs = context . query ( DeferredJob ) . filter_by ( plugin = ""LiftOverTransferPlugin"" ) . all ( ) for job in jobs : liftoverjobs [ job . params [ ""parentjob"" ] ] = [ ] liftoverjobs [ job . params [ ""parentjob"" ] ] . append ( job . id ) for parent in liftoverjobs : lifts = liftoverjobs [ parent ] deferred = context . query ( DeferredJob ) . filter_by ( id = parent ) . first ( ) deferred . params [ ""liftover"" ] = lifts context . flush ( )","if job . params [ ""parentjob"" ] not in liftoverjobs :",182 8891,"def bump_version(bump_type): """"""Bumps version to the next release, or development version."""""" cur_ver = _get_version() click.echo(""current version: %s"" % cur_ver) ver_split = cur_ver.split(""."") if ""dev"" in ver_split[-1]: if bump_type == ""dev"": # If this is already a development version, increment the dev count by 1 ver_split[-1] = ""dev%d"" % (int(ver_split[-1].strip(""dev"") or 0) + 1) else: # Just strip off dev tag for next release version ver_split = ver_split[:-1] else: # Increment the revision number by one if len(ver_split) == 2: # We don't have a revision number, assume 0 ver_split.append(""1"") else: if ""b"" in ver_split[2]: # beta version minor, beta = ver_split[-1].split(""b"") ver_split[-1] = ""%sb%s"" % (minor, int(beta) + 1) else: ver_split[-1] = str(int(ver_split[-1]) + 1) if bump_type == ""dev"": ver_split.append(""dev"") new_version = ""."".join(ver_split) for line in fileinput.FileInput(""flexget/_version.py"", inplace=1): if line.startswith(""__version__ =""): line = ""__version__ = '%s'\n"" % new_version print(line, end="""") click.echo(""new version: %s"" % new_version)","def bump_version ( bump_type ) : """"""Bumps version to the next release, or development version."""""" cur_ver = _get_version ( ) click . echo ( ""current version: %s"" % cur_ver ) ver_split = cur_ver . split ( ""."" ) if ""dev"" in ver_split [ - 1 ] : if bump_type == ""dev"" : ver_split [ - 1 ] = ""dev%d"" % ( int ( ver_split [ - 1 ] . strip ( ""dev"" ) or 0 ) + 1 ) else : ver_split = ver_split [ : - 1 ] else : ver_split . append ( ""1"" ) else : if ""b"" in ver_split [ 2 ] : minor , beta = ver_split [ - 1 ] . split ( ""b"" ) ver_split [ - 1 ] = ""%sb%s"" % ( minor , int ( beta ) + 1 ) else : ver_split [ - 1 ] = str ( int ( ver_split [ - 1 ] ) + 1 ) if bump_type == ""dev"" : ver_split . append ( ""dev"" ) new_version = ""."" . join ( ver_split ) for line in fileinput . FileInput ( ""flexget/_version.py"" , inplace = 1 ) : if line . startswith ( ""__version__ ="" ) : line = ""__version__ = '%s'\n"" % new_version print ( line , end = """" ) click . 
echo ( ""new version: %s"" % new_version )",if len ( ver_split ) == 2 :,453 19054,"def __find_smallest(self): """"""Find the smallest uncovered value in the matrix."""""" minval = sys.maxsize for i in range(self.n): for j in range(self.n): if (not self.row_covered[i]) and (not self.col_covered[j]): if minval > self.C[i][j]: minval = self.C[i][j] return minval ","def __find_smallest ( self ) : """"""Find the smallest uncovered value in the matrix."""""" minval = sys . maxsize for i in range ( self . n ) : for j in range ( self . n ) : if ( not self . row_covered [ i ] ) and ( not self . col_covered [ j ] ) : minval = self . C [ i ] [ j ] return minval",if minval > self . C [ i ] [ j ] :,114 16481,"def git_branch_for_post(self, path, interactive=False): if path is None: return None if path in self.git_local_branches: return self.git_branch(path) branches = [] for branch in self.git_local_branches: if path in self.git_local_posts(branches=[branch]): branches.append(branch) if len(branches) == 0: if path in self.dir(): return self.git_branch(self.config.published_branch) return None if len(branches) == 1: return self.git_branch(branches[0]) # Deal with ambiguity if interactive: print(""There are multiple branches for post '{}'."".format(path)) for i, branch in enumerate(branches): print(""{}. {}"".format(i, branch)) response = None while not isinstance(response, int): response = input(""Please select the branch you would like to use: "") try: response = int(response) except: response = None else: response = 0 return self.git_branch(branches[response])","def git_branch_for_post ( self , path , interactive = False ) : if path is None : return None if path in self . git_local_branches : return self . git_branch ( path ) branches = [ ] for branch in self . git_local_branches : branches . append ( branch ) if len ( branches ) == 0 : if path in self . dir ( ) : return self . git_branch ( self . config . published_branch ) return None if len ( branches ) == 1 : return self . git_branch ( branches [ 0 ] ) if interactive : print ( ""There are multiple branches for post '{}'."" . format ( path ) ) for i , branch in enumerate ( branches ) : print ( ""{}. {}"" . format ( i , branch ) ) response = None while not isinstance ( response , int ) : response = input ( ""Please select the branch you would like to use: "" ) try : response = int ( response ) except : response = None else : response = 0 return self . git_branch ( branches [ response ] )",if path in self . git_local_posts ( branches = [ branch ] ) :,312 6549,"def update_brush(self, *args): with self.output: if not self.brush.brushing: # if we ended brushing, reset it self.figure.interaction = None if self.brush.selected is not None: (x1, y1), (x2, y2) = self.brush.selected mode = self.modes_names[ self.modes_labels.index(self.button_selection_mode.value) ] self.plot.select_rectangle(x1, y1, x2, y2, mode=mode) else: self.dataset.select_nothing() if ( not self.brush.brushing ): # but then put it back again so the rectangle is gone, self.figure.interaction = self.brush","def update_brush ( self , * args ) : with self . output : if not self . brush . brushing : self . figure . interaction = None ( x1 , y1 ) , ( x2 , y2 ) = self . brush . selected mode = self . modes_names [ self . modes_labels . index ( self . button_selection_mode . value ) ] self . plot . select_rectangle ( x1 , y1 , x2 , y2 , mode = mode ) else : self . dataset . select_nothing ( ) if ( not self . brush . brushing ) : self . figure . interaction = self . brush",if self . brush . 
selected is not None :,220 15689,"def check(self, check_all=False, do_reload=True): """"""Check whether some modules need to be reloaded."""""" if not self.enabled and not check_all: return if check_all or self.check_all: modules = list(sys.modules.keys()) else: modules = list(self.modules.keys()) for modname in modules: m = sys.modules.get(modname, None) if modname in self.skip_modules: continue py_filename, pymtime = self.filename_and_mtime(m) if py_filename is None: continue try: if pymtime <= self.modules_mtimes[modname]: continue except KeyError: self.modules_mtimes[modname] = pymtime continue else: if self.failed.get(py_filename, None) == pymtime: continue self.modules_mtimes[modname] = pymtime # If we've reached this point, we should try to reload the module if do_reload: try: superreload(m, reload, self.old_objects) if py_filename in self.failed: del self.failed[py_filename] except: print( ""[autoreload of %s failed: %s]"" % (modname, traceback.format_exc(1)), file=sys.stderr, ) self.failed[py_filename] = pymtime","def check ( self , check_all = False , do_reload = True ) : """"""Check whether some modules need to be reloaded."""""" if not self . enabled and not check_all : return if check_all or self . check_all : modules = list ( sys . modules . keys ( ) ) else : modules = list ( self . modules . keys ( ) ) for modname in modules : m = sys . modules . get ( modname , None ) if modname in self . skip_modules : continue py_filename , pymtime = self . filename_and_mtime ( m ) if py_filename is None : continue try : if pymtime <= self . modules_mtimes [ modname ] : continue except KeyError : self . modules_mtimes [ modname ] = pymtime continue else : if self . failed . get ( py_filename , None ) == pymtime : continue self . modules_mtimes [ modname ] = pymtime try : superreload ( m , reload , self . old_objects ) if py_filename in self . failed : del self . failed [ py_filename ] except : print ( ""[autoreload of %s failed: %s]"" % ( modname , traceback . format_exc ( 1 ) ) , file = sys . stderr , ) self . failed [ py_filename ] = pymtime",if do_reload :,423 18163,"def viewresult_to_response(context, request): result = view(context, request) if result.__class__ is Response: # common case response = result else: response = info.registry.queryAdapterOrSelf(result, IResponse) if response is None: if result is None: append = ( "" You may have forgotten to return a value "" ""from the view callable."" ) elif isinstance(result, dict): append = ( "" You may have forgotten to define a "" ""renderer in the view configuration."" ) else: append = """" msg = ( ""Could not convert return value of the view "" ""callable %s into a response object. "" ""The value returned was %r."" + append ) raise ValueError(msg % (view_description(view), result)) return response ","def viewresult_to_response ( context , request ) : result = view ( context , request ) if result . __class__ is Response : response = result else : response = info . registry . queryAdapterOrSelf ( result , IResponse ) if response is None : append = ( "" You may have forgotten to return a value "" ""from the view callable."" ) elif isinstance ( result , dict ) : append = ( "" You may have forgotten to define a "" ""renderer in the view configuration."" ) else : append = """" msg = ( ""Could not convert return value of the view "" ""callable %s into a response object. 
"" ""The value returned was %r."" + append ) raise ValueError ( msg % ( view_description ( view ) , result ) ) return response",if result is None :,273 7134,"def put( self, value: V = None, key: K = None, partition: Optional[int] = None, timestamp: Optional[float] = None, headers: HeadersArg = None, key_serializer: CodecArg = None, value_serializer: CodecArg = None, *, reply_to: ReplyToArg = None, correlation_id: str = None, wait: bool = True ) -> EventT: if reply_to: value, headers = self._create_req(key, value, reply_to, correlation_id, headers) channel = cast(ChannelT, self.stream().channel) message = self.to_message( key, value, partition=partition, offset=self.sent_offset, timestamp=timestamp, headers=headers, ) event: EventT = await channel.decode(message) await channel.put(event) self.sent_offset += 1 if wait: async with self.new_value_processed: await self.new_value_processed.wait() if self._crash_reason: raise self._crash_reason from self._crash_reason return event","def put ( self , value : V = None , key : K = None , partition : Optional [ int ] = None , timestamp : Optional [ float ] = None , headers : HeadersArg = None , key_serializer : CodecArg = None , value_serializer : CodecArg = None , * , reply_to : ReplyToArg = None , correlation_id : str = None , wait : bool = True ) -> EventT : if reply_to : value , headers = self . _create_req ( key , value , reply_to , correlation_id , headers ) channel = cast ( ChannelT , self . stream ( ) . channel ) message = self . to_message ( key , value , partition = partition , offset = self . sent_offset , timestamp = timestamp , headers = headers , ) event : EventT = await channel . decode ( message ) await channel . put ( event ) self . sent_offset += 1 if wait : async with self . new_value_processed : await self . new_value_processed . wait ( ) raise self . _crash_reason from self . _crash_reason return event",if self . _crash_reason :,318 4316,"def __setattr__(self, name: str, val: Any): if name.startswith(""COMPUTED_""): if name in self: old_val = self[name] if old_val == val: return raise KeyError( ""Computed attributed '{}' already exists "" ""with a different value! old={}, new={}."".format(name, old_val, val) ) self[name] = val else: super().__setattr__(name, val) ","def __setattr__ ( self , name : str , val : Any ) : if name . startswith ( ""COMPUTED_"" ) : old_val = self [ name ] if old_val == val : return raise KeyError ( ""Computed attributed '{}' already exists "" ""with a different value! old={}, new={}."" . format ( name , old_val , val ) ) self [ name ] = val else : super ( ) . __setattr__ ( name , val )",if name in self :,137 14754,"def _try_parser(self, parse_method): _order = self._settings.DATE_ORDER try: if self._settings.PREFER_LOCALE_DATE_ORDER: if ""DATE_ORDER"" not in self._settings._mod_settings: self._settings.DATE_ORDER = self.locale.info.get(""date_order"", _order) date_obj, period = date_parser.parse( self._get_translated_date(), parse_method=parse_method, settings=self._settings, ) self._settings.DATE_ORDER = _order return DateData( date_obj=date_obj, period=period, ) except ValueError: self._settings.DATE_ORDER = _order return None ","def _try_parser ( self , parse_method ) : _order = self . _settings . DATE_ORDER try : if ""DATE_ORDER"" not in self . _settings . _mod_settings : self . _settings . DATE_ORDER = self . locale . info . get ( ""date_order"" , _order ) date_obj , period = date_parser . parse ( self . _get_translated_date ( ) , parse_method = parse_method , settings = self . _settings , ) self . 
_settings . DATE_ORDER = _order return DateData ( date_obj = date_obj , period = period , ) except ValueError : self . _settings . DATE_ORDER = _order return None",if self . _settings . PREFER_LOCALE_DATE_ORDER :,214 18728,"def _merge_substs(self, subst, new_substs): subst = subst.copy() for new_subst in new_substs: for name, var in new_subst.items(): if name not in subst: subst[name] = var elif subst[name] is not var: subst[name].PasteVariable(var) return subst ","def _merge_substs ( self , subst , new_substs ) : subst = subst . copy ( ) for new_subst in new_substs : for name , var in new_subst . items ( ) : subst [ name ] = var elif subst [ name ] is not var : subst [ name ] . PasteVariable ( var ) return subst",if name not in subst :,109 17133,"def calculate(self): for task in taskmods.DllList.calculate(self): pid = task.UniqueProcessId if task.ObjectTable.HandleTableList: for handle in task.ObjectTable.handles(): if not handle.is_valid(): continue name = """" object_type = handle.get_object_type() if object_type == ""File"": file_obj = handle.dereference_as(""_FILE_OBJECT"") name = str(file_obj.file_name_with_device()) elif object_type == ""Key"": key_obj = handle.dereference_as(""_CM_KEY_BODY"") name = key_obj.full_key_name() elif object_type == ""Process"": proc_obj = handle.dereference_as(""_EPROCESS"") name = ""{0}({1})"".format( proc_obj.ImageFileName, proc_obj.UniqueProcessId ) elif object_type == ""Thread"": thrd_obj = handle.dereference_as(""_ETHREAD"") name = ""TID {0} PID {1}"".format( thrd_obj.Cid.UniqueThread, thrd_obj.Cid.UniqueProcess ) elif handle.NameInfo.Name == None: name = """" else: name = str(handle.NameInfo.Name) yield pid, handle, object_type, name","def calculate ( self ) : for task in taskmods . DllList . calculate ( self ) : pid = task . UniqueProcessId if task . ObjectTable . HandleTableList : for handle in task . ObjectTable . handles ( ) : if not handle . is_valid ( ) : continue name = """" object_type = handle . get_object_type ( ) if object_type == ""File"" : file_obj = handle . dereference_as ( ""_FILE_OBJECT"" ) name = str ( file_obj . file_name_with_device ( ) ) elif object_type == ""Key"" : key_obj = handle . dereference_as ( ""_CM_KEY_BODY"" ) name = key_obj . full_key_name ( ) proc_obj = handle . dereference_as ( ""_EPROCESS"" ) name = ""{0}({1})"" . format ( proc_obj . ImageFileName , proc_obj . UniqueProcessId ) elif object_type == ""Thread"" : thrd_obj = handle . dereference_as ( ""_ETHREAD"" ) name = ""TID {0} PID {1}"" . format ( thrd_obj . Cid . UniqueThread , thrd_obj . Cid . UniqueProcess ) elif handle . NameInfo . Name == None : name = """" else : name = str ( handle . NameInfo . Name ) yield pid , handle , object_type , name","elif object_type == ""Process"" :",443 22655,"def _maybe_female(self, path_elements, female, strict): if female: if self.has_gender_differences: elements = path_elements + [""female""] try: return self._get_file(elements, "".png"", strict=strict) except ValueError: if strict: raise elif strict: raise ValueError(""Pokemon %s has no gender differences"" % self.species_id) return self._get_file(path_elements, "".png"", strict=strict) ","def _maybe_female ( self , path_elements , female , strict ) : if female : if self . has_gender_differences : elements = path_elements + [ ""female"" ] try : return self . _get_file ( elements , "".png"" , strict = strict ) except ValueError : if strict : raise raise ValueError ( ""Pokemon %s has no gender differences"" % self . species_id ) return self . 
_get_file ( path_elements , "".png"" , strict = strict )",elif strict :,146 11757,"def process_target(decompiler, pos, partial=False): if pos is None: limit = None elif partial: limit = decompiler.targets.get(pos, None) else: limit = decompiler.targets.pop(pos, None) top = decompiler.stack.pop() while True: top = simplify(top) if top is limit: break if isinstance(top, ast.GenExprFor): break if not decompiler.stack: break top2 = decompiler.stack[-1] if isinstance(top2, ast.GenExprFor): break if partial and hasattr(top2, ""endpos"") and top2.endpos == pos: break if isinstance(top2, (ast.And, ast.Or)): if top2.__class__ == top.__class__: top2.nodes.extend(top.nodes) else: top2.nodes.append(top) elif isinstance(top2, ast.IfExp): # Python 2.5 top2.else_ = top if hasattr(top, ""endpos""): top2.endpos = top.endpos if decompiler.targets.get(top.endpos) is top: decompiler.targets[top.endpos] = top2 else: throw( DecompileError( 'Expression is too complex to decompile, try to pass query as string, e.g. select(""x for x in Something"")' ) ) top2.endpos = max(top2.endpos, getattr(top, ""endpos"", 0)) top = decompiler.stack.pop() decompiler.stack.append(top)","def process_target ( decompiler , pos , partial = False ) : if pos is None : limit = None elif partial : limit = decompiler . targets . get ( pos , None ) else : limit = decompiler . targets . pop ( pos , None ) top = decompiler . stack . pop ( ) while True : top = simplify ( top ) if top is limit : break if isinstance ( top , ast . GenExprFor ) : break if not decompiler . stack : break top2 = decompiler . stack [ - 1 ] if isinstance ( top2 , ast . GenExprFor ) : break if partial and hasattr ( top2 , ""endpos"" ) and top2 . endpos == pos : break if isinstance ( top2 , ( ast . And , ast . Or ) ) : if top2 . __class__ == top . __class__ : top2 . nodes . extend ( top . nodes ) else : top2 . nodes . append ( top ) top2 . else_ = top if hasattr ( top , ""endpos"" ) : top2 . endpos = top . endpos if decompiler . targets . get ( top . endpos ) is top : decompiler . targets [ top . endpos ] = top2 else : throw ( DecompileError ( 'Expression is too complex to decompile, try to pass query as string, e.g. select(""x for x in Something"")' ) ) top2 . endpos = max ( top2 . endpos , getattr ( top , ""endpos"" , 0 ) ) top = decompiler . stack . pop ( ) decompiler . stack . append ( top )","elif isinstance ( top2 , ast . IfExp ) :",485 11895,"def test_inkey_0s_raw_ctrl_c(): ""0-second inkey with raw allows receiving ^C."" pid, master_fd = pty.fork() if pid is 0: # child try: cov = __import__(""cov_core_init"").init() except ImportError: cov = None term = TestTerminal() read_until_semaphore(sys.__stdin__.fileno(), semaphore=SEMAPHORE) with term.raw(): os.write(sys.__stdout__.fileno(), RECV_SEMAPHORE) inp = term.inkey(timeout=0) os.write(sys.__stdout__.fileno(), inp.encode(""latin1"")) if cov is not None: cov.stop() cov.save() os._exit(0) with echo_off(master_fd): os.write(master_fd, SEND_SEMAPHORE) # ensure child is in raw mode before sending ^C, read_until_semaphore(master_fd) os.write(master_fd, u""\x03"".encode(""latin1"")) stime = time.time() output = read_until_eof(master_fd) pid, status = os.waitpid(pid, 0) if os.environ.get(""TRAVIS"", None) is not None: # For some reason, setraw has no effect travis-ci, # is still accepts ^C, causing system exit on py26, # but exit 0 on py27, and either way on py33 # .. strange, huh? 
assert output in (u"""", u""\x03"") assert os.WEXITSTATUS(status) in (0, 2) else: assert output == u""\x03"" or output == u"""" and not os.isatty(0) assert os.WEXITSTATUS(status) == 0 assert math.floor(time.time() - stime) == 0.0","def test_inkey_0s_raw_ctrl_c ( ) : ""0-second inkey with raw allows receiving ^C."" pid , master_fd = pty . fork ( ) if pid is 0 : try : cov = __import__ ( ""cov_core_init"" ) . init ( ) except ImportError : cov = None term = TestTerminal ( ) read_until_semaphore ( sys . __stdin__ . fileno ( ) , semaphore = SEMAPHORE ) with term . raw ( ) : os . write ( sys . __stdout__ . fileno ( ) , RECV_SEMAPHORE ) inp = term . inkey ( timeout = 0 ) os . write ( sys . __stdout__ . fileno ( ) , inp . encode ( ""latin1"" ) ) cov . stop ( ) cov . save ( ) os . _exit ( 0 ) with echo_off ( master_fd ) : os . write ( master_fd , SEND_SEMAPHORE ) read_until_semaphore ( master_fd ) os . write ( master_fd , u""\x03"" . encode ( ""latin1"" ) ) stime = time . time ( ) output = read_until_eof ( master_fd ) pid , status = os . waitpid ( pid , 0 ) if os . environ . get ( ""TRAVIS"" , None ) is not None : assert output in ( u"""" , u""\x03"" ) assert os . WEXITSTATUS ( status ) in ( 0 , 2 ) else : assert output == u""\x03"" or output == u"""" and not os . isatty ( 0 ) assert os . WEXITSTATUS ( status ) == 0 assert math . floor ( time . time ( ) - stime ) == 0.0",if cov is not None :,512 10823,"def doLabels(): global difficulty, answers answers = [] for loop in range(10): numa = random.randint(1 * difficulty * 2, 10 * difficulty * 2) numb = random.randint(1 * difficulty * 2, 10 * difficulty * 2) action = random.choice(actions) if action == ""+"": answers.append(numa + numb) elif action == ""-"": answers.append(numa - numb) elif action == ""*"": answers.append(numa * numb) elif action == ""/"": answers.append(numa / numb) lab = str(numa) + "" "" + action + "" "" + str(numb) + "" = "" try: win.addLabel(""l"" + str(loop), lab, 2 + loop, 0) win.addEntry(""l"" + str(loop), 2 + loop, 1) except Exception: win.setLabel(""l"" + str(loop), lab) win.enableEntry(""l"" + str(loop)) win.setEntryBg(""l"" + str(loop), ""white"") win.setEntry(""l"" + str(loop), """")","def doLabels ( ) : global difficulty , answers answers = [ ] for loop in range ( 10 ) : numa = random . randint ( 1 * difficulty * 2 , 10 * difficulty * 2 ) numb = random . randint ( 1 * difficulty * 2 , 10 * difficulty * 2 ) action = random . choice ( actions ) if action == ""+"" : answers . append ( numa + numb ) elif action == ""-"" : answers . append ( numa - numb ) elif action == ""*"" : answers . append ( numa * numb ) answers . append ( numa / numb ) lab = str ( numa ) + "" "" + action + "" "" + str ( numb ) + "" = "" try : win . addLabel ( ""l"" + str ( loop ) , lab , 2 + loop , 0 ) win . addEntry ( ""l"" + str ( loop ) , 2 + loop , 1 ) except Exception : win . setLabel ( ""l"" + str ( loop ) , lab ) win . enableEntry ( ""l"" + str ( loop ) ) win . setEntryBg ( ""l"" + str ( loop ) , ""white"" ) win . 
setEntry ( ""l"" + str ( loop ) , """" )","elif action == ""/"" :",325 18842,"def _convert_timestamp(timestamp, precision=None): if isinstance(timestamp, Integral): return timestamp # assume precision is correct if timestamp is int if isinstance(_get_unicode(timestamp), text_type): timestamp = parse(timestamp) if isinstance(timestamp, datetime): ns = timegm(timestamp.utctimetuple()) * 1e9 + timestamp.microsecond * 1e3 if precision is None or precision == ""n"": return ns elif precision == ""u"": return ns / 1e3 elif precision == ""ms"": return ns / 1e6 elif precision == ""s"": return ns / 1e9 elif precision == ""m"": return ns / 1e9 / 60 elif precision == ""h"": return ns / 1e9 / 3600 raise ValueError(timestamp)","def _convert_timestamp ( timestamp , precision = None ) : if isinstance ( timestamp , Integral ) : return timestamp if isinstance ( _get_unicode ( timestamp ) , text_type ) : timestamp = parse ( timestamp ) if isinstance ( timestamp , datetime ) : ns = timegm ( timestamp . utctimetuple ( ) ) * 1e9 + timestamp . microsecond * 1e3 if precision is None or precision == ""n"" : return ns return ns / 1e3 elif precision == ""ms"" : return ns / 1e6 elif precision == ""s"" : return ns / 1e9 elif precision == ""m"" : return ns / 1e9 / 60 elif precision == ""h"" : return ns / 1e9 / 3600 raise ValueError ( timestamp )","elif precision == ""u"" :",222 17386,"def gotHeaders(self, headers): HTTPClientFactory.gotHeaders(self, headers) if self.requestedPartial: contentRange = headers.get(b""content-range"", None) if not contentRange: # server doesn't support partial requests, oh well self.requestedPartial = 0 return start, end, realLength = http.parseContentRange(contentRange[0]) if start != self.requestedPartial: # server is acting weirdly self.requestedPartial = 0","def gotHeaders ( self , headers ) : HTTPClientFactory . gotHeaders ( self , headers ) if self . requestedPartial : contentRange = headers . get ( b""content-range"" , None ) self . requestedPartial = 0 return start , end , realLength = http . parseContentRange ( contentRange [ 0 ] ) if start != self . requestedPartial : self . requestedPartial = 0",if not contentRange :,138 15378,"def _strip_extras_markers(marker): # type: (Union[MarkerTuple, List[Union[MarkerTuple, str]]]) -> List[Union[MarkerTuple, str]] if marker is None or not isinstance(marker, (list, tuple)): raise TypeError(""Expecting a marker type, received {0!r}"".format(marker)) markers_to_remove = [] # iterate forwards and generate a list of indexes to remove first, then reverse the # list so we can remove the text that normally occurs after (but we will already # be past it in the loop) for i, marker_list in enumerate(marker): if isinstance(marker_list, list): cleaned = _strip_extras_markers(marker_list) if not cleaned: markers_to_remove.append(i) elif isinstance(marker_list, tuple) and marker_list[0].value == ""extra"": markers_to_remove.append(i) for i in reversed(markers_to_remove): del marker[i] if i > 0 and marker[i - 1] == ""and"": del marker[i - 1] return marker","def _strip_extras_markers ( marker ) : if marker is None or not isinstance ( marker , ( list , tuple ) ) : raise TypeError ( ""Expecting a marker type, received {0!r}"" . format ( marker ) ) markers_to_remove = [ ] for i , marker_list in enumerate ( marker ) : if isinstance ( marker_list , list ) : cleaned = _strip_extras_markers ( marker_list ) markers_to_remove . append ( i ) elif isinstance ( marker_list , tuple ) and marker_list [ 0 ] . value == ""extra"" : markers_to_remove . 
append ( i ) for i in reversed ( markers_to_remove ) : del marker [ i ] if i > 0 and marker [ i - 1 ] == ""and"" : del marker [ i - 1 ] return marker",if not cleaned :,292 20728,"def updateStats(self, stats): stats[""global""][""day""] = date.fromordinal(stats[""global""][""day""]) self.applyDict(self.deck._globalStats, stats[""global""]) self.deck._globalStats.toDB(self.deck.s) for record in stats[""daily""]: record[""day""] = date.fromordinal(record[""day""]) stat = Stats() id = self.deck.s.scalar( ""select id from stats where "" ""type = :type and day = :day"", type=1, day=record[""day""], ) if id: stat.fromDB(self.deck.s, id) else: stat.create(self.deck.s, 1, record[""day""]) self.applyDict(stat, record) stat.toDB(self.deck.s)","def updateStats ( self , stats ) : stats [ ""global"" ] [ ""day"" ] = date . fromordinal ( stats [ ""global"" ] [ ""day"" ] ) self . applyDict ( self . deck . _globalStats , stats [ ""global"" ] ) self . deck . _globalStats . toDB ( self . deck . s ) for record in stats [ ""daily"" ] : record [ ""day"" ] = date . fromordinal ( record [ ""day"" ] ) stat = Stats ( ) id = self . deck . s . scalar ( ""select id from stats where "" ""type = :type and day = :day"" , type = 1 , day = record [ ""day"" ] , ) stat . fromDB ( self . deck . s , id ) else : stat . create ( self . deck . s , 1 , record [ ""day"" ] ) self . applyDict ( stat , record ) stat . toDB ( self . deck . s )",if id :,236 23413,"def rotate_selected(self): opts = self.rotate_selected_opts if self.actions.pressed(opts[""move_done_pressed""]): return ""main"" if self.actions.released(opts[""move_done_released""]): return ""main"" if self.actions.pressed(opts[""move_cancelled""]): self.undo_cancel() return ""main"" if (self.actions.mouse - opts[""mouselast""]).length == 0: return if time.time() < opts[""lasttime""] + 0.05: return opts[""mouselast""] = self.actions.mouse opts[""lasttime""] = time.time() delta = Direction2D(self.actions.mouse - opts[""center""]) dx, dy = opts[""rotate_x""].dot(delta), opts[""rotate_y""].dot(delta) theta = math.atan2(dy, dx) set2D_vert = self.set2D_vert for bmv, xy in opts[""bmverts""]: if not bmv.is_valid: continue dxy = xy - opts[""center""] nx = dxy.x * math.cos(theta) - dxy.y * math.sin(theta) ny = dxy.x * math.sin(theta) + dxy.y * math.cos(theta) nxy = Point2D((nx, ny)) + opts[""center""] set2D_vert(bmv, nxy) self.update_verts_faces(v for v, _ in opts[""bmverts""]) self.dirty()","def rotate_selected ( self ) : opts = self . rotate_selected_opts if self . actions . pressed ( opts [ ""move_done_pressed"" ] ) : return ""main"" if self . actions . released ( opts [ ""move_done_released"" ] ) : return ""main"" if self . actions . pressed ( opts [ ""move_cancelled"" ] ) : self . undo_cancel ( ) return ""main"" if ( self . actions . mouse - opts [ ""mouselast"" ] ) . length == 0 : return if time . time ( ) < opts [ ""lasttime"" ] + 0.05 : return opts [ ""mouselast"" ] = self . actions . mouse opts [ ""lasttime"" ] = time . time ( ) delta = Direction2D ( self . actions . mouse - opts [ ""center"" ] ) dx , dy = opts [ ""rotate_x"" ] . dot ( delta ) , opts [ ""rotate_y"" ] . dot ( delta ) theta = math . atan2 ( dy , dx ) set2D_vert = self . set2D_vert for bmv , xy in opts [ ""bmverts"" ] : continue dxy = xy - opts [ ""center"" ] nx = dxy . x * math . cos ( theta ) - dxy . y * math . sin ( theta ) ny = dxy . x * math . sin ( theta ) + dxy . y * math . cos ( theta ) nxy = Point2D ( ( nx , ny ) ) + opts [ ""center"" ] set2D_vert ( bmv , nxy ) self . 
update_verts_faces ( v for v , _ in opts [ ""bmverts"" ] ) self . dirty ( )",if not bmv . is_valid :,405 15036,"def _cache_mem(curr_out, prev_mem, mem_len, reuse_len=None): """"""cache hidden states into memory."""""" if mem_len is None or mem_len == 0: return None else: if reuse_len is not None and reuse_len > 0: curr_out = curr_out[:reuse_len] if prev_mem is None: new_mem = curr_out[-mem_len:] else: new_mem = tf.concat([prev_mem, curr_out], 0)[-mem_len:] return tf.keras.backend.stop_gradient(new_mem)","def _cache_mem ( curr_out , prev_mem , mem_len , reuse_len = None ) : """"""cache hidden states into memory."""""" if mem_len is None or mem_len == 0 : return None else : if reuse_len is not None and reuse_len > 0 : curr_out = curr_out [ : reuse_len ] new_mem = curr_out [ - mem_len : ] else : new_mem = tf . concat ( [ prev_mem , curr_out ] , 0 ) [ - mem_len : ] return tf . keras . backend . stop_gradient ( new_mem )",if prev_mem is None :,165 4792,"def data_download(chunksize, filename, dataset, name): # Setup the API connection key = get_api_key() api = shodan.Shodan(key) # Get the file object that the user requested which will contain the URL and total file size file = None try: files = api.data.list_files(dataset) for tmp in files: if tmp[""name""] == name: file = tmp break except shodan.APIError as e: raise click.ClickException(e.value) # The file isn't available if not file: raise click.ClickException(""File not found"") # Start downloading the file response = requests.get(file[""url""], stream=True) # Figure out the size of the file based on the headers filesize = response.headers.get(""content-length"", None) if not filesize: # Fall back to using the filesize provided by the API filesize = file[""size""] else: filesize = int(filesize) chunk_size = 1024 limit = filesize / chunk_size # Create a default filename based on the dataset and the filename within that dataset if not filename: filename = ""{}-{}"".format(dataset, name) # Open the output file and start writing to it in chunks with open(filename, ""wb"") as fout: with click.progressbar( response.iter_content(chunk_size=chunk_size), length=limit ) as bar: for chunk in bar: if chunk: fout.write(chunk) click.echo(click.style(""Download completed: {}"".format(filename), ""green""))","def data_download ( chunksize , filename , dataset , name ) : key = get_api_key ( ) api = shodan . Shodan ( key ) file = None try : files = api . data . list_files ( dataset ) for tmp in files : file = tmp break except shodan . APIError as e : raise click . ClickException ( e . value ) if not file : raise click . ClickException ( ""File not found"" ) response = requests . get ( file [ ""url"" ] , stream = True ) filesize = response . headers . get ( ""content-length"" , None ) if not filesize : filesize = file [ ""size"" ] else : filesize = int ( filesize ) chunk_size = 1024 limit = filesize / chunk_size if not filename : filename = ""{}-{}"" . format ( dataset , name ) with open ( filename , ""wb"" ) as fout : with click . progressbar ( response . iter_content ( chunk_size = chunk_size ) , length = limit ) as bar : for chunk in bar : if chunk : fout . write ( chunk ) click . echo ( click . style ( ""Download completed: {}"" . 
format ( filename ) , ""green"" ) )","if tmp [ ""name"" ] == name :",436 21226,"def build(opt): dpath = os.path.join(opt[""datapath""], ""HotpotQA"") if not build_data.built(dpath, version_string=VERSION): print(""[building data: "" + dpath + ""]"") if build_data.built(dpath): # An older version exists, so remove these outdated files. build_data.remove_dir(dpath) build_data.make_dir(dpath) # Download the data. for downloadable_file in RESOURCES: downloadable_file.download_file(dpath) with PathManager.open(os.path.join(dpath, TRAIN_FILENAME)) as f: data = json.load(f) make_parlai_format(dpath, ""train"", data) with PathManager.open(os.path.join(dpath, DEV_DISTRACTOR_FILENAME)) as f: data = json.load(f) make_parlai_format(dpath, ""valid_distractor"", data) with PathManager.open(os.path.join(dpath, DEV_FULLWIKI_FILENAME)) as f: data = json.load(f) make_parlai_format(dpath, ""valid_fullwiki"", data) # Mark the data as built. build_data.mark_done(dpath, version_string=VERSION) ","def build ( opt ) : dpath = os . path . join ( opt [ ""datapath"" ] , ""HotpotQA"" ) if not build_data . built ( dpath , version_string = VERSION ) : print ( ""[building data: "" + dpath + ""]"" ) build_data . remove_dir ( dpath ) build_data . make_dir ( dpath ) for downloadable_file in RESOURCES : downloadable_file . download_file ( dpath ) with PathManager . open ( os . path . join ( dpath , TRAIN_FILENAME ) ) as f : data = json . load ( f ) make_parlai_format ( dpath , ""train"" , data ) with PathManager . open ( os . path . join ( dpath , DEV_DISTRACTOR_FILENAME ) ) as f : data = json . load ( f ) make_parlai_format ( dpath , ""valid_distractor"" , data ) with PathManager . open ( os . path . join ( dpath , DEV_FULLWIKI_FILENAME ) ) as f : data = json . load ( f ) make_parlai_format ( dpath , ""valid_fullwiki"" , data ) build_data . mark_done ( dpath , version_string = VERSION )",if build_data . built ( dpath ) :,360 1916,"def scanvars(reader, frame, locals): """"""Scan one logical line of Python and look up values of variables used."""""" vars, lasttoken, parent, prefix, value = [], None, None, """", __UNDEF__ for ttype, token, start, end, line in tokenize.generate_tokens(reader): if ttype == tokenize.NEWLINE: break if ttype == tokenize.NAME and token not in keyword.kwlist: if lasttoken == ""."": if parent is not __UNDEF__: value = getattr(parent, token, __UNDEF__) vars.append((prefix + token, prefix, value)) else: where, value = lookup(token, frame, locals) vars.append((token, where, value)) elif token == ""."": prefix += lasttoken + ""."" parent = value else: parent, prefix = None, """" lasttoken = token return vars ","def scanvars ( reader , frame , locals ) : """"""Scan one logical line of Python and look up values of variables used."""""" vars , lasttoken , parent , prefix , value = [ ] , None , None , """" , __UNDEF__ for ttype , token , start , end , line in tokenize . generate_tokens ( reader ) : break if ttype == tokenize . NAME and token not in keyword . kwlist : if lasttoken == ""."" : if parent is not __UNDEF__ : value = getattr ( parent , token , __UNDEF__ ) vars . append ( ( prefix + token , prefix , value ) ) else : where , value = lookup ( token , frame , locals ) vars . append ( ( token , where , value ) ) elif token == ""."" : prefix += lasttoken + ""."" parent = value else : parent , prefix = None , """" lasttoken = token return vars",if ttype == tokenize . 
NEWLINE :,252 1700,"def queue_viewing(request): addon_ids = request.GET.get(""addon_ids"") if not addon_ids: return {} viewing = {} user_id = request.user.id for addon_id in addon_ids.split("",""): addon_id = addon_id.strip() key = get_reviewing_cache_key(addon_id) currently_viewing = cache.get(key) if currently_viewing and currently_viewing != user_id: viewing[addon_id] = UserProfile.objects.get(id=currently_viewing).name return viewing ","def queue_viewing ( request ) : addon_ids = request . GET . get ( ""addon_ids"" ) if not addon_ids : return { } viewing = { } user_id = request . user . id for addon_id in addon_ids . split ( "","" ) : addon_id = addon_id . strip ( ) key = get_reviewing_cache_key ( addon_id ) currently_viewing = cache . get ( key ) viewing [ addon_id ] = UserProfile . objects . get ( id = currently_viewing ) . name return viewing",if currently_viewing and currently_viewing != user_id :,165 11748,"def decompile(decompiler): for pos, next_pos, opname, arg in decompiler.instructions: if pos in decompiler.targets: decompiler.process_target(pos) method = getattr(decompiler, opname, None) if method is None: throw(DecompileError(""Unsupported operation: %s"" % opname)) decompiler.pos = pos decompiler.next_pos = next_pos x = method(*arg) if x is not None: decompiler.stack.append(x) ","def decompile ( decompiler ) : for pos , next_pos , opname , arg in decompiler . instructions : if pos in decompiler . targets : decompiler . process_target ( pos ) method = getattr ( decompiler , opname , None ) throw ( DecompileError ( ""Unsupported operation: %s"" % opname ) ) decompiler . pos = pos decompiler . next_pos = next_pos x = method ( * arg ) if x is not None : decompiler . stack . append ( x )",if method is None :,143 22023,"def add_directive(self, name, obj, content=None, arguments=None, **options): if isinstance(obj, clstypes) and issubclass(obj, Directive): if content or arguments or options: raise ExtensionError( ""when adding directive classes, no "" ""additional arguments may be given"" ) directives.register_directive(name, directive_dwim(obj)) else: obj.content = content obj.arguments = arguments obj.options = options directives.register_directive(name, obj) ","def add_directive ( self , name , obj , content = None , arguments = None , ** options ) : if isinstance ( obj , clstypes ) and issubclass ( obj , Directive ) : raise ExtensionError ( ""when adding directive classes, no "" ""additional arguments may be given"" ) directives . register_directive ( name , directive_dwim ( obj ) ) else : obj . content = content obj . arguments = arguments obj . options = options directives . register_directive ( name , obj )",if content or arguments or options :,144 9120,"def discover(self, *objlist): ret = [] for l in self.splitlines(): if len(l) < 5: continue if l[0] == ""Filename"": continue try: int(l[2]) int(l[3]) except: continue # ret.append(improve(l[0])) ret.append(l[0]) ret.sort() for item in objlist: ret.append(item) return ret","def discover ( self , * objlist ) : ret = [ ] for l in self . splitlines ( ) : if len ( l ) < 5 : continue continue try : int ( l [ 2 ] ) int ( l [ 3 ] ) except : continue ret . append ( l [ 0 ] ) ret . sort ( ) for item in objlist : ret . 
append ( item ) return ret","if l [ 0 ] == ""Filename"" :",154 2570,"def pop(self): if not HAS_SQL: return None tries = 3 wait = 0.1 try: conn, c = self.connect() except sqlite3.Error: log.traceback(logging.DEBUG) return None heartbeat = None loop = True while loop and tries > -1: try: c.execute(""BEGIN IMMEDIATE"") c.execute(""SELECT * FROM {0} LIMIT 1"".format(self.table_name)) row = c.fetchone() if row is not None: id = row[0] heartbeat = Heartbeat( json.loads(row[1]), self.args, self.configs, _clone=True ) c.execute(""DELETE FROM {0} WHERE id=?"".format(self.table_name), [id]) conn.commit() loop = False except sqlite3.Error: log.traceback(logging.DEBUG) sleep(wait) tries -= 1 try: conn.close() except sqlite3.Error: log.traceback(logging.DEBUG) return heartbeat","def pop ( self ) : if not HAS_SQL : return None tries = 3 wait = 0.1 try : conn , c = self . connect ( ) except sqlite3 . Error : log . traceback ( logging . DEBUG ) return None heartbeat = None loop = True while loop and tries > - 1 : try : c . execute ( ""BEGIN IMMEDIATE"" ) c . execute ( ""SELECT * FROM {0} LIMIT 1"" . format ( self . table_name ) ) row = c . fetchone ( ) id = row [ 0 ] heartbeat = Heartbeat ( json . loads ( row [ 1 ] ) , self . args , self . configs , _clone = True ) c . execute ( ""DELETE FROM {0} WHERE id=?"" . format ( self . table_name ) , [ id ] ) conn . commit ( ) loop = False except sqlite3 . Error : log . traceback ( logging . DEBUG ) sleep ( wait ) tries -= 1 try : conn . close ( ) except sqlite3 . Error : log . traceback ( logging . DEBUG ) return heartbeat",if row is not None :,320 15791,"def _translate_bboxes(self, results, offset): """"""Shift bboxes horizontally or vertically, according to offset."""""" h, w, c = results[""img_shape""] for key in results.get(""bbox_fields"", []): min_x, min_y, max_x, max_y = np.split( results[key], results[key].shape[-1], axis=-1 ) if self.direction == ""horizontal"": min_x = np.maximum(0, min_x + offset) max_x = np.minimum(w, max_x + offset) elif self.direction == ""vertical"": min_y = np.maximum(0, min_y + offset) max_y = np.minimum(h, max_y + offset) # the boxs translated outside of image will be filtered along with # the corresponding masks, by invoking ``_filter_invalid``. results[key] = np.concatenate([min_x, min_y, max_x, max_y], axis=-1)","def _translate_bboxes ( self , results , offset ) : """"""Shift bboxes horizontally or vertically, according to offset."""""" h , w , c = results [ ""img_shape"" ] for key in results . get ( ""bbox_fields"" , [ ] ) : min_x , min_y , max_x , max_y = np . split ( results [ key ] , results [ key ] . shape [ - 1 ] , axis = - 1 ) if self . direction == ""horizontal"" : min_x = np . maximum ( 0 , min_x + offset ) max_x = np . minimum ( w , max_x + offset ) min_y = np . maximum ( 0 , min_y + offset ) max_y = np . minimum ( h , max_y + offset ) results [ key ] = np . concatenate ( [ min_x , min_y , max_x , max_y ] , axis = - 1 )","elif self . 
direction == ""vertical"" :",267 10115,"def runScripts(self): pythonCmd = shutil.which(""python3"") if pythonCmd is None: pythonCmd = shutil.which(""python"") if pythonCmd is None: pythonCmd = ""python"" if not self.noDownload: ###DOWNLOAD ASSETS try: if not self.assetRepoLocation is None: os.environ[""GIT_OFFICIAL_CLONE_LOCATION""] = self.assetRepoLocation self.runProcess([pythonCmd, ""download_assets_git.py""]) except subprocess.CalledProcessError: print(""check that download_assets_git.py is working correctly"") sys.exit(1) print(""\n"") ###COMPILE TARGETS try: self.runProcess([pythonCmd, ""compile_targets.py""]) except subprocess.CalledProcessError: print(""check that compile_targets.py is working correctly"") sys.exit(1) print(""\n"") ###COMPILE MODELS try: self.runProcess([pythonCmd, ""compile_models.py""]) except subprocess.CalledProcessError: print(""check that compile_models.py is working correctly"") sys.exit(1) print(""\n"") ###COMPILE PROXIES try: self.runProcess([pythonCmd, ""compile_proxies.py""]) except subprocess.CalledProcessError: print(""check that compile_proxies.py is working correctly"") sys.exit(1) print(""\n"")","def runScripts ( self ) : pythonCmd = shutil . which ( ""python3"" ) if pythonCmd is None : pythonCmd = shutil . which ( ""python"" ) if pythonCmd is None : pythonCmd = ""python"" if not self . noDownload : try : os . environ [ ""GIT_OFFICIAL_CLONE_LOCATION"" ] = self . assetRepoLocation self . runProcess ( [ pythonCmd , ""download_assets_git.py"" ] ) except subprocess . CalledProcessError : print ( ""check that download_assets_git.py is working correctly"" ) sys . exit ( 1 ) print ( ""\n"" ) try : self . runProcess ( [ pythonCmd , ""compile_targets.py"" ] ) except subprocess . CalledProcessError : print ( ""check that compile_targets.py is working correctly"" ) sys . exit ( 1 ) print ( ""\n"" ) try : self . runProcess ( [ pythonCmd , ""compile_models.py"" ] ) except subprocess . CalledProcessError : print ( ""check that compile_models.py is working correctly"" ) sys . exit ( 1 ) print ( ""\n"" ) try : self . runProcess ( [ pythonCmd , ""compile_proxies.py"" ] ) except subprocess . CalledProcessError : print ( ""check that compile_proxies.py is working correctly"" ) sys . exit ( 1 ) print ( ""\n"" )",if not self . assetRepoLocation is None :,391 12684,"def assert_backend(self, expected_translated, language=""cs""): """"""Check that backend has correct data."""""" translation = self.get_translation(language) translation.commit_pending(""test"", None) store = translation.component.file_format_cls(translation.get_filename(), None) messages = set() translated = 0 for unit in store.content_units: id_hash = unit.id_hash self.assertFalse(id_hash in messages, ""Duplicate string in in backend file!"") if unit.is_translated(): translated += 1 self.assertEqual( translated, expected_translated, ""Did not found expected number of translations ({} != {})."".format( translated, expected_translated ), )","def assert_backend ( self , expected_translated , language = ""cs"" ) : """"""Check that backend has correct data."""""" translation = self . get_translation ( language ) translation . commit_pending ( ""test"" , None ) store = translation . component . file_format_cls ( translation . get_filename ( ) , None ) messages = set ( ) translated = 0 for unit in store . content_units : id_hash = unit . id_hash self . assertFalse ( id_hash in messages , ""Duplicate string in in backend file!"" ) translated += 1 self . 
assertEqual ( translated , expected_translated , ""Did not found expected number of translations ({} != {})."" . format ( translated , expected_translated ) , )",if unit . is_translated ( ) :,195 19373,"def process_results_file(f, region): try: formatted_findings_list = [] results = results_file_to_dict(f) aws_account_id = results[""account_id""] creation_date = datetime.datetime.strptime( results[""last_run""][""time""], ""%Y-%m-%d %H:%M:%S%z"" ).isoformat() for service in results.get(""service_list""): for finding_key, finding_value in ( results.get(""services"", {}).get(service).get(""findings"").items() ): if finding_value.get(""items""): formatted_finding = format_finding_to_securityhub_format( aws_account_id, region, creation_date, finding_key, finding_value, ) formatted_findings_list.append(formatted_finding) return formatted_findings_list except Exception as e: print_exception(f""Unable to process results file: {e}"") ","def process_results_file ( f , region ) : try : formatted_findings_list = [ ] results = results_file_to_dict ( f ) aws_account_id = results [ ""account_id"" ] creation_date = datetime . datetime . strptime ( results [ ""last_run"" ] [ ""time"" ] , ""%Y-%m-%d %H:%M:%S%z"" ) . isoformat ( ) for service in results . get ( ""service_list"" ) : for finding_key , finding_value in ( results . get ( ""services"" , { } ) . get ( service ) . get ( ""findings"" ) . items ( ) ) : formatted_finding = format_finding_to_securityhub_format ( aws_account_id , region , creation_date , finding_key , finding_value , ) formatted_findings_list . append ( formatted_finding ) return formatted_findings_list except Exception as e : print_exception ( f""Unable to process results file: {e}"" )","if finding_value . get ( ""items"" ) :",310 24045,"def _open_archive(self, archive_name): try: # Determine archive format archive = None if tarfile.is_tarfile(archive_name): # Open TAR archive = tarfile.open(archive_name, ""r"", bufsize=CHUNK_SIZE * 2) elif zipfile.is_zipfile(archive_name): # Open ZIP archive = ZipThatPretendsToBeTar(archive_name, ""r"") else: # Unrecognized format self.emit(""error"", None, _(""Downloaded file is corrupted."")) # Find binary inside for pathname in archive.getnames(): # Strip initial 'syncthing-platform-vXYZ' from path path = pathname.replace(""\\"", ""/"").split(""/"")[1:] if len(path) < 1: continue filename = path[0] if filename in (""syncthing"", ""syncthing.exe""): # Last sanity check, then just open files # and start extracting tinfo = archive.getmember(pathname) log.debug(""Extracting '%s'..."" % (pathname,)) if tinfo.isfile(): compressed = archive.extractfile(pathname) try: os.makedirs(os.path.split(self.target)[0]) except Exception: pass output = open(self.target, ""wb"") GLib.idle_add( self._extract, (archive, compressed, output, 0, tinfo.size) ) return except Exception as e: log.exception(e) self.emit(""error"", e, _(""Failed to determine latest Syncthing version."")) return","def _open_archive ( self , archive_name ) : try : archive = None if tarfile . is_tarfile ( archive_name ) : archive = tarfile . open ( archive_name , ""r"" , bufsize = CHUNK_SIZE * 2 ) elif zipfile . is_zipfile ( archive_name ) : archive = ZipThatPretendsToBeTar ( archive_name , ""r"" ) else : self . emit ( ""error"" , None , _ ( ""Downloaded file is corrupted."" ) ) for pathname in archive . getnames ( ) : path = pathname . replace ( ""\\"" , ""/"" ) . split ( ""/"" ) [ 1 : ] if len ( path ) < 1 : continue filename = path [ 0 ] tinfo = archive . getmember ( pathname ) log . 
debug ( ""Extracting '%s'..."" % ( pathname , ) ) if tinfo . isfile ( ) : compressed = archive . extractfile ( pathname ) try : os . makedirs ( os . path . split ( self . target ) [ 0 ] ) except Exception : pass output = open ( self . target , ""wb"" ) GLib . idle_add ( self . _extract , ( archive , compressed , output , 0 , tinfo . size ) ) return except Exception as e : log . exception ( e ) self . emit ( ""error"" , e , _ ( ""Failed to determine latest Syncthing version."" ) ) return","if filename in ( ""syncthing"" , ""syncthing.exe"" ) :",490 13705,"def worker_callback(self, worker): process_request_count = 0 while not worker.stop_event.is_set(): worker.process_pause_signal() try: result, task = self.input_queue.get(True, 0.1) except queue.Empty: pass else: worker.is_busy_event.set() try: process_request_count += 1 try: handler = self.spider.find_task_handler(task) except NoTaskHandler as ex: ex.tb = format_exc() self.spider.task_dispatcher.input_queue.put( (ex, task, {""exc_info"": sys.exc_info()}) ) self.spider.stat.inc(""parser:handler-not-found"") else: self.execute_task_handler(handler, result, task) self.spider.stat.inc(""parser:handler-processed"") if self.spider.parser_requests_per_process: if process_request_count >= self.spider.parser_requests_per_process: self.spider.stat.inc( ""parser:handler-req-limit"", ) return finally: worker.is_busy_event.clear()","def worker_callback ( self , worker ) : process_request_count = 0 while not worker . stop_event . is_set ( ) : worker . process_pause_signal ( ) try : result , task = self . input_queue . get ( True , 0.1 ) except queue . Empty : pass else : worker . is_busy_event . set ( ) try : process_request_count += 1 try : handler = self . spider . find_task_handler ( task ) except NoTaskHandler as ex : ex . tb = format_exc ( ) self . spider . task_dispatcher . input_queue . put ( ( ex , task , { ""exc_info"" : sys . exc_info ( ) } ) ) self . spider . stat . inc ( ""parser:handler-not-found"" ) else : self . execute_task_handler ( handler , result , task ) self . spider . stat . inc ( ""parser:handler-processed"" ) if process_request_count >= self . spider . parser_requests_per_process : self . spider . stat . inc ( ""parser:handler-req-limit"" , ) return finally : worker . is_busy_event . clear ( )",if self . spider . parser_requests_per_process :,404 6261,"def _construct(self, node): self.flatten_mapping(node) ret = self.construct_pairs(node) keys = [d[0] for d in ret] keys_sorted = sorted(keys, key=_natsort_key) for key in keys: expected = keys_sorted.pop(0) if key != expected: raise ConstructorError( None, None, ""keys out of order: "" ""expected {} got {} at {}"".format(expected, key, node.start_mark), ) return dict(ret)","def _construct ( self , node ) : self . flatten_mapping ( node ) ret = self . construct_pairs ( node ) keys = [ d [ 0 ] for d in ret ] keys_sorted = sorted ( keys , key = _natsort_key ) for key in keys : expected = keys_sorted . pop ( 0 ) raise ConstructorError ( None , None , ""keys out of order: "" ""expected {} got {} at {}"" . format ( expected , key , node . 
start_mark ) , ) return dict ( ret )",if key != expected :,159 1251,"def __iter__(self): consumed = 0 skipped = 0 for query in self.queries: query_copy = copy(query) if self._limit: query = query.limit(self._limit - consumed) if self._offset: query = query.offset(self._offset - skipped) obj_count = 0 for obj in query: consumed += 1 obj_count += 1 yield obj if not obj_count: skipped += query_copy.count() else: skipped += obj_count",def __iter__ ( self ) : consumed = 0 skipped = 0 for query in self . queries : query_copy = copy ( query ) query = query . limit ( self . _limit - consumed ) if self . _offset : query = query . offset ( self . _offset - skipped ) obj_count = 0 for obj in query : consumed += 1 obj_count += 1 yield obj if not obj_count : skipped += query_copy . count ( ) else : skipped += obj_count,if self . _limit :,158 13006,"def refresh(self): self.window.erase() for index, line in enumerate(self.lines): if index < self.head_position: continue elif index > self.head_position + self.text_height - 1: continue x = 0 y = index - self.head_position if len(line) > 0: self.window.addstr(y, x, line) xpos = self.width for index, item in enumerate(self.menu_items): if index == self.menu_position: mode = curses.color_pair(3) else: mode = curses.color_pair(2) self.window.addstr(self.text_height + 1, xpos - len(item[0]) - 4, item[0], mode) xpos = xpos - len(item[0]) - 4 self.render_scroll_bar() self.window.refresh() self.panel.top() self.panel.show() curses.panel.update_panels() curses.doupdate()","def refresh ( self ) : self . window . erase ( ) for index , line in enumerate ( self . lines ) : continue elif index > self . head_position + self . text_height - 1 : continue x = 0 y = index - self . head_position if len ( line ) > 0 : self . window . addstr ( y , x , line ) xpos = self . width for index , item in enumerate ( self . menu_items ) : if index == self . menu_position : mode = curses . color_pair ( 3 ) else : mode = curses . color_pair ( 2 ) self . window . addstr ( self . text_height + 1 , xpos - len ( item [ 0 ] ) - 4 , item [ 0 ] , mode ) xpos = xpos - len ( item [ 0 ] ) - 4 self . render_scroll_bar ( ) self . window . refresh ( ) self . panel . top ( ) self . panel . show ( ) curses . panel . update_panels ( ) curses . doupdate ( )",if index < self . head_position :,285 23040,"def process_one_node(self, p, result, environment): """"""Handle one node."""""" c = self.c if not self.code_only: result.append(self.underline2(p)) d = c.scanAllDirectives(p) if self.verbose: g.trace(d.get(""language"") or ""None"", "":"", p.h) s, code = self.process_directives(p.b, d) result.append(s) result.append(""\n\n"") # Add an empty line so bullet lists display properly. if code and self.execcode: s, err = self.exec_code(code, environment) # execute code found in a node, append to reST if not self.restoutput and s.strip(): s = self.format_output(s) # if some non-reST to print result.append(s) # append, whether plain or reST output if err: err = self.format_output(err, prefix=""**Error**::"") result.append(err)","def process_one_node ( self , p , result , environment ) : """"""Handle one node."""""" c = self . c if not self . code_only : result . append ( self . underline2 ( p ) ) d = c . scanAllDirectives ( p ) if self . verbose : g . trace ( d . get ( ""language"" ) or ""None"" , "":"" , p . h ) s , code = self . process_directives ( p . b , d ) result . append ( s ) result . append ( ""\n\n"" ) if code and self . execcode : s , err = self . exec_code ( code , environment ) if not self . 
restoutput and s . strip ( ) : s = self . format_output ( s ) result . append ( s ) err = self . format_output ( err , prefix = ""**Error**::"" ) result . append ( err )",if err :,281 19188,"def getReferences(view, name=""""): """"""Find all reference definitions."""""" # returns {name -> Region} refs = [] name = re.escape(name) if name == """": refs.extend(view.find_all(r""(?<=^\[)([^\]]+)(?=\]:)"", 0)) else: refs.extend(view.find_all(r""(?<=^\[)(%s)(?=\]:)"" % name, 0)) regions = refs ids = {} for reg in regions: name = view.substr(reg).strip() key = name.lower() if key in ids: ids[key].regions.append(reg) else: ids[key] = Obj(regions=[reg], label=name) return ids","def getReferences ( view , name = """" ) : """"""Find all reference definitions."""""" refs = [ ] name = re . escape ( name ) if name == """" : refs . extend ( view . find_all ( r""(?<=^\[)([^\]]+)(?=\]:)"" , 0 ) ) else : refs . extend ( view . find_all ( r""(?<=^\[)(%s)(?=\]:)"" % name , 0 ) ) regions = refs ids = { } for reg in regions : name = view . substr ( reg ) . strip ( ) key = name . lower ( ) ids [ key ] . regions . append ( reg ) else : ids [ key ] = Obj ( regions = [ reg ] , label = name ) return ids",if key in ids :,199 5463,"def download_chunk(args): global counter x, y, latest, level = args url_format = ( ""https://himawari8-dl.nict.go.jp/himawari8/img/D531106/{}d/{}/{}_{}_{}.png"" ) url = url_format.format(level, WIDTH, strftime(""%Y/%m/%d/%H%M%S"", latest), x, y) tiledata = download(url) # If the tile data is 2867 bytes, it is a blank ""No Image"" tile. if tiledata.__sizeof__() == 2867: sys.exit( ""No image available for {}."".format(strftime(""%Y/%m/%d %H:%M:%S"", latest)) ) with counter.get_lock(): counter.value += 1 if counter.value == level * level: print(""Downloading tiles: completed."") else: print( ""Downloading tiles: {}/{} completed..."".format( counter.value, level * level ) ) return x, y, tiledata","def download_chunk ( args ) : global counter x , y , latest , level = args url_format = ( ""https://himawari8-dl.nict.go.jp/himawari8/img/D531106/{}d/{}/{}_{}_{}.png"" ) url = url_format . format ( level , WIDTH , strftime ( ""%Y/%m/%d/%H%M%S"" , latest ) , x , y ) tiledata = download ( url ) if tiledata . __sizeof__ ( ) == 2867 : sys . exit ( ""No image available for {}."" . format ( strftime ( ""%Y/%m/%d %H:%M:%S"" , latest ) ) ) with counter . get_lock ( ) : counter . value += 1 print ( ""Downloading tiles: completed."" ) else : print ( ""Downloading tiles: {}/{} completed..."" . format ( counter . value , level * level ) ) return x , y , tiledata",if counter . value == level * level :,290 4820,"def save(self, mute=False, visited=None, *args, **kwargs): from ralph.ui.views.common import SAVE_PRIORITY # by default save with the same priority as in 'edit device' forms etc. 
visited = visited or set() visited.add(self) priority = kwargs.get(""priority"") change_author = kwargs.get(""user"") if priority is None: priority = SAVE_PRIORITY changes = [] for obj, fields in self.get_synced_objs_and_fields(): if obj in visited: continue for f in fields: setattr(obj, f, getattr(self, f)) obj.save(visited=visited, mute=True, priority=priority) # if 'mute' is False *and* if the given field is not present in # SYNC_FIELD_MIXIN_NOTIFICATIONS_WHITELIST, *then* notification of # change won't be send if not mute: changes = [] try: old_obj = type(self).objects.get(pk=self.pk) except type(self).DoesNotExist: old_obj = None for field in self._meta.fields: if field.name not in SYNC_FIELD_MIXIN_NOTIFICATIONS_WHITELIST: continue old_value = getattr(old_obj, field.name) if old_obj else None new_value = getattr(self, field.name) if old_value != new_value: changes.append(ChangeTuple(field.name, old_value, new_value)) fields_synced_signal.send_robust( sender=self, changes=changes, change_author=change_author ) return super(SyncFieldMixin, self).save(*args, **kwargs) ","def save ( self , mute = False , visited = None , * args , ** kwargs ) : from ralph . ui . views . common import SAVE_PRIORITY visited = visited or set ( ) visited . add ( self ) priority = kwargs . get ( ""priority"" ) change_author = kwargs . get ( ""user"" ) if priority is None : priority = SAVE_PRIORITY changes = [ ] for obj , fields in self . get_synced_objs_and_fields ( ) : if obj in visited : continue for f in fields : setattr ( obj , f , getattr ( self , f ) ) obj . save ( visited = visited , mute = True , priority = priority ) if not mute : changes = [ ] try : old_obj = type ( self ) . objects . get ( pk = self . pk ) except type ( self ) . DoesNotExist : old_obj = None for field in self . _meta . fields : if field . name not in SYNC_FIELD_MIXIN_NOTIFICATIONS_WHITELIST : continue old_value = getattr ( old_obj , field . name ) if old_obj else None new_value = getattr ( self , field . name ) changes . append ( ChangeTuple ( field . name , old_value , new_value ) ) fields_synced_signal . send_robust ( sender = self , changes = changes , change_author = change_author ) return super ( SyncFieldMixin , self ) . save ( * args , ** kwargs )",if old_value != new_value :,472 21744,"def tail(f, n, grep): if n <= 0: raise ValueError(""Invalid amount of lines: {}"".format(n)) BUFSIZ = 4096 CR = ""\n"" data = """" f.seek(0, os.SEEK_END) fsize = f.tell() block = -1 exit = False retval = [] while not exit: step = block * BUFSIZ if abs(step) >= fsize: f.seek(0) newdata = f.read(BUFSIZ - (abs(step) - fsize)) exit = True else: f.seek(step, os.SEEK_END) newdata = f.read(BUFSIZ) data = newdata + data if len(retval) + data.count(CR) >= n: if grep: lines = data.splitlines() llines = len(lines) for idx in xrange(llines - 1): line = lines[llines - idx - 1] if grep.search(line): retval.insert(0, line) if len(retval) >= n: break if len(retval) >= n: break else: data = lines[0] block -= 1 else: break else: block -= 1 if len(retval) < n: n -= len(retval) retval += data.splitlines()[-n:] return retval","def tail ( f , n , grep ) : if n <= 0 : raise ValueError ( ""Invalid amount of lines: {}"" . format ( n ) ) BUFSIZ = 4096 CR = ""\n"" data = """" f . seek ( 0 , os . SEEK_END ) fsize = f . tell ( ) block = - 1 exit = False retval = [ ] while not exit : step = block * BUFSIZ if abs ( step ) >= fsize : f . seek ( 0 ) newdata = f . read ( BUFSIZ - ( abs ( step ) - fsize ) ) exit = True else : f . seek ( step , os . SEEK_END ) newdata = f . 
read ( BUFSIZ ) data = newdata + data if len ( retval ) + data . count ( CR ) >= n : lines = data . splitlines ( ) llines = len ( lines ) for idx in xrange ( llines - 1 ) : line = lines [ llines - idx - 1 ] if grep . search ( line ) : retval . insert ( 0 , line ) if len ( retval ) >= n : break if len ( retval ) >= n : break else : data = lines [ 0 ] block -= 1 else : break else : block -= 1 if len ( retval ) < n : n -= len ( retval ) retval += data . splitlines ( ) [ - n : ] return retval",if grep :,431 3930,"def getattr(self, path, fh=None): logger.debug(""getattr -> '%s' '%s'"" % (path, fh)) if self.cache.is_deleting(path): logger.debug(""getattr path '%s' is deleting -- throwing ENOENT"" % (path)) raise FuseOSError(errno.ENOENT) with self.cache.get_lock( path ): # To avoid consistency issues, e.g. with a concurrent purge cache = True recheck_s3 = False if self.cache.is_empty(path): logger.debug(""getattr <- '%s' '%s' cache ENOENT"" % (path, fh)) if self.recheck_s3: cache = False recheck_s3 = True logger.debug( ""getattr rechecking on s3 <- '%s' '%s' cache ENOENT"" % (path, fh) ) else: raise FuseOSError(errno.ENOENT) attr = self.get_metadata(path, ""attr"") if attr == None: logger.debug(""getattr <- '%s' '%s' ENOENT"" % (path, fh)) raise FuseOSError(errno.ENOENT) if attr[""st_size""] == 0 and stat.S_ISDIR(attr[""st_mode""]): attr[""st_size""] = 4096 # For compatibility... attr[""st_nlink""] = 1 # Something better TODO ??? if self.st_blksize: attr[""st_blksize""] = self.st_blksize if self.full_prefetch: # Prefetch if stat.S_ISDIR(attr[""st_mode""]): self.readdir(path) else: self.check_data(path) logger.debug(""getattr <- '%s' '%s' '%s'"" % (path, fh, attr)) return attr","def getattr ( self , path , fh = None ) : logger . debug ( ""getattr -> '%s' '%s'"" % ( path , fh ) ) if self . cache . is_deleting ( path ) : logger . debug ( ""getattr path '%s' is deleting -- throwing ENOENT"" % ( path ) ) raise FuseOSError ( errno . ENOENT ) with self . cache . get_lock ( path ) : cache = True recheck_s3 = False if self . cache . is_empty ( path ) : logger . debug ( ""getattr <- '%s' '%s' cache ENOENT"" % ( path , fh ) ) cache = False recheck_s3 = True logger . debug ( ""getattr rechecking on s3 <- '%s' '%s' cache ENOENT"" % ( path , fh ) ) else : raise FuseOSError ( errno . ENOENT ) attr = self . get_metadata ( path , ""attr"" ) if attr == None : logger . debug ( ""getattr <- '%s' '%s' ENOENT"" % ( path , fh ) ) raise FuseOSError ( errno . ENOENT ) if attr [ ""st_size"" ] == 0 and stat . S_ISDIR ( attr [ ""st_mode"" ] ) : attr [ ""st_size"" ] = 4096 attr [ ""st_nlink"" ] = 1 if self . st_blksize : attr [ ""st_blksize"" ] = self . st_blksize if self . full_prefetch : if stat . S_ISDIR ( attr [ ""st_mode"" ] ) : self . readdir ( path ) else : self . check_data ( path ) logger . debug ( ""getattr <- '%s' '%s' '%s'"" % ( path , fh , attr ) ) return attr",if self . recheck_s3 :,509 2752,"def execute_commands_and_raise_on_return_code( commands, error=None ): # pylint: disable=invalid-name for command in commands: bad_return = error if error else ""execute {}"".format(command) output, return_code = execute_shell_command_get_return_code(command) if return_code != 0: raise InstallationError(""Failed to {}\n{}"".format(bad_return, output))","def execute_commands_and_raise_on_return_code ( commands , error = None ) : for command in commands : bad_return = error if error else ""execute {}"" . 
format ( command ) output , return_code = execute_shell_command_get_return_code ( command ) raise InstallationError ( ""Failed to {}\n{}"" . format ( bad_return , output ) )",if return_code != 0 :,112 20776,"def image_diff(test, ref, key=""image"", prompt_num=None): """"""Diff two base64-encoded images."""""" if test == ref: return True, """" message = ""Mismatch in %s output"" % key if prompt_num is not None: message += "" (#%d)"" % prompt_num try: test = base64_to_array(test) ref = base64_to_array(ref) if test.shape == ref.shape: import numpy as np diff = np.abs(test - ref).mean() * 100 # TODO hardcode tol, make configurable later if diff < 5: return True, """" message += "": %.3g%% difference"" % diff else: message += "": Test image (%dx%d)"" % test.shape[:2] message += ""; Ref image (%dx%d)"" % ref.shape[:2] except ImportError: pass return False, message","def image_diff ( test , ref , key = ""image"" , prompt_num = None ) : """"""Diff two base64-encoded images."""""" if test == ref : return True , """" message = ""Mismatch in %s output"" % key if prompt_num is not None : message += "" (#%d)"" % prompt_num try : test = base64_to_array ( test ) ref = base64_to_array ( ref ) import numpy as np diff = np . abs ( test - ref ) . mean ( ) * 100 if diff < 5 : return True , """" message += "": %.3g%% difference"" % diff else : message += "": Test image (%dx%d)"" % test . shape [ : 2 ] message += ""; Ref image (%dx%d)"" % ref . shape [ : 2 ] except ImportError : pass return False , message",if test . shape == ref . shape :,260 15907,"def get_event(payload: Dict[str, Any]) -> Optional[str]: action = get_action_with_primary_id(payload) event = ""{}_{}"".format(action[""entity_type""], action[""action""]) if event in IGNORED_EVENTS: return None changes = action.get(""changes"") if changes is not None: if changes.get(""description"") is not None: event = ""{}_{}"".format(event, ""description"") elif changes.get(""state"") is not None: event = ""{}_{}"".format(event, ""state"") elif changes.get(""workflow_state_id"") is not None: event = ""{}_{}"".format(event, ""state"") elif changes.get(""name"") is not None: event = ""{}_{}"".format(event, ""name"") elif changes.get(""archived"") is not None: event = ""{}_{}"".format(event, ""archived"") elif changes.get(""complete"") is not None: event = ""{}_{}"".format(event, ""complete"") elif changes.get(""epic_id"") is not None: event = ""{}_{}"".format(event, ""epic"") elif changes.get(""estimate"") is not None: event = ""{}_{}"".format(event, ""estimate"") elif changes.get(""file_ids"") is not None: event = ""{}_{}"".format(event, ""attachment"") elif changes.get(""label_ids"") is not None: event = ""{}_{}"".format(event, ""label"") elif changes.get(""project_id"") is not None: event = ""{}_{}"".format(event, ""project"") elif changes.get(""story_type"") is not None: event = ""{}_{}"".format(event, ""type"") elif changes.get(""owner_ids"") is not None: event = ""{}_{}"".format(event, ""owner"") return event ","def get_event ( payload : Dict [ str , Any ] ) -> Optional [ str ] : action = get_action_with_primary_id ( payload ) event = ""{}_{}"" . format ( action [ ""entity_type"" ] , action [ ""action"" ] ) if event in IGNORED_EVENTS : return None changes = action . get ( ""changes"" ) if changes is not None : if changes . get ( ""description"" ) is not None : event = ""{}_{}"" . format ( event , ""description"" ) elif changes . get ( ""state"" ) is not None : event = ""{}_{}"" . format ( event , ""state"" ) elif changes . 
get ( ""workflow_state_id"" ) is not None : event = ""{}_{}"" . format ( event , ""state"" ) elif changes . get ( ""name"" ) is not None : event = ""{}_{}"" . format ( event , ""name"" ) elif changes . get ( ""archived"" ) is not None : event = ""{}_{}"" . format ( event , ""archived"" ) elif changes . get ( ""complete"" ) is not None : event = ""{}_{}"" . format ( event , ""complete"" ) elif changes . get ( ""epic_id"" ) is not None : event = ""{}_{}"" . format ( event , ""epic"" ) elif changes . get ( ""estimate"" ) is not None : event = ""{}_{}"" . format ( event , ""estimate"" ) event = ""{}_{}"" . format ( event , ""attachment"" ) elif changes . get ( ""label_ids"" ) is not None : event = ""{}_{}"" . format ( event , ""label"" ) elif changes . get ( ""project_id"" ) is not None : event = ""{}_{}"" . format ( event , ""project"" ) elif changes . get ( ""story_type"" ) is not None : event = ""{}_{}"" . format ( event , ""type"" ) elif changes . get ( ""owner_ids"" ) is not None : event = ""{}_{}"" . format ( event , ""owner"" ) return event","elif changes . get ( ""file_ids"" ) is not None :",472 5344,"def cd(self, name): path = self.abspath(name) if name == "".."": self.path = ""/"".join(self.path.split(""/"")[:-1]) if self.path == """": self.path = ""/"" return try: # test self.client.files_list_folder(path, recursive=False) except dropbox.exceptions.ApiError as api_e: e = api_e.reason if e.is_other(): raise OperationFailure(repr(e)) elif e.is_path(): pe = e.get_path() if pe.is_not_folder(): raise IsFile() elif pe.is_not_found(): raise OperationFailure(""Not Found!"") else: raise OperationFailure(repr(e)) else: raise OperationFailure(""Not found!"") else: self.path = path","def cd ( self , name ) : path = self . abspath ( name ) if name == "".."" : self . path = ""/"" . join ( self . path . split ( ""/"" ) [ : - 1 ] ) if self . path == """" : self . path = ""/"" return try : self . client . files_list_folder ( path , recursive = False ) except dropbox . exceptions . ApiError as api_e : e = api_e . reason if e . is_other ( ) : raise OperationFailure ( repr ( e ) ) elif e . is_path ( ) : pe = e . get_path ( ) if pe . is_not_folder ( ) : raise IsFile ( ) raise OperationFailure ( ""Not Found!"" ) else : raise OperationFailure ( repr ( e ) ) else : raise OperationFailure ( ""Not found!"" ) else : self . path = path",elif pe . is_not_found ( ) :,256 9015,"def concatenateCharacterTokens(tokens): pendingCharacters = [] for token in tokens: type = token[""type""] if type in (""Characters"", ""SpaceCharacters""): pendingCharacters.append(token[""data""]) else: if pendingCharacters: yield {""type"": ""Characters"", ""data"": """".join(pendingCharacters)} pendingCharacters = [] yield token if pendingCharacters: yield {""type"": ""Characters"", ""data"": """".join(pendingCharacters)} ","def concatenateCharacterTokens ( tokens ) : pendingCharacters = [ ] for token in tokens : type = token [ ""type"" ] pendingCharacters . append ( token [ ""data"" ] ) else : if pendingCharacters : yield { ""type"" : ""Characters"" , ""data"" : """" . join ( pendingCharacters ) } pendingCharacters = [ ] yield token if pendingCharacters : yield { ""type"" : ""Characters"" , ""data"" : """" . join ( pendingCharacters ) }","if type in ( ""Characters"" , ""SpaceCharacters"" ) :",130 6772,"def verify_output(actual, expected): actual = _read_file(actual, ""Actual"") expected = _read_file(join(CURDIR, expected), ""Expected"") if len(expected) != len(actual): raise AssertionError( ""Lengths differ. 
Expected %d lines but got %d"" % (len(expected), len(actual)) ) for exp, act in zip(expected, actual): tester = fnmatchcase if ""*"" in exp else eq if not tester(act.rstrip(), exp.rstrip()): raise AssertionError( ""Lines differ.\nExpected: %s\nActual: %s"" % (exp, act) ) ","def verify_output ( actual , expected ) : actual = _read_file ( actual , ""Actual"" ) expected = _read_file ( join ( CURDIR , expected ) , ""Expected"" ) if len ( expected ) != len ( actual ) : raise AssertionError ( ""Lengths differ. Expected %d lines but got %d"" % ( len ( expected ) , len ( actual ) ) ) for exp , act in zip ( expected , actual ) : tester = fnmatchcase if ""*"" in exp else eq raise AssertionError ( ""Lines differ.\nExpected: %s\nActual: %s"" % ( exp , act ) )","if not tester ( act . rstrip ( ) , exp . rstrip ( ) ) :",179 6631,"def forward(self, inputs, feat_layers): out = {} res = self.conv_bn_init(inputs) res = fluid.layers.relu(res) res = self.maxpool(res) # out['conv_init'] = res for i in range(len(self.block_collect)): for layer in self.block_collect[i]: res = layer(res) name = ""block{}"".format(i) if name in feat_layers: out[name] = res if len(out) == len(feat_layers): return out res = self.global_pool(res) B, C, _, _ = res.shape res = fluid.layers.reshape(res, [B, C]) res = self.fc(res) out[""fc""] = res return out ","def forward ( self , inputs , feat_layers ) : out = { } res = self . conv_bn_init ( inputs ) res = fluid . layers . relu ( res ) res = self . maxpool ( res ) for i in range ( len ( self . block_collect ) ) : for layer in self . block_collect [ i ] : res = layer ( res ) name = ""block{}"" . format ( i ) out [ name ] = res if len ( out ) == len ( feat_layers ) : return out res = self . global_pool ( res ) B , C , _ , _ = res . shape res = fluid . layers . reshape ( res , [ B , C ] ) res = self . fc ( res ) out [ ""fc"" ] = res return out",if name in feat_layers :,223 1772,"def _test_forever(self, tests): while True: for test_name in tests: yield test_name if self.bad: return if self.ns.fail_env_changed and self.environment_changed: return ","def _test_forever ( self , tests ) : while True : for test_name in tests : yield test_name return if self . ns . fail_env_changed and self . environment_changed : return",if self . bad :,76 5811,"def init_wake_button_switch(self): try: import RPi.GPIO if susicfg.get(""wakebutton"") == ""enabled"": self.wake_button_switch.set_active(True) else: self.wake_button_switch.set_active(False) except ImportError: self.wake_button_switch.set_sensitive(False) except RuntimeError: self.wake_button_switch.set_sensitive(False) ",def init_wake_button_switch ( self ) : try : import RPi . GPIO self . wake_button_switch . set_active ( True ) else : self . wake_button_switch . set_active ( False ) except ImportError : self . wake_button_switch . set_sensitive ( False ) except RuntimeError : self . wake_button_switch . set_sensitive ( False ),"if susicfg . 
get ( ""wakebutton"" ) == ""enabled"" :",131 1869,"def transform(self, node, results): names_inserted = set() testlist = results[""args""] args = testlist.children new_args = [] iterator = enumerate(args) for idx, arg in iterator: if arg.type == token.NAME and arg.value in names_inserted: if idx < len(args) - 1 and args[idx + 1].type == token.COMMA: next(iterator) continue else: new_args.append(arg) if arg.type == token.NAME: names_inserted.add(arg.value) if new_args and new_args[-1].type == token.COMMA: del new_args[-1] if len(new_args) == 1: atom = testlist.parent new_args[0].prefix = atom.prefix atom.replace(new_args[0]) else: args[:] = new_args node.changed()","def transform ( self , node , results ) : names_inserted = set ( ) testlist = results [ ""args"" ] args = testlist . children new_args = [ ] iterator = enumerate ( args ) for idx , arg in iterator : if idx < len ( args ) - 1 and args [ idx + 1 ] . type == token . COMMA : next ( iterator ) continue else : new_args . append ( arg ) if arg . type == token . NAME : names_inserted . add ( arg . value ) if new_args and new_args [ - 1 ] . type == token . COMMA : del new_args [ - 1 ] if len ( new_args ) == 1 : atom = testlist . parent new_args [ 0 ] . prefix = atom . prefix atom . replace ( new_args [ 0 ] ) else : args [ : ] = new_args node . changed ( )",if arg . type == token . NAME and arg . value in names_inserted :,260 37,"def scan(self, targets): for target in targets: target.print_infos() if self.is_interesting(target): self.target[""other""].append(target) if self.match(target): return target return None ","def scan ( self , targets ) : for target in targets : target . print_infos ( ) self . target [ ""other"" ] . append ( target ) if self . match ( target ) : return target return None",if self . is_interesting ( target ) :,72 12605,"def decode(self, segment): numbers = [] accu = 0 weight = 1 for char in segment: ordinal = self.decoding[char] isContinuation = ordinal >= 32 if isContinuation: ordinal -= 32 # Reset continuation bit if weight == 1: # It was the tail of a number sign = -1 if ordinal % 2 else 1 # Remember sign ordinal //= 2 # Remove sign bit, no matter what it was accu += weight * ordinal # Add new ordinal as currently least significant if isContinuation: # If it's a continuation if ( weight == 1 ): # If it's the first continuation it will have the sign bit weight = 16 # So next weight is 16 else: # Else it won't have the sign bit: weight *= 32 # So next weight * 32 else: # Else ('no continuation' means 'end of number', since chunks are reversed) numbers.append(sign * accu) # Append accumulated number to results accu = 0 # Reset accumulator for next number weight = 1 # Reset weight, next number will again start with least significant part return numbers","def decode ( self , segment ) : numbers = [ ] accu = 0 weight = 1 for char in segment : ordinal = self . decoding [ char ] isContinuation = ordinal >= 32 if isContinuation : ordinal -= 32 sign = - 1 if ordinal % 2 else 1 ordinal //= 2 accu += weight * ordinal if isContinuation : if ( weight == 1 ) : weight = 16 else : weight *= 32 else : numbers . 
append ( sign * accu ) accu = 0 weight = 1 return numbers",if weight == 1 :,346 1289,"def new_f(*args, **kwargs): try: D = pickle.load(open(filename, ""rb"")) cache_exists = True except: D = {} cache_exists = False # simple comparison doesn't work in the case of numpy arrays Dargs = D.get(""args"") Dkwargs = D.get(""kwargs"") try: args_match = args == Dargs except: args_match = np.all([np.all(a1 == a2) for (a1, a2) in zip(Dargs, args)]) try: kwargs_match = kwargs == Dkwargs except: kwargs_match = (sorted(Dkwargs.keys()) == sorted(kwargs.keys())) and ( np.all([np.all(Dkwargs[key] == kwargs[key]) for key in kwargs]) ) if ( type(D) == dict and D.get(""funcname"") == f.__name__ and args_match and kwargs_match ): if verbose: print(""@pickle_results: using precomputed "" ""results from '%s'"" % filename) retval = D[""retval""] else: if verbose: print(""@pickle_results: computing results "" ""and saving to '%s'"" % filename) if cache_exists: print("" warning: cache file '%s' exists"" % filename) print("" - args match: %s"" % args_match) print("" - kwargs match: %s"" % kwargs_match) retval = f(*args, **kwargs) funcdict = dict(funcname=f.__name__, retval=retval, args=args, kwargs=kwargs) with open(filename, ""wb"") as outfile: pickle.dump(funcdict, outfile) return retval","def new_f ( * args , ** kwargs ) : try : D = pickle . load ( open ( filename , ""rb"" ) ) cache_exists = True except : D = { } cache_exists = False Dargs = D . get ( ""args"" ) Dkwargs = D . get ( ""kwargs"" ) try : args_match = args == Dargs except : args_match = np . all ( [ np . all ( a1 == a2 ) for ( a1 , a2 ) in zip ( Dargs , args ) ] ) try : kwargs_match = kwargs == Dkwargs except : kwargs_match = ( sorted ( Dkwargs . keys ( ) ) == sorted ( kwargs . keys ( ) ) ) and ( np . all ( [ np . all ( Dkwargs [ key ] == kwargs [ key ] ) for key in kwargs ] ) ) if ( type ( D ) == dict and D . get ( ""funcname"" ) == f . __name__ and args_match and kwargs_match ) : if verbose : print ( ""@pickle_results: using precomputed "" ""results from '%s'"" % filename ) retval = D [ ""retval"" ] else : if verbose : print ( ""@pickle_results: computing results "" ""and saving to '%s'"" % filename ) print ( "" warning: cache file '%s' exists"" % filename ) print ( "" - args match: %s"" % args_match ) print ( "" - kwargs match: %s"" % kwargs_match ) retval = f ( * args , ** kwargs ) funcdict = dict ( funcname = f . __name__ , retval = retval , args = args , kwargs = kwargs ) with open ( filename , ""wb"" ) as outfile : pickle . dump ( funcdict , outfile ) return retval",if cache_exists :,482 25428,"def interface_update(self, request: web.Request) -> None: """"""Update the configuration of an interface."""""" interface = self._get_interface(request.match_info.get(ATTR_INTERFACE)) # Validate data body = await api_validate(SCHEMA_UPDATE, request) if not body: raise APIError(""You need to supply at least one option to update"") # Apply config for key, config in body.items(): if key == ATTR_IPV4: interface.ipv4 = attr.evolve( interface.ipv4 or IpConfig(InterfaceMethod.STATIC, [], None, []), **config, ) elif key == ATTR_IPV6: interface.ipv6 = attr.evolve( interface.ipv6 or IpConfig(InterfaceMethod.STATIC, [], None, []), **config, ) elif key == ATTR_WIFI: interface.wifi = attr.evolve( interface.wifi or WifiConfig(WifiMode.INFRASTRUCTURE, """", AuthMethod.OPEN, None, None), **config, ) elif key == ATTR_ENABLED: interface.enabled = config await asyncio.shield(self.sys_host.network.apply_changes(interface))","def interface_update ( self , request : web . 
Request ) -> None : """"""Update the configuration of an interface."""""" interface = self . _get_interface ( request . match_info . get ( ATTR_INTERFACE ) ) body = await api_validate ( SCHEMA_UPDATE , request ) if not body : raise APIError ( ""You need to supply at least one option to update"" ) for key , config in body . items ( ) : if key == ATTR_IPV4 : interface . ipv4 = attr . evolve ( interface . ipv4 or IpConfig ( InterfaceMethod . STATIC , [ ] , None , [ ] ) , ** config , ) elif key == ATTR_IPV6 : interface . ipv6 = attr . evolve ( interface . ipv6 or IpConfig ( InterfaceMethod . STATIC , [ ] , None , [ ] ) , ** config , ) elif key == ATTR_WIFI : interface . wifi = attr . evolve ( interface . wifi or WifiConfig ( WifiMode . INFRASTRUCTURE , """" , AuthMethod . OPEN , None , None ) , ** config , ) interface . enabled = config await asyncio . shield ( self . sys_host . network . apply_changes ( interface ) )",elif key == ATTR_ENABLED :,349 12380,"def cache_dst(self): final_dst = None final_linenb = None for linenb, assignblk in enumerate(self): for dst, src in viewitems(assignblk): if dst.is_id(""IRDst""): if final_dst is not None: raise ValueError(""Multiple destinations!"") final_dst = src final_linenb = linenb self._dst = final_dst self._dst_linenb = final_linenb return final_dst ","def cache_dst ( self ) : final_dst = None final_linenb = None for linenb , assignblk in enumerate ( self ) : for dst , src in viewitems ( assignblk ) : if final_dst is not None : raise ValueError ( ""Multiple destinations!"" ) final_dst = src final_linenb = linenb self . _dst = final_dst self . _dst_linenb = final_linenb return final_dst","if dst . is_id ( ""IRDst"" ) :",144 20405,"def _tab_only_directories(self): from os.path import dirname, basename, expanduser, join, isdir line = parse(self.line) cwd = self.fm.env.cwd.path try: rel_dest = line.rest(1) except IndexError: rel_dest = """" # expand the tilde into the user directory if rel_dest.startswith(""~""): rel_dest = expanduser(rel_dest) # define some shortcuts abs_dest = join(cwd, rel_dest) abs_dirname = dirname(abs_dest) rel_basename = basename(rel_dest) rel_dirname = dirname(rel_dest) try: # are we after a directory? if rel_dest.endswith(""/"") or rel_dest == """": _, dirnames, _ = os.walk(abs_dest).next() # are we in the middle of the filename? else: _, dirnames, _ = os.walk(abs_dirname).next() dirnames = [dn for dn in dirnames if dn.startswith(rel_basename)] except (OSError, StopIteration): # os.walk found nothing pass else: dirnames.sort() # no results, return None if len(dirnames) == 0: return # one result. since it must be a directory, append a slash. if len(dirnames) == 1: return line + join(rel_dirname, dirnames[0]) + ""/"" # more than one result. append no slash, so the user can # manually type in the slash to advance into that directory return (line + join(rel_dirname, dirname) for dirname in dirnames)","def _tab_only_directories ( self ) : from os . path import dirname , basename , expanduser , join , isdir line = parse ( self . line ) cwd = self . fm . env . cwd . path try : rel_dest = line . rest ( 1 ) except IndexError : rel_dest = """" if rel_dest . startswith ( ""~"" ) : rel_dest = expanduser ( rel_dest ) abs_dest = join ( cwd , rel_dest ) abs_dirname = dirname ( abs_dest ) rel_basename = basename ( rel_dest ) rel_dirname = dirname ( rel_dest ) try : if rel_dest . endswith ( ""/"" ) or rel_dest == """" : _ , dirnames , _ = os . walk ( abs_dest ) . next ( ) else : _ , dirnames , _ = os . walk ( abs_dirname ) . 
next ( ) dirnames = [ dn for dn in dirnames if dn . startswith ( rel_basename ) ] except ( OSError , StopIteration ) : pass else : dirnames . sort ( ) return if len ( dirnames ) == 1 : return line + join ( rel_dirname , dirnames [ 0 ] ) + ""/"" return ( line + join ( rel_dirname , dirname ) for dirname in dirnames )",if len ( dirnames ) == 0 :,437 10986,"def parse(self, backend, x509_obj): extensions = [] seen_oids = set() for i in range(self.ext_count(backend, x509_obj)): ext = self.get_ext(backend, x509_obj, i) backend.openssl_assert(ext != backend._ffi.NULL) crit = backend._lib.X509_EXTENSION_get_critical(ext) critical = crit == 1 oid = x509.ObjectIdentifier(_obj2txt(backend, ext.object)) if oid in seen_oids: raise x509.DuplicateExtension( ""Duplicate {0} extension found"".format(oid), oid ) try: handler = self.handlers[oid] except KeyError: if critical: raise x509.UnsupportedExtension( ""Critical extension {0} is not currently supported"".format(oid), oid ) else: # For extensions which are not supported by OpenSSL we pass the # extension object directly to the parsing routine so it can # be decoded manually. if self.unsupported_exts and oid in self.unsupported_exts: ext_data = ext else: ext_data = backend._lib.X509V3_EXT_d2i(ext) if ext_data == backend._ffi.NULL: backend._consume_errors() raise ValueError( ""The {0} extension is invalid and can't be "" ""parsed"".format(oid) ) value = handler(backend, ext_data) extensions.append(x509.Extension(oid, critical, value)) seen_oids.add(oid) return x509.Extensions(extensions)","def parse ( self , backend , x509_obj ) : extensions = [ ] seen_oids = set ( ) for i in range ( self . ext_count ( backend , x509_obj ) ) : ext = self . get_ext ( backend , x509_obj , i ) backend . openssl_assert ( ext != backend . _ffi . NULL ) crit = backend . _lib . X509_EXTENSION_get_critical ( ext ) critical = crit == 1 oid = x509 . ObjectIdentifier ( _obj2txt ( backend , ext . object ) ) raise x509 . DuplicateExtension ( ""Duplicate {0} extension found"" . format ( oid ) , oid ) try : handler = self . handlers [ oid ] except KeyError : if critical : raise x509 . UnsupportedExtension ( ""Critical extension {0} is not currently supported"" . format ( oid ) , oid ) else : if self . unsupported_exts and oid in self . unsupported_exts : ext_data = ext else : ext_data = backend . _lib . X509V3_EXT_d2i ( ext ) if ext_data == backend . _ffi . NULL : backend . _consume_errors ( ) raise ValueError ( ""The {0} extension is invalid and can't be "" ""parsed"" . format ( oid ) ) value = handler ( backend , ext_data ) extensions . append ( x509 . Extension ( oid , critical , value ) ) seen_oids . add ( oid ) return x509 . Extensions ( extensions )",if oid in seen_oids :,492 4913,"def __init__(self, parent, dir, mask, with_dirs=True): filelist = [] dirlist = [""..""] self.dir = dir self.file = """" mask = mask.upper() pattern = self.MakeRegex(mask) for i in os.listdir(dir): if i == ""."" or i == "".."": continue path = os.path.join(dir, i) if os.path.isdir(path): dirlist.append(i) continue path = path.upper() value = i.upper() if pattern.match(value) is not None: filelist.append(i) self.files = filelist if with_dirs: self.dirs = dirlist ","def __init__ ( self , parent , dir , mask , with_dirs = True ) : filelist = [ ] dirlist = [ "".."" ] self . dir = dir self . file = """" mask = mask . upper ( ) pattern = self . MakeRegex ( mask ) for i in os . listdir ( dir ) : if i == ""."" or i == "".."" : continue path = os . path . join ( dir , i ) dirlist . 
append ( i ) continue path = path . upper ( ) value = i . upper ( ) if pattern . match ( value ) is not None : filelist . append ( i ) self . files = filelist if with_dirs : self . dirs = dirlist",if os . path . isdir ( path ) :,199 12358,"def initialize(self): nn.init.xavier_uniform_(self.linear.weight.data) if self.linear.bias is not None: self.linear.bias.data.uniform_(-1.0, 1.0) if self.self_layer: nn.init.xavier_uniform_(self.linear_self.weight.data) if self.linear_self.bias is not None: self.linear_self.bias.data.uniform_(-1.0, 1.0)","def initialize ( self ) : nn . init . xavier_uniform_ ( self . linear . weight . data ) if self . linear . bias is not None : self . linear . bias . data . uniform_ ( - 1.0 , 1.0 ) if self . self_layer : nn . init . xavier_uniform_ ( self . linear_self . weight . data ) self . linear_self . bias . data . uniform_ ( - 1.0 , 1.0 )",if self . linear_self . bias is not None :,126 20158,"def datestamp(): """"""Enter today's date as the release date in the changelog."""""" dt = datetime.datetime.now() stamp = ""({} {}, {})"".format(dt.strftime(""%B""), dt.day, dt.year) marker = ""(in development)"" lines = [] underline_length = None with open(CHANGELOG) as f: for line in f: if marker in line: # The header line. line = line.replace(marker, stamp) lines.append(line) underline_length = len(line.strip()) elif underline_length: # This is the line after the header. Rewrite the dashes. lines.append(""-"" * underline_length + ""\n"") underline_length = None else: lines.append(line) with open(CHANGELOG, ""w"") as f: for line in lines: f.write(line) ","def datestamp ( ) : """"""Enter today's date as the release date in the changelog."""""" dt = datetime . datetime . now ( ) stamp = ""({} {}, {})"" . format ( dt . strftime ( ""%B"" ) , dt . day , dt . year ) marker = ""(in development)"" lines = [ ] underline_length = None with open ( CHANGELOG ) as f : for line in f : line = line . replace ( marker , stamp ) lines . append ( line ) underline_length = len ( line . strip ( ) ) elif underline_length : lines . append ( ""-"" * underline_length + ""\n"" ) underline_length = None else : lines . append ( line ) with open ( CHANGELOG , ""w"" ) as f : for line in lines : f . write ( line )",if marker in line :,253 6885,"def go(self, pyfile): for line in open(pyfile): if self.mode == ""in def"": self.text += "" "" + line.strip() if line.strip().endswith("":""): if self.definition(self.text): self.text = """" self.mode = ""in func"" else: self.text = """" self.mode = ""normal"" elif self.mode == ""in func"": if '""""""' in line: self.text += line.strip().strip('""') self.mode = ""in doc"" if line.count('""""""') == 2: self.mode = ""normal"" self.docstring(self.text) self.text = """" else: self.mode = ""normal"" elif self.mode == ""in doc"": self.text += "" "" + line if '""""""' in line: self.mode = ""normal"" self.docstring(self.text.strip().strip('""')) self.text = """" elif line.startswith(""## ""): self.header(line.strip().strip(""#"")) elif line.startswith(""def "") or line.startswith(""class ""): self.text += line.strip().strip("":"") if line.strip().endswith("":""): if self.definition(self.text): self.text = """" self.mode = ""in func"" else: self.text = """" self.mode = ""normal"" else: self.mode = ""in def""","def go ( self , pyfile ) : for line in open ( pyfile ) : if self . mode == ""in def"" : self . text += "" "" + line . strip ( ) if line . strip ( ) . endswith ( "":"" ) : if self . definition ( self . text ) : self . text = """" self . 
mode = ""in func"" else : self . text = """" self . mode = ""normal"" if '""""""' in line : self . text += line . strip ( ) . strip ( '""' ) self . mode = ""in doc"" if line . count ( '""""""' ) == 2 : self . mode = ""normal"" self . docstring ( self . text ) self . text = """" else : self . mode = ""normal"" elif self . mode == ""in doc"" : self . text += "" "" + line if '""""""' in line : self . mode = ""normal"" self . docstring ( self . text . strip ( ) . strip ( '""' ) ) self . text = """" elif line . startswith ( ""## "" ) : self . header ( line . strip ( ) . strip ( ""#"" ) ) elif line . startswith ( ""def "" ) or line . startswith ( ""class "" ) : self . text += line . strip ( ) . strip ( "":"" ) if line . strip ( ) . endswith ( "":"" ) : if self . definition ( self . text ) : self . text = """" self . mode = ""in func"" else : self . text = """" self . mode = ""normal"" else : self . mode = ""in def""","elif self . mode == ""in func"" :",472 21531,"def _convert_upsample(inexpr, keras_layer, etab): _check_data_format(keras_layer) upsample_type = type(keras_layer).__name__ params = {} if upsample_type == ""UpSampling1D"": h = keras_layer.size params[""scale_h""] = h elif upsample_type == ""UpSampling2D"": h, w = keras_layer.size if h != w: raise tvm.error.OpAttributeInvalid( ""Height must equal width for operator Upsample."" ) params[""scale_h""] = h params[""scale_w""] = h if hasattr(keras_layer, ""interpolation""): interpolation = keras_layer.interpolation if interpolation == ""nearest"": params[""method""] = ""nearest_neighbor"" else: params[""method""] = ""bilinear"" else: raise tvm.error.OpNotImplemented( ""Operator {} is not supported for frontend Keras."".format(upsample_type) ) params[""layout""] = etab.data_layout out = _op.nn.upsampling(inexpr, **params) return out","def _convert_upsample ( inexpr , keras_layer , etab ) : _check_data_format ( keras_layer ) upsample_type = type ( keras_layer ) . __name__ params = { } if upsample_type == ""UpSampling1D"" : h = keras_layer . size params [ ""scale_h"" ] = h elif upsample_type == ""UpSampling2D"" : h , w = keras_layer . size raise tvm . error . OpAttributeInvalid ( ""Height must equal width for operator Upsample."" ) params [ ""scale_h"" ] = h params [ ""scale_w"" ] = h if hasattr ( keras_layer , ""interpolation"" ) : interpolation = keras_layer . interpolation if interpolation == ""nearest"" : params [ ""method"" ] = ""nearest_neighbor"" else : params [ ""method"" ] = ""bilinear"" else : raise tvm . error . OpNotImplemented ( ""Operator {} is not supported for frontend Keras."" . format ( upsample_type ) ) params [ ""layout"" ] = etab . data_layout out = _op . nn . upsampling ( inexpr , ** params ) return out",if h != w :,317 19512,"def apply_transformation(self, cli, document, lineno, source_to_display, tokens): # Get the highlight positions. key = (cli.render_counter, document.text, document.cursor_position) positions = self._positions_cache.get( key, lambda: self._get_positions_to_highlight(document) ) # Apply if positions were found at this line. if positions: for row, col in positions: if row == lineno: col = source_to_display(col) tokens = explode_tokens(tokens) token, text = tokens[col] if col == document.cursor_position_col: token += ("":"",) + Token.MatchingBracket.Cursor else: token += ("":"",) + Token.MatchingBracket.Other tokens[col] = (token, text) return Transformation(tokens) ","def apply_transformation ( self , cli , document , lineno , source_to_display , tokens ) : key = ( cli . render_counter , document . 
text , document . cursor_position ) positions = self . _positions_cache . get ( key , lambda : self . _get_positions_to_highlight ( document ) ) if positions : for row , col in positions : col = source_to_display ( col ) tokens = explode_tokens ( tokens ) token , text = tokens [ col ] if col == document . cursor_position_col : token += ( "":"" , ) + Token . MatchingBracket . Cursor else : token += ( "":"" , ) + Token . MatchingBracket . Other tokens [ col ] = ( token , text ) return Transformation ( tokens )",if row == lineno :,244 20809,"def download(self, *args, **kwargs): fmt = self.query.get(""format"", ""spec"") version = self.query.get(""version"", None) branch = self.query.get(""branch"", None) selector = self.query.get(""selector"") or ""css"" spider_id = self.kwargs.get(""spider_id"", None) spiders = [spider_id] if spider_id is not None else None try: self.project except InvalidFilename as e: raise JsonApiNotFoundError(str(e)) if hasattr(self.storage, ""checkout"") and (version or branch): try: if version and len(version) < 40: version = self.commit_from_short_sha(version).id self.storage.checkout(version, branch) except IOError: pass except ValueError as e: raise JsonApiNotFoundError(str(e)) archiver = CodeProjectArchiver if fmt == u""code"" else ProjectArchiver try: content = archiver(self.storage).archive(spiders, selector=selector) except IOError as e: raise JsonApiNotFoundError(str(e)) try: name = u""{}.zip"".format(self.project.name) except UnicodeEncodeError: name = str(self.project.id) return FileResponse(name, content, status=HTTP_200_OK)","def download ( self , * args , ** kwargs ) : fmt = self . query . get ( ""format"" , ""spec"" ) version = self . query . get ( ""version"" , None ) branch = self . query . get ( ""branch"" , None ) selector = self . query . get ( ""selector"" ) or ""css"" spider_id = self . kwargs . get ( ""spider_id"" , None ) spiders = [ spider_id ] if spider_id is not None else None try : self . project except InvalidFilename as e : raise JsonApiNotFoundError ( str ( e ) ) if hasattr ( self . storage , ""checkout"" ) and ( version or branch ) : try : version = self . commit_from_short_sha ( version ) . id self . storage . checkout ( version , branch ) except IOError : pass except ValueError as e : raise JsonApiNotFoundError ( str ( e ) ) archiver = CodeProjectArchiver if fmt == u""code"" else ProjectArchiver try : content = archiver ( self . storage ) . archive ( spiders , selector = selector ) except IOError as e : raise JsonApiNotFoundError ( str ( e ) ) try : name = u""{}.zip"" . format ( self . project . name ) except UnicodeEncodeError : name = str ( self . project . 
id ) return FileResponse ( name , content , status = HTTP_200_OK )",if version and len ( version ) < 40 :,365 13825,"def check_smtp_login(self): if self.urlwatch_config.smtp_login: config = self.urlwatcher.config_storage.config[""report""][""email""] smtp_config = config[""smtp""] success = True if not config[""enabled""]: print(""Please enable e-mail reporting in the config first."") success = False if config[""method""] != ""smtp"": print(""Please set the method to SMTP for the e-mail reporter."") success = False if not smtp_config.get(""auth"", smtp_config.get(""keyring"", False)): print(""Authentication must be enabled for SMTP."") success = False smtp_hostname = smtp_config[""host""] if not smtp_hostname: print(""Please configure the SMTP hostname in the config first."") success = False smtp_username = smtp_config.get(""user"", None) or config[""from""] if not smtp_username: print(""Please configure the SMTP user in the config first."") success = False if not success: sys.exit(1) if ""insecure_password"" in smtp_config: print( 'The password is already set in the config (key ""insecure_password"").' ) sys.exit(0) if have_password(smtp_hostname, smtp_username): message = ""Password for %s / %s already set, update? [y/N] "" % ( smtp_username, smtp_hostname, ) if input(message).lower() != ""y"": print(""Password unchanged."") sys.exit(0) if success: set_password(smtp_hostname, smtp_username) # TODO: Actually verify that the login to the server works sys.exit(0)","def check_smtp_login ( self ) : if self . urlwatch_config . smtp_login : config = self . urlwatcher . config_storage . config [ ""report"" ] [ ""email"" ] smtp_config = config [ ""smtp"" ] success = True if not config [ ""enabled"" ] : print ( ""Please enable e-mail reporting in the config first."" ) success = False if config [ ""method"" ] != ""smtp"" : print ( ""Please set the method to SMTP for the e-mail reporter."" ) success = False if not smtp_config . get ( ""auth"" , smtp_config . get ( ""keyring"" , False ) ) : print ( ""Authentication must be enabled for SMTP."" ) success = False smtp_hostname = smtp_config [ ""host"" ] if not smtp_hostname : print ( ""Please configure the SMTP hostname in the config first."" ) success = False smtp_username = smtp_config . get ( ""user"" , None ) or config [ ""from"" ] if not smtp_username : print ( ""Please configure the SMTP user in the config first."" ) success = False if not success : sys . exit ( 1 ) if ""insecure_password"" in smtp_config : print ( 'The password is already set in the config (key ""insecure_password"").' ) sys . exit ( 0 ) message = ""Password for %s / %s already set, update? [y/N] "" % ( smtp_username , smtp_hostname , ) if input ( message ) . lower ( ) != ""y"" : print ( ""Password unchanged."" ) sys . exit ( 0 ) if success : set_password ( smtp_hostname , smtp_username ) sys . exit ( 0 )","if have_password ( smtp_hostname , smtp_username ) :",495 8940,"def reverse_url(self, name: str, *args: Any) -> Optional[str]: if name in self.named_rules: return self.named_rules[name].matcher.reverse(*args) for rule in self.rules: if isinstance(rule.target, ReversibleRouter): reversed_url = rule.target.reverse_url(name, *args) if reversed_url is not None: return reversed_url return None ","def reverse_url ( self , name : str , * args : Any ) -> Optional [ str ] : if name in self . named_rules : return self . named_rules [ name ] . matcher . reverse ( * args ) for rule in self . rules : reversed_url = rule . target . 
reverse_url ( name , * args ) if reversed_url is not None : return reversed_url return None","if isinstance ( rule . target , ReversibleRouter ) :",121 23821,"def handle(self, *args, **options): if not settings.ST_BASE_DIR.endswith(""spirit""): raise CommandError( ""settings.ST_BASE_DIR is not the spirit root folder, are you overriding it?"" ) for root, dirs, files in os.walk(settings.ST_BASE_DIR): if ""locale"" not in dirs: continue with utils.pushd(root): call_command( ""makemessages"", stdout=self.stdout, stderr=self.stderr, **options ) self.stdout.write(""ok"") ","def handle ( self , * args , ** options ) : if not settings . ST_BASE_DIR . endswith ( ""spirit"" ) : raise CommandError ( ""settings.ST_BASE_DIR is not the spirit root folder, are you overriding it?"" ) for root , dirs , files in os . walk ( settings . ST_BASE_DIR ) : continue with utils . pushd ( root ) : call_command ( ""makemessages"" , stdout = self . stdout , stderr = self . stderr , ** options ) self . stdout . write ( ""ok"" )","if ""locale"" not in dirs :",160 10976,"def _declare(self, name, obj, included=False, quals=0): if name in self._declarations: prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return if not self._override: raise api.FFIError( ""multiple declarations of %s (for interactive usage, "" ""try cdef(xx, override=True))"" % (name,) ) assert ""__dotdotdot__"" not in name.split() self._declarations[name] = (obj, quals) if included: self._included_declarations.add(obj)","def _declare ( self , name , obj , included = False , quals = 0 ) : if name in self . _declarations : prevobj , prevquals = self . _declarations [ name ] if prevobj is obj and prevquals == quals : return raise api . FFIError ( ""multiple declarations of %s (for interactive usage, "" ""try cdef(xx, override=True))"" % ( name , ) ) assert ""__dotdotdot__"" not in name . split ( ) self . _declarations [ name ] = ( obj , quals ) if included : self . _included_declarations . add ( obj )",if not self . _override :,174 17930,"def EnumerateUsersFromClient(args): """"""Enumerates all the users on this system."""""" del args # Unused users = _ParseWtmp() for user, last_login in users.items(): # Lose the null termination username, _ = user.split(b""\x00"", 1) username = username.decode(""utf-8"") if username: # Somehow the last login time can be < 0. There is no documentation # what this means so we just set it to 0 (the rdfvalue field is # unsigned so we can't send negative values). if last_login < 0: last_login = 0 result = rdf_client.User(username=username, last_logon=last_login * 1000000) try: pwdict = pwd.getpwnam(username) result.homedir = utils.SmartUnicode(pwdict.pw_dir) result.full_name = utils.SmartUnicode(pwdict.pw_gecos) result.uid = pwdict.pw_uid result.gid = pwdict.pw_gid result.shell = utils.SmartUnicode(pwdict.pw_shell) except KeyError: pass yield result","def EnumerateUsersFromClient ( args ) : """"""Enumerates all the users on this system."""""" del args users = _ParseWtmp ( ) for user , last_login in users . items ( ) : username , _ = user . split ( b""\x00"" , 1 ) username = username . decode ( ""utf-8"" ) if last_login < 0 : last_login = 0 result = rdf_client . User ( username = username , last_logon = last_login * 1000000 ) try : pwdict = pwd . getpwnam ( username ) result . homedir = utils . SmartUnicode ( pwdict . pw_dir ) result . full_name = utils . SmartUnicode ( pwdict . pw_gecos ) result . uid = pwdict . pw_uid result . gid = pwdict . pw_gid result . shell = utils . 
SmartUnicode ( pwdict . pw_shell ) except KeyError : pass yield result",if username :,341 9683,"def _get_vif_port(self, name): external_ids = self.db_get_map(""Interface"", name, ""external_ids"") if ""iface-id"" in external_ids and ""attached-mac"" in external_ids: return self._vifport(name, external_ids) elif ""xs-vif-uuid"" in external_ids and ""attached-mac"" in external_ids: # if this is a xenserver and iface-id is not automatically # synced to OVS from XAPI, we grab it from XAPI directly ofport = self.db_get_val(""Interface"", name, ""ofport"") iface_id = self.get_xapi_iface_id(external_ids[""xs-vif-uuid""]) return VifPort(name, ofport, iface_id, external_ids[""attached-mac""], self) ","def _get_vif_port ( self , name ) : external_ids = self . db_get_map ( ""Interface"" , name , ""external_ids"" ) if ""iface-id"" in external_ids and ""attached-mac"" in external_ids : return self . _vifport ( name , external_ids ) elif ""xs-vif-uuid"" in external_ids and ""attached-mac"" in external_ids : ofport = self . db_get_val ( ""Interface"" , name , ""ofport"" ) return VifPort ( name , ofport , iface_id , external_ids [ ""attached-mac"" ] , self )","iface_id = self . get_xapi_iface_id ( external_ids [ ""xs-vif-uuid"" ] )",222 18053,"def checkpoint(vd): vd.undoPids.append(os.getpid()) pid = os.fork() if pid > 0: # parent, halt until undo before = time.time() pid, st = os.wait() if st == 42: # undo vd.scr.clear() vd.undoPids.remove(os.getpid()) raise Exception(""undid %ss"" % int(time.time() - before))","def checkpoint ( vd ) : vd . undoPids . append ( os . getpid ( ) ) pid = os . fork ( ) if pid > 0 : before = time . time ( ) pid , st = os . wait ( ) vd . scr . clear ( ) vd . undoPids . remove ( os . getpid ( ) ) raise Exception ( ""undid %ss"" % int ( time . time ( ) - before ) )",if st == 42 :,123 17455,"def prune(self, study: ""optuna.study.Study"", trial: ""optuna.trial.FrozenTrial"") -> bool: step = trial.last_step if step is None: return False rung = _get_current_rung(trial) value = trial.intermediate_values[step] trials: Optional[List[""optuna.trial.FrozenTrial""]] = None while True: if self._min_resource is None: if trials is None: trials = study.get_trials(deepcopy=False) self._min_resource = _estimate_min_resource(trials) if self._min_resource is None: return False assert self._min_resource is not None rung_promotion_step = self._min_resource * ( self._reduction_factor ** (self._min_early_stopping_rate + rung) ) if step < rung_promotion_step: return False if math.isnan(value): return True if trials is None: trials = study.get_trials(deepcopy=False) rung_key = _completed_rung_key(rung) study._storage.set_trial_system_attr(trial._trial_id, rung_key, value) competing = _get_competing_values(trials, value, rung_key) # 'competing' already includes the current trial # Therefore, we need to use the '<=' operator here if len(competing) <= self._bootstrap_count: return True if not _is_trial_promotable_to_next_rung( value, competing, self._reduction_factor, study.direction, ): return True rung += 1","def prune ( self , study : ""optuna.study.Study"" , trial : ""optuna.trial.FrozenTrial"" ) -> bool : step = trial . last_step if step is None : return False rung = _get_current_rung ( trial ) value = trial . intermediate_values [ step ] trials : Optional [ List [ ""optuna.trial.FrozenTrial"" ] ] = None while True : if self . _min_resource is None : trials = study . get_trials ( deepcopy = False ) self . _min_resource = _estimate_min_resource ( trials ) if self . 
_min_resource is None : return False assert self . _min_resource is not None rung_promotion_step = self . _min_resource * ( self . _reduction_factor ** ( self . _min_early_stopping_rate + rung ) ) if step < rung_promotion_step : return False if math . isnan ( value ) : return True trials = study . get_trials ( deepcopy = False ) rung_key = _completed_rung_key ( rung ) study . _storage . set_trial_system_attr ( trial . _trial_id , rung_key , value ) competing = _get_competing_values ( trials , value , rung_key ) if len ( competing ) <= self . _bootstrap_count : return True if not _is_trial_promotable_to_next_rung ( value , competing , self . _reduction_factor , study . direction , ) : return True rung += 1",if trials is None :,495 15545,"def __call__(self, loss, sess): if self.sign * loss > self.sign * self.best: if FLAGS.log_progress: tf.logging.info(""Previous best %s: %.4f."", self.label, self.best) tf.gfile.MakeDirs(os.path.dirname(self.save_path)) self.saver.save(sess, self.save_path) tf.logging.info( ""Storing best model so far with loss %.4f at %s."" % (loss, self.save_path) ) self.best = loss self.age = 0 self.true_age = 0 else: self.age += 1 self.true_age += 1 if self.age > self.patience: sess.run([self.decay_op]) self.age = 0","def __call__ ( self , loss , sess ) : if self . sign * loss > self . sign * self . best : tf . logging . info ( ""Previous best %s: %.4f."" , self . label , self . best ) tf . gfile . MakeDirs ( os . path . dirname ( self . save_path ) ) self . saver . save ( sess , self . save_path ) tf . logging . info ( ""Storing best model so far with loss %.4f at %s."" % ( loss , self . save_path ) ) self . best = loss self . age = 0 self . true_age = 0 else : self . age += 1 self . true_age += 1 if self . age > self . patience : sess . run ( [ self . decay_op ] ) self . age = 0",if FLAGS . log_progress :,240 14837,"def filter( self, projects=None, tags=None, ignore_projects=None, ignore_tags=None, span=None, include_partial_frames=False, ): for frame in self._rows: if projects is not None and frame.project not in projects: continue if ignore_projects is not None and frame.project in ignore_projects: continue if tags is not None and not any(tag in frame.tags for tag in tags): continue if ignore_tags is not None and any(tag in frame.tags for tag in ignore_tags): continue if span is None: yield frame elif frame in span: yield frame elif include_partial_frames and span.overlaps(frame): # If requested, return the part of the frame that is within the # span, for frames that are *partially* within span or reaching # over span start = span.start if frame.start < span.start else frame.start stop = span.stop if frame.stop > span.stop else frame.stop yield frame._replace(start=start, stop=stop) ","def filter ( self , projects = None , tags = None , ignore_projects = None , ignore_tags = None , span = None , include_partial_frames = False , ) : for frame in self . _rows : continue if ignore_projects is not None and frame . project in ignore_projects : continue if tags is not None and not any ( tag in frame . tags for tag in tags ) : continue if ignore_tags is not None and any ( tag in frame . tags for tag in ignore_tags ) : continue if span is None : yield frame elif frame in span : yield frame elif include_partial_frames and span . overlaps ( frame ) : start = span . start if frame . start < span . start else frame . start stop = span . stop if frame . stop > span . stop else frame . stop yield frame . _replace ( start = start , stop = stop )",if projects is not None and frame . 
project not in projects :,310 23971,"def parse(wb): rst, key = [], {} for ws in wb.worksheets: rst.append((ws.title, [])) for row in ws.rows: for cell in row: if not isinstance(cell.value, str): continue if cell.value[0] + cell.value[-1] != ""{}"": continue cont = cell.value[1:-1].strip() tp = cont.split("" "")[0] cont = cont[len(tp) :].strip() note, value = ""no description"", None if ""#"" in cont: note = cont.split(""#"")[-1].strip() cont = cont[: cont.index(""#"")].strip() if ""="" in cont: value = cont.split(""="")[1].strip() name = cont[: cont.index(""="")].strip() else: name = cont rst[-1][-1].append(((cell.row, cell.col_idx), [tp, name, value, note])) key[name] = [tp, name, value, note] return rst, key","def parse ( wb ) : rst , key = [ ] , { } for ws in wb . worksheets : rst . append ( ( ws . title , [ ] ) ) for row in ws . rows : for cell in row : if not isinstance ( cell . value , str ) : continue continue cont = cell . value [ 1 : - 1 ] . strip ( ) tp = cont . split ( "" "" ) [ 0 ] cont = cont [ len ( tp ) : ] . strip ( ) note , value = ""no description"" , None if ""#"" in cont : note = cont . split ( ""#"" ) [ - 1 ] . strip ( ) cont = cont [ : cont . index ( ""#"" ) ] . strip ( ) if ""="" in cont : value = cont . split ( ""="" ) [ 1 ] . strip ( ) name = cont [ : cont . index ( ""="" ) ] . strip ( ) else : name = cont rst [ - 1 ] [ - 1 ] . append ( ( ( cell . row , cell . col_idx ) , [ tp , name , value , note ] ) ) key [ name ] = [ tp , name , value , note ] return rst , key","if cell . value [ 0 ] + cell . value [ - 1 ] != ""{}"" :",327 1633,"def parse_flash_log(self, logf): """"""parse flash logs"""""" data = OrderedDict() samplelogs = self.split_log(logf[""f""]) for slog in samplelogs: try: sample = dict() ## Sample name ## s_name = self.clean_pe_name(slog, logf[""root""]) if s_name is None: continue sample[""s_name""] = s_name ## Log attributes ## sample[""totalpairs""] = self.get_field(""Total pairs"", slog) sample[""discardpairs""] = self.get_field(""Discarded pairs"", slog) sample[""percdiscard""] = self.get_field(""Percent Discarded"", slog, fl=True) sample[""combopairs""] = self.get_field(""Combined pairs"", slog) sample[""inniepairs""] = self.get_field(""Innie pairs"", slog) sample[""outiepairs""] = self.get_field(""Outie pairs"", slog) sample[""uncombopairs""] = self.get_field(""Uncombined pairs"", slog) sample[""perccombo""] = self.get_field(""Percent combined"", slog, fl=True) data[s_name] = sample except Exception as err: log.warning(""Error parsing record in {}. {}"".format(logf[""fn""], err)) log.debug(traceback.format_exc()) continue return data ","def parse_flash_log ( self , logf ) : """"""parse flash logs"""""" data = OrderedDict ( ) samplelogs = self . split_log ( logf [ ""f"" ] ) for slog in samplelogs : try : sample = dict ( ) s_name = self . clean_pe_name ( slog , logf [ ""root"" ] ) continue sample [ ""s_name"" ] = s_name sample [ ""totalpairs"" ] = self . get_field ( ""Total pairs"" , slog ) sample [ ""discardpairs"" ] = self . get_field ( ""Discarded pairs"" , slog ) sample [ ""percdiscard"" ] = self . get_field ( ""Percent Discarded"" , slog , fl = True ) sample [ ""combopairs"" ] = self . get_field ( ""Combined pairs"" , slog ) sample [ ""inniepairs"" ] = self . get_field ( ""Innie pairs"" , slog ) sample [ ""outiepairs"" ] = self . get_field ( ""Outie pairs"" , slog ) sample [ ""uncombopairs"" ] = self . get_field ( ""Uncombined pairs"" , slog ) sample [ ""perccombo"" ] = self . 
get_field ( ""Percent combined"" , slog , fl = True ) data [ s_name ] = sample except Exception as err : log . warning ( ""Error parsing record in {}. {}"" . format ( logf [ ""fn"" ] , err ) ) log . debug ( traceback . format_exc ( ) ) continue return data",if s_name is None :,404 4642,"def import_refs( self, base, other, committer=None, timestamp=None, timezone=None, message=None, prune=False, ): if prune: to_delete = set(self.subkeys(base)) else: to_delete = set() for name, value in other.items(): if value is None: to_delete.add(name) else: self.set_if_equals(b""/"".join((base, name)), None, value, message=message) if to_delete: try: to_delete.remove(name) except KeyError: pass for ref in to_delete: self.remove_if_equals(b""/"".join((base, ref)), None, message=message) ","def import_refs ( self , base , other , committer = None , timestamp = None , timezone = None , message = None , prune = False , ) : if prune : to_delete = set ( self . subkeys ( base ) ) else : to_delete = set ( ) for name , value in other . items ( ) : if value is None : to_delete . add ( name ) else : self . set_if_equals ( b""/"" . join ( ( base , name ) ) , None , value , message = message ) try : to_delete . remove ( name ) except KeyError : pass for ref in to_delete : self . remove_if_equals ( b""/"" . join ( ( base , ref ) ) , None , message = message )",if to_delete :,224 16456,"def remove(self, values): if not isinstance(values, (list, tuple, set)): values = [values] for v in values: v = str(v) if isinstance(self._definition, dict): self._definition.pop(v, None) elif self._definition == ""ANY"": if v == ""ANY"": self._definition = [] elif v in self._definition: self._definition.remove(v) if ( self._value is not None and self._value not in self._definition and self._not_any() ): raise ConanException(bad_value_msg(self._name, self._value, self.values_range)) ","def remove ( self , values ) : if not isinstance ( values , ( list , tuple , set ) ) : values = [ values ] for v in values : v = str ( v ) if isinstance ( self . _definition , dict ) : self . _definition . pop ( v , None ) if v == ""ANY"" : self . _definition = [ ] elif v in self . _definition : self . _definition . remove ( v ) if ( self . _value is not None and self . _value not in self . _definition and self . _not_any ( ) ) : raise ConanException ( bad_value_msg ( self . _name , self . _value , self . values_range ) )","elif self . _definition == ""ANY"" :",192 5864,"def taiga(request, trigger_id, key): signature = request.META.get(""HTTP_X_TAIGA_WEBHOOK_SIGNATURE"") # check that the data are ok with the provided signature if verify_signature(request._request.body, key, signature): data = data_filter(trigger_id, **request.data) status = save_data(trigger_id, data) return ( Response({""message"": ""Success""}) if status else Response({""message"": ""Failed!""}) ) Response({""message"": ""Bad request""}) ","def taiga ( request , trigger_id , key ) : signature = request . META . get ( ""HTTP_X_TAIGA_WEBHOOK_SIGNATURE"" ) if verify_signature ( request . _request . body , key , signature ) : data = data_filter ( trigger_id , ** request . data ) status = save_data ( trigger_id , data ) return ( Response ( { ""message"" : ""Success"" } ) else Response ( { ""message"" : ""Failed!"" } ) ) Response ( { ""message"" : ""Bad request"" } )",if status,149 2192,"def genLoopPackets(self): while True: # If we are in simulator mode, sleep first (as if we are gathering # observations). If we are in generator mode, don't sleep at all. 
if self.mode == ""simulator"": # Determine how long to sleep if self.real_time: # We are in real time mode. Try to keep synched up with the # wall clock sleep_time = self.the_time + self.loop_interval - time.time() if sleep_time > 0: time.sleep(sleep_time) else: # A start time was specified, so we are not in real time. # Just sleep the appropriate interval time.sleep(self.loop_interval) # Update the simulator clock: self.the_time += self.loop_interval # Because a packet represents the measurements observed over the # time interval, we want the measurement values at the middle # of the interval. avg_time = self.the_time - self.loop_interval / 2.0 _packet = {""dateTime"": int(self.the_time + 0.5), ""usUnits"": weewx.US} for obs_type in self.observations: _packet[obs_type] = self.observations[obs_type].value_at(avg_time) yield _packet","def genLoopPackets ( self ) : while True : if self . mode == ""simulator"" : if self . real_time : sleep_time = self . the_time + self . loop_interval - time . time ( ) time . sleep ( sleep_time ) else : time . sleep ( self . loop_interval ) self . the_time += self . loop_interval avg_time = self . the_time - self . loop_interval / 2.0 _packet = { ""dateTime"" : int ( self . the_time + 0.5 ) , ""usUnits"" : weewx . US } for obs_type in self . observations : _packet [ obs_type ] = self . observations [ obs_type ] . value_at ( avg_time ) yield _packet",if sleep_time > 0 :,380 15329,"def input_str( self, s, default_value=None, valid_list=None, show_default_value=True, help_message=None, ): if show_default_value and default_value is not None: s = f""[{default_value}] {s}"" if valid_list is not None or help_message is not None: s += "" ("" if valid_list is not None: s += "" "" + ""/"".join(valid_list) if help_message is not None: s += "" ?:help"" if valid_list is not None or help_message is not None: s += "" )"" s += "" : "" while True: try: inp = input(s) if len(inp) == 0: if default_value is None: print("""") return None result = default_value break if help_message is not None and inp == ""?"": print(help_message) continue if valid_list is not None: if inp.lower() in valid_list: result = inp.lower() break if inp in valid_list: result = inp break continue result = inp break except: result = default_value break print(result) return result","def input_str ( self , s , default_value = None , valid_list = None , show_default_value = True , help_message = None , ) : if show_default_value and default_value is not None : s = f""[{default_value}] {s}"" if valid_list is not None or help_message is not None : s += "" ("" if valid_list is not None : s += "" "" + ""/"" . join ( valid_list ) if help_message is not None : s += "" ?:help"" if valid_list is not None or help_message is not None : s += "" )"" s += "" : "" while True : try : inp = input ( s ) if len ( inp ) == 0 : if default_value is None : print ( """" ) return None result = default_value break if help_message is not None and inp == ""?"" : print ( help_message ) continue if valid_list is not None : result = inp . lower ( ) break if inp in valid_list : result = inp break continue result = inp break except : result = default_value break print ( result ) return result",if inp . lower ( ) in valid_list :,408 15403,"def split_version_and_name( major=None, # type: Optional[Union[str, int]] minor=None, # type: Optional[Union[str, int]] patch=None, # type: Optional[Union[str, int]] name=None, # type: Optional[str] ): # type: (...) 
-> Tuple[Optional[Union[str, int]], Optional[Union[str, int]], Optional[Union[str, int]], Optional[str]] # noqa if isinstance(major, six.string_types) and not minor and not patch: # Only proceed if this is in the format ""x.y.z"" or similar if major.isdigit() or (major.count(""."") > 0 and major[0].isdigit()): version = major.split(""."", 2) if isinstance(version, (tuple, list)): if len(version) > 3: major, minor, patch, _ = version elif len(version) == 3: major, minor, patch = version elif len(version) == 2: major, minor = version else: major = major[0] else: major = major name = None else: name = ""{0!s}"".format(major) major = None return (major, minor, patch, name)","def split_version_and_name ( major = None , minor = None , patch = None , name = None , ) : if isinstance ( major , six . string_types ) and not minor and not patch : if major . isdigit ( ) or ( major . count ( ""."" ) > 0 and major [ 0 ] . isdigit ( ) ) : version = major . split ( ""."" , 2 ) if isinstance ( version , ( tuple , list ) ) : major , minor , patch , _ = version elif len ( version ) == 3 : major , minor , patch = version elif len ( version ) == 2 : major , minor = version else : major = major [ 0 ] else : major = major name = None else : name = ""{0!s}"" . format ( major ) major = None return ( major , minor , patch , name )",if len ( version ) > 3 :,367 3661,"def parse_workflow_directory(workflow_directory): parsed = { ""versions"": [], } # Verify that the directory exists. if not os.path.exists(workflow_directory): raise WorkflowError(""Workflow directory does not exist."") # Look for and parse the workflow manifest. workflow_files = os.listdir(workflow_directory) if ""workflow.json"" not in workflow_files: raise WorkflowError('No ""workflow.json"" manifest file found.') with open(os.path.join(workflow_directory, ""workflow.json""), ""r"") as f: parsed[""workflow""] = json.load(f) # Look for and parse workflow version subdirectories. workflow_subdirs = [ os.path.join(workflow_directory, workflow_file) for workflow_file in workflow_files if os.path.isdir(os.path.join(workflow_directory, workflow_file)) ] for version_directory in workflow_subdirs: version_files = os.listdir(version_directory) if ""version.json"" not in version_files: continue # Subdirectory wasn't a workflow version. with open(os.path.join(version_directory, ""version.json""), ""r"") as f: parsed[""versions""].append(json.load(f)) # Complain if the workflow has no versions. if len(parsed[""versions""]) == 0: raise WorkflowError( ""Workflow directory {} does not contain any "" ""versions"".format(workflow_directory) ) return parsed","def parse_workflow_directory ( workflow_directory ) : parsed = { ""versions"" : [ ] , } if not os . path . exists ( workflow_directory ) : raise WorkflowError ( ""Workflow directory does not exist."" ) workflow_files = os . listdir ( workflow_directory ) if ""workflow.json"" not in workflow_files : raise WorkflowError ( 'No ""workflow.json"" manifest file found.' ) with open ( os . path . join ( workflow_directory , ""workflow.json"" ) , ""r"" ) as f : parsed [ ""workflow"" ] = json . load ( f ) workflow_subdirs = [ os . path . join ( workflow_directory , workflow_file ) for workflow_file in workflow_files if os . path . isdir ( os . path . join ( workflow_directory , workflow_file ) ) ] for version_directory in workflow_subdirs : version_files = os . listdir ( version_directory ) continue with open ( os . path . join ( version_directory , ""version.json"" ) , ""r"" ) as f : parsed [ ""versions"" ] . 
append ( json . load ( f ) ) if len ( parsed [ ""versions"" ] ) == 0 : raise WorkflowError ( ""Workflow directory {} does not contain any "" ""versions"" . format ( workflow_directory ) ) return parsed","if ""version.json"" not in version_files :",399 15233,"def not_modssl_ifmodule(self, path): """"""Checks if the provided Augeas path has argument !mod_ssl"""""" if ""ifmodule"" not in path.lower(): return False # Trim the path to the last ifmodule workpath = path.lower() while workpath: # Get path to the last IfModule (ignore the tail) parts = workpath.rpartition(""ifmodule"") if not parts[0]: # IfModule not found break ifmod_path = parts[0] + parts[1] # Check if ifmodule had an index if parts[2].startswith(""[""): # Append the index from tail ifmod_path += parts[2].partition(""/"")[0] # Get the original path trimmed to correct length # This is required to preserve cases ifmod_real_path = path[0 : len(ifmod_path)] if ""!mod_ssl.c"" in self.get_all_args(ifmod_real_path): return True # Set the workpath to the heading part workpath = parts[0] return False","def not_modssl_ifmodule ( self , path ) : """"""Checks if the provided Augeas path has argument !mod_ssl"""""" if ""ifmodule"" not in path . lower ( ) : return False workpath = path . lower ( ) while workpath : parts = workpath . rpartition ( ""ifmodule"" ) break ifmod_path = parts [ 0 ] + parts [ 1 ] if parts [ 2 ] . startswith ( ""["" ) : ifmod_path += parts [ 2 ] . partition ( ""/"" ) [ 0 ] ifmod_real_path = path [ 0 : len ( ifmod_path ) ] if ""!mod_ssl.c"" in self . get_all_args ( ifmod_real_path ) : return True workpath = parts [ 0 ] return False",if not parts [ 0 ] :,296 1408,"def read_config_file(args): if os.path.isfile(args.config_file): try: with open(args.config_file) as f: config = json.load(f) for key, elem in config.items(): if key not in defaults_flag_in_config: logger.info( yellow( ""{} has an unknown key: {} : {}"".format( args.config_file, key, elem ) ) ) continue if getattr(args, key) == defaults_flag_in_config[key]: setattr(args, key, elem) except json.decoder.JSONDecodeError as e: logger.error( red( ""Impossible to read {}, please check the file {}"".format( args.config_file, e ) ) ) ","def read_config_file ( args ) : if os . path . isfile ( args . config_file ) : try : with open ( args . config_file ) as f : config = json . load ( f ) for key , elem in config . items ( ) : logger . info ( yellow ( ""{} has an unknown key: {} : {}"" . format ( args . config_file , key , elem ) ) ) continue if getattr ( args , key ) == defaults_flag_in_config [ key ] : setattr ( args , key , elem ) except json . decoder . JSONDecodeError as e : logger . error ( red ( ""Impossible to read {}, please check the file {}"" . format ( args . config_file , e ) ) )",if key not in defaults_flag_in_config :,302 15855,"def consume(self, data: Dict[str, Any]) -> None: # TODO: This is the only implementation with Dict cf Mapping; should we simplify? 
user_profile = get_user_profile_by_id(data[""user_id""]) logging.info( ""Processing signup for user %s in realm %s"", user_profile.id, user_profile.realm.string_id, ) if settings.MAILCHIMP_API_KEY and settings.PRODUCTION: endpoint = ""https://{}.api.mailchimp.com/3.0/lists/{}/members"".format( settings.MAILCHIMP_API_KEY.split(""-"")[1], settings.ZULIP_FRIENDS_LIST_ID, ) params = dict(data) del params[""user_id""] params[""list_id""] = settings.ZULIP_FRIENDS_LIST_ID params[""status""] = ""subscribed"" r = requests.post( endpoint, auth=(""apikey"", settings.MAILCHIMP_API_KEY), json=params, timeout=10, ) if r.status_code == 400 and orjson.loads(r.content)[""title""] == ""Member Exists"": logging.warning( ""Attempted to sign up already existing email to list: %s"", data[""email_address""], ) elif r.status_code == 400: retry_event(self.queue_name, data, lambda e: r.raise_for_status()) else: r.raise_for_status()","def consume ( self , data : Dict [ str , Any ] ) -> None : user_profile = get_user_profile_by_id ( data [ ""user_id"" ] ) logging . info ( ""Processing signup for user %s in realm %s"" , user_profile . id , user_profile . realm . string_id , ) if settings . MAILCHIMP_API_KEY and settings . PRODUCTION : endpoint = ""https://{}.api.mailchimp.com/3.0/lists/{}/members"" . format ( settings . MAILCHIMP_API_KEY . split ( ""-"" ) [ 1 ] , settings . ZULIP_FRIENDS_LIST_ID , ) params = dict ( data ) del params [ ""user_id"" ] params [ ""list_id"" ] = settings . ZULIP_FRIENDS_LIST_ID params [ ""status"" ] = ""subscribed"" r = requests . post ( endpoint , auth = ( ""apikey"" , settings . MAILCHIMP_API_KEY ) , json = params , timeout = 10 , ) if r . status_code == 400 and orjson . loads ( r . content ) [ ""title"" ] == ""Member Exists"" : logging . warning ( ""Attempted to sign up already existing email to list: %s"" , data [ ""email_address"" ] , ) retry_event ( self . queue_name , data , lambda e : r . raise_for_status ( ) ) else : r . raise_for_status ( )",elif r . status_code == 400 :,428 308,"def _read_model_arguments(argv, use_argparse=False): if use_argparse: parser = argparse.ArgumentParser() parser.add_argument( ""database"", metavar=""DATABASE"", type=str, default=""galaxy"", nargs=""?"", help=""database to target (galaxy, tool_shed, install)"", ) populate_config_args(parser) args = parser.parse_args(argv[1:] if argv else []) return args.config_file, args.config_section, args.database else: config_file = None for arg in [""-c"", ""--config"", ""--config-file""]: if arg in argv: pos = argv.index(arg) argv.pop(pos) config_file = argv.pop(pos) config_section = None if ""--config-section"" in argv: pos = argv.index(""--config-section"") argv.pop(pos) config_section = argv.pop(pos) if argv and (argv[-1] in DATABASE): database = argv.pop() # database name tool_shed, galaxy, or install. else: database = ""galaxy"" return config_file, config_section, database","def _read_model_arguments ( argv , use_argparse = False ) : if use_argparse : parser = argparse . ArgumentParser ( ) parser . add_argument ( ""database"" , metavar = ""DATABASE"" , type = str , default = ""galaxy"" , nargs = ""?"" , help = ""database to target (galaxy, tool_shed, install)"" , ) populate_config_args ( parser ) args = parser . parse_args ( argv [ 1 : ] if argv else [ ] ) return args . config_file , args . config_section , args . database else : config_file = None for arg in [ ""-c"" , ""--config"" , ""--config-file"" ] : if arg in argv : pos = argv . index ( arg ) argv . pop ( pos ) config_file = argv . 
pop ( pos ) config_section = None pos = argv . index ( ""--config-section"" ) argv . pop ( pos ) config_section = argv . pop ( pos ) if argv and ( argv [ - 1 ] in DATABASE ) : database = argv . pop ( ) else : database = ""galaxy"" return config_file , config_section , database","if ""--config-section"" in argv :",357 8856,"def seen_add(options): seen_name = options.add_value if is_imdb_url(seen_name): console(""IMDB url detected, try to parse ID"") imdb_id = extract_id(seen_name) if imdb_id: seen_name = imdb_id else: console(""Could not parse IMDB ID"") db.add(seen_name, ""cli_add"", {""cli_add"": seen_name}) console(""Added %s as seen. This will affect all tasks."" % seen_name) ","def seen_add ( options ) : seen_name = options . add_value if is_imdb_url ( seen_name ) : console ( ""IMDB url detected, try to parse ID"" ) imdb_id = extract_id ( seen_name ) seen_name = imdb_id else : console ( ""Could not parse IMDB ID"" ) db . add ( seen_name , ""cli_add"" , { ""cli_add"" : seen_name } ) console ( ""Added %s as seen. This will affect all tasks."" % seen_name )",if imdb_id :,144 24822,"def translate_apply(self, exp): pre = [] callable_pre, callable_value = self.translate(exp[0], False) pre.extend(callable_pre) args = [] keyword_args = [] keyword_arg_exps = [] arg_exps = exp[1:] for i, argexp in enumerate(arg_exps): if type(argexp) is Keyword: keyword_arg_exps = arg_exps[i:] arg_exps = arg_exps[:i] break for argexp in arg_exps: arg_pre, arg_value = self.translate(argexp, False) pre.extend(arg_pre) args.append(arg_value) for argKey, argExp in chunks(keyword_arg_exps, 2): if type(argKey) is not Keyword: raise MochiSyntaxError(argKey, self.filename) arg_pre, arg_value = self.translate(argExp, False) pre.extend(arg_pre) keyword_args.append(ast.keyword(arg=argKey.name, value=arg_value)) value = ast.Call( func=callable_value, args=args, keywords=keyword_args, starargs=None, kwargs=None, lineno=callable_value.lineno, col_offset=0, ) return pre, value","def translate_apply ( self , exp ) : pre = [ ] callable_pre , callable_value = self . translate ( exp [ 0 ] , False ) pre . extend ( callable_pre ) args = [ ] keyword_args = [ ] keyword_arg_exps = [ ] arg_exps = exp [ 1 : ] for i , argexp in enumerate ( arg_exps ) : keyword_arg_exps = arg_exps [ i : ] arg_exps = arg_exps [ : i ] break for argexp in arg_exps : arg_pre , arg_value = self . translate ( argexp , False ) pre . extend ( arg_pre ) args . append ( arg_value ) for argKey , argExp in chunks ( keyword_arg_exps , 2 ) : if type ( argKey ) is not Keyword : raise MochiSyntaxError ( argKey , self . filename ) arg_pre , arg_value = self . translate ( argExp , False ) pre . extend ( arg_pre ) keyword_args . append ( ast . keyword ( arg = argKey . name , value = arg_value ) ) value = ast . Call ( func = callable_value , args = args , keywords = keyword_args , starargs = None , kwargs = None , lineno = callable_value . 
lineno , col_offset = 0 , ) return pre , value",if type ( argexp ) is Keyword :,371 24812,"def parse_shoutcast1(url, timeout=5): """"""A Shoutcast object of raises ParseError"""""" root = get_root(url) shoutcast1_status = root + ""/7.html"" headers = {""User-Agent"": ""Mozilla/4.0""} try: r = requests.get( shoutcast1_status, headers=headers, timeout=timeout, stream=True ) if ""text"" not in r.headers.get(""content-type"", """"): raise ParseError r.content except (RequestException, socket.timeout): raise ParseError if r.status_code != 200: raise ParseError soup = BeautifulSoup(r.content) body = soup.find(""body"") if not body: raise ParseError status_line = body.string if status_line is None: raise ParseError try: current, status, peak, max_, unique, bitrate, songtitle = status_line.split( "","", 6 ) except ValueError: raise ParseError try: peak = str(int(peak)) current = str(int(current)) except ValueError: raise ParseError return Stream(root, current, peak)","def parse_shoutcast1 ( url , timeout = 5 ) : """"""A Shoutcast object of raises ParseError"""""" root = get_root ( url ) shoutcast1_status = root + ""/7.html"" headers = { ""User-Agent"" : ""Mozilla/4.0"" } try : r = requests . get ( shoutcast1_status , headers = headers , timeout = timeout , stream = True ) raise ParseError r . content except ( RequestException , socket . timeout ) : raise ParseError if r . status_code != 200 : raise ParseError soup = BeautifulSoup ( r . content ) body = soup . find ( ""body"" ) if not body : raise ParseError status_line = body . string if status_line is None : raise ParseError try : current , status , peak , max_ , unique , bitrate , songtitle = status_line . split ( "","" , 6 ) except ValueError : raise ParseError try : peak = str ( int ( peak ) ) current = str ( int ( current ) ) except ValueError : raise ParseError return Stream ( root , current , peak )","if ""text"" not in r . headers . get ( ""content-type"" , """" ) :",320 18894,"def reconnect_user(self, user_id, host_id, server_id): if host_id == settings.local.host_id: return if server_id and self.server.id != server_id: return for client in self.clients.find({""user_id"": user_id}): self.clients.update_id( client[""id""], { ""ignore_routes"": True, }, ) if len(client[""id""]) > 32: self.instance.disconnect_wg(client[""id""]) else: self.instance_com.client_kill(client[""id""])","def reconnect_user ( self , user_id , host_id , server_id ) : if host_id == settings . local . host_id : return if server_id and self . server . id != server_id : return for client in self . clients . find ( { ""user_id"" : user_id } ) : self . clients . update_id ( client [ ""id"" ] , { ""ignore_routes"" : True , } , ) self . instance . disconnect_wg ( client [ ""id"" ] ) else : self . instance_com . client_kill ( client [ ""id"" ] )","if len ( client [ ""id"" ] ) > 32 :",176 24057,"def __init__(self, *args, **decimals): self.amounts = OrderedDict( (currency, decimals.get(currency, Money.ZEROS[currency].amount)) for currency in CURRENCIES ) for arg in args: if isinstance(arg, Money): self.amounts[arg.currency] += arg.amount else: for m in arg: self.amounts[m.currency] += m.amount ","def __init__ ( self , * args , ** decimals ) : self . amounts = OrderedDict ( ( currency , decimals . get ( currency , Money . ZEROS [ currency ] . amount ) ) for currency in CURRENCIES ) for arg in args : self . amounts [ arg . currency ] += arg . amount else : for m in arg : self . amounts [ m . currency ] += m . 
amount","if isinstance ( arg , Money ) :",120 15800,"def _mask_forward_test(self, stage, x, bboxes, semantic_feat=None): """"""Mask head forward function for testing."""""" mask_roi_extractor = self.mask_roi_extractor[stage] mask_head = self.mask_head[stage] mask_rois = bbox2roi([bboxes]) mask_feats = mask_roi_extractor( x[: len(mask_roi_extractor.featmap_strides)], mask_rois ) if self.with_semantic and ""mask"" in self.semantic_fusion: mask_semantic_feat = self.semantic_roi_extractor([semantic_feat], mask_rois) if mask_semantic_feat.shape[-2:] != mask_feats.shape[-2:]: mask_semantic_feat = F.adaptive_avg_pool2d( mask_semantic_feat, mask_feats.shape[-2:] ) mask_feats += mask_semantic_feat if self.mask_info_flow: last_feat = None last_pred = None for i in range(stage): mask_pred, last_feat = self.mask_head[i](mask_feats, last_feat) if last_pred is not None: mask_pred = mask_pred + last_pred last_pred = mask_pred mask_pred = mask_head(mask_feats, last_feat, return_feat=False) if last_pred is not None: mask_pred = mask_pred + last_pred else: mask_pred = mask_head(mask_feats) return mask_pred","def _mask_forward_test ( self , stage , x , bboxes , semantic_feat = None ) : """"""Mask head forward function for testing."""""" mask_roi_extractor = self . mask_roi_extractor [ stage ] mask_head = self . mask_head [ stage ] mask_rois = bbox2roi ( [ bboxes ] ) mask_feats = mask_roi_extractor ( x [ : len ( mask_roi_extractor . featmap_strides ) ] , mask_rois ) if self . with_semantic and ""mask"" in self . semantic_fusion : mask_semantic_feat = self . semantic_roi_extractor ( [ semantic_feat ] , mask_rois ) if mask_semantic_feat . shape [ - 2 : ] != mask_feats . shape [ - 2 : ] : mask_semantic_feat = F . adaptive_avg_pool2d ( mask_semantic_feat , mask_feats . shape [ - 2 : ] ) mask_feats += mask_semantic_feat if self . mask_info_flow : last_feat = None last_pred = None for i in range ( stage ) : mask_pred , last_feat = self . mask_head [ i ] ( mask_feats , last_feat ) mask_pred = mask_pred + last_pred last_pred = mask_pred mask_pred = mask_head ( mask_feats , last_feat , return_feat = False ) mask_pred = mask_pred + last_pred else : mask_pred = mask_head ( mask_feats ) return mask_pred",if last_pred is not None :,427 17720,"def on_completed2(): doner[0] = True if not qr: if len(ql) > 0: observer.on_next(False) observer.on_completed() elif donel[0]: observer.on_next(True) observer.on_completed()",def on_completed2 ( ) : doner [ 0 ] = True if not qr : observer . on_next ( False ) observer . on_completed ( ) elif donel [ 0 ] : observer . on_next ( True ) observer . on_completed ( ),if len ( ql ) > 0 :,86 16984,"def modify_vpc_attribute(self): vpc_id = self._get_param(""VpcId"") for attribute in (""EnableDnsSupport"", ""EnableDnsHostnames""): if self.querystring.get(""%s.Value"" % attribute): attr_name = camelcase_to_underscores(attribute) attr_value = self.querystring.get(""%s.Value"" % attribute)[0] self.ec2_backend.modify_vpc_attribute(vpc_id, attr_name, attr_value) return MODIFY_VPC_ATTRIBUTE_RESPONSE","def modify_vpc_attribute ( self ) : vpc_id = self . _get_param ( ""VpcId"" ) for attribute in ( ""EnableDnsSupport"" , ""EnableDnsHostnames"" ) : attr_name = camelcase_to_underscores ( attribute ) attr_value = self . querystring . get ( ""%s.Value"" % attribute ) [ 0 ] self . ec2_backend . modify_vpc_attribute ( vpc_id , attr_name , attr_value ) return MODIFY_VPC_ATTRIBUTE_RESPONSE","if self . querystring . 
get ( ""%s.Value"" % attribute ) :",143 12045,"def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: self.set_socket_descriptor(d.getPrefixedString()) continue if tt == 26: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_server_address().TryMerge(tmp) continue if tt == 34: length = d.getVarInt32() tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) d.skip(length) self.mutable_proxy_external_ip().TryMerge(tmp) continue if tt == 0: raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt)","def TryMerge ( self , d ) : while d . avail ( ) > 0 : tt = d . getVarInt32 ( ) if tt == 10 : self . set_socket_descriptor ( d . getPrefixedString ( ) ) continue if tt == 26 : length = d . getVarInt32 ( ) tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) d . skip ( length ) self . mutable_server_address ( ) . TryMerge ( tmp ) continue length = d . getVarInt32 ( ) tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) d . skip ( length ) self . mutable_proxy_external_ip ( ) . TryMerge ( tmp ) continue if tt == 0 : raise ProtocolBuffer . ProtocolBufferDecodeError d . skipData ( tt )",if tt == 34 :,252 23807,"def is_accepted_drag_event(self, event): if event.source() == self.table: return True mime = event.mimeData() if mime.hasUrls(): for url in mime.urls(): # Only support local files. if not url.isLocalFile(): break # And only allow supported extensions. filename = url.toLocalFile() extension = os.path.splitext(filename)[1].lower()[1:] if extension not in _dictionary_formats(): break else: return True return False","def is_accepted_drag_event ( self , event ) : if event . source ( ) == self . table : return True mime = event . mimeData ( ) if mime . hasUrls ( ) : for url in mime . urls ( ) : break filename = url . toLocalFile ( ) extension = os . path . splitext ( filename ) [ 1 ] . lower ( ) [ 1 : ] if extension not in _dictionary_formats ( ) : break else : return True return False",if not url . isLocalFile ( ) :,163 4810,"def clean_new_hostname(self): old_ip = self.cleaned_data.get(""address"") new_hostname = self.cleaned_data[""new_hostname""] if not is_valid_hostname(new_hostname): raise forms.ValidationError(""Invalid hostname"") try: get_domain(new_hostname) except Domain.DoesNotExist: raise forms.ValidationError(""Invalid domain"") try: ipaddress = IPAddress.objects.get(hostname=new_hostname) except IPAddress.DoesNotExist: if find_addresses_for_hostname(new_hostname): raise forms.ValidationError(""Hostname already in DNS."") else: if ipaddress.device and not ipaddress.device.deleted: if not old_ip: raise forms.ValidationError(""Hostname in use."") device = Device.objects.get(ipaddress__address=old_ip) if ipaddress.device.id != device.id: raise forms.ValidationError( ""Hostname used by %s"" % device, ) elif Record.objects.filter(name=new_hostname).exists(): raise forms.ValidationError(""Hostname already in DNS."") return new_hostname ","def clean_new_hostname ( self ) : old_ip = self . cleaned_data . get ( ""address"" ) new_hostname = self . cleaned_data [ ""new_hostname"" ] if not is_valid_hostname ( new_hostname ) : raise forms . ValidationError ( ""Invalid hostname"" ) try : get_domain ( new_hostname ) except Domain . DoesNotExist : raise forms . ValidationError ( ""Invalid domain"" ) try : ipaddress = IPAddress . objects . get ( hostname = new_hostname ) except IPAddress . DoesNotExist : raise forms . 
ValidationError ( ""Hostname already in DNS."" ) else : if ipaddress . device and not ipaddress . device . deleted : if not old_ip : raise forms . ValidationError ( ""Hostname in use."" ) device = Device . objects . get ( ipaddress__address = old_ip ) if ipaddress . device . id != device . id : raise forms . ValidationError ( ""Hostname used by %s"" % device , ) elif Record . objects . filter ( name = new_hostname ) . exists ( ) : raise forms . ValidationError ( ""Hostname already in DNS."" ) return new_hostname",if find_addresses_for_hostname ( new_hostname ) :,308 18320,"def __getattr__(self, name): # Machine.__dict__ does not contain double underscore variables. # Class variables will be mangled. if name.startswith(""__""): raise AttributeError( ""'{}' does not exist on "".format(name, id(self)) ) # Could be a callback callback_type, target = self._identify_callback(name) if callback_type is not None: if callback_type in self.transition_cls.dynamic_methods: if target not in self.events: raise AttributeError( ""event '{}' is not registered on "".format( target, id(self) ) ) return partial(self.events[target].add_callback, callback_type) elif callback_type in self.state_cls.dynamic_methods: state = self.get_state(target) return partial(state.add_callback, callback_type[3:]) try: return self.__getattribute__(name) except AttributeError: # Nothing matched raise AttributeError( ""'{}' does not exist on "".format(name, id(self)) )","def __getattr__ ( self , name ) : if name . startswith ( ""__"" ) : raise AttributeError ( ""'{}' does not exist on "" . format ( name , id ( self ) ) ) callback_type , target = self . _identify_callback ( name ) if callback_type is not None : if callback_type in self . transition_cls . dynamic_methods : if target not in self . events : raise AttributeError ( ""event '{}' is not registered on "" . format ( target , id ( self ) ) ) return partial ( self . events [ target ] . add_callback , callback_type ) state = self . get_state ( target ) return partial ( state . add_callback , callback_type [ 3 : ] ) try : return self . __getattribute__ ( name ) except AttributeError : raise AttributeError ( ""'{}' does not exist on "" . format ( name , id ( self ) ) )",elif callback_type in self . state_cls . dynamic_methods :,317 19709,"def delete_user(self, uid): """"""Delete a user"""""" if not self.__user_exists(uid): raise exception.LDAPUserNotFound(user_id=uid) self.__remove_from_all(uid) if FLAGS.ldap_user_modify_only: # Delete attributes attr = [] # Retrieve user by name user = self.__get_ldap_user(uid) if ""secretKey"" in user.keys(): attr.append((self.ldap.MOD_DELETE, ""secretKey"", user[""secretKey""])) if ""accessKey"" in user.keys(): attr.append((self.ldap.MOD_DELETE, ""accessKey"", user[""accessKey""])) if LdapDriver.isadmin_attribute in user.keys(): attr.append( ( self.ldap.MOD_DELETE, LdapDriver.isadmin_attribute, user[LdapDriver.isadmin_attribute], ) ) self.conn.modify_s(self.__uid_to_dn(uid), attr) else: # Delete entry self.conn.delete_s(self.__uid_to_dn(uid)) ","def delete_user ( self , uid ) : """"""Delete a user"""""" if not self . __user_exists ( uid ) : raise exception . LDAPUserNotFound ( user_id = uid ) self . __remove_from_all ( uid ) if FLAGS . ldap_user_modify_only : attr = [ ] user = self . __get_ldap_user ( uid ) if ""secretKey"" in user . keys ( ) : attr . append ( ( self . ldap . MOD_DELETE , ""secretKey"" , user [ ""secretKey"" ] ) ) if ""accessKey"" in user . keys ( ) : attr . append ( ( self . ldap . 
MOD_DELETE , ""accessKey"" , user [ ""accessKey"" ] ) ) attr . append ( ( self . ldap . MOD_DELETE , LdapDriver . isadmin_attribute , user [ LdapDriver . isadmin_attribute ] , ) ) self . conn . modify_s ( self . __uid_to_dn ( uid ) , attr ) else : self . conn . delete_s ( self . __uid_to_dn ( uid ) )",if LdapDriver . isadmin_attribute in user . keys ( ) :,337 12544,"def setLabel(self, label): if label is None: if self.label is not None: self.label.scene().removeItem(self.label) self.label = None else: if self.label is None: self.label = TextItem() self.label.setParentItem(self) self.label.setText(label) self._updateLabel() ","def setLabel ( self , label ) : if label is None : self . label . scene ( ) . removeItem ( self . label ) self . label = None else : if self . label is None : self . label = TextItem ( ) self . label . setParentItem ( self ) self . label . setText ( label ) self . _updateLabel ( )",if self . label is not None :,112 10532,"def dispatch_return(self, frame, arg): if self.stop_here(frame) or frame == self.returnframe: # Ignore return events in generator except when stepping. if self.stopframe and frame.f_code.co_flags & CO_GENERATOR: return self.trace_dispatch try: self.frame_returning = frame self.user_return(frame, arg) finally: self.frame_returning = None if self.quitting: raise BdbQuit # The user issued a 'next' or 'until' command. if self.stopframe is frame and self.stoplineno != -1: self._set_stopinfo(None, None) return self.trace_dispatch","def dispatch_return ( self , frame , arg ) : if self . stop_here ( frame ) or frame == self . returnframe : return self . trace_dispatch try : self . frame_returning = frame self . user_return ( frame , arg ) finally : self . frame_returning = None if self . quitting : raise BdbQuit if self . stopframe is frame and self . stoplineno != - 1 : self . _set_stopinfo ( None , None ) return self . trace_dispatch",if self . stopframe and frame . f_code . co_flags & CO_GENERATOR :,199 13528,"def allow_hide_post(user_acl, target): if user_acl[""is_anonymous""]: raise PermissionDenied(_(""You have to sign in to hide posts."")) category_acl = user_acl[""categories""].get( target.category_id, {""can_hide_posts"": 0, ""can_hide_own_posts"": 0} ) if not category_acl[""can_hide_posts""]: if not category_acl[""can_hide_own_posts""]: raise PermissionDenied(_(""You can't hide posts in this category."")) if user_acl[""user_id""] != target.poster_id: raise PermissionDenied( _(""You can't hide other users posts in this category."") ) if target.is_protected and not category_acl[""can_protect_posts""]: raise PermissionDenied(_(""This post is protected. You can't hide it."")) if not has_time_to_edit_post(user_acl, target): message = ngettext( ""You can't hide posts that are older than %(minutes)s minute."", ""You can't hide posts that are older than %(minutes)s minutes."", category_acl[""post_edit_time""], ) raise PermissionDenied( message % {""minutes"": category_acl[""post_edit_time""]} ) if target.is_first_post: raise PermissionDenied(_(""You can't hide thread's first post."")) if not category_acl[""can_close_threads""]: if target.category.is_closed: raise PermissionDenied( _(""This category is closed. You can't hide posts in it."") ) if target.thread.is_closed: raise PermissionDenied( _(""This thread is closed. You can't hide posts in it."") )","def allow_hide_post ( user_acl , target ) : if user_acl [ ""is_anonymous"" ] : raise PermissionDenied ( _ ( ""You have to sign in to hide posts."" ) ) category_acl = user_acl [ ""categories"" ] . 
get ( target . category_id , { ""can_hide_posts"" : 0 , ""can_hide_own_posts"" : 0 } ) if not category_acl [ ""can_hide_posts"" ] : if not category_acl [ ""can_hide_own_posts"" ] : raise PermissionDenied ( _ ( ""You can't hide posts in this category."" ) ) raise PermissionDenied ( _ ( ""You can't hide other users posts in this category."" ) ) if target . is_protected and not category_acl [ ""can_protect_posts"" ] : raise PermissionDenied ( _ ( ""This post is protected. You can't hide it."" ) ) if not has_time_to_edit_post ( user_acl , target ) : message = ngettext ( ""You can't hide posts that are older than %(minutes)s minute."" , ""You can't hide posts that are older than %(minutes)s minutes."" , category_acl [ ""post_edit_time"" ] , ) raise PermissionDenied ( message % { ""minutes"" : category_acl [ ""post_edit_time"" ] } ) if target . is_first_post : raise PermissionDenied ( _ ( ""You can't hide thread's first post."" ) ) if not category_acl [ ""can_close_threads"" ] : if target . category . is_closed : raise PermissionDenied ( _ ( ""This category is closed. You can't hide posts in it."" ) ) if target . thread . is_closed : raise PermissionDenied ( _ ( ""This thread is closed. You can't hide posts in it."" ) )","if user_acl [ ""user_id"" ] != target . poster_id :",469 4172,"def test_dayoffsets(self): start = datetime.datetime(self.yr, self.mth, self.dy, 9) for date_string, expected_day_offset in [ (""Aujourd'hui"", 0), (""aujourd'hui"", 0), (""Demain"", 1), (""demain"", 1), (""Hier"", -1), (""hier"", -1), (""au jour de hui"", None), ]: got_dt, rc = self.cal.parseDT(date_string, start) if expected_day_offset is not None: self.assertEqual(rc, 1) target = start + datetime.timedelta(days=expected_day_offset) self.assertEqual(got_dt, target) else: self.assertEqual(rc, 0)","def test_dayoffsets ( self ) : start = datetime . datetime ( self . yr , self . mth , self . dy , 9 ) for date_string , expected_day_offset in [ ( ""Aujourd'hui"" , 0 ) , ( ""aujourd'hui"" , 0 ) , ( ""Demain"" , 1 ) , ( ""demain"" , 1 ) , ( ""Hier"" , - 1 ) , ( ""hier"" , - 1 ) , ( ""au jour de hui"" , None ) , ] : got_dt , rc = self . cal . parseDT ( date_string , start ) self . assertEqual ( rc , 1 ) target = start + datetime . timedelta ( days = expected_day_offset ) self . assertEqual ( got_dt , target ) else : self . assertEqual ( rc , 0 )",if expected_day_offset is not None :,225 20363,"def send_messages(self, messages): sent_messages = 0 for m in messages: payload = {} for opt, optval in { ""mattermost_icon_url"": ""icon_url"", ""mattermost_channel"": ""channel"", ""mattermost_username"": ""username"", }.items(): optvalue = getattr(self, opt) if optvalue is not None: payload[optval] = optvalue.strip() payload[""text""] = m.subject r = requests.post( ""{}"".format(m.recipients()[0]), data=json.dumps(payload), verify=(not self.mattermost_no_verify_ssl), ) if r.status_code >= 400: logger.error( smart_text( _(""Error sending notification mattermost: {}"").format(r.text) ) ) if not self.fail_silently: raise Exception( smart_text( _(""Error sending notification mattermost: {}"").format(r.text) ) ) sent_messages += 1 return sent_messages","def send_messages ( self , messages ) : sent_messages = 0 for m in messages : payload = { } for opt , optval in { ""mattermost_icon_url"" : ""icon_url"" , ""mattermost_channel"" : ""channel"" , ""mattermost_username"" : ""username"" , } . items ( ) : optvalue = getattr ( self , opt ) if optvalue is not None : payload [ optval ] = optvalue . strip ( ) payload [ ""text"" ] = m . 
subject r = requests . post ( ""{}"" . format ( m . recipients ( ) [ 0 ] ) , data = json . dumps ( payload ) , verify = ( not self . mattermost_no_verify_ssl ) , ) logger . error ( smart_text ( _ ( ""Error sending notification mattermost: {}"" ) . format ( r . text ) ) ) if not self . fail_silently : raise Exception ( smart_text ( _ ( ""Error sending notification mattermost: {}"" ) . format ( r . text ) ) ) sent_messages += 1 return sent_messages",if r . status_code >= 400 :,339 3299,"def get_top_level_stats(self): for func, (cc, nc, tt, ct, callers) in self.stats.items(): self.total_calls += nc self.prim_calls += cc self.total_tt += tt if (""jprofile"", 0, ""profiler"") in callers: self.top_level[func] = None if len(func_std_string(func)) > self.max_name_len: self.max_name_len = len(func_std_string(func))","def get_top_level_stats ( self ) : for func , ( cc , nc , tt , ct , callers ) in self . stats . items ( ) : self . total_calls += nc self . prim_calls += cc self . total_tt += tt if ( ""jprofile"" , 0 , ""profiler"" ) in callers : self . top_level [ func ] = None self . max_name_len = len ( func_std_string ( func ) )",if len ( func_std_string ( func ) ) > self . max_name_len :,141 5834,"def read(self, iprot): if ( iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None ): iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRUCT: self.status = TStatus() self.status.read(iprot) else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRUCT: self.schema = TTableSchema() self.schema.read(iprot) else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()","def read ( self , iprot ) : if ( iprot . _fast_decode is not None and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None ) : iprot . _fast_decode ( self , iprot , ( self . __class__ , self . thrift_spec ) ) return iprot . readStructBegin ( ) while True : ( fname , ftype , fid ) = iprot . readFieldBegin ( ) if ftype == TType . STOP : break if ftype == TType . STRUCT : self . status = TStatus ( ) self . status . read ( iprot ) else : iprot . skip ( ftype ) elif fid == 2 : if ftype == TType . STRUCT : self . schema = TTableSchema ( ) self . schema . read ( iprot ) else : iprot . skip ( ftype ) else : iprot . skip ( ftype ) iprot . readFieldEnd ( ) iprot . readStructEnd ( )",if fid == 1 :,322 11258,"def __init__(self, cga, *args) -> None: super().__init__(cga) self.einf = self.cga.einf # we use this alot if len(args) == 0: # generate random highest dimension flat nulls = [self.cga.null_vector() for k in range(self.layout.dims - 2)] self.mv = reduce(op, nulls + [self.einf]) elif len(args) == 1: # from existing multivector if isinstance(args[0], MultiVector): self.mv = args[0] # generate random flat for given dimension elif isinstance(args[0], int): dim = args[0] points = [self.cga.base_vector() for k in range(dim + 1)] points = list(map(self.cga.up, points)) self.mv = reduce(op, points + [self.einf]) # from vectors on flat else: nulls = map(self.cga.null_vector, args) if self.einf not in nulls: nulls = list(nulls) + [self.einf] self.mv = reduce(op, nulls) self.mv = self.mv.normal()","def __init__ ( self , cga , * args ) -> None : super ( ) . __init__ ( cga ) self . einf = self . cga . einf if len ( args ) == 0 : nulls = [ self . cga . 
null_vector ( ) for k in range ( self . layout . dims - 2 ) ] self . mv = reduce ( op , nulls + [ self . einf ] ) elif len ( args ) == 1 : self . mv = args [ 0 ] elif isinstance ( args [ 0 ] , int ) : dim = args [ 0 ] points = [ self . cga . base_vector ( ) for k in range ( dim + 1 ) ] points = list ( map ( self . cga . up , points ) ) self . mv = reduce ( op , points + [ self . einf ] ) else : nulls = map ( self . cga . null_vector , args ) if self . einf not in nulls : nulls = list ( nulls ) + [ self . einf ] self . mv = reduce ( op , nulls ) self . mv = self . mv . normal ( )","if isinstance ( args [ 0 ] , MultiVector ) :",339 5700,"def get_final_results(log_json_path, iter_num): result_dict = dict() with open(log_json_path, ""r"") as f: for line in f.readlines(): log_line = json.loads(line) if ""mode"" not in log_line.keys(): continue if log_line[""mode""] == ""train"" and log_line[""iter""] == iter_num: result_dict[""memory""] = log_line[""memory""] if log_line[""iter""] == iter_num: result_dict.update( {key: log_line[key] for key in RESULTS_LUT if key in log_line} ) return result_dict ","def get_final_results ( log_json_path , iter_num ) : result_dict = dict ( ) with open ( log_json_path , ""r"" ) as f : for line in f . readlines ( ) : log_line = json . loads ( line ) if ""mode"" not in log_line . keys ( ) : continue result_dict [ ""memory"" ] = log_line [ ""memory"" ] if log_line [ ""iter"" ] == iter_num : result_dict . update ( { key : log_line [ key ] for key in RESULTS_LUT if key in log_line } ) return result_dict","if log_line [ ""mode"" ] == ""train"" and log_line [ ""iter"" ] == iter_num :",204 24343,"def argument_action(self, text, loc, arg): """"""Code executed after recognising each of function's arguments"""""" exshared.setpos(loc, text) if DEBUG > 0: print(""ARGUMENT:"", arg.exp) if DEBUG == 2: self.symtab.display() if DEBUG > 2: return arg_ordinal = len(self.function_arguments) # check argument's type if not self.symtab.same_type_as_argument( arg.exp, self.function_call_index, arg_ordinal ): raise SemanticException( ""Incompatible type for argument %d in '%s'"" % (arg_ordinal + 1, self.symtab.get_name(self.function_call_index)) ) self.function_arguments.append(arg.exp)","def argument_action ( self , text , loc , arg ) : """"""Code executed after recognising each of function's arguments"""""" exshared . setpos ( loc , text ) if DEBUG > 0 : print ( ""ARGUMENT:"" , arg . exp ) if DEBUG == 2 : self . symtab . display ( ) return arg_ordinal = len ( self . function_arguments ) if not self . symtab . same_type_as_argument ( arg . exp , self . function_call_index , arg_ordinal ) : raise SemanticException ( ""Incompatible type for argument %d in '%s'"" % ( arg_ordinal + 1 , self . symtab . get_name ( self . function_call_index ) ) ) self . function_arguments . append ( arg . exp )",if DEBUG > 2 :,218 3697,"def reload(self): """"""Parse bindings and mangle into an appropriate form"""""" self._lookup = {} self._masks = 0 for action, bindings in self.keys.items(): if not isinstance(bindings, tuple): bindings = (bindings,) for binding in bindings: if not binding or binding == ""None"": continue try: keyval, mask = self._parsebinding(binding) # Does much the same, but with poorer error handling. 
# keyval, mask = Gtk.accelerator_parse(binding) except KeymapError as e: err( ""keybindings.reload failed to parse binding '%s': %s"" % (binding, e) ) else: if mask & Gdk.ModifierType.SHIFT_MASK: if keyval == Gdk.KEY_Tab: keyval = Gdk.KEY_ISO_Left_Tab mask &= ~Gdk.ModifierType.SHIFT_MASK else: keyvals = Gdk.keyval_convert_case(keyval) if keyvals[0] != keyvals[1]: keyval = keyvals[1] mask &= ~Gdk.ModifierType.SHIFT_MASK else: keyval = Gdk.keyval_to_lower(keyval) self._lookup.setdefault(mask, {}) self._lookup[mask][keyval] = action self._masks |= mask","def reload ( self ) : """"""Parse bindings and mangle into an appropriate form"""""" self . _lookup = { } self . _masks = 0 for action , bindings in self . keys . items ( ) : bindings = ( bindings , ) for binding in bindings : if not binding or binding == ""None"" : continue try : keyval , mask = self . _parsebinding ( binding ) except KeymapError as e : err ( ""keybindings.reload failed to parse binding '%s': %s"" % ( binding , e ) ) else : if mask & Gdk . ModifierType . SHIFT_MASK : if keyval == Gdk . KEY_Tab : keyval = Gdk . KEY_ISO_Left_Tab mask &= ~ Gdk . ModifierType . SHIFT_MASK else : keyvals = Gdk . keyval_convert_case ( keyval ) if keyvals [ 0 ] != keyvals [ 1 ] : keyval = keyvals [ 1 ] mask &= ~ Gdk . ModifierType . SHIFT_MASK else : keyval = Gdk . keyval_to_lower ( keyval ) self . _lookup . setdefault ( mask , { } ) self . _lookup [ mask ] [ keyval ] = action self . _masks |= mask","if not isinstance ( bindings , tuple ) :",423 19061,"def write(self): """"""Make a copy of the stored config and write it to the configured file"""""" new_config = ConfigObj(encoding=""UTF-8"") new_config.filename = self._config_file # first copy over everything from the old config, even if it is not # correctly defined to keep from losing data for key, subkeys in self._config.items(): if key not in new_config: new_config[key] = {} for subkey, value in subkeys.items(): new_config[key][subkey] = value # next make sure that everything we expect to have defined is so for key in _CONFIG_DEFINITIONS.keys(): key, definition_type, section, ini_key, default = self._define(key) self.check_setting(key) if section not in new_config: new_config[section] = {} new_config[section][ini_key] = self._config[section][ini_key] # Write it to file headphones.logger.info(""Writing configuration to file"") try: new_config.write() except IOError as e: headphones.logger.error(""Error writing configuration file: %s"", e)","def write ( self ) : """"""Make a copy of the stored config and write it to the configured file"""""" new_config = ConfigObj ( encoding = ""UTF-8"" ) new_config . filename = self . _config_file for key , subkeys in self . _config . items ( ) : if key not in new_config : new_config [ key ] = { } for subkey , value in subkeys . items ( ) : new_config [ key ] [ subkey ] = value for key in _CONFIG_DEFINITIONS . keys ( ) : key , definition_type , section , ini_key , default = self . _define ( key ) self . check_setting ( key ) new_config [ section ] = { } new_config [ section ] [ ini_key ] = self . _config [ section ] [ ini_key ] headphones . logger . info ( ""Writing configuration to file"" ) try : new_config . write ( ) except IOError as e : headphones . logger . 
error ( ""Error writing configuration file: %s"" , e )",if section not in new_config :,314 6414,"def __init__( self, endpoint: str, credential: MetricsAdvisorKeyCredential, **kwargs: Any ) -> None: try: if not endpoint.lower().startswith(""http""): endpoint = ""https://"" + endpoint except AttributeError: raise ValueError(""Base URL must be a string."") if not credential: raise ValueError(""Missing credential"") self._endpoint = endpoint if isinstance(credential, MetricsAdvisorKeyCredential): self._client = _ClientAsync( endpoint=endpoint, sdk_moniker=SDK_MONIKER, authentication_policy=MetricsAdvisorKeyCredentialPolicy(credential), **kwargs ) else: if hasattr(credential, ""get_token""): credential_scopes = kwargs.pop( ""credential_scopes"", [""https://cognitiveservices.azure.com/.default""] ) credential_policy = AsyncBearerTokenCredentialPolicy( credential, *credential_scopes ) else: raise TypeError( ""Please provide an instance from azure-identity "" ""or a class that implement the 'get_token protocol"" ) self._client = _ClientAsync( endpoint=endpoint, sdk_moniker=SDK_MONIKER, authentication_policy=credential_policy, **kwargs ) ","def __init__ ( self , endpoint : str , credential : MetricsAdvisorKeyCredential , ** kwargs : Any ) -> None : try : endpoint = ""https://"" + endpoint except AttributeError : raise ValueError ( ""Base URL must be a string."" ) if not credential : raise ValueError ( ""Missing credential"" ) self . _endpoint = endpoint if isinstance ( credential , MetricsAdvisorKeyCredential ) : self . _client = _ClientAsync ( endpoint = endpoint , sdk_moniker = SDK_MONIKER , authentication_policy = MetricsAdvisorKeyCredentialPolicy ( credential ) , ** kwargs ) else : if hasattr ( credential , ""get_token"" ) : credential_scopes = kwargs . pop ( ""credential_scopes"" , [ ""https://cognitiveservices.azure.com/.default"" ] ) credential_policy = AsyncBearerTokenCredentialPolicy ( credential , * credential_scopes ) else : raise TypeError ( ""Please provide an instance from azure-identity "" ""or a class that implement the 'get_token protocol"" ) self . _client = _ClientAsync ( endpoint = endpoint , sdk_moniker = SDK_MONIKER , authentication_policy = credential_policy , ** kwargs )","if not endpoint . lower ( ) . 
startswith ( ""http"" ) :",372 16624,"def _build_blocks_by_usage( ids: Sequence[FunctionID], *, level: int = 0, to: Optional[FunctionID] = None, origin: float = 0, visited: AbstractSet[Call] = frozenset(), parent_width: float = 0, ) -> None: factor = 1.0 if ids and to is not None: calls_tottime = sum(calls[fid, to][3] for fid in ids) if calls_tottime: factor = parent_width / calls_tottime for fid in sorted(ids): call = fid, to if to is not None: cc, nc, tt, tc = calls[call] # type: ignore ttt = tc * factor else: cc, nc, tt, tc = funcs[fid].stat ttt = tt * factor if ttt / maxw < threshold: origin += ttt continue tooltip = TOOLTIP.format(tt / maxw, cc, nc, tt, tc) block = Block( func=fid, call_stack=(), color=2 if level > 0 else not funcs[fid].calls, level=level, tooltip=tooltip, w=ttt, x=origin, ) usage_blocks.append(block) if call not in visited: _build_blocks_by_usage( funcs[fid].calledby, level=level + 1, to=fid, origin=origin, visited=visited | {call}, parent_width=ttt, ) origin += ttt","def _build_blocks_by_usage ( ids : Sequence [ FunctionID ] , * , level : int = 0 , to : Optional [ FunctionID ] = None , origin : float = 0 , visited : AbstractSet [ Call ] = frozenset ( ) , parent_width : float = 0 , ) -> None : factor = 1.0 if ids and to is not None : calls_tottime = sum ( calls [ fid , to ] [ 3 ] for fid in ids ) if calls_tottime : factor = parent_width / calls_tottime for fid in sorted ( ids ) : call = fid , to if to is not None : cc , nc , tt , tc = calls [ call ] ttt = tc * factor else : cc , nc , tt , tc = funcs [ fid ] . stat ttt = tt * factor origin += ttt continue tooltip = TOOLTIP . format ( tt / maxw , cc , nc , tt , tc ) block = Block ( func = fid , call_stack = ( ) , color = 2 if level > 0 else not funcs [ fid ] . calls , level = level , tooltip = tooltip , w = ttt , x = origin , ) usage_blocks . append ( block ) if call not in visited : _build_blocks_by_usage ( funcs [ fid ] . calledby , level = level + 1 , to = fid , origin = origin , visited = visited | { call } , parent_width = ttt , ) origin += ttt",if ttt / maxw < threshold :,464 23439,"def _map_saslprep(s): """"""Map stringprep table B.1 to nothing and C.1.2 to ASCII space"""""" r = [] for c in s: if stringprep.in_table_c12(c): r.append("" "") elif not stringprep.in_table_b1(c): r.append(c) return """".join(r)","def _map_saslprep ( s ) : """"""Map stringprep table B.1 to nothing and C.1.2 to ASCII space"""""" r = [ ] for c in s : r . append ( "" "" ) elif not stringprep . in_table_b1 ( c ) : r . append ( c ) return """" . join ( r )",if stringprep . 
in_table_c12 ( c ) :,106 9912,"def _del_port(self, ctx, br_name=None, target=None, must_exist=False, with_iface=False): assert target is not None ctx.populate_cache() if not with_iface: vsctl_port = ctx.find_port(target, must_exist) else: vsctl_port = ctx.find_port(target, False) if not vsctl_port: vsctl_iface = ctx.find_iface(target, False) if vsctl_iface: vsctl_port = vsctl_iface.port() if must_exist and not vsctl_port: vsctl_fatal(""no port or interface named %s"" % target) if not vsctl_port: return if not br_name: vsctl_bridge = ctx.find_bridge(br_name, True) if vsctl_port.bridge() != vsctl_bridge: if vsctl_port.bridge().parent == vsctl_bridge: vsctl_fatal( ""bridge %s does not have a port %s (although "" ""its parent bridge %s does)"" % (br_name, target, vsctl_bridge.parent.name) ) else: vsctl_fatal(""bridge %s does not have a port %s"" % (br_name, target)) ctx.del_port(vsctl_port) ","def _del_port ( self , ctx , br_name = None , target = None , must_exist = False , with_iface = False ) : assert target is not None ctx . populate_cache ( ) if not with_iface : vsctl_port = ctx . find_port ( target , must_exist ) else : vsctl_port = ctx . find_port ( target , False ) if not vsctl_port : vsctl_iface = ctx . find_iface ( target , False ) if vsctl_iface : vsctl_port = vsctl_iface . port ( ) vsctl_fatal ( ""no port or interface named %s"" % target ) if not vsctl_port : return if not br_name : vsctl_bridge = ctx . find_bridge ( br_name , True ) if vsctl_port . bridge ( ) != vsctl_bridge : if vsctl_port . bridge ( ) . parent == vsctl_bridge : vsctl_fatal ( ""bridge %s does not have a port %s (although "" ""its parent bridge %s does)"" % ( br_name , target , vsctl_bridge . parent . name ) ) else : vsctl_fatal ( ""bridge %s does not have a port %s"" % ( br_name , target ) ) ctx . del_port ( vsctl_port )",if must_exist and not vsctl_port :,389 11887,"def reset(self): if self._on_memory: self._generation += 1 if self._shuffle and self._generation > 0: self._order = list(self._rng.permutation(self._size)) else: self._order = list(range(self._size)) if self._position == 0: self._generation = -1 else: self._data_source._position = self._position self._data_source.reset() else: self._data_source.reset() self._generation = self._data_source._generation self._position = self._data_source._position super(DataSourceWithMemoryCache, self).reset()","def reset ( self ) : if self . _on_memory : self . _generation += 1 self . _order = list ( self . _rng . permutation ( self . _size ) ) else : self . _order = list ( range ( self . _size ) ) if self . _position == 0 : self . _generation = - 1 else : self . _data_source . _position = self . _position self . _data_source . reset ( ) else : self . _data_source . reset ( ) self . _generation = self . _data_source . _generation self . _position = self . _data_source . _position super ( DataSourceWithMemoryCache , self ) . reset ( )",if self . _shuffle and self . _generation > 0 :,186 11132,"def _format_arg(self, name, trait_spec, value): if name == ""mask_file"": return """" if name == ""op_string"": if ""-k %s"" in self.inputs.op_string: if isdefined(self.inputs.mask_file): return self.inputs.op_string % self.inputs.mask_file else: raise ValueError(""-k %s option in op_string requires mask_file"") return super(ImageStats, self)._format_arg(name, trait_spec, value) ","def _format_arg ( self , name , trait_spec , value ) : if name == ""mask_file"" : return """" if name == ""op_string"" : if ""-k %s"" in self . inputs . op_string : return self . inputs . 
op_string % self . inputs . mask_file else : raise ValueError ( ""-k %s option in op_string requires mask_file"" ) return super ( ImageStats , self ) . _format_arg ( name , trait_spec , value )",if isdefined ( self . inputs . mask_file ) :,146 16003,"def _prepare_subset( full_data: torch.Tensor, full_targets: torch.Tensor, num_samples: int, digits: Sequence, ): classes = {d: 0 for d in digits} indexes = [] for idx, target in enumerate(full_targets): label = target.item() if classes.get(label, float(""inf"")) >= num_samples: continue indexes.append(idx) classes[label] += 1 if all(classes[k] >= num_samples for k in classes): break data = full_data[indexes] targets = full_targets[indexes] return data, targets","def _prepare_subset ( full_data : torch . Tensor , full_targets : torch . Tensor , num_samples : int , digits : Sequence , ) : classes = { d : 0 for d in digits } indexes = [ ] for idx , target in enumerate ( full_targets ) : label = target . item ( ) continue indexes . append ( idx ) classes [ label ] += 1 if all ( classes [ k ] >= num_samples for k in classes ) : break data = full_data [ indexes ] targets = full_targets [ indexes ] return data , targets","if classes . get ( label , float ( ""inf"" ) ) >= num_samples :",174 8988,"def apply(self, response): updated_headers = self.update_headers(response) if updated_headers: response.headers.update(updated_headers) warning_header_value = self.warning(response) if warning_header_value is not None: response.headers.update({""Warning"": warning_header_value}) return response ","def apply ( self , response ) : updated_headers = self . update_headers ( response ) if updated_headers : response . headers . update ( updated_headers ) warning_header_value = self . warning ( response ) response . headers . update ( { ""Warning"" : warning_header_value } ) return response",if warning_header_value is not None :,92 6554,"def dataset_to_stream(dataset, input_name): """"""Takes a tf.Dataset and creates a numpy stream of ready batches."""""" # All input-pipeline processing should be on CPU. for example in fastmath.dataset_as_numpy(dataset): features = example[0] inp, out = features[input_name], example[1] mask = features[""mask""] if ""mask"" in features else None # Some accelerators don't handle uint8 well, cast to int. if isinstance(inp, np.uint8): inp = inp.astype(np.int32) if isinstance(out, np.uint8): out = out.astype(np.int32) yield (inp, out) if mask is None else (inp, out, mask)","def dataset_to_stream ( dataset , input_name ) : """"""Takes a tf.Dataset and creates a numpy stream of ready batches."""""" for example in fastmath . dataset_as_numpy ( dataset ) : features = example [ 0 ] inp , out = features [ input_name ] , example [ 1 ] mask = features [ ""mask"" ] if ""mask"" in features else None if isinstance ( inp , np . uint8 ) : inp = inp . astype ( np . int32 ) out = out . astype ( np . int32 ) yield ( inp , out ) if mask is None else ( inp , out , mask )","if isinstance ( out , np . 
uint8 ) :",198 24341,"def numexp_action(self, text, loc, num): """"""Code executed after recognising a numexp expression (something +|- something)"""""" exshared.setpos(loc, text) if DEBUG > 0: print(""NUM_EXP:"", num) if DEBUG == 2: self.symtab.display() if DEBUG > 2: return # iterate through all additions/substractions n = list(num) while len(n) > 1: if not self.symtab.same_types(n[0], n[2]): raise SemanticException(""Invalid opernads to binary '%s'"" % n[1]) reg = self.codegen.arithmetic(n[1], n[0], n[2]) # replace first calculation with it's result n[0:3] = [reg] return n[0]","def numexp_action ( self , text , loc , num ) : """"""Code executed after recognising a numexp expression (something +|- something)"""""" exshared . setpos ( loc , text ) if DEBUG > 0 : print ( ""NUM_EXP:"" , num ) if DEBUG == 2 : self . symtab . display ( ) if DEBUG > 2 : return n = list ( num ) while len ( n ) > 1 : raise SemanticException ( ""Invalid opernads to binary '%s'"" % n [ 1 ] ) reg = self . codegen . arithmetic ( n [ 1 ] , n [ 0 ] , n [ 2 ] ) n [ 0 : 3 ] = [ reg ] return n [ 0 ]","if not self . symtab . same_types ( n [ 0 ] , n [ 2 ] ) :",223 8286,"def _analyze_callsite( self, caller_block_addr: int, rda: ReachingDefinitionsModel ) -> CallSiteFact: fact = CallSiteFact( True, # by default we treat all return values as used ) state = rda.observed_results[(""node"", caller_block_addr, 1)] all_uses: ""Uses"" = rda.all_uses default_cc_cls = DefaultCC.get(self.project.arch.name, None) if default_cc_cls is not None: default_cc: SimCC = default_cc_cls(self.project.arch) all_defs: Set[""Definition""] = state.register_definitions.get_all_variables() return_val = default_cc.RETURN_VAL if return_val is not None and isinstance(return_val, SimRegArg): return_reg_offset, _ = self.project.arch.registers[return_val.reg_name] # find the def of the return val try: return_def = next( iter( d for d in all_defs if isinstance(d.atom, Register) and d.atom.reg_offset == return_reg_offset ) ) except StopIteration: return_def = None if return_def is not None: # is it used? uses = all_uses.get_uses(return_def) if uses: # the return value is used! fact.return_value_used = True else: fact.return_value_used = False # TODO: Detect if arguments are used return fact","def _analyze_callsite ( self , caller_block_addr : int , rda : ReachingDefinitionsModel ) -> CallSiteFact : fact = CallSiteFact ( True , ) state = rda . observed_results [ ( ""node"" , caller_block_addr , 1 ) ] all_uses : ""Uses"" = rda . all_uses default_cc_cls = DefaultCC . get ( self . project . arch . name , None ) if default_cc_cls is not None : default_cc : SimCC = default_cc_cls ( self . project . arch ) all_defs : Set [ ""Definition"" ] = state . register_definitions . get_all_variables ( ) return_val = default_cc . RETURN_VAL if return_val is not None and isinstance ( return_val , SimRegArg ) : return_reg_offset , _ = self . project . arch . registers [ return_val . reg_name ] try : return_def = next ( iter ( d for d in all_defs if isinstance ( d . atom , Register ) and d . atom . reg_offset == return_reg_offset ) ) except StopIteration : return_def = None uses = all_uses . get_uses ( return_def ) if uses : fact . return_value_used = True else : fact . 
return_value_used = False return fact",if return_def is not None :,483 13842,"def handle_noargs(self, **options): global gdata_service try: from gdata import service gdata_service = service except ImportError: raise CommandError( ""You need to install the gdata "" ""module to run this command."" ) self.verbosity = int(options.get(""verbosity"", 1)) self.blogger_username = options.get(""blogger_username"") self.category_title = options.get(""category_title"") self.blogger_blog_id = options.get(""blogger_blog_id"") self.write_out( self.style.TITLE(""Starting migration from Blogger to Zinnia %s\n"" % __version__) ) if not self.blogger_username: self.blogger_username = raw_input(""Blogger username: "") if not self.blogger_username: raise CommandError(""Invalid Blogger username"") self.blogger_password = getpass(""Blogger password: "") try: self.blogger_manager = BloggerManager( self.blogger_username, self.blogger_password ) except gdata_service.BadAuthentication: raise CommandError(""Incorrect Blogger username or password"") default_author = options.get(""author"") if default_author: try: self.default_author = User.objects.get(username=default_author) except User.DoesNotExist: raise CommandError( 'Invalid Zinnia username for default author ""%s""' % default_author ) else: self.default_author = User.objects.all()[0] if not self.blogger_blog_id: self.select_blog_id() if not self.category_title: self.category_title = raw_input(""Category title for imported entries: "") if not self.category_title: raise CommandError(""Invalid category title"") self.import_posts()","def handle_noargs ( self , ** options ) : global gdata_service try : from gdata import service gdata_service = service except ImportError : raise CommandError ( ""You need to install the gdata "" ""module to run this command."" ) self . verbosity = int ( options . get ( ""verbosity"" , 1 ) ) self . blogger_username = options . get ( ""blogger_username"" ) self . category_title = options . get ( ""category_title"" ) self . blogger_blog_id = options . get ( ""blogger_blog_id"" ) self . write_out ( self . style . TITLE ( ""Starting migration from Blogger to Zinnia %s\n"" % __version__ ) ) self . blogger_username = raw_input ( ""Blogger username: "" ) raise CommandError ( ""Invalid Blogger username"" ) self . blogger_password = getpass ( ""Blogger password: "" ) try : self . blogger_manager = BloggerManager ( self . blogger_username , self . blogger_password ) except gdata_service . BadAuthentication : raise CommandError ( ""Incorrect Blogger username or password"" ) default_author = options . get ( ""author"" ) if default_author : try : self . default_author = User . objects . get ( username = default_author ) except User . DoesNotExist : raise CommandError ( 'Invalid Zinnia username for default author ""%s""' % default_author ) else : self . default_author = User . objects . all ( ) [ 0 ] if not self . blogger_blog_id : self . select_blog_id ( ) if not self . category_title : self . category_title = raw_input ( ""Category title for imported entries: "" ) if not self . category_title : raise CommandError ( ""Invalid category title"" ) self . import_posts ( )",if not self . 
blogger_username :,484 10048,"def nodes(self): if not self._nodes: nodes = self.cluster_group.instances() self._nodes = [] master = self.master_node nodeid = 1 for node in nodes: if node.state not in [""pending"", ""running""]: continue if node.id == master.id: self._nodes.insert(0, master) continue self._nodes.append(Node(node, self.key_location, ""node%.3d"" % nodeid)) nodeid += 1 else: for node in self._nodes: log.debug(""refreshing instance %s"" % node.id) node.update() return self._nodes","def nodes ( self ) : if not self . _nodes : nodes = self . cluster_group . instances ( ) self . _nodes = [ ] master = self . master_node nodeid = 1 for node in nodes : continue if node . id == master . id : self . _nodes . insert ( 0 , master ) continue self . _nodes . append ( Node ( node , self . key_location , ""node%.3d"" % nodeid ) ) nodeid += 1 else : for node in self . _nodes : log . debug ( ""refreshing instance %s"" % node . id ) node . update ( ) return self . _nodes","if node . state not in [ ""pending"" , ""running"" ] :",198 4610,"def set_ok_port(self, cookie, request): if cookie.port_specified: req_port = request_port(request) if req_port is None: req_port = ""80"" else: req_port = str(req_port) for p in cookie.port.split("",""): try: int(p) except ValueError: debug("" bad port %s (not numeric)"", p) return False if p == req_port: break else: debug("" request port (%s) not found in %s"", req_port, cookie.port) return False return True","def set_ok_port ( self , cookie , request ) : if cookie . port_specified : req_port = request_port ( request ) if req_port is None : req_port = ""80"" else : req_port = str ( req_port ) for p in cookie . port . split ( "","" ) : try : int ( p ) except ValueError : debug ( "" bad port %s (not numeric)"" , p ) return False break else : debug ( "" request port (%s) not found in %s"" , req_port , cookie . port ) return False return True",if p == req_port :,195 12643,"def _test_kneighbors_regressor( self, n_neighbors=5, algorithm=""brute"", weights=""uniform"", metric=""minkowski"", metric_params={""p"": 2}, score_w_train_data=False, ): for data in [datasets.load_boston(), datasets.load_diabetes()]: X, y = data.data, data.target X = X.astype(np.float32) if metric == ""wminkowski"": metric_params[""w""] = np.random.rand(X.shape[1]) elif metric == ""seuclidean"": metric_params[""V""] = np.random.rand(X.shape[1]) elif metric == ""mahalanobis"": V = np.cov(X.T) metric_params[""VI""] = np.linalg.inv(V) model = KNeighborsRegressor( n_neighbors=n_neighbors, algorithm=algorithm, weights=weights, metric=metric, metric_params=metric_params, ) n_train_rows = int(X.shape[0] * 0.6) model.fit(X[:n_train_rows, :], y[:n_train_rows]) if not score_w_train_data: X = X[n_train_rows:, :] extra_config = { hummingbird.ml.operator_converters.constants.BATCH_SIZE: X.shape[0] } torch_model = hummingbird.ml.convert(model, ""torch"", extra_config=extra_config) self.assertTrue(torch_model is not None) np.testing.assert_allclose( model.predict(X), torch_model.predict(X), rtol=1e-5, atol=1e-5 )","def _test_kneighbors_regressor ( self , n_neighbors = 5 , algorithm = ""brute"" , weights = ""uniform"" , metric = ""minkowski"" , metric_params = { ""p"" : 2 } , score_w_train_data = False , ) : for data in [ datasets . load_boston ( ) , datasets . load_diabetes ( ) ] : X , y = data . data , data . target X = X . astype ( np . float32 ) if metric == ""wminkowski"" : metric_params [ ""w"" ] = np . random . rand ( X . shape [ 1 ] ) elif metric == ""seuclidean"" : metric_params [ ""V"" ] = np . 
random . rand ( X . shape [ 1 ] ) elif metric == ""mahalanobis"" : V = np . cov ( X . T ) metric_params [ ""VI"" ] = np . linalg . inv ( V ) model = KNeighborsRegressor ( n_neighbors = n_neighbors , algorithm = algorithm , weights = weights , metric = metric , metric_params = metric_params , ) n_train_rows = int ( X . shape [ 0 ] * 0.6 ) model . fit ( X [ : n_train_rows , : ] , y [ : n_train_rows ] ) X = X [ n_train_rows : , : ] extra_config = { hummingbird . ml . operator_converters . constants . BATCH_SIZE : X . shape [ 0 ] } torch_model = hummingbird . ml . convert ( model , ""torch"" , extra_config = extra_config ) self . assertTrue ( torch_model is not None ) np . testing . assert_allclose ( model . predict ( X ) , torch_model . predict ( X ) , rtol = 1e-5 , atol = 1e-5 )",if not score_w_train_data :,480 18581,"def __call__( self, engine: Engine, logger: ClearMLLogger, event_name: Union[str, Events] ) -> None: if not isinstance(logger, ClearMLLogger): raise RuntimeError(""Handler 'GradsHistHandler' works only with ClearMLLogger"") global_step = engine.state.get_event_attrib_value(event_name) tag_prefix = f""{self.tag}/"" if self.tag else """" for name, p in self.model.named_parameters(): if p.grad is None: continue title_name, _, series_name = name.partition(""."") logger.grad_helper.add_histogram( title=f""{tag_prefix}grads_{title_name}"", series=series_name, step=global_step, hist_data=p.grad.detach().cpu().numpy(), ) ","def __call__ ( self , engine : Engine , logger : ClearMLLogger , event_name : Union [ str , Events ] ) -> None : if not isinstance ( logger , ClearMLLogger ) : raise RuntimeError ( ""Handler 'GradsHistHandler' works only with ClearMLLogger"" ) global_step = engine . state . get_event_attrib_value ( event_name ) tag_prefix = f""{self.tag}/"" if self . tag else """" for name , p in self . model . named_parameters ( ) : continue title_name , _ , series_name = name . partition ( ""."" ) logger . grad_helper . add_histogram ( title = f""{tag_prefix}grads_{title_name}"" , series = series_name , step = global_step , hist_data = p . grad . detach ( ) . cpu ( ) . numpy ( ) , )",if p . grad is None :,224 9130,"def extract(self): try: c = self.db.cursor() c.execute(""""""show global variables like 'max_connections';"""""") max = c.fetchone() c.execute(""""""show global status like 'Threads_connected';"""""") thread = c.fetchone() if thread[0] in self.vars: self.set2[thread[0]] = float(thread[1]) self.set2[""Threads""] = float(thread[1]) / float(max[1]) * 100.0 for name in self.vars: self.val[name] = self.set2[name] * 1.0 / elapsed if step == op.delay: self.set1.update(self.set2) except Exception as e: for name in self.vars: self.val[name] = -1","def extract ( self ) : try : c = self . db . cursor ( ) c . execute ( """"""show global variables like 'max_connections';"""""" ) max = c . fetchone ( ) c . execute ( """"""show global status like 'Threads_connected';"""""" ) thread = c . fetchone ( ) self . set2 [ thread [ 0 ] ] = float ( thread [ 1 ] ) self . set2 [ ""Threads"" ] = float ( thread [ 1 ] ) / float ( max [ 1 ] ) * 100.0 for name in self . vars : self . val [ name ] = self . set2 [ name ] * 1.0 / elapsed if step == op . delay : self . set1 . update ( self . set2 ) except Exception as e : for name in self . vars : self . val [ name ] = - 1",if thread [ 0 ] in self . 
vars :,225 24908,"def _setUpClass(cls): global solver import pyomo.environ from pyomo.solvers.tests.io.writer_test_cases import testCases for test_case in testCases: if ((test_case.name, test_case.io) in solver) and (test_case.available): solver[(test_case.name, test_case.io)] = True ","def _setUpClass ( cls ) : global solver import pyomo . environ from pyomo . solvers . tests . io . writer_test_cases import testCases for test_case in testCases : solver [ ( test_case . name , test_case . io ) ] = True","if ( ( test_case . name , test_case . io ) in solver ) and ( test_case . available ) :",101 11371,"def test_timestamp_overflow(self): # A modification timestamp larger than 2**32 should not be a problem # when importing a module (issue #11235). sys.path.insert(0, os.curdir) try: source = TESTFN + "".py"" if is_jython: compiled = TESTFN + ""$py.class"" else: compiled = source + (""c"" if __debug__ else ""o"") with open(source, ""w"") as f: pass try: os.utime(source, (2 ** 33 - 5, 2 ** 33 - 5)) except OverflowError: self.skipTest(""cannot set modification time to large integer"") except OSError as e: if e.errno != getattr(errno, ""EOVERFLOW"", None): raise self.skipTest( ""cannot set modification time to large integer ({})"".format(e) ) __import__(TESTFN) # The pyc file was created. os.stat(compiled) finally: del sys.path[0] remove_files(TESTFN)","def test_timestamp_overflow ( self ) : sys . path . insert ( 0 , os . curdir ) try : source = TESTFN + "".py"" compiled = TESTFN + ""$py.class"" else : compiled = source + ( ""c"" if __debug__ else ""o"" ) with open ( source , ""w"" ) as f : pass try : os . utime ( source , ( 2 ** 33 - 5 , 2 ** 33 - 5 ) ) except OverflowError : self . skipTest ( ""cannot set modification time to large integer"" ) except OSError as e : if e . errno != getattr ( errno , ""EOVERFLOW"" , None ) : raise self . skipTest ( ""cannot set modification time to large integer ({})"" . format ( e ) ) __import__ ( TESTFN ) os . stat ( compiled ) finally : del sys . path [ 0 ] remove_files ( TESTFN )",if is_jython :,297 10654,"def to_representation(self, value): old_social_string_fields = [""twitter"", ""github"", ""linkedIn""] request = self.context.get(""request"") show_old_format = ( request and is_deprecated(request.version, self.min_version) and request.method == ""GET"" ) if show_old_format: social = value.copy() for key in old_social_string_fields: if social.get(key): social[key] = value[key][0] elif social.get(key) == []: social[key] = """" value = social return super(SocialField, self).to_representation(value)","def to_representation ( self , value ) : old_social_string_fields = [ ""twitter"" , ""github"" , ""linkedIn"" ] request = self . context . get ( ""request"" ) show_old_format = ( request and is_deprecated ( request . version , self . min_version ) and request . method == ""GET"" ) if show_old_format : social = value . copy ( ) for key in old_social_string_fields : social [ key ] = value [ key ] [ 0 ] elif social . get ( key ) == [ ] : social [ key ] = """" value = social return super ( SocialField , self ) . to_representation ( value )",if social . 
get ( key ) :,200 16502,"def contribute(self, converter, model, form_class, inline_model): # Find property from target model to current model reverse_field = None info = self.get_info(inline_model) for field in get_meta_fields(info.model): field_type = type(field) if field_type == ForeignKeyField: if field.rel_model == model: reverse_field = field break else: raise Exception(""Cannot find reverse relation for model %s"" % info.model) # Remove reverse property from the list ignore = [reverse_field.name] if info.form_excluded_columns: exclude = ignore + info.form_excluded_columns else: exclude = ignore # Create field child_form = info.get_form() if child_form is None: child_form = model_form( info.model, base_class=form.BaseForm, only=info.form_columns, exclude=exclude, field_args=info.form_args, allow_pk=True, converter=converter, ) try: prop_name = reverse_field.related_name except AttributeError: prop_name = reverse_field.backref label = self.get_label(info, prop_name) setattr( form_class, prop_name, self.inline_field_list_type( child_form, info.model, reverse_field.name, info, label=label or info.model.__name__, ), ) return form_class ","def contribute ( self , converter , model , form_class , inline_model ) : reverse_field = None info = self . get_info ( inline_model ) for field in get_meta_fields ( info . model ) : field_type = type ( field ) if field_type == ForeignKeyField : reverse_field = field break else : raise Exception ( ""Cannot find reverse relation for model %s"" % info . model ) ignore = [ reverse_field . name ] if info . form_excluded_columns : exclude = ignore + info . form_excluded_columns else : exclude = ignore child_form = info . get_form ( ) if child_form is None : child_form = model_form ( info . model , base_class = form . BaseForm , only = info . form_columns , exclude = exclude , field_args = info . form_args , allow_pk = True , converter = converter , ) try : prop_name = reverse_field . related_name except AttributeError : prop_name = reverse_field . backref label = self . get_label ( info , prop_name ) setattr ( form_class , prop_name , self . inline_field_list_type ( child_form , info . model , reverse_field . name , info , label = label or info . model . __name__ , ) , ) return form_class",if field . rel_model == model :,450 11480,"def get_aa_from_codonre(re_aa): aas = [] m = 0 for i in re_aa: if i == ""["": m = -1 aas.append("""") elif i == ""]"": m = 0 continue elif m == -1: aas[-1] = aas[-1] + i elif m == 0: aas.append(i) return aas","def get_aa_from_codonre ( re_aa ) : aas = [ ] m = 0 for i in re_aa : if i == ""["" : m = - 1 aas . append ( """" ) m = 0 continue elif m == - 1 : aas [ - 1 ] = aas [ - 1 ] + i elif m == 0 : aas . append ( i ) return aas","elif i == ""]"" :",129 22697,"def _do_db_notes(self, params): """"""Adds notes to rows in the database"""""" table, params = self._parse_params(params) if not table: self._help_db_notes() return if table in self.get_tables(): # get rowid and note from parameters if params: arg, note = self._parse_params(params) rowids = self._parse_rowids(arg) # get rowid and note from interactive input else: try: # prompt user for data params = input(""rowid(s) (INT): "") rowids = self._parse_rowids(params) note = input(""note (TXT): "") except KeyboardInterrupt: print("""") return finally: # ensure proper output for resource scripts if Framework._script: print(f""{params}"") # delete record(s) from the database count = 0 for rowid in rowids: count += self.query( f""UPDATE `{table}` SET notes=? 
WHERE ROWID IS ?"", (note, rowid) ) self.output(f""{count} rows affected."") else: self.output(""Invalid table name."")","def _do_db_notes ( self , params ) : """"""Adds notes to rows in the database"""""" table , params = self . _parse_params ( params ) if not table : self . _help_db_notes ( ) return if table in self . get_tables ( ) : if params : arg , note = self . _parse_params ( params ) rowids = self . _parse_rowids ( arg ) else : try : params = input ( ""rowid(s) (INT): "" ) rowids = self . _parse_rowids ( params ) note = input ( ""note (TXT): "" ) except KeyboardInterrupt : print ( """" ) return finally : print ( f""{params}"" ) count = 0 for rowid in rowids : count += self . query ( f""UPDATE `{table}` SET notes=? WHERE ROWID IS ?"" , ( note , rowid ) ) self . output ( f""{count} rows affected."" ) else : self . output ( ""Invalid table name."" )",if Framework . _script :,355 5221,"def start_workunit(self, workunit): """"""Implementation of Reporter callback."""""" if self.is_under_background_root(workunit): return label_format = self._get_label_format(workunit) if label_format == LabelFormat.FULL: if not WorkUnitLabel.SUPPRESS_LABEL in workunit.labels: self._emit_indented_workunit_label(workunit) # Start output on a new line. tool_output_format = self._get_tool_output_format(workunit) if tool_output_format == ToolOutputFormat.INDENT: self.emit(self._prefix(workunit, ""\n"")) elif tool_output_format == ToolOutputFormat.UNINDENTED: self.emit(""\n"") elif label_format == LabelFormat.DOT: self.emit(""."") self.flush() ","def start_workunit ( self , workunit ) : """"""Implementation of Reporter callback."""""" if self . is_under_background_root ( workunit ) : return label_format = self . _get_label_format ( workunit ) if label_format == LabelFormat . FULL : if not WorkUnitLabel . SUPPRESS_LABEL in workunit . labels : self . _emit_indented_workunit_label ( workunit ) tool_output_format = self . _get_tool_output_format ( workunit ) self . emit ( self . _prefix ( workunit , ""\n"" ) ) elif tool_output_format == ToolOutputFormat . UNINDENTED : self . emit ( ""\n"" ) elif label_format == LabelFormat . DOT : self . emit ( ""."" ) self . flush ( )",if tool_output_format == ToolOutputFormat . INDENT :,227 2006,"def strip_dirs(self): oldstats = self.stats self.stats = newstats = {} max_name_len = 0 for func, (cc, nc, tt, ct, callers) in oldstats.items(): newfunc = func_strip_path(func) if len(func_std_string(newfunc)) > max_name_len: max_name_len = len(func_std_string(newfunc)) newcallers = {} for func2, caller in callers.items(): newcallers[func_strip_path(func2)] = caller if newfunc in newstats: newstats[newfunc] = add_func_stats( newstats[newfunc], (cc, nc, tt, ct, newcallers) ) else: newstats[newfunc] = (cc, nc, tt, ct, newcallers) old_top = self.top_level self.top_level = new_top = set() for func in old_top: new_top.add(func_strip_path(func)) self.max_name_len = max_name_len self.fcn_list = None self.all_callees = None return self","def strip_dirs ( self ) : oldstats = self . stats self . stats = newstats = { } max_name_len = 0 for func , ( cc , nc , tt , ct , callers ) in oldstats . items ( ) : newfunc = func_strip_path ( func ) if len ( func_std_string ( newfunc ) ) > max_name_len : max_name_len = len ( func_std_string ( newfunc ) ) newcallers = { } for func2 , caller in callers . 
items ( ) : newcallers [ func_strip_path ( func2 ) ] = caller newstats [ newfunc ] = add_func_stats ( newstats [ newfunc ] , ( cc , nc , tt , ct , newcallers ) ) else : newstats [ newfunc ] = ( cc , nc , tt , ct , newcallers ) old_top = self . top_level self . top_level = new_top = set ( ) for func in old_top : new_top . add ( func_strip_path ( func ) ) self . max_name_len = max_name_len self . fcn_list = None self . all_callees = None return self",if newfunc in newstats :,324 23781,"def _maybe_run_close_callback(self): # If there are pending callbacks, don't run the close callback # until they're done (see _maybe_add_error_handler) if self.closed() and self._pending_callbacks == 0: futures = [] if self._read_future is not None: futures.append(self._read_future) self._read_future = None if self._write_future is not None: futures.append(self._write_future) self._write_future = None if self._connect_future is not None: futures.append(self._connect_future) self._connect_future = None if self._ssl_connect_future is not None: futures.append(self._ssl_connect_future) self._ssl_connect_future = None for future in futures: future.set_exception(StreamClosedError(real_error=self.error)) if self._close_callback is not None: cb = self._close_callback self._close_callback = None self._run_callback(cb) # Delete any unfinished callbacks to break up reference cycles. self._read_callback = self._write_callback = None # Clear the buffers so they can be cleared immediately even # if the IOStream object is kept alive by a reference cycle. # TODO: Clear the read buffer too; it currently breaks some tests. self._write_buffer = None",def _maybe_run_close_callback ( self ) : if self . closed ( ) and self . _pending_callbacks == 0 : futures = [ ] if self . _read_future is not None : futures . append ( self . _read_future ) self . _read_future = None futures . append ( self . _write_future ) self . _write_future = None if self . _connect_future is not None : futures . append ( self . _connect_future ) self . _connect_future = None if self . _ssl_connect_future is not None : futures . append ( self . _ssl_connect_future ) self . _ssl_connect_future = None for future in futures : future . set_exception ( StreamClosedError ( real_error = self . error ) ) if self . _close_callback is not None : cb = self . _close_callback self . _close_callback = None self . _run_callback ( cb ) self . _read_callback = self . _write_callback = None self . _write_buffer = None,if self . 
_write_future is not None :,391 4411,"def route(tokeniser): ipmask = prefix(tokeniser) if ""rd"" in tokeniser.tokens or ""route-distinguisher"" in tokeniser.tokens: nlri = IPVPN(IP.toafi(ipmask.top()), SAFI.mpls_vpn, OUT.ANNOUNCE) elif ""label"" in tokeniser.tokens: nlri = Label(IP.toafi(ipmask.top()), SAFI.nlri_mpls, OUT.ANNOUNCE) else: nlri = INET(IP.toafi(ipmask.top()), IP.tosafi(ipmask.top()), OUT.ANNOUNCE) nlri.cidr = CIDR(ipmask.pack(), ipmask.mask) change = Change(nlri, Attributes()) while True: command = tokeniser() if not command: break if command == ""label"": nlri.labels = label(tokeniser) continue if command == ""rd"" or command == ""route-distinguisher"": nlri.rd = route_distinguisher(tokeniser) continue action = ParseStatic.action.get(command, """") if action == ""attribute-add"": change.attributes.add(ParseStatic.known[command](tokeniser)) elif action == ""nlri-set"": change.nlri.assign( ParseStatic.assign[command], ParseStatic.known[command](tokeniser) ) elif action == ""nexthop-and-attribute"": nexthop, attribute = ParseStatic.known[command](tokeniser) change.nlri.nexthop = nexthop change.attributes.add(attribute) else: raise ValueError('route: unknown command ""%s""' % command) return list(ParseStatic.split(change)) ","def route ( tokeniser ) : ipmask = prefix ( tokeniser ) if ""rd"" in tokeniser . tokens or ""route-distinguisher"" in tokeniser . tokens : nlri = IPVPN ( IP . toafi ( ipmask . top ( ) ) , SAFI . mpls_vpn , OUT . ANNOUNCE ) elif ""label"" in tokeniser . tokens : nlri = Label ( IP . toafi ( ipmask . top ( ) ) , SAFI . nlri_mpls , OUT . ANNOUNCE ) else : nlri = INET ( IP . toafi ( ipmask . top ( ) ) , IP . tosafi ( ipmask . top ( ) ) , OUT . ANNOUNCE ) nlri . cidr = CIDR ( ipmask . pack ( ) , ipmask . mask ) change = Change ( nlri , Attributes ( ) ) while True : command = tokeniser ( ) if not command : break nlri . labels = label ( tokeniser ) continue if command == ""rd"" or command == ""route-distinguisher"" : nlri . rd = route_distinguisher ( tokeniser ) continue action = ParseStatic . action . get ( command , """" ) if action == ""attribute-add"" : change . attributes . add ( ParseStatic . known [ command ] ( tokeniser ) ) elif action == ""nlri-set"" : change . nlri . assign ( ParseStatic . assign [ command ] , ParseStatic . known [ command ] ( tokeniser ) ) elif action == ""nexthop-and-attribute"" : nexthop , attribute = ParseStatic . known [ command ] ( tokeniser ) change . nlri . nexthop = nexthop change . attributes . add ( attribute ) else : raise ValueError ( 'route: unknown command ""%s""' % command ) return list ( ParseStatic . split ( change ) )","if command == ""label"" :",480 18738,"def _get_match_location(self, node, name=None): loc = source.Location(node.lineno, node.col_offset) if not name: return loc if isinstance(node, (self._ast.Import, self._ast.ImportFrom)): # Search for imported module names m = re.search(""[ ,]"" + name + r""\b"", self.source.line(node.lineno)) if m is not None: c, _ = m.span() return source.Location(node.lineno, c + 1) elif isinstance(node, self._ast.Attribute): attr_loc, _ = self.source.get_attr_location(name, loc) return attr_loc return loc","def _get_match_location ( self , node , name = None ) : loc = source . Location ( node . lineno , node . col_offset ) if not name : return loc if isinstance ( node , ( self . _ast . Import , self . _ast . ImportFrom ) ) : m = re . search ( ""[ ,]"" + name + r""\b"" , self . source . line ( node . lineno ) ) c , _ = m . span ( ) return source . Location ( node . 
lineno , c + 1 ) elif isinstance ( node , self . _ast . Attribute ) : attr_loc , _ = self . source . get_attr_location ( name , loc ) return attr_loc return loc",if m is not None :,188 19880,"def create_columns(self, treeview): # single column with icon + markup (dep. on init options) column = Gtk.TreeViewColumn("""") row_data_index = 1 # first element in the row is always the item itself for i, f in enumerate(self.fields): if f == IconTextRendererColumns.ICON: iconcell = Gtk.CellRendererPixbuf() iconcell.set_property(""width"", self.icon_size() + 10) column.set_cell_data_func(iconcell, self.icon, i) column.pack_start(iconcell, False) elif f == IconTextRendererColumns.TITLE: namecell = Gtk.CellRendererText() namecell.set_property(""ellipsize"", Pango.EllipsizeMode.END) column.pack_start(namecell, True) column.add_attribute(namecell, ""text"", row_data_index) elif f == IconTextRendererColumns.TITLE_SUBTITLE: namecell = Gtk.CellRendererText() namecell.set_property(""ellipsize"", Pango.EllipsizeMode.END) column.set_cell_data_func(namecell, self.markup, i) column.pack_start(namecell, True) row_data_index += 1 treeview.append_column(column)","def create_columns ( self , treeview ) : column = Gtk . TreeViewColumn ( """" ) row_data_index = 1 for i , f in enumerate ( self . fields ) : if f == IconTextRendererColumns . ICON : iconcell = Gtk . CellRendererPixbuf ( ) iconcell . set_property ( ""width"" , self . icon_size ( ) + 10 ) column . set_cell_data_func ( iconcell , self . icon , i ) column . pack_start ( iconcell , False ) namecell = Gtk . CellRendererText ( ) namecell . set_property ( ""ellipsize"" , Pango . EllipsizeMode . END ) column . pack_start ( namecell , True ) column . add_attribute ( namecell , ""text"" , row_data_index ) elif f == IconTextRendererColumns . TITLE_SUBTITLE : namecell = Gtk . CellRendererText ( ) namecell . set_property ( ""ellipsize"" , Pango . EllipsizeMode . END ) column . set_cell_data_func ( namecell , self . markup , i ) column . pack_start ( namecell , True ) row_data_index += 1 treeview . append_column ( column )",elif f == IconTextRendererColumns . TITLE :,343 5235,"def _create_tiny_git_repo(self, *, copy_files: Optional[Sequence[Path]] = None): with temporary_dir() as gitdir, temporary_dir() as worktree: # A tiny little fake git repo we will set up. initialize_repo() requires at least one file. Path(worktree, ""README"").touch() # The contextmanager interface is only necessary if an explicit gitdir is not provided. with initialize_repo(worktree, gitdir=gitdir) as git: if copy_files is not None: for fp in copy_files: new_fp = Path(worktree, fp) safe_mkdir_for(str(new_fp)) shutil.copy(fp, new_fp) yield git, worktree, gitdir ","def _create_tiny_git_repo ( self , * , copy_files : Optional [ Sequence [ Path ] ] = None ) : with temporary_dir ( ) as gitdir , temporary_dir ( ) as worktree : Path ( worktree , ""README"" ) . touch ( ) with initialize_repo ( worktree , gitdir = gitdir ) as git : for fp in copy_files : new_fp = Path ( worktree , fp ) safe_mkdir_for ( str ( new_fp ) ) shutil . copy ( fp , new_fp ) yield git , worktree , gitdir",if copy_files is not None :,210 10778,"def click_outside(event): if event not in d: x, y, z = self.blockFaceUnderCursor[0] if y == 0: y = 64 y += 3 gotoPanel.X, gotoPanel.Y, gotoPanel.Z = x, y, z if event.num_clicks == 2: d.dismiss(""Goto"")","def click_outside ( event ) : if event not in d : x , y , z = self . blockFaceUnderCursor [ 0 ] if y == 0 : y = 64 y += 3 gotoPanel . X , gotoPanel . Y , gotoPanel . Z = x , y , z d . 
dismiss ( ""Goto"" )",if event . num_clicks == 2 :,100 3094,"def get_doc_object(obj, what=None, doc=None, config={}): if what is None: if inspect.isclass(obj): what = ""class"" elif inspect.ismodule(obj): what = ""module"" elif callable(obj): what = ""function"" else: what = ""object"" if what == ""class"": return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc, config=config) elif what in (""function"", ""method""): return SphinxFunctionDoc(obj, doc=doc, config=config) else: if doc is None: doc = pydoc.getdoc(obj) return SphinxObjDoc(obj, doc, config=config) ","def get_doc_object ( obj , what = None , doc = None , config = { } ) : if what is None : if inspect . isclass ( obj ) : what = ""class"" elif inspect . ismodule ( obj ) : what = ""module"" what = ""function"" else : what = ""object"" if what == ""class"" : return SphinxClassDoc ( obj , func_doc = SphinxFunctionDoc , doc = doc , config = config ) elif what in ( ""function"" , ""method"" ) : return SphinxFunctionDoc ( obj , doc = doc , config = config ) else : if doc is None : doc = pydoc . getdoc ( obj ) return SphinxObjDoc ( obj , doc , config = config )",elif callable ( obj ) :,204 20196,"def _attempt_proof_app(self, current, context, agenda, accessible_vars, atoms, debug): f, args = current.uncurry() for i, arg in enumerate(args): if not TableauProver.is_atom(arg): ctx = f nv = Variable(""X%s"" % _counter.get()) for j, a in enumerate(args): ctx = ctx(VariableExpression(nv)) if i == j else ctx(a) if context: ctx = context(ctx).simplify() ctx = LambdaExpression(nv, ctx) agenda.put(arg, ctx) return self._attempt_proof(agenda, accessible_vars, atoms, debug + 1) raise Exception(""If this method is called, there must be a non-atomic argument"")","def _attempt_proof_app ( self , current , context , agenda , accessible_vars , atoms , debug ) : f , args = current . uncurry ( ) for i , arg in enumerate ( args ) : ctx = f nv = Variable ( ""X%s"" % _counter . get ( ) ) for j , a in enumerate ( args ) : ctx = ctx ( VariableExpression ( nv ) ) if i == j else ctx ( a ) if context : ctx = context ( ctx ) . simplify ( ) ctx = LambdaExpression ( nv , ctx ) agenda . put ( arg , ctx ) return self . _attempt_proof ( agenda , accessible_vars , atoms , debug + 1 ) raise Exception ( ""If this method is called, there must be a non-atomic argument"" )",if not TableauProver . is_atom ( arg ) :,206 17470,"def background_size(tokens): """"""Validation for ``background-size``."""""" if len(tokens) == 1: token = tokens[0] keyword = get_keyword(token) if keyword in (""contain"", ""cover""): return keyword if keyword == ""auto"": return (""auto"", ""auto"") length = get_length(token, negative=False, percentage=True) if length: return (length, ""auto"") elif len(tokens) == 2: values = [] for token in tokens: length = get_length(token, negative=False, percentage=True) if length: values.append(length) elif get_keyword(token) == ""auto"": values.append(""auto"") if len(values) == 2: return tuple(values)","def background_size ( tokens ) : """"""Validation for ``background-size``."""""" if len ( tokens ) == 1 : token = tokens [ 0 ] keyword = get_keyword ( token ) if keyword in ( ""contain"" , ""cover"" ) : return keyword if keyword == ""auto"" : return ( ""auto"" , ""auto"" ) length = get_length ( token , negative = False , percentage = True ) if length : return ( length , ""auto"" ) elif len ( tokens ) == 2 : values = [ ] for token in tokens : length = get_length ( token , negative = False , percentage = True ) if length : values . append ( length ) values . 
append ( ""auto"" ) if len ( values ) == 2 : return tuple ( values )","elif get_keyword ( token ) == ""auto"" :",230 12493,"def _extract_subtitles(url, subtitle_url): subtitles = {} if subtitle_url and isinstance(subtitle_url, compat_str): subtitle_url = urljoin(url, subtitle_url) STL_EXT = "".stl"" SRT_EXT = "".srt"" subtitles[""it""] = [ { ""ext"": ""stl"", ""url"": subtitle_url, } ] if subtitle_url.endswith(STL_EXT): srt_url = subtitle_url[: -len(STL_EXT)] + SRT_EXT subtitles[""it""].append( { ""ext"": ""srt"", ""url"": srt_url, } ) return subtitles ","def _extract_subtitles ( url , subtitle_url ) : subtitles = { } if subtitle_url and isinstance ( subtitle_url , compat_str ) : subtitle_url = urljoin ( url , subtitle_url ) STL_EXT = "".stl"" SRT_EXT = "".srt"" subtitles [ ""it"" ] = [ { ""ext"" : ""stl"" , ""url"" : subtitle_url , } ] srt_url = subtitle_url [ : - len ( STL_EXT ) ] + SRT_EXT subtitles [ ""it"" ] . append ( { ""ext"" : ""srt"" , ""url"" : srt_url , } ) return subtitles",if subtitle_url . endswith ( STL_EXT ) :,222 15936,"def do_status(self, directory, path): if path: try: return next( self._gitcmd( directory, ""status"", ""--porcelain"", ""--ignored"", ""--"", path ) )[:2] except StopIteration: return None else: wt_column = "" "" index_column = "" "" untracked_column = "" "" for line in self._gitcmd(directory, ""status"", ""--porcelain""): if line[0] == ""?"": untracked_column = ""U"" continue elif line[0] == ""!"": continue if line[0] != "" "": index_column = ""I"" if line[1] != "" "": wt_column = ""D"" r = wt_column + index_column + untracked_column return r if r != "" "" else None","def do_status ( self , directory , path ) : if path : try : return next ( self . _gitcmd ( directory , ""status"" , ""--porcelain"" , ""--ignored"" , ""--"" , path ) ) [ : 2 ] except StopIteration : return None else : wt_column = "" "" index_column = "" "" untracked_column = "" "" for line in self . _gitcmd ( directory , ""status"" , ""--porcelain"" ) : untracked_column = ""U"" continue elif line [ 0 ] == ""!"" : continue if line [ 0 ] != "" "" : index_column = ""I"" if line [ 1 ] != "" "" : wt_column = ""D"" r = wt_column + index_column + untracked_column return r if r != "" "" else None","if line [ 0 ] == ""?"" :",270 1475,"def save(self, filename=None, ignore_discard=False, ignore_expires=False): if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) f = open(filename, ""w"") try: f.write(self.header) now = time.time() for cookie in self: if not ignore_discard and cookie.discard: continue if not ignore_expires and cookie.is_expired(now): continue if cookie.secure: secure = ""TRUE"" else: secure = ""FALSE"" if cookie.domain.startswith("".""): initial_dot = ""TRUE"" else: initial_dot = ""FALSE"" if cookie.expires is not None: expires = str(cookie.expires) else: expires = """" if cookie.value is None: # cookies.txt regards 'Set-Cookie: foo' as a cookie # with no name, whereas http.cookiejar regards it as a # cookie with no value. name = """" value = cookie.name else: name = cookie.name value = cookie.value f.write( ""\t"".join( [ cookie.domain, initial_dot, cookie.path, secure, expires, name, value, ] ) + ""\n"" ) finally: f.close() ","def save ( self , filename = None , ignore_discard = False , ignore_expires = False ) : if filename is None : if self . filename is not None : filename = self . filename else : raise ValueError ( MISSING_FILENAME_TEXT ) f = open ( filename , ""w"" ) try : f . write ( self . 
header ) now = time . time ( ) for cookie in self : if not ignore_discard and cookie . discard : continue if not ignore_expires and cookie . is_expired ( now ) : continue if cookie . secure : secure = ""TRUE"" else : secure = ""FALSE"" if cookie . domain . startswith ( ""."" ) : initial_dot = ""TRUE"" else : initial_dot = ""FALSE"" if cookie . expires is not None : expires = str ( cookie . expires ) else : expires = """" name = """" value = cookie . name else : name = cookie . name value = cookie . value f . write ( ""\t"" . join ( [ cookie . domain , initial_dot , cookie . path , secure , expires , name , value , ] ) + ""\n"" ) finally : f . close ( )",if cookie . value is None :,507 970,"def check_metadata_equal(df1, df2): # Checks to make sure metadata for df1 and df2 are equal. for attr in df1._metadata: if attr == ""_recommendation"": x = df1._recommendation y = df2._recommendation for key in x: if key in y: assert len(x[key]) == len(y[key]) for i in range(len(x[key])): vis1 = x[key][i] vis2 = y[key][i] compare_vis(vis1, vis2) elif attr == ""_rec_info"": x = df1._rec_info y = df2._rec_info assert len(x) == len(y) for i in range(len(x)): x_info, y_info = x[i], y[i] for key in x_info: if key in y_info and key == ""collection"": assert len(x_info[key]) == len(y_info[key]) for i in range(len(x_info[key])): vis1 = x_info[key][i] vis2 = y_info[key][i] compare_vis(vis1, vis2) elif key in y_info: assert x_info[key] == y_info[key] elif attr != ""_widget"" and attr != ""_sampled"" and attr != ""_message"": assert getattr(df1, attr) == getattr(df2, attr)","def check_metadata_equal ( df1 , df2 ) : for attr in df1 . _metadata : if attr == ""_recommendation"" : x = df1 . _recommendation y = df2 . _recommendation for key in x : if key in y : assert len ( x [ key ] ) == len ( y [ key ] ) for i in range ( len ( x [ key ] ) ) : vis1 = x [ key ] [ i ] vis2 = y [ key ] [ i ] compare_vis ( vis1 , vis2 ) x = df1 . _rec_info y = df2 . 
_rec_info assert len ( x ) == len ( y ) for i in range ( len ( x ) ) : x_info , y_info = x [ i ] , y [ i ] for key in x_info : if key in y_info and key == ""collection"" : assert len ( x_info [ key ] ) == len ( y_info [ key ] ) for i in range ( len ( x_info [ key ] ) ) : vis1 = x_info [ key ] [ i ] vis2 = y_info [ key ] [ i ] compare_vis ( vis1 , vis2 ) elif key in y_info : assert x_info [ key ] == y_info [ key ] elif attr != ""_widget"" and attr != ""_sampled"" and attr != ""_message"" : assert getattr ( df1 , attr ) == getattr ( df2 , attr )","elif attr == ""_rec_info"" :",464 23280,"def compute_most_posted(server, message): module, num, keyword, paste_date = message.split("";"") redis_progression_name_set = ""top_"" + module + ""_set_"" + paste_date # Add/Update in Redis server.hincrby(paste_date, module + ""-"" + keyword, int(num)) # Compute Most Posted date = get_date_range(0)[0] # check if this keyword is eligible for progression keyword_total_sum = 0 curr_value = server.hget(date, module + ""-"" + keyword) keyword_total_sum += int(curr_value) if curr_value is not None else 0 if server.zcard(redis_progression_name_set) < max_set_cardinality: server.zadd(redis_progression_name_set, float(keyword_total_sum), keyword) else: # not in set member_set = server.zrangebyscore( redis_progression_name_set, ""-inf"", ""+inf"", withscores=True, start=0, num=1 ) # Member set is a list of (value, score) pairs if int(member_set[0][1]) < keyword_total_sum: # remove min from set and add the new one print( module + "": adding "" + keyword + ""("" + str(keyword_total_sum) + "") in set and removing "" + member_set[0][0] + ""("" + str(member_set[0][1]) + "")"" ) server.zrem(redis_progression_name_set, member_set[0][0]) server.zadd(redis_progression_name_set, float(keyword_total_sum), keyword) print(redis_progression_name_set)","def compute_most_posted ( server , message ) : module , num , keyword , paste_date = message . split ( "";"" ) redis_progression_name_set = ""top_"" + module + ""_set_"" + paste_date server . hincrby ( paste_date , module + ""-"" + keyword , int ( num ) ) date = get_date_range ( 0 ) [ 0 ] keyword_total_sum = 0 curr_value = server . hget ( date , module + ""-"" + keyword ) keyword_total_sum += int ( curr_value ) if curr_value is not None else 0 if server . zcard ( redis_progression_name_set ) < max_set_cardinality : server . zadd ( redis_progression_name_set , float ( keyword_total_sum ) , keyword ) else : member_set = server . zrangebyscore ( redis_progression_name_set , ""-inf"" , ""+inf"" , withscores = True , start = 0 , num = 1 ) print ( module + "": adding "" + keyword + ""("" + str ( keyword_total_sum ) + "") in set and removing "" + member_set [ 0 ] [ 0 ] + ""("" + str ( member_set [ 0 ] [ 1 ] ) + "")"" ) server . zrem ( redis_progression_name_set , member_set [ 0 ] [ 0 ] ) server . 
zadd ( redis_progression_name_set , float ( keyword_total_sum ) , keyword ) print ( redis_progression_name_set )",if int ( member_set [ 0 ] [ 1 ] ) < keyword_total_sum :,512 12964,"def _split_bitstream(buf: bytes) -> Iterator[bytes]: # TODO: write in a more pytonic way, # translate from: https://github.com/aizvorski/h264bitstream/blob/master/h264_nal.c#L134 i = 0 while True: while (buf[i] != 0 or buf[i + 1] != 0 or buf[i + 2] != 0x01) and ( buf[i] != 0 or buf[i + 1] != 0 or buf[i + 2] != 0 or buf[i + 3] != 0x01 ): i += 1 # skip leading zero if i + 4 >= len(buf): return if buf[i] != 0 or buf[i + 1] != 0 or buf[i + 2] != 0x01: i += 1 i += 3 nal_start = i while (buf[i] != 0 or buf[i + 1] != 0 or buf[i + 2] != 0) and ( buf[i] != 0 or buf[i + 1] != 0 or buf[i + 2] != 0x01 ): i += 1 # FIXME: the next line fails when reading a nal that ends # exactly at the end of the data if i + 3 >= len(buf): nal_end = len(buf) yield buf[nal_start:nal_end] return # did not find nal end, stream ended first nal_end = i yield buf[nal_start:nal_end]",def _split_bitstream ( buf : bytes ) -> Iterator [ bytes ] : i = 0 while True : while ( buf [ i ] != 0 or buf [ i + 1 ] != 0 or buf [ i + 2 ] != 0x01 ) and ( buf [ i ] != 0 or buf [ i + 1 ] != 0 or buf [ i + 2 ] != 0 or buf [ i + 3 ] != 0x01 ) : i += 1 if i + 4 >= len ( buf ) : return i += 1 i += 3 nal_start = i while ( buf [ i ] != 0 or buf [ i + 1 ] != 0 or buf [ i + 2 ] != 0 ) and ( buf [ i ] != 0 or buf [ i + 1 ] != 0 or buf [ i + 2 ] != 0x01 ) : i += 1 if i + 3 >= len ( buf ) : nal_end = len ( buf ) yield buf [ nal_start : nal_end ] return nal_end = i yield buf [ nal_start : nal_end ],if buf [ i ] != 0 or buf [ i + 1 ] != 0 or buf [ i + 2 ] != 0x01 :,422 4845,"def __init__(self, fmt=None, *args): if not isinstance(fmt, BaseException): Error.__init__(self, fmt, *args) else: e = fmt cls = e.__class__ fmt = ""%s.%s: %s"" % (cls.__module__, cls.__name__, e) tb = sys.exc_info()[2] if tb: fmt += ""\n"" fmt += """".join(traceback.format_tb(tb)) Error.__init__(self, fmt)","def __init__ ( self , fmt = None , * args ) : if not isinstance ( fmt , BaseException ) : Error . __init__ ( self , fmt , * args ) else : e = fmt cls = e . __class__ fmt = ""%s.%s: %s"" % ( cls . __module__ , cls . __name__ , e ) tb = sys . exc_info ( ) [ 2 ] fmt += ""\n"" fmt += """" . join ( traceback . format_tb ( tb ) ) Error . __init__ ( self , fmt )",if tb :,138 10227,"def collect_textmate_scheme(self, scheme_tree): scheme = {} for style in scheme_tree.findall("".//dict[key='scope']""): try: cur_style = {} cur_tag = None for elem in style.iter(): if elem.tag == ""key"": cur_tag = elem.text # We are going to fill it next time elif elem.tag == ""string"" and cur_tag is not None: cur_style[cur_tag] = elem.text cur_tag = None if ""scope"" in cur_style: scheme[cur_style[""scope""]] = cur_style except ValueError: pass return scheme ","def collect_textmate_scheme ( self , scheme_tree ) : scheme = { } for style in scheme_tree . findall ( "".//dict[key='scope']"" ) : try : cur_style = { } cur_tag = None for elem in style . iter ( ) : cur_tag = elem . text elif elem . tag == ""string"" and cur_tag is not None : cur_style [ cur_tag ] = elem . text cur_tag = None if ""scope"" in cur_style : scheme [ cur_style [ ""scope"" ] ] = cur_style except ValueError : pass return scheme","if elem . 
tag == ""key"" :",204 188,"def get_ending(lines, begin, end, begin_line, begin_char, type): # Find the ending of the enclosing comment block, given a # description of the beginning of the block end_line = begin_line end_char = 0 if type == MULT: while end_line < len(lines): start = 0 if end_line == begin_line: start = begin_char + len(begin) end_char = lines[end_line].find(end, start) if end_char >= 0: break end_line += 1 end_line += 1 elif type == IN: while end_line < len(lines): start = 0 if end_line == begin_line: start = lines[end_line].index(begin) if not lines[end_line][start:].strip().startswith(begin): break end_line += 1 return end_line, end_char","def get_ending ( lines , begin , end , begin_line , begin_char , type ) : end_line = begin_line end_char = 0 if type == MULT : while end_line < len ( lines ) : start = 0 if end_line == begin_line : start = begin_char + len ( begin ) end_char = lines [ end_line ] . find ( end , start ) if end_char >= 0 : break end_line += 1 end_line += 1 elif type == IN : while end_line < len ( lines ) : start = 0 if end_line == begin_line : start = lines [ end_line ] . index ( begin ) break end_line += 1 return end_line , end_char",if not lines [ end_line ] [ start : ] . strip ( ) . startswith ( begin ) :,272 16347,"def pauseAllDownloads(self, menu): # get active gid of downloads from aria active_gids = download.activeDownloads() # check that if gid is in download_list_file_active f = Open(download_list_file_active) download_list_file_active_lines = f.readlines() f.close() for i in range(len(download_list_file_active_lines)): download_list_file_active_lines[i] = download_list_file_active_lines[i].strip() for gid in active_gids: if gid in download_list_file_active_lines: answer = download.downloadPause(gid) if answer == ""None"": notifySend(""Aria2 did not respond!"", ""Try agian!"", 10000, ""critical"") sleep(0.3)","def pauseAllDownloads ( self , menu ) : active_gids = download . activeDownloads ( ) f = Open ( download_list_file_active ) download_list_file_active_lines = f . readlines ( ) f . close ( ) for i in range ( len ( download_list_file_active_lines ) ) : download_list_file_active_lines [ i ] = download_list_file_active_lines [ i ] . strip ( ) for gid in active_gids : if gid in download_list_file_active_lines : answer = download . downloadPause ( gid ) notifySend ( ""Aria2 did not respond!"" , ""Try agian!"" , 10000 , ""critical"" ) sleep ( 0.3 )","if answer == ""None"" :",220 1373,"def _get_requested_databases(self): """"""Returns a list of databases requested, not including ignored dbs"""""" requested_databases = [] if (self._requested_namespaces is not None) and (self._requested_namespaces != []): for requested_namespace in self._requested_namespaces: if requested_namespace[0] is ""*"": return [] elif requested_namespace[0] not in IGNORE_DBS: requested_databases.append(requested_namespace[0]) return requested_databases","def _get_requested_databases ( self ) : """"""Returns a list of databases requested, not including ignored dbs"""""" requested_databases = [ ] if ( self . _requested_namespaces is not None ) and ( self . _requested_namespaces != [ ] ) : for requested_namespace in self . _requested_namespaces : if requested_namespace [ 0 ] is ""*"" : return [ ] requested_databases . 
append ( requested_namespace [ 0 ] ) return requested_databases",elif requested_namespace [ 0 ] not in IGNORE_DBS :,131 19661,"def read_work_titles(fields): found = [] if ""240"" in fields: for line in fields[""240""]: title = join_subfield_values(line, [""a"", ""m"", ""n"", ""p"", ""r""]) if title not in found: found.append(title) if ""130"" in fields: for line in fields[""130""]: title = "" "".join(get_lower_subfields(line)) if title not in found: found.append(title) return {""work_titles"": found} if found else {}","def read_work_titles ( fields ) : found = [ ] if ""240"" in fields : for line in fields [ ""240"" ] : title = join_subfield_values ( line , [ ""a"" , ""m"" , ""n"" , ""p"" , ""r"" ] ) found . append ( title ) if ""130"" in fields : for line in fields [ ""130"" ] : title = "" "" . join ( get_lower_subfields ( line ) ) found . append ( title ) return { ""work_titles"" : found } if found else { }",if title not in found :,157 9465,"def generic_tag_compiler(params, defaults, name, node_class, parser, token): ""Returns a template.Node subclass."" bits = token.split_contents()[1:] bmax = len(params) def_len = defaults and len(defaults) or 0 bmin = bmax - def_len if len(bits) < bmin or len(bits) > bmax: if bmin == bmax: message = ""%s takes %s arguments"" % (name, bmin) else: message = ""%s takes between %s and %s arguments"" % (name, bmin, bmax) raise TemplateSyntaxError(message) return node_class(bits)","def generic_tag_compiler ( params , defaults , name , node_class , parser , token ) : ""Returns a template.Node subclass."" bits = token . split_contents ( ) [ 1 : ] bmax = len ( params ) def_len = defaults and len ( defaults ) or 0 bmin = bmax - def_len if len ( bits ) < bmin or len ( bits ) > bmax : message = ""%s takes %s arguments"" % ( name , bmin ) else : message = ""%s takes between %s and %s arguments"" % ( name , bmin , bmax ) raise TemplateSyntaxError ( message ) return node_class ( bits )",if bmin == bmax :,176 3806,"def _handle_control_flow_operator(self, operator, values): if operator == ""$switch"": if not isinstance(values, dict): raise OperationFailure( ""$switch requires an object as an argument, "" ""found: %s"" % type(values) ) branches = values.get(""branches"", []) if not isinstance(branches, (list, tuple)): raise OperationFailure( ""$switch expected an array for 'branches', "" ""found: %s"" % type(branches) ) if not branches: raise OperationFailure(""$switch requires at least one branch."") for branch in branches: if not isinstance(branch, dict): raise OperationFailure( ""$switch expected each branch to be an object, "" ""found: %s"" % type(branch) ) if ""case"" not in branch: raise OperationFailure( ""$switch requires each branch have a 'case' expression"" ) if ""then"" not in branch: raise OperationFailure( ""$switch requires each branch have a 'then' expression."" ) for branch in branches: if self._parse_to_bool(branch[""case""]): return self.parse(branch[""then""]) if ""default"" not in values: raise OperationFailure( ""$switch could not find a matching branch for an input, "" ""and no default was specified."" ) return self.parse(values[""default""]) # This should never happen: it is only a safe fallback if something went wrong. 
raise NotImplementedError( # pragma: no cover ""Although '%s' is a valid control flow operator for the "" ""aggregation pipeline, it is currently not implemented "" ""in Mongomock."" % operator ) ","def _handle_control_flow_operator ( self , operator , values ) : if operator == ""$switch"" : if not isinstance ( values , dict ) : raise OperationFailure ( ""$switch requires an object as an argument, "" ""found: %s"" % type ( values ) ) branches = values . get ( ""branches"" , [ ] ) if not isinstance ( branches , ( list , tuple ) ) : raise OperationFailure ( ""$switch expected an array for 'branches', "" ""found: %s"" % type ( branches ) ) if not branches : raise OperationFailure ( ""$switch requires at least one branch."" ) for branch in branches : if not isinstance ( branch , dict ) : raise OperationFailure ( ""$switch expected each branch to be an object, "" ""found: %s"" % type ( branch ) ) raise OperationFailure ( ""$switch requires each branch have a 'case' expression"" ) if ""then"" not in branch : raise OperationFailure ( ""$switch requires each branch have a 'then' expression."" ) for branch in branches : if self . _parse_to_bool ( branch [ ""case"" ] ) : return self . parse ( branch [ ""then"" ] ) if ""default"" not in values : raise OperationFailure ( ""$switch could not find a matching branch for an input, "" ""and no default was specified."" ) return self . parse ( values [ ""default"" ] ) raise NotImplementedError ( ""Although '%s' is a valid control flow operator for the "" ""aggregation pipeline, it is currently not implemented "" ""in Mongomock."" % operator )","if ""case"" not in branch :",507 3257,"def load_stack(self, stack, index=None): self.stack = stack self.clear() for i in range(len(stack)): frame, lineno = stack[i] try: modname = frame.f_globals[""__name__""] except: modname = ""?"" code = frame.f_code filename = code.co_filename funcname = code.co_name import linecache sourceline = linecache.getline(filename, lineno) import string sourceline = string.strip(sourceline) if funcname in (""?"", """", None): item = ""%s, line %d: %s"" % (modname, lineno, sourceline) else: item = ""%s.%s(), line %d: %s"" % (modname, funcname, lineno, sourceline) if i == index: item = ""> "" + item self.append(item) if index is not None: self.select(index) ","def load_stack ( self , stack , index = None ) : self . stack = stack self . clear ( ) for i in range ( len ( stack ) ) : frame , lineno = stack [ i ] try : modname = frame . f_globals [ ""__name__"" ] except : modname = ""?"" code = frame . f_code filename = code . co_filename funcname = code . co_name import linecache sourceline = linecache . getline ( filename , lineno ) import string sourceline = string . strip ( sourceline ) if funcname in ( ""?"" , """" , None ) : item = ""%s, line %d: %s"" % ( modname , lineno , sourceline ) else : item = ""%s.%s(), line %d: %s"" % ( modname , funcname , lineno , sourceline ) item = ""> "" + item self . append ( item ) if index is not None : self . select ( index )",if i == index :,264 8167,"def can_read_or_exception( self, user, doc_class, doc_id, exception_class=PopupException ): if doc_id is None: return try: ct = ContentType.objects.get_for_model(doc_class) doc = Document.objects.get(object_id=doc_id, content_type=ct) if doc.can_read(user): return doc else: message = _( ""Permission denied. 
%(username)s does not have the permissions required to access document %(id)s"" ) % {""username"": user.username, ""id"": doc.id} raise exception_class(message) except Document.DoesNotExist: raise exception_class(_(""Document %(id)s does not exist"") % {""id"": doc_id}) ","def can_read_or_exception ( self , user , doc_class , doc_id , exception_class = PopupException ) : if doc_id is None : return try : ct = ContentType . objects . get_for_model ( doc_class ) doc = Document . objects . get ( object_id = doc_id , content_type = ct ) return doc else : message = _ ( ""Permission denied. %(username)s does not have the permissions required to access document %(id)s"" ) % { ""username"" : user . username , ""id"" : doc . id } raise exception_class ( message ) except Document . DoesNotExist : raise exception_class ( _ ( ""Document %(id)s does not exist"" ) % { ""id"" : doc_id } )",if doc . can_read ( user ) :,213 7153,"def _defuse_padding(self, IR_node): auto_pad = IR_node.get_attr(""auto_pad"") if auto_pad: input_node = self.parent_variable_name(IR_node) if auto_pad == ""VALID"": padding = False elif auto_pad.startswith(""SAME""): padding = True else: raise ValueError(""Unknown padding type [{}]."".format(auto_pad)) return input_node, padding else: padding = IR_node.get_attr(""pads"") if not is_valid_padding(padding): dim = len(padding) // 2 padding_str = list() for i in xrange(1, dim): padding_str.append((padding[i], padding[i + dim])) input_node = IR_node.variable_name + ""_pad"" self.add_body( 1, ""{:<15} = cntk.pad({}, pattern={})"".format( input_node, self.parent_variable_name(IR_node), padding_str ), ) else: input_node = self.parent_variable_name(IR_node) return input_node, False","def _defuse_padding ( self , IR_node ) : auto_pad = IR_node . get_attr ( ""auto_pad"" ) if auto_pad : input_node = self . parent_variable_name ( IR_node ) padding = False elif auto_pad . startswith ( ""SAME"" ) : padding = True else : raise ValueError ( ""Unknown padding type [{}]."" . format ( auto_pad ) ) return input_node , padding else : padding = IR_node . get_attr ( ""pads"" ) if not is_valid_padding ( padding ) : dim = len ( padding ) // 2 padding_str = list ( ) for i in xrange ( 1 , dim ) : padding_str . append ( ( padding [ i ] , padding [ i + dim ] ) ) input_node = IR_node . variable_name + ""_pad"" self . add_body ( 1 , ""{:<15} = cntk.pad({}, pattern={})"" . format ( input_node , self . parent_variable_name ( IR_node ) , padding_str ) , ) else : input_node = self . 
parent_variable_name ( IR_node ) return input_node , False","if auto_pad == ""VALID"" :",342 5978,"def append_chunk(self, source, chunk): try: data = json.loads(chunk) except ValueError: logger.error(""unable to decode chunk %s"", chunk, exc_info=True) else: try: ts = data[""timestamp""] self.results.setdefault(ts, {}) for key, value in data[""fields""].iteritems(): if data[""name""] == ""diskio"": data[""name""] = ""{metric_name}-{disk_id}"".format( metric_name=data[""name""], disk_id=data[""tags""][""name""] ) elif data[""name""] == ""net"": data[""name""] = ""{metric_name}-{interface}"".format( metric_name=data[""name""], interface=data[""tags""][""interface""] ) elif data[""name""] == ""cpu"": data[""name""] = ""{metric_name}-{cpu_id}"".format( metric_name=data[""name""], cpu_id=data[""tags""][""cpu""] ) key = data[""name""] + ""_"" + key if key.endswith(""_exec_value""): key = key.replace(""_exec_value"", """") self.results[ts][key] = value except KeyError: logger.error( ""Malformed json from source %s: %s"", source, chunk, exc_info=True ) except BaseException: logger.error(""Something nasty happend in consolidator work"", exc_info=True) ","def append_chunk ( self , source , chunk ) : try : data = json . loads ( chunk ) except ValueError : logger . error ( ""unable to decode chunk %s"" , chunk , exc_info = True ) else : try : ts = data [ ""timestamp"" ] self . results . setdefault ( ts , { } ) for key , value in data [ ""fields"" ] . iteritems ( ) : if data [ ""name"" ] == ""diskio"" : data [ ""name"" ] = ""{metric_name}-{disk_id}"" . format ( metric_name = data [ ""name"" ] , disk_id = data [ ""tags"" ] [ ""name"" ] ) elif data [ ""name"" ] == ""net"" : data [ ""name"" ] = ""{metric_name}-{interface}"" . format ( metric_name = data [ ""name"" ] , interface = data [ ""tags"" ] [ ""interface"" ] ) elif data [ ""name"" ] == ""cpu"" : data [ ""name"" ] = ""{metric_name}-{cpu_id}"" . format ( metric_name = data [ ""name"" ] , cpu_id = data [ ""tags"" ] [ ""cpu"" ] ) key = data [ ""name"" ] + ""_"" + key key = key . replace ( ""_exec_value"" , """" ) self . results [ ts ] [ key ] = value except KeyError : logger . error ( ""Malformed json from source %s: %s"" , source , chunk , exc_info = True ) except BaseException : logger . error ( ""Something nasty happend in consolidator work"" , exc_info = True )","if key . 
endswith ( ""_exec_value"" ) :",438 5567,"def CastClass(c, graph=None): graph = graph is None and c.factoryGraph or graph for kind in graph.objects(subject=classOrIdentifier(c), predicate=RDF.type): if kind == OWL_NS.Restriction: kwArgs = {""identifier"": classOrIdentifier(c), ""graph"": graph} for s, p, o in graph.triples((classOrIdentifier(c), None, None)): if p != RDF.type: if p == OWL_NS.onProperty: kwArgs[""onProperty""] = o else: if p not in Restriction.restrictionKinds: continue kwArgs[str(p.split(OWL_NS)[-1])] = o if not set( [str(i.split(OWL_NS)[-1]) for i in Restriction.restrictionKinds] ).intersection(kwArgs): raise MalformedClass(""Malformed owl:Restriction"") return Restriction(**kwArgs) else: for s, p, o in graph.triples_choices( ( classOrIdentifier(c), [OWL_NS.intersectionOf, OWL_NS.unionOf, OWL_NS.oneOf], None, ) ): if p == OWL_NS.oneOf: return EnumeratedClass(classOrIdentifier(c), graph=graph) else: return BooleanClass(classOrIdentifier(c), operator=p, graph=graph) # assert (classOrIdentifier(c),RDF.type,OWL_NS.Class) in graph return Class(classOrIdentifier(c), graph=graph, skipOWLClassMembership=True)","def CastClass ( c , graph = None ) : graph = graph is None and c . factoryGraph or graph for kind in graph . objects ( subject = classOrIdentifier ( c ) , predicate = RDF . type ) : if kind == OWL_NS . Restriction : kwArgs = { ""identifier"" : classOrIdentifier ( c ) , ""graph"" : graph } for s , p , o in graph . triples ( ( classOrIdentifier ( c ) , None , None ) ) : if p != RDF . type : if p == OWL_NS . onProperty : kwArgs [ ""onProperty"" ] = o else : continue kwArgs [ str ( p . split ( OWL_NS ) [ - 1 ] ) ] = o if not set ( [ str ( i . split ( OWL_NS ) [ - 1 ] ) for i in Restriction . restrictionKinds ] ) . intersection ( kwArgs ) : raise MalformedClass ( ""Malformed owl:Restriction"" ) return Restriction ( ** kwArgs ) else : for s , p , o in graph . triples_choices ( ( classOrIdentifier ( c ) , [ OWL_NS . intersectionOf , OWL_NS . unionOf , OWL_NS . oneOf ] , None , ) ) : if p == OWL_NS . oneOf : return EnumeratedClass ( classOrIdentifier ( c ) , graph = graph ) else : return BooleanClass ( classOrIdentifier ( c ) , operator = p , graph = graph ) return Class ( classOrIdentifier ( c ) , graph = graph , skipOWLClassMembership = True )",if p not in Restriction . restrictionKinds :,501 6636,"def get_unique_attribute(self, name: str): feat = None for f in self.features: if self._return_feature(f) and hasattr(f, name): if feat is not None: raise RuntimeError(""The attribute was not unique."") feat = f if feat is None: raise RuntimeError(""The attribute did not exist"") return getattr(feat, name) ","def get_unique_attribute ( self , name : str ) : feat = None for f in self . features : if self . 
_return_feature ( f ) and hasattr ( f , name ) : raise RuntimeError ( ""The attribute was not unique."" ) feat = f if feat is None : raise RuntimeError ( ""The attribute did not exist"" ) return getattr ( feat , name )",if feat is not None :,106 11583,"def _patch(): """"""Monkey-patch pyopengl to fix a bug in glBufferSubData."""""" import sys from OpenGL import GL if sys.version_info > (3,): buffersubdatafunc = GL.glBufferSubData if hasattr(buffersubdatafunc, ""wrapperFunction""): buffersubdatafunc = buffersubdatafunc.wrapperFunction _m = sys.modules[buffersubdatafunc.__module__] _m.long = int # Fix missing enum try: from OpenGL.GL.VERSION import GL_2_0 GL_2_0.GL_OBJECT_SHADER_SOURCE_LENGTH = GL_2_0.GL_SHADER_SOURCE_LENGTH except Exception: pass","def _patch ( ) : """"""Monkey-patch pyopengl to fix a bug in glBufferSubData."""""" import sys from OpenGL import GL if sys . version_info > ( 3 , ) : buffersubdatafunc = GL . glBufferSubData buffersubdatafunc = buffersubdatafunc . wrapperFunction _m = sys . modules [ buffersubdatafunc . __module__ ] _m . long = int try : from OpenGL . GL . VERSION import GL_2_0 GL_2_0 . GL_OBJECT_SHADER_SOURCE_LENGTH = GL_2_0 . GL_SHADER_SOURCE_LENGTH except Exception : pass","if hasattr ( buffersubdatafunc , ""wrapperFunction"" ) :",194 3268,"def formatmonthname(self, theyear, themonth, withyear=True): with TimeEncoding(self.locale) as encoding: s = month_name[themonth] if encoding is not None: s = s.decode(encoding) if withyear: s = ""%s %s"" % (s, theyear) return '%s' % s","def formatmonthname ( self , theyear , themonth , withyear = True ) : with TimeEncoding ( self . locale ) as encoding : s = month_name [ themonth ] if encoding is not None : s = s . decode ( encoding ) s = ""%s %s"" % ( s , theyear ) return '%s' % s",if withyear :,115 15044,"def _write_summaries(self, summary_dict, relative_path=""""): for name, value in summary_dict.items(): if isinstance(value, dict): self._write_summaries( value, relative_path=os.path.join(relative_path, name) ) else: with self.summary_writer(relative_path).as_default(): self._summary_fn(name, value, step=self._global_step) ","def _write_summaries ( self , summary_dict , relative_path = """" ) : for name , value in summary_dict . items ( ) : self . _write_summaries ( value , relative_path = os . path . join ( relative_path , name ) ) else : with self . summary_writer ( relative_path ) . as_default ( ) : self . _summary_fn ( name , value , step = self . _global_step )","if isinstance ( value , dict ) :",125 14326,"def execute_many(self, query: str, values: list) -> None: async with self.acquire_connection() as connection: self.log.debug(""%s: %s"", query, values) async with connection.cursor() as cursor: if self.capabilities.supports_transactions: await connection.begin() try: await cursor.executemany(query, values) except Exception: await connection.rollback() raise else: await connection.commit() else: await cursor.executemany(query, values) ","def execute_many ( self , query : str , values : list ) -> None : async with self . acquire_connection ( ) as connection : self . log . debug ( ""%s: %s"" , query , values ) async with connection . cursor ( ) as cursor : await connection . begin ( ) try : await cursor . executemany ( query , values ) except Exception : await connection . rollback ( ) raise else : await connection . commit ( ) else : await cursor . executemany ( query , values )",if self . capabilities . 
supports_transactions :,171 7540,"def read(self, iprot): if ( iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None ): fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I32: self.protocol_version = iprot.readI32() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.propertyName = iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.defaultValue = iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()","def read ( self , iprot ) : if ( iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None ) : fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) return iprot . readStructBegin ( ) while True : ( fname , ftype , fid ) = iprot . readFieldBegin ( ) if ftype == TType . STOP : break if fid == 1 : if ftype == TType . I32 : self . protocol_version = iprot . readI32 ( ) else : iprot . skip ( ftype ) if ftype == TType . STRING : self . propertyName = iprot . readString ( ) else : iprot . skip ( ftype ) elif fid == 3 : if ftype == TType . STRING : self . defaultValue = iprot . readString ( ) else : iprot . skip ( ftype ) else : iprot . skip ( ftype ) iprot . readFieldEnd ( ) iprot . readStructEnd ( )",elif fid == 2 :,378 11983,"def _iter_ns_range(self): """"""Iterates over self._ns_range, delegating to self._iter_key_range()."""""" while True: if self._current_key_range is None: query = self._ns_range.make_datastore_query() namespace_result = query.Get(1) if not namespace_result: break namespace = namespace_result[0].name() or """" self._current_key_range = key_range.KeyRange( namespace=namespace, _app=self._ns_range.app ) yield ALLOW_CHECKPOINT for key, o in self._iter_key_range(copy.deepcopy(self._current_key_range)): self._current_key_range.advance(key) yield o if ( self._ns_range.is_single_namespace or self._current_key_range.namespace == self._ns_range.namespace_end ): break self._ns_range = self._ns_range.with_start_after( self._current_key_range.namespace ) self._current_key_range = None","def _iter_ns_range ( self ) : """"""Iterates over self._ns_range, delegating to self._iter_key_range()."""""" while True : query = self . _ns_range . make_datastore_query ( ) namespace_result = query . Get ( 1 ) if not namespace_result : break namespace = namespace_result [ 0 ] . name ( ) or """" self . _current_key_range = key_range . KeyRange ( namespace = namespace , _app = self . _ns_range . app ) yield ALLOW_CHECKPOINT for key , o in self . _iter_key_range ( copy . deepcopy ( self . _current_key_range ) ) : self . _current_key_range . advance ( key ) yield o if ( self . _ns_range . is_single_namespace or self . _current_key_range . namespace == self . _ns_range . namespace_end ) : break self . _ns_range = self . _ns_range . with_start_after ( self . _current_key_range . namespace ) self . _current_key_range = None",if self . 
_current_key_range is None :,321 18507,"def __init__(self, artifact=None, pad=None): if pad is None: if artifact is None: raise TypeError( ""Either artifact or pad is needed to "" ""construct a context."" ) pad = artifact.build_state.pad if artifact is not None: self.artifact = artifact self.source = artifact.source_obj self.build_state = self.artifact.build_state else: self.artifact = None self.source = None self.build_state = None self.exc_info = None self.pad = pad # Processing information self.referenced_dependencies = set() self.referenced_virtual_dependencies = {} self.sub_artifacts = [] self.flow_block_render_stack = [] self._forced_base_url = None # General cache system where other things can put their temporary # stuff in. self.cache = {} self._dependency_collectors = [] ","def __init__ ( self , artifact = None , pad = None ) : if pad is None : raise TypeError ( ""Either artifact or pad is needed to "" ""construct a context."" ) pad = artifact . build_state . pad if artifact is not None : self . artifact = artifact self . source = artifact . source_obj self . build_state = self . artifact . build_state else : self . artifact = None self . source = None self . build_state = None self . exc_info = None self . pad = pad self . referenced_dependencies = set ( ) self . referenced_virtual_dependencies = { } self . sub_artifacts = [ ] self . flow_block_render_stack = [ ] self . _forced_base_url = None self . cache = { } self . _dependency_collectors = [ ]",if artifact is None :,259 17627,"def _fix_default(self, action): if ( hasattr(action, ""default"") and hasattr(action, ""dest"") and action.default != SUPPRESS ): as_type = get_type(action) names = OrderedDict( (i.lstrip(""-"").replace(""-"", ""_""), None) for i in action.option_strings ) outcome = None for name in names: outcome = get_env_var(name, as_type, self.env) if outcome is not None: break if outcome is None and self.file_config: for name in names: outcome = self.file_config.get(name, as_type) if outcome is not None: break if outcome is not None: action.default, action.default_source = outcome else: outcome = action.default, ""default"" self.options.set_src(action.dest, *outcome) ","def _fix_default ( self , action ) : if ( hasattr ( action , ""default"" ) and hasattr ( action , ""dest"" ) and action . default != SUPPRESS ) : as_type = get_type ( action ) names = OrderedDict ( ( i . lstrip ( ""-"" ) . replace ( ""-"" , ""_"" ) , None ) for i in action . option_strings ) outcome = None for name in names : outcome = get_env_var ( name , as_type , self . env ) break if outcome is None and self . file_config : for name in names : outcome = self . file_config . get ( name , as_type ) break action . default , action . default_source = outcome else : outcome = action . default , ""default"" self . options . set_src ( action . 
dest , * outcome )",if outcome is not None :,271 5065,"def disassemble(self, byte_parser, histogram=False): """"""Disassemble code, for ad-hoc experimenting."""""" for bp in byte_parser.child_parsers(): if bp.text: srclines = bp.text.splitlines() else: srclines = None print(""\n%s: "" % bp.code) upto = None for disline in disgen.disgen(bp.code): if histogram: opcode_counts[disline.opcode] += 1 continue if disline.first: if srclines: upto = upto or disline.lineno - 1 while upto <= disline.lineno - 1: print(""%100s%s"" % ("""", srclines[upto])) upto += 1 elif disline.offset > 0: print("""") line = disgen.format_dis_line(disline) print(""%-70s"" % (line,)) print("""")","def disassemble ( self , byte_parser , histogram = False ) : """"""Disassemble code, for ad-hoc experimenting."""""" for bp in byte_parser . child_parsers ( ) : if bp . text : srclines = bp . text . splitlines ( ) else : srclines = None print ( ""\n%s: "" % bp . code ) upto = None for disline in disgen . disgen ( bp . code ) : if histogram : opcode_counts [ disline . opcode ] += 1 continue if srclines : upto = upto or disline . lineno - 1 while upto <= disline . lineno - 1 : print ( ""%100s%s"" % ( """" , srclines [ upto ] ) ) upto += 1 elif disline . offset > 0 : print ( """" ) line = disgen . format_dis_line ( disline ) print ( ""%-70s"" % ( line , ) ) print ( """" )",if disline . first :,292 13087,"def send_request(self, request_method, *args, **kwargs): request_func = getattr(self.client, request_method) status_code = None if ""content_type"" not in kwargs and request_method != ""get"": kwargs[""content_type""] = ""application/json"" if ( ""data"" in kwargs and request_method != ""get"" and kwargs[""content_type""] == ""application/json"" ): data = kwargs.get(""data"", """") kwargs[""data""] = json.dumps(data) # , cls=CustomJSONEncoder if ""status_code"" in kwargs: status_code = kwargs.pop(""status_code"") # check_headers = kwargs.pop('check_headers', True) if hasattr(self, ""token""): if getattr(settings, ""REST_USE_JWT"", False): kwargs[""HTTP_AUTHORIZATION""] = ""JWT %s"" % self.token else: kwargs[""HTTP_AUTHORIZATION""] = ""Token %s"" % self.token self.response = request_func(*args, **kwargs) is_json = bool([x for x in self.response._headers[""content-type""] if ""json"" in x]) self.response.json = {} if is_json and self.response.content: self.response.json = json.loads(force_text(self.response.content)) if status_code: self.assertEqual(self.response.status_code, status_code) return self.response ","def send_request ( self , request_method , * args , ** kwargs ) : request_func = getattr ( self . client , request_method ) status_code = None if ""content_type"" not in kwargs and request_method != ""get"" : kwargs [ ""content_type"" ] = ""application/json"" if ( ""data"" in kwargs and request_method != ""get"" and kwargs [ ""content_type"" ] == ""application/json"" ) : data = kwargs . get ( ""data"" , """" ) kwargs [ ""data"" ] = json . dumps ( data ) if ""status_code"" in kwargs : status_code = kwargs . pop ( ""status_code"" ) if hasattr ( self , ""token"" ) : kwargs [ ""HTTP_AUTHORIZATION"" ] = ""JWT %s"" % self . token else : kwargs [ ""HTTP_AUTHORIZATION"" ] = ""Token %s"" % self . token self . response = request_func ( * args , ** kwargs ) is_json = bool ( [ x for x in self . response . _headers [ ""content-type"" ] if ""json"" in x ] ) self . response . json = { } if is_json and self . response . content : self . response . json = json . loads ( force_text ( self . response . content ) ) if status_code : self . 
assertEqual ( self . response . status_code , status_code ) return self . response","if getattr ( settings , ""REST_USE_JWT"" , False ) :",387 22710,"def _wait_for_bot_presense(self, online): for _ in range(10): time.sleep(2) if online and self._is_testbot_online(): break if not online and not self._is_testbot_online(): break else: raise AssertionError( ""test bot is still {}"".format(""offline"" if online else ""online"") )","def _wait_for_bot_presense ( self , online ) : for _ in range ( 10 ) : time . sleep ( 2 ) break if not online and not self . _is_testbot_online ( ) : break else : raise AssertionError ( ""test bot is still {}"" . format ( ""offline"" if online else ""online"" ) )",if online and self . _is_testbot_online ( ) :,111 15430,"def set_logging(self, showOnCmd=True, loggingFile=None, loggingLevel=logging.INFO): if showOnCmd != self.showOnCmd: if showOnCmd: self.logger.addHandler(self.cmdHandler) else: self.logger.removeHandler(self.cmdHandler) self.showOnCmd = showOnCmd if loggingFile != self.loggingFile: if self.loggingFile is not None: # clear old fileHandler self.logger.removeHandler(self.fileHandler) self.fileHandler.close() if loggingFile is not None: # add new fileHandler self.fileHandler = logging.FileHandler(loggingFile) self.logger.addHandler(self.fileHandler) self.loggingFile = loggingFile if loggingLevel != self.loggingLevel: self.logger.setLevel(loggingLevel) self.loggingLevel = loggingLevel ","def set_logging ( self , showOnCmd = True , loggingFile = None , loggingLevel = logging . INFO ) : if showOnCmd != self . showOnCmd : if showOnCmd : self . logger . addHandler ( self . cmdHandler ) else : self . logger . removeHandler ( self . cmdHandler ) self . showOnCmd = showOnCmd if loggingFile != self . loggingFile : self . logger . removeHandler ( self . fileHandler ) self . fileHandler . close ( ) if loggingFile is not None : self . fileHandler = logging . FileHandler ( loggingFile ) self . logger . addHandler ( self . fileHandler ) self . loggingFile = loggingFile if loggingLevel != self . loggingLevel : self . logger . setLevel ( loggingLevel ) self . loggingLevel = loggingLevel",if self . loggingFile is not None :,245 2416,"def render(self, mcanv, op, idx): value = self.imm hint = mcanv.syms.getSymHint(op.va, idx) if hint is not None: if mcanv.mem.isValidPointer(value): mcanv.addVaText(hint, value) else: mcanv.addNameText(hint) elif mcanv.mem.isValidPointer(value): name = addrToName(mcanv, value) mcanv.addVaText(name, value) else: if self.tsize == 6: mcanv.addNameText(""0x%.4x:0x%.8x"" % (value >> 32, value & 0xFFFFFFFF)) elif self.imm >= 4096: mcanv.addNameText(""0x%.8x"" % value) else: mcanv.addNameText(str(value))","def render ( self , mcanv , op , idx ) : value = self . imm hint = mcanv . syms . getSymHint ( op . va , idx ) if hint is not None : if mcanv . mem . isValidPointer ( value ) : mcanv . addVaText ( hint , value ) else : mcanv . addNameText ( hint ) elif mcanv . mem . isValidPointer ( value ) : name = addrToName ( mcanv , value ) mcanv . addVaText ( name , value ) else : mcanv . addNameText ( ""0x%.4x:0x%.8x"" % ( value >> 32 , value & 0xFFFFFFFF ) ) elif self . imm >= 4096 : mcanv . addNameText ( ""0x%.8x"" % value ) else : mcanv . addNameText ( str ( value ) )",if self . tsize == 6 :,236 18279,"def _guardAgainstUnicode(self, data): # Only accept byte strings or ascii unicode values, otherwise # there is no way to correctly decode the data into bytes. 
if _pythonMajorVersion < 3: if isinstance(data, unicode): data = data.encode(""utf8"") else: if isinstance(data, str): # Only accept ascii unicode values. try: return data.encode(""ascii"") except UnicodeEncodeError: pass raise ValueError(""pyDes can only work with encoded strings, not Unicode."") return data","def _guardAgainstUnicode ( self , data ) : if _pythonMajorVersion < 3 : data = data . encode ( ""utf8"" ) else : if isinstance ( data , str ) : try : return data . encode ( ""ascii"" ) except UnicodeEncodeError : pass raise ValueError ( ""pyDes can only work with encoded strings, not Unicode."" ) return data","if isinstance ( data , unicode ) :",154 4566,"def get(self, block=True, timeout=None): if block and timeout is None: self._rlock.acquire() try: res = self._recv() self._sem.release() return res finally: self._rlock.release() else: if block: deadline = time.time() + timeout if not self._rlock.acquire(block, timeout): raise Empty try: if not self._poll(block and (deadline - time.time()) or 0.0): raise Empty res = self._recv() self._sem.release() return res finally: self._rlock.release()","def get ( self , block = True , timeout = None ) : if block and timeout is None : self . _rlock . acquire ( ) try : res = self . _recv ( ) self . _sem . release ( ) return res finally : self . _rlock . release ( ) else : deadline = time . time ( ) + timeout if not self . _rlock . acquire ( block , timeout ) : raise Empty try : if not self . _poll ( block and ( deadline - time . time ( ) ) or 0.0 ) : raise Empty res = self . _recv ( ) self . _sem . release ( ) return res finally : self . _rlock . release ( )",if block :,204 22643,"def __init__(self, name, lines): self.name = name self.flds = [] self.parms = {} self.recfmt = None # Example line: # f02=Person_last_change ,32,10,10, 1,68,"""",""INDI CHAN DATE"" line_pat = re.compile(r""(\w+) = (.*)"", re.VERBOSE) for lne in lines: mtch = line_pat.match(lne) if mtch: # TODO. Catch duplicates? self.parms[mtch.group(1)] = mtch.group(2) self.fileext = self.parms.get(""fileext"", None) # If there is a n_fields entry then this is a table that # has details about the record format of another file (PER or REL). if ""n_fields"" in self.parms: self.get_fields() self.recfmt = self.get_recfmt() self.nam2fld = {} self.nam2idx = {} self.recflds = [] # list of fields that use up space in a record j = 0 for fld in enumerate(self.flds): # print(""# field %s"" % fld) nam = fld[1].name self.nam2fld[nam] = fld # fld.size == 0: Field will not be acknowleged! if fld[1].size != 0: self.nam2idx[nam] = j # print(""# %s <= %d"" % (fld.fieldname, j)) self.recflds.append(fld[1]) j += 1","def __init__ ( self , name , lines ) : self . name = name self . flds = [ ] self . parms = { } self . recfmt = None line_pat = re . compile ( r""(\w+) = (.*)"" , re . VERBOSE ) for lne in lines : mtch = line_pat . match ( lne ) self . parms [ mtch . group ( 1 ) ] = mtch . group ( 2 ) self . fileext = self . parms . get ( ""fileext"" , None ) if ""n_fields"" in self . parms : self . get_fields ( ) self . recfmt = self . get_recfmt ( ) self . nam2fld = { } self . nam2idx = { } self . recflds = [ ] j = 0 for fld in enumerate ( self . flds ) : nam = fld [ 1 ] . name self . nam2fld [ nam ] = fld if fld [ 1 ] . size != 0 : self . nam2idx [ nam ] = j self . recflds . 
append ( fld [ 1 ] ) j += 1",if mtch :,452 8281,"def __getitem__(self, key): arch = self._project.arch if key in arch.registers: # it's a register name reg_offset, size = arch.registers[key] # obtain the CFGNode cfg_node = self._cfg.model.get_any_node(self._insn_addr, anyaddr=True) if cfg_node is None: # not found raise KeyError( ""CFGNode for instruction %#x is not found."" % self._insn_addr ) # determine the statement ID vex_block = self._project.factory.block( cfg_node.addr, size=cfg_node.size, opt_level=self._cfg._iropt_level ).vex stmt_idx = None insn_addr = cfg_node.addr for i, stmt in enumerate(vex_block.statements): if isinstance(stmt, pyvex.IRStmt.IMark): insn_addr = stmt.addr + stmt.delta elif insn_addr == self._insn_addr: if isinstance(stmt, pyvex.IRStmt.Put) and stmt.offset == reg_offset: stmt_idx = i break elif insn_addr > self._insn_addr: break if stmt_idx is None: raise KeyError(""Cannot find the statement."") # create a program variable variable = SimRegisterVariable(reg_offset, size) location = CodeLocation(cfg_node.addr, stmt_idx, ins_addr=self._insn_addr) pv = ProgramVariable(variable, location, arch=self._project.arch) return DDGViewItem(self._ddg, pv, simplified=self._simplified) ","def __getitem__ ( self , key ) : arch = self . _project . arch if key in arch . registers : reg_offset , size = arch . registers [ key ] cfg_node = self . _cfg . model . get_any_node ( self . _insn_addr , anyaddr = True ) if cfg_node is None : raise KeyError ( ""CFGNode for instruction %#x is not found."" % self . _insn_addr ) vex_block = self . _project . factory . block ( cfg_node . addr , size = cfg_node . size , opt_level = self . _cfg . _iropt_level ) . vex stmt_idx = None insn_addr = cfg_node . addr for i , stmt in enumerate ( vex_block . statements ) : if isinstance ( stmt , pyvex . IRStmt . IMark ) : insn_addr = stmt . addr + stmt . delta elif insn_addr == self . _insn_addr : stmt_idx = i break elif insn_addr > self . _insn_addr : break if stmt_idx is None : raise KeyError ( ""Cannot find the statement."" ) variable = SimRegisterVariable ( reg_offset , size ) location = CodeLocation ( cfg_node . addr , stmt_idx , ins_addr = self . _insn_addr ) pv = ProgramVariable ( variable , location , arch = self . _project . arch ) return DDGViewItem ( self . _ddg , pv , simplified = self . _simplified )","if isinstance ( stmt , pyvex . IRStmt . Put ) and stmt . offset == reg_offset :",475 6934,"def _get_dependency_manager(dependency_manager, runtime): if not dependency_manager: valid_dep_managers = RUNTIME_TO_DEPENDENCY_MANAGERS.get(runtime) if valid_dep_managers is None: dependency_manager = None elif len(valid_dep_managers) == 1: dependency_manager = valid_dep_managers[0] else: choices = list(map(str, range(1, len(valid_dep_managers) + 1))) choice_num = 1 click.echo(""\nWhich dependency manager would you like to use?"") for dm in valid_dep_managers: msg = ""\t"" + str(choice_num) + "" - "" + dm click.echo(msg) choice_num = choice_num + 1 choice = click.prompt( ""Dependency manager"", type=click.Choice(choices), show_choices=False ) dependency_manager = valid_dep_managers[int(choice) - 1] # zero index return dependency_manager","def _get_dependency_manager ( dependency_manager , runtime ) : if not dependency_manager : valid_dep_managers = RUNTIME_TO_DEPENDENCY_MANAGERS . 
get ( runtime ) if valid_dep_managers is None : dependency_manager = None dependency_manager = valid_dep_managers [ 0 ] else : choices = list ( map ( str , range ( 1 , len ( valid_dep_managers ) + 1 ) ) ) choice_num = 1 click . echo ( ""\nWhich dependency manager would you like to use?"" ) for dm in valid_dep_managers : msg = ""\t"" + str ( choice_num ) + "" - "" + dm click . echo ( msg ) choice_num = choice_num + 1 choice = click . prompt ( ""Dependency manager"" , type = click . Choice ( choices ) , show_choices = False ) dependency_manager = valid_dep_managers [ int ( choice ) - 1 ] return dependency_manager",elif len ( valid_dep_managers ) == 1 :,289 14805,"def compare(self, first, second, scope_bracket=False): """"""Compare brackets. This function allows bracket plugins to add additional logic."""""" if scope_bracket: match = first is not None and second is not None else: match = first.type == second.type if not self.rules.check_compare: return match if match: if scope_bracket: bracket = self.rules.scopes[first.scope][""brackets""][first.type] else: bracket = self.rules.brackets[first.type] try: if bracket.compare is not None and match: match = bracket.compare( bracket.name, bh_plugin.BracketRegion(first.begin, first.end), bh_plugin.BracketRegion(second.begin, second.end), self.search.get_buffer(), ) except Exception: log(""Plugin Compare Error:\n%s"" % str(traceback.format_exc())) return match ","def compare ( self , first , second , scope_bracket = False ) : """"""Compare brackets. This function allows bracket plugins to add additional logic."""""" match = first is not None and second is not None else : match = first . type == second . type if not self . rules . check_compare : return match if match : bracket = self . rules . scopes [ first . scope ] [ ""brackets"" ] [ first . type ] else : bracket = self . rules . brackets [ first . type ] try : if bracket . compare is not None and match : match = bracket . compare ( bracket . name , bh_plugin . BracketRegion ( first . begin , first . end ) , bh_plugin . BracketRegion ( second . begin , second . end ) , self . search . get_buffer ( ) , ) except Exception : log ( ""Plugin Compare Error:\n%s"" % str ( traceback . format_exc ( ) ) ) return match",if scope_bracket :,280 14611,"def _to_record(self, data, zone): records = [] rrset_values = data[""rrset_values""] multiple_value_record = len(rrset_values) > 1 for index, rrset_value in enumerate(rrset_values): record = self._to_record_sub(data, zone, rrset_value) record.extra[""_multi_value""] = multiple_value_record if multiple_value_record: record.extra[""_other_records""] = [] records.append(record) if multiple_value_record: for index in range(0, len(records)): record = records[index] for other_index, other_record in enumerate(records): if index == other_index: continue extra = copy.deepcopy(other_record.extra) extra.pop(""_multi_value"") extra.pop(""_other_records"") item = { ""name"": other_record.name, ""data"": other_record.data, ""type"": other_record.type, ""extra"": extra, } record.extra[""_other_records""].append(item) return records","def _to_record ( self , data , zone ) : records = [ ] rrset_values = data [ ""rrset_values"" ] multiple_value_record = len ( rrset_values ) > 1 for index , rrset_value in enumerate ( rrset_values ) : record = self . _to_record_sub ( data , zone , rrset_value ) record . extra [ ""_multi_value"" ] = multiple_value_record if multiple_value_record : record . extra [ ""_other_records"" ] = [ ] records . 
append ( record ) if multiple_value_record : for index in range ( 0 , len ( records ) ) : record = records [ index ] for other_index , other_record in enumerate ( records ) : continue extra = copy . deepcopy ( other_record . extra ) extra . pop ( ""_multi_value"" ) extra . pop ( ""_other_records"" ) item = { ""name"" : other_record . name , ""data"" : other_record . data , ""type"" : other_record . type , ""extra"" : extra , } record . extra [ ""_other_records"" ] . append ( item ) return records",if index == other_index :,341 231,"def decompress(self, data): if not data: return data if not self._first_try: return self._obj.decompress(data) self._data += data try: decompressed = self._obj.decompress(data) if decompressed: self._first_try = False self._data = None return decompressed except zlib.error: self._first_try = False self._obj = zlib.decompressobj(-zlib.MAX_WBITS) try: return self.decompress(self._data) finally: self._data = None ","def decompress ( self , data ) : if not data : return data if not self . _first_try : return self . _obj . decompress ( data ) self . _data += data try : decompressed = self . _obj . decompress ( data ) self . _first_try = False self . _data = None return decompressed except zlib . error : self . _first_try = False self . _obj = zlib . decompressobj ( - zlib . MAX_WBITS ) try : return self . decompress ( self . _data ) finally : self . _data = None",if decompressed :,163 4904,"def CountButtons(self): """"""Returns the number of visible buttons in the docked pane."""""" n = 0 if self.HasCaption() or self.HasCaptionLeft(): if isinstance(wx.GetTopLevelParent(self.window), AuiFloatingFrame): return 1 if self.HasCloseButton(): n += 1 if self.HasMaximizeButton(): n += 1 if self.HasMinimizeButton(): n += 1 if self.HasPinButton(): n += 1 return n","def CountButtons ( self ) : """"""Returns the number of visible buttons in the docked pane."""""" n = 0 if self . HasCaption ( ) or self . HasCaptionLeft ( ) : if isinstance ( wx . GetTopLevelParent ( self . window ) , AuiFloatingFrame ) : return 1 if self . HasCloseButton ( ) : n += 1 if self . HasMaximizeButton ( ) : n += 1 n += 1 if self . HasPinButton ( ) : n += 1 return n",if self . HasMinimizeButton ( ) :,149 9090,"def layer_op(self, image): if image.ndim == 3: return self.__make_mask_3d(image) if image.ndim == 5: mod_to_mask = [m for m in range(image.shape[4]) if np.any(image[..., :, m])] mask = np.zeros_like(image, dtype=bool) mod_mask = None for mod in mod_to_mask: for t in range(image.shape[3]): mask[..., t, mod] = self.__make_mask_3d(image[..., t, mod]) # combine masks across the modalities dim if self.multimod_fusion == ""or"": if mod_mask is None: mod_mask = np.zeros(image.shape[:4], dtype=bool) mod_mask = np.logical_or(mod_mask, mask[..., mod]) elif self.multimod_fusion == ""and"": if mod_mask is None: mod_mask = np.ones(image.shape[:4], dtype=bool) mod_mask = np.logical_and(mod_mask, mask[..., mod]) for mod in mod_to_mask: mask[..., mod] = mod_mask return mask else: raise ValueError(""unknown input format"")","def layer_op ( self , image ) : if image . ndim == 3 : return self . __make_mask_3d ( image ) if image . ndim == 5 : mod_to_mask = [ m for m in range ( image . shape [ 4 ] ) if np . any ( image [ ... , : , m ] ) ] mask = np . zeros_like ( image , dtype = bool ) mod_mask = None for mod in mod_to_mask : for t in range ( image . shape [ 3 ] ) : mask [ ... , t , mod ] = self . __make_mask_3d ( image [ ... , t , mod ] ) if mod_mask is None : mod_mask = np . zeros ( image . 
shape [ : 4 ] , dtype = bool ) mod_mask = np . logical_or ( mod_mask , mask [ ... , mod ] ) elif self . multimod_fusion == ""and"" : if mod_mask is None : mod_mask = np . ones ( image . shape [ : 4 ] , dtype = bool ) mod_mask = np . logical_and ( mod_mask , mask [ ... , mod ] ) for mod in mod_to_mask : mask [ ... , mod ] = mod_mask return mask else : raise ValueError ( ""unknown input format"" )","if self . multimod_fusion == ""or"" :",359 18399,"def process_resource(self, resource, related): related_ids = self.get_related_ids([resource]) model = self.manager.get_model() op = self.data.get(""operator"", ""or"") found = [] if self.data.get(""match-resource"") is True: self.data[""value""] = self.get_resource_value(self.data[""key""], resource) if self.data.get(""value_type"") == ""resource_count"": count_matches = OPERATORS[self.data.get(""op"")]( len(related_ids), self.data.get(""value"") ) if count_matches: self._add_annotations(related_ids, resource) return count_matches for rid in related_ids: robj = related.get(rid, None) if robj is None: self.log.warning( ""Resource %s:%s references non existant %s: %s"", self.manager.type, resource[model.id], self.RelatedResource.rsplit(""."", 1)[-1], rid, ) continue if self.match(robj): found.append(rid) if found: self._add_annotations(found, resource) if op == ""or"" and found: return True elif op == ""and"" and len(found) == len(related_ids): return True return False","def process_resource ( self , resource , related ) : related_ids = self . get_related_ids ( [ resource ] ) model = self . manager . get_model ( ) op = self . data . get ( ""operator"" , ""or"" ) found = [ ] if self . data . get ( ""match-resource"" ) is True : self . data [ ""value"" ] = self . get_resource_value ( self . data [ ""key"" ] , resource ) if self . data . get ( ""value_type"" ) == ""resource_count"" : count_matches = OPERATORS [ self . data . get ( ""op"" ) ] ( len ( related_ids ) , self . data . get ( ""value"" ) ) if count_matches : self . _add_annotations ( related_ids , resource ) return count_matches for rid in related_ids : robj = related . get ( rid , None ) if robj is None : self . log . warning ( ""Resource %s:%s references non existant %s: %s"" , self . manager . type , resource [ model . id ] , self . RelatedResource . rsplit ( ""."" , 1 ) [ - 1 ] , rid , ) continue found . append ( rid ) if found : self . _add_annotations ( found , resource ) if op == ""or"" and found : return True elif op == ""and"" and len ( found ) == len ( related_ids ) : return True return False",if self . match ( robj ) :,397 20170,"def write_custom_dns_config(config, env): # We get a list of (qname, rtype, value) triples. Convert this into a # nice dictionary format for storage on disk. from collections import OrderedDict config = list(config) dns = OrderedDict() seen_qnames = set() # Process the qnames in the order we see them. for qname in [rec[0] for rec in config]: if qname in seen_qnames: continue seen_qnames.add(qname) records = [(rec[1], rec[2]) for rec in config if rec[0] == qname] if len(records) == 1 and records[0][0] == ""A"": dns[qname] = records[0][1] else: dns[qname] = OrderedDict() seen_rtypes = set() # Process the rtypes in the order we see them. for rtype in [rec[0] for rec in records]: if rtype in seen_rtypes: continue seen_rtypes.add(rtype) values = [rec[1] for rec in records if rec[0] == rtype] if len(values) == 1: values = values[0] dns[qname][rtype] = values # Write. 
config_yaml = rtyaml.dump(dns) with open(os.path.join(env[""STORAGE_ROOT""], ""dns/custom.yaml""), ""w"") as f: f.write(config_yaml)","def write_custom_dns_config ( config , env ) : from collections import OrderedDict config = list ( config ) dns = OrderedDict ( ) seen_qnames = set ( ) for qname in [ rec [ 0 ] for rec in config ] : if qname in seen_qnames : continue seen_qnames . add ( qname ) records = [ ( rec [ 1 ] , rec [ 2 ] ) for rec in config if rec [ 0 ] == qname ] dns [ qname ] = records [ 0 ] [ 1 ] else : dns [ qname ] = OrderedDict ( ) seen_rtypes = set ( ) for rtype in [ rec [ 0 ] for rec in records ] : if rtype in seen_rtypes : continue seen_rtypes . add ( rtype ) values = [ rec [ 1 ] for rec in records if rec [ 0 ] == rtype ] if len ( values ) == 1 : values = values [ 0 ] dns [ qname ] [ rtype ] = values config_yaml = rtyaml . dump ( dns ) with open ( os . path . join ( env [ ""STORAGE_ROOT"" ] , ""dns/custom.yaml"" ) , ""w"" ) as f : f . write ( config_yaml )","if len ( records ) == 1 and records [ 0 ] [ 0 ] == ""A"" :",401 9592,"def translate(self, line): parsed = self.RE_LINE_PARSER.match(line) if parsed: value = parsed.group(3) stage = parsed.group(1) if stage == ""send"": # query string is rendered here return ""\n# HTTP Request:\n"" + self.stripslashes(value) elif stage == ""reply"": return ""\n\n# HTTP Response:\n"" + self.stripslashes(value) elif stage == ""header"": return value + ""\n"" else: return value return line","def translate ( self , line ) : parsed = self . RE_LINE_PARSER . match ( line ) if parsed : value = parsed . group ( 3 ) stage = parsed . group ( 1 ) return ""\n# HTTP Request:\n"" + self . stripslashes ( value ) elif stage == ""reply"" : return ""\n\n# HTTP Response:\n"" + self . stripslashes ( value ) elif stage == ""header"" : return value + ""\n"" else : return value return line","if stage == ""send"" :",156 18646,"def _encode_regex(name, value, dummy0, dummy1): """"""Encode a python regex or bson.regex.Regex."""""" flags = value.flags # Python 2 common case if flags == 0: return b""\x0B"" + name + _make_c_string_check(value.pattern) + b""\x00"" # Python 3 common case elif flags == re.UNICODE: return b""\x0B"" + name + _make_c_string_check(value.pattern) + b""u\x00"" else: sflags = b"""" if flags & re.IGNORECASE: sflags += b""i"" if flags & re.LOCALE: sflags += b""l"" if flags & re.MULTILINE: sflags += b""m"" if flags & re.DOTALL: sflags += b""s"" if flags & re.UNICODE: sflags += b""u"" if flags & re.VERBOSE: sflags += b""x"" sflags += b""\x00"" return b""\x0B"" + name + _make_c_string_check(value.pattern) + sflags","def _encode_regex ( name , value , dummy0 , dummy1 ) : """"""Encode a python regex or bson.regex.Regex."""""" flags = value . flags if flags == 0 : return b""\x0B"" + name + _make_c_string_check ( value . pattern ) + b""\x00"" elif flags == re . UNICODE : return b""\x0B"" + name + _make_c_string_check ( value . pattern ) + b""u\x00"" else : sflags = b"""" if flags & re . IGNORECASE : sflags += b""i"" if flags & re . LOCALE : sflags += b""l"" sflags += b""m"" if flags & re . DOTALL : sflags += b""s"" if flags & re . UNICODE : sflags += b""u"" if flags & re . VERBOSE : sflags += b""x"" sflags += b""\x00"" return b""\x0B"" + name + _make_c_string_check ( value . pattern ) + sflags",if flags & re . 
MULTILINE :,313 8134,"def find_field_type_differ(self, meta, table_description, table_name, func=None): db_fields = dict([(row[0], row) for row in table_description]) for field in all_local_fields(meta): if field.name not in db_fields: continue description = db_fields[field.name] model_type = self.get_field_model_type(field) db_type = self.get_field_db_type(description, field, table_name) # use callback function if defined if func: model_type, db_type = func(field, description, model_type, db_type) if not self.strip_parameters(db_type) == self.strip_parameters(model_type): self.add_difference( ""field-type-differ"", table_name, field.name, model_type, db_type )","def find_field_type_differ ( self , meta , table_description , table_name , func = None ) : db_fields = dict ( [ ( row [ 0 ] , row ) for row in table_description ] ) for field in all_local_fields ( meta ) : if field . name not in db_fields : continue description = db_fields [ field . name ] model_type = self . get_field_model_type ( field ) db_type = self . get_field_db_type ( description , field , table_name ) model_type , db_type = func ( field , description , model_type , db_type ) if not self . strip_parameters ( db_type ) == self . strip_parameters ( model_type ) : self . add_difference ( ""field-type-differ"" , table_name , field . name , model_type , db_type )",if func :,244 13371,"def _activate_plugins_of_category(self, category): """"""Activate all the plugins of a given category and return them."""""" # this code duplicated in nikola/nikola.py plugins = [] for plugin_info in self.plugin_manager.getPluginsOfCategory(category): if plugin_info.name in self.config.get(""DISABLED_PLUGINS""): self.plugin_manager.removePluginFromCategory(plugin_info, category) else: self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) plugins.append(plugin_info) return plugins ","def _activate_plugins_of_category ( self , category ) : """"""Activate all the plugins of a given category and return them."""""" plugins = [ ] for plugin_info in self . plugin_manager . getPluginsOfCategory ( category ) : self . plugin_manager . removePluginFromCategory ( plugin_info , category ) else : self . plugin_manager . activatePluginByName ( plugin_info . name ) plugin_info . plugin_object . set_site ( self ) plugins . append ( plugin_info ) return plugins","if plugin_info . name in self . config . 
get ( ""DISABLED_PLUGINS"" ) :",175 24145,"def makeStaircaseCtrls(self): """"""Setup the controls for a StairHandler"""""" panel = wx.Panel(parent=self) panelSizer = wx.GridBagSizer(5, 5) panel.SetSizer(panelSizer) row = 0 handler = self.stairHandler # loop through the params for fieldName in handler.params: # try and get alternative ""label"" for the parameter try: label = handler.params[fieldName].label if not label: # it might exist but be empty label = fieldName except Exception: label = fieldName # handle special cases if fieldName == ""endPoints"": continue # this was deprecated in v1.62.00 if fieldName in self.globalCtrls: # these have already been made and inserted into sizer ctrls = self.globalCtrls[fieldName] else: # normal text entry field ctrls = ParamCtrls( dlg=self, parent=panel, label=label, fieldName=fieldName, param=handler.params[fieldName], ) panelSizer.Add(ctrls.nameCtrl, [row, 0]) if hasattr(ctrls.valueCtrl, ""_szr""): panelSizer.Add(ctrls.valueCtrl._szr, [row, 1]) else: panelSizer.Add(ctrls.valueCtrl, [row, 1]) row += 1 # store info about the field self.staircaseCtrls[fieldName] = ctrls return panel","def makeStaircaseCtrls ( self ) : """"""Setup the controls for a StairHandler"""""" panel = wx . Panel ( parent = self ) panelSizer = wx . GridBagSizer ( 5 , 5 ) panel . SetSizer ( panelSizer ) row = 0 handler = self . stairHandler for fieldName in handler . params : try : label = handler . params [ fieldName ] . label if not label : label = fieldName except Exception : label = fieldName continue if fieldName in self . globalCtrls : ctrls = self . globalCtrls [ fieldName ] else : ctrls = ParamCtrls ( dlg = self , parent = panel , label = label , fieldName = fieldName , param = handler . params [ fieldName ] , ) panelSizer . Add ( ctrls . nameCtrl , [ row , 0 ] ) if hasattr ( ctrls . valueCtrl , ""_szr"" ) : panelSizer . Add ( ctrls . valueCtrl . _szr , [ row , 1 ] ) else : panelSizer . Add ( ctrls . valueCtrl , [ row , 1 ] ) row += 1 self . staircaseCtrls [ fieldName ] = ctrls return panel","if fieldName == ""endPoints"" :",444 9208,"def get_rules(self, map): for rulefactory in self.rules: for rule in rulefactory.get_rules(map): new_defaults = subdomain = None if rule.defaults: new_defaults = {} for key, value in iteritems(rule.defaults): if isinstance(value, string_types): value = format_string(value, self.context) new_defaults[key] = value if rule.subdomain is not None: subdomain = format_string(rule.subdomain, self.context) new_endpoint = rule.endpoint if isinstance(new_endpoint, string_types): new_endpoint = format_string(new_endpoint, self.context) yield Rule( format_string(rule.rule, self.context), new_defaults, subdomain, rule.methods, rule.build_only, new_endpoint, rule.strict_slashes, ) ","def get_rules ( self , map ) : for rulefactory in self . rules : for rule in rulefactory . get_rules ( map ) : new_defaults = subdomain = None new_defaults = { } for key , value in iteritems ( rule . defaults ) : if isinstance ( value , string_types ) : value = format_string ( value , self . context ) new_defaults [ key ] = value if rule . subdomain is not None : subdomain = format_string ( rule . subdomain , self . context ) new_endpoint = rule . endpoint if isinstance ( new_endpoint , string_types ) : new_endpoint = format_string ( new_endpoint , self . context ) yield Rule ( format_string ( rule . rule , self . context ) , new_defaults , subdomain , rule . methods , rule . build_only , new_endpoint , rule . strict_slashes , )",if rule . 
defaults :,296 3081,"def cmd_exec_stdout(self, command, errormsg="""", log=True): """"""Run shell command from Python"""""" try: log and Log.debug(self, ""Running command: {0}"".format(command)) with subprocess.Popen( [command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True ) as proc: (cmd_stdout_bytes, cmd_stderr_bytes) = proc.communicate() (cmd_stdout, cmd_stderr) = ( cmd_stdout_bytes.decode(""utf-8"", ""replace""), cmd_stderr_bytes.decode(""utf-8"", ""replace""), ) if proc.returncode == 0: Log.debug( self, ""Command Output: {0}, \nCommand Error: {1}"".format( cmd_stdout, cmd_stderr ), ) return cmd_stdout else: Log.debug( self, ""Command Output: {0}, \nCommand Error: {1}"".format( cmd_stdout, cmd_stderr ), ) return cmd_stdout except OSError as e: Log.debug(self, str(e)) raise CommandExecutionError except Exception as e: Log.debug(self, str(e)) raise CommandExecutionError","def cmd_exec_stdout ( self , command , errormsg = """" , log = True ) : """"""Run shell command from Python"""""" try : log and Log . debug ( self , ""Running command: {0}"" . format ( command ) ) with subprocess . Popen ( [ command ] , stdout = subprocess . PIPE , stderr = subprocess . PIPE , shell = True ) as proc : ( cmd_stdout_bytes , cmd_stderr_bytes ) = proc . communicate ( ) ( cmd_stdout , cmd_stderr ) = ( cmd_stdout_bytes . decode ( ""utf-8"" , ""replace"" ) , cmd_stderr_bytes . decode ( ""utf-8"" , ""replace"" ) , ) Log . debug ( self , ""Command Output: {0}, \nCommand Error: {1}"" . format ( cmd_stdout , cmd_stderr ) , ) return cmd_stdout else : Log . debug ( self , ""Command Output: {0}, \nCommand Error: {1}"" . format ( cmd_stdout , cmd_stderr ) , ) return cmd_stdout except OSError as e : Log . debug ( self , str ( e ) ) raise CommandExecutionError except Exception as e : Log . debug ( self , str ( e ) ) raise CommandExecutionError",if proc . returncode == 0 :,379 11591,"def getUnread(self): unreadMessages = 0 unreadSubscriptions = 0 queryreturn = sqlQuery( """"""SELECT msgid, toaddress, read FROM inbox where folder='inbox' """""" ) for row in queryreturn: msgid, toAddress, read = row try: if toAddress == str_broadcast_subscribers: toLabel = str_broadcast_subscribers else: toLabel = shared.config.get(toAddress, ""label"") except: toLabel = """" if toLabel == """": toLabel = toAddress if not read: if toLabel == str_broadcast_subscribers: # increment the unread subscriptions unreadSubscriptions = unreadSubscriptions + 1 else: # increment the unread messages unreadMessages = unreadMessages + 1 return unreadMessages, unreadSubscriptions","def getUnread ( self ) : unreadMessages = 0 unreadSubscriptions = 0 queryreturn = sqlQuery ( """"""SELECT msgid, toaddress, read FROM inbox where folder='inbox' """""" ) for row in queryreturn : msgid , toAddress , read = row try : if toAddress == str_broadcast_subscribers : toLabel = str_broadcast_subscribers else : toLabel = shared . config . 
get ( toAddress , ""label"" ) except : toLabel = """" if toLabel == """" : toLabel = toAddress if not read : unreadSubscriptions = unreadSubscriptions + 1 else : unreadMessages = unreadMessages + 1 return unreadMessages , unreadSubscriptions",if toLabel == str_broadcast_subscribers :,245 24462,"def populate_disk_bus_combo(self, devtype, no_default): buslist = self.widget(""disk-bus-combo"") busmodel = buslist.get_model() busmodel.clear() buses = [] if devtype == virtinst.VirtualDisk.DEVICE_FLOPPY: buses.append([""fdc"", ""Floppy""]) elif devtype == virtinst.VirtualDisk.DEVICE_CDROM: buses.append([""ide"", ""IDE""]) if self.vm.rhel6_defaults(): buses.append([""scsi"", ""SCSI""]) else: if self.vm.is_hvm(): buses.append([""ide"", ""IDE""]) if self.vm.rhel6_defaults(): buses.append([""scsi"", ""SCSI""]) buses.append([""usb"", ""USB""]) if self.vm.get_hv_type() in [""kvm"", ""test""]: buses.append([""sata"", ""SATA""]) buses.append([""virtio"", ""Virtio""]) if self.vm.conn.is_xen() or self.vm.get_hv_type() == ""test"": buses.append([""xen"", ""Xen""]) for row in buses: busmodel.append(row) if not no_default: busmodel.append([None, ""default""])","def populate_disk_bus_combo ( self , devtype , no_default ) : buslist = self . widget ( ""disk-bus-combo"" ) busmodel = buslist . get_model ( ) busmodel . clear ( ) buses = [ ] if devtype == virtinst . VirtualDisk . DEVICE_FLOPPY : buses . append ( [ ""fdc"" , ""Floppy"" ] ) elif devtype == virtinst . VirtualDisk . DEVICE_CDROM : buses . append ( [ ""ide"" , ""IDE"" ] ) if self . vm . rhel6_defaults ( ) : buses . append ( [ ""scsi"" , ""SCSI"" ] ) else : if self . vm . is_hvm ( ) : buses . append ( [ ""ide"" , ""IDE"" ] ) if self . vm . rhel6_defaults ( ) : buses . append ( [ ""scsi"" , ""SCSI"" ] ) buses . append ( [ ""usb"" , ""USB"" ] ) buses . append ( [ ""sata"" , ""SATA"" ] ) buses . append ( [ ""virtio"" , ""Virtio"" ] ) if self . vm . conn . is_xen ( ) or self . vm . get_hv_type ( ) == ""test"" : buses . append ( [ ""xen"" , ""Xen"" ] ) for row in buses : busmodel . append ( row ) if not no_default : busmodel . append ( [ None , ""default"" ] )","if self . vm . get_hv_type ( ) in [ ""kvm"" , ""test"" ] :",378 7024,"def _find_w9xpopen(self): """"""Find and return absolute path to w9xpopen.exe"""""" # pylint:disable=undefined-variable w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), ""w9xpopen.exe"") if not os.path.exists(w9xpopen): # Eeek - file-not-found - possibly an embedding # situation - see if we can locate it in sys.exec_prefix w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), ""w9xpopen.exe"") if not os.path.exists(w9xpopen): raise RuntimeError( ""Cannot locate w9xpopen.exe, which is "" ""needed for Popen to work with your "" ""shell or platform."" ) return w9xpopen","def _find_w9xpopen ( self ) : """"""Find and return absolute path to w9xpopen.exe"""""" w9xpopen = os . path . join ( os . path . dirname ( GetModuleFileName ( 0 ) ) , ""w9xpopen.exe"" ) w9xpopen = os . path . join ( os . path . dirname ( sys . exec_prefix ) , ""w9xpopen.exe"" ) raise RuntimeError ( ""Cannot locate w9xpopen.exe, which is "" ""needed for Popen to work with your "" ""shell or platform."" ) return w9xpopen",if not os . path . 
exists ( w9xpopen ) :,232 17050,"def get_first_param_index(self, group_id, param_group, partition_id): for index, param in enumerate(param_group): param_id = self.get_param_id(param) if partition_id in self.param_to_partition_ids[group_id][param_id]: return index return None ","def get_first_param_index ( self , group_id , param_group , partition_id ) : for index , param in enumerate ( param_group ) : param_id = self . get_param_id ( param ) return index return None",if partition_id in self . param_to_partition_ids [ group_id ] [ param_id ] :,90 14545,"def parse_bash_set_output(output): """"""Parse Bash-like 'set' output"""""" if not sys.platform.startswith(""win""): # Replace ""\""-continued lines in *Linux* environment dumps. # Cannot do this on Windows because a ""\"" at the end of the # line does not imply a continuation. output = output.replace(""\\\n"", """") environ = {} for line in output.splitlines(0): line = line.rstrip() if not line: continue # skip black lines item = _ParseBashEnvStr(line) if item: environ[item[0]] = item[1] return environ","def parse_bash_set_output ( output ) : """"""Parse Bash-like 'set' output"""""" if not sys . platform . startswith ( ""win"" ) : output = output . replace ( ""\\\n"" , """" ) environ = { } for line in output . splitlines ( 0 ) : line = line . rstrip ( ) continue item = _ParseBashEnvStr ( line ) if item : environ [ item [ 0 ] ] = item [ 1 ] return environ",if not line :,177 12785,"def _convert_to_seconds(value): """"""Converts TTL strings into seconds"""""" try: return int(value) except ValueError: # parse the BIND format # (w)eek, (d)ay, (h)our, (m)inute, (s)econd seconds = 0 ttl_string = value.lower() for component in [""w"", ""d"", ""h"", ""m"", ""s""]: regex = date_regex_dict[component][""regex""] match = regex.search(ttl_string) if match: match_string = match.group(0) ttl_string = ttl_string.replace(match_string, """") match_value = int(match_string.strip(component)) seconds += match_value * date_regex_dict[component][""scale""] if not ttl_string: return seconds # convert the last piece without any units, which must be seconds try: seconds += int(ttl_string) return seconds except ValueError: raise InvalidArgumentValueError( ""Unable to convert value '{}' to seconds."".format(value) )","def _convert_to_seconds ( value ) : """"""Converts TTL strings into seconds"""""" try : return int ( value ) except ValueError : seconds = 0 ttl_string = value . lower ( ) for component in [ ""w"" , ""d"" , ""h"" , ""m"" , ""s"" ] : regex = date_regex_dict [ component ] [ ""regex"" ] match = regex . search ( ttl_string ) match_string = match . group ( 0 ) ttl_string = ttl_string . replace ( match_string , """" ) match_value = int ( match_string . strip ( component ) ) seconds += match_value * date_regex_dict [ component ] [ ""scale"" ] if not ttl_string : return seconds try : seconds += int ( ttl_string ) return seconds except ValueError : raise InvalidArgumentValueError ( ""Unable to convert value '{}' to seconds."" . 
format ( value ) )",if match :,325 8839,"def test_sin_values(): firstval = None for i, v in zip(range(1000), sin_values()): assert -1 <= v <= 1 assert isclose(v, sin(radians(i)), abs_tol=1e-9) if i == 0: firstval = v else: if i % 360 == 0: assert v == firstval for period in (360, 100): firstval = None for i, v in zip(range(1000), sin_values(period)): assert -1 <= v <= 1 if i == 0: firstval = v else: if i % period == 0: assert v == firstval","def test_sin_values ( ) : firstval = None for i , v in zip ( range ( 1000 ) , sin_values ( ) ) : assert - 1 <= v <= 1 assert isclose ( v , sin ( radians ( i ) ) , abs_tol = 1e-9 ) if i == 0 : firstval = v else : assert v == firstval for period in ( 360 , 100 ) : firstval = None for i , v in zip ( range ( 1000 ) , sin_values ( period ) ) : assert - 1 <= v <= 1 if i == 0 : firstval = v else : if i % period == 0 : assert v == firstval",if i % 360 == 0 :,202 13126,"def wait_complete(self): """"""Wait for futures complete done."""""" for future in concurrent.futures.as_completed(self._futures.keys()): try: error = future.exception() except concurrent.futures.CancelledError: break name = self._futures[future] if error is not None: err_msg = 'Extracting ""{0}"", got: {1}'.format(name, error) logger.error(err_msg)","def wait_complete ( self ) : """"""Wait for futures complete done."""""" for future in concurrent . futures . as_completed ( self . _futures . keys ( ) ) : try : error = future . exception ( ) except concurrent . futures . CancelledError : break name = self . _futures [ future ] err_msg = 'Extracting ""{0}"", got: {1}' . format ( name , error ) logger . error ( err_msg )",if error is not None :,124 13795,"def _wrapper(self, *args, **kwargs): if self.rebuild is False: self.rebuild = True if self.max_size is not None and len(self.word_count) >= self.max_size: logger.info( ""[Warning] Vocabulary has reached the max size {} when calling {} method. "" ""Adding more words may cause unexpected behaviour of Vocabulary. "".format( self.max_size, func.__name__ ) ) return func(self, *args, **kwargs) ","def _wrapper ( self , * args , ** kwargs ) : if self . rebuild is False : self . rebuild = True logger . info ( ""[Warning] Vocabulary has reached the max size {} when calling {} method. "" ""Adding more words may cause unexpected behaviour of Vocabulary. "" . format ( self . max_size , func . __name__ ) ) return func ( self , * args , ** kwargs )",if self . max_size is not None and len ( self . word_count ) >= self . max_size :,146 1703,"def formatted_addon(self, obj): if obj.version: return format_html( '{}' ""
"" """" "" "" "" "" ""
Version:{}
Channel:{}
"", # We use the add-on's ID to support deleted add-ons. urljoin( settings.EXTERNAL_SITE_URL, reverse( ""reviewers.review"", args=[ ( ""listed"" if obj.version.channel == amo.RELEASE_CHANNEL_LISTED else ""unlisted"" ), obj.version.addon.id, ], ), ), obj.version.addon.name, obj.version.version, obj.version.get_channel_display(), ) return ""-"" ","def formatted_addon ( self , obj ) : if obj . version : return format_html ( '{}' ""
"" """" "" "" "" "" ""
Version:{}
Channel:{}
"" , urljoin ( settings . EXTERNAL_SITE_URL , reverse ( ""reviewers.review"" , args = [ ( ""listed"" else ""unlisted"" ) , obj . version . addon . id , ] , ) , ) , obj . version . addon . name , obj . version . version , obj . version . get_channel_display ( ) , ) return ""-""",if obj . version . channel == amo . RELEASE_CHANNEL_LISTED,305 21114,"def home(request): from django.conf import settings print(settings.SOME_VALUE) subject = None message = None size = 0 print(request.META) if request.POST: form = MsgForm(request.POST, request.FILES) print(request.FILES) if form.is_valid(): subject = form.cleaned_data[""subject""] message = form.cleaned_data[""message""] f = request.FILES[""f""] if not hasattr(f, ""fileno""): size = len(f.read()) else: try: size = int(os.fstat(f.fileno())[6]) except io.UnsupportedOperation: size = len(f.read()) else: form = MsgForm() return render( request, ""home.html"", {""form"": form, ""subject"": subject, ""message"": message, ""size"": size}, )","def home ( request ) : from django . conf import settings print ( settings . SOME_VALUE ) subject = None message = None size = 0 print ( request . META ) if request . POST : form = MsgForm ( request . POST , request . FILES ) print ( request . FILES ) subject = form . cleaned_data [ ""subject"" ] message = form . cleaned_data [ ""message"" ] f = request . FILES [ ""f"" ] if not hasattr ( f , ""fileno"" ) : size = len ( f . read ( ) ) else : try : size = int ( os . fstat ( f . fileno ( ) ) [ 6 ] ) except io . UnsupportedOperation : size = len ( f . read ( ) ) else : form = MsgForm ( ) return render ( request , ""home.html"" , { ""form"" : form , ""subject"" : subject , ""message"" : message , ""size"" : size } , )",if form . is_valid ( ) :,270 3433,"def backup_txs(self, txs, is_unspendable): undo_info = self.db.read_undo_info(self.height) if undo_info is None: raise ChainError( ""no undo information found for height {:,d}"".format(self.height) ) # Use local vars for speed in the loops put_utxo = self.utxo_cache.__setitem__ spend_utxo = self.spend_utxo add_touched = self.touched.add undo_entry_len = 13 + HASHX_LEN # Restore coins that had been spent # (may include coins made then spent in this block) n = 0 for tx, tx_hash in txs: for txin in tx.inputs: if txin.is_generation(): continue undo_item = undo_info[n : n + undo_entry_len] put_utxo(txin.prev_hash + pack_le_uint32(txin.prev_idx), undo_item) add_touched(undo_item[:-13]) n += undo_entry_len assert n == len(undo_info) # Remove tx outputs made in this block, by spending them. for tx, tx_hash in txs: for idx, txout in enumerate(tx.outputs): # Spend the TX outputs. Be careful with unspendable # outputs - we didn't save those in the first place. if is_unspendable(txout.pk_script): continue cache_value = spend_utxo(tx_hash, idx) add_touched(cache_value[:-13]) self.tx_count -= len(txs)","def backup_txs ( self , txs , is_unspendable ) : undo_info = self . db . read_undo_info ( self . height ) if undo_info is None : raise ChainError ( ""no undo information found for height {:,d}"" . format ( self . height ) ) put_utxo = self . utxo_cache . __setitem__ spend_utxo = self . spend_utxo add_touched = self . touched . add undo_entry_len = 13 + HASHX_LEN n = 0 for tx , tx_hash in txs : for txin in tx . inputs : continue undo_item = undo_info [ n : n + undo_entry_len ] put_utxo ( txin . prev_hash + pack_le_uint32 ( txin . 
prev_idx ) , undo_item ) add_touched ( undo_item [ : - 13 ] ) n += undo_entry_len assert n == len ( undo_info ) for tx , tx_hash in txs : for idx , txout in enumerate ( tx . outputs ) : if is_unspendable ( txout . pk_script ) : continue cache_value = spend_utxo ( tx_hash , idx ) add_touched ( cache_value [ : - 13 ] ) self . tx_count -= len ( txs )",if txin . is_generation ( ) :,470 20662,"def __setitem__(self, key, value): key = self.__fixkey__(key) checker = self.get_rule(key)[self.RULE_CHECKER] if not checker is True: if checker is False: if isinstance(value, dict) and isinstance(self[key], dict): for k, v in value.iteritems(): self[key][k] = v return raise ConfigValueError( _(""Modifying %s/%s is not "" ""allowed"") % (self._name, key) ) elif isinstance(checker, (list, set, tuple)): if value not in checker: raise ConfigValueError( _(""Invalid value for %s/%s: %s"") % (self._name, key, value) ) elif isinstance(checker, (type, type(RuledContainer))): try: if value is None: value = checker() else: value = checker(value) except (ConfigValueError): raise except (validators.IgnoreValue): return except (ValueError, TypeError): raise ValueError( _(""Invalid value for %s/%s: %s"") % (self._name, key, value) ) else: raise Exception( _(""Unknown constraint for %s/%s: %s"") % (self._name, key, checker) ) write_watcher = self.real_getattr(""_write_watcher"") if write_watcher is not None: write_watcher(self, key, value) self.__passkey__(key, value) self.__createkey_and_setitem__(key, value) self.__passkey_recurse__(key, value) ","def __setitem__ ( self , key , value ) : key = self . __fixkey__ ( key ) checker = self . get_rule ( key ) [ self . RULE_CHECKER ] if not checker is True : if checker is False : if isinstance ( value , dict ) and isinstance ( self [ key ] , dict ) : for k , v in value . iteritems ( ) : self [ key ] [ k ] = v return raise ConfigValueError ( _ ( ""Modifying %s/%s is not "" ""allowed"" ) % ( self . _name , key ) ) elif isinstance ( checker , ( list , set , tuple ) ) : raise ConfigValueError ( _ ( ""Invalid value for %s/%s: %s"" ) % ( self . _name , key , value ) ) elif isinstance ( checker , ( type , type ( RuledContainer ) ) ) : try : if value is None : value = checker ( ) else : value = checker ( value ) except ( ConfigValueError ) : raise except ( validators . IgnoreValue ) : return except ( ValueError , TypeError ) : raise ValueError ( _ ( ""Invalid value for %s/%s: %s"" ) % ( self . _name , key , value ) ) else : raise Exception ( _ ( ""Unknown constraint for %s/%s: %s"" ) % ( self . _name , key , checker ) ) write_watcher = self . real_getattr ( ""_write_watcher"" ) if write_watcher is not None : write_watcher ( self , key , value ) self . __passkey__ ( key , value ) self . __createkey_and_setitem__ ( key , value ) self . 
__passkey_recurse__ ( key , value )",if value not in checker :,483 16107,"def _merge_dict(stack, obj): strategy = obj.pop(""__"", ""merge-last"") if strategy not in strategies: raise Exception( 'Unknown strategy ""{0}"", should be one of {1}'.format(strategy, strategies) ) if strategy == ""overwrite"": return _cleanup(obj) else: for k, v in six.iteritems(obj): if strategy == ""remove"": stack.pop(k, None) continue if k in stack: if strategy == ""merge-first"": # merge-first is same as merge-last but the other way round # so let's switch stack[k] and v stack_k = stack[k] stack[k] = _cleanup(v) v = stack_k if type(stack[k]) != type(v): log.debug( ""Force overwrite, types differ: '%s' != '%s'"", stack[k], v ) stack[k] = _cleanup(v) elif isinstance(v, dict): stack[k] = _merge_dict(stack[k], v) elif isinstance(v, list): stack[k] = _merge_list(stack[k], v) else: stack[k] = v else: stack[k] = _cleanup(v) return stack","def _merge_dict ( stack , obj ) : strategy = obj . pop ( ""__"" , ""merge-last"" ) if strategy not in strategies : raise Exception ( 'Unknown strategy ""{0}"", should be one of {1}' . format ( strategy , strategies ) ) if strategy == ""overwrite"" : return _cleanup ( obj ) else : for k , v in six . iteritems ( obj ) : if strategy == ""remove"" : stack . pop ( k , None ) continue if k in stack : if strategy == ""merge-first"" : stack_k = stack [ k ] stack [ k ] = _cleanup ( v ) v = stack_k if type ( stack [ k ] ) != type ( v ) : log . debug ( ""Force overwrite, types differ: '%s' != '%s'"" , stack [ k ] , v ) stack [ k ] = _cleanup ( v ) stack [ k ] = _merge_dict ( stack [ k ] , v ) elif isinstance ( v , list ) : stack [ k ] = _merge_list ( stack [ k ] , v ) else : stack [ k ] = v else : stack [ k ] = _cleanup ( v ) return stack","elif isinstance ( v , dict ) :",420 13327,"def icyparser(self, url: str) -> Optional[str]: try: async with self.session.get(url, headers={""Icy-MetaData"": ""1""}) as resp: metaint = int(resp.headers[""icy-metaint""]) for _ in range(5): await resp.content.readexactly(metaint) metadata_length = ( struct.unpack(""B"", await resp.content.readexactly(1))[0] * 16 ) metadata = await resp.content.readexactly(metadata_length) m = re.search(STREAM_TITLE, metadata.rstrip(b""\0"")) if m: title = m.group(1) if title: title = title.decode(""utf-8"", errors=""replace"") return title else: return None except (KeyError, aiohttp.ClientConnectionError, aiohttp.ClientResponseError): return None","def icyparser ( self , url : str ) -> Optional [ str ] : try : async with self . session . get ( url , headers = { ""Icy-MetaData"" : ""1"" } ) as resp : metaint = int ( resp . headers [ ""icy-metaint"" ] ) for _ in range ( 5 ) : await resp . content . readexactly ( metaint ) metadata_length = ( struct . unpack ( ""B"" , await resp . content . readexactly ( 1 ) ) [ 0 ] * 16 ) metadata = await resp . content . readexactly ( metadata_length ) m = re . search ( STREAM_TITLE , metadata . rstrip ( b""\0"" ) ) title = m . group ( 1 ) if title : title = title . decode ( ""utf-8"" , errors = ""replace"" ) return title else : return None except ( KeyError , aiohttp . ClientConnectionError , aiohttp . ClientResponseError ) : return None",if m :,279 23113,"def readTables(self): """"""Read tables section"""""" while True: table = self.readTable() if table is None: return if table[""type""] == ""LAYER"": name = table.get(""name"") if name is not None: self.layers[name] = Layer(name, table) ","def readTables ( self ) : """"""Read tables section"""""" while True : table = self . 
readTable ( ) return if table [ ""type"" ] == ""LAYER"" : name = table . get ( ""name"" ) if name is not None : self . layers [ name ] = Layer ( name , table )",if table is None :,94 17876,"def Handle(self, args, context=None): result = ApiListClientActionRequestsResult() request_cache = {} for r in data_store.REL_DB.ReadAllClientActionRequests(str(args.client_id)): stub = action_registry.ACTION_STUB_BY_ID[r.action_identifier] client_action = compatibility.GetName(stub) request = ApiClientActionRequest( leased_until=r.leased_until, session_id=""%s/%s"" % (r.client_id, r.flow_id), client_action=client_action, ) result.items.append(request) if not args.fetch_responses: continue if r.flow_id not in request_cache: req_res = data_store.REL_DB.ReadAllFlowRequestsAndResponses( str(args.client_id), r.flow_id ) request_cache[r.flow_id] = req_res for req, responses in request_cache[r.flow_id]: if req.request_id == r.request_id: res = [] for resp_id in sorted(responses): m = responses[resp_id].AsLegacyGrrMessage() res.append(m) request.responses = res return result ","def Handle ( self , args , context = None ) : result = ApiListClientActionRequestsResult ( ) request_cache = { } for r in data_store . REL_DB . ReadAllClientActionRequests ( str ( args . client_id ) ) : stub = action_registry . ACTION_STUB_BY_ID [ r . action_identifier ] client_action = compatibility . GetName ( stub ) request = ApiClientActionRequest ( leased_until = r . leased_until , session_id = ""%s/%s"" % ( r . client_id , r . flow_id ) , client_action = client_action , ) result . items . append ( request ) if not args . fetch_responses : continue req_res = data_store . REL_DB . ReadAllFlowRequestsAndResponses ( str ( args . client_id ) , r . flow_id ) request_cache [ r . flow_id ] = req_res for req , responses in request_cache [ r . flow_id ] : if req . request_id == r . request_id : res = [ ] for resp_id in sorted ( responses ) : m = responses [ resp_id ] . AsLegacyGrrMessage ( ) res . append ( m ) request . responses = res return result",if r . flow_id not in request_cache :,376 20369,"def _should_mark_node_dnr(self, node, parent_nodes): for p in parent_nodes: if p.do_not_run is True: pass elif p.job: if p.job.status == ""successful"": if node in ( self.get_children(p, ""success_nodes"") + self.get_children(p, ""always_nodes"") ): return False elif p.job.status in [""failed"", ""error"", ""canceled""]: if node in ( self.get_children(p, ""failure_nodes"") + self.get_children(p, ""always_nodes"") ): return False else: return False elif not p.do_not_run and p.unified_job_template is None: if node in ( self.get_children(p, ""failure_nodes"") + self.get_children(p, ""always_nodes"") ): return False else: return False return True ","def _should_mark_node_dnr ( self , node , parent_nodes ) : for p in parent_nodes : pass elif p . job : if p . job . status == ""successful"" : if node in ( self . get_children ( p , ""success_nodes"" ) + self . get_children ( p , ""always_nodes"" ) ) : return False elif p . job . status in [ ""failed"" , ""error"" , ""canceled"" ] : if node in ( self . get_children ( p , ""failure_nodes"" ) + self . get_children ( p , ""always_nodes"" ) ) : return False else : return False elif not p . do_not_run and p . unified_job_template is None : if node in ( self . get_children ( p , ""failure_nodes"" ) + self . get_children ( p , ""always_nodes"" ) ) : return False else : return False return True",if p . 
do_not_run is True :,322 16051,"def update_metadata(self): for attrname in dir(self): if attrname.startswith(""__""): continue attrvalue = getattr(self, attrname, None) if attrvalue == 0: continue if attrname == ""salt_version"": attrname = ""version"" if hasattr(self.metadata, ""set_{0}"".format(attrname)): getattr(self.metadata, ""set_{0}"".format(attrname))(attrvalue) elif hasattr(self.metadata, attrname): try: setattr(self.metadata, attrname, attrvalue) except AttributeError: pass","def update_metadata ( self ) : for attrname in dir ( self ) : if attrname . startswith ( ""__"" ) : continue attrvalue = getattr ( self , attrname , None ) if attrvalue == 0 : continue attrname = ""version"" if hasattr ( self . metadata , ""set_{0}"" . format ( attrname ) ) : getattr ( self . metadata , ""set_{0}"" . format ( attrname ) ) ( attrvalue ) elif hasattr ( self . metadata , attrname ) : try : setattr ( self . metadata , attrname , attrvalue ) except AttributeError : pass","if attrname == ""salt_version"" :",173 24077,"def _end_completion(self, args): value = args[""completion_text""] paths = args[""paths""] if args[""forward_completion""]: common_prefix = os.path.commonprefix(paths) if len(common_prefix) > len(value): self.path_entry.set_text( common_prefix, set_file_chooser_folder=True, trigger_event=True ) self.path_entry.text_entry.set_position(len(self.path_entry.get_text())) self.completion_popup.set_values(paths, preserve_selection=True) if self.use_popup and len(paths) > 1: self.completion_popup.popup() elif self.completion_popup.is_popped_up() and args[""forward_completion""]: self.completion_popup.popdown()","def _end_completion ( self , args ) : value = args [ ""completion_text"" ] paths = args [ ""paths"" ] if args [ ""forward_completion"" ] : common_prefix = os . path . commonprefix ( paths ) self . path_entry . set_text ( common_prefix , set_file_chooser_folder = True , trigger_event = True ) self . path_entry . text_entry . set_position ( len ( self . path_entry . get_text ( ) ) ) self . completion_popup . set_values ( paths , preserve_selection = True ) if self . use_popup and len ( paths ) > 1 : self . completion_popup . popup ( ) elif self . completion_popup . is_popped_up ( ) and args [ ""forward_completion"" ] : self . completion_popup . popdown ( )",if len ( common_prefix ) > len ( value ) :,221 20288,"def R_op(self, inputs, eval_points): outs = self(*inputs, **dict(return_list=True)) rval = [None for x in outs] # For each output for idx, out in enumerate(outs): # make such that _bgrads computes only the gradients of the # current output on the inputs ( and not all outputs) ograds = [x.zeros_like() for x in outs] ograds[idx] = theano.tensor.ones_like(out) bgrads = self._bgrad(inputs, outs, ograds) rop_out = None for jdx, (inp, eval_point) in enumerate(izip(inputs, eval_points)): # if None, then we can just ignore this branch .. # what we do is to assume that for any non-differentiable # branch, the gradient is actually 0, which I think is not # the right thing to do .. have to talk to Ian and James # about it if bgrads[jdx] is None or isinstance(bgrads[jdx].type, DisconnectedType): pass elif eval_point is not None: if rop_out is None: rop_out = bgrads[jdx] * eval_point else: rop_out = rop_out + bgrads[jdx] * eval_point rval[idx] = rop_out return rval","def R_op ( self , inputs , eval_points ) : outs = self ( * inputs , ** dict ( return_list = True ) ) rval = [ None for x in outs ] for idx , out in enumerate ( outs ) : ograds = [ x . 
zeros_like ( ) for x in outs ] ograds [ idx ] = theano . tensor . ones_like ( out ) bgrads = self . _bgrad ( inputs , outs , ograds ) rop_out = None for jdx , ( inp , eval_point ) in enumerate ( izip ( inputs , eval_points ) ) : if bgrads [ jdx ] is None or isinstance ( bgrads [ jdx ] . type , DisconnectedType ) : pass if rop_out is None : rop_out = bgrads [ jdx ] * eval_point else : rop_out = rop_out + bgrads [ jdx ] * eval_point rval [ idx ] = rop_out return rval",elif eval_point is not None :,385 6835,"def assert_warns(expected): with warnings.catch_warnings(record=True) as w: warnings.simplefilter(""always"") yield # Python 2 does not raise warnings multiple times from the same stack # frame. if sys.version_info >= (3, 0): if not any(isinstance(m.message, expected) for m in w): try: exc_name = expected.__name__ except AttributeError: exc_name = str(expected) raise AssertionError(""%s not triggerred"" % exc_name)","def assert_warns ( expected ) : with warnings . catch_warnings ( record = True ) as w : warnings . simplefilter ( ""always"" ) yield if sys . version_info >= ( 3 , 0 ) : try : exc_name = expected . __name__ except AttributeError : exc_name = str ( expected ) raise AssertionError ( ""%s not triggerred"" % exc_name )","if not any ( isinstance ( m . message , expected ) for m in w ) :",147 20422,"def init_params(net): """"""Init layer parameters."""""" for module in net.modules(): if isinstance(module, nn.Conv2d): init.kaiming_normal(module.weight, mode=""fan_out"") if module.bias: init.constant(module.bias, 0) elif isinstance(module, nn.BatchNorm2d): init.constant(module.weight, 1) init.constant(module.bias, 0) elif isinstance(module, nn.Linear): init.normal(module.weight, std=1e-3) if module.bias: init.constant(module.bias, 0)","def init_params ( net ) : """"""Init layer parameters."""""" for module in net . modules ( ) : if isinstance ( module , nn . Conv2d ) : init . kaiming_normal ( module . weight , mode = ""fan_out"" ) if module . bias : init . constant ( module . bias , 0 ) elif isinstance ( module , nn . BatchNorm2d ) : init . constant ( module . weight , 1 ) init . constant ( module . bias , 0 ) init . normal ( module . weight , std = 1e-3 ) if module . bias : init . constant ( module . bias , 0 )","elif isinstance ( module , nn . Linear ) :",180 17483,"def _mock_send_packet(eio_sid, pkt): # make sure the packet can be encoded and decoded epkt = pkt.encode() if not isinstance(epkt, list): pkt = packet.Packet(encoded_packet=epkt) else: pkt = packet.Packet(encoded_packet=epkt[0]) for att in epkt[1:]: pkt.add_attachment(att) if pkt.packet_type == packet.EVENT or pkt.packet_type == packet.BINARY_EVENT: if eio_sid not in self.queue: self.queue[eio_sid] = [] if pkt.data[0] == ""message"" or pkt.data[0] == ""json"": self.queue[eio_sid].append( { ""name"": pkt.data[0], ""args"": pkt.data[1], ""namespace"": pkt.namespace or ""/"", } ) else: self.queue[eio_sid].append( { ""name"": pkt.data[0], ""args"": pkt.data[1:], ""namespace"": pkt.namespace or ""/"", } ) elif pkt.packet_type == packet.ACK or pkt.packet_type == packet.BINARY_ACK: self.acks[eio_sid] = {""args"": pkt.data, ""namespace"": pkt.namespace or ""/""} elif pkt.packet_type in [packet.DISCONNECT, packet.CONNECT_ERROR]: self.connected[pkt.namespace or ""/""] = False","def _mock_send_packet ( eio_sid , pkt ) : epkt = pkt . encode ( ) if not isinstance ( epkt , list ) : pkt = packet . Packet ( encoded_packet = epkt ) else : pkt = packet . Packet ( encoded_packet = epkt [ 0 ] ) for att in epkt [ 1 : ] : pkt . 
add_attachment ( att ) if pkt . packet_type == packet . EVENT or pkt . packet_type == packet . BINARY_EVENT : if eio_sid not in self . queue : self . queue [ eio_sid ] = [ ] self . queue [ eio_sid ] . append ( { ""name"" : pkt . data [ 0 ] , ""args"" : pkt . data [ 1 ] , ""namespace"" : pkt . namespace or ""/"" , } ) else : self . queue [ eio_sid ] . append ( { ""name"" : pkt . data [ 0 ] , ""args"" : pkt . data [ 1 : ] , ""namespace"" : pkt . namespace or ""/"" , } ) elif pkt . packet_type == packet . ACK or pkt . packet_type == packet . BINARY_ACK : self . acks [ eio_sid ] = { ""args"" : pkt . data , ""namespace"" : pkt . namespace or ""/"" } elif pkt . packet_type in [ packet . DISCONNECT , packet . CONNECT_ERROR ] : self . connected [ pkt . namespace or ""/"" ] = False","if pkt . data [ 0 ] == ""message"" or pkt . data [ 0 ] == ""json"" :",438 19844,"def mergeCombiners(self, x, y): for item in y: if len(x) < self.heap_limit: self.heap.push(x, item) else: self.heap.push_pop(x, item) return x ","def mergeCombiners ( self , x , y ) : for item in y : self . heap . push ( x , item ) else : self . heap . push_pop ( x , item ) return x",if len ( x ) < self . heap_limit :,73 25218,"def test_write_buffer(self): try: for mode in (""b"", """"): with open(""foo"", ""w+"" + mode) as foo: b = buffer(b""hello world"", 6) foo.write(b) with open(""foo"", ""r"") as foo: self.assertEqual(foo.readlines(), [""world""]) with open(""foo"", ""w+"") as foo: b = buffer(u""hello world"", 6) foo.write(b) with open(""foo"", ""r"") as foo: self.assertEqual(foo.readlines(), [""world""]) with open(""foo"", ""w+b"") as foo: b = buffer(u""hello world"", 6) foo.write(b) with open(""foo"", ""r"") as foo: if is_cpython: self.assertEqual( foo.readlines(), [""l\x00o\x00 \x00w\x00o\x00r\x00l\x00d\x00""] ) else: self.assertEqual(foo.readlines(), [""world""]) finally: self.delete_files(""foo"")","def test_write_buffer ( self ) : try : for mode in ( ""b"" , """" ) : with open ( ""foo"" , ""w+"" + mode ) as foo : b = buffer ( b""hello world"" , 6 ) foo . write ( b ) with open ( ""foo"" , ""r"" ) as foo : self . assertEqual ( foo . readlines ( ) , [ ""world"" ] ) with open ( ""foo"" , ""w+"" ) as foo : b = buffer ( u""hello world"" , 6 ) foo . write ( b ) with open ( ""foo"" , ""r"" ) as foo : self . assertEqual ( foo . readlines ( ) , [ ""world"" ] ) with open ( ""foo"" , ""w+b"" ) as foo : b = buffer ( u""hello world"" , 6 ) foo . write ( b ) with open ( ""foo"" , ""r"" ) as foo : self . assertEqual ( foo . readlines ( ) , [ ""l\x00o\x00 \x00w\x00o\x00r\x00l\x00d\x00"" ] ) else : self . assertEqual ( foo . readlines ( ) , [ ""world"" ] ) finally : self . delete_files ( ""foo"" )",if is_cpython :,327 18806,"def read_callback(): """"""Parse stats response from Marathon"""""" log_verbose(""Read callback called"") try: metrics = json.load(urllib2.urlopen(MARATHON_URL, timeout=10)) for group in [""gauges"", ""histograms"", ""meters"", ""timers"", ""counters""]: for name, values in metrics.get(group, {}).items(): for metric, value in values.items(): if not isinstance(value, basestring): dispatch_stat(""gauge"", ""."".join((name, metric)), value) except urllib2.URLError as e: collectd.error( ""marathon plugin: Error connecting to %s - %r"" % (MARATHON_URL, e) )","def read_callback ( ) : """"""Parse stats response from Marathon"""""" log_verbose ( ""Read callback called"" ) try : metrics = json . load ( urllib2 . 
urlopen ( MARATHON_URL , timeout = 10 ) ) for group in [ ""gauges"" , ""histograms"" , ""meters"" , ""timers"" , ""counters"" ] : for name , values in metrics . get ( group , { } ) . items ( ) : for metric , value in values . items ( ) : dispatch_stat ( ""gauge"" , ""."" . join ( ( name , metric ) ) , value ) except urllib2 . URLError as e : collectd . error ( ""marathon plugin: Error connecting to %s - %r"" % ( MARATHON_URL , e ) )","if not isinstance ( value , basestring ) :",203 88,"def ReceiveMessageLoop(self): while self.connected == True: tmp = await self.ReadSocketData(16) if tmp is None: break (expr,) = struct.unpack(""!I"", tmp[:4]) (num,) = struct.unpack(""!I"", tmp[8:12]) num2 = expr - 16 tmp = await self.ReadSocketData(num2) if tmp is None: break if num2 != 0: num -= 1 if num == 0 or num == 1 or num == 2: (num3,) = struct.unpack(""!I"", tmp) self._UserCount = num3 continue elif num == 3 or num == 4: try: messages = tmp.decode(""utf-8"") except: continue await self.parseDanMu(messages) continue elif num == 5 or num == 6 or num == 7: continue else: if num != 16: pass else: continue","def ReceiveMessageLoop ( self ) : while self . connected == True : tmp = await self . ReadSocketData ( 16 ) if tmp is None : break ( expr , ) = struct . unpack ( ""!I"" , tmp [ : 4 ] ) ( num , ) = struct . unpack ( ""!I"" , tmp [ 8 : 12 ] ) num2 = expr - 16 tmp = await self . ReadSocketData ( num2 ) if tmp is None : break if num2 != 0 : num -= 1 ( num3 , ) = struct . unpack ( ""!I"" , tmp ) self . _UserCount = num3 continue elif num == 3 or num == 4 : try : messages = tmp . decode ( ""utf-8"" ) except : continue await self . parseDanMu ( messages ) continue elif num == 5 or num == 6 or num == 7 : continue else : if num != 16 : pass else : continue",if num == 0 or num == 1 or num == 2 :,318 15684,"def _rmtree(self, path): # Essentially a stripped down version of shutil.rmtree. We can't # use globals because they may be None'ed out at shutdown. for name in self._listdir(path): fullname = self._path_join(path, name) try: isdir = self._isdir(fullname) except self._os_error: isdir = False if isdir: self._rmtree(fullname) else: try: self._remove(fullname) except self._os_error: pass try: self._rmdir(path) except self._os_error: pass ","def _rmtree ( self , path ) : for name in self . _listdir ( path ) : fullname = self . _path_join ( path , name ) try : isdir = self . _isdir ( fullname ) except self . _os_error : isdir = False self . _rmtree ( fullname ) else : try : self . _remove ( fullname ) except self . _os_error : pass try : self . _rmdir ( path ) except self . _os_error : pass",if isdir :,183 17279,"def write(self, *bits): for bit in bits: if not self.bytestream: self.bytestream.append(0) byte = self.bytestream[self.bytenum] if self.bitnum == 8: if self.bytenum == len(self.bytestream) - 1: byte = 0 self.bytestream += bytes([byte]) self.bytenum += 1 self.bitnum = 0 mask = 2 ** self.bitnum if bit: byte |= mask else: byte &= ~mask self.bytestream[self.bytenum] = byte self.bitnum += 1","def write ( self , * bits ) : for bit in bits : self . bytestream . append ( 0 ) byte = self . bytestream [ self . bytenum ] if self . bitnum == 8 : if self . bytenum == len ( self . bytestream ) - 1 : byte = 0 self . bytestream += bytes ( [ byte ] ) self . bytenum += 1 self . bitnum = 0 mask = 2 ** self . bitnum if bit : byte |= mask else : byte &= ~ mask self . bytestream [ self . bytenum ] = byte self . bitnum += 1",if not self . 
bytestream :,186 10547,"def _write_ready(self): assert self._buffer, ""Data should not be empty"" try: n = self._sock.send(self._buffer) except (BlockingIOError, InterruptedError): pass except Exception as exc: self._loop.remove_writer(self._sock_fd) self._buffer.clear() self._fatal_error(exc, ""Fatal write error on socket transport"") else: if n: del self._buffer[:n] self._maybe_resume_protocol() # May append to buffer. if not self._buffer: self._loop.remove_writer(self._sock_fd) if self._closing: self._call_connection_lost(None) elif self._eof: self._sock.shutdown(socket.SHUT_WR) ","def _write_ready ( self ) : assert self . _buffer , ""Data should not be empty"" try : n = self . _sock . send ( self . _buffer ) except ( BlockingIOError , InterruptedError ) : pass except Exception as exc : self . _loop . remove_writer ( self . _sock_fd ) self . _buffer . clear ( ) self . _fatal_error ( exc , ""Fatal write error on socket transport"" ) else : if n : del self . _buffer [ : n ] self . _maybe_resume_protocol ( ) if not self . _buffer : self . _loop . remove_writer ( self . _sock_fd ) if self . _closing : self . _call_connection_lost ( None ) self . _sock . shutdown ( socket . SHUT_WR )",elif self . _eof :,226 19456,"def jupyter_progress_bar(min=0, max=1.0): """"""Returns an ipywidget progress bar or None if we can't import it"""""" widgets = wandb.util.get_module(""ipywidgets"") try: if widgets is None: # TODO: this currently works in iPython but it's deprecated since 4.0 from IPython.html import widgets # type: ignore assert hasattr(widgets, ""VBox"") assert hasattr(widgets, ""Label"") assert hasattr(widgets, ""FloatProgress"") return ProgressWidget(widgets, min=min, max=max) except (ImportError, AssertionError): return None","def jupyter_progress_bar ( min = 0 , max = 1.0 ) : """"""Returns an ipywidget progress bar or None if we can't import it"""""" widgets = wandb . util . get_module ( ""ipywidgets"" ) try : from IPython . html import widgets assert hasattr ( widgets , ""VBox"" ) assert hasattr ( widgets , ""Label"" ) assert hasattr ( widgets , ""FloatProgress"" ) return ProgressWidget ( widgets , min = min , max = max ) except ( ImportError , AssertionError ) : return None",if widgets is None :,168 6619,"def call(self, step_input, states): new_states = [] for i in range(self.num_layers): out, new_state = self.lstm_cells[i](step_input, states[i]) step_input = ( layers.dropout( out, self.dropout_prob, dropout_implementation=""upscale_in_train"" ) if self.dropout_prob > 0.0 else out ) new_states.append(new_state) return step_input, new_states","def call ( self , step_input , states ) : new_states = [ ] for i in range ( self . num_layers ) : out , new_state = self . lstm_cells [ i ] ( step_input , states [ i ] ) step_input = ( layers . dropout ( out , self . dropout_prob , dropout_implementation = ""upscale_in_train"" ) else out ) new_states . append ( new_state ) return step_input , new_states",if self . 
dropout_prob > 0.0,148 8045,"def _get_stream(self, mem, base, sat, sec_size, start_sid, size=None, name=""""): # print >> self.logfile, ""_get_stream"", base, sec_size, start_sid, size sectors = [] s = start_sid if size is None: # nothing to check against while s >= 0: start_pos = base + s * sec_size sectors.append(mem[start_pos : start_pos + sec_size]) try: s = sat[s] except IndexError: raise CompDocError( ""OLE2 stream %r: sector allocation table invalid entry (%d)"" % (name, s) ) assert s == EOCSID else: todo = size while s >= 0: start_pos = base + s * sec_size grab = sec_size if grab > todo: grab = todo todo -= grab sectors.append(mem[start_pos : start_pos + grab]) try: s = sat[s] except IndexError: raise CompDocError( ""OLE2 stream %r: sector allocation table invalid entry (%d)"" % (name, s) ) assert s == EOCSID if todo != 0: print( ""WARNING *** OLE2 stream %r: expected size %d, actual size %d"" % (name, size, size - todo), file=self.logfile, ) return b"""".join(sectors)","def _get_stream ( self , mem , base , sat , sec_size , start_sid , size = None , name = """" ) : sectors = [ ] s = start_sid if size is None : while s >= 0 : start_pos = base + s * sec_size sectors . append ( mem [ start_pos : start_pos + sec_size ] ) try : s = sat [ s ] except IndexError : raise CompDocError ( ""OLE2 stream %r: sector allocation table invalid entry (%d)"" % ( name , s ) ) assert s == EOCSID else : todo = size while s >= 0 : start_pos = base + s * sec_size grab = sec_size if grab > todo : grab = todo todo -= grab sectors . append ( mem [ start_pos : start_pos + grab ] ) try : s = sat [ s ] except IndexError : raise CompDocError ( ""OLE2 stream %r: sector allocation table invalid entry (%d)"" % ( name , s ) ) assert s == EOCSID print ( ""WARNING *** OLE2 stream %r: expected size %d, actual size %d"" % ( name , size , size - todo ) , file = self . logfile , ) return b"""" . join ( sectors )",if todo != 0 :,463 6811,"def __call__(self, trainer): # accumulate the observations keys = self._keys observation = trainer.observation summary = self._summary if keys is None: summary.add(observation) else: summary.add({k: observation[k] for k in keys if k in observation}) if trainer.is_before_training or self._trigger(trainer): # output the result stats = self._summary.compute_mean() stats_cpu = {} for name, value in six.iteritems(stats): stats_cpu[name] = float(value) # copy to CPU updater = trainer.updater stats_cpu[""epoch""] = updater.epoch stats_cpu[""iteration""] = updater.iteration stats_cpu[""elapsed_time""] = trainer.elapsed_time if self._postprocess is not None: self._postprocess(stats_cpu) self._log.append(stats_cpu) # write to the log file if self._log_name is not None: log_name = self._log_name.format(**stats_cpu) with utils.tempdir(prefix=log_name, dir=trainer.out) as tempd: path = os.path.join(tempd, ""log.json"") with open(path, ""w"") as f: json.dump(self._log, f, indent=4) new_path = os.path.join(trainer.out, log_name) shutil.move(path, new_path) # reset the summary for the next output self._init_summary()","def __call__ ( self , trainer ) : keys = self . _keys observation = trainer . observation summary = self . _summary if keys is None : summary . add ( observation ) else : summary . add ( { k : observation [ k ] for k in keys if k in observation } ) if trainer . is_before_training or self . _trigger ( trainer ) : stats = self . _summary . compute_mean ( ) stats_cpu = { } for name , value in six . iteritems ( stats ) : stats_cpu [ name ] = float ( value ) updater = trainer . 
updater stats_cpu [ ""epoch"" ] = updater . epoch stats_cpu [ ""iteration"" ] = updater . iteration stats_cpu [ ""elapsed_time"" ] = trainer . elapsed_time self . _postprocess ( stats_cpu ) self . _log . append ( stats_cpu ) if self . _log_name is not None : log_name = self . _log_name . format ( ** stats_cpu ) with utils . tempdir ( prefix = log_name , dir = trainer . out ) as tempd : path = os . path . join ( tempd , ""log.json"" ) with open ( path , ""w"" ) as f : json . dump ( self . _log , f , indent = 4 ) new_path = os . path . join ( trainer . out , log_name ) shutil . move ( path , new_path ) self . _init_summary ( )",if self . _postprocess is not None :,429 21848,"def _from_to_normal(self, pymodule, import_stmt): resource = pymodule.get_resource() from_import = import_stmt.import_info module_name = from_import.module_name for name, alias in from_import.names_and_aliases: imported = name if alias is not None: imported = alias occurrence_finder = occurrences.create_finder( self.pycore, imported, pymodule[imported], imports=False ) source = rename.rename_in_module( occurrence_finder, module_name + ""."" + name, pymodule=pymodule, replace_primary=True, ) if source is not None: pymodule = self.pycore.get_string_module(source, resource) return pymodule ","def _from_to_normal ( self , pymodule , import_stmt ) : resource = pymodule . get_resource ( ) from_import = import_stmt . import_info module_name = from_import . module_name for name , alias in from_import . names_and_aliases : imported = name imported = alias occurrence_finder = occurrences . create_finder ( self . pycore , imported , pymodule [ imported ] , imports = False ) source = rename . rename_in_module ( occurrence_finder , module_name + ""."" + name , pymodule = pymodule , replace_primary = True , ) if source is not None : pymodule = self . pycore . get_string_module ( source , resource ) return pymodule",if alias is not None :,222 19949,"def test_with_three_points(self): cba = ia.Polygon([(1, 2), (3, 4), (5, 5)]) for i, xy in enumerate(cba): assert i in [0, 1, 2] if i == 0: assert np.allclose(xy, (1, 2)) elif i == 1: assert np.allclose(xy, (3, 4)) elif i == 2: assert np.allclose(xy, (5, 5)) assert i == 2","def test_with_three_points ( self ) : cba = ia . Polygon ( [ ( 1 , 2 ) , ( 3 , 4 ) , ( 5 , 5 ) ] ) for i , xy in enumerate ( cba ) : assert i in [ 0 , 1 , 2 ] assert np . allclose ( xy , ( 1 , 2 ) ) elif i == 1 : assert np . allclose ( xy , ( 3 , 4 ) ) elif i == 2 : assert np . allclose ( xy , ( 5 , 5 ) ) assert i == 2",if i == 0 :,136 11358,"def resize(self, newshape): (datashape,) = self._data.shape if newshape > datashape: (shape,) = self.shape # we work with int shapes only newdatashape = max(newshape, int(shape * self.factor) + 1) if self.use_numpy_resize and self._data.flags[""C_CONTIGUOUS""]: self.data = None self._data.resize(newdatashape, refcheck=self.refcheck) else: newdata = zeros(newdatashape, dtype=self.dtype) newdata[:shape] = self.data self._data = newdata elif newshape < self.shape[0]: # If we reduced the size, set the no longer used memory to 0 self._data[newshape:] = 0 # Reduce our view to the requested size if necessary self.data = self._data[:newshape] self.shape = (newshape,)","def resize ( self , newshape ) : ( datashape , ) = self . _data . shape if newshape > datashape : ( shape , ) = self . shape newdatashape = max ( newshape , int ( shape * self . factor ) + 1 ) self . data = None self . _data . resize ( newdatashape , refcheck = self . 
refcheck ) else : newdata = zeros ( newdatashape , dtype = self . dtype ) newdata [ : shape ] = self . data self . _data = newdata elif newshape < self . shape [ 0 ] : self . _data [ newshape : ] = 0 self . data = self . _data [ : newshape ] self . shape = ( newshape , )","if self . use_numpy_resize and self . _data . flags [ ""C_CONTIGUOUS"" ] :",240 1368,"def handle(self, input): match = self._rx.match(input) if match is not None: query = self._yamlfy_query(match.group(""query"")) if query is not None: # query['time'] = datetime.strptime(match.group('ts'), ""%a %b %d %H:%M:%S"") query[""millis""] = match.group(""query_time"") query[""ns""] = match.group(""ns"") if query[""query""].has_key(""$orderby""): query[""orderby""] = query[""query""][""$orderby""] del query[""query""][""$orderby""] if query[""query""].has_key(""$query""): query[""query""] = query[""query""][""$query""] query[""stats""] = parse_line_stats(match.group(""stats"")) return query return None ","def handle ( self , input ) : match = self . _rx . match ( input ) if match is not None : query = self . _yamlfy_query ( match . group ( ""query"" ) ) if query is not None : query [ ""millis"" ] = match . group ( ""query_time"" ) query [ ""ns"" ] = match . group ( ""ns"" ) query [ ""orderby"" ] = query [ ""query"" ] [ ""$orderby"" ] del query [ ""query"" ] [ ""$orderby"" ] if query [ ""query"" ] . has_key ( ""$query"" ) : query [ ""query"" ] = query [ ""query"" ] [ ""$query"" ] query [ ""stats"" ] = parse_line_stats ( match . group ( ""stats"" ) ) return query return None","if query [ ""query"" ] . has_key ( ""$orderby"" ) :",232 17394,"def setUp(self): CFTPClientTestBase.setUp(self) self.startServer() cmds = ( ""-p %i -l testuser "" ""--known-hosts kh_test "" ""--user-authentications publickey "" ""--host-key-algorithms ssh-rsa "" ""-i dsa_test "" ""-a "" ""-v "" ""127.0.0.1"" ) port = self.server.getHost().port cmds = test_conch._makeArgs((cmds % port).split(), mod=""cftp"") log.msg(""running {} {}"".format(sys.executable, cmds)) d = defer.Deferred() self.processProtocol = SFTPTestProcess(d) d.addCallback(lambda _: self.processProtocol.clearBuffer()) env = os.environ.copy() env[""PYTHONPATH""] = os.pathsep.join(sys.path) encodedCmds = [] encodedEnv = {} for cmd in cmds: if isinstance(cmd, str): cmd = cmd.encode(""utf-8"") encodedCmds.append(cmd) for var in env: val = env[var] if isinstance(var, str): var = var.encode(""utf-8"") if isinstance(val, str): val = val.encode(""utf-8"") encodedEnv[var] = val log.msg(encodedCmds) log.msg(encodedEnv) reactor.spawnProcess( self.processProtocol, sys.executable, encodedCmds, env=encodedEnv ) return d","def setUp ( self ) : CFTPClientTestBase . setUp ( self ) self . startServer ( ) cmds = ( ""-p %i -l testuser "" ""--known-hosts kh_test "" ""--user-authentications publickey "" ""--host-key-algorithms ssh-rsa "" ""-i dsa_test "" ""-a "" ""-v "" ""127.0.0.1"" ) port = self . server . getHost ( ) . port cmds = test_conch . _makeArgs ( ( cmds % port ) . split ( ) , mod = ""cftp"" ) log . msg ( ""running {} {}"" . format ( sys . executable , cmds ) ) d = defer . Deferred ( ) self . processProtocol = SFTPTestProcess ( d ) d . addCallback ( lambda _ : self . processProtocol . clearBuffer ( ) ) env = os . environ . copy ( ) env [ ""PYTHONPATH"" ] = os . pathsep . join ( sys . path ) encodedCmds = [ ] encodedEnv = { } for cmd in cmds : if isinstance ( cmd , str ) : cmd = cmd . encode ( ""utf-8"" ) encodedCmds . append ( cmd ) for var in env : val = env [ var ] var = var . 
encode ( ""utf-8"" ) if isinstance ( val , str ) : val = val . encode ( ""utf-8"" ) encodedEnv [ var ] = val log . msg ( encodedCmds ) log . msg ( encodedEnv ) reactor . spawnProcess ( self . processProtocol , sys . executable , encodedCmds , env = encodedEnv ) return d","if isinstance ( var , str ) :",423 16115,"def __new__(mcs, name, bases, attrs): include_profile = include_trace = include_garbage = True bases = list(bases) if name == ""SaltLoggingClass"": for base in bases: if hasattr(base, ""trace""): include_trace = False if hasattr(base, ""garbage""): include_garbage = False if include_profile: bases.append(LoggingProfileMixin) if include_trace: bases.append(LoggingTraceMixin) if include_garbage: bases.append(LoggingGarbageMixin) return super(LoggingMixinMeta, mcs).__new__(mcs, name, tuple(bases), attrs) ","def __new__ ( mcs , name , bases , attrs ) : include_profile = include_trace = include_garbage = True bases = list ( bases ) if name == ""SaltLoggingClass"" : for base in bases : include_trace = False if hasattr ( base , ""garbage"" ) : include_garbage = False if include_profile : bases . append ( LoggingProfileMixin ) if include_trace : bases . append ( LoggingTraceMixin ) if include_garbage : bases . append ( LoggingGarbageMixin ) return super ( LoggingMixinMeta , mcs ) . __new__ ( mcs , name , tuple ( bases ) , attrs )","if hasattr ( base , ""trace"" ) :",176 2641,"def alloc(self): with self.lock: # gc self.ban: for item in tuple(self.ban): if item[""counter""] == 0: self.free(item[""addr""]) self.ban.remove(item) else: item[""counter""] -= 1 # iterate through addr_map base = 0 for cell in self.addr_map: if cell: # not allocated addr bit = 0 while True: if (1 << bit) & self.addr_map[base]: self.addr_map[base] ^= 1 << bit break bit += 1 ret = base * self.cell_size + bit if self.reverse: ret = self.maxaddr - ret else: ret = ret + self.minaddr if self.minaddr <= ret <= self.maxaddr: if self.release: self.free(ret, ban=self.release) self.allocated += 1 return ret else: self.free(ret) raise KeyError(""no free address available"") base += 1 # no free address available if len(self.addr_map) < self.cells: # create new cell to allocate address from self.addr_map.append(self.cell) return self.alloc() else: raise KeyError(""no free address available"")","def alloc ( self ) : with self . lock : for item in tuple ( self . ban ) : if item [ ""counter"" ] == 0 : self . free ( item [ ""addr"" ] ) self . ban . remove ( item ) else : item [ ""counter"" ] -= 1 base = 0 for cell in self . addr_map : if cell : bit = 0 while True : if ( 1 << bit ) & self . addr_map [ base ] : self . addr_map [ base ] ^= 1 << bit break bit += 1 ret = base * self . cell_size + bit if self . reverse : ret = self . maxaddr - ret else : ret = ret + self . minaddr if self . minaddr <= ret <= self . maxaddr : self . free ( ret , ban = self . release ) self . allocated += 1 return ret else : self . free ( ret ) raise KeyError ( ""no free address available"" ) base += 1 if len ( self . addr_map ) < self . cells : self . addr_map . append ( self . cell ) return self . alloc ( ) else : raise KeyError ( ""no free address available"" )",if self . 
release :,452 13323,"def _wait_for_launcher(self) -> None: log.debug(""Waiting for Lavalink server to be ready"") lastmessage = 0 for i in itertools.cycle(range(50)): line = await self._proc.stdout.readline() if _RE_READY_LINE.search(line): self.ready.set() break if _FAILED_TO_START.search(line): raise RuntimeError(f""Lavalink failed to start: {line.decode().strip()}"") if self._proc.returncode is not None and lastmessage + 2 < time.time(): # Avoid Console spam only print once every 2 seconds lastmessage = time.time() log.critical(""Internal lavalink server exited early"") if i == 49: # Sleep after 50 lines to prevent busylooping await asyncio.sleep(0.1)","def _wait_for_launcher ( self ) -> None : log . debug ( ""Waiting for Lavalink server to be ready"" ) lastmessage = 0 for i in itertools . cycle ( range ( 50 ) ) : line = await self . _proc . stdout . readline ( ) if _RE_READY_LINE . search ( line ) : self . ready . set ( ) break if _FAILED_TO_START . search ( line ) : raise RuntimeError ( f""Lavalink failed to start: {line.decode().strip()}"" ) lastmessage = time . time ( ) log . critical ( ""Internal lavalink server exited early"" ) if i == 49 : await asyncio . sleep ( 0.1 )",if self . _proc . returncode is not None and lastmessage + 2 < time . time ( ) :,231 15895,"def get_type(request: HttpRequest, payload: Dict[str, Any]) -> str: if payload.get(""push""): return ""push"" elif payload.get(""fork""): return ""fork"" elif payload.get(""comment"") and payload.get(""commit""): return ""commit_comment"" elif payload.get(""commit_status""): return ""change_commit_status"" elif payload.get(""issue""): if payload.get(""changes""): return ""issue_updated"" if payload.get(""comment""): return ""issue_commented"" return ""issue_created"" elif payload.get(""pullrequest""): pull_request_template = ""pull_request_{}"" # Note that we only need the HTTP header to determine pullrequest events. # We rely on the payload itself to determine the other ones. event_key = validate_extract_webhook_http_header( request, ""X_EVENT_KEY"", ""BitBucket"" ) assert event_key is not None action = re.match(""pullrequest:(?P.*)$"", event_key) if action: action_group = action.group(""action"") if action_group in PULL_REQUEST_SUPPORTED_ACTIONS: return pull_request_template.format(action_group) else: event_key = validate_extract_webhook_http_header( request, ""X_EVENT_KEY"", ""BitBucket"" ) if event_key == ""repo:updated"": return event_key raise UnsupportedWebhookEventType(event_key) ","def get_type ( request : HttpRequest , payload : Dict [ str , Any ] ) -> str : if payload . get ( ""push"" ) : return ""push"" elif payload . get ( ""fork"" ) : return ""fork"" elif payload . get ( ""comment"" ) and payload . get ( ""commit"" ) : return ""commit_comment"" elif payload . get ( ""commit_status"" ) : return ""change_commit_status"" elif payload . get ( ""issue"" ) : if payload . get ( ""changes"" ) : return ""issue_updated"" return ""issue_commented"" return ""issue_created"" elif payload . get ( ""pullrequest"" ) : pull_request_template = ""pull_request_{}"" event_key = validate_extract_webhook_http_header ( request , ""X_EVENT_KEY"" , ""BitBucket"" ) assert event_key is not None action = re . match ( ""pullrequest:(?P.*)$"" , event_key ) if action : action_group = action . group ( ""action"" ) if action_group in PULL_REQUEST_SUPPORTED_ACTIONS : return pull_request_template . 
format ( action_group ) else : event_key = validate_extract_webhook_http_header ( request , ""X_EVENT_KEY"" , ""BitBucket"" ) if event_key == ""repo:updated"" : return event_key raise UnsupportedWebhookEventType ( event_key )","if payload . get ( ""comment"" ) :",417 1596,"def _get_contrast(second_level_contrast, design_matrix): """"""Check and return contrast when testing one contrast at the time"""""" if isinstance(second_level_contrast, str): if second_level_contrast in design_matrix.columns.tolist(): contrast = second_level_contrast else: raise ValueError( '""{}"" is not a valid contrast name'.format(second_level_contrast) ) else: # Check contrast definition if second_level_contrast is None: if design_matrix.shape[1] == 1: second_level_contrast = np.ones([1]) else: raise ValueError(""No second-level contrast is specified."") elif (np.nonzero(second_level_contrast)[0]).size != 1: raise ValueError(""second_level_contrast must be "" ""a list of 0s and 1s"") con_val = np.asarray(second_level_contrast, dtype=bool) contrast = np.asarray(design_matrix.columns.tolist())[con_val][0] return contrast","def _get_contrast ( second_level_contrast , design_matrix ) : """"""Check and return contrast when testing one contrast at the time"""""" if isinstance ( second_level_contrast , str ) : if second_level_contrast in design_matrix . columns . tolist ( ) : contrast = second_level_contrast else : raise ValueError ( '""{}"" is not a valid contrast name' . format ( second_level_contrast ) ) else : if second_level_contrast is None : second_level_contrast = np . ones ( [ 1 ] ) else : raise ValueError ( ""No second-level contrast is specified."" ) elif ( np . nonzero ( second_level_contrast ) [ 0 ] ) . size != 1 : raise ValueError ( ""second_level_contrast must be "" ""a list of 0s and 1s"" ) con_val = np . asarray ( second_level_contrast , dtype = bool ) contrast = np . asarray ( design_matrix . columns . tolist ( ) ) [ con_val ] [ 0 ] return contrast",if design_matrix . shape [ 1 ] == 1 :,285 20403,"def _get_left_part(self, bar): import socket, os, pwd try: username = pwd.getpwuid(os.geteuid()).pw_name except: username = ""???"" if username == ""root"": clr = ""bad"" else: clr = ""good"" bar.add(username, ""hostname"", clr, fixedsize=True) bar.add(""@"", ""hostname"", clr, fixedsize=True) bar.add(socket.gethostname(), ""hostname"", clr, fixedsize=True) for path in self.env.pathway: if path.islink: clr = ""link"" else: clr = ""directory"" bar.add(path.basename, clr) bar.add(""/"", clr, fixedsize=True) if self.env.cf is not None: bar.add(self.env.cf.basename, ""file"", fixedsize=True) ","def _get_left_part ( self , bar ) : import socket , os , pwd try : username = pwd . getpwuid ( os . geteuid ( ) ) . pw_name except : username = ""???"" if username == ""root"" : clr = ""bad"" else : clr = ""good"" bar . add ( username , ""hostname"" , clr , fixedsize = True ) bar . add ( ""@"" , ""hostname"" , clr , fixedsize = True ) bar . add ( socket . gethostname ( ) , ""hostname"" , clr , fixedsize = True ) for path in self . env . pathway : clr = ""link"" else : clr = ""directory"" bar . add ( path . basename , clr ) bar . add ( ""/"" , clr , fixedsize = True ) if self . env . cf is not None : bar . add ( self . env . cf . basename , ""file"" , fixedsize = True )",if path . 
islink :,250 12682,"def assert_registration_mailbox(self, match=None): if match is None: match = ""[Weblate] Your registration on Weblate"" # Check mailbox self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, match) live_url = getattr(self, ""live_server_url"", None) # Parse URL for line in mail.outbox[0].body.splitlines(): if ""verification_code"" not in line: continue if ""("" in line or "")"" in line or ""<"" in line or "">"" in line: continue if live_url and line.startswith(live_url): return line + ""&confirm=1"" if line.startswith(""http://example.com/""): return line[18:] + ""&confirm=1"" self.fail(""Confirmation URL not found"") return """"","def assert_registration_mailbox ( self , match = None ) : if match is None : match = ""[Weblate] Your registration on Weblate"" self . assertEqual ( len ( mail . outbox ) , 1 ) self . assertEqual ( mail . outbox [ 0 ] . subject , match ) live_url = getattr ( self , ""live_server_url"" , None ) for line in mail . outbox [ 0 ] . body . splitlines ( ) : if ""verification_code"" not in line : continue if ""("" in line or "")"" in line or ""<"" in line or "">"" in line : continue return line + ""&confirm=1"" if line . startswith ( ""http://example.com/"" ) : return line [ 18 : ] + ""&confirm=1"" self . fail ( ""Confirmation URL not found"" ) return """"",if live_url and line . startswith ( live_url ) :,228 18495,"def __init__(self, document, collection): self._document = document self._collection_obj = collection self._mongo_query = None self._query_obj = Q() self._cls_query = {} self._where_clause = None self._loaded_fields = QueryFieldList() self._ordering = None self._snapshot = False self._timeout = True self._read_preference = None self._read_concern = None self._iter = False self._scalar = [] self._none = False self._as_pymongo = False self._search_text = None # If inheritance is allowed, only return instances and instances of # subclasses of the class being used if document._meta.get(""allow_inheritance"") is True: if len(self._document._subclasses) == 1: self._cls_query = {""_cls"": self._document._subclasses[0]} else: self._cls_query = {""_cls"": {""$in"": self._document._subclasses}} self._loaded_fields = QueryFieldList(always_include=[""_cls""]) self._cursor_obj = None self._limit = None self._skip = None self._hint = -1 # Using -1 as None is a valid value for hint self._collation = None self._batch_size = None self._max_time_ms = None self._comment = None # Hack - As people expect cursor[5:5] to return # an empty result set. It's hard to do that right, though, because the # server uses limit(0) to mean 'no limit'. So we set _empty # in that case and check for it when iterating. We also unset # it anytime we change _limit. Inspired by how it is done in pymongo.Cursor self._empty = False","def __init__ ( self , document , collection ) : self . _document = document self . _collection_obj = collection self . _mongo_query = None self . _query_obj = Q ( ) self . _cls_query = { } self . _where_clause = None self . _loaded_fields = QueryFieldList ( ) self . _ordering = None self . _snapshot = False self . _timeout = True self . _read_preference = None self . _read_concern = None self . _iter = False self . _scalar = [ ] self . _none = False self . _as_pymongo = False self . _search_text = None if document . _meta . get ( ""allow_inheritance"" ) is True : self . _cls_query = { ""_cls"" : self . _document . _subclasses [ 0 ] } else : self . _cls_query = { ""_cls"" : { ""$in"" : self . _document . _subclasses } } self . 
_loaded_fields = QueryFieldList ( always_include = [ ""_cls"" ] ) self . _cursor_obj = None self . _limit = None self . _skip = None self . _hint = - 1 self . _collation = None self . _batch_size = None self . _max_time_ms = None self . _comment = None self . _empty = False",if len ( self . _document . _subclasses ) == 1 :,460 4850,"def wait_for_child(pid, timeout=1.0): deadline = mitogen.core.now() + timeout while timeout < mitogen.core.now(): try: target_pid, status = os.waitpid(pid, os.WNOHANG) if target_pid == pid: return except OSError: e = sys.exc_info()[1] if e.args[0] == errno.ECHILD: return time.sleep(0.05) assert False, ""wait_for_child() timed out""","def wait_for_child ( pid , timeout = 1.0 ) : deadline = mitogen . core . now ( ) + timeout while timeout < mitogen . core . now ( ) : try : target_pid , status = os . waitpid ( pid , os . WNOHANG ) return except OSError : e = sys . exc_info ( ) [ 1 ] if e . args [ 0 ] == errno . ECHILD : return time . sleep ( 0.05 ) assert False , ""wait_for_child() timed out""",if target_pid == pid :,156 7260,"def resolve_none(self, data): # replace None to '_' for tok_idx in range(len(data)): for feat_idx in range(len(data[tok_idx])): if data[tok_idx][feat_idx] is None: data[tok_idx][feat_idx] = ""_"" return data ","def resolve_none ( self , data ) : for tok_idx in range ( len ( data ) ) : for feat_idx in range ( len ( data [ tok_idx ] ) ) : data [ tok_idx ] [ feat_idx ] = ""_"" return data",if data [ tok_idx ] [ feat_idx ] is None :,87 2721,"def test_attributes_types(self): if not self.connection.strategy.pooled: if not self.connection.server.info: self.connection.refresh_server_info() self.assertEqual( type(self.connection.server.schema.attribute_types[""cn""]), AttributeTypeInfo ) ","def test_attributes_types ( self ) : if not self . connection . strategy . pooled : self . connection . refresh_server_info ( ) self . assertEqual ( type ( self . connection . server . schema . attribute_types [ ""cn"" ] ) , AttributeTypeInfo )",if not self . connection . server . info :,82 21930,"def get_modified_addr(self, erase_last=False): last = self.last_iteration new = self.feed(self.last_value, erase_last=erase_last) ret = {} for type, l in last.iteritems(): typeset = set(new[type]) for addr in l: if addr not in typeset: if type not in ret: ret[type] = [] ret[type].append(addr) return ret ","def get_modified_addr ( self , erase_last = False ) : last = self . last_iteration new = self . feed ( self . last_value , erase_last = erase_last ) ret = { } for type , l in last . iteritems ( ) : typeset = set ( new [ type ] ) for addr in l : if addr not in typeset : ret [ type ] = [ ] ret [ type ] . append ( addr ) return ret",if type not in ret :,134 10372,"def _get_compressor(self, algorithm): try: if algorithm.lower() in (""none"", ""off"", ""no""): return None if algorithm.lower() in (""zlib"", ""gzip""): import zlib as compressor result = compressor elif algorithm.lower() in (""bz2"", ""bzip2""): import bz2 as compressor result = compressor else: result = None if result: # NOTE(geguileo): Compression/Decompression starves # greenthreads so we use a native thread instead. return eventlet.tpool.Proxy(result) except ImportError: pass err = _(""unsupported compression algorithm: %s"") % algorithm raise ValueError(err)","def _get_compressor ( self , algorithm ) : try : if algorithm . lower ( ) in ( ""none"" , ""off"" , ""no"" ) : return None if algorithm . lower ( ) in ( ""zlib"" , ""gzip"" ) : import zlib as compressor result = compressor elif algorithm . 
lower ( ) in ( ""bz2"" , ""bzip2"" ) : import bz2 as compressor result = compressor else : result = None return eventlet . tpool . Proxy ( result ) except ImportError : pass err = _ ( ""unsupported compression algorithm: %s"" ) % algorithm raise ValueError ( err )",if result :,202 3085,"def choices(): """"""Return a dict of different choices."""""" choices = {} for choice in Action.__dict__: if hasattr(Action, choice): try: value = int(getattr(Action, choice)) choices[value] = choice except (TypeError, ValueError): pass return choices ","def choices ( ) : """"""Return a dict of different choices."""""" choices = { } for choice in Action . __dict__ : try : value = int ( getattr ( Action , choice ) ) choices [ value ] = choice except ( TypeError , ValueError ) : pass return choices","if hasattr ( Action , choice ) :",90 20725,"def _walkingCount(self, limFn=None, cntFn=None): tot = 0 pcounts = {} # for each of the active decks for did in self.col.decks.active(): # early alphas were setting the active ids as a str did = int(did) # get the individual deck's limit lim = limFn(self.col.decks.get(did)) if not lim: continue # check the parents parents = self.col.decks.parents(did) for p in parents: # add if missing if p[""id""] not in pcounts: pcounts[p[""id""]] = limFn(p) # take minimum of child and parent lim = min(pcounts[p[""id""]], lim) # see how many cards we actually have cnt = cntFn(did, lim) # if non-zero, decrement from parent counts for p in parents: pcounts[p[""id""]] -= cnt # we may also be a parent pcounts[did] = lim - cnt # and add to running total tot += cnt return tot","def _walkingCount ( self , limFn = None , cntFn = None ) : tot = 0 pcounts = { } for did in self . col . decks . active ( ) : did = int ( did ) lim = limFn ( self . col . decks . get ( did ) ) if not lim : continue parents = self . col . decks . parents ( did ) for p in parents : pcounts [ p [ ""id"" ] ] = limFn ( p ) lim = min ( pcounts [ p [ ""id"" ] ] , lim ) cnt = cntFn ( did , lim ) for p in parents : pcounts [ p [ ""id"" ] ] -= cnt pcounts [ did ] = lim - cnt tot += cnt return tot","if p [ ""id"" ] not in pcounts :",323 20879,"def generate_eway_bill(self, **kwargs): args = frappe._dict(kwargs) headers = self.get_headers() eway_bill_details = get_eway_bill_details(args) data = json.dumps( { ""Irn"": args.irn, ""Distance"": cint(eway_bill_details.distance), ""TransMode"": eway_bill_details.mode_of_transport, ""TransId"": eway_bill_details.gstin, ""TransName"": eway_bill_details.transporter, ""TrnDocDt"": eway_bill_details.document_date, ""TrnDocNo"": eway_bill_details.document_name, ""VehNo"": eway_bill_details.vehicle_no, ""VehType"": eway_bill_details.vehicle_type, }, indent=4, ) try: res = self.make_request(""post"", self.generate_ewaybill_url, headers, data) if res.get(""success""): self.invoice.ewaybill = res.get(""result"").get(""EwbNo"") self.invoice.eway_bill_cancelled = 0 self.invoice.update(args) self.invoice.flags.updater_reference = { ""doctype"": self.invoice.doctype, ""docname"": self.invoice.name, ""label"": _(""E-Way Bill Generated""), } self.update_invoice() else: raise RequestFailed except RequestFailed: errors = self.sanitize_error_message(res.get(""message"")) self.raise_error(errors=errors) except Exception: self.log_error(data) self.raise_error(True)","def generate_eway_bill ( self , ** kwargs ) : args = frappe . _dict ( kwargs ) headers = self . get_headers ( ) eway_bill_details = get_eway_bill_details ( args ) data = json . dumps ( { ""Irn"" : args . 
irn , ""Distance"" : cint ( eway_bill_details . distance ) , ""TransMode"" : eway_bill_details . mode_of_transport , ""TransId"" : eway_bill_details . gstin , ""TransName"" : eway_bill_details . transporter , ""TrnDocDt"" : eway_bill_details . document_date , ""TrnDocNo"" : eway_bill_details . document_name , ""VehNo"" : eway_bill_details . vehicle_no , ""VehType"" : eway_bill_details . vehicle_type , } , indent = 4 , ) try : res = self . make_request ( ""post"" , self . generate_ewaybill_url , headers , data ) self . invoice . ewaybill = res . get ( ""result"" ) . get ( ""EwbNo"" ) self . invoice . eway_bill_cancelled = 0 self . invoice . update ( args ) self . invoice . flags . updater_reference = { ""doctype"" : self . invoice . doctype , ""docname"" : self . invoice . name , ""label"" : _ ( ""E-Way Bill Generated"" ) , } self . update_invoice ( ) else : raise RequestFailed except RequestFailed : errors = self . sanitize_error_message ( res . get ( ""message"" ) ) self . raise_error ( errors = errors ) except Exception : self . log_error ( data ) self . raise_error ( True )","if res . get ( ""success"" ) :",504 2871,"def removeKey(self, key, group=None, locales=True): # set default group if not group: group = self.defaultGroup try: if locales: for name in list(self.content[group]): if re.match(""^"" + key + xdg.Locale.regex + ""$"", name) and name != key: del self.content[group][name] value = self.content[group].pop(key) self.tainted = True return value except KeyError as e: if debug: if e == group: raise NoGroupError(group, self.filename) else: raise NoKeyError(key, group, self.filename) else: return """" ","def removeKey ( self , key , group = None , locales = True ) : if not group : group = self . defaultGroup try : if locales : for name in list ( self . content [ group ] ) : del self . content [ group ] [ name ] value = self . content [ group ] . pop ( key ) self . tainted = True return value except KeyError as e : if debug : if e == group : raise NoGroupError ( group , self . filename ) else : raise NoKeyError ( key , group , self . filename ) else : return """"","if re . match ( ""^"" + key + xdg . Locale . regex + ""$"" , name ) and name != key :",208 15361,"def clean_requires_python(candidates): """"""Get a cleaned list of all the candidates with valid specifiers in the `requires_python` attributes."""""" all_candidates = [] py_version = parse_version( os.environ.get(""PIP_PYTHON_VERSION"", ""."".join(map(str, sys.version_info[:3]))) ) for c in candidates: if getattr(c, ""requires_python"", None): # Old specifications had people setting this to single digits # which is effectively the same as '>=digit,={0},<{1!s}"".format( c.requires_python, int(c.requires_python) + 1 ) try: specifierset = SpecifierSet(c.requires_python) except InvalidSpecifier: continue else: if not specifierset.contains(py_version): continue all_candidates.append(c) return all_candidates","def clean_requires_python ( candidates ) : """"""Get a cleaned list of all the candidates with valid specifiers in the `requires_python` attributes."""""" all_candidates = [ ] py_version = parse_version ( os . environ . get ( ""PIP_PYTHON_VERSION"" , ""."" . join ( map ( str , sys . version_info [ : 3 ] ) ) ) ) for c in candidates : if getattr ( c , ""requires_python"" , None ) : c . requires_python = "">={0},<{1!s}"" . format ( c . requires_python , int ( c . requires_python ) + 1 ) try : specifierset = SpecifierSet ( c . requires_python ) except InvalidSpecifier : continue else : if not specifierset . 
contains ( py_version ) : continue all_candidates . append ( c ) return all_candidates","if len ( c . requires_python ) == 1 and c . requires_python in ( ""2"" , ""3"" ) :",301 4765,"def JujuWait(self): """"""Wait for all deployed services to be installed, configured, and idle."""""" status = yaml.safe_load(self.JujuStatus()) for service in status[""services""]: ss = status[""services""][service][""service-status""][""current""] # Accept blocked because the service may be waiting on relation if ss not in [""active"", ""unknown""]: raise errors.Juju.TimeoutException( ""Service %s is not ready; status is %s"" % (service, ss) ) if ss in [""error""]: # The service has failed to deploy. debuglog = self.JujuRun(""juju debug-log --limit 200"") logging.warn(debuglog) raise errors.Juju.UnitErrorException( ""Service %s is in an error state"" % service ) for unit in status[""services""][service][""units""]: unit_data = status[""services""][service][""units""][unit] ag = unit_data[""agent-state""] if ag != ""started"": raise errors.Juju.TimeoutException( ""Service %s is not ready; agent-state is %s"" % (service, ag) ) ws = unit_data[""workload-status""][""current""] if ws not in [""active"", ""unknown""]: raise errors.Juju.TimeoutException( ""Service %s is not ready; workload-state is %s"" % (service, ws) )","def JujuWait ( self ) : """"""Wait for all deployed services to be installed, configured, and idle."""""" status = yaml . safe_load ( self . JujuStatus ( ) ) for service in status [ ""services"" ] : ss = status [ ""services"" ] [ service ] [ ""service-status"" ] [ ""current"" ] if ss not in [ ""active"" , ""unknown"" ] : raise errors . Juju . TimeoutException ( ""Service %s is not ready; status is %s"" % ( service , ss ) ) if ss in [ ""error"" ] : debuglog = self . JujuRun ( ""juju debug-log --limit 200"" ) logging . warn ( debuglog ) raise errors . Juju . UnitErrorException ( ""Service %s is in an error state"" % service ) for unit in status [ ""services"" ] [ service ] [ ""units"" ] : unit_data = status [ ""services"" ] [ service ] [ ""units"" ] [ unit ] ag = unit_data [ ""agent-state"" ] if ag != ""started"" : raise errors . Juju . TimeoutException ( ""Service %s is not ready; agent-state is %s"" % ( service , ag ) ) ws = unit_data [ ""workload-status"" ] [ ""current"" ] raise errors . Juju . 
TimeoutException ( ""Service %s is not ready; workload-state is %s"" % ( service , ws ) )","if ws not in [ ""active"" , ""unknown"" ] :",415 4534,"def docroutine(self, object, name=None, mod=None, cl=None): """"""Produce text documentation for a function or method object."""""" realname = object.__name__ name = name or realname note = """" skipdocs = 0 if inspect.ismethod(object): imclass = object.im_class if cl: if imclass is not cl: note = "" from "" + classname(imclass, mod) else: if object.im_self is not None: note = "" method of %s instance"" % classname( object.im_self.__class__, mod ) else: note = "" unbound %s method"" % classname(imclass, mod) object = object.im_func if name == realname: title = self.bold(realname) else: if cl and realname in cl.__dict__ and cl.__dict__[realname] is object: skipdocs = 1 title = self.bold(name) + "" = "" + realname if inspect.isfunction(object): args, varargs, varkw, defaults = inspect.getargspec(object) argspec = inspect.formatargspec( args, varargs, varkw, defaults, formatvalue=self.formatvalue ) if realname == """": title = self.bold(name) + "" lambda "" argspec = argspec[1:-1] # remove parentheses else: argspec = ""(...)"" decl = title + argspec + note if skipdocs: return decl + ""\n"" else: doc = getdoc(object) or """" return decl + ""\n"" + (doc and rstrip(self.indent(doc)) + ""\n"")","def docroutine ( self , object , name = None , mod = None , cl = None ) : """"""Produce text documentation for a function or method object."""""" realname = object . __name__ name = name or realname note = """" skipdocs = 0 if inspect . ismethod ( object ) : imclass = object . im_class if cl : if imclass is not cl : note = "" from "" + classname ( imclass , mod ) else : if object . im_self is not None : note = "" method of %s instance"" % classname ( object . im_self . __class__ , mod ) else : note = "" unbound %s method"" % classname ( imclass , mod ) object = object . im_func if name == realname : title = self . bold ( realname ) else : if cl and realname in cl . __dict__ and cl . __dict__ [ realname ] is object : skipdocs = 1 title = self . bold ( name ) + "" = "" + realname if inspect . isfunction ( object ) : args , varargs , varkw , defaults = inspect . getargspec ( object ) argspec = inspect . formatargspec ( args , varargs , varkw , defaults , formatvalue = self . formatvalue ) title = self . bold ( name ) + "" lambda "" argspec = argspec [ 1 : - 1 ] else : argspec = ""(...)"" decl = title + argspec + note if skipdocs : return decl + ""\n"" else : doc = getdoc ( object ) or """" return decl + ""\n"" + ( doc and rstrip ( self . 
indent ( doc ) ) + ""\n"" )","if realname == """" :",464 22230,"def __call__(self, context): ## Lookup function: ## getitem dict / getattr objects / getattr on wrapper obj = self.param.parent(context) name = self.param.key if obj is None: raise AssertionError(""No such object: %s"" % self.param.parent.name) try: function = obj[name] if not isinstance(function, ExpressionFunction): raise KeyError except (TypeError, KeyError): if hasattr(obj, name) and isinstance(getattr(obj, name), ExpressionFunction): function = getattr(obj, name) else: wrapper = self.wrap_object(obj) if ( wrapper is not None and hasattr(wrapper, name) and isinstance(getattr(wrapper, name), ExpressionFunction) ): function = getattr(wrapper, name) else: raise AssertionError(""Not a valid function: %s"" % self.param.name) ## Execute function if not isinstance(function, ExpressionFunction): # Just being paranoid here, but leave it in to block any mistakes in above lookup raise AssertionError(""Not a valid function: %s"" % self.param.name) args = self.args(context) return function(*args) ","def __call__ ( self , context ) : obj = self . param . parent ( context ) name = self . param . key if obj is None : raise AssertionError ( ""No such object: %s"" % self . param . parent . name ) try : function = obj [ name ] raise KeyError except ( TypeError , KeyError ) : if hasattr ( obj , name ) and isinstance ( getattr ( obj , name ) , ExpressionFunction ) : function = getattr ( obj , name ) else : wrapper = self . wrap_object ( obj ) if ( wrapper is not None and hasattr ( wrapper , name ) and isinstance ( getattr ( wrapper , name ) , ExpressionFunction ) ) : function = getattr ( wrapper , name ) else : raise AssertionError ( ""Not a valid function: %s"" % self . param . name ) raise AssertionError ( ""Not a valid function: %s"" % self . param . name ) args = self . args ( context ) return function ( * args )","if not isinstance ( function , ExpressionFunction ) :",337 12208,"def process_response(self, request, response): # It's not worth attempting to compress really short responses. if not response.streaming and len(response.content) < 200: return response patch_vary_headers(response, (""Accept-Encoding"",)) # Avoid gzipping if we've already got a content-encoding. if response.has_header(""Content-Encoding""): return response # MSIE have issues with gzipped response of various content types. if ""msie"" in request.META.get(""HTTP_USER_AGENT"", """").lower(): ctype = response.get(""Content-Type"", """").lower() if not ctype.startswith(""text/"") or ""javascript"" in ctype: return response ae = request.META.get(""HTTP_ACCEPT_ENCODING"", """") if not re_accepts_gzip.search(ae): return response if response.streaming: # Delete the `Content-Length` header for streaming content, because # we won't know the compressed size until we stream it. response.streaming_content = compress_sequence(response.streaming_content) del response[""Content-Length""] else: # Return the compressed content only if it's actually shorter. compressed_content = compress_string(response.content) if len(compressed_content) >= len(response.content): return response response.content = compressed_content response[""Content-Length""] = str(len(response.content)) if response.has_header(""ETag""): response[""ETag""] = re.sub('""$', ';gzip""', response[""ETag""]) response[""Content-Encoding""] = ""gzip"" return response","def process_response ( self , request , response ) : if not response . streaming and len ( response . 
content ) < 200 : return response patch_vary_headers ( response , ( ""Accept-Encoding"" , ) ) if response . has_header ( ""Content-Encoding"" ) : return response if ""msie"" in request . META . get ( ""HTTP_USER_AGENT"" , """" ) . lower ( ) : ctype = response . get ( ""Content-Type"" , """" ) . lower ( ) return response ae = request . META . get ( ""HTTP_ACCEPT_ENCODING"" , """" ) if not re_accepts_gzip . search ( ae ) : return response if response . streaming : response . streaming_content = compress_sequence ( response . streaming_content ) del response [ ""Content-Length"" ] else : compressed_content = compress_string ( response . content ) if len ( compressed_content ) >= len ( response . content ) : return response response . content = compressed_content response [ ""Content-Length"" ] = str ( len ( response . content ) ) if response . has_header ( ""ETag"" ) : response [ ""ETag"" ] = re . sub ( '""$' , ';gzip""' , response [ ""ETag"" ] ) response [ ""Content-Encoding"" ] = ""gzip"" return response","if not ctype . startswith ( ""text/"" ) or ""javascript"" in ctype :",419 14050,"def brushengine_paint_hires(): from lib import tiledsurface, brush s = tiledsurface.Surface() with open(""brushes/v2/watercolor.myb"") as fp: bi = brush.BrushInfo(fp.read()) b = brush.Brush(bi) events = np.loadtxt(""painting30sec.dat"") t_old = events[0][0] yield start_measurement s.begin_atomic() trans_time = 0.0 for t, x, y, pressure in events: dtime = t - t_old t_old = t b.stroke_to(s.backend, x * 5, y * 5, pressure, 0.0, 0.0, dtime) trans_time += dtime if trans_time > 0.05: trans_time = 0.0 s.end_atomic() s.begin_atomic() s.end_atomic() yield stop_measurement","def brushengine_paint_hires ( ) : from lib import tiledsurface , brush s = tiledsurface . Surface ( ) with open ( ""brushes/v2/watercolor.myb"" ) as fp : bi = brush . BrushInfo ( fp . read ( ) ) b = brush . Brush ( bi ) events = np . loadtxt ( ""painting30sec.dat"" ) t_old = events [ 0 ] [ 0 ] yield start_measurement s . begin_atomic ( ) trans_time = 0.0 for t , x , y , pressure in events : dtime = t - t_old t_old = t b . stroke_to ( s . backend , x * 5 , y * 5 , pressure , 0.0 , 0.0 , dtime ) trans_time += dtime trans_time = 0.0 s . end_atomic ( ) s . begin_atomic ( ) s . end_atomic ( ) yield stop_measurement",if trans_time > 0.05 :,268 13636,"def _tile_series(cls, op): series = op.inputs[0] if len(series.chunks) == 1: chunk = series.chunks[0] chunk_op = op.copy().reset_key() out_chunks = [ chunk_op.new_chunk( series.chunks, shape=chunk.shape, index=chunk.index, index_value=op.outputs[0].index_value, dtype=chunk.dtype, name=chunk.name, ) ] new_op = op.copy() kws = op.outputs[0].params.copy() kws[""nsplits""] = series.nsplits kws[""chunks""] = out_chunks return new_op.new_seriess(op.inputs, **kws) else: if op.na_position != ""last"": # pragma: no cover raise NotImplementedError(""Only support puts NaNs at the end."") # use parallel sorting by regular sampling return cls._tile_psrs(op, series)","def _tile_series ( cls , op ) : series = op . inputs [ 0 ] if len ( series . chunks ) == 1 : chunk = series . chunks [ 0 ] chunk_op = op . copy ( ) . reset_key ( ) out_chunks = [ chunk_op . new_chunk ( series . chunks , shape = chunk . shape , index = chunk . index , index_value = op . outputs [ 0 ] . index_value , dtype = chunk . dtype , name = chunk . name , ) ] new_op = op . copy ( ) kws = op . outputs [ 0 ] . params . copy ( ) kws [ ""nsplits"" ] = series . nsplits kws [ ""chunks"" ] = out_chunks return new_op . new_seriess ( op . 
inputs , ** kws ) else : raise NotImplementedError ( ""Only support puts NaNs at the end."" ) return cls . _tile_psrs ( op , series )","if op . na_position != ""last"" :",289 654,"def post_config_hook(self): self.last_transmitted_bytes = 0 self.last_received_bytes = 0 self.last_time = time.perf_counter() # Get default gateway from /proc. if self.nic is None: with Path(""/proc/net/route"").open() as fh: for line in fh: fields = line.strip().split() if fields[1] == ""00000000"" and int(fields[3], 16) & 2: self.nic = fields[0] break if self.nic is None: self.nic = ""lo"" self.py3.log(f""selected nic: {self.nic}"") self.thresholds_init = self.py3.get_color_names_list(self.format)","def post_config_hook ( self ) : self . last_transmitted_bytes = 0 self . last_received_bytes = 0 self . last_time = time . perf_counter ( ) with Path ( ""/proc/net/route"" ) . open ( ) as fh : for line in fh : fields = line . strip ( ) . split ( ) if fields [ 1 ] == ""00000000"" and int ( fields [ 3 ] , 16 ) & 2 : self . nic = fields [ 0 ] break self . nic = ""lo"" self . py3 . log ( f""selected nic: {self.nic}"" ) self . thresholds_init = self . py3 . get_color_names_list ( self . format )",if self . nic is None :,218 18874,"def import_data(self): if os.path.isfile(self._path): with open(self._path, ""r"") as db_file: import_data = json.loads(db_file.read()) data = import_data[""data""] for key_data in data: key = key_data[0] key_type = key_data[1] key_ttl = key_data[2] key_val = key_data[3] if key_type == ""set"": key_val = set(key_val) elif key_type == ""deque"": key_val = collections.deque(key_val) self._data[key] = { ""ttl"": key_ttl, ""val"": key_val, } if ""timers"" in import_data: for key in import_data[""timers""]: if key not in self._data: continue ttl = self._data[key][""ttl""] if not ttl: continue ttl -= int(time.time() * 1000) ttl /= 1000.0 if ttl >= 0: timer = threading.Timer(ttl, self.remove, (key,)) timer.daemon = True self._timers[key] = timer timer.start() else: self.remove(key) if ""commit_log"" in import_data: for tran in import_data[""commit_log""]: self._apply_trans(tran)","def import_data ( self ) : if os . path . isfile ( self . _path ) : with open ( self . _path , ""r"" ) as db_file : import_data = json . loads ( db_file . read ( ) ) data = import_data [ ""data"" ] for key_data in data : key = key_data [ 0 ] key_type = key_data [ 1 ] key_ttl = key_data [ 2 ] key_val = key_data [ 3 ] if key_type == ""set"" : key_val = set ( key_val ) elif key_type == ""deque"" : key_val = collections . deque ( key_val ) self . _data [ key ] = { ""ttl"" : key_ttl , ""val"" : key_val , } if ""timers"" in import_data : for key in import_data [ ""timers"" ] : if key not in self . _data : continue ttl = self . _data [ key ] [ ""ttl"" ] if not ttl : continue ttl -= int ( time . time ( ) * 1000 ) ttl /= 1000.0 if ttl >= 0 : timer = threading . Timer ( ttl , self . remove , ( key , ) ) timer . daemon = True self . _timers [ key ] = timer timer . start ( ) else : self . remove ( key ) for tran in import_data [ ""commit_log"" ] : self . 
_apply_trans ( tran )","if ""commit_log"" in import_data :",505 7100,"def _process_mempool(self, all_hashes): # Re-sync with the new set of hashes txs = self.txs hashXs = self.hashXs touched = set() # First handle txs that have disappeared for tx_hash in set(txs).difference(all_hashes): tx = txs.pop(tx_hash) tx_hashXs = {hashX for hashX, value in tx.in_pairs} tx_hashXs.update(hashX for hashX, value in tx.out_pairs) for hashX in tx_hashXs: hashXs[hashX].remove(tx_hash) if not hashXs[hashX]: del hashXs[hashX] touched.update(tx_hashXs) # Process new transactions new_hashes = list(all_hashes.difference(txs)) if new_hashes: fetches = [] for hashes in chunks(new_hashes, 200): fetches.append(self._fetch_and_accept(hashes, all_hashes, touched)) tx_map = {} utxo_map = {} for fetch in asyncio.as_completed(fetches): deferred, unspent = await fetch tx_map.update(deferred) utxo_map.update(unspent) prior_count = 0 # FIXME: this is not particularly efficient while tx_map and len(tx_map) != prior_count: prior_count = len(tx_map) tx_map, utxo_map = self._accept_transactions(tx_map, utxo_map, touched) if tx_map: self.logger.info(f""{len(tx_map)} txs dropped"") return touched","def _process_mempool ( self , all_hashes ) : txs = self . txs hashXs = self . hashXs touched = set ( ) for tx_hash in set ( txs ) . difference ( all_hashes ) : tx = txs . pop ( tx_hash ) tx_hashXs = { hashX for hashX , value in tx . in_pairs } tx_hashXs . update ( hashX for hashX , value in tx . out_pairs ) for hashX in tx_hashXs : hashXs [ hashX ] . remove ( tx_hash ) del hashXs [ hashX ] touched . update ( tx_hashXs ) new_hashes = list ( all_hashes . difference ( txs ) ) if new_hashes : fetches = [ ] for hashes in chunks ( new_hashes , 200 ) : fetches . append ( self . _fetch_and_accept ( hashes , all_hashes , touched ) ) tx_map = { } utxo_map = { } for fetch in asyncio . as_completed ( fetches ) : deferred , unspent = await fetch tx_map . update ( deferred ) utxo_map . update ( unspent ) prior_count = 0 while tx_map and len ( tx_map ) != prior_count : prior_count = len ( tx_map ) tx_map , utxo_map = self . _accept_transactions ( tx_map , utxo_map , touched ) if tx_map : self . logger . info ( f""{len(tx_map)} txs dropped"" ) return touched",if not hashXs [ hashX ] :,482 15091,"def forward( self, hidden_states, attention_mask=None, head_mask=None, output_attentions=False, output_hidden_states=False, return_dict=True, ): all_hidden_states = () if output_hidden_states else None all_attentions = () if output_attentions else None for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) layer_outputs = layer_module( hidden_states, attention_mask, head_mask[i], output_attentions, ) hidden_states = layer_outputs[0] if output_attentions: all_attentions = all_attentions + (layer_outputs[1],) # Add last layer if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple( v for v in [hidden_states, all_hidden_states, all_attentions] if v is not None ) return BaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_attentions, )","def forward ( self , hidden_states , attention_mask = None , head_mask = None , output_attentions = False , output_hidden_states = False , return_dict = True , ) : all_hidden_states = ( ) if output_hidden_states else None all_attentions = ( ) if output_attentions else None for i , layer_module in enumerate ( self . 
layer ) : if output_hidden_states : all_hidden_states = all_hidden_states + ( hidden_states , ) layer_outputs = layer_module ( hidden_states , attention_mask , head_mask [ i ] , output_attentions , ) hidden_states = layer_outputs [ 0 ] if output_attentions : all_attentions = all_attentions + ( layer_outputs [ 1 ] , ) if output_hidden_states : all_hidden_states = all_hidden_states + ( hidden_states , ) if not return_dict : return tuple ( v for v in [ hidden_states , all_hidden_states , all_attentions ] ) return BaseModelOutput ( last_hidden_state = hidden_states , hidden_states = all_hidden_states , attentions = all_attentions , )",if v is not None,356 22468,"def sanitizeTreeKobo(filetree): pageNumber = 0 for root, dirs, files in os.walk(filetree): dirs, files = walkSort(dirs, files) for name in files: splitname = os.path.splitext(name) slugified = str(pageNumber).zfill(5) pageNumber += 1 while ( os.path.exists(os.path.join(root, slugified + splitname[1])) and splitname[0].upper() != slugified.upper() ): slugified += ""A"" newKey = os.path.join(root, slugified + splitname[1]) key = os.path.join(root, name) if key != newKey: os.replace(key, newKey)","def sanitizeTreeKobo ( filetree ) : pageNumber = 0 for root , dirs , files in os . walk ( filetree ) : dirs , files = walkSort ( dirs , files ) for name in files : splitname = os . path . splitext ( name ) slugified = str ( pageNumber ) . zfill ( 5 ) pageNumber += 1 while ( os . path . exists ( os . path . join ( root , slugified + splitname [ 1 ] ) ) and splitname [ 0 ] . upper ( ) != slugified . upper ( ) ) : slugified += ""A"" newKey = os . path . join ( root , slugified + splitname [ 1 ] ) key = os . path . join ( root , name ) os . replace ( key , newKey )",if key != newKey :,223 21653,"def testCheckpointMiddleOfSequence(self): # Create a model and give it some inputs to learn. tm1 = BacktrackingTM(numberOfCols=100, cellsPerColumn=12, verbosity=VERBOSITY) sequences = [self.generateSequence() for _ in xrange(5)] train = list(itertools.chain.from_iterable(sequences[:3] + [sequences[3][:5]])) for bottomUpInput in train: if bottomUpInput is None: tm1.reset() else: tm1.compute(bottomUpInput, True, True) # Serialize and deserialized the TM. checkpointPath = os.path.join(self._tmpDir, ""a"") tm1.saveToFile(checkpointPath) tm2 = pickle.loads(pickle.dumps(tm1)) tm2.loadFromFile(checkpointPath) # Check that the TMs are the same. self.assertTMsEqual(tm1, tm2) # Feed some data into the models. test = list(itertools.chain.from_iterable([sequences[3][5:]] + sequences[3:])) for bottomUpInput in test: if bottomUpInput is None: tm1.reset() tm2.reset() else: result1 = tm1.compute(bottomUpInput, True, True) result2 = tm2.compute(bottomUpInput, True, True) self.assertTMsEqual(tm1, tm2) self.assertTrue(numpy.array_equal(result1, result2))","def testCheckpointMiddleOfSequence ( self ) : tm1 = BacktrackingTM ( numberOfCols = 100 , cellsPerColumn = 12 , verbosity = VERBOSITY ) sequences = [ self . generateSequence ( ) for _ in xrange ( 5 ) ] train = list ( itertools . chain . from_iterable ( sequences [ : 3 ] + [ sequences [ 3 ] [ : 5 ] ] ) ) for bottomUpInput in train : tm1 . reset ( ) else : tm1 . compute ( bottomUpInput , True , True ) checkpointPath = os . path . join ( self . _tmpDir , ""a"" ) tm1 . saveToFile ( checkpointPath ) tm2 = pickle . loads ( pickle . dumps ( tm1 ) ) tm2 . loadFromFile ( checkpointPath ) self . assertTMsEqual ( tm1 , tm2 ) test = list ( itertools . chain . 
from_iterable ( [ sequences [ 3 ] [ 5 : ] ] + sequences [ 3 : ] ) ) for bottomUpInput in test : tm1 . reset ( ) tm2 . reset ( ) else : result1 = tm1 . compute ( bottomUpInput , True , True ) result2 = tm2 . compute ( bottomUpInput , True , True ) self . assertTMsEqual ( tm1 , tm2 ) self . assertTrue ( numpy . array_equal ( result1 , result2 ) )",if bottomUpInput is None :,396 6839,"def __init__( self, size, comm, decay=0.9, eps=2e-5, dtype=None, use_gamma=True, use_beta=True, initial_gamma=None, initial_beta=None, communication_backend=""auto"", ): chainer.utils.experimental(""chainermn.links.MultiNodeBatchNormalization"") super(MultiNodeBatchNormalization, self).__init__() self._highprec_dtype = chainer.get_dtype(dtype, map_mixed16=numpy.float32) self.comm = comm self.avg_mean = numpy.zeros(size, dtype=self._highprec_dtype) self.register_persistent(""avg_mean"") self.avg_var = numpy.zeros(size, dtype=self._highprec_dtype) self.register_persistent(""avg_var"") self.N = 0 self.register_persistent(""N"") self.decay = decay self.eps = eps self._communication_backend = ( chainermn_batch_normalization.get_communication_backend( comm, communication_backend ) ) with self.init_scope(): if use_gamma: if initial_gamma is None: initial_gamma = 1 initial_gamma = initializers._get_initializer(initial_gamma) initial_gamma.dtype = self._highprec_dtype self.gamma = variable.Parameter(initial_gamma, size) if use_beta: if initial_beta is None: initial_beta = 0 initial_beta = initializers._get_initializer(initial_beta) initial_beta.dtype = self._highprec_dtype self.beta = variable.Parameter(initial_beta, size)","def __init__ ( self , size , comm , decay = 0.9 , eps = 2e-5 , dtype = None , use_gamma = True , use_beta = True , initial_gamma = None , initial_beta = None , communication_backend = ""auto"" , ) : chainer . utils . experimental ( ""chainermn.links.MultiNodeBatchNormalization"" ) super ( MultiNodeBatchNormalization , self ) . __init__ ( ) self . _highprec_dtype = chainer . get_dtype ( dtype , map_mixed16 = numpy . float32 ) self . comm = comm self . avg_mean = numpy . zeros ( size , dtype = self . _highprec_dtype ) self . register_persistent ( ""avg_mean"" ) self . avg_var = numpy . zeros ( size , dtype = self . _highprec_dtype ) self . register_persistent ( ""avg_var"" ) self . N = 0 self . register_persistent ( ""N"" ) self . decay = decay self . eps = eps self . _communication_backend = ( chainermn_batch_normalization . get_communication_backend ( comm , communication_backend ) ) with self . init_scope ( ) : if use_gamma : initial_gamma = 1 initial_gamma = initializers . _get_initializer ( initial_gamma ) initial_gamma . dtype = self . _highprec_dtype self . gamma = variable . Parameter ( initial_gamma , size ) if use_beta : if initial_beta is None : initial_beta = 0 initial_beta = initializers . _get_initializer ( initial_beta ) initial_beta . dtype = self . _highprec_dtype self . beta = variable . 
Parameter ( initial_beta , size )",if initial_gamma is None :,465 14047,"def _install_script(self, src, header): strip_ext = True set_mode = False if sys.platform == ""win32"": if ""MSYSTEM"" not in os.environ: # and not MSYS2 strip_ext = False targ_basename = os.path.basename(src) if strip_ext and targ_basename.endswith("".py""): targ_basename = targ_basename[:-3] targ = os.path.join(self.install_dir, targ_basename) self.announce(""installing %s as %s"" % (src, targ_basename), level=2) if self.dry_run: return [] with open(src, ""rU"") as in_fp: with open(targ, ""w"") as out_fp: line = in_fp.readline().rstrip() if line.startswith(""#!""): print(line, file=out_fp) print(header, file=out_fp) if os.name == ""posix"": set_mode = True else: print(header, file=out_fp) print(line, file=out_fp) for line in in_fp.readlines(): line = line.rstrip() print(line, file=out_fp) if set_mode: mode = ((os.stat(targ).st_mode) | 0o555) & 0o7777 self.announce(""changing mode of %s to %o"" % (targ, mode), level=2) os.chmod(targ, mode) return [targ]","def _install_script ( self , src , header ) : strip_ext = True set_mode = False if sys . platform == ""win32"" : strip_ext = False targ_basename = os . path . basename ( src ) if strip_ext and targ_basename . endswith ( "".py"" ) : targ_basename = targ_basename [ : - 3 ] targ = os . path . join ( self . install_dir , targ_basename ) self . announce ( ""installing %s as %s"" % ( src , targ_basename ) , level = 2 ) if self . dry_run : return [ ] with open ( src , ""rU"" ) as in_fp : with open ( targ , ""w"" ) as out_fp : line = in_fp . readline ( ) . rstrip ( ) if line . startswith ( ""#!"" ) : print ( line , file = out_fp ) print ( header , file = out_fp ) if os . name == ""posix"" : set_mode = True else : print ( header , file = out_fp ) print ( line , file = out_fp ) for line in in_fp . readlines ( ) : line = line . rstrip ( ) print ( line , file = out_fp ) if set_mode : mode = ( ( os . stat ( targ ) . st_mode ) | 0o555 ) & 0o7777 self . announce ( ""changing mode of %s to %o"" % ( targ , mode ) , level = 2 ) os . chmod ( targ , mode ) return [ targ ]","if ""MSYSTEM"" not in os . environ :",435 7769,"def extract_authors(self, field, name, docinfo): try: if len(field[1]) == 1: if isinstance(field[1][0], nodes.paragraph): authors = self.authors_from_one_paragraph(field) elif isinstance(field[1][0], nodes.bullet_list): authors = self.authors_from_bullet_list(field) else: raise TransformError else: authors = self.authors_from_paragraphs(field) authornodes = [nodes.author("""", """", *author) for author in authors if author] if len(authornodes) >= 1: docinfo.append(nodes.authors("""", *authornodes)) else: raise TransformError except TransformError: field[-1] += self.document.reporter.warning( 'Bibliographic field ""%s"" incompatible with extraction: ' ""it must contain either a single paragraph (with authors "" 'separated by one of ""%s""), multiple paragraphs (one per ' ""author), or a bullet list with one paragraph (one author) "" ""per item."" % (name, """".join(self.language.author_separators)), base_node=field, ) raise","def extract_authors ( self , field , name , docinfo ) : try : if len ( field [ 1 ] ) == 1 : if isinstance ( field [ 1 ] [ 0 ] , nodes . paragraph ) : authors = self . authors_from_one_paragraph ( field ) authors = self . authors_from_bullet_list ( field ) else : raise TransformError else : authors = self . authors_from_paragraphs ( field ) authornodes = [ nodes . 
author ( """" , """" , * author ) for author in authors if author ] if len ( authornodes ) >= 1 : docinfo . append ( nodes . authors ( """" , * authornodes ) ) else : raise TransformError except TransformError : field [ - 1 ] += self . document . reporter . warning ( 'Bibliographic field ""%s"" incompatible with extraction: ' ""it must contain either a single paragraph (with authors "" 'separated by one of ""%s""), multiple paragraphs (one per ' ""author), or a bullet list with one paragraph (one author) "" ""per item."" % ( name , """" . join ( self . language . author_separators ) ) , base_node = field , ) raise","elif isinstance ( field [ 1 ] [ 0 ] , nodes . bullet_list ) :",339 8870,"def on_task_filter(self, task, config): if not task.accepted: log.debug(""No accepted entries, not scanning for existing."") return log.verbose(""Scanning path(s) for existing files."") config = self.prepare_config(config) filenames = {} for folder in config: folder = Path(folder).expanduser() if not folder.exists(): raise plugin.PluginWarning(""Path %s does not exist"" % folder, log) for p in folder.rglob(""*""): if p.is_file(): key = p.name # windows file system is not case sensitive if platform.system() == ""Windows"": key = key.lower() filenames[key] = p for entry in task.accepted: # priority is: filename, location (filename only), title name = Path(entry.get(""filename"", entry.get(""location"", entry[""title""]))).name if platform.system() == ""Windows"": name = name.lower() if name in filenames: log.debug(""Found %s in %s"" % (name, filenames[name])) entry.reject(""exists in %s"" % filenames[name]) ","def on_task_filter ( self , task , config ) : if not task . accepted : log . debug ( ""No accepted entries, not scanning for existing."" ) return log . verbose ( ""Scanning path(s) for existing files."" ) config = self . prepare_config ( config ) filenames = { } for folder in config : folder = Path ( folder ) . expanduser ( ) if not folder . exists ( ) : raise plugin . PluginWarning ( ""Path %s does not exist"" % folder , log ) for p in folder . rglob ( ""*"" ) : key = p . name if platform . system ( ) == ""Windows"" : key = key . lower ( ) filenames [ key ] = p for entry in task . accepted : name = Path ( entry . get ( ""filename"" , entry . get ( ""location"" , entry [ ""title"" ] ) ) ) . name if platform . system ( ) == ""Windows"" : name = name . lower ( ) if name in filenames : log . debug ( ""Found %s in %s"" % ( name , filenames [ name ] ) ) entry . reject ( ""exists in %s"" % filenames [ name ] )",if p . is_file ( ) :,326 14110,"def _update_cds_vdims(self): # Add any value dimensions not already in the CDS data # ensuring the element can be reconstituted in entirety element = self.plot.current_frame cds = self.plot.handles[""cds""] for d in element.vdims: scalar = element.interface.isscalar(element, d) dim = dimension_sanitizer(d.name) if dim not in cds.data: if scalar: cds.data[dim] = element.dimension_values(d, not scalar) else: cds.data[dim] = [ arr[:, 0] for arr in element.split(datatype=""array"", dimensions=[dim]) ]","def _update_cds_vdims ( self ) : element = self . plot . current_frame cds = self . plot . handles [ ""cds"" ] for d in element . vdims : scalar = element . interface . isscalar ( element , d ) dim = dimension_sanitizer ( d . name ) if scalar : cds . data [ dim ] = element . dimension_values ( d , not scalar ) else : cds . data [ dim ] = [ arr [ : , 0 ] for arr in element . split ( datatype = ""array"" , dimensions = [ dim ] ) ]",if dim not in cds . 
data :,206 11909,"def progress_bar_update( count1=None, count2=None, count3=None, count4=None, count5=None, count6=None ): lock.acquire() global pbar_file_permission_done if count1 is not None: if count1 <= 100: pbar1.update(count1) if count2 is not None: if count2 <= 100: pbar2.update(count2) if count3 is not None: if not pbar_file_permission_done: if count3 < 100: pbar3.update(count3) else: pbar3.update(count3) pbar_file_permission_done = True else: pbar4.update(count3) if count4 is not None: if count4 <= 100: pbar5.update(count4) if count5 is not None: if count5 <= 100: pbar6.update(count5) if count6 is not None: if count6 <= 100: pbar7.update(count6) lock.release()","def progress_bar_update ( count1 = None , count2 = None , count3 = None , count4 = None , count5 = None , count6 = None ) : lock . acquire ( ) global pbar_file_permission_done if count1 is not None : if count1 <= 100 : pbar1 . update ( count1 ) if count2 is not None : if count2 <= 100 : pbar2 . update ( count2 ) if count3 is not None : if not pbar_file_permission_done : if count3 < 100 : pbar3 . update ( count3 ) else : pbar3 . update ( count3 ) pbar_file_permission_done = True else : pbar4 . update ( count3 ) if count4 is not None : if count4 <= 100 : pbar5 . update ( count4 ) if count5 is not None : if count5 <= 100 : pbar6 . update ( count5 ) if count6 is not None : pbar7 . update ( count6 ) lock . release ( )",if count6 <= 100 :,307 16890,"def _executables_in_windows(path): if not os.path.isdir(path): return extensions = builtins.__xonsh__.env[""PATHEXT""] try: for x in scandir(path): try: is_file = x.is_file() except OSError: continue if is_file: fname = x.name else: continue base_name, ext = os.path.splitext(fname) if ext.upper() in extensions: yield fname except FileNotFoundError: # On Windows, there's no guarantee for the directory to really # exist even if isdir returns True. This may happen for instance # if the path contains trailing spaces. return ","def _executables_in_windows ( path ) : if not os . path . isdir ( path ) : return extensions = builtins . __xonsh__ . env [ ""PATHEXT"" ] try : for x in scandir ( path ) : try : is_file = x . is_file ( ) except OSError : continue fname = x . name else : continue base_name , ext = os . path . splitext ( fname ) if ext . upper ( ) in extensions : yield fname except FileNotFoundError : return",if is_file :,213 9639,"def test_payload_splitter(self): with open(FIXTURE_PATH + ""/legacy_payload.json"") as f: legacy_payload = json.load(f) legacy_payload_split, metrics_payload, checkruns_payload = split_payload( dict(legacy_payload) ) series = metrics_payload[""series""] legacy_payload_split[""metrics""] = [] for s in series: attributes = {} if s.get(""type""): attributes[""type""] = s[""type""] if s.get(""host""): attributes[""hostname""] = s[""host""] if s.get(""tags""): attributes[""tags""] = s[""tags""] if s.get(""device""): attributes[""device_name""] = s[""device""] formatted_sample = [ s[""metric""], s[""points""][0][0], s[""points""][0][1], attributes, ] legacy_payload_split[""metrics""].append(formatted_sample) del legacy_payload[""service_checks""] self.assertEqual(legacy_payload, legacy_payload_split) with open(FIXTURE_PATH + ""/sc_payload.json"") as f: expected_sc_payload = json.load(f) self.assertEqual(checkruns_payload, expected_sc_payload)","def test_payload_splitter ( self ) : with open ( FIXTURE_PATH + ""/legacy_payload.json"" ) as f : legacy_payload = json . 
load ( f ) legacy_payload_split , metrics_payload , checkruns_payload = split_payload ( dict ( legacy_payload ) ) series = metrics_payload [ ""series"" ] legacy_payload_split [ ""metrics"" ] = [ ] for s in series : attributes = { } if s . get ( ""type"" ) : attributes [ ""type"" ] = s [ ""type"" ] if s . get ( ""host"" ) : attributes [ ""hostname"" ] = s [ ""host"" ] if s . get ( ""tags"" ) : attributes [ ""tags"" ] = s [ ""tags"" ] attributes [ ""device_name"" ] = s [ ""device"" ] formatted_sample = [ s [ ""metric"" ] , s [ ""points"" ] [ 0 ] [ 0 ] , s [ ""points"" ] [ 0 ] [ 1 ] , attributes , ] legacy_payload_split [ ""metrics"" ] . append ( formatted_sample ) del legacy_payload [ ""service_checks"" ] self . assertEqual ( legacy_payload , legacy_payload_split ) with open ( FIXTURE_PATH + ""/sc_payload.json"" ) as f : expected_sc_payload = json . load ( f ) self . assertEqual ( checkruns_payload , expected_sc_payload )","if s . get ( ""device"" ) :",351 15760,"def write(self, data): if mock_target._mirror_on_stderr: if self._write_line: sys.stderr.write(fn + "": "") if bytes: sys.stderr.write(data.decode(""utf8"")) else: sys.stderr.write(data) if (data[-1]) == ""\n"": self._write_line = True else: self._write_line = False super(Buffer, self).write(data)","def write ( self , data ) : if mock_target . _mirror_on_stderr : if self . _write_line : sys . stderr . write ( fn + "": "" ) sys . stderr . write ( data . decode ( ""utf8"" ) ) else : sys . stderr . write ( data ) if ( data [ - 1 ] ) == ""\n"" : self . _write_line = True else : self . _write_line = False super ( Buffer , self ) . write ( data )",if bytes :,137 12997,"def _calculateParams(self, listPackages): self.mapCyclesToPackageList.clear() self.mapPackageToCycle.clear() self.sortedPackageList = [] self.listOfPackagesAlreadyBuilt = self._readAlreadyAvailablePackages() if self.listOfPackagesAlreadyBuilt: self.logger.debug(""List of already available packages:"") self.logger.debug(self.listOfPackagesAlreadyBuilt) listPackagesToBuild = copy.copy(listPackages) for pkg in listPackages: if pkg in self.listOfPackagesAlreadyBuilt and not constants.rpmCheck: listPackagesToBuild.remove(pkg) if constants.rpmCheck: self.sortedPackageList = listPackagesToBuild else: if not self._readPackageBuildData(listPackagesToBuild): return False if self.sortedPackageList: self.logger.info(""List of packages yet to be built..."") self.logger.info( str(set(self.sortedPackageList) - set(self.listOfPackagesAlreadyBuilt)) ) self.logger.info("""") return True ","def _calculateParams ( self , listPackages ) : self . mapCyclesToPackageList . clear ( ) self . mapPackageToCycle . clear ( ) self . sortedPackageList = [ ] self . listOfPackagesAlreadyBuilt = self . _readAlreadyAvailablePackages ( ) if self . listOfPackagesAlreadyBuilt : self . logger . debug ( ""List of already available packages:"" ) self . logger . debug ( self . listOfPackagesAlreadyBuilt ) listPackagesToBuild = copy . copy ( listPackages ) for pkg in listPackages : if pkg in self . listOfPackagesAlreadyBuilt and not constants . rpmCheck : listPackagesToBuild . remove ( pkg ) if constants . rpmCheck : self . sortedPackageList = listPackagesToBuild else : return False if self . sortedPackageList : self . logger . info ( ""List of packages yet to be built..."" ) self . logger . info ( str ( set ( self . sortedPackageList ) - set ( self . listOfPackagesAlreadyBuilt ) ) ) self . logger . info ( """" ) return True",if not self . 
_readPackageBuildData ( listPackagesToBuild ) :,288 9363,"def PyJs_anonymous_53_(ast, comments, tokens, this, arguments, var=var): var = Scope( { u""tokens"": tokens, u""this"": this, u""arguments"": arguments, u""comments"": comments, u""ast"": ast, }, var, ) var.registers([u""tokens"", u""comments"", u""ast""]) if var.get(u""ast""): if PyJsStrictEq(var.get(u""ast"").get(u""type""), Js(u""Program"")): return var.get(u""t"").callprop( u""file"", var.get(u""ast""), (var.get(u""comments"") or Js([])), (var.get(u""tokens"") or Js([])), ) else: if PyJsStrictEq(var.get(u""ast"").get(u""type""), Js(u""File"")): return var.get(u""ast"") PyJsTempException = JsToPyException( var.get(u""Error"").create(Js(u""Not a valid ast?"")) ) raise PyJsTempException","def PyJs_anonymous_53_ ( ast , comments , tokens , this , arguments , var = var ) : var = Scope ( { u""tokens"" : tokens , u""this"" : this , u""arguments"" : arguments , u""comments"" : comments , u""ast"" : ast , } , var , ) var . registers ( [ u""tokens"" , u""comments"" , u""ast"" ] ) if var . get ( u""ast"" ) : if PyJsStrictEq ( var . get ( u""ast"" ) . get ( u""type"" ) , Js ( u""Program"" ) ) : return var . get ( u""t"" ) . callprop ( u""file"" , var . get ( u""ast"" ) , ( var . get ( u""comments"" ) or Js ( [ ] ) ) , ( var . get ( u""tokens"" ) or Js ( [ ] ) ) , ) else : return var . get ( u""ast"" ) PyJsTempException = JsToPyException ( var . get ( u""Error"" ) . create ( Js ( u""Not a valid ast?"" ) ) ) raise PyJsTempException","if PyJsStrictEq ( var . get ( u""ast"" ) . get ( u""type"" ) , Js ( u""File"" ) ) :",330 12126,"def AdjustLabels(self, axis, minimum_label_spacing): if minimum_label_spacing is None: return if len(axis.labels) <= 1: # Nothing to adjust return if axis.max is not None and axis.min is not None: # Find the spacing required to fit all labels evenly. # Don't try to push them farther apart than that. maximum_possible_spacing = (axis.max - axis.min) / (len(axis.labels) - 1) if minimum_label_spacing > maximum_possible_spacing: minimum_label_spacing = maximum_possible_spacing labels = [list(x) for x in zip(axis.label_positions, axis.labels)] labels = sorted(labels, reverse=True) # First pass from the top, moving colliding labels downward for i in range(1, len(labels)): if labels[i - 1][0] - labels[i][0] < minimum_label_spacing: new_position = labels[i - 1][0] - minimum_label_spacing if axis.min is not None and new_position < axis.min: new_position = axis.min labels[i][0] = new_position # Second pass from the bottom, moving colliding labels upward for i in range(len(labels) - 2, -1, -1): if labels[i][0] - labels[i + 1][0] < minimum_label_spacing: new_position = labels[i + 1][0] + minimum_label_spacing if axis.max is not None and new_position > axis.max: new_position = axis.max labels[i][0] = new_position # Separate positions and labels label_positions, labels = zip(*labels) axis.labels = labels axis.label_positions = label_positions","def AdjustLabels ( self , axis , minimum_label_spacing ) : if minimum_label_spacing is None : return if len ( axis . labels ) <= 1 : return if axis . max is not None and axis . min is not None : maximum_possible_spacing = ( axis . max - axis . min ) / ( len ( axis . labels ) - 1 ) if minimum_label_spacing > maximum_possible_spacing : minimum_label_spacing = maximum_possible_spacing labels = [ list ( x ) for x in zip ( axis . label_positions , axis . 
labels ) ] labels = sorted ( labels , reverse = True ) for i in range ( 1 , len ( labels ) ) : if labels [ i - 1 ] [ 0 ] - labels [ i ] [ 0 ] < minimum_label_spacing : new_position = labels [ i - 1 ] [ 0 ] - minimum_label_spacing if axis . min is not None and new_position < axis . min : new_position = axis . min labels [ i ] [ 0 ] = new_position for i in range ( len ( labels ) - 2 , - 1 , - 1 ) : if labels [ i ] [ 0 ] - labels [ i + 1 ] [ 0 ] < minimum_label_spacing : new_position = labels [ i + 1 ] [ 0 ] + minimum_label_spacing new_position = axis . max labels [ i ] [ 0 ] = new_position label_positions , labels = zip ( * labels ) axis . labels = labels axis . label_positions = label_positions",if axis . max is not None and new_position > axis . max :,482 11989,"def __get_property_type_info(cls, property_proto): """"""Returns the type mapping for the provided property."""""" name = property_proto.name() is_repeated = bool(property_proto.multiple()) primitive_type = None entity_type = None if property_proto.has_meaning(): primitive_type = MEANING_TO_PRIMITIVE_TYPE.get(property_proto.meaning()) if primitive_type is None: value = property_proto.value() if value.has_int64value(): primitive_type = backup_pb2.EntitySchema.INTEGER elif value.has_booleanvalue(): primitive_type = backup_pb2.EntitySchema.BOOLEAN elif value.has_stringvalue(): if property_proto.meaning() == entity_pb.Property.ENTITY_PROTO: entity_proto = entity_pb.EntityProto() try: entity_proto.ParsePartialFromString(value.stringvalue()) except Exception: pass else: entity_type = EntityTypeInfo.create_from_entity_proto(entity_proto) else: primitive_type = backup_pb2.EntitySchema.STRING elif value.has_doublevalue(): primitive_type = backup_pb2.EntitySchema.FLOAT elif value.has_pointvalue(): primitive_type = backup_pb2.EntitySchema.GEO_POINT elif value.has_uservalue(): primitive_type = backup_pb2.EntitySchema.USER elif value.has_referencevalue(): primitive_type = backup_pb2.EntitySchema.REFERENCE return PropertyTypeInfo( name, is_repeated, (primitive_type,) if primitive_type is not None else None, (entity_type,) if entity_type else None, )","def __get_property_type_info ( cls , property_proto ) : """"""Returns the type mapping for the provided property."""""" name = property_proto . name ( ) is_repeated = bool ( property_proto . multiple ( ) ) primitive_type = None entity_type = None if property_proto . has_meaning ( ) : primitive_type = MEANING_TO_PRIMITIVE_TYPE . get ( property_proto . meaning ( ) ) if primitive_type is None : value = property_proto . value ( ) if value . has_int64value ( ) : primitive_type = backup_pb2 . EntitySchema . INTEGER elif value . has_booleanvalue ( ) : primitive_type = backup_pb2 . EntitySchema . BOOLEAN elif value . has_stringvalue ( ) : if property_proto . meaning ( ) == entity_pb . Property . ENTITY_PROTO : entity_proto = entity_pb . EntityProto ( ) try : entity_proto . ParsePartialFromString ( value . stringvalue ( ) ) except Exception : pass else : entity_type = EntityTypeInfo . create_from_entity_proto ( entity_proto ) else : primitive_type = backup_pb2 . EntitySchema . STRING primitive_type = backup_pb2 . EntitySchema . FLOAT elif value . has_pointvalue ( ) : primitive_type = backup_pb2 . EntitySchema . GEO_POINT elif value . has_uservalue ( ) : primitive_type = backup_pb2 . EntitySchema . USER elif value . has_referencevalue ( ) : primitive_type = backup_pb2 . EntitySchema . 
REFERENCE return PropertyTypeInfo ( name , is_repeated , ( primitive_type , ) if primitive_type is not None else None , ( entity_type , ) if entity_type else None , )",elif value . has_doublevalue ( ) :,495 21067,"def initialize_batcher( self, dataset, batch_size=128, bucketing_field=None, should_shuffle=True, ignore_last=False, ): if self.horovod: batcher = DistributedBatcher( dataset, self.horovod.rank(), self.horovod, batch_size, should_shuffle=should_shuffle, ignore_last=ignore_last, ) elif bucketing_field is not None: input_features = self.hyperparameters[""input_features""] bucketing_feature = [ feature for feature in input_features if feature[""name""] == bucketing_field ] if not bucketing_feature: raise ValueError( ""Bucketing field {} not present in input features"".format( bucketing_field ) ) else: bucketing_feature = bucketing_feature[0] should_trim = bucketing_feature[""encoder""] in dynamic_length_encoders if ""preprocessing"" in bucketing_feature: trim_side = bucketing_feature[""preprocessing""][""padding""] else: trim_side = self.hyperparameters[""preprocessing""][ bucketing_feature[""type""] ][""padding""] batcher = BucketedBatcher( dataset, bucketing_field=bucketing_field, batch_size=batch_size, buckets=10, ignore_last=ignore_last, should_shuffle=should_shuffle, should_trim=should_trim, trim_side=trim_side, ) else: batcher = Batcher( dataset, batch_size, should_shuffle=should_shuffle, ignore_last=ignore_last ) return batcher","def initialize_batcher ( self , dataset , batch_size = 128 , bucketing_field = None , should_shuffle = True , ignore_last = False , ) : if self . horovod : batcher = DistributedBatcher ( dataset , self . horovod . rank ( ) , self . horovod , batch_size , should_shuffle = should_shuffle , ignore_last = ignore_last , ) elif bucketing_field is not None : input_features = self . hyperparameters [ ""input_features"" ] bucketing_feature = [ feature for feature in input_features if feature [ ""name"" ] == bucketing_field ] if not bucketing_feature : raise ValueError ( ""Bucketing field {} not present in input features"" . format ( bucketing_field ) ) else : bucketing_feature = bucketing_feature [ 0 ] should_trim = bucketing_feature [ ""encoder"" ] in dynamic_length_encoders trim_side = bucketing_feature [ ""preprocessing"" ] [ ""padding"" ] else : trim_side = self . hyperparameters [ ""preprocessing"" ] [ bucketing_feature [ ""type"" ] ] [ ""padding"" ] batcher = BucketedBatcher ( dataset , bucketing_field = bucketing_field , batch_size = batch_size , buckets = 10 , ignore_last = ignore_last , should_shuffle = should_shuffle , should_trim = should_trim , trim_side = trim_side , ) else : batcher = Batcher ( dataset , batch_size , should_shuffle = should_shuffle , ignore_last = ignore_last ) return batcher","if ""preprocessing"" in bucketing_feature :",512 1094,"def get(self, request, *args, **kwargs): if request.GET.get(""format"", None) == ""json"": self.setup_queryset(*args, **kwargs) # Put the project id into the context for the static_data_template if ""pid"" in kwargs: self.static_context_extra[""pid""] = kwargs[""pid""] cmd = request.GET.get(""cmd"", None) if cmd and ""filterinfo"" in cmd: data = self.get_filter_info(request, **kwargs) else: # If no cmd is specified we give you the table data data = self.get_data(request, **kwargs) return HttpResponse(data, content_type=""application/json"") return super(ToasterTable, self).get(request, *args, **kwargs) ","def get ( self , request , * args , ** kwargs ) : if request . GET . 
get ( ""format"" , None ) == ""json"" : self . setup_queryset ( * args , ** kwargs ) if ""pid"" in kwargs : self . static_context_extra [ ""pid"" ] = kwargs [ ""pid"" ] cmd = request . GET . get ( ""cmd"" , None ) data = self . get_filter_info ( request , ** kwargs ) else : data = self . get_data ( request , ** kwargs ) return HttpResponse ( data , content_type = ""application/json"" ) return super ( ToasterTable , self ) . get ( request , * args , ** kwargs )","if cmd and ""filterinfo"" in cmd :",214 17378,"def wakeUp(self): """"""Write one byte to the pipe, and flush it."""""" # We don't use fdesc.writeToFD since we need to distinguish # between EINTR (try again) and EAGAIN (do nothing). if self.o is not None: try: util.untilConcludes(os.write, self.o, b""x"") except OSError as e: # XXX There is no unit test for raising the exception # for other errnos. See #4285. if e.errno != errno.EAGAIN: raise","def wakeUp ( self ) : """"""Write one byte to the pipe, and flush it."""""" if self . o is not None : try : util . untilConcludes ( os . write , self . o , b""x"" ) except OSError as e : raise",if e . errno != errno . EAGAIN :,158 5085,"def contact_me(request, attribute="""", response_format=""html""): ""My Contact card"" contact = request.user.profile.get_contact() if not request.user.profile.has_permission(contact): return user_denied(request, message=""You don't have access to this Contact"") types = Object.filter_by_request(request, ContactType.objects.order_by(""name"")) if not contact: return render_to_response( ""identities/contact_me_missing"", {""types"": types}, context_instance=RequestContext(request), response_format=response_format, ) subcontacts = Object.filter_by_request(request, contact.child_set) contact_values = contact.contactvalue_set.order_by(""field__name"") objects = get_contact_objects(request.user.profile, contact, preformat=True) module = None for key in objects: if not attribute: if objects[key][""count""]: # attribute = objects[key]['objects'].keys()[0] module = objects[key][""module""] else: if attribute in objects[key][""objects""].keys(): module = objects[key][""module""] break return render_to_response( ""identities/contact_me"", { ""contact"": contact, ""subcontacts"": subcontacts, ""objects"": objects, ""current_module"": module, ""attribute"": attribute, ""types"": types, ""contact_values"": contact_values, }, context_instance=RequestContext(request), response_format=response_format, )","def contact_me ( request , attribute = """" , response_format = ""html"" ) : ""My Contact card"" contact = request . user . profile . get_contact ( ) if not request . user . profile . has_permission ( contact ) : return user_denied ( request , message = ""You don't have access to this Contact"" ) types = Object . filter_by_request ( request , ContactType . objects . order_by ( ""name"" ) ) if not contact : return render_to_response ( ""identities/contact_me_missing"" , { ""types"" : types } , context_instance = RequestContext ( request ) , response_format = response_format , ) subcontacts = Object . filter_by_request ( request , contact . child_set ) contact_values = contact . contactvalue_set . order_by ( ""field__name"" ) objects = get_contact_objects ( request . user . profile , contact , preformat = True ) module = None for key in objects : if not attribute : module = objects [ key ] [ ""module"" ] else : if attribute in objects [ key ] [ ""objects"" ] . 
keys ( ) : module = objects [ key ] [ ""module"" ] break return render_to_response ( ""identities/contact_me"" , { ""contact"" : contact , ""subcontacts"" : subcontacts , ""objects"" : objects , ""current_module"" : module , ""attribute"" : attribute , ""types"" : types , ""contact_values"" : contact_values , } , context_instance = RequestContext ( request ) , response_format = response_format , )","if objects [ key ] [ ""count"" ] :",461 12918,"def findfiles(path): files = [] for name in os.listdir(path): # ignore hidden files/dirs and other unwanted files if name.startswith(""."") or name == ""lastsnap.jpg"": continue pathname = os.path.join(path, name) st = os.lstat(pathname) mode = st.st_mode if stat.S_ISDIR(mode): files.extend(findfiles(pathname)) elif stat.S_ISREG(mode): files.append((pathname, name, st)) return files ","def findfiles ( path ) : files = [ ] for name in os . listdir ( path ) : if name . startswith ( ""."" ) or name == ""lastsnap.jpg"" : continue pathname = os . path . join ( path , name ) st = os . lstat ( pathname ) mode = st . st_mode files . extend ( findfiles ( pathname ) ) elif stat . S_ISREG ( mode ) : files . append ( ( pathname , name , st ) ) return files",if stat . S_ISDIR ( mode ) :,150 16874,"def make_parser( func: tp.Callable, subparser: ap._SubParsersAction = None, params: tp.Dict[str, tp.Dict[str, tp.Any]] = None, **kwargs ) -> ""ap.ArgumentParser"": """"""A bare-bones argparse builder from functions"""""" doc = get_doc(func) kwargs.setdefault(""formatter_class"", ap.RawTextHelpFormatter) if subparser is None: kwargs.setdefault(""description"", doc) parser = ap.ArgumentParser(**kwargs) parser.set_defaults( **{_FUNC_NAME: lambda stdout: parser.print_help(file=stdout)} ) return parser else: parser = subparser.add_parser( kwargs.pop(""prog"", func.__name__), help=doc, **kwargs, ) parser.set_defaults(**{_FUNC_NAME: func}) if params: for par, args in params.items(): args.setdefault(""help"", get_doc(func, par)) parser.add_argument(par, **args) return parser ","def make_parser ( func : tp . Callable , subparser : ap . _SubParsersAction = None , params : tp . Dict [ str , tp . Dict [ str , tp . Any ] ] = None , ** kwargs ) -> ""ap.ArgumentParser"" : """"""A bare-bones argparse builder from functions"""""" doc = get_doc ( func ) kwargs . setdefault ( ""formatter_class"" , ap . RawTextHelpFormatter ) if subparser is None : kwargs . setdefault ( ""description"" , doc ) parser = ap . ArgumentParser ( ** kwargs ) parser . set_defaults ( ** { _FUNC_NAME : lambda stdout : parser . print_help ( file = stdout ) } ) return parser else : parser = subparser . add_parser ( kwargs . pop ( ""prog"" , func . __name__ ) , help = doc , ** kwargs , ) parser . set_defaults ( ** { _FUNC_NAME : func } ) for par , args in params . items ( ) : args . setdefault ( ""help"" , get_doc ( func , par ) ) parser . 
add_argument ( par , ** args ) return parser",if params :,294 15188,"def load_ip(self): if os.path.isfile(self.ip_list_fn): file_path = self.ip_list_fn elif self.default_ip_list_fn and os.path.isfile(self.default_ip_list_fn): file_path = self.default_ip_list_fn else: return with open(file_path, ""r"") as fd: lines = fd.readlines() for line in lines: try: if line.startswith(""#""): continue str_l = line.split("" "") if len(str_l) < 4: self.logger.warning(""line err: %s"", line) continue ip_str = str_l[0] domain = str_l[1] server = str_l[2] handshake_time = int(str_l[3]) if len(str_l) > 4: fail_times = int(str_l[4]) else: fail_times = 0 if len(str_l) > 5: down_fail = int(str_l[5]) else: down_fail = 0 # self.logger.info(""load ip: %s time:%d domain:%s server:%s"", ip, handshake_time, domain, server) self.add_ip( ip_str, handshake_time, domain, server, fail_times, down_fail, False ) except Exception as e: self.logger.exception(""load_ip line:%s err:%s"", line, e) self.logger.info( ""load ip_list num:%d, target num:%d"", len(self.ip_dict), len(self.ip_list) ) self.try_sort_ip(force=True)","def load_ip ( self ) : if os . path . isfile ( self . ip_list_fn ) : file_path = self . ip_list_fn elif self . default_ip_list_fn and os . path . isfile ( self . default_ip_list_fn ) : file_path = self . default_ip_list_fn else : return with open ( file_path , ""r"" ) as fd : lines = fd . readlines ( ) for line in lines : try : continue str_l = line . split ( "" "" ) if len ( str_l ) < 4 : self . logger . warning ( ""line err: %s"" , line ) continue ip_str = str_l [ 0 ] domain = str_l [ 1 ] server = str_l [ 2 ] handshake_time = int ( str_l [ 3 ] ) if len ( str_l ) > 4 : fail_times = int ( str_l [ 4 ] ) else : fail_times = 0 if len ( str_l ) > 5 : down_fail = int ( str_l [ 5 ] ) else : down_fail = 0 self . add_ip ( ip_str , handshake_time , domain , server , fail_times , down_fail , False ) except Exception as e : self . logger . exception ( ""load_ip line:%s err:%s"" , line , e ) self . logger . info ( ""load ip_list num:%d, target num:%d"" , len ( self . ip_dict ) , len ( self . ip_list ) ) self . try_sort_ip ( force = True )","if line . startswith ( ""#"" ) :",510 25441,"def tms_to_quadkey(self, tms, google=False): quadKey = """" x, y, z = tms # this algorithm works with google tiles, rather than tms, so convert # to those first. if not google: y = (2 ** z - 1) - y for i in range(z, 0, -1): digit = 0 mask = 1 << (i - 1) if (x & mask) != 0: digit += 1 if (y & mask) != 0: digit += 2 quadKey += str(digit) return quadKey","def tms_to_quadkey ( self , tms , google = False ) : quadKey = """" x , y , z = tms if not google : y = ( 2 ** z - 1 ) - y for i in range ( z , 0 , - 1 ) : digit = 0 mask = 1 << ( i - 1 ) if ( x & mask ) != 0 : digit += 1 digit += 2 quadKey += str ( digit ) return quadKey",if ( y & mask ) != 0 :,164 19356,"def wait_success(self, timeout=60 * 10): for i in range(timeout // 10): time.sleep(10) status = self.query_job() print(""job {} status is {}"".format(self.job_id, status)) if status and status == StatusSet.SUCCESS: return True if status and status in [ StatusSet.CANCELED, StatusSet.TIMEOUT, StatusSet.FAILED, ]: return False return False","def wait_success ( self , timeout = 60 * 10 ) : for i in range ( timeout // 10 ) : time . sleep ( 10 ) status = self . query_job ( ) print ( ""job {} status is {}"" . format ( self . job_id , status ) ) return True if status and status in [ StatusSet . CANCELED , StatusSet . TIMEOUT , StatusSet . 
FAILED , ] : return False return False",if status and status == StatusSet . SUCCESS :,134 6273,"def create_connection(self, address, protocol_factory=None, **kw): """"""Helper method for creating a connection to an ``address``."""""" protocol_factory = protocol_factory or self.create_protocol if isinstance(address, tuple): host, port = address if self.debug: self.logger.debug(""Create connection %s:%s"", host, port) _, protocol = await self._loop.create_connection( protocol_factory, host, port, **kw ) await protocol.event(""connection_made"") else: raise NotImplementedError(""Could not connect to %s"" % str(address)) return protocol ","def create_connection ( self , address , protocol_factory = None , ** kw ) : """"""Helper method for creating a connection to an ``address``."""""" protocol_factory = protocol_factory or self . create_protocol if isinstance ( address , tuple ) : host , port = address self . logger . debug ( ""Create connection %s:%s"" , host , port ) _ , protocol = await self . _loop . create_connection ( protocol_factory , host , port , ** kw ) await protocol . event ( ""connection_made"" ) else : raise NotImplementedError ( ""Could not connect to %s"" % str ( address ) ) return protocol",if self . debug :,165 1603,"def _import_module_with_version_check(module_name, minimum_version, install_info=None): """"""Check that module is installed with a recent enough version"""""" from distutils.version import LooseVersion try: module = __import__(module_name) except ImportError as exc: user_friendly_info = ('Module ""{0}"" could not be found. {1}').format( module_name, install_info or ""Please install it properly to use nilearn."" ) exc.args += (user_friendly_info,) # Necessary for Python 3 because the repr/str of ImportError # objects was changed in Python 3 if hasattr(exc, ""msg""): exc.msg += "". "" + user_friendly_info raise # Avoid choking on modules with no __version__ attribute module_version = getattr(module, ""__version__"", ""0.0.0"") version_too_old = not LooseVersion(module_version) >= LooseVersion(minimum_version) if version_too_old: message = ( ""A {module_name} version of at least {minimum_version} "" ""is required to use nilearn. {module_version} was found. "" ""Please upgrade {module_name}"" ).format( module_name=module_name, minimum_version=minimum_version, module_version=module_version, ) raise ImportError(message) return module","def _import_module_with_version_check ( module_name , minimum_version , install_info = None ) : """"""Check that module is installed with a recent enough version"""""" from distutils . version import LooseVersion try : module = __import__ ( module_name ) except ImportError as exc : user_friendly_info = ( 'Module ""{0}"" could not be found. {1}' ) . format ( module_name , install_info or ""Please install it properly to use nilearn."" ) exc . args += ( user_friendly_info , ) exc . msg += "". "" + user_friendly_info raise module_version = getattr ( module , ""__version__"" , ""0.0.0"" ) version_too_old = not LooseVersion ( module_version ) >= LooseVersion ( minimum_version ) if version_too_old : message = ( ""A {module_name} version of at least {minimum_version} "" ""is required to use nilearn. {module_version} was found. "" ""Please upgrade {module_name}"" ) . 
format ( module_name = module_name , minimum_version = minimum_version , module_version = module_version , ) raise ImportError ( message ) return module","if hasattr ( exc , ""msg"" ) :",382 10952,"def do_search(lo, hi): if hi - lo <= 1: return hi mid = int(math.floor((hi - lo) / 2) + lo) log.info(""Testing {0}"".format(draw_graph(lo, mid, hi, len(commit_hashes)))) with log.indent(): lo_result = None while lo_result is None: lo_result = do_benchmark(lo) if not non_null_results(lo_result): lo_result = None lo += 1 if lo >= mid: raise util.UserError(""Too many commits failed"") mid_result = None while mid_result is None: mid_result = do_benchmark(mid) if not non_null_results(mid_result, lo_result): mid_result = None mid += 1 if mid >= hi: raise util.UserError(""Too many commits failed"") hi_result = None while hi_result is None: hi_result = do_benchmark(hi) if not non_null_results(lo_result, mid_result, hi_result): hi_result = None hi -= 1 if hi <= mid: raise util.UserError(""Too many commits failed"") diff_b, diff_a = difference_3way(hi_result, mid_result, lo_result) if invert: diff_a *= -1.0 diff_b *= -1.0 if diff_a >= diff_b: return do_search(lo, mid) else: return do_search(mid, hi)","def do_search ( lo , hi ) : if hi - lo <= 1 : return hi mid = int ( math . floor ( ( hi - lo ) / 2 ) + lo ) log . info ( ""Testing {0}"" . format ( draw_graph ( lo , mid , hi , len ( commit_hashes ) ) ) ) with log . indent ( ) : lo_result = None while lo_result is None : lo_result = do_benchmark ( lo ) if not non_null_results ( lo_result ) : lo_result = None lo += 1 if lo >= mid : raise util . UserError ( ""Too many commits failed"" ) mid_result = None while mid_result is None : mid_result = do_benchmark ( mid ) if not non_null_results ( mid_result , lo_result ) : mid_result = None mid += 1 raise util . UserError ( ""Too many commits failed"" ) hi_result = None while hi_result is None : hi_result = do_benchmark ( hi ) if not non_null_results ( lo_result , mid_result , hi_result ) : hi_result = None hi -= 1 if hi <= mid : raise util . UserError ( ""Too many commits failed"" ) diff_b , diff_a = difference_3way ( hi_result , mid_result , lo_result ) if invert : diff_a *= - 1.0 diff_b *= - 1.0 if diff_a >= diff_b : return do_search ( lo , mid ) else : return do_search ( mid , hi )",if mid >= hi :,460 13790,"def _load(self, path: str): ds = DataSet() with open(path, ""r"", encoding=""utf-8"") as f: for line in f: line = line.strip() if line: parts = line.split(""\t"") raw_words1 = parts[1] raw_words2 = parts[2] target = parts[0] if raw_words1 and raw_words2 and target: ds.append( Instance( raw_words1=raw_words1, raw_words2=raw_words2, target=target ) ) return ds","def _load ( self , path : str ) : ds = DataSet ( ) with open ( path , ""r"" , encoding = ""utf-8"" ) as f : for line in f : line = line . strip ( ) if line : parts = line . split ( ""\t"" ) raw_words1 = parts [ 1 ] raw_words2 = parts [ 2 ] target = parts [ 0 ] ds . append ( Instance ( raw_words1 = raw_words1 , raw_words2 = raw_words2 , target = target ) ) return ds",if raw_words1 and raw_words2 and target :,200 22039,"def FallbackGetIndex(self, targetMO, argMOs, errorSuggestionMO): ## Defer if any object has no value so that we evaulate their ## Expressions and nest a CallSite for the InvokeMember. if not targetMO.HasValue or not all(map(lambda x: x.HasValue, argMOs)): return self.Defer((targetMO,) + tuple(argMOs)) ## Try COM object first. isCom, com = ComBinder.TryBindGetIndex(self, targetMO, argMOs) if isCom: return com ## Give a good error for Cons. 
if type(targetMO.Value) is Cons: if len(argMOs) != 1: return errorSuggestionMO or CreateThrow( targetMO, argMOs, BindingRestrictions.Empty, InvalidOperationException, ""Indexing Sympl list requires exactly one argument."", ) ## Find our own binding. ## ## Conversions created in GetIndexExpression must be consistent with ## restrictions made in GetTargetArgsRestrictions. return DynamicMetaObject( EnsureObjectResult(GetIndexExpression(targetMO, argMOs)), ## False means make type restriction on targetMO.LimitType GetTargetArgsRestrictions(targetMO, argMOs, False), )","def FallbackGetIndex ( self , targetMO , argMOs , errorSuggestionMO ) : if not targetMO . HasValue or not all ( map ( lambda x : x . HasValue , argMOs ) ) : return self . Defer ( ( targetMO , ) + tuple ( argMOs ) ) isCom , com = ComBinder . TryBindGetIndex ( self , targetMO , argMOs ) if isCom : return com if type ( targetMO . Value ) is Cons : return errorSuggestionMO or CreateThrow ( targetMO , argMOs , BindingRestrictions . Empty , InvalidOperationException , ""Indexing Sympl list requires exactly one argument."" , ) return DynamicMetaObject ( EnsureObjectResult ( GetIndexExpression ( targetMO , argMOs ) ) , GetTargetArgsRestrictions ( targetMO , argMOs , False ) , )",if len ( argMOs ) != 1 :,345 23598,"def _find_completions(self, doc, incomplete): """"""Find completions for incomplete word and save them."""""" self._completions = [] self._remains = [] favorites = self._favorite_words.get(doc, ()) _all_words = set(()) for words in self._all_words.itervalues(): _all_words.update(words) limit = self._settings.max_completions_show for sequence in (favorites, _all_words): for word in sequence: if not word.startswith(incomplete): continue if word == incomplete: continue if word in self._completions: continue self._completions.append(word) self._remains.append(word[len(incomplete) :]) if len(self._remains) >= limit: break ","def _find_completions ( self , doc , incomplete ) : """"""Find completions for incomplete word and save them."""""" self . _completions = [ ] self . _remains = [ ] favorites = self . _favorite_words . get ( doc , ( ) ) _all_words = set ( ( ) ) for words in self . _all_words . itervalues ( ) : _all_words . update ( words ) limit = self . _settings . max_completions_show for sequence in ( favorites , _all_words ) : for word in sequence : if not word . startswith ( incomplete ) : continue if word == incomplete : continue continue self . _completions . append ( word ) self . _remains . append ( word [ len ( incomplete ) : ] ) if len ( self . _remains ) >= limit : break",if word in self . _completions :,228 24989,"def run(self): while True: try: with DelayedKeyboardInterrupt(): raw_inputs = self._parent_task_queue.get() if self._has_stop_signal(raw_inputs): self._rq.put(raw_inputs, block=True) break if self._flow_type == BATCH: self._rq.put(raw_inputs, block=True) elif self._flow_type == REALTIME: try: self._rq.put(raw_inputs, block=False) except: pass except KeyboardInterrupt: continue ","def run ( self ) : while True : try : with DelayedKeyboardInterrupt ( ) : raw_inputs = self . _parent_task_queue . get ( ) if self . _has_stop_signal ( raw_inputs ) : self . _rq . put ( raw_inputs , block = True ) break if self . _flow_type == BATCH : self . _rq . put ( raw_inputs , block = True ) try : self . _rq . put ( raw_inputs , block = False ) except : pass except KeyboardInterrupt : continue",elif self . 
_flow_type == REALTIME :,199 551,"def run(algs): for alg in algs: vcs = alg.get(""variantcaller"") if vcs: if isinstance(vcs, dict): vcs = reduce(operator.add, vcs.values()) if not isinstance(vcs, (list, tuple)): vcs = [vcs] return any(vc.startswith(prefix) for vc in vcs if vc) ","def run ( algs ) : for alg in algs : vcs = alg . get ( ""variantcaller"" ) if vcs : if isinstance ( vcs , dict ) : vcs = reduce ( operator . add , vcs . values ( ) ) vcs = [ vcs ] return any ( vc . startswith ( prefix ) for vc in vcs if vc )","if not isinstance ( vcs , ( list , tuple ) ) :",117 1901,"def getProperty(self, name): if name == handler.property_lexical_handler: return self._lex_handler_prop elif name == property_interning_dict: return self._interning elif name == property_xml_string: if self._parser: if hasattr(self._parser, ""GetInputContext""): return self._parser.GetInputContext() else: raise SAXNotRecognizedException( ""This version of expat does not support getting"" "" the XML string"" ) else: raise SAXNotSupportedException( ""XML string cannot be returned when not parsing"" ) raise SAXNotRecognizedException(""Property '%s' not recognized"" % name) ","def getProperty ( self , name ) : if name == handler . property_lexical_handler : return self . _lex_handler_prop elif name == property_interning_dict : return self . _interning elif name == property_xml_string : if self . _parser : return self . _parser . GetInputContext ( ) else : raise SAXNotRecognizedException ( ""This version of expat does not support getting"" "" the XML string"" ) else : raise SAXNotSupportedException ( ""XML string cannot be returned when not parsing"" ) raise SAXNotRecognizedException ( ""Property '%s' not recognized"" % name )","if hasattr ( self . _parser , ""GetInputContext"" ) :",204 11202,"def visible_settings(self): visible_settings = super(RelateObjects, self).visible_settings() visible_settings += [ self.wants_per_parent_means, self.find_parent_child_distances, self.wants_child_objects_saved, ] if self.wants_child_objects_saved: visible_settings += [self.output_child_objects_name] if self.find_parent_child_distances != D_NONE and self.has_step_parents: visible_settings += [self.wants_step_parent_distances] if self.wants_step_parent_distances: for group in self.step_parent_names: visible_settings += group.visible_settings() visible_settings += [self.add_step_parent_button] return visible_settings ","def visible_settings ( self ) : visible_settings = super ( RelateObjects , self ) . visible_settings ( ) visible_settings += [ self . wants_per_parent_means , self . find_parent_child_distances , self . wants_child_objects_saved , ] if self . wants_child_objects_saved : visible_settings += [ self . output_child_objects_name ] if self . find_parent_child_distances != D_NONE and self . has_step_parents : visible_settings += [ self . wants_step_parent_distances ] for group in self . step_parent_names : visible_settings += group . visible_settings ( ) visible_settings += [ self . add_step_parent_button ] return visible_settings",if self . 
wants_step_parent_distances :,223 4477,"def get_host_ipv6(with_nic=True): nic_info = get_all_nic_info() ipv4 = get_host_ip() ipv6 = None for nic, info in nic_info.items(): ip4 = info[""inet4""] ip6 = info[""inet6""] if not all([ip4, ip6]): continue ip4, ip6 = ip4.pop(), ip6.pop() if ip4 == ipv4: ipv6 = ip6 if ip6 else None if ipv6 and ""%"" not in ipv6: ipv6 = ipv6 + ""%"" + nic break if ipv6: if not with_nic: ipv6 = ipv6.split(""%"")[0] return ipv6","def get_host_ipv6 ( with_nic = True ) : nic_info = get_all_nic_info ( ) ipv4 = get_host_ip ( ) ipv6 = None for nic , info in nic_info . items ( ) : ip4 = info [ ""inet4"" ] ip6 = info [ ""inet6"" ] continue ip4 , ip6 = ip4 . pop ( ) , ip6 . pop ( ) if ip4 == ipv4 : ipv6 = ip6 if ip6 else None if ipv6 and ""%"" not in ipv6 : ipv6 = ipv6 + ""%"" + nic break if ipv6 : if not with_nic : ipv6 = ipv6 . split ( ""%"" ) [ 0 ] return ipv6","if not all ( [ ip4 , ip6 ] ) :",211 8783,"def _listVMsCallback(self, result, ignore_error=False, error=False, **kwargs): if error: if ""message"" in result: if not ignore_error: QtWidgets.QMessageBox.critical( self, ""List vms"", ""Error while listing vms: {}"".format(result[""message""]), ) return if not sip_is_deleted(self.uiVMListComboBox): self.uiVMListComboBox.clear() for vm in result: self.uiVMListComboBox.addItem(vm[""vmname""], vm[""vmname""]) index = self.uiVMListComboBox.findText(self._settings[""vmname""]) if index == -1: index = self.uiVMListComboBox.findText(""GNS3 VM"") if index == -1: index = 0 self.uiVMListComboBox.setCurrentIndex(index) self._initialized = True","def _listVMsCallback ( self , result , ignore_error = False , error = False , ** kwargs ) : if error : if ""message"" in result : QtWidgets . QMessageBox . critical ( self , ""List vms"" , ""Error while listing vms: {}"" . format ( result [ ""message"" ] ) , ) return if not sip_is_deleted ( self . uiVMListComboBox ) : self . uiVMListComboBox . clear ( ) for vm in result : self . uiVMListComboBox . addItem ( vm [ ""vmname"" ] , vm [ ""vmname"" ] ) index = self . uiVMListComboBox . findText ( self . _settings [ ""vmname"" ] ) if index == - 1 : index = self . uiVMListComboBox . findText ( ""GNS3 VM"" ) if index == - 1 : index = 0 self . uiVMListComboBox . setCurrentIndex ( index ) self . _initialized = True",if not ignore_error :,272 23356,"def get_library_dirs(platform, arch=None): if platform == ""win32"": jre_home = get_jre_home(platform) jdk_home = JAVA_HOME if isinstance(jre_home, bytes): jre_home = jre_home.decode(""utf-8"") return [join(jdk_home, ""lib""), join(jdk_home, ""bin"", ""server"")] elif platform == ""android"": return [""libs/{}"".format(arch)] return []","def get_library_dirs ( platform , arch = None ) : if platform == ""win32"" : jre_home = get_jre_home ( platform ) jdk_home = JAVA_HOME jre_home = jre_home . decode ( ""utf-8"" ) return [ join ( jdk_home , ""lib"" ) , join ( jdk_home , ""bin"" , ""server"" ) ] elif platform == ""android"" : return [ ""libs/{}"" . 
format ( arch ) ] return [ ]","if isinstance ( jre_home , bytes ) :",136 4988,"def transform(self, data): with timer(""transform %s"" % self.name, logging.DEBUG): if self.operator in {""lat"", ""latitude""}: return self.series(data).apply(GeoIP.get_latitude) elif self.operator in {""lon"", ""longitude""}: return self.series(data).apply(GeoIP.get_longitude) elif self.operator in {""acc"", ""accuracy""}: return self.series(data).apply(GeoIP.get_accuracy) raise NameError(""Unknown GeoIP operator [lat, lon, acc]: %s"" % self.operator) ","def transform ( self , data ) : with timer ( ""transform %s"" % self . name , logging . DEBUG ) : if self . operator in { ""lat"" , ""latitude"" } : return self . series ( data ) . apply ( GeoIP . get_latitude ) elif self . operator in { ""lon"" , ""longitude"" } : return self . series ( data ) . apply ( GeoIP . get_longitude ) return self . series ( data ) . apply ( GeoIP . get_accuracy ) raise NameError ( ""Unknown GeoIP operator [lat, lon, acc]: %s"" % self . operator )","elif self . operator in { ""acc"" , ""accuracy"" } :",161 9343,"def parseFunctionSourceElements(): global strict sourceElement = None sourceElements = [] token = None directive = None firstRestricted = None oldLabelSet = None oldInIteration = None oldInSwitch = None oldInFunctionBody = None skipComment() delegate.markStart() expect(""{"") while index < length: if lookahead.type != Token.StringLiteral: break token = lookahead sourceElement = parseSourceElement() sourceElements.append(sourceElement) if sourceElement.expression.type != Syntax.Literal: break directive = source[(token.range[0] + 1) : (token.range[1] - 1)] if directive == ""use strict"": strict = True if firstRestricted: throwErrorTolerant(firstRestricted, Messages.StrictOctalLiteral) else: if (not firstRestricted) and token.octal: firstRestricted = token oldLabelSet = state.labelSet oldInIteration = state.inIteration oldInSwitch = state.inSwitch oldInFunctionBody = state.inFunctionBody state.labelSet = jsdict({}) state.inIteration = False state.inSwitch = False state.inFunctionBody = True while index < length: if match(""}""): break sourceElement = parseSourceElement() if ( ""undefined"" if not ""sourceElement"" in locals() else typeof(sourceElement) ) == ""undefined"": break sourceElements.append(sourceElement) expect(""}"") state.labelSet = oldLabelSet state.inIteration = oldInIteration state.inSwitch = oldInSwitch state.inFunctionBody = oldInFunctionBody return delegate.markEnd(delegate.createBlockStatement(sourceElements))","def parseFunctionSourceElements ( ) : global strict sourceElement = None sourceElements = [ ] token = None directive = None firstRestricted = None oldLabelSet = None oldInIteration = None oldInSwitch = None oldInFunctionBody = None skipComment ( ) delegate . markStart ( ) expect ( ""{"" ) while index < length : if lookahead . type != Token . StringLiteral : break token = lookahead sourceElement = parseSourceElement ( ) sourceElements . append ( sourceElement ) if sourceElement . expression . type != Syntax . Literal : break directive = source [ ( token . range [ 0 ] + 1 ) : ( token . range [ 1 ] - 1 ) ] if directive == ""use strict"" : strict = True if firstRestricted : throwErrorTolerant ( firstRestricted , Messages . StrictOctalLiteral ) else : if ( not firstRestricted ) and token . octal : firstRestricted = token oldLabelSet = state . labelSet oldInIteration = state . inIteration oldInSwitch = state . inSwitch oldInFunctionBody = state . inFunctionBody state . labelSet = jsdict ( { } ) state . 
inIteration = False state . inSwitch = False state . inFunctionBody = True while index < length : break sourceElement = parseSourceElement ( ) if ( ""undefined"" if not ""sourceElement"" in locals ( ) else typeof ( sourceElement ) ) == ""undefined"" : break sourceElements . append ( sourceElement ) expect ( ""}"" ) state . labelSet = oldLabelSet state . inIteration = oldInIteration state . inSwitch = oldInSwitch state . inFunctionBody = oldInFunctionBody return delegate . markEnd ( delegate . createBlockStatement ( sourceElements ) )","if match ( ""}"" ) :",490 11033,"def publish(self, channel, *args, **kwargs): """"""Return output of all subscribers for the given channel."""""" if channel not in self.listeners: return [] exc = ChannelFailures() output = [] items = [ (self._priorities[(channel, listener)], listener) for listener in self.listeners[channel] ] try: items.sort(key=lambda item: item[0]) except TypeError: # Python 2.3 had no 'key' arg, but that doesn't matter # since it could sort dissimilar types just fine. items.sort() for priority, listener in items: try: output.append(listener(*args, **kwargs)) except KeyboardInterrupt: raise except SystemExit: e = sys.exc_info()[1] # If we have previous errors ensure the exit code is non-zero if exc and e.code == 0: e.code = 1 raise except: exc.handle_exception() if channel == ""log"": # Assume any further messages to 'log' will fail. pass else: self.log( ""Error in %r listener %r"" % (channel, listener), level=40, traceback=True, ) if exc: raise exc return output","def publish ( self , channel , * args , ** kwargs ) : """"""Return output of all subscribers for the given channel."""""" if channel not in self . listeners : return [ ] exc = ChannelFailures ( ) output = [ ] items = [ ( self . _priorities [ ( channel , listener ) ] , listener ) for listener in self . listeners [ channel ] ] try : items . sort ( key = lambda item : item [ 0 ] ) except TypeError : items . sort ( ) for priority , listener in items : try : output . append ( listener ( * args , ** kwargs ) ) except KeyboardInterrupt : raise except SystemExit : e = sys . exc_info ( ) [ 1 ] e . code = 1 raise except : exc . handle_exception ( ) if channel == ""log"" : pass else : self . log ( ""Error in %r listener %r"" % ( channel , listener ) , level = 40 , traceback = True , ) if exc : raise exc return output",if exc and e . code == 0 :,387 24528,"def bitcoin_done(request): with mock.patch(""bitcoinrpc.connection.BitcoinConnection"") as MockBitcoinConnection: connection = MockBitcoinConnection() connection.getnewaddress.return_value = BTC_TEST_ADDRESS connection.listtransactions.return_value = BTC_TEST_SUCCESSFUL_TXNS amount = 0.01 bitcoin_obj = get_gateway(""bitcoin"") address = request.session.get(""bitcoin_address"", None) if not address: return HttpResponseRedirect(reverse(""app_bitcoin"")) result = bitcoin_obj.purchase(amount, address) if result[""status""] == ""SUCCESS"": del request.session[""bitcoin_address""] return render( request, ""app/bitcoin_done.html"", { ""title"": ""Bitcoin"", ""amount"": amount, ""address"": address, ""result"": result, }, )","def bitcoin_done ( request ) : with mock . patch ( ""bitcoinrpc.connection.BitcoinConnection"" ) as MockBitcoinConnection : connection = MockBitcoinConnection ( ) connection . getnewaddress . return_value = BTC_TEST_ADDRESS connection . listtransactions . return_value = BTC_TEST_SUCCESSFUL_TXNS amount = 0.01 bitcoin_obj = get_gateway ( ""bitcoin"" ) address = request . session . 
get ( ""bitcoin_address"" , None ) if not address : return HttpResponseRedirect ( reverse ( ""app_bitcoin"" ) ) result = bitcoin_obj . purchase ( amount , address ) del request . session [ ""bitcoin_address"" ] return render ( request , ""app/bitcoin_done.html"" , { ""title"" : ""Bitcoin"" , ""amount"" : amount , ""address"" : address , ""result"" : result , } , )","if result [ ""status"" ] == ""SUCCESS"" :",283 7259,"def ensemble(self, pairs, other_preds): """"""Ensemble the dict with statistical model predictions."""""" lemmas = [] assert len(pairs) == len(other_preds) for p, pred in zip(pairs, other_preds): w, pos = p if (w, pos) in self.composite_dict: lemma = self.composite_dict[(w, pos)] elif w in self.word_dict: lemma = self.word_dict[w] else: lemma = pred if lemma is None: lemma = w lemmas.append(lemma) return lemmas ","def ensemble ( self , pairs , other_preds ) : """"""Ensemble the dict with statistical model predictions."""""" lemmas = [ ] assert len ( pairs ) == len ( other_preds ) for p , pred in zip ( pairs , other_preds ) : w , pos = p if ( w , pos ) in self . composite_dict : lemma = self . composite_dict [ ( w , pos ) ] elif w in self . word_dict : lemma = self . word_dict [ w ] else : lemma = pred lemma = w lemmas . append ( lemma ) return lemmas",if lemma is None :,164 2332,"def __editorKeyPress(editor, event): if event.key == ""B"": __findBookmark(editor) return True if event.key in [str(x) for x in range(0, 10)]: numericBookmark = int(event.key) if event.modifiers == event.modifiers.Control: # Assign node = None if isinstance(editor, GafferUI.GraphEditor): selection = editor.scriptNode().selection() if len(selection) == 1: node = selection[0] else: backdrops = [n for n in selection if isinstance(n, Gaffer.Backdrop)] if len(backdrops) == 1: node = backdrops[0] elif isinstance(editor, GafferUI.NodeSetEditor): nodeSet = editor.getNodeSet() node = nodeSet[-1] if len(nodeSet) else None if node is not None: __assignNumericBookmark(node, numericBookmark) elif not event.modifiers: # Find # For linked editors, its more intuitive for the user if we update # the driving editor, rather than breaking the link. if numericBookmark != 0: __findNumericBookmark(editor, numericBookmark) elif isinstance(editor, GafferUI.NodeSetEditor): editor.setNodeSet(editor.scriptNode().selection()) return True","def __editorKeyPress ( editor , event ) : if event . key == ""B"" : __findBookmark ( editor ) return True if event . key in [ str ( x ) for x in range ( 0 , 10 ) ] : numericBookmark = int ( event . key ) if event . modifiers == event . modifiers . Control : node = None if isinstance ( editor , GafferUI . GraphEditor ) : selection = editor . scriptNode ( ) . selection ( ) if len ( selection ) == 1 : node = selection [ 0 ] else : backdrops = [ n for n in selection if isinstance ( n , Gaffer . Backdrop ) ] node = backdrops [ 0 ] elif isinstance ( editor , GafferUI . NodeSetEditor ) : nodeSet = editor . getNodeSet ( ) node = nodeSet [ - 1 ] if len ( nodeSet ) else None if node is not None : __assignNumericBookmark ( node , numericBookmark ) elif not event . modifiers : if numericBookmark != 0 : __findNumericBookmark ( editor , numericBookmark ) elif isinstance ( editor , GafferUI . NodeSetEditor ) : editor . setNodeSet ( editor . scriptNode ( ) . 
selection ( ) ) return True",if len ( backdrops ) == 1 :,420 5313,"def kc_pressed(self, key, modifierFlags): if modifierFlags == CTRL_KEY_FLAG: if key == ""C"": self.send(""\x03"") self.telnet.running = False elif key == ""D"": self.send(""\x04"") elif key == ""A"": self.send(""\x01"") elif key == ""E"": self.send(""\x05"") elif key == ""K"": self.send(""\x0B"") elif key == ""L"": self.send(""\x0C"") elif key == ""U"": self.send(""\x15"") elif key == ""Z"": self.send(""\x1A"") elif key == ""["": self.send(""\x1B"") # ESC elif modifierFlags == 0: if key == ""UIKeyInputUpArrow"": self.send(""\x10"") elif key == ""UIKeyInputDownArrow"": self.send(""\x0E"") elif key == ""UIKeyInputLeftArrow"": self.send(""\033[D"") elif key == ""UIKeyInputRightArrow"": self.send(""\033[C"")","def kc_pressed ( self , key , modifierFlags ) : if modifierFlags == CTRL_KEY_FLAG : if key == ""C"" : self . send ( ""\x03"" ) self . telnet . running = False elif key == ""D"" : self . send ( ""\x04"" ) self . send ( ""\x01"" ) elif key == ""E"" : self . send ( ""\x05"" ) elif key == ""K"" : self . send ( ""\x0B"" ) elif key == ""L"" : self . send ( ""\x0C"" ) elif key == ""U"" : self . send ( ""\x15"" ) elif key == ""Z"" : self . send ( ""\x1A"" ) elif key == ""["" : self . send ( ""\x1B"" ) elif modifierFlags == 0 : if key == ""UIKeyInputUpArrow"" : self . send ( ""\x10"" ) elif key == ""UIKeyInputDownArrow"" : self . send ( ""\x0E"" ) elif key == ""UIKeyInputLeftArrow"" : self . send ( ""\033[D"" ) elif key == ""UIKeyInputRightArrow"" : self . send ( ""\033[C"" )","elif key == ""A"" :",330 7781,"def starttag(self, quoteattr=None): # the optional arg is used by the docutils_xml writer if quoteattr is None: quoteattr = pseudo_quoteattr parts = [self.tagname] for name, value in self.attlist(): if value is None: # boolean attribute parts.append('%s=""True""' % name) continue if isinstance(value, list): values = [serial_escape(""%s"" % (v,)) for v in value] value = "" "".join(values) else: value = unicode(value) value = quoteattr(value) parts.append(u""%s=%s"" % (name, value)) return u""<%s>"" % u"" "".join(parts) ","def starttag ( self , quoteattr = None ) : if quoteattr is None : quoteattr = pseudo_quoteattr parts = [ self . tagname ] for name , value in self . attlist ( ) : if value is None : parts . append ( '%s=""True""' % name ) continue values = [ serial_escape ( ""%s"" % ( v , ) ) for v in value ] value = "" "" . join ( values ) else : value = unicode ( value ) value = quoteattr ( value ) parts . append ( u""%s=%s"" % ( name , value ) ) return u""<%s>"" % u"" "" . join ( parts )","if isinstance ( value , list ) :",201 15268,"def get_tag_values(self, event): http = event.interfaces.get(""sentry.interfaces.Http"") if not http: return [] if not http.headers: return [] headers = http.headers # XXX: transitional support for workers if isinstance(headers, dict): headers = headers.items() output = [] for key, value in headers: if key != ""User-Agent"": continue ua = Parse(value) if not ua: continue result = self.get_tag_from_ua(ua) if result: output.append(result) return output ","def get_tag_values ( self , event ) : http = event . interfaces . get ( ""sentry.interfaces.Http"" ) if not http : return [ ] if not http . headers : return [ ] headers = http . headers if isinstance ( headers , dict ) : headers = headers . items ( ) output = [ ] for key , value in headers : if key != ""User-Agent"" : continue ua = Parse ( value ) continue result = self . get_tag_from_ua ( ua ) if result : output . 
append ( result ) return output",if not ua :,176 2666,"def post(self): old = self._fetch_existing_config() new = dict() for key in self.ALLOWED.keys(): if self.ALLOWED[key] == bool: val = self.get_argument(key, False) else: val = self.get_argument(key, None) if val is None or val == """": new[key] = old[key] elif key == ""pwdhash"": new[key] = bcrypt.hashpw(val, bcrypt.gensalt()) elif self.ALLOWED[key] == str: new[key] = str(val) elif self.ALLOWED[key] == int: new[key] = int(val) elif self.ALLOWED[key] == bool: new[key] = bool(val) config_file = open(self.settings.config_path.web, ""w"") for key, val in new.items(): if self.ALLOWED[key] == str: config_file.write(""%s='%s'\n"" % (key, val)) else: config_file.write(""%s=%s\n"" % (key, val)) config_file.close() self.redirect(""/"") ","def post ( self ) : old = self . _fetch_existing_config ( ) new = dict ( ) for key in self . ALLOWED . keys ( ) : if self . ALLOWED [ key ] == bool : val = self . get_argument ( key , False ) else : val = self . get_argument ( key , None ) if val is None or val == """" : new [ key ] = old [ key ] elif key == ""pwdhash"" : new [ key ] = bcrypt . hashpw ( val , bcrypt . gensalt ( ) ) el new [ key ] = str ( val ) elif self . ALLOWED [ key ] == int : new [ key ] = int ( val ) elif self . ALLOWED [ key ] == bool : new [ key ] = bool ( val ) config_file = open ( self . settings . config_path . web , ""w"" ) for key , val in new . items ( ) : config_file . write ( ""%s='%s'\n"" % ( key , val ) ) else : config_file . write ( ""%s=%s\n"" % ( key , val ) ) config_file . close ( ) self . redirect ( ""/"" )",if self . ALLOWED [ key ] == str :,326 8175,"def check_samplers_fit_resample(name, sampler_orig): sampler = clone(sampler_orig) X, y = make_classification( n_samples=1000, n_classes=3, n_informative=4, weights=[0.2, 0.3, 0.5], random_state=0, ) target_stats = Counter(y) X_res, y_res = sampler.fit_resample(X, y) if isinstance(sampler, BaseOverSampler): target_stats_res = Counter(y_res) n_samples = max(target_stats.values()) assert all(value >= n_samples for value in Counter(y_res).values()) elif isinstance(sampler, BaseUnderSampler): n_samples = min(target_stats.values()) if name == ""InstanceHardnessThreshold"": # IHT does not enforce the number of samples but provide a number # of samples the closest to the desired target. assert all( Counter(y_res)[k] <= target_stats[k] for k in target_stats.keys() ) else: assert all(value == n_samples for value in Counter(y_res).values()) elif isinstance(sampler, BaseCleaningSampler): target_stats_res = Counter(y_res) class_minority = min(target_stats, key=target_stats.get) assert all( target_stats[class_sample] > target_stats_res[class_sample] for class_sample in target_stats.keys() if class_sample != class_minority )","def check_samplers_fit_resample ( name , sampler_orig ) : sampler = clone ( sampler_orig ) X , y = make_classification ( n_samples = 1000 , n_classes = 3 , n_informative = 4 , weights = [ 0.2 , 0.3 , 0.5 ] , random_state = 0 , ) target_stats = Counter ( y ) X_res , y_res = sampler . fit_resample ( X , y ) if isinstance ( sampler , BaseOverSampler ) : target_stats_res = Counter ( y_res ) n_samples = max ( target_stats . values ( ) ) assert all ( value >= n_samples for value in Counter ( y_res ) . values ( ) ) elif isinstance ( sampler , BaseUnderSampler ) : n_samples = min ( target_stats . values ( ) ) assert all ( Counter ( y_res ) [ k ] <= target_stats [ k ] for k in target_stats . keys ( ) ) else : assert all ( value == n_samples for value in Counter ( y_res ) . 
values ( ) ) elif isinstance ( sampler , BaseCleaningSampler ) : target_stats_res = Counter ( y_res ) class_minority = min ( target_stats , key = target_stats . get ) assert all ( target_stats [ class_sample ] > target_stats_res [ class_sample ] for class_sample in target_stats . keys ( ) if class_sample != class_minority )","if name == ""InstanceHardnessThreshold"" :",435 19817,"def preprocess_raw_enwik9(input_filename, output_filename): with open(input_filename, ""r"") as f1: with open(output_filename, ""w"") as f2: while True: line = f1.readline() if not line: break line = list(enwik9_norm_transform([line]))[0] if line != "" "" and line != """": if line[0] == "" "": line = line[1:] f2.writelines(line + ""\n"")","def preprocess_raw_enwik9 ( input_filename , output_filename ) : with open ( input_filename , ""r"" ) as f1 : with open ( output_filename , ""w"" ) as f2 : while True : line = f1 . readline ( ) if not line : break line = list ( enwik9_norm_transform ( [ line ] ) ) [ 0 ] if line != "" "" and line != """" : line = line [ 1 : ] f2 . writelines ( line + ""\n"" )","if line [ 0 ] == "" "" :",164 17540,"def __setitem__(self, key, value): if isinstance(value, (tuple, list)): info, reference = value if info not in self._reverse_infos: self._reverse_infos[info] = len(self._infos) self._infos.append(info) if reference not in self._reverse_references: self._reverse_references[reference] = len(self._references) self._references.append(reference) self._trails[key] = ""%d,%d"" % ( self._reverse_infos[info], self._reverse_references[reference], ) else: raise Exception(""unsupported type '%s'"" % type(value)) ","def __setitem__ ( self , key , value ) : if isinstance ( value , ( tuple , list ) ) : info , reference = value if info not in self . _reverse_infos : self . _reverse_infos [ info ] = len ( self . _infos ) self . _infos . append ( info ) self . _reverse_references [ reference ] = len ( self . _references ) self . _references . append ( reference ) self . _trails [ key ] = ""%d,%d"" % ( self . _reverse_infos [ info ] , self . _reverse_references [ reference ] , ) else : raise Exception ( ""unsupported type '%s'"" % type ( value ) )",if reference not in self . _reverse_references :,184 832,"def init(self, view, items=None): selections = [] if view.sel(): for region in view.sel(): selections.append(view.substr(region)) values = [] for idx, index in enumerate(map(int, items)): if idx >= len(selections): break i = index - 1 if i >= 0 and i < len(selections): values.append(selections[i]) else: values.append(None) # fill up for idx, value in enumerate(selections): if len(values) + 1 < idx: values.append(value) self.stack = values","def init ( self , view , items = None ) : selections = [ ] if view . sel ( ) : for region in view . sel ( ) : selections . append ( view . substr ( region ) ) values = [ ] for idx , index in enumerate ( map ( int , items ) ) : break i = index - 1 if i >= 0 and i < len ( selections ) : values . append ( selections [ i ] ) else : values . append ( None ) for idx , value in enumerate ( selections ) : if len ( values ) + 1 < idx : values . append ( value ) self . 
stack = values",if idx >= len ( selections ) :,178 23031,"def viewrendered(event): """"""Open render view for commander"""""" c = event.get(""c"") if not c: return None global controllers, layouts vr = controllers.get(c.hash()) if vr: vr.activate() vr.show() vr.adjust_layout(""open"") else: h = c.hash() controllers[h] = vr = ViewRenderedController(c) layouts[h] = c.db.get(""viewrendered_default_layouts"", (None, None)) if hasattr(c, ""free_layout""): vr._ns_id = ""_leo_viewrendered"" # for free_layout load/save vr.splitter = splitter = c.free_layout.get_top_splitter() # Careful: we may be unit testing. if splitter: vr.store_layout(""closed"") sizes = split_last_sizes(splitter.sizes()) ok = splitter.add_adjacent(vr, ""bodyFrame"", ""right-of"") if not ok: splitter.insert(0, vr) else: if splitter.orientation() == QtCore.Qt.Horizontal: splitter.setSizes(sizes) vr.adjust_layout(""open"") else: vr.setWindowTitle(""Rendered View"") vr.resize(600, 600) vr.show() c.bodyWantsFocusNow() # The following conflicts with F11: help-for-command. # I'm not sure why it was needed, but for sure it can not be used. # def at_idle(c=c): # c.bodyWantsFocusNow() # QtCore.QTimer.singleShot(0,at_idle) return vr","def viewrendered ( event ) : """"""Open render view for commander"""""" c = event . get ( ""c"" ) if not c : return None global controllers , layouts vr = controllers . get ( c . hash ( ) ) if vr : vr . activate ( ) vr . show ( ) vr . adjust_layout ( ""open"" ) else : h = c . hash ( ) controllers [ h ] = vr = ViewRenderedController ( c ) layouts [ h ] = c . db . get ( ""viewrendered_default_layouts"" , ( None , None ) ) if hasattr ( c , ""free_layout"" ) : vr . _ns_id = ""_leo_viewrendered"" vr . splitter = splitter = c . free_layout . get_top_splitter ( ) if splitter : vr . store_layout ( ""closed"" ) sizes = split_last_sizes ( splitter . sizes ( ) ) ok = splitter . add_adjacent ( vr , ""bodyFrame"" , ""right-of"" ) splitter . insert ( 0 , vr ) else : if splitter . orientation ( ) == QtCore . Qt . Horizontal : splitter . setSizes ( sizes ) vr . adjust_layout ( ""open"" ) else : vr . setWindowTitle ( ""Rendered View"" ) vr . resize ( 600 , 600 ) vr . show ( ) c . bodyWantsFocusNow ( ) return vr",if not ok :,508 11395,"def _stringify(value): """"""Internal function."""""" if isinstance(value, (list, tuple)): if len(value) == 1: value = _stringify(value[0]) if value[0] == ""{"": value = ""{%s}"" % value else: value = ""{%s}"" % _join(value) else: if isinstance(value, basestring): value = unicode(value) else: value = str(value) if not value: value = ""{}"" elif _magic_re.search(value): # add '\' before special characters and spaces value = _magic_re.sub(r""\\\1"", value) value = _space_re.sub(r""\\\1"", value) elif value[0] == '""' or _space_re.search(value): value = ""{%s}"" % value return value","def _stringify ( value ) : """"""Internal function."""""" if isinstance ( value , ( list , tuple ) ) : if len ( value ) == 1 : value = _stringify ( value [ 0 ] ) value = ""{%s}"" % value else : value = ""{%s}"" % _join ( value ) else : if isinstance ( value , basestring ) : value = unicode ( value ) else : value = str ( value ) if not value : value = ""{}"" elif _magic_re . search ( value ) : value = _magic_re . sub ( r""\\\1"" , value ) value = _space_re . sub ( r""\\\1"" , value ) elif value [ 0 ] == '""' or _space_re . 
search ( value ) : value = ""{%s}"" % value return value","if value [ 0 ] == ""{"" :",248 6685,"def __init__( self, host: str, port: int, app: ""WSGIApplication"", handler: t.Optional[t.Type[WSGIRequestHandler]] = None, passthrough_errors: bool = False, ssl_context: t.Optional[_TSSLContextArg] = None, fd: t.Optional[int] = None, ) -> None: if handler is None: handler = WSGIRequestHandler self.address_family = select_address_family(host, port) if fd is not None: real_sock = socket.fromfd(fd, self.address_family, socket.SOCK_STREAM) port = 0 server_address = get_sockaddr(host, int(port), self.address_family) # remove socket file if it already exists if self.address_family == af_unix: server_address = t.cast(str, server_address) if os.path.exists(server_address): os.unlink(server_address) super().__init__(server_address, handler) # type: ignore self.app = app self.passthrough_errors = passthrough_errors self.shutdown_signal = False self.host = host self.port = self.socket.getsockname()[1] # Patch in the original socket. if fd is not None: self.socket.close() self.socket = real_sock self.server_address = self.socket.getsockname() if ssl_context is not None: if isinstance(ssl_context, tuple): ssl_context = load_ssl_context(*ssl_context) if ssl_context == ""adhoc"": ssl_context = generate_adhoc_ssl_context() self.socket = ssl_context.wrap_socket(self.socket, server_side=True) self.ssl_context: t.Optional[""ssl.SSLContext""] = ssl_context else: self.ssl_context = None","def __init__ ( self , host : str , port : int , app : ""WSGIApplication"" , handler : t . Optional [ t . Type [ WSGIRequestHandler ] ] = None , passthrough_errors : bool = False , ssl_context : t . Optional [ _TSSLContextArg ] = None , fd : t . Optional [ int ] = None , ) -> None : if handler is None : handler = WSGIRequestHandler self . address_family = select_address_family ( host , port ) if fd is not None : real_sock = socket . fromfd ( fd , self . address_family , socket . SOCK_STREAM ) port = 0 server_address = get_sockaddr ( host , int ( port ) , self . address_family ) if self . address_family == af_unix : server_address = t . cast ( str , server_address ) if os . path . exists ( server_address ) : os . unlink ( server_address ) super ( ) . __init__ ( server_address , handler ) self . app = app self . passthrough_errors = passthrough_errors self . shutdown_signal = False self . host = host self . port = self . socket . getsockname ( ) [ 1 ] if fd is not None : self . socket . close ( ) self . socket = real_sock self . server_address = self . socket . getsockname ( ) if ssl_context is not None : ssl_context = load_ssl_context ( * ssl_context ) if ssl_context == ""adhoc"" : ssl_context = generate_adhoc_ssl_context ( ) self . socket = ssl_context . wrap_socket ( self . socket , server_side = True ) self . ssl_context : t . Optional [ ""ssl.SSLContext"" ] = ssl_context else : self . 
ssl_context = None","if isinstance ( ssl_context , tuple ) :",505 13343,"def _handle_server_unmutes(self): """"""This is where the logic for role unmutes is taken care of"""""" log.debug(""Checking server unmutes"") for g_id in self._server_mutes: guild = self.bot.get_guild(g_id) if guild is None or await self.bot.cog_disabled_in_guild(self, guild): continue await i18n.set_contextual_locales_from_guild(self.bot, guild) for u_id in self._server_mutes[guild.id]: if self._server_mutes[guild.id][u_id][""until""] is None: continue time_to_unmute = ( self._server_mutes[guild.id][u_id][""until""] - datetime.now(timezone.utc).timestamp() ) if time_to_unmute < 60.0: task_name = f""server-unmute-{g_id}-{u_id}"" if task_name in self._unmute_tasks: continue log.debug(f""Creating task: {task_name}"") self._unmute_tasks[task_name] = asyncio.create_task( self._auto_unmute_user(guild, self._server_mutes[guild.id][u_id]) )","def _handle_server_unmutes ( self ) : """"""This is where the logic for role unmutes is taken care of"""""" log . debug ( ""Checking server unmutes"" ) for g_id in self . _server_mutes : guild = self . bot . get_guild ( g_id ) if guild is None or await self . bot . cog_disabled_in_guild ( self , guild ) : continue await i18n . set_contextual_locales_from_guild ( self . bot , guild ) for u_id in self . _server_mutes [ guild . id ] : if self . _server_mutes [ guild . id ] [ u_id ] [ ""until"" ] is None : continue time_to_unmute = ( self . _server_mutes [ guild . id ] [ u_id ] [ ""until"" ] - datetime . now ( timezone . utc ) . timestamp ( ) ) if time_to_unmute < 60.0 : task_name = f""server-unmute-{g_id}-{u_id}"" continue log . debug ( f""Creating task: {task_name}"" ) self . _unmute_tasks [ task_name ] = asyncio . create_task ( self . _auto_unmute_user ( guild , self . _server_mutes [ guild . id ] [ u_id ] ) )",if task_name in self . _unmute_tasks :,389 6211,"def indent(elem, level=0): i = ""\n"" + level * "" "" if len(elem): if not elem.text or not elem.text.strip(): elem.text = i + "" "" if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: indent(elem, level + 1) if not elem.tail or not elem.tail.strip(): elem.tail = i else: if level and (not elem.tail or not elem.tail.strip()): elem.tail = i","def indent ( elem , level = 0 ) : i = ""\n"" + level * "" "" if len ( elem ) : if not elem . text or not elem . text . strip ( ) : elem . text = i + "" "" if not elem . tail or not elem . tail . strip ( ) : elem . tail = i for elem in elem : indent ( elem , level + 1 ) if not elem . tail or not elem . tail . strip ( ) : elem . tail = i else : elem . tail = i",if level and ( not elem . tail or not elem . tail . strip ( ) ) :,161 15674,"def pg_launcher(pre_created_pgs, num_pgs_to_create): pgs = [] pgs += pre_created_pgs for i in range(num_pgs_to_create): pgs.append(placement_group(bundles, strategy=""STRICT_SPREAD"", name=str(i))) pgs_removed = [] pgs_unremoved = [] # Randomly choose placement groups to remove. for pg in pgs: if random() < 0.5: pgs_removed.append(pg) else: pgs_unremoved.append(pg) tasks = [] max_actor_cnt = 5 actor_cnt = 0 actors = [] # Randomly schedule tasks or actors on placement groups that # are not removed. for pg in pgs_unremoved: # TODO(sang): Comment in this line causes GCS actor management # failure. We need to fix it. if random() < 0.5: tasks.append(mock_task.options(placement_group=pg).remote()) else: if actor_cnt < max_actor_cnt: actors.append(MockActor.options(placement_group=pg).remote()) actor_cnt += 1 # Remove the rest of placement groups. 
for pg in pgs_removed: remove_placement_group(pg) ray.get([pg.ready() for pg in pgs_unremoved]) ray.get(tasks) ray.get([actor.ping.remote() for actor in actors]) # Since placement groups are scheduled, remove them. for pg in pgs_unremoved: remove_placement_group(pg)","def pg_launcher ( pre_created_pgs , num_pgs_to_create ) : pgs = [ ] pgs += pre_created_pgs for i in range ( num_pgs_to_create ) : pgs . append ( placement_group ( bundles , strategy = ""STRICT_SPREAD"" , name = str ( i ) ) ) pgs_removed = [ ] pgs_unremoved = [ ] for pg in pgs : if random ( ) < 0.5 : pgs_removed . append ( pg ) else : pgs_unremoved . append ( pg ) tasks = [ ] max_actor_cnt = 5 actor_cnt = 0 actors = [ ] for pg in pgs_unremoved : if random ( ) < 0.5 : tasks . append ( mock_task . options ( placement_group = pg ) . remote ( ) ) else : actors . append ( MockActor . options ( placement_group = pg ) . remote ( ) ) actor_cnt += 1 for pg in pgs_removed : remove_placement_group ( pg ) ray . get ( [ pg . ready ( ) for pg in pgs_unremoved ] ) ray . get ( tasks ) ray . get ( [ actor . ping . remote ( ) for actor in actors ] ) for pg in pgs_unremoved : remove_placement_group ( pg )",if actor_cnt < max_actor_cnt :,443 15997,"def _find_names(self, lr_schedulers) -> List[str]: # Create uniqe names in the case we have multiple of the same learning # rate schduler + multiple parameter groups names = [] for scheduler in lr_schedulers: sch = scheduler[""scheduler""] if scheduler[""name""] is not None: name = scheduler[""name""] else: opt_name = ""lr-"" + sch.optimizer.__class__.__name__ i, name = 1, opt_name # Multiple schduler of the same type while True: if name not in names: break i, name = i + 1, f""{opt_name}-{i}"" # Multiple param groups for the same schduler param_groups = sch.optimizer.param_groups if len(param_groups) != 1: for i, pg in enumerate(param_groups): temp = f""{name}/pg{i + 1}"" names.append(temp) else: names.append(name) self.lr_sch_names.append(name) return names","def _find_names ( self , lr_schedulers ) -> List [ str ] : names = [ ] for scheduler in lr_schedulers : sch = scheduler [ ""scheduler"" ] name = scheduler [ ""name"" ] else : opt_name = ""lr-"" + sch . optimizer . __class__ . __name__ i , name = 1 , opt_name while True : if name not in names : break i , name = i + 1 , f""{opt_name}-{i}"" param_groups = sch . optimizer . param_groups if len ( param_groups ) != 1 : for i , pg in enumerate ( param_groups ) : temp = f""{name}/pg{i + 1}"" names . append ( temp ) else : names . append ( name ) self . lr_sch_names . append ( name ) return names","if scheduler [ ""name"" ] is not None :",305 21304,"def _adjust_to_data(self, trace, data_trace): subsampled_idxs = dict() for name, site in trace.iter_stochastic_nodes(): # Adjust subsample sites if site_is_subsample(site): site[""fn""] = data_trace.nodes[name][""fn""] site[""value""] = data_trace.nodes[name][""value""] # Adjust sites under conditionally independent stacks orig_cis_stack = site[""cond_indep_stack""] site[""cond_indep_stack""] = data_trace.nodes[name][""cond_indep_stack""] assert len(orig_cis_stack) == len(site[""cond_indep_stack""]) site[""fn""] = data_trace.nodes[name][""fn""] for ocis, cis in zip(orig_cis_stack, site[""cond_indep_stack""]): # Select random sub-indices to replay values under conditionally independent stacks. # Otherwise, we assume there is an dependence of indexes between training data # and prediction data. 
assert ocis.name == cis.name assert not site_is_subsample(site) batch_dim = cis.dim - site[""fn""].event_dim subsampled_idxs[cis.name] = subsampled_idxs.get( cis.name, torch.randint(0, ocis.size, (cis.size,), device=site[""value""].device), ) site[""value""] = site[""value""].index_select( batch_dim, subsampled_idxs[cis.name] )","def _adjust_to_data ( self , trace , data_trace ) : subsampled_idxs = dict ( ) for name , site in trace . iter_stochastic_nodes ( ) : site [ ""fn"" ] = data_trace . nodes [ name ] [ ""fn"" ] site [ ""value"" ] = data_trace . nodes [ name ] [ ""value"" ] orig_cis_stack = site [ ""cond_indep_stack"" ] site [ ""cond_indep_stack"" ] = data_trace . nodes [ name ] [ ""cond_indep_stack"" ] assert len ( orig_cis_stack ) == len ( site [ ""cond_indep_stack"" ] ) site [ ""fn"" ] = data_trace . nodes [ name ] [ ""fn"" ] for ocis , cis in zip ( orig_cis_stack , site [ ""cond_indep_stack"" ] ) : assert ocis . name == cis . name assert not site_is_subsample ( site ) batch_dim = cis . dim - site [ ""fn"" ] . event_dim subsampled_idxs [ cis . name ] = subsampled_idxs . get ( cis . name , torch . randint ( 0 , ocis . size , ( cis . size , ) , device = site [ ""value"" ] . device ) , ) site [ ""value"" ] = site [ ""value"" ] . index_select ( batch_dim , subsampled_idxs [ cis . name ] )",if site_is_subsample ( site ) :,425 9260,"def deserialize(self, data): parts = data.pop(""parts"") self.parts = {} self.__dict__.update(data) if parts: for part_id, part in six.iteritems(parts): self.parts[part_id] = {} for language, sub_data in six.iteritems(part): self.parts[part_id][language] = {} for sub_key, subtitle_data in six.iteritems(sub_data): if sub_key == ""current"": if not isinstance(subtitle_data, tuple): subtitle_data = tuple(subtitle_data.split(""__"")) self.parts[part_id][language][""current""] = subtitle_data elif sub_key == ""blacklist"": bl = dict( (tuple([str(a) for a in k.split(""__"")]), v) for k, v in six.iteritems(subtitle_data) ) self.parts[part_id][language][""blacklist""] = bl else: sub = JSONStoredSubtitle() sub.initialize(**subtitle_data) if not isinstance(sub_key, tuple): sub_key = tuple(sub_key.split(""__"")) self.parts[part_id][language][sub_key] = sub ","def deserialize ( self , data ) : parts = data . pop ( ""parts"" ) self . parts = { } self . __dict__ . update ( data ) if parts : for part_id , part in six . iteritems ( parts ) : self . parts [ part_id ] = { } for language , sub_data in six . iteritems ( part ) : self . parts [ part_id ] [ language ] = { } for sub_key , subtitle_data in six . iteritems ( sub_data ) : if sub_key == ""current"" : subtitle_data = tuple ( subtitle_data . split ( ""__"" ) ) self . parts [ part_id ] [ language ] [ ""current"" ] = subtitle_data elif sub_key == ""blacklist"" : bl = dict ( ( tuple ( [ str ( a ) for a in k . split ( ""__"" ) ] ) , v ) for k , v in six . iteritems ( subtitle_data ) ) self . parts [ part_id ] [ language ] [ ""blacklist"" ] = bl else : sub = JSONStoredSubtitle ( ) sub . initialize ( ** subtitle_data ) if not isinstance ( sub_key , tuple ) : sub_key = tuple ( sub_key . split ( ""__"" ) ) self . parts [ part_id ] [ language ] [ sub_key ] = sub","if not isinstance ( subtitle_data , tuple ) :",400 14432,"def get_php_interpreter_path(ver): config = get_config() candidates = [ join(config.phpsBaseDir, ver + ""*""), join(config.phpsBaseDir, ""php-%s*"" % ver), ] for pattern in candidates: base_dirs = glob(pattern) if base_dirs: base_dir = base_dirs[0] break else: import subprocess # Look at PATH. 
exe_paths = findPathsForInterpreters([""php""], ""PHP"") for exe in exe_paths: try: p = subprocess.Popen( [exe, ""-r"", ""echo phpversion();""], stdout=subprocess.PIPE ) stdout, _ = p.communicate() if stdout.strip().startswith(ver): return exe except IOError: pass raise TestSkipped( ""could not find PHP %s for testing: '%s' don't "" ""exist"" % (ver, ""', '"".join(candidates)) ) if sys.platform == ""win32"": candidates = [ join(base_dir, ""php.exe""), join(base_dir, ""Release_TS"", ""php.exe""), ] for candidate in candidates: if exists(candidate): return candidate else: raise TestSkipped( ""could not find PHP %s for testing: '%s' "" ""don't exist"" % (ver, ""', '"".join(candidates)) ) else: return join(base_dir, ""bin"", ""php"")","def get_php_interpreter_path ( ver ) : config = get_config ( ) candidates = [ join ( config . phpsBaseDir , ver + ""*"" ) , join ( config . phpsBaseDir , ""php-%s*"" % ver ) , ] for pattern in candidates : base_dirs = glob ( pattern ) if base_dirs : base_dir = base_dirs [ 0 ] break else : import subprocess exe_paths = findPathsForInterpreters ( [ ""php"" ] , ""PHP"" ) for exe in exe_paths : try : p = subprocess . Popen ( [ exe , ""-r"" , ""echo phpversion();"" ] , stdout = subprocess . PIPE ) stdout , _ = p . communicate ( ) if stdout . strip ( ) . startswith ( ver ) : return exe except IOError : pass raise TestSkipped ( ""could not find PHP %s for testing: '%s' don't "" ""exist"" % ( ver , ""', '"" . join ( candidates ) ) ) if sys . platform == ""win32"" : candidates = [ join ( base_dir , ""php.exe"" ) , join ( base_dir , ""Release_TS"" , ""php.exe"" ) , ] for candidate in candidates : return candidate else : raise TestSkipped ( ""could not find PHP %s for testing: '%s' "" ""don't exist"" % ( ver , ""', '"" . join ( candidates ) ) ) else : return join ( base_dir , ""bin"" , ""php"" )",if exists ( candidate ) :,460 5721,"def __getitem__(self, name, set=set, getattr=getattr, id=id): visited = set() mydict = self.basedict while 1: value = mydict[name] if value is not None: return value myid = id(mydict) assert myid not in visited visited.add(myid) mydict = mydict.Parent if mydict is None: return","def __getitem__ ( self , name , set = set , getattr = getattr , id = id ) : visited = set ( ) mydict = self . basedict while 1 : value = mydict [ name ] if value is not None : return value myid = id ( mydict ) assert myid not in visited visited . add ( myid ) mydict = mydict . Parent return",if mydict is None :,120 10781,"def selectionToChunks(self, remove=False, add=False): box = self.selectionBox() if box: if box == self.level.bounds: self.selectedChunks = set(self.level.allChunks) return selectedChunks = self.selectedChunks boxedChunks = set(box.chunkPositions) if boxedChunks.issubset(selectedChunks): remove = True if remove and not add: selectedChunks.difference_update(boxedChunks) else: selectedChunks.update(boxedChunks) self.selectionTool.selectNone() ","def selectionToChunks ( self , remove = False , add = False ) : box = self . selectionBox ( ) if box : if box == self . level . bounds : self . selectedChunks = set ( self . level . allChunks ) return selectedChunks = self . selectedChunks boxedChunks = set ( box . chunkPositions ) if boxedChunks . issubset ( selectedChunks ) : remove = True selectedChunks . difference_update ( boxedChunks ) else : selectedChunks . update ( boxedChunks ) self . selectionTool . 
selectNone ( )",if remove and not add :,158 15144,"def change_opacity_function(self, new_f): self.opacity_function = new_f dr = self.radius / self.num_levels sectors = [] for submob in self.submobjects: if type(submob) == AnnularSector: sectors.append(submob) for (r, submob) in zip(np.arange(0, self.radius, dr), sectors): if type(submob) != AnnularSector: # it's the shadow, don't dim it continue alpha = self.opacity_function(r) submob.set_fill(opacity=alpha)","def change_opacity_function ( self , new_f ) : self . opacity_function = new_f dr = self . radius / self . num_levels sectors = [ ] for submob in self . submobjects : sectors . append ( submob ) for ( r , submob ) in zip ( np . arange ( 0 , self . radius , dr ) , sectors ) : if type ( submob ) != AnnularSector : continue alpha = self . opacity_function ( r ) submob . set_fill ( opacity = alpha )",if type ( submob ) == AnnularSector :,180 17399,"def addOutput(self, data, isAsync=None, **kwargs): isAsync = _get_async_param(isAsync, **kwargs) if isAsync: self.terminal.eraseLine() self.terminal.cursorBackward(len(self.lineBuffer) + len(self.ps[self.pn])) self.terminal.write(data) if isAsync: if self._needsNewline(): self.terminal.nextLine() self.terminal.write(self.ps[self.pn]) if self.lineBuffer: oldBuffer = self.lineBuffer self.lineBuffer = [] self.lineBufferIndex = 0 self._deliverBuffer(oldBuffer)","def addOutput ( self , data , isAsync = None , ** kwargs ) : isAsync = _get_async_param ( isAsync , ** kwargs ) if isAsync : self . terminal . eraseLine ( ) self . terminal . cursorBackward ( len ( self . lineBuffer ) + len ( self . ps [ self . pn ] ) ) self . terminal . write ( data ) if isAsync : if self . _needsNewline ( ) : self . terminal . nextLine ( ) self . terminal . write ( self . ps [ self . pn ] ) oldBuffer = self . lineBuffer self . lineBuffer = [ ] self . lineBufferIndex = 0 self . _deliverBuffer ( oldBuffer )",if self . lineBuffer :,188 21651,"def testSimple(self, useCluster=False): """"""Run with one really bad swarm to see if terminator picks it up correctly"""""" if not g_myEnv.options.runInProc: self.skipTest(""Skipping One Node test since runInProc is not specified"") self._printTestHeader() expDir = os.path.join(g_myEnv.testSrcExpDir, ""swarm_v2"") (jobID, jobInfo, resultInfos, metricResults, minErrScore) = self.runPermutations( expDir, hsImp=""v2"", loggingLevel=g_myEnv.options.logLevel, maxModels=None, onCluster=useCluster, env=self.env, dummyModel={""iterations"": 200}, ) cjDB = ClientJobsDAO.get() jobResultsStr = cjDB.jobGetFields(jobID, [""results""])[0] jobResults = json.loads(jobResultsStr) terminatedSwarms = jobResults[""terminatedSwarms""] swarmMaturityWindow = int( configuration.Configuration.get(""nupic.hypersearch.swarmMaturityWindow"") ) prefix = ""modelParams|sensorParams|encoders|"" for swarm, (generation, scores) in terminatedSwarms.iteritems(): if prefix + ""gym"" in swarm.split("".""): self.assertEqual(generation, swarmMaturityWindow - 1) else: self.assertEqual(generation, swarmMaturityWindow - 1 + 4)","def testSimple ( self , useCluster = False ) : """"""Run with one really bad swarm to see if terminator picks it up correctly"""""" if not g_myEnv . options . runInProc : self . skipTest ( ""Skipping One Node test since runInProc is not specified"" ) self . _printTestHeader ( ) expDir = os . path . join ( g_myEnv . testSrcExpDir , ""swarm_v2"" ) ( jobID , jobInfo , resultInfos , metricResults , minErrScore ) = self . runPermutations ( expDir , hsImp = ""v2"" , loggingLevel = g_myEnv . options . 
logLevel , maxModels = None , onCluster = useCluster , env = self . env , dummyModel = { ""iterations"" : 200 } , ) cjDB = ClientJobsDAO . get ( ) jobResultsStr = cjDB . jobGetFields ( jobID , [ ""results"" ] ) [ 0 ] jobResults = json . loads ( jobResultsStr ) terminatedSwarms = jobResults [ ""terminatedSwarms"" ] swarmMaturityWindow = int ( configuration . Configuration . get ( ""nupic.hypersearch.swarmMaturityWindow"" ) ) prefix = ""modelParams|sensorParams|encoders|"" for swarm , ( generation , scores ) in terminatedSwarms . iteritems ( ) : self . assertEqual ( generation , swarmMaturityWindow - 1 ) else : self . assertEqual ( generation , swarmMaturityWindow - 1 + 4 )","if prefix + ""gym"" in swarm . split ( ""."" ) :",386 18389,"def fit(self, dataset, force_retrain): if force_retrain: self.sub_unit_1[""fitted""] = True self.sub_unit_1[""calls""] += 1 self.sub_unit_2[""fitted""] = True self.sub_unit_2[""calls""] += 1 else: if not self.sub_unit_1[""fitted""]: self.sub_unit_1[""fitted""] = True self.sub_unit_1[""calls""] += 1 if not self.sub_unit_2[""fitted""]: self.sub_unit_2[""fitted""] = True self.sub_unit_2[""calls""] += 1 return self","def fit ( self , dataset , force_retrain ) : if force_retrain : self . sub_unit_1 [ ""fitted"" ] = True self . sub_unit_1 [ ""calls"" ] += 1 self . sub_unit_2 [ ""fitted"" ] = True self . sub_unit_2 [ ""calls"" ] += 1 else : self . sub_unit_1 [ ""fitted"" ] = True self . sub_unit_1 [ ""calls"" ] += 1 if not self . sub_unit_2 [ ""fitted"" ] : self . sub_unit_2 [ ""fitted"" ] = True self . sub_unit_2 [ ""calls"" ] += 1 return self","if not self . sub_unit_1 [ ""fitted"" ] :",183 14030,"def event_cb(self, widget, event): if event.type == Gdk.EventType.EXPOSE: return False msg = self.event2str(widget, event) motion_reports_limit = 5 if event.type == Gdk.EventType.MOTION_NOTIFY: if widget is self.app.doc.tdw: # statistics dt = event.time - self.last_motion_time self.motion_event_counter += 1 self.motion_dtime_sample.append(dt) self.motion_dtime_sample = self.motion_dtime_sample[-10:] self.last_motion_time = event.time # report suppression if len(self.motion_reports) < motion_reports_limit: self.report(msg) # report first few motion event immediately self.motion_reports.append(msg) else: unreported = self.motion_reports[motion_reports_limit:] if unreported: last_report = unreported.pop() if unreported: self.report( ""... MOTION_NOTIFY %d events suppressed"" % len(unreported) ) self.report(last_report) self.motion_reports = [] self.report(msg) return False","def event_cb ( self , widget , event ) : if event . type == Gdk . EventType . EXPOSE : return False msg = self . event2str ( widget , event ) motion_reports_limit = 5 if event . type == Gdk . EventType . MOTION_NOTIFY : dt = event . time - self . last_motion_time self . motion_event_counter += 1 self . motion_dtime_sample . append ( dt ) self . motion_dtime_sample = self . motion_dtime_sample [ - 10 : ] self . last_motion_time = event . time if len ( self . motion_reports ) < motion_reports_limit : self . report ( msg ) self . motion_reports . append ( msg ) else : unreported = self . motion_reports [ motion_reports_limit : ] if unreported : last_report = unreported . pop ( ) if unreported : self . report ( ""... MOTION_NOTIFY %d events suppressed"" % len ( unreported ) ) self . report ( last_report ) self . motion_reports = [ ] self . report ( msg ) return False",if widget is self . app . doc . 
tdw :,364 25596,"def _terminal_messenger(tp=""write"", msg="""", out=sys.stdout): try: if tp == ""write"": out.write(msg) elif tp == ""flush"": out.flush() elif tp == ""write_flush"": out.write(msg) out.flush() elif tp == ""print"": print(msg, file=out) else: raise ValueError(""Unsupported type: "" + tp) except IOError as e: logger.critical(""{}: {}"".format(type(e).__name__, ucd(e))) pass ","def _terminal_messenger ( tp = ""write"" , msg = """" , out = sys . stdout ) : try : if tp == ""write"" : out . write ( msg ) elif tp == ""flush"" : out . flush ( ) out . write ( msg ) out . flush ( ) elif tp == ""print"" : print ( msg , file = out ) else : raise ValueError ( ""Unsupported type: "" + tp ) except IOError as e : logger . critical ( ""{}: {}"" . format ( type ( e ) . __name__ , ucd ( e ) ) ) pass","elif tp == ""write_flush"" :",160 9241,"def test_file_output(): """"""Test output to arbitrary file-like objects"""""" with closing(StringIO()) as our_file: for i in tqdm(_range(3), file=our_file): if i == 1: our_file.seek(0) assert ""0/3"" in our_file.read()","def test_file_output ( ) : """"""Test output to arbitrary file-like objects"""""" with closing ( StringIO ( ) ) as our_file : for i in tqdm ( _range ( 3 ) , file = our_file ) : our_file . seek ( 0 ) assert ""0/3"" in our_file . read ( )",if i == 1 :,90 13968,"def _transmit_from_storage(self) -> None: for blob in self.storage.gets(): # give a few more seconds for blob lease operation # to reduce the chance of race (for perf consideration) if blob.lease(self._timeout + 5): envelopes = [TelemetryItem(**x) for x in blob.get()] result = self._transmit(list(envelopes)) if result == ExportResult.FAILED_RETRYABLE: blob.lease(1) else: blob.delete()",def _transmit_from_storage ( self ) -> None : for blob in self . storage . gets ( ) : envelopes = [ TelemetryItem ( ** x ) for x in blob . get ( ) ] result = self . _transmit ( list ( envelopes ) ) if result == ExportResult . FAILED_RETRYABLE : blob . lease ( 1 ) else : blob . delete ( ),if blob . lease ( self . _timeout + 5 ) :,147 2348,"def __plugContextMenuSignal(graphEditor, plug, menuDefinition): # See if we've got a CompoundNodule. Early out if not. nodeGadget = graphEditor.graphGadget().nodeGadget(plug.node()) if not nodeGadget: return nodule = nodeGadget.nodule(plug) if not isinstance(nodule, GafferUI.CompoundNumericNodule): plug = plug.parent() if isinstance(plug, Gaffer.Plug): nodule = nodeGadget.nodule(plug) if not isinstance(nodule, GafferUI.CompoundNumericNodule): return # Add menu items for showing/hiding the children. childNames = """".join(c.getName() for c in plug).upper() if len(nodule) > 0: menuDefinition.append( ""/Collapse {} Components"".format(childNames), { ""command"": functools.partial(__applyChildVisibility, plug, False), ""active"": not Gaffer.MetadataAlgo.readOnly(plug), }, ) else: menuDefinition.append( ""/Expand {} Components"".format(childNames), { ""command"": functools.partial(__applyChildVisibility, plug, True), ""active"": not Gaffer.MetadataAlgo.readOnly(plug), }, )","def __plugContextMenuSignal ( graphEditor , plug , menuDefinition ) : nodeGadget = graphEditor . graphGadget ( ) . nodeGadget ( plug . node ( ) ) if not nodeGadget : return nodule = nodeGadget . nodule ( plug ) plug = plug . parent ( ) if isinstance ( plug , Gaffer . Plug ) : nodule = nodeGadget . nodule ( plug ) return childNames = """" . join ( c . getName ( ) for c in plug ) . upper ( ) if len ( nodule ) > 0 : menuDefinition . append ( ""/Collapse {} Components"" . 
format ( childNames ) , { ""command"" : functools . partial ( __applyChildVisibility , plug , False ) , ""active"" : not Gaffer . MetadataAlgo . readOnly ( plug ) , } , ) else : menuDefinition . append ( ""/Expand {} Components"" . format ( childNames ) , { ""command"" : functools . partial ( __applyChildVisibility , plug , True ) , ""active"" : not Gaffer . MetadataAlgo . readOnly ( plug ) , } , )",if not isinstance ( nodule , GafferUI . CompoundNumericNodule ) :,382
20432,"def main(): parser = argparse.ArgumentParser(description=""Dispatcher command line parser"") parser.add_argument(""--exp_params"", type=str, required=True) args, _ = parser.parse_known_args() exp_params_decode = base64.b64decode(args.exp_params).decode(""utf-8"") logger.debug(""decoded exp_params: [%s]"", exp_params_decode) exp_params = json.loads(exp_params_decode) logger.debug(""exp_params json obj: [%s]"", json.dumps(exp_params, indent=4)) if exp_params.get(""multiThread""): enable_multi_thread() if exp_params.get(""multiPhase""): enable_multi_phase() if exp_params.get(""advisor"") is not None: # advisor is enabled and starts to run _run_advisor(exp_params) else: # tuner (and assessor) is enabled and starts to run assert exp_params.get(""tuner"") is not None tuner = _create_tuner(exp_params) if exp_params.get(""assessor"") is not None: assessor = _create_assessor(exp_params) else: assessor = None dispatcher = MsgDispatcher(tuner, assessor) try: dispatcher.run() tuner._on_exit() if assessor is not None: assessor._on_exit() except Exception as exception: logger.exception(exception) tuner._on_error() if assessor is not None: assessor._on_error() raise","def main ( ) : parser = argparse . ArgumentParser ( description = ""Dispatcher command line parser"" ) parser . add_argument ( ""--exp_params"" , type = str , required = True ) args , _ = parser . parse_known_args ( ) exp_params_decode = base64 . b64decode ( args . exp_params ) . decode ( ""utf-8"" ) logger . debug ( ""decoded exp_params: [%s]"" , exp_params_decode ) exp_params = json . loads ( exp_params_decode ) logger . debug ( ""exp_params json obj: [%s]"" , json . dumps ( exp_params , indent = 4 ) ) if exp_params . get ( ""multiThread"" ) : enable_multi_thread ( ) if exp_params . get ( ""multiPhase"" ) : enable_multi_phase ( ) if exp_params . get ( ""advisor"" ) is not None : _run_advisor ( exp_params ) else : assert exp_params . get ( ""tuner"" ) is not None tuner = _create_tuner ( exp_params ) if exp_params . get ( ""assessor"" ) is not None : assessor = _create_assessor ( exp_params ) else : assessor = None dispatcher = MsgDispatcher ( tuner , assessor ) try : dispatcher . run ( ) tuner . _on_exit ( ) assessor . _on_exit ( ) except Exception as exception : logger . exception ( exception ) tuner . _on_error ( ) assessor . _on_error ( ) raise",if assessor is not None :,465
21096,"def main(args): alphabet = args.alphabet subsize = args.length if args.lookup: pat = args.lookup try: pat = int(pat, 0) except ValueError: pass pat = flat(pat, bytes=args.length) if len(pat) != subsize: log.critical(""Subpattern must be %d bytes"" % subsize) sys.exit(1) if not all(c in alphabet for c in pat): log.critical(""Pattern contains characters not present in the alphabet"") sys.exit(1) offset = cyclic_find(pat, alphabet, subsize) if offset == -1: log.critical(""Given pattern does not exist in cyclic pattern"") sys.exit(1) else: print(offset) else: want = args.count result = cyclic(want, alphabet, subsize) got = len(result) if want is not None and got < want: log.failure(""Alphabet too small (max length = %i)"" % got) out = getattr(sys.stdout, ""buffer"", sys.stdout) out.write(result) if out.isatty(): out.write(b""\n"")","def main ( args ) : alphabet = args . alphabet subsize = args . length if args . lookup : pat = args . lookup try : pat = int ( pat , 0 ) except ValueError : pass pat = flat ( pat , bytes = args . length ) log . critical ( ""Subpattern must be %d bytes"" % subsize ) sys . exit ( 1 ) if not all ( c in alphabet for c in pat ) : log . critical ( ""Pattern contains characters not present in the alphabet"" ) sys . exit ( 1 ) offset = cyclic_find ( pat , alphabet , subsize ) if offset == - 1 : log . critical ( ""Given pattern does not exist in cyclic pattern"" ) sys . exit ( 1 ) else : print ( offset ) else : want = args . count result = cyclic ( want , alphabet , subsize ) got = len ( result ) if want is not None and got < want : log . failure ( ""Alphabet too small (max length = %i)"" % got ) out = getattr ( sys . stdout , ""buffer"" , sys . stdout ) out . write ( result ) if out . isatty ( ) : out . write ( b""\n"" )",if len ( pat ) != subsize :,346
12676,"def post_create(self, user, billing=None): from weblate.trans.models import Change if billing: billing.projects.add(self) if billing.plan.change_access_control: self.access_control = Project.ACCESS_PRIVATE else: self.access_control = Project.ACCESS_PUBLIC self.save() if not user.is_superuser: self.add_user(user, ""@Administration"") Change.objects.create( action=Change.ACTION_CREATE_PROJECT, project=self, user=user, author=user ) ","def post_create ( self , user , billing = None ) : from weblate . trans . models import Change if billing : billing . projects . add ( self ) self . access_control = Project . ACCESS_PRIVATE else : self . access_control = Project . ACCESS_PUBLIC self . save ( ) if not user . is_superuser : self . add_user ( user , ""@Administration"" ) Change . objects . create ( action = Change . ACTION_CREATE_PROJECT , project = self , user = user , author = user )",if billing . plan .
change_access_control :,158 5714,"def _determine_tool_runners(self, config, profile): if config.tools is None: # we had no command line settings for an explicit list of # tools, so we use the defaults to_run = set(DEFAULT_TOOLS) # we can also use any that the profiles dictate for tool in tools.TOOLS.keys(): if profile.is_tool_enabled(tool): to_run.add(tool) else: to_run = set(config.tools) # profiles have no say in the list of tools run when # a command line is specified for tool in config.with_tools: to_run.add(tool) for tool in config.without_tools: if tool in to_run: to_run.remove(tool) if ( config.tools is None and len(config.with_tools) == 0 and len(config.without_tools) == 0 ): for tool in tools.TOOLS.keys(): enabled = profile.is_tool_enabled(tool) if enabled is None: enabled = tool in DEFAULT_TOOLS if tool in to_run and not enabled: to_run.remove(tool) return sorted(list(to_run))","def _determine_tool_runners ( self , config , profile ) : if config . tools is None : to_run = set ( DEFAULT_TOOLS ) for tool in tools . TOOLS . keys ( ) : if profile . is_tool_enabled ( tool ) : to_run . add ( tool ) else : to_run = set ( config . tools ) for tool in config . with_tools : to_run . add ( tool ) for tool in config . without_tools : if tool in to_run : to_run . remove ( tool ) if ( config . tools is None and len ( config . with_tools ) == 0 and len ( config . without_tools ) == 0 ) : for tool in tools . TOOLS . keys ( ) : enabled = profile . is_tool_enabled ( tool ) if enabled is None : enabled = tool in DEFAULT_TOOLS to_run . remove ( tool ) return sorted ( list ( to_run ) )",if tool in to_run and not enabled :,347 3946,"def sample_admin_user(): """"""List of iris messages"""""" with iris_ctl.db_from_config(sample_db_config) as (conn, cursor): cursor.execute( ""SELECT `name` FROM `target` JOIN `user` on `target`.`id` = `user`.`target_id` WHERE `user`.`admin` = TRUE LIMIT 1"" ) result = cursor.fetchone() if result: return result[0]","def sample_admin_user ( ) : """"""List of iris messages"""""" with iris_ctl . db_from_config ( sample_db_config ) as ( conn , cursor ) : cursor . execute ( ""SELECT `name` FROM `target` JOIN `user` on `target`.`id` = `user`.`target_id` WHERE `user`.`admin` = TRUE LIMIT 1"" ) result = cursor . fetchone ( ) return result [ 0 ]",if result :,123 3510,"def read(self, iprot): if ( iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None ): fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.type = iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()","def read ( self , iprot ) : if ( iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None ) : fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) return iprot . readStructBegin ( ) while True : ( fname , ftype , fid ) = iprot . readFieldBegin ( ) if ftype == TType . STOP : break if fid == 1 : self . type = iprot . readString ( ) else : iprot . skip ( ftype ) else : iprot . skip ( ftype ) iprot . readFieldEnd ( ) iprot . readStructEnd ( )",if ftype == TType . 
STRING :,260 510,"def groups(self, trans, **kwargs): if ""operation"" in kwargs: operation = kwargs[""operation""].lower().replace(""+"", "" "") if operation == ""groups"": return self.group(trans, **kwargs) if operation == ""create"": return self.create_group(trans, **kwargs) if operation == ""delete"": return self.mark_group_deleted(trans, **kwargs) if operation == ""undelete"": return self.undelete_group(trans, **kwargs) if operation == ""purge"": return self.purge_group(trans, **kwargs) if operation == ""manage users and roles"": return self.manage_users_and_roles_for_group(trans, **kwargs) if operation == ""rename"": return self.rename_group(trans, **kwargs) # Render the list view return self.group_list_grid(trans, **kwargs) ","def groups ( self , trans , ** kwargs ) : if ""operation"" in kwargs : operation = kwargs [ ""operation"" ] . lower ( ) . replace ( ""+"" , "" "" ) if operation == ""groups"" : return self . group ( trans , ** kwargs ) if operation == ""create"" : return self . create_group ( trans , ** kwargs ) if operation == ""delete"" : return self . mark_group_deleted ( trans , ** kwargs ) if operation == ""undelete"" : return self . undelete_group ( trans , ** kwargs ) if operation == ""purge"" : return self . purge_group ( trans , ** kwargs ) return self . manage_users_and_roles_for_group ( trans , ** kwargs ) if operation == ""rename"" : return self . rename_group ( trans , ** kwargs ) return self . group_list_grid ( trans , ** kwargs )","if operation == ""manage users and roles"" :",247 11729,"def compare_hash(hash_of_gold, path_to_file): with open(path_to_file, ""rb"") as f: hash_of_file = hashlib.sha256(f.read()).hexdigest() if hash_of_file != hash_of_gold: print( ""########## Hash sum of"", path_to_file, ""differs from the target, the topology will be deleted !!! ##########"", ) shutil.rmtree(os.path.dirname(path_to_file))","def compare_hash ( hash_of_gold , path_to_file ) : with open ( path_to_file , ""rb"" ) as f : hash_of_file = hashlib . sha256 ( f . read ( ) ) . hexdigest ( ) print ( ""########## Hash sum of"" , path_to_file , ""differs from the target, the topology will be deleted !!! ##########"" , ) shutil . rmtree ( os . path . dirname ( path_to_file ) )",if hash_of_file != hash_of_gold :,147 15656,"def _get_node(self, node_id): self.non_terminated_nodes({}) # Side effect: updates cache with self.lock: if node_id in self.cached_nodes: return self.cached_nodes[node_id] instance = ( self.compute.instances() .get( project=self.provider_config[""project_id""], zone=self.provider_config[""availability_zone""], instance=node_id, ) .execute() ) return instance ","def _get_node ( self , node_id ) : self . non_terminated_nodes ( { } ) with self . lock : return self . cached_nodes [ node_id ] instance = ( self . compute . instances ( ) . get ( project = self . provider_config [ ""project_id"" ] , zone = self . provider_config [ ""availability_zone"" ] , instance = node_id , ) . execute ( ) ) return instance",if node_id in self . 
cached_nodes :,156 14828,"def _validate_and_define(params, key, value): (key, force_generic) = _validate_key(_unescape(key)) if key in params: raise SyntaxError(f'duplicate key ""{key}""') cls = _class_for_key.get(key, GenericParam) emptiness = cls.emptiness() if value is None: if emptiness == Emptiness.NEVER: raise SyntaxError(""value cannot be empty"") value = cls.from_value(value) else: if force_generic: value = cls.from_wire_parser(dns.wire.Parser(_unescape(value))) else: value = cls.from_value(value) params[key] = value ","def _validate_and_define ( params , key , value ) : ( key , force_generic ) = _validate_key ( _unescape ( key ) ) if key in params : raise SyntaxError ( f'duplicate key ""{key}""' ) cls = _class_for_key . get ( key , GenericParam ) emptiness = cls . emptiness ( ) if value is None : raise SyntaxError ( ""value cannot be empty"" ) value = cls . from_value ( value ) else : if force_generic : value = cls . from_wire_parser ( dns . wire . Parser ( _unescape ( value ) ) ) else : value = cls . from_value ( value ) params [ key ] = value",if emptiness == Emptiness . NEVER :,194 16933,"def get_components_list(component_revisions_dict, job_type): """"""Return a prioritized order of components based on job type."""""" components = sorted(component_revisions_dict.keys()) if utils.is_chromium(): # Components prioritization only applies to non-chromium projects. return components project_name = data_handler.get_project_name(job_type) if not project_name: # No project name found in job environment, return list as-is. return components main_repo = data_handler.get_main_repo(job_type) project_src = ""/src/"" + project_name for component in components.copy(): if component_revisions_dict[component][""url""] == main_repo: # Matches recorded main repo. components.remove(component) components.insert(0, component) break if component == project_src: components.remove(component) components.insert(0, component) break if project_name.lower() in os.path.basename(component).lower(): components.remove(component) components.insert(0, component) # Keep trying in case an exact match is found later. return components","def get_components_list ( component_revisions_dict , job_type ) : """"""Return a prioritized order of components based on job type."""""" components = sorted ( component_revisions_dict . keys ( ) ) if utils . is_chromium ( ) : return components project_name = data_handler . get_project_name ( job_type ) if not project_name : return components main_repo = data_handler . get_main_repo ( job_type ) project_src = ""/src/"" + project_name for component in components . copy ( ) : if component_revisions_dict [ component ] [ ""url"" ] == main_repo : components . remove ( component ) components . insert ( 0 , component ) break components . remove ( component ) components . insert ( 0 , component ) break if project_name . lower ( ) in os . path . basename ( component ) . lower ( ) : components . remove ( component ) components . insert ( 0 , component ) return components",if component == project_src :,327 18282,"def initEnv(self, mandatory=True, detailed=False, web=False, forceInit=False): self._initRunAs() if self.envInitialized and not forceInit: return if web: self.webInit() else: self.checkDbmsOs(detailed) if mandatory and not self.isDba(): warnMsg = ""functionality requested probably does not work because "" warnMsg += ""the curent session user is not a database administrator"" if not conf.dbmsCred and Backend.getIdentifiedDbms() in ( DBMS.MSSQL, DBMS.PGSQL, ): warnMsg += "". 
You can try to use option '--dbms-cred' "" warnMsg += ""to execute statements as a DBA user if you "" warnMsg += ""were able to extract and crack a DBA "" warnMsg += ""password by any mean"" logger.warn(warnMsg) if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): success = self.udfInjectSys() if success is not True: msg = ""unable to mount the operating system takeover"" raise SqlmapFilePathException(msg) elif Backend.isDbms(DBMS.MSSQL): if mandatory: self.xpCmdshellInit() else: errMsg = ""feature not yet implemented for the back-end DBMS"" raise SqlmapUnsupportedFeatureException(errMsg) self.envInitialized = True ","def initEnv ( self , mandatory = True , detailed = False , web = False , forceInit = False ) : self . _initRunAs ( ) if self . envInitialized and not forceInit : return if web : self . webInit ( ) else : self . checkDbmsOs ( detailed ) warnMsg = ""functionality requested probably does not work because "" warnMsg += ""the curent session user is not a database administrator"" if not conf . dbmsCred and Backend . getIdentifiedDbms ( ) in ( DBMS . MSSQL , DBMS . PGSQL , ) : warnMsg += "". You can try to use option '--dbms-cred' "" warnMsg += ""to execute statements as a DBA user if you "" warnMsg += ""were able to extract and crack a DBA "" warnMsg += ""password by any mean"" logger . warn ( warnMsg ) if Backend . getIdentifiedDbms ( ) in ( DBMS . MYSQL , DBMS . PGSQL ) : success = self . udfInjectSys ( ) if success is not True : msg = ""unable to mount the operating system takeover"" raise SqlmapFilePathException ( msg ) elif Backend . isDbms ( DBMS . MSSQL ) : if mandatory : self . xpCmdshellInit ( ) else : errMsg = ""feature not yet implemented for the back-end DBMS"" raise SqlmapUnsupportedFeatureException ( errMsg ) self . envInitialized = True",if mandatory and not self . isDba ( ) :,429 24295,"def getCalculatedPosition(self): if self._lastVLCPositionUpdate is None: return self._client.getGlobalPosition() diff = time.time() - self._lastVLCPositionUpdate if diff > constants.PLAYER_ASK_DELAY and not self._paused: self._client.ui.showDebugMessage( ""VLC did not response in time, so assuming position is {} ({}+{})"".format( self._position + diff, self._position, diff ) ) if diff > constants.VLC_LATENCY_ERROR_THRESHOLD: if not self.shownVLCLatencyError or constants.DEBUG_MODE: self._client.ui.showErrorMessage( getMessage(""media-player-latency-warning"").format(int(diff)) ) self.shownVLCLatencyError = True return self._position + diff else: return self._position ","def getCalculatedPosition ( self ) : if self . _lastVLCPositionUpdate is None : return self . _client . getGlobalPosition ( ) diff = time . time ( ) - self . _lastVLCPositionUpdate if diff > constants . PLAYER_ASK_DELAY and not self . _paused : self . _client . ui . showDebugMessage ( ""VLC did not response in time, so assuming position is {} ({}+{})"" . format ( self . _position + diff , self . _position , diff ) ) if not self . shownVLCLatencyError or constants . DEBUG_MODE : self . _client . ui . showErrorMessage ( getMessage ( ""media-player-latency-warning"" ) . format ( int ( diff ) ) ) self . shownVLCLatencyError = True return self . _position + diff else : return self . _position",if diff > constants . 
VLC_LATENCY_ERROR_THRESHOLD :,252 10660,"def build_query_from_field(self, field_name, operation): if field_name == ""permission"": if operation[""op""] != ""eq"": raise InvalidFilterOperator(value=operation[""op""], valid_operators=[""eq""]) # operation['value'] should be 'admin', 'write', or 'read' query_val = operation[""value""].lower().strip() if query_val not in API_CONTRIBUTOR_PERMISSIONS: raise InvalidFilterValue(value=operation[""value""]) # This endpoint should only be returning *contributors* not group members resource = self.get_resource() if query_val == READ: # If read, return all contributors return Q(user_id__in=resource.contributors.values_list(""id"", flat=True)) elif query_val == WRITE: # If write, return members of write and admin groups, both groups have write perms return Q( user_id__in=( resource.get_group(WRITE).user_set.values_list(""id"", flat=True) | resource.get_group(ADMIN).user_set.values_list(""id"", flat=True) ) ) elif query_val == ADMIN: # If admin, return only members of admin group return Q( user_id__in=resource.get_group(ADMIN).user_set.values_list( ""id"", flat=True ) ) return super(BaseContributorList, self).build_query_from_field( field_name, operation ) ","def build_query_from_field ( self , field_name , operation ) : if field_name == ""permission"" : if operation [ ""op"" ] != ""eq"" : raise InvalidFilterOperator ( value = operation [ ""op"" ] , valid_operators = [ ""eq"" ] ) query_val = operation [ ""value"" ] . lower ( ) . strip ( ) raise InvalidFilterValue ( value = operation [ ""value"" ] ) resource = self . get_resource ( ) if query_val == READ : return Q ( user_id__in = resource . contributors . values_list ( ""id"" , flat = True ) ) elif query_val == WRITE : return Q ( user_id__in = ( resource . get_group ( WRITE ) . user_set . values_list ( ""id"" , flat = True ) | resource . get_group ( ADMIN ) . user_set . values_list ( ""id"" , flat = True ) ) ) elif query_val == ADMIN : return Q ( user_id__in = resource . get_group ( ADMIN ) . user_set . values_list ( ""id"" , flat = True ) ) return super ( BaseContributorList , self ) . build_query_from_field ( field_name , operation )",if query_val not in API_CONTRIBUTOR_PERMISSIONS :,441 1339,"def login(self): error = None Form = self.get_login_form() if request.method == ""POST"": form = Form(request.form) next_url = request.form.get(""next"") or self.default_next_url if form.validate(): authenticated_user = self.authenticate( form.username.data, form.password.data, ) if authenticated_user: self.login_user(authenticated_user) return redirect(next_url) else: flash(""Incorrect username or password"") else: form = Form() next_url = request.args.get(""next"") return render_template( ""auth/login.html"", error=error, form=form, login_url=url_for(""%s.login"" % self.blueprint.name), next=next_url, ) ","def login ( self ) : error = None Form = self . get_login_form ( ) if request . method == ""POST"" : form = Form ( request . form ) next_url = request . form . get ( ""next"" ) or self . default_next_url authenticated_user = self . authenticate ( form . username . data , form . password . data , ) if authenticated_user : self . login_user ( authenticated_user ) return redirect ( next_url ) else : flash ( ""Incorrect username or password"" ) else : form = Form ( ) next_url = request . args . get ( ""next"" ) return render_template ( ""auth/login.html"" , error = error , form = form , login_url = url_for ( ""%s.login"" % self . blueprint . name ) , next = next_url , )",if form . 
validate ( ) :,260 4613,"def preflight(): """"""Preflight checks."""""" logger.warning( ""This action is deprecated. Use https://github.com/hacs/action instead"" ) event_data = get_event_data() ref = None if REPOSITORY and CATEGORY: repository = REPOSITORY category = CATEGORY pr = False elif GITHUB_REPOSITORY == ""hacs/default"": category = chose_category() repository = chose_repository(category) pr = False logger.info(f""Actor: {GITHUB_ACTOR}"") else: category = CATEGORY.lower() pr = True if event_data.get(""pull_request"") is not None else False if pr: head = event_data[""pull_request""][""head""] ref = head[""ref""] repository = head[""repo""][""full_name""] else: repository = GITHUB_REPOSITORY logger.info(f""Category: {category}"") logger.info(f""Repository: {repository}"") if TOKEN is None: error(""No GitHub token found, use env GITHUB_TOKEN to set this."") if repository is None: error(""No repository found, use env REPOSITORY to set this."") if category is None: error(""No category found, use env CATEGORY to set this."") async with aiohttp.ClientSession() as session: github = GitHub(TOKEN, session) repo = await github.get_repo(repository) if not pr and repo.description is None: error(""Repository is missing description"") if not pr and not repo.attributes[""has_issues""]: error(""Repository does not have issues enabled"") if ref is None and GITHUB_REPOSITORY != ""hacs/default"": ref = repo.default_branch await validate_repository(repository, category, ref) ","def preflight ( ) : """"""Preflight checks."""""" logger . warning ( ""This action is deprecated. Use https://github.com/hacs/action instead"" ) event_data = get_event_data ( ) ref = None if REPOSITORY and CATEGORY : repository = REPOSITORY category = CATEGORY pr = False elif GITHUB_REPOSITORY == ""hacs/default"" : category = chose_category ( ) repository = chose_repository ( category ) pr = False logger . info ( f""Actor: {GITHUB_ACTOR}"" ) else : category = CATEGORY . lower ( ) pr = True if event_data . get ( ""pull_request"" ) is not None else False if pr : head = event_data [ ""pull_request"" ] [ ""head"" ] ref = head [ ""ref"" ] repository = head [ ""repo"" ] [ ""full_name"" ] else : repository = GITHUB_REPOSITORY logger . info ( f""Category: {category}"" ) logger . info ( f""Repository: {repository}"" ) if TOKEN is None : error ( ""No GitHub token found, use env GITHUB_TOKEN to set this."" ) if repository is None : error ( ""No repository found, use env REPOSITORY to set this."" ) if category is None : error ( ""No category found, use env CATEGORY to set this."" ) async with aiohttp . ClientSession ( ) as session : github = GitHub ( TOKEN , session ) repo = await github . get_repo ( repository ) error ( ""Repository is missing description"" ) if not pr and not repo . attributes [ ""has_issues"" ] : error ( ""Repository does not have issues enabled"" ) if ref is None and GITHUB_REPOSITORY != ""hacs/default"" : ref = repo . default_branch await validate_repository ( repository , category , ref )",if not pr and repo . description is None :,487 18073,"def _wrap_ssl_client(sock, ssl, server_hostname, alpn_protocols): # Applies SSL to a client connection. Returns an SSL socket. 
if ssl: if isinstance(ssl, bool): sslcontext = curiossl.create_default_context() if not server_hostname: sslcontext._context.check_hostname = False sslcontext._context.verify_mode = curiossl.CERT_NONE if alpn_protocols: sslcontext.set_alpn_protocols(alpn_protocols) else: # Assume that ssl is an already created context sslcontext = ssl if server_hostname: extra_args = {""server_hostname"": server_hostname} else: extra_args = {} # if the context is Curio's own, it expects a Curio socket and # returns one. If context is from an external source, including # the stdlib's ssl.SSLContext, it expects a non-Curio socket and # returns a non-Curio socket, which then needs wrapping in a Curio # socket. # # Perhaps the CurioSSLContext is no longer needed. In which case, # this code can be simplified to just the else case below. # if isinstance(sslcontext, curiossl.CurioSSLContext): sock = await sslcontext.wrap_socket( sock, do_handshake_on_connect=False, **extra_args ) else: # do_handshake_on_connect should not be specified for # non-blocking sockets extra_args[""do_handshake_on_connect""] = sock._socket.gettimeout() != 0.0 sock = Socket(sslcontext.wrap_socket(sock._socket, **extra_args)) await sock.do_handshake() return sock","def _wrap_ssl_client ( sock , ssl , server_hostname , alpn_protocols ) : if ssl : if isinstance ( ssl , bool ) : sslcontext = curiossl . create_default_context ( ) sslcontext . _context . check_hostname = False sslcontext . _context . verify_mode = curiossl . CERT_NONE if alpn_protocols : sslcontext . set_alpn_protocols ( alpn_protocols ) else : sslcontext = ssl if server_hostname : extra_args = { ""server_hostname"" : server_hostname } else : extra_args = { } if isinstance ( sslcontext , curiossl . CurioSSLContext ) : sock = await sslcontext . wrap_socket ( sock , do_handshake_on_connect = False , ** extra_args ) else : extra_args [ ""do_handshake_on_connect"" ] = sock . _socket . gettimeout ( ) != 0.0 sock = Socket ( sslcontext . wrap_socket ( sock . _socket , ** extra_args ) ) await sock . do_handshake ( ) return sock",if not server_hostname :,491 6844,"def _evaluate_local_single(self, iterator): for batch in iterator: in_arrays = convert._call_converter(self.converter, batch, self.device) with function.no_backprop_mode(): if isinstance(in_arrays, tuple): results = self.calc_local(*in_arrays) elif isinstance(in_arrays, dict): results = self.calc_local(**in_arrays) else: results = self.calc_local(in_arrays) if self._progress_hook: self._progress_hook(batch) yield results ","def _evaluate_local_single ( self , iterator ) : for batch in iterator : in_arrays = convert . _call_converter ( self . converter , batch , self . device ) with function . no_backprop_mode ( ) : if isinstance ( in_arrays , tuple ) : results = self . calc_local ( * in_arrays ) results = self . calc_local ( ** in_arrays ) else : results = self . calc_local ( in_arrays ) if self . _progress_hook : self . _progress_hook ( batch ) yield results","elif isinstance ( in_arrays , dict ) :",166 527,"def get_note_title_file(note): mo = note_title_re.match(note.get(""content"", """")) if mo: fn = mo.groups()[0] fn = fn.replace("" "", ""_"") fn = fn.replace(""/"", ""_"") if not fn: return """" if isinstance(fn, str): fn = unicode(fn, ""utf-8"") else: fn = unicode(fn) if note_markdown(note): fn += "".mkdn"" else: fn += "".txt"" return fn else: return """"","def get_note_title_file ( note ) : mo = note_title_re . match ( note . get ( ""content"" , """" ) ) if mo : fn = mo . groups ( ) [ 0 ] fn = fn . 
replace ( "" "" , ""_"" ) fn = fn . replace ( ""/"" , ""_"" ) if not fn : return """" fn = unicode ( fn , ""utf-8"" ) else : fn = unicode ( fn ) if note_markdown ( note ) : fn += "".mkdn"" else : fn += "".txt"" return fn else : return """"","if isinstance ( fn , str ) :",169 19202,"def run(self, edit, reverse=False): for region in self.view.sel(): line = self.view.line(region) line_content = self.view.substr(line) bullets = self.view.settings().get(""mde.list_indent_bullets"", [""*"", ""-"", ""+""]) bullet_pattern = ""(["" + """".join(re.escape(i) for i in bullets) + ""])"" new_line = line_content # Transform the bullet to the next/previous bullet type if self.view.settings().get(""mde.list_indent_auto_switch_bullet"", True): for key, bullet in enumerate(bullets): if bullet in new_line: if reverse and new_line.startswith(bullet) and key is 0: # In this case, do not switch bullets continue new_line = new_line.replace( bullet, bullets[(key + (1 if not reverse else -1)) % len(bullets)], ) break # Determine how to indent (tab or spaces) if self.view.settings().get(""translate_tabs_to_spaces""): tab_str = self.view.settings().get(""tab_size"", 4) * "" "" else: tab_str = ""\t"" if not reverse: # Do the indentation new_line = re.sub(bullet_pattern, tab_str + ""\\1"", new_line) else: # Do the unindentation new_line = re.sub(tab_str + bullet_pattern, ""\\1"", new_line) # Insert the new item self.view.replace(edit, line, new_line)","def run ( self , edit , reverse = False ) : for region in self . view . sel ( ) : line = self . view . line ( region ) line_content = self . view . substr ( line ) bullets = self . view . settings ( ) . get ( ""mde.list_indent_bullets"" , [ ""*"" , ""-"" , ""+"" ] ) bullet_pattern = ""(["" + """" . join ( re . escape ( i ) for i in bullets ) + ""])"" new_line = line_content if self . view . settings ( ) . get ( ""mde.list_indent_auto_switch_bullet"" , True ) : for key , bullet in enumerate ( bullets ) : if bullet in new_line : continue new_line = new_line . replace ( bullet , bullets [ ( key + ( 1 if not reverse else - 1 ) ) % len ( bullets ) ] , ) break if self . view . settings ( ) . get ( ""translate_tabs_to_spaces"" ) : tab_str = self . view . settings ( ) . get ( ""tab_size"" , 4 ) * "" "" else : tab_str = ""\t"" if not reverse : new_line = re . sub ( bullet_pattern , tab_str + ""\\1"" , new_line ) else : new_line = re . sub ( tab_str + bullet_pattern , ""\\1"" , new_line ) self . view . replace ( edit , line , new_line )",if reverse and new_line . 
startswith ( bullet ) and key is 0 :,486 18925,"def _integrate_cycle(self, cycle, outevent, inevent): if outevent not in cycle: total = inevent.null() for member in cycle.functions: subtotal = member[inevent] for call in member.calls.itervalues(): callee = self.functions[call.callee_id] if callee.cycle is not cycle: subtotal += self._integrate_call(call, outevent, inevent) total += subtotal cycle[outevent] = total callees = {} for function in self.functions.itervalues(): if function.cycle is not cycle: for call in function.calls.itervalues(): callee = self.functions[call.callee_id] if callee.cycle is cycle: try: callees[callee] += call[CALL_RATIO] except KeyError: callees[callee] = call[CALL_RATIO] for callee, call_ratio in callees.iteritems(): ranks = {} call_ratios = {} partials = {} self._rank_cycle_function(cycle, callee, 0, ranks) self._call_ratios_cycle(cycle, callee, ranks, call_ratios, set()) partial = self._integrate_cycle_function( cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent, ) assert partial == max(partials.values()) assert not total or abs(1.0 - partial / (call_ratio * total)) <= 0.001 return cycle[outevent]","def _integrate_cycle ( self , cycle , outevent , inevent ) : if outevent not in cycle : total = inevent . null ( ) for member in cycle . functions : subtotal = member [ inevent ] for call in member . calls . itervalues ( ) : callee = self . functions [ call . callee_id ] if callee . cycle is not cycle : subtotal += self . _integrate_call ( call , outevent , inevent ) total += subtotal cycle [ outevent ] = total callees = { } for function in self . functions . itervalues ( ) : for call in function . calls . itervalues ( ) : callee = self . functions [ call . callee_id ] if callee . cycle is cycle : try : callees [ callee ] += call [ CALL_RATIO ] except KeyError : callees [ callee ] = call [ CALL_RATIO ] for callee , call_ratio in callees . iteritems ( ) : ranks = { } call_ratios = { } partials = { } self . _rank_cycle_function ( cycle , callee , 0 , ranks ) self . _call_ratios_cycle ( cycle , callee , ranks , call_ratios , set ( ) ) partial = self . _integrate_cycle_function ( cycle , callee , call_ratio , partials , ranks , call_ratios , outevent , inevent , ) assert partial == max ( partials . values ( ) ) assert not total or abs ( 1.0 - partial / ( call_ratio * total ) ) <= 0.001 return cycle [ outevent ]",if function . 
cycle is not cycle :,470 19374,"def iter(self, retry_state): # noqa fut = retry_state.outcome if fut is None: if self.before is not None: self.before(retry_state) return DoAttempt() is_explicit_retry = retry_state.outcome.failed and isinstance( retry_state.outcome.exception(), TryAgain ) if not (is_explicit_retry or self.retry(retry_state=retry_state)): return fut.result() if self.after is not None: self.after(retry_state=retry_state) self.statistics[""delay_since_first_attempt""] = retry_state.seconds_since_start if self.stop(retry_state=retry_state): if self.retry_error_callback: return self.retry_error_callback(retry_state=retry_state) retry_exc = self.retry_error_cls(fut) if self.reraise: raise retry_exc.reraise() six.raise_from(retry_exc, fut.exception()) if self.wait: sleep = self.wait(retry_state=retry_state) else: sleep = 0.0 retry_state.next_action = RetryAction(sleep) retry_state.idle_for += sleep self.statistics[""idle_for""] += sleep self.statistics[""attempt_number""] += 1 if self.before_sleep is not None: self.before_sleep(retry_state=retry_state) return DoSleep(sleep)","def iter ( self , retry_state ) : fut = retry_state . outcome if fut is None : self . before ( retry_state ) return DoAttempt ( ) is_explicit_retry = retry_state . outcome . failed and isinstance ( retry_state . outcome . exception ( ) , TryAgain ) if not ( is_explicit_retry or self . retry ( retry_state = retry_state ) ) : return fut . result ( ) if self . after is not None : self . after ( retry_state = retry_state ) self . statistics [ ""delay_since_first_attempt"" ] = retry_state . seconds_since_start if self . stop ( retry_state = retry_state ) : if self . retry_error_callback : return self . retry_error_callback ( retry_state = retry_state ) retry_exc = self . retry_error_cls ( fut ) if self . reraise : raise retry_exc . reraise ( ) six . raise_from ( retry_exc , fut . exception ( ) ) if self . wait : sleep = self . wait ( retry_state = retry_state ) else : sleep = 0.0 retry_state . next_action = RetryAction ( sleep ) retry_state . idle_for += sleep self . statistics [ ""idle_for"" ] += sleep self . statistics [ ""attempt_number"" ] += 1 if self . before_sleep is not None : self . before_sleep ( retry_state = retry_state ) return DoSleep ( sleep )",if self . before is not None :,403 4841,"def get_boot_command(self): # Note: sudo did not introduce long-format option processing until July # 2013, so even though we parse long-format options, supply short-form # to the sudo command. 
boot_cmd = super(Connection, self).get_boot_command() bits = [self.options.sudo_path, ""-u"", self.options.username] if self.options.preserve_env: bits += [""-E""] if self.options.set_home: bits += [""-H""] if self.options.login: bits += [""-i""] if self.options.selinux_role: bits += [""-r"", self.options.selinux_role] if self.options.selinux_type: bits += [""-t"", self.options.selinux_type] # special handling for bash builtins # TODO: more efficient way of doing this, at least # it's only 1 iteration of boot_cmd to go through source_found = False for cmd in boot_cmd[:]: # rip `source` from boot_cmd if it exists; sudo.py can't run this # even with -i or -s options # since we've already got our ssh command working we shouldn't # need to source anymore # couldn't figure out how to get this to work using sudo flags if ""source"" == cmd: boot_cmd.remove(cmd) source_found = True continue if source_found: # remove words until we hit the python interpreter call if not cmd.endswith(""python""): boot_cmd.remove(cmd) else: break return bits + [""--""] + boot_cmd","def get_boot_command ( self ) : boot_cmd = super ( Connection , self ) . get_boot_command ( ) bits = [ self . options . sudo_path , ""-u"" , self . options . username ] if self . options . preserve_env : bits += [ ""-E"" ] if self . options . set_home : bits += [ ""-H"" ] if self . options . login : bits += [ ""-i"" ] if self . options . selinux_role : bits += [ ""-r"" , self . options . selinux_role ] if self . options . selinux_type : bits += [ ""-t"" , self . options . selinux_type ] source_found = False for cmd in boot_cmd [ : ] : boot_cmd . remove ( cmd ) source_found = True continue if source_found : if not cmd . endswith ( ""python"" ) : boot_cmd . remove ( cmd ) else : break return bits + [ ""--"" ] + boot_cmd","if ""source"" == cmd :",451 20817,"def _handle_open_tag(self, html_tag): ignored = self.handle_ignore(html_tag) tagname = self.handle_replacement(html_tag) jannotations = self.read_jannotations(html_tag) if not jannotations and tagname in self.labelled_tag_stacks: # add this tag to the stack to match correct end tag self.labelled_tag_stacks[tagname].append(None) increment = not jannotations for jannotation in arg_to_iter(jannotations): self.extra_required_attrs.extend(jannotation.pop(""required"", [])) annotation = self.build_annotation(jannotation) self.handle_generated(annotation, ignored) self.handle_variant(annotation) # Don't increment generated/text annotation if annotation.annotation_text is None and not increment: increment = True # look for a closing tag if the content is important if annotation.surrounds_attribute: self.labelled_tag_stacks[tagname].append(annotation) else: annotation.end_index = annotation.start_index + 1 self.annotations.append(annotation) self.next_tag_index += increment","def _handle_open_tag ( self , html_tag ) : ignored = self . handle_ignore ( html_tag ) tagname = self . handle_replacement ( html_tag ) jannotations = self . read_jannotations ( html_tag ) if not jannotations and tagname in self . labelled_tag_stacks : self . labelled_tag_stacks [ tagname ] . append ( None ) increment = not jannotations for jannotation in arg_to_iter ( jannotations ) : self . extra_required_attrs . extend ( jannotation . pop ( ""required"" , [ ] ) ) annotation = self . build_annotation ( jannotation ) self . handle_generated ( annotation , ignored ) self . handle_variant ( annotation ) increment = True if annotation . surrounds_attribute : self . labelled_tag_stacks [ tagname ] . 
append ( annotation ) else : annotation . end_index = annotation . start_index + 1 self . annotations . append ( annotation ) self . next_tag_index += increment",if annotation . annotation_text is None and not increment :,311 22409,"def _check_main_square_in_range(self): """"""Notifies the user via a message in case there is no main square in range"""""" if not self.owner.is_local_player: return # only check this for local player for building in self.get_buildings_in_range(): if building.id == BUILDINGS.MAIN_SQUARE: if StaticPather.get_path_on_roads(self.island, self, building) is not None: # a main square is in range if hasattr(self, ""_main_square_status_icon""): RemoveStatusIcon.broadcast(self, self, SettlerNotConnectedStatus) del self._main_square_status_icon return if not hasattr(self, ""_main_square_status_icon""): self._main_square_status_icon = SettlerNotConnectedStatus( self ) # save ref for removal later AddStatusIcon.broadcast(self, self._main_square_status_icon) # no main square found # check_duplicate: only trigger once for different settlers of a neighborhood self.session.ingame_gui.message_widget.add( point=self.position.origin, string_id=""NO_MAIN_SQUARE_IN_RANGE"", check_duplicate=True, ) ","def _check_main_square_in_range ( self ) : """"""Notifies the user via a message in case there is no main square in range"""""" if not self . owner . is_local_player : return for building in self . get_buildings_in_range ( ) : if StaticPather . get_path_on_roads ( self . island , self , building ) is not None : if hasattr ( self , ""_main_square_status_icon"" ) : RemoveStatusIcon . broadcast ( self , self , SettlerNotConnectedStatus ) del self . _main_square_status_icon return if not hasattr ( self , ""_main_square_status_icon"" ) : self . _main_square_status_icon = SettlerNotConnectedStatus ( self ) AddStatusIcon . broadcast ( self , self . _main_square_status_icon ) self . session . ingame_gui . message_widget . add ( point = self . position . origin , string_id = ""NO_MAIN_SQUARE_IN_RANGE"" , check_duplicate = True , )",if building . id == BUILDINGS . MAIN_SQUARE :,358 21297,"def __init__(self, centered=None, shape_params=()): assert centered is None or isinstance(centered, (float, torch.Tensor)) assert isinstance(shape_params, (tuple, list)) assert all(isinstance(name, str) for name in shape_params) if is_validation_enabled(): if isinstance(centered, float): assert 0 <= centered and centered <= 1 elif isinstance(centered, torch.Tensor): assert (0 <= centered).all() assert (centered <= 1).all() else: assert centered is None self.centered = centered self.shape_params = shape_params","def __init__ ( self , centered = None , shape_params = ( ) ) : assert centered is None or isinstance ( centered , ( float , torch . Tensor ) ) assert isinstance ( shape_params , ( tuple , list ) ) assert all ( isinstance ( name , str ) for name in shape_params ) if is_validation_enabled ( ) : if isinstance ( centered , float ) : assert 0 <= centered and centered <= 1 assert ( 0 <= centered ) . all ( ) assert ( centered <= 1 ) . all ( ) else : assert centered is None self . centered = centered self . shape_params = shape_params","elif isinstance ( centered , torch . 
Tensor ) :",163 5772,"def __get_id_list(self, user, attr): if user.is_superuser or not filer_settings.FILER_ENABLE_PERMISSIONS: return ""All"" allow_list = set() deny_list = set() group_ids = user.groups.all().values_list(""id"", flat=True) q = Q(user=user) | Q(group__in=group_ids) | Q(everybody=True) perms = self.filter(q).order_by(""folder__tree_id"", ""folder__level"", ""folder__lft"") for perm in perms: p = getattr(perm, attr) if p is None: # Not allow nor deny, we continue with the next permission continue if not perm.folder: assert perm.type == FolderPermission.ALL if p == FolderPermission.ALLOW: allow_list.update(Folder.objects.all().values_list(""id"", flat=True)) else: deny_list.update(Folder.objects.all().values_list(""id"", flat=True)) continue folder_id = perm.folder.id if p == FolderPermission.ALLOW: allow_list.add(folder_id) else: deny_list.add(folder_id) if perm.type == FolderPermission.CHILDREN: if p == FolderPermission.ALLOW: allow_list.update( perm.folder.get_descendants().values_list(""id"", flat=True) ) else: deny_list.update( perm.folder.get_descendants().values_list(""id"", flat=True) ) # Deny has precedence over allow return allow_list - deny_list ","def __get_id_list ( self , user , attr ) : if user . is_superuser or not filer_settings . FILER_ENABLE_PERMISSIONS : return ""All"" allow_list = set ( ) deny_list = set ( ) group_ids = user . groups . all ( ) . values_list ( ""id"" , flat = True ) q = Q ( user = user ) | Q ( group__in = group_ids ) | Q ( everybody = True ) perms = self . filter ( q ) . order_by ( ""folder__tree_id"" , ""folder__level"" , ""folder__lft"" ) for perm in perms : p = getattr ( perm , attr ) if p is None : continue if not perm . folder : assert perm . type == FolderPermission . ALL allow_list . update ( Folder . objects . all ( ) . values_list ( ""id"" , flat = True ) ) else : deny_list . update ( Folder . objects . all ( ) . values_list ( ""id"" , flat = True ) ) continue folder_id = perm . folder . id allow_list . add ( folder_id ) else : deny_list . add ( folder_id ) if perm . type == FolderPermission . CHILDREN : allow_list . update ( perm . folder . get_descendants ( ) . values_list ( ""id"" , flat = True ) ) else : deny_list . update ( perm . folder . get_descendants ( ) . values_list ( ""id"" , flat = True ) ) return allow_list - deny_list",if p == FolderPermission . ALLOW :,481 2032,"def test_native_types(self): for tp, fmt, shape, itemtp in native_types: ob = tp() v = memoryview(ob) try: self.assertEqual(normalize(v.format), normalize(fmt)) if shape: self.assertEqual(len(v), shape[0]) else: self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob)) self.assertEqual(v.itemsize, sizeof(itemtp)) self.assertEqual(v.shape, shape) # XXX Issue #12851: PyCData_NewGetBuffer() must provide strides # if requested. memoryview currently reconstructs missing # stride information, so this assert will fail. # self.assertEqual(v.strides, ()) # they are always read/write self.assertFalse(v.readonly) if v.shape: n = 1 for dim in v.shape: n = n * dim self.assertEqual(n * v.itemsize, len(v.tobytes())) except: # so that we can see the failing type print(tp) raise","def test_native_types ( self ) : for tp , fmt , shape , itemtp in native_types : ob = tp ( ) v = memoryview ( ob ) try : self . assertEqual ( normalize ( v . format ) , normalize ( fmt ) ) self . assertEqual ( len ( v ) , shape [ 0 ] ) else : self . assertEqual ( len ( v ) * sizeof ( itemtp ) , sizeof ( ob ) ) self . assertEqual ( v . itemsize , sizeof ( itemtp ) ) self . assertEqual ( v . 
shape , shape ) self . assertFalse ( v . readonly ) if v . shape : n = 1 for dim in v . shape : n = n * dim self . assertEqual ( n * v . itemsize , len ( v . tobytes ( ) ) ) except : print ( tp ) raise",if shape :,334 482,"def uninstall_environments(self, environments): environments = [ env if not env.startswith(self.conda_context.envs_path) else os.path.basename(env) for env in environments ] return_codes = [self.conda_context.exec_remove([env]) for env in environments] final_return_code = 0 for env, return_code in zip(environments, return_codes): if return_code == 0: log.debug(""Conda environment '%s' successfully removed."" % env) else: log.debug(""Conda environment '%s' could not be removed."" % env) final_return_code = return_code return final_return_code","def uninstall_environments ( self , environments ) : environments = [ env if not env . startswith ( self . conda_context . envs_path ) else os . path . basename ( env ) for env in environments ] return_codes = [ self . conda_context . exec_remove ( [ env ] ) for env in environments ] final_return_code = 0 for env , return_code in zip ( environments , return_codes ) : log . debug ( ""Conda environment '%s' successfully removed."" % env ) else : log . debug ( ""Conda environment '%s' could not be removed."" % env ) final_return_code = return_code return final_return_code",if return_code == 0 :,191 23424,"def updater_run_install_popup_handler(scene): global ran_autocheck_install_popup ran_autocheck_install_popup = True # in case of error importing updater if updater.invalidupdater: return try: bpy.app.handlers.scene_update_post.remove(updater_run_install_popup_handler) except: pass if ""ignore"" in updater.json and updater.json[""ignore""]: return # don't do popup if ignore pressed # elif type(updater.update_version) != type((0,0,0)): # # likely was from master or another branch, shouldn't trigger popup # updater.json_reset_restore() # return elif ""version_text"" in updater.json and ""version"" in updater.json[""version_text""]: version = updater.json[""version_text""][""version""] ver_tuple = updater.version_tuple_from_text(version) if ver_tuple < updater.current_version: # user probably manually installed to get the up to date addon # in here. Clear out the update flag using this function if updater.verbose: print(""RetopoFlow updater: appears user updated, clearing flag"") updater.json_reset_restore() return atr = addon_updater_install_popup.bl_idname.split(""."") getattr(getattr(bpy.ops, atr[0]), atr[1])(""INVOKE_DEFAULT"")","def updater_run_install_popup_handler ( scene ) : global ran_autocheck_install_popup ran_autocheck_install_popup = True if updater . invalidupdater : return try : bpy . app . handlers . scene_update_post . remove ( updater_run_install_popup_handler ) except : pass if ""ignore"" in updater . json and updater . json [ ""ignore"" ] : return elif ""version_text"" in updater . json and ""version"" in updater . json [ ""version_text"" ] : version = updater . json [ ""version_text"" ] [ ""version"" ] ver_tuple = updater . version_tuple_from_text ( version ) if updater . verbose : print ( ""RetopoFlow updater: appears user updated, clearing flag"" ) updater . json_reset_restore ( ) return atr = addon_updater_install_popup . bl_idname . split ( ""."" ) getattr ( getattr ( bpy . ops , atr [ 0 ] ) , atr [ 1 ] ) ( ""INVOKE_DEFAULT"" )",if ver_tuple < updater . 
current_version :,389 8305,"def _test_reducibility(self): # make a copy of the graph graph = networkx.DiGraph(self._graph) # preprocess: make it a super graph self._make_supergraph(graph) while True: changed = False # find a node with a back-edge, remove the edge (deleting the loop), and replace it with a MultiNode changed |= self._remove_self_loop(graph) # find a node that has only one predecessor, and merge it with its predecessor (replace them with a # MultiNode) changed |= self._merge_single_entry_node(graph) if not changed: # a fixed-point is reached break ",def _test_reducibility ( self ) : graph = networkx . DiGraph ( self . _graph ) self . _make_supergraph ( graph ) while True : changed = False changed |= self . _remove_self_loop ( graph ) changed |= self . _merge_single_entry_node ( graph ) break,if not changed :,178 18202,"def _process_features(self, datum): if len(datum) != 2: raise ValueError( ""Expected tuples of ({}_id, features), "" ""got {}."".format(self._entity_type, datum) ) entity_id, features = datum if entity_id not in self._id_mapping: raise ValueError( ""{entity_type} id {entity_id} not in {entity_type} id mappings."".format( entity_type=self._entity_type, entity_id=entity_id ) ) idx = self._id_mapping[entity_id] for (feature, weight) in self._iter_features(features): if feature not in self._feature_mapping: raise ValueError( ""Feature {} not in feature mapping. "" ""Call fit first."".format(feature) ) feature_idx = self._feature_mapping[feature] yield (idx, feature_idx, weight)","def _process_features ( self , datum ) : if len ( datum ) != 2 : raise ValueError ( ""Expected tuples of ({}_id, features), "" ""got {}."" . format ( self . _entity_type , datum ) ) entity_id , features = datum if entity_id not in self . _id_mapping : raise ValueError ( ""{entity_type} id {entity_id} not in {entity_type} id mappings."" . format ( entity_type = self . _entity_type , entity_id = entity_id ) ) idx = self . _id_mapping [ entity_id ] for ( feature , weight ) in self . _iter_features ( features ) : raise ValueError ( ""Feature {} not in feature mapping. "" ""Call fit first."" . format ( feature ) ) feature_idx = self . _feature_mapping [ feature ] yield ( idx , feature_idx , weight )",if feature not in self . _feature_mapping :,261 2395,"def vsGetFastParseFields(self): fields = [] for fname in self._vs_fields: fobj = self._vs_values.get(fname) if fobj.vsIsPrim(): fields.append(fobj) continue fields.extend(fobj.vsGetFastParseFields()) return fields ",def vsGetFastParseFields ( self ) : fields = [ ] for fname in self . _vs_fields : fobj = self . _vs_values . get ( fname ) fields . append ( fobj ) continue fields . extend ( fobj . vsGetFastParseFields ( ) ) return fields,if fobj . vsIsPrim ( ) :,89 19666,"def query(q): url = query_url() + urllib.parse.quote(json.dumps(q)) ret = None for i in range(20): try: ret = urlread(url) while ret.startswith(b""canceling statement due to statement timeout""): ret = urlread(url) if not ret: print(""ret == None"") except IOError: pass if ret: try: data = json.loads(ret) if isinstance(data, dict): if ""error"" in data: print(""error:"") print(ret) assert ""error"" not in data return data except: print(ret) print(url) sleep(20)","def query ( q ) : url = query_url ( ) + urllib . parse . quote ( json . dumps ( q ) ) ret = None for i in range ( 20 ) : try : ret = urlread ( url ) while ret . 
startswith ( b""canceling statement due to statement timeout"" ) : ret = urlread ( url ) if not ret : print ( ""ret == None"" ) except IOError : pass if ret : try : data = json . loads ( ret ) if isinstance ( data , dict ) : print ( ""error:"" ) print ( ret ) assert ""error"" not in data return data except : print ( ret ) print ( url ) sleep ( 20 )","if ""error"" in data :",242 23055,"def __get_ratio(self): """"""Return splitter ratio of the main splitter."""""" c = self.c free_layout = c.free_layout if free_layout: w = free_layout.get_main_splitter() if w: aList = w.sizes() if len(aList) == 2: n1, n2 = aList # 2017/06/07: guard against division by zero. ratio = 0.5 if n1 + n2 == 0 else float(n1) / float(n1 + n2) return ratio return 0.5","def __get_ratio ( self ) : """"""Return splitter ratio of the main splitter."""""" c = self . c free_layout = c . free_layout if free_layout : w = free_layout . get_main_splitter ( ) if w : aList = w . sizes ( ) n1 , n2 = aList ratio = 0.5 if n1 + n2 == 0 else float ( n1 ) / float ( n1 + n2 ) return ratio return 0.5",if len ( aList ) == 2 :,170 14162,"def _readenv(var, msg): match = _ENV_VAR_PAT.match(var) if match and match.groups(): envvar = match.groups()[0] if envvar in os.environ: value = os.environ[envvar] if six.PY2: value = value.decode(""utf8"") return value else: raise InvalidConfigException( ""{} - environment variable '{}' not set"".format(msg, var) ) else: raise InvalidConfigException( ""{} - environment variable name '{}' does not match pattern '{}'"".format( msg, var, _ENV_VAR_PAT_STR ) )","def _readenv ( var , msg ) : match = _ENV_VAR_PAT . match ( var ) if match and match . groups ( ) : envvar = match . groups ( ) [ 0 ] if envvar in os . environ : value = os . environ [ envvar ] value = value . decode ( ""utf8"" ) return value else : raise InvalidConfigException ( ""{} - environment variable '{}' not set"" . format ( msg , var ) ) else : raise InvalidConfigException ( ""{} - environment variable name '{}' does not match pattern '{}'"" . format ( msg , var , _ENV_VAR_PAT_STR ) )",if six . PY2 :,190 6353,"def _make_doc_structure(d, level): if d.is_redirect: return None if expand: res = dict(d.get_json_data()) res[""subpages""] = [] else: res = { ""title"": d.title, ""slug"": d.slug, ""locale"": d.locale, ""url"": d.get_absolute_url(), ""subpages"": [], } if level < depth: descendants = d.get_descendants(1) descendants.sort(key=lambda item: item.title) for descendant in descendants: sp = _make_doc_structure(descendant, level + 1) if sp is not None: res[""subpages""].append(sp) return res","def _make_doc_structure ( d , level ) : if d . is_redirect : return None if expand : res = dict ( d . get_json_data ( ) ) res [ ""subpages"" ] = [ ] else : res = { ""title"" : d . title , ""slug"" : d . slug , ""locale"" : d . locale , ""url"" : d . get_absolute_url ( ) , ""subpages"" : [ ] , } if level < depth : descendants = d . get_descendants ( 1 ) descendants . sort ( key = lambda item : item . title ) for descendant in descendants : sp = _make_doc_structure ( descendant , level + 1 ) res [ ""subpages"" ] . append ( sp ) return res",if sp is not None :,216 19601,"def _setup_layer(self, trainable=False, **kwargs): """"""Constructs keras layer with relevant weights and losses."""""" # Initialize an empty layer, then add_weight() etc. as needed. super(KerasLayer, self).__init__(trainable=trainable, **kwargs) # Add trainable and non-trainable weights from the callable. 
if hasattr(self._func, ""trainable_variables""): for v in self._func.trainable_variables: self._add_existing_weight(v, trainable=True) trainable_variables = {id(v) for v in self._func.trainable_variables} else: trainable_variables = set() if hasattr(self._func, ""variables""): for v in self._func.variables: if id(v) not in trainable_variables: self._add_existing_weight(v, trainable=False) # Forward the callable's regularization losses (if any). if hasattr(self._func, ""regularization_losses""): for l in self._func.regularization_losses: if not callable(l): raise ValueError( ""hub.KerasLayer(obj) expects obj.regularization_losses to be an "" ""iterable of callables, each returning a scalar loss term."" ) self.add_loss(self._call_loss_if_trainable(l)) # Supports callables. ","def _setup_layer ( self , trainable = False , ** kwargs ) : """"""Constructs keras layer with relevant weights and losses."""""" super ( KerasLayer , self ) . __init__ ( trainable = trainable , ** kwargs ) if hasattr ( self . _func , ""trainable_variables"" ) : for v in self . _func . trainable_variables : self . _add_existing_weight ( v , trainable = True ) trainable_variables = { id ( v ) for v in self . _func . trainable_variables } else : trainable_variables = set ( ) if hasattr ( self . _func , ""variables"" ) : for v in self . _func . variables : self . _add_existing_weight ( v , trainable = False ) if hasattr ( self . _func , ""regularization_losses"" ) : for l in self . _func . regularization_losses : if not callable ( l ) : raise ValueError ( ""hub.KerasLayer(obj) expects obj.regularization_losses to be an "" ""iterable of callables, each returning a scalar loss term."" ) self . add_loss ( self . _call_loss_if_trainable ( l ) )",if id ( v ) not in trainable_variables :,352 21960,"def process_signature(app, what, name, obj, options, signature, return_annotation): if signature: # replace Mock function names signature = re.sub("""", ""\g<1>"", signature) signature = re.sub(""tensorflow"", ""tf"", signature) # add scope name to layer signatures: if hasattr(obj, ""use_scope""): if obj.use_scope: signature = signature[0] + ""variable_scope_name, "" + signature[1:] elif obj.use_scope is None: signature = signature[0] + ""[variable_scope_name,] "" + signature[1:] # signature: arg list return signature, return_annotation","def process_signature ( app , what , name , obj , options , signature , return_annotation ) : if signature : signature = re . sub ( """" , ""\g<1>"" , signature ) signature = re . sub ( ""tensorflow"" , ""tf"" , signature ) if hasattr ( obj , ""use_scope"" ) : signature = signature [ 0 ] + ""variable_scope_name, "" + signature [ 1 : ] elif obj . use_scope is None : signature = signature [ 0 ] + ""[variable_scope_name,] "" + signature [ 1 : ] return signature , return_annotation",if obj . use_scope :,188 15071,"def check_model_list_copy(overwrite=False, max_per_line=119): """"""Check the model lists in the README and index.rst are consistent and maybe `overwrite`."""""" rst_list, start_index, end_index, lines = _find_text_in_file( filename=os.path.join(PATH_TO_DOCS, ""index.rst""), start_prompt="" This list is updated automatically from the README"", end_prompt="".. 
_bigtable:"", ) md_list = get_model_list() converted_list = convert_to_rst(md_list, max_per_line=max_per_line) if converted_list != rst_list: if overwrite: with open( os.path.join(PATH_TO_DOCS, ""index.rst""), ""w"", encoding=""utf-8"", newline=""\n"", ) as f: f.writelines(lines[:start_index] + [converted_list] + lines[end_index:]) else: raise ValueError( ""The model list in the README changed and the list in `index.rst` has not been updated. Run "" ""`make fix-copies` to fix this."" )","def check_model_list_copy ( overwrite = False , max_per_line = 119 ) : """"""Check the model lists in the README and index.rst are consistent and maybe `overwrite`."""""" rst_list , start_index , end_index , lines = _find_text_in_file ( filename = os . path . join ( PATH_TO_DOCS , ""index.rst"" ) , start_prompt = "" This list is updated automatically from the README"" , end_prompt = "".. _bigtable:"" , ) md_list = get_model_list ( ) converted_list = convert_to_rst ( md_list , max_per_line = max_per_line ) if converted_list != rst_list : with open ( os . path . join ( PATH_TO_DOCS , ""index.rst"" ) , ""w"" , encoding = ""utf-8"" , newline = ""\n"" , ) as f : f . writelines ( lines [ : start_index ] + [ converted_list ] + lines [ end_index : ] ) else : raise ValueError ( ""The model list in the README changed and the list in `index.rst` has not been updated. Run "" ""`make fix-copies` to fix this."" )",if overwrite :,335 12138,"def ExcludePath(self, path): """"""Check to see if this is a service url and matches inbound_services."""""" skip = False for reserved_path in self.reserved_paths.keys(): if path.startswith(reserved_path): if ( not self.inbound_services or self.reserved_paths[reserved_path] not in self.inbound_services ): return (True, self.reserved_paths[reserved_path]) return (False, None) ","def ExcludePath ( self , path ) : """"""Check to see if this is a service url and matches inbound_services."""""" skip = False for reserved_path in self . reserved_paths . keys ( ) : if ( not self . inbound_services or self . reserved_paths [ reserved_path ] not in self . inbound_services ) : return ( True , self . reserved_paths [ reserved_path ] ) return ( False , None )",if path . startswith ( reserved_path ) :,132 6333,"def _parse_firstline(self, line): try: if self.kind == 2: # auto detect try: self._parse_request_line(line) except InvalidRequestLine: self._parse_response_line(line) elif self.kind == 1: self._parse_response_line(line) elif self.kind == 0: self._parse_request_line(line) except InvalidRequestLine as e: self.errno = BAD_FIRST_LINE self.errstr = str(e) return False return True","def _parse_firstline ( self , line ) : try : if self . kind == 2 : try : self . _parse_request_line ( line ) except InvalidRequestLine : self . _parse_response_line ( line ) elif self . kind == 1 : self . _parse_response_line ( line ) self . _parse_request_line ( line ) except InvalidRequestLine as e : self . errno = BAD_FIRST_LINE self . errstr = str ( e ) return False return True",elif self . 
kind == 0 :,165 18633,"def compare_multiple_events(i, expected_results, actual_results): events_in_a_row = [] j = i while j < len(expected_results) and isinstance( actual_results[j], actual_results[i].__class__ ): events_in_a_row.append(actual_results[j]) j += 1 message = """" for event in events_in_a_row: for k in range(i, j): passed, message = compare_events(expected_results[k], event) if passed: expected_results[k] = None break else: return i, False, message return j, True, """"","def compare_multiple_events ( i , expected_results , actual_results ) : events_in_a_row = [ ] j = i while j < len ( expected_results ) and isinstance ( actual_results [ j ] , actual_results [ i ] . __class__ ) : events_in_a_row . append ( actual_results [ j ] ) j += 1 message = """" for event in events_in_a_row : for k in range ( i , j ) : passed , message = compare_events ( expected_results [ k ] , event ) expected_results [ k ] = None break else : return i , False , message return j , True , """"",if passed :,192 7504,"def get_default_region(): region = """" if ""default"" in AWS_ACCOUNTS: # First check the host/endpoint configuration if AWS_ACCOUNTS[""default""].HOST.get(): endpoint = AWS_ACCOUNTS[""default""].HOST.get() if re.search(SUBDOMAIN_ENDPOINT_RE, endpoint, re.IGNORECASE): region = re.search( SUBDOMAIN_ENDPOINT_RE, endpoint, re.IGNORECASE ).group(""region"") elif re.search(HYPHEN_ENDPOINT_RE, endpoint, re.IGNORECASE): region = re.search(HYPHEN_ENDPOINT_RE, endpoint, re.IGNORECASE).group( ""region"" ) elif re.search(DUALSTACK_ENDPOINT_RE, endpoint, re.IGNORECASE): region = re.search( DUALSTACK_ENDPOINT_RE, endpoint, re.IGNORECASE ).group(""region"") elif AWS_ACCOUNTS[""default""].REGION.get(): region = AWS_ACCOUNTS[""default""].REGION.get() # If the parsed out region is not in the list of supported regions, fallback to the default if region not in get_locations(): LOG.warn( ""Region, %s, not found in the list of supported regions: %s"" % (region, "", "".join(get_locations())) ) region = """" return region ","def get_default_region ( ) : region = """" if ""default"" in AWS_ACCOUNTS : endpoint = AWS_ACCOUNTS [ ""default"" ] . HOST . get ( ) if re . search ( SUBDOMAIN_ENDPOINT_RE , endpoint , re . IGNORECASE ) : region = re . search ( SUBDOMAIN_ENDPOINT_RE , endpoint , re . IGNORECASE ) . group ( ""region"" ) elif re . search ( HYPHEN_ENDPOINT_RE , endpoint , re . IGNORECASE ) : region = re . search ( HYPHEN_ENDPOINT_RE , endpoint , re . IGNORECASE ) . group ( ""region"" ) elif re . search ( DUALSTACK_ENDPOINT_RE , endpoint , re . IGNORECASE ) : region = re . search ( DUALSTACK_ENDPOINT_RE , endpoint , re . IGNORECASE ) . group ( ""region"" ) elif AWS_ACCOUNTS [ ""default"" ] . REGION . get ( ) : region = AWS_ACCOUNTS [ ""default"" ] . REGION . get ( ) if region not in get_locations ( ) : LOG . warn ( ""Region, %s, not found in the list of supported regions: %s"" % ( region , "", "" . join ( get_locations ( ) ) ) ) region = """" return region","if AWS_ACCOUNTS [ ""default"" ] . HOST . 
get ( ) :",386 1300,"def __init__(self, factors, contrast_matrices, num_columns): self.factors = tuple(factors) factor_set = frozenset(factors) if not isinstance(contrast_matrices, dict): raise ValueError(""contrast_matrices must be dict"") for factor, contrast_matrix in six.iteritems(contrast_matrices): if factor not in factor_set: raise ValueError(""Unexpected factor in contrast_matrices dict"") if not isinstance(contrast_matrix, ContrastMatrix): raise ValueError(""Expected a ContrastMatrix, not %r"" % (contrast_matrix,)) self.contrast_matrices = contrast_matrices if not isinstance(num_columns, six.integer_types): raise ValueError(""num_columns must be an integer"") self.num_columns = num_columns ","def __init__ ( self , factors , contrast_matrices , num_columns ) : self . factors = tuple ( factors ) factor_set = frozenset ( factors ) if not isinstance ( contrast_matrices , dict ) : raise ValueError ( ""contrast_matrices must be dict"" ) for factor , contrast_matrix in six . iteritems ( contrast_matrices ) : if factor not in factor_set : raise ValueError ( ""Unexpected factor in contrast_matrices dict"" ) raise ValueError ( ""Expected a ContrastMatrix, not %r"" % ( contrast_matrix , ) ) self . contrast_matrices = contrast_matrices if not isinstance ( num_columns , six . integer_types ) : raise ValueError ( ""num_columns must be an integer"" ) self . num_columns = num_columns","if not isinstance ( contrast_matrix , ContrastMatrix ) :",193 9736,"def resolve(self, all_profiles, controls_manager=None): if self.resolved: return self.resolve_controls(controls_manager) self.resolved_selections = set(self.selected) if self.extends: if self.extends not in all_profiles: msg = ( ""Profile {name} extends profile {extended}, but "" ""only profiles {known_profiles} are available for resolution."".format( name=self.id_, extended=self.extends, known_profiles=list(all_profiles.keys()), ) ) raise RuntimeError(msg) extended_profile = all_profiles[self.extends] extended_profile.resolve(all_profiles, controls_manager) self.extend_by(extended_profile) for uns in self.unselected: self.resolved_selections.discard(uns) self.unselected = [] self.extends = None self.selected = sorted(self.resolved_selections) self.resolved = True ","def resolve ( self , all_profiles , controls_manager = None ) : if self . resolved : return self . resolve_controls ( controls_manager ) self . resolved_selections = set ( self . selected ) if self . extends : msg = ( ""Profile {name} extends profile {extended}, but "" ""only profiles {known_profiles} are available for resolution."" . format ( name = self . id_ , extended = self . extends , known_profiles = list ( all_profiles . keys ( ) ) , ) ) raise RuntimeError ( msg ) extended_profile = all_profiles [ self . extends ] extended_profile . resolve ( all_profiles , controls_manager ) self . extend_by ( extended_profile ) for uns in self . unselected : self . resolved_selections . discard ( uns ) self . unselected = [ ] self . extends = None self . selected = sorted ( self . resolved_selections ) self . resolved = True",if self . 
extends not in all_profiles :,288 4015,"def __init__( self, data_type=""unsupervised"", transform=None, pre_transform=None, pre_filter=None, empty=False, args=None, ): self.data_type = data_type self.url = ""https://cloud.tsinghua.edu.cn/f/2cac04ee904e4b54b4b2/?dl=1"" self.root = osp.join(osp.dirname(osp.realpath(__file__)), ""../.."", ""data"", ""CHEM"") super(MoleculeDataset, self).__init__( self.root, transform, pre_transform, pre_filter ) self.transform, self.pre_transform, self.pre_filter = ( transform, pre_transform, pre_filter, ) if not empty: if data_type == ""unsupervised"": self.data, self.slices = torch.load(self.processed_paths[1]) else: self.data, self.slices = torch.load(self.processed_paths[0])","def __init__ ( self , data_type = ""unsupervised"" , transform = None , pre_transform = None , pre_filter = None , empty = False , args = None , ) : self . data_type = data_type self . url = ""https://cloud.tsinghua.edu.cn/f/2cac04ee904e4b54b4b2/?dl=1"" self . root = osp . join ( osp . dirname ( osp . realpath ( __file__ ) ) , ""../.."" , ""data"" , ""CHEM"" ) super ( MoleculeDataset , self ) . __init__ ( self . root , transform , pre_transform , pre_filter ) self . transform , self . pre_transform , self . pre_filter = ( transform , pre_transform , pre_filter , ) if not empty : self . data , self . slices = torch . load ( self . processed_paths [ 1 ] ) else : self . data , self . slices = torch . load ( self . processed_paths [ 0 ] )","if data_type == ""unsupervised"" :",278 11817,"def leave(self, reason=None): try: if self.id.startswith(""C""): log.info(""Leaving channel %s (%s)"", self, self.id) self._bot.api_call(""conversations.leave"", data={""channel"": self.id}) else: log.info(""Leaving group %s (%s)"", self, self.id) self._bot.api_call(""conversations.leave"", data={""channel"": self.id}) except SlackAPIResponseError as e: if e.error == ""user_is_bot"": raise RoomError(f""Unable to leave channel. {USER_IS_BOT_HELPTEXT}"") else: raise RoomError(e) self._id = None ","def leave ( self , reason = None ) : try : log . info ( ""Leaving channel %s (%s)"" , self , self . id ) self . _bot . api_call ( ""conversations.leave"" , data = { ""channel"" : self . id } ) else : log . info ( ""Leaving group %s (%s)"" , self , self . id ) self . _bot . api_call ( ""conversations.leave"" , data = { ""channel"" : self . id } ) except SlackAPIResponseError as e : if e . error == ""user_is_bot"" : raise RoomError ( f""Unable to leave channel. {USER_IS_BOT_HELPTEXT}"" ) else : raise RoomError ( e ) self . _id = None","if self . id . startswith ( ""C"" ) :",197 16447,"def excluded_files(self): ret = [] try: file_paths = [ os.path.normpath( os.path.join(os.path.relpath(folder, self.folder), el) ).replace(""\\"", ""/"") for folder, dirpaths, fs in walk(self.folder) for el in fs + dirpaths ] if file_paths: paths = to_file_bytes(""\n"".join(file_paths)) out = input_runner([""git"", ""check-ignore"", ""--stdin""], paths, self.folder) grep_stdout = decode_text(out) ret = grep_stdout.splitlines() except (CalledProcessError, IOError, OSError) as e: if self._output: self._output.warn( ""Error checking excluded git files: %s. "" ""Ignoring excluded files"" % e ) ret = [] return ret ","def excluded_files ( self ) : ret = [ ] try : file_paths = [ os . path . normpath ( os . path . join ( os . path . relpath ( folder , self . folder ) , el ) ) . replace ( ""\\"" , ""/"" ) for folder , dirpaths , fs in walk ( self . folder ) for el in fs + dirpaths ] if file_paths : paths = to_file_bytes ( ""\n"" . 
join ( file_paths ) ) out = input_runner ( [ ""git"" , ""check-ignore"" , ""--stdin"" ] , paths , self . folder ) grep_stdout = decode_text ( out ) ret = grep_stdout . splitlines ( ) except ( CalledProcessError , IOError , OSError ) as e : self . _output . warn ( ""Error checking excluded git files: %s. "" ""Ignoring excluded files"" % e ) ret = [ ] return ret",if self . _output :,251 24319,"def find_internal_python_modules( root_module: types.ModuleType, ) -> Sequence[Tuple[str, types.ModuleType]]: """"""Returns `(name, module)` for all Haiku submodules under `root_module`."""""" modules = set([(root_module.__name__, root_module)]) visited = set() to_visit = [root_module] while to_visit: mod = to_visit.pop() visited.add(mod) for name in dir(mod): obj = getattr(mod, name) if inspect.ismodule(obj) and obj not in visited: if obj.__name__.startswith(""haiku""): to_visit.append(obj) modules.add((obj.__name__, obj)) return sorted(modules) ","def find_internal_python_modules ( root_module : types . ModuleType , ) -> Sequence [ Tuple [ str , types . ModuleType ] ] : """"""Returns `(name, module)` for all Haiku submodules under `root_module`."""""" modules = set ( [ ( root_module . __name__ , root_module ) ] ) visited = set ( ) to_visit = [ root_module ] while to_visit : mod = to_visit . pop ( ) visited . add ( mod ) for name in dir ( mod ) : obj = getattr ( mod , name ) if obj . __name__ . startswith ( ""haiku"" ) : to_visit . append ( obj ) modules . add ( ( obj . __name__ , obj ) ) return sorted ( modules )",if inspect . ismodule ( obj ) and obj not in visited :,206 14376,"def __init__( self, msg=None, data=None, filename=None, password=None, vals=None, file_obj=None ): self.p = None self.q = None self.g = None self.y = None self.x = None if file_obj is not None: self._from_private_key(file_obj, password) return if filename is not None: self._from_private_key_file(filename, password) return if (msg is None) and (data is not None): msg = Message(data) if vals is not None: self.p, self.q, self.g, self.y = vals else: if msg is None: raise SSHException(""Key object may not be empty"") if msg.get_text() != ""ssh-dss"": raise SSHException(""Invalid key"") self.p = msg.get_mpint() self.q = msg.get_mpint() self.g = msg.get_mpint() self.y = msg.get_mpint() self.size = util.bit_length(self.p) ","def __init__ ( self , msg = None , data = None , filename = None , password = None , vals = None , file_obj = None ) : self . p = None self . q = None self . g = None self . y = None self . x = None if file_obj is not None : self . _from_private_key ( file_obj , password ) return if filename is not None : self . _from_private_key_file ( filename , password ) return if ( msg is None ) and ( data is not None ) : msg = Message ( data ) if vals is not None : self . p , self . q , self . g , self . y = vals else : if msg is None : raise SSHException ( ""Key object may not be empty"" ) raise SSHException ( ""Invalid key"" ) self . p = msg . get_mpint ( ) self . q = msg . get_mpint ( ) self . g = msg . get_mpint ( ) self . y = msg . get_mpint ( ) self . size = util . bit_length ( self . p )","if msg . 
get_text ( ) != ""ssh-dss"" :",308 4158,"def test_broadcast(self): """"""Test example broadcast functionality."""""" self.create_lang_connection(""1000000000"", ""en"") self.create_lang_connection(""1000000001"", ""en"") self.create_lang_connection(""1000000002"", ""en"") self.create_lang_connection(""1000000003"", ""es"") self.create_lang_connection(""1000000004"", ""es"") app.lang_broadcast() self.assertEqual(2, len(self.outbound)) for message in self.outbound: if message.text == ""hello"": self.assertEqual(3, len(message.connections)) elif message.text == ""hola"": self.assertEqual(2, len(message.connections))","def test_broadcast ( self ) : """"""Test example broadcast functionality."""""" self . create_lang_connection ( ""1000000000"" , ""en"" ) self . create_lang_connection ( ""1000000001"" , ""en"" ) self . create_lang_connection ( ""1000000002"" , ""en"" ) self . create_lang_connection ( ""1000000003"" , ""es"" ) self . create_lang_connection ( ""1000000004"" , ""es"" ) app . lang_broadcast ( ) self . assertEqual ( 2 , len ( self . outbound ) ) for message in self . outbound : self . assertEqual ( 3 , len ( message . connections ) ) elif message . text == ""hola"" : self . assertEqual ( 2 , len ( message . connections ) )","if message . text == ""hello"" :",187 13792,"def _map_args(maps: dict, **kwargs): # maps: key=old name, value= new name output = {} for name, val in kwargs.items(): if name in maps: assert isinstance(maps[name], str) output.update({maps[name]: val}) else: output.update({name: val}) for keys in maps.keys(): if keys not in output.keys(): pass return output ","def _map_args ( maps : dict , ** kwargs ) : output = { } for name , val in kwargs . items ( ) : assert isinstance ( maps [ name ] , str ) output . update ( { maps [ name ] : val } ) else : output . update ( { name : val } ) for keys in maps . keys ( ) : if keys not in output . keys ( ) : pass return output",if name in maps :,125 13383,"def parse_network_whitelist(self, network_whitelist_location): networks = [] with open(network_whitelist_location, ""r"") as text_file: for line in text_file: line = line.strip().strip(""'"").strip('""') if isIPv4(line) or isIPv6(line): networks.append(line) return networks","def parse_network_whitelist ( self , network_whitelist_location ) : networks = [ ] with open ( network_whitelist_location , ""r"" ) as text_file : for line in text_file : line = line . strip ( ) . strip ( ""'"" ) . strip ( '""' ) networks . append ( line ) return networks",if isIPv4 ( line ) or isIPv6 ( line ) :,98 22809,"def h_line_down(self, input): end_this_line = self.value.find(""\n"", self.cursor_position) if end_this_line == -1: if self.scroll_exit: self.h_exit_down(None) else: self.cursor_position = len(self.value) else: self.cursor_position = end_this_line + 1 for x in range(self.cursorx): if self.cursor_position > len(self.value) - 1: break elif self.value[self.cursor_position] == ""\n"": break else: self.cursor_position += 1","def h_line_down ( self , input ) : end_this_line = self . value . find ( ""\n"" , self . cursor_position ) if end_this_line == - 1 : self . h_exit_down ( None ) else : self . cursor_position = len ( self . value ) else : self . cursor_position = end_this_line + 1 for x in range ( self . cursorx ) : if self . cursor_position > len ( self . value ) - 1 : break elif self . value [ self . cursor_position ] == ""\n"" : break else : self . cursor_position += 1",if self . 
scroll_exit :,193 7638,"def lookup_field(name, obj, model_admin=None): opts = obj._meta try: f = _get_non_gfk_field(opts, name) except (FieldDoesNotExist, FieldIsAForeignKeyColumnName): # For non-field values, the value is either a method, property or # returned via a callable. if callable(name): attr = name value = attr(obj) elif ( model_admin is not None and hasattr(model_admin, name) and not name == ""__str__"" and not name == ""__unicode__"" ): attr = getattr(model_admin, name) value = attr(obj) else: attr = getattr(obj, name) if callable(attr): value = attr() else: value = attr f = None else: attr = None value = getattr(obj, name) return f, attr, value ","def lookup_field ( name , obj , model_admin = None ) : opts = obj . _meta try : f = _get_non_gfk_field ( opts , name ) except ( FieldDoesNotExist , FieldIsAForeignKeyColumnName ) : attr = name value = attr ( obj ) elif ( model_admin is not None and hasattr ( model_admin , name ) and not name == ""__str__"" and not name == ""__unicode__"" ) : attr = getattr ( model_admin , name ) value = attr ( obj ) else : attr = getattr ( obj , name ) if callable ( attr ) : value = attr ( ) else : value = attr f = None else : attr = None value = getattr ( obj , name ) return f , attr , value",if callable ( name ) :,274 22695,"def _update_module_index(self): self.debug(""Updating index file..."") # initialize module index self._module_index = [] # load module index from local copy path = os.path.join(self.home_path, ""modules.yml"") if os.path.exists(path): with open(path, ""r"") as infile: self._module_index = yaml.safe_load(infile) # add status to index for each module for module in self._module_index: status = ""not installed"" if module[""path""] in self._loaded_category.get(""disabled"", []): status = ""disabled"" elif module[""path""] in self._loaded_modules.keys(): status = ""installed"" loaded = self._loaded_modules[module[""path""]] if loaded.meta[""version""] != module[""version""]: status = ""outdated"" module[""status""] = status ","def _update_module_index ( self ) : self . debug ( ""Updating index file..."" ) self . _module_index = [ ] path = os . path . join ( self . home_path , ""modules.yml"" ) if os . path . exists ( path ) : with open ( path , ""r"" ) as infile : self . _module_index = yaml . safe_load ( infile ) for module in self . _module_index : status = ""not installed"" if module [ ""path"" ] in self . _loaded_category . get ( ""disabled"" , [ ] ) : status = ""disabled"" elif module [ ""path"" ] in self . _loaded_modules . keys ( ) : status = ""installed"" loaded = self . _loaded_modules [ module [ ""path"" ] ] status = ""outdated"" module [ ""status"" ] = status","if loaded . meta [ ""version"" ] != module [ ""version"" ] :",257 15726,"def test_nce(self): window_size = 5 words = [] for i in range(window_size): words.append(layers.data(name=""word_{0}"".format(i), shape=[1], dtype=""int64"")) dict_size = 10000 label_word = int(window_size // 2) + 1 embs = [] for i in range(window_size): if i == label_word: continue emb = layers.embedding( input=words[i], size=[dict_size, 32], param_attr=""emb.w"", is_sparse=True ) embs.append(emb) embs = layers.concat(input=embs, axis=1) loss = layers.nce( input=embs, label=words[label_word], num_total_classes=dict_size, param_attr=""nce.w"", bias_attr=""nce.b"", ) avg_loss = layers.mean(loss) self.assertIsNotNone(avg_loss) print(str(default_main_program()))","def test_nce ( self ) : window_size = 5 words = [ ] for i in range ( window_size ) : words . append ( layers . 
data ( name = ""word_{0}"" . format ( i ) , shape = [ 1 ] , dtype = ""int64"" ) ) dict_size = 10000 label_word = int ( window_size // 2 ) + 1 embs = [ ] for i in range ( window_size ) : continue emb = layers . embedding ( input = words [ i ] , size = [ dict_size , 32 ] , param_attr = ""emb.w"" , is_sparse = True ) embs . append ( emb ) embs = layers . concat ( input = embs , axis = 1 ) loss = layers . nce ( input = embs , label = words [ label_word ] , num_total_classes = dict_size , param_attr = ""nce.w"" , bias_attr = ""nce.b"" , ) avg_loss = layers . mean ( loss ) self . assertIsNotNone ( avg_loss ) print ( str ( default_main_program ( ) ) )",if i == label_word :,286 7138,"def create_if_compatible(cls, typ: Type, *, root: ""RootNode"") -> Optional[""Node""]: if cls.compatible_types: target_type: Type = typ if cls.use_origin: target_type = getattr(typ, ""__origin__"", None) or typ if cls._issubclass(target_type, cls.compatible_types): return cls(typ, root=root) return None ","def create_if_compatible ( cls , typ : Type , * , root : ""RootNode"" ) -> Optional [ ""Node"" ] : if cls . compatible_types : target_type : Type = typ target_type = getattr ( typ , ""__origin__"" , None ) or typ if cls . _issubclass ( target_type , cls . compatible_types ) : return cls ( typ , root = root ) return None",if cls . use_origin :,109 18737,"def generator(): """"""Yields mutations."""""" if not self.is_attribute_of_class or not first_posarg or not substs: return try: inst = abstract_utils.get_atomic_value(first_posarg, Instance) except abstract_utils.ConversionError: return if inst.cls.template: for subst in substs: for k, v in subst.items(): if k in inst.instance_type_parameters: value = inst.instance_type_parameters[k].AssignToNewVariable(node) value.PasteVariable(v, node) yield function.Mutation(inst, k, value) ","def generator ( ) : """"""Yields mutations."""""" if not self . is_attribute_of_class or not first_posarg or not substs : return try : inst = abstract_utils . get_atomic_value ( first_posarg , Instance ) except abstract_utils . ConversionError : return if inst . cls . template : for subst in substs : for k , v in subst . items ( ) : value = inst . instance_type_parameters [ k ] . AssignToNewVariable ( node ) value . PasteVariable ( v , node ) yield function . Mutation ( inst , k , value )",if k in inst . instance_type_parameters :,184 1988,"def set_sequences(self, sequences): """"""Set sequences using the given name-to-key-list dictionary."""""" f = open(os.path.join(self._path, "".mh_sequences""), ""r+"", encoding=""ASCII"") try: os.close(os.open(f.name, os.O_WRONLY | os.O_TRUNC)) for name, keys in sequences.items(): if len(keys) == 0: continue f.write(name + "":"") prev = None completing = False for key in sorted(set(keys)): if key - 1 == prev: if not completing: completing = True f.write(""-"") elif completing: completing = False f.write(""%s %s"" % (prev, key)) else: f.write("" %s"" % key) prev = key if completing: f.write(str(prev) + ""\n"") else: f.write(""\n"") finally: _sync_close(f)","def set_sequences ( self , sequences ) : """"""Set sequences using the given name-to-key-list dictionary."""""" f = open ( os . path . join ( self . _path , "".mh_sequences"" ) , ""r+"" , encoding = ""ASCII"" ) try : os . close ( os . open ( f . name , os . O_WRONLY | os . O_TRUNC ) ) for name , keys in sequences . items ( ) : if len ( keys ) == 0 : continue f . write ( name + "":"" ) prev = None completing = False for key in sorted ( set ( keys ) ) : if not completing : completing = True f . 
write ( ""-"" ) elif completing : completing = False f . write ( ""%s %s"" % ( prev , key ) ) else : f . write ( "" %s"" % key ) prev = key if completing : f . write ( str ( prev ) + ""\n"" ) else : f . write ( ""\n"" ) finally : _sync_close ( f )",if key - 1 == prev :,328 2232,"def on_load_status_changed(self, widget, *args): if widget.get_property(""load-status"") == WebKit.LoadStatus.FINISHED: self._go_back_button.set_sensitive(widget.can_go_back()) self._forward_button.set_sensitive(widget.can_go_forward()) self.on_size_allocate(widget) # TODO enable when it will not crash # self._webview.save_cache() if self.is_loaded == False: self.is_loaded = True self.emit(""loaded"") ","def on_load_status_changed ( self , widget , * args ) : if widget . get_property ( ""load-status"" ) == WebKit . LoadStatus . FINISHED : self . _go_back_button . set_sensitive ( widget . can_go_back ( ) ) self . _forward_button . set_sensitive ( widget . can_go_forward ( ) ) self . on_size_allocate ( widget ) self . is_loaded = True self . emit ( ""loaded"" )",if self . is_loaded == False :,153 22581,"def _get_parents_data(self, data): parents = 0 if data[COLUMN_PARENT]: family = self.db.get_family_from_handle(data[COLUMN_PARENT][0]) if family.get_father_handle(): parents += 1 if family.get_mother_handle(): parents += 1 return parents","def _get_parents_data ( self , data ) : parents = 0 if data [ COLUMN_PARENT ] : family = self . db . get_family_from_handle ( data [ COLUMN_PARENT ] [ 0 ] ) if family . get_father_handle ( ) : parents += 1 parents += 1 return parents",if family . get_mother_handle ( ) :,98 19847,"def jobFinished(self, job): logger.debug(""job %s finished"", job.id) if job.id in self.activeJobs: self.last_finish_time = time.time() del self.activeJobs[job.id] self.activeJobsQueue.remove(job) for tid in self.jobTasks[job.id]: self.driver.killTask(Dict(value=tid)) del self.jobTasks[job.id] if not self.activeJobs: self.agentTasks.clear() for tid, jid in six.iteritems(self.taskIdToJobId): if jid not in self.activeJobs: logger.debug(""kill task %s, because it is orphan"", tid) self.driver.killTask(Dict(value=tid)) ","def jobFinished ( self , job ) : logger . debug ( ""job %s finished"" , job . id ) if job . id in self . activeJobs : self . last_finish_time = time . time ( ) del self . activeJobs [ job . id ] self . activeJobsQueue . remove ( job ) for tid in self . jobTasks [ job . id ] : self . driver . killTask ( Dict ( value = tid ) ) del self . jobTasks [ job . id ] self . agentTasks . clear ( ) for tid , jid in six . iteritems ( self . taskIdToJobId ) : if jid not in self . activeJobs : logger . debug ( ""kill task %s, because it is orphan"" , tid ) self . driver . killTask ( Dict ( value = tid ) )",if not self . activeJobs :,221 6788,"def _validate_tag_field(value): # Valid field: # - [""tag1"", ""tag2"", ""tag3""...] # - [""tag1"", [""tag2"", None], [""tag3"", ""#ccc""], [tag4, #cccccc]...] for tag in value: if isinstance(tag, str): continue if isinstance(tag, (list, tuple)) and len(tag) == 2: name = tag[0] color = tag[1] if isinstance(name, str): if color is None or color == """": continue if isinstance(color, str) and re.match( ""^\#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$"", color ): continue raise ValidationError( _( ""Invalid tag '{value}'. The color is not a "" ""valid HEX color or null."" ).format(value=tag) ) raise ValidationError( _( ""Invalid tag '{value}'. it must be the name or a pair "" '\'[""name"", ""hex color/"" | null]\'.' 
).format(value=tag) )","def _validate_tag_field ( value ) : for tag in value : if isinstance ( tag , str ) : continue if isinstance ( tag , ( list , tuple ) ) and len ( tag ) == 2 : name = tag [ 0 ] color = tag [ 1 ] if color is None or color == """" : continue if isinstance ( color , str ) and re . match ( ""^\#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$"" , color ) : continue raise ValidationError ( _ ( ""Invalid tag '{value}'. The color is not a "" ""valid HEX color or null."" ) . format ( value = tag ) ) raise ValidationError ( _ ( ""Invalid tag '{value}'. it must be the name or a pair "" '\'[""name"", ""hex color/"" | null]\'.' ) . format ( value = tag ) )","if isinstance ( name , str ) :",363 15990,"def _process_dataloader_aggregated_steps(result, weights): internal_keys = {""meta""} moved = False for k, v in result.items(): if k in internal_keys: continue # make sure v is a tensor if not isinstance(v, torch.Tensor): v = torch.tensor(v) # move to memory only once if not moved: weights = weights.to(v.device) moved = True # move weights to same device as value to reduce weights_t = weights[: v.size(0)] # weighted mean numerator = torch.dot(v.float(), weights_t.transpose(-1, 0).float()) v = numerator / weights.sum().float() result[k] = v","def _process_dataloader_aggregated_steps ( result , weights ) : internal_keys = { ""meta"" } moved = False for k , v in result . items ( ) : continue if not isinstance ( v , torch . Tensor ) : v = torch . tensor ( v ) if not moved : weights = weights . to ( v . device ) moved = True weights_t = weights [ : v . size ( 0 ) ] numerator = torch . dot ( v . float ( ) , weights_t . transpose ( - 1 , 0 ) . float ( ) ) v = numerator / weights . sum ( ) . float ( ) result [ k ] = v",if k in internal_keys :,209 8196,"def __call__(cls, *args, **kwargs): obj = cls.__new__(cls, *args, **kwargs) from .keras_model import KerasModel if issubclass(cls, KerasModel): from tensorflow.keras import backend as K if K.backend() != ""tensorflow"": obj.__init__(*args, **kwargs) return obj K.clear_session() obj.graph = tf.Graph() with obj.graph.as_default(): if hasattr(cls, ""_config_session""): obj.sess = cls._config_session() else: obj.sess = tf.Session() else: obj.graph = tf.Graph() for meth in dir(obj): if meth == ""__class__"": continue attr = getattr(obj, meth) if callable(attr): if issubclass(cls, KerasModel): wrapped_attr = _keras_wrap(attr, obj.sess) else: wrapped_attr = _graph_wrap(attr, obj.graph) setattr(obj, meth, wrapped_attr) obj.__init__(*args, **kwargs) return obj ","def __call__ ( cls , * args , ** kwargs ) : obj = cls . __new__ ( cls , * args , ** kwargs ) from . keras_model import KerasModel if issubclass ( cls , KerasModel ) : from tensorflow . keras import backend as K if K . backend ( ) != ""tensorflow"" : obj . __init__ ( * args , ** kwargs ) return obj K . clear_session ( ) obj . graph = tf . Graph ( ) with obj . graph . as_default ( ) : obj . sess = cls . _config_session ( ) else : obj . sess = tf . Session ( ) else : obj . graph = tf . Graph ( ) for meth in dir ( obj ) : if meth == ""__class__"" : continue attr = getattr ( obj , meth ) if callable ( attr ) : if issubclass ( cls , KerasModel ) : wrapped_attr = _keras_wrap ( attr , obj . sess ) else : wrapped_attr = _graph_wrap ( attr , obj . graph ) setattr ( obj , meth , wrapped_attr ) obj . 
__init__ ( * args , ** kwargs ) return obj","if hasattr ( cls , ""_config_session"" ) :",315 21093,"def __getattr__(self, attr): if attr.startswith(""_""): raise AttributeError(attr) for name in self: if name == attr: break else: raise AttributeError(""No partition %r"" % attr) path = os.path.join(self.by_name_dir, name) with context.quiet: # Find the actual path of the device devpath = readlink(path) devname = os.path.basename(devpath) # Get the size of the partition for blocks, name in self.iter_proc_partitions(): if name in (devname, attr): break else: log.error(""Could not find size of partition %r"" % name) return Partition(devpath, attr, int(blocks)) ","def __getattr__ ( self , attr ) : if attr . startswith ( ""_"" ) : raise AttributeError ( attr ) for name in self : break else : raise AttributeError ( ""No partition %r"" % attr ) path = os . path . join ( self . by_name_dir , name ) with context . quiet : devpath = readlink ( path ) devname = os . path . basename ( devpath ) for blocks , name in self . iter_proc_partitions ( ) : if name in ( devname , attr ) : break else : log . error ( ""Could not find size of partition %r"" % name ) return Partition ( devpath , attr , int ( blocks ) )",if name == attr :,210 16510,"def validate(self, value): try: value = [ datetime.datetime.strptime(range, ""%Y-%m-%d %H:%M:%S"") for range in value.split("" to "") ] if (len(value) == 2) and (value[0] <= value[1]): return True else: return False except ValueError: return False","def validate ( self , value ) : try : value = [ datetime . datetime . strptime ( range , ""%Y-%m-%d %H:%M:%S"" ) for range in value . split ( "" to "" ) ] return True else : return False except ValueError : return False",if ( len ( value ) == 2 ) and ( value [ 0 ] <= value [ 1 ] ) :,110 11544,"def _print_cell(self, space, text, align, width, x, y, fg, attr, bg): # Sort out spacing first. if space: self._frame.canvas.print_at(self._space_delimiter * space, x, y, fg, attr, bg) # Now align text, taking into account double space glyphs. paint_text = _enforce_width(text, width, self._frame.canvas.unicode_aware) text_size = self.string_len(str(paint_text)) if text_size < width: # Default does no alignment or padding. buffer_1 = buffer_2 = """" if align == ""<"": buffer_2 = "" "" * (width - text_size) elif align == "">"": buffer_1 = "" "" * (width - text_size) elif align == ""^"": start_len = int((width - text_size) / 2) buffer_1 = "" "" * start_len buffer_2 = "" "" * (width - text_size - start_len) paint_text = paint_text.join([buffer_1, buffer_2]) self._frame.canvas.paint( str(paint_text), x + space, y, fg, attr, bg, colour_map=paint_text.colour_map if hasattr(paint_text, ""colour_map"") else None, )","def _print_cell ( self , space , text , align , width , x , y , fg , attr , bg ) : if space : self . _frame . canvas . print_at ( self . _space_delimiter * space , x , y , fg , attr , bg ) paint_text = _enforce_width ( text , width , self . _frame . canvas . unicode_aware ) text_size = self . string_len ( str ( paint_text ) ) if text_size < width : buffer_1 = buffer_2 = """" if align == ""<"" : buffer_2 = "" "" * ( width - text_size ) elif align == "">"" : buffer_1 = "" "" * ( width - text_size ) start_len = int ( ( width - text_size ) / 2 ) buffer_1 = "" "" * start_len buffer_2 = "" "" * ( width - text_size - start_len ) paint_text = paint_text . join ( [ buffer_1 , buffer_2 ] ) self . _frame . canvas . paint ( str ( paint_text ) , x + space , y , fg , attr , bg , colour_map = paint_text . 
colour_map if hasattr ( paint_text , ""colour_map"" ) else None , )","elif align == ""^"" :",380 20880,"def create_warehouse(warehouse_name, properties=None, company=None): if not company: company = ""_Test Company"" warehouse_id = erpnext.encode_company_abbr(warehouse_name, company) if not frappe.db.exists(""Warehouse"", warehouse_id): warehouse = frappe.new_doc(""Warehouse"") warehouse.warehouse_name = warehouse_name warehouse.parent_warehouse = ""All Warehouses - _TCUV"" warehouse.company = company warehouse.account = get_warehouse_account(warehouse_name, company) if properties: warehouse.update(properties) warehouse.save() return warehouse.name else: return warehouse_id ","def create_warehouse ( warehouse_name , properties = None , company = None ) : if not company : company = ""_Test Company"" warehouse_id = erpnext . encode_company_abbr ( warehouse_name , company ) if not frappe . db . exists ( ""Warehouse"" , warehouse_id ) : warehouse = frappe . new_doc ( ""Warehouse"" ) warehouse . warehouse_name = warehouse_name warehouse . parent_warehouse = ""All Warehouses - _TCUV"" warehouse . company = company warehouse . account = get_warehouse_account ( warehouse_name , company ) warehouse . update ( properties ) warehouse . save ( ) return warehouse . name else : return warehouse_id",if properties :,186 19569,"def test_clifford_circuit(): (q0, q1) = (cirq.LineQubit(0), cirq.LineQubit(1)) circuit = cirq.Circuit() np.random.seed(0) for _ in range(100): x = np.random.randint(7) if x == 0: circuit.append(cirq.X(np.random.choice((q0, q1)))) elif x == 1: circuit.append(cirq.Z(np.random.choice((q0, q1)))) elif x == 2: circuit.append(cirq.Y(np.random.choice((q0, q1)))) elif x == 3: circuit.append(cirq.S(np.random.choice((q0, q1)))) elif x == 4: circuit.append(cirq.H(np.random.choice((q0, q1)))) elif x == 5: circuit.append(cirq.CNOT(q0, q1)) elif x == 6: circuit.append(cirq.CZ(q0, q1)) clifford_simulator = cirq.CliffordSimulator() state_vector_simulator = cirq.Simulator() np.testing.assert_almost_equal( clifford_simulator.simulate(circuit).final_state.state_vector(), state_vector_simulator.simulate(circuit).final_state_vector, )","def test_clifford_circuit ( ) : ( q0 , q1 ) = ( cirq . LineQubit ( 0 ) , cirq . LineQubit ( 1 ) ) circuit = cirq . Circuit ( ) np . random . seed ( 0 ) for _ in range ( 100 ) : x = np . random . randint ( 7 ) if x == 0 : circuit . append ( cirq . X ( np . random . choice ( ( q0 , q1 ) ) ) ) circuit . append ( cirq . Z ( np . random . choice ( ( q0 , q1 ) ) ) ) elif x == 2 : circuit . append ( cirq . Y ( np . random . choice ( ( q0 , q1 ) ) ) ) elif x == 3 : circuit . append ( cirq . S ( np . random . choice ( ( q0 , q1 ) ) ) ) elif x == 4 : circuit . append ( cirq . H ( np . random . choice ( ( q0 , q1 ) ) ) ) elif x == 5 : circuit . append ( cirq . CNOT ( q0 , q1 ) ) elif x == 6 : circuit . append ( cirq . CZ ( q0 , q1 ) ) clifford_simulator = cirq . CliffordSimulator ( ) state_vector_simulator = cirq . Simulator ( ) np . testing . assert_almost_equal ( clifford_simulator . simulate ( circuit ) . final_state . state_vector ( ) , state_vector_simulator . simulate ( circuit ) . 
final_state_vector , )",elif x == 1 :,400 22004,"def _find_localserver_module(): import win32com.server path = win32com.server.__path__[0] baseName = ""localserver"" pyfile = os.path.join(path, baseName + "".py"") try: os.stat(pyfile) except os.error: # See if we have a compiled extension if __debug__: ext = "".pyc"" else: ext = "".pyo"" pyfile = os.path.join(path, baseName + ext) try: os.stat(pyfile) except os.error: raise RuntimeError( ""Can not locate the Python module 'win32com.server.%s'"" % baseName ) return pyfile","def _find_localserver_module ( ) : import win32com . server path = win32com . server . __path__ [ 0 ] baseName = ""localserver"" pyfile = os . path . join ( path , baseName + "".py"" ) try : os . stat ( pyfile ) except os . error : ext = "".pyc"" else : ext = "".pyo"" pyfile = os . path . join ( path , baseName + ext ) try : os . stat ( pyfile ) except os . error : raise RuntimeError ( ""Can not locate the Python module 'win32com.server.%s'"" % baseName ) return pyfile",if __debug__ :,202 6640,"def conv2d( input: PTensor, weight: PTensor, bias: PTensor = None, stride=1, padding=0, dilation=1, groups=1, mode=None, ): """"""Standard conv2d. Returns the input if weight=None."""""" if weight is None: return input ind = None if mode is not None: if padding != 0: raise ValueError(""Cannot input both padding and mode."") if mode == ""same"": padding = (weight.shape[2] // 2, weight.shape[3] // 2) if weight.shape[2] % 2 == 0 or weight.shape[3] % 2 == 0: ind = ( slice(-1) if weight.shape[2] % 2 == 0 else slice(None), slice(-1) if weight.shape[3] % 2 == 0 else slice(None), ) elif mode == ""valid"": padding = (0, 0) elif mode == ""full"": padding = (weight.shape[2] - 1, weight.shape[3] - 1) else: raise ValueError(""Unknown mode for padding."") assert bias is None out = FConv2D( input, weight, stride=stride, padding=padding, dilation=dilation, groups=groups ) if ind is None: return out return out[:, :, ind[0], ind[1]]","def conv2d ( input : PTensor , weight : PTensor , bias : PTensor = None , stride = 1 , padding = 0 , dilation = 1 , groups = 1 , mode = None , ) : """"""Standard conv2d. Returns the input if weight=None."""""" if weight is None : return input ind = None if mode is not None : if padding != 0 : raise ValueError ( ""Cannot input both padding and mode."" ) if mode == ""same"" : padding = ( weight . shape [ 2 ] // 2 , weight . shape [ 3 ] // 2 ) ind = ( slice ( - 1 ) if weight . shape [ 2 ] % 2 == 0 else slice ( None ) , slice ( - 1 ) if weight . shape [ 3 ] % 2 == 0 else slice ( None ) , ) elif mode == ""valid"" : padding = ( 0 , 0 ) elif mode == ""full"" : padding = ( weight . shape [ 2 ] - 1 , weight . shape [ 3 ] - 1 ) else : raise ValueError ( ""Unknown mode for padding."" ) assert bias is None out = FConv2D ( input , weight , stride = stride , padding = padding , dilation = dilation , groups = groups ) if ind is None : return out return out [ : , : , ind [ 0 ] , ind [ 1 ] ]",if weight . shape [ 2 ] % 2 == 0 or weight . shape [ 3 ] % 2 == 0 :,387 16909,"def remove_testcases_from_directories(directories): """"""Removes all testcases and their dependencies from testcase directories."""""" generators = [] for directory in directories: if not directory.strip(): continue # If there is a bot-specific files list, delete it now. 
bot_testcases_file_path = utils.get_bot_testcases_file_path(directory) shell.remove_file(bot_testcases_file_path) generators.append(shell.walk(directory)) for generator in generators: for structure in generator: base_directory = structure[0] for filename in structure[2]: if not is_testcase_resource(filename): continue if filename.startswith(RESOURCES_PREFIX): # In addition to removing this file, remove all resources. resources_file_path = os.path.join(base_directory, filename) resources = read_resource_list(resources_file_path) for resource in resources: shell.remove_file(resource) file_path = os.path.join(base_directory, filename) shell.remove_file(file_path)","def remove_testcases_from_directories ( directories ) : """"""Removes all testcases and their dependencies from testcase directories."""""" generators = [ ] for directory in directories : continue bot_testcases_file_path = utils . get_bot_testcases_file_path ( directory ) shell . remove_file ( bot_testcases_file_path ) generators . append ( shell . walk ( directory ) ) for generator in generators : for structure in generator : base_directory = structure [ 0 ] for filename in structure [ 2 ] : if not is_testcase_resource ( filename ) : continue if filename . startswith ( RESOURCES_PREFIX ) : resources_file_path = os . path . join ( base_directory , filename ) resources = read_resource_list ( resources_file_path ) for resource in resources : shell . remove_file ( resource ) file_path = os . path . join ( base_directory , filename ) shell . remove_file ( file_path )",if not directory . strip ( ) :,323 8232,"def test_one_dead_branch(): with deterministic_PRNG(): seen = set() @run_to_buffer def x(data): i = data.draw_bytes(1)[0] if i > 0: data.mark_invalid() i = data.draw_bytes(1)[0] if len(seen) < 255: seen.add(i) elif i not in seen: data.mark_interesting()",def test_one_dead_branch ( ) : with deterministic_PRNG ( ) : seen = set ( ) @ run_to_buffer def x ( data ) : i = data . draw_bytes ( 1 ) [ 0 ] if i > 0 : data . mark_invalid ( ) i = data . draw_bytes ( 1 ) [ 0 ] seen . add ( i ) elif i not in seen : data . mark_interesting ( ),if len ( seen ) < 255 :,138 18291,"def _(value): retVal = value if value and isinstance(value, basestring) and len(value) % 2 == 0: retVal = hexdecode(retVal) if not kb.binaryField: if Backend.isDbms(DBMS.MSSQL) and value.startswith(""0x""): try: retVal = retVal.decode(""utf-16-le"") except UnicodeDecodeError: pass elif Backend.isDbms(DBMS.HSQLDB): try: retVal = retVal.decode(""utf-16-be"") except UnicodeDecodeError: pass if not isinstance(retVal, unicode): retVal = getUnicode(retVal, ""utf8"") return retVal","def _ ( value ) : retVal = value if value and isinstance ( value , basestring ) and len ( value ) % 2 == 0 : retVal = hexdecode ( retVal ) if not kb . binaryField : if Backend . isDbms ( DBMS . MSSQL ) and value . startswith ( ""0x"" ) : try : retVal = retVal . decode ( ""utf-16-le"" ) except UnicodeDecodeError : pass try : retVal = retVal . decode ( ""utf-16-be"" ) except UnicodeDecodeError : pass if not isinstance ( retVal , unicode ) : retVal = getUnicode ( retVal , ""utf8"" ) return retVal",elif Backend . isDbms ( DBMS . 
HSQLDB ) :,218 11052,"def mapping(self): m = {} if getGdriveCredentialsFile() is not None: m[""gdrive""] = """" unknown = 0 for f in self.scan: bits = f.split(""#"", 2) if len(bits) == 1: label = os.path.basename(f) else: label = bits[1] if not label or len(label) == 0 or label == """": label = ""L"" + str(unknown) unknown += 1 m[label] = bits[0] return m","def mapping ( self ) : m = { } if getGdriveCredentialsFile ( ) is not None : m [ ""gdrive"" ] = """" unknown = 0 for f in self . scan : bits = f . split ( ""#"" , 2 ) if len ( bits ) == 1 : label = os . path . basename ( f ) else : label = bits [ 1 ] label = ""L"" + str ( unknown ) unknown += 1 m [ label ] = bits [ 0 ] return m","if not label or len ( label ) == 0 or label == """" :",153 6139,"def update_schedulers(self, start=False): applications_folder = os.path.join(self.options.folder, ""applications"") available_apps = [ arq for arq in os.listdir(applications_folder) if os.path.isdir(os.path.join(applications_folder, arq)) ] with self.scheduler_processes_lock: # reset the menu # since applications can disappear (be disinstalled) must # clear the menu (should use tkinter.END or tkinter.LAST) self.schedmenu.delete(0, ""end"") for arq in available_apps: if arq not in self.scheduler_processes: item = lambda a=arq: self.try_start_scheduler(a) self.schedmenu.add_command(label=""start %s"" % arq, command=item) if arq in self.scheduler_processes: item = lambda a=arq: self.try_stop_scheduler(a) self.schedmenu.add_command(label=""stop %s"" % arq, command=item) if start and self.options.with_scheduler and self.options.schedulers: # the widget takes care of starting the schedulers apps = [ag.split("":"", 1)[0] for ag in self.options.schedulers] else: apps = [] for app in apps: self.try_start_scheduler(app)","def update_schedulers ( self , start = False ) : applications_folder = os . path . join ( self . options . folder , ""applications"" ) available_apps = [ arq for arq in os . listdir ( applications_folder ) if os . path . isdir ( os . path . join ( applications_folder , arq ) ) ] with self . scheduler_processes_lock : self . schedmenu . delete ( 0 , ""end"" ) for arq in available_apps : if arq not in self . scheduler_processes : item = lambda a = arq : self . try_start_scheduler ( a ) self . schedmenu . add_command ( label = ""start %s"" % arq , command = item ) item = lambda a = arq : self . try_stop_scheduler ( a ) self . schedmenu . add_command ( label = ""stop %s"" % arq , command = item ) if start and self . options . with_scheduler and self . options . schedulers : apps = [ ag . split ( "":"" , 1 ) [ 0 ] for ag in self . options . schedulers ] else : apps = [ ] for app in apps : self . try_start_scheduler ( app )",if arq in self . scheduler_processes :,380 12059,"def TryMerge(self, d): while d.avail() > 0: tt = d.getVarInt32() if tt == 10: self.set_socket_descriptor(d.getPrefixedString()) continue if tt == 16: self.set_requested_events(d.getVarInt32()) continue if tt == 24: self.set_observed_events(d.getVarInt32()) continue if tt == 0: raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt)","def TryMerge ( self , d ) : while d . avail ( ) > 0 : tt = d . getVarInt32 ( ) if tt == 10 : self . set_socket_descriptor ( d . getPrefixedString ( ) ) continue if tt == 16 : self . set_requested_events ( d . getVarInt32 ( ) ) continue if tt == 24 : self . set_observed_events ( d . getVarInt32 ( ) ) continue raise ProtocolBuffer . ProtocolBufferDecodeError d . 
skipData ( tt )",if tt == 0 :,156 9660,"def test_adjust_random_hue_in_yiq(): x_shapes = [ [2, 2, 3], [4, 2, 3], [2, 4, 3], [2, 5, 3], [1000, 1, 3], ] test_styles = [ ""all_random"", ""rg_same"", ""rb_same"", ""gb_same"", ""rgb_same"", ] for x_shape in x_shapes: for test_style in test_styles: x_np = np.random.rand(*x_shape) * 255.0 delta_h = (np.random.rand() * 2.0 - 1.0) * np.pi if test_style == ""all_random"": pass elif test_style == ""rg_same"": x_np[..., 1] = x_np[..., 0] elif test_style == ""rb_same"": x_np[..., 2] = x_np[..., 0] elif test_style == ""gb_same"": x_np[..., 2] = x_np[..., 1] elif test_style == ""rgb_same"": x_np[..., 1] = x_np[..., 0] x_np[..., 2] = x_np[..., 0] else: raise AssertionError(""Invalid test style: %s"" % (test_style)) y_np = _adjust_hue_in_yiq_np(x_np, delta_h) y_tf = _adjust_hue_in_yiq_tf(x_np, delta_h) np.testing.assert_allclose(y_tf, y_np, rtol=2e-4, atol=1e-4)","def test_adjust_random_hue_in_yiq ( ) : x_shapes = [ [ 2 , 2 , 3 ] , [ 4 , 2 , 3 ] , [ 2 , 4 , 3 ] , [ 2 , 5 , 3 ] , [ 1000 , 1 , 3 ] , ] test_styles = [ ""all_random"" , ""rg_same"" , ""rb_same"" , ""gb_same"" , ""rgb_same"" , ] for x_shape in x_shapes : for test_style in test_styles : x_np = np . random . rand ( * x_shape ) * 255.0 delta_h = ( np . random . rand ( ) * 2.0 - 1.0 ) * np . pi if test_style == ""all_random"" : pass elif test_style == ""rg_same"" : x_np [ ... , 1 ] = x_np [ ... , 0 ] x_np [ ... , 2 ] = x_np [ ... , 0 ] elif test_style == ""gb_same"" : x_np [ ... , 2 ] = x_np [ ... , 1 ] elif test_style == ""rgb_same"" : x_np [ ... , 1 ] = x_np [ ... , 0 ] x_np [ ... , 2 ] = x_np [ ... , 0 ] else : raise AssertionError ( ""Invalid test style: %s"" % ( test_style ) ) y_np = _adjust_hue_in_yiq_np ( x_np , delta_h ) y_tf = _adjust_hue_in_yiq_tf ( x_np , delta_h ) np . testing . assert_allclose ( y_tf , y_np , rtol = 2e-4 , atol = 1e-4 )","elif test_style == ""rb_same"" :",481 20393,"def _get_current_status(self): if self.source: if self.current_job and self.current_job.status: return self.current_job.status elif not self.last_job: return ""never updated"" # inherit the child job status else: return self.last_job.status else: return ""none"" ","def _get_current_status ( self ) : if self . source : return self . current_job . status elif not self . last_job : return ""never updated"" else : return self . last_job . status else : return ""none""",if self . current_job and self . current_job . status :,105 21909,"def test_summary(): if debug_mode: if ""summary"" not in to_test: # pragma: no cover return else: print(""\n\nSUMMARY"", end="""") for ds in datasets: for dt in ds.dt_s_list: if debug_mode: print(""\n"" + dt_s_tup_to_string(dt) + "": "", end="""") # see if summary gets printed results_sm[ds][dt].summary(alpha=0.05) exog = results_sm_exog[ds][dt].exog is not None if exog is not None: results_sm_exog[ds][dt].summary(alpha=0.05) exog_coint = results_sm_exog_coint[ds][dt].exog_coint is not None if exog_coint is not None: results_sm_exog_coint[ds][dt].summary(alpha=0.05)","def test_summary ( ) : if debug_mode : if ""summary"" not in to_test : return else : print ( ""\n\nSUMMARY"" , end = """" ) for ds in datasets : for dt in ds . dt_s_list : if debug_mode : print ( ""\n"" + dt_s_tup_to_string ( dt ) + "": "" , end = """" ) results_sm [ ds ] [ dt ] . summary ( alpha = 0.05 ) exog = results_sm_exog [ ds ] [ dt ] . exog is not None results_sm_exog [ ds ] [ dt ] . summary ( alpha = 0.05 ) exog_coint = results_sm_exog_coint [ ds ] [ dt ] . 
exog_coint is not None if exog_coint is not None : results_sm_exog_coint [ ds ] [ dt ] . summary ( alpha = 0.05 )",if exog is not None :,263 7037,"def test_socketserver(self): """"""Using socketserver to create and manage SSL connections."""""" server = make_https_server(self, certfile=CERTFILE) # try to connect if support.verbose: sys.stdout.write(""\n"") with open(CERTFILE, ""rb"") as f: d1 = f.read() d2 = """" # now fetch the same data from the HTTPS server url = ""https://localhost:%d/%s"" % (server.port, os.path.split(CERTFILE)[1]) context = ssl.create_default_context(cafile=CERTFILE) f = urllib.request.urlopen(url, context=context) try: dlen = f.info().get(""content-length"") if dlen and (int(dlen) > 0): d2 = f.read(int(dlen)) if support.verbose: sys.stdout.write( "" client: read %d bytes from remote server '%s'\n"" % (len(d2), server) ) finally: f.close() self.assertEqual(d1, d2)","def test_socketserver ( self ) : """"""Using socketserver to create and manage SSL connections."""""" server = make_https_server ( self , certfile = CERTFILE ) sys . stdout . write ( ""\n"" ) with open ( CERTFILE , ""rb"" ) as f : d1 = f . read ( ) d2 = """" url = ""https://localhost:%d/%s"" % ( server . port , os . path . split ( CERTFILE ) [ 1 ] ) context = ssl . create_default_context ( cafile = CERTFILE ) f = urllib . request . urlopen ( url , context = context ) try : dlen = f . info ( ) . get ( ""content-length"" ) if dlen and ( int ( dlen ) > 0 ) : d2 = f . read ( int ( dlen ) ) sys . stdout . write ( "" client: read %d bytes from remote server '%s'\n"" % ( len ( d2 ) , server ) ) finally : f . close ( ) self . assertEqual ( d1 , d2 )",if support . verbose :,305 10206,"def lex_number(self, pos): # numeric literal start = pos found_dot = False while pos < len(self.string) and ( self.string[pos].isdigit() or self.string[pos] == ""."" ): if self.string[pos] == ""."": if found_dot is True: raise ValueError(""Invalid number. Found multiple '.'"") found_dot = True # technically we allow more than one ""."" and let float()'s parsing # complain later pos += 1 val = self.string[start:pos] return Token(TokenType.LNUM, val, len(val))","def lex_number ( self , pos ) : start = pos found_dot = False while pos < len ( self . string ) and ( self . string [ pos ] . isdigit ( ) or self . string [ pos ] == ""."" ) : if found_dot is True : raise ValueError ( ""Invalid number. Found multiple '.'"" ) found_dot = True pos += 1 val = self . string [ start : pos ] return Token ( TokenType . LNUM , val , len ( val ) )","if self . string [ pos ] == ""."" :",168 1027,"def process_deps(pipe, pkg, pkgdest, provides, requires): file = None for line in pipe.split(""\n""): m = file_re.match(line) if m: file = m.group(1) file = file.replace(pkgdest + ""/"" + pkg, """") file = file_translate(file) continue m = dep_re.match(line) if not m or not file: continue type, dep = m.groups() if type == ""R"": i = requires elif type == ""P"": i = provides else: continue if dep.startswith(""python(""): continue # Ignore all perl(VMS::...) and perl(Mac::...) dependencies. These # are typically used conditionally from the Perl code, but are # generated as unconditional dependencies. if dep.startswith(""perl(VMS::"") or dep.startswith(""perl(Mac::""): continue # Ignore perl dependencies on .pl files. if dep.startswith(""perl("") and dep.endswith("".pl)""): continue # Remove perl versions and perl module versions since they typically # do not make sense when used as package versions. 
if dep.startswith(""perl"") and r.search(dep): dep = dep.split()[0] # Put parentheses around any version specifications. dep = r.sub(r""(\g<0>)"", dep) if file not in i: i[file] = [] i[file].append(dep) return provides, requires","def process_deps ( pipe , pkg , pkgdest , provides , requires ) : file = None for line in pipe . split ( ""\n"" ) : m = file_re . match ( line ) if m : file = m . group ( 1 ) file = file . replace ( pkgdest + ""/"" + pkg , """" ) file = file_translate ( file ) continue m = dep_re . match ( line ) if not m or not file : continue type , dep = m . groups ( ) if type == ""R"" : i = requires i = provides else : continue if dep . startswith ( ""python("" ) : continue if dep . startswith ( ""perl(VMS::"" ) or dep . startswith ( ""perl(Mac::"" ) : continue if dep . startswith ( ""perl("" ) and dep . endswith ( "".pl)"" ) : continue if dep . startswith ( ""perl"" ) and r . search ( dep ) : dep = dep . split ( ) [ 0 ] dep = r . sub ( r""(\g<0>)"" , dep ) if file not in i : i [ file ] = [ ] i [ file ] . append ( dep ) return provides , requires","elif type == ""P"" :",437 1386,"def translate(self): if self.offset: raise RuntimeError(""Parser is a one time instance."") while True: m = self.re_split.search(self.source[self.offset :]) if m: text = self.source[self.offset : self.offset + m.start()] self.text_buffer.append(text) self.offset += m.end() if m.group(1): # New escape syntax line, sep, _ = self.source[self.offset :].partition(""\n"") self.text_buffer.append(m.group(2) + m.group(5) + line + sep) self.offset += len(line + sep) + 1 continue elif m.group(5): # Old escape syntax depr(""Escape code lines with a backslash."") # 0.12 line, sep, _ = self.source[self.offset :].partition(""\n"") self.text_buffer.append(m.group(2) + line + sep) self.offset += len(line + sep) + 1 continue self.flush_text() self.read_code(multiline=bool(m.group(4))) else: break self.text_buffer.append(self.source[self.offset :]) self.flush_text() return """".join(self.code_buffer)","def translate ( self ) : if self . offset : raise RuntimeError ( ""Parser is a one time instance."" ) while True : m = self . re_split . search ( self . source [ self . offset : ] ) if m : text = self . source [ self . offset : self . offset + m . start ( ) ] self . text_buffer . append ( text ) self . offset += m . end ( ) line , sep , _ = self . source [ self . offset : ] . partition ( ""\n"" ) self . text_buffer . append ( m . group ( 2 ) + m . group ( 5 ) + line + sep ) self . offset += len ( line + sep ) + 1 continue elif m . group ( 5 ) : depr ( ""Escape code lines with a backslash."" ) line , sep , _ = self . source [ self . offset : ] . partition ( ""\n"" ) self . text_buffer . append ( m . group ( 2 ) + line + sep ) self . offset += len ( line + sep ) + 1 continue self . flush_text ( ) self . read_code ( multiline = bool ( m . group ( 4 ) ) ) else : break self . text_buffer . append ( self . source [ self . offset : ] ) self . flush_text ( ) return """" . join ( self . code_buffer )",if m . 
group ( 1 ) :,383 18177,"def test_invite_generation(event, default_account): from inbox.events.ical import generate_icalendar_invite event.sequence_number = 1 event.participants = [{""email"": ""helena@nylas.com""}, {""email"": ""myles@nylas.com""}] cal = generate_icalendar_invite(event) assert cal[""method""] == ""REQUEST"" for component in cal.walk(): if component.name == ""VEVENT"": assert component.get(""summary"") == event.title assert int(component.get(""sequence"")) == event.sequence_number assert component.get(""location"") == event.location attendees = component.get(""attendee"", []) # the iCalendar python module doesn't return a list when # there's only one attendee. Go figure. if not isinstance(attendees, list): attendees = [attendees] for attendee in attendees: email = unicode(attendee) # strip mailto: if it exists if email.lower().startswith(""mailto:""): email = email[7:] assert email in [""helena@nylas.com"", ""myles@nylas.com""]","def test_invite_generation ( event , default_account ) : from inbox . events . ical import generate_icalendar_invite event . sequence_number = 1 event . participants = [ { ""email"" : ""helena@nylas.com"" } , { ""email"" : ""myles@nylas.com"" } ] cal = generate_icalendar_invite ( event ) assert cal [ ""method"" ] == ""REQUEST"" for component in cal . walk ( ) : if component . name == ""VEVENT"" : assert component . get ( ""summary"" ) == event . title assert int ( component . get ( ""sequence"" ) ) == event . sequence_number assert component . get ( ""location"" ) == event . location attendees = component . get ( ""attendee"" , [ ] ) attendees = [ attendees ] for attendee in attendees : email = unicode ( attendee ) if email . lower ( ) . startswith ( ""mailto:"" ) : email = email [ 7 : ] assert email in [ ""helena@nylas.com"" , ""myles@nylas.com"" ]","if not isinstance ( attendees , list ) :",339 17418,"def remove_duplicate_association(): bind = op.get_bind() session = Session(bind=bind) results = session.query(AssociationTable).all() seen = {} for result in results: if seen.get(""{}-{}"".format(result.user_id, result.account_id)): print( ""[-] Duplicate association marked for deletion: {} - {}"".format( result.user_id, result.account_id ) ) session.delete(result) else: seen[""{}-{}"".format(result.user_id, result.account_id)] = True print(""[-->] Deleting duplicate associations..."") session.commit() session.flush() print(""[@] Deleted all duplicate associations."") ","def remove_duplicate_association ( ) : bind = op . get_bind ( ) session = Session ( bind = bind ) results = session . query ( AssociationTable ) . all ( ) seen = { } for result in results : print ( ""[-] Duplicate association marked for deletion: {} - {}"" . format ( result . user_id , result . account_id ) ) session . delete ( result ) else : seen [ ""{}-{}"" . format ( result . user_id , result . account_id ) ] = True print ( ""[-->] Deleting duplicate associations..."" ) session . commit ( ) session . flush ( ) print ( ""[@] Deleted all duplicate associations."" )","if seen . get ( ""{}-{}"" . format ( result . user_id , result . 
account_id ) ) :",205 389,"def set_meta(self, dataset, overwrite=True, **kwd): """"""Sets the metadata information for datasets previously determined to be in bed format."""""" i = 0 if dataset.has_data(): for i, line in enumerate(open(dataset.file_name)): line = line.rstrip(""\r\n"") if line and not line.startswith(""#""): elems = line.split(""\t"") if len(elems) > 2: if len(elems) > 3: if overwrite or not dataset.metadata.element_is_set(""nameCol""): dataset.metadata.nameCol = 4 if len(elems) < 6: if overwrite or not dataset.metadata.element_is_set( ""strandCol"" ): dataset.metadata.strandCol = 0 else: if overwrite or not dataset.metadata.element_is_set( ""strandCol"" ): dataset.metadata.strandCol = 6 break Tabular.set_meta(self, dataset, overwrite=overwrite, skip=i)","def set_meta ( self , dataset , overwrite = True , ** kwd ) : """"""Sets the metadata information for datasets previously determined to be in bed format."""""" i = 0 if dataset . has_data ( ) : for i , line in enumerate ( open ( dataset . file_name ) ) : line = line . rstrip ( ""\r\n"" ) if line and not line . startswith ( ""#"" ) : elems = line . split ( ""\t"" ) if len ( elems ) > 2 : if len ( elems ) > 3 : dataset . metadata . nameCol = 4 if len ( elems ) < 6 : if overwrite or not dataset . metadata . element_is_set ( ""strandCol"" ) : dataset . metadata . strandCol = 0 else : if overwrite or not dataset . metadata . element_is_set ( ""strandCol"" ) : dataset . metadata . strandCol = 6 break Tabular . set_meta ( self , dataset , overwrite = overwrite , skip = i )","if overwrite or not dataset . metadata . element_is_set ( ""nameCol"" ) :",342 16483,"def __remote_port(self): port = 22 if self.git_has_remote: m = re.match(r""^(.*?)?@([^/:]*):?([0-9]+)?"", self.git_remote.url) if m: if m.group(3): port = m.group(3) return int(port)","def __remote_port ( self ) : port = 22 if self . git_has_remote : m = re . match ( r""^(.*?)?@([^/:]*):?([0-9]+)?"" , self . git_remote . url ) if m . group ( 3 ) : port = m . group ( 3 ) return int ( port )",if m :,94 11472,"def startEntryElement(self, name, qname, attrs): """"""Set new entry with id and the optional entry source (PRIVATE)."""""" if name != (None, ""entry""): raise ValueError(""Expected to find the start of an entry element"") if qname is not None: raise RuntimeError(""Unexpected qname for entry element"") record = SeqRecord("""", id=None) if self.speciesName is not None: record.annotations[""organism""] = self.speciesName if self.ncbiTaxID is not None: record.annotations[""ncbi_taxid""] = self.ncbiTaxID record.annotations[""source""] = self.source for key, value in attrs.items(): namespace, localname = key if namespace is None: if localname == ""id"": record.id = value elif localname == ""source"": record.annotations[""source""] = value else: raise ValueError(""Unexpected attribute %s in entry element"" % localname) else: raise ValueError( ""Unexpected namespace '%s' for entry attribute"" % namespace ) if record.id is None: raise ValueError(""Failed to find entry ID"") self.records.append(record) self.startElementNS = self.startEntryFieldElement self.endElementNS = self.endEntryElement ","def startEntryElement ( self , name , qname , attrs ) : """"""Set new entry with id and the optional entry source (PRIVATE)."""""" if name != ( None , ""entry"" ) : raise ValueError ( ""Expected to find the start of an entry element"" ) if qname is not None : raise RuntimeError ( ""Unexpected qname for entry element"" ) record = SeqRecord ( """" , id = None ) if self . 
speciesName is not None : record . annotations [ ""organism"" ] = self . speciesName if self . ncbiTaxID is not None : record . annotations [ ""ncbi_taxid"" ] = self . ncbiTaxID record . annotations [ ""source"" ] = self . source for key , value in attrs . items ( ) : namespace , localname = key if namespace is None : record . id = value elif localname == ""source"" : record . annotations [ ""source"" ] = value else : raise ValueError ( ""Unexpected attribute %s in entry element"" % localname ) else : raise ValueError ( ""Unexpected namespace '%s' for entry attribute"" % namespace ) if record . id is None : raise ValueError ( ""Failed to find entry ID"" ) self . records . append ( record ) self . startElementNS = self . startEntryFieldElement self . endElementNS = self . endEntryElement","if localname == ""id"" :",343 3723,"def process_error(self, data): if data.get(""error""): if ""denied"" in data[""error""] or ""cancelled"" in data[""error""]: raise AuthCanceled(self, data.get(""error_description"", """")) raise AuthFailed(self, data.get(""error_description"") or data[""error""]) elif ""denied"" in data: raise AuthCanceled(self, data[""denied""]) ","def process_error ( self , data ) : if data . get ( ""error"" ) : raise AuthCanceled ( self , data . get ( ""error_description"" , """" ) ) raise AuthFailed ( self , data . get ( ""error_description"" ) or data [ ""error"" ] ) elif ""denied"" in data : raise AuthCanceled ( self , data [ ""denied"" ] )","if ""denied"" in data [ ""error"" ] or ""cancelled"" in data [ ""error"" ] :",103 21875,"def __init__(self, endog, exog=None, rho=1, missing=""none"", hasconst=None, **kwargs): # this looks strange, interpreting rho as order if it is int if isinstance(rho, (int, np.integer)): self.order = int(rho) self.rho = np.zeros(self.order, np.float64) else: self.rho = np.squeeze(np.asarray(rho)) if len(self.rho.shape) not in [0, 1]: raise ValueError(""AR parameters must be a scalar or a vector"") if self.rho.shape == (): self.rho.shape = (1,) self.order = self.rho.shape[0] if exog is None: # JP this looks wrong, should be a regression on constant # results for rho estimate now identical to yule-walker on y # super(AR, self).__init__(endog, add_constant(endog)) super(GLSAR, self).__init__( endog, np.ones((endog.shape[0], 1)), missing=missing, hasconst=None, **kwargs ) else: super(GLSAR, self).__init__(endog, exog, missing=missing, **kwargs)","def __init__ ( self , endog , exog = None , rho = 1 , missing = ""none"" , hasconst = None , ** kwargs ) : if isinstance ( rho , ( int , np . integer ) ) : self . order = int ( rho ) self . rho = np . zeros ( self . order , np . float64 ) else : self . rho = np . squeeze ( np . asarray ( rho ) ) raise ValueError ( ""AR parameters must be a scalar or a vector"" ) if self . rho . shape == ( ) : self . rho . shape = ( 1 , ) self . order = self . rho . shape [ 0 ] if exog is None : super ( GLSAR , self ) . __init__ ( endog , np . ones ( ( endog . shape [ 0 ] , 1 ) ) , missing = missing , hasconst = None , ** kwargs ) else : super ( GLSAR , self ) . __init__ ( endog , exog , missing = missing , ** kwargs )","if len ( self . rho . shape ) not in [ 0 , 1 ] :",335 3142,"def __exit__(self, type_=None, value=None, traceback=None): reset_Breakpoint() sys.settrace(None) not_empty = """" if self.tracer.set_list: not_empty += ""All paired tuples have not been processed, "" not_empty += ""the last one was number %d"" % self.tracer.expect_set_no # Make a BdbNotExpectedError a unittest failure. 
if type_ is not None and issubclass(BdbNotExpectedError, type_): if isinstance(value, BaseException) and value.args: err_msg = value.args[0] if not_empty: err_msg += ""\n"" + not_empty if self.dry_run: print(err_msg) return True else: self.test_case.fail(err_msg) else: assert False, ""BdbNotExpectedError with empty args"" if not_empty: if self.dry_run: print(not_empty) else: self.test_case.fail(not_empty)","def __exit__ ( self , type_ = None , value = None , traceback = None ) : reset_Breakpoint ( ) sys . settrace ( None ) not_empty = """" if self . tracer . set_list : not_empty += ""All paired tuples have not been processed, "" not_empty += ""the last one was number %d"" % self . tracer . expect_set_no if type_ is not None and issubclass ( BdbNotExpectedError , type_ ) : if isinstance ( value , BaseException ) and value . args : err_msg = value . args [ 0 ] if not_empty : err_msg += ""\n"" + not_empty print ( err_msg ) return True else : self . test_case . fail ( err_msg ) else : assert False , ""BdbNotExpectedError with empty args"" if not_empty : print ( not_empty ) else : self . test_case . fail ( not_empty )",if self . dry_run :,305 7969,"def __init__(self, addr, conf, log, fd=None): if fd is None: try: st = os.stat(addr) except OSError as e: if e.args[0] != errno.ENOENT: raise else: if stat.S_ISSOCK(st.st_mode): os.remove(addr) else: raise ValueError(""%r is not a socket"" % addr) super(UnixSocket, self).__init__(addr, conf, log, fd=fd)","def __init__ ( self , addr , conf , log , fd = None ) : if fd is None : try : st = os . stat ( addr ) except OSError as e : if e . args [ 0 ] != errno . ENOENT : raise else : os . remove ( addr ) else : raise ValueError ( ""%r is not a socket"" % addr ) super ( UnixSocket , self ) . __init__ ( addr , conf , log , fd = fd )",if stat . S_ISSOCK ( st . st_mode ) :,150 7998,"def iter_open_logger_fds(): seen = set() loggers = list(values(logging.Logger.manager.loggerDict)) + [ logging.getLogger(None) ] for l in loggers: try: for handler in l.handlers: try: if handler not in seen: # pragma: no cover yield handler.stream seen.add(handler) except AttributeError: pass except AttributeError: # PlaceHolder does not have handlers pass ",def iter_open_logger_fds ( ) : seen = set ( ) loggers = list ( values ( logging . Logger . manager . loggerDict ) ) + [ logging . getLogger ( None ) ] for l in loggers : try : for handler in l . handlers : try : yield handler . stream seen . add ( handler ) except AttributeError : pass except AttributeError : pass,if handler not in seen :,154 130,"def get_all_tracks(self): try: listing = self.__MTPDevice.get_tracklisting(callback=self.__callback) except Exception as exc: logger.error(""unable to get file listing %s (%s)"") tracks = [] for track in listing: title = track.title if not title or title == """": title = track.filename if len(title) > 50: title = title[0:49] + ""..."" artist = track.artist if artist and len(artist) > 50: artist = artist[0:49] + ""..."" length = track.filesize age_in_days = 0 date = self.__mtp_to_date(track.date) if not date: modified = track.date # not a valid mtp date. Display what mtp gave anyway modified_sort = -1 # no idea how to sort invalid date else: modified = util.format_date(date) modified_sort = date t = SyncTrack( title, length, modified, modified_sort=modified_sort, mtptrack=track, podcast=artist, ) tracks.append(t) return tracks","def get_all_tracks ( self ) : try : listing = self . __MTPDevice . get_tracklisting ( callback = self . __callback ) except Exception as exc : logger . 
error ( ""unable to get file listing %s (%s)"" ) tracks = [ ] for track in listing : title = track . title if not title or title == """" : title = track . filename if len ( title ) > 50 : title = title [ 0 : 49 ] + ""..."" artist = track . artist if artist and len ( artist ) > 50 : artist = artist [ 0 : 49 ] + ""..."" length = track . filesize age_in_days = 0 date = self . __mtp_to_date ( track . date ) modified = track . date modified_sort = - 1 else : modified = util . format_date ( date ) modified_sort = date t = SyncTrack ( title , length , modified , modified_sort = modified_sort , mtptrack = track , podcast = artist , ) tracks . append ( t ) return tracks",if not date :,346 1098,"def _dup_file_descriptor(self, source_fd, dest_fd, mode): source_fd = int(source_fd) if source_fd not in self._descriptors: raise RedirectionError('""%s"" is not a valid file descriptor' % str(source_fd)) source = self._descriptors[source_fd] if source.mode() != mode: raise RedirectionError( 'Descriptor %s cannot be duplicated in mode ""%s""' % (str(source), mode) ) if dest_fd == ""-"": # Close the source descriptor del self._descriptors[source_fd] source.close() else: dest_fd = int(dest_fd) if dest_fd not in self._descriptors: raise RedirectionError(""Cannot replace file descriptor %s"" % str(dest_fd)) dest = self._descriptors[dest_fd] if dest.mode() != mode: raise RedirectionError( 'Descriptor %s cannot be cannot be redirected in mode ""%s""' % (str(dest), mode) ) self._descriptors[dest_fd] = source.dup() dest.close() ","def _dup_file_descriptor ( self , source_fd , dest_fd , mode ) : source_fd = int ( source_fd ) if source_fd not in self . _descriptors : raise RedirectionError ( '""%s"" is not a valid file descriptor' % str ( source_fd ) ) source = self . _descriptors [ source_fd ] if source . mode ( ) != mode : raise RedirectionError ( 'Descriptor %s cannot be duplicated in mode ""%s""' % ( str ( source ) , mode ) ) if dest_fd == ""-"" : del self . _descriptors [ source_fd ] source . close ( ) else : dest_fd = int ( dest_fd ) raise RedirectionError ( ""Cannot replace file descriptor %s"" % str ( dest_fd ) ) dest = self . _descriptors [ dest_fd ] if dest . mode ( ) != mode : raise RedirectionError ( 'Descriptor %s cannot be cannot be redirected in mode ""%s""' % ( str ( dest ) , mode ) ) self . _descriptors [ dest_fd ] = source . dup ( ) dest . close ( )",if dest_fd not in self . _descriptors :,304 563,"def get_maxcov_downsample_cl(data, in_pipe=None): """"""Retrieve command line for max coverage downsampling, fitting into bamsormadup output."""""" max_cov = ( _get_maxcov_downsample(data) if dd.get_aligner(data) not in [""snap""] else None ) if max_cov: if in_pipe == ""bamsormadup"": prefix = ""level=0"" elif in_pipe == ""samtools"": prefix = ""-l 0"" else: prefix = """" # Swap over to multiple cores until after testing # core_arg = ""-t %s"" % dd.get_num_cores(data) core_arg = """" return ""%s | variant - -b %s --mark-as-qc-fail --max-coverage %s"" % ( prefix, core_arg, max_cov, ) else: if in_pipe == ""bamsormadup"": prefix = ""indexfilename={tx_out_file}.bai"" else: prefix = """" return prefix","def get_maxcov_downsample_cl ( data , in_pipe = None ) : """"""Retrieve command line for max coverage downsampling, fitting into bamsormadup output."""""" max_cov = ( _get_maxcov_downsample ( data ) if dd . 
get_aligner ( data ) not in [ ""snap"" ] else None ) if max_cov : if in_pipe == ""bamsormadup"" : prefix = ""level=0"" prefix = ""-l 0"" else : prefix = """" core_arg = """" return ""%s | variant - -b %s --mark-as-qc-fail --max-coverage %s"" % ( prefix , core_arg , max_cov , ) else : if in_pipe == ""bamsormadup"" : prefix = ""indexfilename={tx_out_file}.bai"" else : prefix = """" return prefix","elif in_pipe == ""samtools"" :",302 16923,"def __init__( self, fuzzer_name, job_types, stats_columns, group_by, date_start, date_end ): assert group_by self.fuzzer_name = fuzzer_name self.job_types = job_types self.group_by = group_by self.date_start = date_start self.date_end = date_end self.job_run_query = None self.testcase_run_query = None job_run_fields = [] testcase_run_fields = [] fields = parse_stats_column_fields(stats_columns) for field in fields: # Split up fields by table. if not isinstance(field, QueryField): continue if field.table_alias == JobQuery.ALIAS: job_run_fields.append(field) elif field.table_alias == TestcaseQuery.ALIAS: testcase_run_fields.append(field) # subqueries. # For query by time, we can't correlate the time of testcase run with a job # run since they are set at different times. So, use only the results from # testcase run and don't join them with job run, see build(). Also, the job # paramaters like: known crashes, new crashes are aggregate numbers from job # that are not applicable to show per testcase run (a point on graph). if job_run_fields and self.group_by != QueryGroupBy.GROUP_BY_TIME: self.job_run_query = JobQuery( fuzzer_name, job_types, job_run_fields, group_by, date_start, date_end ) if testcase_run_fields: self.testcase_run_query = TestcaseQuery( fuzzer_name, job_types, testcase_run_fields, group_by, date_start, date_end ) assert self.job_run_query or self.testcase_run_query, ""Unable to create query."" ","def __init__ ( self , fuzzer_name , job_types , stats_columns , group_by , date_start , date_end ) : assert group_by self . fuzzer_name = fuzzer_name self . job_types = job_types self . group_by = group_by self . date_start = date_start self . date_end = date_end self . job_run_query = None self . testcase_run_query = None job_run_fields = [ ] testcase_run_fields = [ ] fields = parse_stats_column_fields ( stats_columns ) for field in fields : if not isinstance ( field , QueryField ) : continue if field . table_alias == JobQuery . ALIAS : job_run_fields . append ( field ) testcase_run_fields . append ( field ) if job_run_fields and self . group_by != QueryGroupBy . GROUP_BY_TIME : self . job_run_query = JobQuery ( fuzzer_name , job_types , job_run_fields , group_by , date_start , date_end ) if testcase_run_fields : self . testcase_run_query = TestcaseQuery ( fuzzer_name , job_types , testcase_run_fields , group_by , date_start , date_end ) assert self . job_run_query or self . testcase_run_query , ""Unable to create query.""",elif field . table_alias == TestcaseQuery . 
ALIAS :,508 1033,"def create_initial(self): pkgs = dict() with open(self.initial_manifest, ""w+"") as manifest: manifest.write(self.initial_manifest_file_header) for var in self.var_maps[self.manifest_type]: if var in self.vars_to_split: split_pkgs = self._split_multilib(self.d.getVar(var)) if split_pkgs is not None: pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) else: pkg_list = self.d.getVar(var) if pkg_list is not None: pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) for pkg_type in pkgs: for pkg in pkgs[pkg_type].split(): manifest.write(""%s,%s\n"" % (pkg_type, pkg)) ","def create_initial ( self ) : pkgs = dict ( ) with open ( self . initial_manifest , ""w+"" ) as manifest : manifest . write ( self . initial_manifest_file_header ) for var in self . var_maps [ self . manifest_type ] : split_pkgs = self . _split_multilib ( self . d . getVar ( var ) ) if split_pkgs is not None : pkgs = dict ( list ( pkgs . items ( ) ) + list ( split_pkgs . items ( ) ) ) else : pkg_list = self . d . getVar ( var ) if pkg_list is not None : pkgs [ self . var_maps [ self . manifest_type ] [ var ] ] = self . d . getVar ( var ) for pkg_type in pkgs : for pkg in pkgs [ pkg_type ] . split ( ) : manifest . write ( ""%s,%s\n"" % ( pkg_type , pkg ) )",if var in self . vars_to_split :,261 18882,"def parse_object_id(_, values): if values: for key in values: if key.endswith(""_id""): val = values[key] if len(val) > 10: try: values[key] = utils.ObjectIdSilent(val) except: values[key] = None","def parse_object_id ( _ , values ) : if values : for key in values : if key . endswith ( ""_id"" ) : val = values [ key ] try : values [ key ] = utils . ObjectIdSilent ( val ) except : values [ key ] = None",if len ( val ) > 10 :,108 6655,"def net( self, input, output_stride=32, class_dim=1000, end_points=None, decode_points=None ): self.stride = 2 self.block_point = 0 self.output_stride = output_stride self.decode_points = decode_points self.short_cuts = dict() with scope(self.backbone): # Entry flow data = self.entry_flow(input) if check_points(self.block_point, end_points): return data, self.short_cuts # Middle flow data = self.middle_flow(data) if check_points(self.block_point, end_points): return data, self.short_cuts # Exit flow data = self.exit_flow(data) if check_points(self.block_point, end_points): return data, self.short_cuts data = fluid.layers.reduce_mean(data, [2, 3], keep_dim=True) data = fluid.layers.dropout(data, 0.5) stdv = 1.0 / math.sqrt(data.shape[1] * 1.0) with scope(""logit""): out = fluid.layers.fc( input=data, size=class_dim, param_attr=fluid.param_attr.ParamAttr( name=""fc_weights"", initializer=fluid.initializer.Uniform(-stdv, stdv), ), bias_attr=fluid.param_attr.ParamAttr(name=""fc_bias""), ) return out","def net ( self , input , output_stride = 32 , class_dim = 1000 , end_points = None , decode_points = None ) : self . stride = 2 self . block_point = 0 self . output_stride = output_stride self . decode_points = decode_points self . short_cuts = dict ( ) with scope ( self . backbone ) : data = self . entry_flow ( input ) return data , self . short_cuts data = self . middle_flow ( data ) return data , self . short_cuts data = self . exit_flow ( data ) return data , self . short_cuts data = fluid . layers . reduce_mean ( data , [ 2 , 3 ] , keep_dim = True ) data = fluid . layers . dropout ( data , 0.5 ) stdv = 1.0 / math . sqrt ( data . shape [ 1 ] * 1.0 ) with scope ( ""logit"" ) : out = fluid . layers . fc ( input = data , size = class_dim , param_attr = fluid . 
param_attr . ParamAttr ( name = ""fc_weights"" , initializer = fluid . initializer . Uniform ( - stdv , stdv ) , ) , bias_attr = fluid . param_attr . ParamAttr ( name = ""fc_bias"" ) , ) return out","if check_points ( self . block_point , end_points ) :",444 19299,"def deploy_component(self, components=None): if self._train_dsl is None: raise ValueError(""Before deploy model, training should be finish!!!"") if components is None: components = self._components deploy_cpns = [] for cpn in components: if isinstance(cpn, str): deploy_cpns.append(cpn) elif isinstance(cpn, Component): deploy_cpns.append(cpn.name) else: raise ValueError( ""deploy component parameters is wrong, expect str or Component object, but {} find"".format( type(cpn) ) ) if deploy_cpns[-1] not in self._components: raise ValueError( ""Component {} does not exist in pipeline"".format(deploy_cpns[-1]) ) if isinstance(self._components.get(deploy_cpns[-1]), Reader): raise ValueError(""Reader should not be include in predict pipeline"") res_dict = self._job_invoker.model_deploy( model_id=self._model_info.model_id, model_version=self._model_info.model_version, cpn_list=deploy_cpns, ) self._predict_dsl = self._job_invoker.get_predict_dsl( model_id=res_dict[""model_id""], model_version=res_dict[""model_version""] ) if self._predict_dsl: self._deploy = True return self","def deploy_component ( self , components = None ) : if self . _train_dsl is None : raise ValueError ( ""Before deploy model, training should be finish!!!"" ) if components is None : components = self . _components deploy_cpns = [ ] for cpn in components : if isinstance ( cpn , str ) : deploy_cpns . append ( cpn ) elif isinstance ( cpn , Component ) : deploy_cpns . append ( cpn . name ) else : raise ValueError ( ""deploy component parameters is wrong, expect str or Component object, but {} find"" . format ( type ( cpn ) ) ) raise ValueError ( ""Component {} does not exist in pipeline"" . format ( deploy_cpns [ - 1 ] ) ) if isinstance ( self . _components . get ( deploy_cpns [ - 1 ] ) , Reader ) : raise ValueError ( ""Reader should not be include in predict pipeline"" ) res_dict = self . _job_invoker . model_deploy ( model_id = self . _model_info . model_id , model_version = self . _model_info . model_version , cpn_list = deploy_cpns , ) self . _predict_dsl = self . _job_invoker . get_predict_dsl ( model_id = res_dict [ ""model_id"" ] , model_version = res_dict [ ""model_version"" ] ) if self . _predict_dsl : self . _deploy = True return self",if deploy_cpns [ - 1 ] not in self . _components :,415 18067,"def AutoTest(): with open(sys.argv[1], ""rb"") as f: for line in f.read().split(b""\n""): line = BYTES2SYSTEMSTR(line.strip()) if not line: continue elif line.startswith(""#""): print(line) else: print("">>> "" + line) os.system(line) sys.stdout.write(""\npress enter to continue..."") if PY3: input() else: raw_input() sys.stdout.write(""\n"")","def AutoTest ( ) : with open ( sys . argv [ 1 ] , ""rb"" ) as f : for line in f . read ( ) . split ( b""\n"" ) : line = BYTES2SYSTEMSTR ( line . strip ( ) ) continue elif line . startswith ( ""#"" ) : print ( line ) else : print ( "">>> "" + line ) os . system ( line ) sys . stdout . write ( ""\npress enter to continue..."" ) if PY3 : input ( ) else : raw_input ( ) sys . stdout . 
write ( ""\n"" )",if not line :,179 24151,"def reposition_division(f1): lines = f1.splitlines() if lines[2] == division: lines.pop(2) found = 0 for i, line in enumerate(lines): if line.startswith('""""""'): found += 1 if found == 2: if division in ""\n"".join(lines): break # already in the right place lines.insert(i + 1, """") lines.insert(i + 2, division) break return ""\n"".join(lines)","def reposition_division ( f1 ) : lines = f1 . splitlines ( ) if lines [ 2 ] == division : lines . pop ( 2 ) found = 0 for i , line in enumerate ( lines ) : found += 1 if found == 2 : if division in ""\n"" . join ( lines ) : break lines . insert ( i + 1 , """" ) lines . insert ( i + 2 , division ) break return ""\n"" . join ( lines )","if line . startswith ( '""""""' ) :",153 14912,"def _WriteActionParams(f, actions, counter): param_names = [] for key in sorted(actions): action = actions[key] to_write = None if isinstance(action, args.SetToString): if action.valid: to_write = action.valid elif isinstance(action, args.SetNamedOption): if action.names: to_write = action.names elif isinstance(action, args.SetNamedAction): if action.names: to_write = action.names if to_write: uniq = counter.next() var_name = ""params_%d"" % uniq _WriteStrArray(f, var_name, to_write) else: var_name = None param_names.append(var_name) return param_names ","def _WriteActionParams ( f , actions , counter ) : param_names = [ ] for key in sorted ( actions ) : action = actions [ key ] to_write = None if isinstance ( action , args . SetToString ) : if action . valid : to_write = action . valid elif isinstance ( action , args . SetNamedOption ) : if action . names : to_write = action . names if action . names : to_write = action . names if to_write : uniq = counter . next ( ) var_name = ""params_%d"" % uniq _WriteStrArray ( f , var_name , to_write ) else : var_name = None param_names . append ( var_name ) return param_names","elif isinstance ( action , args . SetNamedAction ) :",236 7947,"def __lt__(self, other): # 0: clock 1: timestamp 3: process id try: A, B = self[0], other[0] # uses logical clock value first if A and B: # use logical clock if available if A == B: # equal clocks use lower process id return self[2] < other[2] return A < B return self[1] < other[1] # ... or use timestamp except IndexError: return NotImplemented","def __lt__ ( self , other ) : try : A , B = self [ 0 ] , other [ 0 ] if A == B : return self [ 2 ] < other [ 2 ] return A < B return self [ 1 ] < other [ 1 ] except IndexError : return NotImplemented",if A and B :,135 10031,"def reset(self): self.tree.reset() self.firstStartTag = False self.errors = [] self.log = [] # only used with debug mode # ""quirks"" / ""limited quirks"" / ""no quirks"" self.compatMode = ""no quirks"" if self.innerHTMLMode: self.innerHTML = self.container.lower() if self.innerHTML in cdataElements: self.tokenizer.state = self.tokenizer.rcdataState elif self.innerHTML in rcdataElements: self.tokenizer.state = self.tokenizer.rawtextState elif self.innerHTML == ""plaintext"": self.tokenizer.state = self.tokenizer.plaintextState else: # state already is data state # self.tokenizer.state = self.tokenizer.dataState pass self.phase = self.phases[""beforeHtml""] self.phase.insertHtmlElement() self.resetInsertionMode() else: self.innerHTML = False self.phase = self.phases[""initial""] self.lastPhase = None self.beforeRCDataPhase = None self.framesetOK = True ","def reset ( self ) : self . tree . reset ( ) self . firstStartTag = False self . errors = [ ] self . log = [ ] self . 
compatMode = ""no quirks"" if self . innerHTMLMode : self . innerHTML = self . container . lower ( ) if self . innerHTML in cdataElements : self . tokenizer . state = self . tokenizer . rcdataState elif self . innerHTML in rcdataElements : self . tokenizer . state = self . tokenizer . rawtextState self . tokenizer . state = self . tokenizer . plaintextState else : pass self . phase = self . phases [ ""beforeHtml"" ] self . phase . insertHtmlElement ( ) self . resetInsertionMode ( ) else : self . innerHTML = False self . phase = self . phases [ ""initial"" ] self . lastPhase = None self . beforeRCDataPhase = None self . framesetOK = True","elif self . innerHTML == ""plaintext"" :",320 9622,"def get_host_metadata(self): meta = {} if self.agent_url: try: resp = requests.get( self.agent_url + ECS_AGENT_METADATA_PATH, timeout=1 ).json() if ""Version"" in resp: match = AGENT_VERSION_EXP.search(resp.get(""Version"")) if match is not None and len(match.groups()) == 1: meta[""ecs_version""] = match.group(1) except Exception as e: self.log.debug(""Error getting ECS version: %s"" % str(e)) return meta","def get_host_metadata ( self ) : meta = { } if self . agent_url : try : resp = requests . get ( self . agent_url + ECS_AGENT_METADATA_PATH , timeout = 1 ) . json ( ) if ""Version"" in resp : match = AGENT_VERSION_EXP . search ( resp . get ( ""Version"" ) ) meta [ ""ecs_version"" ] = match . group ( 1 ) except Exception as e : self . log . debug ( ""Error getting ECS version: %s"" % str ( e ) ) return meta",if match is not None and len ( match . groups ( ) ) == 1 :,176 5280,"def _build_request(url: str) -> HTTPResponse: # Detect basic auth # Adapted from python-feedparser user_passwd = None u = urllib.parse.urlparse(url) if u.username is not None or u.password is not None: if u.username and u.password: user_passwd = ""%s:%s"" % (u.username, u.password) host_port = u.hostname if u.port: host_port += "":"" + str(u.port) url = urllib.parse.urlunparse(u._replace(netloc=host_port)) # Start request req = urllib.request.Request(url) # Add headers req.add_header(""User-Agent"", ""SABnzbd/%s"" % sabnzbd.__version__) req.add_header(""Accept-encoding"", ""gzip"") if user_passwd: req.add_header( ""Authorization"", ""Basic "" + ubtou(base64.b64encode(utob(user_passwd))).strip(), ) return urllib.request.urlopen(req)","def _build_request ( url : str ) -> HTTPResponse : user_passwd = None u = urllib . parse . urlparse ( url ) if u . username is not None or u . password is not None : if u . username and u . password : user_passwd = ""%s:%s"" % ( u . username , u . password ) host_port = u . hostname host_port += "":"" + str ( u . port ) url = urllib . parse . urlunparse ( u . _replace ( netloc = host_port ) ) req = urllib . request . Request ( url ) req . add_header ( ""User-Agent"" , ""SABnzbd/%s"" % sabnzbd . __version__ ) req . add_header ( ""Accept-encoding"" , ""gzip"" ) if user_passwd : req . add_header ( ""Authorization"" , ""Basic "" + ubtou ( base64 . b64encode ( utob ( user_passwd ) ) ) . strip ( ) , ) return urllib . request . urlopen ( req )",if u . 
port :,289 3561,"def read(self, iprot): if ( iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None ): fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.I64: self.maxColLen = iprot.readI64() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.DOUBLE: self.avgColLen = iprot.readDouble() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I64: self.numNulls = iprot.readI64() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()","def read ( self , iprot ) : if ( iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None ) : fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) return iprot . readStructBegin ( ) while True : ( fname , ftype , fid ) = iprot . readFieldBegin ( ) if ftype == TType . STOP : break if fid == 1 : if ftype == TType . I64 : self . maxColLen = iprot . readI64 ( ) else : iprot . skip ( ftype ) elif fid == 2 : if ftype == TType . DOUBLE : self . avgColLen = iprot . readDouble ( ) else : iprot . skip ( ftype ) if ftype == TType . I64 : self . numNulls = iprot . readI64 ( ) else : iprot . skip ( ftype ) else : iprot . skip ( ftype ) iprot . readFieldEnd ( ) iprot . readStructEnd ( )",elif fid == 3 :,384 21131,"def writeback(self, doc_type, body, rule=None, match_body=None): # ES 2.0 - 2.3 does not support dots in field names. if self.replace_dots_in_field_names: writeback_body = replace_dots_in_field_names(body) else: writeback_body = body for key in list(writeback_body.keys()): # Convert any datetime objects to timestamps if isinstance(writeback_body[key], datetime.datetime): writeback_body[key] = dt_to_ts(writeback_body[key]) if self.debug: elastalert_logger.info(""Skipping writing to ES: %s"" % (writeback_body)) return None if ""@timestamp"" not in writeback_body: writeback_body[""@timestamp""] = dt_to_ts(ts_now()) try: index = self.writeback_es.resolve_writeback_index( self.writeback_index, doc_type ) if self.writeback_es.is_atleastsixtwo(): res = self.writeback_es.index(index=index, body=body) else: res = self.writeback_es.index(index=index, doc_type=doc_type, body=body) return res except ElasticsearchException as e: logging.exception(""Error writing alert info to Elasticsearch: %s"" % (e))","def writeback ( self , doc_type , body , rule = None , match_body = None ) : if self . replace_dots_in_field_names : writeback_body = replace_dots_in_field_names ( body ) else : writeback_body = body for key in list ( writeback_body . keys ( ) ) : writeback_body [ key ] = dt_to_ts ( writeback_body [ key ] ) if self . debug : elastalert_logger . info ( ""Skipping writing to ES: %s"" % ( writeback_body ) ) return None if ""@timestamp"" not in writeback_body : writeback_body [ ""@timestamp"" ] = dt_to_ts ( ts_now ( ) ) try : index = self . writeback_es . resolve_writeback_index ( self . writeback_index , doc_type ) if self . writeback_es . is_atleastsixtwo ( ) : res = self . writeback_es . index ( index = index , body = body ) else : res = self . writeback_es . 
index ( index = index , doc_type = doc_type , body = body ) return res except ElasticsearchException as e : logging . exception ( ""Error writing alert info to Elasticsearch: %s"" % ( e ) )","if isinstance ( writeback_body [ key ] , datetime . datetime ) :",380 8775,"def update_sockets(self): """"""Upate sockets based on selected operation"""""" inputs = self.inputs if self.operation in Q_operations: # Q or Q+S operations for a in ABC[1:]: # remove all B-Z inputs (keep A) if a in inputs: inputs.remove(inputs[a]) elif self.operation in QQ_operations: # Q + Q operations for a in ABC[2:]: # remove all C-Z inputs (keep A & B) if a in inputs: inputs.remove(inputs[a]) if not ""B"" in inputs: inputs.new(""SvQuaternionSocket"", ""B"") else: # multiple Q operations if not ""B"" in inputs: inputs.new(""SvQuaternionSocket"", ""B"") inputs[""Scale""].hide_safe = self.operation != ""SCALE"" outputs = self.outputs if self.operation in output_S_operations: outputs[""Quaternion""].hide_safe = True if outputs[""Value""].hide: outputs[""Value""].hide_safe = False else: if outputs[""Quaternion""].hide: outputs[""Quaternion""].hide_safe = False outputs[""Value""].hide_safe = True self.update()","def update_sockets ( self ) : """"""Upate sockets based on selected operation"""""" inputs = self . inputs if self . operation in Q_operations : for a in ABC [ 1 : ] : if a in inputs : inputs . remove ( inputs [ a ] ) elif self . operation in QQ_operations : for a in ABC [ 2 : ] : if a in inputs : inputs . remove ( inputs [ a ] ) if not ""B"" in inputs : inputs . new ( ""SvQuaternionSocket"" , ""B"" ) else : if not ""B"" in inputs : inputs . new ( ""SvQuaternionSocket"" , ""B"" ) inputs [ ""Scale"" ] . hide_safe = self . operation != ""SCALE"" outputs = self . outputs if self . operation in output_S_operations : outputs [ ""Quaternion"" ] . hide_safe = True if outputs [ ""Value"" ] . hide : outputs [ ""Value"" ] . hide_safe = False else : outputs [ ""Quaternion"" ] . hide_safe = False outputs [ ""Value"" ] . hide_safe = True self . update ( )","if outputs [ ""Quaternion"" ] . hide :",343 10307,"def _return_poolnumber(self, nominated_pools): """"""Select pool form nominated pools."""""" selected_pool = -1 min_ldn = 0 for pool in nominated_pools: nld = len(pool[""ld_list""]) if selected_pool == -1 or min_ldn > nld: selected_pool = pool[""pool_num""] min_ldn = nld if selected_pool < 0: msg = _(""No available pools found."") raise exception.VolumeBackendAPIException(data=msg) return selected_pool","def _return_poolnumber ( self , nominated_pools ) : """"""Select pool form nominated pools."""""" selected_pool = - 1 min_ldn = 0 for pool in nominated_pools : nld = len ( pool [ ""ld_list"" ] ) selected_pool = pool [ ""pool_num"" ] min_ldn = nld if selected_pool < 0 : msg = _ ( ""No available pools found."" ) raise exception . VolumeBackendAPIException ( data = msg ) return selected_pool",if selected_pool == - 1 or min_ldn > nld :,150 10240,"def __call__(self, x: JaxArray, training: bool) -> JaxArray: if self.stride > 1: shortcut = objax.functional.max_pool_2d(x, size=1, strides=self.stride) else: shortcut = x for i, (bn_i, conv_i) in enumerate(self.layers): x = bn_i(x, training) x = self.activation_fn(x) if i == 0 and self.use_projection: shortcut = self.proj_conv(x) x = conv_i(x) return x + shortcut","def __call__ ( self , x : JaxArray , training : bool ) -> JaxArray : if self . stride > 1 : shortcut = objax . functional . max_pool_2d ( x , size = 1 , strides = self . 
stride ) else : shortcut = x for i , ( bn_i , conv_i ) in enumerate ( self . layers ) : x = bn_i ( x , training ) x = self . activation_fn ( x ) shortcut = self . proj_conv ( x ) x = conv_i ( x ) return x + shortcut",if i == 0 and self . use_projection :,162 9018,"def tokens(self, event, next): kind, data, _ = event if kind == START: tag, attribs = data name = tag.localname namespace = tag.namespace converted_attribs = {} for k, v in attribs: if isinstance(k, QName): converted_attribs[(k.namespace, k.localname)] = v else: converted_attribs[(None, k)] = v if namespace == namespaces[""html""] and name in voidElements: for token in self.emptyTag( namespace, name, converted_attribs, not next or next[0] != END or next[1] != tag, ): yield token else: yield self.startTag(namespace, name, converted_attribs) elif kind == END: name = data.localname namespace = data.namespace if namespace != namespaces[""html""] or name not in voidElements: yield self.endTag(namespace, name) elif kind == COMMENT: yield self.comment(data) elif kind == TEXT: for token in self.text(data): yield token elif kind == DOCTYPE: yield self.doctype(*data) elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS, START_CDATA, END_CDATA, PI): pass else: yield self.unknown(kind)","def tokens ( self , event , next ) : kind , data , _ = event if kind == START : tag , attribs = data name = tag . localname namespace = tag . namespace converted_attribs = { } for k , v in attribs : if isinstance ( k , QName ) : converted_attribs [ ( k . namespace , k . localname ) ] = v else : converted_attribs [ ( None , k ) ] = v for token in self . emptyTag ( namespace , name , converted_attribs , not next or next [ 0 ] != END or next [ 1 ] != tag , ) : yield token else : yield self . startTag ( namespace , name , converted_attribs ) elif kind == END : name = data . localname namespace = data . namespace if namespace != namespaces [ ""html"" ] or name not in voidElements : yield self . endTag ( namespace , name ) elif kind == COMMENT : yield self . comment ( data ) elif kind == TEXT : for token in self . text ( data ) : yield token elif kind == DOCTYPE : yield self . doctype ( * data ) elif kind in ( XML_NAMESPACE , DOCTYPE , START_NS , END_NS , START_CDATA , END_CDATA , PI ) : pass else : yield self . unknown ( kind )","if namespace == namespaces [ ""html"" ] and name in voidElements :",395 11368,"def slowSorted(qq): ""Reference sort peformed by insertion using only <"" rr = list() for q in qq: i = 0 for i in range(len(rr)): if q < rr[i]: rr.insert(i, q) break else: rr.append(q) return rr","def slowSorted ( qq ) : ""Reference sort peformed by insertion using only <"" rr = list ( ) for q in qq : i = 0 for i in range ( len ( rr ) ) : rr . insert ( i , q ) break else : rr . 
append ( q ) return rr",if q < rr [ i ] :,101 7439,"def read(self, iprot): if ( iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None ): fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.db_name = iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.tbl_name = iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.LIST: self.part_vals = [] (_etype725, _size722) = iprot.readListBegin() for _i726 in xrange(_size722): _elem727 = iprot.readString() self.part_vals.append(_elem727) iprot.readListEnd() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()","def read ( self , iprot ) : if ( iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None ) : fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) return iprot . readStructBegin ( ) while True : ( fname , ftype , fid ) = iprot . readFieldBegin ( ) if ftype == TType . STOP : break if fid == 1 : self . db_name = iprot . readString ( ) else : iprot . skip ( ftype ) elif fid == 2 : self . tbl_name = iprot . readString ( ) else : iprot . skip ( ftype ) elif fid == 3 : if ftype == TType . LIST : self . part_vals = [ ] ( _etype725 , _size722 ) = iprot . readListBegin ( ) for _i726 in xrange ( _size722 ) : _elem727 = iprot . readString ( ) self . part_vals . append ( _elem727 ) iprot . readListEnd ( ) else : iprot . skip ( ftype ) else : iprot . skip ( ftype ) iprot . readFieldEnd ( ) iprot . readStructEnd ( )",if ftype == TType . STRING :,460 13499,"def set_random_avatar(user): galleries = get_available_galleries(include_default=True) if not galleries: raise RuntimeError(""no avatar galleries are set"") avatars_list = [] for gallery in galleries: if gallery[""name""] == DEFAULT_GALLERY: avatars_list = gallery[""images""] break else: avatars_list += gallery[""images""] random_avatar = random.choice(avatars_list) store.store_new_avatar(user, Image.open(random_avatar.image)) ","def set_random_avatar ( user ) : galleries = get_available_galleries ( include_default = True ) if not galleries : raise RuntimeError ( ""no avatar galleries are set"" ) avatars_list = [ ] for gallery in galleries : avatars_list = gallery [ ""images"" ] break else : avatars_list += gallery [ ""images"" ] random_avatar = random . choice ( avatars_list ) store . store_new_avatar ( user , Image . open ( random_avatar . 
image ) )","if gallery [ ""name"" ] == DEFAULT_GALLERY :",169 17633,"def fetch_rvalue(self) -> List[Token]: """"""Fetch right-hand value of assignment."""""" tokens = [] while self.fetch_token(): tokens.append(self.current) if self.current == [OP, ""(""]: tokens += self.fetch_until([OP, "")""]) elif self.current == [OP, ""{""]: tokens += self.fetch_until([OP, ""}""]) elif self.current == [OP, ""[""]: tokens += self.fetch_until([OP, ""]""]) elif self.current == INDENT: tokens += self.fetch_until(DEDENT) elif self.current == [OP, "";""]: break elif self.current.kind not in (OP, NAME, NUMBER, STRING): break return tokens ","def fetch_rvalue ( self ) -> List [ Token ] : """"""Fetch right-hand value of assignment."""""" tokens = [ ] while self . fetch_token ( ) : tokens . append ( self . current ) if self . current == [ OP , ""("" ] : tokens += self . fetch_until ( [ OP , "")"" ] ) elif self . current == [ OP , ""{"" ] : tokens += self . fetch_until ( [ OP , ""}"" ] ) elif self . current == [ OP , ""["" ] : tokens += self . fetch_until ( [ OP , ""]"" ] ) tokens += self . fetch_until ( DEDENT ) elif self . current == [ OP , "";"" ] : break elif self . current . kind not in ( OP , NAME , NUMBER , STRING ) : break return tokens",elif self . current == INDENT :,208 16732,"def describe(self, done=False): description = ShellCommand.describe(self, done) if done: if not description: description = [""compile""] description.append(""%d projects"" % self.getStatistic(""projects"", 0)) description.append(""%d files"" % self.getStatistic(""files"", 0)) warnings = self.getStatistic(""warnings"", 0) if warnings > 0: description.append(""%d warnings"" % warnings) errors = self.getStatistic(""errors"", 0) if errors > 0: description.append(""%d errors"" % errors) return description","def describe ( self , done = False ) : description = ShellCommand . describe ( self , done ) if done : if not description : description = [ ""compile"" ] description . append ( ""%d projects"" % self . getStatistic ( ""projects"" , 0 ) ) description . append ( ""%d files"" % self . getStatistic ( ""files"" , 0 ) ) warnings = self . getStatistic ( ""warnings"" , 0 ) if warnings > 0 : description . append ( ""%d warnings"" % warnings ) errors = self . getStatistic ( ""errors"" , 0 ) description . append ( ""%d errors"" % errors ) return description",if errors > 0 :,164 17800,"def notify(self, message="""", data=None, listener=None): if not data: data = {} nma = pynma.PyNMA() keys = splitString(self.conf(""api_key"")) nma.addkey(keys) nma.developerkey(self.conf(""dev_key"")) response = nma.push( application=self.default_title, event=message.split("" "")[0], description=message, priority=self.conf(""priority""), batch_mode=len(keys) > 1, ) successful = 0 for key in keys: if not response[str(key)][""code""] == six.u(""200""): log.error( ""Could not send notification to NotifyMyAndroid (%s). %s"", (key, response[key][""message""]), ) else: successful += 1 return successful == len(keys)","def notify ( self , message = """" , data = None , listener = None ) : if not data : data = { } nma = pynma . PyNMA ( ) keys = splitString ( self . conf ( ""api_key"" ) ) nma . addkey ( keys ) nma . developerkey ( self . conf ( ""dev_key"" ) ) response = nma . push ( application = self . default_title , event = message . split ( "" "" ) [ 0 ] , description = message , priority = self . conf ( ""priority"" ) , batch_mode = len ( keys ) > 1 , ) successful = 0 for key in keys : log . error ( ""Could not send notification to NotifyMyAndroid (%s). 
%s"" , ( key , response [ key ] [ ""message"" ] ) , ) else : successful += 1 return successful == len ( keys )","if not response [ str ( key ) ] [ ""code"" ] == six . u ( ""200"" ) :",251 3042,"def _run_and_test(self, hparams, test_transform=False): # Construct database scalar_data = ScalarData(hparams) self.assertEqual(scalar_data.list_items()[0], hparams[""dataset""][""data_name""]) iterator = DataIterator(scalar_data) i = 0 for batch in iterator: self.assertEqual(set(batch.keys()), set(scalar_data.list_items())) value = batch[scalar_data.data_name][0] if test_transform: self.assertEqual(2 * i, value) else: self.assertEqual(i, value) i += 1 data_type = hparams[""dataset""][""data_type""] if data_type == ""int"": self.assertEqual(value.dtype, torch.int32) elif data_type == ""float"": self.assertEqual(value.dtype, torch.float32) elif data_type == ""bool"": self.assertTrue(value.dtype, torch_bool) self.assertIsInstance(value, torch.Tensor)","def _run_and_test ( self , hparams , test_transform = False ) : scalar_data = ScalarData ( hparams ) self . assertEqual ( scalar_data . list_items ( ) [ 0 ] , hparams [ ""dataset"" ] [ ""data_name"" ] ) iterator = DataIterator ( scalar_data ) i = 0 for batch in iterator : self . assertEqual ( set ( batch . keys ( ) ) , set ( scalar_data . list_items ( ) ) ) value = batch [ scalar_data . data_name ] [ 0 ] self . assertEqual ( 2 * i , value ) else : self . assertEqual ( i , value ) i += 1 data_type = hparams [ ""dataset"" ] [ ""data_type"" ] if data_type == ""int"" : self . assertEqual ( value . dtype , torch . int32 ) elif data_type == ""float"" : self . assertEqual ( value . dtype , torch . float32 ) elif data_type == ""bool"" : self . assertTrue ( value . dtype , torch_bool ) self . assertIsInstance ( value , torch . Tensor )",if test_transform :,278 175,"def check_WinExec(self, emu): profile = emu.emu_profile_output.decode() while True: offset = profile.find(""WinExec"") if offset < 0: break profile = profile[offset:] p = profile.split("";"") if not p: # pragma: no cover profile = profile[1:] continue s = p[0].split('""') if len(s) < 2: # pragma: no cover profile = profile[1:] continue url = s[1] if not url.startswith(""\\\\""): profile = profile[1:] continue self.retrieve_WinExec(url) profile = profile[1:]","def check_WinExec ( self , emu ) : profile = emu . emu_profile_output . decode ( ) while True : offset = profile . find ( ""WinExec"" ) if offset < 0 : break profile = profile [ offset : ] p = profile . split ( "";"" ) if not p : profile = profile [ 1 : ] continue s = p [ 0 ] . split ( '""' ) if len ( s ) < 2 : profile = profile [ 1 : ] continue url = s [ 1 ] profile = profile [ 1 : ] continue self . retrieve_WinExec ( url ) profile = profile [ 1 : ]","if not url . startswith ( ""\\\\"" ) :",202 21626,"def convert_path(ctx, tpath): for points, code in tpath.iter_segments(): if code == Path.MOVETO: ctx.move_to(*points) elif code == Path.LINETO: ctx.line_to(*points) elif code == Path.CURVE3: ctx.curve_to( points[0], points[1], points[0], points[1], points[2], points[3] ) elif code == Path.CURVE4: ctx.curve_to(*points) elif code == Path.CLOSEPOLY: ctx.close_path()","def convert_path ( ctx , tpath ) : for points , code in tpath . iter_segments ( ) : if code == Path . MOVETO : ctx . move_to ( * points ) elif code == Path . LINETO : ctx . line_to ( * points ) elif code == Path . CURVE3 : ctx . curve_to ( points [ 0 ] , points [ 1 ] , points [ 0 ] , points [ 1 ] , points [ 2 ] , points [ 3 ] ) ctx . curve_to ( * points ) elif code == Path . 
CLOSEPOLY : ctx . close_path ( )",elif code == Path . CURVE4 :,172 11235,"def msg_ser(inst, sformat, lev=0): if sformat in [""urlencoded"", ""json""]: if isinstance(inst, Message): res = inst.serialize(sformat, lev) else: res = inst elif sformat == ""dict"": if isinstance(inst, Message): res = inst.serialize(sformat, lev) elif isinstance(inst, dict): res = inst elif isinstance(inst, str): # Iff ID Token res = inst else: raise MessageException(""Wrong type: %s"" % type(inst)) else: raise PyoidcError(""Unknown sformat"", inst) return res","def msg_ser ( inst , sformat , lev = 0 ) : if sformat in [ ""urlencoded"" , ""json"" ] : res = inst . serialize ( sformat , lev ) else : res = inst elif sformat == ""dict"" : res = inst . serialize ( sformat , lev ) elif isinstance ( inst , dict ) : res = inst elif isinstance ( inst , str ) : res = inst else : raise MessageException ( ""Wrong type: %s"" % type ( inst ) ) else : raise PyoidcError ( ""Unknown sformat"" , inst ) return res","if isinstance ( inst , Message ) :",182 3291,"def tokeneater(self, type, token, srow_scol, erow_ecol, line): srow, scol = srow_scol erow, ecol = erow_ecol if not self.started: # look for the first ""def"", ""class"" or ""lambda"" if token in (""def"", ""class"", ""lambda""): if token == ""lambda"": self.islambda = True self.started = True self.passline = True # skip to the end of the line elif type == tokenize.NEWLINE: self.passline = False # stop skipping when a NEWLINE is seen self.last = srow if self.islambda: # lambdas always end at the first NEWLINE raise EndOfBlock elif self.passline: pass elif type == tokenize.INDENT: self.indent = self.indent + 1 self.passline = True elif type == tokenize.DEDENT: self.indent = self.indent - 1 # the end of matching indent/dedent pairs end a block # (note that this only works for ""def""/""class"" blocks, # not e.g. for ""if: else:"" or ""try: finally:"" blocks) if self.indent <= 0: raise EndOfBlock elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL): # any other token on the same indentation level end the previous # block as well, except the pseudo-tokens COMMENT and NL. raise EndOfBlock","def tokeneater ( self , type , token , srow_scol , erow_ecol , line ) : srow , scol = srow_scol erow , ecol = erow_ecol if not self . started : if token in ( ""def"" , ""class"" , ""lambda"" ) : self . islambda = True self . started = True self . passline = True elif type == tokenize . NEWLINE : self . passline = False self . last = srow if self . islambda : raise EndOfBlock elif self . passline : pass elif type == tokenize . INDENT : self . indent = self . indent + 1 self . passline = True elif type == tokenize . DEDENT : self . indent = self . indent - 1 if self . indent <= 0 : raise EndOfBlock elif self . indent == 0 and type not in ( tokenize . COMMENT , tokenize . 
NL ) : raise EndOfBlock","if token == ""lambda"" :",403 24976,"def precheck(self, runner, script, info): # create models dict if nonexistent if getattr(script, ""modelVars"", None) is None: script.modelVars = {} # parse AST node if isinstance(info, ast.Assign): if isinstance(info.value, ast.Call): if isinstance(info.value.func, ast.Name): if info.value.func.id.endswith(""Model""): for target in info.targets: if isinstance(target, ast.Name): script.modelVars[target.id] = info.value.func.id elif isinstance(target, ast.Tuple): for elt in target.elts: if isinstance(elt, ast.Name): script.modelVars[elt.id] = info.value.func.id else: for target in info.targets: if isinstance(target, ast.Name): if target.id in script.modelVars: del script.modelVars[target.id] elif isinstance(target, ast.Tuple): for elt in target.elts: if isinstance(elt, ast.Name): if elt.id in script.modelVars: del script.modelVars[elt.id] ","def precheck ( self , runner , script , info ) : if getattr ( script , ""modelVars"" , None ) is None : script . modelVars = { } if isinstance ( info , ast . Assign ) : if isinstance ( info . value , ast . Call ) : if isinstance ( info . value . func , ast . Name ) : for target in info . targets : if isinstance ( target , ast . Name ) : script . modelVars [ target . id ] = info . value . func . id elif isinstance ( target , ast . Tuple ) : for elt in target . elts : if isinstance ( elt , ast . Name ) : script . modelVars [ elt . id ] = info . value . func . id else : for target in info . targets : if isinstance ( target , ast . Name ) : if target . id in script . modelVars : del script . modelVars [ target . id ] elif isinstance ( target , ast . Tuple ) : for elt in target . elts : if isinstance ( elt , ast . Name ) : if elt . id in script . modelVars : del script . modelVars [ elt . id ]","if info . value . func . id . endswith ( ""Model"" ) :",405 8952,"def _load_ui_modules(self, modules: Any) -> None: if isinstance(modules, types.ModuleType): self._load_ui_modules(dict((n, getattr(modules, n)) for n in dir(modules))) elif isinstance(modules, list): for m in modules: self._load_ui_modules(m) else: assert isinstance(modules, dict) for name, cls in modules.items(): try: if issubclass(cls, UIModule): self.ui_modules[name] = cls except TypeError: pass ","def _load_ui_modules ( self , modules : Any ) -> None : if isinstance ( modules , types . ModuleType ) : self . _load_ui_modules ( dict ( ( n , getattr ( modules , n ) ) for n in dir ( modules ) ) ) elif isinstance ( modules , list ) : for m in modules : self . _load_ui_modules ( m ) else : assert isinstance ( modules , dict ) for name , cls in modules . items ( ) : try : self . 
ui_modules [ name ] = cls except TypeError : pass","if issubclass ( cls , UIModule ) :",162 25022,"def postprocess_message(self, msg): if msg[""type""] in (""subsample"", ""param"") and self.dim is not None: event_dim = msg[""kwargs""].get(""event_dim"") if event_dim is not None: assert event_dim >= 0 dim = self.dim - event_dim shape = jnp.shape(msg[""value""]) if len(shape) >= -dim and shape[dim] != 1: if shape[dim] != self.size: if msg[""type""] == ""param"": statement = ""numpyro.param({}, ..., event_dim={})"".format( msg[""name""], event_dim ) else: statement = ""numpyro.subsample(..., event_dim={})"".format( event_dim ) raise ValueError( ""Inside numpyro.plate({}, {}, dim={}) invalid shape of {}: {}"".format( self.name, self.size, self.dim, statement, shape ) ) if self.subsample_size < self.size: value = msg[""value""] new_value = jnp.take(value, self._indices, dim) msg[""value""] = new_value","def postprocess_message ( self , msg ) : if msg [ ""type"" ] in ( ""subsample"" , ""param"" ) and self . dim is not None : event_dim = msg [ ""kwargs"" ] . get ( ""event_dim"" ) if event_dim is not None : assert event_dim >= 0 dim = self . dim - event_dim shape = jnp . shape ( msg [ ""value"" ] ) if len ( shape ) >= - dim and shape [ dim ] != 1 : if shape [ dim ] != self . size : if msg [ ""type"" ] == ""param"" : statement = ""numpyro.param({}, ..., event_dim={})"" . format ( msg [ ""name"" ] , event_dim ) else : statement = ""numpyro.subsample(..., event_dim={})"" . format ( event_dim ) raise ValueError ( ""Inside numpyro.plate({}, {}, dim={}) invalid shape of {}: {}"" . format ( self . name , self . size , self . dim , statement , shape ) ) value = msg [ ""value"" ] new_value = jnp . take ( value , self . _indices , dim ) msg [ ""value"" ] = new_value",if self . subsample_size < self . size :,381 2532,"def forward(self, input): s0 = s1 = self.stem(input) weights2 = None for i, cell in enumerate(self.cells): if cell.reduction: weights = fluid.layers.softmax(self.alphas_reduce) if self._method == ""PC-DARTS"": n = 3 start = 2 weights2 = fluid.layers.softmax(self.betas_reduce[0:2]) for i in range(self._steps - 1): end = start + n tw2 = fluid.layers.softmax(self.betas_reduce[start:end]) start = end n += 1 weights2 = fluid.layers.concat([weights2, tw2]) else: weights = fluid.layers.softmax(self.alphas_normal) if self._method == ""PC-DARTS"": n = 3 start = 2 weights2 = fluid.layers.softmax(self.betas_normal[0:2]) for i in range(self._steps - 1): end = start + n tw2 = fluid.layers.softmax(self.betas_normal[start:end]) start = end n += 1 weights2 = fluid.layers.concat([weights2, tw2]) s0, s1 = s1, cell(s0, s1, weights, weights2) out = self.global_pooling(s1) out = fluid.layers.squeeze(out, axes=[2, 3]) logits = self.classifier(out) return logits","def forward ( self , input ) : s0 = s1 = self . stem ( input ) weights2 = None for i , cell in enumerate ( self . cells ) : if cell . reduction : weights = fluid . layers . softmax ( self . alphas_reduce ) n = 3 start = 2 weights2 = fluid . layers . softmax ( self . betas_reduce [ 0 : 2 ] ) for i in range ( self . _steps - 1 ) : end = start + n tw2 = fluid . layers . softmax ( self . betas_reduce [ start : end ] ) start = end n += 1 weights2 = fluid . layers . concat ( [ weights2 , tw2 ] ) else : weights = fluid . layers . softmax ( self . alphas_normal ) n = 3 start = 2 weights2 = fluid . layers . softmax ( self . betas_normal [ 0 : 2 ] ) for i in range ( self . _steps - 1 ) : end = start + n tw2 = fluid . layers . softmax ( self . 
betas_normal [ start : end ] ) start = end n += 1 weights2 = fluid . layers . concat ( [ weights2 , tw2 ] ) s0 , s1 = s1 , cell ( s0 , s1 , weights , weights2 ) out = self . global_pooling ( s1 ) out = fluid . layers . squeeze ( out , axes = [ 2 , 3 ] ) logits = self . classifier ( out ) return logits","if self . _method == ""PC-DARTS"" :",463 22273,"def read(self, iprot): if ( iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None ): fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.username = iprot.readString() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.password = iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.consumerKey = iprot.readString() else: iprot.skip(ftype) elif fid == 4: if ftype == TType.STRING: self.consumerSecret = iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()","def read ( self , iprot ) : if ( iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None ) : fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) return iprot . readStructBegin ( ) while True : ( fname , ftype , fid ) = iprot . readFieldBegin ( ) if ftype == TType . STOP : break if fid == 1 : self . username = iprot . readString ( ) else : iprot . skip ( ftype ) elif fid == 2 : self . password = iprot . readString ( ) else : iprot . skip ( ftype ) elif fid == 3 : self . consumerKey = iprot . readString ( ) else : iprot . skip ( ftype ) elif fid == 4 : self . consumerSecret = iprot . readString ( ) else : iprot . skip ( ftype ) else : iprot . skip ( ftype ) iprot . readFieldEnd ( ) iprot . readStructEnd ( )",if ftype == TType . STRING :,432 4521,"def include(elem, loader=None): if loader is None: loader = default_loader # look for xinclude elements i = 0 while i < len(elem): e = elem[i] if e.tag == XINCLUDE_INCLUDE: # process xinclude directive href = e.get(""href"") parse = e.get(""parse"", ""xml"") if parse == ""xml"": node = loader(href, parse) if node is None: raise FatalIncludeError(""cannot load %r as %r"" % (href, parse)) node = copy.copy(node) if e.tail: node.tail = (node.tail or """") + e.tail elem[i] = node elif parse == ""text"": text = loader(href, parse, e.get(""encoding"")) if text is None: raise FatalIncludeError(""cannot load %r as %r"" % (href, parse)) if i: node = elem[i - 1] node.tail = (node.tail or """") + text else: elem.text = (elem.text or """") + text + (e.tail or """") del elem[i] continue else: raise FatalIncludeError( ""unknown parse type in xi:include tag (%r)"" % parse ) elif e.tag == XINCLUDE_FALLBACK: raise FatalIncludeError( ""xi:fallback tag must be child of xi:include (%r)"" % e.tag ) else: include(e, loader) i = i + 1","def include ( elem , loader = None ) : if loader is None : loader = default_loader i = 0 while i < len ( elem ) : e = elem [ i ] href = e . get ( ""href"" ) parse = e . 
get ( ""parse"" , ""xml"" ) if parse == ""xml"" : node = loader ( href , parse ) if node is None : raise FatalIncludeError ( ""cannot load %r as %r"" % ( href , parse ) ) node = copy . copy ( node ) if e . tail : node . tail = ( node . tail or """" ) + e . tail elem [ i ] = node elif parse == ""text"" : text = loader ( href , parse , e . get ( ""encoding"" ) ) if text is None : raise FatalIncludeError ( ""cannot load %r as %r"" % ( href , parse ) ) if i : node = elem [ i - 1 ] node . tail = ( node . tail or """" ) + text else : elem . text = ( elem . text or """" ) + text + ( e . tail or """" ) del elem [ i ] continue else : raise FatalIncludeError ( ""unknown parse type in xi:include tag (%r)"" % parse ) elif e . tag == XINCLUDE_FALLBACK : raise FatalIncludeError ( ""xi:fallback tag must be child of xi:include (%r)"" % e . tag ) else : include ( e , loader ) i = i + 1",if e . tag == XINCLUDE_INCLUDE :,485 8794,"def make_row(self): res = [] for i in range(self.num_cols): t = sqlite3_column_type(self.stmnt, i) # print(""type"", t) if t == SQLITE_INTEGER: res.append(sqlite3_column_int(self.stmnt, i)) elif t == SQLITE_FLOAT: res.append(sqlite3_column_double(self.stmnt, i)) elif t == SQLITE_TEXT: res.append(sqlite3_column_text(self.stmnt, i)) else: raise NotImplementedError return tuple(res)","def make_row ( self ) : res = [ ] for i in range ( self . num_cols ) : t = sqlite3_column_type ( self . stmnt , i ) if t == SQLITE_INTEGER : res . append ( sqlite3_column_int ( self . stmnt , i ) ) elif t == SQLITE_FLOAT : res . append ( sqlite3_column_double ( self . stmnt , i ) ) res . append ( sqlite3_column_text ( self . stmnt , i ) ) else : raise NotImplementedError return tuple ( res )",elif t == SQLITE_TEXT :,172 14468,"def startNotificationService(self): if self.__enabled: if self.__polling_service: self.__polling_service.startNotificationService() if self.os_notifications_available: self.__os_file_service.startNotificationService() ",def startNotificationService ( self ) : if self . __enabled : if self . __polling_service : self . __polling_service . startNotificationService ( ) self . __os_file_service . startNotificationService ( ),if self . os_notifications_available :,69 21813,"def add_to_path(self, fnames): """"""Add fnames to path"""""" indexes = [] for path in fnames: project = self.get_source_project(path) if project.add_to_pythonpath(path): self.parent_widget.emit(SIGNAL(""pythonpath_changed()"")) indexes.append(self.get_index(path)) if indexes: self.reset_icon_provider() for index in indexes: self.update(index) ","def add_to_path ( self , fnames ) : """"""Add fnames to path"""""" indexes = [ ] for path in fnames : project = self . get_source_project ( path ) self . parent_widget . emit ( SIGNAL ( ""pythonpath_changed()"" ) ) indexes . append ( self . get_index ( path ) ) if indexes : self . reset_icon_provider ( ) for index in indexes : self . update ( index )",if project . add_to_pythonpath ( path ) :,132 18175,"def test_adding_a_mutually_exclusive_label_replaces_the_other( db, api_client, default_account, folder_and_message_maps, label ): # Verify a Gmail message can only have ONE of the 'all', 'trash', 'spam' # labels at a time. We specifically test that adding 'all'/ 'trash'/ 'spam' # to a message in one of the other two folders *replaces* # the existing label with the label being added. 
folder_map, message_map = folder_and_message_maps label_to_add = folder_map[label] for key in message_map: if key == label: continue message = message_map[key] resp_data = api_client.get_data(""/messages/{}"".format(message.public_id)) labels = resp_data[""labels""] assert len(labels) == 1 assert labels[0][""name""] == key existing_label = labels[0][""id""] # Adding 'all'/ 'trash'/ 'spam' removes the existing one, # irrespective of whether it's provided in the request or not. response = api_client.put_data( ""/messages/{}"".format(message.public_id), {""label_ids"": [label_to_add.category.public_id, existing_label]}, ) labels = json.loads(response.data)[""labels""] assert len(labels) == 1 assert labels[0][""name""] == label","def test_adding_a_mutually_exclusive_label_replaces_the_other ( db , api_client , default_account , folder_and_message_maps , label ) : folder_map , message_map = folder_and_message_maps label_to_add = folder_map [ label ] for key in message_map : continue message = message_map [ key ] resp_data = api_client . get_data ( ""/messages/{}"" . format ( message . public_id ) ) labels = resp_data [ ""labels"" ] assert len ( labels ) == 1 assert labels [ 0 ] [ ""name"" ] == key existing_label = labels [ 0 ] [ ""id"" ] response = api_client . put_data ( ""/messages/{}"" . format ( message . public_id ) , { ""label_ids"" : [ label_to_add . category . public_id , existing_label ] } , ) labels = json . loads ( response . data ) [ ""labels"" ] assert len ( labels ) == 1 assert labels [ 0 ] [ ""name"" ] == label",if key == label :,382 24943,"def _process_include(cmd, _model, _data, _default, options=None): if len(cmd) == 1: raise IOError(""Cannot execute 'include' command without a filename"") if len(cmd) > 2: raise IOError(""The 'include' command only accepts a single filename"") global Filename Filename = cmd[1] global Lineno Lineno = 0 try: scenarios = parse_data_commands(filename=cmd[1]) except IOError: raise err = sys.exc_info()[1] raise IOError(""Error parsing file '%s': %s"" % (Filename, str(err))) if scenarios is None: return False for scenario in scenarios: for cmd in scenarios[scenario]: if scenario not in _data: _data[scenario] = {} if cmd[0] in (""include"", ""import"", ""load""): _tmpdata = {} _process_data(cmd, _model, _tmpdata, _default, Filename, Lineno) if scenario is None: for key in _tmpdata: if key in _data: _data[key].update(_tmpdata[key]) else: _data[key] = _tmpdata[key] else: for key in _tmpdata: if key is None: _data[scenario].update(_tmpdata[key]) else: raise IOError( ""Cannot define a scenario within another scenario"" ) else: _process_data(cmd, _model, _data[scenario], _default, Filename, Lineno) return True","def _process_include ( cmd , _model , _data , _default , options = None ) : if len ( cmd ) == 1 : raise IOError ( ""Cannot execute 'include' command without a filename"" ) if len ( cmd ) > 2 : raise IOError ( ""The 'include' command only accepts a single filename"" ) global Filename Filename = cmd [ 1 ] global Lineno Lineno = 0 try : scenarios = parse_data_commands ( filename = cmd [ 1 ] ) except IOError : raise err = sys . 
exc_info ( ) [ 1 ] raise IOError ( ""Error parsing file '%s': %s"" % ( Filename , str ( err ) ) ) if scenarios is None : return False for scenario in scenarios : for cmd in scenarios [ scenario ] : _data [ scenario ] = { } if cmd [ 0 ] in ( ""include"" , ""import"" , ""load"" ) : _tmpdata = { } _process_data ( cmd , _model , _tmpdata , _default , Filename , Lineno ) if scenario is None : for key in _tmpdata : if key in _data : _data [ key ] . update ( _tmpdata [ key ] ) else : _data [ key ] = _tmpdata [ key ] else : for key in _tmpdata : if key is None : _data [ scenario ] . update ( _tmpdata [ key ] ) else : raise IOError ( ""Cannot define a scenario within another scenario"" ) else : _process_data ( cmd , _model , _data [ scenario ] , _default , Filename , Lineno ) return True",if scenario not in _data :,495 19979,"def add_channels(cls, voucher, add_channels): for add_channel in add_channels: channel = add_channel[""channel""] defaults = {""currency"": channel.currency_code} if ""discount_value"" in add_channel.keys(): defaults[""discount_value""] = add_channel.get(""discount_value"") if ""min_amount_spent"" in add_channel.keys(): defaults[""min_spent_amount""] = add_channel.get(""min_amount_spent"", None) models.VoucherChannelListing.objects.update_or_create( voucher=voucher, channel=channel, defaults=defaults, ) ","def add_channels ( cls , voucher , add_channels ) : for add_channel in add_channels : channel = add_channel [ ""channel"" ] defaults = { ""currency"" : channel . currency_code } if ""discount_value"" in add_channel . keys ( ) : defaults [ ""discount_value"" ] = add_channel . get ( ""discount_value"" ) defaults [ ""min_spent_amount"" ] = add_channel . get ( ""min_amount_spent"" , None ) models . VoucherChannelListing . objects . update_or_create ( voucher = voucher , channel = channel , defaults = defaults , )","if ""min_amount_spent"" in add_channel . keys ( ) :",176 6099,"def unknown_starttag(self, tag, attrs): # called for each start tag # attrs is a list of (attr, value) tuples # e.g. for
<pre class='screen'>, tag='pre', attrs=[('class', 'screen')]
    uattrs = []
    strattrs = """"
    if attrs:
        for key, value in attrs:
            value = (
                value.replace("">"", "">"").replace(""<"", ""<"").replace('""', """"")
            )
            value = self.bare_ampersand.sub(""&amp;"", value)
            # thanks to Kevin Marks for this breathtaking hack to deal with (valid) high-bit attribute values in UTF-8 feeds
            if not isinstance(value, unicode):
                value = value.decode(self.encoding, ""ignore"")
            try:
                # Currently, in Python 3 the key is already a str, and cannot be decoded again
                uattrs.append((unicode(key, self.encoding), value))
            except TypeError:
                uattrs.append((key, value))
        strattrs = u"""".join([u' %s=""%s""' % (key, value) for key, value in uattrs])
        if self.encoding:
            try:
                strattrs = strattrs.encode(self.encoding)
            except (UnicodeEncodeError, LookupError):
                pass
    if tag in self.elements_no_end_tag:
        self.pieces.append(""<%s%s />"" % (tag, strattrs))
    else:
        self.pieces.append(""<%s%s>"" % (tag, strattrs))","def unknown_starttag ( self , tag , attrs ) :     uattrs = [ ]  strattrs = """"  if attrs :   for key , value in attrs :    value = (     value . replace ( "">"" , "">"" ) . replace ( ""<"" , ""<"" ) . replace ( '""' , """"" )    )    value = self . bare_ampersand . sub ( ""&"" , value )            value = value . decode ( self . encoding , ""ignore"" )    try :         uattrs . append ( ( unicode ( key , self . encoding ) , value ) )    except TypeError :     uattrs . append ( ( key , value ) )   strattrs = u"""" . join ( [ u' %s=""%s""' % ( key , value ) for key , value in uattrs ] )   if self . encoding :    try :     strattrs = strattrs . encode ( self . encoding )    except ( UnicodeEncodeError , LookupError ) :     pass  if tag in self . elements_no_end_tag :   self . pieces . append ( ""<%s%s />"" % ( tag , strattrs ) )  else :   self . pieces . append ( ""<%s%s>"" % ( tag , strattrs ) )","if not isinstance ( value , unicode ) :",417
12236,"def serialize_content_range(value):
    if isinstance(value, (tuple, list)):
        if len(value) not in (2, 3):
            raise ValueError(
                ""When setting content_range to a list/tuple, it must ""
                ""be length 2 or 3 (not %r)"" % value
            )
        if len(value) == 2:
            begin, end = value
            length = None
        else:
            begin, end, length = value
        value = ContentRange(begin, end, length)
    value = str(value).strip()
    if not value:
        return None
    return value","def serialize_content_range ( value ) :  if isinstance ( value , ( tuple , list ) ) :   if len ( value ) not in ( 2 , 3 ) :    raise ValueError (     ""When setting content_range to a list/tuple, it must ""     ""be length 2 or 3 (not %r)"" % value    )       begin , end = value    length = None   else :    begin , end , length = value   value = ContentRange ( begin , end , length )  value = str ( value ) . strip ( )  if not value :   return None  return value",if len ( value ) == 2 :,169
3075,"def aggregate_to_full_tokens(attn, tokens, token_starts, token_ends, attention=True):
    to_combine = []
    spacy_attn = []
    spacy_token_starts = []
    spacy_token_ends = []
    spacy_start = None
    for token, prob, start, end in zip(tokens, attn, token_starts, token_ends):
        to_combine.append(prob)
        if spacy_start is None:
            spacy_start = start
        if token.endswith(""</w>""):
            if attention:
                spacy_attn.append(np.max(to_combine, 0))
            else:
                spacy_attn.append(to_combine[-1])
            spacy_token_starts.append(spacy_start)
            spacy_token_ends.append(end)
            to_combine = []
            spacy_start = None
    if attention:
        spacy_attn = spacy_attn / sum(spacy_attn)
        key = ""attention_weights""
    else:
        key = ""explanation""
    return {
        key: spacy_attn,
        ""token_starts"": spacy_token_starts,
        ""token_ends"": spacy_token_ends,
    }","def aggregate_to_full_tokens ( attn , tokens , token_starts , token_ends , attention = True ) :  to_combine = [ ]  spacy_attn = [ ]  spacy_token_starts = [ ]  spacy_token_ends = [ ]  spacy_start = None  for token , prob , start , end in zip ( tokens , attn , token_starts , token_ends ) :   to_combine . append ( prob )       spacy_start = start   if token . endswith ( """" ) :    if attention :     spacy_attn . append ( np . max ( to_combine , 0 ) )    else :     spacy_attn . append ( to_combine [ - 1 ] )    spacy_token_starts . append ( spacy_start )    spacy_token_ends . append ( end )    to_combine = [ ]    spacy_start = None  if attention :   spacy_attn = spacy_attn / sum ( spacy_attn )   key = ""attention_weights""  else :   key = ""explanation""  return {   key : spacy_attn ,   ""token_starts"" : spacy_token_starts ,   ""token_ends"" : spacy_token_ends ,  }",if spacy_start is None :,317
6445,"def _get_next_segment(self, segment_path, page_size, segment_cursor=None):
    if segment_path:
        if self.end_time and self._is_later_than_end_time(segment_path):
            return None
        return Segment(self.client, segment_path, page_size, segment_cursor)
    return None
","def _get_next_segment ( self , segment_path , page_size , segment_cursor = None ) :  if segment_path :       return None   return Segment ( self . client , segment_path , page_size , segment_cursor )  return None",if self . end_time and self . _is_later_than_end_time ( segment_path ) :,91
22289,"def update_completion(self):
    """"""Update completion model with exist tags""""""
    orig_text = self.widget.text()
    text = "", "".join(orig_text.replace("", "", "","").split("","")[:-1])
    tags = []
    for tag in self.tags_list:
        if "","" in orig_text:
            if orig_text[-1] not in ("","", "" ""):
                tags.append(""%s,%s"" % (text, tag))
            tags.append(""%s, %s"" % (text, tag))
        else:
            tags.append(tag)
    if tags != self.completer_model.stringList():
        self.completer_model.setStringList(tags)","def update_completion ( self ) :  """"""Update completion model with exist tags""""""  orig_text = self . widget . text ( )  text = "", "" . join ( orig_text . replace ( "", "" , "","" ) . split ( "","" ) [ : - 1 ] )  tags = [ ]  for tag in self . tags_list :       if orig_text [ - 1 ] not in ( "","" , "" "" ) :     tags . append ( ""%s,%s"" % ( text , tag ) )    tags . append ( ""%s, %s"" % ( text , tag ) )   else :    tags . append ( tag )  if tags != self . completer_model . stringList ( ) :   self . completer_model . setStringList ( tags )","if "","" in orig_text :",177
23090,"def configure_httpretty(sitedir):
    httpretty.enable()
    dir = Path(f""tests/test_sites/data/test_{sitedir}/"")
    data_file = dir / ""data.json""
    data = None
    with open(data_file) as f:
        data = json.load(f)
    for obj in data:
        method = httpretty.POST
        if obj[""method""] == ""GET"":
            method = httpretty.GET
        with open(dir / obj[""file""]) as f:
            httpretty.register_uri(
                method,
                obj[""url""],
                f.read(),
            )
","def configure_httpretty ( sitedir ) :  httpretty . enable ( )  dir = Path ( f""tests/test_sites/data/test_{sitedir}/"" )  data_file = dir / ""data.json""  data = None  with open ( data_file ) as f :   data = json . load ( f )  for obj in data :   method = httpretty . POST       method = httpretty . GET   with open ( dir / obj [ ""file"" ] ) as f :    httpretty . register_uri (     method ,     obj [ ""url"" ] ,     f . read ( ) ,    )","if obj [ ""method"" ] == ""GET"" :",176
21306,"def __call__(self, x, y, axes=2):
    xnd = x.ndimension()
    ynd = y.ndimension()
    # convert int argument to (list[int], list[int])
    if isinstance(axes, int):
        axes = range(xnd - axes, xnd), range(axes)
    # convert (int, int) to (list[int], list[int])
    if isinstance(axes[0], int):
        axes = (axes[0],), axes[1]
    if isinstance(axes[1], int):
        axes = axes[0], (axes[1],)
    # initialize empty indices
    x_ix = [None] * xnd
    y_ix = [None] * ynd
    out_ix = []
    # fill in repeated indices
    available_ix = iter(EINSUM_SYMBOLS_BASE)
    for ax1, ax2 in zip(*axes):
        repeat = next(available_ix)
        x_ix[ax1] = repeat
        y_ix[ax2] = repeat
    # fill in the rest, and maintain output order
    for i in range(xnd):
        if x_ix[i] is None:
            leave = next(available_ix)
            x_ix[i] = leave
            out_ix.append(leave)
    for i in range(ynd):
        if y_ix[i] is None:
            leave = next(available_ix)
            y_ix[i] = leave
            out_ix.append(leave)
    # form full string and contract!
    einsum_str = ""{},{}->{}"".format(*map("""".join, (x_ix, y_ix, out_ix)))
    return self.einsum(einsum_str, x, y)","def __call__ ( self , x , y , axes = 2 ) :  xnd = x . ndimension ( )  ynd = y . ndimension ( )   if isinstance ( axes , int ) :   axes = range ( xnd - axes , xnd ) , range ( axes )   if isinstance ( axes [ 0 ] , int ) :   axes = ( axes [ 0 ] , ) , axes [ 1 ]  if isinstance ( axes [ 1 ] , int ) :   axes = axes [ 0 ] , ( axes [ 1 ] , )   x_ix = [ None ] * xnd  y_ix = [ None ] * ynd  out_ix = [ ]   available_ix = iter ( EINSUM_SYMBOLS_BASE )  for ax1 , ax2 in zip ( * axes ) :   repeat = next ( available_ix )   x_ix [ ax1 ] = repeat   y_ix [ ax2 ] = repeat   for i in range ( xnd ) :   if x_ix [ i ] is None :    leave = next ( available_ix )    x_ix [ i ] = leave    out_ix . append ( leave )  for i in range ( ynd ) :       leave = next ( available_ix )    y_ix [ i ] = leave    out_ix . append ( leave )   einsum_str = ""{},{}->{}"" . format ( * map ( """" . join , ( x_ix , y_ix , out_ix ) ) )  return self . einsum ( einsum_str , x , y )",if y_ix [ i ] is None :,438
20318,"def insert_broken_add_sometimes(node):
    if node.op == theano.tensor.add:
        last_time_replaced[0] = not last_time_replaced[0]
        if last_time_replaced[0]:
            return [off_by_half(*node.inputs)]
    return False",def insert_broken_add_sometimes ( node ) :  if node . op == theano . tensor . add :   last_time_replaced [ 0 ] = not last_time_replaced [ 0 ]       return [ off_by_half ( * node . inputs ) ]  return False,if last_time_replaced [ 0 ] :,78
15963,"def __test_using_best_weights(self, ckpt_path, test_dataloaders):
    model = self.lightning_module
    # if user requests the best checkpoint but we don't have it, error
    if ckpt_path == ""best"" and not self.checkpoint_callback.best_model_path:
        raise MisconfigurationException(
            'ckpt_path is ""best"", but ModelCheckpoint is not configured to save the best model.'
        )
    # load best weights
    if ckpt_path is not None:
        # ckpt_path is 'best' so load the best model
        if ckpt_path == ""best"":
            ckpt_path = self.checkpoint_callback.best_model_path
        if len(ckpt_path) == 0:
            rank_zero_warn(
                f"".test() found no path for the best weights, {ckpt_path}. Please ""
                f""specify a path for a checkpoint .test(ckpt_path=PATH)""
            )
            return {}
        if not self._device_type == DeviceType.TPU:
            self.accelerator.barrier()
        ckpt = pl_load(ckpt_path, map_location=lambda storage, loc: storage)
        model.load_state_dict(ckpt[""state_dict""])
    # attach dataloaders
    if test_dataloaders is not None:
        self.data_connector.attach_dataloaders(model, test_dataloaders=test_dataloaders)
    # run tests
    self.tested_ckpt_path = ckpt_path
    results = self.fit(model)
    # teardown
    if self.is_function_implemented(""teardown""):
        model_ref = self.lightning_module
        model_ref.teardown(""test"")
    return results","def __test_using_best_weights ( self , ckpt_path , test_dataloaders ) :  model = self . lightning_module   if ckpt_path == ""best"" and not self . checkpoint_callback . best_model_path :   raise MisconfigurationException (    'ckpt_path is ""best"", but ModelCheckpoint is not configured to save the best model.'   )   if ckpt_path is not None :     if ckpt_path == ""best"" :    ckpt_path = self . checkpoint_callback . best_model_path   if len ( ckpt_path ) == 0 :    rank_zero_warn (     f"".test() found no path for the best weights, {ckpt_path}. Please ""     f""specify a path for a checkpoint .test(ckpt_path=PATH)""    )    return { }       self . accelerator . barrier ( )   ckpt = pl_load ( ckpt_path , map_location = lambda storage , loc : storage )   model . load_state_dict ( ckpt [ ""state_dict"" ] )   if test_dataloaders is not None :   self . data_connector . attach_dataloaders ( model , test_dataloaders = test_dataloaders )   self . tested_ckpt_path = ckpt_path  results = self . fit ( model )   if self . is_function_implemented ( ""teardown"" ) :   model_ref = self . lightning_module   model_ref . teardown ( ""test"" )  return results",if not self . _device_type == DeviceType . TPU :,434
15073,"def __call__(self, gradients):
    """"""Accumulates :obj:`gradients` on the current replica.""""""
    if not self._gradients:
        _ = self.step  # Create the step variable.
        self._gradients.extend(
            [
                tf.Variable(
                    tf.zeros_like(gradient),
                    trainable=False,
                    synchronization=tf.VariableSynchronization.ON_READ,
                    aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,
                )
                if gradient is not None
                else gradient
                for gradient in gradients
            ]
        )
    if len(gradients) != len(self._gradients):
        raise ValueError(
            ""Expected %s gradients, but got %d"" % (len(self._gradients), len(gradients))
        )
    for accum_gradient, gradient in zip(self._gradients, gradients):
        if accum_gradient is not None and gradient is not None:
            accum_gradient.assign_add(gradient)
    self._accum_steps.assign_add(1)","def __call__ ( self , gradients ) :  """"""Accumulates :obj:`gradients` on the current replica.""""""  if not self . _gradients :   _ = self . step   self . _gradients . extend (    [     tf . Variable (      tf . zeros_like ( gradient ) ,      trainable = False ,      synchronization = tf . VariableSynchronization . ON_READ ,      aggregation = tf . VariableAggregation . ONLY_FIRST_REPLICA ,     )          else gradient     for gradient in gradients    ]   )  if len ( gradients ) != len ( self . _gradients ) :   raise ValueError (    ""Expected %s gradients, but got %d"" % ( len ( self . _gradients ) , len ( gradients ) )   )  for accum_gradient , gradient in zip ( self . _gradients , gradients ) :   if accum_gradient is not None and gradient is not None :    accum_gradient . assign_add ( gradient )  self . _accum_steps . assign_add ( 1 )",if gradient is not None,292
4115,"def handle_startendtag(self, tag, attrs):
    for i, attr in enumerate(attrs):
        attrname, attrvalue = attr
        if attrname in self.INCLUDEATTRS and self.currentblock == """":
            self.addhtmlblock(attrvalue)
            attrs[i] = (
                attrname,
                self.callback(normalize_html(attrvalue).replace(""\n"", "" "")),
            )
    if self.currenttag is not None:
        self.currentblock += self.get_starttag_text()
        self.currentsrc += self.get_starttag_text()
    else:
        self.filesrc += self.buildtag(tag, attrs, startend=True)
","def handle_startendtag ( self , tag , attrs ) :  for i , attr in enumerate ( attrs ) :   attrname , attrvalue = attr       self . addhtmlblock ( attrvalue )    attrs [ i ] = (     attrname ,     self . callback ( normalize_html ( attrvalue ) . replace ( ""\n"" , "" "" ) ) ,    )  if self . currenttag is not None :   self . currentblock += self . get_starttag_text ( )   self . currentsrc += self . get_starttag_text ( )  else :   self . filesrc += self . buildtag ( tag , attrs , startend = True )","if attrname in self . INCLUDEATTRS and self . currentblock == """" :",178
4948,"def act_mapping(self, items, actions, mapping):
    """"""Executes all the actions on the list of pods.""""""
    success = True
    for action in actions:
        for key, method in mapping.items():
            if key in action:
                params = action.get(key)
                ret = method(items, params)
                if not ret:
                    success = False
    return success
","def act_mapping ( self , items , actions , mapping ) :  """"""Executes all the actions on the list of pods.""""""  success = True  for action in actions :   for key , method in mapping . items ( ) :    if key in action :     params = action . get ( key )     ret = method ( items , params )           success = False  return success",if not ret :,109
422,"def check_twobit_file(dbkey, GALAXY_DATA_INDEX_DIR):
    twobit_file = ""%s/twobit.loc"" % GALAXY_DATA_INDEX_DIR
    twobit_path = """"
    twobits = {}
    for i, line in enumerate(open(twobit_file)):
        line = line.rstrip(""\r\n"")
        if line and not line.startswith(""#""):
            fields = line.split(""\t"")
            if len(fields) < 2:
                continue
            twobits[(fields[0])] = fields[1]
    if dbkey in twobits:
        twobit_path = twobits[(dbkey)]
    return twobit_path","def check_twobit_file ( dbkey , GALAXY_DATA_INDEX_DIR ) :  twobit_file = ""%s/twobit.loc"" % GALAXY_DATA_INDEX_DIR  twobit_path = """"  twobits = { }  for i , line in enumerate ( open ( twobit_file ) ) :   line = line . rstrip ( ""\r\n"" )   if line and not line . startswith ( ""#"" ) :    fields = line . split ( ""\t"" )         continue    twobits [ ( fields [ 0 ] ) ] = fields [ 1 ]  if dbkey in twobits :   twobit_path = twobits [ ( dbkey ) ]  return twobit_path",if len ( fields ) < 2 :,177
11919,"def read(self, iprot):
    if (
        iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None
    ):
        fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
        return
    iprot.readStructBegin()
    while True:
        (fname, ftype, fid) = iprot.readFieldBegin()
        if ftype == TType.STOP:
            break
        if fid == 1:
            if ftype == TType.STRING:
                self.key = iprot.readString()
            else:
                iprot.skip(ftype)
        elif fid == 2:
            if ftype == TType.STRUCT:
                self.column_path = ColumnPath()
                self.column_path.read(iprot)
            else:
                iprot.skip(ftype)
        elif fid == 3:
            if ftype == TType.I32:
                self.consistency_level = iprot.readI32()
            else:
                iprot.skip(ftype)
        else:
            iprot.skip(ftype)
        iprot.readFieldEnd()
    iprot.readStructEnd()","def read ( self , iprot ) :  if (   iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated   and isinstance ( iprot . trans , TTransport . CReadableTransport )   and self . thrift_spec is not None   and fastbinary is not None  ) :   fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) )   return  iprot . readStructBegin ( )  while True :   ( fname , ftype , fid ) = iprot . readFieldBegin ( )   if ftype == TType . STOP :    break   if fid == 1 :    if ftype == TType . STRING :     self . key = iprot . readString ( )    else :     iprot . skip ( ftype )       if ftype == TType . STRUCT :     self . column_path = ColumnPath ( )     self . column_path . read ( iprot )    else :     iprot . skip ( ftype )   elif fid == 3 :    if ftype == TType . I32 :     self . consistency_level = iprot . readI32 ( )    else :     iprot . skip ( ftype )   else :    iprot . skip ( ftype )   iprot . readFieldEnd ( )  iprot . readStructEnd ( )",elif fid == 2 :,391
23731,"def beginRendering(self, canvasRegion):
    if None != self.canvas:
        if None == canvasRegion:
            # hold onto empty canvas for simplicity
            self.canvas.resize(0, 0)
            self.canvasWidth = self.canvasHeight = 0
        else:
            width = int(round(canvasRegion.width))
            height = int(round(canvasRegion.height))
            # if exactly same size, just clear...seems to save a little time
            if width == self.canvasWidth and height == self.canvasHeight:
                self.canvas.clear()
                # reuse same canvas
            else:
                # size changed
                self.canvas.resize(width, height)
                self.canvasWidth = width
                self.canvasHeight = height
            self.x0 = int(round(canvasRegion.x))
            self.y0 = int(round(canvasRegion.y))
            # workaround problem with special meaning of (-1,-1) to
            # setWidgetPosition (makes position off by one pixel).
            if self.x0 == -1 and self.y0 == -1:
                self.x0 = 0
            self.canvasPanel.setWidgetPosition(self.canvas, self.x0, self.y0)
    self.imageIndex = 0","def beginRendering ( self , canvasRegion ) :  if None != self . canvas :   if None == canvasRegion :       self . canvas . resize ( 0 , 0 )    self . canvasWidth = self . canvasHeight = 0   else :    width = int ( round ( canvasRegion . width ) )    height = int ( round ( canvasRegion . height ) )            self . canvas . clear ( )        else :         self . canvas . resize ( width , height )     self . canvasWidth = width     self . canvasHeight = height    self . x0 = int ( round ( canvasRegion . x ) )    self . y0 = int ( round ( canvasRegion . y ) )          if self . x0 == - 1 and self . y0 == - 1 :     self . x0 = 0    self . canvasPanel . setWidgetPosition ( self . canvas , self . x0 , self . y0 )  self . imageIndex = 0",if width == self . canvasWidth and height == self . canvasHeight :,350
6529,"def limits(self, value, square=False):
    """"""TODO: doc + server side implementation""""""
    if isinstance(value, six.string_types):
        import re
        match = re.match(r""(\d*)(\D*)"", value)
        if match is None:
            raise ValueError(
                ""do not understand limit specifier %r, examples are 90%, 3sigma""
            )
        else:
            value, type = match.groups()
            import ast
            value = ast.literal_eval(value)
            type = type.strip()
            if type in [""s"", ""sigma""]:
                return self.limits_sigma(value)
            elif type in [""ss"", ""sigmasquare""]:
                return self.limits_sigma(value, square=True)
            elif type in [""%"", ""percent""]:
                return self.limits_percentage(value)
            elif type in [""%s"", ""%square"", ""percentsquare""]:
                return self.limits_percentage(value, square=True)
    if value is None:
        return self.limits_percentage(square=square)
    else:
        return value","def limits ( self , value , square = False ) :  """"""TODO: doc + server side implementation""""""  if isinstance ( value , six . string_types ) :   import re   match = re . match ( r""(\d*)(\D*)"" , value )       raise ValueError (     ""do not understand limit specifier %r, examples are 90%, 3sigma""    )   else :    value , type = match . groups ( )    import ast    value = ast . literal_eval ( value )    type = type . strip ( )    if type in [ ""s"" , ""sigma"" ] :     return self . limits_sigma ( value )    elif type in [ ""ss"" , ""sigmasquare"" ] :     return self . limits_sigma ( value , square = True )    elif type in [ ""%"" , ""percent"" ] :     return self . limits_percentage ( value )    elif type in [ ""%s"" , ""%square"" , ""percentsquare"" ] :     return self . limits_percentage ( value , square = True )  if value is None :   return self . limits_percentage ( square = square )  else :   return value",if match is None :,298
874,"def load_coll(self, name, coll_config):
    if coll_config == ""$all"" and self.auto_handler:
        return self.auto_handler
    if isinstance(coll_config, str):
        index = coll_config
        archive_paths = None
        acl_paths = None
        default_access = self.default_access
    elif isinstance(coll_config, dict):
        index = coll_config.get(""index"")
        if not index:
            index = coll_config.get(""index_paths"")
        archive_paths = coll_config.get(""archive_paths"")
        acl_paths = coll_config.get(""acl_paths"")
        default_access = coll_config.get(""default_access"", self.default_access)
    else:
        raise Exception(""collection config must be string or dict"")
    # INDEX CONFIG
    if index:
        agg = init_index_agg({name: index})
    else:
        if not isinstance(coll_config, dict):
            raise Exception(""collection config missing"")
        sequence = coll_config.get(""sequence"")
        if sequence:
            return self.init_sequence(name, sequence)
        index_group = coll_config.get(""index_group"")
        if not index_group:
            raise Exception(""no index, index_group or sequence found"")
        timeout = int(coll_config.get(""timeout"", 0))
        agg = init_index_agg(index_group, True, timeout)
    # ARCHIVE CONFIG
    if not archive_paths:
        archive_paths = self.config.get(""archive_paths"")
    # ACCESS CONFIG
    access_checker = None
    if acl_paths:
        access_checker = AccessChecker(acl_paths, default_access)
    return DefaultResourceHandler(
        agg, archive_paths, rules_file=self.rules_file, access_checker=access_checker
    )","def load_coll ( self , name , coll_config ) :  if coll_config == ""$all"" and self . auto_handler :   return self . auto_handler  if isinstance ( coll_config , str ) :   index = coll_config   archive_paths = None   acl_paths = None   default_access = self . default_access  elif isinstance ( coll_config , dict ) :   index = coll_config . get ( ""index"" )   if not index :    index = coll_config . get ( ""index_paths"" )   archive_paths = coll_config . get ( ""archive_paths"" )   acl_paths = coll_config . get ( ""acl_paths"" )   default_access = coll_config . get ( ""default_access"" , self . default_access )  else :   raise Exception ( ""collection config must be string or dict"" )   if index :   agg = init_index_agg ( { name : index } )  else :   if not isinstance ( coll_config , dict ) :    raise Exception ( ""collection config missing"" )   sequence = coll_config . get ( ""sequence"" )   if sequence :    return self . init_sequence ( name , sequence )   index_group = coll_config . get ( ""index_group"" )       raise Exception ( ""no index, index_group or sequence found"" )   timeout = int ( coll_config . get ( ""timeout"" , 0 ) )   agg = init_index_agg ( index_group , True , timeout )   if not archive_paths :   archive_paths = self . config . get ( ""archive_paths"" )   access_checker = None  if acl_paths :   access_checker = AccessChecker ( acl_paths , default_access )  return DefaultResourceHandler (   agg , archive_paths , rules_file = self . rules_file , access_checker = access_checker  )",if not index_group :,480
6503,"def getOptions(self, section=None, ignoreWrong=True):
    """"""Reads configuration for jail(s) and adds enabled jails to __jails""""""
    opts = []
    self.__opts = ConfigReader.getOptions(self, ""Definition"", opts)
    if section is None:
        sections = self.sections()
    else:
        sections = [section]
    # Get the options of all jails.
    parse_status = 0
    for sec in sections:
        if sec == ""INCLUDES"":
            continue
        # use the cfg_share for filter/action caching and the same config for all
        # jails (use_config=...), therefore don't read it here:
        jail = JailReader(
            sec,
            force_enable=self.__force_enable,
            share_config=self.share_config,
            use_config=self._cfg,
        )
        ret = jail.getOptions()
        if ret:
            if jail.isEnabled():
                # at least one jail was successful:
                parse_status |= 1
                # We only add enabled jails
                self.__jails.append(jail)
        else:
            logSys.error(
                ""Errors in jail %r.%s"", sec, "" Skipping..."" if ignoreWrong else """"
            )
            self.__jails.append(jail)
            # at least one jail was invalid:
            parse_status |= 2
    return (ignoreWrong and parse_status & 1) or not (parse_status & 2)","def getOptions ( self , section = None , ignoreWrong = True ) :  """"""Reads configuration for jail(s) and adds enabled jails to __jails""""""  opts = [ ]  self . __opts = ConfigReader . getOptions ( self , ""Definition"" , opts )  if section is None :   sections = self . sections ( )  else :   sections = [ section ]   parse_status = 0  for sec in sections :   if sec == ""INCLUDES"" :    continue       jail = JailReader (    sec ,    force_enable = self . __force_enable ,    share_config = self . share_config ,    use_config = self . _cfg ,   )   ret = jail . getOptions ( )       if jail . isEnabled ( ) :         parse_status |= 1         self . __jails . append ( jail )   else :    logSys . error (     ""Errors in jail %r.%s"" , sec , "" Skipping..."" if ignoreWrong else """"    )    self . __jails . append ( jail )       parse_status |= 2  return ( ignoreWrong and parse_status & 1 ) or not ( parse_status & 2 )",if ret :,409
22968,"def write_Leo_file(self, fileName, outlineOnlyFlag, toString=False, toOPML=False):
    """"""Write the .leo file.""""""
    c, fc = self.c, self
    structure_errors = c.checkOutline()
    if structure_errors:
        g.error(""Major structural errors! outline not written"")
        return False
    if not outlineOnlyFlag or toOPML:
        g.app.recentFilesManager.writeRecentFilesFile(c)
        fc.writeAllAtFileNodesHelper()  # Ignore any errors.
    if fc.isReadOnly(fileName):
        return False
    if g.SQLITE and fileName and fileName.endswith("".db""):
        return fc.exportToSqlite(fileName)
    try:
        fc.putCount = 0
        fc.toString = toString
        if toString:
            ok = fc.writeToStringHelper(fileName)
        else:
            ok = fc.writeToFileHelper(fileName, toOPML)
    finally:
        fc.outputFile = None
        fc.toString = False
    return ok","def write_Leo_file ( self , fileName , outlineOnlyFlag , toString = False , toOPML = False ) :  """"""Write the .leo file.""""""  c , fc = self . c , self  structure_errors = c . checkOutline ( )  if structure_errors :   g . error ( ""Major structural errors! outline not written"" )   return False  if not outlineOnlyFlag or toOPML :   g . app . recentFilesManager . writeRecentFilesFile ( c )   fc . writeAllAtFileNodesHelper ( )  if fc . isReadOnly ( fileName ) :   return False  if g . SQLITE and fileName and fileName . endswith ( "".db"" ) :   return fc . exportToSqlite ( fileName )  try :   fc . putCount = 0   fc . toString = toString       ok = fc . writeToStringHelper ( fileName )   else :    ok = fc . writeToFileHelper ( fileName , toOPML )  finally :   fc . outputFile = None   fc . toString = False  return ok",if toString :,279
16097,"def _send_internal(self, bytes_):
    # buffering
    if self.pendings:
        self.pendings += bytes_
        bytes_ = self.pendings
    try:
        # reconnect if possible
        self._reconnect()
        # send message
        self.socket.sendall(bytes_)
        # send finished
        self.pendings = None
    except Exception:  # pylint: disable=broad-except
        # close socket
        self._close()
        # clear buffer if it exceeds max buffer size
        if self.pendings and (len(self.pendings) > self.bufmax):
            # TODO: add callback handler here
            self.pendings = None
        else:
            self.pendings = bytes_
","def _send_internal ( self , bytes_ ) :   if self . pendings :   self . pendings += bytes_   bytes_ = self . pendings  try :     self . _reconnect ( )     self . socket . sendall ( bytes_ )     self . pendings = None  except Exception :     self . _close ( )            self . pendings = None   else :    self . pendings = bytes_",if self . pendings and ( len ( self . pendings ) > self . bufmax ) :,194
9410,"def get_profile_cutoff(profile_id):
    cutoff_language = None
    if not len(profile_id_list):
        update_profile_id_list()
    if profile_id:
        cutoff_language = []
        for profile in profile_id_list:
            profileId, name, cutoff, items = profile.values()
            if cutoff:
                if profileId == int(profile_id):
                    for item in ast.literal_eval(items):
                        if item[""id""] == cutoff:
                            return [item]
                        elif cutoff == 65535:
                            cutoff_language.append(item)
        if not len(cutoff_language):
            cutoff_language = None
    return cutoff_language","def get_profile_cutoff ( profile_id ) :  cutoff_language = None  if not len ( profile_id_list ) :   update_profile_id_list ( )  if profile_id :   cutoff_language = [ ]   for profile in profile_id_list :    profileId , name , cutoff , items = profile . values ( )         if profileId == int ( profile_id ) :      for item in ast . literal_eval ( items ) :       if item [ ""id"" ] == cutoff :        return [ item ]       elif cutoff == 65535 :        cutoff_language . append ( item )   if not len ( cutoff_language ) :    cutoff_language = None  return cutoff_language",if cutoff :,206
20150,"def assert_conll_writer_output(
    dataset: InternalBioNerDataset,
    expected_output: List[str],
    sentence_splitter: SentenceSplitter = None,
):
    outfile_path = tempfile.mkstemp()[1]
    try:
        sentence_splitter = (
            sentence_splitter
            if sentence_splitter
            else NoSentenceSplitter(tokenizer=SpaceTokenizer())
        )
        writer = CoNLLWriter(sentence_splitter=sentence_splitter)
        writer.write_to_conll(dataset, Path(outfile_path))
        contents = [l.strip() for l in open(outfile_path).readlines() if l.strip()]
    finally:
        os.remove(outfile_path)
    assert contents == expected_output","def assert_conll_writer_output (  dataset : InternalBioNerDataset ,  expected_output : List [ str ] ,  sentence_splitter : SentenceSplitter = None , ) :  outfile_path = tempfile . mkstemp ( ) [ 1 ]  try :   sentence_splitter = (    sentence_splitter        else NoSentenceSplitter ( tokenizer = SpaceTokenizer ( ) )   )   writer = CoNLLWriter ( sentence_splitter = sentence_splitter )   writer . write_to_conll ( dataset , Path ( outfile_path ) )   contents = [ l . strip ( ) for l in open ( outfile_path ) . readlines ( ) if l . strip ( ) ]  finally :   os . remove ( outfile_path )  assert contents == expected_output",if sentence_splitter,175
7464,"def read(self, iprot):
    if (
        iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None
    ):
        fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
        return
    iprot.readStructBegin()
    while True:
        (fname, ftype, fid) = iprot.readFieldBegin()
        if ftype == TType.STOP:
            break
        if fid == 0:
            if ftype == TType.LIST:
                self.success = []
                (_etype987, _size984) = iprot.readListBegin()
                for _i988 in xrange(_size984):
                    _elem989 = iprot.readString()
                    self.success.append(_elem989)
                iprot.readListEnd()
            else:
                iprot.skip(ftype)
        elif fid == 1:
            if ftype == TType.STRUCT:
                self.o1 = MetaException()
                self.o1.read(iprot)
            else:
                iprot.skip(ftype)
        else:
            iprot.skip(ftype)
        iprot.readFieldEnd()
    iprot.readStructEnd()","def read ( self , iprot ) :  if (   iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated   and isinstance ( iprot . trans , TTransport . CReadableTransport )   and self . thrift_spec is not None   and fastbinary is not None  ) :   fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) )   return  iprot . readStructBegin ( )  while True :   ( fname , ftype , fid ) = iprot . readFieldBegin ( )   if ftype == TType . STOP :    break   if fid == 0 :    if ftype == TType . LIST :     self . success = [ ]     ( _etype987 , _size984 ) = iprot . readListBegin ( )     for _i988 in xrange ( _size984 ) :      _elem989 = iprot . readString ( )      self . success . append ( _elem989 )     iprot . readListEnd ( )    else :     iprot . skip ( ftype )   elif fid == 1 :         self . o1 = MetaException ( )     self . o1 . read ( iprot )    else :     iprot . skip ( ftype )   else :    iprot . skip ( ftype )   iprot . readFieldEnd ( )  iprot . readStructEnd ( )",if ftype == TType . STRUCT :,408
5722,"def get_versions(*, all=False, quiet=None):
    import bonobo
    from bonobo.util.pkgs import bonobo_packages
    yield _format_version(bonobo, quiet=quiet)
    if all:
        for name in sorted(bonobo_packages):
            if name != ""bonobo"":
                try:
                    mod = __import__(name.replace(""-"", ""_""))
                    try:
                        yield _format_version(mod, name=name, quiet=quiet)
                    except Exception as exc:
                        yield ""{} ({})"".format(name, exc)
                except ImportError as exc:
                    yield ""{} is not importable ({})."".format(name, exc)
","def get_versions ( * , all = False , quiet = None ) :  import bonobo  from bonobo . util . pkgs import bonobo_packages  yield _format_version ( bonobo , quiet = quiet )  if all :   for name in sorted ( bonobo_packages ) :         try :      mod = __import__ ( name . replace ( ""-"" , ""_"" ) )      try :       yield _format_version ( mod , name = name , quiet = quiet )      except Exception as exc :       yield ""{} ({})"" . format ( name , exc )     except ImportError as exc :      yield ""{} is not importable ({})."" . format ( name , exc )","if name != ""bonobo"" :",188
20086,"def visit_type_type(self, t: TypeType) -> ProperType:
    if isinstance(self.s, TypeType):
        typ = self.meet(t.item, self.s.item)
        if not isinstance(typ, NoneType):
            typ = TypeType.make_normalized(typ, line=t.line)
        return typ
    elif isinstance(self.s, Instance) and self.s.type.fullname == ""builtins.type"":
        return t
    elif isinstance(self.s, CallableType):
        return self.meet(t, self.s)
    else:
        return self.default(self.s)
","def visit_type_type ( self , t : TypeType ) -> ProperType :  if isinstance ( self . s , TypeType ) :   typ = self . meet ( t . item , self . s . item )       typ = TypeType . make_normalized ( typ , line = t . line )   return typ  elif isinstance ( self . s , Instance ) and self . s . type . fullname == ""builtins.type"" :   return t  elif isinstance ( self . s , CallableType ) :   return self . meet ( t , self . s )  else :   return self . default ( self . s )","if not isinstance ( typ , NoneType ) :",154
18465,"def get_ou_from_path(client, path):
    ou = client.list_roots()[""Roots""][0]
    if path == ""/"":
        ou[""Path""] = path
        return ou
    ou_pager = client.get_paginator(""list_organizational_units_for_parent"")
    for part in path.strip(""/"").split(""/""):
        found = False
        for page in ou_pager.paginate(ParentId=ou[""Id""]):
            for child in page.get(""OrganizationalUnits""):
                if child[""Name""] == part:
                    found = True
                    ou = child
                    break
            if found:
                break
        if found is False:
            raise ValueError(""No OU named:%r found in path: %s"" % (path, path))
    ou[""Path""] = path
    return ou","def get_ou_from_path ( client , path ) :  ou = client . list_roots ( ) [ ""Roots"" ] [ 0 ]  if path == ""/"" :   ou [ ""Path"" ] = path   return ou  ou_pager = client . get_paginator ( ""list_organizational_units_for_parent"" )  for part in path . strip ( ""/"" ) . split ( ""/"" ) :   found = False   for page in ou_pager . paginate ( ParentId = ou [ ""Id"" ] ) :    for child in page . get ( ""OrganizationalUnits"" ) :     if child [ ""Name"" ] == part :      found = True      ou = child      break    if found :     break       raise ValueError ( ""No OU named:%r found in path: %s"" % ( path , path ) )  ou [ ""Path"" ] = path  return ou",if found is False :,224
16668,"def hash_path(
    path, algorithm=""blake2b"", suffix_len=None, num_orig_chars=None, constant_len=False
):
    # pylint: disable=missing-function-docstring
    path = Path(path)
    if suffix_len is None:
        suffix_len = len(path.suffix)
    stem = str(path.stem)
    replaced_stem = stem.replace("" "", ""_"")
    replaced_stem = replaced_stem.replace(""-"", ""_"")
    filtered_stem = non_word_pattern.sub("""", replaced_stem)
    if len(filtered_stem) == len(stem):
        return replaced_stem
    path = str(path)
    if algorithm == ""blake2b"":
        # Using blake2b by default since it is fast and as good as sha-3: https://blake2.net/
        hashstr = hashlib.blake2b(path.encode(), digest_size=16).hexdigest()
    elif algorithm == ""md5"":
        hashstr = hashlib.md5(path.encode()).hexdigest()
    else:
        raise ValueError(""Unsupported algorithm {}"".format(algorithm))
    # 1 for underscore between
    max_orig_chars = PATH_MAXLEN - (len(hashstr) + 1) - suffix_len
    orig_take_chars = (
        max_orig_chars
        if num_orig_chars is None
        else min(num_orig_chars, max_orig_chars)
    )
    if orig_take_chars > 0:
        trunc_stem = filtered_stem[:orig_take_chars]
        if num_orig_chars and constant_len:
            trunc_stem = trunc_stem.ljust(orig_take_chars, ""_"")
        new_stem = ""{}_{}"".format(trunc_stem, hashstr)
    else:
        new_stem = hashstr
    return new_stem","def hash_path (  path , algorithm = ""blake2b"" , suffix_len = None , num_orig_chars = None , constant_len = False ) :   path = Path ( path )  if suffix_len is None :   suffix_len = len ( path . suffix )  stem = str ( path . stem )  replaced_stem = stem . replace ( "" "" , ""_"" )  replaced_stem = replaced_stem . replace ( ""-"" , ""_"" )  filtered_stem = non_word_pattern . sub ( """" , replaced_stem )  if len ( filtered_stem ) == len ( stem ) :   return replaced_stem  path = str ( path )  if algorithm == ""blake2b"" :     hashstr = hashlib . blake2b ( path . encode ( ) , digest_size = 16 ) . hexdigest ( )  elif algorithm == ""md5"" :   hashstr = hashlib . md5 ( path . encode ( ) ) . hexdigest ( )  else :   raise ValueError ( ""Unsupported algorithm {}"" . format ( algorithm ) )   max_orig_chars = PATH_MAXLEN - ( len ( hashstr ) + 1 ) - suffix_len  orig_take_chars = (   max_orig_chars   if num_orig_chars is None   else min ( num_orig_chars , max_orig_chars )  )  if orig_take_chars > 0 :   trunc_stem = filtered_stem [ : orig_take_chars ]       trunc_stem = trunc_stem . ljust ( orig_take_chars , ""_"" )   new_stem = ""{}_{}"" . format ( trunc_stem , hashstr )  else :   new_stem = hashstr  return new_stem",if num_orig_chars and constant_len :,452
13808,"def profile_by_id(request, user_id):
    user = User.objects.get(pk=user_id)
    if request.method == ""POST"":
        form = ProfileForm(request.POST, request.FILES)
        if form.is_valid():
            print(""made it!"")
            if request.POST.get(""username"") != user.username:
                user.username = request.POST.get(""username"")
            if request.POST.get(""first_name"") != user.first_name:
                user.first_name = request.POST.get(""first_name"")
            if request.POST.get(""last_name"") != user.last_name:
                user.last_name = request.POST.get(""last_name"")
            if request.POST.get(""email"") != user.email:
                user.email = request.POST.get(""email"")
            if request.POST.get(""password""):
                user.set_password(request.POST.get(""password""))
            if request.FILES:
                user.userprofile.image = store_uploaded_file(
                    user.username + ""."" + request.FILES[""picture""].name.split(""."")[-1],
                    request.FILES[""picture""],
                )
                user.userprofile.save()
            user.save()
            messages.info(request, ""User Updated"")
    return render(request, ""taskManager/profile.html"", {""user"": user})","def profile_by_id ( request , user_id ) :  user = User . objects . get ( pk = user_id )  if request . method == ""POST"" :   form = ProfileForm ( request . POST , request . FILES )   if form . is_valid ( ) :    print ( ""made it!"" )    if request . POST . get ( ""username"" ) != user . username :     user . username = request . POST . get ( ""username"" )    if request . POST . get ( ""first_name"" ) != user . first_name :     user . first_name = request . POST . get ( ""first_name"" )    if request . POST . get ( ""last_name"" ) != user . last_name :     user . last_name = request . POST . get ( ""last_name"" )         user . email = request . POST . get ( ""email"" )    if request . POST . get ( ""password"" ) :     user . set_password ( request . POST . get ( ""password"" ) )    if request . FILES :     user . userprofile . image = store_uploaded_file (      user . username + ""."" + request . FILES [ ""picture"" ] . name . split ( ""."" ) [ - 1 ] ,      request . FILES [ ""picture"" ] ,     )     user . userprofile . save ( )    user . save ( )    messages . info ( request , ""User Updated"" )  return render ( request , ""taskManager/profile.html"" , { ""user"" : user } )","if request . POST . get ( ""email"" ) != user . email :",383
23303,"def __call__(self, list_data):
    coords, feats, labels = list(zip(*list_data))
    coords_batch, feats_batch, labels_batch = [], [], []
    batch_num_points = 0
    for batch_id, _ in enumerate(coords):
        num_points = coords[batch_id].shape[0]
        batch_num_points += num_points
        if self.limit_numpoints > 0 and batch_num_points > self.limit_numpoints:
            num_full_points = sum(len(c) for c in coords)
            num_full_batch_size = len(coords)
            logging.warning(
                f""\tCannot fit {num_full_points} points into""
                "" {self.limit_numpoints} points limit. Truncating batch ""
                f""size at {batch_id} out of {num_full_batch_size} with ""
                f""{batch_num_points - num_points}.""
            )
            break
        coords_batch.append(coords[batch_id])
        feats_batch.append(feats[batch_id])
        labels_batch.append(labels[batch_id])
    # Concatenate all lists
    return sparse_collate(
        coords_batch,
        feats_batch,
        labels_batch,
        dtype=self.dtype,
        device=self.device,
    )","def __call__ ( self , list_data ) :  coords , feats , labels = list ( zip ( * list_data ) )  coords_batch , feats_batch , labels_batch = [ ] , [ ] , [ ]  batch_num_points = 0  for batch_id , _ in enumerate ( coords ) :   num_points = coords [ batch_id ] . shape [ 0 ]   batch_num_points += num_points       num_full_points = sum ( len ( c ) for c in coords )    num_full_batch_size = len ( coords )    logging . warning (     f""\tCannot fit {num_full_points} points into""     "" {self.limit_numpoints} points limit. Truncating batch ""     f""size at {batch_id} out of {num_full_batch_size} with ""     f""{batch_num_points - num_points}.""    )    break   coords_batch . append ( coords [ batch_id ] )   feats_batch . append ( feats [ batch_id ] )   labels_batch . append ( labels [ batch_id ] )   return sparse_collate (   coords_batch ,   feats_batch ,   labels_batch ,   dtype = self . dtype ,   device = self . device ,  )",if self . limit_numpoints > 0 and batch_num_points > self . limit_numpoints :,364
14459,"def dtdwrite(dtdfile, entities, force=False):
    if not entities:
        return
    dtdEntities = [
        '<!ENTITY %s ""%s"">' % (id, escape(val, entitydefs))
        for id, val in entities.items()
    ]
    dtdEntities.sort()
    dtdFileData = ""\n"".join(dtdEntities) + ""\n""
    if type(dtdfile) in types.StringTypes:
        if os.path.exists(dtdfile):
            if force:
                os.remove(dtdfile)
            else:
                raise DTDGenError(
                    ""dtd '%s' already exists, use '--force' to ""
                    ""allow overwrite"" % dtdfile
                )
        dtdf = open(dtdfile, ""w"")
    else:
        dtdf = dtdfile
    dtdf.write(dtdFileData)
    if dtdf != dtdfile:
        dtdf.close()
","def dtdwrite ( dtdfile , entities , force = False ) :  if not entities :   return  dtdEntities = [   '' % ( id , escape ( val , entitydefs ) )   for id , val in entities . items ( )  ]  dtdEntities . sort ( )  dtdFileData = ""\n"" . join ( dtdEntities ) + ""\n""  if type ( dtdfile ) in types . StringTypes :       if force :     os . remove ( dtdfile )    else :     raise DTDGenError (      ""dtd '%s' already exists, use '--force' to ""      ""allow overwrite"" % dtdfile     )   dtdf = open ( dtdfile , ""w"" )  else :   dtdf = dtdfile  dtdf . write ( dtdFileData )  if dtdf != dtdfile :   dtdf . close ( )",if os . path . exists ( dtdfile ) :,255
19588,"def _mixture_(self) -> Sequence[Tuple[float, np.ndarray]]:
    ps = []
    for pauli in self._error_probabilities:
        Pi = np.identity(1)
        for gate in pauli:
            if gate == ""I"":
                Pi = np.kron(Pi, protocols.unitary(identity.I))
            elif gate == ""X"":
                Pi = np.kron(Pi, protocols.unitary(pauli_gates.X))
            elif gate == ""Y"":
                Pi = np.kron(Pi, protocols.unitary(pauli_gates.Y))
            elif gate == ""Z"":
                Pi = np.kron(Pi, protocols.unitary(pauli_gates.Z))
        ps.append(Pi)
    return tuple(zip(self._error_probabilities.values(), ps))","def _mixture_ ( self ) -> Sequence [ Tuple [ float , np . ndarray ] ] :  ps = [ ]  for pauli in self . _error_probabilities :   Pi = np . identity ( 1 )   for gate in pauli :    if gate == ""I"" :     Pi = np . kron ( Pi , protocols . unitary ( identity . I ) )    elif gate == ""X"" :     Pi = np . kron ( Pi , protocols . unitary ( pauli_gates . X ) )         Pi = np . kron ( Pi , protocols . unitary ( pauli_gates . Y ) )    elif gate == ""Z"" :     Pi = np . kron ( Pi , protocols . unitary ( pauli_gates . Z ) )   ps . append ( Pi )  return tuple ( zip ( self . _error_probabilities . values ( ) , ps ) )","elif gate == ""Y"" :",227
3126,"def newtodolink(self, url, origin):
    # Call self.format_url(), since the URL here
    # is now a (URL, fragment) pair.
    if self.todo.has_key(url):
        if origin not in self.todo[url]:
            self.todo[url].append(origin)
        self.note(3, ""  Seen todo link %s"", self.format_url(url))
    else:
        self.todo[url] = [origin]
        self.note(3, ""  New todo link %s"", self.format_url(url))","def newtodolink ( self , url , origin ) :    if self . todo . has_key ( url ) :       self . todo [ url ] . append ( origin )   self . note ( 3 , ""  Seen todo link %s"" , self . format_url ( url ) )  else :   self . todo [ url ] = [ origin ]   self . note ( 3 , ""  New todo link %s"" , self . format_url ( url ) )",if origin not in self . todo [ url ] :,148
16826,"def checkpoint(self, metrics_dict, iteration, model, optimizer, lr_scheduler):
    # Return early if checkpoint_runway has not been met
    if self.checkpoint_runway:
        if iteration < self.checkpoint_runway:
            return
        elif iteration == self.checkpoint_runway:
            print(""Checkpoint runway has been met. Checkpointing will now occur."")
    if (
        self.checkpoint_every
        and iteration > 0
        and iteration % self.checkpoint_every == 0
    ):
        # Save the checkpoint regardless of performance
        score = None
        state = self.bundle_state(iteration, score, model, optimizer, lr_scheduler)
        checkpoint_path = f""{self.checkpoint_dir}/model_checkpoint_{iteration}.pth""
        torch.save(state, checkpoint_path)
    if self.checkpoint_best and self.checkpoint_metric in metrics_dict:
        score = metrics_dict[self.checkpoint_metric]
        if self.is_best(score):
            if self.verbose:
                print(
                    f""Saving model at iteration {iteration:.2f} with best ""
                    f""({self.checkpoint_metric_mode}) score ""
                    f""{self.checkpoint_metric}={score:.3f}""
                )
            self.best_model_found = True
            self.best_iteration = iteration
            self.best_score = score
            # Save the checkpoint, overriding previous best if it exists
            state = self.bundle_state(iteration, score, model, optimizer, lr_scheduler)
            checkpoint_path = f""{self.checkpoint_dir}/best_model.pth""
            torch.save(state, checkpoint_path)","def checkpoint ( self , metrics_dict , iteration , model , optimizer , lr_scheduler ) :   if self . checkpoint_runway :   if iteration < self . checkpoint_runway :    return   elif iteration == self . checkpoint_runway :    print ( ""Checkpoint runway has been met. Checkpointing will now occur."" )  if (   self . checkpoint_every   and iteration > 0   and iteration % self . checkpoint_every == 0  ) :     score = None   state = self . bundle_state ( iteration , score , model , optimizer , lr_scheduler )   checkpoint_path = f""{self.checkpoint_dir}/model_checkpoint_{iteration}.pth""   torch . save ( state , checkpoint_path )  if self . checkpoint_best and self . checkpoint_metric in metrics_dict :   score = metrics_dict [ self . checkpoint_metric ]   if self . is_best ( score ) :         print (      f""Saving model at iteration {iteration:.2f} with best ""      f""({self.checkpoint_metric_mode}) score ""      f""{self.checkpoint_metric}={score:.3f}""     )    self . best_model_found = True    self . best_iteration = iteration    self . best_score = score       state = self . bundle_state ( iteration , score , model , optimizer , lr_scheduler )    checkpoint_path = f""{self.checkpoint_dir}/best_model.pth""    torch . save ( state , checkpoint_path )",if self . verbose :,441
11521,"def format_bpe_text(symbols, delimiter=b""@@""):
    """"""Convert a sequence of bpe words into sentence.""""""
    words = []
    word = b""""
    if isinstance(symbols, str):
        symbols = symbols.encode()
    delimiter_len = len(delimiter)
    for symbol in symbols:
        if len(symbol) >= delimiter_len and symbol[-delimiter_len:] == delimiter:
            word += symbol[:-delimiter_len]
        else:  # end of a word
            word += symbol
            words.append(word)
            word = b""""
    return b"" "".join(words)
","def format_bpe_text ( symbols , delimiter = b""@@"" ) :  """"""Convert a sequence of bpe words into sentence.""""""  words = [ ]  word = b""""  if isinstance ( symbols , str ) :   symbols = symbols . encode ( )  delimiter_len = len ( delimiter )  for symbol in symbols :       word += symbol [ : - delimiter_len ]   else :    word += symbol    words . append ( word )    word = b""""  return b"" "" . join ( words )",if len ( symbol ) >= delimiter_len and symbol [ - delimiter_len : ] == delimiter :,154
15967,"def configure_slurm_ddp(self):
    # extract SLURM flag vars
    # whenever we have the correct number of tasks, we let slurm manage processes
    # otherwise we launch the required number of processes
    if self.use_ddp or self.use_ddp2:
        num_requested_gpus = self.num_gpus * self.num_nodes
        num_slurm_tasks = 0
        try:
            num_slurm_tasks = int(os.environ[""SLURM_NTASKS""])
            self.is_slurm_managing_tasks = num_slurm_tasks == num_requested_gpus
            # enable slurm cpu
            if num_requested_gpus == 0:
                self.is_slurm_managing_tasks = num_slurm_tasks == self.num_processes
            # in interactive mode we don't manage tasks
            job_name = os.environ[""SLURM_JOB_NAME""]
            if job_name == ""bash"":
                self.is_slurm_managing_tasks = False
        except Exception:
            # likely not on slurm, so set the slurm managed flag to false
            self.is_slurm_managing_tasks = False
    # used for tests only, set this flag to simulate slurm managing a task
    try:
        should_fake = int(os.environ[""FAKE_SLURM_MANAGING_TASKS""])
        if should_fake:
            self.is_slurm_managing_tasks = True
    except Exception:
        pass
    # notify user the that slurm is managing tasks
    if self.is_slurm_managing_tasks:
        rank_zero_info(""Multi-processing is handled by Slurm."")","def configure_slurm_ddp ( self ) :     if self . use_ddp or self . use_ddp2 :   num_requested_gpus = self . num_gpus * self . num_nodes   num_slurm_tasks = 0   try :    num_slurm_tasks = int ( os . environ [ ""SLURM_NTASKS"" ] )    self . is_slurm_managing_tasks = num_slurm_tasks == num_requested_gpus       if num_requested_gpus == 0 :     self . is_slurm_managing_tasks = num_slurm_tasks == self . num_processes       job_name = os . environ [ ""SLURM_JOB_NAME"" ]         self . is_slurm_managing_tasks = False   except Exception :       self . is_slurm_managing_tasks = False   try :   should_fake = int ( os . environ [ ""FAKE_SLURM_MANAGING_TASKS"" ] )   if should_fake :    self . is_slurm_managing_tasks = True  except Exception :   pass   if self . is_slurm_managing_tasks :   rank_zero_info ( ""Multi-processing is handled by Slurm."" )","if job_name == ""bash"" :",451
13716,"def _build_dom(cls, content, mode):
    assert mode in (""html"", ""xml"")
    if mode == ""html"":
        if not hasattr(THREAD_STORAGE, ""html_parser""):
            THREAD_STORAGE.html_parser = HTMLParser()
        dom = defusedxml.lxml.parse(
            StringIO(content), parser=THREAD_STORAGE.html_parser
        )
        return dom.getroot()
    else:
        if not hasattr(THREAD_STORAGE, ""xml_parser""):
            THREAD_STORAGE.xml_parser = XMLParser()
        dom = defusedxml.lxml.parse(BytesIO(content), parser=THREAD_STORAGE.xml_parser)
        return dom.getroot()
","def _build_dom ( cls , content , mode ) :  assert mode in ( ""html"" , ""xml"" )  if mode == ""html"" :   if not hasattr ( THREAD_STORAGE , ""html_parser"" ) :    THREAD_STORAGE . html_parser = HTMLParser ( )   dom = defusedxml . lxml . parse (    StringIO ( content ) , parser = THREAD_STORAGE . html_parser   )   return dom . getroot ( )  else :       THREAD_STORAGE . xml_parser = XMLParser ( )   dom = defusedxml . lxml . parse ( BytesIO ( content ) , parser = THREAD_STORAGE . xml_parser )   return dom . getroot ( )","if not hasattr ( THREAD_STORAGE , ""xml_parser"" ) :",175
2097,"def useful(self, pos):
    global TIMESTAMP
    TIMESTAMP += 1
    square = self.squares[pos]
    if self.useful_fast(square):
        return True
    old_hash = self.zobrist.hash
    self.zobrist.update(square, self.color)
    empties = opps = weak_opps = neighs = weak_neighs = 0
    for neighbour in square.neighbours:
        neighcolor = neighbour.color
        if neighcolor == EMPTY:
            empties += 1
            continue
        neighbour_ref = neighbour.find()
        if neighbour_ref.timestamp != TIMESTAMP:
            if neighcolor == self.color:
                neighs += 1
            else:
                opps += 1
            neighbour_ref.timestamp = TIMESTAMP
            neighbour_ref.temp_ledges = neighbour_ref.ledges
        neighbour_ref.temp_ledges -= 1
        if neighbour_ref.temp_ledges == 0:
            if neighcolor == self.color:
                weak_neighs += 1
            else:
                weak_opps += 1
                neighbour_ref.remove(neighbour_ref, update=False)
    dupe = self.zobrist.dupe()
    self.zobrist.hash = old_hash
    strong_neighs = neighs - weak_neighs
    strong_opps = opps - weak_opps
    return not dupe and (
        empties or weak_opps or (strong_neighs and (strong_opps or weak_neighs))
    )","def useful ( self , pos ) :  global TIMESTAMP  TIMESTAMP += 1  square = self . squares [ pos ]  if self . useful_fast ( square ) :   return True  old_hash = self . zobrist . hash  self . zobrist . update ( square , self . color )  empties = opps = weak_opps = neighs = weak_neighs = 0  for neighbour in square . neighbours :   neighcolor = neighbour . color   if neighcolor == EMPTY :    empties += 1    continue   neighbour_ref = neighbour . find ( )   if neighbour_ref . timestamp != TIMESTAMP :         neighs += 1    else :     opps += 1    neighbour_ref . timestamp = TIMESTAMP    neighbour_ref . temp_ledges = neighbour_ref . ledges   neighbour_ref . temp_ledges -= 1   if neighbour_ref . temp_ledges == 0 :         weak_neighs += 1    else :     weak_opps += 1     neighbour_ref . remove ( neighbour_ref , update = False )  dupe = self . zobrist . dupe ( )  self . zobrist . hash = old_hash  strong_neighs = neighs - weak_neighs  strong_opps = opps - weak_opps  return not dupe and (   empties or weak_opps or ( strong_neighs and ( strong_opps or weak_neighs ) )  )",if neighcolor == self . color :,412
18580,"def __call__(self, engine: Optional[Engine], name: Optional[str] = None) -> None:
    value = self.get_param()
    if isinstance(value, list):
        if len(value) != len(self.optimizer_param_groups):
            raise ValueError(
                ""size of value is different than optimizer_param_groups ""
                f""{len(value)} != {len(self.optimizer_param_groups)}""
            )
        for i, param_group in enumerate(self.optimizer_param_groups):
            param_group[self.param_name] = value[i]
    else:
        for i, param_group in enumerate(self.optimizer_param_groups):
            param_group[self.param_name] = value
    if name is None:
        name = self.param_name
    if self.save_history and engine:
        if not hasattr(engine.state, ""param_history"") or engine.state.param_history is None:  # type: ignore
            setattr(engine.state, ""param_history"", {})
        engine.state.param_history.setdefault(name, [])  # type: ignore[attr-defined]
        values = [pg[self.param_name] for pg in self.optimizer_param_groups]
        engine.state.param_history[name].append(values)  # type: ignore[attr-defined]
    self.event_index += 1","def __call__ ( self , engine : Optional [ Engine ] , name : Optional [ str ] = None ) -> None :  value = self . get_param ( )  if isinstance ( value , list ) :   if len ( value ) != len ( self . optimizer_param_groups ) :    raise ValueError (     ""size of value is different than optimizer_param_groups ""     f""{len(value)} != {len(self.optimizer_param_groups)}""    )   for i , param_group in enumerate ( self . optimizer_param_groups ) :    param_group [ self . param_name ] = value [ i ]  else :   for i , param_group in enumerate ( self . optimizer_param_groups ) :    param_group [ self . param_name ] = value  if name is None :   name = self . param_name  if self . save_history and engine :       setattr ( engine . state , ""param_history"" , { } )   engine . state . param_history . setdefault ( name , [ ] )   values = [ pg [ self . param_name ] for pg in self . optimizer_param_groups ]   engine . state . param_history [ name ] . append ( values )  self . event_index += 1","if not hasattr ( engine . state , ""param_history"" ) or engine . state . param_history is None :",358
3655,"def getitem_tuple_lower(context, builder, sig, args):
    tupty, idx = sig.args
    idx = idx.literal_value
    tup, _ = args
    if isinstance(idx, int):
        if idx < 0:
            idx += len(tupty)
        if not 0 <= idx < len(tupty):
            raise IndexError(""cannot index at %d in %s"" % (idx, tupty))
        res = builder.extract_value(tup, idx)
    elif isinstance(idx, slice):
        items = cgutils.unpack_tuple(builder, tup)[idx]
        res = context.make_tuple(builder, sig.return_type, items)
    else:
        raise NotImplementedError(""unexpected index %r for %s"" % (idx, sig.args[0]))
    return impl_ret_borrowed(context, builder, sig.return_type, res)","def getitem_tuple_lower ( context , builder , sig , args ) :  tupty , idx = sig . args  idx = idx . literal_value  tup , _ = args  if isinstance ( idx , int ) :       idx += len ( tupty )   if not 0 <= idx < len ( tupty ) :    raise IndexError ( ""cannot index at %d in %s"" % ( idx , tupty ) )   res = builder . extract_value ( tup , idx )  elif isinstance ( idx , slice ) :   items = cgutils . unpack_tuple ( builder , tup ) [ idx ]   res = context . make_tuple ( builder , sig . return_type , items )  else :   raise NotImplementedError ( ""unexpected index %r for %s"" % ( idx , sig . args [ 0 ] ) )  return impl_ret_borrowed ( context , builder , sig . return_type , res )",if idx < 0 :,223
25626,"def migrate_InternalTip(self):
    for old_obj in self.session_old.query(self.model_from[""InternalTip""]):
        new_obj = self.model_to[""InternalTip""]()
        for key in new_obj.__table__.columns._data.keys():
            new_obj.status = ""antani!""
            if key == ""status"" or key == ""substatus"":
                pass
            elif key in old_obj.__table__.columns._data.keys():
                setattr(new_obj, key, getattr(old_obj, key))
        self.session_new.add(new_obj)
        if old_obj.receipt_hash:
            new_wbtip = self.model_to[""WhistleblowerTip""]()
            new_wbtip.id = old_obj.id
            new_wbtip.tid = old_obj.tid
            new_wbtip.receipt_hash = old_obj.receipt_hash
            self.session_new.add(new_wbtip)
","def migrate_InternalTip ( self ) :  for old_obj in self . session_old . query ( self . model_from [ ""InternalTip"" ] ) :   new_obj = self . model_to [ ""InternalTip"" ] ( )   for key in new_obj . __table__ . columns . _data . keys ( ) :    new_obj . status = ""antani!""    if key == ""status"" or key == ""substatus"" :     pass         setattr ( new_obj , key , getattr ( old_obj , key ) )   self . session_new . add ( new_obj )   if old_obj . receipt_hash :    new_wbtip = self . model_to [ ""WhistleblowerTip"" ] ( )    new_wbtip . id = old_obj . id    new_wbtip . tid = old_obj . tid    new_wbtip . receipt_hash = old_obj . receipt_hash    self . session_new . add ( new_wbtip )",elif key in old_obj . __table__ . columns . _data . keys ( ) :,270
6116,"def postprocess_element(elements, processed):
    """"""Fix unresolved references""""""
    # elements variable contains all elements and complexTypes defined in http://www.w3.org/2001/XMLSchema
    # (elements referenced before its definition, thanks .net)
    # avoid already processed elements:
    if elements in processed:
        return
    processed.append(elements)
    for k, v in elements.items():
        if isinstance(v, Struct):
            if v != elements:  # TODO: fix recursive elements
                try:
                    postprocess_element(v, processed)
                except RuntimeError as e:  # maximum recursion depth exceeded
                    warnings.warn(unicode(e), RuntimeWarning)
            if v.refers_to:  # extension base?
                if isinstance(v.refers_to, dict):
                    extend_element(v, v.refers_to)
                    # clean the reference:
                    v.refers_to = None
                else:  # ""alias"", just replace
                    ##log.debug('Replacing %s = %s' % (k, v.refers_to))
                    elements[k] = v.refers_to
            if v.array:
                elements[k] = [v]  # convert arrays to python lists
        if isinstance(v, list):
            for n in v:  # recurse list
                if isinstance(n, (Struct, list)):
                    # if n != elements:  # TODO: fix recursive elements
                    postprocess_element(n, processed)","def postprocess_element ( elements , processed ) :  """"""Fix unresolved references""""""     if elements in processed :   return  processed . append ( elements )  for k , v in elements . items ( ) :   if isinstance ( v , Struct ) :    if v != elements :     try :      postprocess_element ( v , processed )     except RuntimeError as e :      warnings . warn ( unicode ( e ) , RuntimeWarning )    if v . refers_to :     if isinstance ( v . refers_to , dict ) :      extend_element ( v , v . refers_to )           v . refers_to = None     else :           elements [ k ] = v . refers_to    if v . array :     elements [ k ] = [ v ]   if isinstance ( v , list ) :    for n in v :                postprocess_element ( n , processed )","if isinstance ( n , ( Struct , list ) ) :",410
14358,"def _open(file_, mode=""r""):
    """"""Open file object given filenames, open files or even archives.""""""
    if isinstance(file_, string_types):
        _, ext = path.splitext(file_)
        if ext in {"".bz2"", "".gz""}:
            s = tarfile.open(file_)
            return s.extractfile(s.next())
        else:
            return open(file_, mode)
    return file_","def _open ( file_ , mode = ""r"" ) :  """"""Open file object given filenames, open files or even archives.""""""  if isinstance ( file_ , string_types ) :   _ , ext = path . splitext ( file_ )       s = tarfile . open ( file_ )    return s . extractfile ( s . next ( ) )   else :    return open ( file_ , mode )  return file_","if ext in { "".bz2"" , "".gz"" } :",108
25189,"def d3_box_overlap_kernel(boxes, qboxes, rinc, criterion=-1):
    # ONLY support overlap in CAMERA, not lidar.
    # TODO: change to use prange for parallel mode, should check the difference
    N, K = boxes.shape[0], qboxes.shape[0]
    for i in numba.prange(N):
        for j in numba.prange(K):
            if rinc[i, j] > 0:
                # iw = (min(boxes[i, 1] + boxes[i, 4], qboxes[j, 1] +
                #         qboxes[j, 4]) - max(boxes[i, 1], qboxes[j, 1]))
                iw = min(boxes[i, 1], qboxes[j, 1]) - max(
                    boxes[i, 1] - boxes[i, 4], qboxes[j, 1] - qboxes[j, 4]
                )
                if iw > 0:
                    area1 = boxes[i, 3] * boxes[i, 4] * boxes[i, 5]
                    area2 = qboxes[j, 3] * qboxes[j, 4] * qboxes[j, 5]
                    inc = iw * rinc[i, j]
                    if criterion == -1:
                        ua = area1 + area2 - inc
                    elif criterion == 0:
                        ua = area1
                    elif criterion == 1:
                        ua = area2
                    else:
                        ua = inc
                    rinc[i, j] = inc / ua
                else:
                    rinc[i, j] = 0.0","def d3_box_overlap_kernel ( boxes , qboxes , rinc , criterion = - 1 ) :    N , K = boxes . shape [ 0 ] , qboxes . shape [ 0 ]  for i in numba . prange ( N ) :   for j in numba . prange ( K ) :    if rinc [ i , j ] > 0 :             iw = min ( boxes [ i , 1 ] , qboxes [ j , 1 ] ) - max (      boxes [ i , 1 ] - boxes [ i , 4 ] , qboxes [ j , 1 ] - qboxes [ j , 4 ]     )           area1 = boxes [ i , 3 ] * boxes [ i , 4 ] * boxes [ i , 5 ]      area2 = qboxes [ j , 3 ] * qboxes [ j , 4 ] * qboxes [ j , 5 ]      inc = iw * rinc [ i , j ]      if criterion == - 1 :       ua = area1 + area2 - inc      elif criterion == 0 :       ua = area1      elif criterion == 1 :       ua = area2      else :       ua = inc      rinc [ i , j ] = inc / ua     else :      rinc [ i , j ] = 0.0",if iw > 0 :,474
3550,"def read(self, iprot):
    if (
        iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None
    ):
        fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
        return
    iprot.readStructBegin()
    while True:
        (fname, ftype, fid) = iprot.readFieldBegin()
        if ftype == TType.STOP:
            break
        if fid == 1:
            if ftype == TType.STRUCT:
                self.hiveObject = HiveObjectRef()
                self.hiveObject.read(iprot)
            else:
                iprot.skip(ftype)
        elif fid == 2:
            if ftype == TType.STRING:
                self.principalName = iprot.readString()
            else:
                iprot.skip(ftype)
        elif fid == 3:
            if ftype == TType.I32:
                self.principalType = iprot.readI32()
            else:
                iprot.skip(ftype)
        elif fid == 4:
            if ftype == TType.STRUCT:
                self.grantInfo = PrivilegeGrantInfo()
                self.grantInfo.read(iprot)
            else:
                iprot.skip(ftype)
        else:
            iprot.skip(ftype)
        iprot.readFieldEnd()
    iprot.readStructEnd()","def read ( self , iprot ) :  if (   iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated   and isinstance ( iprot . trans , TTransport . CReadableTransport )   and self . thrift_spec is not None   and fastbinary is not None  ) :   fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) )   return  iprot . readStructBegin ( )  while True :   ( fname , ftype , fid ) = iprot . readFieldBegin ( )   if ftype == TType . STOP :    break   if fid == 1 :    if ftype == TType . STRUCT :     self . hiveObject = HiveObjectRef ( )     self . hiveObject . read ( iprot )    else :     iprot . skip ( ftype )   elif fid == 2 :    if ftype == TType . STRING :     self . principalName = iprot . readString ( )    else :     iprot . skip ( ftype )   elif fid == 3 :    if ftype == TType . I32 :     self . principalType = iprot . readI32 ( )    else :     iprot . skip ( ftype )       if ftype == TType . STRUCT :     self . grantInfo = PrivilegeGrantInfo ( )     self . grantInfo . read ( iprot )    else :     iprot . skip ( ftype )   else :    iprot . skip ( ftype )   iprot . readFieldEnd ( )  iprot . readStructEnd ( )",elif fid == 4 :,465
6098,"def _recv_value(self, server, flags, rlen):
    rlen += 2  # include \r\n
    buf = server.recv(rlen)
    if len(buf) != rlen:
        raise _Error(""received %d bytes when expecting %d"" % (len(buf), rlen))
    if len(buf) == rlen:
        buf = buf[:-2]  # strip \r\n
    if flags & Client._FLAG_COMPRESSED:
        buf = zlib.decompress(buf)
    if flags == 0 or flags == Client._FLAG_COMPRESSED:
        # Either a bare string or a compressed string now decompressed...
        val = buf
    elif flags & Client._FLAG_INTEGER:
        val = int(buf)
    elif flags & Client._FLAG_LONG:
        val = long(buf)
    elif flags & Client._FLAG_PICKLE:
        try:
            file = BytesIO(buf)
            unpickler = self.unpickler(file)
            if self.persistent_load:
                unpickler.persistent_load = self.persistent_load
            val = unpickler.load()
        except Exception as e:
            self.debuglog(""Pickle error: %s\n"" % e)
            return None
    else:
        self.debuglog(""unknown flags on get: %x\n"" % flags)
        raise ValueError(""Unknown flags on get: %x"" % flags)
    return val","def _recv_value ( self , server , flags , rlen ) :  rlen += 2  buf = server . recv ( rlen )  if len ( buf ) != rlen :   raise _Error ( ""received %d bytes when expecting %d"" % ( len ( buf ) , rlen ) )  if len ( buf ) == rlen :   buf = buf [ : - 2 ]  if flags & Client . _FLAG_COMPRESSED :   buf = zlib . decompress ( buf )  if flags == 0 or flags == Client . _FLAG_COMPRESSED :     val = buf  elif flags & Client . _FLAG_INTEGER :   val = int ( buf )  elif flags & Client . _FLAG_LONG :   val = long ( buf )  elif flags & Client . _FLAG_PICKLE :   try :    file = BytesIO ( buf )    unpickler = self . unpickler ( file )         unpickler . persistent_load = self . persistent_load    val = unpickler . load ( )   except Exception as e :    self . debuglog ( ""Pickle error: %s\n"" % e )    return None  else :   self . debuglog ( ""unknown flags on get: %x\n"" % flags )   raise ValueError ( ""Unknown flags on get: %x"" % flags )  return val",if self . persistent_load :,369
19402,"def isFinished(self):
    # returns true if episode timesteps have reached episode length and resets the task
    if self.count > self.epiLen:
        self.res()
        return True
    else:
        if self.count == 1:
            self.pertGlasPos(0)
        if self.count == self.epiLen / 2 + 1:
            self.env.reset()
            self.pertGlasPos(1)
        self.count += 1
        return False",def isFinished ( self ) :   if self . count > self . epiLen :   self . res ( )   return True  else :       self . pertGlasPos ( 0 )   if self . count == self . epiLen / 2 + 1 :    self . env . reset ( )    self . pertGlasPos ( 1 )   self . count += 1   return False,if self . count == 1 :,132
20160,"def group_by_heading(lines):
    from collections import OrderedDict
    ret = OrderedDict()
    k = []
    ret[""No Category""] = k
    for line_type, line_args, line_kwargs in lines:
        if line_type == ""add_heading"":
            k = []
            ret[line_args[0]] = k
        else:
            k.append((line_type, line_args, line_kwargs))
    return ret","def group_by_heading ( lines ) :  from collections import OrderedDict  ret = OrderedDict ( )  k = [ ]  ret [ ""No Category"" ] = k  for line_type , line_args , line_kwargs in lines :       k = [ ]    ret [ line_args [ 0 ] ] = k   else :    k . append ( ( line_type , line_args , line_kwargs ) )  return ret","if line_type == ""add_heading"" :",116
10611,"def create_paddle_predictor(args):
    config = Config(args.model_file, args.params_file)
    if args.use_gpu:
        config.enable_use_gpu(args.gpu_mem, 0)
    else:
        config.disable_gpu()
        if args.enable_mkldnn:
            # cache 10 different shapes for mkldnn to avoid memory leak
            config.set_mkldnn_cache_capacity(10)
            config.enable_mkldnn()
    config.set_cpu_math_library_num_threads(args.cpu_num_threads)
    if args.enable_profile:
        config.enable_profile()
    config.disable_glog_info()
    config.switch_ir_optim(args.ir_optim)  # default true
    if args.use_tensorrt:
        config.enable_tensorrt_engine(
            precision_mode=Config.Precision.Half
            if args.use_fp16
            else Config.Precision.Float32,
            max_batch_size=args.batch_size,
        )
    config.enable_memory_optim()
    # use zero copy
    config.switch_use_feed_fetch_ops(False)
    predictor = create_predictor(config)
    return predictor","def create_paddle_predictor ( args ) :  config = Config ( args . model_file , args . params_file )  if args . use_gpu :   config . enable_use_gpu ( args . gpu_mem , 0 )  else :   config . disable_gpu ( )   if args . enable_mkldnn :       config . set_mkldnn_cache_capacity ( 10 )    config . enable_mkldnn ( )  config . set_cpu_math_library_num_threads ( args . cpu_num_threads )  if args . enable_profile :   config . enable_profile ( )  config . disable_glog_info ( )  config . switch_ir_optim ( args . ir_optim )  if args . use_tensorrt :   config . enable_tensorrt_engine (    precision_mode = Config . Precision . Half        else Config . Precision . Float32 ,    max_batch_size = args . batch_size ,   )  config . enable_memory_optim ( )   config . switch_use_feed_fetch_ops ( False )  predictor = create_predictor ( config )  return predictor",if args . use_fp16,326
5045,"def do_debug(self, args):
    """"""Implementation of 'coverage debug'.""""""
    if not args:
        show_help(""What information would you like: config, data, sys, premain?"")
        return ERR
    for info in args:
        if info == ""sys"":
            sys_info = self.coverage.sys_info()
            print(info_header(""sys""))
            for line in info_formatter(sys_info):
                print("" %s"" % line)
        elif info == ""data"":
            self.coverage.load()
            data = self.coverage.get_data()
            print(info_header(""data""))
            print(""path: %s"" % self.coverage.get_data().data_filename())
            if data:
                print(""has_arcs: %r"" % data.has_arcs())
                summary = line_counts(data, fullpath=True)
                filenames = sorted(summary.keys())
                print(""\n%d files:"" % len(filenames))
                for f in filenames:
                    line = ""%s: %d lines"" % (f, summary[f])
                    plugin = data.file_tracer(f)
                    if plugin:
                        line += "" [%s]"" % plugin
                    print(line)
            else:
                print(""No data collected"")
        elif info == ""config"":
            print(info_header(""config""))
            config_info = self.coverage.config.__dict__.items()
            for line in info_formatter(config_info):
                print("" %s"" % line)
        elif info == ""premain"":
            print(info_header(""premain""))
            print(short_stack())
        else:
            show_help(""Don't know what you mean by %r"" % info)
            return ERR
    return OK
","def do_debug ( self , args ) :  """"""Implementation of 'coverage debug'.""""""  if not args :   show_help ( ""What information would you like: config, data, sys, premain?"" )   return ERR  for info in args :   if info == ""sys"" :    sys_info = self . coverage . sys_info ( )    print ( info_header ( ""sys"" ) )    for line in info_formatter ( sys_info ) :     print ( "" %s"" % line )   elif info == ""data"" :    self . coverage . load ( )    data = self . coverage . get_data ( )    print ( info_header ( ""data"" ) )    print ( ""path: %s"" % self . coverage . get_data ( ) . data_filename ( ) )         print ( ""has_arcs: %r"" % data . has_arcs ( ) )     summary = line_counts ( data , fullpath = True )     filenames = sorted ( summary . keys ( ) )     print ( ""\n%d files:"" % len ( filenames ) )     for f in filenames :      line = ""%s: %d lines"" % ( f , summary [ f ] )      plugin = data . file_tracer ( f )      if plugin :       line += "" [%s]"" % plugin      print ( line )    else :     print ( ""No data collected"" )   elif info == ""config"" :    print ( info_header ( ""config"" ) )    config_info = self . coverage . config . __dict__ . items ( )    for line in info_formatter ( config_info ) :     print ( "" %s"" % line )   elif info == ""premain"" :    print ( info_header ( ""premain"" ) )    print ( short_stack ( ) )   else :    show_help ( ""Don't know what you mean by %r"" % info )    return ERR  return OK",if data :,504
17284,"def _execute_mock_call(self, /, *args, **kwargs):
    # This is nearly just like super(), except for special handling
    # of coroutines
    _call = _Call((args, kwargs), two=True)
    self.await_count += 1
    self.await_args = _call
    self.await_args_list.append(_call)
    effect = self.side_effect
    if effect is not None:
        if _is_exception(effect):
            raise effect
        elif not _callable(effect):
            try:
                result = next(effect)
            except StopIteration:
                # It is impossible to propagate a StopIteration
                # through coroutines because of PEP 479
                raise StopAsyncIteration
            if _is_exception(result):
                raise result
        elif iscoroutinefunction(effect):
            result = await effect(*args, **kwargs)
        else:
            result = effect(*args, **kwargs)
        if result is not DEFAULT:
            return result
    if self._mock_return_value is not DEFAULT:
        return self.return_value
    if self._mock_wraps is not None:
        if iscoroutinefunction(self._mock_wraps):
            return await self._mock_wraps(*args, **kwargs)
        return self._mock_wraps(*args, **kwargs)
    return self.return_value","def _execute_mock_call ( self , / , * args , ** kwargs ) :    _call = _Call ( ( args , kwargs ) , two = True )  self . await_count += 1  self . await_args = _call  self . await_args_list . append ( _call )  effect = self . side_effect  if effect is not None :   if _is_exception ( effect ) :    raise effect       try :     result = next ( effect )    except StopIteration :             raise StopAsyncIteration    if _is_exception ( result ) :     raise result   elif iscoroutinefunction ( effect ) :    result = await effect ( * args , ** kwargs )   else :    result = effect ( * args , ** kwargs )   if result is not DEFAULT :    return result  if self . _mock_return_value is not DEFAULT :   return self . return_value  if self . _mock_wraps is not None :   if iscoroutinefunction ( self . _mock_wraps ) :    return await self . _mock_wraps ( * args , ** kwargs )   return self . _mock_wraps ( * args , ** kwargs )  return self . return_value",elif not _callable ( effect ) :,352
6141,"def createform(self, xfields):
    formstyle = self.formstyle
    if isinstance(formstyle, basestring):
        if formstyle in SQLFORM.formstyles:
            formstyle = SQLFORM.formstyles[formstyle]
        else:
            raise RuntimeError(""formstyle not found"")
    if callable(formstyle):
        try:
            table = formstyle(self, xfields)
            for id, a, b, c in xfields:
                self.field_parent[id] = (
                    getattr(b, ""parent"", None) if isinstance(b, XmlComponent) else None
                )
        except TypeError:
            # backward compatibility, 4 argument function is the old style
            table = TABLE()
            for id, a, b, c in xfields:
                newrows = formstyle(id, a, b, c)
                self.field_parent[id] = (
                    getattr(b, ""parent"", None) if isinstance(b, XmlComponent) else None
                )
                if type(newrows).__name__ != ""tuple"":
                    newrows = [newrows]
                for newrow in newrows:
                    table.append(newrow)
    else:
        raise RuntimeError(""formstyle not supported"")
    return table","def createform ( self , xfields ) :  formstyle = self . formstyle  if isinstance ( formstyle , basestring ) :       formstyle = SQLFORM . formstyles [ formstyle ]   else :    raise RuntimeError ( ""formstyle not found"" )  if callable ( formstyle ) :   try :    table = formstyle ( self , xfields )    for id , a , b , c in xfields :     self . field_parent [ id ] = (      getattr ( b , ""parent"" , None ) if isinstance ( b , XmlComponent ) else None     )   except TypeError :       table = TABLE ( )    for id , a , b , c in xfields :     newrows = formstyle ( id , a , b , c )     self . field_parent [ id ] = (      getattr ( b , ""parent"" , None ) if isinstance ( b , XmlComponent ) else None     )     if type ( newrows ) . __name__ != ""tuple"" :      newrows = [ newrows ]     for newrow in newrows :      table . append ( newrow )  else :   raise RuntimeError ( ""formstyle not supported"" )  return table",if formstyle in SQLFORM . formstyles :,349
24896,"def push_solution_to_instance(self):
    scenario_instance = self._instance
    scenariotree_sm_bySymbol = scenario_instance._ScenarioTreeSymbolMap.bySymbol
    for tree_node in self._node_list:
        stage_name = tree_node._stage.name
        cost_variable_name, cost_variable_index = tree_node._stage._cost_variable
        stage_cost_component = self._instance.find_component(cost_variable_name)[
            cost_variable_index
        ]
        # Some of these might be Expression objects so we check
        # for is_expression before changing .value
        if not stage_cost_component.is_expression():
            stage_cost_component.value = self._stage_costs[stage_name]
    for tree_node in self._node_list:
        # Some of these might be Expression objects so we check
        # for is_expression before changing .value
        for variable_id, var_value in iteritems(self._x[tree_node._name]):
            compdata = scenariotree_sm_bySymbol[variable_id]
            if not compdata.is_expression():
                compdata.value = var_value
        for variable_id in self._fixed[tree_node._name]:
            vardata = scenariotree_sm_bySymbol[variable_id]
            vardata.fix()
        for variable_id in self._stale[tree_node._name]:
            vardata = scenariotree_sm_bySymbol[variable_id]
            vardata.stale = True
","def push_solution_to_instance ( self ) :  scenario_instance = self . _instance  scenariotree_sm_bySymbol = scenario_instance . _ScenarioTreeSymbolMap . bySymbol  for tree_node in self . _node_list :   stage_name = tree_node . _stage . name   cost_variable_name , cost_variable_index = tree_node . _stage . _cost_variable   stage_cost_component = self . _instance . find_component ( cost_variable_name ) [    cost_variable_index   ]       if not stage_cost_component . is_expression ( ) :    stage_cost_component . value = self . _stage_costs [ stage_name ]  for tree_node in self . _node_list :       for variable_id , var_value in iteritems ( self . _x [ tree_node . _name ] ) :    compdata = scenariotree_sm_bySymbol [ variable_id ]         compdata . value = var_value   for variable_id in self . _fixed [ tree_node . _name ] :    vardata = scenariotree_sm_bySymbol [ variable_id ]    vardata . fix ( )   for variable_id in self . _stale [ tree_node . _name ] :    vardata = scenariotree_sm_bySymbol [ variable_id ]    vardata . stale = True",if not compdata . is_expression ( ) :,404
14807,"def change_sel(self):
    """"""Change the view's selections.""""""
    if self.alter_select and len(self.sels) > 0:
        if self.multi_select is False:
            self.view.show(self.sels[0])
        self.view.sel().clear()
        self.view.sel().add_all(self.sels)","def change_sel ( self ) :  """"""Change the view's selections.""""""  if self . alter_select and len ( self . sels ) > 0 :       self . view . show ( self . sels [ 0 ] )   self . view . sel ( ) . clear ( )   self . view . sel ( ) . add_all ( self . sels )",if self . multi_select is False :,94
19499,"def get_type(type_ref):
    kind = type_ref.get(""kind"")
    if kind == TypeKind.LIST:
        item_ref = type_ref.get(""ofType"")
        if not item_ref:
            raise Exception(""Decorated type deeper than introspection query."")
        return GraphQLList(get_type(item_ref))
    elif kind == TypeKind.NON_NULL:
        nullable_ref = type_ref.get(""ofType"")
        if not nullable_ref:
            raise Exception(""Decorated type deeper than introspection query."")
        return GraphQLNonNull(get_type(nullable_ref))
    return get_named_type(type_ref[""name""])
","def get_type ( type_ref ) :  kind = type_ref . get ( ""kind"" )  if kind == TypeKind . LIST :   item_ref = type_ref . get ( ""ofType"" )       raise Exception ( ""Decorated type deeper than introspection query."" )   return GraphQLList ( get_type ( item_ref ) )  elif kind == TypeKind . NON_NULL :   nullable_ref = type_ref . get ( ""ofType"" )   if not nullable_ref :    raise Exception ( ""Decorated type deeper than introspection query."" )   return GraphQLNonNull ( get_type ( nullable_ref ) )  return get_named_type ( type_ref [ ""name"" ] )",if not item_ref :,171
17372,"def gotAvatar(avatar):
    if avatar.realm is not None:
        raise ewords.AlreadyLoggedIn()
    for iface in interfaces:
        facet = iface(avatar, None)
        if facet is not None:
            avatar.loggedIn(self, mind)
            mind.name = avatarId
            mind.realm = self
            mind.avatar = avatar
            return iface, facet, self.logoutFactory(avatar, facet)
    raise NotImplementedError(self, interfaces)
","def gotAvatar ( avatar ) :  if avatar . realm is not None :   raise ewords . AlreadyLoggedIn ( )  for iface in interfaces :   facet = iface ( avatar , None )       avatar . loggedIn ( self , mind )    mind . name = avatarId    mind . realm = self    mind . avatar = avatar    return iface , facet , self . logoutFactory ( avatar , facet )  raise NotImplementedError ( self , interfaces )",if facet is not None :,128
196,"def contains_only_whitespace(node):
    if is_tag(node):
        if not any([not is_text(s) for s in node.contents]):
            if not any([unicode(s).strip() for s in node.contents]):
                return True
    return False
",def contains_only_whitespace ( node ) :  if is_tag ( node ) :       if not any ( [ unicode ( s ) . strip ( ) for s in node . contents ] ) :     return True  return False,if not any ( [ not is_text ( s ) for s in node . contents ] ) :,72
24227,"def validate_cell(self, cell):
    super(MetadataValidatorV2, self).validate_cell(cell)
    if ""nbgrader"" not in cell.metadata:
        return
    meta = cell.metadata[""nbgrader""]
    grade = meta[""grade""]
    solution = meta[""solution""]
    locked = meta[""locked""]
    # check if the cell type has changed
    if ""cell_type"" in meta:
        if meta[""cell_type""] != cell.cell_type:
            self.log.warning(
                ""Cell type has changed from {} to {}!"".format(
                    meta[""cell_type""], cell.cell_type
                ),
                cell,
            )
    # check for a valid grade id
    if grade or solution or locked:
        if ""grade_id"" not in meta:
            raise ValidationError(
                ""nbgrader cell does not have a grade_id: {}"".format(cell.source)
            )
        if meta[""grade_id""] == """":
            raise ValidationError(""grade_id is empty"")
    # check for valid points
    if grade:
        if ""points"" not in meta:
            raise ValidationError(
                ""nbgrader cell '{}' does not have points"".format(meta[""grade_id""])
            )
    # check that markdown cells are grade AND solution (not either/or)
    if cell.cell_type == ""markdown"" and grade and not solution:
        raise ValidationError(
            ""Markdown grade cell '{}' is not marked as a solution cell"".format(
                meta[""grade_id""]
            )
        )
    if cell.cell_type == ""markdown"" and not grade and solution:
        raise ValidationError(
            ""Markdown solution cell is not marked as a grade cell: {}"".format(
                cell.source
            )
        )","def validate_cell ( self , cell ) :  super ( MetadataValidatorV2 , self ) . validate_cell ( cell )  if ""nbgrader"" not in cell . metadata :   return  meta = cell . metadata [ ""nbgrader"" ]  grade = meta [ ""grade"" ]  solution = meta [ ""solution"" ]  locked = meta [ ""locked"" ]   if ""cell_type"" in meta :       self . log . warning (     ""Cell type has changed from {} to {}!"" . format (      meta [ ""cell_type"" ] , cell . cell_type     ) ,     cell ,    )   if grade or solution or locked :   if ""grade_id"" not in meta :    raise ValidationError (     ""nbgrader cell does not have a grade_id: {}"" . format ( cell . source )    )   if meta [ ""grade_id"" ] == """" :    raise ValidationError ( ""grade_id is empty"" )   if grade :   if ""points"" not in meta :    raise ValidationError (     ""nbgrader cell '{}' does not have points"" . format ( meta [ ""grade_id"" ] )    )   if cell . cell_type == ""markdown"" and grade and not solution :   raise ValidationError (    ""Markdown grade cell '{}' is not marked as a solution cell"" . format (     meta [ ""grade_id"" ]    )   )  if cell . cell_type == ""markdown"" and not grade and solution :   raise ValidationError (    ""Markdown solution cell is not marked as a grade cell: {}"" . format (     cell . source    )   )","if meta [ ""cell_type"" ] != cell . cell_type :",484
3913,"def __method_playback(self, symbol, *args, **dargs):
    if self._debug:
        print >>sys.__stdout__, (
            "" * Mock call: "" + _dump_function_call(symbol, args, dargs)
        )
    if len(self.recording) != 0:
        # self.recording is subscriptable (deque), ignore E1136
        func_call = self.recording[0]  # pylint: disable=E1136
        if func_call.symbol != symbol:
            msg = ""Unexpected call: %s\nExpected: %s"" % (
                _dump_function_call(symbol, args, dargs),
                func_call,
            )
            self._append_error(msg)
            return None
        if not func_call.match(*args, **dargs):
            msg = ""Incorrect call: %s\nExpected: %s"" % (
                _dump_function_call(symbol, args, dargs),
                func_call,
            )
            self._append_error(msg)
            return None
        # this is the expected call so pop it and return
        self.recording.popleft()
        if func_call.error:
            raise func_call.error
        else:
            return func_call.return_obj
    else:
        msg = ""unexpected call: %s"" % (_dump_function_call(symbol, args, dargs))
        self._append_error(msg)
        return None","def __method_playback ( self , symbol , * args , ** dargs ) :  if self . _debug :   print >> sys . __stdout__ , (    "" * Mock call: "" + _dump_function_call ( symbol , args , dargs )   )  if len ( self . recording ) != 0 :     func_call = self . recording [ 0 ]   if func_call . symbol != symbol :    msg = ""Unexpected call: %s\nExpected: %s"" % (     _dump_function_call ( symbol , args , dargs ) ,     func_call ,    )    self . _append_error ( msg )    return None       msg = ""Incorrect call: %s\nExpected: %s"" % (     _dump_function_call ( symbol , args , dargs ) ,     func_call ,    )    self . _append_error ( msg )    return None     self . recording . popleft ( )   if func_call . error :    raise func_call . error   else :    return func_call . return_obj  else :   msg = ""unexpected call: %s"" % ( _dump_function_call ( symbol , args , dargs ) )   self . _append_error ( msg )   return None","if not func_call . match ( * args , ** dargs ) :",395
13702,"def __next__(self):
    if not self.has_catalogs:
        raise StopIteration(""No catalogs"")
    if self.current_catalog_index >= len(self.manifest.catalog_paths):
        raise StopIteration(""No more catalogs"")
    if self.current_catalog is None:
        current_catalog_path = os.path.join(
            self.manifest.base_path,
            self.manifest.catalog_paths[self.current_catalog_index],
        )
        self.current_catalog = Catalog(
            current_catalog_path, read_only=self.manifest.read_only
        )
        self.current_catalog.seekable.seek_line_start(1)
    contents = self.current_catalog.seekable.readline()
    if contents is not None and len(contents) > 0:
        # Check for current_index when we are ready to advance the underlying iterator.
        current_index = self.current_index
        self.current_index += 1
        if current_index in self.manifest.deleted_indexes:
            # Skip over index, because it has been marked deleted
            return self.__next__()
        else:
            try:
                record = json.loads(contents)
                return record
            except Exception:
                print(""Ignoring record at index %s"" % (current_index))
                return self.__next__()
    else:
        self.current_catalog = None
        self.current_catalog_index += 1
        return self.__next__()","def __next__ ( self ) :  if not self . has_catalogs :   raise StopIteration ( ""No catalogs"" )  if self . current_catalog_index >= len ( self . manifest . catalog_paths ) :   raise StopIteration ( ""No more catalogs"" )  if self . current_catalog is None :   current_catalog_path = os . path . join (    self . manifest . base_path ,    self . manifest . catalog_paths [ self . current_catalog_index ] ,   )   self . current_catalog = Catalog (    current_catalog_path , read_only = self . manifest . read_only   )   self . current_catalog . seekable . seek_line_start ( 1 )  contents = self . current_catalog . seekable . readline ( )  if contents is not None and len ( contents ) > 0 :     current_index = self . current_index   self . current_index += 1          return self . __next__ ( )   else :    try :     record = json . loads ( contents )     return record    except Exception :     print ( ""Ignoring record at index %s"" % ( current_index ) )     return self . __next__ ( )  else :   self . current_catalog = None   self . current_catalog_index += 1   return self . __next__ ( )",if current_index in self . manifest . deleted_indexes :,388
12093,"def TryMerge(self, d):
    while d.avail() > 0:
        tt = d.getVarInt32()
        if tt == 10:
            length = d.getVarInt32()
            tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
            d.skip(length)
            self.add_subscription().TryMerge(tmp)
            continue
        if tt == 0:
            raise ProtocolBuffer.ProtocolBufferDecodeError
        d.skipData(tt)","def TryMerge ( self , d ) :  while d . avail ( ) > 0 :   tt = d . getVarInt32 ( )   if tt == 10 :    length = d . getVarInt32 ( )    tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length )    d . skip ( length )    self . add_subscription ( ) . TryMerge ( tmp )    continue       raise ProtocolBuffer . ProtocolBufferDecodeError   d . skipData ( tt )",if tt == 0 :,138
10829,"def filter_errors(self, errors: List[str]) -> List[str]:
    real_errors: List[str] = list()
    current_file = __file__
    current_path = os.path.split(current_file)
    for line in errors:
        line = line.strip()
        if not line:
            continue
        fn, lno, lvl, msg = self.parse_trace_line(line)
        if fn is not None:
            _path = os.path.split(fn)
            if _path[-1] != current_path[-1]:
                continue
        real_errors.append(line)
    return real_errors","def filter_errors ( self , errors : List [ str ] ) -> List [ str ] :  real_errors : List [ str ] = list ( )  current_file = __file__  current_path = os . path . split ( current_file )  for line in errors :   line = line . strip ( )   if not line :    continue   fn , lno , lvl , msg = self . parse_trace_line ( line )       _path = os . path . split ( fn )    if _path [ - 1 ] != current_path [ - 1 ] :     continue   real_errors . append ( line )  return real_errors",if fn is not None :,171
23848,"def parser_info(self, video, info, stream, lvid, uid):
    if not ""allot"" in info or lvid != info[""id""]:
        return
    stream_id = self.types_2_id[stream]
    stream_profile = self.id_2_profile[stream_id]
    host = info[""allot""]
    data = info[""data""]
    size = sum(map(int, data[""clipsBytes""]))
    urls = []
    assert len(data[""clipsURL""]) == len(data[""clipsBytes""]) == len(data[""su""])
    for (
        new,
        ck,
    ) in zip(data[""su""], data[""ck""]):
        params = {
            ""ch"": data[""ch""],
            ""num"": data[""num""],
            ""new"": new,
            ""key"": ck,
            ""uid"": uid,
            ""prod"": ""h5n"",
            ""pt"": 1,
            ""pg"": 2,
        }
        if urlparse(new).netloc == """":
            cdnurl = ""https://{}/ip?{}"".format(host, urlencode(params))
            url = json.loads(get_content(cdnurl))[""servers""][0][""url""]
        else:
            url = new
        urls.append(url)
    video.streams[stream_id] = {
        ""container"": ""mp4"",
        ""video_profile"": stream_profile,
        ""src"": urls,
        ""size"": size,
    }
    video.stream_types.append(stream_id)","def parser_info ( self , video , info , stream , lvid , uid ) :  if not ""allot"" in info or lvid != info [ ""id"" ] :   return  stream_id = self . types_2_id [ stream ]  stream_profile = self . id_2_profile [ stream_id ]  host = info [ ""allot"" ]  data = info [ ""data"" ]  size = sum ( map ( int , data [ ""clipsBytes"" ] ) )  urls = [ ]  assert len ( data [ ""clipsURL"" ] ) == len ( data [ ""clipsBytes"" ] ) == len ( data [ ""su"" ] )  for (   new ,   ck ,  ) in zip ( data [ ""su"" ] , data [ ""ck"" ] ) :   params = {    ""ch"" : data [ ""ch"" ] ,    ""num"" : data [ ""num"" ] ,    ""new"" : new ,    ""key"" : ck ,    ""uid"" : uid ,    ""prod"" : ""h5n"" ,    ""pt"" : 1 ,    ""pg"" : 2 ,   }       cdnurl = ""https://{}/ip?{}"" . format ( host , urlencode ( params ) )    url = json . loads ( get_content ( cdnurl ) ) [ ""servers"" ] [ 0 ] [ ""url"" ]   else :    url = new   urls . append ( url )  video . streams [ stream_id ] = {   ""container"" : ""mp4"" ,   ""video_profile"" : stream_profile ,   ""src"" : urls ,   ""size"" : size ,  }  video . stream_types . append ( stream_id )","if urlparse ( new ) . netloc == """" :",399
8994,"def parseImpl(self, instring, loc, doActions=True):
    try:
        loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False)
    except (ParseException, IndexError):
        if self.defaultValue is not self.__optionalNotMatched:
            if self.expr.resultsName:
                tokens = ParseResults([self.defaultValue])
                tokens[self.expr.resultsName] = self.defaultValue
            else:
                tokens = [self.defaultValue]
        else:
            tokens = []
    return loc, tokens
","def parseImpl ( self , instring , loc , doActions = True ) :  try :   loc , tokens = self . expr . _parse ( instring , loc , doActions , callPreParse = False )  except ( ParseException , IndexError ) :       if self . expr . resultsName :     tokens = ParseResults ( [ self . defaultValue ] )     tokens [ self . expr . resultsName ] = self . defaultValue    else :     tokens = [ self . defaultValue ]   else :    tokens = [ ]  return loc , tokens",if self . defaultValue is not self . __optionalNotMatched :,157
17972,"def validate_configuration(self, configuration: Optional[ExpectationConfiguration]):
    """"""Validating that user has inputted a value set and that configuration has been initialized""""""
    super().validate_configuration(configuration)
    try:
        assert ""value_set"" in configuration.kwargs, ""value_set is required""
        assert isinstance(
            configuration.kwargs[""value_set""], (list, set, dict)
        ), ""value_set must be a list or a set""
        if isinstance(configuration.kwargs[""value_set""], dict):
            assert (
                ""$PARAMETER"" in configuration.kwargs[""value_set""]
            ), 'Evaluation Parameter dict for value_set kwarg must have ""$PARAMETER"" key'
    except AssertionError as e:
        raise InvalidExpectationConfigurationError(str(e))
    return True
","def validate_configuration ( self , configuration : Optional [ ExpectationConfiguration ] ) :  """"""Validating that user has inputted a value set and that configuration has been initialized""""""  super ( ) . validate_configuration ( configuration )  try :   assert ""value_set"" in configuration . kwargs , ""value_set is required""   assert isinstance (    configuration . kwargs [ ""value_set"" ] , ( list , set , dict )   ) , ""value_set must be a list or a set""       assert (     ""$PARAMETER"" in configuration . kwargs [ ""value_set"" ]    ) , 'Evaluation Parameter dict for value_set kwarg must have ""$PARAMETER"" key'  except AssertionError as e :   raise InvalidExpectationConfigurationError ( str ( e ) )  return True","if isinstance ( configuration . kwargs [ ""value_set"" ] , dict ) :",196
18832,"def post(self, request, *args, **kwargs):
    settings_form = self.get_settings_form()
    children_formset = self.get_children_formset()
    data = request.POST.copy()
    if settings_form:
        if settings_form.is_valid():
            settings = settings_form.cleaned_data
            data[""settings""] = self.module.dump_settings(settings)
        else:
            return self.form_invalid(self.get_form(self.get_form_class()))
    if children_formset:
        if children_formset.is_valid():
            self.module.children = self.clean_children_data(
                children_formset.cleaned_data
            )
            data[""children""] = self.module.dump_children()
        else:
            return self.form_invalid(self.get_form(self.get_form_class()))
    request.POST = data
    return super(UpdateDashboardModuleView, self).post(request, *args, **kwargs)
","def post ( self , request , * args , ** kwargs ) :  settings_form = self . get_settings_form ( )  children_formset = self . get_children_formset ( )  data = request . POST . copy ( )  if settings_form :       settings = settings_form . cleaned_data    data [ ""settings"" ] = self . module . dump_settings ( settings )   else :    return self . form_invalid ( self . get_form ( self . get_form_class ( ) ) )  if children_formset :   if children_formset . is_valid ( ) :    self . module . children = self . clean_children_data (     children_formset . cleaned_data    )    data [ ""children"" ] = self . module . dump_children ( )   else :    return self . form_invalid ( self . get_form ( self . get_form_class ( ) ) )  request . POST = data  return super ( UpdateDashboardModuleView , self ) . post ( request , * args , ** kwargs )",if settings_form . is_valid ( ) :,263
21318,"def _safe_coalesce(t):
    tc = t.coalesce()
    value_map = {}
    for idx, val in zip(t._indices().t(), t._values()):
        idx_tup = tuple(idx)
        if idx_tup in value_map:
            value_map[idx_tup] += val
        else:
            value_map[idx_tup] = val.clone() if torch.is_tensor(val) else val
    new_indices = sorted(list(value_map.keys()))
    new_values = [value_map[idx] for idx in new_indices]
    if t._values().dim() < 2:
        new_values = t._values().new_tensor(new_values)
    else:
        new_values = torch.stack(new_values)
    new_indices = t._indices().new_tensor(new_indices).t()
    tg = t.new(new_indices, new_values, t.size())
    assert (tc._indices() == tg._indices()).all()
    assert (tc._values() == tg._values()).all()
    return tg","def _safe_coalesce ( t ) :  tc = t . coalesce ( )  value_map = { }  for idx , val in zip ( t . _indices ( ) . t ( ) , t . _values ( ) ) :   idx_tup = tuple ( idx )       value_map [ idx_tup ] += val   else :    value_map [ idx_tup ] = val . clone ( ) if torch . is_tensor ( val ) else val  new_indices = sorted ( list ( value_map . keys ( ) ) )  new_values = [ value_map [ idx ] for idx in new_indices ]  if t . _values ( ) . dim ( ) < 2 :   new_values = t . _values ( ) . new_tensor ( new_values )  else :   new_values = torch . stack ( new_values )  new_indices = t . _indices ( ) . new_tensor ( new_indices ) . t ( )  tg = t . new ( new_indices , new_values , t . size ( ) )  assert ( tc . _indices ( ) == tg . _indices ( ) ) . all ( )  assert ( tc . _values ( ) == tg . _values ( ) ) . all ( )  return tg",if idx_tup in value_map :,279
10601,"def updateToolForHostInformation(self, update=True):
    # if self.commandTabsValidate() == True:
    if self.validateCommandTabs(
        self.hostActionNameText, self.hostLabelText, self.hostCommandText
    ):
        # do not update any values the first time or when the remove button is clicked
        if self.hostTableRow == -1 or update == False:
            pass
        else:
            self.updateHostActions()
        #           self.hostLabelText.setStyleSheet(""border: 1px solid grey;"")
        #           self.hostCommandText.setStyleSheet(""border: 1px solid grey;"")
        self.hostTableRow = self.toolForHostsTableWidget.currentRow()
        self.hostLabelText.setReadOnly(False)
        if self.toolForHostsTableWidget.item(self.hostTableRow, 0) is not None:
            key = self.toolForHostsTableWidget.item(self.hostTableRow, 0).text()
            for tool in self.settings.hostActions:
                if tool[1] == key:
                    self.hostActionNameText.setText(tool[1])
                    self.hostLabelText.setText(tool[0])
                    self.hostCommandText.setText(tool[2])
    else:
        self.toolForHostsTableWidget.selectRow(self.hostTableRow)","def updateToolForHostInformation ( self , update = True ) :   if self . validateCommandTabs (   self . hostActionNameText , self . hostLabelText , self . hostCommandText  ) :         pass   else :    self . updateHostActions ( )       self . hostTableRow = self . toolForHostsTableWidget . currentRow ( )   self . hostLabelText . setReadOnly ( False )   if self . toolForHostsTableWidget . item ( self . hostTableRow , 0 ) is not None :    key = self . toolForHostsTableWidget . item ( self . hostTableRow , 0 ) . text ( )    for tool in self . settings . hostActions :     if tool [ 1 ] == key :      self . hostActionNameText . setText ( tool [ 1 ] )      self . hostLabelText . setText ( tool [ 0 ] )      self . hostCommandText . setText ( tool [ 2 ] )  else :   self . toolForHostsTableWidget . selectRow ( self . hostTableRow )",if self . hostTableRow == - 1 or update == False :,372
7914,"def slo(environ, start_response, user):
    # so here I might get either a LogoutResponse or a LogoutRequest
    client = environ[""repoze.who.plugins""][""saml2auth""]
    sc = client.saml_client
    if ""QUERY_STRING"" in environ:
        query = parse_qs(environ[""QUERY_STRING""])
        logger.info(""query: %s"", query)
        try:
            response = sc.parse_logout_request_response(
                query[""SAMLResponse""][0], binding=BINDING_HTTP_REDIRECT
            )
            if response:
                logger.info(""LOGOUT response parsed OK"")
        except KeyError:
            # return error reply
            response = None
        if response is None:
            request = sc.lo
    headers = []
    delco = delete_cookie(environ, ""pysaml2"")
    if delco:
        headers.append(delco)
    resp = Redirect(""/done"", headers=headers)
    return resp(environ, start_response)","def slo ( environ , start_response , user ) :   client = environ [ ""repoze.who.plugins"" ] [ ""saml2auth"" ]  sc = client . saml_client  if ""QUERY_STRING"" in environ :   query = parse_qs ( environ [ ""QUERY_STRING"" ] )   logger . info ( ""query: %s"" , query )   try :    response = sc . parse_logout_request_response (     query [ ""SAMLResponse"" ] [ 0 ] , binding = BINDING_HTTP_REDIRECT    )         logger . info ( ""LOGOUT response parsed OK"" )   except KeyError :       response = None   if response is None :    request = sc . lo  headers = [ ]  delco = delete_cookie ( environ , ""pysaml2"" )  if delco :   headers . append ( delco )  resp = Redirect ( ""/done"" , headers = headers )  return resp ( environ , start_response )",if response :,266
3746,"def _probe(self):
    """"""Copy all probed signals to buffers.""""""
    self._probe_step_time()
    for probe in self.model.probes:
        period = 1 if probe.sample_every is None else probe.sample_every / self.dt
        if self.n_steps % period < 1:
            tmp = self.signals[self.model.sig[probe][""in""]].copy()
            self._probe_outputs[probe].append(tmp)","def _probe ( self ) :  """"""Copy all probed signals to buffers.""""""  self . _probe_step_time ( )  for probe in self . model . probes :   period = 1 if probe . sample_every is None else probe . sample_every / self . dt       tmp = self . signals [ self . model . sig [ probe ] [ ""in"" ] ] . copy ( )    self . _probe_outputs [ probe ] . append ( tmp )",if self . n_steps % period < 1 :,119
3439,"def services_to_report(self):
    services = self._parse_services(self.default(""REPORT_SERVICES"", """"), None)
    for service in services:
        if service.protocol == ""rpc"":
            raise ServiceError(f""bad protocol for REPORT_SERVICES: {service.protocol}"")
        if isinstance(service.host, (IPv4Address, IPv6Address)):
            ip_addr = service.host
            if (
                ip_addr.is_multicast
                or ip_addr.is_unspecified
                or (ip_addr.is_private and self.peer_announce)
            ):
                raise ServiceError(f""bad IP address for REPORT_SERVICES: {ip_addr}"")
        elif service.host.lower() == ""localhost"":
            raise ServiceError(f""bad host for REPORT_SERVICES: {service.host}"")
    return services","def services_to_report ( self ) :  services = self . _parse_services ( self . default ( ""REPORT_SERVICES"" , """" ) , None )  for service in services :   if service . protocol == ""rpc"" :    raise ServiceError ( f""bad protocol for REPORT_SERVICES: {service.protocol}"" )       ip_addr = service . host    if (     ip_addr . is_multicast     or ip_addr . is_unspecified     or ( ip_addr . is_private and self . peer_announce )    ) :     raise ServiceError ( f""bad IP address for REPORT_SERVICES: {ip_addr}"" )   elif service . host . lower ( ) == ""localhost"" :    raise ServiceError ( f""bad host for REPORT_SERVICES: {service.host}"" )  return services","if isinstance ( service . host , ( IPv4Address , IPv6Address ) ) :",235
9474,"def next_ohlcv(self) -> pd.DataFrame:
    if self._has_loaded_historical:
        frame = self.data_frame[self._current_index]
        self._current_index += 1
        return frame
    data = self.exchange.fetchOHLCV(
        symbol=self.symbol_pair,
        timeframe=self.timeframe,
        since=self._current_index,
        limit=1,
    )
    if len(data):
        self._current_index = data[len(data) - 1][""timestamp""]
        frame = pd.DataFrame(data, columns=self.in_columns)
        frame = self.prepare_data(frame)
        if self.data_frame is None:
            self.data_frame = pd.DataFrame(None, columns=self.columns)
        self.data_frame = self.data_frame.append(frame, ignore_index=True)
        return frame
    return None","def next_ohlcv ( self ) -> pd . DataFrame :  if self . _has_loaded_historical :   frame = self . data_frame [ self . _current_index ]   self . _current_index += 1   return frame  data = self . exchange . fetchOHLCV (   symbol = self . symbol_pair ,   timeframe = self . timeframe ,   since = self . _current_index ,   limit = 1 ,  )  if len ( data ) :   self . _current_index = data [ len ( data ) - 1 ] [ ""timestamp"" ]   frame = pd . DataFrame ( data , columns = self . in_columns )   frame = self . prepare_data ( frame )       self . data_frame = pd . DataFrame ( None , columns = self . columns )   self . data_frame = self . data_frame . append ( frame , ignore_index = True )   return frame  return None",if self . data_frame is None :,244
13594,"def _handle_loaded(objs):
    try:
        data_locations = storage_client.get_data_locations(session_id, keys_to_fetch)
        shared_quota_keys = []
        inproc_keys = []
        inproc_quota_keys = []
        context_dict.update(zip(keys_to_fetch, objs))
        for k, locations in zip(keys_to_fetch, data_locations):
            quota_key = build_quota_key(session_id, k, owner=self.proc_id)
            if (self.proc_id, DataStorageDevice.PROC_MEMORY) not in locations:
                shared_quota_keys.append(quota_key)
            else:
                inproc_keys.append(k)
                inproc_quota_keys.append(quota_key)
        if shared_quota_keys:
            self._mem_quota_ref.hold_quotas(shared_quota_keys, _tell=True)
        if inproc_keys:
            self._mem_quota_ref.hold_quotas(inproc_quota_keys, _tell=True)
            if self._remove_intermediate:
                storage_client.delete(
                    session_id, inproc_keys, [self._calc_intermediate_device]
                )
    finally:
        objs[:] = []
","def _handle_loaded ( objs ) :  try :   data_locations = storage_client . get_data_locations ( session_id , keys_to_fetch )   shared_quota_keys = [ ]   inproc_keys = [ ]   inproc_quota_keys = [ ]   context_dict . update ( zip ( keys_to_fetch , objs ) )   for k , locations in zip ( keys_to_fetch , data_locations ) :    quota_key = build_quota_key ( session_id , k , owner = self . proc_id )    if ( self . proc_id , DataStorageDevice . PROC_MEMORY ) not in locations :     shared_quota_keys . append ( quota_key )    else :     inproc_keys . append ( k )     inproc_quota_keys . append ( quota_key )       self . _mem_quota_ref . hold_quotas ( shared_quota_keys , _tell = True )   if inproc_keys :    self . _mem_quota_ref . hold_quotas ( inproc_quota_keys , _tell = True )    if self . _remove_intermediate :     storage_client . delete (      session_id , inproc_keys , [ self . _calc_intermediate_device ]     )  finally :   objs [ : ] = [ ]",if shared_quota_keys :,344
2407,"def _cobra_getsock(self, thr=None):
    if self._cobra_spoolcnt:
        sock = self._cobra_sockpool.get()
    else:
        if not thr:  # if thread isn't specified, use the current thread
            thr = currentThread()
        tsocks = getattr(thr, ""cobrasocks"", None)
        if tsocks is None:
            tsocks = {}
            thr.cobrasocks = tsocks
        sock = tsocks.get(self._cobra_slookup)
    if not sock or sock.trashed:
        # Let's build a new socket... shall we?
        sock = self._cobra_newsock()
        # If we have authinfo, let's authenticate
        authinfo = self._cobra_kwargs.get(""authinfo"")
        if authinfo is not None:
            mtype, rver, data = sock.cobraTransaction(COBRA_AUTH, """", authinfo)
            if mtype != COBRA_AUTH:
                raise CobraAuthException(""Authentication Failed!"")
        if not self._cobra_spoolcnt:
            tsocks[self._cobra_slookup] = sock
    return sock
","def _cobra_getsock ( self , thr = None ) :  if self . _cobra_spoolcnt :   sock = self . _cobra_sockpool . get ( )  else :   if not thr :    thr = currentThread ( )   tsocks = getattr ( thr , ""cobrasocks"" , None )   if tsocks is None :    tsocks = { }    thr . cobrasocks = tsocks   sock = tsocks . get ( self . _cobra_slookup )  if not sock or sock . trashed :     sock = self . _cobra_newsock ( )     authinfo = self . _cobra_kwargs . get ( ""authinfo"" )   if authinfo is not None :    mtype , rver , data = sock . cobraTransaction ( COBRA_AUTH , """" , authinfo )         raise CobraAuthException ( ""Authentication Failed!"" )   if not self . _cobra_spoolcnt :    tsocks [ self . _cobra_slookup ] = sock  return sock",if mtype != COBRA_AUTH :,299
8652,"def get_other(self, data, items):
    is_tuple = False
    if type(data) == tuple:
        data = list(data)
        is_tuple = True
    if type(data) == list:
        m_items = items.copy()
        for idx, item in enumerate(items):
            if item < 0:
                m_items[idx] = len(data) - abs(item)
        for i in sorted(set(m_items), reverse=True):
            if i < len(data) and i > -1:
                del data[i]
        if is_tuple:
            return tuple(data)
        else:
            return data
    else:
        return None","def get_other ( self , data , items ) :  is_tuple = False  if type ( data ) == tuple :   data = list ( data )   is_tuple = True  if type ( data ) == list :   m_items = items . copy ( )   for idx , item in enumerate ( items ) :    if item < 0 :     m_items [ idx ] = len ( data ) - abs ( item )   for i in sorted ( set ( m_items ) , reverse = True ) :    if i < len ( data ) and i > - 1 :     del data [ i ]       return tuple ( data )   else :    return data  else :   return None",if is_tuple :,191
18230,"def __init__(self, document_data):
    self.document_data = document_data
    self.field_paths = []
    self.deleted_fields = []
    self.server_timestamps = []
    self.array_removes = {}
    self.array_unions = {}
    self.increments = {}
    self.minimums = {}
    self.maximums = {}
    self.set_fields = {}
    self.empty_document = False
    prefix_path = FieldPath()
    iterator = self._get_document_iterator(prefix_path)
    for field_path, value in iterator:
        if field_path == prefix_path and value is _EmptyDict:
            self.empty_document = True
        elif value is transforms.DELETE_FIELD:
            self.deleted_fields.append(field_path)
        elif value is transforms.SERVER_TIMESTAMP:
            self.server_timestamps.append(field_path)
        elif isinstance(value, transforms.ArrayRemove):
            self.array_removes[field_path] = value.values
        elif isinstance(value, transforms.ArrayUnion):
            self.array_unions[field_path] = value.values
        elif isinstance(value, transforms.Increment):
            self.increments[field_path] = value.value
        elif isinstance(value, transforms.Maximum):
            self.maximums[field_path] = value.value
        elif isinstance(value, transforms.Minimum):
            self.minimums[field_path] = value.value
        else:
            self.field_paths.append(field_path)
            set_field_value(self.set_fields, field_path, value)
","def __init__ ( self , document_data ) :  self . document_data = document_data  self . field_paths = [ ]  self . deleted_fields = [ ]  self . server_timestamps = [ ]  self . array_removes = { }  self . array_unions = { }  self . increments = { }  self . minimums = { }  self . maximums = { }  self . set_fields = { }  self . empty_document = False  prefix_path = FieldPath ( )  iterator = self . _get_document_iterator ( prefix_path )  for field_path , value in iterator :   if field_path == prefix_path and value is _EmptyDict :    self . empty_document = True   elif value is transforms . DELETE_FIELD :    self . deleted_fields . append ( field_path )   elif value is transforms . SERVER_TIMESTAMP :    self . server_timestamps . append ( field_path )   elif isinstance ( value , transforms . ArrayRemove ) :    self . array_removes [ field_path ] = value . values   elif isinstance ( value , transforms . ArrayUnion ) :    self . array_unions [ field_path ] = value . values   elif isinstance ( value , transforms . Increment ) :    self . increments [ field_path ] = value . value       self . maximums [ field_path ] = value . value   elif isinstance ( value , transforms . Minimum ) :    self . minimums [ field_path ] = value . value   else :    self . field_paths . append ( field_path )    set_field_value ( self . set_fields , field_path , value )","elif isinstance ( value , transforms . Maximum ) :",422
2979,"def _ParseAndCheckSubOpts(self):
    # Default argument values
    delta = None
    method = ""GET""
    content_type = """"
    passwd = None
    for o, v in self.sub_opts:
        if o == ""-d"":
            if delta is not None:
                delta += _DurationToTimeDelta(v)
            else:
                delta = _DurationToTimeDelta(v)
        elif o == ""-m"":
            method = v
        elif o == ""-c"":
            content_type = v
        elif o == ""-p"":
            passwd = v
        else:
            self.RaiseInvalidArgumentException()
    if delta is None:
        delta = timedelta(hours=1)
    expiration = calendar.timegm((datetime.utcnow() + delta).utctimetuple())
    if method not in [""GET"", ""PUT"", ""DELETE"", ""HEAD""]:
        raise CommandException(""HTTP method must be one of [GET|HEAD|PUT|DELETE]"")
    return method, expiration, content_type, passwd","def _ParseAndCheckSubOpts ( self ) :   delta = None  method = ""GET""  content_type = """"  passwd = None  for o , v in self . sub_opts :   if o == ""-d"" :    if delta is not None :     delta += _DurationToTimeDelta ( v )    else :     delta = _DurationToTimeDelta ( v )   elif o == ""-m"" :    method = v   elif o == ""-c"" :    content_type = v       passwd = v   else :    self . RaiseInvalidArgumentException ( )  if delta is None :   delta = timedelta ( hours = 1 )  expiration = calendar . timegm ( ( datetime . utcnow ( ) + delta ) . utctimetuple ( ) )  if method not in [ ""GET"" , ""PUT"" , ""DELETE"" , ""HEAD"" ] :   raise CommandException ( ""HTTP method must be one of [GET|HEAD|PUT|DELETE]"" )  return method , expiration , content_type , passwd","elif o == ""-p"" :",266
21185,"def exe(self, ret):
    if not ret:
        self.assertEqual(ret, """")
    else:
        assert os.path.isabs(ret), ret
        # Note: os.stat() may return False even if the file is there
        # hence we skip the test, see:
        # http://stackoverflow.com/questions/3112546/os-path-exists-lies
        if POSIX:
            assert os.path.isfile(ret), ret
            if hasattr(os, ""access"") and hasattr(os, ""X_OK""):
                # XXX may fail on OSX
                self.assertTrue(os.access(ret, os.X_OK))","def exe ( self , ret ) :  if not ret :   self . assertEqual ( ret , """" )  else :   assert os . path . isabs ( ret ) , ret         if POSIX :    assert os . path . isfile ( ret ) , ret             self . assertTrue ( os . access ( ret , os . X_OK ) )","if hasattr ( os , ""access"" ) and hasattr ( os , ""X_OK"" ) :",171
16412,"def package_exists(self, pref):
    try:
        if pref.revision:
            path = self.server_store.package(pref)
        else:
            path = self.test_server.server_store.package_revisions_root(pref)
        return self.test_server.server_store.path_exists(path)
    except NotFoundException:  # When resolves the latest and there is no package
        return False
","def package_exists ( self , pref ) :  try :       path = self . server_store . package ( pref )   else :    path = self . test_server . server_store . package_revisions_root ( pref )   return self . test_server . server_store . path_exists ( path )  except NotFoundException :   return False",if pref . revision :,110
18709,"def find_subdomains(domain, data):
    subdomains = set()
    js_urls = set()
    db = Database()
    for infos in data:
        jump_history = infos.get(""history"")
        req_url = infos.get(""url"")
        subdomains.update(find_in_history(domain, req_url, jump_history))
        rsp_html = db.get_resp_by_url(domain, req_url)
        if not rsp_html:
            logger.log(
                ""DEBUG"", f""an abnormal response occurred in the request {req_url}""
            )
            continue
        subdomains.update(find_in_resp(domain, req_url, rsp_html))
        js_urls.update(find_js_urls(domain, req_url, rsp_html))
    req_data = convert_to_dict(js_urls)
    resp_data = request.bulk_request(domain, req_data, ret=True)
    while not resp_data.empty():
        _, resp = resp_data.get()
        if not isinstance(resp, Response):
            continue
        text = utils.decode_resp_text(resp)
        subdomains.update(find_in_resp(domain, resp.url, text))
    return subdomains
","def find_subdomains ( domain , data ) :  subdomains = set ( )  js_urls = set ( )  db = Database ( )  for infos in data :   jump_history = infos . get ( ""history"" )   req_url = infos . get ( ""url"" )   subdomains . update ( find_in_history ( domain , req_url , jump_history ) )   rsp_html = db . get_resp_by_url ( domain , req_url )       logger . log (     ""DEBUG"" , f""an abnormal response occurred in the request {req_url}""    )    continue   subdomains . update ( find_in_resp ( domain , req_url , rsp_html ) )   js_urls . update ( find_js_urls ( domain , req_url , rsp_html ) )  req_data = convert_to_dict ( js_urls )  resp_data = request . bulk_request ( domain , req_data , ret = True )  while not resp_data . empty ( ) :   _ , resp = resp_data . get ( )   if not isinstance ( resp , Response ) :    continue   text = utils . decode_resp_text ( resp )   subdomains . update ( find_in_resp ( domain , resp . url , text ) )  return subdomains",if not rsp_html :,330
19702,"def _allocate_nbd(self):
    if not os.path.exists(""/sys/block/nbd0""):
        self.error = _(""nbd unavailable: module not loaded"")
        return None
    while True:
        if not self._DEVICES:
            # really want to log this info, not raise
            self.error = _(""No free nbd devices"")
            return None
        device = self._DEVICES.pop()
        if not os.path.exists(""/sys/block/%s/pid"" % os.path.basename(device)):
            break
    return device","def _allocate_nbd ( self ) :  if not os . path . exists ( ""/sys/block/nbd0"" ) :   self . error = _ ( ""nbd unavailable: module not loaded"" )   return None  while True :          self . error = _ ( ""No free nbd devices"" )    return None   device = self . _DEVICES . pop ( )   if not os . path . exists ( ""/sys/block/%s/pid"" % os . path . basename ( device ) ) :    break  return device",if not self . _DEVICES :,146
12242,"def __setattr__(self, name, value):
    self.__lock__.acquire()
    try:
        ident = get_ident()
        storage = self.__storage__
        if ident in storage:
            storage[ident][name] = value
        else:
            storage[ident] = {name: value}
    finally:
        self.__lock__.release()
","def __setattr__ ( self , name , value ) :  self . __lock__ . acquire ( )  try :   ident = get_ident ( )   storage = self . __storage__       storage [ ident ] [ name ] = value   else :    storage [ ident ] = { name : value }  finally :   self . __lock__ . release ( )",if ident in storage :,93
20957,"def get_price_list_rate(args, item_doc, out):
    meta = frappe.get_meta(args.parenttype or args.doctype)
    if meta.get_field(""currency"") or args.get(""currency""):
        pl_details = get_price_list_currency_and_exchange_rate(args)
        args.update(pl_details)
        if meta.get_field(""currency""):
            validate_conversion_rate(args, meta)
        price_list_rate = get_price_list_rate_for(args, item_doc.name) or 0
        # variant
        if not price_list_rate and item_doc.variant_of:
            price_list_rate = get_price_list_rate_for(args, item_doc.variant_of)
        # insert in database
        if not price_list_rate:
            if args.price_list and args.rate:
                insert_item_price(args)
            return {}
        out.price_list_rate = (
            flt(price_list_rate)
            * flt(args.plc_conversion_rate)
            / flt(args.conversion_rate)
        )
        if not out.price_list_rate and args.transaction_type == ""buying"":
            from erpnext.stock.doctype.item.item import get_last_purchase_details
            out.update(
                get_last_purchase_details(
                    item_doc.name, args.name, args.conversion_rate
                )
            )","def get_price_list_rate ( args , item_doc , out ) :  meta = frappe . get_meta ( args . parenttype or args . doctype )  if meta . get_field ( ""currency"" ) or args . get ( ""currency"" ) :   pl_details = get_price_list_currency_and_exchange_rate ( args )   args . update ( pl_details )       validate_conversion_rate ( args , meta )   price_list_rate = get_price_list_rate_for ( args , item_doc . name ) or 0     if not price_list_rate and item_doc . variant_of :    price_list_rate = get_price_list_rate_for ( args , item_doc . variant_of )     if not price_list_rate :    if args . price_list and args . rate :     insert_item_price ( args )    return { }   out . price_list_rate = (    flt ( price_list_rate )    * flt ( args . plc_conversion_rate )    / flt ( args . conversion_rate )   )   if not out . price_list_rate and args . transaction_type == ""buying"" :    from erpnext . stock . doctype . item . item import get_last_purchase_details    out . update (     get_last_purchase_details (      item_doc . name , args . name , args . conversion_rate     )    )","if meta . get_field ( ""currency"" ) :",413
7385,"def read(self, iprot):
    if (
        iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None
    ):
        fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
        return
    iprot.readStructBegin()
    while True:
        (fname, ftype, fid) = iprot.readFieldBegin()
        if ftype == TType.STOP:
            break
        if fid == 0:
            if ftype == TType.STRUCT:
                self.success = ExecStats.ttypes.TExecSummary()
                self.success.read(iprot)
            else:
                iprot.skip(ftype)
        elif fid == 1:
            if ftype == TType.STRUCT:
                self.error = beeswaxd.ttypes.QueryNotFoundException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
        elif fid == 2:
            if ftype == TType.STRUCT:
                self.error2 = beeswaxd.ttypes.BeeswaxException()
                self.error2.read(iprot)
            else:
                iprot.skip(ftype)
        else:
            iprot.skip(ftype)
        iprot.readFieldEnd()
    iprot.readStructEnd()","def read ( self , iprot ) :  if (   iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated   and isinstance ( iprot . trans , TTransport . CReadableTransport )   and self . thrift_spec is not None   and fastbinary is not None  ) :   fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) )   return  iprot . readStructBegin ( )  while True :   ( fname , ftype , fid ) = iprot . readFieldBegin ( )   if ftype == TType . STOP :    break   if fid == 0 :    if ftype == TType . STRUCT :     self . success = ExecStats . ttypes . TExecSummary ( )     self . success . read ( iprot )    else :     iprot . skip ( ftype )   elif fid == 1 :    if ftype == TType . STRUCT :     self . error = beeswaxd . ttypes . QueryNotFoundException ( )     self . error . read ( iprot )    else :     iprot . skip ( ftype )       if ftype == TType . STRUCT :     self . error2 = beeswaxd . ttypes . BeeswaxException ( )     self . error2 . read ( iprot )    else :     iprot . skip ( ftype )   else :    iprot . skip ( ftype )   iprot . readFieldEnd ( )  iprot . readStructEnd ( )",elif fid == 2 :,433
7366,"def read(self, iprot):
    if (
        iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None
    ):
        fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
        return
    iprot.readStructBegin()
    while True:
        (fname, ftype, fid) = iprot.readFieldBegin()
        if ftype == TType.STOP:
            break
        if fid == 1:
            if ftype == TType.I64:
                self.id = iprot.readI64()
            else:
                iprot.skip(ftype)
        elif fid == 2:
            if ftype == TType.I32:
                self.state = iprot.readI32()
            else:
                iprot.skip(ftype)
        elif fid == 3:
            if ftype == TType.STRING:
                self.user = iprot.readString()
            else:
                iprot.skip(ftype)
        elif fid == 4:
            if ftype == TType.STRING:
                self.hostname = iprot.readString()
            else:
                iprot.skip(ftype)
        else:
            iprot.skip(ftype)
        iprot.readFieldEnd()
    iprot.readStructEnd()","def read ( self , iprot ) :  if (   iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated   and isinstance ( iprot . trans , TTransport . CReadableTransport )   and self . thrift_spec is not None   and fastbinary is not None  ) :   fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) )   return  iprot . readStructBegin ( )  while True :   ( fname , ftype , fid ) = iprot . readFieldBegin ( )   if ftype == TType . STOP :    break   if fid == 1 :    if ftype == TType . I64 :     self . id = iprot . readI64 ( )    else :     iprot . skip ( ftype )   elif fid == 2 :    if ftype == TType . I32 :     self . state = iprot . readI32 ( )    else :     iprot . skip ( ftype )   elif fid == 3 :         self . user = iprot . readString ( )    else :     iprot . skip ( ftype )   elif fid == 4 :         self . hostname = iprot . readString ( )    else :     iprot . skip ( ftype )   else :    iprot . skip ( ftype )   iprot . readFieldEnd ( )  iprot . readStructEnd ( )",if ftype == TType . STRING :,432
21432,"def validate_arguments(args):
    if args.num_pss < 1:
        print(""Value error: must have ore than one parameter servers."")
        exit(1)
    if not GPU_IDS:
        num_cpus = multiprocessing.cpu_count()
        if args.cpu_trainers > num_cpus:
            print(
                ""Value error: there are %s available CPUs but you are requiring %s.""
                % (num_cpus, args.cpu_trainers)
            )
            exit(1)
    if not os.path.isfile(args.file):
        print(""Value error: model trainning file does not exist"")
        exit(1)","def validate_arguments ( args ) :  if args . num_pss < 1 :   print ( ""Value error: must have ore than one parameter servers."" )   exit ( 1 )  if not GPU_IDS :   num_cpus = multiprocessing . cpu_count ( )       print (     ""Value error: there are %s available CPUs but you are requiring %s.""     % ( num_cpus , args . cpu_trainers )    )    exit ( 1 )  if not os . path . isfile ( args . file ) :   print ( ""Value error: model trainning file does not exist"" )   exit ( 1 )",if args . cpu_trainers > num_cpus :,177
14426,"def KoUserEnviron(startupEnvFileName=None):
    koEnviron = components.classes[""@activestate.com/koUserEnviron;1""].createInstance(
        components.interfaces.koIUserEnviron
    )
    if startupEnvFileName:
        environ = UnwrapObject(koEnviron)
        environ.__init__(startupEnvFileName)
        current_encoding = locale.getlocale()[1]
        # For some reason this can be the value 'None' when running in
        # the pyxpcom test suite, so fall back to the expected default
        # platform encoding.
        if not current_encoding:
            if sys.platform.startswith(""win""):
                current_encoding = ""mbcs""
            elif sys.platform.startswith(""darwin""):
                current_encoding = ""mac-roman""
            elif sys.platform.startswith(""linux""):
                current_encoding = ""utf-8""
        environ.startupEnvironEncoding = current_encoding
    return koEnviron","def KoUserEnviron ( startupEnvFileName = None ) :  koEnviron = components . classes [ ""@activestate.com/koUserEnviron;1"" ] . createInstance (   components . interfaces . koIUserEnviron  )  if startupEnvFileName :   environ = UnwrapObject ( koEnviron )   environ . __init__ ( startupEnvFileName )   current_encoding = locale . getlocale ( ) [ 1 ]             if sys . platform . startswith ( ""win"" ) :     current_encoding = ""mbcs""    elif sys . platform . startswith ( ""darwin"" ) :     current_encoding = ""mac-roman""    elif sys . platform . startswith ( ""linux"" ) :     current_encoding = ""utf-8""   environ . startupEnvironEncoding = current_encoding  return koEnviron",if not current_encoding :,251
7857,"def make_table(items):
    if isinstance(items, dict):
        items = items.items()
        items.sort()
    rows = []
    i = 0
    for name, value in items:
        i += 1
        out = StringIO()
        try:
            pprint.pprint(value, out)
        except Exception as e:
            print(""Error: %s"" % e, file=out)
        value = html_quote(out.getvalue())
        if len(value) > 100:
            # @@: This can actually break the HTML :(
            # should I truncate before quoting?
            orig_value = value
            value = value[:100]
            value += '...'
            value += '%s' % orig_value[100:]
        value = formatter.make_wrappable(value)
        if i % 2:
            attr = ' class=""even""'
        else:
            attr = ' class=""odd""'
        rows.append(
            ''
            '%s%s'
            % (attr, html_quote(name), preserve_whitespace(value, quote=False))
        )
    return ""%s
"" % (""\n"".join(rows))","def make_table ( items ) : if isinstance ( items , dict ) : items = items . items ( ) items . sort ( ) rows = [ ] i = 0 for name , value in items : i += 1 out = StringIO ( ) try : pprint . pprint ( value , out ) except Exception as e : print ( ""Error: %s"" % e , file = out ) value = html_quote ( out . getvalue ( ) ) if len ( value ) > 100 : orig_value = value value = value [ : 100 ] value += '...' value += '%s' % orig_value [ 100 : ] value = formatter . make_wrappable ( value ) attr = ' class=""even""' else : attr = ' class=""odd""' rows . append ( '' '%s%s' % ( attr , html_quote ( name ) , preserve_whitespace ( value , quote = False ) ) ) return ""%s
"" % ( ""\n"" . join ( rows ) )",if i % 2 :,386 21906,"def test_ml_sigma(): if debug_mode: if ""Sigma_u"" not in to_test: # pragma: no cover return print(""\n\nSIGMA_U"", end="""") for ds in datasets: for dt in ds.dt_s_list: if debug_mode: print(""\n"" + dt_s_tup_to_string(dt) + "": "", end="""") exog = results_sm_exog[ds][dt].exog is not None exog_coint = results_sm_exog_coint[ds][dt].exog_coint is not None err_msg = build_err_msg(ds, dt, ""Sigma_u"") obtained = results_sm[ds][dt].sigma_u obtained_exog = results_sm_exog[ds][dt].sigma_u obtained_exog_coint = results_sm_exog_coint[ds][dt].sigma_u desired = results_ref[ds][dt][""est""][""Sigma_u""] assert_allclose(obtained, desired, rtol, atol, False, err_msg) if exog: assert_equal(obtained_exog, obtained, ""WITH EXOG"" + err_msg) if exog_coint: assert_equal(obtained_exog_coint, obtained, ""WITH EXOG_COINT"" + err_msg) ","def test_ml_sigma ( ) : if debug_mode : if ""Sigma_u"" not in to_test : return print ( ""\n\nSIGMA_U"" , end = """" ) for ds in datasets : for dt in ds . dt_s_list : if debug_mode : print ( ""\n"" + dt_s_tup_to_string ( dt ) + "": "" , end = """" ) exog = results_sm_exog [ ds ] [ dt ] . exog is not None exog_coint = results_sm_exog_coint [ ds ] [ dt ] . exog_coint is not None err_msg = build_err_msg ( ds , dt , ""Sigma_u"" ) obtained = results_sm [ ds ] [ dt ] . sigma_u obtained_exog = results_sm_exog [ ds ] [ dt ] . sigma_u obtained_exog_coint = results_sm_exog_coint [ ds ] [ dt ] . sigma_u desired = results_ref [ ds ] [ dt ] [ ""est"" ] [ ""Sigma_u"" ] assert_allclose ( obtained , desired , rtol , atol , False , err_msg ) if exog : assert_equal ( obtained_exog , obtained , ""WITH EXOG"" + err_msg ) assert_equal ( obtained_exog_coint , obtained , ""WITH EXOG_COINT"" + err_msg )",if exog_coint :,374 18333,"def test_sortagrad_trainable_with_batch_bins(module): args = make_arg(sortagrad=1) idim = 10 odim = 5 dummy_json = make_dummy_json(2, [3, 5], [3, 5], idim=idim, odim=odim) if module == ""pytorch"": import espnet.nets.pytorch_backend.e2e_asr as m else: import espnet.nets.chainer_backend.e2e_asr as m batch_elems = 2000 batchset = make_batchset(dummy_json, batch_bins=batch_elems, shortest_first=True) for batch in batchset: n = 0 for uttid, info in batch: ilen = int(info[""input""][0][""shape""][0]) olen = int(info[""output""][0][""shape""][0]) n += ilen * idim + olen * odim assert olen < batch_elems model = m.E2E(idim, odim, args) for batch in batchset: loss = model(*convert_batch(batch, module, idim=idim, odim=odim)) if isinstance(loss, tuple): # chainer return several values as tuple loss[0].backward() # trainable else: loss.backward() # trainable with torch.no_grad(), chainer.no_backprop_mode(): in_data = np.random.randn(10, idim) model.recognize(in_data, args, args.char_list)","def test_sortagrad_trainable_with_batch_bins ( module ) : args = make_arg ( sortagrad = 1 ) idim = 10 odim = 5 dummy_json = make_dummy_json ( 2 , [ 3 , 5 ] , [ 3 , 5 ] , idim = idim , odim = odim ) if module == ""pytorch"" : import espnet . nets . pytorch_backend . e2e_asr as m else : import espnet . nets . chainer_backend . e2e_asr as m batch_elems = 2000 batchset = make_batchset ( dummy_json , batch_bins = batch_elems , shortest_first = True ) for batch in batchset : n = 0 for uttid , info in batch : ilen = int ( info [ ""input"" ] [ 0 ] [ ""shape"" ] [ 0 ] ) olen = int ( info [ ""output"" ] [ 0 ] [ ""shape"" ] [ 0 ] ) n += ilen * idim + olen * odim assert olen < batch_elems model = m . 
E2E ( idim , odim , args ) for batch in batchset : loss = model ( * convert_batch ( batch , module , idim = idim , odim = odim ) ) loss [ 0 ] . backward ( ) else : loss . backward ( ) with torch . no_grad ( ) , chainer . no_backprop_mode ( ) : in_data = np . random . randn ( 10 , idim ) model . recognize ( in_data , args , args . char_list )","if isinstance ( loss , tuple ) :",418 6893,"def handleEvent(self, event): eventName = event.eventType srcModuleName = event.module eventData = event.data parentEvent = event if self.errorState: return None self.sf.debug(f""Received event, {eventName}, from {srcModuleName}"") if self.opts[""api_key""] == """": self.sf.error(""You enabled sfp_honeypot but did not set an API key!"") self.errorState = True return None if eventData in self.results: return None self.results[eventData] = True if eventName == ""NETBLOCK_OWNER"": if not self.opts[""netblocklookup""]: return None else: if IPNetwork(eventData).prefixlen < self.opts[""maxnetblock""]: self.sf.debug( ""Network size bigger than permitted: "" + str(IPNetwork(eventData).prefixlen) + "" > "" + str(self.opts[""maxnetblock""]) ) return None if eventName == ""NETBLOCK_MEMBER"": if not self.opts[""subnetlookup""]: return None else: if IPNetwork(eventData).prefixlen < self.opts[""maxsubnet""]: self.sf.debug( ""Network size bigger than permitted: "" + str(IPNetwork(eventData).prefixlen) + "" > "" + str(self.opts[""maxsubnet""]) ) return None if eventName.startswith(""NETBLOCK_""): for addr in IPNetwork(eventData): if self.checkForStop(): return None self.queryAddr(str(addr), parentEvent) else: self.queryAddr(eventData, parentEvent) ","def handleEvent ( self , event ) : eventName = event . eventType srcModuleName = event . module eventData = event . data parentEvent = event if self . errorState : return None self . sf . debug ( f""Received event, {eventName}, from {srcModuleName}"" ) if self . opts [ ""api_key"" ] == """" : self . sf . error ( ""You enabled sfp_honeypot but did not set an API key!"" ) self . errorState = True return None if eventData in self . results : return None self . results [ eventData ] = True if eventName == ""NETBLOCK_OWNER"" : if not self . opts [ ""netblocklookup"" ] : return None else : if IPNetwork ( eventData ) . prefixlen < self . opts [ ""maxnetblock"" ] : self . sf . debug ( ""Network size bigger than permitted: "" + str ( IPNetwork ( eventData ) . prefixlen ) + "" > "" + str ( self . opts [ ""maxnetblock"" ] ) ) return None if eventName == ""NETBLOCK_MEMBER"" : return None else : if IPNetwork ( eventData ) . prefixlen < self . opts [ ""maxsubnet"" ] : self . sf . debug ( ""Network size bigger than permitted: "" + str ( IPNetwork ( eventData ) . prefixlen ) + "" > "" + str ( self . opts [ ""maxsubnet"" ] ) ) return None if eventName . startswith ( ""NETBLOCK_"" ) : for addr in IPNetwork ( eventData ) : if self . checkForStop ( ) : return None self . queryAddr ( str ( addr ) , parentEvent ) else : self . queryAddr ( eventData , parentEvent )","if not self . 
opts [ ""subnetlookup"" ] :",510 3719,"def get_state(self): """"""See class definition."""""" obs = {} # normalizing constants max_speed = self.k.scenario.max_speed() max_length = self.k.scenario.length() for rl_id in self.k.vehicle.get_rl_ids(): this_speed = self.k.vehicle.get_speed(rl_id) lead_id = self.k.vehicle.get_leader(rl_id) follower = self.k.vehicle.get_follower(rl_id) if lead_id in ["""", None]: # in case leader is not visible lead_speed = max_speed lead_head = max_length else: lead_speed = self.k.vehicle.get_speed(lead_id) lead_head = self.k.vehicle.get_headway(lead_id) if follower in ["""", None]: # in case follower is not visible follow_speed = 0 follow_head = max_length else: follow_speed = self.k.vehicle.get_speed(follower) follow_head = self.k.vehicle.get_headway(follower) observation = np.array( [ this_speed / max_speed, (lead_speed - this_speed) / max_speed, lead_head / max_length, (this_speed - follow_speed) / max_speed, follow_head / max_length, ] ) obs.update({rl_id: observation}) return obs","def get_state ( self ) : """"""See class definition."""""" obs = { } max_speed = self . k . scenario . max_speed ( ) max_length = self . k . scenario . length ( ) for rl_id in self . k . vehicle . get_rl_ids ( ) : this_speed = self . k . vehicle . get_speed ( rl_id ) lead_id = self . k . vehicle . get_leader ( rl_id ) follower = self . k . vehicle . get_follower ( rl_id ) if lead_id in [ """" , None ] : lead_speed = max_speed lead_head = max_length else : lead_speed = self . k . vehicle . get_speed ( lead_id ) lead_head = self . k . vehicle . get_headway ( lead_id ) follow_speed = 0 follow_head = max_length else : follow_speed = self . k . vehicle . get_speed ( follower ) follow_head = self . k . vehicle . get_headway ( follower ) observation = np . array ( [ this_speed / max_speed , ( lead_speed - this_speed ) / max_speed , lead_head / max_length , ( this_speed - follow_speed ) / max_speed , follow_head / max_length , ] ) obs . update ( { rl_id : observation } ) return obs","if follower in [ """" , None ] :",458 12497,"def _real_extract(self, url): dj_id = self._match_id(url) name = None desc = None entries = [] for offset in compat_itertools_count(start=0, step=self._PAGE_SIZE): info = self.query_api( ""dj/program/byradio?asc=false&limit=%d&radioId=%s&offset=%d"" % (self._PAGE_SIZE, dj_id, offset), dj_id, ""Downloading dj programs - %d"" % offset, ) entries.extend( [ self.url_result( ""http://music.163.com/#/program?id=%s"" % program[""id""], ""NetEaseMusicProgram"", program[""id""], ) for program in info[""programs""] ] ) if name is None: radio = info[""programs""][0][""radio""] name = radio[""name""] desc = radio[""desc""] if not info[""more""]: break return self.playlist_result(entries, dj_id, name, desc)","def _real_extract ( self , url ) : dj_id = self . _match_id ( url ) name = None desc = None entries = [ ] for offset in compat_itertools_count ( start = 0 , step = self . _PAGE_SIZE ) : info = self . query_api ( ""dj/program/byradio?asc=false&limit=%d&radioId=%s&offset=%d"" % ( self . _PAGE_SIZE , dj_id , offset ) , dj_id , ""Downloading dj programs - %d"" % offset , ) entries . extend ( [ self . url_result ( ""http://music.163.com/#/program?id=%s"" % program [ ""id"" ] , ""NetEaseMusicProgram"" , program [ ""id"" ] , ) for program in info [ ""programs"" ] ] ) radio = info [ ""programs"" ] [ 0 ] [ ""radio"" ] name = radio [ ""name"" ] desc = radio [ ""desc"" ] if not info [ ""more"" ] : break return self . 
playlist_result ( entries , dj_id , name , desc )",if name is None :,329 7606,"def _test_configuration(self): config_path = self._write_config() try: self._log.debug(""testing configuration"") verboseflag = ""-Q"" if self._log.isEnabledFor(logging.DEBUG): verboseflag = ""-v"" p = subprocess.Popen([self.PATH_SLAPTEST, verboseflag, ""-f"", config_path]) if p.wait() != 0: raise RuntimeError(""configuration test failed"") self._log.debug(""configuration seems ok"") finally: os.remove(config_path)","def _test_configuration ( self ) : config_path = self . _write_config ( ) try : self . _log . debug ( ""testing configuration"" ) verboseflag = ""-Q"" if self . _log . isEnabledFor ( logging . DEBUG ) : verboseflag = ""-v"" p = subprocess . Popen ( [ self . PATH_SLAPTEST , verboseflag , ""-f"" , config_path ] ) raise RuntimeError ( ""configuration test failed"" ) self . _log . debug ( ""configuration seems ok"" ) finally : os . remove ( config_path )",if p . wait ( ) != 0 :,147 14616,"def __new__(cls, key, secret=None, api_version=DEFAULT_API_VERSION, **kwargs): if cls is OpenNebulaNodeDriver: if api_version in [""1.4""]: cls = OpenNebula_1_4_NodeDriver elif api_version in [""2.0"", ""2.2""]: cls = OpenNebula_2_0_NodeDriver elif api_version in [""3.0""]: cls = OpenNebula_3_0_NodeDriver elif api_version in [""3.2""]: cls = OpenNebula_3_2_NodeDriver elif api_version in [""3.6""]: cls = OpenNebula_3_6_NodeDriver elif api_version in [""3.8""]: cls = OpenNebula_3_8_NodeDriver if ""plain_auth"" not in kwargs: kwargs[""plain_auth""] = cls.plain_auth else: cls.plain_auth = kwargs[""plain_auth""] else: raise NotImplementedError( ""No OpenNebulaNodeDriver found for API version %s"" % (api_version) ) return super(OpenNebulaNodeDriver, cls).__new__(cls)","def __new__ ( cls , key , secret = None , api_version = DEFAULT_API_VERSION , ** kwargs ) : if cls is OpenNebulaNodeDriver : cls = OpenNebula_1_4_NodeDriver elif api_version in [ ""2.0"" , ""2.2"" ] : cls = OpenNebula_2_0_NodeDriver elif api_version in [ ""3.0"" ] : cls = OpenNebula_3_0_NodeDriver elif api_version in [ ""3.2"" ] : cls = OpenNebula_3_2_NodeDriver elif api_version in [ ""3.6"" ] : cls = OpenNebula_3_6_NodeDriver elif api_version in [ ""3.8"" ] : cls = OpenNebula_3_8_NodeDriver if ""plain_auth"" not in kwargs : kwargs [ ""plain_auth"" ] = cls . plain_auth else : cls . plain_auth = kwargs [ ""plain_auth"" ] else : raise NotImplementedError ( ""No OpenNebulaNodeDriver found for API version %s"" % ( api_version ) ) return super ( OpenNebulaNodeDriver , cls ) . __new__ ( cls )","if api_version in [ ""1.4"" ] :",339 22370,"def wait_for_image_to_analyze(image_id, api_conf: callable): status = ""analyzing"" start_time_sec = time.time() while status != ""analyzed"" and time.time() - start_time_sec < WAIT_TIMEOUT_SEC: resp = http_get([""images"", ""by_id"", image_id], config=api_conf) status = resp.body[0].get(""analysis_status"", None) if status != ""analyzed"": _logger.info( ""Waiting for Image Analysis to complete. Elapsed Time={}sec"".format( int(time.time() - start_time_sec) ) ) time.sleep(5) if time.time() - start_time_sec >= WAIT_TIMEOUT_SEC: raise TimeoutError( ""Timed out waiting for Image to Analyze (timeout={}sec)"".format( WAIT_TIMEOUT_SEC ) ) else: _logger.info( ""Image Analysis Complete, wait time: {}sec"".format( int(time.time() - start_time_sec) ) )","def wait_for_image_to_analyze ( image_id , api_conf : callable ) : status = ""analyzing"" start_time_sec = time . time ( ) while status != ""analyzed"" and time . 
time ( ) - start_time_sec < WAIT_TIMEOUT_SEC : resp = http_get ( [ ""images"" , ""by_id"" , image_id ] , config = api_conf ) status = resp . body [ 0 ] . get ( ""analysis_status"" , None ) _logger . info ( ""Waiting for Image Analysis to complete. Elapsed Time={}sec"" . format ( int ( time . time ( ) - start_time_sec ) ) ) time . sleep ( 5 ) if time . time ( ) - start_time_sec >= WAIT_TIMEOUT_SEC : raise TimeoutError ( ""Timed out waiting for Image to Analyze (timeout={}sec)"" . format ( WAIT_TIMEOUT_SEC ) ) else : _logger . info ( ""Image Analysis Complete, wait time: {}sec"" . format ( int ( time . time ( ) - start_time_sec ) ) )","if status != ""analyzed"" :",313 23915,"def provider_forms(self) -> list: providers = [] responses = register_ticket_outputs.send(self.request.event) for receiver, response in responses: provider = response(self.request.event) provider.form = ProviderForm( obj=self.request.event, settingspref=""ticketoutput_%s_"" % provider.identifier, data=(self.request.POST if self.request.method == ""POST"" else None), files=(self.request.FILES if self.request.method == ""POST"" else None), ) provider.form.fields = OrderedDict( [ (""ticketoutput_%s_%s"" % (provider.identifier, k), v) for k, v in provider.settings_form_fields.items() ] ) provider.settings_content = provider.settings_content_render(self.request) provider.form.prepare_fields() provider.evaluated_preview_allowed = True if not provider.preview_allowed: provider.evaluated_preview_allowed = False else: for k, v in provider.settings_form_fields.items(): if v.required and not self.request.event.settings.get( ""ticketoutput_%s_%s"" % (provider.identifier, k) ): provider.evaluated_preview_allowed = False break providers.append(provider) return providers ","def provider_forms ( self ) -> list : providers = [ ] responses = register_ticket_outputs . send ( self . request . event ) for receiver , response in responses : provider = response ( self . request . event ) provider . form = ProviderForm ( obj = self . request . event , settingspref = ""ticketoutput_%s_"" % provider . identifier , data = ( self . request . POST if self . request . method == ""POST"" else None ) , files = ( self . request . FILES if self . request . method == ""POST"" else None ) , ) provider . form . fields = OrderedDict ( [ ( ""ticketoutput_%s_%s"" % ( provider . identifier , k ) , v ) for k , v in provider . settings_form_fields . items ( ) ] ) provider . settings_content = provider . settings_content_render ( self . request ) provider . form . prepare_fields ( ) provider . evaluated_preview_allowed = True provider . evaluated_preview_allowed = False else : for k , v in provider . settings_form_fields . items ( ) : if v . required and not self . request . event . settings . get ( ""ticketoutput_%s_%s"" % ( provider . identifier , k ) ) : provider . evaluated_preview_allowed = False break providers . append ( provider ) return providers",if not provider . 
preview_allowed :,397 20180,"def books(self): """"""The list of KoboBook objects in the library."""""" if len(self._books) != 0: return self._books """"""Drm-ed kepub"""""" for row in self.__cursor.execute( ""SELECT DISTINCT volumeid, Title, Attribution, Series FROM content_keys, content WHERE contentid = volumeid"" ): self._books.append( KoboBook( row[0], row[1], self.__bookfile(row[0]), ""kepub"", self.__cursor, author=row[2], series=row[3], ) ) self._volumeID.append(row[0]) """"""Drm-free"""""" for f in os.listdir(self.bookdir): if f not in self._volumeID: row = self.__cursor.execute( ""SELECT Title, Attribution, Series FROM content WHERE ContentID = '"" + f + ""'"" ).fetchone() if row is not None: fTitle = row[0] self._books.append( KoboBook( f, fTitle, self.__bookfile(f), ""drm-free"", self.__cursor, author=row[1], series=row[2], ) ) self._volumeID.append(f) """"""Sort"""""" self._books.sort(key=lambda x: x.title) return self._books","def books ( self ) : """"""The list of KoboBook objects in the library."""""" if len ( self . _books ) != 0 : return self . _books """"""Drm-ed kepub"""""" for row in self . __cursor . execute ( ""SELECT DISTINCT volumeid, Title, Attribution, Series FROM content_keys, content WHERE contentid = volumeid"" ) : self . _books . append ( KoboBook ( row [ 0 ] , row [ 1 ] , self . __bookfile ( row [ 0 ] ) , ""kepub"" , self . __cursor , author = row [ 2 ] , series = row [ 3 ] , ) ) self . _volumeID . append ( row [ 0 ] ) """"""Drm-free"""""" for f in os . listdir ( self . bookdir ) : row = self . __cursor . execute ( ""SELECT Title, Attribution, Series FROM content WHERE ContentID = '"" + f + ""'"" ) . fetchone ( ) if row is not None : fTitle = row [ 0 ] self . _books . append ( KoboBook ( f , fTitle , self . __bookfile ( f ) , ""drm-free"" , self . __cursor , author = row [ 1 ] , series = row [ 2 ] , ) ) self . _volumeID . append ( f ) """"""Sort"""""" self . _books . sort ( key = lambda x : x . title ) return self . _books",if f not in self . _volumeID :,457 25274,"def end_object(self, obj): fields = self.selected_fields if fields is not None: missing = set(fields).difference(self._current.keys()) if missing: _nothing = object() for f in missing: fs = f.split(""__"") value = obj while fs: value = getattr(value, fs.pop(0), _nothing) if value is not _nothing: self._current[f] = value return super().end_object(obj)","def end_object ( self , obj ) : fields = self . selected_fields if fields is not None : missing = set ( fields ) . difference ( self . _current . keys ( ) ) _nothing = object ( ) for f in missing : fs = f . split ( ""__"" ) value = obj while fs : value = getattr ( value , fs . pop ( 0 ) , _nothing ) if value is not _nothing : self . _current [ f ] = value return super ( ) . end_object ( obj )",if missing :,153 11291,"def main(client): # Initialize appropriate service. placement_service = client.GetService(""PlacementService"", version=""v202008"") # Create a statement to select placements. statement = ( ad_manager.StatementBuilder(version=""v202008"") .Where(""status = :status"") .WithBindVariable(""status"", ""ACTIVE"") ) # Retrieve a small amount of placements at a time, paging # through until all placements have been retrieved. while True: response = placement_service.getPlacementsByStatement(statement.ToStatement()) if ""results"" in response and len(response[""results""]): for placement in response[""results""]: # Print out some information for each placement. 
print( 'Placement with ID ""%d"" and name ""%s"" was found.\n' % (placement[""id""], placement[""name""]) ) statement.offset += statement.limit else: break print(""\nNumber of results found: %s"" % response[""totalResultSetSize""])","def main ( client ) : placement_service = client . GetService ( ""PlacementService"" , version = ""v202008"" ) statement = ( ad_manager . StatementBuilder ( version = ""v202008"" ) . Where ( ""status = :status"" ) . WithBindVariable ( ""status"" , ""ACTIVE"" ) ) while True : response = placement_service . getPlacementsByStatement ( statement . ToStatement ( ) ) for placement in response [ ""results"" ] : print ( 'Placement with ID ""%d"" and name ""%s"" was found.\n' % ( placement [ ""id"" ] , placement [ ""name"" ] ) ) statement . offset += statement . limit else : break print ( ""\nNumber of results found: %s"" % response [ ""totalResultSetSize"" ] )","if ""results"" in response and len ( response [ ""results"" ] ) :",278 22009,"def _LazyAddAttr_(self, attr): if self._lazydata_ is None: return 0 res = 0 typeinfo, typecomp = self._lazydata_ olerepr = self._olerepr_ # We need to explicitly check each invoke type individually - simply # specifying '0' will bind to ""any member"", which may not be the one # we are actually after (ie, we may be after prop_get, but returned # the info for the prop_put.) for i in ALL_INVOKE_TYPES: try: x, t = typecomp.Bind(attr, i) # Support 'Get' and 'Set' properties - see # bug 1587023 if x == 0 and attr[:3] in (""Set"", ""Get""): x, t = typecomp.Bind(attr[3:], i) if x == 1: # it's a FUNCDESC r = olerepr._AddFunc_(typeinfo, t, 0) elif x == 2: # it's a VARDESC r = olerepr._AddVar_(typeinfo, t, 0) else: # not found or TYPEDESC/IMPLICITAPP r = None if not r is None: key, map = r[0], r[1] item = map[key] if map == olerepr.propMapPut: olerepr._propMapPutCheck_(key, item) elif map == olerepr.propMapGet: olerepr._propMapGetCheck_(key, item) res = 1 except: pass return res","def _LazyAddAttr_ ( self , attr ) : if self . _lazydata_ is None : return 0 res = 0 typeinfo , typecomp = self . _lazydata_ olerepr = self . _olerepr_ for i in ALL_INVOKE_TYPES : try : x , t = typecomp . Bind ( attr , i ) x , t = typecomp . Bind ( attr [ 3 : ] , i ) if x == 1 : r = olerepr . _AddFunc_ ( typeinfo , t , 0 ) elif x == 2 : r = olerepr . _AddVar_ ( typeinfo , t , 0 ) else : r = None if not r is None : key , map = r [ 0 ] , r [ 1 ] item = map [ key ] if map == olerepr . propMapPut : olerepr . _propMapPutCheck_ ( key , item ) elif map == olerepr . propMapGet : olerepr . _propMapGetCheck_ ( key , item ) res = 1 except : pass return res","if x == 0 and attr [ : 3 ] in ( ""Set"" , ""Get"" ) :",450 12351,"def _convert(self, value, context): if value is None: return None if self.is_allowed_model(value): return value if not isinstance(value, dict): if len(self.model_classes) > 1: instanceof_msg = ""one of: {}"".format( "", "".join(cls.__name__ for cls in self.model_classes) ) else: instanceof_msg = self.model_classes[0].__name__ raise ConversionError( ""Please use a mapping for this field or "" ""an instance of {}"".format(instanceof_msg) ) model_class = self.find_model(value) return model_class(value, context=context)","def _convert ( self , value , context ) : if value is None : return None if self . is_allowed_model ( value ) : return value if not isinstance ( value , dict ) : instanceof_msg = ""one of: {}"" . format ( "", "" . join ( cls . __name__ for cls in self . model_classes ) ) else : instanceof_msg = self . model_classes [ 0 ] . 
__name__ raise ConversionError ( ""Please use a mapping for this field or "" ""an instance of {}"" . format ( instanceof_msg ) ) model_class = self . find_model ( value ) return model_class ( value , context = context )",if len ( self . model_classes ) > 1 :,201 4405,"def _establish(self): # try to establish the outgoing connection self.fsm.change(FSM.ACTIVE) if not self.proto: for action in self._connect(): if action in ACTION.ALL: yield action self.fsm.change(FSM.CONNECT) # normal sending of OPEN first ... if self.neighbor.local_as: for sent_open in self._send_open(): if sent_open in ACTION.ALL: yield sent_open self.proto.negotiated.sent(sent_open) self.fsm.change(FSM.OPENSENT) # read the peer's open for received_open in self._read_open(): if received_open in ACTION.ALL: yield received_open self.proto.negotiated.received(received_open) self.proto.connection.msg_size = self.proto.negotiated.msg_size # if we mirror the ASN, we need to read first and send second if not self.neighbor.local_as: for sent_open in self._send_open(): if sent_open in ACTION.ALL: yield sent_open self.proto.negotiated.sent(sent_open) self.fsm.change(FSM.OPENSENT) self.proto.validate_open() self.fsm.change(FSM.OPENCONFIRM) self.recv_timer = ReceiveTimer( self.proto.connection.session, self.proto.negotiated.holdtime, 4, 0 ) for action in self._send_ka(): yield action for action in self._read_ka(): yield action self.fsm.change(FSM.ESTABLISHED) self.stats[""complete""] = time.time() # let the caller know that we were sucesfull yield ACTION.NOW","def _establish ( self ) : self . fsm . change ( FSM . ACTIVE ) if not self . proto : for action in self . _connect ( ) : if action in ACTION . ALL : yield action self . fsm . change ( FSM . CONNECT ) if self . neighbor . local_as : for sent_open in self . _send_open ( ) : if sent_open in ACTION . ALL : yield sent_open self . proto . negotiated . sent ( sent_open ) self . fsm . change ( FSM . OPENSENT ) for received_open in self . _read_open ( ) : yield received_open self . proto . negotiated . received ( received_open ) self . proto . connection . msg_size = self . proto . negotiated . msg_size if not self . neighbor . local_as : for sent_open in self . _send_open ( ) : if sent_open in ACTION . ALL : yield sent_open self . proto . negotiated . sent ( sent_open ) self . fsm . change ( FSM . OPENSENT ) self . proto . validate_open ( ) self . fsm . change ( FSM . OPENCONFIRM ) self . recv_timer = ReceiveTimer ( self . proto . connection . session , self . proto . negotiated . holdtime , 4 , 0 ) for action in self . _send_ka ( ) : yield action for action in self . _read_ka ( ) : yield action self . fsm . change ( FSM . ESTABLISHED ) self . stats [ ""complete"" ] = time . time ( ) yield ACTION . NOW",if received_open in ACTION . 
ALL :,506 14088,"def aggregate(cls, dataset, dimensions, function, **kwargs): data = dataset.data cols = [d.name for d in dataset.kdims if d in dimensions] vdims = dataset.dimensions(""value"", label=""name"") dtypes = data.dtypes numeric = [ c for c, dtype in zip(dtypes.index, dtypes.values) if dtype.kind in ""iufc"" and c in vdims ] reindexed = data[cols + numeric] inbuilts = { ""amin"": ""min"", ""amax"": ""max"", ""mean"": ""mean"", ""std"": ""std"", ""sum"": ""sum"", ""var"": ""var"", } if len(dimensions): groups = reindexed.groupby(cols) if function.__name__ in inbuilts: agg = getattr(groups, inbuilts[function.__name__])() else: agg = groups.apply(function) df = agg.reset_index() else: if function.__name__ in inbuilts: agg = getattr(reindexed, inbuilts[function.__name__])() else: raise NotImplementedError df = pd.DataFrame(agg.compute()).T dropped = [] for vd in vdims: if vd not in df.columns: dropped.append(vd) return df, dropped ","def aggregate ( cls , dataset , dimensions , function , ** kwargs ) : data = dataset . data cols = [ d . name for d in dataset . kdims if d in dimensions ] vdims = dataset . dimensions ( ""value"" , label = ""name"" ) dtypes = data . dtypes numeric = [ c for c , dtype in zip ( dtypes . index , dtypes . values ) if dtype . kind in ""iufc"" and c in vdims ] reindexed = data [ cols + numeric ] inbuilts = { ""amin"" : ""min"" , ""amax"" : ""max"" , ""mean"" : ""mean"" , ""std"" : ""std"" , ""sum"" : ""sum"" , ""var"" : ""var"" , } if len ( dimensions ) : groups = reindexed . groupby ( cols ) if function . __name__ in inbuilts : agg = getattr ( groups , inbuilts [ function . __name__ ] ) ( ) else : agg = groups . apply ( function ) df = agg . reset_index ( ) else : if function . __name__ in inbuilts : agg = getattr ( reindexed , inbuilts [ function . __name__ ] ) ( ) else : raise NotImplementedError df = pd . DataFrame ( agg . compute ( ) ) . T dropped = [ ] for vd in vdims : dropped . append ( vd ) return df , dropped",if vd not in df . columns :,369 11519,"def checkbox_callback(checked_value): global search_box_area, phrases_area group_info_box.text = """" if 0 in checked_value: annotation_layout.children = [ annotation_input, annotate_button, annotation_output, ] else: annotation_layout.children = [] annotation_output.text = """" if 1 in checked_value: if vocab is None: working_label.text = fetching_text get_vocab() if not phrases_list.options: working_label.text = working_text phrases_list.options = list(cut_vocab_dict.keys())[ 0:max_visible_phrases ] # show the cut representation # search_box_area.children = [search_input_box] phrases_area.children = [search_input_box, search_working_label, phrases_list] working_label.text = """" else: # search_box_area.children = [] phrases_area.children = [] group_info_box.text = """"","def checkbox_callback ( checked_value ) : global search_box_area , phrases_area group_info_box . text = """" if 0 in checked_value : annotation_layout . children = [ annotation_input , annotate_button , annotation_output , ] else : annotation_layout . children = [ ] annotation_output . text = """" if 1 in checked_value : working_label . text = fetching_text get_vocab ( ) if not phrases_list . options : working_label . text = working_text phrases_list . options = list ( cut_vocab_dict . keys ( ) ) [ 0 : max_visible_phrases ] phrases_area . children = [ search_input_box , search_working_label , phrases_list ] working_label . text = """" else : phrases_area . children = [ ] group_info_box . 
text = """"",if vocab is None :,311 6051,"def get_tokens_unprocessed(self, text): bashlexer = BashLexer(**self.options) pos = 0 curcode = """" insertions = [] for match in line_re.finditer(text): line = match.group() m = re.match( r""^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)"" r""?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)"", line, ) if m: # To support output lexers (say diff output), the output # needs to be broken by prompts whenever the output lexer # changes. if not insertions: pos = match.start() insertions.append((len(curcode), [(0, Generic.Prompt, m.group(1))])) curcode += m.group(2) elif line.startswith("">""): insertions.append((len(curcode), [(0, Generic.Prompt, line[:1])])) curcode += line[1:] else: if insertions: toks = bashlexer.get_tokens_unprocessed(curcode) for i, t, v in do_insertions(insertions, toks): yield pos + i, t, v yield match.start(), Generic.Output, line insertions = [] curcode = """" if insertions: for i, t, v in do_insertions( insertions, bashlexer.get_tokens_unprocessed(curcode) ): yield pos + i, t, v","def get_tokens_unprocessed ( self , text ) : bashlexer = BashLexer ( ** self . options ) pos = 0 curcode = """" insertions = [ ] for match in line_re . finditer ( text ) : line = match . group ( ) m = re . match ( r""^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)"" r""?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)"" , line , ) if m : if not insertions : pos = match . start ( ) insertions . append ( ( len ( curcode ) , [ ( 0 , Generic . Prompt , m . group ( 1 ) ) ] ) ) curcode += m . group ( 2 ) elif line . startswith ( "">"" ) : insertions . append ( ( len ( curcode ) , [ ( 0 , Generic . Prompt , line [ : 1 ] ) ] ) ) curcode += line [ 1 : ] else : toks = bashlexer . get_tokens_unprocessed ( curcode ) for i , t , v in do_insertions ( insertions , toks ) : yield pos + i , t , v yield match . start ( ) , Generic . Output , line insertions = [ ] curcode = """" for i , t , v in do_insertions ( insertions , bashlexer . get_tokens_unprocessed ( curcode ) ) : yield pos + i , t , v",if insertions :,448 16253,"def display_list_by_prefix(names_list, starting_spaces=0): """"""Creates a help string for names_list grouped by prefix."""""" cur_prefix, result_lines = None, [] space = "" "" * starting_spaces for name in sorted(names_list): split = name.split(""_"", 1) prefix = split[0] if cur_prefix != prefix: result_lines.append(space + prefix + "":"") cur_prefix = prefix result_lines.append(space + "" * "" + name) return ""\n"".join(result_lines)","def display_list_by_prefix ( names_list , starting_spaces = 0 ) : """"""Creates a help string for names_list grouped by prefix."""""" cur_prefix , result_lines = None , [ ] space = "" "" * starting_spaces for name in sorted ( names_list ) : split = name . split ( ""_"" , 1 ) prefix = split [ 0 ] result_lines . append ( space + prefix + "":"" ) cur_prefix = prefix result_lines . append ( space + "" * "" + name ) return ""\n"" . join ( result_lines )",if cur_prefix != prefix :,153 24824,"def interact(show_tokens=False): try: import readline except ImportError: pass sys.modules[""__main__""] = global_env while True: buffer = """" continuation_flag = False tokens = [] while True: try: if continuation_flag: s = input(""... 
"") if s == ""\n"": continue buffer = buffer + ""\n"" + s else: s = input("">>> "") if s == ""\n"": continue buffer = s except EOFError: print() sys.exit() try: lexer = lex(buffer, repl_mode=True, debug=show_tokens) for last in lexer: tokens.append(last) if len(tokens) == 0: buffer = """" continue if ( last is REPL_CONTINUE or last.name == ""COLON"" or last.name == ""THINARROW"" ): continuation_flag = True tokens = [] continue else: break except Exception: traceback.print_exc(file=current_error_port) continuation_flag = False buffer = """" continue try: eval_tokens(tokens) except ParsingError as e: print(e, file=current_error_port) except Exception: traceback.print_exc(file=current_error_port)","def interact ( show_tokens = False ) : try : import readline except ImportError : pass sys . modules [ ""__main__"" ] = global_env while True : buffer = """" continuation_flag = False tokens = [ ] while True : try : s = input ( ""... "" ) if s == ""\n"" : continue buffer = buffer + ""\n"" + s else : s = input ( "">>> "" ) if s == ""\n"" : continue buffer = s except EOFError : print ( ) sys . exit ( ) try : lexer = lex ( buffer , repl_mode = True , debug = show_tokens ) for last in lexer : tokens . append ( last ) if len ( tokens ) == 0 : buffer = """" continue if ( last is REPL_CONTINUE or last . name == ""COLON"" or last . name == ""THINARROW"" ) : continuation_flag = True tokens = [ ] continue else : break except Exception : traceback . print_exc ( file = current_error_port ) continuation_flag = False buffer = """" continue try : eval_tokens ( tokens ) except ParsingError as e : print ( e , file = current_error_port ) except Exception : traceback . print_exc ( file = current_error_port )",if continuation_flag :,495 11811,"def incr_and_sum(self, key, keys, amount, maximum, ttl): ttl = int(ttl / 1000) with self.pool.reserve(block=True) as client: client.add(key, 0, time=ttl) while True: value, cid = client.gets(key) if cid is None: return False value += amount if value > maximum: return False # TODO: Drop non-callable keys in Dramatiq v2. key_list = keys() if callable(keys) else keys mapping = client.get_multi(key_list) total = amount + sum(mapping.values()) if total > maximum: return False try: swapped = client.cas(key, value, cid, ttl) if swapped: return True except NotFound: # pragma: no cover continue","def incr_and_sum ( self , key , keys , amount , maximum , ttl ) : ttl = int ( ttl / 1000 ) with self . pool . reserve ( block = True ) as client : client . add ( key , 0 , time = ttl ) while True : value , cid = client . gets ( key ) if cid is None : return False value += amount if value > maximum : return False key_list = keys ( ) if callable ( keys ) else keys mapping = client . get_multi ( key_list ) total = amount + sum ( mapping . values ( ) ) return False try : swapped = client . 
cas ( key , value , cid , ttl ) if swapped : return True except NotFound : continue",if total > maximum :,270 13035,"def _validate_cfg(self): if not isinstance(self.paramwise_cfg, dict): raise TypeError( ""paramwise_cfg should be None or a dict, "" f""but got {type(self.paramwise_cfg)}"" ) if ""custom_keys"" in self.paramwise_cfg: if not isinstance(self.paramwise_cfg[""custom_keys""], dict): raise TypeError( ""If specified, custom_keys must be a dict, "" f'but got {type(self.paramwise_cfg[""custom_keys""])}' ) if self.base_wd is None: for key in self.paramwise_cfg[""custom_keys""]: if ""decay_mult"" in self.paramwise_cfg[""custom_keys""][key]: raise ValueError(""base_wd should not be None"") # get base lr and weight decay # weight_decay must be explicitly specified if mult is specified if ( ""bias_decay_mult"" in self.paramwise_cfg or ""norm_decay_mult"" in self.paramwise_cfg or ""dwconv_decay_mult"" in self.paramwise_cfg ): if self.base_wd is None: raise ValueError(""base_wd should not be None"") ","def _validate_cfg ( self ) : if not isinstance ( self . paramwise_cfg , dict ) : raise TypeError ( ""paramwise_cfg should be None or a dict, "" f""but got {type(self.paramwise_cfg)}"" ) if ""custom_keys"" in self . paramwise_cfg : if not isinstance ( self . paramwise_cfg [ ""custom_keys"" ] , dict ) : raise TypeError ( ""If specified, custom_keys must be a dict, "" f'but got {type(self.paramwise_cfg[""custom_keys""])}' ) if self . base_wd is None : for key in self . paramwise_cfg [ ""custom_keys"" ] : raise ValueError ( ""base_wd should not be None"" ) if ( ""bias_decay_mult"" in self . paramwise_cfg or ""norm_decay_mult"" in self . paramwise_cfg or ""dwconv_decay_mult"" in self . paramwise_cfg ) : if self . base_wd is None : raise ValueError ( ""base_wd should not be None"" )","if ""decay_mult"" in self . paramwise_cfg [ ""custom_keys"" ] [ key ] :",342 13630,"def tile(cls, op: ""DataFrameToSQLTable""): inp = op.inputs[0] out = op.outputs[0] if inp.ndim == 2: inp = inp.rechunk({1: (inp.shape[1],)})._inplace_tile() chunks = [] for c in inp.chunks: new_op = op.copy().reset_key() new_op._if_exists = ""append"" index_value = parse_index(c.index_value.to_pandas()[:0], c) if c.ndim == 2: columns_value = parse_index( c.columns_value.to_pandas()[:0], store_data=True ) chunks.append( new_op.new_chunk( [c], shape=(0, 0), index=c.index, dtypes=out.dtypes, index_value=index_value, columns_value=columns_value, ) ) else: chunks.append( new_op.new_chunk( [c], shape=(0,), index=c.index, dtype=out.dtype, index_value=index_value, ) ) new_op = op.copy().reset_key() params = out.params.copy() params[""nsplits""] = tuple((0,) * len(sp) for sp in inp.nsplits) return new_op.new_tileables([inp], chunks=chunks, **params)","def tile ( cls , op : ""DataFrameToSQLTable"" ) : inp = op . inputs [ 0 ] out = op . outputs [ 0 ] if inp . ndim == 2 : inp = inp . rechunk ( { 1 : ( inp . shape [ 1 ] , ) } ) . _inplace_tile ( ) chunks = [ ] for c in inp . chunks : new_op = op . copy ( ) . reset_key ( ) new_op . _if_exists = ""append"" index_value = parse_index ( c . index_value . to_pandas ( ) [ : 0 ] , c ) columns_value = parse_index ( c . columns_value . to_pandas ( ) [ : 0 ] , store_data = True ) chunks . append ( new_op . new_chunk ( [ c ] , shape = ( 0 , 0 ) , index = c . index , dtypes = out . dtypes , index_value = index_value , columns_value = columns_value , ) ) else : chunks . append ( new_op . new_chunk ( [ c ] , shape = ( 0 , ) , index = c . index , dtype = out . dtype , index_value = index_value , ) ) new_op = op . 
copy ( ) . reset_key ( ) params = out . params . copy ( ) params [ ""nsplits"" ] = tuple ( ( 0 , ) * len ( sp ) for sp in inp . nsplits ) return new_op . new_tileables ( [ inp ] , chunks = chunks , ** params )",if c . ndim == 2 :,447 22940,"def tokenize(self, s): """"""Tokenize comments, strings, identifiers, whitespace and operators."""""" i, result = 0, [] while i < len(s): # Loop invariant: at end: j > i and s[i:j] is the new token. j = i ch = s[i] if ch in ""@\n"": # Make *sure* these are separate tokens. j += 1 elif ch == ""#"": # Preprocessor directive. j = g.skip_to_end_of_line(s, i) elif ch in "" \t"": j = g.skip_ws(s, i) elif ch.isalpha() or ch == ""_"": j = g.skip_c_id(s, i) elif g.match(s, i, ""//""): j = g.skip_line(s, i) elif g.match(s, i, ""/*""): j = self.skip_block_comment(s, i) elif ch in ""'\"""": j = g.skip_string(s, i) else: j += 1 assert j > i result.append("""".join(s[i:j])) i = j # Advance. return result","def tokenize ( self , s ) : """"""Tokenize comments, strings, identifiers, whitespace and operators."""""" i , result = 0 , [ ] while i < len ( s ) : j = i ch = s [ i ] if ch in ""@\n"" : j += 1 elif ch == ""#"" : j = g . skip_to_end_of_line ( s , i ) elif ch in "" \t"" : j = g . skip_ws ( s , i ) j = g . skip_c_id ( s , i ) elif g . match ( s , i , ""//"" ) : j = g . skip_line ( s , i ) elif g . match ( s , i , ""/*"" ) : j = self . skip_block_comment ( s , i ) elif ch in ""'\"""" : j = g . skip_string ( s , i ) else : j += 1 assert j > i result . append ( """" . join ( s [ i : j ] ) ) i = j return result","elif ch . isalpha ( ) or ch == ""_"" :",333 10917,"def _check_init_script(path, sentinel): if not os.path.exists(path): return lines = open(path).readlines() for i, line in enumerate(lines): if sentinel in line: cli.out( ""Guild completion is already installed in %s on line %i:\n %s"" % (util.format_dir(path), i + 1, line.rstrip()), err=True, ) raise SystemExit(0)","def _check_init_script ( path , sentinel ) : if not os . path . exists ( path ) : return lines = open ( path ) . readlines ( ) for i , line in enumerate ( lines ) : cli . out ( ""Guild completion is already installed in %s on line %i:\n %s"" % ( util . format_dir ( path ) , i + 1 , line . rstrip ( ) ) , err = True , ) raise SystemExit ( 0 )",if sentinel in line :,137 5307,"def postprocess_slice(slicename, skipped): pngsliceFName = slicename + "".png"" hotsliceFName = slicename + "".hotspot.png"" for i, size in enumerate(sizes): subdir = ""bitmaps/{}x{}"".format(size, size) relslice = ""{}/{}"".format(subdir, pngsliceFName) csize = get_csize(i, size) if relslice not in skipped: new_base = cropalign(csize, relslice) if options.hotspots: hotrelslice = ""{}/{}"".format(subdir, hotsliceFName) cropalign_hotspot(new_base, csize, hotrelslice) for scale in scale_pairs: subdir = ""bitmaps/{}x{}_{}"".format(size, size, scale[1]) relslice = ""{}/{}"".format(subdir, pngsliceFName) if relslice not in skipped: new_base = cropalign(csize, relslice) if options.hotspots: hotrelslice = ""{}/{}"".format(subdir, hotsliceFName) cropalign_hotspot(new_base, csize, hotrelslice)","def postprocess_slice ( slicename , skipped ) : pngsliceFName = slicename + "".png"" hotsliceFName = slicename + "".hotspot.png"" for i , size in enumerate ( sizes ) : subdir = ""bitmaps/{}x{}"" . format ( size , size ) relslice = ""{}/{}"" . format ( subdir , pngsliceFName ) csize = get_csize ( i , size ) if relslice not in skipped : new_base = cropalign ( csize , relslice ) hotrelslice = ""{}/{}"" . 
format ( subdir , hotsliceFName ) cropalign_hotspot ( new_base , csize , hotrelslice ) for scale in scale_pairs : subdir = ""bitmaps/{}x{}_{}"" . format ( size , size , scale [ 1 ] ) relslice = ""{}/{}"" . format ( subdir , pngsliceFName ) if relslice not in skipped : new_base = cropalign ( csize , relslice ) hotrelslice = ""{}/{}"" . format ( subdir , hotsliceFName ) cropalign_hotspot ( new_base , csize , hotrelslice )",if options . hotspots :,315 22423,"def remove(self): # remove every non-ship collectors (those are independent) for collector in self.__collectors[:]: if not collector.is_ship: collector.remove() else: collector.decouple_from_home_building() # TODO remove the remove call() #2123 collector.remove() assert not [c for c in self.__collectors] super().remove() self.__collectors = None self.path_nodes = None",def remove ( self ) : for collector in self . __collectors [ : ] : collector . remove ( ) else : collector . decouple_from_home_building ( ) collector . remove ( ) assert not [ c for c in self . __collectors ] super ( ) . remove ( ) self . __collectors = None self . path_nodes = None,if not collector . is_ship :,129 5616,"def get_error_diagnostics(self): diagnostics = [] class_name = self.__class__.__name__ if self.stdout is not None: with open(self.stdout.name) as fds: contents = fds.read().strip() if contents: diagnostics.append(class_name + "" STDOUT:\n"" + contents) if self.stderr is not None: with open(self.stderr.name) as fds: contents = fds.read().strip() if contents: diagnostics.append(class_name + "" STDERR:\n"" + contents) return diagnostics ","def get_error_diagnostics ( self ) : diagnostics = [ ] class_name = self . __class__ . __name__ if self . stdout is not None : with open ( self . stdout . name ) as fds : contents = fds . read ( ) . strip ( ) diagnostics . append ( class_name + "" STDOUT:\n"" + contents ) if self . stderr is not None : with open ( self . stderr . name ) as fds : contents = fds . read ( ) . strip ( ) diagnostics . append ( class_name + "" STDERR:\n"" + contents ) return diagnostics",if contents :,172 9373,"def PyJs_anonymous_1469_(that, key, this, arguments, var=var): var = Scope( {u""this"": this, u""arguments"": arguments, u""key"": key, u""that"": that}, var ) var.registers([u""index"", u""that"", u""key"", u""entry""]) var.put(u""index"", var.get(u""fastKey"")(var.get(u""key""))) if PyJsStrictNeq(var.get(u""index""), Js(u""F"")): return var.get(u""that"").get(u""_i"").get(var.get(u""index"")) # for JS loop var.put(u""entry"", var.get(u""that"").get(u""_f"")) while var.get(u""entry""): try: if var.get(u""entry"").get(u""k"") == var.get(u""key""): return var.get(u""entry"") finally: var.put(u""entry"", var.get(u""entry"").get(u""n""))","def PyJs_anonymous_1469_ ( that , key , this , arguments , var = var ) : var = Scope ( { u""this"" : this , u""arguments"" : arguments , u""key"" : key , u""that"" : that } , var ) var . registers ( [ u""index"" , u""that"" , u""key"" , u""entry"" ] ) var . put ( u""index"" , var . get ( u""fastKey"" ) ( var . get ( u""key"" ) ) ) if PyJsStrictNeq ( var . get ( u""index"" ) , Js ( u""F"" ) ) : return var . get ( u""that"" ) . get ( u""_i"" ) . get ( var . get ( u""index"" ) ) var . put ( u""entry"" , var . get ( u""that"" ) . get ( u""_f"" ) ) while var . get ( u""entry"" ) : try : return var . get ( u""entry"" ) finally : var . put ( u""entry"" , var . get ( u""entry"" ) . get ( u""n"" ) )","if var . get ( u""entry"" ) . get ( u""k"" ) == var . 
get ( u""key"" ) :",280 3136,"def _format(node): if isinstance(node, AST): fields = [(a, _format(b)) for a, b in iter_fields(node)] rv = ""%s(%s"" % ( node.__class__.__name__, "", "".join( (""%s=%s"" % field for field in fields) if annotate_fields else (b for a, b in fields) ), ) if include_attributes and node._attributes: rv += fields and "", "" or "" "" rv += "", "".join( ""%s=%s"" % (a, _format(getattr(node, a))) for a in node._attributes ) return rv + "")"" elif isinstance(node, list): return ""[%s]"" % "", "".join(_format(x) for x in node) return repr(node) ","def _format ( node ) : if isinstance ( node , AST ) : fields = [ ( a , _format ( b ) ) for a , b in iter_fields ( node ) ] rv = ""%s(%s"" % ( node . __class__ . __name__ , "", "" . join ( ( ""%s=%s"" % field for field in fields ) if annotate_fields else ( b for a , b in fields ) ) , ) rv += fields and "", "" or "" "" rv += "", "" . join ( ""%s=%s"" % ( a , _format ( getattr ( node , a ) ) ) for a in node . _attributes ) return rv + "")"" elif isinstance ( node , list ) : return ""[%s]"" % "", "" . join ( _format ( x ) for x in node ) return repr ( node )",if include_attributes and node . _attributes :,233 1104,"def expandWithRefs(self, s, varname): if not isinstance(s, str): # sanity check return VariableParse(varname, self, s) if varname and varname in self.expand_cache: return self.expand_cache[varname] varparse = VariableParse(varname, self) while s.find(""${"") != -1: olds = s try: s = __expand_var_regexp__.sub(varparse.var_sub, s) try: s = __expand_python_regexp__.sub(varparse.python_sub, s) except SyntaxError as e: # Likely unmatched brackets, just don't expand the expression if e.msg != ""EOL while scanning string literal"": raise if s == olds: break except ExpansionError: raise except bb.parse.SkipRecipe: raise except Exception as exc: raise ExpansionError(varname, s, exc) from exc varparse.value = s if varname: self.expand_cache[varname] = varparse return varparse","def expandWithRefs ( self , s , varname ) : if not isinstance ( s , str ) : return VariableParse ( varname , self , s ) if varname and varname in self . expand_cache : return self . expand_cache [ varname ] varparse = VariableParse ( varname , self ) while s . find ( ""${"" ) != - 1 : olds = s try : s = __expand_var_regexp__ . sub ( varparse . var_sub , s ) try : s = __expand_python_regexp__ . sub ( varparse . python_sub , s ) except SyntaxError as e : raise if s == olds : break except ExpansionError : raise except bb . parse . SkipRecipe : raise except Exception as exc : raise ExpansionError ( varname , s , exc ) from exc varparse . value = s if varname : self . expand_cache [ varname ] = varparse return varparse","if e . msg != ""EOL while scanning string literal"" :",309 5802,"def check_network(self) -> NetworkStatus: try: loop = asyncio.get_event_loop() async with aiohttp.ClientSession( loop=loop, connector=aiohttp.TCPConnector(verify_ssl=False) ) as session: async with session.get(self.log_server_url) as resp: status_text = await resp.text() if status_text != ""OK"": raise Exception(""Log proxy server is down."") except asyncio.CancelledError: raise except Exception: return NetworkStatus.NOT_CONNECTED return NetworkStatus.CONNECTED ","def check_network ( self ) -> NetworkStatus : try : loop = asyncio . get_event_loop ( ) async with aiohttp . ClientSession ( loop = loop , connector = aiohttp . TCPConnector ( verify_ssl = False ) ) as session : async with session . get ( self . log_server_url ) as resp : status_text = await resp . 
text ( ) raise Exception ( ""Log proxy server is down."" ) except asyncio . CancelledError : raise except Exception : return NetworkStatus . NOT_CONNECTED return NetworkStatus . CONNECTED","if status_text != ""OK"" :",168 11285,"def main(client, key_id): # Initialize appropriate service. custom_targeting_service = client.GetService( ""CustomTargetingService"", version=""v202008"" ) statement = ( ad_manager.StatementBuilder(version=""v202008"") .Where(""customTargetingKeyId = :keyId"") .WithBindVariable(""keyId"", int(key_id)) ) while True: # Get custom targeting values by statement. response = custom_targeting_service.getCustomTargetingValuesByStatement( statement.ToStatement() ) # Update each local custom targeting value object by changing its name. if ""results"" in response and len(response[""results""]): updated_values = [] for value in response[""results""]: if not value[""displayName""]: value[""displayName""] = value[""name""] value[""displayName""] += "" (Deprecated)"" updated_values.append(value) values = custom_targeting_service.updateCustomTargetingValues( updated_values ) # Display results. for value in values: print( 'Custom targeting value with id ""%s"", name ""%s"", and display' ' name ""%s"" was updated.' % (value[""id""], value[""name""], value[""displayName""]) ) statement.offset += statement.limit else: break if response[""totalResultSetSize""] == 0: print(""No custom targeting values were updated."")","def main ( client , key_id ) : custom_targeting_service = client . GetService ( ""CustomTargetingService"" , version = ""v202008"" ) statement = ( ad_manager . StatementBuilder ( version = ""v202008"" ) . Where ( ""customTargetingKeyId = :keyId"" ) . WithBindVariable ( ""keyId"" , int ( key_id ) ) ) while True : response = custom_targeting_service . getCustomTargetingValuesByStatement ( statement . ToStatement ( ) ) updated_values = [ ] for value in response [ ""results"" ] : if not value [ ""displayName"" ] : value [ ""displayName"" ] = value [ ""name"" ] value [ ""displayName"" ] += "" (Deprecated)"" updated_values . append ( value ) values = custom_targeting_service . updateCustomTargetingValues ( updated_values ) for value in values : print ( 'Custom targeting value with id ""%s"", name ""%s"", and display' ' name ""%s"" was updated.' % ( value [ ""id"" ] , value [ ""name"" ] , value [ ""displayName"" ] ) ) statement . offset += statement . limit else : break if response [ ""totalResultSetSize"" ] == 0 : print ( ""No custom targeting values were updated."" )","if ""results"" in response and len ( response [ ""results"" ] ) :",427 18980,"def check_app_config_brackets(self): for sn, app in cherrypy.tree.apps.items(): if not isinstance(app, cherrypy.Application): continue if not app.config: continue for key in app.config.keys(): if key.startswith(""["") or key.endswith(""]""): warnings.warn( ""The application mounted at %r has config "" ""section names with extraneous brackets: %r. "" ""Config *files* need brackets; config *dicts* "" ""(e.g. passed to tree.mount) do not."" % (sn, key) ) ","def check_app_config_brackets ( self ) : for sn , app in cherrypy . tree . apps . items ( ) : continue if not app . config : continue for key in app . config . keys ( ) : if key . startswith ( ""["" ) or key . endswith ( ""]"" ) : warnings . warn ( ""The application mounted at %r has config "" ""section names with extraneous brackets: %r. "" ""Config *files* need brackets; config *dicts* "" ""(e.g. passed to tree.mount) do not."" % ( sn , key ) )","if not isinstance ( app , cherrypy . 
Application ) :",186 12279,"def printErrors(self): # Overridden to avoid unnecessary empty line if self.errors or self.failures: if self.dots or self.showAll: self.stream.writeln() self.printErrorList(""ERROR"", self.errors) self.printErrorList(""FAIL"", self.failures) ","def printErrors ( self ) : if self . errors or self . failures : self . stream . writeln ( ) self . printErrorList ( ""ERROR"" , self . errors ) self . printErrorList ( ""FAIL"" , self . failures )",if self . dots or self . showAll :,79 25421,"def _check_connectivity(self) -> None: """"""Check system connectivity."""""" value = self._cache.get(""connectivity"", 0) # Need only full check if not connected or each 10min if value >= 600: pass elif ( self.sys_supervisor.connectivity and self.sys_host.network.connectivity is None ) or ( self.sys_supervisor.connectivity and self.sys_host.network.connectivity is not None and self.sys_host.network.connectivity ): self._cache[""connectivity""] = value + RUN_CHECK_CONNECTIVITY return # Check connectivity try: await self.sys_supervisor.check_connectivity() if HostFeature.NETWORK in self.sys_host.features: await self.sys_host.network.check_connectivity() finally: self._cache[""connectivity""] = 0","def _check_connectivity ( self ) -> None : """"""Check system connectivity."""""" value = self . _cache . get ( ""connectivity"" , 0 ) if value >= 600 : pass elif ( self . sys_supervisor . connectivity and self . sys_host . network . connectivity is None ) or ( self . sys_supervisor . connectivity and self . sys_host . network . connectivity is not None and self . sys_host . network . connectivity ) : self . _cache [ ""connectivity"" ] = value + RUN_CHECK_CONNECTIVITY return try : await self . sys_supervisor . check_connectivity ( ) await self . sys_host . network . check_connectivity ( ) finally : self . _cache [ ""connectivity"" ] = 0",if HostFeature . NETWORK in self . sys_host . features :,227 4461,"def set_active_tools(tools_to_activate, permanently_activate, system): tools_to_activate = process_tool_list(tools_to_activate, log_errors=True) if tools_to_activate: tools = [x for x in tools_to_activate if not x.is_sdk] print( ""Setting the following tools as active:\n "" + ""\n "".join(map(lambda x: str(x), tools)) ) print("""") generate_dot_emscripten(tools_to_activate) # Construct a .bat script that will be invoked to set env. vars and PATH # We only do this on windows since emsdk.bat is able to modify the # calling shell environment. On other platform `source emsdk_env.sh` is # required. if WINDOWS: # always set local environment variables since permanently activating will only set the registry settings and # will not affect the current session env_vars_to_add = get_env_vars_to_add( tools_to_activate, system, user=permanently_activate ) env_string = construct_env_with_vars(env_vars_to_add) write_set_env_script(env_string) if permanently_activate: win_set_environment_variables( env_vars_to_add, system, user=permanently_activate ) return tools_to_activate ","def set_active_tools ( tools_to_activate , permanently_activate , system ) : tools_to_activate = process_tool_list ( tools_to_activate , log_errors = True ) if tools_to_activate : tools = [ x for x in tools_to_activate if not x . is_sdk ] print ( ""Setting the following tools as active:\n "" + ""\n "" . 
join ( map ( lambda x : str ( x ) , tools ) ) ) print ( """" ) generate_dot_emscripten ( tools_to_activate ) if WINDOWS : env_vars_to_add = get_env_vars_to_add ( tools_to_activate , system , user = permanently_activate ) env_string = construct_env_with_vars ( env_vars_to_add ) write_set_env_script ( env_string ) win_set_environment_variables ( env_vars_to_add , system , user = permanently_activate ) return tools_to_activate",if permanently_activate :,382 14826,"def _getnameinfo(sockaddr, flags=0): host = sockaddr[0] port = sockaddr[1] if len(sockaddr) == 4: scope = sockaddr[3] family = socket.AF_INET6 else: scope = None family = socket.AF_INET tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, socket.SOL_TCP, 0) if len(tuples) > 1: raise socket.error(""sockaddr resolved to multiple addresses"") addr = tuples[0][4][0] if flags & socket.NI_DGRAM: pname = ""udp"" else: pname = ""tcp"" qname = dns.reversename.from_address(addr) if flags & socket.NI_NUMERICHOST == 0: try: answer = _resolver.resolve(qname, ""PTR"") hostname = answer.rrset[0].target.to_text(True) except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): if flags & socket.NI_NAMEREQD: raise socket.gaierror(socket.EAI_NONAME, ""Name or service not known"") hostname = addr if scope is not None: hostname += ""%"" + str(scope) else: hostname = addr if scope is not None: hostname += ""%"" + str(scope) if flags & socket.NI_NUMERICSERV: service = str(port) else: service = socket.getservbyport(port, pname) return (hostname, service)","def _getnameinfo ( sockaddr , flags = 0 ) : host = sockaddr [ 0 ] port = sockaddr [ 1 ] if len ( sockaddr ) == 4 : scope = sockaddr [ 3 ] family = socket . AF_INET6 else : scope = None family = socket . AF_INET tuples = _getaddrinfo ( host , port , family , socket . SOCK_STREAM , socket . SOL_TCP , 0 ) if len ( tuples ) > 1 : raise socket . error ( ""sockaddr resolved to multiple addresses"" ) addr = tuples [ 0 ] [ 4 ] [ 0 ] if flags & socket . NI_DGRAM : pname = ""udp"" else : pname = ""tcp"" qname = dns . reversename . from_address ( addr ) if flags & socket . NI_NUMERICHOST == 0 : try : answer = _resolver . resolve ( qname , ""PTR"" ) hostname = answer . rrset [ 0 ] . target . to_text ( True ) except ( dns . resolver . NXDOMAIN , dns . resolver . NoAnswer ) : raise socket . gaierror ( socket . EAI_NONAME , ""Name or service not known"" ) hostname = addr if scope is not None : hostname += ""%"" + str ( scope ) else : hostname = addr if scope is not None : hostname += ""%"" + str ( scope ) if flags & socket . NI_NUMERICSERV : service = str ( port ) else : service = socket . getservbyport ( port , pname ) return ( hostname , service )",if flags & socket . NI_NAMEREQD :,433 3801,"def parse_many(self, values): for value in values: try: yield self.parse(value) except KeyError: if self._ignore_missing_keys: yield None else: raise ","def parse_many ( self , values ) : for value in values : try : yield self . parse ( value ) except KeyError : yield None else : raise",if self . _ignore_missing_keys :,71 11773,"def __new__(meta, cls_name, bases, cls_dict): func = cls_dict.get(""func"") monad_cls = super(FuncMonadMeta, meta).__new__(meta, cls_name, bases, cls_dict) if func: if type(func) is tuple: functions = func else: functions = (func,) for func in functions: registered_functions[func] = monad_cls return monad_cls ","def __new__ ( meta , cls_name , bases , cls_dict ) : func = cls_dict . get ( ""func"" ) monad_cls = super ( FuncMonadMeta , meta ) . 
__new__ ( meta , cls_name , bases , cls_dict ) if func : functions = func else : functions = ( func , ) for func in functions : registered_functions [ func ] = monad_cls return monad_cls",if type ( func ) is tuple :,126 2205,"def mergeHiLo(self, x_stats): """"""Merge the highs and lows of another accumulator into myself."""""" if x_stats.min is not None: if self.min is None or x_stats.min < self.min: self.min = x_stats.min self.mintime = x_stats.mintime if x_stats.max is not None: if self.max is None or x_stats.max > self.max: self.max = x_stats.max self.maxtime = x_stats.maxtime if x_stats.lasttime is not None: if self.lasttime is None or x_stats.lasttime >= self.lasttime: self.lasttime = x_stats.lasttime self.last = x_stats.last ","def mergeHiLo ( self , x_stats ) : """"""Merge the highs and lows of another accumulator into myself."""""" if x_stats . min is not None : if self . min is None or x_stats . min < self . min : self . min = x_stats . min self . mintime = x_stats . mintime if x_stats . max is not None : self . max = x_stats . max self . maxtime = x_stats . maxtime if x_stats . lasttime is not None : if self . lasttime is None or x_stats . lasttime >= self . lasttime : self . lasttime = x_stats . lasttime self . last = x_stats . last",if self . max is None or x_stats . max > self . max :,215 25479,"def get_attribute_value(self, nodeid, attr): with self._lock: self.logger.debug(""get attr val: %s %s"", nodeid, attr) if nodeid not in self._nodes: dv = ua.DataValue() dv.StatusCode = ua.StatusCode(ua.StatusCodes.BadNodeIdUnknown) return dv node = self._nodes[nodeid] if attr not in node.attributes: dv = ua.DataValue() dv.StatusCode = ua.StatusCode(ua.StatusCodes.BadAttributeIdInvalid) return dv attval = node.attributes[attr] if attval.value_callback: return attval.value_callback() return attval.value ","def get_attribute_value ( self , nodeid , attr ) : with self . _lock : self . logger . debug ( ""get attr val: %s %s"" , nodeid , attr ) if nodeid not in self . _nodes : dv = ua . DataValue ( ) dv . StatusCode = ua . StatusCode ( ua . StatusCodes . BadNodeIdUnknown ) return dv node = self . _nodes [ nodeid ] dv = ua . DataValue ( ) dv . StatusCode = ua . StatusCode ( ua . StatusCodes . BadAttributeIdInvalid ) return dv attval = node . attributes [ attr ] if attval . value_callback : return attval . value_callback ( ) return attval . value",if attr not in node . attributes :,200 1599,"def _eval(self, code, ns, pos): # __traceback_hide__ = True try: try: value = eval(code, self.default_namespace, ns) except SyntaxError as e: raise SyntaxError(""invalid syntax in expression: %s"" % code) return value except: exc_info = sys.exc_info() e = exc_info[1] if getattr(e, ""args"", None): arg0 = e.args[0] else: arg0 = coerce_text(e) e.args = (self._add_line_info(arg0, pos),) if PY3: raise (e) else: raise (exc_info[1], e, exc_info[2])","def _eval ( self , code , ns , pos ) : try : try : value = eval ( code , self . default_namespace , ns ) except SyntaxError as e : raise SyntaxError ( ""invalid syntax in expression: %s"" % code ) return value except : exc_info = sys . exc_info ( ) e = exc_info [ 1 ] arg0 = e . args [ 0 ] else : arg0 = coerce_text ( e ) e . args = ( self . 
_add_line_info ( arg0 , pos ) , ) if PY3 : raise ( e ) else : raise ( exc_info [ 1 ] , e , exc_info [ 2 ] )","if getattr ( e , ""args"" , None ) :",214 10300,"def _build_initiator_target_map(self, target_wwns, connector): """"""Build the target_wwns and the initiator target map."""""" init_targ_map = {} if self._lookup_service: # use FC san lookup to determine which NSPs to use # for the new VLUN. dev_map = self._lookup_service.get_device_mapping_from_network( connector[""wwpns""], target_wwns ) for fabric_name in dev_map: fabric = dev_map[fabric_name] for initiator in fabric[""initiator_port_wwn_list""]: if initiator not in init_targ_map: init_targ_map[initiator] = [] init_targ_map[initiator] += fabric[""target_port_wwn_list""] init_targ_map[initiator] = list(set(init_targ_map[initiator])) else: init_targ_map = dict.fromkeys(connector[""wwpns""], target_wwns) return init_targ_map ","def _build_initiator_target_map ( self , target_wwns , connector ) : """"""Build the target_wwns and the initiator target map."""""" init_targ_map = { } if self . _lookup_service : dev_map = self . _lookup_service . get_device_mapping_from_network ( connector [ ""wwpns"" ] , target_wwns ) for fabric_name in dev_map : fabric = dev_map [ fabric_name ] for initiator in fabric [ ""initiator_port_wwn_list"" ] : init_targ_map [ initiator ] = [ ] init_targ_map [ initiator ] += fabric [ ""target_port_wwn_list"" ] init_targ_map [ initiator ] = list ( set ( init_targ_map [ initiator ] ) ) else : init_targ_map = dict . fromkeys ( connector [ ""wwpns"" ] , target_wwns ) return init_targ_map",if initiator not in init_targ_map :,300 22823,"def scan_options(self): """"""Set all configuration-related ivars."""""" if not self.config_fn: return self.parser = parser = self.create_parser() s = self.get_config_string() self.init_parser(s) if self.files: # files_source = 'command-line' files = self.files elif parser.has_section(""Global""): # files_source = 'config file' files = parser.get(""Global"", ""files"") files = [z.strip() for z in files.split(""\n"") if z.strip()] else: return files2 = [] for z in files: files2.extend(glob.glob(self.finalize(z))) self.files = [z for z in files2 if z and os.path.exists(z)] if ""output_directory"" in parser.options(""Global""): s = parser.get(""Global"", ""output_directory"") output_dir = self.finalize(s) if os.path.exists(output_dir): self.output_directory = output_dir if self.verbose: print(""output directory: %s\n"" % output_dir) else: print(""output directory not found: %s\n"" % output_dir) self.output_directory = None # inhibit run(). if ""prefix_lines"" in parser.options(""Global""): prefix = parser.get(""Global"", ""prefix_lines"") self.prefix_lines = prefix.split(""\n"")","def scan_options ( self ) : """"""Set all configuration-related ivars."""""" if not self . config_fn : return self . parser = parser = self . create_parser ( ) s = self . get_config_string ( ) self . init_parser ( s ) if self . files : files = self . files elif parser . has_section ( ""Global"" ) : files = parser . get ( ""Global"" , ""files"" ) files = [ z . strip ( ) for z in files . split ( ""\n"" ) if z . strip ( ) ] else : return files2 = [ ] for z in files : files2 . extend ( glob . glob ( self . finalize ( z ) ) ) self . files = [ z for z in files2 if z and os . path . exists ( z ) ] if ""output_directory"" in parser . options ( ""Global"" ) : s = parser . get ( ""Global"" , ""output_directory"" ) output_dir = self . finalize ( s ) self . output_directory = output_dir if self . 
verbose : print ( ""output directory: %s\n"" % output_dir ) else : print ( ""output directory not found: %s\n"" % output_dir ) self . output_directory = None if ""prefix_lines"" in parser . options ( ""Global"" ) : prefix = parser . get ( ""Global"" , ""prefix_lines"" ) self . prefix_lines = prefix . split ( ""\n"" )",if os . path . exists ( output_dir ) :,411 9264,"def parse_known_args(self, args=None, namespace=None): # args default to the system args if args is None: args = _sys.argv[1:] # default Namespace built from parser defaults if namespace is None: namespace = Namespace() # add any action defaults that aren't present for action in self._actions: if action.dest is not SUPPRESS: if not hasattr(namespace, action.dest): if action.default is not SUPPRESS: setattr(namespace, action.dest, action.default) # add any parser defaults that aren't present for dest in self._defaults: if not hasattr(namespace, dest): setattr(namespace, dest, self._defaults[dest]) # parse the arguments and exit if there are any errors try: namespace, args = self._parse_known_args(args, namespace) if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR): args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR)) delattr(namespace, _UNRECOGNIZED_ARGS_ATTR) return namespace, args except ArgumentError: err = _sys.exc_info()[1] self.error(str(err))","def parse_known_args ( self , args = None , namespace = None ) : if args is None : args = _sys . argv [ 1 : ] if namespace is None : namespace = Namespace ( ) for action in self . _actions : if action . dest is not SUPPRESS : if action . default is not SUPPRESS : setattr ( namespace , action . dest , action . default ) for dest in self . _defaults : if not hasattr ( namespace , dest ) : setattr ( namespace , dest , self . _defaults [ dest ] ) try : namespace , args = self . _parse_known_args ( args , namespace ) if hasattr ( namespace , _UNRECOGNIZED_ARGS_ATTR ) : args . extend ( getattr ( namespace , _UNRECOGNIZED_ARGS_ATTR ) ) delattr ( namespace , _UNRECOGNIZED_ARGS_ATTR ) return namespace , args except ArgumentError : err = _sys . exc_info ( ) [ 1 ] self . error ( str ( err ) )","if not hasattr ( namespace , action . dest ) :",332 24807,"def test_canonicalise(self): from quodlibet.util.path import normalize_path as norm f, path = tempfile.mkstemp() path = os.path.realpath(path) # on osx tmp is a symlink os.close(f) path = norm(path) link_dir = mkdtemp() link = None if not is_win: link = os.path.join(link_dir, str(uuid.uuid4())) os.symlink(path, link) try: self.failUnlessEqual(norm(path, canonicalise=True), path) self.failUnlessEqual(norm(os.path.join(path, ""foo"", ""..""), True), path) if link: self.failUnlessEqual(norm(link, True), path) # A symlink shouldn't be resolved unless asked for self.failIfEqual(norm(link, False), path) # And the other behaviour should also work unnormalised_path = os.path.join(link, ""foo"", "".."") self.failUnlessEqual(norm(unnormalised_path, True), path) finally: if link: os.remove(link) os.remove(path) os.rmdir(link_dir)","def test_canonicalise ( self ) : from quodlibet . util . path import normalize_path as norm f , path = tempfile . mkstemp ( ) path = os . path . realpath ( path ) os . close ( f ) path = norm ( path ) link_dir = mkdtemp ( ) link = None if not is_win : link = os . path . join ( link_dir , str ( uuid . uuid4 ( ) ) ) os . symlink ( path , link ) try : self . failUnlessEqual ( norm ( path , canonicalise = True ) , path ) self . failUnlessEqual ( norm ( os . path . join ( path , ""foo"" , "".."" ) , True ) , path ) self . 
failUnlessEqual ( norm ( link , True ) , path ) self . failIfEqual ( norm ( link , False ) , path ) unnormalised_path = os . path . join ( link , ""foo"" , "".."" ) self . failUnlessEqual ( norm ( unnormalised_path , True ) , path ) finally : os . remove ( link ) os . remove ( path ) os . rmdir ( link_dir )",if link :,327 17840,"def testLimit(self): ""Verify that CPU limits are within a 2% tolerance of limit for each scheduler"" p = pexpect.spawn(""python -m mininet.examples.limit"") opts = [ ""\*\*\* Testing network ([\d\.]+) Mbps"", ""\*\*\* Results: \[([\d\., ]+)\]"", pexpect.EOF, ] count = 0 bw = 0 tolerance = 2 while True: index = p.expect(opts) if index == 0: bw = float(p.match.group(1)) count += 1 elif index == 1: results = p.match.group(1) for x in results.split("",""): result = float(x) self.assertTrue(result < bw + tolerance) self.assertTrue(result > bw - tolerance) else: break self.assertTrue(count > 0)","def testLimit ( self ) : ""Verify that CPU limits are within a 2% tolerance of limit for each scheduler"" p = pexpect . spawn ( ""python -m mininet.examples.limit"" ) opts = [ ""\*\*\* Testing network ([\d\.]+) Mbps"" , ""\*\*\* Results: \[([\d\., ]+)\]"" , pexpect . EOF , ] count = 0 bw = 0 tolerance = 2 while True : index = p . expect ( opts ) bw = float ( p . match . group ( 1 ) ) count += 1 elif index == 1 : results = p . match . group ( 1 ) for x in results . split ( "","" ) : result = float ( x ) self . assertTrue ( result < bw + tolerance ) self . assertTrue ( result > bw - tolerance ) else : break self . assertTrue ( count > 0 )",if index == 0 :,251 12489,"def _real_extract(self, url): course_name = self._match_id(url) webpage = self._download_webpage(url, course_name) props = self._parse_json( self._search_regex(r""data\s*=\s*({.+?})\s*;"", webpage, ""data""), course_name )[""initialProps""] entries = [] for chapter_num, chapter in enumerate(props[""concepts""], 1): if not isinstance(chapter, dict): continue materials = chapter.get(""materials"") if not materials or not isinstance(materials, list): continue chapter_title = chapter.get(""title"") chapter_id = str_or_none(chapter.get(""id"")) for material in materials: if not isinstance(material, dict): continue if material.get(""material_type"") != ""video"": continue video_url = urljoin(url, material.get(""url"")) if not video_url: continue entries.append( { ""_type"": ""url_transparent"", ""url"": video_url, ""title"": str_or_none(material.get(""name"")), ""id"": str_or_none(material.get(""id"")), ""ie_key"": PlatziIE.ie_key(), ""chapter"": chapter_title, ""chapter_number"": chapter_num, ""chapter_id"": chapter_id, } ) course_id = compat_str(try_get(props, lambda x: x[""course""][""id""])) course_title = try_get(props, lambda x: x[""course""][""name""], compat_str) return self.playlist_result(entries, course_id, course_title)","def _real_extract ( self , url ) : course_name = self . _match_id ( url ) webpage = self . _download_webpage ( url , course_name ) props = self . _parse_json ( self . _search_regex ( r""data\s*=\s*({.+?})\s*;"" , webpage , ""data"" ) , course_name ) [ ""initialProps"" ] entries = [ ] for chapter_num , chapter in enumerate ( props [ ""concepts"" ] , 1 ) : if not isinstance ( chapter , dict ) : continue materials = chapter . get ( ""materials"" ) continue chapter_title = chapter . get ( ""title"" ) chapter_id = str_or_none ( chapter . get ( ""id"" ) ) for material in materials : if not isinstance ( material , dict ) : continue if material . 
get ( ""material_type"" ) != ""video"" : continue video_url = urljoin ( url , material . get ( ""url"" ) ) if not video_url : continue entries . append ( { ""_type"" : ""url_transparent"" , ""url"" : video_url , ""title"" : str_or_none ( material . get ( ""name"" ) ) , ""id"" : str_or_none ( material . get ( ""id"" ) ) , ""ie_key"" : PlatziIE . ie_key ( ) , ""chapter"" : chapter_title , ""chapter_number"" : chapter_num , ""chapter_id"" : chapter_id , } ) course_id = compat_str ( try_get ( props , lambda x : x [ ""course"" ] [ ""id"" ] ) ) course_title = try_get ( props , lambda x : x [ ""course"" ] [ ""name"" ] , compat_str ) return self . playlist_result ( entries , course_id , course_title )","if not materials or not isinstance ( materials , list ) :",505 5985,"def perform_search(self, dir, s=None, start=None, update_search_start=False): self.cancel_highlight() # self.ui.lhs_col.set_focus(self.ui.lhs_col.widget_list[1]) if s is None: s = self.last_search_string if s is None: self.ui.message(""No previous search term."") return False else: self.last_search_string = s if start is None: start = self.search_start case_insensitive = s.lower() == s if start > len(self.ui.source): start = 0 i = (start + dir) % len(self.ui.source) if i >= len(self.ui.source): i = 0 while i != start: sline = self.ui.source[i].text if case_insensitive: sline = sline.lower() if s in sline: sl = self.ui.source[i] sl.set_highlight(True) self.highlight_line = sl self.ui.source.set_focus(i) if update_search_start: self.search_start = i return True i = (i + dir) % len(self.ui.source) return False","def perform_search ( self , dir , s = None , start = None , update_search_start = False ) : self . cancel_highlight ( ) s = self . last_search_string self . ui . message ( ""No previous search term."" ) return False else : self . last_search_string = s if start is None : start = self . search_start case_insensitive = s . lower ( ) == s if start > len ( self . ui . source ) : start = 0 i = ( start + dir ) % len ( self . ui . source ) if i >= len ( self . ui . source ) : i = 0 while i != start : sline = self . ui . source [ i ] . text if case_insensitive : sline = sline . lower ( ) if s in sline : sl = self . ui . source [ i ] sl . set_highlight ( True ) self . highlight_line = sl self . ui . source . set_focus ( i ) if update_search_start : self . search_start = i return True i = ( i + dir ) % len ( self . ui . source ) return False",if s is None :,368 2355,"def acquire(cls, node, floating=None): if isinstance(node, Gaffer.ScriptNode): script = node else: script = node.scriptNode() scriptWindow = GafferUI.ScriptWindow.acquire(script) if floating in (None, False): for editor in scriptWindow.getLayout().editors(type=cls): if node.isSame(editor._lastAddedNode()): editor.reveal() return editor if floating in (None, True): childWindows = scriptWindow.childWindows() for window in childWindows: if isinstance(window, _EditorWindow): if ( isinstance(window.getChild(), cls) and node in window.getChild().getNodeSet() ): window.setVisible(True) return window.getChild() editor = cls(script) editor.setNodeSet(Gaffer.StandardSet([node])) if floating is False: scriptWindow.getLayout().addEditor(editor) else: window = _EditorWindow(scriptWindow, editor) # Ensure keyboard shortcuts are relayed to the main menu bar scriptWindow.menuBar().addShortcutTarget(window) window.setVisible(True) if isinstance(editor, GafferUI.NodeEditor): # The window will have opened at the perfect size for the # contained widgets. 
But some NodeEditors have expanding # sections and buttons to add new widgets, and for that # reason, a minimum height of 400px has been deemed more # suitable. size = window._qtWidget().size() if size.height() < 400: size.setHeight(400) window._qtWidget().resize(size) return editor","def acquire ( cls , node , floating = None ) : if isinstance ( node , Gaffer . ScriptNode ) : script = node else : script = node . scriptNode ( ) scriptWindow = GafferUI . ScriptWindow . acquire ( script ) if floating in ( None , False ) : for editor in scriptWindow . getLayout ( ) . editors ( type = cls ) : if node . isSame ( editor . _lastAddedNode ( ) ) : editor . reveal ( ) return editor if floating in ( None , True ) : childWindows = scriptWindow . childWindows ( ) for window in childWindows : if ( isinstance ( window . getChild ( ) , cls ) and node in window . getChild ( ) . getNodeSet ( ) ) : window . setVisible ( True ) return window . getChild ( ) editor = cls ( script ) editor . setNodeSet ( Gaffer . StandardSet ( [ node ] ) ) if floating is False : scriptWindow . getLayout ( ) . addEditor ( editor ) else : window = _EditorWindow ( scriptWindow , editor ) scriptWindow . menuBar ( ) . addShortcutTarget ( window ) window . setVisible ( True ) if isinstance ( editor , GafferUI . NodeEditor ) : size = window . _qtWidget ( ) . size ( ) if size . height ( ) < 400 : size . setHeight ( 400 ) window . _qtWidget ( ) . resize ( size ) return editor","if isinstance ( window , _EditorWindow ) :",487 9370,"def PyJs_updateScopeInfo_823_(this, arguments, var=var): var = Scope( { u""this"": this, u""arguments"": arguments, u""updateScopeInfo"": PyJs_updateScopeInfo_823_, }, var, ) var.registers([u""letRefs"", u""binding"", u""key"", u""parentScope"", u""scope"", u""ref""]) var.put(u""scope"", var.get(u""this"").get(u""scope"")) var.put(u""parentScope"", var.get(u""scope"").callprop(u""getFunctionParent"")) var.put(u""letRefs"", var.get(u""this"").get(u""letReferences"")) for PyJsTemp in var.get(u""letRefs""): var.put(u""key"", PyJsTemp) var.put(u""ref"", var.get(u""letRefs"").get(var.get(u""key""))) var.put( u""binding"", var.get(u""scope"").callprop(u""getBinding"", var.get(u""ref"").get(u""name"")), ) if var.get(u""binding"").neg(): continue if PyJsStrictEq(var.get(u""binding"").get(u""kind""), Js(u""let"")) or PyJsStrictEq( var.get(u""binding"").get(u""kind""), Js(u""const"") ): var.get(u""binding"").put(u""kind"", Js(u""var"")) var.get(u""scope"").callprop( u""moveBindingTo"", var.get(u""ref"").get(u""name""), var.get(u""parentScope"") )","def PyJs_updateScopeInfo_823_ ( this , arguments , var = var ) : var = Scope ( { u""this"" : this , u""arguments"" : arguments , u""updateScopeInfo"" : PyJs_updateScopeInfo_823_ , } , var , ) var . registers ( [ u""letRefs"" , u""binding"" , u""key"" , u""parentScope"" , u""scope"" , u""ref"" ] ) var . put ( u""scope"" , var . get ( u""this"" ) . get ( u""scope"" ) ) var . put ( u""parentScope"" , var . get ( u""scope"" ) . callprop ( u""getFunctionParent"" ) ) var . put ( u""letRefs"" , var . get ( u""this"" ) . get ( u""letReferences"" ) ) for PyJsTemp in var . get ( u""letRefs"" ) : var . put ( u""key"" , PyJsTemp ) var . put ( u""ref"" , var . get ( u""letRefs"" ) . get ( var . get ( u""key"" ) ) ) var . put ( u""binding"" , var . get ( u""scope"" ) . callprop ( u""getBinding"" , var . get ( u""ref"" ) . get ( u""name"" ) ) , ) continue if PyJsStrictEq ( var . get ( u""binding"" ) . get ( u""kind"" ) , Js ( u""let"" ) ) or PyJsStrictEq ( var . get ( u""binding"" ) . 
get ( u""kind"" ) , Js ( u""const"" ) ) : var . get ( u""binding"" ) . put ( u""kind"" , Js ( u""var"" ) ) var . get ( u""scope"" ) . callprop ( u""moveBindingTo"" , var . get ( u""ref"" ) . get ( u""name"" ) , var . get ( u""parentScope"" ) )","if var . get ( u""binding"" ) . neg ( ) :",473 21783,"def validate_cpu(self, value): for k, v in value.viewitems(): if v is None: # use NoneType to unset a value continue if not re.match(PROCTYPE_MATCH, k): raise serializers.ValidationError(""Process types can only contain [a-z]"") shares = re.match(CPUSHARE_MATCH, str(v)) if not shares: raise serializers.ValidationError(""CPU shares must be an integer"") for v in shares.groupdict().viewvalues(): try: i = int(v) except ValueError: raise serializers.ValidationError(""CPU shares must be an integer"") if i > 1024 or i < 0: raise serializers.ValidationError( ""CPU shares must be between 0 and 1024"" ) return value","def validate_cpu ( self , value ) : for k , v in value . viewitems ( ) : if v is None : continue raise serializers . ValidationError ( ""Process types can only contain [a-z]"" ) shares = re . match ( CPUSHARE_MATCH , str ( v ) ) if not shares : raise serializers . ValidationError ( ""CPU shares must be an integer"" ) for v in shares . groupdict ( ) . viewvalues ( ) : try : i = int ( v ) except ValueError : raise serializers . ValidationError ( ""CPU shares must be an integer"" ) if i > 1024 or i < 0 : raise serializers . ValidationError ( ""CPU shares must be between 0 and 1024"" ) return value","if not re . match ( PROCTYPE_MATCH , k ) :",227 3019,"def tables_size(results): print(""\nSIZE RESULTS\n"") sizes_per_datatype = {} for ser in results: for datatype in results[ser][""sizes""]: size = results[ser][""sizes""][datatype] if datatype not in sizes_per_datatype: sizes_per_datatype[datatype] = [] sizes_per_datatype[datatype].append((size, ser)) sizes_per_datatype = { datatype: sorted(sizes) for datatype, sizes in sizes_per_datatype.items() } for dt in sorted(sizes_per_datatype): print(dt) for pos, (size, serializer) in enumerate(sizes_per_datatype[dt]): if size == no_result: size = ""unsupported"" else: size = ""%8d"" % size print("" %2d: %-8s %s"" % (pos + 1, serializer, size)) print()","def tables_size ( results ) : print ( ""\nSIZE RESULTS\n"" ) sizes_per_datatype = { } for ser in results : for datatype in results [ ser ] [ ""sizes"" ] : size = results [ ser ] [ ""sizes"" ] [ datatype ] if datatype not in sizes_per_datatype : sizes_per_datatype [ datatype ] = [ ] sizes_per_datatype [ datatype ] . append ( ( size , ser ) ) sizes_per_datatype = { datatype : sorted ( sizes ) for datatype , sizes in sizes_per_datatype . 
items ( ) } for dt in sorted ( sizes_per_datatype ) : print ( dt ) for pos , ( size , serializer ) in enumerate ( sizes_per_datatype [ dt ] ) : size = ""unsupported"" else : size = ""%8d"" % size print ( "" %2d: %-8s %s"" % ( pos + 1 , serializer , size ) ) print ( )",if size == no_result :,254 23085,"def _get_sources(self): server_links = { ""mp4upload"": ""https://www.mp4upload.com/embed-{}.html"", ""trollvid"": ""https://trollvid.net/embed/{}"", } resp = helpers.soupify(helpers.get(self.url).text).find_all(""script"") # [-3].string for i in resp: if i.string: if ""sources"" in i.string: res = i.string hosts = json.loads(re.search(r""(\[[^)]+\])"", res).group(1)) logger.debug(""Hosts: {}"".format(hosts)) sources_list = [] for i in hosts: for j in server_links: if i.get(""host"") in j and i.get(""source""): sources_list.append( { ""extractor"": j, ""url"": server_links[j].format(i[""source""]), ""server"": j, ""version"": i[""source""], } ) return self.sort_sources(sources_list)","def _get_sources ( self ) : server_links = { ""mp4upload"" : ""https://www.mp4upload.com/embed-{}.html"" , ""trollvid"" : ""https://trollvid.net/embed/{}"" , } resp = helpers . soupify ( helpers . get ( self . url ) . text ) . find_all ( ""script"" ) for i in resp : if i . string : res = i . string hosts = json . loads ( re . search ( r""(\[[^)]+\])"" , res ) . group ( 1 ) ) logger . debug ( ""Hosts: {}"" . format ( hosts ) ) sources_list = [ ] for i in hosts : for j in server_links : if i . get ( ""host"" ) in j and i . get ( ""source"" ) : sources_list . append ( { ""extractor"" : j , ""url"" : server_links [ j ] . format ( i [ ""source"" ] ) , ""server"" : j , ""version"" : i [ ""source"" ] , } ) return self . sort_sources ( sources_list )","if ""sources"" in i . string :",316 4464,"def get_command(cls): ifconfig_cmd = ""ifconfig"" for path in [""/sbin"", ""/usr/sbin"", ""/bin"", ""/usr/bin""]: if os.path.exists(os.path.join(path, ifconfig_cmd)): ifconfig_cmd = os.path.join(path, ifconfig_cmd) break ifconfig_cmd = ifconfig_cmd + "" -a"" return ifconfig_cmd ","def get_command ( cls ) : ifconfig_cmd = ""ifconfig"" for path in [ ""/sbin"" , ""/usr/sbin"" , ""/bin"" , ""/usr/bin"" ] : if os . path . exists ( os . path . join ( path , ifconfig_cmd ) ) : break ifconfig_cmd = ifconfig_cmd + "" -a"" return ifconfig_cmd","ifconfig_cmd = os . path . join ( path , ifconfig_cmd )",109 13228,"def registerExtensions(self, extensions, configs): if not configs: configs = {} for ext in extensions: extension_module_name = ""mdx_"" + ext try: module = __import__(extension_module_name) except: message( CRITICAL, ""couldn't load extension %s (looking for %s module)"" % (ext, extension_module_name), ) else: if configs.has_key(ext): configs_for_ext = configs[ext] else: configs_for_ext = [] extension = module.makeExtension(configs_for_ext) extension.extendMarkdown(self, globals()) ","def registerExtensions ( self , extensions , configs ) : if not configs : configs = { } for ext in extensions : extension_module_name = ""mdx_"" + ext try : module = __import__ ( extension_module_name ) except : message ( CRITICAL , ""couldn't load extension %s (looking for %s module)"" % ( ext , extension_module_name ) , ) else : configs_for_ext = configs [ ext ] else : configs_for_ext = [ ] extension = module . makeExtension ( configs_for_ext ) extension . extendMarkdown ( self , globals ( ) )",if configs . 
has_key ( ext ) :,206 1054,"def eventloop(self): poll = select.poll() event_read_mask = self.errorevents | self.readevents poll.register(self.serversock.fileno()) poll.register(self.readpipe, event_read_mask) breakout = False self.running = True self.logger.debug(""Starting thread event loop"") while not breakout: events = poll.poll() for event in events: # An error occurred, bail out if event[1] & self.errorevents: raise Exception(self.stringify_event(event[1])) # Event to stop the thread if self.readpipe == event[0]: self.logger.debug(""Stop event received"") breakout = True break # A connection request was received elif self.serversock.fileno() == event[0]: self.logger.debug(""Connection request received"") self.readsock, _ = self.serversock.accept() self.readsock.setblocking(0) poll.unregister(self.serversock.fileno()) poll.register(self.readsock.fileno(), event_read_mask) self.logger.debug(""Setting connection established event"") self.connection_established.set() # Actual data to be logged elif self.readsock.fileno() == event[0]: data = self.recv(1024) self.logfunc(data)","def eventloop ( self ) : poll = select . poll ( ) event_read_mask = self . errorevents | self . readevents poll . register ( self . serversock . fileno ( ) ) poll . register ( self . readpipe , event_read_mask ) breakout = False self . running = True self . logger . debug ( ""Starting thread event loop"" ) while not breakout : events = poll . poll ( ) for event in events : if event [ 1 ] & self . errorevents : raise Exception ( self . stringify_event ( event [ 1 ] ) ) if self . readpipe == event [ 0 ] : self . logger . debug ( ""Stop event received"" ) breakout = True break self . logger . debug ( ""Connection request received"" ) self . readsock , _ = self . serversock . accept ( ) self . readsock . setblocking ( 0 ) poll . unregister ( self . serversock . fileno ( ) ) poll . register ( self . readsock . fileno ( ) , event_read_mask ) self . logger . debug ( ""Setting connection established event"" ) self . connection_established . set ( ) elif self . readsock . fileno ( ) == event [ 0 ] : data = self . recv ( 1024 ) self . logfunc ( data )",elif self . serversock . fileno ( ) == event [ 0 ] :,414 9012,"def _list_item_sub(self, match): item = match.group(4) leading_line = match.group(1) if leading_line or ""\n\n"" in item or self._last_li_endswith_two_eols: item = self._run_block_gamut(self._outdent(item)) else: # Recursion for sub-lists: item = self._do_lists(self._outdent(item)) if item.endswith(""\n""): item = item[:-1] item = self._run_span_gamut(item) self._last_li_endswith_two_eols = len(match.group(5)) == 2 if ""task_list"" in self.extras: item = self._task_list_item_re.sub(self._task_list_item_sub, item) return ""
<li>%s</li>\n"" % item","def _list_item_sub ( self , match ) : item = match . group ( 4 ) leading_line = match . group ( 1 ) if leading_line or ""\n\n"" in item or self . _last_li_endswith_two_eols : item = self . _run_block_gamut ( self . _outdent ( item ) ) else : item = self . _do_lists ( self . _outdent ( item ) ) item = item [ : - 1 ] item = self . _run_span_gamut ( item ) self . _last_li_endswith_two_eols = len ( match . group ( 5 ) ) == 2 if ""task_list"" in self . extras : item = self . _task_list_item_re . sub ( self . _task_list_item_sub , item ) return ""<li>%s</li>\n"" % item","if item . endswith ( ""\n"" ) :",234 25023,"def update(self, x, who=None, metadata=None): self._retain_refs(metadata) y = self._get_key(x) if self.keep == ""last"": # remove key if already present so that emitted value # will reflect elements' actual relative ordering self._buffer.pop(y, None) self._metadata_buffer.pop(y, None) self._buffer[y] = x self._metadata_buffer[y] = metadata else: # self.keep == ""first"" if y not in self._buffer: self._buffer[y] = x self._metadata_buffer[y] = metadata return self.last ","def update ( self , x , who = None , metadata = None ) : self . _retain_refs ( metadata ) y = self . _get_key ( x ) if self . keep == ""last"" : self . _buffer . pop ( y , None ) self . _metadata_buffer . pop ( y , None ) self . _buffer [ y ] = x self . _metadata_buffer [ y ] = metadata else : self . _buffer [ y ] = x self . _metadata_buffer [ y ] = metadata return self . last",if y not in self . _buffer :,180 12101,"def _GetValue(value_pb): """"""Gets the value from the value_pb."""""" if value_pb.type() in _PROTO_FIELDS_STRING_VALUE: if value_pb.has_string_value(): return value_pb.string_value() return None if value_pb.type() == document_pb.FieldValue.DATE: if value_pb.has_string_value(): return search_util.DeserializeDate(value_pb.string_value()) return None if value_pb.type() == document_pb.FieldValue.NUMBER: if value_pb.has_string_value(): return float(value_pb.string_value()) return None if value_pb.type() == document_pb.FieldValue.GEO: if value_pb.has_geo(): geo_pb = value_pb.geo() return GeoPoint(latitude=geo_pb.lat(), longitude=geo_pb.lng()) return None raise TypeError(""unknown FieldValue type %d"" % value_pb.type()) ","def _GetValue ( value_pb ) : """"""Gets the value from the value_pb."""""" if value_pb . type ( ) in _PROTO_FIELDS_STRING_VALUE : return value_pb . string_value ( ) return None if value_pb . type ( ) == document_pb . FieldValue . DATE : return search_util . DeserializeDate ( value_pb . string_value ( ) ) return None if value_pb . type ( ) == document_pb . FieldValue . NUMBER : return float ( value_pb . string_value ( ) ) return None if value_pb . type ( ) == document_pb . FieldValue . GEO : if value_pb . has_geo ( ) : geo_pb = value_pb . geo ( ) return GeoPoint ( latitude = geo_pb . lat ( ) , longitude = geo_pb . lng ( ) ) return None raise TypeError ( ""unknown FieldValue type %d"" % value_pb . type ( ) )",if value_pb . has_string_value ( ) :,281 292,"def forms_list(self, trans, payload=None, **kwd): message = kwd.get(""message"", """") status = kwd.get(""status"", """") if ""operation"" in kwd: id = kwd.get(""id"") if not id: return self.message_exception( trans, ""Invalid form id (%s) received."" % str(id) ) ids = util.listify(id) operation = kwd[""operation""].lower() if operation == ""delete"": message, status = self._delete_form(trans, ids) elif operation == ""undelete"": message, status = self._undelete_form(trans, ids) if message and status: kwd[""message""] = util.sanitize_text(message) kwd[""status""] = status return self.forms_grid(trans, **kwd) ","def forms_list ( self , trans , payload = None , ** kwd ) : message = kwd . get ( ""message"" , """" ) status = kwd . get ( ""status"" , """" ) if ""operation"" in kwd : id = kwd . get ( ""id"" ) if not id : return self . message_exception ( trans , ""Invalid form id (%s) received."" % str ( id ) ) ids = util . listify ( id ) operation = kwd [ ""operation"" ] . lower ( ) if operation == ""delete"" : message , status = self . _delete_form ( trans , ids ) message , status = self . 
_undelete_form ( trans , ids ) if message and status : kwd [ ""message"" ] = util . sanitize_text ( message ) kwd [ ""status"" ] = status return self . forms_grid ( trans , ** kwd )","elif operation == ""undelete"" :",228 22381,"def update_obj(obj, new, fatal_errors=True): elems = list(obj.keys()) if ""Conf"" in elems: elems.remove(""Conf"") elems.insert(0, ""Conf"") if ""Env"" in elems: elems.remove(""Env"") obj[""Env""].update(new[""Env""]) if ""Hist"" in elems: elems.remove(""Hist"") obj[""Hist""] += new[""Hist""] for elem in elems: if isinstance(obj[elem], dict): for key, value in new[elem].items(): try: obj[elem][key] = value except Exception as error: item_repr = ""session.%s.%s"" % (elem, key) msg_prefix = ""[-] Couldn't set %s"" % item_repr if fatal_errors: print(""%s:"" % msg_prefix) raise else: print(""%s: %s"" % (msg_prefix, error)) else: obj[elem] = new[elem] return obj","def update_obj ( obj , new , fatal_errors = True ) : elems = list ( obj . keys ( ) ) if ""Conf"" in elems : elems . remove ( ""Conf"" ) elems . insert ( 0 , ""Conf"" ) if ""Env"" in elems : elems . remove ( ""Env"" ) obj [ ""Env"" ] . update ( new [ ""Env"" ] ) if ""Hist"" in elems : elems . remove ( ""Hist"" ) obj [ ""Hist"" ] += new [ ""Hist"" ] for elem in elems : for key , value in new [ elem ] . items ( ) : try : obj [ elem ] [ key ] = value except Exception as error : item_repr = ""session.%s.%s"" % ( elem , key ) msg_prefix = ""[-] Couldn't set %s"" % item_repr if fatal_errors : print ( ""%s:"" % msg_prefix ) raise else : print ( ""%s: %s"" % ( msg_prefix , error ) ) else : obj [ elem ] = new [ elem ] return obj","if isinstance ( obj [ elem ] , dict ) :",315 9926,"def _process_rtdest(self): LOG.debug(""Processing RT NLRI destination..."") if self._rtdest_queue.is_empty(): return else: processed_any = False while not self._rtdest_queue.is_empty(): # We process the first destination in the queue. next_dest = self._rtdest_queue.pop_first() if next_dest: next_dest.process() processed_any = True if processed_any: # Since RT destination were updated we update RT filters self._core_service.update_rtfilters() ","def _process_rtdest ( self ) : LOG . debug ( ""Processing RT NLRI destination..."" ) if self . _rtdest_queue . is_empty ( ) : return else : processed_any = False while not self . _rtdest_queue . is_empty ( ) : next_dest = self . _rtdest_queue . pop_first ( ) next_dest . process ( ) processed_any = True if processed_any : self . _core_service . update_rtfilters ( )",if next_dest :,171 9070,"def display_my_sessions_view(): placeholder_images = DataGetter.get_event_default_images() custom_placeholder = DataGetter.get_custom_placeholders() upcoming_events_sessions = DataGetter.get_sessions_of_user(upcoming_events=True) im_config = DataGetter.get_image_configs() im_size = """" for config in im_config: if config.page == ""mysession"": im_size = config.size past_events_sessions = DataGetter.get_sessions_of_user(upcoming_events=False) page_content = { ""tab_upcoming_events"": ""Upcoming Sessions"", ""tab_past_events"": ""Past Sessions"", ""title"": ""My Session Proposals"", } if not AuthManager.is_verified_user(): flash( Markup( ""Your account is unverified. "" ""Please verify by clicking on the confirmation link that has been emailed to you."" '
    Did not get the email? Please ' ""click here to resend the confirmation."" ) ) return render_template( ""gentelella/users/mysessions/mysessions_list.html"", upcoming_events_sessions=upcoming_events_sessions, past_events_sessions=past_events_sessions, page_content=page_content, placeholder_images=placeholder_images, custom_placeholder=custom_placeholder, im_size=im_size, ) ","def display_my_sessions_view ( ) : placeholder_images = DataGetter . get_event_default_images ( ) custom_placeholder = DataGetter . get_custom_placeholders ( ) upcoming_events_sessions = DataGetter . get_sessions_of_user ( upcoming_events = True ) im_config = DataGetter . get_image_configs ( ) im_size = """" for config in im_config : im_size = config . size past_events_sessions = DataGetter . get_sessions_of_user ( upcoming_events = False ) page_content = { ""tab_upcoming_events"" : ""Upcoming Sessions"" , ""tab_past_events"" : ""Past Sessions"" , ""title"" : ""My Session Proposals"" , } if not AuthManager . is_verified_user ( ) : flash ( Markup ( ""Your account is unverified. "" ""Please verify by clicking on the confirmation link that has been emailed to you."" '
    Did not get the email? Please ' ""click here to resend the confirmation."" ) ) return render_template ( ""gentelella/users/mysessions/mysessions_list.html"" , upcoming_events_sessions = upcoming_events_sessions , past_events_sessions = past_events_sessions , page_content = page_content , placeholder_images = placeholder_images , custom_placeholder = custom_placeholder , im_size = im_size , )","if config . page == ""mysession"" :",426 7511,"def get_engine(user, engine=""solr"", facet=None, source=""data"", cluster='""""'): if isinstance(engine, dict): if source == ""data"": source = engine.get(""source"") engine = engine.get(""engine"", ""solr"") if engine == ""report"" and facet: engine = facet[""properties""].get(""engine"") if engine != ""solr"": if engine == ""impala"": from impala.dashboard_api import ImpalaDashboardApi return ImpalaDashboardApi(user, engine, source=source, cluster=cluster) elif engine == ""hive"": from beeswax.dashboard_api import HiveDashboardApi return HiveDashboardApi(user, engine, source=source, cluster=cluster) else: from notebook.dashboard_api import SQLDashboardApi return SQLDashboardApi(user, engine, source=source, cluster=cluster) else: from search.dashboard_api import SearchApi # Could add source to Solr at some point, to behave like a 'view', but need state in query history or URL params return SearchApi(user, cluster) ","def get_engine ( user , engine = ""solr"" , facet = None , source = ""data"" , cluster = '""""' ) : if isinstance ( engine , dict ) : if source == ""data"" : source = engine . get ( ""source"" ) engine = engine . get ( ""engine"" , ""solr"" ) if engine == ""report"" and facet : engine = facet [ ""properties"" ] . get ( ""engine"" ) if engine != ""solr"" : from impala . dashboard_api import ImpalaDashboardApi return ImpalaDashboardApi ( user , engine , source = source , cluster = cluster ) elif engine == ""hive"" : from beeswax . dashboard_api import HiveDashboardApi return HiveDashboardApi ( user , engine , source = source , cluster = cluster ) else : from notebook . dashboard_api import SQLDashboardApi return SQLDashboardApi ( user , engine , source = source , cluster = cluster ) else : from search . dashboard_api import SearchApi return SearchApi ( user , cluster )","if engine == ""impala"" :",297 9712,"def printHexFormat(data, addr, nocolor=False): for i in range((int(len(data) / 16)) + 1): part = data[i * 16 : i * 16 + 16] bytes = cstr("""") c = 0 for j in range(0, len(part), 2): if j == len(part) - 1: bytes += cstr( (""%.2x "" % tuple(part[j : j + 1])), Color.WHITE if c % 2 else Color.LIGHT_GRAY, ) else: bytes += cstr( (""%.2x%.2x "" % tuple(part[j : j + 2])), Color.WHITE if c % 2 else Color.LIGHT_GRAY, ) c += 1 string = """" if nocolor: if len(bytes) < 40: bytes += "" "" * (40 - len(bytes)) else: if len(bytes) < 227: bytes += "" "" * ((8 - int(len(bytes) / 29)) * 5) for b in part: if b < 32 or b > 126: string += ""."" else: string += chr(b) bytes += "" "" + cstr(string, Color.BLUE) print(cstr(toHex(addr + i * 16), Color.RED) + "": "" + bytes)","def printHexFormat ( data , addr , nocolor = False ) : for i in range ( ( int ( len ( data ) / 16 ) ) + 1 ) : part = data [ i * 16 : i * 16 + 16 ] bytes = cstr ( """" ) c = 0 for j in range ( 0 , len ( part ) , 2 ) : if j == len ( part ) - 1 : bytes += cstr ( ( ""%.2x "" % tuple ( part [ j : j + 1 ] ) ) , Color . WHITE if c % 2 else Color . LIGHT_GRAY , ) else : bytes += cstr ( ( ""%.2x%.2x "" % tuple ( part [ j : j + 2 ] ) ) , Color . WHITE if c % 2 else Color . 
LIGHT_GRAY , ) c += 1 string = """" if nocolor : if len ( bytes ) < 40 : bytes += "" "" * ( 40 - len ( bytes ) ) else : if len ( bytes ) < 227 : bytes += "" "" * ( ( 8 - int ( len ( bytes ) / 29 ) ) * 5 ) for b in part : string += ""."" else : string += chr ( b ) bytes += "" "" + cstr ( string , Color . BLUE ) print ( cstr ( toHex ( addr + i * 16 ) , Color . RED ) + "": "" + bytes )",if b < 32 or b > 126 :,400 21378,"def __iter__(self): for name, value in self.__class__.__dict__.items(): if isinstance(value, alias_flag_value): continue if isinstance(value, flag_value): yield (name, self._has_flag(value.flag)) ","def __iter__ ( self ) : for name , value in self . __class__ . __dict__ . items ( ) : if isinstance ( value , alias_flag_value ) : continue yield ( name , self . _has_flag ( value . flag ) )","if isinstance ( value , flag_value ) :",71 11499,"def _read_allele_freq_table(f): line = f.readline() while "" --"" not in line: if line == """": raise StopIteration if ""No data"" in line: return None, None line = f.readline() alleles = [x for x in f.readline().rstrip().split("" "") if x != """"] alleles = [_gp_int(x) for x in alleles] line = f.readline().rstrip() table = [] while line != """": parts = [x for x in line.split("" "") if x != """"] try: table.append( (parts[0], [_gp_float(x) for x in parts[1:-1]], _gp_int(parts[-1])) ) except ValueError: table.append((parts[0], [None] * len(alleles), 0)) line = f.readline().rstrip() return alleles, table","def _read_allele_freq_table ( f ) : line = f . readline ( ) while "" --"" not in line : raise StopIteration if ""No data"" in line : return None , None line = f . readline ( ) alleles = [ x for x in f . readline ( ) . rstrip ( ) . split ( "" "" ) if x != """" ] alleles = [ _gp_int ( x ) for x in alleles ] line = f . readline ( ) . rstrip ( ) table = [ ] while line != """" : parts = [ x for x in line . split ( "" "" ) if x != """" ] try : table . append ( ( parts [ 0 ] , [ _gp_float ( x ) for x in parts [ 1 : - 1 ] ] , _gp_int ( parts [ - 1 ] ) ) ) except ValueError : table . append ( ( parts [ 0 ] , [ None ] * len ( alleles ) , 0 ) ) line = f . readline ( ) . rstrip ( ) return alleles , table","if line == """" :",254 19291,"def check_require(require_modules, require_lines): for require_module in require_modules: st = try_import(require_module) if st == 0: continue elif st == 1: print( ""installed {}: {}\n"".format( require_module, require_lines[require_module] ) ) elif st == 2: print( ""failed installed {}: {}\n"".format( require_module, require_lines[require_module] ) )","def check_require ( require_modules , require_lines ) : for require_module in require_modules : st = try_import ( require_module ) if st == 0 : continue elif st == 1 : print ( ""installed {}: {}\n"" . format ( require_module , require_lines [ require_module ] ) ) print ( ""failed installed {}: {}\n"" . format ( require_module , require_lines [ require_module ] ) )",elif st == 2 :,164 3456,"def prefixed(self, prefix: _StrType) -> typing.Iterator[""Env""]: """"""Context manager for parsing envvars with a common prefix."""""" try: old_prefix = self._prefix if old_prefix is None: self._prefix = prefix else: self._prefix = f""{old_prefix}{prefix}"" yield self finally: # explicitly reset the stored prefix on completion and exceptions self._prefix = None self._prefix = old_prefix ","def prefixed ( self , prefix : _StrType ) -> typing . Iterator [ ""Env"" ] : """"""Context manager for parsing envvars with a common prefix."""""" try : old_prefix = self . _prefix self . 
_prefix = prefix else : self . _prefix = f""{old_prefix}{prefix}"" yield self finally : self . _prefix = None self . _prefix = old_prefix",if old_prefix is None :,126 19151,"def compute_up(expr, data, **kwargs): if isinstance(expr.slice, _inttypes): idx = expr.slice + 1 if idx < 1: # SQL string indexing is 1-based and positive. msg = ""Index {} out-of-bounds for SQL string indexing."" raise IndexError(msg.format(expr.slice)) args = idx, 1 elif isinstance(expr.slice, tuple): start, stop, step = expr.slice if step is not None: msg = ""step value {} not valid for SQL string indexing."" raise ValueError(msg.format(step)) norm_start = start if isinstance(start, _inttypes) else 0 if norm_start < 0: msg = ""Negative indexing not valid for SQL strings; given {}."" raise ValueError(msg.format(norm_start)) if isinstance(stop, _inttypes): if stop < 0: msg = ""Negative indexing not valid for SQL strings; given {}."" raise ValueError(msg.format(stop)) args = norm_start + 1, (stop - norm_start) elif stop is None: args = (norm_start + 1,) return sa.sql.func.substring(data, *args)","def compute_up ( expr , data , ** kwargs ) : if isinstance ( expr . slice , _inttypes ) : idx = expr . slice + 1 if idx < 1 : msg = ""Index {} out-of-bounds for SQL string indexing."" raise IndexError ( msg . format ( expr . slice ) ) args = idx , 1 elif isinstance ( expr . slice , tuple ) : start , stop , step = expr . slice if step is not None : msg = ""step value {} not valid for SQL string indexing."" raise ValueError ( msg . format ( step ) ) norm_start = start if isinstance ( start , _inttypes ) else 0 if norm_start < 0 : msg = ""Negative indexing not valid for SQL strings; given {}."" raise ValueError ( msg . format ( norm_start ) ) if isinstance ( stop , _inttypes ) : if stop < 0 : msg = ""Negative indexing not valid for SQL strings; given {}."" raise ValueError ( msg . format ( stop ) ) args = norm_start + 1 , ( stop - norm_start ) args = ( norm_start + 1 , ) return sa . sql . func . substring ( data , * args )",elif stop is None :,333 7243,"def handle_read(self, socket_): try: data, (addr, port) = socket_.recvfrom(_MAX_MSG_ABSOLUTE) except Exception: self.log_exception_warning() return log.debug(""Received from %r:%r: %r "", addr, port, data) self.data = data msg = DNSIncoming(data) if not msg.valid: pass elif msg.is_query(): # Always multicast responses if port == _MDNS_PORT: self.zc.handle_query(msg, _MDNS_ADDR, _MDNS_PORT) # If it's not a multicast query, reply via unicast # and multicast elif port == _DNS_PORT: self.zc.handle_query(msg, addr, port) self.zc.handle_query(msg, _MDNS_ADDR, _MDNS_PORT) else: self.zc.handle_response(msg) ","def handle_read ( self , socket_ ) : try : data , ( addr , port ) = socket_ . recvfrom ( _MAX_MSG_ABSOLUTE ) except Exception : self . log_exception_warning ( ) return log . debug ( ""Received from %r:%r: %r "" , addr , port , data ) self . data = data msg = DNSIncoming ( data ) if not msg . valid : pass elif msg . is_query ( ) : if port == _MDNS_PORT : self . zc . handle_query ( msg , _MDNS_ADDR , _MDNS_PORT ) self . zc . handle_query ( msg , addr , port ) self . zc . handle_query ( msg , _MDNS_ADDR , _MDNS_PORT ) else : self . zc . 
handle_response ( msg )",elif port == _DNS_PORT :,261 11508,"def get_schema(form_fields): attrs = {} for field in form_fields: if field.type in [""text"", ""checkbox"", ""select""]: field_type = marshmallow.fields.Str elif field.type == ""email"": field_type = marshmallow.fields.Email elif field.type == ""number"": field_type = marshmallow.fields.Float else: raise UnprocessableEntityError( {""pointer"": ""/data/complex-field-values/"" + field.identifier}, ""Invalid Field Type: "" + field.type, ) attrs[field.identifier] = field_type(required=field.is_required) return type(""DynamicSchema"", (marshmallow.Schema,), attrs) ","def get_schema ( form_fields ) : attrs = { } for field in form_fields : field_type = marshmallow . fields . Str elif field . type == ""email"" : field_type = marshmallow . fields . Email elif field . type == ""number"" : field_type = marshmallow . fields . Float else : raise UnprocessableEntityError ( { ""pointer"" : ""/data/complex-field-values/"" + field . identifier } , ""Invalid Field Type: "" + field . type , ) attrs [ field . identifier ] = field_type ( required = field . is_required ) return type ( ""DynamicSchema"" , ( marshmallow . Schema , ) , attrs )","if field . type in [ ""text"" , ""checkbox"" , ""select"" ] :",206 10045,"def __init__(self, app): self._credential = app.credential db_url = app.options.get(""databaseURL"") if db_url: self._db_url = db_url else: self._db_url = None auth_override = _DatabaseService._get_auth_override(app) if auth_override not in (self._DEFAULT_AUTH_OVERRIDE, {}): self._auth_override = json.dumps(auth_override, separators=("","", "":"")) else: self._auth_override = None self._timeout = app.options.get(""httpTimeout"", _http_client.DEFAULT_TIMEOUT_SECONDS) self._clients = {} emulator_host = os.environ.get(_EMULATOR_HOST_ENV_VAR) if emulator_host: if ""//"" in emulator_host: raise ValueError( 'Invalid {0}: ""{1}"". It must follow format ""host:port"".'.format( _EMULATOR_HOST_ENV_VAR, emulator_host ) ) self._emulator_host = emulator_host else: self._emulator_host = None","def __init__ ( self , app ) : self . _credential = app . credential db_url = app . options . get ( ""databaseURL"" ) if db_url : self . _db_url = db_url else : self . _db_url = None auth_override = _DatabaseService . _get_auth_override ( app ) if auth_override not in ( self . _DEFAULT_AUTH_OVERRIDE , { } ) : self . _auth_override = json . dumps ( auth_override , separators = ( "","" , "":"" ) ) else : self . _auth_override = None self . _timeout = app . options . get ( ""httpTimeout"" , _http_client . DEFAULT_TIMEOUT_SECONDS ) self . _clients = { } emulator_host = os . environ . get ( _EMULATOR_HOST_ENV_VAR ) if emulator_host : raise ValueError ( 'Invalid {0}: ""{1}"". It must follow format ""host:port"".' . format ( _EMULATOR_HOST_ENV_VAR , emulator_host ) ) self . _emulator_host = emulator_host else : self . _emulator_host = None","if ""//"" in emulator_host :",299 2444,"def platformGetMaps(self): maps = [] address = ctypes.c_ulong(0) mapsize = ctypes.c_ulong(0) name = ctypes.c_uint32(0) count = ctypes.c_uint32(VM_REGION_BASIC_INFO_COUNT_64) info = vm_region_basic_info_64() while True: r = self.libc.mach_vm_region( self.task, addrof(address), addrof(mapsize), VM_REGION_BASIC_INFO_64, addrof(info), addrof(count), addrof(name), ) # If we get told ""invalid address"", we have crossed into kernel land... 
if r == 1: break if r != 0: self.libc.mach_error(""mach_vm_region"", r) raise Exception(""vm_region Failed for 0x%.8x: 0x%.8x"" % (address.value, r)) perms = 0 p = info.protection if p & VM_PROT_READ: perms |= e_mem.MM_READ if p & VM_PROT_WRITE: perms |= e_mem.MM_WRITE if p & VM_PROT_EXECUTE: perms |= e_mem.MM_EXEC if info.shared: perms |= e_mem.MM_SHARED # If we got any perms, report the map if perms: maps.append((address.value, mapsize.value, perms, """")) address.value += mapsize.value return maps","def platformGetMaps ( self ) : maps = [ ] address = ctypes . c_ulong ( 0 ) mapsize = ctypes . c_ulong ( 0 ) name = ctypes . c_uint32 ( 0 ) count = ctypes . c_uint32 ( VM_REGION_BASIC_INFO_COUNT_64 ) info = vm_region_basic_info_64 ( ) while True : r = self . libc . mach_vm_region ( self . task , addrof ( address ) , addrof ( mapsize ) , VM_REGION_BASIC_INFO_64 , addrof ( info ) , addrof ( count ) , addrof ( name ) , ) if r == 1 : break if r != 0 : self . libc . mach_error ( ""mach_vm_region"" , r ) raise Exception ( ""vm_region Failed for 0x%.8x: 0x%.8x"" % ( address . value , r ) ) perms = 0 p = info . protection if p & VM_PROT_READ : perms |= e_mem . MM_READ if p & VM_PROT_WRITE : perms |= e_mem . MM_WRITE if p & VM_PROT_EXECUTE : perms |= e_mem . MM_EXEC if info . shared : perms |= e_mem . MM_SHARED maps . append ( ( address . value , mapsize . value , perms , """" ) ) address . value += mapsize . value return maps",if perms :,454 11450,"def _set_qresult_hits(qresult, hit_rows=()): """"""Append Hits without alignments into QueryResults (PRIVATE)."""""" for hit_row in hit_rows: hit_id, remainder = hit_row.split("" "", 1) # TODO: parse hit and hsp properties properly; by dealing with: # - any character in the description (brackets, spaces, etc.) # - possible [f] or [r] presence (for frame info) # - possible presence of E2() column # - possible incomplete hit_id due to column length limit # The current method only looks at the Hit ID, none of the things above if hit_id not in qresult: frag = HSPFragment(hit_id, qresult.id) hsp = HSP([frag]) hit = Hit([hsp]) qresult.append(hit) return qresult","def _set_qresult_hits ( qresult , hit_rows = ( ) ) : """"""Append Hits without alignments into QueryResults (PRIVATE)."""""" for hit_row in hit_rows : hit_id , remainder = hit_row . split ( "" "" , 1 ) frag = HSPFragment ( hit_id , qresult . id ) hsp = HSP ( [ frag ] ) hit = Hit ( [ hsp ] ) qresult . append ( hit ) return qresult",if hit_id not in qresult :,240 17646,"def process_ifconfig_nodes(app: Sphinx, doctree: nodes.document, docname: str) -> None: ns = {confval.name: confval.value for confval in app.config} ns.update(app.config.__dict__.copy()) ns[""builder""] = app.builder.name for node in doctree.traverse(ifconfig): try: res = eval(node[""expr""], ns) except Exception as err: # handle exceptions in a clean fashion from traceback import format_exception_only msg = """".join(format_exception_only(err.__class__, err)) newnode = doctree.reporter.error( ""Exception occurred in "" ""ifconfig expression: \n%s"" % msg, base_node=node, ) node.replace_self(newnode) else: if not res: node.replace_self([]) else: node.replace_self(node.children) ","def process_ifconfig_nodes ( app : Sphinx , doctree : nodes . document , docname : str ) -> None : ns = { confval . name : confval . value for confval in app . config } ns . update ( app . config . __dict__ . copy ( ) ) ns [ ""builder"" ] = app . builder . name for node in doctree . 
traverse ( ifconfig ) : try : res = eval ( node [ ""expr"" ] , ns ) except Exception as err : from traceback import format_exception_only msg = """" . join ( format_exception_only ( err . __class__ , err ) ) newnode = doctree . reporter . error ( ""Exception occurred in "" ""ifconfig expression: \n%s"" % msg , base_node = node , ) node . replace_self ( newnode ) else : node . replace_self ( [ ] ) else : node . replace_self ( node . children )",if not res :,268 19476,"def explain(self, other, depth=0): exp = super(UnionType, self).explain(other, depth) for ndx, subtype in enumerate(self.params[""allowed_types""]): if ndx > 0: exp += ""\n{}and"".format("""".join([""\t""] * depth)) exp += ""\n"" + subtype.explain(other, depth=depth + 1) return exp","def explain ( self , other , depth = 0 ) : exp = super ( UnionType , self ) . explain ( other , depth ) for ndx , subtype in enumerate ( self . params [ ""allowed_types"" ] ) : exp += ""\n{}and"" . format ( """" . join ( [ ""\t"" ] * depth ) ) exp += ""\n"" + subtype . explain ( other , depth = depth + 1 ) return exp",if ndx > 0 :,101 1968,"def convert_with_key(self, key, value, replace=True): result = self.configurator.convert(value) # If the converted value is different, save for next time if value is not result: if replace: self[key] = result if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self result.key = key return result ","def convert_with_key ( self , key , value , replace = True ) : result = self . configurator . convert ( value ) if value is not result : self [ key ] = result if type ( result ) in ( ConvertingDict , ConvertingList , ConvertingTuple ) : result . parent = self result . key = key return result",if replace :,111 9842,"def OnLeftUp(self, event): btnpos = self.GetButtonsPos() btnsize = self.GetButtonsSize() if self.HasCapture(): self.ReleaseMouse() for btn in range(2): if self.HitTest(btnpos[btn], event.GetPosition(), btnsize[btn]): if btn == 0: if self.searchButtonPressed: self.searchButtonPressed = False self.Refresh() self.SetFocus() wx.PostEvent(self, SearchButton()) if btn == 1: if self.cancelButtonPressed: self.cancelButtonPressed = False self.Refresh() self.SetFocus() wx.PostEvent(self, CancelButton()) else: if btn == 0: if self.searchButtonPressed: self.searchButtonPressed = False self.Refresh() if btn == 1: if self.cancelButtonPressed: self.cancelButtonPressed = False self.Refresh()","def OnLeftUp ( self , event ) : btnpos = self . GetButtonsPos ( ) btnsize = self . GetButtonsSize ( ) if self . HasCapture ( ) : self . ReleaseMouse ( ) for btn in range ( 2 ) : if self . HitTest ( btnpos [ btn ] , event . GetPosition ( ) , btnsize [ btn ] ) : if btn == 0 : self . searchButtonPressed = False self . Refresh ( ) self . SetFocus ( ) wx . PostEvent ( self , SearchButton ( ) ) if btn == 1 : if self . cancelButtonPressed : self . cancelButtonPressed = False self . Refresh ( ) self . SetFocus ( ) wx . PostEvent ( self , CancelButton ( ) ) else : if btn == 0 : self . searchButtonPressed = False self . Refresh ( ) if btn == 1 : if self . cancelButtonPressed : self . cancelButtonPressed = False self . Refresh ( )",if self . 
searchButtonPressed :,319 20992,"def get_boarding_status(project): status = ""Pending"" if project: doc = frappe.get_doc(""Project"", project) if flt(doc.percent_complete) > 0.0 and flt(doc.percent_complete) < 100.0: status = ""In Process"" elif flt(doc.percent_complete) == 100.0: status = ""Completed"" return status","def get_boarding_status ( project ) : status = ""Pending"" if project : doc = frappe . get_doc ( ""Project"" , project ) status = ""In Process"" elif flt ( doc . percent_complete ) == 100.0 : status = ""Completed"" return status",if flt ( doc . percent_complete ) > 0.0 and flt ( doc . percent_complete ) < 100.0 :,116 14999,"def replace_all(self, event=None): prog = self.engine.getprog() if not prog: return repl = self.replvar.get() text = self.text res = self.engine.search_text(text, prog) if not res: text.bell() return text.tag_remove(""sel"", ""1.0"", ""end"") text.tag_remove(""hit"", ""1.0"", ""end"") line = res[0] col = res[1].start() if self.engine.iswrap(): line = 1 col = 0 ok = 1 first = last = None # XXX ought to replace circular instead of top-to-bottom when wrapping text.undo_block_start() while 1: res = self.engine.search_forward(text, prog, line, col, 0, ok) if not res: break line, m = res chars = text.get(""%d.0"" % line, ""%d.0"" % (line + 1)) orig = m.group() new = self._replace_expand(m, repl) if new is None: break i, j = m.span() first = ""%d.%d"" % (line, i) last = ""%d.%d"" % (line, j) if new == orig: text.mark_set(""insert"", last) else: text.mark_set(""insert"", first) if first != last: text.delete(first, last) if new: text.insert(first, new) col = i + len(new) ok = 0 text.undo_block_stop() if first and last: self.show_hit(first, last)","def replace_all ( self , event = None ) : prog = self . engine . getprog ( ) if not prog : return repl = self . replvar . get ( ) text = self . text res = self . engine . search_text ( text , prog ) if not res : text . bell ( ) return text . tag_remove ( ""sel"" , ""1.0"" , ""end"" ) text . tag_remove ( ""hit"" , ""1.0"" , ""end"" ) line = res [ 0 ] col = res [ 1 ] . start ( ) if self . engine . iswrap ( ) : line = 1 col = 0 ok = 1 first = last = None text . undo_block_start ( ) while 1 : res = self . engine . search_forward ( text , prog , line , col , 0 , ok ) if not res : break line , m = res chars = text . get ( ""%d.0"" % line , ""%d.0"" % ( line + 1 ) ) orig = m . group ( ) new = self . _replace_expand ( m , repl ) break i , j = m . span ( ) first = ""%d.%d"" % ( line , i ) last = ""%d.%d"" % ( line , j ) if new == orig : text . mark_set ( ""insert"" , last ) else : text . mark_set ( ""insert"" , first ) if first != last : text . delete ( first , last ) if new : text . insert ( first , new ) col = i + len ( new ) ok = 0 text . undo_block_stop ( ) if first and last : self . show_hit ( first , last )",if new is None :,487 9255,"def normalize_host(host): """"""Normalize a host string."""""" if misc.IPv6_MATCHER.match(host): percent = host.find(""%"") if percent != -1: percent_25 = host.find(""%25"") # Replace RFC 4007 IPv6 Zone ID delimiter '%' with '%25' # from RFC 6874. If the host is '[%25]' then we # assume RFC 4007 and normalize to '[%2525]' if ( percent_25 == -1 or percent < percent_25 or (percent == percent_25 and percent_25 == len(host) - 4) ): host = host.replace(""%"", ""%25"", 1) # Don't normalize the casing of the Zone ID return host[:percent].lower() + host[percent:] return host.lower()","def normalize_host ( host ) : """"""Normalize a host string."""""" if misc . IPv6_MATCHER . 
match ( host ) : percent = host . find ( ""%"" ) percent_25 = host . find ( ""%25"" ) if ( percent_25 == - 1 or percent < percent_25 or ( percent == percent_25 and percent_25 == len ( host ) - 4 ) ) : host = host . replace ( ""%"" , ""%25"" , 1 ) return host [ : percent ] . lower ( ) + host [ percent : ] return host . lower ( )",if percent != - 1 :,239 12186,"def get_indexes(self, cursor, table_name): indexes = {} for info in self._table_info(cursor, table_name): if info[""pk""] != 0: indexes[info[""name""]] = {""primary_key"": True, ""unique"": False} cursor.execute(""PRAGMA index_list(%s)"" % self.connection.ops.quote_name(table_name)) # seq, name, unique for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]: cursor.execute(""PRAGMA index_info(%s)"" % self.connection.ops.quote_name(index)) info = cursor.fetchall() # Skip indexes across multiple fields if len(info) != 1: continue name = info[0][2] # seqno, cid, name indexes[name] = {""primary_key"": False, ""unique"": unique} return indexes","def get_indexes ( self , cursor , table_name ) : indexes = { } for info in self . _table_info ( cursor , table_name ) : if info [ ""pk"" ] != 0 : indexes [ info [ ""name"" ] ] = { ""primary_key"" : True , ""unique"" : False } cursor . execute ( ""PRAGMA index_list(%s)"" % self . connection . ops . quote_name ( table_name ) ) for index , unique in [ ( field [ 1 ] , field [ 2 ] ) for field in cursor . fetchall ( ) ] : cursor . execute ( ""PRAGMA index_info(%s)"" % self . connection . ops . quote_name ( index ) ) info = cursor . fetchall ( ) continue name = info [ 0 ] [ 2 ] indexes [ name ] = { ""primary_key"" : False , ""unique"" : unique } return indexes",if len ( info ) != 1 :,233 18947,"def __init__(self, parent, name, description=None): FieldSet.__init__(self, parent, name, description) self._size = (self[""size""].value + 3 * 4) * 8 if MAX_CHUNK_SIZE < (self._size // 8): raise ParserError(""PNG: Chunk is too big (%s)"" % humanFilesize(self._size // 8)) tag = self[""tag""].value self.desc_func = None self.value_func = None if tag in self.TAG_INFO: self._name, self.parse_func, desc, value_func = self.TAG_INFO[tag] if value_func: self.value_func = value_func self.createValue = self.createValueFunc if desc: if isinstance(desc, str): self._description = desc else: self.desc_func = desc else: self._description = """" self.parse_func = None","def __init__ ( self , parent , name , description = None ) : FieldSet . __init__ ( self , parent , name , description ) self . _size = ( self [ ""size"" ] . value + 3 * 4 ) * 8 if MAX_CHUNK_SIZE < ( self . _size // 8 ) : raise ParserError ( ""PNG: Chunk is too big (%s)"" % humanFilesize ( self . _size // 8 ) ) tag = self [ ""tag"" ] . value self . desc_func = None self . value_func = None if tag in self . TAG_INFO : self . _name , self . parse_func , desc , value_func = self . TAG_INFO [ tag ] if value_func : self . value_func = value_func self . createValue = self . createValueFunc if isinstance ( desc , str ) : self . _description = desc else : self . desc_func = desc else : self . _description = """" self . 
parse_func = None",if desc :,258 18955,"def extract(self, mp3): if ""/frames/frame[0]"" in mp3: frame = mp3[""/frames/frame[0]""] self.nb_channel = (frame.getNbChannel(), frame[""channel_mode""].display) self.format_version = u""MPEG version %s layer %s"" % ( frame[""version""].display, frame[""layer""].display, ) self.sample_rate = frame.getSampleRate() self.bits_per_sample = 16 if mp3[""frames""].looksConstantBitRate(): self.computeBitrate(frame) else: self.computeVariableBitrate(mp3) if ""id3v1"" in mp3: id3 = mp3[""id3v1""] self.comment = id3[""comment""].value self.author = id3[""author""].value self.title = id3[""song""].value self.album = id3[""album""].value if id3[""year""].value != ""0"": self.creation_date = id3[""year""].value if ""track_nb"" in id3: self.track_number = id3[""track_nb""].value if ""id3v2"" in mp3: self.readID3v2(mp3[""id3v2""]) if ""frames"" in mp3: computeComprRate(self, mp3[""frames""].size)","def extract ( self , mp3 ) : if ""/frames/frame[0]"" in mp3 : frame = mp3 [ ""/frames/frame[0]"" ] self . nb_channel = ( frame . getNbChannel ( ) , frame [ ""channel_mode"" ] . display ) self . format_version = u""MPEG version %s layer %s"" % ( frame [ ""version"" ] . display , frame [ ""layer"" ] . display , ) self . sample_rate = frame . getSampleRate ( ) self . bits_per_sample = 16 if mp3 [ ""frames"" ] . looksConstantBitRate ( ) : self . computeBitrate ( frame ) else : self . computeVariableBitrate ( mp3 ) if ""id3v1"" in mp3 : id3 = mp3 [ ""id3v1"" ] self . comment = id3 [ ""comment"" ] . value self . author = id3 [ ""author"" ] . value self . title = id3 [ ""song"" ] . value self . album = id3 [ ""album"" ] . value self . creation_date = id3 [ ""year"" ] . value if ""track_nb"" in id3 : self . track_number = id3 [ ""track_nb"" ] . value if ""id3v2"" in mp3 : self . readID3v2 ( mp3 [ ""id3v2"" ] ) if ""frames"" in mp3 : computeComprRate ( self , mp3 [ ""frames"" ] . size )","if id3 [ ""year"" ] . value != ""0"" :",378 293,"def tool_lineages(self, trans): rval = [] for id, tool in self.app.toolbox.tools(): if hasattr(tool, ""lineage""): lineage_dict = tool.lineage.to_dict() else: lineage_dict = None entry = dict(id=id, lineage=lineage_dict) rval.append(entry) return rval ","def tool_lineages ( self , trans ) : rval = [ ] for id , tool in self . app . toolbox . tools ( ) : lineage_dict = tool . lineage . to_dict ( ) else : lineage_dict = None entry = dict ( id = id , lineage = lineage_dict ) rval . 
append ( entry ) return rval","if hasattr ( tool , ""lineage"" ) :",102 2465,"def _div(self, op, isInvalid=None): oper = op.opers[0] divbase = self.getOperObj(op, 0) if isInvalid is None: limit = (2 ** (oper.tsize * 8)) - 1 isInvalid = lambda val: val > limit if oper.tsize == 1: ax = self.getRegObj(e_i386.REG_AX) quot = ax / divbase rem = ax % divbase if quot.isDiscrete() and isInvalid(quot): raise envi.DivideError(""i386 #DE"") self.effSetVariable(""eax"", (rem << 8) + quot) elif oper.tsize == 2: ax = self.getRegObj(e_i386.REG_AX) dx = self.getRegObj(e_i386.REG_DX) tot = (edx << Const(16, self._psize)) + eax quot = tot / divbase rem = tot % divbase if quot.isDiscrete() and isInvalid(quot): raise envi.DivideError(""i386 #DE"") self.effSetVariable(""eax"", quot) self.effSetVariable(""edx"", rem) elif oper.tsize == 4: eax = Var(""eax"", self._psize) edx = Var(""edx"", self._psize) tot = (edx << Const(32, self._psize)) + eax quot = tot / divbase rem = tot % divbase if quot.isDiscrete() and isInvalid(quot): raise envi.DivideError(""i386 #DE"") self.effSetVariable(""eax"", quot) self.effSetVariable(""edx"", rem) # FIXME maybe we need a ""check exception"" effect? else: raise envi.UnsupportedInstruction(self, op)","def _div ( self , op , isInvalid = None ) : oper = op . opers [ 0 ] divbase = self . getOperObj ( op , 0 ) if isInvalid is None : limit = ( 2 ** ( oper . tsize * 8 ) ) - 1 isInvalid = lambda val : val > limit if oper . tsize == 1 : ax = self . getRegObj ( e_i386 . REG_AX ) quot = ax / divbase rem = ax % divbase raise envi . DivideError ( ""i386 #DE"" ) self . effSetVariable ( ""eax"" , ( rem << 8 ) + quot ) elif oper . tsize == 2 : ax = self . getRegObj ( e_i386 . REG_AX ) dx = self . getRegObj ( e_i386 . REG_DX ) tot = ( edx << Const ( 16 , self . _psize ) ) + eax quot = tot / divbase rem = tot % divbase raise envi . DivideError ( ""i386 #DE"" ) self . effSetVariable ( ""eax"" , quot ) self . effSetVariable ( ""edx"" , rem ) elif oper . tsize == 4 : eax = Var ( ""eax"" , self . _psize ) edx = Var ( ""edx"" , self . _psize ) tot = ( edx << Const ( 32 , self . _psize ) ) + eax quot = tot / divbase rem = tot % divbase raise envi . DivideError ( ""i386 #DE"" ) self . effSetVariable ( ""eax"" , quot ) self . effSetVariable ( ""edx"" , rem ) else : raise envi . UnsupportedInstruction ( self , op )",if quot . isDiscrete ( ) and isInvalid ( quot ) :,499 13417,"def batch_slice(data, batch_size, sort=True): batch_num = int(np.ceil(len(data) / float(batch_size))) for i in range(batch_num): cur_batch_size = batch_size if i < batch_num - 1 else len(data) - batch_size * i src_sents = [data[i * batch_size + b][0] for b in range(cur_batch_size)] tgt_sents = [data[i * batch_size + b][1] for b in range(cur_batch_size)] if sort: src_ids = sorted( range(cur_batch_size), key=lambda src_id: len(src_sents[src_id]), reverse=True, ) src_sents = [src_sents[src_id] for src_id in src_ids] tgt_sents = [tgt_sents[src_id] for src_id in src_ids] yield src_sents, tgt_sents","def batch_slice ( data , batch_size , sort = True ) : batch_num = int ( np . 
ceil ( len ( data ) / float ( batch_size ) ) ) for i in range ( batch_num ) : cur_batch_size = batch_size if i < batch_num - 1 else len ( data ) - batch_size * i src_sents = [ data [ i * batch_size + b ] [ 0 ] for b in range ( cur_batch_size ) ] tgt_sents = [ data [ i * batch_size + b ] [ 1 ] for b in range ( cur_batch_size ) ] src_ids = sorted ( range ( cur_batch_size ) , key = lambda src_id : len ( src_sents [ src_id ] ) , reverse = True , ) src_sents = [ src_sents [ src_id ] for src_id in src_ids ] tgt_sents = [ tgt_sents [ src_id ] for src_id in src_ids ] yield src_sents , tgt_sents",if sort :,271 8265,"def serialize_to_cmessage(self): # pylint:disable=no-member # delayed import from ...engines.light import SpOffset # pylint:disable=import-outside-toplevel cmsg = self._get_cmsg() if self.memory_data is not None: # determine target_type from memory_data.sort if self.memory_data.sort == MemoryDataSort.CodeReference: cmsg.target_type = primitives_pb2.CodeReference.CodeTarget else: cmsg.target_type = primitives_pb2.CodeReference.DataTarget cmsg.location = primitives_pb2.CodeReference.Internal cmsg.data_ea = self.memory_data.addr elif self.dst is not None: if isinstance(self.dst, SpOffset): cmsg.target_type = primitives_pb2.CodeReference.StackTarget cmsg.data_ea = self.dst.offset else: cmsg.data_ea = self.dst else: # Unknown... why? cmsg.data_ea = -1 if self.insn_op_idx is None: cmsg.operand_idx = -1 else: cmsg.operand_idx = self.insn_op_idx cmsg.ea = self.ins_addr cmsg.block_ea = self.block_addr cmsg.stmt_idx = self.stmt_idx cmsg.ref_type = self.type return cmsg","def serialize_to_cmessage ( self ) : from ... engines . light import SpOffset cmsg = self . _get_cmsg ( ) if self . memory_data is not None : cmsg . target_type = primitives_pb2 . CodeReference . CodeTarget else : cmsg . target_type = primitives_pb2 . CodeReference . DataTarget cmsg . location = primitives_pb2 . CodeReference . Internal cmsg . data_ea = self . memory_data . addr elif self . dst is not None : if isinstance ( self . dst , SpOffset ) : cmsg . target_type = primitives_pb2 . CodeReference . StackTarget cmsg . data_ea = self . dst . offset else : cmsg . data_ea = self . dst else : cmsg . data_ea = - 1 if self . insn_op_idx is None : cmsg . operand_idx = - 1 else : cmsg . operand_idx = self . insn_op_idx cmsg . ea = self . ins_addr cmsg . block_ea = self . block_addr cmsg . stmt_idx = self . stmt_idx cmsg . ref_type = self . type return cmsg",if self . memory_data . sort == MemoryDataSort . CodeReference :,394 16936,"def _find_key_in_yaml_file( yaml_file_path, search_keys, full_key_name, value_is_relative_path ): """"""Find a key in a yaml file."""""" if not os.path.isfile(yaml_file_path): return None result = _load_yaml_file(yaml_file_path) if not search_keys: # Give the entire yaml file contents. # |value_is_relative_path| is not applicable here. return result for search_key in search_keys: if not isinstance(result, dict): raise errors.InvalidConfigKey(full_key_name) if search_key not in result: return None result = result[search_key] if value_is_relative_path: yaml_directory = os.path.dirname(yaml_file_path) if isinstance(result, list): result = [os.path.join(yaml_directory, str(i)) for i in result] else: result = os.path.join(yaml_directory, str(result)) return result ","def _find_key_in_yaml_file ( yaml_file_path , search_keys , full_key_name , value_is_relative_path ) : """"""Find a key in a yaml file."""""" if not os . path . 
isfile ( yaml_file_path ) : return None result = _load_yaml_file ( yaml_file_path ) if not search_keys : return result for search_key in search_keys : raise errors . InvalidConfigKey ( full_key_name ) if search_key not in result : return None result = result [ search_key ] if value_is_relative_path : yaml_directory = os . path . dirname ( yaml_file_path ) if isinstance ( result , list ) : result = [ os . path . join ( yaml_directory , str ( i ) ) for i in result ] else : result = os . path . join ( yaml_directory , str ( result ) ) return result","if not isinstance ( result , dict ) :",292 5883,"def call(self, inputs, state): """""" """""" (c_prev, m_prev) = state self._batch_size = inputs.shape[0].value or array_ops.shape(inputs)[0] scope = vs.get_variable_scope() with vs.variable_scope(scope, initializer=self._initializer): x = array_ops.concat([inputs, m_prev], axis=1) with vs.variable_scope(""first_gemm""): if self._linear1 is None: # no bias for bottleneck self._linear1 = _Linear(x, self._fact_size, False) R_fact = self._linear1(x) with vs.variable_scope(""second_gemm""): if self._linear2 is None: self._linear2 = _Linear(R_fact, 4 * self._num_units, True) R = self._linear2(R_fact) i, j, f, o = array_ops.split(R, 4, 1) c = math_ops.sigmoid(f + self._forget_bias) * c_prev + math_ops.sigmoid( i ) * math_ops.tanh(j) m = math_ops.sigmoid(o) * self._activation(c) if self._num_proj is not None: with vs.variable_scope(""projection""): if self._linear3 is None: self._linear3 = _Linear(m, self._num_proj, False) m = self._linear3(m) new_state = rnn_cell_impl.LSTMStateTuple(c, m) return m, new_state","def call ( self , inputs , state ) : """""" """""" ( c_prev , m_prev ) = state self . _batch_size = inputs . shape [ 0 ] . value or array_ops . shape ( inputs ) [ 0 ] scope = vs . get_variable_scope ( ) with vs . variable_scope ( scope , initializer = self . _initializer ) : x = array_ops . concat ( [ inputs , m_prev ] , axis = 1 ) with vs . variable_scope ( ""first_gemm"" ) : if self . _linear1 is None : self . _linear1 = _Linear ( x , self . _fact_size , False ) R_fact = self . _linear1 ( x ) with vs . variable_scope ( ""second_gemm"" ) : if self . _linear2 is None : self . _linear2 = _Linear ( R_fact , 4 * self . _num_units , True ) R = self . _linear2 ( R_fact ) i , j , f , o = array_ops . split ( R , 4 , 1 ) c = math_ops . sigmoid ( f + self . _forget_bias ) * c_prev + math_ops . sigmoid ( i ) * math_ops . tanh ( j ) m = math_ops . sigmoid ( o ) * self . _activation ( c ) if self . _num_proj is not None : with vs . variable_scope ( ""projection"" ) : self . _linear3 = _Linear ( m , self . _num_proj , False ) m = self . _linear3 ( m ) new_state = rnn_cell_impl . LSTMStateTuple ( c , m ) return m , new_state",if self . 
_linear3 is None :,438 8910,"def log(self, level, msg, *args, **kw): if args: if kw: raise TypeError(""You may give positional or keyword arguments, not both"") args = args or kw rendered = None for consumer_level, consumer in self.consumers: if self.level_matches(level, consumer_level): if self.in_progress_hanging and consumer in (sys.stdout, sys.stderr): self.in_progress_hanging = False print("""") sys.stdout.flush() if rendered is None: if args: rendered = msg % args else: rendered = msg rendered = "" "" * self.indent + rendered if hasattr(consumer, ""write""): consumer.write(rendered + ""\n"") else: consumer(rendered) ","def log ( self , level , msg , * args , ** kw ) : if args : if kw : raise TypeError ( ""You may give positional or keyword arguments, not both"" ) args = args or kw rendered = None for consumer_level , consumer in self . consumers : if self . level_matches ( level , consumer_level ) : self . in_progress_hanging = False print ( """" ) sys . stdout . flush ( ) if rendered is None : if args : rendered = msg % args else : rendered = msg rendered = "" "" * self . indent + rendered if hasattr ( consumer , ""write"" ) : consumer . write ( rendered + ""\n"" ) else : consumer ( rendered )","if self . in_progress_hanging and consumer in ( sys . stdout , sys . stderr ) :",252 17587,"def remove_data_directory(self): self.set_role(""uninitialized"") logger.info(""Removing data directory: %s"", self._data_dir) try: if os.path.islink(self._data_dir): os.unlink(self._data_dir) elif not os.path.exists(self._data_dir): return elif os.path.isfile(self._data_dir): os.remove(self._data_dir) elif os.path.isdir(self._data_dir): # let's see if wal directory is a symlink, in this case we # should clean the target for pg_wal_realpath in self.pg_wal_realpath().values(): logger.info(""Removing WAL directory: %s"", pg_wal_realpath) shutil.rmtree(pg_wal_realpath) # Remove user defined tablespace directories for pg_tsp_rpath in self.pg_tblspc_realpaths().values(): logger.info( ""Removing user defined tablespace directory: %s"", pg_tsp_rpath ) shutil.rmtree(pg_tsp_rpath, ignore_errors=True) shutil.rmtree(self._data_dir) except (IOError, OSError): logger.exception(""Could not remove data directory %s"", self._data_dir) self.move_data_directory() ","def remove_data_directory ( self ) : self . set_role ( ""uninitialized"" ) logger . info ( ""Removing data directory: %s"" , self . _data_dir ) try : if os . path . islink ( self . _data_dir ) : os . unlink ( self . _data_dir ) elif not os . path . exists ( self . _data_dir ) : return elif os . path . isfile ( self . _data_dir ) : os . remove ( self . _data_dir ) for pg_wal_realpath in self . pg_wal_realpath ( ) . values ( ) : logger . info ( ""Removing WAL directory: %s"" , pg_wal_realpath ) shutil . rmtree ( pg_wal_realpath ) for pg_tsp_rpath in self . pg_tblspc_realpaths ( ) . values ( ) : logger . info ( ""Removing user defined tablespace directory: %s"" , pg_tsp_rpath ) shutil . rmtree ( pg_tsp_rpath , ignore_errors = True ) shutil . rmtree ( self . _data_dir ) except ( IOError , OSError ) : logger . exception ( ""Could not remove data directory %s"" , self . _data_dir ) self . move_data_directory ( )",elif os . path . isdir ( self . 
_data_dir ) :,383 14364,"def _simple_interactive_update(self): while True: stale_packages = [] stale = partial = False for info in sorted(getattr(self._ds, ""packages"")(), key=str): if self._ds.status(info) == self._ds.STALE: stale_packages.append((info.id, info.name)) print() if stale_packages: print(""Will update following packages (o=ok; x=cancel)"") for pid, pname in stale_packages: name = textwrap.fill( ""-"" * 27 + (pname), 75, subsequent_indent=27 * "" "" )[27:] print("" [ ] %s %s"" % (pid.ljust(20, "".""), name)) print() user_input = unicode(input("" Identifier> "")) if user_input.lower() == ""o"": for pid, pname in stale_packages: try: self._ds.download(pid, prefix="" "") except (IOError, ValueError) as e: print(e) break elif user_input.lower() in (""x"", ""q"", """"): return else: print(""Nothing to update."") return","def _simple_interactive_update ( self ) : while True : stale_packages = [ ] stale = partial = False for info in sorted ( getattr ( self . _ds , ""packages"" ) ( ) , key = str ) : if self . _ds . status ( info ) == self . _ds . STALE : stale_packages . append ( ( info . id , info . name ) ) print ( ) print ( ""Will update following packages (o=ok; x=cancel)"" ) for pid , pname in stale_packages : name = textwrap . fill ( ""-"" * 27 + ( pname ) , 75 , subsequent_indent = 27 * "" "" ) [ 27 : ] print ( "" [ ] %s %s"" % ( pid . ljust ( 20 , ""."" ) , name ) ) print ( ) user_input = unicode ( input ( "" Identifier> "" ) ) if user_input . lower ( ) == ""o"" : for pid , pname in stale_packages : try : self . _ds . download ( pid , prefix = "" "" ) except ( IOError , ValueError ) as e : print ( e ) break elif user_input . lower ( ) in ( ""x"" , ""q"" , """" ) : return else : print ( ""Nothing to update."" ) return",if stale_packages :,361 12768,"def deploy_arm_template_at_subscription_scope( cmd, template_file=None, template_uri=None, parameters=None, deployment_name=None, deployment_location=None, no_wait=False, handle_extended_json_format=None, no_prompt=False, confirm_with_what_if=None, what_if_result_format=None, what_if_exclude_change_types=None, template_spec=None, query_string=None, ): if confirm_with_what_if: what_if_deploy_arm_template_at_subscription_scope( cmd, template_file=template_file, template_uri=template_uri, parameters=parameters, deployment_name=deployment_name, deployment_location=deployment_location, result_format=what_if_result_format, exclude_change_types=what_if_exclude_change_types, no_prompt=no_prompt, template_spec=template_spec, query_string=query_string, ) from knack.prompting import prompt_y_n if not prompt_y_n(""\nAre you sure you want to execute the deployment?""): return None return _deploy_arm_template_at_subscription_scope( cmd=cmd, template_file=template_file, template_uri=template_uri, parameters=parameters, deployment_name=deployment_name, deployment_location=deployment_location, validate_only=False, no_wait=no_wait, no_prompt=no_prompt, template_spec=template_spec, query_string=query_string, ) ","def deploy_arm_template_at_subscription_scope ( cmd , template_file = None , template_uri = None , parameters = None , deployment_name = None , deployment_location = None , no_wait = False , handle_extended_json_format = None , no_prompt = False , confirm_with_what_if = None , what_if_result_format = None , what_if_exclude_change_types = None , template_spec = None , query_string = None , ) : if confirm_with_what_if : what_if_deploy_arm_template_at_subscription_scope ( cmd , template_file = template_file , template_uri = template_uri , 
parameters = parameters , deployment_name = deployment_name , deployment_location = deployment_location , result_format = what_if_result_format , exclude_change_types = what_if_exclude_change_types , no_prompt = no_prompt , template_spec = template_spec , query_string = query_string , ) from knack . prompting import prompt_y_n return None return _deploy_arm_template_at_subscription_scope ( cmd = cmd , template_file = template_file , template_uri = template_uri , parameters = parameters , deployment_name = deployment_name , deployment_location = deployment_location , validate_only = False , no_wait = no_wait , no_prompt = no_prompt , template_spec = template_spec , query_string = query_string , )","if not prompt_y_n ( ""\nAre you sure you want to execute the deployment?"" ) :",488 19862,"def readchunk(self, inode, index, chunkopflags=0): cnt = 0 while True: cnt += 1 if self.version < (3, 0, 4): ans = self.sendAndReceive(CLTOMA_FUSE_READ_CHUNK, inode, index) else: ans = self.sendAndReceive( CLTOMA_FUSE_READ_CHUNK, inode, index, uint8(chunkopflags) ) n = len(ans) if n == 1: from .utils import Error err = ord(ans) if err == ERROR_LOCKED: if cnt < 100: time.sleep(0.1) continue logger.warning(""Waited too long for locked chunk %s:%s"", inode, index) raise Error(ord(ans)) if n < 20: raise Exception(""read chunk invalid length: %s(expected 20 above)"" % n) if self.version >= (3, 0, 10): assert (n - 21) % 14 == 0, n protocolid, length, id_, version = unpack(""BQQI"", ans) return Chunk(id_, length, version, ans[21:], ele_width=14) elif self.version >= (1, 7, 32): assert (n - 21) % 10 == 0, n protocolid, length, id_, version = unpack(""BQQI"", ans) return Chunk(id_, length, version, ans[21:], ele_width=10) else: assert (n - 20) % 6 == 0, n length, id_, version = unpack(""QQI"", ans) return Chunk(id_, length, version, ans[20:])","def readchunk ( self , inode , index , chunkopflags = 0 ) : cnt = 0 while True : cnt += 1 if self . version < ( 3 , 0 , 4 ) : ans = self . sendAndReceive ( CLTOMA_FUSE_READ_CHUNK , inode , index ) else : ans = self . sendAndReceive ( CLTOMA_FUSE_READ_CHUNK , inode , index , uint8 ( chunkopflags ) ) n = len ( ans ) from . utils import Error err = ord ( ans ) if err == ERROR_LOCKED : if cnt < 100 : time . sleep ( 0.1 ) continue logger . warning ( ""Waited too long for locked chunk %s:%s"" , inode , index ) raise Error ( ord ( ans ) ) if n < 20 : raise Exception ( ""read chunk invalid length: %s(expected 20 above)"" % n ) if self . version >= ( 3 , 0 , 10 ) : assert ( n - 21 ) % 14 == 0 , n protocolid , length , id_ , version = unpack ( ""BQQI"" , ans ) return Chunk ( id_ , length , version , ans [ 21 : ] , ele_width = 14 ) elif self . version >= ( 1 , 7 , 32 ) : assert ( n - 21 ) % 10 == 0 , n protocolid , length , id_ , version = unpack ( ""BQQI"" , ans ) return Chunk ( id_ , length , version , ans [ 21 : ] , ele_width = 10 ) else : assert ( n - 20 ) % 6 == 0 , n length , id_ , version = unpack ( ""QQI"" , ans ) return Chunk ( id_ , length , version , ans [ 20 : ] )",if n == 1 :,478 1828,"def tearDown(self): """"""Shutdown the UDP server."""""" try: if self.server: self.server.stop(2.0) if self.sock_hdlr: self.root_logger.removeHandler(self.sock_hdlr) self.sock_hdlr.close() finally: BaseTest.tearDown(self)","def tearDown ( self ) : """"""Shutdown the UDP server."""""" try : if self . server : self . server . stop ( 2.0 ) self . root_logger . removeHandler ( self . sock_hdlr ) self . sock_hdlr . close ( ) finally : BaseTest . tearDown ( self )",if self . 
sock_hdlr :,97 15544,"def labels_to_inputs(self, labels, converter): inputs = [] for label_arr in labels: input_ = np.zeros( (len(label_arr), converter.input_depth), converter.input_dtype ) for i, l in enumerate(label_arr): if l == converter.end_token: input_[i, -2] = 1 elif l == 0: input_[i, -1] = 1 else: j = 0 while l: input_[i, j] = l % 2 l >>= 1 j += 1 assert np.any(input_[i]), label_arr.astype(np.int) inputs.append(input_) return inputs","def labels_to_inputs ( self , labels , converter ) : inputs = [ ] for label_arr in labels : input_ = np . zeros ( ( len ( label_arr ) , converter . input_depth ) , converter . input_dtype ) for i , l in enumerate ( label_arr ) : if l == converter . end_token : input_ [ i , - 2 ] = 1 input_ [ i , - 1 ] = 1 else : j = 0 while l : input_ [ i , j ] = l % 2 l >>= 1 j += 1 assert np . any ( input_ [ i ] ) , label_arr . astype ( np . int ) inputs . append ( input_ ) return inputs",elif l == 0 :,221 17609,"def package_files(self): seen_package_directories = () directories = self.distribution.package_dir or {} empty_directory_exists = """" in directories packages = self.distribution.packages or [] for package in packages: if package in directories: package_directory = directories[package] elif empty_directory_exists: package_directory = os.path.join(directories[""""], package) else: package_directory = package if not package_directory.startswith(seen_package_directories): seen_package_directories += (package_directory + ""."",) yield package_directory ","def package_files ( self ) : seen_package_directories = ( ) directories = self . distribution . package_dir or { } empty_directory_exists = """" in directories packages = self . distribution . packages or [ ] for package in packages : if package in directories : package_directory = directories [ package ] elif empty_directory_exists : package_directory = os . path . join ( directories [ """" ] , package ) else : package_directory = package seen_package_directories += ( package_directory + ""."" , ) yield package_directory",if not package_directory . startswith ( seen_package_directories ) :,164 14852,"def _resolve( self, debug: bool, silent: bool, level: Optional[int], spinner ) -> Optional[bool]: if silent: logger.debug( ""next iteration"", extra=dict( layers=len(self.graph._layers), mutations=self.mutator.mutations, ), ) else: spinner.text = ""layers: {layers}, mutations: {mutations}"".format( layers=len(self.graph._layers), mutations=self.mutator.mutations, ) # get not applied deps deps = self.graph.get_leafs(level=level) # if we already build deps for all nodes in graph if not deps: return True # check python version for dep in deps: if not dep.python_compat: self.graph.conflict = dep return False no_conflicts = self._apply_deps(deps, debug=debug) if no_conflicts: return None # if we have conflict, try to mutate graph groups = self.mutator.mutate(self.graph) # if cannot mutate if groups is None: return False self.graph.conflict = None # apply mutation for group in groups: dep = self.graph.get(group.name) if dep.group.number != group.number: logger.debug( ""mutated"", extra=dict( group_from=str(dep.group), group_to=str(group), ), ) self.unapply(dep) dep.group = group ","def _resolve ( self , debug : bool , silent : bool , level : Optional [ int ] , spinner ) -> Optional [ bool ] : if silent : logger . debug ( ""next iteration"" , extra = dict ( layers = len ( self . graph . _layers ) , mutations = self . mutator . mutations , ) , ) else : spinner . text = ""layers: {layers}, mutations: {mutations}"" . 
format ( layers = len ( self . graph . _layers ) , mutations = self . mutator . mutations , ) deps = self . graph . get_leafs ( level = level ) if not deps : return True for dep in deps : if not dep . python_compat : self . graph . conflict = dep return False no_conflicts = self . _apply_deps ( deps , debug = debug ) if no_conflicts : return None groups = self . mutator . mutate ( self . graph ) if groups is None : return False self . graph . conflict = None for group in groups : dep = self . graph . get ( group . name ) logger . debug ( ""mutated"" , extra = dict ( group_from = str ( dep . group ) , group_to = str ( group ) , ) , ) self . unapply ( dep ) dep . group = group",if dep . group . number != group . number :,453 13898,"def apply(self, items, evaluation): ""%(name)s[items___]"" items = items.flatten(Symbol(""List"")).get_sequence() results = [] best = None for item in items: if item.has_form(""List"", None): leaves = item.leaves else: leaves = [item] for leaf in leaves: if best is None: best = leaf results.append(best) continue c = do_cmp(leaf, best) if c is None: results.append(leaf) elif (self.sense == 1 and c > 0) or (self.sense == -1 and c < 0): results.remove(best) best = leaf results.append(leaf) if not results: return Expression(""DirectedInfinity"", -self.sense) if len(results) == 1: return results.pop() if len(results) < len(items): # Some simplification was possible because we discarded # elements. return Expression(self.get_name(), *results) # If we get here, no simplification was possible. return None","def apply ( self , items , evaluation ) : ""%(name)s[items___]"" items = items . flatten ( Symbol ( ""List"" ) ) . get_sequence ( ) results = [ ] best = None for item in items : if item . has_form ( ""List"" , None ) : leaves = item . leaves else : leaves = [ item ] for leaf in leaves : if best is None : best = leaf results . append ( best ) continue c = do_cmp ( leaf , best ) results . append ( leaf ) elif ( self . sense == 1 and c > 0 ) or ( self . sense == - 1 and c < 0 ) : results . remove ( best ) best = leaf results . append ( leaf ) if not results : return Expression ( ""DirectedInfinity"" , - self . sense ) if len ( results ) == 1 : return results . pop ( ) if len ( results ) < len ( items ) : return Expression ( self . get_name ( ) , * results ) return None",if c is None :,323 8400,"def finish(self): self.done = True if self.has_trailers and hasattr(self.fp, ""read_trailer_lines""): self.trailers = {} try: for line in self.fp.read_trailer_lines(): if line[0] in ntob("" \t""): # It's a continuation line. v = line.strip() else: try: k, v = line.split(ntob("":""), 1) except ValueError: raise ValueError(""Illegal header line."") k = k.strip().title() v = v.strip() if k in cheroot.server.comma_separated_headers: existing = self.trailers.get(k) if existing: v = ntob("", "").join((existing, v)) self.trailers[k] = v except Exception: e = sys.exc_info()[1] if e.__class__.__name__ == ""MaxSizeExceeded"": # Post data is too big raise cherrypy.HTTPError(413, ""Maximum request length: %r"" % e.args[1]) else: raise","def finish ( self ) : self . done = True if self . has_trailers and hasattr ( self . fp , ""read_trailer_lines"" ) : self . trailers = { } try : for line in self . fp . read_trailer_lines ( ) : if line [ 0 ] in ntob ( "" \t"" ) : v = line . strip ( ) else : try : k , v = line . split ( ntob ( "":"" ) , 1 ) except ValueError : raise ValueError ( ""Illegal header line."" ) k = k . strip ( ) . title ( ) v = v . strip ( ) if k in cheroot . server . 
comma_separated_headers : existing = self . trailers . get ( k ) if existing : v = ntob ( "", "" ) . join ( ( existing , v ) ) self . trailers [ k ] = v except Exception : e = sys . exc_info ( ) [ 1 ] raise cherrypy . HTTPError ( 413 , ""Maximum request length: %r"" % e . args [ 1 ] ) else : raise","if e . __class__ . __name__ == ""MaxSizeExceeded"" :",366 15623,"def sync_up_to_new_location(self, worker_ip): if worker_ip != self.worker_ip: logger.debug(""Setting new worker IP to %s"", worker_ip) self.set_worker_ip(worker_ip) self.reset() if not self.sync_up(): logger.warning(""Sync up to new location skipped. This should not occur."") else: logger.warning(""Sync attempted to same IP %s."", worker_ip) ","def sync_up_to_new_location ( self , worker_ip ) : if worker_ip != self . worker_ip : logger . debug ( ""Setting new worker IP to %s"" , worker_ip ) self . set_worker_ip ( worker_ip ) self . reset ( ) logger . warning ( ""Sync up to new location skipped. This should not occur."" ) else : logger . warning ( ""Sync attempted to same IP %s."" , worker_ip )",if not self . sync_up ( ) :,126 2328,"def __keyPress(self, widget, event): if self.getSelectedIndex() is None: return False if event.key in (""Left"", ""Right"", ""Up"", ""Down""): if self.__positionIncrement == 0: return False if self.__positionIncrement is None: pixelIncrement = 1 else: pixelIncrement = self.__positionIncrement * self.size().x x = self.getPositions()[self.getSelectedIndex()] * self.size().x x += pixelIncrement if event.key in (""Right"", ""Up"") else -pixelIncrement self.__setPositionInternal( self.getSelectedIndex(), x, self.PositionChangedReason.Increment, clamp=not (event.modifiers & event.modifiers.Shift), ) return True elif event.key in (""Backspace"", ""Delete""): index = self.getSelectedIndex() if ( index is not None and self.getSizeEditable() and len(self.getPositions()) > self.getMinimumSize() ): del self.__positions[index] signal = getattr(self, ""_indexRemovedSignal"", None) if signal is not None: signal(self, index) self.__emitPositionChanged(self.PositionChangedReason.IndexRemoved) self._qtWidget().update() return True return False","def __keyPress ( self , widget , event ) : if self . getSelectedIndex ( ) is None : return False if event . key in ( ""Left"" , ""Right"" , ""Up"" , ""Down"" ) : if self . __positionIncrement == 0 : return False pixelIncrement = 1 else : pixelIncrement = self . __positionIncrement * self . size ( ) . x x = self . getPositions ( ) [ self . getSelectedIndex ( ) ] * self . size ( ) . x x += pixelIncrement if event . key in ( ""Right"" , ""Up"" ) else - pixelIncrement self . __setPositionInternal ( self . getSelectedIndex ( ) , x , self . PositionChangedReason . Increment , clamp = not ( event . modifiers & event . modifiers . Shift ) , ) return True elif event . key in ( ""Backspace"" , ""Delete"" ) : index = self . getSelectedIndex ( ) if ( index is not None and self . getSizeEditable ( ) and len ( self . getPositions ( ) ) > self . getMinimumSize ( ) ) : del self . __positions [ index ] signal = getattr ( self , ""_indexRemovedSignal"" , None ) if signal is not None : signal ( self , index ) self . __emitPositionChanged ( self . PositionChangedReason . IndexRemoved ) self . _qtWidget ( ) . update ( ) return True return False",if self . 
__positionIncrement is None :,381 10948,"def results_default_iter(commit_hash): for result in iter_results_for_machine_and_hash( conf.results_dir, machine, commit_hash ): if env_names is not None and result.env_name not in env_names: continue for key in result.get_all_result_keys(): params = result.get_result_params(key) result_value = result.get_result_value(key, params) result_stats = result.get_result_stats(key, params) result_samples = result.get_result_samples(key, params) result_version = result.benchmark_version.get(key) yield ( key, params, result_value, result_stats, result_samples, result_version, result.params[""machine""], result.env_name, ) ","def results_default_iter ( commit_hash ) : for result in iter_results_for_machine_and_hash ( conf . results_dir , machine , commit_hash ) : continue for key in result . get_all_result_keys ( ) : params = result . get_result_params ( key ) result_value = result . get_result_value ( key , params ) result_stats = result . get_result_stats ( key , params ) result_samples = result . get_result_samples ( key , params ) result_version = result . benchmark_version . get ( key ) yield ( key , params , result_value , result_stats , result_samples , result_version , result . params [ ""machine"" ] , result . env_name , )",if env_names is not None and result . env_name not in env_names :,266 19312,"def _binary(self, other: ""Table"", func, do_func): session_id = self._session.session_id left, right = self, other if left._partitions != right._partitions: if other.count() > self.count(): left = left.save_as( str(uuid.uuid1()), session_id, partition=right._partitions ) else: right = other.save_as( str(uuid.uuid1()), session_id, partition=left._partitions ) # noinspection PyProtectedMember results = self._session._submit_binary( func, do_func, left._partitions, left._name, left._namespace, right._name, right._namespace, ) result: _Operand = results[0] # noinspection PyProtectedMember return _create_table( session=self._session, name=result.name, namespace=result.namespace, partitions=left._partitions, )","def _binary ( self , other : ""Table"" , func , do_func ) : session_id = self . _session . session_id left , right = self , other if left . _partitions != right . _partitions : left = left . save_as ( str ( uuid . uuid1 ( ) ) , session_id , partition = right . _partitions ) else : right = other . save_as ( str ( uuid . uuid1 ( ) ) , session_id , partition = left . _partitions ) results = self . _session . _submit_binary ( func , do_func , left . _partitions , left . _name , left . _namespace , right . _name , right . _namespace , ) result : _Operand = results [ 0 ] return _create_table ( session = self . _session , name = result . name , namespace = result . namespace , partitions = left . _partitions , )",if other . count ( ) > self . 
count ( ) :,275 1073,"def _form_master_re(relist, reflags, ldict, toknames): if not relist: return [] regex = ""|"".join(relist) try: lexre = re.compile(regex, re.VERBOSE | reflags) # Build the index to function map for the matching engine lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1) lexindexnames = lexindexfunc[:] for f, i in lexre.groupindex.items(): handle = ldict.get(f, None) if type(handle) in (types.FunctionType, types.MethodType): lexindexfunc[i] = (handle, toknames[f]) lexindexnames[i] = f elif handle is not None: lexindexnames[i] = f if f.find(""ignore_"") > 0: lexindexfunc[i] = (None, None) else: lexindexfunc[i] = (None, toknames[f]) return [(lexre, lexindexfunc)], [regex], [lexindexnames] except Exception: m = int(len(relist) / 2) if m == 0: m = 1 llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames) rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames) return llist + rlist, lre + rre, lnames + rnames","def _form_master_re ( relist , reflags , ldict , toknames ) : if not relist : return [ ] regex = ""|"" . join ( relist ) try : lexre = re . compile ( regex , re . VERBOSE | reflags ) lexindexfunc = [ None ] * ( max ( lexre . groupindex . values ( ) ) + 1 ) lexindexnames = lexindexfunc [ : ] for f , i in lexre . groupindex . items ( ) : handle = ldict . get ( f , None ) if type ( handle ) in ( types . FunctionType , types . MethodType ) : lexindexfunc [ i ] = ( handle , toknames [ f ] ) lexindexnames [ i ] = f lexindexnames [ i ] = f if f . find ( ""ignore_"" ) > 0 : lexindexfunc [ i ] = ( None , None ) else : lexindexfunc [ i ] = ( None , toknames [ f ] ) return [ ( lexre , lexindexfunc ) ] , [ regex ] , [ lexindexnames ] except Exception : m = int ( len ( relist ) / 2 ) if m == 0 : m = 1 llist , lre , lnames = _form_master_re ( relist [ : m ] , reflags , ldict , toknames ) rlist , rre , rnames = _form_master_re ( relist [ m : ] , reflags , ldict , toknames ) return llist + rlist , lre + rre , lnames + rnames",elif handle is not None :,423 23105,"def decStep(self, event=None): if event is not None and not self.acceptKey(): return step, power = abcControlFrame._stepPower(self.step.get()) s = step - power if s <= 0.0: s = step - power / 10.0 if s < _LOWSTEP: s = _LOWSTEP elif s > _HIGHSTEP: s = _HIGHSTEP if self.astep is not self.step and self.astep.get() != _NOASTEP: step, power = abcControlFrame._stepPower(self.astep.get()) aas = step - power if aas <= 0.0: aas = step - power / 10.0 if aas < _LOWSTEP: aas = _LOWSTEP elif aas > _HIGHASTEP: aas = _HIGHASTEP else: aas = None self.setStep(s, aas)","def decStep ( self , event = None ) : if event is not None and not self . acceptKey ( ) : return step , power = abcControlFrame . _stepPower ( self . step . get ( ) ) s = step - power if s <= 0.0 : s = step - power / 10.0 if s < _LOWSTEP : s = _LOWSTEP elif s > _HIGHSTEP : s = _HIGHSTEP if self . astep is not self . step and self . astep . get ( ) != _NOASTEP : step , power = abcControlFrame . _stepPower ( self . astep . get ( ) ) aas = step - power if aas <= 0.0 : aas = step - power / 10.0 aas = _LOWSTEP elif aas > _HIGHASTEP : aas = _HIGHASTEP else : aas = None self . 
setStep ( s , aas )",if aas < _LOWSTEP :,258 25418,"def _nested_ui_dict( self, ui_schema: List[Dict[str, Any]], option_dict: Dict[str, Any], key: str, multiple: bool = False, ) -> None: """"""UI nested dict items."""""" ui_node = { ""name"": key, ""type"": ""schema"", ""optional"": True, ""multiple"": multiple, } nested_schema = [] for c_key, c_value in option_dict.items(): # Nested? if isinstance(c_value, list): self._nested_ui_list(nested_schema, c_value, c_key) else: self._single_ui_option(nested_schema, c_value, c_key) ui_node[""schema""] = nested_schema ui_schema.append(ui_node) ","def _nested_ui_dict ( self , ui_schema : List [ Dict [ str , Any ] ] , option_dict : Dict [ str , Any ] , key : str , multiple : bool = False , ) -> None : """"""UI nested dict items."""""" ui_node = { ""name"" : key , ""type"" : ""schema"" , ""optional"" : True , ""multiple"" : multiple , } nested_schema = [ ] for c_key , c_value in option_dict . items ( ) : self . _nested_ui_list ( nested_schema , c_value , c_key ) else : self . _single_ui_option ( nested_schema , c_value , c_key ) ui_node [ ""schema"" ] = nested_schema ui_schema . append ( ui_node )","if isinstance ( c_value , list ) :",227 18679,"def test_https(): for proto in [""http"", ""https""]: for convention in [""virtualhost"", ""path"", ""subdomain""]: opts = calling_format._s3connection_opts_from_uri( ""{0}+{1}://"".format(proto, convention) ) assert (proto == ""https"") == opts[""is_secure""] assert (proto == ""http"") == (not opts[""is_secure""]) cf = opts[""calling_format""] if convention == ""virtualhost"": assert isinstance(cf, connection.VHostCallingFormat) elif convention == ""path"": assert isinstance(cf, connection.OrdinaryCallingFormat) elif convention == ""subdomain"": assert isinstance(cf, connection.SubdomainCallingFormat) else: assert False","def test_https ( ) : for proto in [ ""http"" , ""https"" ] : for convention in [ ""virtualhost"" , ""path"" , ""subdomain"" ] : opts = calling_format . _s3connection_opts_from_uri ( ""{0}+{1}://"" . format ( proto , convention ) ) assert ( proto == ""https"" ) == opts [ ""is_secure"" ] assert ( proto == ""http"" ) == ( not opts [ ""is_secure"" ] ) cf = opts [ ""calling_format"" ] if convention == ""virtualhost"" : assert isinstance ( cf , connection . VHostCallingFormat ) elif convention == ""path"" : assert isinstance ( cf , connection . OrdinaryCallingFormat ) assert isinstance ( cf , connection . SubdomainCallingFormat ) else : assert False","elif convention == ""subdomain"" :",226 1684,"def render(self, name, value, attrs=None, renderer=None): output = [] for option in self.subwidgets(name, value, attrs): option_value = option[""value""] option[""widget""] = self.create_option( name=name, value=option[""value""], label=option[""label""], selected=option_value == value, index=option[""index""], attrs=option[""attrs""], ) if option_value.split(""/"")[0] == ""icon"" or option_value == """": icon_name = option[""label""] original_widget = self._render(self.option_template_name, option) if not original_widget.startswith(""', label_id=option[""widget""][""attrs""][""id""], widget=original_widget, ) output.append( format_html( self.base_html, active=""active"" if option_value == value else """", static=settings.STATIC_URL, icon_name=icon_name, original_widget=original_widget, ) ) else: output.append( format_html( '
{widget}
  • ', widget=self._render(self.option_template_name, option), ) ) return mark_safe(""\n"".join(output))","def render ( self , name , value , attrs = None , renderer = None ) : output = [ ] for option in self . subwidgets ( name , value , attrs ) : option_value = option [ ""value"" ] option [ ""widget"" ] = self . create_option ( name = name , value = option [ ""value"" ] , label = option [ ""label"" ] , selected = option_value == value , index = option [ ""index"" ] , attrs = option [ ""attrs"" ] , ) if option_value . split ( ""/"" ) [ 0 ] == ""icon"" or option_value == """" : icon_name = option [ ""label"" ] original_widget = self . _render ( self . option_template_name , option ) original_widget = format_html ( '' , label_id = option [ ""widget"" ] [ ""attrs"" ] [ ""id"" ] , widget = original_widget , ) output . append ( format_html ( self . base_html , active = ""active"" if option_value == value else """" , static = settings . STATIC_URL , icon_name = icon_name , original_widget = original_widget , ) ) else : output . append ( format_html ( '
{widget}
  • ' , widget = self . _render ( self . option_template_name , option ) , ) ) return mark_safe ( ""\n"" . join ( output ) )","if not original_widget . startswith ( ""