,input,is_correct,expected_cond,predicted_cond,score
0,"def stream_edit ( request, stream_id, response_format = ""html"" ) : ""Stream edit page"" user = request. user. profile stream = get_object_or_404 ( MessageStream, pk = stream_id ) if not request. user. profile. has_permission ( stream, mode = ""w"" ) : return user_denied ( request, message = ""You don't have access to this Stream"", response_format = response_format, ) if request. POST : if : form = MessageStreamForm ( user, request. POST, instance = stream ) if form. is_valid ( ) : stream = form. save ( ) return HttpResponseRedirect ( reverse ( ""messaging_stream_view"", args = [ stream. id ] ) ) else : return HttpResponseRedirect ( reverse ( ""messaging_stream_view"", args = [ stream. id ] ) ) else : form = MessageStreamForm ( user, instance = stream ) context = _get_default_context ( request ) context. update ( { ""form"" : form, ""stream"" : stream } ) return render_to_response ( ""messaging/stream_edit"", context, context_instance = RequestContext ( request ), response_format = response_format, )",False,'cancel' not in request.POST,stream.id < 5,0.6595945358276367
1,"def _read_and_parse_includes ( self ) : included_files = { } forward_declarations = { } files_seen = { } for node in self. ast_list : if : if node. system : filename = node. filename else : module = self. _get_module ( node ) filename = module. filename _, ext = os. path. splitext ( filename ) if ext. lower ( )!= "".hxx"" : included_files [ filename ] = node, module if is_cpp_file ( filename ) : self. _add_warning ( ""should not #include C++ source file '{}'"". format ( node. filename ), node, ) if filename == self. filename : self. _add_warning ( ""'{}' #includes itself"". format ( node. filename ), node ) if filename in files_seen : include_node = files_seen [ filename ] line_num = get_line_number ( self. metrics, include_node ) self. _add_warning ( ""'{}' already #included on line {}"". format ( node. filename, line_num ), ",False,"isinstance(node, ast.Include)",node.filename,0.6480394601821899
2,"def _get_list_key ( self, spaces, lines ) : key_list = [ ] parse_key = False key, desc, ptype = None, """", None param_spaces = 0 for line in lines : if : continue curr_spaces = get_leading_spaces ( line ) if not param_spaces : param_spaces = len ( curr_spaces ) if len ( curr_spaces ) == param_spaces : if parse_key : key_list. append ( ( key, desc, ptype ) ) if "":"" in line : elems = line. split ( "":"", 1 ) ptype = None key = elems [ 0 ]. strip ( ) if ""("" in key and "")"" in key : tstart = key. index ( ""("" ) + 1 tend = key. index ( "")"" ) if "","" in key : tend = key. index ( "","" ) ptype = key [ tstart : tend ]. strip ( ) key = key [ : tstart - 1 ]. strip ( ) desc = elems [ 1 ]. strip ( ) parse_key = True <",False,len(line.strip()) == 0,line.startswith(space),0.6560866832733154
3,"def search_host ( self, search_string ) : results = [ ] for host_entry in self. config_data : if host_entry. get ( ""type"" )!= ""entry"" : continue if host_entry. get ( ""host"" ) == ""*"" : continue searchable_information = host_entry. get ( ""host"" ) for key, value in six. iteritems ( host_entry. get ( ""options"" ) ) : if isinstance ( value, list ) : value = "" "". join ( value ) if : value = str ( value ) searchable_information += "" "" + value if search_string in searchable_information : results. append ( host_entry ) return results",False,"isinstance(value, int)","isinstance(value, str)",0.650113582611084
4,"def pop ( self, key : Union [ str, Enum ], default : Any = DEFAULT_VALUE_MARKER ) -> Any : try : if self. _get_flag ( ""readonly"" ) : raise ReadonlyConfigError ( ""Cannot pop from read-only node"" ) if : raise ConfigTypeError ( ""DictConfig in struct mode does not support pop"" ) if self. _is_typed ( ) and self. _get_node_flag ( ""struct"" ) is not False : raise ConfigTypeError ( f""{type_str(self._metadata.object_type)} (DictConfig) does not support pop"" ) key = self. _validate_and_normalize_key ( key ) node = self. _get_node ( key = key, validate_access = False ) if node is not None : value = self. _resolve_with_default ( key = key, value = node, default_value = default ) del self [ key ] return value else : if default is not DEFAULT_VALUE_MARKER : return default else : full = self. _get_full_key ( key = key ) if full!= key : raise ConfigKeyError ( f""Key not found: '{key}' (path: '{full}')"" ) else : raise ConfigKeyError ( f""Key not",False,self._get_flag('struct'),self._is_struct(key),0.6553546190261841
5,"def _key ( self, index ) : len_self = len ( self ) if : index += len_self if : raise IndexError ( ""deque index out of range"" ) elif index >= len_self : raise IndexError ( ""deque index out of range"" ) diff = len_self - index - 1 _cache_iterkeys = self. _cache. iterkeys try : if index <= diff : iter_keys = _cache_iterkeys ( ) key = next ( islice ( iter_keys, index, index + 1 ) ) else : iter_keys = _cache_iterkeys ( reverse = True ) key = next ( islice ( iter_keys, diff, diff + 1 ) ) except StopIteration : raise IndexError ( ""deque index out of range"" ) return key",True,index < 0,index < 0,0.675930380821228
6,"def convert ( src, dst ) : """"""Convert keys in pycls pretrained RegNet models to mmdet style."""""" regnet_model = torch. load ( src ) blobs = regnet_model [ ""model_state"" ] state_dict = OrderedDict ( ) converted_names = set ( ) for key, weight in blobs. items ( ) : if ""stem"" in key : convert_stem ( key, weight, state_dict, converted_names ) elif : convert_head ( key, weight, state_dict, converted_names ) elif key. startswith ( ""s"" ) : convert_reslayer ( key, weight, state_dict, converted_names ) for key in blobs : if key not in converted_names : print ( f""not converted: {key}"" ) checkpoint = dict ( ) checkpoint [ ""state_dict"" ] = state_dict torch. save ( checkpoint, dst )",False,'head' in key,"key.startswith((""head"", ""s"")",0.6630789041519165
7,"def run ( self, args, ** kwargs ) : if args. action : kwargs [ ""action"" ] = args. action if args. status : kwargs [ ""status"" ] = args. status if args. trigger_instance : kwargs [ ""trigger_instance"" ] = args. trigger_instance if not args. showall : kwargs [ ""parent"" ] = ""null"" if args. timestamp_gt : kwargs [ ""timestamp_gt"" ] = args. timestamp_gt if args. timestamp_lt : kwargs [ ""timestamp_lt"" ] = args. timestamp_lt if args. sort_order : if : kwargs [ ""sort_asc"" ] = True elif args. sort_order in [ ""desc"", ""descending"" ] : kwargs [ ""sort_desc"" ] = True include_attributes = self. _get_include_attributes ( args = args ) if include_attributes : kwargs [ ""include_attributes"" ] = "","". join ( include_attributes ) return self. manager. query_with_count ( limit = args. last, ** kwargs )",False,"args.sort_order in ['asc', 'ascending']","args.sort_order in ['asc', 'descending']",0.6580512523651123
8,"def goToPrevMarkedHeadline ( self, event = None ) : """"""Select the next marked node."""""" c = self p = c. p if not p : return p. moveToThreadBack ( ) wrapped = False while 1 : if : break elif p : p. moveToThreadBack ( ) elif wrapped : break else : wrapped = True p = c. rootPosition ( ) if not p : g. blue ( ""done"" ) c. treeSelectHelper ( p )",False,p and p.isMarked(),c.nodeIsBack(),0.6556696891784668
9,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if : self. success = [ ] ( _etype683, _size680 ) = iprot. readListBegin ( ) for _i684 in xrange ( _size680 ) : _elem685 = iprot. readString ( ) self. success. append ( _elem685 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) elif fid == 1 : if ftype == TType. STRUCT : self. o1 = MetaException ( ) self. o1. read ( iprot ) else : iprot. skip ( ftype ) else : ",False,ftype == TType.LIST,ftype == TType.ENUM,0.6605467796325684
10,"def __iter__ ( self ) : n_samples = self. image_dataset. _X_shape [ 0 ] worker_info = torch. utils. data. get_worker_info ( ) if worker_info is None : first_sample = 0 last_sample = n_samples else : first_sample = worker_info. id * n_samples // worker_info. num_workers last_sample = ( worker_info. id + 1 ) * n_samples // worker_info. num_workers for epoch in range ( self. epochs ) : if : order = first_sample + np. arange ( last_sample - first_sample ) else : random = np. random. RandomState ( epoch ) order = random. permutation ( n_samples ) [ first_sample : last_sample ] if self. batch_size is None : for i in order : yield ( self. image_dataset. _get_image ( self. image_dataset. _X, i ), self. image_dataset. _get_image ( self. image_dataset. _y, i ), self. image_dataset. _w [ i ], self. image_dataset. _ids [ i ], ) else : for i in range ( 0, len ( order ), self. batch_size ) : indices = order [ i : i + self. batch_size ] ",False,self.deterministic,self.training,0.6621298789978027
11,"def on_leave ( self, instance ) : """"""Called when the mouse cursor goes outside the button of stack."""""" if self. state == ""open"" : for widget in self. children : if isinstance ( widget, MDFloatingLabel ) and self. hint_animation : Animation. cancel_all ( widget ) if : Animation ( _canvas_width = 0, _padding_right = 0, d = self. opening_time, t = self. opening_transition, ). start ( instance ) if self. hint_animation : Animation ( opacity = 0, d = 0.1, t = self. opening_transition, ). start ( widget ) break",False,self.data[instance.icon] == widget.text,self.opening_time,0.6539744138717651
12,"def reset_init_only_vars ( self, info : TypeInfo, attributes : List [ DataclassAttribute ] ) -> None : """"""Remove init-only vars from the class and reset init var declarations."""""" for attr in attributes : if : if attr. name in info. names : del info. names [ attr. name ] else : assert attr. is_init_var for stmt in info. defn. defs. body : if isinstance ( stmt, AssignmentStmt ) and stmt. unanalyzed_type : lvalue = stmt. lvalues [ 0 ] if isinstance ( lvalue, NameExpr ) and lvalue. name == attr. name : lvalue. node = None",False,attr.is_init_var,"isinstance(attr, Variable)",0.6558442115783691
13,"def __call__ ( self, request ) : if ( request. path. startswith ( get_script_prefix ( ) + ""control"" ) and request. user. is_authenticated ) : if is_hijacked ( request ) : hijack_history = request. session. get ( ""hijack_history"", False ) hijacker = get_object_or_404 ( User, pk = hijack_history [ 0 ] ) ss = hijacker. get_active_staff_session ( request. session. get ( ""hijacker_session"" ) ) if : ss. logs. create ( url = request. path, method = request. method, impersonating = request. user ) else : ss = request. user. get_active_staff_session ( request. session. session_key ) if : ss. logs. create ( url = request. path, method = request. method ) response = self. get_response ( request ) return response",False,ss,not ss.is_authenticated(),0.6921205520629883
14,"def test_other_attributes ( self ) : print_test_name ( ""TEST OTHER ATTRIBUTES"" ) correct = 0 props = { } for example in OTHER_PROP_EXAMPLES : original_schema = schema. parse ( example. schema_string ) round_trip_schema = schema. parse ( str ( original_schema ) ) self. assertEqual ( original_schema. other_props, round_trip_schema. other_props ) if : field_props = 0 for f in original_schema. fields : if f. other_props : props. update ( f. other_props ) field_props += 1 self. assertEqual ( field_props, len ( original_schema. fields ) ) if original_schema. other_props : props. update ( original_schema. other_props ) correct += 1 for k in props : v = props [ k ] if k == ""cp_boolean"" : self. assertEqual ( type ( v ), bool ) elif k == ""cp_int"" : self. assertEqual ( type ( v ), int ) elif k == ""cp_object"" : self. assertEqual ( type ( v ), dict ) elif k == ""cp_float"" : self. assertEqual ( type ( v ), float ) elif k == ""cp_array"" : self. assertEqual ( type ( v ), list ) self.",False,original_schema.type == 'record',original_schema.fields,0.6533740162849426
15,"def test_no_unknown_state_fields_in_mp_events ( ) : all_fields = ins. MediaPlayerStateIterator. fields. keys ( ) ok = True for evname in ins. mp_events : if evname == ""version"" : continue for name in ins. mp_events [ evname ] [ ""update_names"" ] : if : print ( ""Error, in evname '%s' unknown field '%s' in 'update_names'"" % ( evname, name ) ) ok = False for name in ins. mp_events [ evname ] [ ""other_fields"" ] : if : print ( ""Error, in evname '%s' unknown field '%s' in 'other_fields'"" % ( evname, name ) ) ok = False if ok : print ( ""test_no_unknown_state_fields_in_mp_events: passed"" )",False,name not in all_fields,all_fields,0.6561712026596069
16,"def __call__ ( self, A, a, order = 10, mu = 0.1, s = 0.5 ) : print ( ""Chebyshev Series -----------------"" ) if order == 1 : return a node_number = a. shape [ 0 ] A = sp. eye ( node_number ) + A DA = preprocessing. normalize ( A, norm = ""l1"" ) L = sp. eye ( node_number ) - DA M = L - mu * sp. eye ( node_number ) Lx0 = a Lx1 = M. dot ( a ) Lx1 = 0.5 * M. dot ( Lx1 ) - a conv = iv ( 0, s ) * Lx0 conv -= 2 * iv ( 1, s ) * Lx1 for i in range ( 2, order ) : Lx2 = M. dot ( Lx1 ) Lx2 = ( M. dot ( Lx2 ) - 2 * Lx1 ) - Lx0 if : conv += 2 * iv ( i, s ) * Lx2 else : conv -= 2 * iv ( i, s ) * Lx2 Lx0 = Lx1 Lx1 = Lx2 del Lx2 mm = A. dot ( a - conv ) return mm",False,i % 2 == 0,i < 3,0.6680158376693726
17,"def parse_clusterflow_logs ( self, f ) : """"""Parse Clusterflow logs"""""" module = None job_id = None pipeline_id = None for l in f [ ""f"" ] : module_r = re. match ( r""Module:\s+(.+)$"", l ) if module_r : module = module_r. group ( 1 ) job_id_r = re. match ( r""Job ID:\s+(.+)$"", l ) if job_id_r : job_id = job_id_r. group ( 1 ) if module is not None : pipeline_r = re. match ( r""(cf_.+)_"" + re. escape ( module ) + r""_\d+$"", job_id ) if pipeline_r : pipeline_id = pipeline_r. group ( 1 ) if l. startswith ( ""###CFCMD"" ) : if : pipeline_id = ""unknown"" if pipeline_id not in self. clusterflow_commands. keys ( ) : self. clusterflow_commands [ pipeline_id ] = list ( ) self. clusterflow_commands [ pipeline_id ]. append ( l [ 8 : ] )",True,pipeline_id is None,pipeline_id is None,0.6590132713317871
18,"def check_other_queues ( queue_counts_dict : Dict [ str, int ] ) -> List [ Dict [ str, Any ] ] : """"""Do a simple queue size check for queues whose workers don't publish stats files."""""" results = [ ] for queue, count in queue_counts_dict. items ( ) : if queue in normal_queues : continue if : results. append ( dict ( status = CRITICAL, name = queue, message = f""count critical: {count}"" ) ) elif count > WARN_COUNT_THRESHOLD_DEFAULT : results. append ( dict ( status = WARNING, name = queue, message = f""count warning: {count}"" ) ) else : results. append ( dict ( status = OK, name = queue, message = """" ) ) return results",True,count > CRITICAL_COUNT_THRESHOLD_DEFAULT,count > CRITICAL_COUNT_THRESHOLD_DEFAULT,0.6601839065551758
19,"def handle ( self ) : self. _send_textline ( ""* OK IMAP4rev1"" ) while 1 : line = b"""" while 1 : try : part = self. rfile. read ( 1 ) if part == b"""" : return line += part except OSError : return if line. endswith ( b""\r\n"" ) : break if verbose : print ( ""GOT: %r"" % line. strip ( ) ) if self. continuation : try : self. continuation. send ( line ) except StopIteration : self. continuation = None continue splitline = line. decode ( ""ASCII"" ). split ( ) tag = splitline [ 0 ] cmd = splitline [ 1 ] args = splitline [ 2 : ] if hasattr ( self, ""cmd_"" + cmd ) : continuation = getattr ( self, ""cmd_"" + cmd ) ( tag, args ) if : self. continuation = continuation next ( continuation ) ",False,continuation,continuing,0.6953333616256714
20,"def get_indexes ( self, cursor, table_name ) : cursor. execute ( self. _get_indexes_query, [ table_name, self. connection. schema_name ] ) indexes = { } for row in cursor. fetchall ( ) : if "" "" in row [ 1 ] : continue if row [ 0 ] not in indexes : indexes [ row [ 0 ] ] = { ""primary_key"" : False, ""unique"" : False } if row [ 3 ] : indexes [ row [ 0 ] ] [ ""primary_key"" ] = True if : indexes [ row [ 0 ] ] [ ""unique"" ] = True return indexes",False,row[2],row[4],0.6649578213691711
21,"def _cache_key ( ui, url = None, locale = None, additional_key_data = None ) : if url is None : url = request. base_url if locale is None : locale = g. locale. language if g. locale else ""en"" k = ""ui:{}:{}:{}"". format ( ui, url, locale ) if callable ( additional_key_data ) : try : ak = additional_key_data ( ) if : if not isinstance ( ak, ( list, tuple ) ) : ak = [ ak ] k = ""{}:{}"". format ( k, "":"". join ( ak ) ) except Exception : _logger. exception ( ""Error while trying to retrieve additional cache key parts for ui {}"". format ( ui ) ) return k",False,ak,ak is None,0.698824405670166
22,"def _ArgumentListHasDictionaryEntry ( self, token ) : """"""Check if the function argument list has a dictionary as an arg."""""" if _IsArgumentToFunction ( token ) : while token : if token. value == ""{"" : length = token. matching_bracket. total_length - token. total_length return length + self. stack [ - 2 ]. indent > self. column_limit if : break if token. OpensScope ( ) : token = token. matching_bracket token = token. next_token return False",False,token.ClosesScope(),token.value == '}',0.6620413064956665
23,"def parse_escaped_hierarchical_category_name ( category_name ) : """"""Parse a category name."""""" result = [ ] current = None index = 0 next_backslash = category_name. find ( ""\\"", index ) next_slash = category_name. find ( ""/"", index ) while index < len ( category_name ) : if : current = ( current if current else """" ) + category_name [ index : ] index = len ( category_name ) elif next_slash >= 0 and ( next_backslash == - 1 or next_backslash > next_slash ) : result. append ( ( current if current else """" ) + category_name [ index : next_slash ] ) current = """" index = next_slash + 1 next_slash = category_name. find ( ""/"", index ) else : if len ( category_name ) == next_backslash + 1 : raise Exception ( ""Unexpected '\\' in '{0}' at last position!"". format ( category_name ) ) esc_ch = category_name [ next_backslash + 1 ] if esc_ch not in { ""/"", ""\\"" } : raise Exception ( ""Unknown escape sequence '\\{0}' in '{1}'!"". format ( esc_ch, category_name ) <",False,next_backslash == -1 and next_slash == -1,current,0.6594604849815369
24,"def addStudent ( self, name, age ) : new_names = self. getNames ( ) found = False for item in new_names : if : found = True break if not found : new_names. append ( ( name, age ) ) new_names. sort ( ) while len ( self. children ) : self. remove ( self. children [ 0 ] ) self. addTitle ( ) for student in new_names : sw = StudentWidget ( student [ 0 ], student [ 1 ] ) makeDraggable ( sw ) self. append ( sw ) self. setCellVerticalAlignment ( sw, HasVerticalAlignment. ALIGN_TOP )",False,"item == (name, age)",item,0.6567255854606628
25,"def mockup ( self, records ) : provider = TransipProvider ( """", """", """" ) _dns_entries = [ ] for record in records : if : entries_for = getattr ( provider, ""_entries_for_{}"". format ( record. _type ) ) name = record. name if name == """" : name = provider. ROOT_RECORD _dns_entries. extend ( entries_for ( name, record ) ) _dns_entries. append ( DnsEntry ( ""@"", ""3600"", ""NS"", ""ns01.transip.nl."" ) ) self. mockupEntries = _dns_entries",False,record._type in provider.SUPPORTS,record._type != WebSocketRecord.NONE,0.6495593190193176
26,"def _compare ( d1, d2, skip_keys = None ) : """"""Compare two lists or dictionaries or array"""""" if type ( d1 )!= type ( d2 ) : return False if isinstance ( d1, dict ) : if : return False for key in d1 : if skip_keys is not None and key in skip_keys : continue if not _compare ( d1 [ key ], d2 [ key ], skip_keys = skip_keys ) : return False elif isinstance ( d1, list ) : for i, _ in enumerate ( d1 ) : if not _compare ( d1 [ i ], d2 [ i ], skip_keys = skip_keys ) : return False elif isinstance ( d1, np. ndarray ) : if not np. array_equal ( d1, d2 ) : return False else : if d1!= d2 : return False return True",False,set(d1) != set(d2),d1 != d2,0.6493426561355591
27,"def _get_families ( self ) : families = [ ] for name, ext in self. _get_family_dirs ( ) : if : family = self. get_resource ( FileSystemPackageFamilyResource. key, location = self. location, name = name ) else : family = self. get_resource ( FileSystemCombinedPackageFamilyResource. key, location = self. location, name = name, ext = ext, ) families. append ( family ) return families",False,ext is None,ext,0.6625068187713623
28,"def _module_repr_from_spec ( spec ) : """"""Return the repr to use for the module."""""" name = ""?"" if spec. name is None else spec. name if spec. origin is None : if : return """". format ( name ) else : return """". format ( name, spec. loader ) else : if spec. has_location : return """". format ( name, spec. origin ) else : return """". format ( spec. name, spec. origin )",True,spec.loader is None,spec.loader is None,0.6524957418441772
29,"def doDir ( elem ) : for child in elem. childNodes : if not isinstance ( child, minidom. Element ) : continue if child. tagName == ""Directory"" : doDir ( child ) elif child. tagName == ""Component"" : for grandchild in child. childNodes : if : continue if grandchild. tagName!= ""File"" : continue files. add ( grandchild. getAttribute ( ""Source"" ). replace ( os. sep, ""/"" ) )",False,"not isinstance(grandchild, minidom.Element)",grandchild.tagName == 'Error',0.6502865552902222
30,"def test_row ( self, row ) : for idx, test in self. patterns. items ( ) : try : value = row [ idx ] except IndexError : value = """" result = test ( value ) if self. any_match : if result : return not self. inverse else : if : return self. inverse if self. any_match : return self. inverse else : return not self. inverse",False,not result,result,0.6757747530937195
31,"def _validate_scalar_extensions ( self ) -> List [ str ] : errors = [ ] for extension in [ x for x in self. extensions if isinstance ( x, GraphQLScalarTypeExtension ) ] : extended = self. type_definitions. get ( extension. name ) ext_errors = _validate_extension ( extended, extension. name, GraphQLScalarType, ""SCALAR"" ) errors. extend ( ext_errors ) if : errors. extend ( _validate_extension_directives ( extension, extended, ""SCALAR"" ) ) return errors",False,not ext_errors,extension.directives,0.661431074142456
32,"def call ( monad, * args ) : for arg, name in izip ( args, ( ""hour"", ""minute"", ""second"", ""microsecond"" ) ) : if not isinstance ( arg, NumericMixin ) or arg. type is not int : throw ( TypeError, ""'%s' argument of time(...) function must be of 'int' type. Got: %r"" % ( name, type2str ( arg. type ) ), ) if : throw ( NotImplementedError ) return ConstMonad. new ( time ( * tuple ( arg. value for arg in args ) ) )",False,"not isinstance(arg, ConstMonad)",monad.type is None and arg.type is not int,0.6584521532058716
33,"def get_config ( ) : try : config_str = config. get ( ""plugins"", ""equalizer_levels"", ""[]"" ) config_dict = ast. literal_eval ( config_str ) if isinstance ( config_dict, list ) : print_w ( ""Converting old EQ config to new format."" ) config_dict = { ""Current"" : config_dict } if not isinstance ( config_dict, dict ) : raise ValueError ( ""Saved config is of wrong type."" ) if : raise ValueError ( ""Saved config was malformed."" ) for key in config_dict. keys ( ) : [ float ( s ) for s in config_dict [ key ] ] return config_dict except ( config. Error, ValueError ) as e : print_e ( str ( e ) ) return { ""Current"" : [ ] }",False,not 'Current' in config_dict.keys(),config_dict.get('error'),0.6510497331619263
34,"def _parse ( self, contents ) : entries = [ ] for line in contents. splitlines ( ) : if : entries. append ( ( ""blank"", [ line ] ) ) continue ( head, tail ) = chop_comment ( line. strip ( ), ""#"" ) if not len ( head ) : entries. append ( ( ""all_comment"", [ line ] ) ) continue entries. append ( ( ""option"", [ head. split ( None ), tail ] ) ) return entries",False,not len(line.strip()),not line,0.6477398872375488
35,"def _brush_modified_cb ( self, settings ) : """"""Updates the brush's base setting adjustments on brush changes"""""" for cname in settings : adj = self. brush_adjustment. get ( cname, None ) if : continue value = self. brush. get_base_value ( cname ) adj. set_value ( value )",True,adj is None,adj is None,0.6785727143287659
36,"def upgrade ( migrate_engine ) : print ( __doc__ ) metadata. bind = migrate_engine liftoverjobs = dict ( ) jobs = context. query ( DeferredJob ). filter_by ( plugin = ""LiftOverTransferPlugin"" ). all ( ) for job in jobs : if : liftoverjobs [ job. params [ ""parentjob"" ] ] = [ ] liftoverjobs [ job. params [ ""parentjob"" ] ]. append ( job. id ) for parent in liftoverjobs : lifts = liftoverjobs [ parent ] deferred = context. query ( DeferredJob ). filter_by ( id = parent ). first ( ) deferred. params [ ""liftover"" ] = lifts context. flush ( )",False,job.params['parentjob'] not in liftoverjobs,job.plugin == 'LiftOverTransferPlugin',0.661872923374176
37,"def bump_version ( bump_type ) : """"""Bumps version to the next release, or development version."""""" cur_ver = _get_version ( ) click. echo ( ""current version: %s"" % cur_ver ) ver_split = cur_ver. split ( ""."" ) if ""dev"" in ver_split [ - 1 ] : if bump_type == ""dev"" : ver_split [ - 1 ] = ""dev%d"" % ( int ( ver_split [ - 1 ]. strip ( ""dev"" ) or 0 ) + 1 ) else : ver_split = ver_split [ : - 1 ] else : if : ver_split. append ( ""1"" ) else : if ""b"" in ver_split [ 2 ] : minor, beta = ver_split [ - 1 ]. split ( ""b"" ) ver_split [ - 1 ] = ""%sb%s"" % ( minor, int ( beta ) + 1 ) else : ver_split [ - 1 ] = str ( int ( ver_split [ - 1 ] ) + 1 ) if bump_type == ""dev"" : ver_split. append ( ""dev"" ) new_version = ""."". join ( ver_split ) for line in fileinput. FileInput ( ""flexget/_version.py"", inplace = 1 ) : if line. startswith ( ""__version__ ="" ) : line = ""__version__ = '%s'\n",False,len(ver_split) == 2,len(ver_split) > 1,0.6541119813919067
38,"def __find_smallest ( self ) : """"""Find the smallest uncovered value in the matrix."""""" minval = sys. maxsize for i in range ( self. n ) : for j in range ( self. n ) : if ( not self. row_covered [ i ] ) and ( not self. col_covered [ j ] ) : if : minval = self. C [ i ] [ j ] return minval",False,minval > self.C[i][j],self.C[i] is not None,0.6620251536369324
39,"def git_branch_for_post ( self, path, interactive = False ) : if path is None : return None if path in self. git_local_branches : return self. git_branch ( path ) branches = [ ] for branch in self. git_local_branches : if : branches. append ( branch ) if len ( branches ) == 0 : if path in self. dir ( ) : return self. git_branch ( self. config. published_branch ) return None if len ( branches ) == 1 : return self. git_branch ( branches [ 0 ] ) if interactive : print ( ""There are multiple branches for post '{}'."". format ( path ) ) for i, branch in enumerate ( branches ) : print ( ""{}. {}"". format ( i, branch ) ) response = None while not isinstance ( response, int ) : response = input ( ""Please select the branch you would like to use: "" ) try : response = int ( response ) except : response = None else : response = 0 return self. git_branch ( branches [ response ] )",False,path in self.git_local_posts(branches=[branch]),path in branch,0.656347393989563
40,"def update_brush ( self, * args ) : with self. output : if not self. brush. brushing : self. figure. interaction = None if : ( x1, y1 ), ( x2, y2 ) = self. brush. selected mode = self. modes_names [ self. modes_labels. index ( self. button_selection_mode. value ) ] self. plot. select_rectangle ( x1, y1, x2, y2, mode = mode ) else : self. dataset. select_nothing ( ) if ( not self. brush. brushing ) : self. figure. interaction = self. brush",False,self.brush.selected is not None,self.button_selection_mode.value is not None,0.6603747606277466
41,"def check ( self, check_all = False, do_reload = True ) : """"""Check whether some modules need to be reloaded."""""" if not self. enabled and not check_all : return if check_all or self. check_all : modules = list ( sys. modules. keys ( ) ) else : modules = list ( self. modules. keys ( ) ) for modname in modules : m = sys. modules. get ( modname, None ) if modname in self. skip_modules : continue py_filename, pymtime = self. filename_and_mtime ( m ) if py_filename is None : continue try : if pymtime <= self. modules_mtimes [ modname ] : continue except KeyError : self. modules_mtimes [ modname ] = pymtime continue else : if self. failed. get ( py_filename, None ) == pymtime : continue self. modules_mtimes [ modname ] = pymtime if : try : superreload ( m, reload, self. old_objects ) if py_filename in self. failed : del self. failed [ py_filename ] except : print ( ""[autoreload of %s failed: %s]" result = view ( context, request ) if result. __class__ is Response : response = result else : response = info. registry. queryAdapterOrSelf ( result, IResponse ) if response is None : if : append = ( "" You may have forgotten to return a value "" ""from the view callable."" ) elif isinstance ( result, dict ) : append = ( "" You may have forgotten to define a "" ""renderer in the view configuration."" ) else : append = """" msg = ( ""Could not convert return value of the view "" ""callable %s into a response object. "" ""The value returned was %r."" + append ) raise ValueError ( msg % ( view_description ( view ), result ) ) return response",False,result is None,"isinstance(result, list)",0.6762720346450806
43,"def put ( self, value : V = None, key : K = None, partition : Optional [ int ] = None, timestamp : Optional [ float ] = None, headers : HeadersArg = None, key_serializer : CodecArg = None, value_serializer : CodecArg = None, *, reply_to : ReplyToArg = None, correlation_id : str = None, wait : bool = True ) -> EventT : if reply_to : value, headers = self. _create_req ( key, value, reply_to, correlation_id, headers ) channel = cast ( ChannelT, self. stream ( ). channel ) message = self. to_message ( key, value, partition = partition, offset = self. sent_offset, timestamp = timestamp, headers = headers, ) event : EventT = await channel. decode ( message ) await channel. put ( event ) self. sent_offset += 1 if wait : async with self. new_value_processed : await self. new_value_processed. wait ( ) if : raise self. _crash_reason from self. _crash_reason return event",False,self._crash_reason,self._crash_reason is not None,0.6541967391967773
44,"def __setattr__ ( self, name : str, val : Any ) : if name. startswith ( ""COMPUTED_"" ) : if : old_val = self [ name ] if old_val == val : return raise KeyError ( ""Computed attributed '{}' already exists "" ""with a different value! old={}, new={}."". format ( name, old_val, val ) ) self [ name ] = val else : super ( ). __setattr__ ( name, val )",True,name in self,name in self,0.6766839623451233
45,"def _try_parser ( self, parse_method ) : _order = self. _settings. DATE_ORDER try : if : if ""DATE_ORDER"" not in self. _settings. _mod_settings : self. _settings. DATE_ORDER = self. locale. info. get ( ""date_order"", _order ) date_obj, period = date_parser. parse ( self. _get_translated_date ( ), parse_method = parse_method, settings = self. _settings, ) self. _settings. DATE_ORDER = _order return DateData ( date_obj = date_obj, period = period, ) except ValueError : self. _settings. DATE_ORDER = _order return None",False,self._settings.PREFER_LOCALE_DATE_ORDER,parse_method == 'TAB > or parse_method == 'NONE',0.6579128503799438
46,"def _merge_substs ( self, subst, new_substs ) : subst = subst. copy ( ) for new_subst in new_substs : for name, var in new_subst. items ( ) : if : subst [ name ] = var elif subst [ name ] is not var : subst [ name ]. PasteVariable ( var ) return subst",True,name not in subst,name not in subst,0.6805781126022339
47,"def calculate ( self ) : for task in taskmods. DllList. calculate ( self ) : pid = task. UniqueProcessId if task. ObjectTable. HandleTableList : for handle in task. ObjectTable. handles ( ) : if not handle. is_valid ( ) : continue name = """" object_type = handle. get_object_type ( ) if object_type == ""File"" : file_obj = handle. dereference_as ( ""_FILE_OBJECT"" ) name = str ( file_obj. file_name_with_device ( ) ) elif object_type == ""Key"" : key_obj = handle. dereference_as ( ""_CM_KEY_BODY"" ) name = key_obj. full_key_name ( ) elif : proc_obj = handle. dereference_as ( ""_EPROCESS"" ) name = ""{0}({1})"". format ( proc_obj. ImageFileName, proc_obj. UniqueProcessId ) elif object_type == ""Thread"" : thrd_obj = handle. dereference_as ( ""_ETHREAD"" ) name = ""T",True,object_type == 'Process',object_type == 'Process',0.6585580110549927
48,"def _maybe_female ( self, path_elements, female, strict ) : if female : if self. has_gender_differences : elements = path_elements + [ ""female"" ] try : return self. _get_file ( elements, "".png"", strict = strict ) except ValueError : if strict : raise elif : raise ValueError ( ""Pokemon %s has no gender differences"" % self. species_id ) return self. _get_file ( path_elements, "".png"", strict = strict )",False,strict,self.has_gender_differences,0.6987574696540833
49,"def process_target ( decompiler, pos, partial = False ) : if pos is None : limit = None elif partial : limit = decompiler. targets. get ( pos, None ) else : limit = decompiler. targets. pop ( pos, None ) top = decompiler. stack. pop ( ) while True : top = simplify ( top ) if top is limit : break if isinstance ( top, ast. GenExprFor ) : break if not decompiler. stack : break top2 = decompiler. stack [ - 1 ] if isinstance ( top2, ast. GenExprFor ) : break if partial and hasattr ( top2, ""endpos"" ) and top2. endpos == pos : break if isinstance ( top2, ( ast. And, ast. Or ) ) : if top2. __class__ == top. __class__ : top2. nodes. extend ( top. nodes ) else : top2. nodes. append ( top ) elif : top2. else_ = top if hasattr ( top, ""endpos"" ) : top2. endpos = top. endpos if decompiler. targets. get ( top. endpos ) is top : decompiler. targets [ top. endpos ] = top2 else : throw",False,"isinstance(top2, ast.IfExp)",top2.endpos is not None,0.6477982997894287
50,"def test_inkey_0s_raw_ctrl_c ( ) : ""0-second inkey with raw allows receiving ^C."" pid, master_fd = pty. fork ( ) if pid is 0 : try : cov = __import__ ( ""cov_core_init"" ). init ( ) except ImportError : cov = None term = TestTerminal ( ) read_until_semaphore ( sys. __stdin__. fileno ( ), semaphore = SEMAPHORE ) with term. raw ( ) : os. write ( sys. __stdout__. fileno ( ), RECV_SEMAPHORE ) inp = term. inkey ( timeout = 0 ) os. write ( sys. __stdout__. fileno ( ), inp. encode ( ""latin1"" ) ) if : cov. stop ( ) cov. save ( ) os. _exit ( 0 ) with echo_off ( master_fd ) : os. write ( master_fd, SEND_SEMAPHORE ) read_until_semaphore ( master_fd ) os. write ( master_fd, u""\x03"". encode ( ""latin1"" ) ) stime = time. time ( ) output = read_until_eof ( master_fd ) pid, status = os. waitpid ( pid, 0 ) if os. environ. get ( ""TRAVIS"", None ) is not None : assert output in ( u"""", u""\x03"" ) assert os",False,cov is not None,os.stat(master_fd) is not None,0.666993260383606
51,"def doLabels ( ) : global difficulty, answers answers = [ ] for loop in range ( 10 ) : numa = random. randint ( 1 * difficulty * 2, 10 * difficulty * 2 ) numb = random. randint ( 1 * difficulty * 2, 10 * difficulty * 2 ) action = random. choice ( actions ) if action == ""+"" : answers. append ( numa + numb ) elif action == ""-"" : answers. append ( numa - numb ) elif action == ""*"" : answers. append ( numa * numb ) elif : answers. append ( numa / numb ) lab = str ( numa ) + "" "" + action + "" "" + str ( numb ) + "" = "" try : win. addLabel ( ""l"" + str ( loop ), lab, 2 + loop, 0 ) win. addEntry ( ""l"" + str ( loop ), 2 + loop, 1 ) except Exception : win. setLabel ( ""l"" + str ( loop ), lab ) win. enableEntry ( ""l"" + str ( loop ) ) win. setEntryBg ( ""l"" + str ( loop ), ""white"" ) win. setEntry ( ""l"" + str ( loop ), """" )",False,action == '/',action == /0.0,0.6787170171737671
52,"def _convert_timestamp ( timestamp, precision = None ) : if isinstance ( timestamp, Integral ) : return timestamp if isinstance ( _get_unicode ( timestamp ), text_type ) : timestamp = parse ( timestamp ) if isinstance ( timestamp, datetime ) : ns = timegm ( timestamp. utctimetuple ( ) ) * 1e9 + timestamp. microsecond * 1e3 if precision is None or precision == ""n"" : return ns elif : return ns / 1e3 elif precision == ""ms"" : return ns / 1e6 elif precision == ""s"" : return ns / 1e9 elif precision == ""m"" : return ns / 1e9 / 60 elif precision == ""h"" : return ns / 1e9 / 3600 raise ValueError ( timestamp )",False,precision == 'u',precision == 't',0.6641597747802734
53,"def gotHeaders ( self, headers ) : HTTPClientFactory. gotHeaders ( self, headers ) if self. requestedPartial : contentRange = headers. get ( b""content-range"", None ) if : self. requestedPartial = 0 return start, end, realLength = http. parseContentRange ( contentRange [ 0 ] ) if start!= self. requestedPartial : self. requestedPartial = 0",True,not contentRange,not contentRange,0.6694787740707397
54,"def _strip_extras_markers ( marker ) : if marker is None or not isinstance ( marker, ( list, tuple ) ) : raise TypeError ( ""Expecting a marker type, received {0!r}"". format ( marker ) ) markers_to_remove = [ ] for i, marker_list in enumerate ( marker ) : if isinstance ( marker_list, list ) : cleaned = _strip_extras_markers ( marker_list ) if : markers_to_remove. append ( i ) elif isinstance ( marker_list, tuple ) and marker_list [ 0 ]. value == ""extra"" : markers_to_remove. append ( i ) for i in reversed ( markers_to_remove ) : del marker [ i ] if i > 0 and marker [ i - 1 ] == ""and"" : del marker [ i - 1 ] return marker",False,not cleaned,cleaned,0.6836735606193542
55,"def updateStats ( self, stats ) : stats [ ""global"" ] [ ""day"" ] = date. fromordinal ( stats [ ""global"" ] [ ""day"" ] ) self. applyDict ( self. deck. _globalStats, stats [ ""global"" ] ) self. deck. _globalStats. toDB ( self. deck. s ) for record in stats [ ""daily"" ] : record [ ""day"" ] = date. fromordinal ( record [ ""day"" ] ) stat = Stats ( ) id = self. deck. s. scalar ( ""select id from stats where "" ""type = :type and day = :day"", type = 1, day = record [ ""day"" ], ) if : stat. fromDB ( self. deck. s, id ) else : stat. create ( self. deck. s, 1, record [ ""day"" ] ) self. applyDict ( stat, record ) stat. toDB ( self. deck. s )",False,id,id > 0,0.6899368166923523
56,"def rotate_selected ( self ) : opts = self. rotate_selected_opts if self. actions. pressed ( opts [ ""move_done_pressed"" ] ) : return ""main"" if self. actions. released ( opts [ ""move_done_released"" ] ) : return ""main"" if self. actions. pressed ( opts [ ""move_cancelled"" ] ) : self. undo_cancel ( ) return ""main"" if ( self. actions. mouse - opts [ ""mouselast"" ] ). length == 0 : return if time. time ( ) < opts [ ""lasttime"" ] + 0.05 : return opts [ ""mouselast"" ] = self. actions. mouse opts [ ""lasttime"" ] = time. time ( ) delta = Direction2D ( self. actions. mouse - opts [ ""center"" ] ) dx, dy = opts [ ""rotate_x"" ]. dot ( delta ), opts [ ""rotate_y"" ]. dot ( delta ) theta = math. atan2 ( dy, dx ) set2D_vert = self. set2D_vert for bmv, xy in opts [ ""bmverts"" ] : if : continue dxy = xy - opts [ ""center"" ] nx = dxy. x * math. cos ( theta ) - dxy. y * math. sin ( theta ) ny = dxy. x * math. sin ( theta ) + dxy. y * math. cos ( theta ) nxy = Point2D ( ( nx, ny ) ) + opts [ ""center"" ] set2D_vert ( bmv, nxy ) self. update_verts_faces ( v for v, _ in opts [ ""bmverts"" ] ) self. dirty ( )",False,not bmv.is_valid,opts['mouselast'] == False,0.6510132551193237
57,"def _cache_mem ( curr_out, prev_mem, mem_len, reuse_len = None ) : """"""cache hidden states into memory."""""" if mem_len is None or mem_len == 0 : return None else : if reuse_len is not None and reuse_len > 0 : curr_out = curr_out [ : reuse_len ] if : new_mem = curr_out [ - mem_len : ] else : new_mem = tf. concat ( [ prev_mem, curr_out ], 0 ) [ - mem_len : ] return tf. keras. backend. stop_gradient ( new_mem )",False,prev_mem is None,curr_out.size - mem_len > 0,0.650373101234436
58,"def data_download ( chunksize, filename, dataset, name ) : key = get_api_key ( ) api = shodan. Shodan ( key ) file = None try : files = api. data. list_files ( dataset ) for tmp in files : if : file = tmp break except shodan. APIError as e : raise click. ClickException ( e. value ) if not file : raise click. ClickException ( ""File not found"" ) response = requests. get ( file [ ""url"" ], stream = True ) filesize = response. headers. get ( ""content-length"", None ) if not filesize : filesize = file [ ""size"" ] else : filesize = int ( filesize ) chunk_size = 1024 limit = filesize / chunk_size if not filename : filename = ""{}-{}"". format ( dataset, name ) with open ( filename, ""wb"" ) as fout : with click. progressbar ( response. iter_content ( chunk_size = chunk_size ), length = limit ) as bar : for chunk in bar : if chunk : fout. write ( chunk ) click. echo ( click. style ( ""Download completed: {}"". format ( filename ), ""green"" ) )",False,tmp['name'] == name,tmp,0.6580382585525513
59,"def build ( opt ) : dpath = os. path. join ( opt [ ""datapath"" ], ""HotpotQA"" ) if not build_data. built ( dpath, version_string = VERSION ) : print ( ""[building data: "" + dpath + ""]"" ) if : build_data. remove_dir ( dpath ) build_data. make_dir ( dpath ) for downloadable_file in RESOURCES : downloadable_file. download_file ( dpath ) with PathManager. open ( os. path. join ( dpath, TRAIN_FILENAME ) ) as f : data = json. load ( f ) make_parlai_format ( dpath, ""train"", data ) with PathManager. open ( os. path. join ( dpath, DEV_DISTRACTOR_FILENAME ) ) as f : data = json. load ( f ) make_parlai_format ( dpath, ""valid_distractor"", data ) with PathManager. open ( os. path. join ( dpath, DEV_FULLWIKI_FILENAME ) ) as f : data = json. load ( f ) make_parlai_format ( dpath, ""valid_fullwiki"", data ) build_data. mark_done ( dpath, version_string = VERSION )",False,build_data.built(dpath),PathManager.exists(dpath),0.6459711790084839
60,"def scanvars ( reader, frame, locals ) : """"""Scan one logical line of Python and look up values of variables used."""""" vars, lasttoken, parent, prefix, value = [ ], None, None, """", __UNDEF__ for ttype, token, start, end, line in tokenize. generate_tokens ( reader ) : if : break if ttype == tokenize. NAME and token not in keyword. kwlist : if lasttoken == ""."" : if parent is not __UNDEF__ : value = getattr ( parent, token, __UNDEF__ ) vars. append ( ( prefix + token, prefix, value ) ) else : where, value = lookup ( token, frame, locals ) vars. append ( ( token, where, value ) ) elif token == ""."" : prefix += lasttoken + ""."" parent = value else : parent, prefix = None, """" lasttoken = token return vars",False,ttype == tokenize.NEWLINE,ttype == tokenize.BREAK,0.6613770127296448
61,"def queue_viewing ( request ) : addon_ids = request. GET. get ( ""addon_ids"" ) if not addon_ids : return { } viewing = { } user_id = request. user. id for addon_id in addon_ids. split ( "","" ) : addon_id = addon_id. strip ( ) key = get_reviewing_cache_key ( addon_id ) currently_viewing = cache. get ( key ) if : viewing [ addon_id ] = UserProfile. objects. get ( id = currently_viewing ). name return viewing",False,currently_viewing and currently_viewing != user_id,currently_viewing,0.6519143581390381
62,"def decompile ( decompiler ) : for pos, next_pos, opname, arg in decompiler. instructions : if pos in decompiler. targets : decompiler. process_target ( pos ) method = getattr ( decompiler, opname, None ) if : throw ( DecompileError ( ""Unsupported operation: %s"" % opname ) ) decompiler. pos = pos decompiler. next_pos = next_pos x = method ( * arg ) if x is not None : decompiler. stack. append ( x )",True,method is None,method is None,0.6689596176147461
63,"def add_directive ( self, name, obj, content = None, arguments = None, ** options ) : if isinstance ( obj, clstypes ) and issubclass ( obj, Directive ) : if : raise ExtensionError ( ""when adding directive classes, no "" ""additional arguments may be given"" ) directives. register_directive ( name, directive_dwim ( obj ) ) else : obj. content = content obj. arguments = arguments obj. options = options directives. register_directive ( name, obj )",False,content or arguments or options,arguments is None,0.6630731821060181
64,"def discover ( self, * objlist ) : ret = [ ] for l in self. splitlines ( ) : if len ( l ) < 5 : continue if : continue try : int ( l [ 2 ] ) int ( l [ 3 ] ) except : continue ret. append ( l [ 0 ] ) ret. sort ( ) for item in objlist : ret. append ( item ) return ret",False,l[0] == 'Filename',l[1] not in objlist,0.6621029376983643
65,"def pop ( self ) : if not HAS_SQL : return None tries = 3 wait = 0.1 try : conn, c = self. connect ( ) except sqlite3. Error : log. traceback ( logging. DEBUG ) return None heartbeat = None loop = True while loop and tries > - 1 : try : c. execute ( ""BEGIN IMMEDIATE"" ) c. execute ( ""SELECT * FROM {0} LIMIT 1"". format ( self. table_name ) ) row = c. fetchone ( ) if : id = row [ 0 ] heartbeat = Heartbeat ( json. loads ( row [ 1 ] ), self. args, self. configs, _clone = True ) c. execute ( ""DELETE FROM {0} WHERE id=?"". format ( self. table_name ), [ id ] ) conn. commit ( ) loop = False except sqlite3. Error : log. traceback ( logging. DEBUG ) sleep ( wait ) tries -= 1 try : conn. close ( ) except sqlite3. Error : log. traceback ( logging. DEBUG ) return heartbeat",False,row is not None,row,0.6631377935409546
66,"def _translate_bboxes ( self, results, offset ) : """"""Shift bboxes horizontally or vertically, according to offset."""""" h, w, c = results [ ""img_shape"" ] for key in results. get ( ""bbox_fields"", [ ] ) : min_x, min_y, max_x, max_y = np. split ( results [ key ], results [ key ]. shape [ - 1 ], axis = - 1 ) if self. direction == ""horizontal"" : min_x = np. maximum ( 0, min_x + offset ) max_x = np. minimum ( w, max_x + offset ) elif : min_y = np. maximum ( 0, min_y + offset ) max_y = np. minimum ( h, max_y + offset ) results [ key ] = np. concatenate ( [ min_x, min_y, max_x, max_y ], axis = - 1 )",True,self.direction == 'vertical',self.direction == 'vertical',0.6605814695358276
67,"def runScripts ( self ) : pythonCmd = shutil. which ( ""python3"" ) if pythonCmd is None : pythonCmd = shutil. which ( ""python"" ) if pythonCmd is None : pythonCmd = ""python"" if not self. noDownload : try : if : os. environ [ ""GIT_OFFICIAL_CLONE_LOCATION"" ] = self. assetRepoLocation self. runProcess ( [ pythonCmd, ""download_assets_git.py"" ] ) except subprocess. CalledProcessError : print ( ""check that download_assets_git.py is working correctly"" ) sys. exit ( 1 ) print ( ""\n"" ) try : self. runProcess ( [ pythonCmd, ""compile_targets.py"" ] ) except subprocess. CalledProcessError : print ( ""check that compile_targets.py is working correctly"" ) sys. exit ( 1 ) print ( ""\n"" ) try : self. runProcess ( [ pythonCmd, ""compile_models.py"" ] ) except subprocess. CalledProcessError : print ( ""check that compile_models.py is working correctly"" ) sys. exit ( 1 ) print ( ""\n"" ) try : self. runProcess ( [ pythonCmd, ""compile_proxies.py"" ] ) except subprocess. CalledProcessError : print ( ""check that compile_proxies.py is working correctly"" ) sys. exit ( 1 ) print ( ""\n"" )",False,not self.assetRepoLocation is None,self.assetRepoLocation is not None,0.6513268947601318
68,"def assert_backend ( self, expected_translated, language = ""cs"" ) : """"""Check that backend has correct data."""""" translation = self. get_translation ( language ) translation. commit_pending ( ""test"", None ) store = translation. component. file_format_cls ( translation. get_filename ( ), None ) messages = set ( ) translated = 0 for unit in store. content_units : id_hash = unit. id_hash self. assertFalse ( id_hash in messages, ""Duplicate string in in backend file!"" ) if : translated += 1 self. assertEqual ( translated, expected_translated, ""Did not found expected number of translations ({}!= {})."". format ( translated, expected_translated ), )",False,unit.is_translated(),expected_translated is not None and translated < expected_translated,0.656049370765686
69,"def process_results_file ( f, region ) : try : formatted_findings_list = [ ] results = results_file_to_dict ( f ) aws_account_id = results [ ""account_id"" ] creation_date = datetime. datetime. strptime ( results [ ""last_run"" ] [ ""time"" ], ""%Y-%m-%d %H:%M:%S%z"" ). isoformat ( ) for service in results. get ( ""service_list"" ) : for finding_key, finding_value in ( results. get ( ""services"", { } ). get ( service ). get ( ""findings"" ). items ( ) ) : if : formatted_finding = format_finding_to_securityhub_format ( aws_account_id, region, creation_date, finding_key, finding_value, ) formatted_findings_list. append ( formatted_finding ) return formatted_findings_list except Exception as e : print_exception ( f""Unable to process results file: {e}"" )",False,finding_value.get('items'),"hasattr(self, 'securityhub_format')",0.6513504981994629
70,"def _open_archive ( self, archive_name ) : try : archive = None if tarfile. is_tarfile ( archive_name ) : archive = tarfile. open ( archive_name, ""r"", bufsize = CHUNK_SIZE * 2 ) elif zipfile. is_zipfile ( archive_name ) : archive = ZipThatPretendsToBeTar ( archive_name, ""r"" ) else : self. emit ( ""error"", None, _ ( ""Downloaded file is corrupted."" ) ) for pathname in archive. getnames ( ) : path = pathname. replace ( ""\\"", ""/"" ). split ( ""/"" ) [ 1 : ] if len ( path ) < 1 : continue filename = path [ 0 ] if : tinfo = archive. getmember ( pathname ) log. debug ( ""Extracting '%s'..."" % ( pathname, ) ) if tinfo. isfile ( ) : compressed = archive. extractfile ( pathname ) try : os. makedirs ( os. path. split ( self. target ) [ 0 ] ) except Exception : ",False,"filename in ('syncthing', 'syncthing.exe')",filename.startswith('/'),0.6522713899612427
71,"def worker_callback ( self, worker ) : process_request_count = 0 while not worker. stop_event. is_set ( ) : worker. process_pause_signal ( ) try : result, task = self. input_queue. get ( True, 0.1 ) except queue. Empty : pass else : worker. is_busy_event. set ( ) try : process_request_count += 1 try : handler = self. spider. find_task_handler ( task ) except NoTaskHandler as ex : ex. tb = format_exc ( ) self. spider. task_dispatcher. input_queue. put ( ( ex, task, { ""exc_info"" : sys. exc_info ( ) } ) ) self. spider. stat. inc ( ""parser:handler-not-found"" ) else : self. execute_task_handler ( handler, result, task ) self. spider. stat. inc ( ""parser:handler-processed"" ) if : if process_request_count >= self. spider. parser_requests_per_process : self. flatten_mapping ( node ) ret = self. construct_pairs ( node ) keys = [ d [ 0 ] for d in ret ] keys_sorted = sorted ( keys, key = _natsort_key ) for key in keys : expected = keys_sorted. pop ( 0 ) if : raise ConstructorError ( None, None, ""keys out of order: "" ""expected {} got {} at {}"". format ( expected, key, node. start_mark ), ) return dict ( ret )",False,key != expected,expected not in node.end_mark,0.6784542798995972
73,def __iter__ ( self ) : consumed = 0 skipped = 0 for query in self. queries : query_copy = copy ( query ) if : query = query. limit ( self. _limit - consumed ) if self. _offset : query = query. offset ( self. _offset - skipped ) obj_count = 0 for obj in query : consumed += 1 obj_count += 1 yield obj if not obj_count : skipped += query_copy. count ( ) else : skipped += obj_count,True,self._limit,self._limit,0.6864940524101257
74,"def refresh ( self ) : self. window. erase ( ) for index, line in enumerate ( self. lines ) : if : continue elif index > self. head_position + self. text_height - 1 : continue x = 0 y = index - self. head_position if len ( line ) > 0 : self. window. addstr ( y, x, line ) xpos = self. width for index, item in enumerate ( self. menu_items ) : if index == self. menu_position : mode = curses. color_pair ( 3 ) else : mode = curses. color_pair ( 2 ) self. window. addstr ( self. text_height + 1, xpos - len ( item [ 0 ] ) - 4, item [ 0 ], mode ) xpos = xpos - len ( item [ 0 ] ) - 4 self. render_scroll_bar ( ) self. window. refresh ( ) self. panel. top ( ) self. panel. show ( ) curses. panel. update_panels ( ) curses. doupdate ( )",False,index < self.head_position,self.is_empty(line),0.6581344604492188
75,"def process_one_node ( self, p, result, environment ) : """"""Handle one node."""""" c = self. c if not self. code_only : result. append ( self. underline2 ( p ) ) d = c. scanAllDirectives ( p ) if self. verbose : g. trace ( d. get ( ""language"" ) or ""None"", "":"", p. h ) s, code = self. process_directives ( p. b, d ) result. append ( s ) result. append ( ""\n\n"" ) if code and self. execcode : s, err = self. exec_code ( code, environment ) if not self. restoutput and s. strip ( ) : s = self. format_output ( s ) result. append ( s ) if : err = self. format_output ( err, prefix = ""**Error**::"" ) result. append ( err )",False,err,err and err.strip(),0.6918526887893677
76,"def getReferences ( view, name = """" ) : """"""Find all reference definitions."""""" refs = [ ] name = re. escape ( name ) if name == """" : refs. extend ( view. find_all ( r""(?<=^\[)([^\]]+)(?=\]:)"", 0 ) ) else : refs. extend ( view. find_all ( r""(?<=^\[)(%s)(?=\]:)"" % name, 0 ) ) regions = refs ids = { } for reg in regions : name = view. substr ( reg ). strip ( ) key = name. lower ( ) if : ids [ key ]. regions. append ( reg ) else : ids [ key ] = Obj ( regions = [ reg ], label = name ) return ids",True,key in ids,key in ids,0.6798847913742065
77,"def download_chunk ( args ) : global counter x, y, latest, level = args url_format = ( ""https://himawari8-dl.nict.go.jp/himawari8/img/D531106/{}d/{}/{}_{}_{}.png"" ) url = url_format. format ( level, WIDTH, strftime ( ""%Y/%m/%d/%H%M%S"", latest ), x, y ) tiledata = download ( url ) if tiledata. __sizeof__ ( ) == 2867 : sys. exit ( ""No image available for {}."". format ( strftime ( ""%Y/%m/%d %H:%M:%S"", latest ) ) ) with counter. get_lock ( ) : counter. value += 1 if : print ( ""Downloading tiles: completed."" ) else : print ( ""Downloading tiles: {}/{} completed..."". format ( counter. value, level * level ) ) return x, y, tiledata",False,counter.value == level * level,level == 1,0.6567804217338562
78,"def save ( self, mute = False, visited = None, * args, ** kwargs ) : from ralph. ui. views. common import SAVE_PRIORITY visited = visited or set ( ) visited. add ( self ) priority = kwargs. get ( ""priority"" ) change_author = kwargs. get ( ""user"" ) if priority is None : priority = SAVE_PRIORITY changes = [ ] for obj, fields in self. get_synced_objs_and_fields ( ) : if obj in visited : continue for f in fields : setattr ( obj, f, getattr ( self, f ) ) obj. save ( visited = visited, mute = True, priority = priority ) if not mute : changes = [ ] try : old_obj = type ( self ). objects. get ( pk = self. pk ) except type ( self ). DoesNotExist : old_obj = None for field in self. _meta. fields : if field. name not in SYNC_FIELD_MIXIN_NOTIFICATIONS_WHITELIST : continue old_value = getattr ( old_obj, field. name ) if old_obj else None new_value = getattr ( self, field. name ) if : changes. append ( ChangeTuple ( field. name, old_value, new_value ) ) fields_synced_signal. send_robust ( sender = self, changes = changes, change_author = change_author ",False,old_value != new_value,new_value,0.655549168586731
79,"def tail ( f, n, grep ) : if n <= 0 : raise ValueError ( ""Invalid amount of lines: {}"". format ( n ) ) BUFSIZ = 4096 CR = ""\n"" data = """" f. seek ( 0, os. SEEK_END ) fsize = f. tell ( ) block = - 1 exit = False retval = [ ] while not exit : step = block * BUFSIZ if abs ( step ) >= fsize : f. seek ( 0 ) newdata = f. read ( BUFSIZ - ( abs ( step ) - fsize ) ) exit = True else : f. seek ( step, os. SEEK_END ) newdata = f. read ( BUFSIZ ) data = newdata + data if len ( retval ) + data. count ( CR ) >= n : if : lines = data. splitlines ( ) llines = len ( lines ) for idx in xrange ( llines - 1 ) : line = lines [ llines - idx - 1 ] if grep. search ( line ) : retval. insert ( 0, line ) if len ( retval ) >= n : break if len ( retval ) >= n : logger. debug ( ""getattr -> '%s' '%s'"" % ( path, fh ) ) if self. cache. is_deleting ( path ) : logger. debug ( ""getattr path '%s' is deleting -- throwing ENOENT"" % ( path ) ) raise FuseOSError ( errno. ENOENT ) with self. cache. get_lock ( path ) : cache = True recheck_s3 = False if self. cache. is_empty ( path ) : logger. debug ( ""getattr <- '%s' '%s' cache ENOENT"" % ( path, fh ) ) if : cache = False recheck_s3 = True logger. debug ( ""getattr rechecking on s3 <- '%s' '%s' cache ENOENT"" % ( path, fh ) ) else : raise FuseOSError ( errno. ENOENT ) attr = self. get_metadata ( path, ""attr"" ) if attr == None : logger. debug ( ""getattr <- '%s' '%s' ENOENT"" % ( path, fh ) ) raise FuseOSError ( errno. ENOENT ) if attr [ ""st_size"" ] == 0 and stat. S_ISDIR ( attr [ ""st_mode"" ] ) : attr [ ""st_size"" ] = 4096 attr [ ""st_nlink"" ] = 1 if self. st_blksize : commands, error = None ) : for command in commands : bad_return = error if error else ""execute {}"". format ( command ) output, return_code = execute_shell_command_get_return_code ( command ) if : raise InstallationError ( ""Failed to {}\n{}"". format ( bad_return, output ) )",True,return_code != 0,return_code != 0,0.6576579213142395
82,"def image_diff ( test, ref, key = ""image"", prompt_num = None ) : """"""Diff two base64-encoded images."""""" if test == ref : return True, """" message = ""Mismatch in %s output"" % key if prompt_num is not None : message += "" (#%d)"" % prompt_num try : test = base64_to_array ( test ) ref = base64_to_array ( ref ) if : import numpy as np diff = np. abs ( test - ref ). mean ( ) * 100 if diff < 5 : return True, """" message += "": %.3g%% difference"" % diff else : message += "": Test image (%dx%d)"" % test. shape [ : 2 ] message += ""; Ref image (%dx%d)"" % ref. shape [ : 2 ] except ImportError : pass return False, message",False,test.shape == ref.shape,test > ref,0.6618626117706299
abspath ( name ) if name == "".."" : self. path = ""/"". join ( self. path. split ( ""/"" ) [ : - 1 ] ) if self. path == """" : self. path = ""/"" return try : self. client. files_list_folder ( path, recursive = False ) except dropbox. exceptions. ApiError as api_e : e = api_e. reason if e. is_other ( ) : raise OperationFailure ( repr ( e ) ) elif e. is_path ( ) : pe = e. get_path ( ) if pe. is_not_folder ( ) : raise IsFile ( ) elif : raise OperationFailure ( ""Not Found!"" ) else : raise OperationFailure ( repr ( e ) ) else : raise OperationFailure ( ""Not found!"" ) else : self. path = path",False,pe.is_not_found(),not pe.is_file(),0.6508831977844238 85,"def concatenateCharacterTokens ( tokens ) : pendingCharacters = [ ] for token in tokens : type = token [ ""type"" ] if : pendingCharacters. append ( token [ ""data"" ] ) else : if pendingCharacters : yield { ""type"" : ""Characters"", ""data"" : """". join ( pendingCharacters ) } pendingCharacters = [ ] yield token if pendingCharacters : yield { ""type"" : ""Characters"", ""data"" : """". join ( pendingCharacters ) }",False,"type in ('Characters', 'SpaceCharacters')",type == 'Characters',0.6518173217773438 86,"def verify_output ( actual, expected ) : actual = _read_file ( actual, ""Actual"" ) expected = _read_file ( join ( CURDIR, expected ), ""Expected"" ) if len ( expected )!= len ( actual ) : raise AssertionError ( ""Lengths differ. Expected %d lines but got %d"" % ( len ( expected ), len ( actual ) ) ) for exp, act in zip ( expected, actual ) : tester = fnmatchcase if ""*"" in exp else eq if : raise AssertionError ( ""Lines differ.\nExpected: %s\nActual: %s"" % ( exp, act ) )",False,"not tester(act.rstrip(), exp.rstrip())",tester != act,0.6507447957992554 87,"def forward ( self, inputs, feat_layers ) : out = { } res = self. conv_bn_init ( inputs ) res = fluid. layers. relu ( res ) res = self. maxpool ( res ) for i in range ( len ( self. block_collect ) ) : for layer in self. block_collect [ i ] : res = layer ( res ) name = ""block{}"". format ( i ) if : out [ name ] = res if len ( out ) == len ( feat_layers ) : return out res = self. global_pool ( res ) B, C, _, _ = res. shape res = fluid. layers. reshape ( res, [ B, C ] ) res = self. fc ( res ) out [ ""fc"" ] = res return out",True,name in feat_layers,name in feat_layers,0.6611325740814209 88,"def _test_forever ( self, tests ) : while True : for test_name in tests : yield test_name if : return if self. ns. fail_env_changed and self. environment_changed : return",False,self.bad,not self.ns.ok_test_name,0.6596339344978333 89,def init_wake_button_switch ( self ) : try : import RPi. GPIO if : self. wake_button_switch. set_active ( True ) else : self. wake_button_switch. set_active ( False ) except ImportError : self. wake_button_switch. set_sensitive ( False ) except RuntimeError : self. wake_button_switch. set_sensitive ( False ),False,susicfg.get('wakebutton') == 'enabled',RPi.GPIO.get_active(),0.663475513458252 90,"def transform ( self, node, results ) : names_inserted = set ( ) testlist = results [ ""args"" ] args = testlist. children new_args = [ ] iterator = enumerate ( args ) for idx, arg in iterator : if : if idx < len ( args ) - 1 and args [ idx + 1 ]. type == token. COMMA : next ( iterator ) continue else : new_args. append ( arg ) if arg. type == token. NAME : names_inserted. add ( arg. value ) if new_args and new_args [ - 1 ]. type == token. COMMA : del new_args [ - 1 ] if len ( new_args ) == 1 : atom = testlist. parent new_args [ 0 ]. prefix = atom. prefix atom. 
replace ( new_args [ 0 ] ) else : args [ : ] = new_args node. changed ( )",False,arg.type == token.NAME and arg.value in names_inserted,arg.type == token.COMMA,0.6539357900619507 91,"def scan ( self, targets ) : for target in targets : target. print_infos ( ) if : self. target [ ""other"" ]. append ( target ) if self. match ( target ) : return target return None",False,self.is_interesting(target),target.get('other'),0.6490728855133057 92,"def decode ( self, segment ) : numbers = [ ] accu = 0 weight = 1 for char in segment : ordinal = self. decoding [ char ] isContinuation = ordinal >= 32 if isContinuation : ordinal -= 32 if : sign = - 1 if ordinal % 2 else 1 ordinal //= 2 accu += weight * ordinal if isContinuation : if ( weight == 1 ) : weight = 16 else : weight *= 32 else : numbers. append ( sign * accu ) accu = 0 weight = 1 return numbers",False,weight == 1,ordinal % 2,0.6824924945831299 93,"def new_f ( * args, ** kwargs ) : try : D = pickle. load ( open ( filename, ""rb"" ) ) cache_exists = True except : D = { } cache_exists = False Dargs = D. get ( ""args"" ) Dkwargs = D. get ( ""kwargs"" ) try : args_match = args == Dargs except : args_match = np. all ( [ np. all ( a1 == a2 ) for ( a1, a2 ) in zip ( Dargs, args ) ] ) try : kwargs_match = kwargs == Dkwargs except : kwargs_match = ( sorted ( Dkwargs. keys ( ) ) == sorted ( kwargs. keys ( ) ) ) and ( np. all ( [ np. all ( Dkwargs [ key ] == kwargs [ key ] ) for key in kwargs ] ) ) if ( type ( D ) == dict and D. get ( ""funcname"" ) == f. __name__ and args_match and kwargs_match ) : if verbose : print ( ""@pickle_results: using precomputed "" ""results from '%s'"" % filename ) retval = D [ ""retval"" ] else : if verbose : print ( ""@pickle_results: computing results "" ""and saving to '%s'"" % filename ) if : print ( "" warning: cache file '%s' exists"" % filename ) print ( "" - args match: %s"" % args_match ) None : """"""Update the configuration of an interface."""""" interface = self. _get_interface ( request. match_info. get ( ATTR_INTERFACE ) ) body = await api_validate ( SCHEMA_UPDATE, request ) if not body : raise APIError ( ""You need to supply at least one option to update"" ) for key, config in body. items ( ) : if key == ATTR_IPV4 : interface. ipv4 = attr. evolve ( interface. ipv4 or IpConfig ( InterfaceMethod. STATIC, [ ], None, [ ] ), ** config, ) elif key == ATTR_IPV6 : interface. ipv6 = attr. evolve ( interface. ipv6 or IpConfig ( InterfaceMethod. STATIC, [ ], None, [ ] ), ** config, ) elif key == ATTR_WIFI : interface. wifi = attr. evolve ( interface. wifi or WifiConfig ( WifiMode. INFRASTRUCTURE, """", AuthMethod. OPEN, None, None ), ** config, ) elif : interface. enabled = config await asyncio. shield ( self. sys_host. network. apply_changes ( interface ) )",True,key == ATTR_ENABLED,key == ATTR_ENABLED,0.6689105033874512 95,"def cache_dst ( self ) : final_dst = None final_linenb = None for linenb, assignblk in enumerate ( self ) : for dst, src in viewitems ( assignblk ) : if : if final_dst is not None : raise ValueError ( ""Multiple destinations!"" ) final_dst = src final_linenb = linenb self. _dst = final_dst self. _dst_linenb = final_linenb return final_dst",False,dst.is_id('IRDst'),linenb is not None and src is not None,0.6583995819091797 96,"def _tab_only_directories ( self ) : from os. path import dirname, basename, expanduser, join, isdir line = parse ( self. line ) cwd = self. fm. env. cwd. path try : rel_dest = line. rest ( 1 ) except IndexError : rel_dest = """" if rel_dest. 
startswith ( ""~"" ) : rel_dest = expanduser ( rel_dest ) abs_dest = join ( cwd, rel_dest ) abs_dirname = dirname ( abs_dest ) rel_basename = basename ( rel_dest ) rel_dirname = dirname ( rel_dest ) try : if rel_dest. endswith ( ""/"" ) or rel_dest == """" : _, dirnames, _ = os. walk ( abs_dest ). next ( ) else : _, dirnames, _ = os. walk ( abs_dirname ). next ( ) dirnames = [ dn for dn in dirnames if dn. startswith ( rel_basename ) ] except ( OSError, StopIteration ) : pass else : dirnames. sort ( ) if : return if len ( dirnames ) == 1 : return line + join ( rel_dirname, dirnames [ 0 ] ) + ""/"" return ( line + join ( rel_dirname, dirname ) for dirname in dirnames )",True,len(dirnames) == 0,len(dirnames) == 0,0.6558115482330322 97,"def parse ( self, backend, x509_obj ) : extensions = [ ] seen_oids = set ( ) for i in range ( self. ext_count ( backend, x509_obj ) ) : ext = self. get_ext ( backend, x509_obj, i ) backend. openssl_assert ( ext!= backend. _ffi. NULL ) crit = backend. _lib. X509_EXTENSION_get_critical ( ext ) critical = crit == 1 oid = x509. ObjectIdentifier ( _obj2txt ( backend, ext. object ) ) if : raise x509. DuplicateExtension ( ""Duplicate {0} extension found"". format ( oid ), oid ) try : handler = self. handlers [ oid ] except KeyError : if critical : raise x509. UnsupportedExtension ( ""Critical extension {0} is not currently supported"". format ( oid ), oid ) else : if self. unsupported_exts and oid in self. unsupported_exts : ext_data = ext else : ext_data = backend. _lib. X509V3_EXT_d2i ( ext ) if ext_data == backend. _ffi. NULL : backend. _consume_errors ( ) <",True,oid in seen_oids,oid in seen_oids,0.6713815927505493 98,"def __init__ ( self, parent, dir, mask, with_dirs = True ) : filelist = [ ] dirlist = [ "".."" ] self. dir = dir self. file = """" mask = mask. upper ( ) pattern = self. MakeRegex ( mask ) for i in os. listdir ( dir ) : if i == ""."" or i == "".."" : continue path = os. path. join ( dir, i ) if : dirlist. append ( i ) continue path = path. upper ( ) value = i. upper ( ) if pattern. match ( value ) is not None : filelist. append ( i ) self. files = filelist if with_dirs : self. dirs = dirlist",False,os.path.isdir(path),path.upper() == '',0.6477745771408081 99,"def initialize ( self ) : nn. init. xavier_uniform_ ( self. linear. weight. data ) if self. linear. bias is not None : self. linear. bias. data. uniform_ ( - 1.0, 1.0 ) if self. self_layer : nn. init. xavier_uniform_ ( self. linear_self. weight. data ) if : self. linear_self. bias. data. uniform_ ( - 1.0, 1.0 )",False,self.linear_self.bias is not None,self.self_layer,0.6544812917709351 100,"def datestamp ( ) : """"""Enter today's date as the release date in the changelog."""""" dt = datetime. datetime. now ( ) stamp = ""({} {}, {})"". format ( dt. strftime ( ""%B"" ), dt. day, dt. year ) marker = ""(in development)"" lines = [ ] underline_length = None with open ( CHANGELOG ) as f : for line in f : if : line = line. replace ( marker, stamp ) lines. append ( line ) underline_length = len ( line. strip ( ) ) elif underline_length : lines. append ( ""-"" * underline_length + ""\n"" ) underline_length = None else : lines. append ( line ) with open ( CHANGELOG, ""w"" ) as f : for line in lines : f. write ( line )",False,marker in line,marker,0.6756542921066284 101,"def go ( self, pyfile ) : for line in open ( pyfile ) : if self. mode == ""in def"" : self. text += "" "" + line. strip ( ) if line. strip ( ). endswith ( "":"" ) : if self. definition ( self. text ) : self. text = """" self. 
mode = ""in func"" else : self. text = """" self. mode = ""normal"" elif : if '""""""' in line : self. text += line. strip ( ). strip ( '""' ) self. mode = ""in doc"" if line. count ( '""""""' ) == 2 : self. mode = ""normal"" self. docstring ( self. text ) self. text = """" else : self. mode = ""normal"" elif self. mode == ""in doc"" : self. text += "" "" + line if '""""""' in line : self. mode = ""normal"" self. docstring ( self. text. strip ( ). strip ( '""' ) ) self. text = """" ",False,self.mode == 'in func',self.mode == 'in doc',0.6566042900085449 102,"def _convert_upsample ( inexpr, keras_layer, etab ) : _check_data_format ( keras_layer ) upsample_type = type ( keras_layer ). __name__ params = { } if upsample_type == ""UpSampling1D"" : h = keras_layer. size params [ ""scale_h"" ] = h elif upsample_type == ""UpSampling2D"" : h, w = keras_layer. size if : raise tvm. error. OpAttributeInvalid ( ""Height must equal width for operator Upsample."" ) params [ ""scale_h"" ] = h params [ ""scale_w"" ] = h if hasattr ( keras_layer, ""interpolation"" ) : interpolation = keras_layer. interpolation if interpolation == ""nearest"" : params [ ""method"" ] = ""nearest_neighbor"" else : params [ ""method"" ] = ""bilinear"" else : raise tvm. error. OpNotImplemented ( ""Operator {} is not supported for frontend Keras."". format ( upsample_type ) ) params [ ""layout"" ] = etab. data_layout out = _op. nn. upsampling ( inexpr, ** params ) return out",True,h != w,h != w,0.6931537985801697 103,"def apply_transformation ( self, cli, document, lineno, source_to_display, tokens ) : key = ( cli. render_counter, document. text, document. cursor_position ) positions = self. _positions_cache. get ( key, lambda : self. _get_positions_to_highlight ( document ) ) if positions : for row, col in positions : if : col = source_to_display ( col ) tokens = explode_tokens ( tokens ) token, text = tokens [ col ] if col == document. cursor_position_col : token += ( "":"", ) + Token. MatchingBracket. Cursor else : token += ( "":"", ) + Token. MatchingBracket. Other tokens [ col ] = ( token, text ) return Transformation ( tokens )",False,row == lineno,source_to_display and col != -1,0.6775771379470825 104,"def download ( self, * args, ** kwargs ) : fmt = self. query. get ( ""format"", ""spec"" ) version = self. query. get ( ""version"", None ) branch = self. query. get ( ""branch"", None ) selector = self. query. get ( ""selector"" ) or ""css"" spider_id = self. kwargs. get ( ""spider_id"", None ) spiders = [ spider_id ] if spider_id is not None else None try : self. project except InvalidFilename as e : raise JsonApiNotFoundError ( str ( e ) ) if hasattr ( self. storage, ""checkout"" ) and ( version or branch ) : try : if : version = self. commit_from_short_sha ( version ). id self. storage. checkout ( version, branch ) except IOError : pass except ValueError as e : raise JsonApiNotFoundError ( str ( e ) ) archiver = CodeProjectArchiver if fmt == u""code"" else ProjectArchiver try : content = archiver ( self. storage ). archive ( spiders, selector = selector ) except IOError as e : raise JsonApiNotFoundError ( str ( e ) ) try : name = u""{}.zip"". format ( self. project. name ) except UnicodeEncodeError : name = str ( self. project. id ) return FileResponse ( name, content, status = HTTP_200_OK )",False,version and len(version) < 40,version,0.653167724609375 105,"def check_smtp_login ( self ) : if self. urlwatch_config. smtp_login : config = self. urlwatcher. config_storage. 
config [ ""report"" ] [ ""email"" ] smtp_config = config [ ""smtp"" ] success = True if not config [ ""enabled"" ] : print ( ""Please enable e-mail reporting in the config first."" ) success = False if config [ ""method"" ]!= ""smtp"" : print ( ""Please set the method to SMTP for the e-mail reporter."" ) success = False if not smtp_config. get ( ""auth"", smtp_config. get ( ""keyring"", False ) ) : print ( ""Authentication must be enabled for SMTP."" ) success = False smtp_hostname = smtp_config [ ""host"" ] if not smtp_hostname : print ( ""Please configure the SMTP hostname in the config first."" ) success = False smtp_username = smtp_config. get ( ""user"", None ) or config [ ""from"" ] if not smtp_username : print ( ""Please configure the SMTP user in the config first."" ) success = False if not success : sys. exit ( 1 ) if ""insecure_password"" in smtp_config : print ( 'The password is already set in the config (key ""insecure_password"").' ) sys. exit ( 0 ) if : <",False,"have_password(smtp_hostname, smtp_username)",not success,0.6479407548904419 106,"def reverse_url ( self, name : str, * args : Any ) -> Optional [ str ] : if name in self. named_rules : return self. named_rules [ name ]. matcher. reverse ( * args ) for rule in self. rules : if : reversed_url = rule. target. reverse_url ( name, * args ) if reversed_url is not None : return reversed_url return None",False,"isinstance(rule.target, ReversibleRouter)",rule.target,0.6487054824829102 107,"def handle ( self, * args, ** options ) : if not settings. ST_BASE_DIR. endswith ( ""spirit"" ) : raise CommandError ( ""settings.ST_BASE_DIR is not the spirit root folder, are you overriding it?"" ) for root, dirs, files in os. walk ( settings. ST_BASE_DIR ) : if : continue with utils. pushd ( root ) : call_command ( ""makemessages"", stdout = self. stdout, stderr = self. stderr, ** options ) self. stdout. write ( ""ok"" )",False,'locale' not in dirs,not files,0.6600162982940674 108,"def _declare ( self, name, obj, included = False, quals = 0 ) : if name in self. _declarations : prevobj, prevquals = self. _declarations [ name ] if prevobj is obj and prevquals == quals : return if : raise api. FFIError ( ""multiple declarations of %s (for interactive usage, "" ""try cdef(xx, override=True))"" % ( name, ) ) assert ""__dotdotdot__"" not in name. split ( ) self. _declarations [ name ] = ( obj, quals ) if included : self. _included_declarations. add ( obj )",False,not self._override,nexus.interactive and quals > 0,0.6699138879776001 109,"def EnumerateUsersFromClient ( args ) : """"""Enumerates all the users on this system."""""" del args users = _ParseWtmp ( ) for user, last_login in users. items ( ) : username, _ = user. split ( b""\x00"", 1 ) username = username. decode ( ""utf-8"" ) if : if last_login < 0 : last_login = 0 result = rdf_client. User ( username = username, last_logon = last_login * 1000000 ) try : pwdict = pwd. getpwnam ( username ) result. homedir = utils. SmartUnicode ( pwdict. pw_dir ) result. full_name = utils. SmartUnicode ( pwdict. pw_gecos ) result. uid = pwdict. pw_uid result. gid = pwdict. pw_gid result. shell = utils. SmartUnicode ( pwdict. pw_shell ) except KeyError : pass yield result",True,username,username,0.6927834153175354 110,"def _get_vif_port ( self, name ) : external_ids = self. db_get_map ( ""Interface"", name, ""external_ids"" ) if ""iface-id"" in external_ids and ""attached-mac"" in external_ids : return self. 
_vifport ( name, external_ids ) elif ""xs-vif-uuid"" in external_ids and ""attached-mac"" in external_ids : ofport = self. db_get_val ( ""Interface"", name, ""ofport"" ) if : return VifPort ( name, ofport, iface_id, external_ids [ ""attached-mac"" ], self )",False,iface_id = self.get_xapi_iface_id(external_ids['xs-vif-uuid']),ofport is not None,0.6441446542739868 111,"def checkpoint ( vd ) : vd. undoPids. append ( os. getpid ( ) ) pid = os. fork ( ) if pid > 0 : before = time. time ( ) pid, st = os. wait ( ) if : vd. scr. clear ( ) vd. undoPids. remove ( os. getpid ( ) ) raise Exception ( ""undid %ss"" % int ( time. time ( ) - before ) )",False,st == 42,pid > 0,0.66758131980896 112,"def prune ( self, study : ""optuna.study.Study"", trial : ""optuna.trial.FrozenTrial"" ) -> bool : step = trial. last_step if step is None : return False rung = _get_current_rung ( trial ) value = trial. intermediate_values [ step ] trials : Optional [ List [ ""optuna.trial.FrozenTrial"" ] ] = None while True : if self. _min_resource is None : if : trials = study. get_trials ( deepcopy = False ) self. _min_resource = _estimate_min_resource ( trials ) if self. _min_resource is None : return False assert self. _min_resource is not None rung_promotion_step = self. _min_resource * ( self. _reduction_factor ** ( self. _min_early_stopping_rate + rung ) ) if step < rung_promotion_step : return False if math. isnan ( value ) : return True if : trials = study. get_trials ( deepcopy = False ) rung_key = _completed_rung_key ( rung ) study. _storage. set_trial_system_attr ( trial. _trial_id, rung_key, value ) competing = _get_competing_values ( trials, value, rung_key ) if len ( competing ) <= self. _bootstrap_count",False,trials is None,study.has_trials(),0.6724114418029785 113,"def __call__ ( self, loss, sess ) : if self. sign * loss > self. sign * self. best : if : tf. logging. info ( ""Previous best %s: %.4f."", self. label, self. best ) tf. gfile. MakeDirs ( os. path. dirname ( self. save_path ) ) self. saver. save ( sess, self. save_path ) tf. logging. info ( ""Storing best model so far with loss %.4f at %s."" % ( loss, self. save_path ) ) self. best = loss self. age = 0 self. true_age = 0 else : self. age += 1 self. true_age += 1 if self. age > self. patience : sess. run ( [ self. decay_op ] ) self. age = 0",False,FLAGS.log_progress,self.save_path and (not self.save_path.exists()),0.6573533415794373 114,"def filter ( self, projects = None, tags = None, ignore_projects = None, ignore_tags = None, span = None, include_partial_frames = False, ) : for frame in self. _rows : if : continue if ignore_projects is not None and frame. project in ignore_projects : continue if tags is not None and not any ( tag in frame. tags for tag in tags ) : continue if ignore_tags is not None and any ( tag in frame. tags for tag in ignore_tags ) : continue if span is None : yield frame elif frame in span : yield frame elif include_partial_frames and span. overlaps ( frame ) : start = span. start if frame. start < span. start else frame. start stop = span. stop if frame. stop > span. stop else frame. stop yield frame. _replace ( start = start, stop = stop )",False,projects is not None and frame.project not in projects,projects is None,0.6588723659515381 115,"def parse ( wb ) : rst, key = [ ], { } for ws in wb. worksheets : rst. append ( ( ws. title, [ ] ) ) for row in ws. rows : for cell in row : if not isinstance ( cell. value, str ) : continue if : continue cont = cell. value [ 1 : - 1 ]. strip ( ) tp = cont.
split ( "" "" ) [ 0 ] cont = cont [ len ( tp ) : ]. strip ( ) note, value = ""no description"", None if ""#"" in cont : note = cont. split ( ""#"" ) [ - 1 ]. strip ( ) cont = cont [ : cont. index ( ""#"" ) ]. strip ( ) if ""="" in cont : value = cont. split ( ""="" ) [ 1 ]. strip ( ) name = cont [ : cont. index ( ""="" ) ]. strip ( ) else : name = cont rst [ - 1 ] [ - 1 ]. append ( ( ( cell. row, cell. col_idx ), [ tp, name, value, note ] ) ) key [ name ] = [ tp, name, value, note ] ",False,cell.value[0] + cell.value[-1] != '{}',cont.col_idx > len(cont),0.6567429900169373 116,"def parse_flash_log ( self, logf ) : """"""parse flash logs"""""" data = OrderedDict ( ) samplelogs = self. split_log ( logf [ ""f"" ] ) for slog in samplelogs : try : sample = dict ( ) s_name = self. clean_pe_name ( slog, logf [ ""root"" ] ) if : continue sample [ ""s_name"" ] = s_name sample [ ""totalpairs"" ] = self. get_field ( ""Total pairs"", slog ) sample [ ""discardpairs"" ] = self. get_field ( ""Discarded pairs"", slog ) sample [ ""percdiscard"" ] = self. get_field ( ""Percent Discarded"", slog, fl = True ) sample [ ""combopairs"" ] = self. get_field ( ""Combined pairs"", slog ) sample [ ""inniepairs"" ] = self. get_field ( ""Innie pairs"", slog ) sample [ ""outiepairs"" ] = self. get_field ( ""Outie pairs"", slog ) sample [ ""uncombopairs"" ] = self. get_field ( ""Uncombined pairs"", slog ) sample [ ""perccombo"" ] = self. get_field ( ""Percent combined"", slog, fl = True ) data [ s_name ] = sample except Exception as err : log. warning ( ""Error parsing record in {}. {}"". format ( logf [ ""fn"" ]",True,s_name is None,s_name is None,0.6595491170883179 117,"def import_refs ( self, base, other, committer = None, timestamp = None, timezone = None, message = None, prune = False, ) : if prune : to_delete = set ( self. subkeys ( base ) ) else : to_delete = set ( ) for name, value in other. items ( ) : if value is None : to_delete. add ( name ) else : self. set_if_equals ( b""/"". join ( ( base, name ) ), None, value, message = message ) if : try : to_delete. remove ( name ) except KeyError : pass for ref in to_delete : self. remove_if_equals ( b""/"". join ( ( base, ref ) ), None, message = message )",False,to_delete,len(to_delete) > 0,0.6647946238517761 118,"def remove ( self, values ) : if not isinstance ( values, ( list, tuple, set ) ) : values = [ values ] for v in values : v = str ( v ) if isinstance ( self. _definition, dict ) : self. _definition. pop ( v, None ) elif : if v == ""ANY"" : self. _definition = [ ] elif v in self. _definition : self. _definition. remove ( v ) if ( self. _value is not None and self. _value not in self. _definition and self. _not_any ( ) ) : raise ConanException ( bad_value_msg ( self. _name, self. _value, self. values_range ) )",False,self._definition == 'ANY',"isinstance(self._definition, dict)",0.6586019992828369 119,"def taiga ( request, trigger_id, key ) : signature = request. META. get ( ""HTTP_X_TAIGA_WEBHOOK_SIGNATURE"" ) if verify_signature ( request. _request. body, key, signature ) : data = data_filter ( trigger_id, ** request. data ) status = save_data ( trigger_id, data ) return ( Response ( { ""message"" : ""Success"" } ) if : else Response ( { ""message"" : ""Failed!"" } ) ) Response ( { ""message"" : ""Bad request"" } )",True,status,status,0.6869302988052368 120,"def genLoopPackets ( self ) : while True : if self. mode == ""simulator"" : if self. real_time : sleep_time = self. the_time + self. loop_interval - time. time ( ) if : time. 
sleep ( sleep_time ) else : time. sleep ( self. loop_interval ) self. the_time += self. loop_interval avg_time = self. the_time - self. loop_interval / 2.0 _packet = { ""dateTime"" : int ( self. the_time + 0.5 ), ""usUnits"" : weewx. US } for obs_type in self. observations : _packet [ obs_type ] = self. observations [ obs_type ]. value_at ( avg_time ) yield _packet",False,sleep_time > 0,self.sleep_time,0.6646113991737366 121,"def input_str ( self, s, default_value = None, valid_list = None, show_default_value = True, help_message = None, ) : if show_default_value and default_value is not None : s = f""[{default_value}] {s}"" if valid_list is not None or help_message is not None : s += "" ("" if valid_list is not None : s += "" "" + ""/"". join ( valid_list ) if help_message is not None : s += ""?:help"" if valid_list is not None or help_message is not None : s += "" )"" s += "" : "" while True : try : inp = input ( s ) if len ( inp ) == 0 : if default_value is None : print ( """" ) return None result = default_value break if help_message is not None and inp == ""?"" : print ( help_message ) continue if valid_list is not None : if : result = inp. lower ( ) break if inp in valid_list : result = inp major = None, minor = None, patch = None, name = None, ) : if isinstance ( major, six. string_types ) and not minor and not patch : if major. isdigit ( ) or ( major. count ( ""."" ) > 0 and major [ 0 ]. isdigit ( ) ) : version = major. split ( ""."", 2 ) if isinstance ( version, ( tuple, list ) ) : if : major, minor, patch, _ = version elif len ( version ) == 3 : major, minor, patch = version elif len ( version ) == 2 : major, minor = version else : major = major [ 0 ] else : major = major name = None else : name = ""{0!s}"". format ( major ) major = None return ( major, minor, patch, name )",False,len(version) > 3,len(version) == 1,0.6591365337371826 123,"def parse_workflow_directory ( workflow_directory ) : parsed = { ""versions"" : [ ], } if not os. path. exists ( workflow_directory ) : raise WorkflowError ( ""Workflow directory does not exist."" ) workflow_files = os. listdir ( workflow_directory ) if ""workflow.json"" not in workflow_files : raise WorkflowError ( 'No ""workflow.json"" manifest file found.' ) with open ( os. path. join ( workflow_directory, ""workflow.json"" ), ""r"" ) as f : parsed [ ""workflow"" ] = json. load ( f ) workflow_subdirs = [ os. path. join ( workflow_directory, workflow_file ) for workflow_file in workflow_files if os. path. isdir ( os. path. join ( workflow_directory, workflow_file ) ) ] for version_directory in workflow_subdirs : version_files = os. listdir ( version_directory ) if : continue with open ( os. path. join ( version_directory, ""version.json"" ), ""r"" ) as f : parsed [ ""versions"" ]. append ( json. load ( f ) ) if len ( parsed [ ""versions"" ] ) == 0 : raise WorkflowError ( ""Workflow directory {} does not contain any "" ""versions"". format ( workflow_directory ) ) return parsed",False,'version.json' not in version_files,len(version_files) > 0,0.6484920978546143 124,"def not_modssl_ifmodule ( self, path ) : """"""Checks if the provided Augeas path has argument!mod_ssl"""""" if ""ifmodule"" not in path. lower ( ) : return False workpath = path. lower ( ) while workpath : parts = workpath. rpartition ( ""ifmodule"" ) if : break ifmod_path = parts [ 0 ] + parts [ 1 ] if parts [ 2 ]. startswith ( ""["" ) : ifmod_path += parts [ 2 ]. partition ( ""/"" ) [ 0 ] ifmod_real_path = path [ 0 : len ( ifmod_path ) ] if ""!mod_ssl.c"" in self. 
get_all_args ( ifmod_real_path ) : return True workpath = parts [ 0 ] return False",False,not parts[0],len(parts) == 0,0.6571704745292664 125,"def read_config_file ( args ) : if os. path. isfile ( args. config_file ) : try : with open ( args. config_file ) as f : config = json. load ( f ) for key, elem in config. items ( ) : if : logger. info ( yellow ( ""{} has an unknown key: {} : {}"". format ( args. config_file, key, elem ) ) ) continue if getattr ( args, key ) == defaults_flag_in_config [ key ] : setattr ( args, key, elem ) except json. decoder. JSONDecodeError as e : logger. error ( red ( ""Impossible to read {}, please check the file {}"". format ( args. config_file, e ) ) None : user_profile = get_user_profile_by_id ( data [ ""user_id"" ] ) logging. info ( ""Processing signup for user %s in realm %s"", user_profile. id, user_profile. realm. string_id, ) if settings. MAILCHIMP_API_KEY and settings. PRODUCTION : endpoint = ""https://{}.api.mailchimp.com/3.0/lists/{}/members"". format ( settings. MAILCHIMP_API_KEY. split ( ""-"" ) [ 1 ], settings. ZULIP_FRIENDS_LIST_ID, ) params = dict ( data ) del params [ ""user_id"" ] params [ ""list_id"" ] = settings. ZULIP_FRIENDS_LIST_ID params [ ""status"" ] = ""subscribed"" r = requests. post ( endpoint, auth = ( ""apikey"", settings. MAILCHIMP_API_KEY ), json = params, timeout = 10, ) if r. status_code == 400 and orjson. loads ( r. content ) [ ""title"" ] == ""Member Exists"" : logging. warning ( ""Attempted to sign up already existing email to list: %s"", data [ ""email_address"" ], ) elif : retry_event ( self. queue_name, data, lambda e",False,r.status_code == 400,r.status_code == 200,0.6552725434303284 127,"def _read_model_arguments ( argv, use_argparse = False ) : if use_argparse : parser = argparse. ArgumentParser ( ) parser. add_argument ( ""database"", metavar = ""DATABASE"", type = str, default = ""galaxy"", nargs = ""?"", help = ""database to target (galaxy, tool_shed, install)"", ) populate_config_args ( parser ) args = parser. parse_args ( argv [ 1 : ] if argv else [ ] ) return args. config_file, args. config_section, args. database else : config_file = None for arg in [ ""-c"", ""--config"", ""--config-file"" ] : if arg in argv : pos = argv. index ( arg ) argv. pop ( pos ) config_file = argv. pop ( pos ) config_section = None if : pos = argv. index ( ""--config-section"" ) argv. pop ( pos ) config_section = argv. pop ( pos ) if argv and ( argv [ - 1 ] in DATABASE ) : database = argv. pop ( ) else : database = ""galaxy"" return config_file, config_section, database",False,'--config-section' in argv,argv and argv[--config - section] in CONFIG_FILE,0.653885006904602 128,"def seen_add ( options ) : seen_name = options. add_value if is_imdb_url ( seen_name ) : console ( ""IMDB url detected, try to parse ID"" ) imdb_id = extract_id ( seen_name ) if : seen_name = imdb_id else : console ( ""Could not parse IMDB ID"" ) db. add ( seen_name, ""cli_add"", { ""cli_add"" : seen_name } ) console ( ""Added %s as seen. This will affect all tasks."" % seen_name )",True,imdb_id,imdb_id,0.6628228425979614 129,"def translate_apply ( self, exp ) : pre = [ ] callable_pre, callable_value = self. translate ( exp [ 0 ], False ) pre. extend ( callable_pre ) args = [ ] keyword_args = [ ] keyword_arg_exps = [ ] arg_exps = exp [ 1 : ] for i, argexp in enumerate ( arg_exps ) : if : keyword_arg_exps = arg_exps [ i : ] arg_exps = arg_exps [ : i ] break for argexp in arg_exps : arg_pre, arg_value = self. translate ( argexp, False ) pre. extend ( arg_pre ) args. 
append ( arg_value ) for argKey, argExp in chunks ( keyword_arg_exps, 2 ) : if type ( argKey ) is not Keyword : raise MochiSyntaxError ( argKey, self. filename ) arg_pre, arg_value = self. translate ( argExp, False ) pre. extend ( arg_pre ) keyword_args. append ( ast. keyword ( arg = argKey. name, value = arg_value ) ) value = ast. Call ( func = callable_value, args = args, keywords = keyword_args, starargs = None, kwargs = None, lineno = callable_value. lineno, col_offset = 0, ) return pre, value",True,type(argexp) is Keyword,type(argexp) is Keyword,0.6585877537727356 130,"def parse_shoutcast1 ( url, timeout = 5 ) : """"""A Shoutcast object of raises ParseError"""""" root = get_root ( url ) shoutcast1_status = root + ""/7.html"" headers = { ""User-Agent"" : ""Mozilla/4.0"" } try : r = requests. get ( shoutcast1_status, headers = headers, timeout = timeout, stream = True ) if : raise ParseError r. content except ( RequestException, socket. timeout ) : raise ParseError if r. status_code!= 200 : raise ParseError soup = BeautifulSoup ( r. content ) body = soup. find ( ""body"" ) if not body : raise ParseError status_line = body. string if status_line is None : raise ParseError try : current, status, peak, max_, unique, bitrate, songtitle = status_line. split ( "","", 6 ) except ValueError : raise ParseError try : peak = str ( int ( peak ) ) current = str ( int ( current ) ) except ValueError : raise ParseError return Stream ( root, current, peak )",False,"'text' not in r.headers.get('content-type', '')",r.status_code == 404,0.6462410688400269 131,"def reconnect_user ( self, user_id, host_id, server_id ) : if host_id == settings. local. host_id : return if server_id and self. server. id!= server_id : return for client in self. clients. find ( { ""user_id"" : user_id } ) : self. clients. update_id ( client [ ""id"" ], { ""ignore_routes"" : True, }, ) if : self. instance. disconnect_wg ( client [ ""id"" ] ) else : self. instance_com. client_kill ( client [ ""id"" ] )",False,len(client['id']) > 32,self.instance_com is None,0.6541067361831665 132,"def __init__ ( self, * args, ** decimals ) : self. amounts = OrderedDict ( ( currency, decimals. get ( currency, Money. ZEROS [ currency ]. amount ) ) for currency in CURRENCIES ) for arg in args : if : self. amounts [ arg. currency ] += arg. amount else : for m in arg : self. amounts [ m. currency ] += m. amount",True,"isinstance(arg, Money)","isinstance(arg, Money)",0.6594305038452148 133,"def _mask_forward_test ( self, stage, x, bboxes, semantic_feat = None ) : """"""Mask head forward function for testing."""""" mask_roi_extractor = self. mask_roi_extractor [ stage ] mask_head = self. mask_head [ stage ] mask_rois = bbox2roi ( [ bboxes ] ) mask_feats = mask_roi_extractor ( x [ : len ( mask_roi_extractor. featmap_strides ) ], mask_rois ) if self. with_semantic and ""mask"" in self. semantic_fusion : mask_semantic_feat = self. semantic_roi_extractor ( [ semantic_feat ], mask_rois ) if mask_semantic_feat. shape [ - 2 : ]!= mask_feats. shape [ - 2 : ] : mask_semantic_feat = F. adaptive_avg_pool2d ( mask_semantic_feat, mask_feats. shape [ - 2 : ] ) mask_feats += mask_semantic_feat if self. mask_info_flow : last_feat = None last_pred = None for i in range ( stage ) : mask_pred, last_feat = self. 
mask_head [ i ] ( mask_feats, last_feat ) if : mask_pred = mask_pred + last_pred last_pred = mask_pred mask_pred = mask_head ( mask_feats, last_feat, return_feat = False ) if : mask_pred = mask_pred + last_pred else : mask_pred",False,last_pred is not None,self.mask_info_flow,0.6654256582260132 134,def on_completed2 ( ) : doner [ 0 ] = True if not qr : if : observer. on_next ( False ) observer. on_completed ( ) elif donel [ 0 ] : observer. on_next ( True ) observer. on_completed ( ),False,len(ql) > 0,der[0],0.6593277454376221 135,"def modify_vpc_attribute ( self ) : vpc_id = self. _get_param ( ""VpcId"" ) for attribute in ( ""EnableDnsSupport"", ""EnableDnsHostnames"" ) : if : attr_name = camelcase_to_underscores ( attribute ) attr_value = self. querystring. get ( ""%s.Value"" % attribute ) [ 0 ] self. ec2_backend. modify_vpc_attribute ( vpc_id, attr_name, attr_value ) return MODIFY_VPC_ATTRIBUTE_RESPONSE",False,self.querystring.get('%s.Value' % attribute),attribute in self.params,0.6549067497253418 136,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : self. set_socket_descriptor ( d. getPrefixedString ( ) ) continue if tt == 26 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. mutable_server_address ( ). TryMerge ( tmp ) continue if : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. mutable_proxy_external_ip ( ). TryMerge ( tmp ) continue if tt == 0 : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 34,tt == 33554432,0.681266725063324 137,"def is_accepted_drag_event ( self, event ) : if event. source ( ) == self. table : return True mime = event. mimeData ( ) if mime. hasUrls ( ) : for url in mime. urls ( ) : if : break filename = url. toLocalFile ( ) extension = os. path. splitext ( filename ) [ 1 ]. lower ( ) [ 1 : ] if extension not in _dictionary_formats ( ) : break else : return True return False",False,not url.isLocalFile(),url.hasLocalFile(),0.654245138168335 138,"def clean_new_hostname ( self ) : old_ip = self. cleaned_data. get ( ""address"" ) new_hostname = self. cleaned_data [ ""new_hostname"" ] if not is_valid_hostname ( new_hostname ) : raise forms. ValidationError ( ""Invalid hostname"" ) try : get_domain ( new_hostname ) except Domain. DoesNotExist : raise forms. ValidationError ( ""Invalid domain"" ) try : ipaddress = IPAddress. objects. get ( hostname = new_hostname ) except IPAddress. DoesNotExist : if : raise forms. ValidationError ( ""Hostname already in DNS."" ) else : if ipaddress. device and not ipaddress. device. deleted : if not old_ip : raise forms. ValidationError ( ""Hostname in use."" ) device = Device. objects. get ( ipaddress__address = old_ip ) if ipaddress. device. id!= device. id : raise forms. ValidationError ( ""Hostname used by %s"" % device, ) elif Record. objects. filter ( name = new_hostname ). exists ( ) : raise forms. ValidationError ( ""Hostname already in DNS."" ) return new_hostname",False,find_addresses_for_hostname(new_hostname),ipaddress.hostname in ipaddress,0.6452183127403259 139,"def __getattr__ ( self, name ) : if name. startswith ( ""__"" ) : raise AttributeError ( ""'{}' does not exist on "". format ( name, id ( self ) ) ) callback_type, target = self. _identify_callback ( name ) if callback_type is not None : if callback_type in self. transition_cls. dynamic_methods : if target not in self. 
events : raise AttributeError ( ""event '{}' is not registered on "". format ( target, id ( self ) ) ) return partial ( self. events [ target ]. add_callback, callback_type ) elif : state = self. get_state ( target ) return partial ( state. add_callback, callback_type [ 3 : ] ) try : return self. __getattribute__ ( name ) except AttributeError : raise AttributeError ( ""'{}' does not exist on "". format ( name, id ( self ) ) )",False,callback_type in self.state_cls.dynamic_methods,target > self.state,0.651080846786499 140,"def delete_user ( self, uid ) : """"""Delete a user"""""" if not self. __user_exists ( uid ) : raise exception. LDAPUserNotFound ( user_id = uid ) self. __remove_from_all ( uid ) if FLAGS. ldap_user_modify_only : attr = [ ] user = self. __get_ldap_user ( uid ) if ""secretKey"" in user. keys ( ) : attr. append ( ( self. ldap. MOD_DELETE, ""secretKey"", user [ ""secretKey"" ] ) ) if ""accessKey"" in user. keys ( ) : attr. append ( ( self. ldap. MOD_DELETE, ""accessKey"", user [ ""accessKey"" ] ) ) if : attr. append ( ( self. ldap. MOD_DELETE, LdapDriver. isadmin_attribute, user [ LdapDriver. isadmin_attribute ], ) ) self. conn. modify_s ( self. __uid_to_dn ( uid ), attr ) else : self. conn. delete_s ( self. __uid_to_dn ( uid ) )",False,LdapDriver.isadmin_attribute in user.keys(),ldap.isadmin_attribute,0.6551808714866638 141,"def setLabel ( self, label ) : if label is None : if : self. label. scene ( ). removeItem ( self. label ) self. label = None else : if self. label is None : self. label = TextItem ( ) self. label. setParentItem ( self ) self. label. setText ( label ) self. _updateLabel ( )",True,self.label is not None,self.label is not None,0.6567447185516357 142,"def dispatch_return ( self, frame, arg ) : if self. stop_here ( frame ) or frame == self. returnframe : if : return self. trace_dispatch try : self. frame_returning = frame self. user_return ( frame, arg ) finally : self. frame_returning = None if self. quitting : raise BdbQuit if self. stopframe is frame and self. stoplineno!= - 1 : self. _set_stopinfo ( None, None ) return self. trace_dispatch",False,self.stopframe and frame.f_code.co_flags & CO_GENERATOR,self.trace_dispatch is None,0.6477848887443542 143,"def allow_hide_post ( user_acl, target ) : if user_acl [ ""is_anonymous"" ] : raise PermissionDenied ( _ ( ""You have to sign in to hide posts."" ) ) category_acl = user_acl [ ""categories"" ]. get ( target. category_id, { ""can_hide_posts"" : 0, ""can_hide_own_posts"" : 0 } ) if not category_acl [ ""can_hide_posts"" ] : if not category_acl [ ""can_hide_own_posts"" ] : raise PermissionDenied ( _ ( ""You can't hide posts in this category."" ) ) if : raise PermissionDenied ( _ ( ""You can't hide other users posts in this category."" ) ) if target. is_protected and not category_acl [ ""can_protect_posts"" ] : raise PermissionDenied ( _ ( ""This post is protected. You can't hide it."" ) ) if not has_time_to_edit_post ( user_acl, target ) : message = ngettext ( ""You can't hide posts that are older than %(minutes)s minute."", ""You can't hide posts that are older than %(minutes)s minutes."", category_acl [ ""post_edit_time"" ], ) raise PermissionDenied ( message % { ""minutes"" : category_acl [ ""post_edit_time"" ] } ) if target. is_first_post : raise PermissionDenied ( _ ( ""You can't hide",False,user_acl['user_id'] != target.poster_id,"target.is_user_post and (not category_acl[ ""can_hide_other_post_time_exceeded'])",0.6479650139808655 144,"def test_dayoffsets ( self ) : start = datetime. datetime ( self. 
yr, self. mth, self. dy, 9 ) for date_string, expected_day_offset in [ ( ""Aujourd'hui"", 0 ), ( ""aujourd'hui"", 0 ), ( ""Demain"", 1 ), ( ""demain"", 1 ), ( ""Hier"", - 1 ), ( ""hier"", - 1 ), ( ""au jour de hui"", None ), ] : got_dt, rc = self. cal. parseDT ( date_string, start ) if : self. assertEqual ( rc, 1 ) target = start + datetime. timedelta ( days = expected_day_offset ) self. assertEqual ( got_dt, target ) else : self. assertEqual ( rc, 0 )",True,expected_day_offset is not None,expected_day_offset is not None,0.6547766923904419 145,"def send_messages ( self, messages ) : sent_messages = 0 for m in messages : payload = { } for opt, optval in { ""mattermost_icon_url"" : ""icon_url"", ""mattermost_channel"" : ""channel"", ""mattermost_username"" : ""username"", }. items ( ) : optvalue = getattr ( self, opt ) if optvalue is not None : payload [ optval ] = optvalue. strip ( ) payload [ ""text"" ] = m. subject r = requests. post ( ""{}"". format ( m. recipients ( ) [ 0 ] ), data = json. dumps ( payload ), verify = ( not self. mattermost_no_verify_ssl ), ) if : logger. error ( smart_text ( _ ( ""Error sending notification mattermost: {}"" ). format ( r. text ) ) ) if not self. fail_silently : raise Exception ( smart_text ( _ ( ""Error sending notification mattermost: {}"" ). format ( r. text ) ) ) sent_messages",False,r.status_code >= 400,verify,0.6575757265090942 146,"def get_top_level_stats ( self ) : for func, ( cc, nc, tt, ct, callers ) in self. stats. items ( ) : self. total_calls += nc self. prim_calls += cc self. total_tt += tt if ( ""jprofile"", 0, ""profiler"" ) in callers : self. top_level [ func ] = None if : self. max_name_len = len ( func_std_string ( func ) )",False,len(func_std_string(func)) > self.max_name_len,func,0.6513847708702087 147,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. STRUCT : self. status = TStatus ( ) self. status. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRUCT : self. schema = TTableSchema ( ) self. schema. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 1,fid == 1,0.675361156463623 148,"def __init__ ( self, cga, * args ) -> None : super ( ). __init__ ( cga ) self. einf = self. cga. einf if len ( args ) == 0 : nulls = [ self. cga. null_vector ( ) for k in range ( self. layout. dims - 2 ) ] self. mv = reduce ( op, nulls + [ self. einf ] ) elif len ( args ) == 1 : if : self. mv = args [ 0 ] elif isinstance ( args [ 0 ], int ) : dim = args [ 0 ] points = [ self. cga. base_vector ( ) for k in range ( dim + 1 ) ] points = list ( map ( self. cga. up, points ) ) self. mv = reduce ( op, points + [ self. einf ] ) else : nulls = map ( self. cga. null_vector, args ) if self. einf not in nulls : nulls = list ( nulls ) + [ self. einf ] self. mv = reduce ( op, nulls ) self. mv = self. mv. normal ( )",False,"isinstance(args[0], MultiVector)","isinstance(args[0], float)",0.6587437391281128 149,"def get_final_results ( log_json_path, iter_num ) : result_dict = dict ( ) with open ( log_json_path, ""r"" ) as f : for line in f. readlines ( ) : log_line = json. loads ( line ) if ""mode"" not in log_line. 
keys ( ) : continue if : result_dict [ ""memory"" ] = log_line [ ""memory"" ] if log_line [ ""iter"" ] == iter_num : result_dict. update ( { key : log_line [ key ] for key in RESULTS_LUT if key in log_line } ) return result_dict",False,log_line['mode'] == 'train' and log_line['iter'] == iter_num,log_line[0] == 0,0.6507238149642944 150,"def argument_action ( self, text, loc, arg ) : """"""Code executed after recognising each of function's arguments"""""" exshared. setpos ( loc, text ) if DEBUG > 0 : print ( ""ARGUMENT:"", arg. exp ) if DEBUG == 2 : self. symtab. display ( ) if : return arg_ordinal = len ( self. function_arguments ) if not self. symtab. same_type_as_argument ( arg. exp, self. function_call_index, arg_ordinal ) : raise SemanticException ( ""Incompatible type for argument %d in '%s'"" % ( arg_ordinal + 1, self. symtab. get_name ( self. function_call_index ) ) ) self. function_arguments. append ( arg. exp )",False,DEBUG > 2,self.function_arguments is None,0.6751142144203186 151,"def reload ( self ) : """"""Parse bindings and mangle into an appropriate form"""""" self. _lookup = { } self. _masks = 0 for action, bindings in self. keys. items ( ) : if : bindings = ( bindings, ) for binding in bindings : if not binding or binding == ""None"" : continue try : keyval, mask = self. _parsebinding ( binding ) except KeymapError as e : err ( ""keybindings.reload failed to parse binding '%s': %s"" % ( binding, e ) ) else : if mask & Gdk. ModifierType. SHIFT_MASK : if keyval == Gdk. KEY_Tab : keyval = Gdk. KEY_ISO_Left_Tab mask &= ~ Gdk. ModifierType. SHIFT_MASK else : keyvals = Gdk. keyval_convert_case ( keyval ) if keyvals [ 0 ]!= keyvals [ 1 ] : keyval = keyvals [ 1 ] <",False,"not isinstance(bindings, tuple)",self._parse_bindings,0.6541509628295898 152,"def write ( self ) : """"""Make a copy of the stored config and write it to the configured file"""""" new_config = ConfigObj ( encoding = ""UTF-8"" ) new_config. filename = self. _config_file for key, subkeys in self. _config. items ( ) : if key not in new_config : new_config [ key ] = { } for subkey, value in subkeys. items ( ) : new_config [ key ] [ subkey ] = value for key in _CONFIG_DEFINITIONS. keys ( ) : key, definition_type, section, ini_key, default = self. _define ( key ) self. check_setting ( key ) if : new_config [ section ] = { } new_config [ section ] [ ini_key ] = self. _config [ section ] [ ini_key ] headphones. logger. info ( ""Writing configuration to file"" ) try : new_config. write ( ) except IOError as e : headphones. logger. error ( ""Error writing configuration file: %s"", e )",True,section not in new_config,section not in new_config,0.6571708917617798 153,"def __init__ ( self, endpoint : str, credential : MetricsAdvisorKeyCredential, ** kwargs : Any ) -> None : try : if : endpoint = ""https://"" + endpoint except AttributeError : raise ValueError ( ""Base URL must be a string."" ) if not credential : raise ValueError ( ""Missing credential"" ) self. _endpoint = endpoint if isinstance ( credential, MetricsAdvisorKeyCredential ) : self. _client = _ClientAsync ( endpoint = endpoint, sdk_moniker = SDK_MONIKER, authentication_policy = MetricsAdvisorKeyCredentialPolicy ( credential ), ** kwargs ) else : if hasattr ( credential, ""get_token"" ) : credential_scopes = kwargs. 
pop ( ""credential_scopes"", [ ""https://cognitiveservices.azure.com/.default"" ] ) credential_policy = AsyncBearerTokenCredentialPolicy ( credential, * credential_scopes ) else : raise TypeError ( ""Please provide an instance from azure-identity "" ""or a class that implement the 'get_token protocol"" ) self. _client = _ClientAsync ( endpoint = endpoint, sdk_moniker = SDK_MONIKER, authentication_policy = credential_policy, ** kwargs )",False,not endpoint.lower().startswith('http'),"isinstance(endpoint, str)",0.6574206352233887 154,"def _build_blocks_by_usage ( ids : Sequence [ FunctionID ], *, level : int = 0, to : Optional [ FunctionID ] = None, origin : float = 0, visited : AbstractSet [ Call ] = frozenset ( ), parent_width : float = 0, ) -> None : factor = 1.0 if ids and to is not None : calls_tottime = sum ( calls [ fid, to ] [ 3 ] for fid in ids ) if calls_tottime : factor = parent_width / calls_tottime for fid in sorted ( ids ) : call = fid, to if to is not None : cc, nc, tt, tc = calls [ call ] ttt = tc * factor else : cc, nc, tt, tc = funcs [ fid ]. stat ttt = tt * factor if : origin += ttt continue tooltip = TOOLTIP. format ( tt / maxw, cc, nc, tt, tc ) block = Block ( func = fid, call_stack = ( ), color = 2 if level > 0 else not funcs [ fid ]. calls, level = level, tooltip = tooltip, w = ttt, x = origin, ) usage_blocks. append ( block ) if call not in visited : _build_blocks_by_usage ( 0,0.6720158457756042 155,"def _map_saslprep ( s ) : """"""Map stringprep table B.1 to nothing and C.1.2 to ASCII space"""""" r = [ ] for c in s : if : r. append ( "" "" ) elif not stringprep. in_table_b1 ( c ) : r. append ( c ) return """". join ( r )",False,stringprep.in_table_c12(c),c.lower() == c.lower() or c.lower() == c.lower(),0.6584839820861816 156,"def _del_port ( self, ctx, br_name = None, target = None, must_exist = False, with_iface = False ) : assert target is not None ctx. populate_cache ( ) if not with_iface : vsctl_port = ctx. find_port ( target, must_exist ) else : vsctl_port = ctx. find_port ( target, False ) if not vsctl_port : vsctl_iface = ctx. find_iface ( target, False ) if vsctl_iface : vsctl_port = vsctl_iface. port ( ) if : vsctl_fatal ( ""no port or interface named %s"" % target ) if not vsctl_port : return if not br_name : vsctl_bridge = ctx. find_bridge ( br_name, True ) if vsctl_port. bridge ( )!= vsctl_bridge : if vsctl_port. bridge ( ). parent == vsctl_bridge : vsctl_fatal ( ""bridge %s does not have a port %s (although "" ""its parent bridge %s does)"" % ( br_name, target, vsctl_bridge. parent. name ) ) else : vsctl_fatal ( ""bridge %s does not have a port %s"" % ( br_name, target ) ) ctx. del_port ( vsctl_port )",False,must_exist and (not vsctl_port),not vsctl_port and must_exist,0.650446891784668 157,"def reset ( self ) : if self. _on_memory : self. _generation += 1 if : self. _order = list ( self. _rng. permutation ( self. _size ) ) else : self. _order = list ( range ( self. _size ) ) if self. _position == 0 : self. _generation = - 1 else : self. _data_source. _position = self. _position self. _data_source. reset ( ) else : self. _data_source. reset ( ) self. _generation = self. _data_source. _generation self. _position = self. _data_source. _position super ( DataSourceWithMemoryCache, self ). reset ( )",False,self._shuffle and self._generation > 0,self._rng.random() < self._size,0.6596124172210693 158,"def _format_arg ( self, name, trait_spec, value ) : if name == ""mask_file"" : return """" if name == ""op_string"" : if ""-k %s"" in self. inputs. 
op_string : if : return self. inputs. op_string % self. inputs. mask_file else : raise ValueError ( ""-k %s option in op_string requires mask_file"" ) return super ( ImageStats, self ). _format_arg ( name, trait_spec, value )",False,isdefined(self.inputs.mask_file),-k %s' in self.inputs.mask_file,0.6470727920532227
159,"def _prepare_subset ( full_data : torch. Tensor, full_targets : torch. Tensor, num_samples : int, digits : Sequence, ) : classes = { d : 0 for d in digits } indexes = [ ] for idx, target in enumerate ( full_targets ) : label = target. item ( ) if : continue indexes. append ( idx ) classes [ label ] += 1 if all ( classes [ k ] >= num_samples for k in classes ) : break data = full_data [ indexes ] targets = full_targets [ indexes ] return data, targets",False,"classes.get(label, float('inf')) >= num_samples",label in empty_data,0.6501825451850891
160,"def apply ( self, response ) : updated_headers = self. update_headers ( response ) if updated_headers : response. headers. update ( updated_headers ) warning_header_value = self. warning ( response ) if : response. headers. update ( { ""Warning"" : warning_header_value } ) return response",False,warning_header_value is not None,warning_header_value,0.6577584743499756
161,"def dataset_to_stream ( dataset, input_name ) : """"""Takes a tf.Dataset and creates a numpy stream of ready batches."""""" for example in fastmath. dataset_as_numpy ( dataset ) : features = example [ 0 ] inp, out = features [ input_name ], example [ 1 ] mask = features [ ""mask"" ] if ""mask"" in features else None if isinstance ( inp, np. uint8 ) : inp = inp. astype ( np. int32 ) if : out = out. astype ( np. int32 ) yield ( inp, out ) if mask is None else ( inp, out, mask )",True,"isinstance(out, np.uint8)","isinstance(out, np.uint8)",0.6530542373657227
162,"def numexp_action ( self, text, loc, num ) : """"""Code executed after recognising a numexp expression (something +|- something)"""""" exshared. setpos ( loc, text ) if DEBUG > 0 : print ( ""NUM_EXP:"", num ) if DEBUG == 2 : self. symtab. display ( ) if DEBUG > 2 : return n = list ( num ) while len ( n ) > 1 : if : raise SemanticException ( ""Invalid opernads to binary '%s'"" % n [ 1 ] ) reg = self. codegen. arithmetic ( n [ 1 ], n [ 0 ], n [ 2 ] ) n [ 0 : 3 ] = [ reg ] return n [ 0 ]",False,"not self.symtab.same_types(n[0], n[2])",n[1] not in self.code_space,0.6545137166976929
163,"def _analyze_callsite ( self, caller_block_addr : int, rda : ReachingDefinitionsModel ) -> CallSiteFact : fact = CallSiteFact ( True, ) state = rda. observed_results [ ( ""node"", caller_block_addr, 1 ) ] all_uses : ""Uses"" = rda. all_uses default_cc_cls = DefaultCC. get ( self. project. arch. name, None ) if default_cc_cls is not None : default_cc : SimCC = default_cc_cls ( self. project. arch ) all_defs : Set [ ""Definition"" ] = state. register_definitions. get_all_variables ( ) return_val = default_cc. RETURN_VAL if return_val is not None and isinstance ( return_val, SimRegArg ) : return_reg_offset, _ = self. project. arch. registers [ return_val. reg_name ] try : return_def = next ( iter ( d for d in all_defs if isinstance ( d. atom, Register ) and d. atom. reg_offset == return_reg_offset ) ) except StopIteration : return_def = None if : global gdata_service try : from gdata import service gdata_service = service except ImportError : raise CommandError ( ""You need to install the gdata "" ""module to run this command."" ) self. verbosity = int ( options. get ( ""verbosity"", 1 ) ) self. blogger_username = options. get ( ""blogger_username"" ) self. category_title = options. get ( ""category_title"" ) self. blogger_blog_id = options. get ( ""blogger_blog_id"" ) self. write_out ( self. style. TITLE ( ""Starting migration from Blogger to Zinnia %s\n"" % __version__ ) ) if : self. blogger_username = raw_input ( ""Blogger username: "" ) if : raise CommandError ( ""Invalid Blogger username"" ) self. blogger_password = getpass ( ""Blogger password: "" ) try : self. blogger_manager = BloggerManager ( self. blogger_username, self. blogger_password ) except gdata_service. BadAuthentication : raise CommandError ( ""Incorrect Blogger username or password"" ) default_author = options. get ( ""author"" ) if default_author : try : self. default_author = User. objects. get ( username = default_author ) except User. DoesNotExist : raise CommandError ( 'Invalid Zinnia username for default author ""%s""' % default_author ) <",True,not self.blogger_username,not self.blogger_username,0.6542549133300781
165,"def nodes ( self ) : if not self. _nodes : nodes = self. cluster_group. instances ( ) self. _nodes = [ ] master = self. master_node nodeid = 1 for node in nodes : if : continue if node. id == master. id : self. _nodes. insert ( 0, master ) continue self. _nodes. append ( Node ( node, self. key_location, ""node%.3d"" % nodeid ) ) nodeid += 1 else : for node in self. _nodes : log. debug ( ""refreshing instance %s"" % node. id ) node. update ( ) return self. _nodes",False,"node.state not in ['pending', 'running']",node.id < 0,0.6543914079666138
166,"def set_ok_port ( self, cookie, request ) : if cookie. port_specified : req_port = request_port ( request ) if req_port is None : req_port = ""80"" else : req_port = str ( req_port ) for p in cookie. port. split ( "","" ) : try : int ( p ) except ValueError : debug ( "" bad port %s (not numeric)"", p ) return False if : break else : debug ( "" request port (%s) not found in %s"", req_port, cookie. port ) return False return True",False,p == req_port,req_port == cookie.req_port,0.6740789413452148
167,"def _test_kneighbors_regressor ( self, n_neighbors = 5, algorithm = ""brute"", weights = ""uniform"", metric = ""minkowski"", metric_params = { ""p"" : 2 }, score_w_train_data = False, ) : for data in [ datasets. load_boston ( ), datasets. load_diabetes ( ) ] : X, y = data. data, data. target X = X. astype ( np. float32 ) if metric == ""wminkowski"" : metric_params [ ""w"" ] = np. random. rand ( X. shape [ 1 ] ) elif metric == ""seuclidean"" : metric_params [ ""V"" ] = np. random. rand ( X. shape [ 1 ] ) elif metric == ""mahalanobis"" : V = np. cov ( X. T ) metric_params [ ""VI"" ] = np. linalg. inv ( V ) model = KNeighborsRegressor ( n_neighbors = n_neighbors, algorithm = algorithm, weights = weights, metric = metric, metric_params = metric_params, ) n_train_rows = int ( X. shape [ 0 ] * 0.6 ) model. fit ( X [ : n_train_rows, : ], y [ : n_train_rows ] ) if : X = X [ n_train_rows :, : ] extra_config = { humming",False,not score_w_train_data,n_neighbors > 5,0.655362606048584
168,"def __call__ ( self, engine : Engine, logger : ClearMLLogger, event_name : Union [ str, Events ] ) -> None : if not isinstance ( logger, ClearMLLogger ) : raise RuntimeError ( ""Handler 'GradsHistHandler' works only with ClearMLLogger"" ) global_step = engine. state. get_event_attrib_value ( event_name ) tag_prefix = f""{self.tag}/"" if self. tag else """" for name, p in self. model. named_parameters ( ) : if : continue title_name, _, series_name = name. partition ( ""."" ) logger. grad_helper. add_histogram ( title = f""{tag_prefix}grads_{title_name}"", series = series_name, step = global_step, hist_data = p. grad. detach ( ). cpu ( ). numpy ( ), )",False,p.grad is None,not p.grad,0.658847451210022
169,"def extract ( self ) : try : c = self. db. cursor ( ) c. execute ( """"""show global variables like'max_connections';"""""" ) max = c. fetchone ( ) c. execute ( """"""show global status like 'Threads_connected';"""""" ) thread = c. fetchone ( ) if : self. set2 [ thread [ 0 ] ] = float ( thread [ 1 ] ) self. set2 [ ""Threads"" ] = float ( thread [ 1 ] ) / float ( max [ 1 ] ) * 100.0 for name in self. vars : self. val [ name ] = self. set2 [ name ] * 1.0 / elapsed if step == op. delay : self. set1. update ( self. set2 ) except Exception as e : for name in self. vars : self. val [ name ] = - 1",False,thread[0] in self.vars,thread != None,0.6545277833938599
170,"def _setUpClass ( cls ) : global solver import pyomo. environ from pyomo. solvers. tests. io. writer_test_cases import testCases for test_case in testCases : if : solver [ ( test_case. name, test_case. io ) ] = True",False,"(test_case.name, test_case.io) in solver and test_case.available",test_case.name in environ,0.6479943990707397
171,"def test_timestamp_overflow ( self ) : sys. path. insert ( 0, os. curdir ) try : source = TESTFN + "".py"" if : compiled = TESTFN + ""$py.class"" else : compiled = source + ( ""c"" if __debug__ else ""o"" ) with open ( source, ""w"" ) as f : pass try : os. utime ( source, ( 2 ** 33 - 5, 2 ** 33 - 5 ) ) except OverflowError : self. skipTest ( ""cannot set modification time to large integer"" ) except OSError as e : if e. errno!= getattr ( errno, ""EOVERFLOW"", None ) : raise self. skipTest ( ""cannot set modification time to large integer ({})"". format ( e ) ) __import__ ( TESTFN ) os. stat ( compiled ) finally : del sys. path [ 0 ] remove_files ( TESTFN )",False,is_jython,__debug__,0.6686463356018066
172,"def to_representation ( self, value ) : old_social_string_fields = [ ""twitter"", ""github"", ""linkedIn"" ] request = self. context. get ( ""request"" ) show_old_format = ( request and is_deprecated ( request. version, self. min_version ) and request. method == ""GET"" ) if show_old_format : social = value. copy ( ) for key in old_social_string_fields : if : social [ key ] = value [ key ] [ 0 ] elif social. get ( key ) == [ ] : social [ key ] = """" value = social return super ( SocialField, self ). to_representation ( value )",False,social.get(key),key in value,0.6571549773216248
173,"def contribute ( self, converter, model, form_class, inline_model ) : reverse_field = None info = self. get_info ( inline_model ) for field in get_meta_fields ( info. model ) : field_type = type ( field ) if field_type == ForeignKeyField : if : reverse_field = field break else : raise Exception ( ""Cannot find reverse relation for model %s"" % info. model ) ignore = [ reverse_field. name ] if info. form_excluded_columns : exclude = ignore + info. form_excluded_columns else : exclude = ignore child_form = info. get_form ( ) if child_form is None : child_form = model_form ( info. model, base_class = form. BaseForm, only = info. form_columns, exclude = exclude, field_args = info. form_args, allow_pk = True, converter = converter, ) try : prop_name = reverse_field. related_name except AttributeError : prop_name = reverse_field. backref label = self. get_label ( info, prop_name ) setattr ( form_class, prop_name, self. inline_field_list_type ( child_form, info. model, <",False,field.rel_model == model,reverse_field is None,0.6588976383209229
174,"def get_aa_from_codonre ( re_aa ) : aas = [ ] m = 0 for i in re_aa : if i == ""["" : m = - 1 aas. append ( """" ) elif : m = 0 continue elif m == - 1 : aas [ - 1 ] = aas [ - 1 ] + i elif m == 0 : aas. append ( i ) return aas",False,i == ']',m == 0,0.6804044246673584
175,"def _do_db_notes ( self, params ) : """"""Adds notes to rows in the database"""""" table, params = self. _parse_params ( params ) if not table : self. _help_db_notes ( ) return if table in self. get_tables ( ) : if params : arg, note = self. _parse_params ( params ) rowids = self. _parse_rowids ( arg ) else : try : params = input ( ""rowid(s) (INT): "" ) rowids = self. _parse_rowids ( params ) note = input ( ""note (TXT): "" ) except KeyboardInterrupt : print ( """" ) return finally : if : print ( f""{params}"" ) count = 0 for rowid in rowids : count += self. query ( f""UPDATE `{table}` SET notes=? WHERE ROWID IS?"", ( note, rowid ) ) self. output ( f""{count} rows affected."" ) else : self. output ( ""Invalid table name."" )",False,Framework._script,params,0.6727245450019836
176,"def start_workunit ( self, workunit ) : """"""Implementation of Reporter callback."""""" if self. is_under_background_root ( workunit ) : return label_format = self. _get_label_format ( workunit ) if label_format == LabelFormat. FULL : if not WorkUnitLabel. SUPPRESS_LABEL in workunit. labels : self. _emit_indented_workunit_label ( workunit ) tool_output_format = self. _get_tool_output_format ( workunit ) if : self. emit ( self. _prefix ( workunit, ""\n"" ) ) elif tool_output_format == ToolOutputFormat. UNINDENTED : self. emit ( ""\n"" ) elif label_format == LabelFormat. DOT : self. emit ( ""."" ) self. flush ( )",False,tool_output_format == ToolOutputFormat.INDENT,tool_output_format == ToolOutputFormat.INDENTED,0.6523141264915466
177,"def strip_dirs ( self ) : oldstats = self. stats self. stats = newstats = { } max_name_len = 0 for func, ( cc, nc, tt, ct, callers ) in oldstats. items ( ) : newfunc = func_strip_path ( func ) if len ( func_std_string ( newfunc ) ) > max_name_len : max_name_len = len ( func_std_string ( newfunc ) ) newcallers = { } for func2, caller in callers. items ( ) : newcallers [ func_strip_path ( func2 ) ] = caller if : newstats [ newfunc ] = add_func_stats ( newstats [ newfunc ], ( cc, nc, tt, ct, newcallers ) ) else : newstats [ newfunc ] = ( cc, nc, tt, ct, newcallers ) old_top = self. top_level self. top_level = new_top = set ( ) for func in old_top : new_top. add ( func_strip_path ( func ) ) self. max_name_len = max_name_len self. fcn_list = None self. all_callees = None return self",False,newfunc in newstats,len(newcallers) > 0,0.6734169721603394
178,def _maybe_run_close_callback ( self ) : if self. closed ( ) and self. _pending_callbacks == 0 : futures = [ ] if self. _read_future is not None : futures. append ( self. _read_future ) self. _read_future = None if : futures. append ( self. _write_future ) self. _write_future = None if self. _connect_future is not None : futures. append ( self. _connect_future ) self. _connect_future = None if self. _ssl_connect_future is not None : futures. append ( self. _ssl_connect_future ) self. _ssl_connect_future = None for future in futures : future. set_exception ( StreamClosedError ( real_error = self. error ) ) if self. _close_callback is not None : cb = self. _close_callback self. _close_callback = None self. _run_callback ( cb ) self. _read_callback = self. _write_callback = None self. _write_buffer = None,True,self._write_future is not None,self._write_future is not None,0.6580326557159424
179,"def route ( tokeniser ) : ipmask = prefix ( tokeniser ) if ""rd"" in tokeniser. tokens or ""route-distinguisher"" in tokeniser. tokens : nlri = IPVPN ( IP. toafi ( ipmask. top ( ) ), SAFI. mpls_vpn, OUT. ANNOUNCE ) elif ""label"" in tokeniser. tokens : nlri = Label ( IP. toafi ( ipmask. top ( ) ), SAFI. nlri_mpls, OUT. ANNOUNCE ) else : nlri = INET ( IP. toafi ( ipmask. top ( ) ), IP. tosafi ( ipmask. top ( ) ), OUT. ANNOUNCE ) nlri. cidr = CIDR ( ipmask. pack ( ), ipmask. mask ) change = Change ( nlri, Attributes ( ) ) while True : command = tokeniser ( ) if not command : break if : nlri. labels = label ( tokeniser ) continue if command == ""rd"" or command == ""route-distinguisher"" : nlri. rd = route_distinguisher ( tokeniser ) continue action = ParseStatic. action. get ( command, """" ) if action == ""attribute-add"" : change. attributes. add ( ParseStatic. known [ command ] ( tokeniser ) ) elif action == ""nlri-set"" : change. nlri. assign ( ParseStatic. assign [ command ], ParseStatic. known [ command ] ( tokeniser ) ) elif action == ""nexthop-and-attribute""",False,command == 'label',label is not None,0.662468433380127
180,"def _get_match_location ( self, node, name = None ) : loc = source. Location ( node. lineno, node. col_offset ) if not name : return loc if isinstance ( node, ( self. _ast. Import, self. _ast. ImportFrom ) ) : m = re. search ( ""[,]"" + name + r""\b"", self. source. line ( node. lineno ) ) if : c, _ = m. span ( ) return source. Location ( node. lineno, c + 1 ) elif isinstance ( node, self. _ast. Attribute ) : attr_loc, _ = self. source. get_attr_location ( name, loc ) return attr_loc return loc",False,m is not None,m,0.659475564956665
181,"def create_columns ( self, treeview ) : column = Gtk. TreeViewColumn ( """" ) row_data_index = 1 for i, f in enumerate ( self. fields ) : if f == IconTextRendererColumns. ICON : iconcell = Gtk. CellRendererPixbuf ( ) iconcell. set_property ( ""width"", self. icon_size ( ) + 10 ) column. set_cell_data_func ( iconcell, self. icon, i ) column. pack_start ( iconcell, False ) elif : namecell = Gtk. CellRendererText ( ) namecell. set_property ( ""ellipsize"", Pango. EllipsizeMode. END ) column. pack_start ( namecell, True ) column. add_attribute ( namecell, ""text"", row_data_index ) elif f == IconTextRendererColumns. TITLE_SUBTITLE : namecell = Gtk. CellRendererText ( ) namecell. set_property ( ""ellipsize"", Pango. EllipsizeMode. END ) column. set_cell_data_func ( namecell, self. markup, i ) column. pack_start ( namecell, True ) row_data_index += 1 treeview. append_column ( column )",False,f == IconTextRendererColumns.TITLE,f == IconTextRendererColumns.TITLE_SUBTITLE,0.6661038994789124
182,"def _create_tiny_git_repo ( self, *, copy_files : Optional [ Sequence [ Path ] ] = None ) : with temporary_dir ( ) as gitdir, temporary_dir ( ) as worktree : Path ( worktree, ""README"" ). touch ( ) with initialize_repo ( worktree, gitdir = gitdir ) as git : if : for fp in copy_files : new_fp = Path ( worktree, fp ) safe_mkdir_for ( str ( new_fp ) ) shutil. copy ( fp, new_fp ) yield git, worktree, gitdir",True,copy_files is not None,copy_files is not None,0.656037449836731
183,"def click_outside ( event ) : if event not in d : x, y, z = self. blockFaceUnderCursor [ 0 ] if y == 0 : y = 64 y += 3 gotoPanel. X, gotoPanel. Y, gotoPanel. Z = x, y, z if : d. dismiss ( ""Goto"" )",False,event.num_clicks == 2,x == 0 and z == 64,0.6589517593383789
184,"def get_doc_object ( obj, what = None, doc = None, config = { } ) : if what is None : if inspect. isclass ( obj ) : what = ""class"" elif inspect. ismodule ( obj ) : what = ""module"" elif : what = ""function"" else : what = ""object"" if what == ""class"" : return SphinxClassDoc ( obj, func_doc = SphinxFunctionDoc, doc = doc, config = config ) elif what in ( ""function"", ""method"" ) : return SphinxFunctionDoc ( obj, doc = doc, config = config ) else : if doc is None : doc = pydoc. getdoc ( obj ) return SphinxObjDoc ( obj, doc, config = config )",False,callable(obj),what == 'function',0.6599302291870117
185,"def _attempt_proof_app ( self, current, context, agenda, accessible_vars, atoms, debug ) : f, args = current. uncurry ( ) for i, arg in enumerate ( args ) : if : ctx = f nv = Variable ( ""X%s"" % _counter. get ( ) ) for j, a in enumerate ( args ) : ctx = ctx ( VariableExpression ( nv ) ) if i == j else ctx ( a ) if context : ctx = context ( ctx ). simplify ( ) ctx = LambdaExpression ( nv, ctx ) agenda. put ( arg, ctx ) return self. _attempt_proof ( agenda, accessible_vars, atoms, debug + 1 ) raise Exception ( ""If this method is called, there must be a non-atomic argument"" )",False,not TableauProver.is_atom(arg),f,0.647758960723877
186,"def background_size ( tokens ) : """"""Validation for ``background-size``."""""" if len ( tokens ) == 1 : token = tokens [ 0 ] keyword = get_keyword ( token ) if keyword in ( ""contain"", ""cover"" ) : return keyword if keyword == ""auto"" : return ( ""auto"", ""auto"" ) length = get_length ( token, negative = False, percentage = True ) if length : return ( length, ""auto"" ) elif len ( tokens ) == 2 : values = [ ] for token in tokens : length = get_length ( token, negative = False, percentage = True ) if length : values. append ( length ) elif : values. append ( ""auto"" ) if len ( values ) == 2 : return tuple ( values )",False,get_keyword(token) == 'auto',auto,0.649667501449585
187,"def _extract_subtitles ( url, subtitle_url ) : subtitles = { } if subtitle_url and isinstance ( subtitle_url, compat_str ) : subtitle_url = urljoin ( url, subtitle_url ) STL_EXT = "".stl"" SRT_EXT = "".srt"" subtitles [ ""it"" ] = [ { ""ext"" : ""stl"", ""url"" : subtitle_url, } ] if : srt_url = subtitle_url [ : - len ( STL_EXT ) ] + SRT_EXT subtitles [ ""it"" ]. append ( { ""ext"" : ""srt"", ""url"" : srt_url, } ) return subtitles",False,subtitle_url.endswith(STL_EXT),subtitle_url and subtitle_url.endswith(STL_EXT),0.6486467719078064
188,"def do_status ( self, directory, path ) : if path : try : return next ( self. _gitcmd ( directory, ""status"", ""--porcelain"", ""--ignored"", ""--"", path ) ) [ : 2 ] except StopIteration : return None else : wt_column = "" "" index_column = "" "" untracked_column = "" "" for line in self. _gitcmd ( directory, ""status"", ""--porcelain"" ) : if : untracked_column = ""U"" continue elif line [ 0 ] == ""!"" : continue if line [ 0 ]!= "" "" : index_column = ""I"" if line [ 1 ]!= "" "" : wt_column = ""D"" r = wt_column + index_column + untracked_column return r if r!= "" "" else None",False,line[0] == '?',line[0] == '!',0.6660847663879395
189,"def save ( self, filename = None, ignore_discard = False, ignore_expires = False ) : if filename is None : if self. filename is not None : filename = self. filename else : raise ValueError ( MISSING_FILENAME_TEXT ) f = open ( filename, ""w"" ) try : f. write ( self. header ) now = time. time ( ) for cookie in self : if not ignore_discard and cookie. discard : continue if not ignore_expires and cookie. is_expired ( now ) : continue if cookie. secure : secure = ""TRUE"" else : secure = ""FALSE"" if cookie. domain. startswith ( ""."" ) : initial_dot = ""TRUE"" else : initial_dot = ""FALSE"" if cookie. expires is not None : expires = str ( cookie. expires ) else : expires = """" if : name = """" value = cookie. name else : name = cookie. name ",False,cookie.value is None,len(cookie.name),0.660980761051178
190,"def check_metadata_equal ( df1, df2 ) : for attr in df1. _metadata : if attr == ""_recommendation"" : x = df1. _recommendation y = df2. _recommendation for key in x : if key in y : assert len ( x [ key ] ) == len ( y [ key ] ) for i in range ( len ( x [ key ] ) ) : vis1 = x [ key ] [ i ] vis2 = y [ key ] [ i ] compare_vis ( vis1, vis2 ) elif : x = df1. _rec_info y = df2. _rec_info assert len ( x ) == len ( y ) for i in range ( len ( x ) ) : x_info, y_info = x [ i ], y [ i ] for key in x_info : if key in y_info and key == ""collection"" : assert len ( x_info [ key ] ) == len ( y_info [ key ] ) for i in range ( len ( x_info [ key ] ) ) : vis1 = x_",False,attr == '_rec_info',attr == 'collection',0.6667472124099731
191,"def compute_most_posted ( server, message ) : module, num, keyword, paste_date = message. split ( "";"" ) redis_progression_name_set = ""top_"" + module + ""_set_"" + paste_date server. hincrby ( paste_date, module + ""-"" + keyword, int ( num ) ) date = get_date_range ( 0 ) [ 0 ] keyword_total_sum = 0 curr_value = server. hget ( date, module + ""-"" + keyword ) keyword_total_sum += int ( curr_value ) if curr_value is not None else 0 if server. zcard ( redis_progression_name_set ) < max_set_cardinality : server. zadd ( redis_progression_name_set, float ( keyword_total_sum ), keyword ) else : member_set = server. zrangebyscore ( redis_progression_name_set, ""-inf"", ""+inf"", withscores = True, start = 0, num = 1 ) if : print ( module + "": adding "" + keyword + ""("" + str ( keyword_total_sum ) + "") in set and removing "" + member_set [ 0 ] [ 0 ] + ""("" + str ( member_set [ 0 ] [ 1 ] ) + "")"" ) 0,0.6516745090484619
192,def _split_bitstream ( buf : bytes ) -> Iterator [ bytes ] : i = 0 while True : while ( buf [ i ]!= 0 or buf [ i + 1 ]!= 0 or buf [ i + 2 ]!= 0x01 ) and ( buf [ i ]!= 0 or buf [ i + 1 ]!= 0 or buf [ i + 2 ]!= 0 or buf [ i + 3 ]!= 0x01 ) : i += 1 if i + 4 >= len ( buf ) : return if : i += 1 i += 3 nal_start = i while ( buf [ i ]!= 0 or buf [ i + 1 ]!= 0 or buf [ i + 2 ]!= 0 ) and ( buf [ i ]!= 0 or buf [ i + 1 ]!= 0 or buf [ i + 2 ]!= 0x01 ) : i += 1 if i + 3 >= len ( buf ) : nal_end = len ( buf ) yield buf [ nal_start : nal_end ] return nal_end = i yield buf [ nal_start : nal_end ],False,buf[i] != 0 or buf[i + 1] != 0 or buf[i + 2] != 1,i + 4 >= len(buf),0.6563775539398193
193,"def __init__ ( self, fmt = None, * args ) : if not isinstance ( fmt, BaseException ) : Error. __init__ ( self, fmt, * args ) else : e = fmt cls = e. __class__ fmt = ""%s.%s: %s"" % ( cls. __module__, cls. __name__, e ) tb = sys. exc_info ( ) [ 2 ] if : fmt += ""\n"" fmt += """". join ( traceback. format_tb ( tb ) ) Error. __init__ ( self, fmt )",True,tb,tb,0.7045260667800903
194,"def collect_textmate_scheme ( self, scheme_tree ) : scheme = { } for style in scheme_tree. findall ( "".//dict[key='scope']"" ) : try : cur_style = { } cur_tag = None for elem in style. iter ( ) : if : cur_tag = elem. text elif elem. tag == ""string"" and cur_tag is not None : cur_style [ cur_tag ] = elem. text cur_tag = None if ""scope"" in cur_style : scheme [ cur_style [ ""scope"" ] ] = cur_style except ValueError : pass return scheme",False,elem.tag == 'key',elem.tag == 'string',0.6555716395378113
195,"def get_ending ( lines, begin, end, begin_line, begin_char, type ) : end_line = begin_line end_char = 0 if type == MULT : while end_line < len ( lines ) : start = 0 if end_line == begin_line : start = begin_char + len ( begin ) end_char = lines [ end_line ]. find ( end, start ) if end_char >= 0 : break end_line += 1 end_line += 1 elif type == IN : while end_line < len ( lines ) : start = 0 if end_line == begin_line : start = lines [ end_line ]. index ( begin ) if : break end_line += 1 return end_line, end_char",False,not lines[end_line][start:].strip().startswith(begin),start == 0,0.6547279357910156
196,"def pauseAllDownloads ( self, menu ) : active_gids = download. activeDownloads ( ) f = Open ( download_list_file_active ) download_list_file_active_lines = f. readlines ( ) f. close ( ) for i in range ( len ( download_list_file_active_lines ) ) : download_list_file_active_lines [ i ] = download_list_file_active_lines [ i ]. strip ( ) for gid in active_gids : if gid in download_list_file_active_lines : answer = download. downloadPause ( gid ) if : notifySend ( ""Aria2 did not respond!"", ""Try agian!"", 10000, ""critical"" ) sleep ( 0.3 )",False,answer == 'None',answer == 'Successfully created',0.657417893409729
197,"def _get_requested_databases ( self ) : """"""Returns a list of databases requested, not including ignored dbs"""""" requested_databases = [ ] if ( self. _requested_namespaces is not None ) and ( self. _requested_namespaces!= [ ] ) : for requested_namespace in self. _requested_namespaces : if requested_namespace [ 0 ] is ""*"" : return [ ] elif : requested_databases. append ( requested_namespace [ 0 ] ) return requested_databases",False,requested_namespace[0] not in IGNORE_DBS,requested_namespace[0] not in requested_databases,0.6561185717582703
198,"def read_work_titles ( fields ) : found = [ ] if ""240"" in fields : for line in fields [ ""240"" ] : title = join_subfield_values ( line, [ ""a"", ""m"", ""n"", ""p"", ""r"" ] ) if : found. append ( title ) if ""130"" in fields : for line in fields [ ""130"" ] : title = "" "". join ( get_lower_subfields ( line ) ) if : found. append ( title ) return { ""work_titles"" : found } if found else { }",False,title not in found,title,0.671471118927002
199,"def generic_tag_compiler ( params, defaults, name, node_class, parser, token ) : ""Returns a template.Node subclass."" bits = token. split_contents ( ) [ 1 : ] bmax = len ( params ) def_len = defaults and len ( defaults ) or 0 bmin = bmax - def_len if len ( bits ) < bmin or len ( bits ) > bmax : if : message = ""%s takes %s arguments"" % ( name, bmin ) else : message = ""%s takes between %s and %s arguments"" % ( name, bmin, bmax ) raise TemplateSyntaxError ( message ) return node_class ( bits )",False,bmin == bmax,bmin > bmax,0.6748461723327637
200,"def _handle_control_flow_operator ( self, operator, values ) : if operator == ""$switch"" : if not isinstance ( values, dict ) : raise OperationFailure ( ""$switch requires an object as an argument, "" ""found: %s"" % type ( values ) ) branches = values. get ( ""branches"", [ ] ) if not isinstance ( branches, ( list, tuple ) ) : raise OperationFailure ( ""$switch expected an array for 'branches', "" ""found: %s"" % type ( branches ) ) if not branches : raise OperationFailure ( ""$switch requires at least one branch."" ) for branch in branches : if not isinstance ( branch, dict ) : raise OperationFailure ( ""$switch expected each branch to be an object, "" ""found: %s"" % type ( branch ) ) if : raise OperationFailure ( ""$switch requires each branch have a 'case' expression"" ) if ""then"" not in branch : raise OperationFailure ( ""$switch requires each branch have a 'then' expression."" ) for branch in branches : if self.",False,'case' not in branch,'case' in branch and (not 'then' in branch),0.6548657417297363
201,"def load_stack ( self, stack, index = None ) : self. stack = stack self. clear ( ) for i in range ( len ( stack ) ) : frame, lineno = stack [ i ] try : modname = frame. f_globals [ ""__name__"" ] except : modname = ""?"" code = frame. f_code filename = code. co_filename funcname = code. co_name import linecache sourceline = linecache. getline ( filename, lineno ) import string sourceline = string. strip ( sourceline ) if funcname in ( ""?"", """", None ) : item = ""%s, line %d: %s"" % ( modname, lineno, sourceline ) else : item = ""%s.%s(), line %d: %s"" % ( modname, funcname, lineno, sourceline ) if : item = ""> "" + item self. append ( item ) if index is not None : self. select ( index )",False,i == index,item is not None,0.6747851371765137
202,"def can_read_or_exception ( self, user, doc_class, doc_id, exception_class = PopupException ) : if doc_id is None : return try : ct = ContentType. objects. get_for_model ( doc_class ) doc = Document. objects. get ( object_id = doc_id, content_type = ct ) if : return doc else : message = _ ( ""Permission denied. %(username)s does not have the permissions required to access document %(id)s"" ) % { ""username"" : user. username, ""id"" : doc. id } raise exception_class ( message ) except Document. DoesNotExist : raise exception_class ( _ ( ""Document %(id)s does not exist"" ) % { ""id"" : doc_id } )",False,doc.can_read(user),user.username == doc.id,0.6482595205307007
203,"def _defuse_padding ( self, IR_node ) : auto_pad = IR_node. get_attr ( ""auto_pad"" ) if auto_pad : input_node = self. parent_variable_name ( IR_node ) if : padding = False elif auto_pad. startswith ( ""SAME"" ) : padding = True else : raise ValueError ( ""Unknown padding type [{}]."". format ( auto_pad ) ) return input_node, padding else : padding = IR_node. get_attr ( ""pads"" ) if not is_valid_padding ( padding ) : dim = len ( padding ) // 2 padding_str = list ( ) for i in xrange ( 1, dim ) : padding_str. append ( ( padding [ i ], padding [ i + dim ] ) ) input_node = IR_node. variable_name + ""_pad"" self. add_body ( 1, ""{:<15} = cntk.pad({}, pattern={})"". format ( input_node, self. parent_variable_name ( IR_node ), padding_str ), ) else : input_node = self. parent_variable_name ( IR_node ) return input_node, False",False,auto_pad == 'VALID',input_node.is_const(),0.6594029664993286
204,"def append_chunk ( self, source, chunk ) : try : data = json. loads ( chunk ) except ValueError : logger. error ( ""unable to decode chunk %s"", chunk, exc_info = True ) else : try : ts = data [ ""timestamp"" ] self. results. setdefault ( ts, { } ) for key, value in data [ ""fields"" ]. iteritems ( ) : if data [ ""name"" ] == ""diskio"" : data [ ""name"" ] = ""{metric_name}-{disk_id}"". format ( metric_name = data [ ""name"" ], disk_id = data [ ""tags"" ] [ ""name"" ] ) elif data [ ""name"" ] == ""net"" : data [ ""name"" ] = ""{metric_name}-{interface}"". format ( metric_name = data [ ""name"" ], interface = data [ ""tags"" ] [ ""interface"" ] ) elif data [ ""name"" ] == ""cpu"" : data [ ""name"" ] = ""{metric_name}-{cpu_id}"". format ( metric_name = data [ ""name"" ], cpu_id = data [ ""tags"" ] [ ""cpu"" ] ) key = data [ ""name"" ]",False,key.endswith('_exec_value'),data['fields'],0.6448678970336914
205,"def CastClass ( c, graph = None ) : graph = graph is None and c. factoryGraph or graph for kind in graph. objects ( subject = classOrIdentifier ( c ), predicate = RDF. type ) : if kind == OWL_NS. Restriction : kwArgs = { ""identifier"" : classOrIdentifier ( c ), ""graph"" : graph } for s, p, o in graph. triples ( ( classOrIdentifier ( c ), None, None ) ) : if p!= RDF. type : if p == OWL_NS. onProperty : kwArgs [ ""onProperty"" ] = o else : if : continue kwArgs [ str ( p. split ( OWL_NS ) [ - 1 ] ) ] = o if not set ( [ str ( i. split ( OWL_NS ) [ - 1 ] ) for i in Restriction. restrictionKinds ] ). intersection ( kwArgs ) : raise MalformedClass ( ""Malformed owl:Restriction"" ) return Restriction ( ** kwArgs ) else : for s, p, o in graph. triples_choices ( ( classOrIdentifier ( c ), [ OWL_NS. intersectionOf, OWL_NS. unionOf, OWL_NS",False,p not in Restriction.restrictionKinds,s == OWL_NS.queryByC,0.6659875512123108
206,"def get_unique_attribute ( self, name : str ) : feat = None for f in self. features : if self. _return_feature ( f ) and hasattr ( f, name ) : if : raise RuntimeError ( ""The attribute was not unique."" ) feat = f if feat is None : raise RuntimeError ( ""The attribute did not exist"" ) return getattr ( feat, name )",False,feat is not None,f.unique(),0.6638916730880737
207,"def _patch ( ) : """"""Monkey-patch pyopengl to fix a bug in glBufferSubData."""""" import sys from OpenGL import GL if sys. version_info > ( 3, ) : buffersubdatafunc = GL. glBufferSubData if : buffersubdatafunc = buffersubdatafunc. wrapperFunction _m = sys. modules [ buffersubdatafunc. __module__ ] _m. long = int try : from OpenGL. GL. VERSION import GL_2_0 GL_2_0. GL_OBJECT_SHADER_SOURCE_LENGTH = GL_2_0. GL_SHADER_SOURCE_LENGTH except Exception : pass",False,"hasattr(buffersubdatafunc, 'wrapperFunction')",buffersubdatafunc.wrapperFunction is not None,0.6605888605117798
208,"def formatmonthname ( self, theyear, themonth, withyear = True ) : with TimeEncoding ( self. locale ) as encoding : s = month_name [ themonth ] if encoding is not None : s = s. decode ( encoding ) if : s = ""%s %s"" % ( s, theyear ) return '%s' % s",False,withyear,withyear is True,0.6964929103851318
209,"def _write_summaries ( self, summary_dict, relative_path = """" ) : for name, value in summary_dict. items ( ) : if : self. _write_summaries ( value, relative_path = os. path. join ( relative_path, name ) ) else : with self. summary_writer ( relative_path ). as_default ( ) : self. _summary_fn ( name, value, step = self. _global_step )",False,"isinstance(value, dict)",relative_path,0.6482176780700684
210,"def execute_many ( self, query : str, values : list ) -> None : async with self. acquire_connection ( ) as connection : self. log. debug ( ""%s: %s"", query, values ) async with connection. cursor ( ) as cursor : if : await connection. begin ( ) try : await cursor. executemany ( query, values ) except Exception : await connection. rollback ( ) raise else : await connection. commit ( ) else : await cursor. executemany ( query, values )",False,self.capabilities.supports_transactions,self.enable_multi_tab,0.6564269065856934
211,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. I32 : self. protocol_version = iprot. readI32 ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRING : self. propertyName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. STRING : self. defaultValue = iprot. readString ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 2,fid == 2,0.675895094871521
212,"def _iter_ns_range ( self ) : """"""Iterates over self._ns_range, delegating to self._iter_key_range()."""""" while True : if : query = self. _ns_range. make_datastore_query ( ) namespace_result = query. Get ( 1 ) if not namespace_result : break namespace = namespace_result [ 0 ]. name ( ) or """" self. _current_key_range = key_range. KeyRange ( namespace = namespace, _app = self. _ns_range. app ) yield ALLOW_CHECKPOINT for key, o in self. _iter_key_range ( copy. deepcopy ( self. _current_key_range ) ) : self. _current_key_range. advance ( key ) yield o if ( self. _ns_range. is_single_namespace or self. _current_key_range. namespace == self. _ns_range. namespace_end ) : break self. _ns_range = self. _ns_range. with_start_after ( self. _current_key_range. namespace ) self. _current_key_range = None",False,self._current_key_range is None,self._current_key_range,0.6533119678497314
213,"def __init__ ( self, artifact = None, pad = None ) : if pad is None : if : raise TypeError ( ""Either artifact or pad is needed to "" ""construct a context."" ) pad = artifact. build_state. pad if artifact is not None : self. artifact = artifact self. source = artifact. source_obj self. build_state = self. artifact. build_state else : self. artifact = None self. source = None self. build_state = None self. exc_info = None self. pad = pad self. referenced_dependencies = set ( ) self. referenced_virtual_dependencies = { } self. sub_artifacts = [ ] self. flow_block_render_stack = [ ] self. _forced_base_url = None self. cache = { } self. _dependency_collectors = [ ]",True,artifact is None,artifact is None,0.6947907209396362
214,"def _fix_default ( self, action ) : if ( hasattr ( action, ""default"" ) and hasattr ( action, ""dest"" ) and action. default!= SUPPRESS ) : as_type = get_type ( action ) names = OrderedDict ( ( i. lstrip ( ""-"" ). replace ( ""-"", ""_"" ), None ) for i in action. option_strings ) outcome = None for name in names : outcome = get_env_var ( name, as_type, self. env ) if : break if outcome is None and self. file_config : for name in names : outcome = self. file_config. get ( name, as_type ) if : break if : action. default, action. default_source = outcome else : outcome = action. default, ""default"" self. options. set_src ( action. dest, * outcome )",True,outcome is not None,outcome is not None,0.6617662906646729
215,"def disassemble ( self, byte_parser, histogram = False ) : """"""Disassemble code, for ad-hoc experimenting."""""" for bp in byte_parser. child_parsers ( ) : if bp. text : srclines = bp. text. splitlines ( ) else : srclines = None print ( ""\n%s: "" % bp. code ) upto = None for disline in disgen. disgen ( bp. code ) : if histogram : opcode_counts [ disline. opcode ] += 1 continue if : if srclines : upto = upto or disline. lineno - 1 while upto <= disline. lineno - 1 : print ( ""%100s%s"" % ( """", srclines [ upto ] ) ) upto += 1 elif disline. offset > 0 : print ( """" ) line = disgen. format_dis_line ( disline ) print ( ""%-70s"" % ( line, ) ) print ( """" )",False,disline.first,not upto,0.6575723886489868
216,"def send_request ( self, request_method, * args, ** kwargs ) : request_func = getattr ( self. client, request_method ) status_code = None if ""content_type"" not in kwargs and request_method!= ""get"" : kwargs [ ""content_type"" ] = ""application/json"" if ( ""data"" in kwargs and request_method!= ""get"" and kwargs [ ""content_type"" ] == ""application/json"" ) : data = kwargs. get ( ""data"", """" ) kwargs [ ""data"" ] = json. dumps ( data ) if ""status_code"" in kwargs : status_code = kwargs. pop ( ""status_code"" ) if hasattr ( self, ""token"" ) : if : kwargs [ ""HTTP_AUTHORIZATION"" ] = ""JWT %s"" % self. token else : kwargs [ ""HTTP_AUTHORIZATION"" ] = ""Token %s"" % self. token self. response = request_func ( * args, ** kwargs ) is_json = bool ( [ x for x in self. response. _headers [ ""content-type"" ] if ""json"" in x ] ) self. response. json = { } if is_json and self. response. content : self. response. json = json. loads ( force_text ( self. response. content ) ) if status_code : self. assertEqual ( self. response. status_code, status_code ) return self. response",False,"getattr(settings, 'REST_USE_JWT', False)",self.token,0.6538990139961243
217,"def _wait_for_bot_presense ( self, online ) : for _ in range ( 10 ) : time. sleep ( 2 ) if : break if not online and not self. _is_testbot_online ( ) : break else : raise AssertionError ( ""test bot is still {}"". format ( ""offline"" if online else ""online"" ) )",False,online and self._is_testbot_online(),online and self._is_offline(),0.6514002084732056
218,"def set_logging ( self, showOnCmd = True, loggingFile = None, loggingLevel = logging. INFO ) : if showOnCmd!= self. showOnCmd : if showOnCmd : self. logger. addHandler ( self. cmdHandler ) else : self. logger. removeHandler ( self. cmdHandler ) self. showOnCmd = showOnCmd if loggingFile!= self. loggingFile : if : self. logger. removeHandler ( self. fileHandler ) self. fileHandler. close ( ) if loggingFile is not None : self. fileHandler = logging. FileHandler ( loggingFile ) self. logger. addHandler ( self. fileHandler ) self. loggingFile = loggingFile if loggingLevel!= self. loggingLevel : self. logger. setLevel ( loggingLevel ) self. loggingLevel = loggingLevel",False,self.loggingFile is not None,self.fileHandler is not None,0.6544582843780518
219,"def render ( self, mcanv, op, idx ) : value = self. imm hint = mcanv. syms. getSymHint ( op. va, idx ) if hint is not None : if mcanv. mem. isValidPointer ( value ) : mcanv. addVaText ( hint, value ) else : mcanv. addNameText ( hint ) elif mcanv. mem. isValidPointer ( value ) : name = addrToName ( mcanv, value ) mcanv. addVaText ( name, value ) else : if : mcanv. addNameText ( ""0x%.4x:0x%.8x"" % ( value >> 32, value & 0xFFFFFFFF ) ) elif self. imm >= 4096 : mcanv. addNameText ( ""0x%.8x"" % value ) else : mcanv. addNameText ( str ( value ) )",False,self.tsize == 6,self.imm >= 512,0.6593555212020874
220,"def _guardAgainstUnicode ( self, data ) : if _pythonMajorVersion < 3 : if : data = data. encode ( ""utf8"" ) else : if isinstance ( data, str ) : try : return data. encode ( ""ascii"" ) except UnicodeEncodeError : pass raise ValueError ( ""pyDes can only work with encoded strings, not Unicode."" ) return data",True,"isinstance(data, unicode)","isinstance(data, unicode)",0.6502571105957031
221,"def get ( self, block = True, timeout = None ) : if block and timeout is None : self. _rlock. acquire ( ) try : res = self. _recv ( ) self. _sem. release ( ) return res finally : self. _rlock. release ( ) else : if : deadline = time. time ( ) + timeout if not self. _rlock. acquire ( block, timeout ) : raise Empty try : if not self. _poll ( block and ( deadline - time. time ( ) ) or 0.0 ) : raise Empty res = self. _recv ( ) self. _sem. release ( ) return res finally : self. _rlock. release ( )",False,block,timeout is not None,0.6791608333587646
222,"def __init__ ( self, name, lines ) : self. name = name self. flds = [ ] self. parms = { } self. recfmt = None line_pat = re. compile ( r""(\w+) = (.*)"", re. VERBOSE ) for lne in lines : mtch = line_pat. match ( lne ) if : self. parms [ mtch. group ( 1 ) ] = mtch. group ( 2 ) self. fileext = self. parms. get ( ""fileext"", None ) if ""n_fields"" in self. parms : self. get_fields ( ) self. recfmt = self. get_recfmt ( ) self. nam2fld = { } self. nam2idx = { } self. recflds = [ ] j = 0 for fld in enumerate ( self. flds ) : nam = fld [ 1 ]. name self. nam2fld [ nam ] = fld if fld [ 1 ]. size!= 0 : self. nam2idx [ nam ] = j self. recflds. append ( fld [ 1 ] ) j += 1",True,mtch,mtch,0.7154645919799805
223,"def __getitem__ ( self, key ) : arch = self. _project. arch if key in arch. registers : reg_offset, size = arch. registers [ key ] cfg_node = self. _cfg. model. get_any_node ( self. _insn_addr, anyaddr = True ) if cfg_node is None : raise KeyError ( ""CFGNode for instruction %#x is not found."" % self. _insn_addr ) vex_block = self. _project. factory. block ( cfg_node. addr, size = cfg_node. size, opt_level = self. _cfg. _iropt_level ). vex stmt_idx = None insn_addr = cfg_node. addr for i, stmt in enumerate ( vex_block. statements ) : if isinstance ( stmt, pyvex. IRStmt. IMark ) : insn_addr = stmt. addr + stmt. delta elif insn_addr == self. _insn_addr : if : stmt_idx = i break elif insn_addr > self. _insn_addr : break if stmt_idx is None : raise KeyError ( ""Cannot find the statement."" ) variable = SimRegisterVariable ( reg_offset, size ) if not dependency_manager : valid_dep_managers = RUNTIME_TO_DEPENDENCY_MANAGERS. get ( runtime ) if valid_dep_managers is None : dependency_manager = None elif : dependency_manager = valid_dep_managers [ 0 ] else : choices = list ( map ( str, range ( 1, len ( valid_dep_managers ) + 1 ) ) ) choice_num = 1 click. echo ( ""\nWhich dependency manager would you like to use?"" ) for dm in valid_dep_managers : msg = ""\t"" + str ( choice_num ) + "" - "" + dm click. echo ( msg ) choice_num = choice_num + 1 choice = click. prompt ( ""Dependency manager"", type = click. Choice ( choices ), show_choices = False ) dependency_manager = valid_dep_managers [ int ( choice ) - 1 ] return dependency_manager",True,len(valid_dep_managers) == 1,len(valid_dep_managers) == 1,0.6554791331291199
225,"def compare ( self, first, second, scope_bracket = False ) : """"""Compare brackets. 
This function allows bracket plugins to add additional logic."""""" if : match = first is not None and second is not None else : match = first. type == second. type if not self. rules. check_compare : return match if match : if : bracket = self. rules. scopes [ first. scope ] [ ""brackets"" ] [ first. type ] else : bracket = self. rules. brackets [ first. type ] try : if bracket. compare is not None and match : match = bracket. compare ( bracket. name, bh_plugin. BracketRegion ( first. begin, first. end ), bh_plugin. BracketRegion ( second. begin, second. end ), self. search. get_buffer ( ), ) except Exception : log ( ""Plugin Compare Error:\n%s"" % str ( traceback. format_exc ( ) ) ) return match",True,scope_bracket,scope_bracket,0.6667361259460449
226,"def _to_record ( self, data, zone ) : records = [ ] rrset_values = data [ ""rrset_values"" ] multiple_value_record = len ( rrset_values ) > 1 for index, rrset_value in enumerate ( rrset_values ) : record = self. _to_record_sub ( data, zone, rrset_value ) record. extra [ ""_multi_value"" ] = multiple_value_record if multiple_value_record : record. extra [ ""_other_records"" ] = [ ] records. append ( record ) if multiple_value_record : for index in range ( 0, len ( records ) ) : record = records [ index ] for other_index, other_record in enumerate ( records ) : if : continue extra = copy. deepcopy ( other_record. extra ) extra. pop ( ""_multi_value"" ) extra. pop ( ""_other_records"" ) item = { ""name"" : other_record. name, ""data"" : other_record. data, ""type"" : other_record. type, ""extra"" : extra, } record. extra [ ""_other_records"" ]. append ( item ) return records",False,index == other_index,"hasattr(other_record, '_other_records')",0.656635046005249
227,"def decompress ( self, data ) : if not data : return data if not self. _first_try : return self. _obj. decompress ( data ) self. _data += data try : decompressed = self. _obj. decompress ( data ) if : self. _first_try = False self. _data = None return decompressed except zlib. error : self. _first_try = False self. _obj = zlib. decompressobj ( - zlib. MAX_WBITS ) try : return self. decompress ( self. _data ) finally : self. _data = None",True,decompressed,decompressed,0.6855396032333374
228,"def CountButtons ( self ) : """"""Returns the number of visible buttons in the docked pane."""""" n = 0 if self. HasCaption ( ) or self. HasCaptionLeft ( ) : if isinstance ( wx. GetTopLevelParent ( self. window ), AuiFloatingFrame ) : return 1 if self. HasCloseButton ( ) : n += 1 if self. HasMaximizeButton ( ) : n += 1 if : n += 1 if self. HasPinButton ( ) : n += 1 return n",False,self.HasMinimizeButton(),self.HasPinButton(),0.661885142326355
229,"def layer_op ( self, image ) : if image. ndim == 3 : return self. __make_mask_3d ( image ) if image. ndim == 5 : mod_to_mask = [ m for m in range ( image. shape [ 4 ] ) if np. any ( image [..., :, m ] ) ] mask = np. zeros_like ( image, dtype = bool ) mod_mask = None for mod in mod_to_mask : for t in range ( image. shape [ 3 ] ) : mask [..., t, mod ] = self. __make_mask_3d ( image [..., t, mod ] ) if : if mod_mask is None : mod_mask = np. zeros ( image. shape [ : 4 ], dtype = bool ) mod_mask = np. logical_or ( mod_mask, mask [..., mod ] ) elif self. multimod_fusion == ""and"" : if mod_mask is None : mod_mask = np. ones ( image. shape [ : 4 ], dtype = bool ) mod_mask = np. logical_and ( mod_mask, mask [..., mod ] ) for mod in mod_to_mask : mask [..., mod ] = mod_mask return mask else : raise ValueError ( ""unknown input format"" )",True,self.multimod_fusion == 'or',self.multimod_fusion == 'or',0.6509130597114563
230,"def process_resource ( self, resource, related ) : related_ids = self. get_related_ids ( [ resource ] ) model = self. manager. get_model ( ) op = self. data. get ( ""operator"", ""or"" ) found = [ ] if self. data. get ( ""match-resource"" ) is True : self. data [ ""value"" ] = self. get_resource_value ( self. data [ ""key"" ], resource ) if self. data. get ( ""value_type"" ) == ""resource_count"" : count_matches = OPERATORS [ self. data. get ( ""op"" ) ] ( len ( related_ids ), self. data. get ( ""value"" ) ) if count_matches : self. _add_annotations ( related_ids, resource ) return count_matches for rid in related_ids : robj = related. get ( rid, None ) if robj is None : self. log. warning ( ""Resource %s:%s references non existant %s: %s"", self. manager. type, resource [ model. id ], self. RelatedResource. rsplit ( ""."", 1 ) [ - 1 ], rid, ) continue if : found. append ( rid ) if found : self. _add_annotations ( found, resource ) if op == ""or"" and found : return True elif op == ""and"" and len ( found",False,self.match(robj),rid in related_ids,0.6497014760971069
231,"def write_custom_dns_config ( config, env ) : from collections import OrderedDict config = list ( config ) dns = OrderedDict ( ) seen_qnames = set ( ) for qname in [ rec [ 0 ] for rec in config ] : if qname in seen_qnames : continue seen_qnames. add ( qname ) records = [ ( rec [ 1 ], rec [ 2 ] ) for rec in config if rec [ 0 ] == qname ] if : dns [ qname ] = records [ 0 ] [ 1 ] else : dns [ qname ] = OrderedDict ( ) seen_rtypes = set ( ) for rtype in [ rec [ 0 ] for rec in records ] : if rtype in seen_rtypes : continue seen_rtypes. add ( rtype ) values = [ rec [ 1 ] for rec in records if rec [ 0 ] == rtype ] if len ( values ) == 1 : values = values [ 0 ] dns [ qname ] [ rtype ] = values config_yaml = rtyaml. dump ( dns ) with open ( os. path. join ( env [ ""STORAGE_ROOT"" ], ""dns/custom.yaml"" ), ""w"" ) as f : f. write ( config_yaml )",False,len(records) == 1 and records[0][0] == 'A',len(records) == 1,0.6518056988716125
232,"def translate ( self, line ) : parsed = self. RE_LINE_PARSER. match ( line ) if parsed : value = parsed. group ( 3 ) stage = parsed. group ( 1 ) if : return ""\n# HTTP Request:\n"" + self. stripslashes ( value ) elif stage == ""reply"" : return ""\n\n# HTTP Response:\n"" + self. stripslashes ( value ) elif stage == ""header"" : return value + ""\n"" else : return value return line",False,stage == 'send',stage == 'request',0.665850043296814
233,"def _encode_regex ( name, value, dummy0, dummy1 ) : """"""Encode a python regex or bson.regex.Regex."""""" flags = value. flags if flags == 0 : return b""\x0B"" + name + _make_c_string_check ( value. pattern ) + b""\x00"" elif flags == re. UNICODE : return b""\x0B"" + name + _make_c_string_check ( value. pattern ) + b""u\x00"" else : sflags = b"""" if flags & re. IGNORECASE : sflags += b""i"" if flags & re. LOCALE : sflags += b""l"" if : sflags += b""m"" if flags & re. DOTALL : sflags += b""s"" if flags & re. UNICODE : sflags += b""u"" if flags & re. VERBOSE : sflags += b""x"" sflags += b""\x00"" return b""\x0B"" + name + _make_c_string_check ( value. pattern ) + sflags",False,flags & re.MULTILINE,flags & re.DOTALL,0.6712432503700256
234,"def find_field_type_differ ( self, meta, table_description, table_name, func = None ) : db_fields = dict ( [ ( row [ 0 ], row ) for row in table_description ] ) for field in all_local_fields ( meta ) : if field. name not in db_fields : continue description = db_fields [ field. name ] model_type = self. get_field_model_type ( field ) db_type = self. get_field_db_type ( description, field, table_name ) if : model_type, db_type = func ( field, description, model_type, db_type ) if not self. strip_parameters ( db_type ) == self. strip_parameters ( model_type ) : self. add_difference ( ""field-type-differ"", table_name, field. name, model_type, db_type )",False,func,func is not None,0.6735049486160278
235,"def _activate_plugins_of_category ( self, category ) : """"""Activate all the plugins of a given category and return them."""""" plugins = [ ] for plugin_info in self. plugin_manager. getPluginsOfCategory ( category ) : if : self. plugin_manager. removePluginFromCategory ( plugin_info, category ) else : self. plugin_manager. activatePluginByName ( plugin_info. name ) plugin_info. plugin_object. set_site ( self ) plugins. append ( plugin_info ) return plugins",False,plugin_info.name in self.config.get('DISABLED_PLUGINS'),plugin_info.plugin_object.get_site() == self,0.6542697548866272
236,"def makeStaircaseCtrls ( self ) : """"""Setup the controls for a StairHandler"""""" panel = wx. Panel ( parent = self ) panelSizer = wx. GridBagSizer ( 5, 5 ) panel. SetSizer ( panelSizer ) row = 0 handler = self. stairHandler for fieldName in handler. params : try : label = handler. params [ fieldName ]. label if not label : label = fieldName except Exception : label = fieldName if : continue if fieldName in self. globalCtrls : ctrls = self. globalCtrls [ fieldName ] else : ctrls = ParamCtrls ( dlg = self, parent = panel, label = label, fieldName = fieldName, param = handler. params [ fieldName ], ) panelSizer. Add ( ctrls. nameCtrl, [ row, 0 ] ) if hasattr ( ctrls. valueCtrl, ""_szr"" ) : panelSizer. Add ( ctrls. valueCtrl. _szr, [ row, 1 ] ) else : panelSizer. Add ( ctrls. valueCtrl, [ row, 1 ] ) self.label),0.6770456433296204
237,"def get_rules ( self, map ) : for rulefactory in self. rules : for rule in rulefactory. get_rules ( map ) : new_defaults = subdomain = None if : new_defaults = { } for key, value in iteritems ( rule. defaults ) : if isinstance ( value, string_types ) : value = format_string ( value, self. context ) new_defaults [ key ] = value if rule. subdomain is not None : subdomain = format_string ( rule. subdomain, self. context ) new_endpoint = rule. endpoint if isinstance ( new_endpoint, string_types ) : new_endpoint = format_string ( new_endpoint, self. context ) yield Rule ( format_string ( rule. rule, self. context ), new_defaults, subdomain, rule. methods, rule. build_only, new_endpoint, rule. strict_slashes, )",False,rule.defaults,rule.defaults is not None,0.6711133718490601
238,"def cmd_exec_stdout ( self, command, errormsg = """", log = True ) : """"""Run shell command from Python"""""" try : log and Log. debug ( self, ""Running command: {0}"". format ( command ) ) with subprocess. Popen ( [ command ], stdout = subprocess. PIPE, stderr = subprocess. PIPE, shell = True ) as proc : ( cmd_stdout_bytes, cmd_stderr_bytes ) = proc. communicate ( ) ( cmd_stdout, cmd_stderr ) = ( cmd_stdout_bytes. decode ( ""utf-8"", ""replace"" ), cmd_stderr_bytes. decode ( ""utf-8"", ""replace"" ), ) if : Log. debug ( self, ""Command Output: {0}, \nCommand Error: {1}"". format ( cmd_stdout, cmd_stderr ), ) return cmd_stdout else : Log. debug ( self, ""Command Output: {0}, \nCommand Error: {1}"". format ( cmd_stdout, cmd_stderr ), ) return cmd_stdout except OSError as e : Log. debug ( self, str ( e ) ) <",False,proc.returncode == 0,log,0.6539502739906311
239,"def getUnread ( self ) : unreadMessages = 0 unreadSubscriptions = 0 queryreturn = sqlQuery ( """"""SELECT msgid, toaddress, read FROM inbox where folder='inbox' """""" ) for row in queryreturn : msgid, toAddress, read = row try : if toAddress == str_broadcast_subscribers : toLabel = str_broadcast_subscribers else : toLabel = shared. config. get ( toAddress, ""label"" ) except : toLabel = """" if toLabel == """" : toLabel = toAddress if not read : if : unreadSubscriptions = unreadSubscriptions + 1 else : unreadMessages = unreadMessages + 1 return unreadMessages, unreadSubscriptions",False,toLabel == str_broadcast_subscribers,toLabel != toLabel,0.6621248722076416
240,"def populate_disk_bus_combo ( self, devtype, no_default ) : buslist = self. widget ( ""disk-bus-combo"" ) busmodel = buslist. get_model ( ) busmodel. clear ( ) buses = [ ] if devtype == virtinst. VirtualDisk. DEVICE_FLOPPY : buses. append ( [ ""fdc"", ""Floppy"" ] ) elif devtype == virtinst. VirtualDisk. DEVICE_CDROM : buses. append ( [ ""ide"", ""IDE"" ] ) if self. vm. rhel6_defaults ( ) : buses. append ( [ ""scsi"", ""SCSI"" ] ) else : if self. vm. is_hvm ( ) : buses. append ( [ ""ide"", ""IDE"" ] ) if self. vm. rhel6_defaults ( ) : buses. append ( [ ""scsi"", ""SCSI"" ] ) buses. append ( [ ""usb"", ""USB"" ] ) if : buses. append ( [ ""sata"", ""SATA"" ] ) buses. append ( [ ""virtio"", ""Virtio"" ] ) if self. vm. conn. is_xen ( ) or self. vm. get_hv_type ( ) == ""test"" : buses. append ( [ ""xen"", ""Xen"" ] ) for row in buses : busmodel. append ( row ) if not no_default : busmodel. append ( [ None, ""default"" ] )",False,"self.vm.get_hv_type() in ['kvm', 'test']",self.vm.conn.is_sata(),0.6479510068893433
241,"def _find_w9xpopen ( self ) : """"""Find and return absolute path to w9xpopen.exe"""""" w9xpopen = os. path. join ( os. path. dirname ( GetModuleFileName ( 0 ) ), ""w9xpopen.exe"" ) if : w9xpopen = os. path. join ( os. path. dirname ( sys. exec_prefix ), ""w9xpopen.exe"" ) if : raise RuntimeError ( ""Cannot locate w9xpopen.exe, which is "" ""needed for Popen to work with your "" ""shell or platform."" ) return w9xpopen",True,not os.path.exists(w9xpopen),not os.path.exists(w9xpopen),0.6520127058029175
242,"def get_first_param_index ( self, group_id, param_group, partition_id ) : for index, param in enumerate ( param_group ) : param_id = self. get_param_id ( param ) if : return index return None",False,partition_id in self.param_to_partition_ids[group_id][param_id],param_id == group_id and param_id == partition_id,0.6472944617271423
243,"def parse_bash_set_output ( output ) : """"""Parse Bash-like'set' output"""""" if not sys. platform. startswith ( ""win"" ) : output = output. replace ( ""\\\n"", """" ) environ = { } for line in output. splitlines ( 0 ) : line = line. rstrip ( ) if : continue item = _ParseBashEnvStr ( line ) if item : environ [ item [ 0 ] ] = item [ 1 ] return environ",True,not line,not line,0.677064836025238
244,"def _convert_to_seconds ( value ) : """"""Converts TTL strings into seconds"""""" try : return int ( value ) except ValueError : seconds = 0 ttl_string = value. lower ( ) for component in [ ""w"", ""d"", ""h"", ""m"", ""s"" ] : regex = date_regex_dict [ component ] [ ""regex"" ] match = regex. search ( ttl_string ) if : match_string = match. group ( 0 ) ttl_string = ttl_string. replace ( match_string, """" ) match_value = int ( match_string. strip ( component ) ) seconds += match_value * date_regex_dict [ component ] [ ""scale"" ] if not ttl_string : return seconds try : seconds += int ( ttl_string ) return seconds except ValueError : raise InvalidArgumentValueError ( ""Unable to convert value '{}' to seconds."". format ( value ) )",True,match,match,0.6835880279541016
245,"def test_sin_values ( ) : firstval = None for i, v in zip ( range ( 1000 ), sin_values ( ) ) : assert - 1 <= v <= 1 assert isclose ( v, sin ( radians ( i ) ), abs_tol = 1e-9 ) if i == 0 : firstval = v else : if : assert v == firstval for period in ( 360, 100 ) : firstval = None for i, v in zip ( range ( 1000 ), sin_values ( period ) ) : assert - 1 <= v <= 1 if i == 0 : firstval = v else : if i % period == 0 : assert v == firstval",False,i % 360 == 0,i % period == 1,0.6792773008346558
246,"def wait_complete ( self ) : """"""Wait for futures complete done."""""" for future in concurrent. futures. as_completed ( self. _futures. keys ( ) ) : try : error = future. exception ( ) except concurrent. futures. CancelledError : break name = self. _futures [ future ] if : err_msg = 'Extracting ""{0}"", got: {1}'. format ( name, error ) logger. error ( err_msg )",True,error is not None,error is not None,0.6587857007980347
247,"def _wrapper ( self, * args, ** kwargs ) : if self. rebuild is False : self. rebuild = True if : logger. info ( ""[Warning] Vocabulary has reached the max size {} when calling {} method. "" ""Adding more words may cause unexpected behaviour of Vocabulary. "". format ( self. max_size, func. __name__ ) ) return func ( self, * args, ** kwargs )",False,self.max_size is not None and len(self.word_count) >= self.max_size,self.max_size is not None,0.6538647413253784
248,"def formatted_addon ( self, obj ) : if obj. version : return format_html ( '{}' ""
"" """" "" "" "" "" ""
Version:{}
Channel:{}
"", urljoin ( settings. EXTERNAL_SITE_URL, reverse ( ""reviewers.review"", args = [ ( ""listed"" if : else ""unlisted"" ), obj. version. addon. id, ], ), ), obj. version. addon. name, obj. version. version, obj. version. get_channel_display ( ), ) return ""-""",False,obj.version.channel == amo.RELEASE_CHANNEL_LISTED,self.has_tab,0.6566486358642578
249,"def home ( request ) : from django. conf import settings print ( settings. SOME_VALUE ) subject = None message = None size = 0 print ( request. META ) if request. POST : form = MsgForm ( request. POST, request. FILES ) print ( request. FILES ) if : subject = form. cleaned_data [ ""subject"" ] message = form. cleaned_data [ ""message"" ] f = request. FILES [ ""f"" ] if not hasattr ( f, ""fileno"" ) : size = len ( f. read ( ) ) else : try : size = int ( os. fstat ( f. fileno ( ) ) [ 6 ] ) except io. UnsupportedOperation : size = len ( f. read ( ) ) else : form = MsgForm ( ) return render ( request, ""home.html"", { ""form"" : form, ""subject"" : subject, ""message"" : message, ""size"" : size }, )",True,form.is_valid(),form.is_valid(),0.6509881019592285
250,"def backup_txs ( self, txs, is_unspendable ) : undo_info = self. db. read_undo_info ( self. height ) if undo_info is None : raise ChainError ( ""no undo information found for height {:,d}"". format ( self. height ) ) put_utxo = self. utxo_cache. __setitem__ spend_utxo = self. spend_utxo add_touched = self. touched. add undo_entry_len = 13 + HASHX_LEN n = 0 for tx, tx_hash in txs : for txin in tx. inputs : if : continue undo_item = undo_info [ n : n + undo_entry_len ] put_utxo ( txin. prev_hash + pack_le_uint32 ( txin. prev_idx ), undo_item ) add_touched ( undo_item [ : - 13 ] ) n += undo_entry_len assert n == len ( undo_info ) for tx, tx_hash in txs : for idx, txout in enumerate ( tx. outputs ) : if is_unspendable ( txout. pk_script ) : continue cache_value = spend_utxo ( tx_hash, idx ) add_touched ( cache_value [ : - 13 ] ) self. tx_count -= len ( txs )",False,txin.is_generation(),n >= len(undo_info),0.6525170207023621
251,"def __setitem__ ( self, key, value ) : key = self. __fixkey__ ( key ) checker = self. get_rule ( key ) [ self. RULE_CHECKER ] if not checker is True : if checker is False : if isinstance ( value, dict ) and isinstance ( self [ key ], dict ) : for k, v in value. iteritems ( ) : self [ key ] [ k ] = v return raise ConfigValueError ( _ ( ""Modifying %s/%s is not "" ""allowed"" ) % ( self. _name, key ) ) elif isinstance ( checker, ( list, set, tuple ) ) : if : raise ConfigValueError ( _ ( ""Invalid value for %s/%s: %s"" ) % ( self. _name, key, value ) ) elif isinstance ( checker, ( type, type ( RuledContainer ) ) ) : try : if value is None : value = checker ( ) else : value = checker ( value ) except ( ConfigValueError ) : raise except ( validators. IgnoreValue ) : return except ( ValueError, TypeError ) : raise",False,value not in checker,value is not None and checker(value),0.668428361415863
252,"def _merge_dict ( stack, obj ) : strategy = obj. pop ( ""__"", ""merge-last"" ) if strategy not in strategies : raise Exception ( 'Unknown strategy ""{0}"", should be one of {1}'. format ( strategy, strategies ) ) if strategy == ""overwrite"" : return _cleanup ( obj ) else : for k, v in six. iteritems ( obj ) : if strategy == ""remove"" : stack. pop ( k, None ) continue if k in stack : if strategy == ""merge-first"" : stack_k = stack [ k ] stack [ k ] = _cleanup ( v ) v = stack_k if type ( stack [ k ] )!= type ( v ) : log. debug ( ""Force overwrite, types differ: '%s'!= '%s'"", stack [ k ], v ) stack [ k ] = _cleanup ( v ) elif : stack [ k ] = _merge_dict ( stack [ k ], v ) elif isinstance ( v, list ) : <",False,"isinstance(v, dict)","isinstance(stack[k], dict)",0.6511378288269043
253,"def icyparser ( self, url : str ) -> Optional [ str ] : try : async with self. session. get ( url, headers = { ""Icy-MetaData"" : ""1"" } ) as resp : metaint = int ( resp. headers [ ""icy-metaint"" ] ) for _ in range ( 5 ) : await resp. content. readexactly ( metaint ) metadata_length = ( struct. unpack ( ""B"", await resp. content. readexactly ( 1 ) ) [ 0 ] * 16 ) metadata = await resp. content. readexactly ( metadata_length ) m = re. search ( STREAM_TITLE, metadata. rstrip ( b""\0"" ) ) if : title = m. group ( 1 ) if title : title = title. decode ( ""utf-8"", errors = ""replace"" ) return title else : return None except ( KeyError, aiohttp. ClientConnectionError, aiohttp. ClientResponseError ) : return None",True,m,m,0.6963084936141968
254,"def readTables ( self ) : """"""Read tables section"""""" while True : table = self. readTable ( ) if : return if table [ ""type"" ] == ""LAYER"" : name = table. get ( ""name"" ) if name is not None : self. layers [ name ] = Layer ( name, table )",True,table is None,table is None,0.6637412309646606
255,"def Handle ( self, args, context = None ) : result = ApiListClientActionRequestsResult ( ) request_cache = { } for r in data_store. REL_DB. ReadAllClientActionRequests ( str ( args. client_id ) ) : stub = action_registry. ACTION_STUB_BY_ID [ r. action_identifier ] client_action = compatibility. GetName ( stub ) request = ApiClientActionRequest ( leased_until = r. leased_until, session_id = ""%s/%s"" % ( r. client_id, r. flow_id ), client_action = client_action, ) result. items. append ( request ) if not args. fetch_responses : continue if : req_res = data_store. REL_DB. ReadAllFlowRequestsAndResponses ( str ( args. client_id ), r. flow_id ) request_cache [ r. flow_id ] = req_res for req, responses in request_cache [ r. flow_id ] : if req. request_id == r. request_id : res = [ ] for resp_id in sorted ( responses ) : m = responses [ resp_id ]. AsLegacyGrrMessage ( ) res. append ( m ) request. responses = res return result",False,r.flow_id not in request_cache,args.client_id,0.6535675525665283
256,"def _should_mark_node_dnr ( self, node, parent_nodes ) : for p in parent_nodes : if : pass elif p. job : if p. job. status == ""successful"" : if node in ( self. get_children ( p, ""success_nodes"" ) + self. get_children ( p, ""always_nodes"" ) ) : return False elif p. job. status in [ ""failed"", ""error"", ""canceled"" ] : if node in ( self. get_children ( p, ""failure_nodes"" ) + self. get_children ( p, ""always_nodes"" ) ) : return False else : return False elif not p. do_not_run and p. unified_job_template is None : if node in ( self. get_children ( p, ""failure_nodes"" ) + self. get_children ( p, ""always_nodes"" ) ) : return False else : return False return True",False,p.do_not_run is True,p.node_dnr == node,0.6510770320892334
257,"def update_metadata ( self ) : for attrname in dir ( self ) : if attrname. startswith ( ""__"" ) : continue attrvalue = getattr ( self, attrname, None ) if attrvalue == 0 : continue if : attrname = ""version"" if hasattr ( self. metadata, ""set_{0}"". format ( attrname ) ) : getattr ( self. metadata, ""set_{0}"". format ( attrname ) ) ( attrvalue ) elif hasattr ( self. metadata, attrname ) : try : setattr ( self. metadata, attrname, attrvalue ) except AttributeError : pass",False,attrname == 'salt_version',attrname == 'version',0.6533090472221375
258,"def _end_completion ( self, args ) : value = args [ ""completion_text"" ] paths = args [ ""paths"" ] if args [ ""forward_completion"" ] : common_prefix = os. path. commonprefix ( paths ) if : self. path_entry. set_text ( common_prefix, set_file_chooser_folder = True, trigger_event = True ) self. path_entry. text_entry. set_position ( len ( self. path_entry. get_text ( ) ) ) self. completion_popup. set_values ( paths, preserve_selection = True ) if self. use_popup and len ( paths ) > 1 : self. completion_popup. popup ( ) elif self. completion_popup. is_popped_up ( ) and args [ ""forward_completion"" ] : self. completion_popup. popdown ( )",False,len(common_prefix) > len(value),not self.path_entry.is_empty() and common_prefix,0.6451213359832764
259,"def R_op ( self, inputs, eval_points ) : outs = self ( * inputs, ** dict ( return_list = True ) ) rval = [ None for x in outs ] for idx, out in enumerate ( outs ) : ograds = [ x. zeros_like ( ) for x in outs ] ograds [ idx ] = theano. tensor. ones_like ( out ) bgrads = self. _bgrad ( inputs, outs, ograds ) rop_out = None for jdx, ( inp, eval_point ) in enumerate ( izip ( inputs, eval_points ) ) : if bgrads [ jdx ] is None or isinstance ( bgrads [ jdx ]. type, DisconnectedType ) : pass elif : if rop_out is None : rop_out = bgrads [ jdx ] * eval_point else : rop_out = rop_out + bgrads [ jdx ] * eval_point rval [ idx ] = rop_out return rval",False,eval_point is not None,len(bgrads) > 0,0.6541682481765747
260,"def assert_warns ( expected ) : with warnings. catch_warnings ( record = True ) as w : warnings. simplefilter ( ""always"" ) yield if sys. version_info >= ( 3, 0 ) : if : try : exc_name = expected. __name__ except AttributeError : exc_name = str ( expected ) raise AssertionError ( ""%s not triggerred"" % exc_name )",False,"not any((isinstance(m.message, expected) for m in w))","hasattr(expected, '__name__')",0.661018967628479
261,"def init_params ( net ) : """"""Init layer parameters."""""" for module in net. modules ( ) : if isinstance ( module, nn. Conv2d ) : init. kaiming_normal ( module. weight, mode = ""fan_out"" ) if module. bias : init. constant ( module. bias, 0 ) elif isinstance ( module, nn. BatchNorm2d ) : init. constant ( module. weight, 1 ) init. constant ( module. bias, 0 ) elif : init. normal ( module. weight, std = 1e-3 ) if module. bias : init. constant ( module. bias, 0 )",True,"isinstance(module, nn.Linear)","isinstance(module, nn.Linear)",0.6549795866012573
262,"def _mock_send_packet ( eio_sid, pkt ) : epkt = pkt. encode ( ) if not isinstance ( epkt, list ) : pkt = packet. Packet ( encoded_packet = epkt ) else : pkt = packet. Packet ( encoded_packet = epkt [ 0 ] ) for att in epkt [ 1 : ] : pkt. add_attachment ( att ) if pkt. packet_type == packet. EVENT or pkt. packet_type == packet. BINARY_EVENT : if eio_sid not in self. queue : self. queue [ eio_sid ] = [ ] if : self. queue [ eio_sid ]. append ( { ""name"" : pkt. data [ 0 ], ""args"" : pkt. data [ 1 ], ""namespace"" : pkt. namespace or ""/"", } ) else : self. queue [ eio_sid ]. append ( { ""name"" : pkt. data [ 0 ], ""args"" : pkt. data [ 1 : ], ""namespace"" : pkt. namespace or ""/"", } ) elif pkt. packet_type == packet. ACK or pkt. packet_type == packet. BINARY_ACK : self",False,pkt.data[0] == 'message' or pkt.data[0] == 'json',len(self.queue) > 0,0.6525523066520691
263,"def mergeCombiners ( self, x, y ) : for item in y : if : self. 
heap. push ( x, item ) else : self. heap. push_pop ( x, item ) return x",False,len(x) < self.heap_limit,"isinstance(item, list)",0.6545305252075195 264,"def test_write_buffer ( self ) : try : for mode in ( ""b"", """" ) : with open ( ""foo"", ""w+"" + mode ) as foo : b = buffer ( b""hello world"", 6 ) foo. write ( b ) with open ( ""foo"", ""r"" ) as foo : self. assertEqual ( foo. readlines ( ), [ ""world"" ] ) with open ( ""foo"", ""w+"" ) as foo : b = buffer ( u""hello world"", 6 ) foo. write ( b ) with open ( ""foo"", ""r"" ) as foo : self. assertEqual ( foo. readlines ( ), [ ""world"" ] ) with open ( ""foo"", ""w+b"" ) as foo : b = buffer ( u""hello world"", 6 ) foo. write ( b ) with open ( ""foo"", ""r"" ) as foo : if : self. assertEqual ( foo. readlines ( ), [ ""l\x00o\x00 \x00w\x00o\x00r\x00l\x00d\x00"" ] ) else : self. assertEqual ( foo. readlines ( ), [ ""world"" ] ) finally : self. delete_files ( ""foo"" )",False,is_cpython,"hasattr(foo, '__iter__')",0.6585351228713989 265,"def read_callback ( ) : """"""Parse stats response from Marathon"""""" log_verbose ( ""Read callback called"" ) try : metrics = json. load ( urllib2. urlopen ( MARATHON_URL, timeout = 10 ) ) for group in [ ""gauges"", ""histograms"", ""meters"", ""timers"", ""counters"" ] : for name, values in metrics. get ( group, { } ). items ( ) : for metric, value in values. items ( ) : if : dispatch_stat ( ""gauge"", ""."". join ( ( name, metric ) ), value ) except urllib2. URLError as e : collectd. error ( ""marathon plugin: Error connecting to %s - %r"" % ( MARATHON_URL, e ) )",False,"not isinstance(value, basestring)",metric,0.6501896381378174 266,"def ReceiveMessageLoop ( self ) : while self. connected == True : tmp = await self. ReadSocketData ( 16 ) if tmp is None : break ( expr, ) = struct. unpack ( ""!I"", tmp [ : 4 ] ) ( num, ) = struct. unpack ( ""!I"", tmp [ 8 : 12 ] ) num2 = expr - 16 tmp = await self. ReadSocketData ( num2 ) if tmp is None : break if num2!= 0 : num -= 1 if : ( num3, ) = struct. unpack ( ""!I"", tmp ) self. _UserCount = num3 continue elif num == 3 or num == 4 : try : messages = tmp. decode ( ""utf-8"" ) except : continue await self. parseDanMu ( messages ) continue elif num == 5 or num == 6 or num == 7 : continue else : if num!= 16 : pass else : continue",False,num == 0 or num == 1 or num == 2,num3 != 0,0.6565470695495605 267,"def _rmtree ( self, path ) : for name in self. _listdir ( path ) : fullname = self. _path_join ( path, name ) try : isdir = self. _isdir ( fullname ) except self. _os_error : isdir = False if : self. _rmtree ( fullname ) else : try : self. _remove ( fullname ) except self. _os_error : pass try : self. _rmdir ( path ) except self. _os_error : pass",True,isdir,isdir,0.6735173463821411 268,"def write ( self, * bits ) : for bit in bits : if : self. bytestream. append ( 0 ) byte = self. bytestream [ self. bytenum ] if self. bitnum == 8 : if self. bytenum == len ( self. bytestream ) - 1 : byte = 0 self. bytestream += bytes ( [ byte ] ) self. bytenum += 1 self. bitnum = 0 mask = 2 ** self. bitnum if bit : byte |= mask else : byte &= ~ mask self. bytestream [ self. bytenum ] = byte self. bitnum += 1",False,not self.bytestream,bit,0.6641024351119995 269,"def _write_ready ( self ) : assert self. _buffer, ""Data should not be empty"" try : n = self. _sock. send ( self. _buffer ) except ( BlockingIOError, InterruptedError ) : pass except Exception as exc : self. _loop. remove_writer ( self. _sock_fd ) self. _buffer. clear ( ) self. 
_fatal_error ( exc, ""Fatal write error on socket transport"" ) else : if n : del self. _buffer [ : n ] self. _maybe_resume_protocol ( ) if not self. _buffer : self. _loop. remove_writer ( self. _sock_fd ) if self. _closing : self. _call_connection_lost ( None ) elif : self. _sock. shutdown ( socket. SHUT_WR )",False,self._eof,shutdown,0.6725741624832153 270,"def jupyter_progress_bar ( min = 0, max = 1.0 ) : """"""Returns an ipywidget progress bar or None if we can't import it"""""" widgets = wandb. util. get_module ( ""ipywidgets"" ) try : if : from IPython. html import widgets assert hasattr ( widgets, ""VBox"" ) assert hasattr ( widgets, ""Label"" ) assert hasattr ( widgets, ""FloatProgress"" ) return ProgressWidget ( widgets, min = min, max = max ) except ( ImportError, AssertionError ) : return None",False,widgets is None,"hasattr(widgets, 'getItem')",0.6759294867515564 271,"def call ( self, step_input, states ) : new_states = [ ] for i in range ( self. num_layers ) : out, new_state = self. lstm_cells [ i ] ( step_input, states [ i ] ) step_input = ( layers. dropout ( out, self. dropout_prob, dropout_implementation = ""upscale_in_train"" ) if : else out ) new_states. append ( new_state ) return step_input, new_states",False,self.dropout_prob > 0.0,new_state is None,0.6524040102958679 272,"def _get_stream ( self, mem, base, sat, sec_size, start_sid, size = None, name = """" ) : sectors = [ ] s = start_sid if size is None : while s >= 0 : start_pos = base + s * sec_size sectors. append ( mem [ start_pos : start_pos + sec_size ] ) try : s = sat [ s ] except IndexError : raise CompDocError ( ""OLE2 stream %r: sector allocation table invalid entry (%d)"" % ( name, s ) ) assert s == EOCSID else : todo = size while s >= 0 : start_pos = base + s * sec_size grab = sec_size if grab > todo : grab = todo todo -= grab sectors. append ( mem [ start_pos : start_pos + grab ] ) try : s = sat [ s ] except IndexError : raise CompDocError ( ""OLE2 stream %r: sector allocation table invalid entry (%d)"" % ( name, s ) )",False,todo != 0,s == None,0.6870219707489014 273,"def __call__ ( self, trainer ) : keys = self. _keys observation = trainer. observation summary = self. _summary if keys is None : summary. add ( observation ) else : summary. add ( { k : observation [ k ] for k in keys if k in observation } ) if trainer. is_before_training or self. _trigger ( trainer ) : stats = self. _summary. compute_mean ( ) stats_cpu = { } for name, value in six. iteritems ( stats ) : stats_cpu [ name ] = float ( value ) updater = trainer. updater stats_cpu [ ""epoch"" ] = updater. epoch stats_cpu [ ""iteration"" ] = updater. iteration stats_cpu [ ""elapsed_time"" ] = trainer. elapsed_time if : self. _postprocess ( stats_cpu ) self. _log. append ( stats_cpu ) if self. _log_name is not None : log_name = self. _log_name. format ( ** stats_cpu ) with utils. tempdir ( prefix = log_name, dir = trainer. out ) as tempd : path = os. path. join ( tempd, ""log.json"" ) with open ( path, ""w"" ) as f : json. dump ( self. _log, f, indent = 4 ) new_path = os. path. join ( trainer. out, log_name ) shutil. move ( path,",False,self._postprocess is not None,self._log_cpu is not None,0.6575281620025635 274,"def _from_to_normal ( self, pymodule, import_stmt ) : resource = pymodule. get_resource ( ) from_import = import_stmt. import_info module_name = from_import. module_name for name, alias in from_import. names_and_aliases : imported = name if : imported = alias occurrence_finder = occurrences. create_finder ( self. 
pycore, imported, pymodule [ imported ], imports = False ) source = rename. rename_in_module ( occurrence_finder, module_name + ""."" + name, pymodule = pymodule, replace_primary = True, ) if source is not None : pymodule = self. pycore. get_string_module ( source, resource ) return pymodule",True,alias is not None,alias is not None,0.6716960668563843 275,"def test_with_three_points ( self ) : cba = ia. Polygon ( [ ( 1, 2 ), ( 3, 4 ), ( 5, 5 ) ] ) for i, xy in enumerate ( cba ) : assert i in [ 0, 1, 2 ] if : assert np. allclose ( xy, ( 1, 2 ) ) elif i == 1 : assert np. allclose ( xy, ( 3, 4 ) ) elif i == 2 : assert np. allclose ( xy, ( 5, 5 ) ) assert i == 2",True,i == 0,i == 0,0.6719369888305664 276,"def resize ( self, newshape ) : ( datashape, ) = self. _data. shape if newshape > datashape : ( shape, ) = self. shape newdatashape = max ( newshape, int ( shape * self. factor ) + 1 ) if : self. data = None self. _data. resize ( newdatashape, refcheck = self. refcheck ) else : newdata = zeros ( newdatashape, dtype = self. dtype ) newdata [ : shape ] = self. data self. _data = newdata elif newshape < self. shape [ 0 ] : self. _data [ newshape : ] = 0 self. data = self. _data [ : newshape ] self. shape = ( newshape, )",False,self.use_numpy_resize and self._data.flags['C_CONTIGUOUS'],newdatashape > 0,0.6508387327194214 277,"def handle ( self, input ) : match = self. _rx. match ( input ) if match is not None : query = self. _yamlfy_query ( match. group ( ""query"" ) ) if query is not None : query [ ""millis"" ] = match. group ( ""query_time"" ) query [ ""ns"" ] = match. group ( ""ns"" ) if : query [ ""orderby"" ] = query [ ""query"" ] [ ""$orderby"" ] del query [ ""query"" ] [ ""$orderby"" ] if query [ ""query"" ]. has_key ( ""$query"" ) : query [ ""query"" ] = query [ ""query"" ] [ ""$query"" ] query [ ""stats"" ] = parse_line_stats ( match. group ( ""stats"" ) ) return query return None",False,query['query'].has_key('$orderby'),len(query) > 0,0.6572615504264832 278,"def setUp ( self ) : CFTPClientTestBase. setUp ( self ) self. startServer ( ) cmds = ( ""-p %i -l testuser "" ""--known-hosts kh_test "" ""--user-authentications publickey "" ""--host-key-algorithms ssh-rsa "" ""-i dsa_test "" ""-a "" ""-v "" ""127.0.0.1"" ) port = self. server. getHost ( ). port cmds = test_conch. _makeArgs ( ( cmds % port ). split ( ), mod = ""cftp"" ) log. msg ( ""running {} {}"". format ( sys. executable, cmds ) ) d = defer. Deferred ( ) self. processProtocol = SFTPTestProcess ( d ) d. addCallback ( lambda _ : self. processProtocol. clearBuffer ( ) ) env = os. environ. copy ( ) env [ ""PYTHONPATH"" ] = os. pathsep. join ( sys. path ) encodedCmds = [ ] encodedEnv = { } for cmd in cmds : if isinstance ( cmd, str ) : cmd = cmd. encode ( ""utf-8"" ) encodedCmds. append ( cmd ) for var in env : val = env [ var ] if : var = var. encode ( ""utf-8"" ) if isinstance ( val, str ) : val = val. encode ( ""utf-8"" ) encodedEnv [ var ] = val log. msg ( encodedCmds ) log. msg ( encodedEnv ) reactor. spawnProcess ( self. processProtocol, sys. executable, encodedCmd",False,"isinstance(var, str)","isinstance(val, str)",0.6510794162750244 279,"def __new__ ( mcs, name, bases, attrs ) : include_profile = include_trace = include_garbage = True bases = list ( bases ) if name == ""SaltLoggingClass"" : for base in bases : if : include_trace = False if hasattr ( base, ""garbage"" ) : include_garbage = False if include_profile : bases. append ( LoggingProfileMixin ) if include_trace : bases. 
append ( LoggingTraceMixin ) if include_garbage : bases. append ( LoggingGarbageMixin ) return super ( LoggingMixinMeta, mcs ). __new__ ( mcs, name, tuple ( bases ), attrs )",True,"hasattr(base, 'trace')","hasattr(base, 'trace')",0.6494925022125244 280,"def alloc ( self ) : with self. lock : for item in tuple ( self. ban ) : if item [ ""counter"" ] == 0 : self. free ( item [ ""addr"" ] ) self. ban. remove ( item ) else : item [ ""counter"" ] -= 1 base = 0 for cell in self. addr_map : if cell : bit = 0 while True : if ( 1 << bit ) & self. addr_map [ base ] : self. addr_map [ base ] ^= 1 << bit break bit += 1 ret = base * self. cell_size + bit if self. reverse : ret = self. maxaddr - ret else : ret = ret + self. minaddr if self. minaddr <= ret <= self. maxaddr : if : self. free ( ret, ban = self. release ) self. allocated += 1 ",True,self.release,self.release,0.6604821681976318 281,"def _wait_for_launcher ( self ) -> None : log. debug ( ""Waiting for Lavalink server to be ready"" ) lastmessage = 0 for i in itertools. cycle ( range ( 50 ) ) : line = await self. _proc. stdout. readline ( ) if _RE_READY_LINE. search ( line ) : self. ready. set ( ) break if _FAILED_TO_START. search ( line ) : raise RuntimeError ( f""Lavalink failed to start: {line.decode().strip()}"" ) if : lastmessage = time. time ( ) log. critical ( ""Internal lavalink server exited early"" ) if i == 49 : await asyncio. sleep ( 0.1 )",False,self._proc.returncode is not None and lastmessage + 2 < time.time(),i == 53,0.6497751474380493 282,"def get_type ( request : HttpRequest, payload : Dict [ str, Any ] ) -> str : if payload. get ( ""push"" ) : return ""push"" elif payload. get ( ""fork"" ) : return ""fork"" elif payload. get ( ""comment"" ) and payload. get ( ""commit"" ) : return ""commit_comment"" elif payload. get ( ""commit_status"" ) : return ""change_commit_status"" elif payload. get ( ""issue"" ) : if payload. get ( ""changes"" ) : return ""issue_updated"" if : return ""issue_commented"" return ""issue_created"" elif payload. get ( ""pullrequest"" ) : pull_request_template = ""pull_request_{}"" event_key = validate_extract_webhook_http_header ( request, ""X_EVENT_KEY"", ""BitBucket"" ) assert event_key is not None action = re. match ( ""pullrequest:(?P.*)$"", event_key ) if action : action_group = action. group ( ""action"" ) if action_group in PULL_REQUEST_SUPPORTED_ACTIONS : return pull_request_template. format ( action_group ) else : event_key = validate_extract_webhook_http_header ( request, ""X_EVENT_KEY"", ""BitBucket"" ) if event_key == ""repo:updated"" : return event_",False,payload.get('comment'),"payload.get( ""commented""",0.6542719602584839 283,"def _get_contrast ( second_level_contrast, design_matrix ) : """"""Check and return contrast when testing one contrast at the time"""""" if isinstance ( second_level_contrast, str ) : if second_level_contrast in design_matrix. columns. tolist ( ) : contrast = second_level_contrast else : raise ValueError ( '""{}"" is not a valid contrast name'. format ( second_level_contrast ) ) else : if second_level_contrast is None : if : second_level_contrast = np. ones ( [ 1 ] ) else : raise ValueError ( ""No second-level contrast is specified."" ) elif ( np. nonzero ( second_level_contrast ) [ 0 ] ). size!= 1 : raise ValueError ( ""second_level_contrast must be "" ""a list of 0s and 1s"" ) con_val = np. asarray ( second_level_contrast, dtype = bool ) contrast = np. asarray ( design_matrix. columns. 
tolist ( ) ) [ con_val ] [ 0 ] return contrast",False,design_matrix.shape[1] == 1,len(second_level_contrast) == 0,0.6579049825668335 284,"def _get_left_part ( self, bar ) : import socket, os, pwd try : username = pwd. getpwuid ( os. geteuid ( ) ). pw_name except : username = ""???"" if username == ""root"" : clr = ""bad"" else : clr = ""good"" bar. add ( username, ""hostname"", clr, fixedsize = True ) bar. add ( ""@"", ""hostname"", clr, fixedsize = True ) bar. add ( socket. gethostname ( ), ""hostname"", clr, fixedsize = True ) for path in self. env. pathway : if : clr = ""link"" else : clr = ""directory"" bar. add ( path. basename, clr ) bar. add ( ""/"", clr, fixedsize = True ) if self. env. cf is not None : bar. add ( self. env. cf. basename, ""file"", fixedsize = True )",False,path.islink,path.basename,0.655146598815918 285,"def assert_registration_mailbox ( self, match = None ) : if match is None : match = ""[Weblate] Your registration on Weblate"" self. assertEqual ( len ( mail. outbox ), 1 ) self. assertEqual ( mail. outbox [ 0 ]. subject, match ) live_url = getattr ( self, ""live_server_url"", None ) for line in mail. outbox [ 0 ]. body. splitlines ( ) : if ""verification_code"" not in line : continue if ""("" in line or "")"" in line or ""<"" in line or "">"" in line : continue if : return line + ""&confirm=1"" if line. startswith ( ""http://example.com/"" ) : return line [ 18 : ] + ""&confirm=1"" self. fail ( ""Confirmation URL not found"" ) return """"",False,live_url and line.startswith(live_url),live_url and live_url.lower().startswith(line.startswith('https://example.com/'),0.6480209827423096 286,"def __init__ ( self, document, collection ) : self. _document = document self. _collection_obj = collection self. _mongo_query = None self. _query_obj = Q ( ) self. _cls_query = { } self. _where_clause = None self. _loaded_fields = QueryFieldList ( ) self. _ordering = None self. _snapshot = False self. _timeout = True self. _read_preference = None self. _read_concern = None self. _iter = False self. _scalar = [ ] self. _none = False self. _as_pymongo = False self. _search_text = None if document. _meta. get ( ""allow_inheritance"" ) is True : if : self. _cls_query = { ""_cls"" : self. _document. _subclasses [ 0 ] } else : self. _cls_query = { ""_cls"" : { ""$in"" : self. _document. _subclasses } } self. _loaded_fields = QueryFieldList ( always_include = [ ""_cls"" ] ) self. _cursor_obj = None self. _limit = None self. _skip = None self. _hint = - 1 self. _collation = None self. _batch_size = None self. _max_time_ms = None self. _comment = None self. _empty = False",False,len(self._document._subclasses) == 1,len(self._document) == 1,0.6623180508613586 287,"def wait_for_child ( pid, timeout = 1.0 ) : deadline = mitogen. core. now ( ) + timeout while timeout < mitogen. core. now ( ) : try : target_pid, status = os. waitpid ( pid, os. WNOHANG ) if : return except OSError : e = sys. exc_info ( ) [ 1 ] if e. args [ 0 ] == errno. ECHILD : return time. sleep ( 0.05 ) assert False, ""wait_for_child() timed out""",False,target_pid == pid,status == 0,0.6709791421890259 288,"def resolve_none ( self, data ) : for tok_idx in range ( len ( data ) ) : for feat_idx in range ( len ( data [ tok_idx ] ) ) : if : data [ tok_idx ] [ feat_idx ] = ""_"" return data",True,data[tok_idx][feat_idx] is None,data[tok_idx][feat_idx] is None,0.6543666124343872 289,"def test_attributes_types ( self ) : if not self. connection. strategy. pooled : if : self. connection. refresh_server_info ( ) self. 
assertEqual ( type ( self. connection. server. schema. attribute_types [ ""cn"" ] ), AttributeTypeInfo )",False,not self.connection.server.info,self.connection.schema is not None,0.6560783386230469 290,"def get_modified_addr ( self, erase_last = False ) : last = self. last_iteration new = self. feed ( self. last_value, erase_last = erase_last ) ret = { } for type, l in last. iteritems ( ) : typeset = set ( new [ type ] ) for addr in l : if addr not in typeset : if : ret [ type ] = [ ] ret [ type ]. append ( addr ) return ret",True,type not in ret,type not in ret,0.6687414646148682 291,"def _get_compressor ( self, algorithm ) : try : if algorithm. lower ( ) in ( ""none"", ""off"", ""no"" ) : return None if algorithm. lower ( ) in ( ""zlib"", ""gzip"" ) : import zlib as compressor result = compressor elif algorithm. lower ( ) in ( ""bz2"", ""bzip2"" ) : import bz2 as compressor result = compressor else : result = None if : return eventlet. tpool. Proxy ( result ) except ImportError : pass err = _ ( ""unsupported compression algorithm: %s"" ) % algorithm raise ValueError ( err )",False,result,result is not None,0.6955109238624573 292,"def choices ( ) : """"""Return a dict of different choices."""""" choices = { } for choice in Action. __dict__ : if : try : value = int ( getattr ( Action, choice ) ) choices [ value ] = choice except ( TypeError, ValueError ) : pass return choices",True,"hasattr(Action, choice)","hasattr(Action, choice)",0.6596171259880066 293,"def _walkingCount ( self, limFn = None, cntFn = None ) : tot = 0 pcounts = { } for did in self. col. decks. active ( ) : did = int ( did ) lim = limFn ( self. col. decks. get ( did ) ) if not lim : continue parents = self. col. decks. parents ( did ) for p in parents : if : pcounts [ p [ ""id"" ] ] = limFn ( p ) lim = min ( pcounts [ p [ ""id"" ] ], lim ) cnt = cntFn ( did, lim ) for p in parents : pcounts [ p [ ""id"" ] ] -= cnt pcounts [ did ] = lim - cnt tot += cnt return tot",True,p['id'] not in pcounts,p['id'] not in pcounts,0.6638380885124207 294,"def generate_eway_bill ( self, ** kwargs ) : args = frappe. _dict ( kwargs ) headers = self. get_headers ( ) eway_bill_details = get_eway_bill_details ( args ) data = json. dumps ( { ""Irn"" : args. irn, ""Distance"" : cint ( eway_bill_details. distance ), ""TransMode"" : eway_bill_details. mode_of_transport, ""TransId"" : eway_bill_details. gstin, ""TransName"" : eway_bill_details. transporter, ""TrnDocDt"" : eway_bill_details. document_date, ""TrnDocNo"" : eway_bill_details. document_name, ""VehNo"" : eway_bill_details. vehicle_no, ""VehType"" : eway_bill_details. vehicle_type, }, indent = 4, ) try : res = self. make_request ( ""post"", self. generate_ewaybill_url, headers, data ) if : self. invoice. ewaybill = res. get ( ""result"" ). get ( ""EwbNo"" ) self. invoice. eway_bill_cancelled = 0 self. invoice. update ( args ) self. invoice. flags. updater_reference = { ""doctype"" : self. invoice. doctype, ""docname"" : self. invoice",False,res.get('success'),res.get('result') is not None,0.6566387414932251 295,"def removeKey ( self, key, group = None, locales = True ) : if not group : group = self. defaultGroup try : if locales : for name in list ( self. content [ group ] ) : if : del self. content [ group ] [ name ] value = self. content [ group ]. pop ( key ) self. tainted = True return value except KeyError as e : if debug : if e == group : raise NoGroupError ( group, self. filename ) else : raise NoKeyError ( key, group, self. 
filename ) else : return """"",False,"re.match('^' + key + xdg.Locale.regex + '$', name) and name != key",name in self.content[group],0.6521607637405396 296,"def clean_requires_python ( candidates ) : """"""Get a cleaned list of all the candidates with valid specifiers in the `requires_python` attributes."""""" all_candidates = [ ] py_version = parse_version ( os. environ. get ( ""PIP_PYTHON_VERSION"", ""."". join ( map ( str, sys. version_info [ : 3 ] ) ) ) ) for c in candidates : if getattr ( c, ""requires_python"", None ) : if : c. requires_python = "">={0},<{1!s}"". format ( c. requires_python, int ( c. requires_python ) + 1 ) try : specifierset = SpecifierSet ( c. requires_python ) except InvalidSpecifier : continue else : if not specifierset. contains ( py_version ) : continue all_candidates. append ( c ) return all_candidates",False,"len(c.requires_python) == 1 and c.requires_python in ('2', '3')",c.requires_python is None,0.6525837182998657 297,"def JujuWait ( self ) : """"""Wait for all deployed services to be installed, configured, and idle."""""" status = yaml. safe_load ( self. JujuStatus ( ) ) for service in status [ ""services"" ] : ss = status [ ""services"" ] [ service ] [ ""service-status"" ] [ ""current"" ] if ss not in [ ""active"", ""unknown"" ] : raise errors. Juju. TimeoutException ( ""Service %s is not ready; status is %s"" % ( service, ss ) ) if ss in [ ""error"" ] : debuglog = self. JujuRun ( ""juju debug-log --limit 200"" ) logging. warn ( debuglog ) raise errors. Juju. UnitErrorException ( ""Service %s is in an error state"" % service ) for unit in status [ ""services"" ] [ service ] [ ""units"" ] : unit_data = status [ ""services"" ] [ service ] [ ""units"" ] [ unit ] ag = unit_data [ ""agent-state"" ] if ag!= ""started"" : raise errors. Juju. TimeoutException ( ""Service %s is not ready; agent-state is %s"" % ( service, ag ) ) ws = unit_data [ ""workload-status"" ] [ ""current"" ] if : raise errors. Juju. TimeoutException ( ",False,"ws not in ['active', 'unknown']",ws != 'cancel',0.6560594439506531 298,"def docroutine ( self, object, name = None, mod = None, cl = None ) : """"""Produce text documentation for a function or method object."""""" realname = object. __name__ name = name or realname note = """" skipdocs = 0 if inspect. ismethod ( object ) : imclass = object. im_class if cl : if imclass is not cl : note = "" from "" + classname ( imclass, mod ) else : if object. im_self is not None : note = "" method of %s instance"" % classname ( object. im_self. __class__, mod ) else : note = "" unbound %s method"" % classname ( imclass, mod ) object = object. im_func if name == realname : title = self. bold ( realname ) else : if cl and realname in cl. __dict__ and cl. __dict__ [ realname ] is object : skipdocs = 1 title = self. bold ( name ) + "" = "" + realname if inspect. isfunction ( object ) : args, varargs, varkw, defaults = inspect. getargspec ( object ) argspec = inspect. formatargspec ( args, varargs, varkw, defaults, formatvalue = self. formatvalue ) if : title = self. bold ( name ) + "" lambda "" argspec = argspec [ 1 : - 1 ] else",False,realname == '',name != None,0.6732968091964722 299,"def __call__ ( self, context ) : obj = self. param. parent ( context ) name = self. param. key if obj is None : raise AssertionError ( ""No such object: %s"" % self. param. parent. 
name ) try : function = obj [ name ] if : raise KeyError except ( TypeError, KeyError ) : if hasattr ( obj, name ) and isinstance ( getattr ( obj, name ), ExpressionFunction ) : function = getattr ( obj, name ) else : wrapper = self. wrap_object ( obj ) if ( wrapper is not None and hasattr ( wrapper, name ) and isinstance ( getattr ( wrapper, name ), ExpressionFunction ) ) : function = getattr ( wrapper, name ) else : raise AssertionError ( ""Not a valid function: %s"" % self. param. name ) if : raise AssertionError ( ""Not a valid function: %s"" % self. param. name ) args = self. args ( context ) return function ( * args )",False,"not isinstance(function, ExpressionFunction)",function is None,0.6545538902282715 300,"def process_response ( self, request, response ) : if not response. streaming and len ( response. content ) < 200 : return response patch_vary_headers ( response, ( ""Accept-Encoding"", ) ) if response. has_header ( ""Content-Encoding"" ) : return response if ""msie"" in request. META. get ( ""HTTP_USER_AGENT"", """" ). lower ( ) : ctype = response. get ( ""Content-Type"", """" ). lower ( ) if : return response ae = request. META. get ( ""HTTP_ACCEPT_ENCODING"", """" ) if not re_accepts_gzip. search ( ae ) : return response if response. streaming : response. streaming_content = compress_sequence ( response. streaming_content ) del response [ ""Content-Length"" ] else : compressed_content = compress_string ( response. content ) if len ( compressed_content ) >= len ( response. content ) : return response response. content = compressed_content response [ ""Content-Length"" ] = str ( len ( response. content ) ) if response. has_header ( ""ETag"" ) : response [ ""ETag"" ] = re. sub ( '""$', ';gzip""', response [ ""ETag"" ] ) response [ ""Content-Encoding"" ] = ""gzip"" return response",False,not ctype.startswith('text/') or 'javascript' in ctype,ctype in _accepts_gzip,0.6522215604782104 301,"def brushengine_paint_hires ( ) : from lib import tiledsurface, brush s = tiledsurface. Surface ( ) with open ( ""brushes/v2/watercolor.myb"" ) as fp : bi = brush. BrushInfo ( fp. read ( ) ) b = brush. Brush ( bi ) events = np. loadtxt ( ""painting30sec.dat"" ) t_old = events [ 0 ] [ 0 ] yield start_measurement s. begin_atomic ( ) trans_time = 0.0 for t, x, y, pressure in events : dtime = t - t_old t_old = t b. stroke_to ( s. backend, x * 5, y * 5, pressure, 0.0, 0.0, dtime ) trans_time += dtime if : trans_time = 0.0 s. end_atomic ( ) s. begin_atomic ( ) s. end_atomic ( ) yield stop_measurement",False,trans_time > 0.05,trans_time > s.end_atomic(),0.6561314463615417 302,"def _tile_series ( cls, op ) : series = op. inputs [ 0 ] if len ( series. chunks ) == 1 : chunk = series. chunks [ 0 ] chunk_op = op. copy ( ). reset_key ( ) out_chunks = [ chunk_op. new_chunk ( series. chunks, shape = chunk. shape, index = chunk. index, index_value = op. outputs [ 0 ]. index_value, dtype = chunk. dtype, name = chunk. name, ) ] new_op = op. copy ( ) kws = op. outputs [ 0 ]. params. copy ( ) kws [ ""nsplits"" ] = series. nsplits kws [ ""chunks"" ] = out_chunks return new_op. new_seriess ( op. inputs, ** kws ) else : if : raise NotImplementedError ( ""Only support puts NaNs at the end."" ) return cls. _tile_psrs ( op, series )",False,op.na_position != 'last',len(series.outputs) > 0,0.6548214554786682 303,"def post_config_hook ( self ) : self. last_transmitted_bytes = 0 self. last_received_bytes = 0 self. last_time = time. perf_counter ( ) if : with Path ( ""/proc/net/route"" ). 
open ( ) as fh : for line in fh : fields = line. strip ( ). split ( ) if fields [ 1 ] == ""00000000"" and int ( fields [ 3 ], 16 ) & 2 : self. nic = fields [ 0 ] break if : self. nic = ""lo"" self. py3. log ( f""selected nic: {self.nic}"" ) self. thresholds_init = self. py3. get_color_names_list ( self. format )",False,self.nic is None,self.nic,0.6575226187705994 304,"def import_data ( self ) : if os. path. isfile ( self. _path ) : with open ( self. _path, ""r"" ) as db_file : import_data = json. loads ( db_file. read ( ) ) data = import_data [ ""data"" ] for key_data in data : key = key_data [ 0 ] key_type = key_data [ 1 ] key_ttl = key_data [ 2 ] key_val = key_data [ 3 ] if key_type == ""set"" : key_val = set ( key_val ) elif key_type == ""deque"" : key_val = collections. deque ( key_val ) self. _data [ key ] = { ""ttl"" : key_ttl, ""val"" : key_val, } if ""timers"" in import_data : for key in import_data [ ""timers"" ] : if key not in self. _data : continue ttl = self. _data [ key ] [ ""ttl"" ] if not ttl : <",False,'commit_log' in import_data,len(self._data),0.6545328497886658 305,"def _process_mempool ( self, all_hashes ) : txs = self. txs hashXs = self. hashXs touched = set ( ) for tx_hash in set ( txs ). difference ( all_hashes ) : tx = txs. pop ( tx_hash ) tx_hashXs = { hashX for hashX, value in tx. in_pairs } tx_hashXs. update ( hashX for hashX, value in tx. out_pairs ) for hashX in tx_hashXs : hashXs [ hashX ]. remove ( tx_hash ) if : del hashXs [ hashX ] touched. update ( tx_hashXs ) new_hashes = list ( all_hashes. difference ( txs ) ) if new_hashes : fetches = [ ] for hashes in chunks ( new_hashes, 200 ) : fetches. append ( self. _fetch_and_accept ( hashes, all_hashes, touched ) ) tx_map = { } utxo_map = { } for fetch in asyncio. as_completed ( fetches ) : deferred, unspent = await fetch tx_map. update ( deferred ) utxo_map. update ( unspent ) prior_count = 0 while tx_map and len ( tx_map )!= prior_count : prior_count = len ( tx_map ) tx_map, utxo_map = self. _accept_transactions ( tx_map, utxo_map, touched ) ",False,not hashXs[hashX],hashX,0.6548793315887451 306,"def forward ( self, hidden_states, attention_mask = None, head_mask = None, output_attentions = False, output_hidden_states = False, return_dict = True, ) : all_hidden_states = ( ) if output_hidden_states else None all_attentions = ( ) if output_attentions else None for i, layer_module in enumerate ( self. layer ) : if output_hidden_states : all_hidden_states = all_hidden_states + ( hidden_states, ) layer_outputs = layer_module ( hidden_states, attention_mask, head_mask [ i ], output_attentions, ) hidden_states = layer_outputs [ 0 ] if output_attentions : all_attentions = all_attentions + ( layer_outputs [ 1 ], ) if output_hidden_states : all_hidden_states = all_hidden_states + ( hidden_states, ) if not return_dict : return tuple ( v for v in [ hidden_states, all_hidden_states, all_attentions ] if : ) return BaseModelOutput ( last_hidden_state = hidden_states, hidden_states = all_hidden_states, attentions = all_attentions, )",False,v is not None,return_dict,0.6533644199371338 307,"def sanitizeTreeKobo ( filetree ) : pageNumber = 0 for root, dirs, files in os. walk ( filetree ) : dirs, files = walkSort ( dirs, files ) for name in files : splitname = os. path. splitext ( name ) slugified = str ( pageNumber ). zfill ( 5 ) pageNumber += 1 while ( os. path. exists ( os. path. join ( root, slugified + splitname [ 1 ] ) ) and splitname [ 0 ]. upper ( )!= slugified. upper ( ) ) : slugified += ""A"" newKey = os. path. 
join ( root, slugified + splitname [ 1 ] ) key = os. path. join ( root, name ) if : os. replace ( key, newKey )",False,key != newKey,os.path.exists(key),0.6999104022979736 308,"def testCheckpointMiddleOfSequence ( self ) : tm1 = BacktrackingTM ( numberOfCols = 100, cellsPerColumn = 12, verbosity = VERBOSITY ) sequences = [ self. generateSequence ( ) for _ in xrange ( 5 ) ] train = list ( itertools. chain. from_iterable ( sequences [ : 3 ] + [ sequences [ 3 ] [ : 5 ] ] ) ) for bottomUpInput in train : if : tm1. reset ( ) else : tm1. compute ( bottomUpInput, True, True ) checkpointPath = os. path. join ( self. _tmpDir, ""a"" ) tm1. saveToFile ( checkpointPath ) tm2 = pickle. loads ( pickle. dumps ( tm1 ) ) tm2. loadFromFile ( checkpointPath ) self. assertTMsEqual ( tm1, tm2 ) test = list ( itertools. chain. from_iterable ( [ sequences [ 3 ] [ 5 : ] ] + sequences [ 3 : ] ) ) for bottomUpInput in test : if : tm1. reset ( ) tm2. reset ( ) else : result1 = tm1. compute ( bottomUpInput, True, True ) result2 = tm2. compute ( bottomUpInput, True, True ) self. assertTMsEqual ( tm1, tm2 ) self. assertTrue ( numpy. array_equal ( result1, result2 ) )",False,bottomUpInput is None,bottomUpInput.hasReset,0.6644439697265625 309,"def __init__ ( self, size, comm, decay = 0.9, eps = 2e-5, dtype = None, use_gamma = True, use_beta = True, initial_gamma = None, initial_beta = None, communication_backend = ""auto"", ) : chainer. utils. experimental ( ""chainermn.links.MultiNodeBatchNormalization"" ) super ( MultiNodeBatchNormalization, self ). __init__ ( ) self. _highprec_dtype = chainer. get_dtype ( dtype, map_mixed16 = numpy. float32 ) self. comm = comm self. avg_mean = numpy. zeros ( size, dtype = self. _highprec_dtype ) self. register_persistent ( ""avg_mean"" ) self. avg_var = numpy. zeros ( size, dtype = self. _highprec_dtype ) self. register_persistent ( ""avg_var"" ) self. N = 0 self. register_persistent ( ""N"" ) self. decay = decay self. eps = eps self. _communication_backend = ( chainermn_batch_normalization. get_communication_backend ( comm, communication_backend ) ) with self. init_scope ( ) : if use_gamma : if : initial_gamma = 1 initial_gamma = initializers. _get_initializer ( initial_gamma ) initial_gamma. dtype = self. _highprec_dtype self. gamma = variable. Parameter ( initial_gamma, size ) if use_beta : if initial_beta is None : strip_ext = True set_mode = False if sys. platform == ""win32"" : if : strip_ext = False targ_basename = os. path. basename ( src ) if strip_ext and targ_basename. endswith ( "".py"" ) : targ_basename = targ_basename [ : - 3 ] targ = os. path. join ( self. install_dir, targ_basename ) self. announce ( ""installing %s as %s"" % ( src, targ_basename ), level = 2 ) if self. dry_run : return [ ] with open ( src, ""rU"" ) as in_fp : with open ( targ, ""w"" ) as out_fp : line = in_fp. readline ( ). rstrip ( ) if line. startswith ( ""#!"" ) : print ( line, file = out_fp ) print ( header, file = out_fp ) if os. name == ""posix"" : set_mode = True else : print ( header, file = out_fp ) print ( line, file = out_fp ) for line in in_fp. readlines ( ) : line = line. rstrip ( ) print ( line, file = out_fp ) if set_mode : mode = ( ( os. stat ( targ ). st_mode ) | 0o555 ) & 0o7777 try : if len ( field [ 1 ] ) == 1 : if isinstance ( field [ 1 ] [ 0 ], nodes. paragraph ) : authors = self. authors_from_one_paragraph ( field ) elif : authors = self. authors_from_bullet_list ( field ) else : raise TransformError else : authors = self. authors_from_paragraphs ( field ) authornodes = [ nodes. 
author ( """", """", * author ) for author in authors if author ] if len ( authornodes ) >= 1 : docinfo. append ( nodes. authors ( """", * authornodes ) ) else : raise TransformError except TransformError : field [ - 1 ] += self. document. reporter. warning ( 'Bibliographic field ""%s"" incompatible with extraction:' ""it must contain either a single paragraph (with authors "" 'separated by one of ""%s""), multiple paragraphs (one per' ""author), or a bullet list with one paragraph (one author) "" ""per item."" % ( name, """". join ( self. language. author_separators ) ), base_node = field, ) raise",True,"isinstance(field[1][0], nodes.bullet_list)","isinstance(field[1][0], nodes.bullet_list)",0.6546348333358765 312,"def on_task_filter ( self, task, config ) : if not task. accepted : log. debug ( ""No accepted entries, not scanning for existing."" ) return log. verbose ( ""Scanning path(s) for existing files."" ) config = self. prepare_config ( config ) filenames = { } for folder in config : folder = Path ( folder ). expanduser ( ) if not folder. exists ( ) : raise plugin. PluginWarning ( ""Path %s does not exist"" % folder, log ) for p in folder. rglob ( ""*"" ) : if : key = p. name if platform. system ( ) == ""Windows"" : key = key. lower ( ) filenames [ key ] = p for entry in task. accepted : name = Path ( entry. get ( ""filename"", entry. get ( ""location"", entry [ ""title"" ] ) ) ). name if platform. system ( ) == ""Windows"" : name = name. lower ( ) if name in filenames : log. debug ( ""Found %s in %s"" % ( name, filenames [ name ] ) ) entry. reject ( ""exists in %s"" % filenames [ name ] )",False,p.is_file(),p.exists(),0.6524482369422913 313,"def _update_cds_vdims ( self ) : element = self. plot. current_frame cds = self. plot. handles [ ""cds"" ] for d in element. vdims : scalar = element. interface. isscalar ( element, d ) dim = dimension_sanitizer ( d. name ) if : if scalar : cds. data [ dim ] = element. dimension_values ( d, not scalar ) else : cds. data [ dim ] = [ arr [ :, 0 ] for arr in element. split ( datatype = ""array"", dimensions = [ dim ] ) ]",False,dim not in cds.data,dim,0.670620858669281 314,"def progress_bar_update ( count1 = None, count2 = None, count3 = None, count4 = None, count5 = None, count6 = None ) : lock. acquire ( ) global pbar_file_permission_done if count1 is not None : if count1 <= 100 : pbar1. update ( count1 ) if count2 is not None : if count2 <= 100 : pbar2. update ( count2 ) if count3 is not None : if not pbar_file_permission_done : if count3 < 100 : pbar3. update ( count3 ) else : pbar3. update ( count3 ) pbar_file_permission_done = True else : pbar4. update ( count3 ) if count4 is not None : if count4 <= 100 : pbar5. update ( count4 ) if count5 is not None : if count5 <= 100 : pbar6. update ( count5 ) if count6 is not None : if : pbar7. update ( count6 ) lock. release ( )",True,count6 <= 100,count6 <= 100,0.6802228689193726 315,"def _executables_in_windows ( path ) : if not os. path. isdir ( path ) : return extensions = builtins. __xonsh__. env [ ""PATHEXT"" ] try : for x in scandir ( path ) : try : is_file = x. is_file ( ) except OSError : continue if : fname = x. name else : continue base_name, ext = os. path. splitext ( fname ) if ext. upper ( ) in extensions : yield fname except FileNotFoundError : return",True,is_file,is_file,0.6598045825958252 316,"def test_payload_splitter ( self ) : with open ( FIXTURE_PATH + ""/legacy_payload.json"" ) as f : legacy_payload = json. 
load ( f ) legacy_payload_split, metrics_payload, checkruns_payload = split_payload ( dict ( legacy_payload ) ) series = metrics_payload [ ""series"" ] legacy_payload_split [ ""metrics"" ] = [ ] for s in series : attributes = { } if s. get ( ""type"" ) : attributes [ ""type"" ] = s [ ""type"" ] if s. get ( ""host"" ) : attributes [ ""hostname"" ] = s [ ""host"" ] if s. get ( ""tags"" ) : attributes [ ""tags"" ] = s [ ""tags"" ] if : attributes [ ""device_name"" ] = s [ ""device"" ] formatted_sample = [ s [ ""metric"" ], s [ ""points"" ] [ 0 ] [ 0 ], s [ ""points"" ] [ 0 ] [ 1 ], attributes, ] legacy_payload_split [ ""metrics"" ]. append ( formatted_sample ) del legacy_payload [ ""service_checks"" ] self. assertEqual ( legacy_payload, legacy_payload_split ) with open ( FIXTURE_PATH + ""/sc_payload.json"" ) as f : expected_sc_payload = json. load ( f ) self. assertEqual ( checkruns_payload, expected_sc_payload )",False,s.get('device'),s.get(device),0.6555224061012268 317,"def write ( self, data ) : if mock_target. _mirror_on_stderr : if self. _write_line : sys. stderr. write ( fn + "": "" ) if : sys. stderr. write ( data. decode ( ""utf8"" ) ) else : sys. stderr. write ( data ) if ( data [ - 1 ] ) == ""\n"" : self. _write_line = True else : self. _write_line = False super ( Buffer, self ). write ( data )",False,bytes,"isinstance(data, bytes)",0.6905951499938965 318,"def _calculateParams ( self, listPackages ) : self. mapCyclesToPackageList. clear ( ) self. mapPackageToCycle. clear ( ) self. sortedPackageList = [ ] self. listOfPackagesAlreadyBuilt = self. _readAlreadyAvailablePackages ( ) if self. listOfPackagesAlreadyBuilt : self. logger. debug ( ""List of already available packages:"" ) self. logger. debug ( self. listOfPackagesAlreadyBuilt ) listPackagesToBuild = copy. copy ( listPackages ) for pkg in listPackages : if pkg in self. listOfPackagesAlreadyBuilt and not constants. rpmCheck : listPackagesToBuild. remove ( pkg ) if constants. rpmCheck : self. sortedPackageList = listPackagesToBuild else : if : return False if self. sortedPackageList : self. logger. info ( ""List of packages yet to be built..."" ) self. logger. info ( str ( set ( self. sortedPackageList ) - set ( self. listOfPackagesAlreadyBuilt ) ) ) self. logger. info ( """" ) return True",False,not self._readPackageBuildData(listPackagesToBuild),not list(self.sortedPackageList),0.6537013053894043 319,"def PyJs_anonymous_53_ ( ast, comments, tokens, this, arguments, var = var ) : var = Scope ( { u""tokens"" : tokens, u""this"" : this, u""arguments"" : arguments, u""comments"" : comments, u""ast"" : ast, }, var, ) var. registers ( [ u""tokens"", u""comments"", u""ast"" ] ) if var. get ( u""ast"" ) : if PyJsStrictEq ( var. get ( u""ast"" ). get ( u""type"" ), Js ( u""Program"" ) ) : return var. get ( u""t"" ). callprop ( u""file"", var. get ( u""ast"" ), ( var. get ( u""comments"" ) or Js ( [ ] ) ), ( var. get ( u""tokens"" ) or Js ( [ ] ) ), ) else : if : return var. get ( u""ast"" ) PyJsTempException = JsToPyException ( var. get ( u""Error"" ). create ( Js ( u""Not a valid ast?"" ) ) ) raise PyJsTempException",False,"PyJsStrictEq(var.get(u'ast').get(u'type'), Js(u'File'))",PYJsTempException,0.6549234390258789 320,"def AdjustLabels ( self, axis, minimum_label_spacing ) : if minimum_label_spacing is None : return if len ( axis. labels ) <= 1 : return if axis. max is not None and axis. min is not None : maximum_possible_spacing = ( axis. max - axis. min ) / ( len ( axis. 
labels ) - 1 ) if minimum_label_spacing > maximum_possible_spacing : minimum_label_spacing = maximum_possible_spacing labels = [ list ( x ) for x in zip ( axis. label_positions, axis. labels ) ] labels = sorted ( labels, reverse = True ) for i in range ( 1, len ( labels ) ) : if labels [ i - 1 ] [ 0 ] - labels [ i ] [ 0 ] < minimum_label_spacing : new_position = labels [ i - 1 ] [ 0 ] - minimum_label_spacing if axis. min is not None and new_position < axis. min : new_position = axis. min labels [ i ] [ 0 ] = new_position for i in range ( len ( labels ) - 2, - 1, - 1 ) : if labels [ i ] [ 0 ] - labels [ i + 1 ] [ 0 ] < minimum_label_spacing : new_position = labels [ i + 1 ] [ 0 ] + minimum_label_spacing if : new_position = axis. max labels [ i ] [ 0 ] = new_position label_positions, labels = zip ( * labels ) axis. labels = labels axis. label_positions = label_positions",False,axis.max is not None and new_position > axis.max,new_position > axis.max,0.6511911153793335 321,"def __get_property_type_info ( cls, property_proto ) : """"""Returns the type mapping for the provided property."""""" name = property_proto. name ( ) is_repeated = bool ( property_proto. multiple ( ) ) primitive_type = None entity_type = None if property_proto. has_meaning ( ) : primitive_type = MEANING_TO_PRIMITIVE_TYPE. get ( property_proto. meaning ( ) ) if primitive_type is None : value = property_proto. value ( ) if value. has_int64value ( ) : primitive_type = backup_pb2. EntitySchema. INTEGER elif value. has_booleanvalue ( ) : primitive_type = backup_pb2. EntitySchema. BOOLEAN elif value. has_stringvalue ( ) : if property_proto. meaning ( ) == entity_pb. Property. ENTITY_PROTO : entity_proto = entity_pb. EntityProto ( ) try : entity_proto. ParsePartialFromString ( value. stringvalue ( ) ) except Exception : pass else : entity_type = EntityTypeInfo. create_from_entity_proto ( entity_proto ) else : primitive_type = backup_pb2. EntitySchema. STRING elif : primitive_type = backup_pb2. EntitySchema. FLOAT elif value. has_pointvalue ( ) : primitive_",False,value.has_doublevalue(),value.has_floating(),0.6519709229469299 322,"def initialize_batcher ( self, dataset, batch_size = 128, bucketing_field = None, should_shuffle = True, ignore_last = False, ) : if self. horovod : batcher = DistributedBatcher ( dataset, self. horovod. rank ( ), self. horovod, batch_size, should_shuffle = should_shuffle, ignore_last = ignore_last, ) elif bucketing_field is not None : input_features = self. hyperparameters [ ""input_features"" ] bucketing_feature = [ feature for feature in input_features if feature [ ""name"" ] == bucketing_field ] if not bucketing_feature : raise ValueError ( ""Bucketing field {} not present in input features"". format ( bucketing_field ) ) else : bucketing_feature = bucketing_feature [ 0 ] should_trim = bucketing_feature [ ""encoder"" ] in dynamic_length_encoders if : trim_side = bucketing_feature [ ""preprocessing"" ] [ ""padding"" ] else : trim_side = self. hyperparameters [ ""preprocessing"" ] [ bucketing_feature [ ""type"" ] ] [ ""padding""",False,'preprocessing' in bucketing_feature,should_trim,0.6586976051330566 323,"def get ( self, request, * args, ** kwargs ) : if request. GET. get ( ""format"", None ) == ""json"" : self. setup_queryset ( * args, ** kwargs ) if ""pid"" in kwargs : self. static_context_extra [ ""pid"" ] = kwargs [ ""pid"" ] cmd = request. GET. get ( ""cmd"", None ) if : data = self. get_filter_info ( request, ** kwargs ) else : data = self. 
get_data ( request, ** kwargs ) return HttpResponse ( data, content_type = ""application/json"" ) return super ( ToasterTable, self ). get ( request, * args, ** kwargs )",False,cmd and 'filterinfo' in cmd,cmd == None,0.658203125
324,"def wakeUp ( self ) : """"""Write one byte to the pipe, and flush it."""""" if self. o is not None : try : util. untilConcludes ( os. write, self. o, b""x"" ) except OSError as e : if : raise",False,e.errno != errno.EAGAIN,e.args[0] not in _EBADF_TAB,0.6523231267929077
325,"def contact_me ( request, attribute = """", response_format = ""html"" ) : ""My Contact card"" contact = request. user. profile. get_contact ( ) if not request. user. profile. has_permission ( contact ) : return user_denied ( request, message = ""You don't have access to this Contact"" ) types = Object. filter_by_request ( request, ContactType. objects. order_by ( ""name"" ) ) if not contact : return render_to_response ( ""identities/contact_me_missing"", { ""types"" : types }, context_instance = RequestContext ( request ), response_format = response_format, ) subcontacts = Object. filter_by_request ( request, contact. child_set ) contact_values = contact. contactvalue_set. order_by ( ""field__name"" ) objects = get_contact_objects ( request. user. profile, contact, preformat = True ) module = None for key in objects : if not attribute : if : module = objects [ key ] [ ""module"" ] else : if attribute in objects [ key ] [ ""objects"" ]. keys ( ) : module = objects [ key ] [ ""module"" ] break return render_to_response ( ""identities/contact_me"", { ""contact"" : contact, ""subcontacts"" : subcontacts, ""objects"" : objects, ",False,objects[key]['count'],module and key in objects,0.6654888391494751
326,"def findfiles ( path ) : files = [ ] for name in os. listdir ( path ) : if name. startswith ( ""."" ) or name == ""lastsnap.jpg"" : continue pathname = os. path. join ( path, name ) st = os. lstat ( pathname ) mode = st. st_mode if : files. extend ( findfiles ( pathname ) ) elif stat. S_ISREG ( mode ) : files. append ( ( pathname, name, st ) ) return files",False,stat.S_ISDIR(mode),mode,0.6498696208000183
327,"def make_parser ( func : tp. Callable, subparser : ap. _SubParsersAction = None, params : tp. Dict [ str, tp. Dict [ str, tp. Any ] ] = None, ** kwargs ) -> ""ap.ArgumentParser"" : """"""A bare-bones argparse builder from functions"""""" doc = get_doc ( func ) kwargs. setdefault ( ""formatter_class"", ap. RawTextHelpFormatter ) if subparser is None : kwargs. setdefault ( ""description"", doc ) parser = ap. ArgumentParser ( ** kwargs ) parser. set_defaults ( ** { _FUNC_NAME : lambda stdout : parser. print_help ( file = stdout ) } ) return parser else : parser = subparser. add_parser ( kwargs. pop ( ""prog"", func. __name__ ), help = doc, ** kwargs, ) parser. set_defaults ( ** { _FUNC_NAME : func } ) if : for par, args in params. items ( ) : args. setdefault ( ""help"", get_doc ( func, par ) ) parser. add_argument ( par, ** args ) return parser",False,params,params is not None,0.7064627408981323
328,"def load_ip ( self ) : if os. path. isfile ( self. ip_list_fn ) : file_path = self. ip_list_fn elif self. default_ip_list_fn and os. path. isfile ( self. default_ip_list_fn ) : file_path = self. default_ip_list_fn else : return with open ( file_path, ""r"" ) as fd : lines = fd. readlines ( ) for line in lines : try : if : continue str_l = line. split ( "" "" ) if len ( str_l ) < 4 : self. logger. warning ( ""line err: %s"", line ) continue ip_str = str_l [ 0 ] domain = str_l [ 1 ] server = str_l [ 2 ] handshake_time = int ( str_l [ 3 ] ) if len ( str_l ) > 4 : fail_times = int ( str_l [ 4 ] ) else : fail_times = 0 if len ( str_l ) > 5 : down_fail = int ( str_l [ 5 ] ) else : down_fail = 0 self. add_ip ( <",False,line.startswith('#'),not line,0.6520669460296631
329,"def tms_to_quadkey ( self, tms, google = False ) : quadKey = """" x, y, z = tms if not google : y = ( 2 ** z - 1 ) - y for i in range ( z, 0, - 1 ) : digit = 0 mask = 1 << ( i - 1 ) if ( x & mask )!= 0 : digit += 1 if : digit += 2 quadKey += str ( digit ) return quadKey",False,y & mask != 0,y & mask,0.6735246181488037
330,"def wait_success ( self, timeout = 60 * 10 ) : for i in range ( timeout // 10 ) : time. sleep ( 10 ) status = self. query_job ( ) print ( ""job {} status is {}"". format ( self. job_id, status ) ) if : return True if status and status in [ StatusSet. CANCELED, StatusSet. TIMEOUT, StatusSet. FAILED, ] : return False return False",False,status and status == StatusSet.SUCCESS,not status,0.6639950275421143
331,"def create_connection ( self, address, protocol_factory = None, ** kw ) : """"""Helper method for creating a connection to an ``address``."""""" protocol_factory = protocol_factory or self. create_protocol if isinstance ( address, tuple ) : host, port = address if : self. logger. debug ( ""Create connection %s:%s"", host, port ) _, protocol = await self. _loop. create_connection ( protocol_factory, host, port, ** kw ) await protocol. event ( ""connection_made"" ) else : raise NotImplementedError ( ""Could not connect to %s"" % str ( address ) ) return protocol",False,self.debug,self.logger and (not self.logger.isEnabledFor(logging.DEBUG)),0.6623555421829224
332,"def _import_module_with_version_check ( module_name, minimum_version, install_info = None ) : """"""Check that module is installed with a recent enough version"""""" from distutils. version import LooseVersion try : module = __import__ ( module_name ) except ImportError as exc : user_friendly_info = ( 'Module ""{0}"" could not be found. {1}' ). format ( module_name, install_info or ""Please install it properly to use nilearn."" ) exc. args += ( user_friendly_info, ) if : exc. msg += "". "" + user_friendly_info raise module_version = getattr ( module, ""__version__"", ""0.0.0"" ) version_too_old = not LooseVersion ( module_version ) >= LooseVersion ( minimum_version ) if version_too_old : message = ( ""A {module_name} version of at least {minimum_version} "" ""is required to use nilearn. {module_version} was found. "" ""Please upgrade {module_name}"" ). format ( module_name = module_name, minimum_version = minimum_version, module_version = module_version, ) raise ImportError ( message ) return module",False,"hasattr(exc, 'msg')",install_info,0.6600459814071655
333,"def do_search ( lo, hi ) : if hi - lo <= 1 : return hi mid = int ( math. floor ( ( hi - lo ) / 2 ) + lo ) log. info ( ""Testing {0}"". format ( draw_graph ( lo, mid, hi, len ( commit_hashes ) ) ) ) with log. indent ( ) : lo_result = None while lo_result is None : lo_result = do_benchmark ( lo ) if not non_null_results ( lo_result ) : lo_result = None lo += 1 if lo >= mid : raise util. UserError ( ""Too many commits failed"" ) mid_result = None while mid_result is None : mid_result = do_benchmark ( mid ) if not non_null_results ( mid_result, lo_result ) : mid_result = None mid += 1 if : raise util. UserError ( ""Too many commits failed"" ) hi_result = None while hi_result is None : hi_result = do_benchmark ( hi ) if not non_null_results ( lo_result, mid_result, hi_result ) : hi_result = None hi -= 1 if hi <= mid : = hi,lo >= mid and mid_result is None,0.7026615142822266
334,"def _load ( self, path : str ) : ds = DataSet ( ) with open ( path, ""r"", encoding = ""utf-8"" ) as f : for line in f : line = line. strip ( ) if line : parts = line. split ( ""\t"" ) raw_words1 = parts [ 1 ] raw_words2 = parts [ 2 ] target = parts [ 0 ] if : ds. append ( Instance ( raw_words1 = raw_words1, raw_words2 = raw_words2, target = target ) ) return ds",False,raw_words1 and raw_words2 and target,target,0.6569558382034302
335,"def FallbackGetIndex ( self, targetMO, argMOs, errorSuggestionMO ) : if not targetMO. HasValue or not all ( map ( lambda x : x. HasValue, argMOs ) ) : return self. Defer ( ( targetMO, ) + tuple ( argMOs ) ) isCom, com = ComBinder. TryBindGetIndex ( self, targetMO, argMOs ) if isCom : return com if type ( targetMO. Value ) is Cons : if : return errorSuggestionMO or CreateThrow ( targetMO, argMOs, BindingRestrictions. Empty, InvalidOperationException, ""Indexing Sympl list requires exactly one argument."", ) return DynamicMetaObject ( EnsureObjectResult ( GetIndexExpression ( targetMO, argMOs ) ), GetTargetArgsRestrictions ( targetMO, argMOs, False ), )",False,len(argMOs) != 1,errorSuggestionMO,0.6629995107650757
336,"def _find_completions ( self, doc, incomplete ) : """"""Find completions for incomplete word and save them."""""" self. _completions = [ ] self. _remains = [ ] favorites = self. _favorite_words. get ( doc, ( ) ) _all_words = set ( ( ) ) for words in self. _all_words. itervalues ( ) : _all_words. update ( words ) limit = self. _settings. max_completions_show for sequence in ( favorites, _all_words ) : for word in sequence : if not word. startswith ( incomplete ) : continue if word == incomplete : continue if : continue self. _completions. append ( word ) self. _remains. append ( word [ len ( incomplete ) : ] ) if len ( self. _remains ) >= limit : break",False,word in self._completions,len(word) == 0,0.6740433573722839
337,"def run ( self ) : while True : try : with DelayedKeyboardInterrupt ( ) : raw_inputs = self. _parent_task_queue. get ( ) if self. _has_stop_signal ( raw_inputs ) : self. _rq. put ( raw_inputs, block = True ) break if self. _flow_type == BATCH : self. _rq. put ( raw_inputs, block = True ) elif : try : self. _rq. put ( raw_inputs, block = False ) except : pass except KeyboardInterrupt : continue",False,self._flow_type == REALTIME,self._flow_type == DEBUG,0.6589759588241577
338,"def run ( algs ) : for alg in algs : vcs = alg. get ( ""variantcaller"" ) if vcs : if isinstance ( vcs, dict ) : vcs = reduce ( operator. add, vcs. values ( ) ) if : vcs = [ vcs ] return any ( vc. startswith ( prefix ) for vc in vcs if vc )",False,"not isinstance(vcs, (list, tuple))","isinstance(vcs, string_types)",0.6504191160202026
339,"def getProperty ( self, name ) : if name == handler. property_lexical_handler : return self. _lex_handler_prop elif name == property_interning_dict : return self. _interning elif name == property_xml_string : if self. _parser : if : return self. _parser. GetInputContext ( ) else : raise SAXNotRecognizedException ( ""This version of expat does not support getting"" "" the XML string"" ) else : raise SAXNotSupportedException ( ""XML string cannot be returned when not parsing"" ) raise SAXNotRecognizedException ( ""Property '%s' not recognized"" % name )",True,"hasattr(self._parser, 'GetInputContext')","hasattr(self._parser, 'GetInputContext')",0.6580386161804199
340,"def visible_settings ( self ) : visible_settings = super ( RelateObjects, self ). visible_settings ( ) visible_settings += [ self. wants_per_parent_means, self. find_parent_child_distances, self. wants_child_objects_saved, ] if self. wants_child_objects_saved : visible_settings += [ self. output_child_objects_name ] if self. find_parent_child_distances!= D_NONE and self. has_step_parents : visible_settings += [ self. wants_step_parent_distances ] if : for group in self. step_parent_names : visible_settings += group. visible_settings ( ) visible_settings += [ self. add_step_parent_button ] return visible_settings",False,self.wants_step_parent_distances,self.has_step_parent_names,0.6558775901794434
341,"def get_host_ipv6 ( with_nic = True ) : nic_info = get_all_nic_info ( ) ipv4 = get_host_ip ( ) ipv6 = None for nic, info in nic_info. items ( ) : ip4 = info [ ""inet4"" ] ip6 = info [ ""inet6"" ] if : continue ip4, ip6 = ip4. pop ( ), ip6. pop ( ) if ip4 == ipv4 : ipv6 = ip6 if ip6 else None if ipv6 and ""%"" not in ipv6 : ipv6 = ipv6 + ""%"" + nic break if ipv6 : if not with_nic : ipv6 = ipv6. split ( ""%"" ) [ 0 ] return ipv6",False,"not all([ip4, ip6])",ip4 and ip6,0.6592468023300171
342,"def _listVMsCallback ( self, result, ignore_error = False, error = False, ** kwargs ) : if error : if ""message"" in result : if : QtWidgets. QMessageBox. critical ( self, ""List vms"", ""Error while listing vms: {}"". format ( result [ ""message"" ] ), ) return if not sip_is_deleted ( self. uiVMListComboBox ) : self. uiVMListComboBox. clear ( ) for vm in result : self. uiVMListComboBox. addItem ( vm [ ""vmname"" ], vm [ ""vmname"" ] ) index = self. uiVMListComboBox. findText ( self. _settings [ ""vmname"" ] ) if index == - 1 : index = self. uiVMListComboBox. findText ( ""GNS3 VM"" ) if index == - 1 : index = 0 self. uiVMListComboBox. setCurrentIndex ( index ) self. _initialized = True",False,not ignore_error,ignore_error,0.65879887342453
343,"def get_library_dirs ( platform, arch = None ) : if platform == ""win32"" : jre_home = get_jre_home ( platform ) jdk_home = JAVA_HOME if : jre_home = jre_home. decode ( ""utf-8"" ) return [ join ( jdk_home, ""lib"" ), join ( jdk_home, ""bin"", ""server"" ) ] elif platform == ""android"" : return [ ""libs/{}"". format ( arch ) ] return [ ]",False,"isinstance(jre_home, bytes)",jdk_home,0.6489929556846619
344,"def transform ( self, data ) : with timer ( ""transform %s"" % self. name, logging. DEBUG ) : if self. operator in { ""lat"", ""latitude"" } : return self. series ( data ). apply ( GeoIP. get_latitude ) elif self. operator in { ""lon"", ""longitude"" } : return self. series ( data ). apply ( GeoIP. get_longitude ) elif : return self. series ( data ). apply ( GeoIP. get_accuracy ) raise NameError ( ""Unknown GeoIP operator [lat, lon, acc]: %s"" % self. operator )",False,"self.operator in {'acc', 'accuracy'}","self.operator in {'acc', 'longitude'}",0.6575528383255005
345,"def parseFunctionSourceElements ( ) : global strict sourceElement = None sourceElements = [ ] token = None directive = None firstRestricted = None oldLabelSet = None oldInIteration = None oldInSwitch = None oldInFunctionBody = None skipComment ( ) delegate. markStart ( ) expect ( ""{"" ) while index < length : if lookahead. type!= Token. StringLiteral : break token = lookahead sourceElement = parseSourceElement ( ) sourceElements. append ( sourceElement ) if sourceElement. expression. type!= Syntax. Literal : break directive = source [ ( token. range [ 0 ] + 1 ) : ( token. range [ 1 ] - 1 ) ] if directive == ""use strict"" : strict = True if firstRestricted : throwErrorTolerant ( firstRestricted, Messages. StrictOctalLiteral ) else : if ( not firstRestricted ) and token. octal : firstRestricted = token oldLabelSet = state. labelSet oldInIteration = state. inIteration oldInSwitch = state. inSwitch oldInFunctionBody = state. inFunctionBody state. labelSet = jsdict ( { } ) state. inIteration = False state. inSwitch = False state. inFunctionBody = True while index < length : if : break sourceElement = parseSourceElement ( ) if ( ""undefined"" if not ""sourceElement"" in locals ( ) else typeof ( sourceElement )",False,match('}'),sourceElement.expression.type != Token.StringLiteral,0.6626948118209839
346,"def publish ( self, channel, * args, ** kwargs ) : """"""Return output of all subscribers for the given channel."""""" if channel not in self. listeners : return [ ] exc = ChannelFailures ( ) output = [ ] items = [ ( self. _priorities [ ( channel, listener ) ], listener ) for listener in self. listeners [ channel ] ] try : items. sort ( key = lambda item : item [ 0 ] ) except TypeError : items. sort ( ) for priority, listener in items : try : output. append ( listener ( * args, ** kwargs ) ) except KeyboardInterrupt : raise except SystemExit : e = sys. exc_info ( ) [ 1 ] if : e. code = 1 raise except : exc. handle_exception ( ) if channel == ""log"" : pass else : self. log ( ""Error in %r listener %r"" % ( channel, listener ), level = 40, traceback = True, ) if exc : raise exc return output",False,exc and e.code == 0,e,0.6606873273849487
347,"def bitcoin_done ( request ) : with mock. patch ( ""bitcoinrpc.connection.BitcoinConnection"" ) as MockBitcoinConnection : connection = MockBitcoinConnection ( ) connection. getnewaddress. return_value = BTC_TEST_ADDRESS connection. listtransactions. return_value = BTC_TEST_SUCCESSFUL_TXNS amount = 0.01 bitcoin_obj = get_gateway ( ""bitcoin"" ) address = request. session. get ( ""bitcoin_address"", None ) if not address : return HttpResponseRedirect ( reverse ( ""app_bitcoin"" ) ) result = bitcoin_obj. purchase ( amount, address ) if : del request. session [ ""bitcoin_address"" ] return render ( request, ""app/bitcoin_done.html"", { ""title"" : ""Bitcoin"", ""amount"" : amount, ""address"" : address, ""result"" : result, }, )",False,result['status'] == 'SUCCESS',result,0.6622365713119507
348,"def ensemble ( self, pairs, other_preds ) : """"""Ensemble the dict with statistical model predictions."""""" lemmas = [ ] assert len ( pairs ) == len ( other_preds ) for p, pred in zip ( pairs, other_preds ) : w, pos = p if ( w, pos ) in self. composite_dict : lemma = self. composite_dict [ ( w, pos ) ] elif w in self. word_dict : lemma = self. word_dict [ w ] else : lemma = pred if : lemma = w lemmas. append ( lemma ) return lemmas",False,lemma is None,lemma is not None,0.6770522594451904
349,"def __editorKeyPress ( editor, event ) : if event. key == ""B"" : __findBookmark ( editor ) return True if event. key in [ str ( x ) for x in range ( 0, 10 ) ] : numericBookmark = int ( event. key ) if event. modifiers == event. modifiers. Control : node = None if isinstance ( editor, GafferUI. GraphEditor ) : selection = editor. scriptNode ( ). selection ( ) if len ( selection ) == 1 : node = selection [ 0 ] else : backdrops = [ n for n in selection if isinstance ( n, Gaffer. Backdrop ) ] if : node = backdrops [ 0 ] elif isinstance ( editor, GafferUI. NodeSetEditor ) : nodeSet = editor. getNodeSet ( ) node = nodeSet [ - 1 ] if len ( nodeSet ) else None if node is not None : __assignNumericBookmark ( node, numericBookmark ) elif not event. modifiers : if numericBookmark!= 0 : __findNumericBookmark ( editor, numericBookmark ) elif isinstance ( editor, GafferUI. Node",False,len(backdrops) == 1,len(backdrops) > 0,0.6597782373428345
350,"def kc_pressed ( self, key, modifierFlags ) : if modifierFlags == CTRL_KEY_FLAG : if key == ""C"" : self. send ( ""\x03"" ) self. telnet. running = False elif key == ""D"" : self. send ( ""\x04"" ) elif : self. send ( ""\x01"" ) elif key == ""E"" : self. send ( ""\x05"" ) elif key == ""K"" : self. send ( ""\x0B"" ) elif key == ""L"" : self. send ( ""\x0C"" ) elif key == ""U"" : self. send ( ""\x15"" ) elif key == ""Z"" : self. send ( ""\x1A"" ) elif key == ""["" : self. send ( ""\x1B"" ) elif modifierFlags == 0 : if key == ""UIKeyInputUpArrow"" : self. send ( ""\x10"" ) elif key == ""UIKeyInputDownArrow"" : self. send ( ""\x0E"" ) elif key == ""UIKeyInputLeftArrow"" : self. send ( ""\033[D"" ) elif key == ""UIKeyInputRightArrow"" : self. send ( ""\033[C"" )",False,key == 'A',key == 'M',0.6609237194061279
351,"def starttag ( self, quoteattr = None ) : if quoteattr is None : quoteattr = pseudo_quoteattr parts = [ self. tagname ] for name, value in self. attlist ( ) : if value is None : parts. append ( '%s=""True""' % name ) continue if : values = [ serial_escape ( ""%s"" % ( v, ) ) for v in value ] value = "" "". join ( values ) else : value = unicode ( value ) value = quoteattr ( value ) parts. append ( u""%s=%s"" % ( name, value ) ) return u""<%s>"" % u"" "". join ( parts )",False,"isinstance(value, list)","isinstance(value, basestring)",0.6511034965515137
352,"def get_tag_values ( self, event ) : http = event. interfaces. get ( ""sentry.interfaces.Http"" ) if not http : return [ ] if not http. headers : return [ ] headers = http. headers if isinstance ( headers, dict ) : headers = headers. items ( ) output = [ ] for key, value in headers : if key!= ""User-Agent"" : continue ua = Parse ( value ) if : continue result = self. get_tag_from_ua ( ua ) if result : output. append ( result ) return output",False,not ua,ua,0.7112003564834595
353,"def post ( self ) : old = self. _fetch_existing_config ( ) new = dict ( ) for key in self. ALLOWED. keys ( ) : if self. ALLOWED [ key ] == bool : val = self. get_argument ( key, False ) else : val = self. get_argument ( key, None ) if val is None or val == """" : new [ key ] = old [ key ] elif key == ""pwdhash"" : new [ key ] = bcrypt. hashpw ( val, bcrypt. gensalt ( ) ) elif : new [ key ] = str ( val ) elif self. ALLOWED [ key ] == int : new [ key ] = int ( val ) elif self. ALLOWED [ key ] == bool : new [ key ] = bool ( val ) config_file = open ( self. settings. config_path. web, ""w"" ) for key, val in new. items ( ) : if : config_file. write ( ""%s='%s'\n"" % ( key, val ) ) else : config_file. write ( ""%s=%s\n"" % ( key, val ) ) config_file. close ( ) self. redirect ( ""/"" )",True,self.ALLOWED[key] == str,self.ALLOWED[key] == str,0.659371554851532
354,"def check_samplers_fit_resample ( name, sampler_orig ) : sampler = clone ( sampler_orig ) X, y = make_classification ( n_samples = 1000, n_classes = 3, n_informative = 4, weights = [ 0.2, 0.3, 0.5 ], random_state = 0, ) target_stats = Counter ( y ) X_res, y_res = sampler. fit_resample ( X, y ) if isinstance ( sampler, BaseOverSampler ) : target_stats_res = Counter ( y_res ) n_samples = max ( target_stats. values ( ) ) assert all ( value >= n_samples for value in Counter ( y_res ). values ( ) ) elif isinstance ( sampler, BaseUnderSampler ) : n_samples = min ( target_stats. values ( ) ) if : assert all ( Counter ( y_res ) [ k ] <= target_stats [ k ] for k in target_stats. keys ( ) ) else : assert all ( value == n_samples for value in Counter ( y_res ). values ( ) ) elif isinstance ( sampler, BaseCleaningSampler ) : target_stats_res = Counter ( y_res ) class_minority = min ( target_stats, key = target_stats. get ) assert all ( target_stats [ class_sample ] > target_stats_res [ class_sample ] for class_sample in target_stats. keys ( ) <",False,name == 'InstanceHardnessThreshold',"isinstance(sampler, BaseMultiSampler)",0.6591370701789856
355,"def preprocess_raw_enwik9 ( input_filename, output_filename ) : with open ( input_filename, ""r"" ) as f1 : with open ( output_filename, ""w"" ) as f2 : while True : line = f1. readline ( ) if not line : break line = list ( enwik9_norm_transform ( [ line ] ) ) [ 0 ] if line!= "" "" and line!= """" : if : line = line [ 1 : ] f2. writelines ( line + ""\n"" )",False,line[0] == '',line.startswith(' <,0.6549445390701294
356,"def __setitem__ ( self, key, value ) : if isinstance ( value, ( tuple, list ) ) : info, reference = value if info not in self. _reverse_infos : self. _reverse_infos [ info ] = len ( self. _infos ) self. _infos. append ( info ) if : self. _reverse_references [ reference ] = len ( self. _references ) self. _references. append ( reference ) self. _trails [ key ] = ""%d,%d"" % ( self. _reverse_infos [ info ], self. _reverse_references [ reference ], ) else : raise Exception ( ""unsupported type '%s'"" % type ( value ) )",False,reference not in self._reverse_references,reference not in self._trails,0.664270281791687
357,"def init ( self, view, items = None ) : selections = [ ] if view. sel ( ) : for region in view. sel ( ) : selections. append ( view. substr ( region ) ) values = [ ] for idx, index in enumerate ( map ( int, items ) ) : if : break i = index - 1 if i >= 0 and i < len ( selections ) : values. append ( selections [ i ] ) else : values. append ( None ) for idx, value in enumerate ( selections ) : if len ( values ) + 1 < idx : values. append ( value ) self. stack = values",False,idx >= len(selections),index == 0,0.657119631767273
358,"def viewrendered ( event ) : """"""Open render view for commander"""""" c = event. get ( ""c"" ) if not c : return None global controllers, layouts vr = controllers. get ( c. hash ( ) ) if vr : vr. activate ( ) vr. show ( ) vr. adjust_layout ( ""open"" ) else : h = c. hash ( ) controllers [ h ] = vr = ViewRenderedController ( c ) layouts [ h ] = c. db. get ( ""viewrendered_default_layouts"", ( None, None ) ) if hasattr ( c, ""free_layout"" ) : vr. _ns_id = ""_leo_viewrendered"" vr. splitter = splitter = c. free_layout. get_top_splitter ( ) if splitter : vr. store_layout ( ""closed"" ) sizes = split_last_sizes ( splitter. sizes ( ) ) ok = splitter. add_adjacent ( vr, ""bodyFrame"", ""right-of"" ) if : splitter. insert ( 0, vr ) else : if splitter. orientation ( ) == QtCore. Qt. Horizontal : splitter. setSizes ( sizes ) vr. adjust_layout ( ""open"" ) else : vr. setWindowTitle ( ""Rendered View"" ) <",False,not ok,ok,0.6844743490219116
359,"def _stringify ( value ) : """"""Internal function."""""" if isinstance ( value, ( list, tuple ) ) : if len ( value ) == 1 : value = _stringify ( value [ 0 ] ) if : value = ""{%s}"" % value else : value = ""{%s}"" % _join ( value ) else : if isinstance ( value, basestring ) : value = unicode ( value ) else : value = str ( value ) if not value : value = ""{}"" elif _magic_re. search ( value ) : value = _magic_re. sub ( r""\\\1"", value ) value = _space_re. sub ( r""\\\1"", value ) elif value [ 0 ] == '""' or _space_re. search ( value ) : value = ""{%s}"" % value return value",False,value[0] == '{',"isinstance(value, dict)",0.6642587184906006
360,"def __init__ ( self, host : str, port : int, app : ""WSGIApplication"", handler : t. Optional [ t. Type [ WSGIRequestHandler ] ] = None, passthrough_errors : bool = False, ssl_context : t. Optional [ _TSSLContextArg ] = None, fd : t. Optional [ int ] = None, ) -> None : if handler is None : handler = WSGIRequestHandler self. address_family = select_address_family ( host, port ) if fd is not None : real_sock = socket. fromfd ( fd, self. address_family, socket. SOCK_STREAM ) port = 0 server_address = get_sockaddr ( host, int ( port ), self. address_family ) if self. address_family == af_unix : server_address = t. cast ( str, server_address ) if os. path. exists ( server_address ) : os. unlink ( server_address ) super ( ). __init__ ( server_address, handler ) self. app = app self. passthrough_errors = passthrough_errors self. shutdown_signal = False self. host = host self. port = self. socket. getsockname ( ) [ 1 ] if fd is not None : self. socket. close ( ) self. socket = real_sock self. server_address = self. socket. getsockname ( ) if ssl_context is not None : if : ssl_context = load_ssl_context ( * ssl_context ) if ssl_context == ""adhoc"" : ssl_context = generate_adhoc_ssl_context ( ) """"""This is where the logic for role unmutes is taken care of"""""" log. debug ( ""Checking server unmutes"" ) for g_id in self. _server_mutes : guild = self. bot. get_guild ( g_id ) if guild is None or await self. bot. cog_disabled_in_guild ( self, guild ) : continue await i18n. set_contextual_locales_from_guild ( self. bot, guild ) for u_id in self. _server_mutes [ guild. id ] : if self. _server_mutes [ guild. id ] [ u_id ] [ ""until"" ] is None : continue time_to_unmute = ( self. _server_mutes [ guild. id ] [ u_id ] [ ""until"" ] - datetime. now ( timezone. utc ). timestamp ( ) ) if time_to_unmute < 60.0 : task_name = f""server-unmute-{g_id}-{u_id}"" if : continue log. debug ( f""Creating task: {task_name}"" ) self. _unmute_tasks [ task_name ] = asyncio. create_task ( self. _auto_unmute_user ( guild, self. _server_mutes [ guild. id ] [ u_id ] ) ",False,task_name in self._unmute_tasks,task_name not in self._unmute_tasks,0.6589851975440979
362,"def indent ( elem, level = 0 ) : i = ""\n"" + level * "" "" if len ( elem ) : if not elem. text or not elem. text. strip ( ) : elem. text = i + "" "" if not elem. tail or not elem. tail. strip ( ) : elem. tail = i for elem in elem : indent ( elem, level + 1 ) if not elem. tail or not elem. tail. strip ( ) : elem. tail = i else : if : elem. tail = i",False,level and (not elem.tail or not elem.tail.strip()),len(elem.tail),0.6508496403694153
363,"def pg_launcher ( pre_created_pgs, num_pgs_to_create ) : pgs = [ ] pgs += pre_created_pgs for i in range ( num_pgs_to_create ) : pgs. append ( placement_group ( bundles, strategy = ""STRICT_SPREAD"", name = str ( i ) ) ) pgs_removed = [ ] pgs_unremoved = [ ] for pg in pgs : if random ( ) < 0.5 : pgs_removed. append ( pg ) else : pgs_unremoved. append ( pg ) tasks = [ ] max_actor_cnt = 5 actor_cnt = 0 actors = [ ] for pg in pgs_unremoved : if random ( ) < 0.5 : tasks. append ( mock_task. options ( placement_group = pg ). remote ( ) ) else : if : actors. append ( MockActor. options ( placement_group = pg ). remote ( ) ) actor_cnt += 1 for pg in pgs_removed : remove_placement_group ( pg ) ray. get ( [ pg. ready ( ) for pg in pgs_unremoved ] ) ray. get ( tasks ) ray. get ( [ actor. ping. remote ( ) for actor in actors ] ) for pg in pgs_unremoved : remove_placement_group ( pg )",False,actor_cnt < max_actor_cnt,random() < 0.5,0.6598740816116333
364,"def _find_names ( self, lr_schedulers ) -> List [ str ] : names = [ ] for scheduler in lr_schedulers : sch = scheduler [ ""scheduler"" ] if : name = scheduler [ ""name"" ] else : opt_name = ""lr-"" + sch. optimizer. __class__. __name__ i, name = 1, opt_name while True : if name not in names : break i, name = i + 1, f""{opt_name}-{i}"" param_groups = sch. optimizer. param_groups if len ( param_groups )!= 1 : for i, pg in enumerate ( param_groups ) : temp = f""{name}/pg{i + 1}"" names. append ( temp ) else : names. append ( name ) self. lr_sch_names. append ( name ) return names",False,scheduler['name'] is not None,sch.has_option,0.6601338386535645
365,"def _adjust_to_data ( self, trace, data_trace ) : subsampled_idxs = dict ( ) for name, site in trace. iter_stochastic_nodes ( ) : if : site [ ""fn"" ] = data_trace. nodes [ name ] [ ""fn"" ] site [ ""value"" ] = data_trace. nodes [ name ] [ ""value"" ] orig_cis_stack = site [ ""cond_indep_stack"" ] site [ ""cond_indep_stack"" ] = data_trace. nodes [ name ] [ ""cond_indep_stack"" ] assert len ( orig_cis_stack ) == len ( site [ ""cond_indep_stack"" ] ) site [ ""fn"" ] = data_trace. nodes [ name ] [ ""fn"" ] for ocis, cis in zip ( orig_cis_stack, site [ ""cond_indep_stack"" ] ) : assert ocis. name == cis. name assert not site_is_subsample ( site ) batch_dim = cis. dim - site [ ""fn"" ]. event_dim subsampled_idxs [ cis. name ] = subsampled_idxs. get ( cis. name, torch. randint ( 0, ocis. size, ( cis. size, ), device = site [ ""value"" ]. device ), ) site [ ""value"" ] = site [ ""value"" ]. index_select ( batch_",False,site_is_subsample(site),name in data_trace.nodes,0.6477853059768677
366,"def deserialize ( self, data ) : parts = data. pop ( ""parts"" ) self. parts = { } self. __dict__. update ( data ) if parts : for part_id, part in six. iteritems ( parts ) : self. parts [ part_id ] = { } for language, sub_data in six. iteritems ( part ) : self. parts [ part_id ] [ language ] = { } for sub_key, subtitle_data in six. iteritems ( sub_data ) : if sub_key == ""current"" : if : subtitle_data = tuple ( subtitle_data. split ( ""__"" ) ) self. parts [ part_id ] [ language ] [ ""current"" ] = subtitle_data elif sub_key == ""blacklist"" : bl = dict ( ( tuple ( [ str ( a ) for a in k. split ( ""__"" ) ] ), v ) for k, v in six. iteritems ( subtitle_data ) ) self. parts [ part_id ] [ language ] [ ""blacklist"" ] = bl else : sub = JSONStoredSubtitle ( )",False,"not isinstance(subtitle_data, tuple)",subtitle_data,0.6483365297317505
367,"def get_php_interpreter_path ( ver ) : config = get_config ( ) candidates = [ join ( config. phpsBaseDir, ver + ""*"" ), join ( config. phpsBaseDir, ""php-%s*"" % ver ), ] for pattern in candidates : base_dirs = glob ( pattern ) if base_dirs : base_dir = base_dirs [ 0 ] break else : import subprocess exe_paths = findPathsForInterpreters ( [ ""php"" ], ""PHP"" ) for exe in exe_paths : try : p = subprocess. Popen ( [ exe, ""-r"", ""echo phpversion();"" ], stdout = subprocess. PIPE ) stdout, _ = p. communicate ( ) if stdout. strip ( ). startswith ( ver ) : return exe except IOError : pass raise TestSkipped ( ""could not find PHP %s for testing: '%s' don't "" ""exist"" % ( ver, ""', '"". join ( candidates ) ) ) if sys. platform == ""win32"" : candidates = [ join ( base_dir, ""php.exe"" ), join ( base_dir, ""Release_TS"", ""php.exe"" ), ] <",False,exists(candidate),len(candidates) > 0,0.6603810787200928
368,"def __getitem__ ( self, name, set = set, getattr = getattr, id = id ) : visited = set ( ) mydict = self. basedict while 1 : value = mydict [ name ] if value is not None : return value myid = id ( mydict ) assert myid not in visited visited. add ( myid ) mydict = mydict. Parent if : return",False,mydict is None,myid not in visited,0.6602520942687988
369,"def selectionToChunks ( self, remove = False, add = False ) : box = self. selectionBox ( ) if box : if box == self. level. bounds : self. selectedChunks = set ( self. level. allChunks ) return selectedChunks = self. selectedChunks boxedChunks = set ( box. chunkPositions ) if boxedChunks. issubset ( selectedChunks ) : remove = True if : selectedChunks. difference_update ( boxedChunks ) else : selectedChunks. update ( boxedChunks ) self. selectionTool. selectNone ( )",False,remove and (not add),remove,0.6538796424865723
370,"def change_opacity_function ( self, new_f ) : self. opacity_function = new_f dr = self. radius / self. num_levels sectors = [ ] for submob in self. submobjects : if : sectors. append ( submob ) for ( r, submob ) in zip ( np. arange ( 0, self. radius, dr ), sectors ) : if type ( submob )!= AnnularSector : continue alpha = self. opacity_function ( r ) submob. set_fill ( opacity = alpha )",False,type(submob) == AnnularSector,type(submob) != AnnularSector,0.6634690165519714
371,"def addOutput ( self, data, isAsync = None, ** kwargs ) : isAsync = _get_async_param ( isAsync, ** kwargs ) if isAsync : self. terminal. eraseLine ( ) self. terminal. cursorBackward ( len ( self. lineBuffer ) + len ( self. ps [ self. pn ] ) ) self. terminal. write ( data ) if isAsync : if self. _needsNewline ( ) : self. terminal. nextLine ( ) self. terminal. write ( self. ps [ self. pn ] ) if : oldBuffer = self. lineBuffer self. lineBuffer = [ ] self. lineBufferIndex = 0 self. _deliverBuffer ( oldBuffer )",True,self.lineBuffer,self.lineBuffer,0.6624734401702881
372,"def testSimple ( self, useCluster = False ) : """"""Run with one really bad swarm to see if terminator picks it up correctly"""""" if not g_myEnv. options. runInProc : self. skipTest ( ""Skipping One Node test since runInProc is not specified"" ) self. _printTestHeader ( ) expDir = os. path. join ( g_myEnv. testSrcExpDir, ""swarm_v2"" ) ( jobID, jobInfo, resultInfos, metricResults, minErrScore ) = self. runPermutations ( expDir, hsImp = ""v2"", loggingLevel = g_myEnv. options. logLevel, maxModels = None, onCluster = useCluster, env = self. env, dummyModel = { ""iterations"" : 200 }, ) cjDB = ClientJobsDAO. get ( ) jobResultsStr = cjDB. jobGetFields ( jobID, [ ""results"" ] ) [ 0 ] jobResults = json. loads ( jobResultsStr ) terminatedSwarms = jobResults [ ""terminatedSwarms"" ] swarmMaturityWindow = int ( configuration. Configuration. get ( ""nupic.hypersearch.swarmMaturityWindow"" ) ) prefix = ""modelParams|sensorParams|encoders|"" for swarm, ( generation, scores ) in terminatedSwarms. iteritems ( ) : if : self. assertEqual ( generation, swarmMaturityWindow - 1 ) else : self. assertEqual ( generation, swarmMaturityWindow - 1 + 4 )",False,prefix + 'gym' in swarm.split('.'),useCluster,0.650718092918396
373,"def fit ( self, dataset, force_retrain ) : if force_retrain : self. sub_unit_1 [ ""fitted"" ] = True self. sub_unit_1 [ ""calls"" ] += 1 self. sub_unit_2 [ ""fitted"" ] = True self. sub_unit_2 [ ""calls"" ] += 1 else : if : self. sub_unit_1 [ ""fitted"" ] = True self. sub_unit_1 [ ""calls"" ] += 1 if not self. sub_unit_2 [ ""fitted"" ] : self. sub_unit_2 [ ""fitted"" ] = True self. sub_unit_2 [ ""calls"" ] += 1 return self",True,not self.sub_unit_1['fitted'],not self.sub_unit_1['fitted'],0.6576662063598633
374,"def event_cb ( self, widget, event ) : if event. type == Gdk. EventType. EXPOSE : return False msg = self. event2str ( widget, event ) motion_reports_limit = 5 if event. type == Gdk. EventType. MOTION_NOTIFY : if : dt = event. time - self. last_motion_time self. motion_event_counter += 1 self. motion_dtime_sample. append ( dt ) self. motion_dtime_sample = self. motion_dtime_sample [ - 10 : ] self. last_motion_time = event. time if len ( self. motion_reports ) < motion_reports_limit : self. report ( msg ) self. motion_reports. append ( msg ) else : unreported = self. motion_reports [ motion_reports_limit : ] if unreported : last_report = unreported. pop ( ) if unreported : self. report ( ""... MOTION_NOTIFY %d events suppressed"" % len ( unreported ) ) self. report ( last_report ) self. motion_reports = [ ] self. report ( msg ) return False",False,widget is self.app.doc.tdw,self.motion_event_counter >= 10,0.6579163074493408
375,"def _terminal_messenger ( tp = ""write"", msg = """", out = sys. stdout ) : try : if tp == ""write"" : out. write ( msg ) elif tp == ""flush"" : out. flush ( ) elif : out. write ( msg ) out. flush ( ) elif tp == ""print"" : print ( msg, file = out ) else : raise ValueError ( ""Unsupported type: "" + tp ) except IOError as e : logger. critical ( ""{}: {}"". format ( type ( e ). __name__, ucd ( e ) ) ) pass",False,tp == 'write_flush',tp == 'flush_all',0.6675392389297485
376,"def test_file_output ( ) : """"""Test output to arbitrary file-like objects"""""" with closing ( StringIO ( ) ) as our_file : for i in tqdm ( _range ( 3 ), file = our_file ) : if : our_file. seek ( 0 ) assert ""0/3"" in our_file. read ( )",False,i == 1,i % 2 == 0,0.6780132055282593
377,def _transmit_from_storage ( self ) -> None : for blob in self. storage. gets ( ) : if : envelopes = [ TelemetryItem ( ** x ) for x in blob. get ( ) ] result = self. _transmit ( list ( envelopes ) ) if result == ExportResult. FAILED_RETRYABLE : blob. lease ( 1 ) else : blob. delete ( ),False,blob.lease(self._timeout + 5),blob.has(),0.6557530164718628
378,"def __plugContextMenuSignal ( graphEditor, plug, menuDefinition ) : nodeGadget = graphEditor. graphGadget ( ). nodeGadget ( plug. node ( ) ) if not nodeGadget : return nodule = nodeGadget. nodule ( plug ) if : plug = plug. parent ( ) if isinstance ( plug, Gaffer. Plug ) : nodule = nodeGadget. nodule ( plug ) if : return childNames = """". join ( c. getName ( ) for c in plug ). upper ( ) if len ( nodule ) > 0 : menuDefinition. append ( ""/Collapse {} Components"". format ( childNames ), { ""command"" : functools. partial ( __applyChildVisibility, plug, False ), ""active"" : not Gaffer. MetadataAlgo. readOnly ( plug ), }, ) else : menuDefinition. append ( ""/Expand {} Components"". format ( childNames ), { ""command"" : functools. partial ( __applyChildVisibility, plug, True ), ""active"" : not Gaffer. MetadataAlgo. readOnly ( plug ), }, )",False,"not isinstance(nodule, GafferUI.CompoundNumericNodule)",not nodule,0.6510072946548462
379,"def main ( ) : parser = argparse. ArgumentParser ( description = ""Dispatcher command line parser"" ) parser. add_argument ( ""--exp_params"", type = str, required = True ) args, _ = parser. parse_known_args ( ) exp_params_decode = base64. b64decode ( args. exp_params ). decode ( ""utf-8"" ) logger. debug ( ""decoded exp_params: [%s]"", exp_params_decode ) exp_params = json. loads ( exp_params_decode ) logger. debug ( ""exp_params json obj: [%s]"", json. dumps ( exp_params, indent = 4 ) ) if exp_params. get ( ""multiThread"" ) : enable_multi_thread ( ) if exp_params. get ( ""multiPhase"" ) : enable_multi_phase ( ) if exp_params. get ( ""advisor"" ) is not None : _run_advisor ( exp_params ) else : assert exp_params. get ( ""tuner"" ) is not None tuner = _create_tuner ( exp_params ) if exp_params. get ( ""assessor"" ) is not None : assessor = _create_assessor ( exp_params ) else : assessor = None dispatcher = MsgDispatcher ( tuner, assessor ) try : dispatcher. run ( ) tuner. _on_exit ( ) if : assessor. _on_exit ( ) except Exception as exception : logger. exception ( exception ) ",True,assessor is not None,assessor is not None,0.6554909348487854
380,"def main ( args ) : alphabet = args. alphabet subsize = args. length if args. lookup : pat = args. lookup try : pat = int ( pat, 0 ) except ValueError : pass pat = flat ( pat, bytes = args. length ) if : log. critical ( ""Subpattern must be %d bytes"" % subsize ) sys. exit ( 1 ) if not all ( c in alphabet for c in pat ) : log. critical ( ""Pattern contains characters not present in the alphabet"" ) sys. exit ( 1 ) offset = cyclic_find ( pat, alphabet, subsize ) if offset == - 1 : log. critical ( ""Given pattern does not exist in cyclic pattern"" ) sys. exit ( 1 ) else : print ( offset ) else : want = args. count result = cyclic ( want, alphabet, subsize ) got = len ( result ) if want is not None and got < want : log. failure ( ""Alphabet too small (max length = %i)"" % got ) out = getattr ( sys. stdout, ""buffer"", sys. stdout ) out. write ( result ) if out. isatty ( ) : out. write ( b""\n"" )",False,len(pat) != subsize,subsize and len(pat) > 0,0.6584582328796387
381,"def post_create ( self, user, billing = None ) : from weblate. trans. models import Change if billing : billing. projects. add ( self ) if : self. access_control = Project. ACCESS_PRIVATE else : self. access_control = Project. ACCESS_PUBLIC self. save ( ) if not user. is_superuser : self. add_user ( user, ""@Administration"" ) Change. objects. create ( action = Change. ACTION_CREATE_PROJECT, project = self, user = user, author = user )",False,billing.plan.change_access_control,user.is_private,0.6539928913116455
382,"def _determine_tool_runners ( self, config, profile ) : if config. tools is None : to_run = set ( DEFAULT_TOOLS ) for tool in tools. TOOLS. keys ( ) : if profile. is_tool_enabled ( tool ) : to_run. add ( tool ) else : to_run = set ( config. tools ) for tool in config. with_tools : to_run. add ( tool ) for tool in config. without_tools : if tool in to_run : to_run. remove ( tool ) if ( config. tools is None and len ( config. with_tools ) == 0 and len ( config. without_tools ) == 0 ) : for tool in tools. TOOLS. keys ( ) : enabled = profile. is_tool_enabled ( tool ) if enabled is None : enabled = tool in DEFAULT_TOOLS if : to_run. remove ( tool ) return sorted ( list ( to_run ) )",False,tool in to_run and (not enabled),enabled,0.6497543454170227
383,"def sample_admin_user ( ) : """"""List of iris messages"""""" with iris_ctl. db_from_config ( sample_db_config ) as ( conn, cursor ) : cursor. execute ( ""SELECT `name` FROM `target` JOIN `user` on `target`.`id` = `user`.`target_id` WHERE `user`.`admin` = TRUE LIMIT 1"" ) result = cursor. fetchone ( ) if : return result [ 0 ]",True,result,result,0.6948719620704651
384,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. type = iprot. readString ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRING,self.type == TType.STRING,0.6586868762969971
385,"def groups ( self, trans, ** kwargs ) : if ""operation"" in kwargs : operation = kwargs [ ""operation"" ]. lower ( ). replace ( ""+"", "" "" ) if operation == ""groups"" : return self. group ( trans, ** kwargs ) if operation == ""create"" : return self. create_group ( trans, ** kwargs ) if operation == ""delete"" : return self. mark_group_deleted ( trans, ** kwargs ) if operation == ""undelete"" : return self. undelete_group ( trans, ** kwargs ) if operation == ""purge"" : return self. purge_group ( trans, ** kwargs ) if : return self. manage_users_and_roles_for_group ( trans, ** kwargs ) if operation == ""rename"" : return self. rename_group ( trans, ** kwargs ) return self. group_list_grid ( trans, ** kwargs )",False,operation == 'manage users and roles',operation == 'manage_users_and_roles',0.6560624837875366
386,"def compare_hash ( hash_of_gold, path_to_file ) : with open ( path_to_file, ""rb"" ) as f : hash_of_file = hashlib. sha256 ( f. read ( ) ). hexdigest ( ) if : print ( ""########## Hash sum of"", path_to_file, ""differs from the target, the topology will be deleted!!! ##########"", ) shutil. rmtree ( os. path. dirname ( path_to_file ) )",False,hash_of_file != hash_of_gold,hash_of_gold != path_to_file or hash_of_file != path_to_file,0.6501489877700806
387,"def _get_node ( self, node_id ) : self. non_terminated_nodes ( { } ) with self. lock : if : return self. cached_nodes [ node_id ] instance = ( self. compute. instances ( ) . get ( project = self. provider_config [ ""project_id"" ], zone = self. provider_config [ ""availability_zone"" ], instance = node_id, ) . execute ( ) ) return instance",True,node_id in self.cached_nodes,node_id in self.cached_nodes,0.660037636756897
388,"def _validate_and_define ( params, key, value ) : ( key, force_generic ) = _validate_key ( _unescape ( key ) ) if key in params : raise SyntaxError ( f'duplicate key ""{key}""' ) cls = _class_for_key. get ( key, GenericParam ) emptiness = cls. emptiness ( ) if value is None : if : raise SyntaxError ( ""value cannot be empty"" ) value = cls. from_value ( value ) else : if force_generic : value = cls. from_wire_parser ( dns. wire. Parser ( _unescape ( value ) ) ) else : value = cls. from_value ( value ) params [ key ] = value",False,emptiness == Emptiness.NEVER,emptiness,0.6540172100067139
389,"def get_components_list ( component_revisions_dict, job_type ) : """"""Return a prioritized order of components based on job type."""""" components = sorted ( component_revisions_dict. keys ( ) ) if utils. is_chromium ( ) : return components project_name = data_handler. get_project_name ( job_type ) if not project_name : return components main_repo = data_handler. get_main_repo ( job_type ) project_src = ""/src/"" + project_name for component in components. copy ( ) : if component_revisions_dict [ component ] [ ""url"" ] == main_repo : components. remove ( component ) components. insert ( 0, component ) break if : components. remove ( component ) components. insert ( 0, component ) break if project_name. lower ( ) in os. path. basename ( component ). lower ( ) : components. remove ( component ) components. insert ( 0, component ) return components",False,component == project_src,project_src.lower() in os.path.basename(component),0.6638051271438599
390,"def initEnv ( self, mandatory = True, detailed = False, web = False, forceInit = False ) : self. _initRunAs ( ) if self. envInitialized and not forceInit : return if web : self. webInit ( ) else : self. checkDbmsOs ( detailed ) if : warnMsg = ""functionality requested probably does not work because "" warnMsg += ""the curent session user is not a database administrator"" if not conf. dbmsCred and Backend. getIdentifiedDbms ( ) in ( DBMS. MSSQL, DBMS. PGSQL, ) : warnMsg += "". You can try to use option '--dbms-cred' "" warnMsg += ""to execute statements as a DBA user if you "" warnMsg += ""were able to extract and crack a DBA "" warnMsg += ""password by any mean"" logger. warn ( warnMsg ) if Backend. getIdentifiedDbms ( ) in ( DBMS. MYSQL, DBMS. PGSQL ) : success = self. udfInjectSys ( ) if success is not True : msg = ""unable to mount the operating system takeover"" raise SqlmapFilePathException ( msg ) elif Backend. isDbms ( DBMS. MSSQL ) : if mandatory : self. xpCmdshellInit ( ) if self. _lastVLCPositionUpdate is None : return self. _client. getGlobalPosition ( ) diff = time. time ( ) - self. _lastVLCPositionUpdate if diff > constants. PLAYER_ASK_DELAY and not self. _paused : self. _client. ui. showDebugMessage ( ""VLC did not response in time, so assuming position is {} ({}+{})"". format ( self. _position + diff, self. _position, diff ) ) if : if not self. shownVLCLatencyError or constants. DEBUG_MODE : self. _client. ui. showErrorMessage ( getMessage ( ""media-player-latency-warning"" ). format ( int ( diff ) ) ) self. shownVLCLatencyError = True return self. _position + diff else : return self. _position",False,diff > constants.VLC_LATENCY_ERROR_THRESHOLD,diff > 0,0.654369592666626
392,"def build_query_from_field ( self, field_name, operation ) : if field_name == ""permission"" : if operation [ ""op"" ]!= ""eq"" : raise InvalidFilterOperator ( value = operation [ ""op"" ], valid_operators = [ ""eq"" ] ) query_val = operation [ ""value"" ]. lower ( ). strip ( ) if : raise InvalidFilterValue ( value = operation [ ""value"" ] ) resource = self. get_resource ( ) if query_val == READ : return Q ( user_id__in = resource. contributors. values_list ( ""id"", flat = True ) ) elif query_val == WRITE : return Q ( user_id__in = ( resource. get_group ( WRITE ). user_set. values_list ( ""id"", flat = True ) | resource. get_group ( ADMIN ). user_set. values_list ( ""id"", flat = True ) ) ) elif query_val == ADMIN : return Q ( user_id__in = resource. get_group ( ADMIN ). user_set. values_list ( ""id"", flat = True ) ) return super ( BaseContributorList, self ). build_query",False,query_val not in API_CONTRIBUTOR_PERMISSIONS,query_val != None,0.6540019512176514
393,"def login ( self ) : error = None Form = self. get_login_form ( ) if request. method == ""POST"" : form = Form ( request. form ) next_url = request. form. get ( ""next"" ) or self. default_next_url if : authenticated_user = self. authenticate ( form. username. data, form. password. data, ) if authenticated_user : self. login_user ( authenticated_user ) return redirect ( next_url ) else : flash ( ""Incorrect username or password"" ) else : form = Form ( ) next_url = request. args. get ( ""next"" ) return render_template ( ""auth/login.html"", error = error, form = form, login_url = url_for ( ""%s.login"" % self. blueprint. name ), next = next_url, )",False,form.validate(),self.login_form and self.password,0.6515189409255981
394,"def preflight ( ) : """"""Preflight checks."""""" logger. warning ( ""This action is deprecated. Use https://github.com/hacs/action instead"" ) event_data = get_event_data ( ) ref = None if REPOSITORY and CATEGORY : repository = REPOSITORY category = CATEGORY pr = False elif GITHUB_REPOSITORY == ""hacs/default"" : category = chose_category ( ) repository = chose_repository ( category ) pr = False logger. info ( f""Actor: {GITHUB_ACTOR}"" ) else : category = CATEGORY. lower ( ) pr = True if event_data. get ( ""pull_request"" ) is not None else False if pr : head = event_data [ ""pull_request"" ] [ ""head"" ] ref = head [ ""ref"" ] repository = head [ ""repo"" ] [ ""full_name"" ] else : repository = GITHUB_REPOSITORY logger. info ( f""Category: {category}"" ) logger. info ( f""Repository: {repository}"" ) if TOKEN is None : error ( ""No GitHub token found, use env GITHUB_TOKEN to set this."" ) if repository is None : error ( ""No repository found, use env REPOSITORY to set this."" ) if category is None : error ( ""No category found, use env CATEGORY to set this."" ) async with aiohttp. ClientSession ( ) as session : github = GitHub ( TOKEN, session ) repo = await github. get_repo ( repository ) ",False,not pr and repo.description is None,get_category() is False,0.6517397165298462
395,"def _wrap_ssl_client ( sock, ssl, server_hostname, alpn_protocols ) : if ssl : if isinstance ( ssl, bool ) : sslcontext = curiossl. create_default_context ( ) if : sslcontext. _context. check_hostname = False sslcontext. _context. verify_mode = curiossl. CERT_NONE if alpn_protocols : sslcontext. set_alpn_protocols ( alpn_protocols ) else : sslcontext = ssl if server_hostname : extra_args = { ""server_hostname"" : server_hostname } else : extra_args = { } if isinstance ( sslcontext, curiossl. CurioSSLContext ) : sock = await sslcontext. wrap_socket ( sock, do_handshake_on_connect = False, ** extra_args ) else : extra_args [ ""do_handshake_on_connect"" ] = sock. _socket. gettimeout ( )!= 0.0 sock = Socket ( sslcontext. wrap_socket ( sock. _socket, ** extra_args ) ) await sock. do_handshake ( ) return",False,not server_hostname,"hasattr(sslcontext, '_context')",0.6621442437171936
396,"def _evaluate_local_single ( self, iterator ) : for batch in iterator : in_arrays = convert. _call_converter ( self. converter, batch, self. device ) with function. no_backprop_mode ( ) : if isinstance ( in_arrays, tuple ) : results = self. calc_local ( * in_arrays ) elif : results = self. calc_local ( ** in_arrays ) else : results = self. calc_local ( in_arrays ) if self. _progress_hook : self. _progress_hook ( batch ) yield results",False,"isinstance(in_arrays, dict)","isinstance(in_arrays, list)",0.6513357162475586
397,"def get_note_title_file ( note ) : mo = note_title_re. match ( note. get ( ""content"", """" ) ) if mo : fn = mo. groups ( ) [ 0 ] fn = fn. replace ( "" "", ""_"" ) fn = fn. replace ( ""/"", ""_"" ) if not fn : return """" if : fn = unicode ( fn, ""utf-8"" ) else : fn = unicode ( fn ) if note_markdown ( note ) : fn += "".mkdn"" else : fn += "".txt"" return fn else : return """"",False,"isinstance(fn, str)","isinstance(fn, unicode)",0.6525890827178955
398,"def run ( self, edit, reverse = False ) : for region in self. view. sel ( ) : line = self. view. line ( region ) line_content = self. view. substr ( line ) bullets = self. view. settings ( ). get ( ""mde.list_indent_bullets"", [ ""*"", ""-"", ""+"" ] ) bullet_pattern = ""(["" + """". join ( re. escape ( i ) for i in bullets ) + ""])"" new_line = line_content if self. view. settings ( ). get ( ""mde.list_indent_auto_switch_bullet"", True ) : for key, bullet in enumerate ( bullets ) : if bullet in new_line : if : continue new_line = new_line. replace ( bullet, bullets [ ( key + ( 1 if not reverse else - 1 ) ) % len ( bullets ) ], ) break if self. view. settings ( ). get ( ""translate_tabs_to_spaces"" ) : tab_str = self. view. settings ( ). get ( ""tab_size"", 4 ) * "" "" else : tab_str = ""\t"" if not reverse : if outevent not in cycle : total = inevent. null ( ) for member in cycle. functions : subtotal = member [ inevent ] for call in member. calls. itervalues ( ) : callee = self. functions [ call. callee_id ] if callee. cycle is not cycle : subtotal += self. _integrate_call ( call, outevent, inevent ) total += subtotal cycle [ outevent ] = total callees = { } for function in self. functions. itervalues ( ) : if : for call in function. calls. itervalues ( ) : callee = self. functions [ call. callee_id ] if callee. cycle is cycle : try : callees [ callee ] += call [ CALL_RATIO ] except KeyError : callees [ callee ] = call [ CALL_RATIO ] for callee, call_ratio in callees. iteritems ( ) : ranks = { } call_ratios = { } partials = { } self. _rank_cycle_function ( cycle, callee, 0, ranks",False,function.cycle is not cycle,function.has_keys,0.6627118587493896
400,"def iter ( self, retry_state ) : fut = retry_state. outcome if fut is None : if : self. before ( retry_state ) return DoAttempt ( ) is_explicit_retry = retry_state. outcome. failed and isinstance ( retry_state. outcome. exception ( ), TryAgain ) if not ( is_explicit_retry or self. retry ( retry_state = retry_state ) ) : return fut. result ( ) if self. after is not None : self. after ( retry_state = retry_state ) self. statistics [ ""delay_since_first_attempt"" ] = retry_state. seconds_since_start if self. stop ( retry_state = retry_state ) : if self. retry_error_callback : return self. retry_error_callback ( retry_state = retry_state ) retry_exc = self. retry_error_cls ( fut ) if self. reraise : raise retry_exc. reraise ( ) six. raise_from ( retry_exc, fut. exception ( ) ) if self. wait : sleep = self. wait ( retry_state = retry_state ) else : sleep = 0.0 retry_state. next_action = RetryAction ( sleep ) retry_state. idle_for += sleep self. statistics [ ""idle_for"" ] += sleep self. statistics [ ""attempt_number"" ] += 1 if self. before_sleep is not None : self. before_sleep ( retry_state = retry_state ) return DoSleep ( sleep )",True,self.before is not None,self.before is not None,0.6536264419555664
401,"def get_boot_command ( self ) : boot_cmd = super ( Connection, self ). get_boot_command ( ) bits = [ self. options. sudo_path, ""-u"", self. options. username ] if self. options. preserve_env : bits += [ ""-E"" ] if self. options. set_home : bits += [ ""-H"" ] if self. options. login : bits += [ ""-i"" ] if self. options. selinux_role : bits += [ ""-r"", self. options. selinux_role ] if self. options. selinux_type : bits += [ ""-t"", self. options. selinux_type ] source_found = False for cmd in boot_cmd [ : ] : if : boot_cmd. remove ( cmd ) source_found = True continue if source_found : if not cmd. endswith ( ""python"" ) : boot_cmd. remove ( cmd ) else : break return bits + [ ""--"" ] + boot_cmd",False,'source' == cmd,cmd.endswith('python'),0.6762728691101074
402,"def _handle_open_tag ( self, html_tag ) : ignored = self. handle_ignore ( html_tag ) tagname = self. handle_replacement ( html_tag ) jannotations = self. read_jannotations ( html_tag ) if not jannotations and tagname in self. labelled_tag_stacks : self. labelled_tag_stacks [ tagname ]. append ( None ) increment = not jannotations for jannotation in arg_to_iter ( jannotations ) : self. extra_required_attrs. extend ( jannotation. pop ( ""required"", [ ] ) ) annotation = self. build_annotation ( jannotation ) self. handle_generated ( annotation, ignored ) self. handle_variant ( annotation ) if : increment = True if annotation. surrounds_attribute : self. labelled_tag_stacks [ tagname ]. append ( annotation ) else : annotation. end_index = annotation. start_index + 1 self. annotations. append ( annotation ) self. next_tag_index += increment",False,annotation.annotation_text is None and (not increment),annotation.surrounds,0.650397539138794
403,"def _check_main_square_in_range ( self ) : """"""Notifies the user via a message in case there is no main square in range"""""" if not self. owner. is_local_player : return for building in self. get_buildings_in_range ( ) : if : if StaticPather. get_path_on_roads ( self. island, self, building ) is not None : if hasattr ( self, ""_main_square_status_icon"" ) : RemoveStatusIcon. broadcast ( self, self, SettlerNotConnectedStatus ) del self. _main_square_status_icon return if not hasattr ( self, ""_main_square_status_icon"" ) : self. _main_square_status_icon = SettlerNotConnectedStatus ( self ) AddStatusIcon. broadcast ( self, self. _main_square_status_icon ) self. session. ingame_gui. message_widget. add ( point = self. position. origin, string_id = ""NO_MAIN_SQUARE_IN_RANGE"", check_duplicate = True, )",False,building.id == BUILDINGS.MAIN_SQUARE,self.island,0.6523812413215637
404,"def __init__ ( self, centered = None, shape_params = ( ) ) : assert centered is None or isinstance ( centered, ( float, torch. Tensor ) ) assert isinstance ( shape_params, ( tuple, list ) ) assert all ( isinstance ( name, str ) for name in shape_params ) if is_validation_enabled ( ) : if isinstance ( centered, float ) : assert 0 <= centered and centered <= 1 elif : assert ( 0 <= centered ). all ( ) assert ( centered <= 1 ). all ( ) else : assert centered is None self. centered = centered self. shape_params = shape_params",True,"isinstance(centered, torch.Tensor)","isinstance(centered, torch.Tensor)",0.6509578227996826
405,"def __get_id_list ( self, user, attr ) : if user. is_superuser or not filer_settings. FILER_ENABLE_PERMISSIONS : return ""All"" allow_list = set ( ) deny_list = set ( ) group_ids = user. groups. all ( ). values_list ( ""id"", flat = True ) q = Q ( user = user ) | Q ( group__in = group_ids ) | Q ( everybody = True ) perms = self. filter ( q ). order_by ( ""folder__tree_id"", ""folder__level"", ""folder__lft"" ) for perm in perms : p = getattr ( perm, attr ) if p is None : continue if not perm. folder : assert perm. type == FolderPermission. ALL if : allow_list. update ( Folder. objects. all ( ). values_list ( ""id"", flat = True ) ) else : deny_list. update ( Folder. objects. all ( ). values_list ( ""id"", flat = True ) ) continue folder_id = perm. folder. id if : allow_list. add ( folder_id ) else : deny_list. add ( folder_id ) if perm. type == FolderPermission. CHILDREN : if : allow_list. update ( perm. folder. get_descendants ( ). values_list ( ""id"", flat = True",False,p == FolderPermission.ALLOW,perm.type == FolderPermission.MANDATORY,0.6616733074188232
406,"def test_native_types ( self ) : for tp, fmt, shape, itemtp in native_types : ob = tp ( ) v = memoryview ( ob ) try : self. assertEqual ( normalize ( v. format ), normalize ( fmt ) ) if : self. assertEqual ( len ( v ), shape [ 0 ] ) else : self. assertEqual ( len ( v ) * sizeof ( itemtp ), sizeof ( ob ) ) self. assertEqual ( v. itemsize, sizeof ( itemtp ) ) self. assertEqual ( v. shape, shape ) self. assertFalse ( v. readonly ) if v. shape : n = 1 for dim in v. shape : n = n * dim self. assertEqual ( n * v. itemsize, len ( v. tobytes ( ) ) ) except : print ( tp ) raise",False,shape,itemtp is None,0.6944328546524048
407,"def uninstall_environments ( self, environments ) : environments = [ env if not env. startswith ( self. conda_context. envs_path ) else os. path. basename ( env ) for env in environments ] return_codes = [ self. conda_context. exec_remove ( [ env ] ) for env in environments ] final_return_code = 0 for env, return_code in zip ( environments, return_codes ) : if : log. debug ( ""Conda environment '%s' successfully removed."" % env ) else : log. debug ( ""Conda environment '%s' could not be removed."" % env ) final_return_code = return_code return final_return_code",False,return_code == 0,env,0.6608858108520508
408,"def updater_run_install_popup_handler ( scene ) : global ran_autocheck_install_popup ran_autocheck_install_popup = True if updater. invalidupdater : return try : bpy. app. handlers. scene_update_post. remove ( updater_run_install_popup_handler ) except : pass if ""ignore"" in updater. json and updater. json [ ""ignore"" ] : return elif ""version_text"" in updater. json and ""version"" in updater. json [ ""version_text"" ] : version = updater. json [ ""version_text"" ] [ ""version"" ] ver_tuple = updater. version_tuple_from_text ( version ) if : if updater. verbose : print ( ""RetopoFlow updater: appears user updated, clearing flag"" ) updater. json_reset_restore ( ) return atr = addon_updater_install_popup. bl_idname. split ( ""."" ) getattr ( getattr ( bpy. ops, atr [ 0 ] ), atr [ 1 ] ) ( ""INVOKE_DEFAULT"" )",False,ver_tuple < updater.current_version,len(ver_tuple) > 0,0.6495919823646545
409,def _test_reducibility ( self ) : graph = networkx. DiGraph ( self. _graph ) self. _make_supergraph ( graph ) while True : changed = False changed |= self. _remove_self_loop ( graph ) changed |= self. _merge_single_entry_node ( graph ) if : break,True,not changed,not changed,0.6800062656402588
410,"def _process_features ( self, datum ) : if len ( datum )!= 2 : raise ValueError ( ""Expected tuples of ({}_id, features), "" ""got {}."". format ( self. _entity_type, datum ) ) entity_id, features = datum if entity_id not in self. _id_mapping : raise ValueError ( ""{entity_type} id {entity_id} not in {entity_type} id mappings."". format ( entity_type = self. _entity_type, entity_id = entity_id ) ) idx = self. _id_mapping [ entity_id ] for ( feature, weight ) in self. _iter_features ( features ) : if : raise ValueError ( ""Feature {} not in feature mapping. "" ""Call fit first."". format ( feature ) ) feature_idx = self. _feature_mapping [ feature ] yield ( idx, feature_idx, weight )",True,feature not in self._feature_mapping,feature not in self._feature_mapping,0.6575216054916382
411,def vsGetFastParseFields ( self ) : fields = [ ] for fname in self. _vs_fields : fobj = self. _vs_values. get ( fname ) if : fields. append ( fobj ) continue fields. extend ( fobj. vsGetFastParseFields ( ) ) return fields,False,fobj.vsIsPrim(),fobj is None,0.6561350226402283
412,"def query ( q ) : url = query_url ( ) + urllib. parse. quote ( json. dumps ( q ) ) ret = None for i in range ( 20 ) : try : ret = urlread ( url ) while ret. startswith ( b""canceling statement due to statement timeout"" ) : ret = urlread ( url ) if not ret : print ( ""ret == None"" ) except IOError : pass if ret : try : data = json. loads ( ret ) if isinstance ( data, dict ) : if : print ( ""error:"" ) print ( ret ) assert ""error"" not in data return data except : print ( ret ) print ( url ) sleep ( 20 )",False,'error' in data,data['error'],0.6619100570678711
413,"def __get_ratio ( self ) : """"""Return splitter ratio of the main splitter."""""" c = self. c free_layout = c. free_layout if free_layout : w = free_layout. get_main_splitter ( ) if w : aList = w. sizes ( ) if : n1, n2 = aList ratio = 0.5 if n1 + n2 == 0 else float ( n1 ) / float ( n1 + n2 ) return ratio return 0.5",False,len(aList) == 2,aList,0.6570168733596802
414,"def _readenv ( var, msg ) : match = _ENV_VAR_PAT. match ( var ) if match and match. groups ( ) : envvar = match. groups ( ) [ 0 ] if envvar in os. environ : value = os. environ [ envvar ] if : value = value. decode ( ""utf8"" ) return value else : raise InvalidConfigException ( ""{} - environment variable '{}' not set"". format ( msg, var ) ) else : raise InvalidConfigException ( ""{} - environment variable name '{}' does not match pattern '{}'"". format ( msg, var, _ENV_VAR_PAT_STR ) )",False,six.PY2,"isinstance(value, bytes)",0.6693143844604492
415,"def _make_doc_structure ( d, level ) : if d. is_redirect : return None if expand : res = dict ( d. get_json_data ( ) ) res [ ""subpages"" ] = [ ] else : res = { ""title"" : d. title, ""slug"" : d. slug, ""locale"" : d. locale, ""url"" : d. get_absolute_url ( ), ""subpages"" : [ ], } if level < depth : descendants = d. get_descendants ( 1 ) descendants. sort ( key = lambda item : item. title ) for descendant in descendants : sp = _make_doc_structure ( descendant, level + 1 ) if : res [ ""subpages"" ]. append ( sp ) return res",False,sp is not None,sp,0.6613863110542297
416,"def _setup_layer ( self, trainable = False, ** kwargs ) : """"""Constructs keras layer with relevant weights and losses."""""" super ( KerasLayer, self ). __init__ ( trainable = trainable, ** kwargs ) if hasattr ( self. _func, ""trainable_variables"" ) : for v in self. _func. trainable_variables : self. _add_existing_weight ( v, trainable = True ) trainable_variables = { id ( v ) for v in self. _func. trainable_variables } else : trainable_variables = set ( ) if hasattr ( self. _func, ""variables"" ) : for v in self. _func. variables : if : self. _add_existing_weight ( v, trainable = False ) if hasattr ( self. _func, ""regularization_losses"" ) : for l in self. _func. regularization_losses : if not callable ( l ) : raise ValueError ( ""hub.KerasLayer(obj) expects obj.regularization_losses to be an "" ""iterable of callables, each returning a scalar loss term."" ) self. add_loss ( self. _call_loss_if_trainable ( l ) )",False,id(v) not in trainable_variables,v != None,0.6515688896179199
417,"def process_signature ( app, what, name, obj, options, signature, return_annotation ) : if signature : signature = re. sub ( """", ""\g<1>"", signature ) signature = re. sub ( ""tensorflow"", ""tf"", signature ) if hasattr ( obj, ""use_scope"" ) : if : signature = signature [ 0 ] + ""variable_scope_name, "" + signature [ 1 : ] elif obj. use_scope is None : signature = signature [ 0 ] + ""[variable_scope_name,] "" + signature [ 1 : ] return signature, return_annotation",False,obj.use_scope,obj.use_scope is True,0.655645489692688
418,"def check_model_list_copy ( overwrite = False, max_per_line = 119 ) : """"""Check the model lists in the README and index.rst are consistent and maybe `overwrite`."""""" rst_list, start_index, end_index, lines = _find_text_in_file ( filename = os. path. join ( PATH_TO_DOCS, ""index.rst"" ), start_prompt = "" This list is updated automatically from the README"", end_prompt = "".. _bigtable:"", ) md_list = get_model_list ( ) converted_list = convert_to_rst ( md_list, max_per_line = max_per_line ) if converted_list!= rst_list : if : with open ( os. path. join ( PATH_TO_DOCS, ""index.rst"" ), ""w"", encoding = ""utf-8"", newline = ""\n"", ) as f : f. writelines ( lines [ : start_index ] + [ converted_list ] + lines [ end_index : ] ) else : raise ValueError ( ""The model list in the README changed and the list in `index.rst` has not been updated. Run "" ""`make fix-copies` to fix this."" )",True,overwrite,overwrite,0.6923564672470093
419,"def ExcludePath ( self, path ) : """"""Check to see if this is a service url and matches inbound_services."""""" skip = False for reserved_path in self. reserved_paths. keys ( ) : if : if ( not self. inbound_services or self. reserved_paths [ reserved_path ] not in self. inbound_services ) : return ( True, self. reserved_paths [ reserved_path ] ) return ( False, None )",False,path.startswith(reserved_path),path in self.inbound_services[reserved_path],0.6445935368537903
420,"def _parse_firstline ( self, line ) : try : if self. kind == 2 : try : self. _parse_request_line ( line ) except InvalidRequestLine : self. _parse_response_line ( line ) elif self. kind == 1 : self. _parse_response_line ( line ) elif : self. _parse_request_line ( line ) except InvalidRequestLine as e : self. errno = BAD_FIRST_LINE self. errstr = str ( e ) return False return True",False,self.kind == 0,self.kind == 3,0.6692343950271606
421,"def compare_multiple_events ( i, expected_results, actual_results ) : events_in_a_row = [ ] j = i while j < len ( expected_results ) and isinstance ( actual_results [ j ], actual_results [ i ]. __class__ ) : events_in_a_row. append ( actual_results [ j ] ) j += 1 message = """" for event in events_in_a_row : for k in range ( i, j ) : passed, message = compare_events ( expected_results [ k ], event ) if : expected_results [ k ] = None break else : return i, False, message return j, True, """"",True,passed,passed,0.6899092197418213
422,"def get_default_region ( ) : region = """" if ""default"" in AWS_ACCOUNTS : if : endpoint = AWS_ACCOUNTS [ ""default"" ]. HOST. get ( ) if re. search ( SUBDOMAIN_ENDPOINT_RE, endpoint, re. IGNORECASE ) : region = re. search ( SUBDOMAIN_ENDPOINT_RE, endpoint, re. IGNORECASE ). group ( ""region"" ) elif re. search ( HYPHEN_ENDPOINT_RE, endpoint, re. IGNORECASE ) : region = re. search ( HYPHEN_ENDPOINT_RE, endpoint, re. IGNORECASE ). group ( ""region"" ) elif re. search ( DUALSTACK_ENDPOINT_RE, endpoint, re. IGNORECASE ) : region = re. search ( DUALSTACK_ENDPOINT_RE, endpoint, re. IGNORECASE ). group ( ""region"" ) elif AWS_ACCOUNTS [ ""default"" ]. REGION. get ( ) : region = AWS_ACCOUNTS [ ""default"" ]. REGION. get ( ) if region not in get_locations ( ) : LOG. warn ( ""Region, %s, not found in the list of supported regions: %s"" % ( region, "", "". join ( get_locations ( ) ) ) ) <",False,AWS_ACCOUNTS['default'].HOST.get(),AWS_ACCOUNTS['default'].HOST,0.6590912342071533
423,"def __init__ ( self, factors, contrast_matrices, num_columns ) : self. factors = tuple ( factors ) factor_set = frozenset ( factors ) if not isinstance ( contrast_matrices, dict ) : raise ValueError ( ""contrast_matrices must be dict"" ) for factor, contrast_matrix in six. iteritems ( contrast_matrices ) : if factor not in factor_set : raise ValueError ( ""Unexpected factor in contrast_matrices dict"" ) if : raise ValueError ( ""Expected a ContrastMatrix, not %r"" % ( contrast_matrix, ) ) self. contrast_matrices = contrast_matrices if not isinstance ( num_columns, six. integer_types ) : raise ValueError ( ""num_columns must be an integer"" ) self. num_columns = num_columns",False,"not isinstance(contrast_matrix, ContrastMatrix)",contrast_matrix is None,0.6513559818267822
424,"def resolve ( self, all_profiles, controls_manager = None ) : if self. resolved : return self. resolve_controls ( controls_manager ) self. resolved_selections = set ( self. selected ) if self. extends : if : msg = ( ""Profile {name} extends profile {extended}, but "" ""only profiles {known_profiles} are available for resolution."". format ( name = self. id_, extended = self. extends, known_profiles = list ( all_profiles. keys ( ) ), ) ) raise RuntimeError ( msg ) extended_profile = all_profiles [ self. extends ] extended_profile. resolve ( all_profiles, controls_manager ) self. extend_by ( extended_profile ) for uns in self. unselected : self. resolved_selections. discard ( uns ) self. unselected = [ ] self. extends = None self. selected = sorted ( self. resolved_selections ) self.
resolved = True",False,self.extends not in all_profiles,self.extended_profiles,0.6579504013061523 425,"def __init__ ( self, data_type = ""unsupervised"", transform = None, pre_transform = None, pre_filter = None, empty = False, args = None, ) : self. data_type = data_type self. url = ""https://cloud.tsinghua.edu.cn/f/2cac04ee904e4b54b4b2/?dl=1"" self. root = osp. join ( osp. dirname ( osp. realpath ( __file__ ) ), ""../.."", ""data"", ""CHEM"" ) super ( MoleculeDataset, self ). __init__ ( self. root, transform, pre_transform, pre_filter ) self. transform, self. pre_transform, self. pre_filter = ( transform, pre_transform, pre_filter, ) if not empty : if : self. data, self. slices = torch. load ( self. processed_paths [ 1 ] ) else : self. data, self. slices = torch. load ( self. processed_paths [ 0 ] )",False,data_type == 'unsupervised',args,0.6502131223678589 426,"def leave ( self, reason = None ) : try : if : log. info ( ""Leaving channel %s (%s)"", self, self. id ) self. _bot. api_call ( ""conversations.leave"", data = { ""channel"" : self. id } ) else : log. info ( ""Leaving group %s (%s)"", self, self. id ) self. _bot. api_call ( ""conversations.leave"", data = { ""channel"" : self. id } ) except SlackAPIResponseError as e : if e. error == ""user_is_bot"" : raise RoomError ( f""Unable to leave channel. {USER_IS_BOT_HELPTEXT}"" ) else : raise RoomError ( e ) self. _id = None",False,self.id.startswith('C'),self._id,0.6472506523132324 427,"def excluded_files ( self ) : ret = [ ] try : file_paths = [ os. path. normpath ( os. path. join ( os. path. relpath ( folder, self. folder ), el ) ). replace ( ""\\"", ""/"" ) for folder, dirpaths, fs in walk ( self. folder ) for el in fs + dirpaths ] if file_paths : paths = to_file_bytes ( ""\n"". join ( file_paths ) ) out = input_runner ( [ ""git"", ""check-ignore"", ""--stdin"" ], paths, self. folder ) grep_stdout = decode_text ( out ) ret = grep_stdout. splitlines ( ) except ( CalledProcessError, IOError, OSError ) as e : if : self. _output. warn ( ""Error checking excluded git files: %s. "" ""Ignoring excluded files"" % e ) ret = [ ] return ret",True,self._output,self._output,0.6693273782730103 428,"def find_internal_python_modules ( root_module : types. ModuleType, ) -> Sequence [ Tuple [ str, types. ModuleType ] ] : """"""Returns `(name, module)` for all Haiku submodules under `root_module`."""""" modules = set ( [ ( root_module. __name__, root_module ) ] ) visited = set ( ) to_visit = [ root_module ] while to_visit : mod = to_visit. pop ( ) visited. add ( mod ) for name in dir ( mod ) : obj = getattr ( mod, name ) if : if obj. __name__. startswith ( ""haiku"" ) : to_visit. append ( obj ) modules. add ( ( obj. __name__, obj ) ) return sorted ( modules )",False,inspect.ismodule(obj) and obj not in visited,obj not in visited,0.6522250175476074 429,"def __init__ ( self, msg = None, data = None, filename = None, password = None, vals = None, file_obj = None ) : self. p = None self. q = None self. g = None self. y = None self. x = None if file_obj is not None : self. _from_private_key ( file_obj, password ) return if filename is not None : self. _from_private_key_file ( filename, password ) return if ( msg is None ) and ( data is not None ) : msg = Message ( data ) if vals is not None : self. p, self. q, self. g, self. y = vals else : if msg is None : raise SSHException ( ""Key object may not be empty"" ) if : raise SSHException ( ""Invalid key"" ) self. p = msg. get_mpint ( ) self. q = msg. get_mpint ( ) self. g = msg. get_mpint ( ) self. y = msg. 
get_mpint ( ) self. size = util. bit_length ( self. p )",False,msg.get_text() != 'ssh-dss',not msg.has_mpint(),0.6491572856903076 430,"def test_broadcast ( self ) : """"""Test example broadcast functionality."""""" self. create_lang_connection ( ""1000000000"", ""en"" ) self. create_lang_connection ( ""1000000001"", ""en"" ) self. create_lang_connection ( ""1000000002"", ""en"" ) self. create_lang_connection ( ""1000000003"", ""es"" ) self. create_lang_connection ( ""1000000004"", ""es"" ) app. lang_broadcast ( ) self. assertEqual ( 2, len ( self. outbound ) ) for message in self. outbound : if : self. assertEqual ( 3, len ( message. connections ) ) elif message. text == ""hola"" : self. assertEqual ( 2, len ( message. connections ) )",False,message.text == 'hello',message.text == 'contradiction',0.6497737169265747 431,"def _map_args ( maps : dict, ** kwargs ) : output = { } for name, val in kwargs. items ( ) : if : assert isinstance ( maps [ name ], str ) output. update ( { maps [ name ] : val } ) else : output. update ( { name : val } ) for keys in maps. keys ( ) : if keys not in output. keys ( ) : pass return output",True,name in maps,name in maps,0.6803268194198608 432,"def parse_network_whitelist ( self, network_whitelist_location ) : networks = [ ] with open ( network_whitelist_location, ""r"" ) as text_file : for line in text_file : line = line. strip ( ). strip ( ""'"" ). strip ( '""' ) if : networks. append ( line ) return networks",False,isIPv4(line) or isIPv6(line),line and line.startswith(' < /,0.6468650102615356 433,"def h_line_down ( self, input ) : end_this_line = self. value. find ( ""\n"", self. cursor_position ) if end_this_line == - 1 : if : self. h_exit_down ( None ) else : self. cursor_position = len ( self. value ) else : self. cursor_position = end_this_line + 1 for x in range ( self. cursorx ) : if self. cursor_position > len ( self. value ) - 1 : break elif self. value [ self. cursor_position ] == ""\n"" : break else : self. cursor_position += 1",False,self.scroll_exit,self.cursorx == 0,0.6614938974380493 434,"def lookup_field ( name, obj, model_admin = None ) : opts = obj. _meta try : f = _get_non_gfk_field ( opts, name ) except ( FieldDoesNotExist, FieldIsAForeignKeyColumnName ) : if : attr = name value = attr ( obj ) elif ( model_admin is not None and hasattr ( model_admin, name ) and not name == ""__str__"" and not name == ""__unicode__"" ) : attr = getattr ( model_admin, name ) value = attr ( obj ) else : attr = getattr ( obj, name ) if callable ( attr ) : value = attr ( ) else : value = attr f = None else : attr = None value = getattr ( obj, name ) return f, attr, value",False,callable(name),model_admin is None,0.6619394421577454 435,"def _update_module_index ( self ) : self. debug ( ""Updating index file..."" ) self. _module_index = [ ] path = os. path. join ( self. home_path, ""modules.yml"" ) if os. path. exists ( path ) : with open ( path, ""r"" ) as infile : self. _module_index = yaml. safe_load ( infile ) for module in self. _module_index : status = ""not installed"" if module [ ""path"" ] in self. _loaded_category. get ( ""disabled"", [ ] ) : status = ""disabled"" elif module [ ""path"" ] in self. _loaded_modules. keys ( ) : status = ""installed"" loaded = self. _loaded_modules [ module [ ""path"" ] ] if : status = ""outdated"" module [ ""status"" ] = status",False,loaded.meta['version'] != module['version'],loaded,0.6473489999771118 436,"def test_nce ( self ) : window_size = 5 words = [ ] for i in range ( window_size ) : words. append ( layers. 
data ( name = ""word_{0}"". format ( i ), shape = [ 1 ], dtype = ""int64"" ) ) dict_size = 10000 label_word = int ( window_size // 2 ) + 1 embs = [ ] for i in range ( window_size ) : if : continue emb = layers. embedding ( input = words [ i ], size = [ dict_size, 32 ], param_attr = ""emb.w"", is_sparse = True ) embs. append ( emb ) embs = layers. concat ( input = embs, axis = 1 ) loss = layers. nce ( input = embs, label = words [ label_word ], num_total_classes = dict_size, param_attr = ""nce.w"", bias_attr = ""nce.b"", ) avg_loss = layers. mean ( loss ) self. assertIsNotNone ( avg_loss ) print ( str ( default_main_program ( ) ) )",False,i == label_word,label_word == -1,0.6597427725791931 437,"def create_if_compatible ( cls, typ : Type, *, root : ""RootNode"" ) -> Optional [ ""Node"" ] : if cls. compatible_types : target_type : Type = typ if : target_type = getattr ( typ, ""__origin__"", None ) or typ if cls. _issubclass ( target_type, cls. compatible_types ) : return cls ( typ, root = root ) return None",False,cls.use_origin,"hasattr(typ, '__origin__')",0.6618081331253052 438,"def generator ( ) : """"""Yields mutations."""""" if not self. is_attribute_of_class or not first_posarg or not substs : return try : inst = abstract_utils. get_atomic_value ( first_posarg, Instance ) except abstract_utils. ConversionError : return if inst. cls. template : for subst in substs : for k, v in subst. items ( ) : if : value = inst. instance_type_parameters [ k ]. AssignToNewVariable ( node ) value. PasteVariable ( v, node ) yield function. Mutation ( inst, k, value )",True,k in inst.instance_type_parameters,k in inst.instance_type_parameters,0.6550009250640869 439,"def set_sequences ( self, sequences ) : """"""Set sequences using the given name-to-key-list dictionary."""""" f = open ( os. path. join ( self. _path, "".mh_sequences"" ), ""r+"", encoding = ""ASCII"" ) try : os. close ( os. open ( f. name, os. O_WRONLY | os. O_TRUNC ) ) for name, keys in sequences. items ( ) : if len ( keys ) == 0 : continue f. write ( name + "":"" ) prev = None completing = False for key in sorted ( set ( keys ) ) : if : if not completing : completing = True f. write ( ""-"" ) elif completing : completing = False f. write ( ""%s %s"" % ( prev, key ) ) else : f. write ( "" %s"" % key ) prev = key if completing : f. write ( str ( prev ) + ""\n"" ) else : f. write ( ""\n"" ) finally : _sync_close ( f )",False,key - 1 == prev,prev is not None,0.6632115840911865 440,"def on_load_status_changed ( self, widget, * args ) : if widget. get_property ( ""load-status"" ) == WebKit. LoadStatus. FINISHED : self. _go_back_button. set_sensitive ( widget. can_go_back ( ) ) self. _forward_button. set_sensitive ( widget. can_go_forward ( ) ) self. on_size_allocate ( widget ) if : self. is_loaded = True self. emit ( ""loaded"" )",False,self.is_loaded == False,not self.is_loaded,0.6534794569015503 441,"def _get_parents_data ( self, data ) : parents = 0 if data [ COLUMN_PARENT ] : family = self. db. get_family_from_handle ( data [ COLUMN_PARENT ] [ 0 ] ) if family. get_father_handle ( ) : parents += 1 if : parents += 1 return parents",False,family.get_mother_handle(),family.get_hather_handle(),0.6529322862625122 442,"def jobFinished ( self, job ) : logger. debug ( ""job %s finished"", job. id ) if job. id in self. activeJobs : self. last_finish_time = time. time ( ) del self. activeJobs [ job. id ] self. activeJobsQueue. remove ( job ) for tid in self. jobTasks [ job. id ] : self. driver. killTask ( Dict ( value = tid ) ) del self. 
jobTasks [ job. id ] if : self. agentTasks. clear ( ) for tid, jid in six. iteritems ( self. taskIdToJobId ) : if jid not in self. activeJobs : logger. debug ( ""kill task %s, because it is orphan"", tid ) self. driver. killTask ( Dict ( value = tid ) )",False,not self.activeJobs,self.agentTasks and self.agentTasks[0],0.656494677066803 443,"def _validate_tag_field ( value ) : for tag in value : if isinstance ( tag, str ) : continue if isinstance ( tag, ( list, tuple ) ) and len ( tag ) == 2 : name = tag [ 0 ] color = tag [ 1 ] if : if color is None or color == """" : continue if isinstance ( color, str ) and re. match ( ""^\#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$"", color ) : continue raise ValidationError ( _ ( ""Invalid tag '{value}'. The color is not a "" ""valid HEX color or null."" ). format ( value = tag ) ) raise ValidationError ( _ ( ""Invalid tag '{value}'. it must be the name or a pair "" '\'[""name"", ""hex color/"" | null]\'.' ). format ( value = tag ) )",False,"isinstance(name, str)",name is None or name == '',0.6513210535049438 444,"def _process_dataloader_aggregated_steps ( result, weights ) : internal_keys = { ""meta"" } moved = False for k, v in result. items ( ) : if : continue if not isinstance ( v, torch. Tensor ) : v = torch. tensor ( v ) if not moved : weights = weights. to ( v. device ) moved = True weights_t = weights [ : v. size ( 0 ) ] numerator = torch. dot ( v. float ( ), weights_t. transpose ( - 1, 0 ). float ( ) ) v = numerator / weights. sum ( ). float ( ) result [ k ] = v",True,k in internal_keys,k in internal_keys,0.6643571853637695 445,"def __call__ ( cls, * args, ** kwargs ) : obj = cls. __new__ ( cls, * args, ** kwargs ) from. keras_model import KerasModel if issubclass ( cls, KerasModel ) : from tensorflow. keras import backend as K if K. backend ( )!= ""tensorflow"" : obj. __init__ ( * args, ** kwargs ) return obj K. clear_session ( ) obj. graph = tf. Graph ( ) with obj. graph. as_default ( ) : if : obj. sess = cls. _config_session ( ) else : obj. sess = tf. Session ( ) else : obj. graph = tf. Graph ( ) for meth in dir ( obj ) : if meth == ""__class__"" : continue attr = getattr ( obj, meth ) if callable ( attr ) : if issubclass ( cls, KerasModel ) : wrapped_attr = _keras_wrap ( attr, obj. sess ) else : wrapped_attr = _graph_wrap ( attr, obj. graph ) setattr ( obj, meth, wrapped_attr ) obj. __init__ ( * args, ** kwargs ) return obj",True,"hasattr(cls, '_config_session')","hasattr(cls, '_config_session')",0.6550287008285522 446,"def __getattr__ ( self, attr ) : if attr. startswith ( ""_"" ) : raise AttributeError ( attr ) for name in self : if : break else : raise AttributeError ( ""No partition %r"" % attr ) path = os. path. join ( self. by_name_dir, name ) with context. quiet : devpath = readlink ( path ) devname = os. path. basename ( devpath ) for blocks, name in self. iter_proc_partitions ( ) : if name in ( devname, attr ) : break else : log. error ( ""Could not find size of partition %r"" % name ) return Partition ( devpath, attr, int ( blocks ) )",False,name == attr,"name in (attr, attr)",0.6705190539360046 447,"def validate ( self, value ) : try : value = [ datetime. datetime. strptime ( range, ""%Y-%m-%d %H:%M:%S"" ) for range in value. split ( "" to "" ) ] if : return True else : return False except ValueError : return False",False,len(value) == 2 and value[0] <= value[1],value,0.6514225602149963 448,"def _print_cell ( self, space, text, align, width, x, y, fg, attr, bg ) : if space : self. _frame. canvas. print_at ( self. 
_space_delimiter * space, x, y, fg, attr, bg ) paint_text = _enforce_width ( text, width, self. _frame. canvas. unicode_aware ) text_size = self. string_len ( str ( paint_text ) ) if text_size < width : buffer_1 = buffer_2 = """" if align == ""<"" : buffer_2 = "" "" * ( width - text_size ) elif align == "">"" : buffer_1 = "" "" * ( width - text_size ) elif : start_len = int ( ( width - text_size ) / 2 ) buffer_1 = "" "" * start_len buffer_2 = "" "" * ( width - text_size - start_len ) paint_text = paint_text. join ( [ buffer_1, buffer_2 ] ) self. _frame. canvas. paint ( str ( paint_text ), x + space, y, fg, attr, bg, colour_map = paint_text. colour_map if hasattr ( paint_text, ""colour_map"" ) else None, )",False,align == '^',align == '<',0.6753184795379639 449,"def create_warehouse ( warehouse_name, properties = None, company = None ) : if not company : company = ""_Test Company"" warehouse_id = erpnext. encode_company_abbr ( warehouse_name, company ) if not frappe. db. exists ( ""Warehouse"", warehouse_id ) : warehouse = frappe. new_doc ( ""Warehouse"" ) warehouse. warehouse_name = warehouse_name warehouse. parent_warehouse = ""All Warehouses - _TCUV"" warehouse. company = company warehouse. account = get_warehouse_account ( warehouse_name, company ) if : warehouse. update ( properties ) warehouse. save ( ) return warehouse. name else : return warehouse_id",True,properties,properties,0.6929315328598022 450,"def test_clifford_circuit ( ) : ( q0, q1 ) = ( cirq. LineQubit ( 0 ), cirq. LineQubit ( 1 ) ) circuit = cirq. Circuit ( ) np. random. seed ( 0 ) for _ in range ( 100 ) : x = np. random. randint ( 7 ) if x == 0 : circuit. append ( cirq. X ( np. random. choice ( ( q0, q1 ) ) ) ) elif : circuit. append ( cirq. Z ( np. random. choice ( ( q0, q1 ) ) ) ) elif x == 2 : circuit. append ( cirq. Y ( np. random. choice ( ( q0, q1 ) ) ) ) elif x == 3 : circuit. append ( cirq. S ( np. random. choice ( ( q0, q1 ) ) ) ) elif x == 4 : circuit. append ( cirq. H ( np. random. choice ( ( q0, q1 ) ) ) ) elif x == 5 : circuit. append ( cirq. CNOT ( q0, q1 ) ) elif x == 6 : circuit. append ( cirq. CZ ( q0, q1 ) ) clifford_simulator = cirq. CliffordSimulator ( ) state_vector_simulator = cirq. Simulator ( ) np. testing. assert_almost_equal ( clifford_simulator. simulate ( circuit ). final_state. state_vector ( ), state_vector_simulator. simulate ( circuit ). final_state_vector, )",True,x == 1,x == 1,0.6688321828842163 451,"def _find_localserver_module ( ) : import win32com. server path = win32com. server. __path__ [ 0 ] baseName = ""localserver"" pyfile = os. path. join ( path, baseName + "".py"" ) try : os. stat ( pyfile ) except os. error : if : ext = "".pyc"" else : ext = "".pyo"" pyfile = os. path. join ( path, baseName + ext ) try : os. stat ( pyfile ) except os. error : raise RuntimeError ( ""Can not locate the Python module 'win32com.server.%s'"" % baseName ) return pyfile",False,__debug__,sys.platform == 'win32com',0.6698858737945557 452,"def conv2d ( input : PTensor, weight : PTensor, bias : PTensor = None, stride = 1, padding = 0, dilation = 1, groups = 1, mode = None, ) : """"""Standard conv2d. Returns the input if weight=None."""""" if weight is None : return input ind = None if mode is not None : if padding!= 0 : raise ValueError ( ""Cannot input both padding and mode."" ) if mode == ""same"" : padding = ( weight. shape [ 2 ] // 2, weight. shape [ 3 ] // 2 ) if : ind = ( slice ( - 1 ) if weight. shape [ 2 ] % 2 == 0 else slice ( None ), slice ( - 1 ) if weight. 
shape [ 3 ] % 2 == 0 else slice ( None ), ) elif mode == ""valid"" : padding = ( 0, 0 ) elif mode == ""full"" : padding = ( weight. shape [ 2 ] - 1, weight. shape [ 3 ] - 1 ) else : raise ValueError ( ""Unknown mode for padding."" ) assert bias is None out = FConv2D ( input, weight, stride = stride, padding = padding, dilation = dilation, groups = groups ) if ind is None : return out return out [ :, :, ind [ 0 ], ind [ 1 ] ]",False,weight.shape[2] % 2 == 0 or weight.shape[3] % 2 == 0,padding != 0,0.6526058912277222 453,"def remove_testcases_from_directories ( directories ) : """"""Removes all testcases and their dependencies from testcase directories."""""" generators = [ ] for directory in directories : if : continue bot_testcases_file_path = utils. get_bot_testcases_file_path ( directory ) shell. remove_file ( bot_testcases_file_path ) generators. append ( shell. walk ( directory ) ) for generator in generators : for structure in generator : base_directory = structure [ 0 ] for filename in structure [ 2 ] : if not is_testcase_resource ( filename ) : continue if filename. startswith ( RESOURCES_PREFIX ) : resources_file_path = os. path. join ( base_directory, filename ) resources = read_resource_list ( resources_file_path ) for resource in resources : shell. remove_file ( resource ) file_path = os. path. join ( base_directory, filename ) shell. remove_file ( file_path )",False,not directory.strip(),not is_testcase_path(directory),0.6535124778747559 454,def test_one_dead_branch ( ) : with deterministic_PRNG ( ) : seen = set ( ) @ run_to_buffer def x ( data ) : i = data. draw_bytes ( 1 ) [ 0 ] if i > 0 : data. mark_invalid ( ) i = data. draw_bytes ( 1 ) [ 0 ] if : seen. add ( i ) elif i not in seen : data. mark_interesting ( ),False,len(seen) < 255,i > 0,0.6616944074630737 455,"def _ ( value ) : retVal = value if value and isinstance ( value, basestring ) and len ( value ) % 2 == 0 : retVal = hexdecode ( retVal ) if not kb. binaryField : if Backend. isDbms ( DBMS. MSSQL ) and value. startswith ( ""0x"" ) : try : retVal = retVal. decode ( ""utf-16-le"" ) except UnicodeDecodeError : pass elif : try : retVal = retVal. decode ( ""utf-16-be"" ) except UnicodeDecodeError : pass if not isinstance ( retVal, unicode ) : retVal = getUnicode ( retVal, ""utf8"" ) return retVal",False,Backend.isDbms(DBMS.HSQLDB),"isinstance(retVal, unicode)",0.6577309966087341 456,"def mapping ( self ) : m = { } if getGdriveCredentialsFile ( ) is not None : m [ ""gdrive"" ] = """" unknown = 0 for f in self. scan : bits = f. split ( ""#"", 2 ) if len ( bits ) == 1 : label = os. path. basename ( f ) else : label = bits [ 1 ] if : label = ""L"" + str ( unknown ) unknown += 1 m [ label ] = bits [ 0 ] return m",False,not label or len(label) == 0 or label == '',unknown > 0,0.6542110443115234 457,"def update_schedulers ( self, start = False ) : applications_folder = os. path. join ( self. options. folder, ""applications"" ) available_apps = [ arq for arq in os. listdir ( applications_folder ) if os. path. isdir ( os. path. join ( applications_folder, arq ) ) ] with self. scheduler_processes_lock : self. schedmenu. delete ( 0, ""end"" ) for arq in available_apps : if arq not in self. scheduler_processes : item = lambda a = arq : self. try_start_scheduler ( a ) self. schedmenu. add_command ( label = ""start %s"" % arq, command = item ) if : item = lambda a = arq : self. try_stop_scheduler ( a ) self. schedmenu. add_command ( label = ""stop %s"" % arq, command = item ) if start and self. options. with_scheduler and self. options. 
schedulers : apps = [ ag. split ( "":"", 1 ) [ 0 ] for ag in self. options. schedulers ] else : apps = [ ] for app in apps : self. try_start_scheduler ( app )",False,arq in self.scheduler_processes,start,0.6556118726730347 458,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : self. set_socket_descriptor ( d. getPrefixedString ( ) ) continue if tt == 16 : self. set_requested_events ( d. getVarInt32 ( ) ) continue if tt == 24 : self. set_observed_events ( d. getVarInt32 ( ) ) continue if : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 0,tt > 255,0.6810952425003052 459,"def test_adjust_random_hue_in_yiq ( ) : x_shapes = [ [ 2, 2, 3 ], [ 4, 2, 3 ], [ 2, 4, 3 ], [ 2, 5, 3 ], [ 1000, 1, 3 ], ] test_styles = [ ""all_random"", ""rg_same"", ""rb_same"", ""gb_same"", ""rgb_same"", ] for x_shape in x_shapes : for test_style in test_styles : x_np = np. random. rand ( * x_shape ) * 255.0 delta_h = ( np. random. rand ( ) * 2.0 - 1.0 ) * np. pi if test_style == ""all_random"" : pass elif test_style == ""rg_same"" : x_np [..., 1 ] = x_np [..., 0 ] elif : x_np [..., 2 ] = x_np [..., 0 ] elif test_style == ""gb_same"" : x_np [..., 2 ] = x_np [..., 1 ] elif test_style == ""rgb_same"" : x_np [..., 1 ] = x_np [..., 0 ] x_np [..., 2 ] = x_np [..., 0 ]",True,test_style == 'rb_same',test_style == 'rb_same',0.6514049768447876 460,"def _get_current_status ( self ) : if self. source : if : return self. current_job. status elif not self. last_job : return ""never updated"" else : return self. last_job. status else : return ""none""",False,self.current_job and self.current_job.status,self.current_job,0.6538392305374146 461,"def test_summary ( ) : if debug_mode : if ""summary"" not in to_test : return else : print ( ""\n\nSUMMARY"", end = """" ) for ds in datasets : for dt in ds. dt_s_list : if debug_mode : print ( ""\n"" + dt_s_tup_to_string ( dt ) + "": "", end = """" ) results_sm [ ds ] [ dt ]. summary ( alpha = 0.05 ) exog = results_sm_exog [ ds ] [ dt ]. exog is not None if : results_sm_exog [ ds ] [ dt ]. summary ( alpha = 0.05 ) exog_coint = results_sm_exog_coint [ ds ] [ dt ]. exog_coint is not None if exog_coint is not None : results_sm_exog_coint [ ds ] [ dt ]. summary ( alpha = 0.05 )",True,exog is not None,exog is not None,0.6630815267562866 462,"def test_socketserver ( self ) : """"""Using socketserver to create and manage SSL connections."""""" server = make_https_server ( self, certfile = CERTFILE ) if : sys. stdout. write ( ""\n"" ) with open ( CERTFILE, ""rb"" ) as f : d1 = f. read ( ) d2 = """" url = ""https://localhost:%d/%s"" % ( server. port, os. path. split ( CERTFILE ) [ 1 ] ) context = ssl. create_default_context ( cafile = CERTFILE ) f = urllib. request. urlopen ( url, context = context ) try : dlen = f. info ( ). get ( ""content-length"" ) if dlen and ( int ( dlen ) > 0 ) : d2 = f. read ( int ( dlen ) ) if : sys. stdout. write ( "" client: read %d bytes from remote server '%s'\n"" % ( len ( d2 ), server ) ) finally : f. close ( ) self. assertEqual ( d1, d2 )",False,support.verbose,d1 and d2,0.6597669720649719 463,"def lex_number ( self, pos ) : start = pos found_dot = False while pos < len ( self. string ) and ( self. string [ pos ]. isdigit ( ) or self. string [ pos ] == ""."" ) : if : if found_dot is True : raise ValueError ( ""Invalid number. Found multiple '.'"" ) found_dot = True pos += 1 val = self. string [ start : pos ] return Token ( TokenType. 
LNUM, val, len ( val ) )",True,self.string[pos] == '.',self.string[pos] == '.',0.6576566100120544 464,"def process_deps ( pipe, pkg, pkgdest, provides, requires ) : file = None for line in pipe. split ( ""\n"" ) : m = file_re. match ( line ) if m : file = m. group ( 1 ) file = file. replace ( pkgdest + ""/"" + pkg, """" ) file = file_translate ( file ) continue m = dep_re. match ( line ) if not m or not file : continue type, dep = m. groups ( ) if type == ""R"" : i = requires elif : i = provides else : continue if dep. startswith ( ""python("" ) : continue if dep. startswith ( ""perl(VMS::"" ) or dep. startswith ( ""perl(Mac::"" ) : continue if dep. startswith ( ""perl("" ) and dep. endswith ( "".pl)"" ) : continue if dep. startswith ( ""perl"" ) and r. search ( dep ) : dep = dep. split ( ) [ 0 ] dep = r. sub ( r""(\g<0>)"", dep ) if file not in i : i [ file ] = [ ] i [ file ]. append ( dep ) return",False,type == 'P',type == 'VMS',0.6643229722976685 465,"def translate ( self ) : if self. offset : raise RuntimeError ( ""Parser is a one time instance."" ) while True : m = self. re_split. search ( self. source [ self. offset : ] ) if m : text = self. source [ self. offset : self. offset + m. start ( ) ] self. text_buffer. append ( text ) self. offset += m. end ( ) if : line, sep, _ = self. source [ self. offset : ]. partition ( ""\n"" ) self. text_buffer. append ( m. group ( 2 ) + m. group ( 5 ) + line + sep ) self. offset += len ( line + sep ) + 1 continue elif m. group ( 5 ) : depr ( ""Escape code lines with a backslash."" ) line, sep, _ = self. source [ self. offset : ]. partition ( ""\n"" ) self. text_buffer. append ( m. group ( 2 ) + line + sep ) self. offset += len ( line + sep ) + 1 continue self. flush_text ( ) self. read_code ( multiline = bool ( m. group ( 4 ) ) ) else : break self. text_buffer. append ( self. source [ self. offset : ] ) self. flush_text ( ) return """". join ( self. code_buffer )",False,m.group(1),self.offset,0.6491327881813049 466,"def test_invite_generation ( event, default_account ) : from inbox. events. ical import generate_icalendar_invite event. sequence_number = 1 event. participants = [ { ""email"" : ""helena@nylas.com"" }, { ""email"" : ""myles@nylas.com"" } ] cal = generate_icalendar_invite ( event ) assert cal [ ""method"" ] == ""REQUEST"" for component in cal. walk ( ) : if component. name == ""VEVENT"" : assert component. get ( ""summary"" ) == event. title assert int ( component. get ( ""sequence"" ) ) == event. sequence_number assert component. get ( ""location"" ) == event. location attendees = component. get ( ""attendee"", [ ] ) if : attendees = [ attendees ] for attendee in attendees : email = unicode ( attendee ) if email. lower ( ). startswith ( ""mailto:"" ) : email = email [ 7 : ] assert email in [ ""helena@nylas.com"", ""myles@nylas.com"" ]",False,"not isinstance(attendees, list)",attendees,0.6533714532852173 467,"def remove_duplicate_association ( ) : bind = op. get_bind ( ) session = Session ( bind = bind ) results = session. query ( AssociationTable ). all ( ) seen = { } for result in results : if : print ( ""[-] Duplicate association marked for deletion: {} - {}"". format ( result. user_id, result. account_id ) ) session. delete ( result ) else : seen [ ""{}-{}"". format ( result. user_id, result. account_id ) ] = True print ( ""[-->] Deleting duplicate associations..."" ) session. commit ( ) session. 
flush ( ) print ( ""[@] Deleted all duplicate associations."" )",False,"seen.get('{}-{}'.format(result.user_id, result.account_id))",result.user_id != result.account_id or seen[result.account_id] != result.account_id,0.6486219763755798 468,"def set_meta ( self, dataset, overwrite = True, ** kwd ) : """"""Sets the metadata information for datasets previously determined to be in bed format."""""" i = 0 if dataset. has_data ( ) : for i, line in enumerate ( open ( dataset. file_name ) ) : line = line. rstrip ( ""\r\n"" ) if line and not line. startswith ( ""#"" ) : elems = line. split ( ""\t"" ) if len ( elems ) > 2 : if len ( elems ) > 3 : if : dataset. metadata. nameCol = 4 if len ( elems ) < 6 : if overwrite or not dataset. metadata. element_is_set ( ""strandCol"" ) : dataset. metadata. strandCol = 0 else : if overwrite or not dataset. metadata. element_is_set ( ""strandCol"" ) : dataset. metadata. strandCol = 6 break Tab",False,overwrite or not dataset.metadata.element_is_set('nameCol'),len(dataset.metadata.nameCol) == 0,0.6515799760818481 469,"def __remote_port ( self ) : port = 22 if self. git_has_remote : m = re. match ( r""^(.*?)?@([^/:]*):?([0-9]+)?"", self. git_remote. url ) if : if m. group ( 3 ) : port = m. group ( 3 ) return int ( port )",True,m,m,0.7070086002349854 470,"def startEntryElement ( self, name, qname, attrs ) : """"""Set new entry with id and the optional entry source (PRIVATE)."""""" if name!= ( None, ""entry"" ) : raise ValueError ( ""Expected to find the start of an entry element"" ) if qname is not None : raise RuntimeError ( ""Unexpected qname for entry element"" ) record = SeqRecord ( """", id = None ) if self. speciesName is not None : record. annotations [ ""organism"" ] = self. speciesName if self. ncbiTaxID is not None : record. annotations [ ""ncbi_taxid"" ] = self. ncbiTaxID record. annotations [ ""source"" ] = self. source for key, value in attrs. items ( ) : namespace, localname = key if namespace is None : if : record. id = value elif localname == ""source"" : record. annotations [ ""source"" ] = value else : raise ValueError ( ""Unexpected attribute %s in entry element"" % localname ) else : raise ValueError ( ""Unexpected namespace '%s' for entry attribute"" % namespace ) if record. id is None : raise ValueError ( ""Failed to find entry ID"" ) self. records. append ( record ) self. startElementNS = self. startEntryFieldElement self. endElementNS = self. endEntryElement",True,localname == 'id',localname == 'id',0.659353494644165 471,"def process_error ( self, data ) : if data. get ( ""error"" ) : if : raise AuthCanceled ( self, data. get ( ""error_description"", """" ) ) raise AuthFailed ( self, data. get ( ""error_description"" ) or data [ ""error"" ] ) elif ""denied"" in data : raise AuthCanceled ( self, data [ ""denied"" ] )",False,'denied' in data['error'] or 'cancelled' in data['error'],'error_description' in data,0.6532981395721436 472,"def __init__ ( self, endog, exog = None, rho = 1, missing = ""none"", hasconst = None, ** kwargs ) : if isinstance ( rho, ( int, np. integer ) ) : self. order = int ( rho ) self. rho = np. zeros ( self. order, np. float64 ) else : self. rho = np. squeeze ( np. asarray ( rho ) ) if : raise ValueError ( ""AR parameters must be a scalar or a vector"" ) if self. rho. shape == ( ) : self. rho. shape = ( 1, ) self. order = self. rho. shape [ 0 ] if exog is None : super ( GLSAR, self ). __init__ ( endog, np. ones ( ( endog. shape [ 0 ], 1 ) ), missing = missing, hasconst = None, ** kwargs ) else : super ( GLSAR, self ). 
__init__ ( endog, exog, missing = missing, ** kwargs )",False,"len(self.rho.shape) not in [0, 1]",not np.isscalar(self.rho),0.6566652655601501 473,"def __exit__ ( self, type_ = None, value = None, traceback = None ) : reset_Breakpoint ( ) sys. settrace ( None ) not_empty = """" if self. tracer. set_list : not_empty += ""All paired tuples have not been processed, "" not_empty += ""the last one was number %d"" % self. tracer. expect_set_no if type_ is not None and issubclass ( BdbNotExpectedError, type_ ) : if isinstance ( value, BaseException ) and value. args : err_msg = value. args [ 0 ] if not_empty : err_msg += ""\n"" + not_empty if : print ( err_msg ) return True else : self. test_case. fail ( err_msg ) else : assert False, ""BdbNotExpectedError with empty args"" if not_empty : if : print ( not_empty ) else : self. test_case. fail ( not_empty )",False,self.dry_run,self.debug,0.6586962342262268 474,"def __init__ ( self, addr, conf, log, fd = None ) : if fd is None : try : st = os. stat ( addr ) except OSError as e : if e. args [ 0 ]!= errno. ENOENT : raise else : if : os. remove ( addr ) else : raise ValueError ( ""%r is not a socket"" % addr ) super ( UnixSocket, self ). __init__ ( addr, conf, log, fd = fd )",False,stat.S_ISSOCK(st.st_mode),os.path.exists(addr),0.6496707201004028 475,def iter_open_logger_fds ( ) : seen = set ( ) loggers = list ( values ( logging. Logger. manager. loggerDict ) ) + [ logging. getLogger ( None ) ] for l in loggers : try : for handler in l. handlers : try : if : yield handler. stream seen. add ( handler ) except AttributeError : pass except AttributeError : pass,False,handler not in seen,"hasattr(handler, 'stream') and (not seen.has_key(handler))",0.6733078956604004 476,"def get_all_tracks ( self ) : try : listing = self. __MTPDevice. get_tracklisting ( callback = self. __callback ) except Exception as exc : logger. error ( ""unable to get file listing %s (%s)"" ) tracks = [ ] for track in listing : title = track. title if not title or title == """" : title = track. filename if len ( title ) > 50 : title = title [ 0 : 49 ] + ""..."" artist = track. artist if artist and len ( artist ) > 50 : artist = artist [ 0 : 49 ] + ""..."" length = track. filesize age_in_days = 0 date = self. __mtp_to_date ( track. date ) if : modified = track. date modified_sort = - 1 else : modified = util. format_date ( date ) modified_sort = date t = SyncTrack ( title, length, modified, modified_sort = modified_sort, mtptrack = track, podcast = artist, ) tracks. append ( t ) return tracks",False,not date,self.__mode == 'read_date',0.6768538951873779 477,"def _dup_file_descriptor ( self, source_fd, dest_fd, mode ) : source_fd = int ( source_fd ) if source_fd not in self. _descriptors : raise RedirectionError ( '""%s"" is not a valid file descriptor' % str ( source_fd ) ) source = self. _descriptors [ source_fd ] if source. mode ( )!= mode : raise RedirectionError ( 'Descriptor %s cannot be duplicated in mode ""%s""' % ( str ( source ), mode ) ) if dest_fd == ""-"" : del self. _descriptors [ source_fd ] source. close ( ) else : dest_fd = int ( dest_fd ) if : raise RedirectionError ( ""Cannot replace file descriptor %s"" % str ( dest_fd ) ) dest = self. _descriptors [ dest_fd ] if dest. mode ( )!= mode : raise RedirectionError ( 'Descriptor %s cannot be cannot be redirected in mode ""%s""' % ( str ( dest ), mode ) ) self. _descriptors [ dest_fd ] = source. dup ( ) dest. 
close ( )",False,dest_fd not in self._descriptors,dest_fd in self._descriptors,0.6681382656097412 478,"def get_maxcov_downsample_cl ( data, in_pipe = None ) : """"""Retrieve command line for max coverage downsampling, fitting into bamsormadup output."""""" max_cov = ( _get_maxcov_downsample ( data ) if dd. get_aligner ( data ) not in [ ""snap"" ] else None ) if max_cov : if in_pipe == ""bamsormadup"" : prefix = ""level=0"" elif : prefix = ""-l 0"" else : prefix = """" core_arg = """" return ""%s | variant - -b %s --mark-as-qc-fail --max-coverage %s"" % ( prefix, core_arg, max_cov, ) else : if in_pipe == ""bamsormadup"" : prefix = ""indexfilename={tx_out_file}.bai"" else : prefix = """" return prefix",False,in_pipe == 'samtools',in_pipe == 'gitgit',0.6557019948959351 479,"def __init__ ( self, fuzzer_name, job_types, stats_columns, group_by, date_start, date_end ) : assert group_by self. fuzzer_name = fuzzer_name self. job_types = job_types self. group_by = group_by self. date_start = date_start self. date_end = date_end self. job_run_query = None self. testcase_run_query = None job_run_fields = [ ] testcase_run_fields = [ ] fields = parse_stats_column_fields ( stats_columns ) for field in fields : if not isinstance ( field, QueryField ) : continue if field. table_alias == JobQuery. ALIAS : job_run_fields. append ( field ) elif : testcase_run_fields. append ( field ) if job_run_fields and self. group_by!= QueryGroupBy. GROUP_BY_TIME : self. job_run_query = JobQuery ( fuzzer_name, job_types, job_run_fields, group_by, date_start, date_end ) if testcase_run_fields : self. testcase_run_query = TestcaseQuery ( fuzzer_name, job_types, testcase_run_fields, group_by, date_start, date_end ) assert self. job_run_query or self. testcase_run_query, ""Unable to create query.""",False,field.table_alias == TestcaseQuery.ALIAS,field.table_alias == TestField.aliased_for_sql,0.6603524684906006 480,"def create_initial ( self ) : pkgs = dict ( ) with open ( self. initial_manifest, ""w+"" ) as manifest : manifest. write ( self. initial_manifest_file_header ) for var in self. var_maps [ self. manifest_type ] : if : split_pkgs = self. _split_multilib ( self. d. getVar ( var ) ) if split_pkgs is not None : pkgs = dict ( list ( pkgs. items ( ) ) + list ( split_pkgs. items ( ) ) ) else : pkg_list = self. d. getVar ( var ) if pkg_list is not None : pkgs [ self. var_maps [ self. manifest_type ] [ var ] ] = self. d. getVar ( var ) for pkg_type in pkgs : for pkg in pkgs [ pkg_type ]. split ( ) : manifest. write ( ""%s,%s\n"" % ( pkg_type, pkg ) )",False,var in self.vars_to_split,self.d.getVar(var) is not None,0.6522716283798218 481,"def parse_object_id ( _, values ) : if values : for key in values : if key. endswith ( ""_id"" ) : val = values [ key ] if : try : values [ key ] = utils. ObjectIdSilent ( val ) except : values [ key ] = None",False,len(val) > 10,"hasattr(val, '__getitem__') and val.__getitem__",0.6680481433868408 482,"def net ( self, input, output_stride = 32, class_dim = 1000, end_points = None, decode_points = None ) : self. stride = 2 self. block_point = 0 self. output_stride = output_stride self. decode_points = decode_points self. short_cuts = dict ( ) with scope ( self. backbone ) : data = self. entry_flow ( input ) if : return data, self. short_cuts data = self. middle_flow ( data ) if : return data, self. short_cuts data = self. exit_flow ( data ) if : return data, self. short_cuts data = fluid. layers. reduce_mean ( data, [ 2, 3 ], keep_dim = True ) data = fluid. layers. 
dropout ( data, 0.5 ) stdv = 1.0 / math. sqrt ( data. shape [ 1 ] * 1.0 ) with scope ( ""logit"" ) : out = fluid. layers. fc ( input = data, size = class_dim, param_attr = fluid. param_attr. ParamAttr ( name = ""fc_weights"", initializer = fluid. initializer. Uniform ( - stdv, stdv ), ), bias_attr = fluid. param_attr. Param",False,"check_points(self.block_point, end_points)",end_points,0.64871746301651 483,"def deploy_component ( self, components = None ) : if self. _train_dsl is None : raise ValueError ( ""Before deploy model, training should be finish!!!"" ) if components is None : components = self. _components deploy_cpns = [ ] for cpn in components : if isinstance ( cpn, str ) : deploy_cpns. append ( cpn ) elif isinstance ( cpn, Component ) : deploy_cpns. append ( cpn. name ) else : raise ValueError ( ""deploy component parameters is wrong, expect str or Component object, but {} find"". format ( type ( cpn ) ) ) if : raise ValueError ( ""Component {} does not exist in pipeline"". format ( deploy_cpns [ - 1 ] ) ) if isinstance ( self. _components. get ( deploy_cpns [ - 1 ] ), Reader ) : raise ValueError ( ""Reader should not be include in predict pipeline"" ) res_dict = self. _job_invoker. model_deploy ( model_id = self. _model_info. model_id, model_version = self. _model_info. model_version, cpn_list = deploy_cpns, ) self. _predict_dsl = self. _job_invoker. get_predict_dsl ( model_id = res_dict [ ""model_id"" ], model_version = res_dict [ ""model_version"" ] with open ( sys. argv [ 1 ], ""rb"" ) as f : for line in f. read ( ). split ( b""\n"" ) : line = BYTES2SYSTEMSTR ( line. strip ( ) ) if : continue elif line. startswith ( ""#"" ) : print ( line ) else : print ( "">>> "" + line ) os. system ( line ) sys. stdout. write ( ""\npress enter to continue..."" ) if PY3 : input ( ) else : raw_input ( ) sys. stdout. write ( ""\n"" )",False,not line,line.startswith(b'#'),0.6739428043365479 485,"def reposition_division ( f1 ) : lines = f1. splitlines ( ) if lines [ 2 ] == division : lines. pop ( 2 ) found = 0 for i, line in enumerate ( lines ) : if : found += 1 if found == 2 : if division in ""\n"". join ( lines ) : break lines. insert ( i + 1, """" ) lines. insert ( i + 2, division ) break return ""\n"". join ( lines )",False,"line.startswith('""""""')",line.startswith(division),0.6546669006347656 486,"def _WriteActionParams ( f, actions, counter ) : param_names = [ ] for key in sorted ( actions ) : action = actions [ key ] to_write = None if isinstance ( action, args. SetToString ) : if action. valid : to_write = action. valid elif isinstance ( action, args. SetNamedOption ) : if action. names : to_write = action. names elif : if action. names : to_write = action. names if to_write : uniq = counter. next ( ) var_name = ""params_%d"" % uniq _WriteStrArray ( f, var_name, to_write ) else : var_name = None param_names. append ( var_name ) return param_names",False,"isinstance(action, args.SetNamedAction)","isinstance(action, args.SetSlice)",0.6523057818412781 487,"def __lt__ ( self, other ) : try : A, B = self [ 0 ], other [ 0 ] if : if A == B : return self [ 2 ] < other [ 2 ] return A < B return self [ 1 ] < other [ 1 ] except IndexError : return NotImplemented",False,A and B,A == other,0.6828909516334534 488,"def reset ( self ) : self. tree. reset ( ) self. firstStartTag = False self. errors = [ ] self. log = [ ] self. compatMode = ""no quirks"" if self. innerHTMLMode : self. innerHTML = self. container. lower ( ) if self. innerHTML in cdataElements : self. tokenizer. state = self. tokenizer. 
rcdataState elif self. innerHTML in rcdataElements : self. tokenizer. state = self. tokenizer. rawtextState elif : self. tokenizer. state = self. tokenizer. plaintextState else : pass self. phase = self. phases [ ""beforeHtml"" ] self. phase. insertHtmlElement ( ) self. resetInsertionMode ( ) else : self. innerHTML = False self. phase = self. phases [ ""initial"" ] self. lastPhase = None self. beforeRCDataPhase = None self. framesetOK = True",False,self.innerHTML == 'plaintext',self.innerHTML,0.6636630296707153 489,"def get_host_metadata ( self ) : meta = { } if self. agent_url : try : resp = requests. get ( self. agent_url + ECS_AGENT_METADATA_PATH, timeout = 1 ). json ( ) if ""Version"" in resp : match = AGENT_VERSION_EXP. search ( resp. get ( ""Version"" ) ) if : meta [ ""ecs_version"" ] = match. group ( 1 ) except Exception as e : self. log. debug ( ""Error getting ECS version: %s"" % str ( e ) ) return meta",False,match is not None and len(match.groups()) == 1,match,0.64990234375 490,"def _build_request ( url : str ) -> HTTPResponse : user_passwd = None u = urllib. parse. urlparse ( url ) if u. username is not None or u. password is not None : if u. username and u. password : user_passwd = ""%s:%s"" % ( u. username, u. password ) host_port = u. hostname if : host_port += "":"" + str ( u. port ) url = urllib. parse. urlunparse ( u. _replace ( netloc = host_port ) ) req = urllib. request. Request ( url ) req. add_header ( ""User-Agent"", ""SABnzbd/%s"" % sabnzbd. __version__ ) req. add_header ( ""Accept-encoding"", ""gzip"" ) if user_passwd : req. add_header ( ""Authorization"", ""Basic "" + ubtou ( base64. b64encode ( utob ( user_passwd ) ) ). strip ( ), ) return urllib. request. urlopen ( req )",False,u.port,u.port and u.port,0.668384313583374 491,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. I64 : self. maxColLen = iprot. readI64 ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. DOUBLE : self. avgColLen = iprot. readDouble ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. I64 : self. numNulls = iprot. readI64 ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 3,fid == 3,0.6727274656295776 492,"def writeback ( self, doc_type, body, rule = None, match_body = None ) : if self. replace_dots_in_field_names : writeback_body = replace_dots_in_field_names ( body ) else : writeback_body = body for key in list ( writeback_body. keys ( ) ) : if : writeback_body [ key ] = dt_to_ts ( writeback_body [ key ] ) if self. debug : elastalert_logger. info ( ""Skipping writing to ES: %s"" % ( writeback_body ) ) return None if ""@timestamp"" not in writeback_body : writeback_body [ ""@timestamp"" ] = dt_to_ts ( ts_now ( ) ) try : index = self. writeback_es. resolve_writeback_index ( self. writeback_index, doc_type ) if self. writeback_es. is_atleastsixtwo ( ) : res = self. writeback_es. index ( index = index, body = body ) else : res = self. writeback_es. index ( index = index, doc_type = doc_type, body = body ) return res except ElasticsearchException as e : logging. 
exception ( ""Error writing alert info to Elasticsearch: %s"" % ( e ) )",False,"isinstance(writeback_body[key], datetime.datetime)",key in writeback_body,0.6496360301971436 493,"def update_sockets ( self ) : """"""Upate sockets based on selected operation"""""" inputs = self. inputs if self. operation in Q_operations : for a in ABC [ 1 : ] : if a in inputs : inputs. remove ( inputs [ a ] ) elif self. operation in QQ_operations : for a in ABC [ 2 : ] : if a in inputs : inputs. remove ( inputs [ a ] ) if not ""B"" in inputs : inputs. new ( ""SvQuaternionSocket"", ""B"" ) else : if not ""B"" in inputs : inputs. new ( ""SvQuaternionSocket"", ""B"" ) inputs [ ""Scale"" ]. hide_safe = self. operation!= ""SCALE"" outputs = self. outputs if self. operation in output_S_operations : outputs [ ""Quaternion"" ]. hide_safe = True if outputs [ ""Value"" ]. hide : outputs [ ""Value"" ]. hide_safe = False else : if : outputs [ ""Quaternion"" ]. hide_safe = False outputs [ ""Value"" ]. hide_safe = True self. update ( )",True,outputs['Quaternion'].hide,outputs['Quaternion'].hide,0.6670730710029602 494,"def _return_poolnumber ( self, nominated_pools ) : """"""Select pool form nominated pools."""""" selected_pool = - 1 min_ldn = 0 for pool in nominated_pools : nld = len ( pool [ ""ld_list"" ] ) if : selected_pool = pool [ ""pool_num"" ] min_ldn = nld if selected_pool < 0 : msg = _ ( ""No available pools found."" ) raise exception. VolumeBackendAPIException ( data = msg ) return selected_pool",False,selected_pool == -1 or min_ldn > nld,nld > min_ldn,0.6589834094047546 495,"def __call__ ( self, x : JaxArray, training : bool ) -> JaxArray : if self. stride > 1 : shortcut = objax. functional. max_pool_2d ( x, size = 1, strides = self. stride ) else : shortcut = x for i, ( bn_i, conv_i ) in enumerate ( self. layers ) : x = bn_i ( x, training ) x = self. activation_fn ( x ) if : shortcut = self. proj_conv ( x ) x = conv_i ( x ) return x + shortcut",False,i == 0 and self.use_projection,self.proj_conv is not None,0.6522647142410278 496,"def tokens ( self, event, next ) : kind, data, _ = event if kind == START : tag, attribs = data name = tag. localname namespace = tag. namespace converted_attribs = { } for k, v in attribs : if isinstance ( k, QName ) : converted_attribs [ ( k. namespace, k. localname ) ] = v else : converted_attribs [ ( None, k ) ] = v if : for token in self. emptyTag ( namespace, name, converted_attribs, not next or next [ 0 ]!= END or next [ 1 ]!= tag, ) : yield token else : yield self. startTag ( namespace, name, converted_attribs ) elif kind == END : name = data. localname namespace = data. namespace if namespace!= namespaces [ ""html"" ] or name not in voidElements : yield self. endTag ( namespace, name ) elif kind == COMMENT : yield self. comment ( data ) elif kind == TEXT : for token in self. text ( data ) : yield token elif kind == DOCTYPE : yield self. doctype ( * data ) elif kind in ( XML_NAMESPACE, DOCTYPE, START_NS, END_NS, START_CDATA, END_CDATA, PI ) : <",False,namespace == namespaces['html'] and name in voidElements,kind == START,0.6586155891418457 497,"def slowSorted ( qq ) : ""Reference sort peformed by insertion using only <"" rr = list ( ) for q in qq : i = 0 for i in range ( len ( rr ) ) : if : rr. insert ( i, q ) break else : rr. append ( q ) return rr",False,q < rr[i],i < len(q) - 1,0.6682837009429932 498,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. 
thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. db_name = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 2 : if : self. tbl_name = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. LIST : self. part_vals = [ ] ( _etype725, _size722 ) = iprot. readListBegin ( ) for _i726 in xrange ( _size722 ) : _elem727 = iprot. readString ( ) self. part_vals. append ( _elem727 ) ",True,ftype == TType.STRING,ftype == TType.STRING,0.663554310798645 499,"def set_random_avatar ( user ) : galleries = get_available_galleries ( include_default = True ) if not galleries : raise RuntimeError ( ""no avatar galleries are set"" ) avatars_list = [ ] for gallery in galleries : if : avatars_list = gallery [ ""images"" ] break else : avatars_list += gallery [ ""images"" ] random_avatar = random. choice ( avatars_list ) store. store_new_avatar ( user, Image. open ( random_avatar. image ) )",False,gallery['name'] == DEFAULT_GALLERY,include_default,0.6590063571929932 500,"def fetch_rvalue ( self ) -> List [ Token ] : """"""Fetch right-hand value of assignment."""""" tokens = [ ] while self. fetch_token ( ) : tokens. append ( self. current ) if self. current == [ OP, ""("" ] : tokens += self. fetch_until ( [ OP, "")"" ] ) elif self. current == [ OP, ""{"" ] : tokens += self. fetch_until ( [ OP, ""}"" ] ) elif self. current == [ OP, ""["" ] : tokens += self. fetch_until ( [ OP, ""]"" ] ) elif : tokens += self. fetch_until ( DEDENT ) elif self. current == [ OP, "";"" ] : break elif self. current. kind not in ( OP, NAME, NUMBER, STRING ) : break return tokens",False,self.current == INDENT,self.current == [DEDENT],0.6982951164245605 501,"def describe ( self, done = False ) : description = ShellCommand. describe ( self, done ) if done : if not description : description = [ ""compile"" ] description. append ( ""%d projects"" % self. getStatistic ( ""projects"", 0 ) ) description. append ( ""%d files"" % self. getStatistic ( ""files"", 0 ) ) warnings = self. getStatistic ( ""warnings"", 0 ) if warnings > 0 : description. append ( ""%d warnings"" % warnings ) errors = self. getStatistic ( ""errors"", 0 ) if : description. append ( ""%d errors"" % errors ) return description",True,errors > 0,errors > 0,0.6735945343971252 502,"def notify ( self, message = """", data = None, listener = None ) : if not data : data = { } nma = pynma. PyNMA ( ) keys = splitString ( self. conf ( ""api_key"" ) ) nma. addkey ( keys ) nma. developerkey ( self. conf ( ""dev_key"" ) ) response = nma. push ( application = self. default_title, event = message. split ( "" "" ) [ 0 ], description = message, priority = self. conf ( ""priority"" ), batch_mode = len ( keys ) > 1, ) successful = 0 for key in keys : if : log. error ( ""Could not send notification to NotifyMyAndroid (%s). %s"", ( key, response [ key ] [ ""message"" ] ), ) else : successful += 1 return successful == len ( keys )",False,not response[str(key)]['code'] == six.u('200'),"not self.notify_mute(key, listener)",0.6516914963722229 503,"def _run_and_test ( self, hparams, test_transform = False ) : scalar_data = ScalarData ( hparams ) self. assertEqual ( scalar_data. 
list_items ( ) [ 0 ], hparams [ ""dataset"" ] [ ""data_name"" ] ) iterator = DataIterator ( scalar_data ) i = 0 for batch in iterator : self. assertEqual ( set ( batch. keys ( ) ), set ( scalar_data. list_items ( ) ) ) value = batch [ scalar_data. data_name ] [ 0 ] if : self. assertEqual ( 2 * i, value ) else : self. assertEqual ( i, value ) i += 1 data_type = hparams [ ""dataset"" ] [ ""data_type"" ] if data_type == ""int"" : self. assertEqual ( value. dtype, torch. int32 ) elif data_type == ""float"" : self. assertEqual ( value. dtype, torch. float32 ) elif data_type == ""bool"" : self. assertTrue ( value. dtype, torch_bool ) self. assertIsInstance ( value, torch. Tensor )",True,test_transform,test_transform,0.665885329246521 504,"def check_WinExec ( self, emu ) : profile = emu. emu_profile_output. decode ( ) while True : offset = profile. find ( ""WinExec"" ) if offset < 0 : break profile = profile [ offset : ] p = profile. split ( "";"" ) if not p : profile = profile [ 1 : ] continue s = p [ 0 ]. split ( '""' ) if len ( s ) < 2 : profile = profile [ 1 : ] continue url = s [ 1 ] if : profile = profile [ 1 : ] continue self. retrieve_WinExec ( url ) profile = profile [ 1 : ]",False,not url.startswith('\\\\'),url.find('/') < 0,0.6551992893218994 505,"def convert_path ( ctx, tpath ) : for points, code in tpath. iter_segments ( ) : if code == Path. MOVETO : ctx. move_to ( * points ) elif code == Path. LINETO : ctx. line_to ( * points ) elif code == Path. CURVE3 : ctx. curve_to ( points [ 0 ], points [ 1 ], points [ 0 ], points [ 1 ], points [ 2 ], points [ 3 ] ) elif : ctx. curve_to ( * points ) elif code == Path. CLOSEPOLY : ctx. close_path ( )",False,code == Path.CURVE4,code == Path.CURVE,0.6630367040634155 506,"def msg_ser ( inst, sformat, lev = 0 ) : if sformat in [ ""urlencoded"", ""json"" ] : if : res = inst. serialize ( sformat, lev ) else : res = inst elif sformat == ""dict"" : if : res = inst. serialize ( sformat, lev ) elif isinstance ( inst, dict ) : res = inst elif isinstance ( inst, str ) : res = inst else : raise MessageException ( ""Wrong type: %s"" % type ( inst ) ) else : raise PyoidcError ( ""Unknown sformat"", inst ) return res",False,"isinstance(inst, Message)","isinstance(inst, list)",0.6594626903533936 507,"def tokeneater ( self, type, token, srow_scol, erow_ecol, line ) : srow, scol = srow_scol erow, ecol = erow_ecol if not self. started : if token in ( ""def"", ""class"", ""lambda"" ) : if : self. islambda = True self. started = True self. passline = True elif type == tokenize. NEWLINE : self. passline = False self. last = srow if self. islambda : raise EndOfBlock elif self. passline : pass elif type == tokenize. INDENT : self. indent = self. indent + 1 self. passline = True elif type == tokenize. DEDENT : self. indent = self. indent - 1 if self. indent <= 0 : raise EndOfBlock elif self. indent == 0 and type not in ( tokenize. COMMENT, tokenize. NL ) : raise EndOfBlock",False,token == 'lambda',token == tokenize.START,0.6736103296279907 508,"def precheck ( self, runner, script, info ) : if getattr ( script, ""modelVars"", None ) is None : script. modelVars = { } if isinstance ( info, ast. Assign ) : if isinstance ( info. value, ast. Call ) : if isinstance ( info. value. func, ast. Name ) : if : for target in info. targets : if isinstance ( target, ast. Name ) : script. modelVars [ target. id ] = info. value. func. id elif isinstance ( target, ast. Tuple ) : for elt in target. elts : if isinstance ( elt, ast. Name ) : script. modelVars [ elt. id ] = info. value. func. 
id else : for target in info. targets : if isinstance ( target, ast. Name ) : if target. id in script. modelVars : del script. modelVars [ target. id ] elif isinstance ( target, ast. Tuple ) : <" 509,"def _load_ui_modules ( self, modules ) -> None : if isinstance ( modules, types. ModuleType ) : self. _load_ui_modules ( dict ( ( n, getattr ( modules, n ) ) for n in dir ( modules ) ) ) elif isinstance ( modules, list ) : for m in modules : self. _load_ui_modules ( m ) else : assert isinstance ( modules, dict ) for name, cls in modules. items ( ) : try : if : self. ui_modules [ name ] = cls except TypeError : pass",False,"issubclass(cls, UIModule)",cls is not None,0.6570519208908081 510,"def postprocess_message ( self, msg ) : if msg [ ""type"" ] in ( ""subsample"", ""param"" ) and self. dim is not None : event_dim = msg [ ""kwargs"" ]. get ( ""event_dim"" ) if event_dim is not None : assert event_dim >= 0 dim = self. dim - event_dim shape = jnp. shape ( msg [ ""value"" ] ) if len ( shape ) >= - dim and shape [ dim ]!= 1 : if shape [ dim ]!= self. size : if msg [ ""type"" ] == ""param"" : statement = ""numpyro.param({},..., event_dim={})"". format ( msg [ ""name"" ], event_dim ) else : statement = ""numpyro.subsample(..., event_dim={})"". format ( event_dim ) raise ValueError ( ""Inside numpyro.plate({}, {}, dim={}) invalid shape of {}: {}"". format ( self. name, self. size, self. dim, statement, shape ) <",False,self.subsample_size < self.size,self.dim is not None,0.6525208353996277 511,"def forward ( self, input ) : s0 = s1 = self. stem ( input ) weights2 = None for i, cell in enumerate ( self. cells ) : if cell. reduction : weights = fluid. layers. softmax ( self. alphas_reduce ) if : n = 3 start = 2 weights2 = fluid. layers. softmax ( self. betas_reduce [ 0 : 2 ] ) for i in range ( self. _steps - 1 ) : end = start + n tw2 = fluid. layers. softmax ( self. betas_reduce [ start : end ] ) start = end n += 1 weights2 = fluid. layers. concat ( [ weights2, tw2 ] ) else : weights = fluid. layers. softmax ( self. alphas_normal ) if : n = 3 start = 2 weights2 = fluid. layers. softmax ( self. betas_normal [ 0 : 2 ] ) for i in range ( self. _steps - 1 ) : end = start + n tw2 = fluid. layers. softmax ( self. betas_normal [ start : end ] ) start = end",False,self._method == 'PC-DARTS',self.betas_normal is not None,0.6522391438484192 512,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. username = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 2 : if : self. password = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if : self. consumerKey = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 4 : if : self. consumerSecret = iprot. readString ( ) else : iprot. skip ( ftype ) else : <",False,ftype == TType.STRING,fid == 5,0.6608601808547974 513,"def include ( elem, loader = None ) : if loader is None : loader = default_loader i = 0 while i < len ( elem ) : e = elem [ i ] if : href = e. get ( ""href"" ) parse = e. get ( ""parse"", ""xml"" ) if parse == ""xml"" : node = loader ( href, parse ) if node is None : raise FatalIncludeError ( ""cannot load %r as %r"" % ( href, parse ) ) node = copy.
copy ( node ) if e. tail : node. tail = ( node. tail or """" ) + e. tail elem [ i ] = node elif parse == ""text"" : text = loader ( href, parse, e. get ( ""encoding"" ) ) if text is None : raise FatalIncludeError ( ""cannot load %r as %r"" % ( href, parse ) ) if i : node = elem [ i - 1 ] node. tail = ( node. tail or """" ) + text else : elem. text = ( elem. text or """" ) + text + ( e. tail or """" ) <",False,e.tag == XINCLUDE_INCLUDE,e,0.6595921516418457 514,"def make_row ( self ) : res = [ ] for i in range ( self. num_cols ) : t = sqlite3_column_type ( self. stmnt, i ) if t == SQLITE_INTEGER : res. append ( sqlite3_column_int ( self. stmnt, i ) ) elif t == SQLITE_FLOAT : res. append ( sqlite3_column_double ( self. stmnt, i ) ) elif : res. append ( sqlite3_column_text ( self. stmnt, i ) ) else : raise NotImplementedError return tuple ( res )",True,t == SQLITE_TEXT,t == SQLITE_TEXT,0.6896180510520935 515,def startNotificationService ( self ) : if self. __enabled : if self. __polling_service : self. __polling_service. startNotificationService ( ) if : self. __os_file_service. startNotificationService ( ),False,self.os_notifications_available,self.__os_file_service,0.6588118076324463 516,"def add_to_path ( self, fnames ) : """"""Add fnames to path"""""" indexes = [ ] for path in fnames : project = self. get_source_project ( path ) if : self. parent_widget. emit ( SIGNAL ( ""pythonpath_changed()"" ) ) indexes. append ( self. get_index ( path ) ) if indexes : self. reset_icon_provider ( ) for index in indexes : self. update ( index )",False,project.add_to_pythonpath(path),project,0.650256872177124 517,"def test_adding_a_mutually_exclusive_label_replaces_the_other ( db, api_client, default_account, folder_and_message_maps, label ) : folder_map, message_map = folder_and_message_maps label_to_add = folder_map [ label ] for key in message_map : if : continue message = message_map [ key ] resp_data = api_client. get_data ( ""/messages/{}"". format ( message. public_id ) ) labels = resp_data [ ""labels"" ] assert len ( labels ) == 1 assert labels [ 0 ] [ ""name"" ] == key existing_label = labels [ 0 ] [ ""id"" ] response = api_client. put_data ( ""/messages/{}"". format ( message. public_id ), { ""label_ids"" : [ label_to_add. category. public_id, existing_label ] }, ) labels = json. loads ( response. data ) [ ""labels"" ] assert len ( labels ) == 1 assert labels [ 0 ] [ ""name"" ] == label",False,key == label,key == 'public_id',0.6733598113059998 518,"def _process_include ( cmd, _model, _data, _default, options = None ) : if len ( cmd ) == 1 : raise IOError ( ""Cannot execute 'include' command without a filename"" ) if len ( cmd ) > 2 : raise IOError ( ""The 'include' command only accepts a single filename"" ) global Filename Filename = cmd [ 1 ] global Lineno Lineno = 0 try : scenarios = parse_data_commands ( filename = cmd [ 1 ] ) except IOError : raise err = sys. exc_info ( ) [ 1 ] raise IOError ( ""Error parsing file '%s': %s"" % ( Filename, str ( err ) ) ) if scenarios is None : return False for scenario in scenarios : for cmd in scenarios [ scenario ] : if : _data [ scenario ] = { } if cmd [ 0 ] in ( ""include"", ""import"", ""load"" ) : _tmpdata = { } _process_data ( cmd, _model, _tmpdata, _default, Filename, Lineno ) if scenario is None : for key in _tmpdata : if key in _data : _data [ key ]. 
update ( _tmpdata [ key ] ) else : _data [ key ] = _tmpdata [ key ] <",False,scenario not in _data,cmd[0] == 'include',0.6651172041893005 519,"def add_channels ( cls, voucher, add_channels ) : for add_channel in add_channels : channel = add_channel [ ""channel"" ] defaults = { ""currency"" : channel. currency_code } if ""discount_value"" in add_channel. keys ( ) : defaults [ ""discount_value"" ] = add_channel. get ( ""discount_value"" ) if : defaults [ ""min_spent_amount"" ] = add_channel. get ( ""min_amount_spent"", None ) models. VoucherChannelListing. objects. update_or_create ( voucher = voucher, channel = channel, defaults = defaults, )",True,'min_amount_spent' in add_channel.keys(),'min_amount_spent' in add_channel.keys(),0.6495001316070557 520,"def unknown_starttag ( self, tag, attrs ) : uattrs = [ ] strattrs = """" if attrs : for key, value in attrs : value = ( value. replace ( "">"", ""&gt;"" ). replace ( ""<"", ""&lt;"" ). replace ( '""', ""&quot;"" ) ) value = self. bare_ampersand. sub ( ""&amp;"", value ) if : value = value. decode ( self. encoding, ""ignore"" ) try : uattrs. append ( ( unicode ( key, self. encoding ), value ) ) except TypeError : uattrs. append ( ( key, value ) ) strattrs = u"""". join ( [ u' %s=""%s""' % ( key, value ) for key, value in uattrs ] ) if self. encoding : try : strattrs = strattrs. encode ( self. encoding ) except ( UnicodeEncodeError, LookupError ) : pass if tag in self. elements_no_end_tag : self. pieces. append ( ""<%s%s />"" % ( tag, strattrs ) ) else : self. pieces. append ( ""<%s%s>"" % ( tag, strattrs ) )",False,"not isinstance(value, unicode)",self.encoding,0.6460530757904053 521,"def serialize_content_range ( value ) : if isinstance ( value, ( tuple, list ) ) : if len ( value ) not in ( 2, 3 ) : raise ValueError ( ""When setting content_range to a list/tuple, it must "" ""be length 2 or 3 (not %r)"" % value ) if : begin, end = value length = None else : begin, end, length = value value = ContentRange ( begin, end, length ) value = str ( value ). strip ( ) if not value : return None return value",False,len(value) == 2,"isinstance(value, ContentRange)",0.6565661430358887 522,"def aggregate_to_full_tokens ( attn, tokens, token_starts, token_ends, attention = True ) : to_combine = [ ] spacy_attn = [ ] spacy_token_starts = [ ] spacy_token_ends = [ ] spacy_start = None for token, prob, start, end in zip ( tokens, attn, token_starts, token_ends ) : to_combine. append ( prob ) if : spacy_start = start if token. endswith ( ""</w>"" ) : if attention : spacy_attn. append ( np. max ( to_combine, 0 ) ) else : spacy_attn. append ( to_combine [ - 1 ] ) spacy_token_starts. append ( spacy_start ) spacy_token_ends. append ( end ) to_combine = [ ] spacy_start = None if attention : spacy_attn = spacy_attn / sum ( spacy_attn ) key = ""attention_weights"" else : key = ""explanation"" return { key : spacy_attn, ""token_starts"" : spacy_token_starts, ""token_ends"" : spacy_token_ends, }",False,spacy_start is None,"token.endswith('"")",0.6620572209358215 523,"def _get_next_segment ( self, segment_path, page_size, segment_cursor = None ) : if segment_path : if : return None return Segment ( self. client, segment_path, page_size, segment_cursor ) return None",False,self.end_time and self._is_later_than_end_time(segment_path),not self.client.has_segment(segment_path),0.6470015048980713 524,"def update_completion ( self ) : """"""Update completion model with exist tags"""""" orig_text = self. widget. text ( ) text = "", "". join ( orig_text. replace ( "", "", "","" ).
split ( "","" ) [ : - 1 ] ) tags = [ ] for tag in self. tags_list : if : if orig_text [ - 1 ] not in ( "","", "" "" ) : tags. append ( ""%s,%s"" % ( text, tag ) ) tags. append ( ""%s, %s"" % ( text, tag ) ) else : tags. append ( tag ) if tags!= self. completer_model. stringList ( ) : self. completer_model. setStringList ( tags )",False,"',' in orig_text",tag != text,0.6616669297218323 525,"def configure_httpretty ( sitedir ) : httpretty. enable ( ) dir = Path ( f""tests/test_sites/data/test_{sitedir}/"" ) data_file = dir / ""data.json"" data = None with open ( data_file ) as f : data = json. load ( f ) for obj in data : method = httpretty. POST if : method = httpretty. GET with open ( dir / obj [ ""file"" ] ) as f : httpretty. register_uri ( method, obj [ ""url"" ], f. read ( ), )",False,obj['method'] == 'GET','file' in obj,0.6592972278594971 526,"def __call__ ( self, x, y, axes = 2 ) : xnd = x. ndimension ( ) ynd = y. ndimension ( ) if isinstance ( axes, int ) : axes = range ( xnd - axes, xnd ), range ( axes ) if isinstance ( axes [ 0 ], int ) : axes = ( axes [ 0 ], ), axes [ 1 ] if isinstance ( axes [ 1 ], int ) : axes = axes [ 0 ], ( axes [ 1 ], ) x_ix = [ None ] * xnd y_ix = [ None ] * ynd out_ix = [ ] available_ix = iter ( EINSUM_SYMBOLS_BASE ) for ax1, ax2 in zip ( * axes ) : repeat = next ( available_ix ) x_ix [ ax1 ] = repeat y_ix [ ax2 ] = repeat for i in range ( xnd ) : if x_ix [ i ] is None : leave = next ( available_ix ) x_ix [ i ] = leave out_ix. append ( leave ) for i in range ( ynd ) : if : leave = next ( available_ix ) y_ix [ i ] = leave out_ix. append ( leave ) einsum_str = ""{},{}->{}"". format ( * map ( """". join, ( x_ix, y_ix, out_ix ) ) ) return self. einsum ( einsum_str, x, y )",False,y_ix[i] is None,x_ix[i] is None,0.655064582824707 527,def insert_broken_add_sometimes ( node ) : if node. op == theano. tensor. add : last_time_replaced [ 0 ] = not last_time_replaced [ 0 ] if : return [ off_by_half ( * node. inputs ) ] return False,False,last_time_replaced[0],node.inputs and last_time_replaced[0],0.6536835432052612 528,"def __test_using_best_weights ( self, ckpt_path, test_dataloaders ) : model = self. lightning_module if ckpt_path == ""best"" and not self. checkpoint_callback. best_model_path : raise MisconfigurationException ( 'ckpt_path is ""best"", but ModelCheckpoint is not configured to save the best model.' ) if ckpt_path is not None : if ckpt_path == ""best"" : ckpt_path = self. checkpoint_callback. best_model_path if len ( ckpt_path ) == 0 : rank_zero_warn ( f"".test() found no path for the best weights, {ckpt_path}. Please "" f""specify a path for a checkpoint.test(ckpt_path=PATH)"" ) return { } if : self. accelerator. barrier ( ) ckpt = pl_load ( ckpt_path, map_location = lambda storage, loc : storage ) model. load_state_dict ( ckpt [ ""state_dict"" ] ) if test_dataloaders is not None : self. data_connector. attach_dataloaders ( model, test_dataloaders = test_dataloaders ) self. tested_ckpt_path = ckpt_path results = self. fit ( model ) if self. is_function_implemented ( ""teardown"" ) : model_ref = self. lightning_module model_ref. teardown ( ""test"" ) return",False,not self._device_type == DeviceType.TPU,self.accelerator is not None,0.6573450565338135 529,"def __call__ ( self, gradients ) : """"""Accumulates :obj:`gradients` on the current replica."""""" if not self. _gradients : _ = self. step self. _gradients. extend ( [ tf. Variable ( tf. zeros_like ( gradient ), trainable = False, synchronization = tf. VariableSynchronization. 
ON_READ, aggregation = tf. VariableAggregation. ONLY_FIRST_REPLICA, ) if : else gradient for gradient in gradients ] ) if len ( gradients )!= len ( self. _gradients ) : raise ValueError ( ""Expected %s gradients, but got %d"" % ( len ( self. _gradients ), len ( gradients ) ) ) for accum_gradient, gradient in zip ( self. _gradients, gradients ) : if accum_gradient is not None and gradient is not None : accum_gradient. assign_add ( gradient ) self. _accum_steps. assign_add ( 1 )",False,gradient is not None,"isinstance(gradients, dict)",0.6660048961639404 530,"def handle_startendtag ( self, tag, attrs ) : for i, attr in enumerate ( attrs ) : attrname, attrvalue = attr if : self. addhtmlblock ( attrvalue ) attrs [ i ] = ( attrname, self. callback ( normalize_html ( attrvalue ). replace ( ""\n"", "" "" ) ), ) if self. currenttag is not None : self. currentblock += self. get_starttag_text ( ) self. currentsrc += self. get_starttag_text ( ) else : self. filesrc += self. buildtag ( tag, attrs, startend = True )",False,attrname in self.INCLUDEATTRS and self.currentblock == '',i % 3,0.6535142064094543 531,"def act_mapping ( self, items, actions, mapping ) : """"""Executes all the actions on the list of pods."""""" success = True for action in actions : for key, method in mapping. items ( ) : if key in action : params = action. get ( key ) ret = method ( items, params ) if : success = False return success",True,not ret,not ret,0.6718182563781738 532,"def check_twobit_file ( dbkey, GALAXY_DATA_INDEX_DIR ) : twobit_file = ""%s/twobit.loc"" % GALAXY_DATA_INDEX_DIR twobit_path = """" twobits = { } for i, line in enumerate ( open ( twobit_file ) ) : line = line. rstrip ( ""\r\n"" ) if line and not line. startswith ( ""#"" ) : fields = line. split ( ""\t"" ) if : continue twobits [ ( fields [ 0 ] ) ] = fields [ 1 ] if dbkey in twobits : twobit_path = twobits [ ( dbkey ) ] return twobit_path",False,len(fields) < 2,len(fields) > 1,0.6576115489006042 533,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRING : self. key = iprot. readString ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRUCT : self. column_path = ColumnPath ( ) self. column_path. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. I32 : self. consistency_level = iprot. readI32 ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 2,fid == 2,0.6763397455215454 534,"def beginRendering ( self, canvasRegion ) : if None!= self. canvas : if None == canvasRegion : self. canvas. resize ( 0, 0 ) self. canvasWidth = self. canvasHeight = 0 else : width = int ( round ( canvasRegion. width ) ) height = int ( round ( canvasRegion. height ) ) if : self. canvas. clear ( ) else : self. canvas. resize ( width, height ) self. canvasWidth = width self. canvasHeight = height self. x0 = int ( round ( canvasRegion. x ) ) self. y0 = int ( round ( canvasRegion. y ) ) if self. x0 == - 1 and self. y0 == - 1 : self. x0 = 0 self. canvasPanel. setWidgetPosition ( self. canvas, self. x0, self. y0 ) self. 
imageIndex = 0",False,width == self.canvasWidth and height == self.canvasHeight,None == self.canvas,0.6608942747116089 535,"def limits ( self, value, square = False ) : """"""TODO: doc + server side implementation"""""" if isinstance ( value, six. string_types ) : import re match = re. match ( r""(\d*)(\D*)"", value ) if : raise ValueError ( ""do not understand limit specifier %r, examples are 90%, 3sigma"" ) else : value, type = match. groups ( ) import ast value = ast. literal_eval ( value ) type = type. strip ( ) if type in [ ""s"", ""sigma"" ] : return self. limits_sigma ( value ) elif type in [ ""ss"", ""sigmasquare"" ] : return self. limits_sigma ( value, square = True ) elif type in [ ""%"", ""percent"" ] : return self. limits_percentage ( value ) elif type in [ ""%s"", ""%square"", ""percentsquare"" ] : return self. limits_percentage ( value, square = True ) if value is None : return self. limits_percentage ( square = square ) else : return value",False,match is None,not match,0.6622588634490967 536,"def load_coll ( self, name, coll_config ) : if coll_config == ""$all"" and self. auto_handler : return self. auto_handler if isinstance ( coll_config, str ) : index = coll_config archive_paths = None acl_paths = None default_access = self. default_access elif isinstance ( coll_config, dict ) : index = coll_config. get ( ""index"" ) if not index : index = coll_config. get ( ""index_paths"" ) archive_paths = coll_config. get ( ""archive_paths"" ) acl_paths = coll_config. get ( ""acl_paths"" ) default_access = coll_config. get ( ""default_access"", self. default_access ) else : raise Exception ( ""collection config must be string or dict"" ) if index : agg = init_index_agg ( { name : index } ) else : if not isinstance ( coll_config, dict ) : raise Exception ( ""collection config missing"" ) sequence = coll_config. get ( ""sequence"" ) if sequence : return self. init_sequence ( name, sequence ) index_group = coll_config. get ( ""index_group"" ) if : raise Exception ( ""no index, index_group or sequence found"" ) timeout = int ( coll_config. get ( ""timeout"", 0 ) ) agg = init_index_agg ( index_group, True, timeout ) if not archive_paths : archive_paths = self. config. get ( """,False,not index_group,index_group is None or index_group is None,0.656926155090332 537,"def getOptions ( self, section = None, ignoreWrong = True ) : """"""Reads configuration for jail(s) and adds enabled jails to __jails"""""" opts = [ ] self. __opts = ConfigReader. getOptions ( self, ""Definition"", opts ) if section is None : sections = self. sections ( ) else : sections = [ section ] parse_status = 0 for sec in sections : if sec == ""INCLUDES"" : continue jail = JailReader ( sec, force_enable = self. __force_enable, share_config = self. share_config, use_config = self. _cfg, ) ret = jail. getOptions ( ) if : if jail. isEnabled ( ) : parse_status |= 1 self. __jails. append ( jail ) else : logSys. error ( ""Errors in jail %r.%s"", sec, "" Skipping..."" if ignoreWrong else """" ) self. __jails. append ( jail ) parse_status |= 2 return ( ignoreWrong and parse_status & 1 ) or not ( parse_status & 2 )",False,ret,ret & 1,0.6952841281890869 538,"def write_Leo_file ( self, fileName, outlineOnlyFlag, toString = False, toOPML = False ) : """"""Write the.leo file."""""" c, fc = self. c, self structure_errors = c. checkOutline ( ) if structure_errors : g. error ( ""Major structural errors! outline not written"" ) return False if not outlineOnlyFlag or toOPML : g. app. recentFilesManager. writeRecentFilesFile ( c ) fc. 
writeAllAtFileNodesHelper ( ) if fc. isReadOnly ( fileName ) : return False if g. SQLITE and fileName and fileName. endswith ( "".db"" ) : return fc. exportToSqlite ( fileName ) try : fc. putCount = 0 fc. toString = toString if : ok = fc. writeToStringHelper ( fileName ) else : ok = fc. writeToFileHelper ( fileName, toOPML ) finally : fc. outputFile = None fc. toString = False return ok",False,toString,toOPML,0.6930158138275146 539,"def _send_internal ( self, bytes_ ) : if self. pendings : self. pendings += bytes_ bytes_ = self. pendings try : self. _reconnect ( ) self. socket. sendall ( bytes_ ) self. pendings = None except Exception : self. _close ( ) if : self. pendings = None else : self. pendings = bytes_",False,self.pendings and len(self.pendings) > self.bufmax,self.send_response,0.6528598666191101 540,"def get_profile_cutoff ( profile_id ) : cutoff_language = None if not len ( profile_id_list ) : update_profile_id_list ( ) if profile_id : cutoff_language = [ ] for profile in profile_id_list : profileId, name, cutoff, items = profile. values ( ) if : if profileId == int ( profile_id ) : for item in ast. literal_eval ( items ) : if item [ ""id"" ] == cutoff : return [ item ] elif cutoff == 65535 : cutoff_language. append ( item ) if not len ( cutoff_language ) : cutoff_language = None return cutoff_language",False,cutoff,len(items),0.6935614943504333 541,"def assert_conll_writer_output ( dataset : InternalBioNerDataset, expected_output : List [ str ], sentence_splitter : SentenceSplitter = None, ) : outfile_path = tempfile. mkstemp ( ) [ 1 ] try : sentence_splitter = ( sentence_splitter if : else NoSentenceSplitter ( tokenizer = SpaceTokenizer ( ) ) ) writer = CoNLLWriter ( sentence_splitter = sentence_splitter ) writer. write_to_conll ( dataset, Path ( outfile_path ) ) contents = [ l. strip ( ) for l in open ( outfile_path ). readlines ( ) if l. strip ( ) ] finally : os. remove ( outfile_path ) assert contents == expected_output",False,sentence_splitter,sentence_splitter is None,0.6725471019744873 542,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if ftype == TType. LIST : self. success = [ ] ( _etype987, _size984 ) = iprot. readListBegin ( ) for _i988 in xrange ( _size984 ) : _elem989 = iprot. readString ( ) self. success. append ( _elem989 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) elif fid == 1 : if : self. o1 = MetaException ( ) self. o1. read ( iprot ) else : iprot. skip ( ftype ) else : <",False,ftype == TType.STRUCT,self.o1 is None,0.6632502675056458 543,"def get_versions ( *, all = False, quiet = None ) : import bonobo from bonobo. util. pkgs import bonobo_packages yield _format_version ( bonobo, quiet = quiet ) if all : for name in sorted ( bonobo_packages ) : if : try : mod = __import__ ( name. replace ( ""-"", ""_"" ) ) try : yield _format_version ( mod, name = name, quiet = quiet ) except Exception as exc : yield ""{} ({})"". format ( name, exc ) except ImportError as exc : yield ""{} is not importable ({})."". 
format ( name, exc )",False,name != 'bonobo',name.startswith('bonobo_'),0.6598379015922546 544,"def visit_type_type ( self, t : TypeType ) -> ProperType : if isinstance ( self. s, TypeType ) : typ = self. meet ( t. item, self. s. item ) if : typ = TypeType. make_normalized ( typ, line = t. line ) return typ elif isinstance ( self. s, Instance ) and self. s. type. fullname == ""builtins.type"" : return t elif isinstance ( self. s, CallableType ) : return self. meet ( t, self. s ) else : return self. default ( self. s )",False,"not isinstance(typ, NoneType)","isinstance(self.s, NormalizeType)",0.6546109914779663 545,"def get_ou_from_path ( client, path ) : ou = client. list_roots ( ) [ ""Roots"" ] [ 0 ] if path == ""/"" : ou [ ""Path"" ] = path return ou ou_pager = client. get_paginator ( ""list_organizational_units_for_parent"" ) for part in path. strip ( ""/"" ). split ( ""/"" ) : found = False for page in ou_pager. paginate ( ParentId = ou [ ""Id"" ] ) : for child in page. get ( ""OrganizationalUnits"" ) : if child [ ""Name"" ] == part : found = True ou = child break if found : break if : raise ValueError ( ""No OU named:%r found in path: %s"" % ( path, path ) ) ou [ ""Path"" ] = path return ou",False,found is False,not ou,0.662794828414917 546,"def hash_path ( path, algorithm = ""blake2b"", suffix_len = None, num_orig_chars = None, constant_len = False ) : path = Path ( path ) if suffix_len is None : suffix_len = len ( path. suffix ) stem = str ( path. stem ) replaced_stem = stem. replace ( "" "", ""_"" ) replaced_stem = replaced_stem. replace ( ""-"", ""_"" ) filtered_stem = non_word_pattern. sub ( """", replaced_stem ) if len ( filtered_stem ) == len ( stem ) : return replaced_stem path = str ( path ) if algorithm == ""blake2b"" : hashstr = hashlib. blake2b ( path. encode ( ), digest_size = 16 ). hexdigest ( ) elif algorithm == ""md5"" : hashstr = hashlib. md5 ( path. encode ( ) ). hexdigest ( ) else : raise ValueError ( ""Unsupported algorithm {}"". format ( algorithm ) ) max_orig_chars = PATH_MAXLEN - ( len ( hashstr ) + 1 ) - suffix_len orig_take_chars = ( max_orig_chars if num_orig_chars is None else min ( num_orig_chars, max_orig_chars ) ) if orig_take_chars > 0 : trunc_stem = filtered_stem [ : orig_take_chars ] if : trunc_stem = trunc_stem. ljust ( orig_take_chars, ""_"" ) new_stem = ""{}_{}"". format ( trunc_stem, hashstr ) else : new_stem = hashstr return new_stem",False,num_orig_chars and constant_len,constant_len,0.6487576365470886 547,"def profile_by_id ( request, user_id ) : user = User. objects. get ( pk = user_id ) if request. method == ""POST"" : form = ProfileForm ( request. POST, request. FILES ) if form. is_valid ( ) : print ( ""made it!"" ) if request. POST. get ( ""username"" )!= user. username : user. username = request. POST. get ( ""username"" ) if request. POST. get ( ""first_name"" )!= user. first_name : user. first_name = request. POST. get ( ""first_name"" ) if request. POST. get ( ""last_name"" )!= user. last_name : user. last_name = request. POST. get ( ""last_name"" ) if : user. email = request. POST. get ( ""email"" ) if request. POST. get ( ""password"" ) : user. set_password ( request. POST. get ( ""password"" ) ) if request. FILES : user. userprofile. image = store_uploaded_file ( user. username + ""."" + request. FILES [ ""picture"" ]. name. split ( ""."" ) [ - 1 ], request. FILES [ ""picture"" ], ) user. userprofile. save ( ) user. 
save ( ) <",False,request.POST.get('email') != user.email,request.POST.get('email'),0.6517688035964966 548,"def __call__ ( self, list_data ) : coords, feats, labels = list ( zip ( * list_data ) ) coords_batch, feats_batch, labels_batch = [ ], [ ], [ ] batch_num_points = 0 for batch_id, _ in enumerate ( coords ) : num_points = coords [ batch_id ]. shape [ 0 ] batch_num_points += num_points if : num_full_points = sum ( len ( c ) for c in coords ) num_full_batch_size = len ( coords ) logging. warning ( f""\tCannot fit {num_full_points} points into"" "" {self.limit_numpoints} points limit. Truncating batch "" f""size at {batch_id} out of {num_full_batch_size} with "" f""{batch_num_points - num_points}."" ) break coords_batch. append ( coords [ batch_id ] ) feats_batch. append ( feats [ batch_id ] ) labels_batch. append ( labels [ batch_id ] ) return sparse_collate ( coords_batch, feats_batch, labels_batch, dtype = self. dtype, device = self. device, )",False,self.limit_numpoints > 0 and batch_num_points > self.limit_numpoints,num_points > self.max_points,0.6534451246261597 549,"def dtdwrite ( dtdfile, entities, force = False ) : if not entities : return dtdEntities = [ '<!ENTITY %s ""%s"">' % ( id, escape ( val, entitydefs ) ) for id, val in entities. items ( ) ] dtdEntities. sort ( ) dtdFileData = ""\n"". join ( dtdEntities ) + ""\n"" if type ( dtdfile ) in types. StringTypes : if : if force : os. remove ( dtdfile ) else : raise DTDGenError ( ""dtd '%s' already exists, use '--force' to "" ""allow overwrite"" % dtdfile ) dtdf = open ( dtdfile, ""w"" ) else : dtdf = dtdfile dtdf. write ( dtdFileData ) if dtdf!= dtdfile : dtdf. close ( )",True,os.path.exists(dtdfile),os.path.exists(dtdfile),0.6584407091140747 550,"def _mixture_ ( self ) -> Sequence [ Tuple [ float, np. ndarray ] ] : ps = [ ] for pauli in self. _error_probabilities : Pi = np. identity ( 1 ) for gate in pauli : if gate == ""I"" : Pi = np. kron ( Pi, protocols. unitary ( identity. I ) ) elif gate == ""X"" : Pi = np. kron ( Pi, protocols. unitary ( pauli_gates. X ) ) elif : Pi = np. kron ( Pi, protocols. unitary ( pauli_gates. Y ) ) elif gate == ""Z"" : Pi = np. kron ( Pi, protocols. unitary ( pauli_gates. Z ) ) ps. append ( Pi ) return tuple ( zip ( self. _error_probabilities. values ( ), ps ) )",True,gate == 'Y',gate == 'Y',0.6620485782623291 551,"def newtodolink ( self, url, origin ) : if self. todo. has_key ( url ) : if : self. todo [ url ]. append ( origin ) self. note ( 3, "" Seen todo link %s"", self. format_url ( url ) ) else : self. todo [ url ] = [ origin ] self. note ( 3, "" New todo link %s"", self. format_url ( url ) )",False,origin not in self.todo[url],url in self.todo[url],0.6547436714172363 552,"def checkpoint ( self, metrics_dict, iteration, model, optimizer, lr_scheduler ) : if self. checkpoint_runway : if iteration < self. checkpoint_runway : return elif iteration == self. checkpoint_runway : print ( ""Checkpoint runway has been met. Checkpointing will now occur."" ) if ( self. checkpoint_every and iteration > 0 and iteration % self. checkpoint_every == 0 ) : score = None state = self. bundle_state ( iteration, score, model, optimizer, lr_scheduler ) checkpoint_path = f""{self.checkpoint_dir}/model_checkpoint_{iteration}.pth"" torch. save ( state, checkpoint_path ) if self. checkpoint_best and self. checkpoint_metric in metrics_dict : score = metrics_dict [ self. checkpoint_metric ] if self.
is_best ( score ) : if : print ( f""Saving model at iteration {iteration:.2f} with best "" f""({self.checkpoint_metric_mode}) score "" f""{self.checkpoint_metric}={score:.3f}"" ) self. best_model_found = True self. best_iteration = iteration self. best_score = score state = self. bundle_state ( iteration, score, model, optimizer, lr_scheduler ) checkpoint",False,self.verbose,self.best_model_found,0.6675825119018555 553,"def format_bpe_text ( symbols, delimiter = b""@@"" ) : """"""Convert a sequence of bpe words into sentence."""""" words = [ ] word = b"""" if isinstance ( symbols, str ) : symbols = symbols. encode ( ) delimiter_len = len ( delimiter ) for symbol in symbols : if : word += symbol [ : - delimiter_len ] else : word += symbol words. append ( word ) word = b"""" return b"" "". join ( words )",False,len(symbol) >= delimiter_len and symbol[-delimiter_len:] == delimiter,delimiter_len > 0,0.6539909839630127 554,"def configure_slurm_ddp ( self ) : if self. use_ddp or self. use_ddp2 : num_requested_gpus = self. num_gpus * self. num_nodes num_slurm_tasks = 0 try : num_slurm_tasks = int ( os. environ [ ""SLURM_NTASKS"" ] ) self. is_slurm_managing_tasks = num_slurm_tasks == num_requested_gpus if num_requested_gpus == 0 : self. is_slurm_managing_tasks = num_slurm_tasks == self. num_processes job_name = os. environ [ ""SLURM_JOB_NAME"" ] if : self. is_slurm_managing_tasks = False except Exception : self. is_slurm_managing_tasks = False try : should_fake = int ( os. environ [ ""FAKE_SLURM_MANAGING_TASKS"" ] ) if should_fake : self. is_slurm_managing_tasks = True except Exception : pass if self. is_slurm_managing_tasks : rank_zero_info ( ""Multi-processing is handled by Slurm."" )",False,job_name == 'bash',job_name in os.environ[SLURM_JOB_NAME],0.6583789587020874 555,"def _build_dom ( cls, content, mode ) : assert mode in ( ""html"", ""xml"" ) if mode == ""html"" : if not hasattr ( THREAD_STORAGE, ""html_parser"" ) : THREAD_STORAGE. html_parser = HTMLParser ( ) dom = defusedxml. lxml. parse ( StringIO ( content ), parser = THREAD_STORAGE. html_parser ) return dom. getroot ( ) else : if : THREAD_STORAGE. xml_parser = XMLParser ( ) dom = defusedxml. lxml. parse ( BytesIO ( content ), parser = THREAD_STORAGE. xml_parser ) return dom. getroot ( )",True,"not hasattr(THREAD_STORAGE, 'xml_parser')","not hasattr(THREAD_STORAGE, 'xml_parser')",0.649681806564331 556,"def useful ( self, pos ) : global TIMESTAMP TIMESTAMP += 1 square = self. squares [ pos ] if self. useful_fast ( square ) : return True old_hash = self. zobrist. hash self. zobrist. update ( square, self. color ) empties = opps = weak_opps = neighs = weak_neighs = 0 for neighbour in square. neighbours : neighcolor = neighbour. color if neighcolor == EMPTY : empties += 1 continue neighbour_ref = neighbour. find ( ) if neighbour_ref. timestamp!= TIMESTAMP : if : neighs += 1 else : opps += 1 neighbour_ref. timestamp = TIMESTAMP neighbour_ref. temp_ledges = neighbour_ref. ledges neighbour_ref. temp_ledges -= 1 if neighbour_ref. temp_ledges == 0 : if : weak_neighs += 1 else : weak_opps += 1 neighbour_ref. remove ( neighbour_ref, update = False ) dupe = self. zobrist. dupe ( ) self. zobrist. hash = old_hash strong_neighs = neighs - weak_neighs strong_opps = opps - weak_opps return not dupe and ( empties or weak_opps or ( strong_neigh",False,neighcolor == self.color,negate_ref.update != False,0.6684244871139526 557,"def __call__ ( self, engine : Optional [ Engine ], name : Optional [ str ] = None ) -> None : value = self. 
get_param ( ) if isinstance ( value, list ) : if len ( value )!= len ( self. optimizer_param_groups ) : raise ValueError ( ""size of value is different than optimizer_param_groups "" f""{len(value)}!= {len(self.optimizer_param_groups)}"" ) for i, param_group in enumerate ( self. optimizer_param_groups ) : param_group [ self. param_name ] = value [ i ] else : for i, param_group in enumerate ( self. optimizer_param_groups ) : param_group [ self. param_name ] = value if name is None : name = self. param_name if self. save_history and engine : if : setattr ( engine. state, ""param_history"", { } ) engine. state. param_history. setdefault ( name, [ ] ) values = [ pg [ self. param_name ] for pg in self. optimizer_param_groups ] engine. state. param_history [ name ]. append ( values ) self. event_index += 1",False,"not hasattr(engine.state, 'param_history') or engine.state.param_history is None",name is None,0.6503086090087891 558,"def getitem_tuple_lower ( context, builder, sig, args ) : tupty, idx = sig. args idx = idx. literal_value tup, _ = args if isinstance ( idx, int ) : if : idx += len ( tupty ) if not 0 <= idx < len ( tupty ) : raise IndexError ( ""cannot index at %d in %s"" % ( idx, tupty ) ) res = builder. extract_value ( tup, idx ) elif isinstance ( idx, slice ) : items = cgutils. unpack_tuple ( builder, tup ) [ idx ] res = context. make_tuple ( builder, sig. return_type, items ) else : raise NotImplementedError ( ""unexpected index %r for %s"" % ( idx, sig. args [ 0 ] ) ) return impl_ret_borrowed ( context, builder, sig. return_type, res )",False,idx < 0,tup[0] == 0,0.6798169016838074 559,"def migrate_InternalTip ( self ) : for old_obj in self. session_old. query ( self. model_from [ ""InternalTip"" ] ) : new_obj = self. model_to [ ""InternalTip"" ] ( ) for key in new_obj. __table__. columns. _data. keys ( ) : new_obj. status = ""antani!"" if key == ""status"" or key == ""substatus"" : pass elif : setattr ( new_obj, key, getattr ( old_obj, key ) ) self. session_new. add ( new_obj ) if old_obj. receipt_hash : new_wbtip = self. model_to [ ""WhistleblowerTip"" ] ( ) new_wbtip. id = old_obj. id new_wbtip. tid = old_obj. tid new_wbtip. receipt_hash = old_obj. receipt_hash self. session_new. add ( new_wbtip )",False,key in old_obj.__table__.columns._data.keys(),"hasattr(old_obj, key)",0.6521081924438477 560,"def postprocess_element ( elements, processed ) : """"""Fix unresolved references"""""" if elements in processed : return processed. append ( elements ) for k, v in elements. items ( ) : if isinstance ( v, Struct ) : if v!= elements : try : postprocess_element ( v, processed ) except RuntimeError as e : warnings. warn ( unicode ( e ), RuntimeWarning ) if v. refers_to : if isinstance ( v. refers_to, dict ) : extend_element ( v, v. refers_to ) v. refers_to = None else : elements [ k ] = v. refers_to if v. array : elements [ k ] = [ v ] if isinstance ( v, list ) : for n in v : if : postprocess_element ( n, processed )",False,"isinstance(n, (Struct, list))",n != n,0.6580489873886108 561,"def _open ( file_, mode = ""r"" ) : """"""Open file object given filenames, open files or even archives."""""" if isinstance ( file_, string_types ) : _, ext = path. splitext ( file_ ) if : s = tarfile. open ( file_ ) return s. extractfile ( s. next ( ) ) else : return open ( file_, mode ) return file_",False,"ext in {'.bz2', '.gz'}",ext.lower() == '.tar',0.6605916023254395 562,"def d3_box_overlap_kernel ( boxes, qboxes, rinc, criterion = - 1 ) : N, K = boxes. shape [ 0 ], qboxes. 
shape [ 0 ] for i in numba. prange ( N ) : for j in numba. prange ( K ) : if rinc [ i, j ] > 0 : iw = min ( boxes [ i, 1 ], qboxes [ j, 1 ] ) - max ( boxes [ i, 1 ] - boxes [ i, 4 ], qboxes [ j, 1 ] - qboxes [ j, 4 ] ) if : area1 = boxes [ i, 3 ] * boxes [ i, 4 ] * boxes [ i, 5 ] area2 = qboxes [ j, 3 ] * qboxes [ j, 4 ] * qboxes [ j, 5 ] inc = iw * rinc [ i, j ] if criterion == - 1 : ua = area1 + area2 - inc elif criterion == 0 : ua = area1 elif criterion == 1 : ua = area2 else : ua = inc <",False,iw > 0,rinc[j] > 0,0.6861975193023682 563,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRUCT : self. hiveObject = HiveObjectRef ( ) self. hiveObject. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. principalName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. I32 : self. principalType = iprot. readI32 ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRUCT : self. grantInfo = PrivilegeGrantInfo",True,fid == 4,fid == 4,0.676572322845459 564,"def _recv_value ( self, server, flags, rlen ) : rlen += 2 buf = server. recv ( rlen ) if len ( buf )!= rlen : raise _Error ( ""received %d bytes when expecting %d"" % ( len ( buf ), rlen ) ) if len ( buf ) == rlen : buf = buf [ : - 2 ] if flags & Client. _FLAG_COMPRESSED : buf = zlib. decompress ( buf ) if flags == 0 or flags == Client. _FLAG_COMPRESSED : val = buf elif flags & Client. _FLAG_INTEGER : val = int ( buf ) elif flags & Client. _FLAG_LONG : val = long ( buf ) elif flags & Client. _FLAG_PICKLE : try : file = BytesIO ( buf ) unpickler = self. unpickler ( file ) if : unpickler. persistent_load = self. persistent_load val = unpickler. load ( ) except Exception as e : self. debuglog ( ""Pickle error: %s\n"" % e ) return None else : self. debuglog ( ""unknown flags on get: %x\n"" % flags ) raise ValueError ( ""Unknown flags on get: %x"" % flags ) return val",False,self.persistent_load,unpickler.persistent_load is not None,0.6598360538482666 565,def isFinished ( self ) : if self. count > self. epiLen : self. res ( ) return True else : if : self. pertGlasPos ( 0 ) if self. count == self. epiLen / 2 + 1 : self. env. reset ( ) self. pertGlasPos ( 1 ) self. count += 1 return False,False,self.count == 1,self.count > 0,0.6627359390258789 566,"def group_by_heading ( lines ) : from collections import OrderedDict ret = OrderedDict ( ) k = [ ] ret [ ""No Category"" ] = k for line_type, line_args, line_kwargs in lines : if : k = [ ] ret [ line_args [ 0 ] ] = k else : k. append ( ( line_type, line_args, line_kwargs ) ) return ret",False,line_type == 'add_heading',line_type == 'Category',0.6528555154800415 567,"def create_paddle_predictor ( args ) : config = Config ( args. model_file, args. params_file ) if args. use_gpu : config. enable_use_gpu ( args. gpu_mem, 0 ) else : config. disable_gpu ( ) if args. enable_mkldnn : config. set_mkldnn_cache_capacity ( 10 ) config. enable_mkldnn ( ) config. set_cpu_math_library_num_threads ( args. cpu_num_threads ) if args. enable_profile : config. enable_profile ( ) config. disable_glog_info ( ) config. 
switch_ir_optim ( args. ir_optim ) if args. use_tensorrt : config. enable_tensorrt_engine ( precision_mode = Config. Precision. Half if : else Config. Precision. Float32, max_batch_size = args. batch_size, ) config. enable_memory_optim ( ) config. switch_use_feed_fetch_ops ( False ) predictor = create_predictor ( config ) return predictor",False,args.use_fp16,args.batch_size > 0,0.6535663604736328 568,"def do_debug ( self, args ) : """"""Implementation of 'coverage debug'."""""" if not args : show_help ( ""What information would you like: config, data, sys, premain?"" ) return ERR for info in args : if info == ""sys"" : sys_info = self. coverage. sys_info ( ) print ( info_header ( ""sys"" ) ) for line in info_formatter ( sys_info ) : print ( "" %s"" % line ) elif info == ""data"" : self. coverage. load ( ) data = self. coverage. get_data ( ) print ( info_header ( ""data"" ) ) print ( ""path: %s"" % self. coverage. get_data ( ). data_filename ( ) ) if : print ( ""has_arcs: %r"" % data. has_arcs ( ) ) summary = line_counts ( data, fullpath = True ) filenames = sorted ( summary. keys ( ) ) print ( ""\n%d files:"" % len ( filenames ) ) for f in filenames : line = ""%s: %d lines"" % ( f, summary [ f ] ) plugin = data. file_tracer ( f ) if plugin : line += "" [%s]"" % plugin <" 569,"async def _execute_mock_call ( self, * args, ** kwargs ) : _call = _Call ( ( args, kwargs ), two = True ) self. await_count += 1 self. await_args = _call self. await_args_list. append ( _call ) effect = self. side_effect if effect is not None : if _is_exception ( effect ) : raise effect elif : try : result = next ( effect ) except StopIteration : raise StopAsyncIteration if _is_exception ( result ) : raise result elif iscoroutinefunction ( effect ) : result = await effect ( * args, ** kwargs ) else : result = effect ( * args, ** kwargs ) if result is not DEFAULT : return result if self. _mock_return_value is not DEFAULT : return self. return_value if self. _mock_wraps is not None : if iscoroutinefunction ( self. _mock_wraps ) : return await self. _mock_wraps ( * args, ** kwargs ) return self. _mock_wraps ( * args, ** kwargs ) return self. return_value",False,not _callable(effect),_is_async_iteration(effect),0.6560366153717041 570,"def createform ( self, xfields ) : formstyle = self. formstyle if isinstance ( formstyle, basestring ) : if : formstyle = SQLFORM. formstyles [ formstyle ] else : raise RuntimeError ( ""formstyle not found"" ) if callable ( formstyle ) : try : table = formstyle ( self, xfields ) for id, a, b, c in xfields : self. field_parent [ id ] = ( getattr ( b, ""parent"", None ) if isinstance ( b, XmlComponent ) else None ) except TypeError : table = TABLE ( ) for id, a, b, c in xfields : newrows = formstyle ( id, a, b, c ) self. field_parent [ id ] = ( getattr ( b, ""parent"", None ) if isinstance ( b, XmlComponent ) else None ) if type ( newrows ). __name__!= ""tuple"" : newrows = [ newrows ] for newrow in newrows : table. append ( newrow ) else : raise RuntimeError ( ""formstyle not supported"" ) return table",False,formstyle in SQLFORM.formstyles,"hasattr(formstyle, '__formstyle__')",0.661724328994751 571,"def push_solution_to_instance ( self ) : scenario_instance = self. _instance scenariotree_sm_bySymbol = scenario_instance. _ScenarioTreeSymbolMap. bySymbol for tree_node in self. _node_list : stage_name = tree_node. _stage. name cost_variable_name, cost_variable_index = tree_node. _stage. _cost_variable stage_cost_component = self. _instance. find_component ( cost_variable_name ) [ cost_variable_index ] if not stage_cost_component.
is_expression ( ) : stage_cost_component. value = self. _stage_costs [ stage_name ] for tree_node in self. _node_list : for variable_id, var_value in iteritems ( self. _x [ tree_node. _name ] ) : compdata = scenariotree_sm_bySymbol [ variable_id ] if : compdata. value = var_value for variable_id in self. _fixed [ tree_node. _name ] : vardata = scenariotree_sm_bySymbol [ variable_id ] vardata. fix ( ) for variable_id in self. _stale [ tree_node. _name ] : vardata = scenariotree_sm_bySymbol [ variable_id ] vardata. stale = True",False,not compdata.is_expression(),self._fixed,0.6484264135360718 572,"def change_sel ( self ) : """"""Change the view's selections."""""" if self. alter_select and len ( self. sels ) > 0 : if : self. view. show ( self. sels [ 0 ] ) self. view. sel ( ). clear ( ) self. view. sel ( ). add_all ( self. sels )",False,self.multi_select is False,len(self.sels) > 0,0.6536827087402344 573,"def get_type ( type_ref ) : kind = type_ref. get ( ""kind"" ) if kind == TypeKind. LIST : item_ref = type_ref. get ( ""ofType"" ) if : raise Exception ( ""Decorated type deeper than introspection query."" ) return GraphQLList ( get_type ( item_ref ) ) elif kind == TypeKind. NON_NULL : nullable_ref = type_ref. get ( ""ofType"" ) if not nullable_ref : raise Exception ( ""Decorated type deeper than introspection query."" ) return GraphQLNonNull ( get_type ( nullable_ref ) ) return get_named_type ( type_ref [ ""name"" ] )",True,not item_ref,not item_ref,0.6661830544471741 574,"def gotAvatar ( avatar ) : if avatar. realm is not None : raise ewords. AlreadyLoggedIn ( ) for iface in interfaces : facet = iface ( avatar, None ) if : avatar. loggedIn ( self, mind ) mind. name = avatarId mind. realm = self mind. avatar = avatar return iface, facet, self. logoutFactory ( avatar, facet ) raise NotImplementedError ( self, interfaces )",False,facet is not None,facet.name != self.name,0.6652309894561768 575,def contains_only_whitespace ( node ) : if is_tag ( node ) : if : if not any ( [ unicode ( s ). strip ( ) for s in node. contents ] ) : return True return False,False,not any([not is_text(s) for s in node.contents]),node.contents and node.contents[0] != '\n',0.6514248251914978 576,"def validate_cell ( self, cell ) : super ( MetadataValidatorV2, self ). validate_cell ( cell ) if ""nbgrader"" not in cell. metadata : return meta = cell. metadata [ ""nbgrader"" ] grade = meta [ ""grade"" ] solution = meta [ ""solution"" ] locked = meta [ ""locked"" ] if ""cell_type"" in meta : if : self. log. warning ( ""Cell type has changed from {} to {}!"". format ( meta [ ""cell_type"" ], cell. cell_type ), cell, ) if grade or solution or locked : if ""grade_id"" not in meta : raise ValidationError ( ""nbgrader cell does not have a grade_id: {}"". format ( cell. source ) ) if meta [ ""grade_id"" ] == """" : raise ValidationError ( ""grade_id is empty"" ) if grade : if ""points"" not in meta : raise ValidationError ( ""nbgrader cell '{}' does not have points"". format ( meta [ ""grade_id"" ] ) ) if cell. cell_type == ""markdown"" and grade and not solution : raise ValidationError ( ""Markdown grade cell '{}' is not marked as a solution cell"". format ( meta [ ""grade_id"" ] ",False,meta['cell_type'] != cell.cell_type,locked,0.6515710949897766 577,"def __method_playback ( self, symbol, * args, ** dargs ) : if self. _debug : print >> sys. __stdout__, ( "" * Mock call: "" + _dump_function_call ( symbol, args, dargs ) ) if len ( self. recording )!= 0 : func_call = self. recording [ 0 ] if func_call. 
symbol!= symbol : msg = ""Unexpected call: %s\nExpected: %s"" % ( _dump_function_call ( symbol, args, dargs ), func_call, ) self. _append_error ( msg ) return None if : msg = ""Incorrect call: %s\nExpected: %s"" % ( _dump_function_call ( symbol, args, dargs ), func_call, ) self. _append_error ( msg ) return None self. recording. popleft ( ) if func_call. error : raise func_call. error else : return func_call. return_obj else : msg = ""unexpected call: %s"" % ( _dump_function_call ( symbol, args, dargs ) ) self. _append_error ( msg ) return None",False,"not func_call.match(*args, **dargs)",symbol.has_terminal(),0.6471517086029053 578,"def __next__ ( self ) : if not self. has_catalogs : raise StopIteration ( ""No catalogs"" ) if self. current_catalog_index >= len ( self. manifest. catalog_paths ) : raise StopIteration ( ""No more catalogs"" ) if self. current_catalog is None : current_catalog_path = os. path. join ( self. manifest. base_path, self. manifest. catalog_paths [ self. current_catalog_index ], ) self. current_catalog = Catalog ( current_catalog_path, read_only = self. manifest. read_only ) self. current_catalog. seekable. seek_line_start ( 1 ) contents = self. current_catalog. seekable. readline ( ) if contents is not None and len ( contents ) > 0 : current_index = self. current_index self. current_index += 1 if : return self. __next__ ( ) else : try : record = json. loads ( contents ) return record except Exception : print ( ""Ignoring record at index %s"" % ( current_index ) ) return self. __next__ ( ) else : self. current_catalog = None self. current_catalog_index += 1 return self. __next__ ( )",False,current_index in self.manifest.deleted_indexes,contents == None,0.6504038572311401 579,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. add_subscription ( ). TryMerge ( tmp ) continue if : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 0,tt > 255,0.6789340972900391 580,"def filter_errors ( self, errors : List [ str ] ) -> List [ str ] : real_errors : List [ str ] = list ( ) current_file = __file__ current_path = os. path. split ( current_file ) for line in errors : line = line. strip ( ) if not line : continue fn, lno, lvl, msg = self. parse_trace_line ( line ) if : _path = os. path. split ( fn ) if _path [ - 1 ]!= current_path [ - 1 ] : continue real_errors. append ( line ) return real_errors",False,fn is not None,fn,0.6597561240196228 581,"def parser_info ( self, video, info, stream, lvid, uid ) : if not ""allot"" in info or lvid!= info [ ""id"" ] : return stream_id = self. types_2_id [ stream ] stream_profile = self. id_2_profile [ stream_id ] host = info [ ""allot"" ] data = info [ ""data"" ] size = sum ( map ( int, data [ ""clipsBytes"" ] ) ) urls = [ ] assert len ( data [ ""clipsURL"" ] ) == len ( data [ ""clipsBytes"" ] ) == len ( data [ ""su"" ] ) for ( new, ck, ) in zip ( data [ ""su"" ], data [ ""ck"" ] ) : params = { ""ch"" : data [ ""ch"" ], ""num"" : data [ ""num"" ], ""new"" : new, ""key"" : ck, ""uid"" : uid, ""prod"" : ""h5n"", ""pt"" : 1, ""pg"" : 2, } if : cdnurl = ""https://{}/ip?{}"". format ( host, urlencode ( params ) ) url = json. loads ( get_content ( cdnurl ) ) [ ""servers"" ] [ 0 ] [ ""url"" ] else : url = new urls. append ( url ) video. 
streams [ stream_id ] = { ""container"" : ""mp4"", ""video_profile",False,urlparse(new).netloc == '',host,0.6574864983558655 582,"def parseImpl ( self, instring, loc, doActions = True ) : try : loc, tokens = self. expr. _parse ( instring, loc, doActions, callPreParse = False ) except ( ParseException, IndexError ) : if : if self. expr. resultsName : tokens = ParseResults ( [ self. defaultValue ] ) tokens [ self. expr. resultsName ] = self. defaultValue else : tokens = [ self. defaultValue ] else : tokens = [ ] return loc, tokens",False,self.defaultValue is not self.__optionalNotMatched,self.defaultValue,0.6559591293334961 583,"def validate_configuration ( self, configuration : Optional [ ExpectationConfiguration ] ) : """"""Validating that user has inputted a value set and that configuration has been initialized"""""" super ( ). validate_configuration ( configuration ) try : assert ""value_set"" in configuration. kwargs, ""value_set is required"" assert isinstance ( configuration. kwargs [ ""value_set"" ], ( list, set, dict ) ), ""value_set must be a list or a set"" if : assert ( ""$PARAMETER"" in configuration. kwargs [ ""value_set"" ] ), 'Evaluation Parameter dict for value_set kwarg must have ""$PARAMETER"" key' except AssertionError as e : raise InvalidExpectationConfigurationError ( str ( e ) ) return True",False,"isinstance(configuration.kwargs['value_set'], dict)",configuration.kwargs and 'value_set' in configuration.kwargs,0.6528669595718384 584,"def post ( self, request, * args, ** kwargs ) : settings_form = self. get_settings_form ( ) children_formset = self. get_children_formset ( ) data = request. POST. copy ( ) if settings_form : if : settings = settings_form. cleaned_data data [ ""settings"" ] = self. module. dump_settings ( settings ) else : return self. form_invalid ( self. get_form ( self. get_form_class ( ) ) ) if children_formset : if children_formset. is_valid ( ) : self. module. children = self. clean_children_data ( children_formset. cleaned_data ) data [ ""children"" ] = self. module. dump_children ( ) else : return self. form_invalid ( self. get_form ( self. get_form_class ( ) ) ) request. POST = data return super ( UpdateDashboardModuleView, self ). post ( request, * args, ** kwargs )",True,settings_form.is_valid(),settings_form.is_valid(),0.6488329172134399 585,"def _safe_coalesce ( t ) : tc = t. coalesce ( ) value_map = { } for idx, val in zip ( t. _indices ( ). t ( ), t. _values ( ) ) : idx_tup = tuple ( idx ) if : value_map [ idx_tup ] += val else : value_map [ idx_tup ] = val. clone ( ) if torch. is_tensor ( val ) else val new_indices = sorted ( list ( value_map. keys ( ) ) ) new_values = [ value_map [ idx ] for idx in new_indices ] if t. _values ( ). dim ( ) < 2 : new_values = t. _values ( ). new_tensor ( new_values ) else : new_values = torch. stack ( new_values ) new_indices = t. _indices ( ). new_tensor ( new_indices ). t ( ) tg = t. new ( new_indices, new_values, t. size ( ) ) assert ( tc. _indices ( ) == tg. _indices ( ) ). all ( ) assert ( tc. _values ( ) == tg. _values ( ) ). all ( ) return tg",True,idx_tup in value_map,idx_tup in value_map,0.6547656059265137 586,"def updateToolForHostInformation ( self, update = True ) : if self. validateCommandTabs ( self. hostActionNameText, self. hostLabelText, self. hostCommandText ) : if : pass else : self. updateHostActions ( ) self. hostTableRow = self. toolForHostsTableWidget. currentRow ( ) self. hostLabelText. setReadOnly ( False ) if self. toolForHostsTableWidget. item ( self. 
hostTableRow, 0 ) is not None : key = self. toolForHostsTableWidget. item ( self. hostTableRow, 0 ). text ( ) for tool in self. settings. hostActions : if tool [ 1 ] == key : self. hostActionNameText. setText ( tool [ 1 ] ) self. hostLabelText. setText ( tool [ 0 ] ) self. hostCommandText. setText ( tool [ 2 ] ) else : self. toolForHostsTableWidget. selectRow ( self. hostTableRow )",False,self.hostTableRow == -1 or update == False,update,0.6565169095993042 587,"def slo ( environ, start_response, user ) : client = environ [ ""repoze.who.plugins"" ] [ ""saml2auth"" ] sc = client. saml_client if ""QUERY_STRING"" in environ : query = parse_qs ( environ [ ""QUERY_STRING"" ] ) logger. info ( ""query: %s"", query ) try : response = sc. parse_logout_request_response ( query [ ""SAMLResponse"" ] [ 0 ], binding = BINDING_HTTP_REDIRECT ) if : logger. info ( ""LOGOUT response parsed OK"" ) except KeyError : response = None if response is None : request = sc. lo headers = [ ] delco = delete_cookie ( environ, ""pysaml2"" ) if delco : headers. append ( delco ) resp = Redirect ( ""/done"", headers = headers ) return resp ( environ, start_response )",False,response,response is True,0.6979637145996094 588,"def _probe ( self ) : """"""Copy all probed signals to buffers."""""" self. _probe_step_time ( ) for probe in self. model. probes : period = 1 if probe. sample_every is None else probe. sample_every / self. dt if : tmp = self. signals [ self. model. sig [ probe ] [ ""in"" ] ]. copy ( ) self. _probe_outputs [ probe ]. append ( tmp )",False,self.n_steps % period < 1,period > 0,0.6609293222427368 589,"def services_to_report ( self ) : services = self. _parse_services ( self. default ( ""REPORT_SERVICES"", """" ), None ) for service in services : if service. protocol == ""rpc"" : raise ServiceError ( f""bad protocol for REPORT_SERVICES: {service.protocol}"" ) if : ip_addr = service. host if ( ip_addr. is_multicast or ip_addr. is_unspecified or ( ip_addr. is_private and self. peer_announce ) ) : raise ServiceError ( f""bad IP address for REPORT_SERVICES: {ip_addr}"" ) elif service. host. lower ( ) == ""localhost"" : raise ServiceError ( f""bad host for REPORT_SERVICES: {service.host}"" ) return services",False,"isinstance(service.host, (IPv4Address, IPv6Address))",service.host == None,0.6529442071914673 590,"def next_ohlcv ( self ) -> pd. DataFrame : if self. _has_loaded_historical : frame = self. data_frame [ self. _current_index ] self. _current_index += 1 return frame data = self. exchange. fetchOHLCV ( symbol = self. symbol_pair, timeframe = self. timeframe, since = self. _current_index, limit = 1, ) if len ( data ) : self. _current_index = data [ len ( data ) - 1 ] [ ""timestamp"" ] frame = pd. DataFrame ( data, columns = self. in_columns ) frame = self. prepare_data ( frame ) if : self. data_frame = pd. DataFrame ( None, columns = self. columns ) self. data_frame = self. data_frame. append ( frame, ignore_index = True ) return frame return None",True,self.data_frame is None,self.data_frame is None,0.653052806854248 591,"def _handle_loaded ( objs ) : try : data_locations = storage_client. get_data_locations ( session_id, keys_to_fetch ) shared_quota_keys = [ ] inproc_keys = [ ] inproc_quota_keys = [ ] context_dict. update ( zip ( keys_to_fetch, objs ) ) for k, locations in zip ( keys_to_fetch, data_locations ) : quota_key = build_quota_key ( session_id, k, owner = self. proc_id ) if ( self. proc_id, DataStorageDevice. PROC_MEMORY ) not in locations : shared_quota_keys. 
append ( quota_key ) else : inproc_keys. append ( k ) inproc_quota_keys. append ( quota_key ) if : self. _mem_quota_ref. hold_quotas ( shared_quota_keys, _tell = True ) if inproc_keys : self. _mem_quota_ref. hold_quotas ( inproc_quota_keys, _tell = True ) if self. _remove_intermediate : storage_client. delete ( session_id, inproc_keys, [ self. _calc_intermediate_device ] ) finally : objs [ : ] = [ ]",True,shared_quota_keys,shared_quota_keys,0.6616040468215942 592,"def _cobra_getsock ( self, thr = None ) : if self. _cobra_spoolcnt : sock = self. _cobra_sockpool. get ( ) else : if not thr : thr = currentThread ( ) tsocks = getattr ( thr, ""cobrasocks"", None ) if tsocks is None : tsocks = { } thr. cobrasocks = tsocks sock = tsocks. get ( self. _cobra_slookup ) if not sock or sock. trashed : sock = self. _cobra_newsock ( ) authinfo = self. _cobra_kwargs. get ( ""authinfo"" ) if authinfo is not None : mtype, rver, data = sock. cobraTransaction ( COBRA_AUTH, """", authinfo ) if : raise CobraAuthException ( ""Authentication Failed!"" ) if not self. _cobra_spoolcnt : tsocks [ self. _cobra_slookup ] = sock return sock",False,mtype != COBRA_AUTH,rver or rver != 0,0.6569886207580566 593,"def get_other ( self, data, items ) : is_tuple = False if type ( data ) == tuple : data = list ( data ) is_tuple = True if type ( data ) == list : m_items = items. copy ( ) for idx, item in enumerate ( items ) : if item < 0 : m_items [ idx ] = len ( data ) - abs ( item ) for i in sorted ( set ( m_items ), reverse = True ) : if i < len ( data ) and i > - 1 : del data [ i ] if : return tuple ( data ) else : return data else : return None",True,is_tuple,is_tuple,0.6593594551086426 594,"def __init__ ( self, document_data ) : self. document_data = document_data self. field_paths = [ ] self. deleted_fields = [ ] self. server_timestamps = [ ] self. array_removes = { } self. array_unions = { } self. increments = { } self. minimums = { } self. maximums = { } self. set_fields = { } self. empty_document = False prefix_path = FieldPath ( ) iterator = self. _get_document_iterator ( prefix_path ) for field_path, value in iterator : if field_path == prefix_path and value is _EmptyDict : self. empty_document = True elif value is transforms. DELETE_FIELD : self. deleted_fields. append ( field_path ) elif value is transforms. SERVER_TIMESTAMP : self. server_timestamps. append ( field_path ) elif isinstance ( value, transforms. ArrayRemove ) : self. array_removes [ field_path ] = value. values elif isinstance ( value, transforms. ArrayUnion ) : self. array_unions [ field_path ] = value. values elif isinstance ( value, transforms. Increment ) : self. increments [ field_path ] = value. value elif : self. maximums [ field_path ] = value. value elif isinstance ( value, transforms. Minimum ) : self. minimums [ field_path ] = value. value else : self. field_paths. append ( field_path ) set_field",True,"isinstance(value, transforms.Maximum)","isinstance(value, transforms.Maximum)",0.6532870531082153 595,"def _ParseAndCheckSubOpts ( self ) : delta = None method = ""GET"" content_type = """" passwd = None for o, v in self. sub_opts : if o == ""-d"" : if delta is not None : delta += _DurationToTimeDelta ( v ) else : delta = _DurationToTimeDelta ( v ) elif o == ""-m"" : method = v elif o == ""-c"" : content_type = v elif : passwd = v else : self. RaiseInvalidArgumentException ( ) if delta is None : delta = timedelta ( hours = 1 ) expiration = calendar. timegm ( ( datetime. utcnow ( ) + delta ). 
utctimetuple ( ) ) if method not in [ ""GET"", ""PUT"", ""DELETE"", ""HEAD"" ] : raise CommandException ( ""HTTP method must be one of [GET|HEAD|PUT|DELETE]"" ) return method, expiration, content_type, passwd",False,o == '-p',o == '-passwd',0.6747341156005859 596,"def exe ( self, ret ) : if not ret : self. assertEqual ( ret, """" ) else : assert os. path. isabs ( ret ), ret if POSIX : assert os. path. isfile ( ret ), ret if : self. assertTrue ( os. access ( ret, os. X_OK ) )",False,"hasattr(os, 'access') and hasattr(os, 'X_OK')",os.path.isfile(ret),0.6477186679840088 597,"def package_exists ( self, pref ) : try : if : path = self. server_store. package ( pref ) else : path = self. test_server. server_store. package_revisions_root ( pref ) return self. test_server. server_store. path_exists ( path ) except NotFoundException : return False",False,pref.revision,self.server_store is None,0.6656686067581177 598,"def find_subdomains ( domain, data ) : subdomains = set ( ) js_urls = set ( ) db = Database ( ) for infos in data : jump_history = infos. get ( ""history"" ) req_url = infos. get ( ""url"" ) subdomains. update ( find_in_history ( domain, req_url, jump_history ) ) rsp_html = db. get_resp_by_url ( domain, req_url ) if : logger. log ( ""DEBUG"", f""an abnormal response occurred in the request {req_url}"" ) continue subdomains. update ( find_in_resp ( domain, req_url, rsp_html ) ) js_urls. update ( find_js_urls ( domain, req_url, rsp_html ) ) req_data = convert_to_dict ( js_urls ) resp_data = request. bulk_request ( domain, req_data, ret = True ) while not resp_data. empty ( ) : _, resp = resp_data. get ( ) if not isinstance ( resp, Response ) : continue text = utils. decode_resp_text ( resp ) subdomains. update ( find_in_resp ( domain, resp. url, text ) ) return subdomains",False,not rsp_html,rsp_html,0.6653605103492737 599,"def _allocate_nbd ( self ) : if not os. path. exists ( ""/sys/block/nbd0"" ) : self. error = _ ( ""nbd unavailable: module not loaded"" ) return None while True : if : self. error = _ ( ""No free nbd devices"" ) return None device = self. _DEVICES. pop ( ) if not os. path. exists ( ""/sys/block/%s/pid"" % os. path. basename ( device ) ) : break return device",True,not self._DEVICES,not self._DEVICES,0.6696393489837646 600,"def __setattr__ ( self, name, value ) : self. __lock__. acquire ( ) try : ident = get_ident ( ) storage = self. __storage__ if : storage [ ident ] [ name ] = value else : storage [ ident ] = { name : value } finally : self. __lock__. release ( )",True,ident in storage,ident in storage,0.6794613599777222 601,"def get_price_list_rate ( args, item_doc, out ) : meta = frappe. get_meta ( args. parenttype or args. doctype ) if meta. get_field ( ""currency"" ) or args. get ( ""currency"" ) : pl_details = get_price_list_currency_and_exchange_rate ( args ) args. update ( pl_details ) if : validate_conversion_rate ( args, meta ) price_list_rate = get_price_list_rate_for ( args, item_doc. name ) or 0 if not price_list_rate and item_doc. variant_of : price_list_rate = get_price_list_rate_for ( args, item_doc. variant_of ) if not price_list_rate : if args. price_list and args. rate : insert_item_price ( args ) return { } out. price_list_rate = ( flt ( price_list_rate ) * flt ( args. plc_conversion_rate ) / flt ( args. conversion_rate ) ) if not out. price_list_rate and args. transaction_type == ""buying"" : from erpnext. stock. doctype. item. item import get_last_purchase_details out. update ( get_last_purchase_details ( item_doc. name, args. name, args. 
conversion_rate if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if ftype == TType. STRUCT : self. success = ExecStats. ttypes. TExecSummary ( ) self. success. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 1 : if ftype == TType. STRUCT : self. error = beeswaxd. ttypes. QueryNotFoundException ( ) self. error. read ( iprot ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRUCT : self. error2 = beeswaxd. ttypes. BeeswaxException ( ) self. error2. read ( iprot ) else : <",True,fid == 2,fid == 2,0.6789374351501465 603,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. I64 : self. id = iprot. readI64 ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. I32 : self. state = iprot. readI32 ( ) else : iprot. skip ( ftype ) elif fid == 3 : if : self. user = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 4 : if : self. hostname = iprot. readString ( ) else : iprot. skip ( ftype ) <",False,ftype == TType.STRING,fid == 5,0.6610672473907471 604,"def validate_arguments ( args ) : if args. num_pss < 1 : print ( ""Value error: must have ore than one parameter servers."" ) exit ( 1 ) if not GPU_IDS : num_cpus = multiprocessing. cpu_count ( ) if : print ( ""Value error: there are %s available CPUs but you are requiring %s."" % ( num_cpus, args. cpu_trainers ) ) exit ( 1 ) if not os. path. isfile ( args. file ) : print ( ""Value error: model trainning file does not exist"" ) exit ( 1 )",False,args.cpu_trainers > num_cpus,args.num_cpus > 0 and args.cpu_trainers > 0,0.6555589437484741 605,"def KoUserEnviron ( startupEnvFileName = None ) : koEnviron = components. classes [ ""@activestate.com/koUserEnviron;1"" ]. createInstance ( components. interfaces. koIUserEnviron ) if startupEnvFileName : environ = UnwrapObject ( koEnviron ) environ. __init__ ( startupEnvFileName ) current_encoding = locale. getlocale ( ) [ 1 ] if : if sys. platform. startswith ( ""win"" ) : current_encoding = ""mbcs"" elif sys. platform. startswith ( ""darwin"" ) : current_encoding = ""mac-roman"" elif sys. platform. startswith ( ""linux"" ) : current_encoding = ""utf-8"" environ. startupEnvironEncoding = current_encoding return koEnviron",False,not current_encoding,current_encoding,0.6557108163833618 606,"def make_table ( items ) : if isinstance ( items, dict ) : items = items. items ( ) items. sort ( ) rows = [ ] i = 0 for name, value in items : i += 1 out = StringIO ( ) try : pprint. pprint ( value, out ) except Exception as e : print ( ""Error: %s"" % e, file = out ) value = html_quote ( out. getvalue ( ) ) if len ( value ) > 100 : orig_value = value value = value [ : 100 ] value += '...' value += '%s' % orig_value [ 100 : ] value = formatter. 
make_wrappable ( value ) if : attr ='class=""even""' else : attr ='class=""odd""' rows. append ( '' '%s%s' % ( attr, html_quote ( name ), preserve_whitespace ( value, quote = False ) ) ) return ""%s
"" % ( ""\n"". join ( rows ) )",False,i % 2,"hasattr(value, '__html__')",0.6779868006706238 607,"def test_ml_sigma ( ) : if debug_mode : if ""Sigma_u"" not in to_test : return print ( ""\n\nSIGMA_U"", end = """" ) for ds in datasets : for dt in ds. dt_s_list : if debug_mode : print ( ""\n"" + dt_s_tup_to_string ( dt ) + "": "", end = """" ) exog = results_sm_exog [ ds ] [ dt ]. exog is not None exog_coint = results_sm_exog_coint [ ds ] [ dt ]. exog_coint is not None err_msg = build_err_msg ( ds, dt, ""Sigma_u"" ) obtained = results_sm [ ds ] [ dt ]. sigma_u obtained_exog = results_sm_exog [ ds ] [ dt ]. sigma_u obtained_exog_coint = results_sm_exog_coint [ ds ] [ dt ]. sigma_u desired = results_ref [ ds ] [ dt ] [ ""est"" ] [ ""Sigma_u"" ] assert_allclose ( obtained, desired, rtol, atol, False, err_msg ) if exog : assert_equal ( obtained_exog, obtained, ""WITH EXOG"" + err_msg ) if : assert_equal ( obtained_exog_coint, obtained, ""WITH EXOG_COINT"" + err_msg )",False,exog_coint,coint,0.666644811630249 608,"def test_sortagrad_trainable_with_batch_bins ( module ) : args = make_arg ( sortagrad = 1 ) idim = 10 odim = 5 dummy_json = make_dummy_json ( 2, [ 3, 5 ], [ 3, 5 ], idim = idim, odim = odim ) if module == ""pytorch"" : import espnet. nets. pytorch_backend. e2e_asr as m else : import espnet. nets. chainer_backend. e2e_asr as m batch_elems = 2000 batchset = make_batchset ( dummy_json, batch_bins = batch_elems, shortest_first = True ) for batch in batchset : n = 0 for uttid, info in batch : ilen = int ( info [ ""input"" ] [ 0 ] [ ""shape"" ] [ 0 ] ) olen = int ( info [ ""output"" ] [ 0 ] [ ""shape"" ] [ 0 ] ) n += ilen * idim + olen * odim assert olen < batch_elems model = m. E2E ( idim, odim, args ) for batch in batchset : loss = model ( * convert_batch ( batch, module, idim = idim, odim = odim ) ) if : loss [ 0 ]. backward ( ) else : loss. backward ( ) with torch. no_grad ( ), chainer. no_backprop_mode ( ) : in_data = np. random. randn ( 10, idim ) model. recognize ( in_data, args, args. char",False,"isinstance(loss, tuple)",sortagrad == 1,0.6531214714050293 609,"def handleEvent ( self, event ) : eventName = event. eventType srcModuleName = event. module eventData = event. data parentEvent = event if self. errorState : return None self. sf. debug ( f""Received event, {eventName}, from {srcModuleName}"" ) if self. opts [ ""api_key"" ] == """" : self. sf. error ( ""You enabled sfp_honeypot but did not set an API key!"" ) self. errorState = True return None if eventData in self. results : return None self. results [ eventData ] = True if eventName == ""NETBLOCK_OWNER"" : if not self. opts [ ""netblocklookup"" ] : return None else : if IPNetwork ( eventData ). prefixlen < self. opts [ ""maxnetblock"" ] : self. sf. debug ( ""Network size bigger than permitted: "" + str ( IPNetwork ( eventData ). prefixlen ) + "" > "" + str ( self. opts [ ""maxnetblock"" ] ) ) return None if eventName == ""NETBLOCK_MEMBER"" : if : return None else : if IPNetwork ( eventData ). prefixlen < self. opts [ ""maxsubnet"" ] : self. sf. debug ( ""Network size bigger",False,not self.opts['subnetlookup'],not self.opts['netsubnet'],0.6654548645019531 610,"def get_state ( self ) : """"""See class definition."""""" obs = { } max_speed = self. k. scenario. max_speed ( ) max_length = self. k. scenario. length ( ) for rl_id in self. k. vehicle. get_rl_ids ( ) : this_speed = self. k. vehicle. get_speed ( rl_id ) lead_id = self. k. vehicle. get_leader ( rl_id ) follower = self. k. vehicle. 
get_follower ( rl_id ) if lead_id in [ """", None ] : lead_speed = max_speed lead_head = max_length else : lead_speed = self. k. vehicle. get_speed ( lead_id ) lead_head = self. k. vehicle. get_headway ( lead_id ) if : follow_speed = 0 follow_head = max_length else : follow_speed = self. k. vehicle. get_speed ( follower ) follow_head = self. k. vehicle. get_headway ( follower ) observation = np. array ( [ this_speed / max_speed, ( lead_speed - this_speed ) / max_speed, lead_head / max_length, ( this_speed - follow_speed ) / max_speed, ",True,"follower in ['', None]","follower in ['', None]",0.6593718528747559 611,"def _real_extract ( self, url ) : dj_id = self. _match_id ( url ) name = None desc = None entries = [ ] for offset in compat_itertools_count ( start = 0, step = self. _PAGE_SIZE ) : info = self. query_api ( ""dj/program/byradio?asc=false&limit=%d&radioId=%s&offset=%d"" % ( self. _PAGE_SIZE, dj_id, offset ), dj_id, ""Downloading dj programs - %d"" % offset, ) entries. extend ( [ self. url_result ( ""http://music.163.com/#/program?id=%s"" % program [ ""id"" ], ""NetEaseMusicProgram"", program [ ""id"" ], ) for program in info [ ""programs"" ] ] ) if : radio = info [ ""programs"" ] [ 0 ] [ ""radio"" ] name = radio [ ""name"" ] desc = radio [ ""desc"" ] if not info [ ""more"" ] : break return self. playlist_result ( entries, dj_id, name, desc )",False,name is None,len(info) > 0,0.6688082218170166 612,"def _test_configuration ( self ) : config_path = self. _write_config ( ) try : self. _log. debug ( ""testing configuration"" ) verboseflag = ""-Q"" if self. _log. isEnabledFor ( logging. DEBUG ) : verboseflag = ""-v"" p = subprocess. Popen ( [ self. PATH_SLAPTEST, verboseflag, ""-f"", config_path ] ) if : raise RuntimeError ( ""configuration test failed"" ) self. _log. debug ( ""configuration seems ok"" ) finally : os. remove ( config_path )",False,p.wait() != 0,p.returncode != 0,0.660539984703064 613,"def __new__ ( cls, key, secret = None, api_version = DEFAULT_API_VERSION, ** kwargs ) : if cls is OpenNebulaNodeDriver : if : cls = OpenNebula_1_4_NodeDriver elif api_version in [ ""2.0"", ""2.2"" ] : cls = OpenNebula_2_0_NodeDriver elif api_version in [ ""3.0"" ] : cls = OpenNebula_3_0_NodeDriver elif api_version in [ ""3.2"" ] : cls = OpenNebula_3_2_NodeDriver elif api_version in [ ""3.6"" ] : cls = OpenNebula_3_6_NodeDriver elif api_version in [ ""3.8"" ] : cls = OpenNebula_3_8_NodeDriver if ""plain_auth"" not in kwargs : kwargs [ ""plain_auth"" ] = cls. plain_auth else : cls. plain_auth = kwargs [ ""plain_auth"" ] else : raise NotImplementedError ( ""No OpenNebulaNodeDriver found for API version %s"" % ( api_version ) ) return super ( OpenNebulaNodeDriver, cls ). __new__ ( cls )",False,api_version in ['1.4'],api_version == DEFAULT_API_VERSION,0.6551579236984253 614,"def wait_for_image_to_analyze ( image_id, api_conf : callable ) : status = ""analyzing"" start_time_sec = time. time ( ) while status!= ""analyzed"" and time. time ( ) - start_time_sec < WAIT_TIMEOUT_SEC : resp = http_get ( [ ""images"", ""by_id"", image_id ], config = api_conf ) status = resp. body [ 0 ]. get ( ""analysis_status"", None ) if : _logger. info ( ""Waiting for Image Analysis to complete. Elapsed Time={}sec"". format ( int ( time. time ( ) - start_time_sec ) ) ) time. sleep ( 5 ) if time. time ( ) - start_time_sec >= WAIT_TIMEOUT_SEC : raise TimeoutError ( ""Timed out waiting for Image to Analyze (timeout={}sec)"". format ( WAIT_TIMEOUT_SEC ) ) else : _logger. 
info ( ""Image Analysis Complete, wait time: {}sec"". format ( int ( time. time ( ) - start_time_sec ) ) )",False,status != 'analyzed',status != 'cancel',0.6683658957481384 615,"def provider_forms ( self ) -> list : providers = [ ] responses = register_ticket_outputs. send ( self. request. event ) for receiver, response in responses : provider = response ( self. request. event ) provider. form = ProviderForm ( obj = self. request. event, settingspref = ""ticketoutput_%s_"" % provider. identifier, data = ( self. request. POST if self. request. method == ""POST"" else None ), files = ( self. request. FILES if self. request. method == ""POST"" else None ), ) provider. form. fields = OrderedDict ( [ ( ""ticketoutput_%s_%s"" % ( provider. identifier, k ), v ) for k, v in provider. settings_form_fields. items ( ) ] ) provider. settings_content = provider. settings_content_render ( self. request ) provider. form. prepare_fields ( ) provider. evaluated_preview_allowed = True if : provider. evaluated_preview_allowed = False else : for k, v in provider. settings_form_fields. items ( ) : if v. required and not self. request. event. settings. get ( ""ticketoutput_%s_%s"" % ( provider. identifier, k ) ) : provider. evaluated_preview_allowed = False ",False,not provider.preview_allowed,self.request.method == 'POST',0.6531155705451965 616,"def books ( self ) : """"""The list of KoboBook objects in the library."""""" if len ( self. _books )!= 0 : return self. _books """"""Drm-ed kepub"""""" for row in self. __cursor. execute ( ""SELECT DISTINCT volumeid, Title, Attribution, Series FROM content_keys, content WHERE contentid = volumeid"" ) : self. _books. append ( KoboBook ( row [ 0 ], row [ 1 ], self. __bookfile ( row [ 0 ] ), ""kepub"", self. __cursor, author = row [ 2 ], series = row [ 3 ], ) ) self. _volumeID. append ( row [ 0 ] ) """"""Drm-free"""""" for f in os. listdir ( self. bookdir ) : if : row = self. __cursor. execute ( ""SELECT Title, Attribution, Series FROM content WHERE ContentID = '"" + f + ""'"" ). fetchone ( ) if row is not None : fTitle = row [ 0 ] self. _books. append ( KoboBook ( <",False,f not in self._volumeID,len(f) > 0,0.6642298698425293 617,"def end_object ( self, obj ) : fields = self. selected_fields if fields is not None : missing = set ( fields ). difference ( self. _current. keys ( ) ) if : _nothing = object ( ) for f in missing : fs = f. split ( ""__"" ) value = obj while fs : value = getattr ( value, fs. pop ( 0 ), _nothing ) if value is not _nothing : self. _current [ f ] = value return super ( ). end_object ( obj )",True,missing,missing,0.6811259984970093 618,"def main ( client ) : placement_service = client. GetService ( ""PlacementService"", version = ""v202008"" ) statement = ( ad_manager. StatementBuilder ( version = ""v202008"" ) . Where ( ""status = :status"" ) . WithBindVariable ( ""status"", ""ACTIVE"" ) ) while True : response = placement_service. getPlacementsByStatement ( statement. ToStatement ( ) ) if : for placement in response [ ""results"" ] : print ( 'Placement with ID ""%d"" and name ""%s"" was found.\n' % ( placement [ ""id"" ], placement [ ""name"" ] ) ) statement. offset += statement. limit else : break print ( ""\nNumber of results found: %s"" % response [ ""totalResultSetSize"" ] )",False,'results' in response and len(response['results']),response[0],0.6575170159339905 619,"def _LazyAddAttr_ ( self, attr ) : if self. _lazydata_ is None : return 0 res = 0 typeinfo, typecomp = self. _lazydata_ olerepr = self. 
_olerepr_ for i in ALL_INVOKE_TYPES : try : x, t = typecomp. Bind ( attr, i ) if : x, t = typecomp. Bind ( attr [ 3 : ], i ) if x == 1 : r = olerepr. _AddFunc_ ( typeinfo, t, 0 ) elif x == 2 : r = olerepr. _AddVar_ ( typeinfo, t, 0 ) else : r = None if not r is None : key, map = r [ 0 ], r [ 1 ] item = map [ key ] if map == olerepr. propMapPut : olerepr. _propMapPutCheck_ ( key, item ) elif map == olerepr. propMapGet : olerepr. _propMapGetCheck_ ( key, item ) res = 1 except : pass return res",False,"x == 0 and attr[:3] in ('Set', 'Get')",x == 0,0.6554028987884521 620,"def _convert ( self, value, context ) : if value is None : return None if self. is_allowed_model ( value ) : return value if not isinstance ( value, dict ) : if : instanceof_msg = ""one of: {}"". format ( "", "". join ( cls. __name__ for cls in self. model_classes ) ) else : instanceof_msg = self. model_classes [ 0 ]. __name__ raise ConversionError ( ""Please use a mapping for this field or "" ""an instance of {}"". format ( instanceof_msg ) ) model_class = self. find_model ( value ) return model_class ( value, context = context )",False,len(self.model_classes) > 1,self.model_classes,0.6559975147247314 621,"def _establish ( self ) : self. fsm. change ( FSM. ACTIVE ) if not self. proto : for action in self. _connect ( ) : if action in ACTION. ALL : yield action self. fsm. change ( FSM. CONNECT ) if self. neighbor. local_as : for sent_open in self. _send_open ( ) : if sent_open in ACTION. ALL : yield sent_open self. proto. negotiated. sent ( sent_open ) self. fsm. change ( FSM. OPENSENT ) for received_open in self. _read_open ( ) : if : yield received_open self. proto. negotiated. received ( received_open ) self. proto. connection. msg_size = self. proto. negotiated. msg_size if not self. neighbor. local_as : for sent_open in self. _send_open ( ) : if sent_open in ACTION. ALL : yield sent_open self. proto. negotiated. sent ( sent_open ) self. fsm. change ( FSM. OPENSENT ) self. proto. validate_open ( ) self. fsm. change ( FSM. OPENCONFIRM ) self. recv_timer = ReceiveTimer ( self. proto. connection. session, self. proto. negotiated. holdtime, 4, 0 ) for action in self. _send_ka ( ) : yield action for action in self. _read_ka ( ) : yield action self. fsm. change ( FSM",True,received_open in ACTION.ALL,received_open in ACTION.ALL,0.6625270843505859 622,"def aggregate ( cls, dataset, dimensions, function, ** kwargs ) : data = dataset. data cols = [ d. name for d in dataset. kdims if d in dimensions ] vdims = dataset. dimensions ( ""value"", label = ""name"" ) dtypes = data. dtypes numeric = [ c for c, dtype in zip ( dtypes. index, dtypes. values ) if dtype. kind in ""iufc"" and c in vdims ] reindexed = data [ cols + numeric ] inbuilts = { ""amin"" : ""min"", ""amax"" : ""max"", ""mean"" : ""mean"", ""std"" : ""std"", ""sum"" : ""sum"", ""var"" : ""var"", } if len ( dimensions ) : groups = reindexed. groupby ( cols ) if function. __name__ in inbuilts : agg = getattr ( groups, inbuilts [ function. __name__ ] ) ( ) else : agg = groups. apply ( function ) df = agg. reset_index ( ) else : if function. __name__ in inbuilts : agg = getattr ( reindexed, inbuilts [ function. __name__ ] ) ( ) else : raise NotImplementedError df = pd. DataFrame ( agg. compute ( ) ). T dropped = [ ] for vd in vdims : if : dropped. append ( vd ) return df, dropped",False,vd not in df.columns,vd not in df,0.6690123081207275 623,"def checkbox_callback ( checked_value ) : global search_box_area, phrases_area group_info_box. 
text = """" if 0 in checked_value : annotation_layout. children = [ annotation_input, annotate_button, annotation_output, ] else : annotation_layout. children = [ ] annotation_output. text = """" if 1 in checked_value : if : working_label. text = fetching_text get_vocab ( ) if not phrases_list. options : working_label. text = working_text phrases_list. options = list ( cut_vocab_dict. keys ( ) ) [ 0 : max_visible_phrases ] phrases_area. children = [ search_input_box, search_working_label, phrases_list ] working_label. text = """" else : phrases_area. children = [ ] group_info_box. text = """"",False,vocab is None,cut_vocab_dict.keys(),0.6771036386489868 624,"def get_tokens_unprocessed ( self, text ) : bashlexer = BashLexer ( ** self. options ) pos = 0 curcode = """" insertions = [ ] for match in line_re. finditer ( text ) : line = match. group ( ) m = re. match ( r""^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)"" r""?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)"", line, ) if m : if not insertions : pos = match. start ( ) insertions. append ( ( len ( curcode ), [ ( 0, Generic. Prompt, m. group ( 1 ) ) ] ) ) curcode += m. group ( 2 ) elif line. startswith ( "">"" ) : insertions. append ( ( len ( curcode ), [ ( 0, Generic. Prompt, line [ : 1 ] ) ] ) ) curcode += line [ 1 : ] else : if : toks = bashlexer. get_tokens_unprocessed ( curcode ) for i, t, v in do_insertions ( insertions, toks ) : yield pos + i, t, v yield match. start ( ), Generic. Output, line insertions = [ ]",False,insertions,self.options.options.get('no_token'),0.691433310508728 625,"def display_list_by_prefix ( names_list, starting_spaces = 0 ) : """"""Creates a help string for names_list grouped by prefix."""""" cur_prefix, result_lines = None, [ ] space = "" "" * starting_spaces for name in sorted ( names_list ) : split = name. split ( ""_"", 1 ) prefix = split [ 0 ] if : result_lines. append ( space + prefix + "":"" ) cur_prefix = prefix result_lines. append ( space + "" * "" + name ) return ""\n"". join ( result_lines )",True,cur_prefix != prefix,cur_prefix != prefix,0.6608850955963135 626,"def interact ( show_tokens = False ) : try : import readline except ImportError : pass sys. modules [ ""__main__"" ] = global_env while True : buffer = """" continuation_flag = False tokens = [ ] while True : try : if : s = input ( ""... "" ) if s == ""\n"" : continue buffer = buffer + ""\n"" + s else : s = input ( "">>> "" ) if s == ""\n"" : continue buffer = s except EOFError : print ( ) sys. exit ( ) try : lexer = lex ( buffer, repl_mode = True, debug = show_tokens ) for last in lexer : tokens. append ( last ) if len ( tokens ) == 0 : buffer = """" continue if ( <",False,continuation_flag,show_tokens,0.6654371023178101 627,"def incr_and_sum ( self, key, keys, amount, maximum, ttl ) : ttl = int ( ttl / 1000 ) with self. pool. reserve ( block = True ) as client : client. add ( key, 0, time = ttl ) while True : value, cid = client. gets ( key ) if cid is None : return False value += amount if value > maximum : return False key_list = keys ( ) if callable ( keys ) else keys mapping = client. get_multi ( key_list ) total = amount + sum ( mapping. values ( ) ) if : return False try : swapped = client. cas ( key, value, cid, ttl ) if swapped : return True except NotFound : continue",True,total > maximum,total > maximum,0.684869647026062 628,"def _validate_cfg ( self ) : if not isinstance ( self. 
paramwise_cfg, dict ) : raise TypeError ( ""paramwise_cfg should be None or a dict, "" f""but got {type(self.paramwise_cfg)}"" ) if ""custom_keys"" in self. paramwise_cfg : if not isinstance ( self. paramwise_cfg [ ""custom_keys"" ], dict ) : raise TypeError ( ""If specified, custom_keys must be a dict, "" f'but got {type(self.paramwise_cfg[""custom_keys""])}' ) if self. base_wd is None : for key in self. paramwise_cfg [ ""custom_keys"" ] : if : raise ValueError ( ""base_wd should not be None"" ) if ( ""bias_decay_mult"" in self. paramwise_cfg or ""norm_decay_mult"" in self. paramwise_cfg or ""dwconv_decay_mult"" in self. paramwise_cfg ) : if self. base_wd is None : raise ValueError ( ""base_wd should not be None"" )",False,'decay_mult' in self.paramwise_cfg['custom_keys'][key],self.base_wd is None,0.6486297845840454 629,"def tile ( cls, op : ""DataFrameToSQLTable"" ) : inp = op. inputs [ 0 ] out = op. outputs [ 0 ] if inp. ndim == 2 : inp = inp. rechunk ( { 1 : ( inp. shape [ 1 ], ) } ). _inplace_tile ( ) chunks = [ ] for c in inp. chunks : new_op = op. copy ( ). reset_key ( ) new_op. _if_exists = ""append"" index_value = parse_index ( c. index_value. to_pandas ( ) [ : 0 ], c ) if : columns_value = parse_index ( c. columns_value. to_pandas ( ) [ : 0 ], store_data = True ) chunks. append ( new_op. new_chunk ( [ c ], shape = ( 0, 0 ), index = c. index, dtypes = out. dtypes, index_value = index_value, columns_value = columns_value, ) ) else : chunks. append ( new_op. new_chunk ( [ c ], shape = ( 0, ), ",False,c.ndim == 2,new_op._if_exists,0.6627962589263916 630,"def tokenize ( self, s ) : """"""Tokenize comments, strings, identifiers, whitespace and operators."""""" i, result = 0, [ ] while i < len ( s ) : j = i ch = s [ i ] if ch in ""@\n"" : j += 1 elif ch == ""#"" : j = g. skip_to_end_of_line ( s, i ) elif ch in "" \t"" : j = g. skip_ws ( s, i ) elif : j = g. skip_c_id ( s, i ) elif g. match ( s, i, ""//"" ) : j = g. skip_line ( s, i ) elif g. match ( s, i, ""/*"" ) : j = self. skip_block_comment ( s, i ) elif ch in ""'\"""" : j = g. skip_string ( s, i ) else : j += 1 assert j > i result. append ( """". join ( s [ i : j ] ) ) i = j return result",False,ch.isalpha() or ch == '_',"g.match(s, i)",0.6549491882324219 631,"def _check_init_script ( path, sentinel ) : if not os. path. exists ( path ) : return lines = open ( path ). readlines ( ) for i, line in enumerate ( lines ) : if : cli. out ( ""Guild completion is already installed in %s on line %i:\n %s"" % ( util. format_dir ( path ), i + 1, line. rstrip ( ) ), err = True, ) raise SystemExit ( 0 )",False,sentinel in line,sentinel,0.6660369634628296 632,"def postprocess_slice ( slicename, skipped ) : pngsliceFName = slicename + "".png"" hotsliceFName = slicename + "".hotspot.png"" for i, size in enumerate ( sizes ) : subdir = ""bitmaps/{}x{}"". format ( size, size ) relslice = ""{}/{}"". format ( subdir, pngsliceFName ) csize = get_csize ( i, size ) if relslice not in skipped : new_base = cropalign ( csize, relslice ) if : hotrelslice = ""{}/{}"". format ( subdir, hotsliceFName ) cropalign_hotspot ( new_base, csize, hotrelslice ) for scale in scale_pairs : subdir = ""bitmaps/{}x{}_{}"". format ( size, size, scale [ 1 ] ) relslice = ""{}/{}"". format ( subdir, pngsliceFName ) if relslice not in skipped : new_base = cropalign ( csize, relslice ) if : hotrelslice = ""{}/{}"". 
format ( subdir, hotsliceFName ) cropalign_hotspot ( new_base, csize, hotrelslice )",False,options.hotspots,hotrelslice not in skipped,0.6548711657524109 633,def remove ( self ) : for collector in self. __collectors [ : ] : if : collector. remove ( ) else : collector. decouple_from_home_building ( ) collector. remove ( ) assert not [ c for c in self. __collectors ] super ( ). remove ( ) self. __collectors = None self. path_nodes = None,False,not collector.is_ship,self.home_building,0.6545703411102295 634,"def get_error_diagnostics ( self ) : diagnostics = [ ] class_name = self. __class__. __name__ if self. stdout is not None : with open ( self. stdout. name ) as fds : contents = fds. read ( ). strip ( ) if : diagnostics. append ( class_name + "" STDOUT:\n"" + contents ) if self. stderr is not None : with open ( self. stderr. name ) as fds : contents = fds. read ( ). strip ( ) if : diagnostics. append ( class_name + "" STDERR:\n"" + contents ) return diagnostics",True,contents,contents,0.6944578289985657 635,"def PyJs_anonymous_1469_ ( that, key, this, arguments, var = var ) : var = Scope ( { u""this"" : this, u""arguments"" : arguments, u""key"" : key, u""that"" : that }, var ) var. registers ( [ u""index"", u""that"", u""key"", u""entry"" ] ) var. put ( u""index"", var. get ( u""fastKey"" ) ( var. get ( u""key"" ) ) ) if PyJsStrictNeq ( var. get ( u""index"" ), Js ( u""F"" ) ) : return var. get ( u""that"" ). get ( u""_i"" ). get ( var. get ( u""index"" ) ) var. put ( u""entry"", var. get ( u""that"" ). get ( u""_f"" ) ) while var. get ( u""entry"" ) : try : if : return var. get ( u""entry"" ) finally : var. put ( u""entry"", var. get ( u""entry"" ). get ( u""n"" ) )",False,var.get(u'entry').get(u'k') == var.get(u'key'),var.get(u'entry'),0.649939775466919 636,"def _format ( node ) : if isinstance ( node, AST ) : fields = [ ( a, _format ( b ) ) for a, b in iter_fields ( node ) ] rv = ""%s(%s"" % ( node. __class__. __name__, "", "". join ( ( ""%s=%s"" % field for field in fields ) if annotate_fields else ( b for a, b in fields ) ), ) if : rv += fields and "", "" or "" "" rv += "", "". join ( ""%s=%s"" % ( a, _format ( getattr ( node, a ) ) ) for a in node. _attributes ) return rv + "")"" elif isinstance ( node, list ) : return ""[%s]"" % "", "". join ( _format ( x ) for x in node ) return repr ( node )",False,include_attributes and node._attributes,"hasattr(node, '_attributes')",0.6536455750465393 637,"def expandWithRefs ( self, s, varname ) : if not isinstance ( s, str ) : return VariableParse ( varname, self, s ) if varname and varname in self. expand_cache : return self. expand_cache [ varname ] varparse = VariableParse ( varname, self ) while s. find ( ""${"" )!= - 1 : olds = s try : s = __expand_var_regexp__. sub ( varparse. var_sub, s ) try : s = __expand_python_regexp__. sub ( varparse. python_sub, s ) except SyntaxError as e : if : raise if s == olds : break except ExpansionError : raise except bb. parse. SkipRecipe : raise except Exception as exc : raise ExpansionError ( varname, s, exc ) from exc varparse. value = s if varname : self. expand_cache [ varname ] = varparse return varparse",False,e.msg != 'EOL while scanning string literal',s == None,0.6579976081848145 638,"def check_network ( self ) -> NetworkStatus : try : loop = asyncio. get_event_loop ( ) async with aiohttp. ClientSession ( loop = loop, connector = aiohttp. TCPConnector ( verify_ssl = False ) ) as session : async with session. get ( self. log_server_url ) as resp : status_text = await resp. 
text ( ) if : raise Exception ( ""Log proxy server is down."" ) except asyncio. CancelledError : raise except Exception : return NetworkStatus. NOT_CONNECTED return NetworkStatus. CONNECTED",False,status_text != 'OK',status_text != 'DOWN',0.6585826873779297 639,"def main ( client, key_id ) : custom_targeting_service = client. GetService ( ""CustomTargetingService"", version = ""v202008"" ) statement = ( ad_manager. StatementBuilder ( version = ""v202008"" ) . Where ( ""customTargetingKeyId = :keyId"" ) . WithBindVariable ( ""keyId"", int ( key_id ) ) ) while True : response = custom_targeting_service. getCustomTargetingValuesByStatement ( statement. ToStatement ( ) ) if : updated_values = [ ] for value in response [ ""results"" ] : if not value [ ""displayName"" ] : value [ ""displayName"" ] = value [ ""name"" ] value [ ""displayName"" ] += "" (Deprecated)"" updated_values. append ( value ) values = custom_targeting_service. updateCustomTargetingValues ( updated_values ) for value in values : print ( 'Custom targeting value with id ""%s"", name ""%s"", and display' 'name ""%s"" was updated.' % ( value [ ""id"" ], value [ ""name"" ], value [ ""displayName"" ] ) for sn, app in cherrypy. tree. apps. items ( ) : if : continue if not app. config : continue for key in app. config. keys ( ) : if key. startswith ( ""["" ) or key. endswith ( ""]"" ) : warnings. warn ( ""The application mounted at %r has config "" ""section names with extraneous brackets: %r. "" ""Config *files* need brackets; config *dicts* "" ""(e.g. passed to tree.mount) do not."" % ( sn, key ) )",False,"not isinstance(app, cherrypy.Application)",not app,0.6531991958618164 641,"def printErrors ( self ) : if self. errors or self. failures : if : self. stream. writeln ( ) self. printErrorList ( ""ERROR"", self. errors ) self. printErrorList ( ""FAIL"", self. failures )",False,self.dots or self.showAll,self.stream and self.stream.isatty(),0.65559983253479 642,"def _check_connectivity ( self ) -> None : """"""Check system connectivity."""""" value = self. _cache. get ( ""connectivity"", 0 ) if value >= 600 : pass elif ( self. sys_supervisor. connectivity and self. sys_host. network. connectivity is None ) or ( self. sys_supervisor. connectivity and self. sys_host. network. connectivity is not None and self. sys_host. network. connectivity ) : self. _cache [ ""connectivity"" ] = value + RUN_CHECK_CONNECTIVITY return try : await self. sys_supervisor. check_connectivity ( ) if : await self. sys_host. network. check_connectivity ( ) finally : self. _cache [ ""connectivity"" ] = 0",False,HostFeature.NETWORK in self.sys_host.features,value >= 600,0.6530295610427856 643,"def set_active_tools ( tools_to_activate, permanently_activate, system ) : tools_to_activate = process_tool_list ( tools_to_activate, log_errors = True ) if tools_to_activate : tools = [ x for x in tools_to_activate if not x. is_sdk ] print ( ""Setting the following tools as active:\n "" + ""\n "". 
join ( map ( lambda x : str ( x ), tools ) ) ) print ( """" ) generate_dot_emscripten ( tools_to_activate ) if WINDOWS : env_vars_to_add = get_env_vars_to_add ( tools_to_activate, system, user = permanently_activate ) env_string = construct_env_with_vars ( env_vars_to_add ) write_set_env_script ( env_string ) if : win_set_environment_variables ( env_vars_to_add, system, user = permanently_activate ) return tools_to_activate",False,permanently_activate,WINDOWS,0.659022867679596 644,"def _getnameinfo ( sockaddr, flags = 0 ) : host = sockaddr [ 0 ] port = sockaddr [ 1 ] if len ( sockaddr ) == 4 : scope = sockaddr [ 3 ] family = socket. AF_INET6 else : scope = None family = socket. AF_INET tuples = _getaddrinfo ( host, port, family, socket. SOCK_STREAM, socket. SOL_TCP, 0 ) if len ( tuples ) > 1 : raise socket. error ( ""sockaddr resolved to multiple addresses"" ) addr = tuples [ 0 ] [ 4 ] [ 0 ] if flags & socket. NI_DGRAM : pname = ""udp"" else : pname = ""tcp"" qname = dns. reversename. from_address ( addr ) if flags & socket. NI_NUMERICHOST == 0 : try : answer = _resolver. resolve ( qname, ""PTR"" ) hostname = answer. rrset [ 0 ]. target. to_text ( True ) except ( dns. resolver. NXDOMAIN, dns. resolver. NoAnswer ) : if : raise socket. gaierror ( socket. EAI_NONAME, ""Name or service not known"" ) hostname = addr if scope is not None : hostname += ""%"" + str ( scope ) else : hostname = addr if scope is not None : hostname += ""%"" + str ( scope ) if flags & socket. NI_NUMERICSERV : service = str ( port ) else : service = socket",False,flags & socket.NI_NAMEREQD,not addr,0.651248574256897 645,"def parse_many ( self, values ) : for value in values : try : yield self. parse ( value ) except KeyError : if : yield None else : raise",False,self._ignore_missing_keys,self.has_empty_tab(),0.6538208723068237 646,"def __new__ ( meta, cls_name, bases, cls_dict ) : func = cls_dict. get ( ""func"" ) monad_cls = super ( FuncMonadMeta, meta ). __new__ ( meta, cls_name, bases, cls_dict ) if func : if : functions = func else : functions = ( func, ) for func in functions : registered_functions [ func ] = monad_cls return monad_cls",False,type(func) is tuple,"hasattr(func, '__call__')",0.6527308225631714 647,"def mergeHiLo ( self, x_stats ) : """"""Merge the highs and lows of another accumulator into myself."""""" if x_stats. min is not None : if self. min is None or x_stats. min < self. min : self. min = x_stats. min self. mintime = x_stats. mintime if x_stats. max is not None : if : self. max = x_stats. max self. maxtime = x_stats. maxtime if x_stats. lasttime is not None : if self. lasttime is None or x_stats. lasttime >= self. lasttime : self. lasttime = x_stats. lasttime self. last = x_stats. last",False,self.max is None or x_stats.max > self.max,self.max is None or x_stats.maxtime is not None,0.653265118598938 648,"def get_attribute_value ( self, nodeid, attr ) : with self. _lock : self. logger. debug ( ""get attr val: %s %s"", nodeid, attr ) if nodeid not in self. _nodes : dv = ua. DataValue ( ) dv. StatusCode = ua. StatusCode ( ua. StatusCodes. BadNodeIdUnknown ) return dv node = self. _nodes [ nodeid ] if : dv = ua. DataValue ( ) dv. StatusCode = ua. StatusCode ( ua. StatusCodes. BadAttributeIdInvalid ) return dv attval = node. attributes [ attr ] if attval. value_callback : return attval. value_callback ( ) return attval. value",True,attr not in node.attributes,attr not in node.attributes,0.656838059425354 649,"def _eval ( self, code, ns, pos ) : try : try : value = eval ( code, self. 
default_namespace, ns ) except SyntaxError as e : raise SyntaxError ( ""invalid syntax in expression: %s"" % code ) return value except : exc_info = sys. exc_info ( ) e = exc_info [ 1 ] if : arg0 = e. args [ 0 ] else : arg0 = coerce_text ( e ) e. args = ( self. _add_line_info ( arg0, pos ), ) if PY3 : raise ( e ) else : raise ( exc_info [ 1 ], e, exc_info [ 2 ] )",False,"getattr(e, 'args', None)",e.args and len(e.args) > 0,0.6512769460678101 650,"def _build_initiator_target_map ( self, target_wwns, connector ) : """"""Build the target_wwns and the initiator target map."""""" init_targ_map = { } if self. _lookup_service : dev_map = self. _lookup_service. get_device_mapping_from_network ( connector [ ""wwpns"" ], target_wwns ) for fabric_name in dev_map : fabric = dev_map [ fabric_name ] for initiator in fabric [ ""initiator_port_wwn_list"" ] : if : init_targ_map [ initiator ] = [ ] init_targ_map [ initiator ] += fabric [ ""target_port_wwn_list"" ] init_targ_map [ initiator ] = list ( set ( init_targ_map [ initiator ] ) ) else : init_targ_map = dict. fromkeys ( connector [ ""wwpns"" ], target_wwns ) return init_targ_map",False,initiator not in init_targ_map,init_targ_map is None,0.6574866771697998 651,"def scan_options ( self ) : """"""Set all configuration-related ivars."""""" if not self. config_fn : return self. parser = parser = self. create_parser ( ) s = self. get_config_string ( ) self. init_parser ( s ) if self. files : files = self. files elif parser. has_section ( ""Global"" ) : files = parser. get ( ""Global"", ""files"" ) files = [ z. strip ( ) for z in files. split ( ""\n"" ) if z. strip ( ) ] else : return files2 = [ ] for z in files : files2. extend ( glob. glob ( self. finalize ( z ) ) ) self. files = [ z for z in files2 if z and os. path. exists ( z ) ] if ""output_directory"" in parser. options ( ""Global"" ) : s = parser. get ( ""Global"", ""output_directory"" ) output_dir = self. finalize ( s ) if : self. output_directory = output_dir if self. verbose : print ( ""output directory: %s\n"" % output_dir ) else : print ( ""output directory not found: %s\n"" % output_dir ) self. output_directory = None if ""prefix_lines"" in parser. options ( ""Global"" ) : prefix = parser. get ( ""Global"", ""prefix_lines"" ) self. prefix_lines = prefix. split ( ""\n"" )",False,os.path.exists(output_dir),output_dir is not None,0.647266149520874 652,"def parse_known_args ( self, args = None, namespace = None ) : if args is None : args = _sys. argv [ 1 : ] if namespace is None : namespace = Namespace ( ) for action in self. _actions : if action. dest is not SUPPRESS : if : if action. default is not SUPPRESS : setattr ( namespace, action. dest, action. default ) for dest in self. _defaults : if not hasattr ( namespace, dest ) : setattr ( namespace, dest, self. _defaults [ dest ] ) try : namespace, args = self. _parse_known_args ( args, namespace ) if hasattr ( namespace, _UNRECOGNIZED_ARGS_ATTR ) : args. extend ( getattr ( namespace, _UNRECOGNIZED_ARGS_ATTR ) ) delattr ( namespace, _UNRECOGNIZED_ARGS_ATTR ) return namespace, args except ArgumentError : err = _sys. exc_info ( ) [ 1 ] self. error ( str ( err ) )",False,"not hasattr(namespace, action.dest)",action.default is None,0.6544973254203796 653,"def test_canonicalise ( self ) : from quodlibet. util. path import normalize_path as norm f, path = tempfile. mkstemp ( ) path = os. path. realpath ( path ) os. close ( f ) path = norm ( path ) link_dir = mkdtemp ( ) link = None if not is_win : link = os. path. join ( link_dir, str ( uuid. 
uuid4 ( ) ) ) os. symlink ( path, link ) try : self. failUnlessEqual ( norm ( path, canonicalise = True ), path ) self. failUnlessEqual ( norm ( os. path. join ( path, ""foo"", "".."" ), True ), path ) if : self. failUnlessEqual ( norm ( link, True ), path ) self. failIfEqual ( norm ( link, False ), path ) unnormalised_path = os. path. join ( link, ""foo"", "".."" ) self. failUnlessEqual ( norm ( unnormalised_path, True ), path ) finally : if : os. remove ( link ) os. remove ( path ) os. rmdir ( link_dir )",False,link,norm,0.6871761679649353 654,"def testLimit ( self ) : ""Verify that CPU limits are within a 2% tolerance of limit for each scheduler"" p = pexpect. spawn ( ""python -m mininet.examples.limit"" ) opts = [ ""\*\*\* Testing network ([\d\.]+) Mbps"", ""\*\*\* Results: \[([\d\., ]+)\]"", pexpect. EOF, ] count = 0 bw = 0 tolerance = 2 while True : index = p. expect ( opts ) if : bw = float ( p. match. group ( 1 ) ) count += 1 elif index == 1 : results = p. match. group ( 1 ) for x in results. split ( "","" ) : result = float ( x ) self. assertTrue ( result < bw + tolerance ) self. assertTrue ( result > bw - tolerance ) else : break self. assertTrue ( count > 0 )",True,index == 0,index == 0,0.6702772974967957 655,"def _real_extract ( self, url ) : course_name = self. _match_id ( url ) webpage = self. _download_webpage ( url, course_name ) props = self. _parse_json ( self. _search_regex ( r""data\s*=\s*({.+?})\s*;"", webpage, ""data"" ), course_name ) [ ""initialProps"" ] entries = [ ] for chapter_num, chapter in enumerate ( props [ ""concepts"" ], 1 ) : if not isinstance ( chapter, dict ) : continue materials = chapter. get ( ""materials"" ) if : continue chapter_title = chapter. get ( ""title"" ) chapter_id = str_or_none ( chapter. get ( ""id"" ) ) for material in materials : if not isinstance ( material, dict ) : continue if material. get ( ""material_type"" )!= ""video"" : continue video_url = urljoin ( url, material. get ( ""url"" ) ) if not video_url : continue entries. append ( { ""_type"" : ""url_transparent"", ""url"" : video_url, ""title"" : str_or_none ( material. get ( ""name"" ) ), ""id"" : str_or_",False,"not materials or not isinstance(materials, list)",not materials,0.6594485640525818 656,"def perform_search ( self, dir, s = None, start = None, update_search_start = False ) : self. cancel_highlight ( ) if : s = self. last_search_string if : self. ui. message ( ""No previous search term."" ) return False else : self. last_search_string = s if start is None : start = self. search_start case_insensitive = s. lower ( ) == s if start > len ( self. ui. source ) : start = 0 i = ( start + dir ) % len ( self. ui. source ) if i >= len ( self. ui. source ) : i = 0 while i!= start : sline = self. ui. source [ i ]. text if case_insensitive : sline = sline. lower ( ) if s in sline : sl = self. ui. source [ i ] sl. set_highlight ( True ) self. highlight_line = sl self. ui. source. set_focus ( i ) if update_search_start : self. search_start = i return True i = ( i + dir ) % len ( self. ui. source ) return False",True,s is None,s is None,0.6601827144622803 657,"def acquire ( cls, node, floating = None ) : if isinstance ( node, Gaffer. ScriptNode ) : script = node else : script = node. scriptNode ( ) scriptWindow = GafferUI. ScriptWindow. acquire ( script ) if floating in ( None, False ) : for editor in scriptWindow. getLayout ( ). editors ( type = cls ) : if node. isSame ( editor. _lastAddedNode ( ) ) : editor. 
reveal ( ) return editor if floating in ( None, True ) : childWindows = scriptWindow. childWindows ( ) for window in childWindows : if : if ( isinstance ( window. getChild ( ), cls ) and node in window. getChild ( ). getNodeSet ( ) ) : window. setVisible ( True ) return window. getChild ( ) editor = cls ( script ) editor. setNodeSet ( Gaffer. StandardSet ( [ node ] ) ) if floating is False : scriptWindow. getLayout ( ). addEditor ( editor ) else : window = _EditorWindow ( scriptWindow, editor ) scriptWindow. menuBar ( ). addShortcutTarget ( window ) window. setVisible ( True ) if isinstance ( editor, GafferUI. NodeEditor ) : var = Scope ( { u""this"" : this, u""arguments"" : arguments, u""updateScopeInfo"" : PyJs_updateScopeInfo_823_, }, var, ) var. registers ( [ u""letRefs"", u""binding"", u""key"", u""parentScope"", u""scope"", u""ref"" ] ) var. put ( u""scope"", var. get ( u""this"" ). get ( u""scope"" ) ) var. put ( u""parentScope"", var. get ( u""scope"" ). callprop ( u""getFunctionParent"" ) ) var. put ( u""letRefs"", var. get ( u""this"" ). get ( u""letReferences"" ) ) for PyJsTemp in var. get ( u""letRefs"" ) : var. put ( u""key"", PyJsTemp ) var. put ( u""ref"", var. get ( u""letRefs"" ). get ( var. get ( u""key"" ) ) ) var. put ( u""binding"", var. get ( u""scope"" ). callprop ( u""getBinding"", var. get ( u""ref"" ). get ( u""name"" ) ), ) if : continue if PyJsStrictEq ( var. get ( u""binding"" ). get ( u""kind"" ), Js ( u""let"" ) ) or PyJsStrictEq ( var. get ( u""binding"" ). get ( u""kind"" ), Js ( u""const"" ) )",False,var.get(u'binding').neg(),"hasattr(var, '__getitem__')",0.6478897929191589 659,"def validate_cpu ( self, value ) : for k, v in value. viewitems ( ) : if v is None : continue if : raise serializers. ValidationError ( ""Process types can only contain [a-z]"" ) shares = re. match ( CPUSHARE_MATCH, str ( v ) ) if not shares : raise serializers. ValidationError ( ""CPU shares must be an integer"" ) for v in shares. groupdict ( ). viewvalues ( ) : try : i = int ( v ) except ValueError : raise serializers. ValidationError ( ""CPU shares must be an integer"" ) if i > 1024 or i < 0 : raise serializers. ValidationError ( ""CPU shares must be between 0 and 1024"" ) return value",False,"not re.match(PROCTYPE_MATCH, k)","k in ['cpu_types', 'a-z']",0.6512911319732666 660,"def tables_size ( results ) : print ( ""\nSIZE RESULTS\n"" ) sizes_per_datatype = { } for ser in results : for datatype in results [ ser ] [ ""sizes"" ] : size = results [ ser ] [ ""sizes"" ] [ datatype ] if datatype not in sizes_per_datatype : sizes_per_datatype [ datatype ] = [ ] sizes_per_datatype [ datatype ]. append ( ( size, ser ) ) sizes_per_datatype = { datatype : sorted ( sizes ) for datatype, sizes in sizes_per_datatype. items ( ) } for dt in sorted ( sizes_per_datatype ) : print ( dt ) for pos, ( size, serializer ) in enumerate ( sizes_per_datatype [ dt ] ) : if : size = ""unsupported"" else : size = ""%8d"" % size print ( "" %2d: %-8s %s"" % ( pos + 1, serializer, size ) ) print ( )",False,size == no_result,pos == 0,0.6601654887199402 661,"def _get_sources ( self ) : server_links = { ""mp4upload"" : ""https://www.mp4upload.com/embed-{}.html"", ""trollvid"" : ""https://trollvid.net/embed/{}"", } resp = helpers. soupify ( helpers. get ( self. url ). text ). find_all ( ""script"" ) for i in resp : if i. string : if : res = i. string hosts = json. loads ( re. search ( r""(\[[^)]+\])"", res ). group ( 1 ) ) logger. debug ( ""Hosts: {}"". 
format ( hosts ) ) sources_list = [ ] for i in hosts : for j in server_links : if i. get ( ""host"" ) in j and i. get ( ""source"" ) : sources_list. append ( { ""extractor"" : j, ""url"" : server_links [ j ]. format ( i [ ""source"" ] ), ""server"" : j, ""version"" : i [ ""source"" ], } ) return self. sort_sources ( sources_list )",False,'sources' in i.string,i.string[1] not in' '',0.6623613238334656 662,"def get_command ( cls ) : ifconfig_cmd = ""ifconfig"" for path in [ ""/sbin"", ""/usr/sbin"", ""/bin"", ""/usr/bin"" ] : if os. path. exists ( os. path. join ( path, ifconfig_cmd ) ) : : break ifconfig_cmd = ifconfig_cmd + "" -a"" return ifconfig_cmd",False,"ifconfig_cmd = os.path.join(path, ifconfig_cmd)","os.path.exists(os.path.join(path, ifconfig_cmd))",0.6442980170249939 663,"def registerExtensions ( self, extensions, configs ) : if not configs : configs = { } for ext in extensions : extension_module_name = ""mdx_"" + ext try : module = __import__ ( extension_module_name ) except : message ( CRITICAL, ""couldn't load extension %s (looking for %s module)"" % ( ext, extension_module_name ), ) else : if : configs_for_ext = configs [ ext ] else : configs_for_ext = [ ] extension = module. makeExtension ( configs_for_ext ) extension. extendMarkdown ( self, globals ( ) )",False,configs.has_key(ext),ext in configs,0.6512950658798218 664,"def eventloop ( self ) : poll = select. poll ( ) event_read_mask = self. errorevents | self. readevents poll. register ( self. serversock. fileno ( ) ) poll. register ( self. readpipe, event_read_mask ) breakout = False self. running = True self. logger. debug ( ""Starting thread event loop"" ) while not breakout : events = poll. poll ( ) for event in events : if event [ 1 ] & self. errorevents : raise Exception ( self. stringify_event ( event [ 1 ] ) ) if self. readpipe == event [ 0 ] : self. logger. debug ( ""Stop event received"" ) breakout = True break elif : self. logger. debug ( ""Connection request received"" ) self. readsock, _ = self. serversock. accept ( ) self. readsock. setblocking ( 0 ) poll. unregister ( self. serversock. fileno ( ) ) poll. register ( self. readsock. fileno ( ), event_read_mask ) self. logger. debug ( ""Setting connection established event"" ) self. connection_established. set ( ) elif self. readsock. fileno ( ) == event [ 0 ] : <",False,self.serversock.fileno() == event[0],self.connection_established == event[0],0.6566035151481628 665,"def _list_item_sub ( self, match ) : item = match. group ( 4 ) leading_line = match. group ( 1 ) if leading_line or ""\n\n"" in item or self. _last_li_endswith_two_eols : item = self. _run_block_gamut ( self. _outdent ( item ) ) else : item = self. _do_lists ( self. _outdent ( item ) ) if : item = item [ : - 1 ] item = self. _run_span_gamut ( item ) self. _last_li_endswith_two_eols = len ( match. group ( 5 ) ) == 2 if ""task_list"" in self. extras : item = self. _task_list_item_re. sub ( self. _task_list_item_sub, item ) return ""
<li>%s</li>\n"" % item",False,item.endswith('\n'),item.endswith(b''),0.6490521430969238 666,"def update ( self, x, who = None, metadata = None ) : self. _retain_refs ( metadata ) y = self. _get_key ( x ) if self. keep == ""last"" : self. _buffer. pop ( y, None ) self. _metadata_buffer. pop ( y, None ) self. _buffer [ y ] = x self. _metadata_buffer [ y ] = metadata else : if : self. _buffer [ y ] = x self. _metadata_buffer [ y ] = metadata return self. last",False,y not in self._buffer,self.keep == 'previous',0.6640939712524414
get_sessions_of_user ( upcoming_events = True ) im_config = DataGetter. get_image_configs ( ) im_size = """" for config in im_config : if : im_size = config. size past_events_sessions = DataGetter. get_sessions_of_user ( upcoming_events = False ) page_content = { ""tab_upcoming_events"" : ""Upcoming Sessions"", ""tab_past_events"" : ""Past Sessions"", ""title"" : ""My Session Proposals"", } if not AuthManager. is_verified_user ( ) : flash ( Markup ( ""Your account is unverified. "" ""Please verify by clicking on the confirmation link that has been emailed to you."" '
<br> Did not get the email? Please ' ""click here to resend the confirmation."" ) ) return render_template ( ""gentelella/users/mysessions/mysessions_list.html"", upcoming_events_sessions = upcoming_events_sessions, past_events_sessions = past_events_sessions, page_content = page_content, placeholder_images = placeholder_images, custom_",False,config.page == 'mysession',config.size,0.6551402807235718 672,"def get_engine ( user, engine = ""solr"", facet = None, source = ""data"", cluster = '""""' ) : if isinstance ( engine, dict ) : if source == ""data"" : source = engine. get ( ""source"" ) engine = engine. get ( ""engine"", ""solr"" ) if engine == ""report"" and facet : engine = facet [ ""properties"" ]. get ( ""engine"" ) if engine!= ""solr"" : if : from impala. dashboard_api import ImpalaDashboardApi return ImpalaDashboardApi ( user, engine, source = source, cluster = cluster ) elif engine == ""hive"" : from beeswax. dashboard_api import HiveDashboardApi return HiveDashboardApi ( user, engine, source = source, cluster = cluster ) else : from notebook. dashboard_api import SQLDashboardApi return SQLDashboardApi ( user, engine, source = source, cluster = cluster ) else : from search. dashboard_api import SearchApi return SearchApi ( user, cluster )",False,engine == 'impala',engine == 'Impala',0.658246636390686 673,"def printHexFormat ( data, addr, nocolor = False ) : for i in range ( ( int ( len ( data ) / 16 ) ) + 1 ) : part = data [ i * 16 : i * 16 + 16 ] bytes = cstr ( """" ) c = 0 for j in range ( 0, len ( part ), 2 ) : if j == len ( part ) - 1 : bytes += cstr ( ( ""%.2x "" % tuple ( part [ j : j + 1 ] ) ), Color. WHITE if c % 2 else Color. LIGHT_GRAY, ) else : bytes += cstr ( ( ""%.2x%.2x "" % tuple ( part [ j : j + 2 ] ) ), Color. WHITE if c % 2 else Color. LIGHT_GRAY, ) c += 1 string = """" if nocolor : if len ( bytes ) < 40 : bytes += "" "" * ( 40 - len ( bytes ) ) else : if len ( bytes ) < 227 : bytes += "" "" * ( ( 8 - int ( len ( bytes ) / 29 ) ) * 5 ) for b in part : if : string += ""."" else : <",False,b < 32 or b > 126,b in addr,0.6571352481842041 674,"def __iter__ ( self ) : for name, value in self. __class__. __dict__. items ( ) : if isinstance ( value, alias_flag_value ) : continue if : yield ( name, self. _has_flag ( value. flag ) )",False,"isinstance(value, flag_value)","isinstance(value, value._field_value)",0.648002564907074 675,"def _read_allele_freq_table ( f ) : line = f. readline ( ) while "" --"" not in line : if : raise StopIteration if ""No data"" in line : return None, None line = f. readline ( ) alleles = [ x for x in f. readline ( ). rstrip ( ). split ( "" "" ) if x!= """" ] alleles = [ _gp_int ( x ) for x in alleles ] line = f. readline ( ). rstrip ( ) table = [ ] while line!= """" : parts = [ x for x in line. split ( "" "" ) if x!= """" ] try : table. append ( ( parts [ 0 ], [ _gp_float ( x ) for x in parts [ 1 : - 1 ] ], _gp_int ( parts [ - 1 ] ) ) ) except ValueError : table. append ( ( parts [ 0 ], [ None ] * len ( alleles ), 0 ) ) line = f. readline ( ). rstrip ( ) return alleles, table",True,line == '',line == '',0.6783540844917297 676,"def check_require ( require_modules, require_lines ) : for require_module in require_modules : st = try_import ( require_module ) if st == 0 : continue elif st == 1 : print ( ""installed {}: {}\n"". format ( require_module, require_lines [ require_module ] ) ) elif : print ( ""failed installed {}: {}\n"". 
format ( require_module, require_lines [ require_module ] ) )",True,st == 2,st == 2,0.6821781396865845 677,"def prefixed ( self, prefix : _StrType ) -> typing. Iterator [ ""Env"" ] : """"""Context manager for parsing envvars with a common prefix."""""" try : old_prefix = self. _prefix if : self. _prefix = prefix else : self. _prefix = f""{old_prefix}{prefix}"" yield self finally : self. _prefix = None self. _prefix = old_prefix",True,old_prefix is None,old_prefix is None,0.6638089418411255 678,"def compute_up ( expr, data, ** kwargs ) : if isinstance ( expr. slice, _inttypes ) : idx = expr. slice + 1 if idx < 1 : msg = ""Index {} out-of-bounds for SQL string indexing."" raise IndexError ( msg. format ( expr. slice ) ) args = idx, 1 elif isinstance ( expr. slice, tuple ) : start, stop, step = expr. slice if step is not None : msg = ""step value {} not valid for SQL string indexing."" raise ValueError ( msg. format ( step ) ) norm_start = start if isinstance ( start, _inttypes ) else 0 if norm_start < 0 : msg = ""Negative indexing not valid for SQL strings; given {}."" raise ValueError ( msg. format ( norm_start ) ) if isinstance ( stop, _inttypes ) : if stop < 0 : msg = ""Negative indexing not valid for SQL strings; given {}."" raise ValueError ( msg. format ( stop ) ) args = norm_start + 1, ( stop - norm_start ) elif : args = ( norm_start + 1, ) return sa. sql. func. substring ( data, * args )",False,stop is None,norm_start > 0,0.6667633056640625 679,"def handle_read ( self, socket_ ) : try : data, ( addr, port ) = socket_. recvfrom ( _MAX_MSG_ABSOLUTE ) except Exception : self. log_exception_warning ( ) return log. debug ( ""Received from %r:%r: %r "", addr, port, data ) self. data = data msg = DNSIncoming ( data ) if not msg. valid : pass elif msg. is_query ( ) : if port == _MDNS_PORT : self. zc. handle_query ( msg, _MDNS_ADDR, _MDNS_PORT ) elif : self. zc. handle_query ( msg, addr, port ) self. zc. handle_query ( msg, _MDNS_ADDR, _MDNS_PORT ) else : self. zc. handle_response ( msg )",False,port == _DNS_PORT,addr == _MDNS_ADDR,0.6592108011245728 680,"def get_schema ( form_fields ) : attrs = { } for field in form_fields : if : field_type = marshmallow. fields. Str elif field. type == ""email"" : field_type = marshmallow. fields. Email elif field. type == ""number"" : field_type = marshmallow. fields. Float else : raise UnprocessableEntityError ( { ""pointer"" : ""/data/complex-field-values/"" + field. identifier }, ""Invalid Field Type: "" + field. type, ) attrs [ field. identifier ] = field_type ( required = field. is_required ) return type ( ""DynamicSchema"", ( marshmallow. Schema, ), attrs )",False,"field.type in ['text', 'checkbox', 'select']",field.type == 'str',0.6524022817611694 681,"def __init__ ( self, app ) : self. _credential = app. credential db_url = app. options. get ( ""databaseURL"" ) if db_url : self. _db_url = db_url else : self. _db_url = None auth_override = _DatabaseService. _get_auth_override ( app ) if auth_override not in ( self. _DEFAULT_AUTH_OVERRIDE, { } ) : self. _auth_override = json. dumps ( auth_override, separators = ( "","", "":"" ) ) else : self. _auth_override = None self. _timeout = app. options. get ( ""httpTimeout"", _http_client. DEFAULT_TIMEOUT_SECONDS ) self. _clients = { } emulator_host = os. environ. get ( _EMULATOR_HOST_ENV_VAR ) if emulator_host : if : raise ValueError ( 'Invalid {0}: ""{1}"". It must follow format ""host:port"".'. format ( _EMULATOR_HOST_ENV_VAR, emulator_host ) ) self. _emulator_host = emulator_host else : self. 
_emulator_host = None",False,'//' in emulator_host,not _IP_PORT_ADDRESS.match(emulator_host),0.6563094258308411 682,"def platformGetMaps ( self ) : maps = [ ] address = ctypes. c_ulong ( 0 ) mapsize = ctypes. c_ulong ( 0 ) name = ctypes. c_uint32 ( 0 ) count = ctypes. c_uint32 ( VM_REGION_BASIC_INFO_COUNT_64 ) info = vm_region_basic_info_64 ( ) while True : r = self. libc. mach_vm_region ( self. task, addrof ( address ), addrof ( mapsize ), VM_REGION_BASIC_INFO_64, addrof ( info ), addrof ( count ), addrof ( name ), ) if r == 1 : break if r!= 0 : self. libc. mach_error ( ""mach_vm_region"", r ) raise Exception ( ""vm_region Failed for 0x%.8x: 0x%.8x"" % ( address. value, r ) ) perms = 0 p = info. protection if p & VM_PROT_READ : perms |= e_mem. MM_READ if p & VM_PROT_WRITE : perms |= e_mem. MM_WRITE if p & VM_PROT_EXECUTE : perms |= e_mem. MM_EXEC if info. shared : perms |= e_mem. MM_SHARED ",False,perms,maps[0],0.6890894174575806 683,"def _set_qresult_hits ( qresult, hit_rows = ( ) ) : """"""Append Hits without alignments into QueryResults (PRIVATE)."""""" for hit_row in hit_rows : hit_id, remainder = hit_row. split ( "" "", 1 ) if : frag = HSPFragment ( hit_id, qresult. id ) hsp = HSP ( [ frag ] ) hit = Hit ( [ hsp ] ) qresult. append ( hit ) return qresult",False,hit_id not in qresult,remainder,0.671747624874115 684,"def process_ifconfig_nodes ( app : Sphinx, doctree : nodes. document, docname : str ) -> None : ns = { confval. name : confval. value for confval in app. config } ns. update ( app. config. __dict__. copy ( ) ) ns [ ""builder"" ] = app. builder. name for node in doctree. traverse ( ifconfig ) : try : res = eval ( node [ ""expr"" ], ns ) except Exception as err : from traceback import format_exception_only msg = """". join ( format_exception_only ( err. __class__, err ) ) newnode = doctree. reporter. error ( ""Exception occurred in "" ""ifconfig expression: \n%s"" % msg, base_node = node, ) node. replace_self ( newnode ) else : if : node. replace_self ( [ ] ) else : node. replace_self ( node. children )",False,not res,node.children is None,0.6831022500991821 685,"def explain ( self, other, depth = 0 ) : exp = super ( UnionType, self ). explain ( other, depth ) for ndx, subtype in enumerate ( self. params [ ""allowed_types"" ] ) : if : exp += ""\n{}and"". format ( """". join ( [ ""\t"" ] * depth ) ) exp += ""\n"" + subtype. explain ( other, depth = depth + 1 ) return exp",True,ndx > 0,ndx > 0,0.6678018569946289 686,"def convert_with_key ( self, key, value, replace = True ) : result = self. configurator. convert ( value ) if value is not result : if : self [ key ] = result if type ( result ) in ( ConvertingDict, ConvertingList, ConvertingTuple ) : result. parent = self result. key = key return result",True,replace,replace,0.6944632530212402 687,"def OnLeftUp ( self, event ) : btnpos = self. GetButtonsPos ( ) btnsize = self. GetButtonsSize ( ) if self. HasCapture ( ) : self. ReleaseMouse ( ) for btn in range ( 2 ) : if self. HitTest ( btnpos [ btn ], event. GetPosition ( ), btnsize [ btn ] ) : if btn == 0 : if : self. searchButtonPressed = False self. Refresh ( ) self. SetFocus ( ) wx. PostEvent ( self, SearchButton ( ) ) if btn == 1 : if self. cancelButtonPressed : self. cancelButtonPressed = False self. Refresh ( ) self. SetFocus ( ) wx. PostEvent ( self, CancelButton ( ) ) else : if btn == 0 : if : self. searchButtonPressed = False self. Refresh ( ) if btn == 1 : if self. cancelButtonPressed : self. cancelButtonPressed = False self. 
Refresh ( )",True,self.searchButtonPressed,self.searchButtonPressed,0.6666840314865112 688,"def get_boarding_status ( project ) : status = ""Pending"" if project : doc = frappe. get_doc ( ""Project"", project ) if : status = ""In Process"" elif flt ( doc. percent_complete ) == 100.0 : status = ""Completed"" return status",False,flt(doc.percent_complete) > 0.0 and flt(doc.percent_complete) < 100.0,doc.percent_complete >= 0.0,0.655147910118103 689,"def replace_all ( self, event = None ) : prog = self. engine. getprog ( ) if not prog : return repl = self. replvar. get ( ) text = self. text res = self. engine. search_text ( text, prog ) if not res : text. bell ( ) return text. tag_remove ( ""sel"", ""1.0"", ""end"" ) text. tag_remove ( ""hit"", ""1.0"", ""end"" ) line = res [ 0 ] col = res [ 1 ]. start ( ) if self. engine. iswrap ( ) : line = 1 col = 0 ok = 1 first = last = None text. undo_block_start ( ) while 1 : res = self. engine. search_forward ( text, prog, line, col, 0, ok ) if not res : break line, m = res chars = text. get ( ""%d.0"" % line, ""%d.0"" % ( line + 1 ) ) orig = m. group ( ) new = self. _replace_expand ( m, repl ) if : break i, j = m. span ( ) first = ""%d.%d"" % ( line, i ) last = ""%d.%d"" % ( line, j ) if new == orig : text. mark_set ( ""insert"", last ) else : text. mark_set ( ""insert"", first ) if first!= last : ",False,new is None,new == chars,0.6622529029846191 690,"def normalize_host ( host ) : """"""Normalize a host string."""""" if misc. IPv6_MATCHER. match ( host ) : percent = host. find ( ""%"" ) if : percent_25 = host. find ( ""%25"" ) if ( percent_25 == - 1 or percent < percent_25 or ( percent == percent_25 and percent_25 == len ( host ) - 4 ) ) : host = host. replace ( ""%"", ""%25"", 1 ) return host [ : percent ]. lower ( ) + host [ percent : ] return host. lower ( )",False,percent != -1,percent > -1,0.6667181849479675 691,"def get_indexes ( self, cursor, table_name ) : indexes = { } for info in self. _table_info ( cursor, table_name ) : if info [ ""pk"" ]!= 0 : indexes [ info [ ""name"" ] ] = { ""primary_key"" : True, ""unique"" : False } cursor. execute ( ""PRAGMA index_list(%s)"" % self. connection. ops. quote_name ( table_name ) ) for index, unique in [ ( field [ 1 ], field [ 2 ] ) for field in cursor. fetchall ( ) ] : cursor. execute ( ""PRAGMA index_info(%s)"" % self. connection. ops. quote_name ( index ) ) info = cursor. fetchall ( ) if : continue name = info [ 0 ] [ 2 ] indexes [ name ] = { ""primary_key"" : False, ""unique"" : unique } return indexes",False,len(info) != 1,len(info) < 3,0.6550526022911072 692,"def __init__ ( self, parent, name, description = None ) : FieldSet. __init__ ( self, parent, name, description ) self. _size = ( self [ ""size"" ]. value + 3 * 4 ) * 8 if MAX_CHUNK_SIZE < ( self. _size // 8 ) : raise ParserError ( ""PNG: Chunk is too big (%s)"" % humanFilesize ( self. _size // 8 ) ) tag = self [ ""tag"" ]. value self. desc_func = None self. value_func = None if tag in self. TAG_INFO : self. _name, self. parse_func, desc, value_func = self. TAG_INFO [ tag ] if value_func : self. value_func = value_func self. createValue = self. createValueFunc if : if isinstance ( desc, str ) : self. _description = desc else : self. desc_func = desc else : self. _description = """" self. parse_func = None",True,desc,desc,0.701418399810791 693,"def extract ( self, mp3 ) : if ""/frames/frame[0]"" in mp3 : frame = mp3 [ ""/frames/frame[0]"" ] self. nb_channel = ( frame. getNbChannel ( ), frame [ ""channel_mode"" ]. display ) self. 
format_version = u""MPEG version %s layer %s"" % ( frame [ ""version"" ]. display, frame [ ""layer"" ]. display, ) self. sample_rate = frame. getSampleRate ( ) self. bits_per_sample = 16 if mp3 [ ""frames"" ]. looksConstantBitRate ( ) : self. computeBitrate ( frame ) else : self. computeVariableBitrate ( mp3 ) if ""id3v1"" in mp3 : id3 = mp3 [ ""id3v1"" ] self. comment = id3 [ ""comment"" ]. value self. author = id3 [ ""author"" ]. value self. title = id3 [ ""song"" ]. value self. album = id3 [ ""album"" ]. value if : self. creation_date = id3 [ ""year"" ]. value if ""track_nb"" in id3 : self. track_number = id3 [ ""track_nb"" ]. value if ""id3v2"" in mp3 : self. readID3v2 ( mp3 [ ""id3v2"" ] ) if ""frames"" in mp3 : computeComprRate ( self, mp3 [ ""frames"" ]. size )",False,id3['year'].value != '0','year' in id3,0.6536756753921509 694,"def tool_lineages ( self, trans ) : rval = [ ] for id, tool in self. app. toolbox. tools ( ) : if : lineage_dict = tool. lineage. to_dict ( ) else : lineage_dict = None entry = dict ( id = id, lineage = lineage_dict ) rval. append ( entry ) return rval",True,"hasattr(tool, 'lineage')","hasattr(tool, 'lineage')",0.6558094620704651 695,"def _div ( self, op, isInvalid = None ) : oper = op. opers [ 0 ] divbase = self. getOperObj ( op, 0 ) if isInvalid is None : limit = ( 2 ** ( oper. tsize * 8 ) ) - 1 isInvalid = lambda val : val > limit if oper. tsize == 1 : ax = self. getRegObj ( e_i386. REG_AX ) quot = ax / divbase rem = ax % divbase if : raise envi. DivideError ( ""i386 #DE"" ) self. effSetVariable ( ""eax"", ( rem << 8 ) + quot ) elif oper. tsize == 2 : ax = self. getRegObj ( e_i386. REG_AX ) dx = self. getRegObj ( e_i386. REG_DX ) tot = ( edx << Const ( 16, self. _psize ) ) + eax quot = tot / divbase rem = tot % divbase if : raise envi. DivideError ( ""i386 #DE"" ) self. effSetVariable ( ""eax"", quot ) self. effSetVariable ( ""edx"", rem ) elif oper. tsize == 4 : eax = Var ( ""eax"", self. _psize ) edx = Var ( ""edx"", self. _psize ) tot = ( edx << Const ( 32, self. _psize ) ) + eax quot = tot / divbase rem = tot % divbase if : ",False,quot.isDiscrete() and isInvalid(quot),not isInvalid,0.649432897567749 696,"def batch_slice ( data, batch_size, sort = True ) : batch_num = int ( np. ceil ( len ( data ) / float ( batch_size ) ) ) for i in range ( batch_num ) : cur_batch_size = batch_size if i < batch_num - 1 else len ( data ) - batch_size * i src_sents = [ data [ i * batch_size + b ] [ 0 ] for b in range ( cur_batch_size ) ] tgt_sents = [ data [ i * batch_size + b ] [ 1 ] for b in range ( cur_batch_size ) ] if : src_ids = sorted ( range ( cur_batch_size ), key = lambda src_id : len ( src_sents [ src_id ] ), reverse = True, ) src_sents = [ src_sents [ src_id ] for src_id in src_ids ] tgt_sents = [ tgt_sents [ src_id ] for src_id in src_ids ] yield src_sents, tgt_sents",True,sort,sort,0.680050790309906 697,"def serialize_to_cmessage ( self ) : from... engines. light import SpOffset cmsg = self. _get_cmsg ( ) if self. memory_data is not None : if : cmsg. target_type = primitives_pb2. CodeReference. CodeTarget else : cmsg. target_type = primitives_pb2. CodeReference. DataTarget cmsg. location = primitives_pb2. CodeReference. Internal cmsg. data_ea = self. memory_data. addr elif self. dst is not None : if isinstance ( self. dst, SpOffset ) : cmsg. target_type = primitives_pb2. CodeReference. StackTarget cmsg. data_ea = self. dst. offset else : cmsg. data_ea = self. dst else : cmsg. data_ea = - 1 if self. insn_op_idx is None : cmsg. 
operand_idx = - 1 else : cmsg. operand_idx = self. insn_op_idx cmsg. ea = self. ins_addr cmsg. block_ea = self. block_addr cmsg. stmt_idx = self. stmt_idx cmsg. ref_type = self. type return cmsg",False,self.memory_data.sort == MemoryDataSort.CodeReference,"isinstance(self.memory_data.addr, addr.Array)",0.6545739769935608 698,"def _find_key_in_yaml_file ( yaml_file_path, search_keys, full_key_name, value_is_relative_path ) : """"""Find a key in a yaml file."""""" if not os. path. isfile ( yaml_file_path ) : return None result = _load_yaml_file ( yaml_file_path ) if not search_keys : return result for search_key in search_keys : if : raise errors. InvalidConfigKey ( full_key_name ) if search_key not in result : return None result = result [ search_key ] if value_is_relative_path : yaml_directory = os. path. dirname ( yaml_file_path ) if isinstance ( result, list ) : result = [ os. path. join ( yaml_directory, str ( i ) ) for i in result ] else : result = os. path. join ( yaml_directory, str ( result ) ) return result",False,"not isinstance(result, dict)",full_key_name in result,0.6480915546417236 699,"def call ( self, inputs, state ) : """""" """""" ( c_prev, m_prev ) = state self. _batch_size = inputs. shape [ 0 ]. value or array_ops. shape ( inputs ) [ 0 ] scope = vs. get_variable_scope ( ) with vs. variable_scope ( scope, initializer = self. _initializer ) : x = array_ops. concat ( [ inputs, m_prev ], axis = 1 ) with vs. variable_scope ( ""first_gemm"" ) : if self. _linear1 is None : self. _linear1 = _Linear ( x, self. _fact_size, False ) R_fact = self. _linear1 ( x ) with vs. variable_scope ( ""second_gemm"" ) : if self. _linear2 is None : self. _linear2 = _Linear ( R_fact, 4 * self. _num_units, True ) R = self. _linear2 ( R_fact ) i, j, f, o = array_ops. split ( R, 4, 1 ) c = math_ops. sigmoid ( f + self. _forget_bias ) * c_prev + math_ops. sigmoid ( i ) * math_ops. tanh ( j ) m = math_ops. sigmoid ( o ) * self. _activation ( c ) if self. _num_proj is not None : with vs. variable_scope ( ""projection"" ) : if : self. _linear3 = _Linear ( m, self. _num_proj, False )",True,self._linear3 is None,self._linear3 is None,0.6679277420043945 700,"def log ( self, level, msg, * args, ** kw ) : if args : if kw : raise TypeError ( ""You may give positional or keyword arguments, not both"" ) args = args or kw rendered = None for consumer_level, consumer in self. consumers : if self. level_matches ( level, consumer_level ) : if : self. in_progress_hanging = False print ( """" ) sys. stdout. flush ( ) if rendered is None : if args : rendered = msg % args else : rendered = msg rendered = "" "" * self. indent + rendered if hasattr ( consumer, ""write"" ) : consumer. write ( rendered + ""\n"" ) else : consumer ( rendered )",False,"self.in_progress_hanging and consumer in (sys.stdout, sys.stderr)",self.in_progress_hanging,0.653225839138031 701,"def remove_data_directory ( self ) : self. set_role ( ""uninitialized"" ) logger. info ( ""Removing data directory: %s"", self. _data_dir ) try : if os. path. islink ( self. _data_dir ) : os. unlink ( self. _data_dir ) elif not os. path. exists ( self. _data_dir ) : return elif os. path. isfile ( self. _data_dir ) : os. remove ( self. _data_dir ) elif : for pg_wal_realpath in self. pg_wal_realpath ( ). values ( ) : logger. info ( ""Removing WAL directory: %s"", pg_wal_realpath ) shutil. rmtree ( pg_wal_realpath ) for pg_tsp_rpath in self. pg_tblspc_realpaths ( ). values ( ) : logger. info ( ""Removing user defined tablespace directory: %s"", pg_tsp_rpath ) shutil. 
rmtree ( pg_tsp_rpath, ignore_errors = True ) shutil. rmtree ( self. _data_dir ) except ( IOError, OSError ) : logger. exception ( ""Could not remove data directory %s"", self. _data_dir ) self. move_data_directory ( )",True,os.path.isdir(self._data_dir),os.path.isdir(self._data_dir),0.6478986740112305 702,"def _simple_interactive_update ( self ) : while True : stale_packages = [ ] stale = partial = False for info in sorted ( getattr ( self. _ds, ""packages"" ) ( ), key = str ) : if self. _ds. status ( info ) == self. _ds. STALE : stale_packages. append ( ( info. id, info. name ) ) print ( ) if : print ( ""Will update following packages (o=ok; x=cancel)"" ) for pid, pname in stale_packages : name = textwrap. fill ( ""-"" * 27 + ( pname ), 75, subsequent_indent = 27 * "" "" ) [ 27 : ] print ( "" [ ] %s %s"" % ( pid. ljust ( 20, ""."" ), name ) ) print ( ) user_input = unicode ( input ( "" Identifier> "" ) ) if user_input. lower ( ) == ""o"" : for pid, pname in stale_packages : try : self. _ds. download ( pid, prefix = "" "" ) except ( IOError, ValueError ) as e : print ( e ) break elif user_input. lower ( ) in ( ""x"", ""q"", """"",False,stale_packages,self.partial,0.665532112121582 703,"def deploy_arm_template_at_subscription_scope ( cmd, template_file = None, template_uri = None, parameters = None, deployment_name = None, deployment_location = None, no_wait = False, handle_extended_json_format = None, no_prompt = False, confirm_with_what_if = None, what_if_result_format = None, what_if_exclude_change_types = None, template_spec = None, query_string = None, ) : if confirm_with_what_if : what_if_deploy_arm_template_at_subscription_scope ( cmd, template_file = template_file, template_uri = template_uri, parameters = parameters, deployment_name = deployment_name, deployment_location = deployment_location, result_format = what_if_result_format, exclude_change_types = what_if_exclude_change_types, no_prompt = no_prompt, template_spec = template_spec, query_string = query_string, ) from knack. prompting import prompt_y_n if : return None return _deploy_arm_template_at_subscription_scope ( cmd = cmd, template_file = template_file, template_uri = template_uri, parameters = parameters, deployment_name = deployment_name, deployment_location =",False,not prompt_y_n('\nAre you sure you want to execute the deployment?'),no_prompt,0.6487878561019897 704,"def readchunk ( self, inode, index, chunkopflags = 0 ) : cnt = 0 while True : cnt += 1 if self. version < ( 3, 0, 4 ) : ans = self. sendAndReceive ( CLTOMA_FUSE_READ_CHUNK, inode, index ) else : ans = self. sendAndReceive ( CLTOMA_FUSE_READ_CHUNK, inode, index, uint8 ( chunkopflags ) ) n = len ( ans ) if : from. utils import Error err = ord ( ans ) if err == ERROR_LOCKED : if cnt < 100 : time. sleep ( 0.1 ) continue logger. warning ( ""Waited too long for locked chunk %s:%s"", inode, index ) raise Error ( ord ( ans ) ) if n < 20 : raise Exception ( ""read chunk invalid length: %s(expected 20 above)"" % n ) if self. version >= ( 3, 0, 10 ) : assert ( n - 21 ) % 14 == 0, n protocolid, length, id_, version = unpack ( ""BQQI"", ans ) return Chunk ( id_, length, version, ans [ 21 : ], ele_width = 14 ) elif self. version >= ( 1, 7, 32 ) : assert ( n - 21 ) % 10",False,n == 1,n > 0,0.6724562048912048 705,"def tearDown ( self ) : """"""Shutdown the UDP server."""""" try : if self. server : self. server. stop ( 2.0 ) if : self. root_logger. removeHandler ( self. sock_hdlr ) self. sock_hdlr. close ( ) finally : BaseTest. 
tearDown ( self )",False,self.sock_hdlr,self.root_logger,0.6611213088035583 706,"def labels_to_inputs ( self, labels, converter ) : inputs = [ ] for label_arr in labels : input_ = np. zeros ( ( len ( label_arr ), converter. input_depth ), converter. input_dtype ) for i, l in enumerate ( label_arr ) : if l == converter. end_token : input_ [ i, - 2 ] = 1 elif : input_ [ i, - 1 ] = 1 else : j = 0 while l : input_ [ i, j ] = l % 2 l >>= 1 j += 1 assert np. any ( input_ [ i ] ), label_arr. astype ( np. int ) inputs. append ( input_ ) return inputs",False,l == 0,l == converter.start_token,0.6759565472602844 707,"def package_files ( self ) : seen_package_directories = ( ) directories = self. distribution. package_dir or { } empty_directory_exists = """" in directories packages = self. distribution. packages or [ ] for package in packages : if package in directories : package_directory = directories [ package ] elif empty_directory_exists : package_directory = os. path. join ( directories [ """" ], package ) else : package_directory = package if : seen_package_directories += ( package_directory + ""."", ) yield package_directory",False,not package_directory.startswith(seen_package_directories),package_directory not in seen_package_directories,0.6473897695541382 708,"def _resolve ( self, debug : bool, silent : bool, level : Optional [ int ], spinner ) -> Optional [ bool ] : if silent : logger. debug ( ""next iteration"", extra = dict ( layers = len ( self. graph. _layers ), mutations = self. mutator. mutations, ), ) else : spinner. text = ""layers: {layers}, mutations: {mutations}"". format ( layers = len ( self. graph. _layers ), mutations = self. mutator. mutations, ) deps = self. graph. get_leafs ( level = level ) if not deps : return True for dep in deps : if not dep. python_compat : self. graph. conflict = dep return False no_conflicts = self. _apply_deps ( deps, debug = debug ) if no_conflicts : return None groups = self. mutator. mutate ( self. graph ) if groups is None : return False self. graph. conflict = None for group in groups : dep = self. graph. get ( group. name ) if : logger. debug ( ""mutated"", extra = dict ( group_from = str ( dep. group ), <",False,dep.group.number != group.number,dep,0.6473860740661621 709,"def apply ( self, items, evaluation ) : ""%(name)s[items___]"" items = items. flatten ( Symbol ( ""List"" ) ). get_sequence ( ) results = [ ] best = None for item in items : if item. has_form ( ""List"", None ) : leaves = item. leaves else : leaves = [ item ] for leaf in leaves : if best is None : best = leaf results. append ( best ) continue c = do_cmp ( leaf, best ) if : results. append ( leaf ) elif ( self. sense == 1 and c > 0 ) or ( self. sense == - 1 and c < 0 ) : results. remove ( best ) best = leaf results. append ( leaf ) if not results : return Expression ( ""DirectedInfinity"", - self. sense ) if len ( results ) == 1 : return results. pop ( ) if len ( results ) < len ( items ) : return Expression ( self. get_name ( ), * results ) return None",False,c is None,self.sense == 0,0.6652126312255859 710,"def finish ( self ) : self. done = True if self. has_trailers and hasattr ( self. fp, ""read_trailer_lines"" ) : self. trailers = { } try : for line in self. fp. read_trailer_lines ( ) : if line [ 0 ] in ntob ( "" \t"" ) : v = line. strip ( ) else : try : k, v = line. split ( ntob ( "":"" ), 1 ) except ValueError : raise ValueError ( ""Illegal header line."" ) k = k. strip ( ). title ( ) v = v. strip ( ) if k in cheroot. server. 
comma_separated_headers : existing = self. trailers. get ( k ) if existing : v = ntob ( "", "" ). join ( ( existing, v ) ) self. trailers [ k ] = v except Exception : e = sys. exc_info ( ) [ 1 ] if : raise cherrypy.",False,e.__class__.__name__ == 'MaxSizeExceeded',e,0.6711167097091675 711,"def sync_up_to_new_location ( self, worker_ip ) : if worker_ip!= self. worker_ip : logger. debug ( ""Setting new worker IP to %s"", worker_ip ) self. set_worker_ip ( worker_ip ) self. reset ( ) if : logger. warning ( ""Sync up to new location skipped. This should not occur."" ) else : logger. warning ( ""Sync attempted to same IP %s."", worker_ip )",False,not self.sync_up(),worker_ip == self.worker_ip,0.653356671333313 712,"def __keyPress ( self, widget, event ) : if self. getSelectedIndex ( ) is None : return False if event. key in ( ""Left"", ""Right"", ""Up"", ""Down"" ) : if self. __positionIncrement == 0 : return False if : pixelIncrement = 1 else : pixelIncrement = self. __positionIncrement * self. size ( ). x x = self. getPositions ( ) [ self. getSelectedIndex ( ) ] * self. size ( ). x x += pixelIncrement if event. key in ( ""Right"", ""Up"" ) else - pixelIncrement self. __setPositionInternal ( self. getSelectedIndex ( ), x, self. PositionChangedReason. Increment, clamp = not ( event. modifiers & event. modifiers. Shift ), ) return True elif event. key in ( ""Backspace"", ""Delete"" ) : index = self. getSelectedIndex ( ) if ( index is not None and self. getSizeEditable ( ) and len ( self. getPositions ( ) ) > self. getMinimumSize ( ) ) : del self. __positions [ index ] signal = getattr ( self, ""_indexRemovedSignal"", None ) if signal is not None : signal ( self, index ) self. __emitPositionChanged ( self. PositionChangedReason. IndexRemoved ) self. _qtWidget",False,self.__positionIncrement is None,self.size() == None,0.6725320219993591 713,"def results_default_iter ( commit_hash ) : for result in iter_results_for_machine_and_hash ( conf. results_dir, machine, commit_hash ) : if : continue for key in result. get_all_result_keys ( ) : params = result. get_result_params ( key ) result_value = result. get_result_value ( key, params ) result_stats = result. get_result_stats ( key, params ) result_samples = result. get_result_samples ( key, params ) result_version = result. benchmark_version. get ( key ) yield ( key, params, result_value, result_stats, result_samples, result_version, result. params [ ""machine"" ], result. env_name, )",False,env_names is not None and result.env_name not in env_names,result.is_empty(),0.6507217884063721 714,"def _binary ( self, other : ""Table"", func, do_func ) : session_id = self. _session. session_id left, right = self, other if left. _partitions!= right. _partitions : if : left = left. save_as ( str ( uuid. uuid1 ( ) ), session_id, partition = right. _partitions ) else : right = other. save_as ( str ( uuid. uuid1 ( ) ), session_id, partition = left. _partitions ) results = self. _session. _submit_binary ( func, do_func, left. _partitions, left. _name, left. _namespace, right. _name, right. _namespace, ) result : _Operand = results [ 0 ] return _create_table ( session = self. _session, name = result. name, namespace = result. namespace, partitions = left. _partitions, )",False,other.count() > self.count(),self._session.session_id == other._session_id,0.6512644290924072 715,"def _form_master_re ( relist, reflags, ldict, toknames ) : if not relist : return [ ] regex = ""|"". join ( relist ) try : lexre = re. compile ( regex, re. 
VERBOSE | reflags ) lexindexfunc = [ None ] * ( max ( lexre. groupindex. values ( ) ) + 1 ) lexindexnames = lexindexfunc [ : ] for f, i in lexre. groupindex. items ( ) : handle = ldict. get ( f, None ) if type ( handle ) in ( types. FunctionType, types. MethodType ) : lexindexfunc [ i ] = ( handle, toknames [ f ] ) lexindexnames [ i ] = f elif : lexindexnames [ i ] = f if f. find ( ""ignore_"" ) > 0 : lexindexfunc [ i ] = ( None, None ) else : lexindexfunc [ i ] = ( None, toknames [ f ] ) return [ ( lexre, lexindexfunc ) ], [ regex ], [ lexindexnames ] except Exception : m = int ( len ( relist ) / 2 ) if m == 0 : m = 1 llist, lre, lnames = _form_master_re ( relist [ : m ], reflags, ldict, toknames ) rlist, rre, rnames = _form_master_re ( relist [",False,handle is not None,"type(f) in (types.StringTypes, types.StringTypes)",0.6600191593170166 716,"def decStep ( self, event = None ) : if event is not None and not self. acceptKey ( ) : return step, power = abcControlFrame. _stepPower ( self. step. get ( ) ) s = step - power if s <= 0.0 : s = step - power / 10.0 if s < _LOWSTEP : s = _LOWSTEP elif s > _HIGHSTEP : s = _HIGHSTEP if self. astep is not self. step and self. astep. get ( )!= _NOASTEP : step, power = abcControlFrame. _stepPower ( self. astep. get ( ) ) aas = step - power if aas <= 0.0 : aas = step - power / 10.0 if : aas = _LOWSTEP elif aas > _HIGHASTEP : aas = _HIGHASTEP else : aas = None self. setStep ( s, aas )",False,aas < _LOWSTEP,aas > _LOWSTEP,0.6739943027496338 717,"def _nested_ui_dict ( self, ui_schema : List [ Dict [ str, Any ] ], option_dict : Dict [ str, Any ], key : str, multiple : bool = False, ) -> None : """"""UI nested dict items."""""" ui_node = { ""name"" : key, ""type"" : ""schema"", ""optional"" : True, ""multiple"" : multiple, } nested_schema = [ ] for c_key, c_value in option_dict. items ( ) : if : self. _nested_ui_list ( nested_schema, c_value, c_key ) else : self. _single_ui_option ( nested_schema, c_value, c_key ) ui_node [ ""schema"" ] = nested_schema ui_schema. append ( ui_node )",True,"isinstance(c_value, list)","isinstance(c_value, list)",0.6476407647132874 718,"def test_https ( ) : for proto in [ ""http"", ""https"" ] : for convention in [ ""virtualhost"", ""path"", ""subdomain"" ] : opts = calling_format. _s3connection_opts_from_uri ( ""{0}+{1}://"". format ( proto, convention ) ) assert ( proto == ""https"" ) == opts [ ""is_secure"" ] assert ( proto == ""http"" ) == ( not opts [ ""is_secure"" ] ) cf = opts [ ""calling_format"" ] if convention == ""virtualhost"" : assert isinstance ( cf, connection. VHostCallingFormat ) elif convention == ""path"" : assert isinstance ( cf, connection. OrdinaryCallingFormat ) elif : assert isinstance ( cf, connection. SubdomainCallingFormat ) else : assert False",False,convention == 'subdomain',conconation == 'subdomain',0.6675941944122314 719,"def render ( self, name, value, attrs = None, renderer = None ) : output = [ ] for option in self. subwidgets ( name, value, attrs ) : option_value = option [ ""value"" ] option [ ""widget"" ] = self. create_option ( name = name, value = option [ ""value"" ], label = option [ ""label"" ], selected = option_value == value, index = option [ ""index"" ], attrs = option [ ""attrs"" ], ) if option_value. split ( ""/"" ) [ 0 ] == ""icon"" or option_value == """" : icon_name = option [ ""label"" ] original_widget = self. _render ( self. 
option_template_name, option ) if : original_widget = format_html ( '', label_id = option [ ""widget"" ] [ ""attrs"" ] [ ""id"" ], widget = original_widget, ) output. append ( format_html ( self. base_html, active = ""active"" if option_value == value else """", static = settings. STATIC_URL, warn ( ""Unquoted etag emitted."", HTTPWarning, stacklevel = 4 ) location = headers. get ( ""location"" ) if location is not None : if not urlparse ( location ). netloc : warn ( ""Absolute URLs required for location header."", HTTPWarning, stacklevel = 4, )",False,etag.startswith('w/'),"etag.startswith(('w/', 'w/'))",0.6573379039764404 726,"def __init__ ( self, format ) : format = "" "". join ( re. split ( r""(?:\s|%t|%n)+"", format ) ) pattern = [ ] try : for spec in re. findall ( r""%\w|%%|."", format ) : if : spec = SPEC [ spec ] pattern. append ( spec ) except KeyError : raise ValueError ( ""unknown specificer:{}"". format ( spec ) ) self. pattern = re. compile ( r""(?i)"" + """". join ( pattern ) )",False,spec[0] == '%',spec in SPEC,0.6606450080871582 727,"def _update_indexes ( self, _rev, data ) : _id, new_rev, db_data = self. _update_id_index ( _rev, data ) with self. main_lock : self. id_revs [ _id ] = new_rev for index in self. indexes [ 1 : ] : with self. main_lock : curr_rev = self. id_revs. get ( _id ) if : break self. _single_update_index ( index, data, db_data, _id ) with self. main_lock : if self. id_revs [ _id ] == new_rev : del self. id_revs [ _id ] return _id, new_rev",False,curr_rev != new_rev,curr_rev == new_rev,0.653876781463623 728,"def connect ( self ) : self. sock = sockssocket ( ) self. sock. setproxy ( * proxy_args ) if type ( self. timeout ) in ( int, float ) : self. sock. settimeout ( self. timeout ) self. sock. connect ( ( self. host, self. port ) ) if isinstance ( self, compat_http_client. HTTPSConnection ) : if : self. sock = self. _context. wrap_socket ( self. sock, server_hostname = self. host ) else : self. sock = ssl. wrap_socket ( self. sock )",False,"hasattr(self, '_context')",self._context,0.6537714004516602 729,"def train ( epoch ) : model. train ( ) train_loss = 0 for batch_idx, ( data, _ ) in enumerate ( train_loader ) : data = data. to ( device ) optimizer. zero_grad ( ) recon_batch, mu, logvar = model ( data ) loss = loss_function ( recon_batch, data, mu, logvar ) loss. backward ( ) train_loss += loss. item ( ) optimizer. step ( ) if : print ( ""Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}"". format ( epoch, batch_idx * len ( data ), len ( train_loader. dataset ), 100.0 * batch_idx / len ( train_loader ), loss. item ( ) / len ( data ), ) ) print ( ""====> Epoch: {} Average loss: {:.4f}"". format ( epoch, train_loss / len ( train_loader. dataset ) ) )",False,batch_idx % args.log_interval == 0,"hasattr(train_loader, 'dataset')",0.6529800891876221 730,"def create_unit ( self, key : str, source : Union [ str, List [ str ] ], target : Optional [ Union [ str, List [ str ] ] ] = None, ) : if isinstance ( source, list ) : context = source [ 0 ] unit = self. construct_unit ( context ) if : source = context else : source = multistring ( source ) else : context = source unit = self. construct_unit ( source ) if isinstance ( target, list ) : if len ( target ) == 1 : target = target [ 0 ] else : target = multistring ( target ) if key : unit. setid ( key ) elif target is not None and self. set_context_bilingual : unit. setid ( context ) unit. context = context if target is None : target = source source = self. create_unit_key ( key, source ) unit. 
source = source if isinstance ( unit, LISAunit ) and self. language_code : unit. settarget ( target, self. language_code ) else : unit. target = target return unit",True,len(source) == 1,len(source) == 1,0.6580545902252197 731,"def __setattr__ ( self, name, value ) : if name == ""path"" : if value and value!= """" : if : raise ValueError ( 'The page path should always start with a slash (""/"").' ) elif name == ""load_time"" : if value and not isinstance ( value, int ) : raise ValueError ( ""Page load time must be specified in integer milliseconds."" ) object. __setattr__ ( self, name, value )",False,value[0] != '/',not value.startswith('/'),0.6724666953086853 732,"def predict ( self, img : Union [ str, np. ndarray ], save_path : str = ""openpose_body"", visualization : bool = True, ) : self. eval ( ) self. visualization = visualization if isinstance ( img, str ) : orgImg = cv2. imread ( img ) else : orgImg = img data, imageToTest_padded, pad = self. transform ( orgImg ) Mconv7_stage6_L1, Mconv7_stage6_L2 = self. forward ( paddle. to_tensor ( data ) ) Mconv7_stage6_L1 = Mconv7_stage6_L1. numpy ( ) Mconv7_stage6_L2 = Mconv7_stage6_L2. numpy ( ) heatmap_avg = self. remove_pad ( Mconv7_stage6_L2, imageToTest_padded, orgImg, pad ) paf_avg = self. remove_pad ( Mconv7_stage6_L1, imageToTest_padded, orgImg, pad ) all_peaks = self. get_peak ( heatmap_avg ) connection_all, special_k = self. get_connection ( all_peaks, paf_avg, orgImg ) candidate, subset = self. get_candidate ( all_peaks, connection_all, special_k ) canvas = copy. deepcopy ( orgImg ) canvas = self. draw_pose ( canvas, candidate, subset ) if self. visualization : if : os. mkdir ( save_path ) img_name = str ( time. time ( ) ) + "".png"" save_path = os. path. join ( save_path, img_name ) cv2. imwrite ( save_path, canvas ) results = { ""candidate"" : candidate, ""subset"" : subset, ""data"" :",True,not os.path.exists(save_path),not os.path.exists(save_path),0.646492600440979 733,"def zip_readline_read_test ( self, f, compression ) : self. make_test_archive ( f, compression ) with zipfile. ZipFile ( f, ""r"" ) as zipfp, zipfp. open ( TESTFN ) as zipopen : data = b"""" while True : read = zipopen. readline ( ) if : break data += read read = zipopen. read ( 100 ) if : break data += read self. assertEqual ( data, self. data )",False,not read,len(read) > 100,0.6653151512145996 734,def update_trackers_info ( self ) : old_trackers = self. get_old_trackers ( ) with db_session : trackers = self. mds. TrackerState. select ( ). for_update ( ) [ : ] for tracker in trackers : if : tracker. set ( ** old_trackers [ tracker. url ] ),False,tracker.url in old_trackers,old_trackers.has_key(tracker.url),0.6619849801063538 735,"def convertPlaylistInfoToString ( seld, aPlaylistInfo, aTrackItems ) : str = """" str += ""[Title] %s\n"" % ( aPlaylistInfo [ ""title"" ] ) str += ""[Type] %s\n"" % ( aPlaylistInfo [ ""type"" ] ) str += ""[NumberOfTracks] %s\n"" % ( aPlaylistInfo [ ""numberOfTracks"" ] ) str += ""[NumberOfVideos] %s\n"" % ( aPlaylistInfo [ ""numberOfVideos"" ] ) str += ""[Duration] %s\n"" % ( aPlaylistInfo [ ""duration"" ] ) i = 0 str += ""===========Track=============\n"" for item in aTrackItems : type = item [ ""type"" ] item = item [ ""item"" ] if : continue i = i + 1 str += ""{:<8}"". format ( ""[%d]"" % i ) + item [ ""title"" ] + ""\n"" i = 0 str += ""\n===========Video=============\n"" for item in aTrackItems : type = item [ ""type"" ] item = item [ ""item"" ] if type!= ""video"" : continue i = i + 1 str += ""{:<8}"". 
format ( ""[%d]"" % i ) + item [ ""title"" ] + ""\n"" return str",False,type != 'track',type != 'video',0.6633092164993286 736,"def build_inputs ( self ) : inputs = { } for encoder in self. model. pipeline. encoders : if hasattr ( encoder, ""sequence_length"" ) : for i in range ( encoder. sequence_length ) : inputs [ encoder. sequence_name ( i ) ] = Input ( shape = ( 1, ), name = encoder. sequence_name ( i ) ) if : inputs [ encoder. sequence_name ( i, suffix = ""_twin"" ) ] = Input ( shape = ( 1, ), name = encoder. sequence_name ( i, suffix = ""_twin"" ) ) else : inputs [ encoder. name ] = Input ( shape = ( 1, ), name = encoder. name ) if : inputs [ encoder. twin_name ] = Input ( shape = ( 1, ), name = encoder. twin_name ) return inputs",False,encoder.twin,"hasattr(encoder, 'twin_name')",0.6700774431228638 737,"def process_url_iterable ( self, iterable, opts, functionality, enabled_functionality ) : self. out. debug ( ""base_plugin_internal.process_url_iterable"" ) timeout_host = opts [ ""timeout_host"" ] i = 0 with ThreadPoolExecutor ( max_workers = opts [ ""threads_scan"" ] ) as executor : results = [ ] for url in iterable : args = [ url, opts, functionality, enabled_functionality, True ] future = executor. submit ( self. url_scan, * args ) url_to_log = str ( url ). rstrip ( ) results. append ( { ""future"" : future, ""url"" : url_to_log, } ) if : self. _process_results_multisite ( results, functionality, timeout_host ) results = [ ] i += 1 if len ( results ) > 0 : self. _process_results_multisite ( results, functionality, timeout_host ) results = [ ]",False,i % 1000 == 0 and i != 0,len(results) > 0,0.6647021770477295 738,"def __process_family ( self, family, person1, person2, append_list ) : if family. get_handle ( ) in self. __processed_families : return self. __processed_families [ family. get_handle ( ) ] = True missingbits = [ ] if person1 is UnknownPerson or person1 is None : name1 = _ ( ""(unknown person)"" ) else : name1 = name_displayer. display ( person1 ) if not name1 : name1 = _ ( ""(person with unknown name)"" ) if person2 is UnknownPerson or person2 is None : name2 = _ ( ""(unknown person)"" ) else : name2 = name_displayer. display ( person2 ) if not name2 : name2 = _ ( ""(person with unknown name)"" ) name = _ ( ""%(name1)s and %(name2)s"" ) % { ""name1"" : name1, ""name2"" : name2 } has_marriage = False for event_ref in family. get_event_ref_list ( ) : event = self. dbstate. db. get_event_from_handle ( event_ref. ref ) if event. get_type ( ) not in [ EventType. MARRIAGE, EventType. DIVORCE ] : continue missingbits. extend ( self. __process_event ( event ) ) if : has_marriage = True if family. get_relationship ( ) == FamilyRelType. MARRIED : if not has_marriage : missingbits. append ( _ ( ""marriage event missing"" ) ) elif family. get_relationship ( ) == FamilyRelType. UNKNOWN : <",False,event.get_type() == EventType.MARRIAGE,append_list,0.6539820432662964 739,"def test_main ( self ) : allowed = [ ] for license in self. ALLOWED : allowed. append ( """". join ( license. split ( ) ) ) found = set ( ) missing = [ ] for path in iter_py_paths ( ) : header = b"""" with open ( path, ""rb"" ) as h : for line in h : line = line. strip ( ) if not line. startswith ( b""#"" ) : break header += line. lstrip ( b""# "" ) + b""\n"" norm = b"""". join ( header. split ( ) ) norm = norm. decode ( ""utf-8"" ) for license_ in allowed : if : found. add ( license_ ) break else : missing. append ( path ) self. 
assertFalse ( missing, msg = ""Missing license: %r"" % missing ) assert len ( allowed ) == len ( found )",False,license_ in norm,license_ in found,0.6635072231292725 740,"def init ( self ) : r = self. get_redis ( ) if r : key = ""pocsuite_target"" info_msg = ""[PLUGIN] try fetch targets from redis..."" logger. info ( info_msg ) targets = r. get ( key ) count = 0 if targets : for target in targets : if : count += 1 info_msg = ""[PLUGIN] get {0} target(s) from redis"". format ( count ) logger. info ( info_msg )",False,self.add_target(target),target,0.6513898372650146 741,"def ShouldAttachKey ( cls, config, vcards = None, emails = None, ttl = 90 ) : now = datetime. now ( ) offset = timedelta ( days = ttl ) never = datetime. fromtimestamp ( 0 ) dates = [ ] who = dict ( ( vc. email, vc ) for vc in ( vcards or [ ] ) if vc ) for e in emails or [ ] : if e not in who : who [ e ] = config. vcards. get ( e ) needs_key = 0 for email, vc in who. iteritems ( ) : if : continue ts = None if vc : try : ts = datetime. fromtimestamp ( float ( vc. pgp_key_shared ) ) except ( ValueError, TypeError, AttributeError ) : pass if ( ts or never ) + offset < now : needs_key += 1 ratio = cls. _encryption_ratio ( config. background, config. index, email, minimum = 1 ) if ratio <= 0 : return False return needs_key > 0",False,vc and vc.kind == 'profile',email == '',0.655734121799469 742,"def test_insert ( self ) : funcs = [ functions. sin, functions. cos, functions. tan, ] orig = [ ] for orig_is_link in self. orig : if : orig. append ( links. Linear ( ( 3, 3 ) ) ) else : orig. append ( funcs. pop ( 0 ) ) if self. is_link : subj = links. Linear ( ( 3, 3 ) ) else : subj = funcs. pop ( 0 ) seq = chainer. Sequential ( * orig ) if self. expect_error : with pytest. raises ( IndexError ) : seq. insert ( self. pos, subj ) else : seq. insert ( self. pos, subj ) orig. insert ( self. pos, subj ) assert len ( seq ) == len ( self. orig ) + 1 for i in range ( len ( self. orig ) + 1 ) : assert seq [ i ] is orig [ i ]",True,orig_is_link,orig_is_link,0.657129168510437 743,"def _build_nightly_dict ( registered_ds : FullNamesDict, stable_version_ds : FullNamesDict, ) -> NightlyDict : """"""Computes the nightly dict from the registered and stable dict."""""" nightly_ds = collections. defaultdict ( lambda : collections. defaultdict ( lambda : collections. defaultdict ( bool ) ) ) for dataset in registered_ds : if dataset in stable_version_ds : for config in registered_ds [ dataset ] : if config in stable_version_ds [ dataset ] : for version in registered_ds [ dataset ] [ config ] : if : nightly_ds [ dataset ] [ config ] [ version ] = False else : nightly_ds [ dataset ] [ config ] [ version ] = True else : nightly_ds [ dataset ] [ config ] = True else : nightly_ds [ dataset ] = True return nightly_ds",False,version in stable_version_ds[dataset][config],version in nightly_ds,0.6587651968002319 744,"def single_line_beta_description ( schema_or_field, strict = True ) : if ""\n"" in schema_or_field [ ""field_details"" ] [ ""beta"" ] : msg = ""Beta descriptions must be single line.\n"" msg += ( f""Offending field or field set: {schema_or_field['field_details']['name']}"" ) if : raise ValueError ( msg ) else : ecs_helpers. strict_warning ( msg )",True,strict,strict,0.6842748522758484 745,"def get_displayname ( self, schema : s_schema. Schema ) -> str : if self. is_view ( schema ) and not self. get_alias_is_persistent ( schema ) : schema, mtype = self. material_type ( schema ) else : mtype = self union_of = mtype. 
get_union_of ( schema ) if union_of : if : std_obj = schema. get ( ""std::BaseObject"", type = ObjectType ) return std_obj. get_displayname ( schema ) else : comps = sorted ( union_of. objects ( schema ), key = lambda o : o. id ) return "" | "". join ( c. get_displayname ( schema ) for c in comps ) else : intersection_of = mtype. get_intersection_of ( schema ) if intersection_of : comps = sorted ( intersection_of. objects ( schema ), key = lambda o : o. id ) comp_dns = ( c. get_displayname ( schema ) for c in comps ) return "" & "". join ( dn for dn in comp_dns if dn!= ""std::BaseObject"" ) elif mtype is self : return super ( ). get_displayname ( schema ) else : return mtype. get_displayname ( schema )",False,self.get_is_opaque_union(schema),schema is None,0.6482722759246826 746,"def get_prep_value ( self ) : prep_value = [ ] for i, item in enumerate ( self. _bound_blocks ) : if item : if not item. id : item. id = str ( uuid. uuid4 ( ) ) prep_value. append ( item. get_prep_value ( ) ) else : raw_item = self. _raw_data [ i ] if : raw_item [ ""id"" ] = str ( uuid. uuid4 ( ) ) prep_value. append ( raw_item ) return prep_value",False,not raw_item.get('id'),not raw_item.id,0.6522339582443237 747,"def parse_condexpr ( self ) : lineno = self. stream. current. lineno expr1 = self. parse_or ( ) while self. stream. skip_if ( ""name:if"" ) : expr2 = self. parse_or ( ) if : expr3 = self. parse_condexpr ( ) else : expr3 = None expr1 = nodes. CondExpr ( expr2, expr1, expr3, lineno = lineno ) lineno = self. stream. current. lineno return expr1",False,self.stream.skip_if('name:else'),expr2 is None,0.6482930779457092 748,"def _to_string_term ( self, ostream, _idx, _sub, _name_buffer, verbose ) : if _idx == 0 and self. _const!= 0 : ostream. write ( ""%s"" % ( self. _const, ) ) else : coef = self. _coef [ id ( _sub ) ] _coeftype = coef. __class__ if _idx and _coeftype is _NegationExpression : coef = coef. _args [ 0 ] _coeftype = coef. __class__ if _coeftype in native_numeric_types : if _idx : coef = abs ( coef ) if coef == 1 : ostream. write ( _sub. cname ( True, _name_buffer ) ) return ostream. write ( str ( coef ) ) elif : coef. to_string ( ostream = ostream, verbose = verbose, precedence = _ProductExpression. PRECEDENCE, ) else : ostream. write ( str ( coef ) ) ostream. write ( ""*%s"" % ( _sub. cname ( True, _name_buffer ) ) )",False,coef.is_expression(),coef == 2,0.6549028158187866 749,"def run ( self ) : display_fileno = self. _display. fileno ( ) while True : if : try : rlist, wlist, xlist = select. select ( ( self. _pipe [ 0 ], display_fileno ), ( ), ( ) ) except select. error as err : if isinstance ( err, OSError ) : code = err. errno else : code = err [ 0 ] if code!= errno. EINTR : raise continue assert not wlist assert not xlist if self. _pipe [ 0 ] in rlist : break continue self. _on_event ( self. _display. next_event ( ) )",False,not self._display.pending_events(),self._pipe[0] >= 0,0.6534614562988281 750,"def options_to_cli ( self, options ) : verbose = options. pop ( ""verbose"", 0 ) args = { ""become"" : False, ""check"" : True } args. update ( options ) cli = [ ] cli_args = [ ] if verbose : cli. append ( ""-"" + ""v"" * verbose ) for arg_name, value in args. items ( ) : option = self. _known_options [ arg_name ] opt_cli = option [ ""cli"" ] opt_type = option [ ""type"" ] if opt_type == ""boolean"" : if : cli. append ( opt_cli ) elif opt_type == ""string"" : cli. append ( opt_cli + "" %s"" ) cli_args. append ( value ) elif opt_type == ""json"" : cli. 
append ( opt_cli + "" %s"" ) value_json = json. dumps ( value ) cli_args. append ( value_json ) else : raise TypeError ( ""Unsupported argument type '%s'."" % opt_type ) return "" "". join ( cli ), cli_args",False,value,opt_type == 'bool',0.6882610321044922 751,"def __run ( self ) : threads = self. parameters ( ) [ ""threads"" ]. getTypedValue ( ) with IECore. tbb_global_control ( IECore. tbb_global_control. parameter. max_allowed_parallelism, IECore. hardwareConcurrency ( ) if threads == 0 else threads, ) : self. _executeStartupFiles ( self. root ( ). getName ( ) ) defaultMessageHandler = IECore. MessageHandler. getDefaultHandler ( ) if : IECore. MessageHandler. setDefaultHandler ( Gaffer. ProcessMessageHandler ( defaultMessageHandler ) ) return self. _run ( self. parameters ( ). getValidatedValue ( ) )",False,"not isinstance(defaultMessageHandler, Gaffer.ProcessMessageHandler)",defaultMessageHandler is not None,0.6522072553634644 752,def run ( self ) : try : next ( self. coro ) while True : with self. abort_lock : if : return msg = self. in_queue. get ( ) if msg is POISON : break with self. abort_lock : if : return out = self. coro. send ( msg ) for msg in _allmsgs ( out ) : with self. abort_lock : if : return self. out_queue. put ( msg ) except BaseException : self. abort_all ( sys. exc_info ( ) ) return self. out_queue. release ( ),False,self.abort_flag,msg is not None,0.6556994915008545 753,"def _check_is_max_context ( doc_spans, cur_span_index, position ) : """"""chech is max context"""""" best_score = None best_span_index = None for ( span_index, doc_span ) in enumerate ( doc_spans ) : end = doc_span. start + doc_span. length - 1 if position < doc_span. start : continue if : continue num_left_context = position - doc_span. start num_right_context = end - position score = min ( num_left_context, num_right_context ) + 0.01 * doc_span. length if best_score is None or score > best_score : best_score = score best_span_index = span_index return cur_span_index == best_span_index",False,position > end,end < doc_span.length,0.6760665774345398 754,"def async_to_sync_wrap ( * args, ** kwargs ) : coroutine = function ( * args, ** kwargs ) try : loop = asyncio. get_event_loop ( ) except RuntimeError : loop = main_loop if loop. is_running ( ) : if threading. current_thread ( ) is threading. main_thread ( ) : return coroutine else : if : return asyncio. run_coroutine_threadsafe ( coroutine, loop ). result ( ) if inspect. isasyncgen ( coroutine ) : return asyncio. run_coroutine_threadsafe ( consume_generator ( coroutine ), loop ). result ( ) if : return loop. run_until_complete ( coroutine ) if inspect. isasyncgen ( coroutine ) : return loop. run_until_complete ( consume_generator ( coroutine ) )",True,inspect.iscoroutine(coroutine),inspect.iscoroutine(coroutine),0.6525558233261108 755,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. I64 : self. timestamp = iprot. readI64 ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. value = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. STRUCT : self. host = Endpoint ( ) self. 
host. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 1,fid == 1,0.6738317012786865 756,"def open_file_action ( self ) : wildcard = ""All files (*.*)|*.*"" for src in registry. sources : if len ( src. extensions ) > 0 : if : wildcard += src. wildcard else : wildcard += ""|"" + src. wildcard dialog = FileDialog ( parent = None, title = ""Open supported data file"", action = ""open"", wildcard = wildcard ) if dialog. open ( ) == OK : if not isfile ( dialog. path ) : error ( ""File '%s' does not exist!"" % dialog. path ) return object = self. object engine = get_engine ( object ) engine. open ( dialog. path, object )",False,wildcard.endswith('|') or src.wildcard.startswith('|'),"hasattr(src, 'wildcard')",0.652045726776123 757,"def format_drf_errors ( response, context, exc ) : errors = [ ] if isinstance ( response. data, list ) : for message in response. data : errors. extend ( format_error_object ( message, ""/data"", response ) ) else : for field, error in response. data. items ( ) : field = format_value ( field ) pointer = ""/data/attributes/{}"". format ( field ) if : errors. extend ( format_error_object ( error, None, response ) ) elif isinstance ( error, str ) : classes = inspect. getmembers ( exceptions, inspect. isclass ) if isinstance ( exc, tuple ( x [ 1 ] for x in classes ) ) : pointer = ""/data"" errors. extend ( format_error_object ( error, pointer, response ) ) elif isinstance ( error, list ) : errors. extend ( format_error_object ( error, pointer, response ) ) else : errors. extend ( format_error_object ( error, pointer, response ) ) context [ ""view"" ]. resource_name = ""errors"" response. data = errors return response",False,"isinstance(exc, Http404) and isinstance(error, str)","isinstance(error, dict)",0.6502106189727783 758,"def find_missing_file ( file_path ) : """"""Find a missing file name or file path, and return valid path."""""" _ = get_app ( ). _tr modified = False skipped = False if os. path. exists ( file_path ) : return ( file_path, modified, skipped ) file_name = os. path. split ( file_path ) [ - 1 ] for known_path in known_paths : possible_path = os. path. join ( known_path, file_name ) if os. path. exists ( possible_path ) : modified = True return ( possible_path, modified, skipped ) while not os. path. exists ( file_path ) : recommended_path = get_app ( ). project. current_filepath or """" if not recommended_path : recommended_path = info. HOME_PATH else : recommended_path = os. path. dirname ( recommended_path ) QMessageBox. warning ( None, _ ( ""Missing File (%s)"" ) % file_name, _ ( ""%s cannot be found."" ) % file_name, ) modified = True folder_to_check = QFileDialog. getExistingDirectory ( None, _ ( ""Find directory that contains: %s"" % file_name ), recommended_path ) if folder_to_check and folder_to_check not in known_paths : known_paths. append ( folder_to_check ) if : max_in_list_size = connection. ops. max_in_list_size ( ) lhs, lhs_params = self. process_lhs ( compiler, connection ) rhs, rhs_params = self. batch_process_rhs ( compiler, connection ) in_clause_elements = [ ""("" ] params = [ ] for offset in range ( 0, len ( rhs_params ), max_in_list_size ) : if : in_clause_elements. append ( "" OR "" ) in_clause_elements. append ( ""%s IN ("" % lhs ) params. extend ( lhs_params ) sqls = rhs [ offset : offset + max_in_list_size ] sqls_params = rhs_params [ offset : offset + max_in_list_size ] param_group = "", "". join ( sqls ) in_clause_elements. 
append ( param_group ) in_clause_elements. append ( "")"" ) params. extend ( sqls_params ) in_clause_elements. append ( "")"" ) return """". join ( in_clause_elements ), params",False,offset > 0,lhs_params[offset + max_in_list_size] > 0,0.6728770732879639 760,"def _set_http_cookie ( ) : if conf. cookie : if : conf. http_headers [ HTTP_HEADER. COOKIE ] = ""; "". join ( map ( lambda x : ""="". join ( x ), conf. cookie. items ( ) ) ) else : conf. http_headers [ HTTP_HEADER. COOKIE ] = conf. cookie",False,"isinstance(conf.cookie, dict)","hasattr(conf.cookie, 'items')",0.6557393074035645 761,"def daemon_stop ( pid_file ) : import errno try : with open ( pid_file ) as f : buf = f. read ( ) pid = common. to_str ( buf ) if not buf : logging. error ( ""not running"" ) except IOError as e : shell. print_exception ( e ) if : logging. error ( ""not running"" ) return sys. exit ( 1 ) pid = int ( pid ) if pid > 0 : try : os. kill ( pid, signal. SIGTERM ) except OSError as e : if e. errno == errno. ESRCH : logging. error ( ""not running"" ) return shell. print_exception ( e ) sys. exit ( 1 ) else : logging. error ( ""pid is not positive: %d"", pid ) for i in range ( 0, 200 ) : try : os. kill ( pid, 0 ) except OSError as e : if e. errno == errno. ESRCH : break time. sleep ( 0.05 ) else : logging. error ( ""timed out when stopping pid %d"", pid ) sys. exit (",False,e.errno == errno.ENOENT,pid < 0,0.657820463180542 762,"def __init__ ( self, red_op, axis, acc_dtype, dtype, return_indices ) : DnnBase. __init__ ( self, [ ""c_code/dnn_redux.c"" ], ""APPLY_SPECIFIC(dnn_redux)"" ) assert cudnn. cudnnReduceTensorOp_t. has_alias ( red_op ) self. red_op = red_op assert acc_dtype in [ ""float16"", ""float32"", ""float64"" ] self. acc_dtype = acc_dtype assert dtype in [ ""float16"", ""float32"", ""float64"" ] self. dtype = dtype if axis is not None : if len ( axis ) > 8 : raise ValueError ( ""Too many axes to reduce on"" ) if : raise ValueError ( ""Axes larger than 8 not supported"" ) axis = tuple ( axis ) self. c_axis = self. _convert_axis ( axis ) self. axis = axis if return_indices and ( red_op!= ""maximum"" and red_op!= ""minimum"" ) : raise ValueError ( ""Can't request indices for something other than"" "" minimum or maximum"" ) self. return_indices = return_indices",False,any((a >= 8 for a in axis)),len(axis) > 8,0.6600173711776733 763,"def testformat ( formatstr, args, output = None, limit = None, overflowok = False ) : if verbose : if output : print ( ""{!a} % {!a} =? {!a}..."". format ( formatstr, args, output ), end = "" "" ) else : print ( ""{!a} % {!a} works?..."". format ( formatstr, args ), end = "" "" ) try : result = formatstr % args except OverflowError : if not overflowok : raise if verbose : print ( ""overflow (this is fine)"" ) else : if : if verbose : print ( ""no"" ) raise AssertionError ( ""%r %% %r == %r!= %r"" % ( formatstr, args, result, output ) ) elif ( output and limit is not None and ( len ( result )!= len ( output ) or result [ : limit ]!= output [ : limit ] ) ) : if verbose : print ( ""no"" ) print ( ""%s %% %s == %s!= %s"" % ( repr ( formatstr ), repr ( args ), repr ( result ), repr ( output ) ) ",False,output and limit is None and (result != output),overflowok,0.6537837982177734 764,"def check_format_version ( filename, data ) : format_version_ = data. 
pop ( ""format_version"", None ) if format_version_ is not None : try : format_version_ = int ( format_version_ ) except : return if : print_warning ( ""Loading from %s may fail: newer format version (%d) than current "" ""format version (%d)"" % ( filename, format_version_, format_version ) )",False,format_version_ > format_version,format_version_ > 6,0.6510088443756104 765,"def describe_volumes ( self, volume_ids = None, filters = None ) : matches = self. volumes. values ( ) if volume_ids : matches = [ vol for vol in matches if vol. id in volume_ids ] if : unknown_ids = set ( volume_ids ) - set ( matches ) raise InvalidVolumeIdError ( unknown_ids ) if filters : matches = generic_filter ( filters, matches ) return matches",False,len(volume_ids) > len(matches),volume_ids,0.6511069536209106 766,"def acquire ( self, timeout = None ) : timeout = timeout if timeout is not None else self. timeout end_time = time. time ( ) if timeout is not None and timeout > 0 : end_time += timeout while True : try : os. symlink ( self. unique_name, self. lock_file ) except OSError : if self. i_am_locking ( ) : return else : if timeout is not None and time. time ( ) > end_time : if : raise LockTimeout ( ""Timeout waiting to acquire"" "" lock for %s"" % self. path ) else : raise AlreadyLocked ( ""%s is already locked"" % self. path ) time. sleep ( timeout / 10 if timeout is not None else 0.1 ) else : return",False,timeout > 0,timeout is None or timeout < 0,0.679872989654541 767,"def save_data ( data, _id, path, do_pickle = True, silent = False ) : """"""Save data to a diskfile"""""" if not silent : logging. debug ( ""[%s] Saving data for %s in %s"", misc. caller_name ( ), _id, path ) path = os. path. join ( path, _id ) for t in range ( 3 ) : try : with open ( path, ""wb"" ) as data_file : if do_pickle : pickle. dump ( data, data_file, protocol = pickle. HIGHEST_PROTOCOL ) else : data_file. write ( data ) break except : if silent : pass elif : logging. error ( T ( ""Saving %s failed"" ), path ) logging. info ( ""Traceback: "", exc_info = True ) else : time. sleep ( 0.1 )",False,t == 2,not silent,0.673535943031311 768,"def open_file_input ( cli_parsed ) : files = glob. glob ( os. path. join ( cli_parsed. d, ""*report.html"" ) ) if len ( files ) > 0 : print ( ""\n[*] Done! Report written in the "" + cli_parsed. d + "" folder!"" ) print ( ""Would you like to open the report now? [Y/n]"" ) while True : try : response = input ( ). lower ( ) if : return True else : return strtobool ( response ) except ValueError : print ( ""Please respond with y or n"" ) else : print ( ""[*] No report files found to open, perhaps no hosts were successful"" ) return False",False,response == '',response == 'y',0.685572624206543 769,"def _IsDBInstanceReady ( self, instance_id, timeout = IS_READY_TIMEOUT ) : cmd = util. GcloudCommand ( self, ""sql"", ""instances"", ""describe"", instance_id ) start_time = datetime. datetime. now ( ) while True : if : logging. exception ( ""Timeout waiting for sql instance to be ready"" ) return False stdout, _, _ = cmd. Issue ( suppress_warning = True, raise_on_failure = False ) try : json_output = json. loads ( stdout ) state = json_output [ ""state"" ] logging. info ( ""Instance %s state: %s"", instance_id, state ) if state == ""RUNNABLE"" : break except : logging. exception ( ""Error attempting to read stdout. Creation failure."" ) return False time. 
sleep ( 5 ) return True",False,(datetime.datetime.now() - start_time).seconds > timeout,not cmd.issue,0.6474670767784119 770,"def __modules ( self ) : raw_output = self. __module_avail_output ( ). decode ( ""utf-8"" ) for line in StringIO ( raw_output ) : line = line and line. strip ( ) if not line or line. startswith ( ""-"" ) : continue line_modules = line. split ( ) for module in line_modules : if module. endswith ( self. default_indicator ) : module = module [ 0 : - len ( self. default_indicator ) ]. strip ( ) module_parts = module. split ( ""/"" ) module_version = None if : module_version = module_parts [ 1 ] module_name = module_parts [ 0 ] yield module_name, module_version",False,len(module_parts) == 2,len(module_parts) > 1,0.6514577269554138 771,"def _parse_literal_set ( self ) : valid_types = ( six. string_types, Binary, Decimal ) first_type = type ( self. _current [ ""value"" ] ) elements = set ( ) while True : element = self. _current if not self. _match ( ""literal"" ) : raise UnexpectedTokenError ( token = self. _current, expression = self. _expression, expected_type = ""literal"", ) if not isinstance ( element [ ""value"" ], valid_types ) : message = ( ""Sets may only contain numbers, strings, or bytes, "" ""but literal of type `%s` was found"" ) raise InvalidLiteralValueError ( token = self. _current, expression = self. _expression, message = message % type ( element [ ""type"" ] ), ) if not isinstance ( element [ ""value"" ], first_type ) : message = ( ""Set values must all be of the same type. First type was "" ""`%s`, but found value of type `%s`"" ) raise InvalidLiteralValueError ( token = self. _current, expression = self. _expression, ",False,not self._match('comma'),self._type is None,0.6520204544067383 772,"def index_structure ( structure, path, path_separator = ""/"" ) : """"""Follows :obj:`path` in a nested structure of objects, lists, and dicts."""""" keys = path. split ( path_separator ) for i, key in enumerate ( keys ) : current_path = ""%s%s"" % ( path_separator, path_separator. join ( keys [ : i ] ) ) if isinstance ( structure, list ) : try : index = int ( key ) except ValueError : raise ValueError ( ""Object referenced by path '%s' is a list, but got non "" ""integer index '%s'"" % ( current_path, key ) ) if index < 0 or index >= len ( structure ) : raise ValueError ( ""List referenced by path '%s' has length %d, but got "" ""out of range index %d"" % ( current_path, len ( structure ), index ) ) structure = structure [ index ] elif : structure = structure. get ( key ) if structure is None : raise ValueError ( ""Dictionary referenced by path '%s' does not have the "" ""key '%s'"" % ( current_path, key ) ) else : optimal_dd = None optimal_measure = numpy. inf for tup in tools. find_conf_files ( cwd ) : dd = tup [ 1 ] if : if dd [ ""results.train_y_misclass"" ] < optimal_measure : optimal_measure = dd [ ""results.train_y_misclass"" ] optimal_dd = dd print ( ""Optimal results.train_y_misclass:"", str ( optimal_measure ) ) for key, value in optimal_dd. items ( ) : if ""hyper_parameters"" in key : print ( key + "": "" + str ( value ) )",False,'results.train_y_misclass' in dd,dd is not None,0.6543463468551636 774,"def from_json ( self, value, trans, other_values = { } ) : if self. multiple : tag_list = [ ] if isinstance ( value, list ) or isinstance ( value, str ) : if not isinstance ( value, list ) : value = value. split ( ""\n"" ) for tag_str in value : for tag in str ( tag_str ). split ( "","" ) : tag = tag. strip ( ) if : tag_list. 
append ( tag ) value = tag_list else : if not value : value = None return super ( ). from_json ( value, trans, other_values, require_legal_value = False )",True,tag,tag,0.6900748014450073 775,"def live ( self ) : if os. geteuid ( )!= 0 : self. session. logging. error ( ""You are not root. It is likely that some operations "" ""may not be available."" ) with self. session : self. session. SetParameter ( ""cache"", ""timed"" ) self. session. SetParameter ( ""live_mode"", self. plugin_args. mode ) self. session. SetParameter ( ""session_name"", ""Live (%s)"" % self. plugin_args. mode ) if : try : load_as = self. session. plugins. load_as ( session = self. session ) base_as = standard. FileAddressSpace ( session = self. session, filename = ""/proc/kcore"" ) self. session. physical_address_space = load_as. GuessAddressSpace ( base_as = base_as ) self. session. SetParameter ( ""session_name"", ""Live(/proc/kcore)"" ) except IOError as e : self. session. logging. error ( ""Unable to load physical memory: %s "", e )",False,self.plugin_args.mode == 'Memory',self.session.physical_address_space is not None,0.651208221912384 776,"def _generate_tags ( self, base = None ) : """"""Return dictionary of tags, mapping tag names to values."""""" tags = { } for key in self. tag_fields : if key. startswith ( ""rg_"" ) : tags [ key ] = 1.0 elif : tags [ key ] = - 1 else : tags [ key ] = ""value\u2010%s"" % key for key in [ ""disc"", ""disctotal"", ""track"", ""tracktotal"", ""bpm"" ] : tags [ key ] = 1 tags [ ""art"" ] = self. jpg_data tags [ ""comp"" ] = True date = datetime. date ( 2001, 4, 3 ) tags [ ""date"" ] = date tags [ ""year"" ] = date. year tags [ ""month"" ] = date. month tags [ ""day"" ] = date. day original_date = datetime. date ( 1999, 5, 6 ) tags [ ""original_date"" ] = original_date tags [ ""original_year"" ] = original_date. year tags [ ""original_month"" ] = original_date. month tags [ ""original_day"" ] = original_date. day return tags",False,key.startswith('r128_'),key == 'clcl',0.654512345790863 777,"def parse_simple ( d, data ) : units = { } for v in data [ d ] : key = v [ ""name"" ] if : continue key_to_insert = make_key ( key ) if key_to_insert in units : index = 2 tmp = f""{key_to_insert}_{index}"" while tmp in units : index += 1 tmp = f""{key_to_insert}_{index}"" key_to_insert = tmp units [ key_to_insert ] = v [ ""id"" ] return units",False,not key,key in units,0.669816255569458 778,"def _create_examples ( cls, data_path, set_type ) : curr_token_list, curr_pos_list = [ ], [ ] data_lines = read_file_lines ( data_path, ""r"", encoding = ""utf-8"" ) examples = [ ] idx = 0 for data_line in data_lines : data_line = data_line. strip ( ) if : if set_type == ""test"" : line_tokens = data_line. split ( ""\t"" ) if len ( line_tokens ) == 2 : token, pos = line_tokens else : token, pos = data_line, None else : token, pos = data_line. split ( ""\t"" ) curr_token_list. append ( token ) curr_pos_list. append ( pos ) else : examples. append ( Example ( guid = ""%s-%s"" % ( set_type, idx ), tokens = curr_token_list, pos_list = curr_pos_list, ) ) idx += 1 curr_token_list, curr_pos_list = [ ], [ ] if curr_token_list : examples",True,data_line,data_line,0.6569391489028931 779,"def post ( self, request, * args, ** kwargs ) : contact_id = kwargs. get ( ""pk"" ) self. object = get_object_or_404 ( Contact, id = contact_id ) if ( self. request. user. role!= ""ADMIN"" and not self. request. user. is_superuser and self. request. user!= self. object. created_by ) or self. object. company!= self. request. 
company : raise PermissionDenied else : if : self. object. address. delete ( ) self. object. delete ( ) if self. request. is_ajax ( ) : return JsonResponse ( { ""error"" : False } ) return redirect ( ""contacts:list"" )",False,self.object.address_id,self.object.address,0.651253879070282 780,"def _get_data_from_buffer ( obj ) : try : view = memoryview ( obj ) except TypeError : if : view = memoryview ( buffer ( obj ) ) warnings. warn ( ""using old buffer interface to unpack %s; "" ""this leads to unpacking errors if slicing is used and "" ""will be removed in a future version"" % type ( obj ), RuntimeWarning, stacklevel = 3, ) else : raise if view. itemsize!= 1 : raise ValueError ( ""cannot unpack from multi-byte object"" ) return view",False,PY2,view.itemsize == 0,0.7054789066314697 781,"def __repr__ ( self ) : class_name = self. __class__. __name__ configuration = { } self. config. _populate_values ( ) for hp_name in self. config : if self. config [ hp_name ] is not None : configuration [ hp_name ] = self. config [ hp_name ] configuration_string = """". join ( [ ""configuration={\n "", "",\n "". join ( [ ""'%s': %s"" % ( hp_name, repr ( configuration [ hp_name ] ) ) for hp_name in sorted ( configuration ) ] ), ""}"", ] ) if len ( self. dataset_properties ) > 0 : dataset_properties_string = [ ] dataset_properties_string. append ( ""dataset_properties={"" ) for i, item in enumerate ( self. dataset_properties. items ( ) ) : if : dataset_properties_string. append ( "",\n "" ) else : dataset_properties_string. append ( ""\n "" ) if isinstance ( item [ 1 ], str ) : dataset_properties_string. append ( ""'%s': '%s'"" % ( item [ 0 ], item [ 1 ] ) ) else : dataset_properties_string. append ( ""'%s':",False,i != 0,i > 0,0.6818699836730957 782,"def unwrap ( self ) : if not self. _sslobj : raise ValueError ( ""No SSL wrapper around "" + str ( self ) ) try : shutdown = self. _sslobj. shutdown except AttributeError : shutdown = self. _sslobj. unwrap s = self. _sock while True : try : s = shutdown ( ) break except SSLWantReadError : if : raise self. _wait ( self. _read_event ) except SSLWantWriteError : if : raise self. _wait ( self. _write_event ) except OSError as e : if e. errno == 0 : break raise self. _sslobj = None assert s is self. _sock return self",False,self.timeout == 0.0,self._sock is None,0.660361647605896 783,"def render_text ( self, outfd, data ) : self. table_header ( outfd, [ ( ""Offset (V)"", ""[addrpad]"" ), ( ""Filter Name"", ""50"" ), ( ""Filter Member"", ""16"" ), ( ""Socket (V)"", ""[addrpad]"" ), ( ""Handler"", ""[addrpad]"" ), ( ""Module"", ""30"" ), ( ""Status"", """" ), ], ) for ( good, filter, filter_name, filter_socket, member, ptr, module ) in data : status = ""OK"" if : status = ""UNKNOWN"" self. table_row ( outfd, filter. obj_offset, filter_name, member, filter_socket, ptr, module, status, )",False,good == 0,good,0.6830132603645325 784,"def _extract_prev_responses ( self, batch ) : warn_once ( ""WARNING: This code is specific to self-feeding formatted examples"" ) p1 = self. dict. txt2vec ( ""__p1__"" ) [ 0 ] p2 = self. dict. txt2vec ( ""__p2__"" ) [ 0 ] self. prev_responses = [ ] for text_vec in batch. text_vec : p1s = ( text_vec == p1 ). nonzero ( ) p2s = ( text_vec == p2 ). nonzero ( ) if : response_vec = text_vec [ p2s [ - 1 ] + 1 : p1s [ - 1 ] ] else : response_vec = [ self. NULL_IDX ] response = self. dict. vec2txt ( response_vec ) self. prev_responses. 
append ( response )",False,len(p1s) and len(p2s),p1s,0.6521165370941162 785,"def set_clock ( self, prompt ) : """"""Set station clock to current time."""""" ans = None while ans not in [ ""y"", ""n"" ] : v = self. station. getTime ( ) vstr = weeutil. weeutil. timestamp_to_string ( v ) print ( ""Station clock is"", vstr ) if prompt : ans = input ( ""Set station clock (y/n)? "" ) else : print ( ""Setting station clock"" ) ans = ""y"" if ans == ""y"" : self. station. setTime ( ) v = self. station. getTime ( ) vstr = weeutil. weeutil. timestamp_to_string ( v ) print ( ""Station clock is now"", vstr ) elif : print ( ""Set clock cancelled."" )",False,ans == 'n',prompt,0.6654626131057739 786,"def draw_grayscale_heightmap ( world, target ) : min_elev_sea = None max_elev_sea = None min_elev_land = None max_elev_land = None for y in range ( world. height ) : for x in range ( world. width ) : e = world. elevation [ ""data"" ] [ y ] [ x ] if world. is_land ( ( x, y ) ) : if min_elev_land is None or e < min_elev_land : min_elev_land = e if max_elev_land is None or e > max_elev_land : max_elev_land = e else : if min_elev_sea is None or e < min_elev_sea : min_elev_sea = e if : max_elev_sea = e elev_delta_land = max_elev_land - min_elev_land elev_delta_sea = max_elev_sea - min_elev_sea for y in range ( world. height ) : for x in range ( world. width ) : e = world. elevation [ ""data"" ] [ y ] [ x ] if world. is_land ( ( x, y ) ) : c = int ( ( ( e - min_elev_land ) * 127 ) / elev_delta",False,max_elev_sea is None or e > max_elev_sea,max_elev_sea is None or e < min_elev_sea,0.650437593460083 787,"def post ( self, request, format = None ) : token_limit_per_user = self. get_token_limit_per_user ( ) if token_limit_per_user is not None : now = timezone. now ( ) token = request. user. auth_token_set. filter ( expiry__gt = now ) if : return Response ( { ""error"" : ""Maximum amount of tokens allowed per user exceeded."" }, status = status. HTTP_403_FORBIDDEN, ) token_ttl = self. get_token_ttl ( ) instance, token = AuthToken. objects. create ( request. user, token_ttl ) user_logged_in. send ( sender = request. user. __class__, request = request, user = request. user ) data = self. get_post_response_data ( request, token, instance ) return Response ( data )",False,token.count() >= token_limit_per_user,token_limit_per_user.valid(),0.6475284099578857 788,"def get_external_addresses ( self, label = None ) -> List [ str ] : result = [ ] for c in self. _conf [ ""pools"" ]. values ( ) : if : if label == c [ ""label"" ] : result. append ( c [ ""external_address"" ] [ 0 ] ) else : result. append ( c [ ""external_address"" ] [ 0 ] ) return result",True,label is not None,label is not None,0.6610390543937683 789,"def _cast_attr ( value, default ) : env. require ( lore. dependencies. DATEUTIL ) import dateutil if isinstance ( default, bool ) : return value. lower ( ) in [ ""1"", ""t"", ""true"" ] elif isinstance ( default, int ) : return int ( value ) elif isinstance ( default, float ) : return float ( value ) elif isinstance ( default, datetime. date ) : return dateutil. parser. parse ( value ). date ( ) elif isinstance ( default, datetime. datetime ) : return dateutil. parser. parse ( value ) elif isinstance ( value, str ) and default is None : if value. lower ( ) in [ ""t"", ""true"" ] : return True elif value. lower ( ) in [ ""f"", ""false"" ] : return False elif value. 
lower ( ) in [ ""none"" ] : return None try : f = float ( value ) i = int ( f ) if : return i elif str ( f ) == value : return f except ValueError : pass try : return dateutil. parser. parse ( value ) except ValueError : pass return value",False,str(i) == value,i > default,0.6571345329284668 790,"def get_formatted_stats ( self ) : """"""Get percentage or number of rar's done"""""" if self. cur_setname and self. cur_setname in self. total_volumes : if : return ""%02d/%02d"" % ( self. cur_volume, self. total_volumes [ self. cur_setname ] ) return self. cur_volume",False,self.total_volumes[self.cur_setname] >= self.cur_volume and self.cur_volume,self.cur_volume and self.cur_setname in self.total_volumes[self.cur_setname],0.6495836973190308 791,"def __call__ ( self, environ, start_response ) : """"""Dispatch the requests."""""" request = Request ( environ ) response = self. debug_application if request. args. get ( ""__debugger__"" ) == ""yes"" : cmd = request. args. get ( ""cmd"" ) arg = request. args. get ( ""f"" ) secret = request. args. get ( ""s"" ) traceback = self. tracebacks. get ( request. args. get ( ""tb"", type = int ) ) frame = self. frames. get ( request. args. get ( ""frm"", type = int ) ) if cmd == ""resource"" and arg : response = self. get_resource ( request, arg ) elif cmd == ""paste"" and traceback is not None and secret == self. secret : response = self. paste_traceback ( request, traceback ) elif : response = self. get_source ( request, frame ) elif ( self. evalex and cmd is not None and frame is not None and self. secret == secret ) : response = self. execute_command ( request, cmd, frame ) elif ( self. evalex and self. console_path is not None and request. path == self. console_path ) : response = self. display_console ( request ) return response ( environ, start_response )",False,cmd == 'source' and frame and (self.secret == secret),cmd == 'source',0.6521545648574829 792,"def get_tokens_unprocessed ( self, text ) : bashlexer = BashLexer ( ** self. options ) pos = 0 curcode = """" insertions = [ ] for match in line_re. finditer ( text ) : line = match. group ( ) m = re. match ( r""^((?:\[?\S+@[^$#%]+\]?\s*)[$#%])(.*\n?)"", line ) if m : if : pos = match. start ( ) insertions. append ( ( len ( curcode ), [ ( 0, Generic. Prompt, m. group ( 1 ) ) ] ) ) curcode += m. group ( 2 ) else : if insertions : toks = bashlexer. get_tokens_unprocessed ( curcode ) for i, t, v in do_insertions ( insertions, toks ) : yield pos + i, t, v yield match. start ( ), Generic. Output, line insertions = [ ] curcode = """" if insertions : for i, t, v in do_insertions ( insertions, bashlexer. get_tokens_unprocessed ( curcode ) ) : yield pos + i, t, v",False,not insertions,pos > 0,0.6784878969192505 793,"def parse_bzr_stats ( status ) : stats = RepoStats ( ) statustype = ""changed"" for statusline in status : if statusline [ : 2 ] == "" "" : setattr ( stats, statustype, getattr ( stats, statustype ) + 1 ) elif : statustype = ""staged"" elif statusline == ""unknown:"" : statustype = ""new"" else : statustype = ""changed"" return stats",False,statusline == 'added:',statusline == 'staged:',0.6653767824172974 794,"def testFunctions ( self ) : from zim. formats. wiki import match_url, is_url for input, input_is_url, tail in self. examples : if : if tail : self. assertEqual ( match_url ( input ), input [ : - len ( tail ) ] ) self. assertFalse ( is_url ( input ) ) else : self. assertEqual ( match_url ( input ), input ) self. assertTrue ( is_url ( input ) ) else : self. 
assertEqual ( match_url ( input ), None ) self. assertFalse ( is_url ( input ) )",True,input_is_url,input_is_url,0.65309739112854 795,"def execute ( self, fullpath, fstat, test = False ) : result = [ ] for arg in self. fmt : if arg == ""path"" : result. append ( fullpath ) elif : result. append ( os. path. basename ( fullpath ) ) elif arg == ""size"" : result. append ( fstat [ stat. ST_SIZE ] ) elif arg == ""type"" : result. append ( _FILE_TYPES. get ( stat. S_IFMT ( fstat [ stat. ST_MODE ] ), ""?"" ) ) elif arg == ""mode"" : result. append ( int ( oct ( fstat [ stat. ST_MODE ] ) [ - 3 : ], 8 ) ) elif arg == ""mtime"" : result. append ( fstat [ stat. ST_MTIME ] ) elif arg == ""user"" : uid = fstat [ stat. ST_UID ] try : result. append ( pwd. getpwuid ( uid ). pw_name ) except KeyError : result. append ( uid ) elif arg == ""group"" : gid = fstat [ stat. ST_GID ] try : result. append ( grp. getgrgid ( gid ). gr_name ) except KeyError : result. append ( gid ) elif arg == ""md5"" : if stat. S_",False,arg == 'name',arg == 'basename',0.6608589887619019 796,"def tagset ( bot, event, * args ) : """"""set a single tag. usage: tagset <""conv""|""user""|""convuser""> """""" if len ( args ) == 3 : [ type, id, tag ] = args type, id = _tagshortcuts ( event, type, id ) if : message = _ ( ""tagged {} with {} "". format ( id, tag ) ) else : message = _ ( "" {} unchanged"". format ( id ) ) else : message = _ ( ""supply type, id, tag"" ) yield from bot. coro_send_message ( event. conv_id, message )",False,"bot.tags.add(type, id, tag)",type == 'tag',0.6495245099067688
797,"def _convert_args ( self, config : JobTemplateConfig, args : Dict [ str, Any ] ) -> JobTemplateRequest : """"""convert arguments from argparse into a JobTemplateRequest"""""" user_fields = { } for field in config. user_fields : value = None if : value = args [ field. name ] elif field. name in args [ ""parameters"" ] : value = args [ ""parameters"" ] [ field. name ] elif field. required : raise Exception ( ""missing field: %s"" % field. name ) if field. name == ""target_exe"" and isinstance ( value, str ) : value = os. path. basename ( value ) if value is not None : user_fields [ field. name ] = value containers = self. _convert_container_args ( config, args ) request = JobTemplateRequest ( name = config. name, user_fields = user_fields, containers = containers ) return request",True,field.name in args,field.name in args,0.66373610496521 798,"def test_updater ( self ) : res, value = linkcheck. updater. check_update ( ) self. assertTrue ( type ( res ) == bool ) if res : self. assertTrue ( value is None or isinstance ( value, tuple ), repr ( value ) ) if : self. assertEqual ( len ( value ), 2 ) version, url = value self. assertTrue ( isinstance ( version, basestring ), repr ( version ) ) self. assertTrue ( url is None or isinstance ( url, basestring ), repr ( url ) ) else : self. assertTrue ( isinstance ( value, unicode ), repr ( value ) )",True,"isinstance(value, tuple)","isinstance(value, tuple)",0.6493678092956543 799,"def _flush ( self ) : if self. _data : if self. _last is not None : text = """". join ( self. _data ) if : assert self. _last. tail is None, ""internal error (tail)"" self. _last. tail = text else : assert self. _last. text is None, ""internal error (text)"" self. _last. text = text self. _data = [ ]",True,self._tail,self._tail,0.6738963723182678 800,"def oauth_auth ( self, token = None, oauth_verifier = None, signature_type = SIGNATURE_TYPE_AUTH_HEADER ) : key, secret = self. get_key_and_secret ( ) oauth_verifier = oauth_verifier or self. data. get ( ""oauth_verifier"" ) if token : resource_owner_key = token. get ( ""oauth_token"" ) resource_owner_secret = token. get ( ""oauth_token_secret"" ) if not resource_owner_key : raise AuthTokenError ( self, ""Missing oauth_token"" ) if : raise AuthTokenError ( self, ""Missing oauth_token_secret"" ) else : resource_owner_key = None resource_owner_secret = None state = self. get_or_create_state ( ) return OAuth1 ( key, secret, resource_owner_key = resource_owner_key, resource_owner_secret = resource_owner_secret, callback_uri = self. get_redirect_uri ( state ), verifier = oauth_verifier, signature_type = signature_type, )",False,not resource_owner_secret,not secret,0.6539005041122437 801,"def get_conv_output_size ( input_size, kernel_size, stride, padding, dilation ) : ndim = len ( input_size ) output_size = [ ] for i in range ( ndim ) : size = ( input_size [ i ] + 2 * padding [ i ] - dilation [ i ] * ( kernel_size [ i ] - 1 ) - 1 ) // stride [ i ] + 1 if : output_size. append ( 1 ) else : output_size. append ( size ) return output_size",False,kernel_size[i] == -1,size == -1,0.6572177410125732 802,"def process_question ( qtxt ) : question = """" skip = False for letter in qtxt : if letter == ""<"" : skip = True if letter == "">"" : skip = False if : continue if letter.
isalnum ( ) or letter == "" "" : if letter == "" "" : letter = ""_"" question += letter. lower ( ) return question",True,skip,skip,0.6906230449676514 803,"def compute_out ( v, downsample, stride ) : if ignore_border : if downsample == stride : return v // stride else : out = ( v - downsample ) // stride + 1 if : return tensor. maximum ( out, 0 ) else : return np. maximum ( out, 0 ) else : if isinstance ( v, theano. Variable ) : return tensor. switch ( tensor. ge ( stride, downsample ), ( v - 1 ) // stride + 1, tensor. maximum ( 0, ( v - 1 - downsample ) // stride + 1 ) + 1, ) elif stride >= downsample : return ( v - 1 ) // stride + 1 else : return max ( 0, ( v - 1 - downsample + stride ) // stride ) + 1",False,"isinstance(out, theano.Variable)",downsample == downsample,0.6468735933303833 804,"def gather_callback_args ( self, obj, callbacks ) : session = sa. orm. object_session ( obj ) for callback in callbacks : backref = callback. backref root_objs = getdotattr ( obj, backref ) if backref else obj if : if not isinstance ( root_objs, Iterable ) : root_objs = [ root_objs ] with session. no_autoflush : for root_obj in root_objs : if root_obj : args = self. get_callback_args ( root_obj, callback ) if args : yield args",False,root_objs,self.has_root_functions,0.6755106449127197 805,"def authenticate ( self, * args, ** kwargs ) : authenticated = super ( ). authenticate ( * args, ** kwargs ) if authenticated : allow_anonymous = self. auth_config. get ( ""allow-anonymous"", True ) if : authenticated = True self. context. logger. debug ( ""Authentication success: config allows anonymous"" ) else : try : session = kwargs. get ( ""session"", None ) authenticated = True if session. username else False if self. context. logger. isEnabledFor ( logging. DEBUG ) : if authenticated : self. context. logger. debug ( ""Authentication success: session has a non empty username"" ) else : self. context. logger. debug ( ""Authentication failure: session has an empty username"" ) except KeyError : self. context. logger. warning ( ""Session informations not available"" ) authenticated = False return authenticated",False,allow_anonymous,allow_anonymous is False,0.6651857495307922 806,"def _writeMockResultFile ( result ) : """"""writes a test result as a gtest compatible test runner would do"""""" with open ( result. filename, ""w"" ) as f : f. write ( '\n' ) if : f. write ( ""\n"" ) for suite in result. suites : f. write ( ' + str ( suite. tests ) + '"" failures=""' + str ( suite. fail ) + '"" time=""' + str ( suite. time ) + '"" errors=""' + str ( suite. errors ) + '"" name=""' + suite. name + '"">\n' ) for case in suite. cases : f. write ( ' + case. name + '"" status=""run"" time=""' + str ( case. time ) + '"" classname=""' + case. classname + '"">\n' ",False,len(result.suites) > 1 or result.noSuitesRoot is False,result.suites,0.6570720672607422 807,"def LeaseCronJobs ( self, cronjob_ids = None, lease_time = None ) : """"""Leases all available cron jobs."""""" leased_jobs = [ ] now = rdfvalue. RDFDatetime. Now ( ) expiration_time = now + lease_time for job in self. cronjobs. values ( ) : if cronjob_ids and job. cron_job_id not in cronjob_ids : continue existing_lease = self. cronjob_leases. get ( job. cron_job_id ) if : self. cronjob_leases [ job. cron_job_id ] = ( expiration_time, utils. ProcessIdString ( ), ) job = job. Copy ( ) job. leased_until, job. leased_by = self. cronjob_leases [ job. cron_job_id ] leased_jobs. 
append ( job ) return leased_jobs",False,existing_lease is None or existing_lease[0] < now,existing_lease,0.6531012058258057 808,"def __get__ ( self, instance, instance_type = None ) : if instance : if : rel_obj = self. get_obj ( instance ) if rel_obj : instance. _obj_cache [ self. att_name ] = rel_obj return instance. _obj_cache. get ( self. att_name ) return self",True,self.att_name not in instance._obj_cache,self.att_name not in instance._obj_cache,0.6591638922691345 809,"def _flatten_settings_from_form ( self, settings, form, form_values ) : """"""Take a nested dict and return a flat dict of setting values."""""" setting_values = { } for field in form. c : if isinstance ( field, _ContainerMixin ) : setting_values. update ( self. _flatten_settings_from_form ( settings, field, form_values [ field. _name ] ) ) elif : setting_values [ field. _name ] = form_values [ field. _name ] return setting_values",False,field._name in settings,"isinstance(field, field_BaseField)",0.6617257595062256 810,"def _file_path_changed ( self, fpath ) : value = fpath. get ( ) if len ( value ) == 0 : return else : if : d_type = find_file_data_type ( fpath. get ( ) ) self. reader = eval ( ""tvtk.XML%sReader()"" % d_type ) reader = self. reader reader. file_name = value reader. update ( ) try : n = reader. number_of_outputs except AttributeError : n = reader. number_of_output_ports outputs = [ ] for i in range ( n ) : outputs. append ( reader. get_output ( i ) ) aa = self. _assign_attribute aa. input = outputs [ 0 ] outputs [ 0 ] = aa. output self. update_data ( ) self. outputs = outputs self. output_info. datasets = [ get_tvtk_dataset_name ( outputs [ 0 ] ) ] self. name = self. _get_name ( )",False,self.reader is None,self.read_tab == False,0.6607459783554077 811,"def run_for ( self, args ) : """"""Running commands from args namespace"""""" logger. debug ( ""Call run_for on {}"". format ( self. name ) ) if args. remove : if args. destdir : message = ""You can't specify a destination dir while removing a framework"" logger. error ( message ) UI. return_main_screen ( status_code = 2 ) self. remove ( ) else : install_path = None auto_accept_license = False if args. destdir : install_path = os. path. abspath ( os. path. expanduser ( args. destdir ) ) if : auto_accept_license = True self. setup ( install_path = install_path, auto_accept_license = auto_accept_license )",False,self.expect_license and args.accept_license,not install_path or install_path == '',0.6493919491767883 812,"def test_base ( expressions, sources ) : base, x, sql, bc, mongo = sources for expr, exclusions in expressions. items ( ) : if : model = into ( DataFrame, into ( np. ndarray, expr. _subs ( { t : data ( base, t. dshape ) } ) ) ) else : model = compute ( expr. _subs ( { t : data ( base, t. dshape ) } ) ) print ( ""\nexpr: %s\n"" % expr ) for source in sources : if source is None or id ( source ) in map ( id, exclusions ) : continue print ( ""%s <- %s"" % ( typename ( model ), typename ( source ) ) ) T = data ( source ) if : result = into ( type ( model ), expr. _subs ( { t : T } ) ) if isscalar ( expr. dshape. measure ) : assert set ( into ( list, result ) ) == set ( into ( list, model ) ) else : assert df_eq ( result, model ) elif isrecord ( expr. dshape ) : result = compute ( expr. 
_subs ( { t : T } ) ) assert into ( tuple, result ) == into ( tuple, model ) else : result = compute ( expr.",False,iscollection(expr.dshape),isrecord(expr.dshape),0.6528378129005432 813,"def load ( self, *, config_fd : TextIO = None ) -> None : config = """" if config_fd : config = config_fd. read ( ) else : file_path = """" if os. path. exists ( LOCAL_CONFIG_FILENAME ) : file_path = LOCAL_CONFIG_FILENAME logger. warning ( ""Using local configuration ({!r}), changes will not be "" ""persisted."". format ( file_path ) ) else : file_path = BaseDirectory. load_first_config ( ""snapcraft"", ""snapcraft.cfg"" ) if : with open ( file_path, ""r"" ) as f : config = f. read ( ) if config : _load_potentially_base64_config ( self. parser, config )",False,file_path and os.path.exists(file_path),file_path,0.6474583148956299 814,"def main ( ) : args = parse_cmdline ( ) if not os. path. exists ( args. input_file ) : print ( f""Error: input file {args.input_file} inaccessible or does not exist, check path"" ) sys. exit ( 1 ) with open_file_read ( args. input_file ) as fh : yaml_input = fh. read ( ) if args. prepend_file : if : print ( f""Error: prepend input file {args.prepend_file} inaccessible or does not exist, check path"" ) sys. exit ( 1 ) with open_file_read ( args. prepend_file ) as fh : prepend = fh. read ( ) yaml_input = prepend + yaml_input if not args. output_file : file_out = args. input_file pre, _ = os. path. splitext ( file_out ) file_out = pre else : file_out = args. output_file file_out = os. path. abspath ( file_out ) parse ( yaml_input, file_out = file_out )",True,not os.path.exists(args.prepend_file),not os.path.exists(args.prepend_file),0.6468584537506104 815,"def GetPoseS_GTF ( cfg, dlc_cfg, sess, inputs, outputs, cap, nframes ) : """"""Non batch wise pose estimation for video cap."""""" if cfg [ ""cropping"" ] : ny, nx = checkcropping ( cfg, cap ) pose_tensor = predict. extract_GPUprediction ( outputs, dlc_cfg ) PredictedData = np. zeros ( ( nframes, 3 * len ( dlc_cfg [ ""all_joints_names"" ] ) ) ) pbar = tqdm ( total = nframes ) counter = 0 step = max ( 10, int ( nframes / 100 ) ) while cap. isOpened ( ) : if counter % step == 0 : pbar. update ( step ) ret, frame = cap. read ( ) if : frame = cv2. cvtColor ( frame, cv2. COLOR_BGR2RGB ) if cfg [ ""cropping"" ] : frame = img_as_ubyte ( frame [ cfg [ ""y1"" ] : cfg [ ""y2"" ], cfg [ ""x1"" ] : cfg [ ""x2"" ] ] ) else : frame = img_as_ubyte ( frame ) pose = sess. run ( pose_tensor, feed_dict = { inputs : np. expand_dims ( frame, axis = 0 ). astype ( float ) }, ) pose [ :, [ 0, 1, 2 ] ] = pose [ :, [ 1",True,ret,ret,0.7049673795700073 816,"def _arg_with_type ( self ) : for t in self. d [ ""Args"" ] : m = re. search ( ""([A-Za-z0-9_-]+)\s{0,4}(\(.+\))\s{0,4}:"", t ) if : self. args [ m. group ( 1 ) ] = m. group ( 2 ) return self. args",True,m,m,0.6985384821891785 817,"def recent_events ( self, events ) : frame = events. get ( ""frame"" ) if self. active and frame : recent_pupil_positions = events [ ""pupil_positions"" ] gray_img = frame. gray if self. clicks_to_close <= 0 : self. stop ( ) return self. markers = find_concetric_circles ( gray_img, min_ring_count = 4 ) if len ( self. markers ) > 0 : self. detected = True marker_pos = self. markers [ 0 ] [ 0 ] [ 0 ] self. pos = normalize ( marker_pos, ( frame. width, frame. height ), flip_y = True ) else : self. detected = False self. pos = None on_position = self. lead_in < self. screen_marker_state if : ref = { } ref [ ""norm_pos"" ] = self. 
pos ref [ ""screen_pos"" ] = marker_pos ref [ ""timestamp"" ] = frame. timestamp self. ref_list. append ( ref ) for p_pt in recent_pupil_positions : if p_pt [ ""confidence"" ] > self. pupil_confidence_threshold : self. pupil_list. append ( p_pt ) if self. detected or not on_position : <",False,on_position and self.detected,on_position,0.655582070350647 818,"def _config ( _molecule_file, request ) : with open ( _molecule_file ) as f : d = util. safe_load ( f ) if hasattr ( request, ""param"" ) : if : d2 = util. safe_load ( request. getfixturevalue ( request. param ) ) else : d2 = request. getfixturevalue ( request. param ) d = util. merge_dicts ( d, d2 ) return d",False,"isinstance(request.getfixturevalue(request.param), str)",request.param_type == 'string',0.6551940441131592 819,"def __init__ ( self, name = None, invoke_without_command = False, no_args_is_help = None, subcommand_metavar = None, chain = False, result_callback = None, ** attrs ) : Command. __init__ ( self, name, ** attrs ) if no_args_is_help is None : no_args_is_help = not invoke_without_command self. no_args_is_help = no_args_is_help self. invoke_without_command = invoke_without_command if subcommand_metavar is None : if : subcommand_metavar = SUBCOMMANDS_METAVAR else : subcommand_metavar = SUBCOMMAND_METAVAR self. subcommand_metavar = subcommand_metavar self. chain = chain self. result_callback = result_callback if self. chain : for param in self. params : if isinstance ( param, Argument ) and not param. required : raise RuntimeError ( ""Multi commands in chain mode cannot "" ""have optional arguments."" )",False,chain,subcommand_metavar is None,0.6929587125778198 820,"def _get_ilo_version ( self ) : try : self. _get_ilo2 ( '' ) except ResponseError as e : if hasattr ( e, ""code"" ) : if e. code == 405 : return 3 if : return 1 raise return 2",False,e.code == 501,e.code == 504,0.6616497039794922 821,"def del_ ( self, key ) : hash_ = self. hash ( key ) node_ = self. _table [ hash_ ] pre_node = None while node_ is not None : if : if pre_node is None : self. _table [ hash_ ] = node_. next else : pre_node. next = node_. next self. _len -= 1 pre_node = node_ node_ = node_. next",False,node_.key == key,node_.hash() == hash_,0.6645716428756714 822,"def htmlentityreplace_errors ( exc ) : if isinstance ( exc, ( UnicodeEncodeError, UnicodeTranslateError ) ) : res = [ ] codepoints = [ ] skip = False for i, c in enumerate ( exc. object [ exc. start : exc. end ] ) : if skip : skip = False continue index = i + exc. start if : codepoint = utils. surrogatePairToCodepoint ( exc. object [ index : index + 2 ] ) skip = True else : codepoint = ord ( c ) codepoints. append ( codepoint ) for cp in codepoints : e = encode_entity_map. get ( cp ) if e : res. append ( ""&"" ) res. append ( e ) if not e. endswith ( "";"" ) : res. append ( "";"" ) else : res. append ( ""&#x%s;"" % ( hex ( cp ) [ 2 : ] ) ) return ( """". join ( res ), exc. 
end ) else : return xmlcharrefreplace_errors ( exc )",False,"utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])])",index >= 0,0.6538420915603638 823,"def formatd ( x, code, precision, flags = 0 ) : if flags & DTSF_ALT : alt = ""#"" else : alt = """" if code == ""r"" : fmt = ""%r"" else : fmt = ""%%%s.%d%s"" % ( alt, precision, code ) s = fmt % ( x, ) if flags & DTSF_ADD_DOT_0 : idx = len ( s ) for idx in range ( len ( s ), 0, - 1 ) : c = s [ idx - 1 ] if c in ""eE"" : if s [ idx ] in ""+-"" : idx += 1 s = s [ : idx ] + ""%02d"" % ( int ( s [ idx : ] ) ) break if : break else : if len ( s ) < precision : s += "".0"" else : sign = ""+"" if x < 1 : sign = ""-"" s = ""%s.%se%s%02d"" % ( s [ 0 ], s [ 1 : ], sign, len ( s ) - 1 ) elif code == ""r"" and s. endswith ( "".0"" ) : data = [ ] options = defined_options. copy ( ) for leaf in leaves : if : if len ( leaf. leaves )!= 2 : raise BoxConstructError name, value = leaf. leaves name_head = name. get_head_name ( ) if name_head == ""System`Symbol"" : py_name = name. get_name ( ) elif name_head == ""System`String"" : py_name = ""System`"" + name. get_string_value ( ) else : raise BoxConstructError options [ py_name ] = value else : data. append ( leaf ) return data, options",False,leaf.get_head_name() == 'System`Rule',"hasattr(leaf, 'leaves')",0.6552551984786987 825,"def verify ( self ) : """"""Verify specific targets after build is complete."""""" verify_history = self. _load_verify_history ( ) header_inclusion_history = verify_history [ ""header_inclusion_dependencies"" ] error = 0 verify_details = { } verify_suppress = config. get_item ( ""cc_config"", ""hdr_dep_missing_suppress"" ) for k in sorted ( self. __expanded_command_targets ) : target = self. __build_targets [ k ] if : ok, details = target. verify_hdr_dep_missing ( header_inclusion_history, verify_suppress. get ( target. key, { } ) ) if not ok : error += 1 if details : verify_details [ target. key ] = details self. _dump_verify_details ( verify_details ) self. _dump_verify_history ( ) return error == 0",False,target.type.startswith('cc_') and target.srcs,target.verify_hdr_dep_missing,0.6518244743347168 826,"def validate_common_ids ( self ) -> None : if ( not self. _extracted_metadata or not self. _extracted_metadata. common_id_list or ""apps"" not in self. _config_data ) : return common_id_list = self. _extracted_metadata. common_id_list for app in self. _config_data [ ""apps"" ] : app_common_id = self. _config_data [ ""apps"" ] [ app ]. get ( ""common-id"" ) if : logger. warning ( ""Common ID {common_id!r} specified in app {app!r} is "" ""not used in any metadata file."". format ( common_id = app_common_id, app = app ) )",False,app_common_id not in common_id_list,app_common_id and common_id_list and (common_id_list[common_id] not in self._config_data),0.6500170230865479 827,def test_erratic_draws ( ) : n = [ 0 ] with pytest. raises ( Flaky ) : @ run_to_buffer def x ( data ) : data. draw_bytes ( n [ 0 ] ) data. draw_bytes ( 255 - n [ 0 ] ) if : data. mark_interesting ( ) else : n [ 0 ] += 1,True,n[0] == 255,n[0] == 255,0.6632730960845947 828,"def __init__ ( self, element = None, parentOffset = None, parentEndTime = None, parentage = None, offset = None, endTime = None, ) : super ( ). __init__ ( offset = offset, endTime = endTime ) self. element = element if parentage is not None : parentage = tuple ( parentage ) self. parentage = parentage if parentOffset is not None : parentOffset = float ( parentOffset ) self. 
parentOffset = parentOffset if parentEndTime is not None : parentEndTime = float ( parentEndTime ) self. parentEndTime = parentEndTime if parentOffset is not None and parentEndTime is not None : if : raise TimespanException ( f""offset {parentOffset!r} must be after parentEndTime {parentEndTime!r}"" )",False,parentOffset > parentEndTime,parentOffset > self._max_time,0.6775654554367065 829,"def run ( self ) : try : print ( ""======== request ======== \n Url: %s \n Data: %s"" % ( self. url, self. data ) ) response = pool. urlopen ( ""POST"", self. url, body = self. data, timeout = self. timeout ). data if not response : print ( ""======== response ======== \n response is empty"" ) self. callback ( None ) else : if response. startswith ( codecs. BOM_UTF8 ) : decodeddata = response. decode ( ""utf-8-sig"" ) else : decodeddata = response. decode ( ""utf-8"" ) print ( ""======== response ======== \n %s"" % decodeddata ) self. callback ( json. loads ( decodeddata ) ) print ( ""======== end ========"" ) except Exception as ex : if : print ( str ( ex ) ) set_omnisharp_status ( ""Error talking to "" + self. url ) else : set_omnisharp_status ( ""Server Not Running"" ) self. callback ( None )",False,'checkalivestatus' not in self.url,ex.__class__.__name__ == 'NoSuchConnection',0.6552556753158569 830,"def analyze ( vw ) : align = vw. arch. getPointerSize ( ) rlen = vw. config. viv. analysis. pointertables. table_min_len plist = [ ] for va, pval in vw. findPointers ( ) : if len ( plist ) : lastva, lastptr = plist [ - 1 ] if lastva!= va - align : nloc = vw. getLocation ( lastva + align ) while nloc is not None : if nloc [ L_LTYPE ]!= LOC_POINTER : break lva = nloc [ L_VA ] plist. append ( ( lva, vw. castPointer ( lva ) ) ) nloc = vw. getLocation ( lva + nloc [ L_SIZE ] ) if lastva!= va - align : if : handleArray ( vw, plist ) plist = [ ] plist. append ( ( va, pval ) ) if : handleArray ( vw, plist )",False,len(plist) > rlen,rlen == 1,0.6712081432342529 831,"def root_item_selected ( self, item ) : """"""Root item has been selected: expanding it and collapsing others"""""" for index in range ( self. topLevelItemCount ( ) ) : root_item = self. topLevelItem ( index ) if : self. expandItem ( root_item ) else : self. collapseItem ( root_item )",False,root_item is item,"isinstance(root_item, basestring)",0.6588168144226074 832,"def __get_dynamic_attr ( self, attname, obj, default = None ) : try : attr = getattr ( self, attname ) except AttributeError : return default if callable ( attr ) : if hasattr ( attr, ""func_code"" ) : argcount = attr. func_code. co_argcount else : argcount = attr. __call__. func_code. co_argcount if : return attr ( obj ) else : return attr ( ) return attr",False,argcount == 2,argcount == 0,0.6746383905410767 833,"def read ( self, size = - 1 ) : buf = bytearray ( ) while size!= 0 and self. cursor < self. maxpos : if not self. in_current_block ( self. cursor ) : self. seek_to_block ( self. cursor ) part = self. current_stream. read ( size ) if size > 0 : if : raise EOFError ( ) size -= len ( part ) self. cursor += len ( part ) buf += part return bytes ( buf )",False,len(part) == 0,not part,0.6572363376617432 834,"def Run ( self, cmd_val ) : attrs, arg_r = flag_spec. ParseCmdVal ( ""mapfile"", cmd_val ) arg = arg_types. mapfile ( attrs. attrs ) var_name, _ = arg_r. Peek2 ( ) if var_name is None : var_name = ""MAPFILE"" else : if var_name. 
startswith ( "":"" ) : var_name = var_name [ 1 : ] lines = [ ] while True : line = _ReadLine ( ) if len ( line ) == 0 : break if : line = line [ : - 1 ] lines. append ( line ) state. SetRefArray ( self. mem, var_name, lines ) return 0",False,arg.t and line.endswith('\n'),line.endswith('\n'),0.6455460786819458 835,"def create_bundle ( request ) : bundle = Bundle ( owner = request. user, schema_version = ""uri:oozie:bundle:0.2"" ) if request. method == ""POST"" : bundle_form = BundleForm ( request. POST, instance = bundle ) if : bundle = bundle_form. save ( ) Document. objects. link ( bundle, owner = bundle. owner, name = bundle. name, description = bundle. description, ) return redirect ( reverse ( ""oozie:edit_bundle"", kwargs = { ""bundle"" : bundle. id } ) ) else : request. error ( _ ( ""Errors on the form: %s"" ) % bundle_form. errors ) else : bundle_form = BundleForm ( instance = bundle ) return render ( ""editor/create_bundle.mako"", request, { ""bundle"" : bundle, ""bundle_form"" : bundle_form, }, )",True,bundle_form.is_valid(),bundle_form.is_valid(),0.6521379947662354 836,"def suite ( module_prefix = """", timing_check = None ) : test_modules = [ ""test_associate"", ""test_basics"", ""test_dbenv"", ""test_db"", ""test_compare"", ""test_compat"", ""test_cursor_pget_bug"", ""test_dbobj"", ""test_dbshelve"", ""test_dbtables"", ""test_distributed_transactions"", ""test_early_close"", ""test_fileid"", ""test_get_none"", ""test_join"", ""test_lock"", ""test_misc"", ""test_pickle"", ""test_queue"", ""test_recno"", ""test_replication"", ""test_sequence"", ""test_thread"", ] alltests = unittest. TestSuite ( ) for name in test_modules : module = __import__ ( module_prefix + name, globals ( ), locals ( ), name ) alltests. addTest ( module. test_suite ( ) ) if : alltests. addTest ( unittest. makeSuite ( timing_check ) ) return alltests",False,timing_check,timing_check is not None,0.6534469127655029 837,"def _execute_with_error ( command, error, message ) : try : cli. invocation = cli. invocation_cls ( cli_ctx = cli, parser_cls = cli. parser_cls, commands_loader_cls = cli. commands_loader_cls, help_cls = cli. help_cls, ) cli. invocation. execute ( command. split ( ) ) except CLIError as ex : if : raise AssertionError ( ""{}\nExpected: {}\nActual: {}"". format ( message, error, ex ) ) return except Exception as ex : raise ex raise AssertionError ( ""exception not raised for '{0}'"". format ( message ) )",False,error not in str(ex),error is None or ex.args[0] or error < ex.args[1],0.6559051275253296 838,"def say ( self, phrase ) : self. _logger. debug ( ""Saying '%s' with '%s'"", phrase, self. SLUG ) with tempfile. NamedTemporaryFile ( suffix = "".wav"", delete = False ) as f : fname = f. name cmd = [ ""pico2wave"", ""--wave"", fname ] if self. language not in self. languages : raise ValueError ( ""Language '%s' not supported by '%s'"", self. language, self. SLUG ) cmd. extend ( [ ""-l"", self. language ] ) cmd. append ( phrase ) self. _logger. debug ( ""Executing %s"", "" "". join ( [ pipes. quote ( arg ) for arg in cmd ] ) ) with tempfile. TemporaryFile ( ) as f : subprocess. call ( cmd, stdout = f, stderr = f ) f. seek ( 0 ) output = f. read ( ) if : self. _logger. debug ( ""Output was: '%s'"", output ) self. play ( fname ) os. remove ( fname )",False,output,output != '',0.6841362118721008 839,"def get_connection ( self, url, proxies = None ) : with self. pools. lock : pool = self. pools. get ( url ) if : return pool pool = NpipeHTTPConnectionPool ( self. npipe_path, self. 
timeout, maxsize = self. max_pool_size ) self. pools [ url ] = pool return pool",False,pool,pool and proxies or pool[url],0.6944013833999634 840,"def get_user_from_api_key ( api_key, query_id ) : if not api_key : return None user = None org = current_org. _get_current_object ( ) try : user = models. User. get_by_api_key_and_org ( api_key, org ) if user. is_disabled : user = None except models. NoResultFound : try : api_key = models. ApiKey. get_by_api_key ( api_key ) user = models. ApiUser ( api_key, api_key. org, [ ] ) except models. NoResultFound : if : query = models. Query. get_by_id_and_org ( query_id, org ) if query and query. api_key == api_key : user = models. ApiUser ( api_key, query. org, list ( query. groups. keys ( ) ), name = ""ApiKey: Query {}"". format ( query. id ), ) return user",True,query_id,query_id,0.6675068736076355 841,"def describe_return_bits ( self, data, names ) : i = 0x01 << len ( names ) - 1 bit = 0 while i!= 0 : if : self. putx ( 3 if ( data & i ) else 4, 1, names [ bit ] ) else : self. advance_ann ( 3, 1 ) i >>= 1 bit += 1",False,names[bit] != '',names[bit] & 1,0.6616164445877075 842,"def create ( self, path, wipe = False ) : _path = self. validatepath ( path ) with ftp_errors ( self, path ) : if : empty_file = io. BytesIO ( ) self. ftp. storbinary ( str ( ""STOR "" ) + _encode ( _path, self. ftp. encoding ), empty_file ) return True return False",False,wipe or not self.isfile(path),wipe,0.6491141319274902 843,"def computeData ( self ) : self. nameList = [ ] self. names = { } self. tnodes = { } for p in self. c. all_unique_positions ( ) : h = p. h. strip ( ) v = p. v nameList = self. names. get ( h, [ ] ) if : if p. parent ( ) : key = ""%s, parent: %s"" % ( h, p. parent ( ). h ) else : key = ""%s, child index: %d"" % ( h, p. childIndex ( ) ) else : key = h self. nameList. append ( key ) self. tnodes [ key ] = v nameList. append ( key ) self. names [ h ] = nameList",False,nameList,len(nameList) > 0,0.6689449548721313 844,"def remote_change_labels ( crispin_client, account_id, message_ids, removed_labels, added_labels ) : uids_for_message = { } with session_scope ( account_id ) as db_session : for message_id in message_ids : folder_uids_map = uids_by_folder ( message_id, db_session ) for folder_name, uids in folder_uids_map. items ( ) : if folder_name not in uids_for_message : uids_for_message [ folder_name ] = [ ] uids_for_message [ folder_name ]. extend ( uids ) for folder_name, uids in uids_for_message. items ( ) : crispin_client. select_folder_if_necessary ( folder_name, uidvalidity_cb ) if : crispin_client. conn. add_gmail_labels ( uids, _encode_labels ( added_labels ), silent = True ) if len ( removed_labels ) > 0 : crispin_client. conn. remove_gmail_labels ( uids, _encode_labels ( removed_labels ), silent = True )",True,len(added_labels) > 0,len(added_labels) > 0,0.6525024175643921 845,"def _sanitize_outputs ( component_cls, outputs ) : if outputs is None : outputs = component_cls. outputs if outputs is None : outputs = [ ] if isinstance ( outputs, ( list, tuple ) ) : streams = { } for output in outputs : if isinstance ( output, Stream ) : streams [ output. name ] = StreamInfo ( output_fields = output. fields, direct = output. direct ) elif : default = streams. setdefault ( ""default"", StreamInfo ( output_fields = [ ], direct = False ) ) default. output_fields. append ( output ) else : raise TypeError ( ""Outputs must either be a list of strings "" ""or a list of Streams. Invalid entry: {!r}"". 
format ( output ) ) else : raise TypeError ( ""Outputs must either be a list of strings or a list"" "" of Streams. Given: {!r}"". format ( outputs ) ) return streams",False,"isinstance(output, str)","isinstance(output, Stream)",0.6495030522346497 846,"def saveFileMain ( self, dlg ) : if ( not self. fileNames [ self. ind ] ) or dlg : markupClass = self. getMarkupClass ( ) if ( markupClass is None ) or not hasattr ( markupClass, ""default_extension"" ) : defaultExt = self. tr ( ""Plain text (*.txt)"" ) ext = "".txt"" else : defaultExt = ( self. tr ( ""%s files"", ""Example of final string: Markdown files"" ) % markupClass. name + "" ("" + str. join ( "" "", ( ""*"" + extension for extension in markupClass. file_extensions ) ) + "")"" ) ext = markupClass. default_extension newFileName = getSaveFileName ( self, self. tr ( ""Save file"" ), """", defaultExt ) if newFileName : if not QFileInfo ( newFileName ). suffix ( ) : newFileName += ext self. fileNames [ self. ind ] = newFileName self. actionSetEncoding. setDisabled ( self. autoSaveActive ( ) ) if self. fileNames [ self. ind ] : result = self. saveFileCore ( self. fileNames [ self. ind ] ) if : self. setCurrentFile ( ) self. editBoxes [ self. ind ]. document ( ). setModified ( False ) ",True,result,result,0.6872763633728027 847,"def mat_mul ( job_id, idx, data_list ) : _, all_parties = session_init ( job_id, idx ) with SPDZ ( ) : if : x = FixedPointTensor. from_source ( ""x"", data_list [ 0 ] ) y = FixedPointTensor. from_source ( ""y"", all_parties [ 1 ] ) else : x = FixedPointTensor. from_source ( ""x"", all_parties [ 0 ] ) y = FixedPointTensor. from_source ( ""y"", data_list [ 1 ] ) return ( x @ y ). get ( )",False,idx == 0,len(all_parties) == 2,0.6795542240142822 848,"def loop_accept_pipe ( f = None ) : pipe = None try : if f : pipe = f. result ( ) server. _free_instances. discard ( pipe ) if server. closed ( ) : pipe. close ( ) return protocol = protocol_factory ( ) self. _make_duplex_pipe_transport ( pipe, protocol, extra = { ""addr"" : address } ) pipe = server. _get_unconnected_pipe ( ) if : return f = self. _proactor. accept_pipe ( pipe ) except OSError as exc : if pipe and pipe. fileno ( )!= - 1 : self. call_exception_handler ( { ""message"" : ""Pipe accept failed"", ""exception"" : exc, ""pipe"" : pipe, } ) pipe. close ( ) elif self. _debug : logger. warning ( ""Accept pipe failed on pipe %r"", pipe, exc_info = True ) except exceptions. CancelledError : if pipe : pipe. close ( ) else : server. _accept_pipe_future = f ",False,pipe is None,self._proactor is not None,0.6645748615264893 849,"def check_program ( self, program ) : quantized_ops = { } persistable_vars = [ v. name for v in filter ( lambda var : var. persistable, program. list_vars ( ) ) ] for block in program. blocks : for idx, op in enumerate ( block. ops ) : if op. type in self. quantizable_op_and_inputs : for i, arg_name in enumerate ( op. input_arg_names ) : quant_op_type = ( self. weight_quant_op_type if : else self. act_quant_op_type ) self. assertTrue ( arg_name. endswith ( "".quantized.dequantized"" ) ) if arg_name not in quantized_ops : self. assertEqual ( block. ops [ idx - 2 * i - 1 ]. type, self. dequant_op_type ) self. assertEqual ( block. ops [ idx - 2 * i - 2 ]. type, quant_op_type ) quantized_ops [ arg_name ] = block. ops [ idx - 2 * i - 2 ] ",False,_original_var_name(arg_name) in persistable_vars,quant_op_type is None,0.6491057872772217 850,"def process ( self ) : if not any ( socket. is_linked for socket in self. outputs ) : return vertices_s = self. 
inputs [ ""Vertices"" ]. sv_get ( ) vertices_s = ensure_nesting_level ( vertices_s, 3 ) epsilon_s = self. inputs [ ""Epsilon"" ]. sv_get ( ) smooth_s = self. inputs [ ""Smooth"" ]. sv_get ( ) curves_out = [ ] for vertices, epsilon, smooth in zip_long_repeat ( vertices_s, epsilon_s, smooth_s ) : if : epsilon = epsilon [ 0 ] if isinstance ( smooth, ( list, int ) ) : smooth = smooth [ 0 ] vertices = np. array ( vertices ) ts = make_euclidian_ts ( vertices ) rbf = Rbf ( ts, vertices, function = self. function, smooth = smooth, epsilon = epsilon, mode = ""N-D"", ) curve = SvRbfCurve ( rbf, ( 0.0, 1.0 ) ) curves_out. append ( curve ) self. outputs [ ""Curve"" ]. sv_set ( curves_out )",True,"isinstance(epsilon, (list, int))","isinstance(epsilon, (list, int))",0.6550108194351196 851,"def get_ast_subexprs ( claripy_ast ) : queue = [ claripy_ast ] while queue : ast = queue. pop ( 0 ) if : queue += ast. args [ 1 : ] yield ast. args [ 0 ] elif ast. op == ""Or"" : common = None for arg in ast. args : subexprs = get_ast_subexprs ( arg ) if common is None : common = set ( subexprs ) else : common = common. intersection ( subexprs ) if len ( common ) == 0 : break for expr in common : yield expr else : yield ast",True,ast.op == 'And',ast.op == 'And',0.6530584096908569 852,"def net_arch ( input, drop_prob, drop_path_mask, is_train, num_classes ) : c_in = 36 stem_multiplier = 3 c_curr = stem_multiplier * c_in x = self. _conv_bn ( input, c_curr, kernel_size = 3, padding = 1, stride = 1, name = ""cifar10_darts_conv0"" ) s0 = s1 = x logits_aux = None reduction_prev = False for i, layer_setting in enumerate ( self. bottleneck_params_list ) : filter_num, stride = layer_setting [ 0 ], layer_setting [ 1 ] if stride == 2 : reduction = True else : reduction = False if is_train : drop_path_cell = drop_path_mask [ :, i, :, : ] else : drop_path_cell = drop_path_mask s0, s1 = s1, self. _cell ( s0, s1, filter_num, stride, reduction_prev, drop_prob, drop_path_cell, is_train, name = ""cifar10_darts_layer{}"". format ( i + 1 ), ) reduction_prev = reduction if : if is_train : logits_aux = self. _auxiliary",False,i == 2 * 20 // 3,logits_aux is None,0.6636972427368164 853,"def has_bad_headers ( self ) : headers = [ self. sender, self. reply_to ] + self. recipients for header in headers : if _has_newline ( header ) : return True if self. subject : if _has_newline ( self. subject ) : for linenum, line in enumerate ( self. subject. split ( ""\r\n"" ) ) : if not line : return True if linenum > 0 and line [ 0 ] not in ""\t "" : return True if : return True if len ( line. strip ( ) ) == 0 : return True return False",False,_has_newline(line),linenum > 0 and line[0] not in '\t ',0.6545993089675903 854,"def _get_external_data ( url ) : result = { } try : resp = urlopen ( url ) headers = resp. info ( ) ct = headers. get ( ""Content-Type"" ) if : logger. debug ( ""Unexpected response for JSON request: %s"", ct ) else : reader = codecs. getreader ( ""utf-8"" ) ( resp ) result = json. load ( reader ) except Exception as e : logger. exception ( ""Failed to get external data for %s: %s"", url, e ) return result",True,not ct.startswith('application/json'),not ct.startswith('application/json'),0.6471430063247681 855,"def get_item_address ( self, item ) : """"""Get an item's address as a collection of names"""""" result = [ ] while True : name = self. tree_ctrl. GetItemPyData ( item ) if : break else : result. insert ( 0, name ) item = self. tree_ctrl. 
GetItemParent ( item ) return result",True,name is None,name is None,0.6606528759002686 856,"def _calc_block_io ( self, blkio ) : """"""Calculate block IO stats."""""" for stats in blkio [ ""io_service_bytes_recursive"" ] : if : self. _blk_read += stats [ ""value"" ] elif stats [ ""op"" ] == ""Write"" : self. _blk_write += stats [ ""value"" ]",False,stats['op'] == 'Read',"stats[op""] == ""Read""",0.6567075252532959 857,"def append ( self, val, label = None ) : if hasattr ( val, ""__len__"" ) : if val. isdigit ( ) and len ( val ) > 2 : self. century_specified = True if label not in [ None, ""Y"" ] : raise ValueError ( label ) label = ""Y"" elif val > 100 : self. century_specified = True if label not in [ None, ""Y"" ] : raise ValueError ( label ) label = ""Y"" super ( self. __class__, self ). append ( int ( val ) ) if label == ""M"" : if : raise ValueError ( ""Month is already set"" ) self. mstridx = len ( self ) - 1 elif label == ""D"" : if self. has_day : raise ValueError ( ""Day is already set"" ) self. dstridx = len ( self ) - 1 elif label == ""Y"" : if self. has_year : raise ValueError ( ""Year is already set"" ) self. ystridx = len ( self ) - 1",True,self.has_month,self.has_month,0.6591854691505432 858,"def _setup ( self, rnn_type, ntoken, ninp, nhid, nlayers, dropout = 0.5, tie_weights = False ) : self. drop = nn. Dropout ( dropout ) self. encoder = nn. Embedding ( ntoken, ninp ) if rnn_type in [ ""LSTM"", ""GRU"" ] : self. rnn = getattr ( nn, rnn_type ) ( ninp, nhid, nlayers, dropout = dropout ) else : try : nonlinearity = { ""RNN_TANH"" : ""tanh"", ""RNN_RELU"" : ""relu"" } [ rnn_type ] except KeyError : raise ValueError ( ""An invalid option for `--model` was supplied, "" ""options are ['LSTM', 'GRU', 'RNN_TANH' or 'RNN_RELU']"" ) self. rnn = nn. RNN ( ninp, nhid, nlayers, nonlinearity = nonlinearity, dropout = dropout ) self. decoder = nn. Linear ( nhid, ntoken ) if tie_weights : if : raise ValueError ( ""When using the tied flag, nhid must be equal to emsize"" ) self. decoder. weight = self. encoder. weight self. _init_weights ( ) self. rnn_type = rnn_type self. nhid = nhid self. nlayers = nlayers",False,nhid != ninp,nhid > nm,0.6781313419342041 859,"def _create ( self ) : pkgs_to_install = self. manifest. parse_initial_manifest ( ) rpm_pre_process_cmds = self. d. getVar ( ""RPM_PREPROCESS_COMMANDS"" ) rpm_post_process_cmds = self. d. getVar ( ""RPM_POSTPROCESS_COMMANDS"" ) self. pm. write_index ( ) execute_pre_post_process ( self. d, rpm_pre_process_cmds ) if self. progress_reporter : self. progress_reporter. next_stage ( ) if self. inc_rpm_image_gen == ""1"" : self. _create_incremental ( pkgs_to_install ) if self. progress_reporter : self. progress_reporter. next_stage ( ) self. pm. update ( ) pkgs = [ ] pkgs_attempt = [ ] for pkg_type in pkgs_to_install : if : pkgs_attempt += pkgs_to_install [ pkg_type ] else : pkgs += pkgs_to_install [ pkg_type ] if self. progress_reporter : self. progress_reporter. next_stage ( ) self. pm. install ( pkgs ) if self. progress_reporter : self. progress_reporter. next_stage ( ) self. pm. install ( pkgs_attempt, True ) if self. progress_reporter : self. progress_reporter. next_stage ( ) self. pm. install_complementary ( ) if self. progress_reporter : self. progress_reporter. next_stage ( ) self. 
_setup_dbg_rootfs ( [ ""/etc"", ""/var/lib/rpm"", ""/var/cache/",False,pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY,pkg_type in pkgs,0.649940013885498 860,"def load_syntax ( syntax ) : context = _create_scheme ( ) or { } partition_scanner = PartitionScanner ( syntax. get ( ""partitions"", [ ] ) ) scanners = { } for part_name, part_scanner in list ( syntax. get ( ""scanner"", { } ). items ( ) ) : scanners [ part_name ] = Scanner ( part_scanner ) formats = [ ] for fname, fstyle in list ( syntax. get ( ""formats"", { } ). items ( ) ) : if : if fstyle. startswith ( ""%("" ) and fstyle. endswith ( "")s"" ) : key = fstyle [ 2 : - 2 ] fstyle = context [ key ] else : fstyle = fstyle % context formats. append ( ( fname, fstyle ) ) return partition_scanner, scanners, formats",False,"isinstance(fstyle, basestring)",fname in context,0.6503206491470337 861,"def _open_files ( self ) : self. _opened_files = [ ] self. _log_destination = [ ] self. _log_destination_is_tty = [ ] for dest in utils. to_list ( self. log_destination ) : if : file = open ( dest, ""a"" ) self. _opened_files. append ( file ) self. _log_destination_is_tty. append ( False ) else : if not hasattr ( dest, ""write"" ) : raise ValueError ( f""Log destination {dest} is not a "" f""file-like object"" ) try : isatty = dest. isatty ( ) except AttributeError : isatty = False file = dest self. _log_destination_is_tty. append ( isatty ) self. _log_destination. append ( file )",False,"isinstance(dest, (str, Path))","isinstance(dest, basestring)",0.6609674096107483 862,"def formTypesHistogram ( self, prepared ) : histo = { } keys = [ ""isTriad"", ""isSeventh"", ""isMajorTriad"", ""isMinorTriad"", ""isIncompleteMajorTriad"", ""isIncompleteMinorTriad"", ""isDiminishedTriad"", ""isAugmentedTriad"", ""isDominantSeventh"", ""isDiminishedSeventh"", ""isHalfDiminishedSeventh"", ] for c in prepared : for thisKey in keys : if : histo [ thisKey ] = 0 if getattr ( c, thisKey ) ( ) : histo [ thisKey ] += 1 return histo",False,thisKey not in histo,"getattr(c, thisKey) is None",0.6639055609703064 863,"def generate_scraper ( class_name, host_name ) : with open ( ""templates/scraper.py"" ) as source : code = source. read ( ) program = ast. parse ( code ) state = GenerateScraperState ( class_name, host_name, code ) for node in ast. walk ( program ) : if : break output = f""recipe_scrapers/{class_name.lower()}.py"" with open ( output, ""w"" ) as target : target. write ( state. result ( ) )",False,not state.step(node),node.error(),0.6513723134994507 864,"def f_freeze ( _ ) : repos = utils. get_repos ( ) for name, path in repos. items ( ) : url = """" cp = subprocess. run ( [ ""git"", ""remote"", ""-v"" ], cwd = path, capture_output = True ) if : url = cp. stdout. decode ( ""utf-8"" ). split ( ""\n"" ) [ 0 ]. split ( ) [ 1 ] print ( f""{url},{name},{path}"" )",True,cp.returncode == 0,cp.returncode == 0,0.6603929400444031 865,"def load ( cls, storefile, template_store ) : if not hasattr ( storefile, ""read"" ) : storefile = open ( storefile, ""rb"" ) store = cls. convertfile ( storefile, template_store ) for unit in store. units : if : continue if cls. needs_target_sync : unit. target = unit. source unit. rich_target = unit. rich_source return store",False,unit.isheader(),unit.is_multigraph,0.65279221534729 866,"def fixup_pth_and_egg_link ( home_dir, sys_path = None ) : """"""Makes.pth and.egg-link files use relative paths"""""" home_dir = os. path. normcase ( os. path. abspath ( home_dir ) ) if sys_path is None : sys_path = sys. 
path for a_path in sys_path : if : a_path = ""."" if not os. path. isdir ( a_path ) : continue a_path = os. path. normcase ( os. path. abspath ( a_path ) ) if not a_path. startswith ( home_dir ) : logger. debug ( ""Skipping system (non-environment) directory %s"", a_path ) continue for filename in os. listdir ( a_path ) : filename = os. path. join ( a_path, filename ) if filename. endswith ( "".pth"" ) : if not os. access ( filename, os. W_OK ) : logger. warn ( ""Cannot write.pth file %s, skipping"", filename ) else : fixup_pth_file ( filename ) if filename. endswith ( "".egg-link"" ) : if not os. access ( filename, os. W_OK ) : logger. warn ( ""Cannot write.egg-link file %s, skipping"", filename ) else : fix",False,not a_path,a_path == None,0.6560148000717163 867,"def validate_pull_secret ( namespace ) : if namespace. pull_secret is None : warning = ( ""No --pull-secret provided: cluster will not include samples or operators from "" + ""Red Hat or from certified partners."" ) logger. warning ( warning ) else : try : if : raise Exception ( ) except : raise InvalidArgumentValueError ( ""Invalid --pull-secret."" )",False,"not isinstance(json.loads(namespace.pull_secret), dict)","namespace.pull_secret.lower() not in ['AVG', 'AVG', 'AVG', 'AVG', 'AVG', 'AVG']",0.6529994010925293 868,"def setdefault ( self, key, default = None ) : try : o = self. data [ key ] ( ) except KeyError : o = None if o is None : if : self. _commit_removals ( ) self. data [ key ] = KeyedRef ( default, self. _remove, key ) return default else : return o",True,self._pending_removals,self._pending_removals,0.659338653087616 869,"def run_cmd ( self, util, value ) : state = util. state if not state. argument_supplied : state. argument_supplied = True if value == ""by_four"" : state. argument_value = 4 elif : state. argument_negative = True else : state. argument_value = value elif value == ""by_four"" : state. argument_value *= 4 elif isinstance ( value, int ) : state. argument_value *= 10 state. argument_value += value elif : state. argument_value = - state. argument_value",True,value == 'negative',value == 'negative',0.664093017578125 870,"def close ( self ) : with self. _lock : """"""Close this _MultiFileWatcher object forever."""""" if : self. _folder_handlers = { } LOGGER. debug ( ""Stopping observer thread even though there is a non-zero "" ""number of event observers!"" ) else : LOGGER. debug ( ""Stopping observer thread"" ) self. _observer. stop ( ) self. _observer. join ( timeout = 5 )",False,len(self._folder_handlers) != 0,self._observer is not None,0.6578959226608276 871,"def update_completion ( self ) : if not self. notebook : return text = self. get_text ( ) completion = self. get_completion ( ) if completion is None : return model = completion. get_model ( ) model. clear ( ) if not text or not self. get_input_valid ( ) : return if "":"" in text : i = text. rfind ( "":"" ) prefix = text [ : i + 1 ] if : path = Path ( "":"" ) else : reference = self. notebookpath or Path ( "":"" ) link = prefix if self. subpaths_only and not link. startswith ( ""+"" ) : link = ""+"" + link. lstrip ( "":"" ) try : path = self. notebook. pages. lookup_from_user_input ( link, reference ) except ValueError : return try : self. _fill_completion_for_anchor ( path, prefix, text ) except IndexNotFoundError : pass elif text. startswith ( ""+"" ) and self. notebookpath : prefix = ""+"" path = self. notebookpath try : self. _fill_completion_for_anchor ( path, prefix, text ) except IndexNotFoundError : pass else : path = self. 
notebookpath or Path ( "":"" ) try : denv = builtins. __xonsh__. env. detype ( ) try : branches = xt. decode_bytes ( subprocess. check_output ( [ ""git"", ""branch"" ], env = denv, stderr = subprocess. DEVNULL ) ). splitlines ( ) except ( subprocess. CalledProcessError, OSError, FileNotFoundError ) : q. put ( None ) else : for branch in branches : if : continue elif branch. endswith ( "")"" ) : branch = branch. split ( ) [ - 1 ] [ : - 1 ] else : branch = branch. split ( ) [ - 1 ] q. put ( branch ) break else : q. put ( None )",False,not branch.startswith('* '),"not branch.endswith('."")",0.6480258703231812 873,"def _normalize_dict_keys ( transformer, keys ) : res = [ ] for key in keys : if isinstance ( key, str ) : key = ast. Str ( key ) elif isinstance ( key, JSStr ) : key = ast. Str ( key. args [ 0 ] ) if not isinstance ( key, ast. Str ) : if transformer. enable_es6 : key = JSKeySubscript ( key ) else : if isinstance ( key, ast. AST ) : py_node = key elif : py_node = key. py_node else : raise ValueError ( ""Value of type %r cannot "" ""be use as key"" % type ( key ) ) transformer. unsupported ( py_node, True, ""Value of type %r cannot "" ""be use as key"" % type ( key ), ) res. append ( key ) return res",False,"isinstance(key, TargetNode) and key.py_node is not None","isinstance(key, ast.Subscript)",0.6515085101127625 874,"def recv_some ( p, t = 0.1, e = 1, tr = 5, stderr = 0 ) : if tr < 1 : tr = 1 x = time. time ( ) + t y = [ ] r = """" if stderr : pr = p. recv_err else : pr = p. recv while time. time ( ) < x or r : r = pr ( ) if r is None : break elif : y. append ( r ) else : time. sleep ( max ( ( x - time. time ( ) ) / tr, 0 ) ) return """". join ( y )",False,r,e,0.6913201808929443 875,"def _extract_lemma ( self, parse : Parse ) -> str : special_feats = [ x for x in self. SPECIAL_FEATURES if x in parse. tag ] if len ( special_feats ) == 0 : return parse. normal_form for other in parse. lexeme : tag = other. tag if : continue if ( tag. case == ""nomn"" and tag. gender == parse. tag. gender and tag. number == ""sing"" ) : return other. word return parse. normal_form",False,any((x not in tag for x in special_feats)),tag == parse.tag,0.6533888578414917 876,"def run_async ( self ) : for entry in self. interface. entries : if self. commit_is_merge ( entry. long_hash ) : sublime. message_dialog ( ""Unable to squash a merge."" ) return last_commit_idx = len ( self. interface. entries ) - 1 commit_chain = self. perpare_rewrites ( self. interface. entries ) for idx, commit in enumerate ( commit_chain ) : commit. modified = True if : commit. do_commit = False commit_chain [ idx + 1 ]. msg = commit. msg + ""\n\n"" + commit_chain [ idx + 1 ]. msg commit. msg = None else : commit. squashed = True self. make_changes ( commit_chain, ""squashed all commits"" )",False,idx < last_commit_idx,commit.do_commit and idx + 1 < len(commit_chain),0.6533656120300293 877,"def verify_installed_apps ( captured_outerr, package_name : str, test_error_fh : io. StringIO, deps : bool = False ) -> bool : package_apps = PKG [ package_name ] [ ""apps"" ]. copy ( ) if deps : package_apps += PKG [ package_name ] [ ""apps_of_dependencies"" ] reported_apps_re = re. search ( r""These apps are now globally available(.+)"", captured_outerr. out, re. DOTALL ) if reported_apps_re : reported_apps = [ x. strip ( ) [ 2 : ] for x in reported_apps_re. group ( 1 ). strip ( ). 
split ( ""\n"" ) ] if : app_success = False print ( ""verify_install: REPORTED APPS DO NOT MATCH PACKAGE"", file = test_error_fh ) print ( f""pipx reported apps: {reported_apps}"", file = test_error_fh ) print ( f"" true package apps: {package_apps}"", file = test_error_fh ) else : app_success = True else : app_success = False print ( ""verify_install: APPS TESTING ERROR"", file = test_error_fh ) return app_success",False,set(reported_apps) != set(package_apps),package_apps,0.6491948962211609 878,"def _get ( self, heappop = heapq. heappop ) : while self. queue : item = heappop ( self. queue ) if : continue self. queue_dict. pop ( item. taskid, None ) return item return None",True,item.taskid is None,item.taskid is None,0.6567336320877075 879,"def logic ( ) : while 1 : yield PI, PT PT. next [ n : ] = PI for l in range ( 1, m + 1 ) : for k in range ( 2 ** ( m - l ) ) : for i in range ( 2 ** ( l - 1 ) ) : if ( k * 2 ** l + i ) < n : PT. next [ l * n + k * 2 ** l + i ] = PT [ ( l - 1 ) * n + k * 2 ** l + i ] if : PT. next [ l * n + k * 2 ** l + 2 ** ( l - 1 ) + i ] = ( PT [ ( l - 1 ) * n + k * 2 ** l + 2 ** ( l - 1 ) + i ] & PT [ ( l - 1 ) * n + k * 2 ** l + 2 ** ( l - 1 ) - 1 ] ) PO. next = PT [ ( m + 1 ) * n : m * n ]",False,k * 2 ** l + 2 ** (l - 1) + i < n,l > 0,0.6610714197158813 880,"def onMessage ( self, payload, isBinary ) : if not isBinary : self. result = ""Expected binary message with payload, but got binary."" else : if : self. result = ( ""Expected binary message with payload of length %d, but got %d."" % ( self. DATALEN, len ( payload ) ) ) else : self. behavior = Case. OK self. result = ""Received binary message of length %d."" % len ( payload ) self. p. createWirelog = True self. p. sendClose ( self. p. CLOSE_STATUS_CODE_NORMAL )",False,len(payload) != self.DATALEN,len(payload) > self.DATALEN,0.6611055135726929 881,"def __init__ ( self, ** kwargs ) : self. theme = kwargs. pop ( ""theme"" ) super ( GenericThemeForm, self ). __init__ ( ** kwargs ) if self. theme. stylesheets : if : choices = [ ( style [ ""stylesheet"" ], style [ ""name"" ] ) for style in self. theme. stylesheets ] else : warnings. warn ( ""Using list of tuples in theme.stylesheets will deprecate "" ""in Shuup 0.5.7. Use list of dictionaries instead."", RemovedInFutureShuupWarning, ) choices = self. theme. stylesheets self. fields [ ""stylesheet"" ] = forms. ChoiceField ( label = _ ( ""Stylesheets"" ), choices = choices, initial = choices [ 0 ], required = True ) fields = self. theme. fields if hasattr ( fields, ""items"" ) : fields = fields. items ( ) for name, field in fields : self. fields [ name ] = deepcopy ( field ) self. initial. update ( self. instance. get_settings ( ) )",False,"isinstance(self.theme.stylesheets[0], dict)","hasattr(self.theme.stylesheets, '__iter__')",0.6531034111976624 882,"def samples_to_records ( samples, default_keys = None ) : """"""Convert samples into output CWL records."""""" from bcbio. pipeline import run_info RECORD_CONVERT_TO_LIST = set ( [ ""config__algorithm__tools_on"", ""config__algorithm__tools_off"", ""reference__genome_context"", ] ) all_keys = _get_all_cwlkeys ( samples, default_keys ) out = [ ] for data in samples : for raw_key in sorted ( list ( all_keys ) ) : key = raw_key. split ( ""__"" ) if tz. get_in ( key, data ) is None : data = tz. update_in ( data, key, lambda x : None ) if raw_key not in data [ ""cwl_keys"" ] : data [ ""cwl_keys"" ]. append ( raw_key ) if raw_key in RECORD_CONVERT_TO_LIST : val = tz. 
get_in ( key, data ) if not val : val = [ ] elif : val = [ val ] data = tz. update_in ( data, key, lambda x : val ) if isinstance ( tz. get_in ( key, data ), bool ) : data = tz. update_",False,"not isinstance(val, (list, tuple))",val,0.6498900055885315 883,"def check_related_active_jobs ( self, obj ) : active_jobs = obj. get_active_jobs ( ) if len ( active_jobs ) > 0 : raise ActiveJobConflict ( active_jobs ) time_cutoff = now ( ) - dateutil. relativedelta. relativedelta ( minutes = 1 ) recent_jobs = obj. _get_related_jobs ( ). filter ( finished__gte = time_cutoff ) for unified_job in recent_jobs. get_real_instances ( ) : if : raise PermissionDenied ( _ ( ""Related job {} is still processing events."" ). format ( unified_job. log_format ) )",False,not unified_job.event_processing_finished,unified_job.has_events(),0.6505641341209412 884,"def _parse_param_value ( name, datatype, default ) : if datatype == ""bool"" : if default. lower ( ) == ""true"" : return True elif : return False else : _s = ""{}: Invalid default value '{}' for bool parameter {}"" raise SyntaxError ( _s. format ( self. name, default, p ) ) elif datatype == ""int"" : if type ( default ) == int : return default else : return int ( default, 0 ) elif datatype == ""real"" : if type ( default ) == float : return default else : return float ( default ) else : return str ( default )",True,default.lower() == 'false',default.lower() == 'false',0.6580437421798706 885,"def create_new_file ( obj, source, destination, destination_node ) : if not source [ ""materialized"" ]. startswith ( ""/"" ) : source [ ""materialized"" ] = ""/"" + source [ ""materialized"" ] if not destination [ ""materialized"" ]. startswith ( ""/"" ) : destination [ ""materialized"" ] = ""/"" + destination [ ""materialized"" ] if not source [ ""path"" ]. endswith ( ""/"" ) : data = dict ( destination ) new_file = BaseFileNode. resolve_class ( destination [ ""provider"" ], BaseFileNode. FILE ). get_or_create ( destination_node, destination [ ""path"" ] ) if destination [ ""provider"" ]!= ""osfstorage"" : new_file. update ( revision = None, data = data ) else : new_file = find_and_create_file_from_metadata ( destination. get ( ""children"", [ ] ), source, destination, destination_node, obj ) if not new_file : if : new_path = obj. referent. path else : new_path = obj. referent. materialized_path. replace ( source [ ""materialized"" ], destination [ ""materialized"" ] ) new_file = BaseFileNode. resolve_class ( destination [ ""provider"" ], BaseFileNode. FILE ). get_or_create ( destination_node, new_path ) ",False,source['provider'] == 'box',obj.referent.path,0.6547295451164246 886,"def __init__ ( self, tree ) : for k, v in sorted ( tree. items ( ) ) : if v is None : continue if : setattr ( self, k, globals ( ) [ self. members [ k ] ] ( v ) ) elif k in self. lists : if k. endswith ( ""_append"" ) : _k = k [ : - 7 ] _l = getattr ( self, _k, [ ] ) [ : ] else : _k = k _l = [ ] for _item in v : try : _l. append ( globals ( ) [ self. lists [ _k ] ] ( _item ) ) except TypeError as e : raise SyntaxError ( ""Bad option '{}' in section '{}'"". format ( _item, k ) ) setattr ( self, _k, _l ) elif k in self. dicts : if not isinstance ( v, dict ) : raise SyntaxError ( ""Object in '{}' section must be a dict"". format ( k ) ) _d = { } for _name, _items in v. items ( ) : try : _d",True,k in self.members,k in self.members,0.6650645732879639 887,"def tile ( cls, op : ""LGBMAlign"" ) : inputs = [ d for d in [ op. data, op. label, op. sample_weight, op. 
init_score ] if d is not None ] data = op. data check_chunks_unknown_shape ( inputs, TilesError ) ctx = get_context ( ) if ctx. running_mode!= RunningMode. distributed : outputs = [ inp. rechunk ( tuple ( ( s, ) for s in inp. shape ) ). _inplace_tile ( ) for inp in inputs ] else : if : data = data. rechunk ( { 1 : data. shape [ 1 ] } ). _inplace_tile ( ) outputs = [ data ] for inp in inputs [ 1 : ] : if inp is not None : outputs. append ( inp. rechunk ( ( data. nsplits [ 0 ], ) ). _inplace_tile ( ) ) kws = [ ] for o in outputs : kw = o. params. copy ( ) kw. update ( dict ( chunks = o. chunks, nsplits = o. nsplits ) ) kws. append ( kw ) new_op = op. copy ( ). reset_key ( ) tileables = new_op. new_tileables ( inputs, kws = kws ) return tileables",False,len(data.nsplits[1]) != 1,data is not None,0.6540103554725647 888,"def get_code ( self, fullname = None ) : fullname = self. _fix_name ( fullname ) if self. code is None : mod_type = self. etc [ 2 ] if : source = self. get_source ( fullname ) self. code = compile ( source, self. filename, ""exec"" ) elif mod_type == imp. PY_COMPILED : self. _reopen ( ) try : self. code = read_code ( self. file ) finally : self. file. close ( ) elif mod_type == imp. PKG_DIRECTORY : self. code = self. _get_delegate ( ). get_code ( ) return self. code",False,mod_type == imp.PY_SOURCE,mod_type == imp.NONE,0.6575285196304321 889,"def reprocess_lines ( processed_lines ) : reprocessed_lines = [ ] for line in processed_lines : text = """". join ( line ) chunks = sent_tokenize ( text ) if sum ( len ( x ) for x in chunks )!= len ( text ) : raise ValueError ( ""Got unexpected text length: \n{}\nvs\n{}"". format ( text, chunks ) ) chunk_lengths = [ len ( x ) for x in chunks ] current_length = 0 new_line = [ ] for word in line : if len ( word ) + current_length < chunk_lengths [ 0 ] : new_line. append ( word ) current_length = current_length + len ( word ) elif : new_line. append ( word ) reprocessed_lines. append ( new_line ) new_line = [ ] chunk_lengths = chunk_lengths [ 1 : ] current_length = 0 else : remaining_len = chunk_lengths [ 0 ] - current_length new_line. append ( word [ : remaining_len ] ) reprocessed_lines. append ( new_line ) word = word [ remaining_len : ] chunk_lengths = chunk_lengths [ 1 : ] <",False,len(word) + current_length == chunk_lengths[0],len(word) + current_length < chunk_lengths[0],0.6559857130050659 890,"def clean_permissions ( cls, requestor : ""User"", group : auth_models. Group, errors : Dict [ Optional [ str ], List [ ValidationError ] ], cleaned_input : dict, ) : field = ""add_permissions"" permission_items = cleaned_input. get ( field ) if permission_items : cleaned_input [ field ] = get_permissions ( permission_items ) if : cls. ensure_can_manage_permissions ( requestor, errors, field, permission_items )",False,not requestor.is_superuser,errors,0.6512376070022583 891,"def choose_detectors ( args ) : all_detector_classes = get_detectors_classes ( ) detectors = { d. ARGUMENT : d for d in all_detector_classes } arguments = list ( detectors. keys ( ) ) detectors_to_run = [ ] if not args. exclude_all : exclude = [ ] if args. detectors_to_exclude : exclude = args. detectors_to_exclude. split ( "","" ) for e in exclude : if : raise Exception ( f""{e} is not a detector name, must be one of {arguments}. See also `--list-detectors`."" ) for arg, detector_cls in detectors. items ( ) : if arg not in exclude : detectors_to_run. 
append ( detector_cls ) return detectors_to_run",False,e not in arguments,e not in detectors,0.6711958646774292 892,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. req = TGetTableTypesReq ( ) self. req. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRUCT,ftype == TType.TABLE_TYPE,0.6619229912757874 893,"def read_plugin_info ( plugin, zip_data ) : file = StringIO. StringIO ( zip_data ) archive = zipfile. ZipFile ( file ) has_info = False for name in archive. namelist ( ) : if : data = json. load ( archive. open ( name ) ) plugin. name = data [ ""name"" ] plugin. info_json = json. dumps ( data ) plugin. categories = data. get ( ""categories"", [ ""Other"" ] ) has_info = True elif name. endswith ( ""/Icon.png"" ) or name. endswith ( ""/icon.png"" ) : data = archive. open ( name ). read ( ) plugin. icon_url = resize_and_store ( data, 128 ) elif name. endswith ( ""/Screenshot.png"" ) : screenshot = archive. open ( name ). read ( ) plugin. screenshot_url = resize_and_store ( screenshot, 800 ) elif name. endswith ( "".version"" ) : plugin. version = int ( name. split ( ""/"" ) [ - 1 ]. split ( ""."" ) [ 0 ] ) return has_info",False,name.endswith('/info.json'),name.endswith('.json'),0.643671452999115 894,"def _actions_read ( self, c ) : self. action_input. handle_read ( c ) if c in [ curses. KEY_ENTER, util. KEY_ENTER2 ] : if self. action_input. selected_index == 0 : self. back_to_parent ( ) elif self. action_input. selected_index == 1 : self. _apply_prefs ( ) client. core. get_config ( ). addCallback ( self. _update_preferences ) elif : self. _apply_prefs ( ) self. back_to_parent ( )",True,self.action_input.selected_index == 2,self.action_input.selected_index == 2,0.6521742343902588 895,"def untokenize ( self, iterable ) : it = iter ( iterable ) indents = [ ] startline = False for t in it : if len ( t ) == 2 : self. compat ( t, it ) break tok_type, token, start, end, line = t if tok_type == ENDMARKER : break if : indents. append ( token ) continue elif tok_type == DEDENT : indents. pop ( ) self. prev_row, self. prev_col = end continue elif tok_type in ( NEWLINE, NL ) : startline = True elif startline and indents : indent = indents [ - 1 ] if start [ 1 ] >= len ( indent ) : self. tokens. append ( indent ) self. prev_col = len ( indent ) startline = False self. add_whitespace ( start ) self. tokens. append ( token ) self. prev_row, self. prev_col = end if tok_type in ( NEWLINE, NL ) : self. prev_row += 1 self. prev_col = 0 return """". join ( self. tokens )",False,tok_type == INDENT,startline,0.6676044464111328 896,"def force_ipv4 ( self, * args ) : """"""only ipv4 localhost in /etc/hosts"""""" logg. debug ( ""checking /etc/hosts for '::1 localhost'"" ) lines = [ ] for line in open ( self. etc_hosts ( ) ) : if : newline = re. sub ( ""\\slocalhost\\s"", "" "", line ) if line!= newline : logg. info ( ""/etc/hosts: '%s' => '%s'"", line. rstrip ( ), newline. rstrip ( ) ) line = newline lines. append ( line ) f = open ( self. etc_hosts ( ), ""w"" ) for line in lines : f. write ( line ) f. 
close ( )",False,'::1' in line,self.ipv4_ipv4 and line,0.654106855392456 897,"def build_share ( config, share ) : if share [ ""paths"" ] : result = [ p. rstrip ( ""/"" ) for p in share [ ""paths"" ] ] if share [ ""alldirs"" ] : result. append ( ""-alldirs"" ) if share [ ""ro"" ] : result. append ( ""-ro"" ) if share [ ""quiet"" ] : result. append ( ""-quiet"" ) if : s = '-mapall=""' + share [ ""mapall_user"" ]. replace ( ""\\"", ""\\\\"" ) + '""' if share [ ""mapall_group"" ] : s += ':""' + share [ ""mapall_group"" ]. replace ( ""\\"", ""\\\\"" ) + '""' result. append ( s ) elif share [ ""maproot_user"" ] : s = '-maproot=""' + share [ ""maproot_user"" ]. replace ( ""\\"", ""\\\\"" ) + '""' if share [ ""maproot_group"" ] : s += ':""' + share [ ""maproot_group"" ]. replace ( ""\\"", ""\\\\"" ) + '""' result. append ( s ) if config [ ""v4"" ] and share [ ""security"" ] : result. append ( ""-sec="" + "":"". join ( [ s. lower ( ) for s in share [ ""security"" ] ] ) ) targets = build_share_targets ( share ) if targets : return [ "" "". join ( result + [ target ] ) for target in targets ] else : """"""Get descendants."""""" if not no_iframe or not self. is_iframe ( el ) : next_good = None for child in el. descendants : if next_good is not None : if child is not next_good : continue next_good = None is_tag = self. is_tag ( child ) if no_iframe and is_tag and self. is_iframe ( child ) : if child. next_sibling is not None : next_good = child. next_sibling else : last_child = child while self. is_tag ( last_child ) and last_child. contents : last_child = last_child. contents [ - 1 ] next_good = last_child. next_element yield child if : break continue if not tags or is_tag : yield child",False,next_good is None,next_good and tags,0.6640669107437134 899,"def resolve_name ( self, modname, parents, path, base ) : if modname is None : if path : mod_cls = path. rstrip ( ""."" ) else : mod_cls = None mod_cls = self. env. temp_data. get ( ""autodoc:class"" ) if mod_cls is None : mod_cls = self. env. temp_data. get ( ""py:class"" ) if mod_cls is None : return None, [ ] modname, accessor = rpartition ( mod_cls, ""."" ) modname, cls = rpartition ( modname, ""."" ) parents = [ cls, accessor ] if : modname = self. env. temp_data. get ( ""autodoc:module"" ) if : if sphinx. __version__ > ""1.3"" : modname = self. env. ref_context. get ( ""py:module"" ) else : modname = self. env. temp_data. get ( ""py:module"" ) return modname, parents + [ base ]",False,not modname,sphinx.__version__ > '1.3',0.6826549768447876 900,"def create_fb_format ( data, dpath ) : fw1 = open ( os. path. join ( dpath, ""train.txt"" ), ""w"" ) fw2 = open ( os. path. join ( dpath, ""valid.txt"" ), ""w"" ) fw3 = open ( os. path. join ( dpath, ""test.txt"" ), ""w"" ) for i in range ( 0, len ( data ) - 1, 2 ) : fout = fw1 if : fout = fw2 elif ( i % 500 ) == 2 : fout = fw3 use = True x = data [ i ]. rstrip ( "" "" ). lstrip ( "" "" ). replace ( ""\t"", "" "" ) y = data [ i + 1 ]. rstrip ( "" "" ). lstrip ( "" "" ). replace ( ""\t"", "" "" ) x = x. replace ( ""|"", "" __PIPE__ "" ) y = y. replace ( ""|"", "" __PIPE__ "" ) x = """". join ( list ( map ( replace_emoji, x ) ) ) y = """". join ( list ( map ( replace_emoji, y ) ) ) x = split_punctuation ( unidecode. unidecode ( x ) ) y = split_punctuation ( unidecode. unidecode ( y ) ) x = "" "". join ( x. split ( ) ) y = "" "". join ( y. split ( ) ) if len ( x ) < 1 or len ( y ) < 1 : use = False if use : s = ""text:"" + x + ""\tlabels:"" + y + ""\tepisode_done:True"" if self. mm : proc_as = self. 
get_process_address_space ( ) if proc_as == None : return """" start = self. mm. arg_start. v ( ) size_to_read = self. mm. arg_end - self. mm. arg_start if : name = """" else : argv = proc_as. read ( start, size_to_read ) if argv : name = "" "". join ( argv. split ( ""\x00"" ) ) else : name = """" else : name = ""["" + self. comm + ""]"" if len ( name ) > 1 and name [ - 1 ] == "" "" : name = name [ : - 1 ] return name",False,size_to_read < 1 or size_to_read > 4096,size_to_read > 0,0.6515439748764038 902,"def _convert_dense_data ( instances, bin_inner_param : BinInnerParam, bin_results : BinResults, abnormal_list : list, convert_type : str = ""bin_num"", ) : instances = copy. deepcopy ( instances ) features = instances. features transform_cols_idx = bin_inner_param. transform_bin_indexes split_points_dict = bin_results. all_split_points for col_idx, col_value in enumerate ( features ) : if col_idx in transform_cols_idx : if col_value in abnormal_list : features [ col_idx ] = col_value continue col_name = bin_inner_param. header [ col_idx ] split_points = split_points_dict [ col_name ] bin_num = BaseBinning. get_bin_num ( col_value, split_points ) if convert_type == ""bin_num"" : features [ col_idx ] = bin_num elif : col_results = bin_results. all_cols_results. get ( col_name ) woe_value = col_results. woe_array [ bin_num ] features [ col_idx ] = woe_value else : features [ col_idx ] = col_value instances. features = features return instances",False,convert_type == 'woe',convert_type == 'bin_results',0.6594418287277222 903,"def refresh ( self, inicontents = None ) : comment_count = 1 unknown_count = 1 curr_indent = """" inicontents = inicontents or self. inicontents inicontents = inicontents. strip ( os. linesep ) if not inicontents : return for opt in self : self. pop ( opt ) for opt_str in inicontents. split ( os. linesep ) : com_match = COM_REGX. match ( opt_str ) if com_match : name = ""#comment{0}"". format ( comment_count ) self. com = com_match. group ( 1 ) comment_count += 1 self. update ( { name : opt_str } ) continue indented_match = INDENTED_REGX. match ( opt_str ) if indented_match : indent = indented_match. group ( 1 ). replace ( ""\t"", "" "" ) if indent > curr_indent : options = list ( self ) if options : prev_opt = options [ - 1 ] value = self. get ( prev_opt ) self. update ( { prev_opt : os. linesep. join ( ( value, opt_str ) ) } ) continue opt_match = self. opt_regx. match ( opt_str ) <",False,opt_match,self.unknown_count == 1,0.6569211483001709 904,"def _GetCSVRow ( self, value ) : row = [ ] for type_info in value. __class__. type_infos : if : row. extend ( self. _GetCSVRow ( value. Get ( type_info. name ) ) ) elif isinstance ( type_info, rdf_structs. ProtoBinary ) : row. append ( text. Asciify ( value. Get ( type_info. name ) ) ) else : row. append ( str ( value. Get ( type_info. name ) ) ) return row",False,"isinstance(type_info, rdf_structs.ProtoEmbedded)","isinstance(type_info, rdf_structs.ProtoTable)",0.6501303315162659 905,"def _parse_job ( output ) : BSE_exitons_patt = re. compile ( r""^exiton \s+ (\d+) : \s+ ([\d.]+) \( \s+ ([-\d.]+) \) \s+ \|.* "", re. VERBOSE, ) end_patt = re. compile ( r""\s*program returned normally\s*"" ) total_time_patt = re. compile ( r""\s*total \s+ time: \s+ ([\d.]+).*"", re. VERBOSE ) BSE_results = { } parse_BSE_results = False parse_total_time = False for l in output. split ( ""\n"" ) : if parse_total_time : m = end_patt. search ( l ) if m : BSE_results. update ( end_normally = True ) m = total_time_patt. search ( l ) if m : BSE_results. 
update ( total_time = m. group ( 1 ) ) if parse_BSE_results : if : parse_total_time = True parse_BSE_results = False continue m = BSE_exitons_patt. search ( l ) if m : d = { } d. update ( bse_eig = m. group ( 2 ), osc_strength = m. group ( 3 ) ) conduction transitions weight:') != -1,total_time,0.6527070999145508 906,"def _setSitemapTargets ( ) : if not conf. sitemapUrl : return infoMsg = ""parsing sitemap '%s'"" % conf. sitemapUrl logger. info ( infoMsg ) found = False for item in parseSitemap ( conf. sitemapUrl ) : if : found = True kb. targets. add ( ( item. strip ( ), None, None, None, None ) ) if not found and not conf. forms and not conf. crawlDepth : warnMsg = ""no usable links found (with GET parameters)"" logger. warn ( warnMsg )",False,"re.match('[^ ]+\\?(.+)', item, re.I)",item.strip(),0.6478191614151001 907,def on_update ( self ) : self. max_per_well = 0 for pd in list ( self. plate_well_site. values ( ) ) : for wd in list ( pd. values ( ) ) : nplanes = sum ( [ len ( x ) for x in list ( wd. values ( ) ) ] ) if : self. max_per_well = nplanes for registrant in self. registrants : registrant ( ),True,nplanes > self.max_per_well,nplanes > self.max_per_well,0.6519075632095337 908,"def get_all_ns ( ) : ns_set = [ ] gridfs_ns_set = [ ] db_list = self. namespace_config. get_included_databases ( ) if not db_list : db_list = retry_until_ok ( self. primary_client. database_names ) for database in db_list : if : continue coll_list = retry_until_ok ( self. primary_client [ database ]. collection_names ) for coll in coll_list : if coll. startswith ( ""system."" ) : continue if coll. endswith ( "".chunks"" ) : continue if coll. endswith ( "".files"" ) : namespace = ""%s.%s"" % ( database, coll ) namespace = namespace [ : - len ( "".files"" ) ] if self. namespace_config. gridfs_namespace ( namespace ) : gridfs_ns_set. append ( namespace ) else : namespace = ""%s.%s"" % ( database, coll ) if self. namespace_config. map_namespace ( namespace ) : ns_set. append ( namespace ) return ns_set, gridfs_ns_set",False,database == 'config' or database == 'local',database not in self.primary_client,0.6518546342849731 909,"def parse_ec2_keys ( ) : path = os. path. expanduser ( ""~/.ec2-keys"" ) if os. path. isfile ( path ) : with open ( path, ""r"" ) as f : contents = f. read ( ) for l in contents. splitlines ( ) : l = l. split ( ""#"" ) [ 0 ] w = l. split ( ) if len ( w ) < 2 or len ( w ) > 3 : continue if len ( w ) == 3 and w [ 2 ] == access_key_id : return ( w [ 0 ], w [ 1 ] ) if : return ( access_key_id, w [ 1 ] ) return None",False,w[0] == access_key_id,len(w) == 2,0.6499176025390625 910,"def EnsureTempDirIsSane ( directory ) : """"""Checks that the directory exists and has the correct permissions set."""""" if not os. path. isabs ( directory ) : raise ErrorBadPath ( ""Directory %s is not absolute"" % directory ) if os. path. isdir ( directory ) : if not client_utils. VerifyFileOwner ( directory ) : shutil. rmtree ( directory ) if not os. path. isdir ( directory ) : os. makedirs ( directory ) if : from grr_response_client import ( client_utils_windows, ) client_utils_windows. WinChmod ( directory, [ ""FILE_GENERIC_READ"", ""FILE_GENERIC_WRITE"" ] ) else : os. chmod ( directory, stat. S_IXUSR | stat. S_IRUSR | stat. S_IWUSR )",False,sys.platform == 'win32',os.path.isfile(directory),0.660055935382843 911,"def __getitem__ ( self, index ) : if isinstance ( index, slice ) : if index. step is not None and index. step!= 1 : return pdeque ( tuple ( self ) [ index ], maxlen = self. 
_maxlen ) result = self if : result = result. popleft ( index. start % self. _length ) if index. stop is not None : result = result. pop ( self. _length - ( index. stop % self. _length ) ) return result if not isinstance ( index, Integral ) : raise TypeError ( ""'%s' object cannot be interpreted as an index"" % type ( index ). __name__ ) if index >= 0 : return self. popleft ( index ). left shifted = len ( self ) + index if shifted < 0 : raise IndexError ( ""pdeque index {0} out of range {1}"". format ( index, len ( self ) ), ) return self. popleft ( shifted ). left",True,index.start is not None,index.start is not None,0.6576463580131531 912,"def send_response ( client : BaseSocketModeClient, req : SocketModeRequest, bolt_resp : BoltResponse, start_time : float, ) : if bolt_resp. status == 200 : content_type = bolt_resp. headers. get ( ""content-type"", [ """" ] ) [ 0 ] if bolt_resp. body is None or len ( bolt_resp. body ) == 0 : client. send_socket_mode_response ( SocketModeResponse ( envelope_id = req. envelope_id ) ) elif content_type. startswith ( ""application/json"" ) : dict_body = json. loads ( bolt_resp. body ) client. send_socket_mode_response ( SocketModeResponse ( envelope_id = req. envelope_id, payload = dict_body ) ) else : client. send_socket_mode_response ( SocketModeResponse ( envelope_id = req. envelope_id, payload = { ""text"" : bolt_resp. body } ) ) if : spent_time = int ( ( time ( ) - start_time ) * 1000 ) client. logger. debug ( f""Response time: {spent_time} milliseconds"" ) else : client. logger. info ( f""Unsuccessful Bolt execution result (status: {bolt_resp.status}, body: {bolt_resp.body})"" )",False,client.logger.level <= logging.DEBUG,start_time is not None,0.6523712873458862 913,"def sdl_audio_load ( snd, fn = None, reload = False ) : global _sdl_audio_sounds, _libsdl, _libsdl_mixer if snd not in _sdl_audio_sounds or reload : sdl_audio_init ( ) if : _libsdl_mixer. Mix_FreeChunk ( _sdl_audio_sounds [ snd ] ) rw = _libsdl. SDL_RWFromFile ( fn, ""rb"" ) _sdl_audio_sounds [ snd ] = _libsdl_mixer. Mix_LoadWAV_RW ( rw, 1 )",False,snd in _sdl_audio_sounds,fn is not None,0.6603587865829468 914,"def _get_qnames_to_try ( self, qname, search ) : if search is None : search = self. use_search_by_default qnames_to_try = [ ] if qname. is_absolute ( ) : qnames_to_try. append ( qname ) else : abs_qname = qname. concatenate ( dns. name. root ) if search : if len ( self. search ) > 0 : search_list = self. search [ : ] elif : search_list = [ self. domain ] else : search_list = [ ] if self. ndots is None : ndots = 1 else : ndots = self. ndots for suffix in search_list : qnames_to_try. append ( qname + suffix ) if len ( qname ) > ndots : qnames_to_try. insert ( 0, abs_qname ) else : qnames_to_try. append ( abs_qname ) self. _localtrack_folder = localtrack_folder self. _path = path if isinstance ( path, ( Path, WindowsPath, PosixPath, LocalPath ) ) : path = str ( path. absolute ( ) ) elif path is not None : path = str ( path ) self. cwd = Path. cwd ( ) _lt_folder = Path ( self. _localtrack_folder ) if self. _localtrack_folder else self. cwd _path = Path ( path ) if path else self. cwd if _lt_folder. parts [ - 1 ]. lower ( ) == ""localtracks"" and not kwargs. get ( ""forced"" ) : self. localtrack_folder = _lt_folder elif kwargs. get ( ""forced"" ) : if _path. parts [ - 1 ]. lower ( ) == ""localtracks"" : self. localtrack_folder = _path else : self. localtrack_folder = _path / ""localtracks"" else : self. 
localtrack_folder = _lt_folder / ""localtracks"" try : _path = Path ( path ) _path. relative_to ( self. localtrack_folder ) self. path = _path except ( ValueError, TypeError ) : for sep in _PATH_SEPS : if path and path. startswith ( f""localtracks{sep}{sep}"" ) : path = path. replace ( f""localtracks{sep}{sep}"", """", 1 ) elif : path = path. replace ( f""localtracks{sep}"", """", 1 ) self. path",True,path and path.startswith(f'localtracks{sep}'),path and path.startswith(f'localtracks{sep}'),0.6505128145217896 916,"def setup ( self ) : self. _pre_setup ( ) logs. log ( ""Retrieving symbolized build r%d."" % self. revision ) build_update = not self. exists ( ) if build_update : if : return False logs. log ( ""Retrieved symbolized build r%d."" % self. revision ) else : logs. log ( ""Build already exists."" ) if self. release_build_url : self. _setup_application_path ( self. release_build_dir, build_update = build_update ) environment. set_value ( ""BUILD_URL"", self. release_build_url ) if self. debug_build_url : self. _setup_application_path ( self. debug_build_dir, ""APP_PATH_DEBUG"", build_update = build_update ) self. _post_setup_success ( update_revision = build_update ) return True",False,not self._unpack_builds(),self.exists(),0.6584277153015137 917,"def check_seed_limits ( self, torrent, session ) : seed_limit_ok = True idle_limit_ok = True if torrent. seedRatioMode == 1 : seed_limit_ok = torrent. uploadRatio >= torrent. seedRatioLimit elif torrent. seedRatioMode == 0 : if session. seedRatioLimited : seed_limit_ok = torrent. uploadRatio >= session. seedRatioLimit if torrent. seedIdleMode == 1 : idle_limit_ok = ( torrent. date_active + timedelta ( minutes = torrent. seedIdleLimit ) < datetime. now ( ) ) elif torrent. seedIdleMode == 0 : if : idle_limit_ok = ( torrent. date_active + timedelta ( minutes = session. idle_seeding_limit ) < datetime. now ( ) ) return seed_limit_ok, idle_limit_ok",False,session.idle_seeding_limit_enabled,session.idle_seeding_limit,0.6527003049850464 918,"def content ( self, val ) : """"""Set content"""""" soup = BeautifulSoup ( val ) for todo in soup. findAll ( ""en-todo"" ) : todo. name = ""input"" todo [ ""type"" ] = ""checkbox"" if todo. get ( ""checked"" ) == ""false"" : del todo [ ""checked"" ] self. changed_by_default = True for media in soup. findAll ( ""en-media"" ) : if : media. name = ""img"" res = self. parent. resource_edit. get_by_hash ( media [ ""hash"" ] ) if res : if media [ ""type"" ]. find ( ""image"" ) == 0 : media [ ""src"" ] = ""file://%s"" % res. file_path else : media [ ""src"" ] = file_icon_path media [ ""title"" ] = res. file_name res. in_content = True tag = Tag ( soup, ""a"", [ ( ""href"", ""file://%s"" % res. file_path ) ] ) media. replaceWith ( tag ) tag. insert ( 0, media ) else : media [ ""src"" ] = """" media [ ""title"" ] = """" else : ",False,media.get('hash'),media[0],0.6525328159332275 919,"def apply ( self, db, person ) : families = person. get_parent_family_handle_list ( ) if families == [ ] : return True for family_handle in person. get_parent_family_handle_list ( ) : family = db. get_family_from_handle ( family_handle ) if : father_handle = family. get_father_handle ( ) mother_handle = family. get_mother_handle ( ) if not father_handle : return True if not mother_handle : return True return False",True,family,family,0.6950480937957764 920,"def _flatten_keywords ( self, context, flattened ) : match = FlattenKeywordMatcher ( flattened ). match started = - 1 for event, elem in context : tag = elem. 
tag if : if started >= 0 : started += 1 elif match ( elem. get ( ""name"" ), elem. get ( ""type"" ) ) : started = 0 if started == 0 and event == ""end"" and tag == ""doc"" : elem. text = ( ""%s\n\n_*Keyword content flattened.*_"" % ( elem. text or """" ) ). strip ( ) if started <= 0 or tag == ""msg"" : yield event, elem else : elem. clear ( ) if started >= 0 and event == ""end"" and tag == ""kw"" : started -= 1",False,event == 'start' and tag == 'kw',tag == 'kw',0.6539344787597656 921,"def _skip_start ( self ) : start, stop = self. start, self. stop for chunk in self. app_iter : self. _pos += len ( chunk ) if : continue elif self. _pos == start : return b"""" else : chunk = chunk [ start - self. _pos : ] if stop is not None and self. _pos > stop : chunk = chunk [ : stop - self. _pos ] assert len ( chunk ) == stop - start return chunk else : raise StopIteration ( )",False,self._pos < start,self._pos == stop,0.6676946878433228 922,"def _train_by_any_executor ( self, event_handler, exe, num_epochs, reader ) : if self. checkpoint_cfg : epochs = [ epoch_id for epoch_id in range ( num_epochs ) if epoch_id >= self. checkpoint_cfg. epoch_id ] else : epochs = [ epoch_id for epoch_id in range ( num_epochs ) ] for epoch_id in epochs : event_handler ( BeginEpochEvent ( epoch_id ) ) for step_id, data in enumerate ( reader ( ) ) : if : if self. checkpoint_cfg : self. _clean_checkpoint ( ) return if ( self. checkpoint_cfg and self. checkpoint_cfg. load_serial and self. checkpoint_cfg. step_id >= step_id and self. checkpoint_cfg. epoch_id == epoch_id ) : continue begin_event = BeginStepEvent ( epoch_id, step_id ) event_handler ( begin_event ) if begin_event. fetch_metrics : metrics = exe. run ( feed = data, fetch_list = [ var. name for var in self. train_func_outputs ] ) <",False,self.__stop,step_id >= 0,0.6690965294837952 923,"def process_logs_and_crash ( self, artifact_prefix ) : """"""Fetch symbolized logs and crashes."""""" if not artifact_prefix : return crash_location_regex = r""(.*)(Test unit written to )(data/.*)"" _, processed_log_path = tempfile. mkstemp ( ) with open ( processed_log_path, mode = ""w"", encoding = ""utf-8"" ) as new_file : with open ( self. fuzzer. logfile, encoding = ""utf-8"", errors = ""ignore"" ) as old_file : for line in old_file : line_match = re. match ( crash_location_regex, line ) if : crash_name = line_match. group ( 3 ). replace ( ""data/"", """" ) self. device. fetch ( self. fuzzer. data_path ( crash_name ), artifact_prefix ) crash_testcase_file_path = os. path. join ( artifact_prefix, crash_name ) line = re. sub ( crash_location_regex, r""\1\2"" + crash_testcase_file_path, line ) new_file",True,line_match,line_match,0.6560198068618774 924,"def test_is_power_of_k ( self ) : f = mathutil. is_power_of_k for i in range ( 1, 100 ) : if : self. failUnless ( f ( i, 2 ), ""but %d *is* a power of 2"" % i ) else : self. failIf ( f ( i, 2 ), ""but %d is *not* a power of 2"" % i ) for i in range ( 1, 100 ) : if i in ( 1, 3, 9, 27, 81 ) : self. failUnless ( f ( i, 3 ), ""but %d *is* a power of 3"" % i ) else : self. failIf ( f ( i, 3 ), ""but %d is *not* a power of 3"" % i )",False,"i in (1, 2, 4, 8, 16, 32, 64)","i in (1, 2, 9, 27, 81)",0.6488779783248901 925,"def extract_certs ( srvs ) : res = [ ] for srv in srvs : if ""key_descriptor"" in srv : for key in srv [ ""key_descriptor"" ] : if : for dat in key [ ""key_info"" ] [ ""x509_data"" ] : cert = repack_cert ( dat [ ""x509_certificate"" ] [ ""text"" ] ) if cert not in res : res. 
append ( cert ) elif not ""use"" in key : for dat in key [ ""key_info"" ] [ ""x509_data"" ] : cert = repack_cert ( dat [ ""x509_certificate"" ] [ ""text"" ] ) if cert not in res : res. append ( cert ) return res",False,'use' in key and key['use'] == use,key in key,0.65516197681427 926,"def narrow_declared_type ( declared : Type, narrowed : Type ) -> Type : """"""Return the declared type narrowed down to another type."""""" declared = get_proper_type ( declared ) narrowed = get_proper_type ( narrowed ) if declared == narrowed : return declared if isinstance ( declared, UnionType ) : return make_simplified_union ( [ narrow_declared_type ( x, narrowed ) for x in declared. relevant_items ( ) ] ) elif not is_overlapping_types ( declared, narrowed, prohibit_none_typevar_overlap = True ) : if : return UninhabitedType ( ) else : return NoneType ( ) elif isinstance ( narrowed, UnionType ) : return make_simplified_union ( [ narrow_declared_type ( declared, x ) for x in narrowed. relevant_items ( ) ] ) elif isinstance ( narrowed, AnyType ) : return narrowed elif isinstance ( declared, TypeType ) and isinstance ( narrowed, TypeType ) : return TypeType. make_normalized ( narrow_declared_type ( declared. item, narrowed. item ) ) elif isinstance ( declared, ( Instance, TupleType, TypeType, LiteralType ) ) : return meet_types ( declared, narrowed ) elif isinstance ( declared, TypedDictType ) and isinstance ( narrowed, Instance ) : if narrowed. type. fullname == ""builtins.dict"" and all ( isinstance ( t, AnyType ) for t in get_proper_types ( narrowed. args ) <",False,state.strict_optional,"isinstance(declared, UninhabitedType)",0.6519722938537598 927,"def handle ( self ) : from poetry. utils. env import EnvManager manager = EnvManager ( self. poetry ) current_env = manager. get ( ) for venv in manager. list ( ) : name = venv. path. name if : name = str ( venv. path ) if venv == current_env : self. line ( ""{} (Activated)"". format ( name ) ) continue self. line ( name )",False,self.option('full-path'),name == '',0.6458825469017029 928,"def _iter_fields ( self ) : _fields = self. fields if hasattr ( self. fields, ""items"" ) : _fields = list ( self. fields. items ( ) ) for k, v in _fields : file_name = None file_type = None file_headers = None if isinstance ( v, ( list, tuple ) ) : if : file_name, file_pointer = v elif len ( v ) == 3 : file_name, file_pointer, file_type = v else : file_name, file_pointer, file_type, file_headers = v else : file_pointer = v field = fields. RequestField ( name = k, data = file_pointer, filename = file_name, headers = file_headers ) field. make_multipart ( content_type = file_type ) yield field",False,len(v) == 2,len(v) == 4,0.6548320651054382 929,"def run ( self ) : if not self. __credentials : raise AnalyzerRunException ( ""no credentials retrieved"" ) split_credentials = self. __credentials. split ( ""|"" ) if len ( split_credentials )!= 2 : raise AnalyzerRunException ( ""CIRCL credentials not properly configured."" ""Template to use: '|'"" ) user = split_credentials [ 0 ] pwd = split_credentials [ 1 ] pssl = pypssl. PyPSSL ( basic_auth = ( user, pwd ) ) result = pssl. query ( self. observable_name ) certificates = [ ] if result. get ( self. observable_name, { } ) : certificates = list ( result. get ( self. observable_name ). get ( ""certificates"", [ ] ) ) parsed_result = { ""ip"" : self. observable_name, ""certificates"" : [ ] } for cert in certificates : subject = ( result. get ( self. observable_name ) . get ( ""subjects"", { } ) . get ( cert, { } ) . 
get ( ""values"", [ ] ) ) if : parsed_result [ ""certificates"" ]. append ( { ""fingerprint"" : cert, ""subject"" : subject [ 0 ] } ) return parsed_result",False,subject,len(parsed_result) > 0,0.7046061754226685 930,"def _populate_cache_dir ( self ) -> None : if self. stage_cache is None : return cache_etc_apt_path = Path ( self. stage_cache, ""etc"", ""apt"" ) if not cache_etc_apt_path. exists ( ) : cache_etc_apt_path. parent. mkdir ( parents = True, exist_ok = True ) os. symlink ( Path ( ""/etc/apt"" ), cache_etc_apt_path ) dpkg_path = shutil. which ( ""dpkg"" ) if dpkg_path : destination = Path ( self. stage_cache, dpkg_path [ 1 : ] ) if : destination. parent. mkdir ( parents = True, exist_ok = True ) os. symlink ( dpkg_path, destination ) else : logger. warning ( ""Cannot find 'dpkg' command needed to support multiarch"" )",False,not destination.exists(),destination.exists(),0.655684232711792 931,"def number_operators ( self, a, b, skip = [ ] ) : dict = { ""a"" : a, ""b"" : b } for name, expr in self. binops. items ( ) : if name not in skip : name = ""__%s__"" % name if : res = eval ( expr, dict ) self. binop_test ( a, b, res, expr, name ) for name, expr in self. unops. items ( ) : if name not in skip : name = ""__%s__"" % name if : res = eval ( expr, dict ) self. unop_test ( a, res, expr, name )",False,"hasattr(a, name)",a and b,0.6512517929077148 932,"def compose ( self, keys = None ) : composes = [ ] explored = set ( ) keys = set ( keys or [ ] ) graph = self. _graph for v in graph. topological_iter ( ) : if v. op. gpu or v. op. sparse : return [ ] if : continue if v in explored or type ( v. op ) in REDUCTION_OP : continue if graph. count_successors ( v )!= 1 : continue selected = [ v ] cur_node = graph. successors ( v ) [ 0 ] while ( graph. count_predecessors ( cur_node ) == 1 and _support ( cur_node ) and cur_node. key not in keys ) : selected. append ( cur_node ) if ( graph. count_successors ( cur_node )!= 1 or type ( cur_node. op ) in REDUCTION_OP ) : break else : cur_node = graph. successors ( cur_node ) [ 0 ] if len ( selected ) > 1 : explored. update ( selected ) composes. append ( list ( selected ) ) return self. _compose_graph ( composes )",False,type(v.op) not in SUPPORT_OP or v.key in keys,v.op.gpu and v.op.sparse,0.6524445414543152 933,"def unpack_namespace_single ( self, var_obj, in_vector, in_scalar ) : code = [ ] if isinstance ( var_obj, ArrayVariable ) : array_name = self. generator. get_array_name ( var_obj ) dtype = self. c_data_type ( var_obj. dtype ) if in_vector : code += [ ( ""_GSL_dataholder.{array} = <{dtype} *> "" ""_buf_{array}.data"". format ( array = array_name, dtype = dtype ) ) ] if : code += [ ( ""{array} = <{dtype} *> "" ""_buf_{array}.data"". format ( array = array_name, dtype = dtype ) ) ] else : if in_vector : code += [ '_GSL_dataholder.{var} = _namespace[""{var}""]'. format ( var = var_obj. name ) ] if : code += [ '{var} = _namespace[""{var}""]'. format ( var = var_obj. name ) ] return ""\n"". join ( code )",True,in_scalar,in_scalar,0.6746989488601685 934,"def get_ami_list_from_ec2 ( main_region, regions, owner, credentials, filters ) : """"""Get the AMI mappings structure given the constraints represented by the args."""""" amis_json = get_initialized_mappings_dicts ( ) for region_name in regions : images_for_region = get_images_ec2 ( filters, owner, region_name ) for architecture, mapping_name in ARCHITECTURES_TO_MAPPING_NAME. 
935,"def serialize ( self ) : data = { } if self. size : data [ ""size"" ] = self. size if self. order : if self. order not in self. ORDER_VALUES : raise RuntimeError ( ""Invalid order value:%s"" % self. order ) data [ ""order"" ] = self. order if self. key_field : data [ ""key_field"" ] = self. key_field if self. value_field : data [ ""value_field"" ] = self. value_field else : raise RuntimeError ( ""Invalid key_field: value_field required"" ) elif self. key_script : data [ ""key_script"" ] = self. key_script if : data [ ""value_script"" ] = self. value_script else : raise RuntimeError ( ""Invalid key_script: value_script required"" ) if self. params : data [ ""params"" ] = self. params params = self. _base_parameters ( ) params [ self. _internal_name ] = data return { self. name : params }",True,self.value_script,self.value_script,0.6631819009780884
936,"def _update_tileable_and_chunk_shape ( self, tileable_graph, chunk_result, failed_ops ) : for n in tileable_graph : if : continue tiled_n = get_tiled ( n ) if has_unknown_shape ( tiled_n ) : if any ( c. key not in chunk_result for c in tiled_n. chunks ) : continue new_nsplits = self. get_tileable_nsplits ( n, chunk_result = chunk_result ) for node in ( n, tiled_n ) : node. _update_shape ( tuple ( sum ( nsplit ) for nsplit in new_nsplits ) ) tiled_n. _nsplits = new_nsplits",False,n.op in failed_ops,n in failed_ops,0.6544344425201416
937,"def i2repr ( self, pkt, x ) : s = [ ] for v in x : if : if v [ 0 ] in DHCPRevOptions and isinstance ( DHCPRevOptions [ v [ 0 ] ] [ 1 ], Field ) : f = DHCPRevOptions [ v [ 0 ] ] [ 1 ] vv = "","". join ( f. i2repr ( pkt, val ) for val in v [ 1 : ] ) else : vv = "","". join ( repr ( val ) for val in v [ 1 : ] ) r = ""%s=%s"" % ( v [ 0 ], vv ) s. append ( r ) else : s. append ( sane ( v ) ) return ""[%s]"" % ( "" "". join ( s ) )",False,type(v) is tuple and len(v) >= 2,v,0.6507476568222046
938,"def h2i ( self, pkt, s ) : t = ( ) if type ( s ) is str : t = time. strptime ( s ) t = t [ : 2 ] + t [ 2 : - 3 ] else : if : y, m, d, h, min, sec, rest, rest, rest = time. gmtime ( time. time ( ) ) t = ( y, m, d, h, min, sec ) else : t = s return t",False,not s,pkt is not None,0.6916464567184448
939,"def handle ( self, input ) : raw_query = OrderedDict ( { } ) if ( input is not None ) and ( input. has_key ( ""op"" ) ) : if input [ ""op"" ] == ""insert"" : return None elif input [ ""op"" ] == ""query"" : if input [ ""query"" ]. has_key ( ""$query"" ) : raw_query [ ""query"" ] = input [ ""query"" ] [ ""$query"" ] if : orderby = input [ ""query"" ] [ ""$orderby"" ] raw_query [ ""orderby"" ] = orderby else : raw_query [ ""query"" ] = input [ ""query"" ] raw_query [ ""millis"" ] = input [ ""millis"" ] raw_query [ ""ns"" ] = input [ ""ns"" ] return raw_query elif input [ ""op"" ] == ""update"" : raw_query [ ""query"" ] = input [ ""query"" ] if input. has_key ( ""updateobj"" ) : if input [ ""updateobj"" ]. has_key ( ""orderby"" ) : orderby = input [ ""updateobj"" ] [ ""orderby"" ] raw_query [ ""orderby"" ] = orderby raw_query [ ""millis"" ] = input [ ""millis"" ] raw_query [ ""ns"" ] =",False,input['query'].has_key('$orderby'),input.has_key('$query'),0.6595568656921387
has_key ( ""orderby"" ) : orderby = input [ ""updateobj"" ] [ ""orderby"" ] raw_query [ ""orderby"" ] = orderby raw_query [ ""millis"" ] = input [ ""millis"" ] raw_query [ ""ns"" ] =",False,input['query'].has_key('$orderby'),input.has_key('$query'),0.6595568656921387 940,"def split_magnets ( self, filename ) : log. debug ( ""Attempting to open %s for splitting magnets."", filename ) magnets = [ ] try : with open ( filename, ""r"" ) as _file : magnets = list ( filter ( len, _file. read ( ). splitlines ( ) ) ) except IOError as ex : log. warning ( ""Unable to open %s: %s"", filename, ex ) if len ( magnets ) < 2 : return [ ] path = filename. rsplit ( os. sep, 1 ) [ 0 ] for magnet in magnets : if not is_magnet ( magnet ) : log. warning ( ""Found line which is not a magnet: %s"", magnet ) continue for part in magnet. split ( ""&"" ) : if part. startswith ( ""dn="" ) : name = part [ 3 : ]. strip ( ) if : mname = os. sep. join ( [ path, name + "".magnet"" ] ) break else : short_hash = magnet. split ( ""btih:"" ) [ 1 ] [ : 8 ] mname = ""."". join ( [ os. path. splitext ( filename ) [ 0 ], short_hash, ""magnet"" ] ) try : with open ( mname, ""w"" ) as _mfile : _mfile. write ( magnet ) except IOError as ex : log.",False,name,path,0.6815444231033325 941,"def fix_reference_name ( name, blacklist = None ) : """"""Return a syntax-valid Python reference name from an arbitrary name"""""" import re name = """". join ( re. split ( r""[^0-9a-zA-Z_]"", name ) ) while name and not re. match ( r""([a-zA-Z]+[0-9a-zA-Z_]*)$"", name ) : if : name = name [ 1 : ] continue name = str ( name ) if not name : name = ""data"" if blacklist is not None and name in blacklist : get_new_name = lambda index : name + ( ""%03d"" % index ) index = 0 while get_new_name ( index ) in blacklist : index += 1 name = get_new_name ( index ) return name",False,"not re.match('[a-zA-Z]', name[0])",name and name.startswith('.'),0.6477032899856567 942,"def test_slice_indexing ( self ) : self. set_up_indexing ( ) for np_fix, prime_fix in zip ( self. np_fixtures, self. prime_fixtures ) : ndim = len ( np_fix. shape ) if ndim == 1 : np. testing. assert_equal ( np_fix [ 2 : 5 ], prime_fix [ 2 : 5 ]. value ) continue np. testing. assert_equal ( np_fix [ :, 0 ], prime_fix [ :, 0 ]. value ) np. testing. assert_equal ( np_fix [ :, 1 ], prime_fix [ :, 1 ]. value ) np. testing. assert_equal ( np_fix [ :, - 1 ], prime_fix [ :, - 1 ]. value ) if : np. testing. assert_equal ( np_fix [ :, : - 1,... ], prime_fix [ :, : - 1,... ]. value ) np. testing. assert_equal ( np_fix [ :, : 1,... ], prime_fix [ :, : 1,... ]. value ) np. testing. assert_equal ( np_fix [ :, 1 :,... ], prime_fix [ :, 1 :,... ]. value ) elif ndim == 2 : np. testing. assert_equal ( np_fix [ :, : 2 ], prime_fix [ :, : - 1 ]. value ) np. testing. assert_equal ( np_fix [ :, 1 : ], prime_fix [ :, 1 : ]. value )",False,ndim > 2,ndim == 3,0.6682177782058716 943,"def op_fuse ( self ) : """"""fuse bn and scale"""""" new_layers = [ ] temp_layers = { } changed_layers = { } for index, pl in enumerate ( self. predict_layer ) : op_type = pl. type if op_type == ""Input"" : new_layers. append ( pl ) continue elif : if ( index!= len ( self. predict_layer ) - 1 ) and ( self. predict_layer [ index + 1 ]. type == ""Scale"" ) : temp_layers [ ""bn"" ] = pl continue else : new_layers. append ( pl ) temp_layers. clear ( ) elif op_type == ""Scale"" : if self. predict_layer [ index - 1 ]. type == ""BatchNorm"" : temp_layers [ ""scale"" ] = pl else : new_layers. append ( pl ) temp_layers. 
945,"def _get_port ( self, state, c_id, tpl_var ) : """"""Extract a port from a container_inspect or the k8s API given a template variable."""""" container_inspect = state. inspect_container ( c_id ) ports = [ ] try : ports = [ x. split ( ""/"" ) [ 0 ] for x in container_inspect [ ""NetworkSettings"" ] [ ""Ports"" ]. keys ( ) ] if len ( ports ) == 0 : raise IndexError except ( IndexError, KeyError, AttributeError ) : if : spec = state. get_kube_container_spec ( c_id ) if spec : ports = [ str ( x. get ( ""containerPort"" ) ) for x in spec. get ( ""ports"", [ ] ) ] else : ports = [ p. split ( ""/"" ) [ 0 ] for p in container_inspect [ ""Config"" ]. get ( ""ExposedPorts"", { } ). keys ( ) ] ports = sorted ( ports, key = int ) return self. _extract_port_from_list ( ports, tpl_var )",False,Platform.is_k8s(),state.has_kube_container(c_id),0.6565148830413818
946,"def upgrade ( ) : bind = op. get_bind ( ) session = db. Session ( bind = bind ) tables = [ Annotation, Dashboard, Database, DruidCluster, DruidColumn, DruidDatasource, DruidMetric, Slice, SqlaTable, SqlMetric, TableColumn, ] for table in tables : for record in session. query ( table ). all ( ) : for col in record. __table__. columns. values ( ) : if not col. primary_key : value = getattr ( record, col. name ) if : setattr ( record, col. name, None ) session. commit ( ) session. close ( )",False,value is not None and value.strip() == '',value,0.655017614364624
947,"def __init__ ( self, * args ) : if len ( args ) == 3 : assert ( isinstance ( args [ 0 ], Point3 ) and isinstance ( args [ 1 ], Vector3 ) and type ( args [ 2 ] ) == float ) self. p = args [ 0 ]. copy ( ) self. v = args [ 1 ] * args [ 2 ] / abs ( args [ 1 ] ) elif len ( args ) == 2 : if isinstance ( args [ 0 ], Point3 ) and isinstance ( args [ 1 ], Point3 ) : self. p = args [ 0 ]. copy ( ) self. v = args [ 1 ] - args [ 0 ] elif : self. p = args [ 0 ]. copy ( ) self. v = args [ 1 ]. copy ( ) else : raise AttributeError ( ""%r"" % ( args, ) ) elif len ( args ) == 1 : if isinstance ( args [ 0 ], Line3 ) : self. p = args [ 0 ]. p. copy ( ) self. v = args [ 0 ]. v. copy ( ) else : raise AttributeError ( ""%r"" % ( args, ) ) else : raise AttributeError ( ""%r"" % ( args, ) )",False,"isinstance(args[0], Point3) and isinstance(args[1], Vector3)",len(args) == 1,0.6530184745788574
948,"def _getOpcodeSuffix ( self, trace, va, op ) : pc = trace. getProgramCounter ( ) if va!= pc : return """" ovals = [ ] for o in op. opers : if o. isDeref ( ) : ova = o. getOperAddr ( op, trace ) else : ova = o. getOperValue ( op, trace ) sym = None if : rova = trace. readMemoryFormat ( ova, "" sym = trace. getSymByAddr ( rova ) if sym is None : sym = trace. getSymByAddr ( ova ) if sym : ovals. append ( repr ( sym ) ) elif o. isDeref ( ) : ovals. append ( ""[0x%.8x]"" % ova ) else : ovals. append ( ""0x%.8x"" % ova ) if [ branch for branch, flag in op. getBranches ( ) if flag & envi. BR_COND ] : emu = self. emu_cache. get ( self. arch, vtrace. getEmu ( trace ) ) emu. setRegisters ( trace. getRegisters ( ) ) emu. setProgramCounter ( va ) emu. executeOpcode ( op ) nextpc = emu. getProgramCounter ( ) if va + len ( op )!= nextpc : ovals. append ( ""Branch taken: 0x%08x"" % nextpc )",False,trace.isValidPointer(ova),o.isMemory(),0.6635071039199829
949,"def _connect_line2_line2 ( A, B ) : d = B. v. y * A. v. x - B. v. x * A. v. y if d == 0 : if : p1, p2 = _connect_point2_line2 ( B. p, A ) return p2, p1 return _connect_point2_line2 ( A. p, B ) dy = A. p. y - B. p. y dx = A. p. x - B. p. x ua = ( B. v. x * dy - B. v. y * dx ) / d if not A. _u_in ( ua ) : ua = max ( min ( ua, 1.0 ), 0.0 ) ub = ( A. v. x * dy - A. v. y * dx ) / d if not B. _u_in ( ub ) : ub = max ( min ( ub, 1.0 ), 0.0 ) return LineSegment2 ( Point2 ( A. p. x + ua * A. v. x, A. p. y + ua * A. v. y ), Point2 ( B. p. x + ub * B. v. x, B. p. y + ub * B. v. y ), )",False,"isinstance(B, Ray2) or isinstance(B, LineSegment2)",A == B,0.652072012424469
950,"def process_output ( self, data, output_prompt, input_lines, output, is_doctest, image_file ) : """"""Process data block for OUTPUT token."""""" if is_doctest : submitted = data. strip ( ) found = output if : found = found. strip ( ) ind = found. find ( output_prompt ) if ind < 0 : e = 'output prompt=""%s"" does not match out line=%s' % ( output_prompt, found, ) raise RuntimeError ( e ) found = found [ len ( output_prompt ) : ]. strip ( ) if found!= submitted : e = ( 'doctest failure for input_lines=""%s"" with' 'found_output=""%s"" and submitted output=""%s""' % ( input_lines, found, submitted ) ) raise RuntimeError ( e )",False,found is not None,found,0.6617709994316101
951,"def cleanup_ad_hoc_commands ( self ) : skipped, deleted = 0, 0 ad_hoc_commands = AdHocCommand. objects. filter ( created__lt = self. cutoff ) for ad_hoc_command in ad_hoc_commands. iterator ( ) : ad_hoc_command_display = '""%s"" (%d events)' % ( str ( ad_hoc_command ), ad_hoc_command. ad_hoc_command_events. count ( ), ) if ad_hoc_command. status in ( ""pending"", ""waiting"", ""running"" ) : action_text = ""would skip"" if self. dry_run else ""skipping"" self. logger. debug ( ""%s %s ad hoc command %s"", action_text, ad_hoc_command. status, ad_hoc_command_display, ) skipped += 1 else : action_text = ""would delete"" if self. dry_run else ""deleting"" self. logger. info ( ""%s %s"", action_text, ad_hoc_command_display ) if : ad_hoc_command. delete ( ) deleted += 1 skipped += AdHocCommand. objects. filter ( created__gte = self. cutoff ). count ( ) return skipped, deleted",False,not self.dry_run,ad_hoc_command.exists(),0.6582926511764526
952,"def default_loader ( href, parse, encoding = None ) : with open ( href ) as file : if : data = ElementTree. parse ( file ). getroot ( ) else : data = file. read ( ) if encoding : data = data. decode ( encoding ) return data",False,parse == 'xml',parse,0.66475510597229
953,"def handle_ctcp ( self, conn, evt ) : args = evt. arguments ( ) source = evt. source ( ). split ( ""!"" ) [ 0 ] if args : if args [ 0 ] == ""VERSION"" : conn. ctcp_reply ( source, ""VERSION "" + BOT_VERSION ) elif args [ 0 ] == ""PING"" : conn. ctcp_reply ( source, ""PING"" ) elif : conn. ctcp_reply ( source, ""CLIENTINFO PING VERSION CLIENTINFO"" )",True,args[0] == 'CLIENTINFO',args[0] == 'CLIENTINFO',0.6536786556243896
954,"def get_bill_no_and_update_amounts ( reference_doctype, ref_doc, total_amount, exchange_rate, party_account_currency, company_currency, ) : outstanding_amount, bill_no = None if reference_doctype in ( ""Sales Invoice"", ""Purchase Invoice"" ) : outstanding_amount = ref_doc. get ( ""outstanding_amount"" ) bill_no = ref_doc. get ( ""bill_no"" ) elif reference_doctype == ""Expense Claim"" : outstanding_amount = ( flt ( ref_doc. get ( ""total_sanctioned_amount"" ) ) + flt ( ref_doc. get ( ""total_taxes_and_charges"" ) ) - flt ( ref_doc. get ( ""total_amount_reimbursed"" ) ) - flt ( ref_doc. get ( ""total_advance_amount"" ) ) ) elif reference_doctype == ""Employee Advance"" : outstanding_amount = flt ( ref_doc. advance_amount ) - flt ( ref_doc. paid_amount ) if : outstanding_amount = flt ( outstanding_amount ) * flt ( exchange_rate ) if party_account_currency == company_currency : exchange_rate = 1 else : outstanding_amount = flt ( total_amount ) - flt ( ref_doc. advance_paid ) return outstanding_amount, exchange_rate, bill_no",False,party_account_currency != ref_doc.currency,party_account_currency == company_currency,0.6521563529968262
955,"def update_ui ( self ) : if self. _state == ""closed"" : return while not self. _work_events_queue. empty ( ) : self. handle_work_event ( * self. _work_events_queue. get ( ) ) if self. _state == ""closed"" : return if self. _state == ""idle"" : if : self. _ok_button. configure ( state = ""normal"" ) else : self. _ok_button. configure ( state = ""disabled"" ) else : self. _ok_button. configure ( state = ""disabled"" ) if self. _state == ""done"" : set_text_if_different ( self. _cancel_button, tr ( ""Close"" ) ) else : set_text_if_different ( self. _cancel_button, tr ( ""Cancel"" ) )",False,self.is_ready_for_work(),self.is_different_ui_idle(),0.6476125121116638
956,"def _recursive_saxify ( self, element, prefixes ) : content_handler = self. _content_handler tag = element. tag if tag is Comment or tag is ProcessingInstruction : if tag is ProcessingInstruction : content_handler. processingInstruction ( element. target, element. text ) if : content_handler. characters ( element. tail ) return new_prefixes = [ ] build_qname = self. _build_qname attribs = element. items ( ) if attribs : attr_values = { } attr_qnames = { } for attr_ns_name, value in attribs : attr_ns_tuple = _getNsTag ( attr_ns_name ) attr_values [ attr_ns_tuple ] = value attr_qnames [ attr_ns_tuple ] = build_qname ( attr_ns_tuple [ 0 ], attr_ns_tuple [ 1 ], prefixes, new_prefixes ) sax_attributes = self. _attr_class ( attr_values, attr_qnames ) else : sax_attributes = self. _empty_attributes ns_uri, local_name = _getNsTag ( tag ) qname = build_qname ( ns_uri, local_name, prefixes, new_prefixes ) for prefix, uri in new_prefixes : content_handler. startPrefixMapping ( prefix, uri ) content_handler. startElementNS ( ( ns_uri, local_name ), qname, sax_attributes ) if element. text : content_handler. characters ( element. text ) for child in element : self. _recursive_sax",True,element.tail,element.tail,0.6731557846069336
957,"def _mapdict_values ( items ) : opt_val = [ ] for item in items : state = item [ : - 1 ] val = item [ - 1 ] state [ 0 ] if : state = state [ 0 ] or """" else : state = "" "". join ( state ) opt_val. append ( state ) if val is not None : opt_val. append ( val ) return opt_val",False,len(state) == 1,state is not None,0.6603293418884277
958,"def _persist_metadata ( self, dirname, filename ) : metadata_path = ""{0}/{1}.json"". format ( dirname, filename ) if self. media_metadata or self. comments or self. include_location : if self. posts : if self. latest : self. merge_json ( { ""GraphImages"" : self. posts }, metadata_path ) else : self. save_json ( { ""GraphImages"" : self. posts }, metadata_path ) if : if self. latest : self. merge_json ( { ""GraphStories"" : self. stories }, metadata_path ) else : self. save_json ( { ""GraphStories"" : self. stories }, metadata_path )",True,self.stories,self.stories,0.6608641147613525
959,"def _dump_str ( v ) : if sys. version_info < ( 3, ) and hasattr ( v, ""decode"" ) and isinstance ( v, str ) : v = v. decode ( ""utf-8"" ) v = ""%r"" % v if v [ 0 ] == ""u"" : v = v [ 1 : ] singlequote = v. startswith ( ""'"" ) if singlequote or v. startswith ( '""' ) : v = v [ 1 : - 1 ] if singlequote : v = v. replace ( ""\\'"", ""'"" ) v = v. replace ( '""', '\\""' ) v = v. split ( ""\\x"" ) while len ( v ) > 1 : i = - 1 if not v [ 0 ] : v = v [ 1 : ] v [ 0 ] = v [ 0 ]. replace ( ""\\\\"", ""\\"" ) joinx = v [ 0 ] [ i ]!= ""\\"" while v [ 0 ] [ : i ] and v [ 0 ] [ i ] == ""\\"" : joinx = not joinx i -= 1 if : joiner = ""x"" else : joiner = ""u00"" v = [ v [ 0 ] + joiner + v [ 1 ] ] + v [ 2 : ] return unicode ( '""' + v [ 0 ] + '""' )",True,joinx,joinx,0.6794019937515259
960,"def _handle_socket ( self, event, fd, multi, data, _pycurl = pycurl ) : if event == _pycurl. POLL_REMOVE : if : self. hub. remove ( fd ) self. _fds. pop ( fd, None ) else : if : self. hub. remove ( fd ) if event == _pycurl. POLL_IN : self. hub. add_reader ( fd, self. on_readable, fd ) self. _fds [ fd ] = READ elif event == _pycurl. POLL_OUT : self. hub. add_writer ( fd, self. on_writable, fd ) self. _fds [ fd ] = WRITE elif event == _pycurl. POLL_INOUT : self. hub. add_reader ( fd, self. on_readable, fd ) self. hub. add_writer ( fd, self. on_writable, fd ) self. _fds [ fd ] = READ | WRITE",False,fd in self._fds,multi,0.6659975051879883
961,"def load_vocab ( vocab_file ) : """"""Loads a vocabulary file into a dictionary."""""" extra_map = { } extra_map [ ""[unused1]"" ] = ""[X_SEP]"" for i in range ( 10 ) : extra_map [ ""[unused{}]"". format ( i + 2 ) ] = ""[SEP_{}]"". format ( i ) extra_map [ ""[unused12]"" ] = ""[S2S_SEP]"" extra_map [ ""[unused13]"" ] = ""[S2S_CLS]"" extra_map [ ""[unused14]"" ] = ""[L2R_SEP]"" extra_map [ ""[unused15]"" ] = ""[L2R_CLS]"" extra_map [ ""[unused16]"" ] = ""[R2L_SEP]"" extra_map [ ""[unused17]"" ] = ""[R2L_CLS]"" extra_map [ ""[unused18]"" ] = ""[S2S_SOS]"" vocab = collections. OrderedDict ( ) index = 0 with open ( vocab_file, ""r"", encoding = ""utf-8"" ) as reader : while True : token = reader. readline ( ) if not token : break token = token. strip ( ) if : token = extra_map [ token ] vocab [ token ] = index index += 1 return vocab",True,token in extra_map,token in extra_map,0.6640057563781738
962,"def loadFromTVDB ( self, cache = True, tvapi = None, cachedSeason = None ) : logger. log ( str ( self. tvdbid ) + u"": Loading show info from theTVDB"" ) if tvapi is None : ltvdb_api_parms = sickbeard. TVDB_API_PARMS. copy ( ) if not cache : ltvdb_api_parms [ ""cache"" ] = False if : ltvdb_api_parms [ ""language"" ] = self. lang t = tvdb_api. Tvdb ( ** ltvdb_api_parms ) else : t = tvapi myEp = t [ self. tvdbid ] try : self. name = myEp [ ""seriesname"" ]. strip ( ) except AttributeError : raise tvdb_exceptions. tvdb_attributenotfound ( ""Found %s, but attribute'seriesname' was empty."" % ( self. tvdbid ) ) self. genre = myEp [ ""genre"" ] self. network = myEp [ ""network"" ] if myEp [ ""airs_dayofweek"" ] is not None and myEp [ ""airs_time"" ] is not None : self. airs = myEp [ ""airs_dayofweek"" ] + "" "" + myEp [ ""airs_time"" ] if myEp [ ""firstaired"" ] is not None and myEp [ ""firstaired"" ] : self. startyear = int ( myEp [ ""firstaired"" ]. split ( ""-"" ) [ 0 ] ) if self. airs is None : self. airs = """" if myEp [ ""status"" ] is not None : self, node, target, position = ""last-child"", save = True, refresh_target = True ) : if self. tree_model. _mptt_is_tracking : return self. insert_node ( node, target, position = position, save = save, allow_existing_pk = True, refresh_target = refresh_target, ) else : if : if node. is_child_node ( ) : self. _make_child_root_node ( node ) elif target. is_root_node ( ) and position in ( ""left"", ""right"" ) : self. _make_sibling_of_root_node ( node, target, position ) else : if node. is_root_node ( ) : self. _move_root_node ( node, target, position ) else : self. _move_child_node ( node, target, position )",False,target is None,save or refresh_target,0.6632372140884399
964,"def scan ( self, offset = 0, ** kwargs ) : for pool_header in super ( PoolScanShutdownCallback, self ). scan ( offset = offset, ** kwargs ) : callback = self. profile. _SHUTDOWN_PACKET ( offset = pool_header. end ( ), vm = self. address_space ) if : continue driver_obj = callback. DeviceObject. dereference ( vm = self. kernel_address_space ). DriverObject function_pointer = driver_obj. MajorFunction [ ""IRP_MJ_SHUTDOWN"" ] details = driver_obj. DriverName yield ""IoRegisterShutdownNotification"", function_pointer, details",False,not callback.sanity_check(self.kernel_address_space),self.is_tab_valid(),0.6504683494567871
965,"def process ( self ) : L, S, R, A = self. inputs Ma = self. outputs [ 0 ] if not Ma. is_linked : return loc = Vector_generate ( L. sv_get ( ) ) scale = Vector_generate ( S. sv_get ( ) ) rot = Vector_generate ( R. sv_get ( ) ) rotA, angle = [ [ ] ], [ [ 0.0 ] ] if A. is_linked : if A. links [ 0 ]. from_socket. bl_idname == ""SvVerticesSocket"" : rotA = Vector_generate ( A. sv_get ( ) ) angle = [ [ ] ] elif : angle = A. sv_get ( ) rotA = [ [ ] ] else : angle = A. sv_get ( ) rotA = [ [ ] ] result = [ ] m_add = result. extend if self. flat_output else result. append params = match_long_repeat ( [ loc, scale, rot, angle, rotA ] ) for par in zip ( * params ) : matrixes = matrix_in ( par ) m_add ( matrixes ) Ma. sv_set ( result )",False,A.links[0].from_socket.bl_idname == 'SvStringsSocket',A.is_linked,0.6534470319747925
966,"def compare ( self, d1, d2, p1, p2, root ) : """"""Compare dicts d1 and d2."""""" for h in sorted ( d1. keys ( ) ) : p1, p2 = d1. get ( h ), d2. get ( h ) if : lines1, lines2 = g. splitLines ( p1. b ), g. splitLines ( p2. b ) aList = list ( difflib. unified_diff ( lines1, lines2, ""vr1"", ""vr2"" ) ) if aList : p = root. insertAsLastChild ( ) p. h = h p. b = """". join ( aList ) p1. clone ( ). moveToLastChildOf ( p ) p2. clone ( ). moveToLastChildOf ( p ) elif p1. b. strip ( ) : p = root. insertAsLastChild ( ) p. h = h + ""(%s only)"" % p1. h p1. clone ( ). moveToLastChildOf ( p ) for h in sorted ( d2. keys ( ) ) : p2 = d2. get ( h ) if h not in d1 and p2. b. strip ( ) : p = root. insertAsLastChild ( ) p. h = h + ""(%s only)"" % p2. h p2. clone ( ). moveToLastChildOf ( p ) ",False,h in d2,p1.h and p2.h,0.6693493127822876
967,"def get_data ( self, df : pd. DataFrame ) -> VizData : if df. empty : return None for key in self. spatial_control_keys : df = self. process_spatial_data_obj ( key, df ) features = [ ] for d in df. to_dict ( orient = ""records"" ) : feature = self. get_properties ( d ) extra_props = self. get_js_columns ( d ) if : feature [ ""extraProps"" ] = extra_props features. append ( feature ) return { ""features"" : features, ""mapboxApiKey"" : config [ ""MAPBOX_API_KEY"" ], ""metricLabels"" : self. metric_labels, }",True,extra_props,extra_props,0.6635777354240417
968,"def collation_cb ( * args, ** kwargs ) : layout = self [ args ] layout_type = type ( layout ). __name__ if len ( container. keys ( ) )!= len ( layout. keys ( ) ) : raise ValueError ( ""Collated DynamicMaps must return "" ""%s with consistent number of items."" % layout_type ) key = kwargs [ ""selection_key"" ] index = kwargs [ ""selection_index"" ] obj_type = kwargs [ ""selection_type"" ] dyn_type_map = defaultdict ( list ) for k, v in layout. data. items ( ) : if : return layout [ k ] dyn_type_map [ type ( v ) ]. append ( v ) dyn_type_counter = { t : len ( vals ) for t, vals in dyn_type_map. items ( ) } if dyn_type_counter!= type_counter : raise ValueError ( ""The objects in a %s returned by a "" ""DynamicMap must consistently return "" ""the same number of items of the "" ""same type."" % layout_type ) return dyn_type_map [ obj_type ] [ index ]",True,k == key,k == key,0.6799268126487732
969,"def handle ( self, * args, ** options ) : """"""Command entry point."""""" exts_pool. load_all ( ) for filename in options [ ""files"" ] : try : with transaction. atomic ( ) : self. _import ( filename, options ) except CommandError as exc : raise exc except UnicodeDecodeError : self. stdout. write ( self. style. NOTICE ( _ ( ""CSV file is not encoded in UTF-8, attempting to guess "" ""encoding"" ) ) ) detector = UniversalDetector ( ) with io. open ( filename, ""rb"" ) as fp : for line in fp : detector. feed ( line ) if : break detector. close ( ) self. stdout. write ( self. style. NOTICE ( _ ( ""Reading CSV file using %(encoding)s encoding"" ) % detector. result ) ) try : with transaction.",False,detector.done,options['force'],0.6640725135803223
970,"def extractValues ( self, constraints, analysis, arch ) : if not constraints : return [ ] to_return = [ ] for constraintString in constraints : m = re. match ( Searcher. CONSTRAINT_REGEX, constraintString ) if : raise RopperError ( ""Not a valid constraint"" ) reg1 = m. group ( 1 ) reg2 = m. group ( 3 ) reg1 = reg1. replace ( ""["", """" ) reg1 = reg1. replace ( ""]"", """" ) reg1 = arch. getRegisterName ( reg1 ) reg2 = reg2. replace ( ""["", """" ) reg2 = reg2. replace ( ""]"", """" ) if reg2. isdigit ( ) or isHex ( reg2 ) : reg2 = None reg2 = arch. getRegisterName ( reg2 ) to_return. append ( ( reg1, reg2 ) ) return to_return",False,not m,m is None,0.6749292612075806
971,"def filtercomments ( source ) : """"""NOT USED: strips trailing comments and put them at the top."""""" trailing_comments = [ ] comment = True while comment : if re. search ( r""^\s*\/\*"", source ) : comment = source [ 0, source. index ( ""*/"" ) + 2 ] elif : comment = re. search ( r""^\s*\/\/"", source ). group ( 0 ) else : comment = None if comment : source = re. sub ( r""^\s+"", """", source [ len ( comment ) : ] ) trailing_comments. append ( comment ) return ""\n"". join ( trailing_comments ) + source",False,"re.search('^\\s*\\/\\/', source)","source.search('^\\s*\\/\\/', source)",0.6493881940841675
972,"def deleteMenu ( self, menuName ) : try : menu = self. getMenu ( menuName ) if : self. destroy ( menu ) self. destroyMenu ( menuName ) else : g. es ( ""can't delete menu:"", menuName ) except Exception : g. es ( ""exception deleting"", menuName, ""menu"" ) g. es_exception ( )",True,menu,menu,0.6883183717727661
973,"def __init__ ( self, opt, data_loader = None, cands = None, shared = None, ** kwargs ) : super ( ). __init__ ( opt, data_loader, cands, shared, ** kwargs ) self. cycle = kwargs [ ""cycle"" ] if ""cycle"" in kwargs else True if shared : self. reset_data = shared [ ""reset"" ] self. datafile = shared [ ""datafile"" ] self. data_loader = shared [ ""data_loader"" ] if : self. lock = shared [ ""lock"" ] else : self. data_loader = data_loader if ""datafile"" not in opt : raise KeyError ( ERROR_MESSAGE_NO_DATAFILE. format ( class_name = self. __class__. __name__ ) ) self. datafile = opt [ ""datafile"" ] self. reset_data = None self. is_reset = True self. entry_idx = 0 self. cur_episode = self. _FIRST_PASS self. num_eps = None self. num_exs = None self. rank = get_rank ( ) self. num_workers = num_workers ( ) self. is_distributed_and_is_eval = ( self. num_workers > 1 and not DatatypeHelper. is_training ( opt [ ""datatype"" ] ) )",False,'lock' in shared,shared,0.6630645394325256
974,"def to_mongo ( self, document ) : if isinstance ( document, DBRef ) : if not self. dbref : return document. id return document if isinstance ( document, Document ) : id_ = document. pk if : self. error ( ""You can only reference documents once they have"" "" been saved to the database"" ) cls = document else : id_ = document cls = self. document_type id_field_name = cls. _meta [ ""id_field"" ] id_field = cls. _fields [ id_field_name ] id_ = id_field. to_mongo ( id_ ) if self. document_type. _meta. get ( ""abstract"" ) : collection = cls. _get_collection_name ( ) return DBRef ( collection, id_, cls = cls. _class_name ) elif self. dbref : collection = cls. _get_collection_name ( ) return DBRef ( collection, id_ ) return id_",False,id_ is None,id_ == None,0.6651803255081177
975,"def get_subkeys ( self, key ) : parent_path = key. get_path ( ) subkeys = [ ] for k in self. keys : test_path = k. get_path ( ) if test_path. lower ( ). startswith ( parent_path. lower ( ) ) : sub = test_path [ len ( parent_path ) : ] if sub. startswith ( ""\\"" ) : sub = sub [ 1 : ] end_slash = sub. find ( ""\\"" ) if end_slash >= 0 : sub = sub [ : end_slash ] if : continue subkeys. append ( sub ) return subkeys",False,not sub,end_slash == -1,0.6703736186027527
976,"def generator ( self, data ) : if self. _config. SILENT : silent_vars = self. _get_silent_vars ( ) for task in data : for var, val in task. environment_variables ( ) : if self. _config. SILENT : if : continue yield ( 0, [ int ( task. UniqueProcessId ), str ( task. ImageFileName ), Address ( task. Peb. ProcessParameters. Environment ), str ( var ), str ( val ), ], )",False,var in silent_vars,silent_vars,0.6612224578857422
977,"def get_documents_data ( self ) : """"""Return Editors: path, project, cursor position"""""" files = [ ] for i in range ( self. count ( ) ) : if : files. append ( [ self. widget ( i ). ID, self. widget ( i ). get_cursor_position ( ) ] ) self. widget ( i ). _sidebarWidget. _save_breakpoints_bookmarks ( ) return files",False,type(self.widget(i)) is editor.Editor and self.widget(i).ID != '',self.widget(i).exists(),0.6579665541648865
978,"def get ( self, block = True, timeout = None ) : self. not_empty. acquire ( ) try : if not block : if : return None elif timeout is None : while self. qsize ( ) == 0 : self. not_empty. wait ( ) elif timeout < 0 : raise ValueError ( ""'timeout' must be a positive number"" ) else : end_time = time. time ( ) + timeout while not self. qsize ( ) : remaining = end_time - time. time ( ) if remaining <= 0.0 : return None self. not_empty. wait ( remaining ) item = self. _get ( ) return item finally : self. not_empty. release ( )",True,self.qsize() == 0,self.qsize() == 0,0.6634102463722229
979,"def _optimize ( self, S : np. ndarray, u : Optional [ np. ndarray ] = None, w0 : Optional [ np. ndarray ] = None ) -> np. ndarray : if self. method == self. OPT_INV : if u is not None : warnings. warn ( ""`u` is set but will not be used for `inv` portfolio"" ) if : warnings. warn ( ""`w0` is set but will not be used for `inv` portfolio"" ) return self. _optimize_inv ( S ) if self. method == self. OPT_GMV : if u is not None : warnings. warn ( ""`u` is set but will not be used for `gmv` portfolio"" ) return self. _optimize_gmv ( S, w0 ) if self. method == self. OPT_MVO : return self. _optimize_mvo ( S, u, w0 ) if self. method == self. OPT_RP : if u is not None : warnings. warn ( ""`u` is set but will not be used for `rp` portfolio"" ) return self. _optimize_rp ( S, w0 )",True,w0 is not None,w0 is not None,0.6668111085891724
980,"def getTempMarkdownPreviewPath ( view ) : """"""return a permanent full path of the temp markdown preview file"""""" settings = sublime. load_settings ( ""MarkdownPreview.sublime-settings"" ) tmp_filename = ""%s.html"" % view. id ( ) tmp_dir = tempfile. gettempdir ( ) if settings. get ( ""path_tempfile"" ) : if : tmp_dir = settings. get ( ""path_tempfile"" ) else : tmp_dir = os. path. join ( os. path. dirname ( view. file_name ( ) ), settings. get ( ""path_tempfile"" ) ) if not os. path. isdir ( tmp_dir ) : os. makedirs ( tmp_dir ) tmp_fullpath = os. path. join ( tmp_dir, tmp_filename ) return tmp_fullpath",False,os.path.isabs(settings.get('path_tempfile')),view.file_name() == None,0.6477821469306946
981,"def compute_value ( self ) : now = int ( time. time ( ) ) current_interval = now - ( now % self. aggregation_frequency ) age_threshold = current_interval - ( settings [ ""MAX_AGGREGATION_INTERVALS"" ] * self. aggregation_frequency ) for buffer in list ( self. interval_buffers. values ( ) ) : if buffer. active : value = self. aggregation_func ( buffer. values ) datapoint = ( buffer. interval, value ) state. events. metricGenerated ( self. metric_path, datapoint ) state. instrumentation. increment ( ""aggregateDatapointsSent"" ) buffer. mark_inactive ( ) if buffer. interval < age_threshold : del self. interval_buffers [ buffer. interval ] if : self. close ( ) self. configured = False del BufferManager. buffers [ self. metric_path ]",False,not self.interval_buffers,self.configured,0.6556923389434814
982,"def selectRow ( self, rowNumber, highlight = None ) : if rowNumber == ""h"" : rowNumber = 0 else : rowNumber = int ( rowNumber ) + 1 if 1 > rowNumber >= len ( self. cells ) + 1 : raise Exception ( ""Invalid row number."" ) else : selected = self. cells [ rowNumber ] [ 0 ]. selected for cell in self. cells [ rowNumber ] : if highlight is None : if selected : cell. deselect ( ) else : cell. select ( ) else : if : cell. mouseEnter ( ) else : cell. mouseLeave ( )",True,highlight,highlight,0.7138727307319641
983,"def kendall_tau ( a, b ) : n_samples = a. shape [ 0 ] assert a. shape == b. shape n_concordant = 0 n_disconcordant = 0 for i in range ( n_samples ) : for j in range ( i + 1, n_samples ) : if a [ i ] > a [ j ] and b [ i ] > b [ j ] : n_concordant = n_concordant + 1 if a [ i ] < a [ j ] and b [ i ] < b [ j ] : n_concordant = n_concordant + 1 if : n_disconcordant = n_disconcordant + 1 if a [ i ] < a [ j ] and b [ i ] > b [ j ] : n_disconcordant = n_disconcordant + 1 return ( n_concordant - n_disconcordant ) / ( 0.5 * n_samples * ( n_samples - 1 ) )",False,a[i] > a[j] and b[i] < b[j],n_concordant > 0,0.6508519649505615
984,"def reader ( ) : with open ( file_list ) as flist : full_lines = [ line. strip ( ) for line in flist ] if shuffle : random. shuffle ( full_lines ) if mode == ""train"" : trainer_id = int ( os. getenv ( ""PADDLE_TRAINER_ID"" ) ) trainer_count = int ( os. getenv ( ""PADDLE_TRAINERS"" ) ) per_node_lines = len ( full_lines ) / trainer_count lines = full_lines [ trainer_id * per_node_lines : ( trainer_id + 1 ) * per_node_lines ] print ( ""read images from %d, length: %d, lines length: %d, total: %d"" % ( trainer_id * per_node_lines, per_node_lines, len ( lines ), len ( full_lines ), ) ) else : lines = full_lines for line in lines : if mode == ""train"" : img_path, label = line. split ( ) img_path = img_path. replace ( ""JPEG"", ""jpeg"" ) <",False,mode == 'val',len(line) > 0,0.6595242023468018
985,"def on_task_start ( self, task, config ) : """"""Task starting, install cookiejar"""""" import os config = self. prepare_config ( config ) cookie_type = config. get ( ""type"" ) cookie_file = os. path. expanduser ( config. get ( ""file"" ) ) cj = self. cookiejars. get ( cookie_file, None ) if cj is not None : log. debug ( ""Loading cookiejar from cache."" ) elif cookie_type == ""firefox3"" : log. debug ( ""Loading %s cookies"" % cookie_type ) cj = self. sqlite2cookie ( cookie_file ) self. cookiejars [ cookie_file ] = cj else : if cookie_type == ""mozilla"" : log. debug ( ""Loading %s cookies"" % cookie_type ) cj = http. cookiejar. MozillaCookieJar ( ) self. cookiejars [ cookie_file ] = cj elif : log. debug ( ""Loading %s cookies"" % cookie_type ) cj = http. cookiejar. LWPCookieJar ( ) self. cookiejars [ cookie_file ] = cj else : raise plugin. PluginError ( ""Unknown cookie type %s"" % cookie_type, log ) try : cj. load ( filename = cookie_file, ignore_expires = True ) log. debug ( ""%s cookies loaded"" % cookie_type ) except ( http. cookiejar. LoadError, IOError ) : import sys raise plugin. PluginError ( <",False,cookie_type == 'lwp',cookie_type == 'lWPCcookie',0.6705670952796936
986,"def dont_let_stderr_buffer ( ) : while True : line = context. daemon. stderr. readline ( ) if not line : return if : context. num_workers_crashed += 1 print ( f""deployd stderr: {line}"" )",False,DEAD_DEPLOYD_WORKER_MESSAGE.encode('utf-8') in line,context.num_workers_crashed >= len(line),0.6471341252326965
987,"def expand ( self, pvalue ) : result = { } for dataset_key_idx, dataset_key in enumerate ( self. _sorted_dataset_keys ) : dataset_cache_path = _get_dataset_cache_path ( self. _cache_base_dir, dataset_key ) manifest_file = _ManifestFile ( dataset_cache_path ) manifest = manifest_file. read ( ) if not manifest : continue result [ dataset_key ] = { } for key, cache_key_idx in manifest. items ( ) : if : result [ dataset_key ] [ key ] = pvalue. pipeline | ""Read[AnalysisIndex{}][CacheKeyIndex{}]"". format ( dataset_key_idx, cache_key_idx ) >> self. _source ( ""{}{}"". format ( os. path. join ( dataset_cache_path, str ( cache_key_idx ) ), ""-*-of-*"" ) ) return result",False,self._should_read_cache_entry_key(key),key in result,0.6488548517227173
988,"def compute ( self, split ) : import msgpack with closing ( self. open_file ( ) ) as f : magic = f. read ( 8 ) start = split. index * self. splitSize end = ( split. index + 1 ) * self. splitSize start = self. find_magic ( f, start, magic ) if : return f. seek ( start ) hdr_size = 12 while start < end : m = f. read ( len ( magic ) ) if m!= magic : break compressed, count, size = struct. unpack ( ""III"", f. read ( hdr_size ) ) d = f. read ( size ) assert len ( d ) == size, ""unexpected end"" if compressed : d = zlib. decompress ( d ) for r in msgpack. Unpacker ( BytesIO ( d ) ) : yield r start += len ( magic ) + hdr_size + size",False,start < 0,start == end,0.6737005710601807
989,"def _checkHttpProxy ( selfip, proxies, isHttp = True ) : types = - 1 speed = - 1 if isHttp : test_url = config. TEST_HTTP_HEADER else : test_url = config. TEST_HTTPS_HEADER try : start = time. time ( ) r = requests. get ( url = test_url, headers = config. get_header ( ), timeout = config. TIMEOUT, proxies = proxies, ) if r. ok : speed = round ( time. time ( ) - start, 2 ) content = json. loads ( r. text ) headers = content [ ""headers"" ] ip = content [ ""origin"" ] proxy_connection = headers. get ( ""Proxy-Connection"", None ) if "","" in ip : types = 2 elif : types = 1 else : types = 0 return True, types, speed else : return False, types, speed except Exception as e : return False, types, speed",False,proxy_connection,'.' in proxy_connection,0.6645303964614868
990,"def cube_report ( cube_name ) : report_request = json. loads ( request. data ) try : queries = report_request [ ""queries"" ] except KeyError : raise RequestError ( ""Report request does not contain 'queries' key"" ) cell_cuts = report_request. get ( ""cell"" ) if cell_cuts : cuts = [ cut_from_dict ( cut ) for cut in cell_cuts ] cell = Cell ( g. cube, cuts ) logger. info ( ""using cell from report specification (URL parameters "" ""are ignored)"" ) if workspace. authorizer : cell = workspace. authorizer. restricted_cell ( g. auth_identity, cube = g. cube, cell = cell ) else : if : cell = Cell ( g. cube ) else : cell = g. cell result = g. browser. report ( cell, queries ) return jsonify ( result )",False,not g.cell,cube_name,0.662129282951355
991,"def read_lccn ( line, is_marc8 = False ) : found = [ ] for k, v in get_raw_subfields ( line, [ ""a"" ] ) : lccn = v. strip ( ) if : continue m = re_lccn. search ( lccn ) if not m : continue lccn = re_letters_and_bad. sub ( """", m. group ( 1 ) ). strip ( ) if lccn : found. append ( lccn ) return found",False,re_question.match(lccn),is_marc8 and lccn,0.6495903730392456
992,"def set_shape ( self, shape ) : """"""Sets a shape."""""" if self. _shape is not None : logger. warning ( 'Modifying the shape of Placeholder ""%s"".', self. name ) if not isinstance ( shape, ( list, tuple ) ) : shape = ( shape, ) shape = tuple ( x if x!= ""None"" else None for x in shape ) for x in shape : if : raise ParsingError ( 'All entries in ""shape"" must be integers, or in special' ""cases None. Shape is: {}"". format ( shape ) ) self. _shape = shape",False,"not isinstance(x, (int, type(None)))",not (x == 'None' or x == 'cases'),0.6489138603210449
993,"def evaluateWord ( self, argument ) : wildcard_count = argument [ 0 ]. count ( ""*"" ) if wildcard_count > 0 : if wildcard_count == 1 and argument [ 0 ]. startswith ( ""*"" ) : return self. GetWordWildcard ( argument [ 0 ] [ 1 : ], method = ""endswith"" ) if : return self. GetWordWildcard ( argument [ 0 ] [ : - 1 ], method = ""startswith"" ) else : _regex = argument [ 0 ]. replace ( ""*"", "".+"" ) matched = False for w in self. words : matched = bool ( re. search ( _regex, w ) ) if matched : break return matched return self. GetWord ( argument [ 0 ] )",False,wildcard_count == 1 and argument[0].endswith('*'),wildcard_count == 1,0.6540383100509644
count ( ""*"" ) if wildcard_count > 0 : if wildcard_count == 1 and argument [ 0 ]. startswith ( ""*"" ) : return self. GetWordWildcard ( argument [ 0 ] [ 1 : ], method = ""endswith"" ) if : return self. GetWordWildcard ( argument [ 0 ] [ : - 1 ], method = ""startswith"" ) else : _regex = argument [ 0 ]. replace ( ""*"", "".+"" ) matched = False for w in self. words : matched = bool ( re. search ( _regex, w ) ) if matched : break return matched return self. GetWord ( argument [ 0 ] )",False,wildcard_count == 1 and argument[0].endswith('*'),wildcard_count == 1,0.6540383100509644 994,"def __init__ ( self, changes, useweeks ) : authordateinfo_list = sorted ( changes. get_authordateinfo_list ( ). items ( ) ) self. changes = changes self. entries = { } self. total_changes_by_period = { } self. useweeks = useweeks for i in authordateinfo_list : key = None if useweeks : yearweek = datetime. date ( int ( i [ 0 ] [ 0 ] [ 0 : 4 ] ), int ( i [ 0 ] [ 0 ] [ 5 : 7 ] ), int ( i [ 0 ] [ 0 ] [ 8 : 10 ] ) ). isocalendar ( ) key = ( i [ 0 ] [ 1 ], str ( yearweek [ 0 ] ) + ""W"" + ""{0:02d}"". format ( yearweek [ 1 ] ) ) else : key = ( i [ 0 ] [ 1 ], i [ 0 ] [ 0 ] [ 0 : 7 ] ) if self. entries. get ( key, None ) == None : self. entries [ key ] = i [ 1 ] else : self. entries [ key ]. insertions += i [ 1 ]. insertions self. entries [ key ]. deletions += i [ 1 ]. deletions for period in self. get_periods ( ) : total_insertions = 0 total_deletions = 0 for author in self. get_authors ( ) : entry = self. entries. get ( ( author [ 0 ], period ), None ) if : total_insertions",False,entry != None,entry,0.6711770296096802 995,"def update_ui ( self ) : self. _action_group. set_sensitive ( self. _window. get_active_document ( )!= None ) self. doc = self. _window. get_active_document ( ) if self. doc : self. view = self. _window. get_active_view ( ) self. view. connect ( ""key-press-event"", self. fold_off ) table = self. doc. get_tag_table ( ) self. fld = table. lookup ( ""fld"" ) if : self. fld = self. doc. create_tag ( ""fld"", foreground = ""#333333"", paragraph_background = ""#aadc5c"" ) self. inv = table. lookup ( ""inv"" ) if self. inv == None : self. inv = self. doc. create_tag ( ""inv"", invisible = True )",True,self.fld == None,self.fld == None,0.6581529974937439 996,"def schedule_logger ( job_id = None, delete = False ) : if not job_id : return getLogger ( ""fate_flow_schedule"" ) else : if delete : with LoggerFactory. lock : try : for key in LoggerFactory. schedule_logger_dict. keys ( ) : if : del LoggerFactory. schedule_logger_dict [ key ] except : pass return True key = job_id + ""schedule"" if key in LoggerFactory. schedule_logger_dict : return LoggerFactory. schedule_logger_dict [ key ] return LoggerFactory. get_schedule_logger ( job_id )",False,job_id in key,key in LoggerFactory.schedule_logger_dict,0.6663422584533691 997,"def layout_draw_solid_categories ( layout, node_details, sub_category ) : for node_info in node_details : if node_info [ 0 ] == ""separator"" : layout. separator ( ) continue if not node_info : print ( repr ( node_info ), ""is incomplete, or unparsable"" ) continue bl_idname = node_info [ 0 ] if : continue node_ref = get_node_class_reference ( bl_idname ) if hasattr ( node_ref, ""bl_label"" ) : layout_params = dict ( text = node_ref. bl_label, ** node_icon ( node_ref ) ) else : continue if ( hasattr ( node_ref, ""solid_catergory"" ) and node_ref. 
998,"def get_boxes ( self, generation, what ) : retval = """" if self. box_mode == ""UTF"" : space = "" "" elif self. box_mode == ""ASCII"" : space = "" "" space_len = len ( space ) + 2 for i in range ( generation + 1 ) : if : retval += space + ""|"" else : retval += space + "" "" if retval [ - 1 ] == "" "" : if what == ""sf"" : retval = retval [ : - space_len ] + ""/"" elif what == ""sm"" : retval = retval [ : - space_len ] + ""\\"" elif retval. endswith ( ""|"" + space + ""|"" ) : retval = retval [ : - space_len ] + ""+"" if self. box_mode == ""UTF"" : retval += ""-"" retval = retval. replace ( ""\\"", ""\u2514"" ) retval = retval. replace ( ""-"", ""\u2500"" ) retval = retval. replace ( ""|"", ""\u2502"" ) retval = retval. replace ( ""/"", ""\u250c"" ) elif self. box_mode == ""ASCII"" : retval += ""--"" return retval",False,self._boxes[i],i == 0,0.6571937203407288
999,"def do_query ( data, q ) : ret = [ ] if not q : return ret qkey = q [ 0 ] for key, value in iterate ( data ) : if len ( q ) == 1 : if key == qkey : ret. append ( value ) elif : ret. extend ( do_query ( value, q ) ) else : if not is_iterable ( value ) : continue if key == qkey : ret. extend ( do_query ( value, q [ 1 : ] ) ) else : ret. extend ( do_query ( value, q ) ) return ret",False,is_iterable(value),len(q) == 2,0.6517411470413208
1000,"def token_producer ( source ) : token = source. read_uint8 ( ) while token is not None : if : yield DataToken ( read_data ( token, source ) ) elif is_small_integer ( token ) : yield SmallIntegerToken ( read_small_integer ( token ) ) else : yield Token ( token ) token = source. read_uint8 ( )",False,is_push_data_token(token),is_data(token),0.6522794961929321
1001,"def _generate_toc ( self ) : """"""Generate key-to-(start, stop) table of contents."""""" starts, stops = [ ], [ ] last_was_empty = False self. _file. seek ( 0 ) while True : line_pos = self. _file. tell ( ) line = self. _file. readline ( ) if : if len ( stops ) < len ( starts ) : if last_was_empty : stops. append ( line_pos - len ( os. linesep ) ) else : stops. append ( line_pos ) starts. append ( line_pos ) last_was_empty = False elif not line : if last_was_empty : stops. append ( line_pos - len ( os. linesep ) ) else : stops. append ( line_pos ) break elif line == os. linesep : last_was_empty = True else : last_was_empty = False self. _toc = dict ( enumerate ( zip ( starts, stops ) ) ) self. _next_key = len ( self. _toc ) self. _file_length = self. _file. tell ( )",False,line.startswith('From '),line,0.6488137245178223
1002,"def __set__ ( self, obj, value ) : if ( value is not None and self. field. _currency_field. null and not isinstance ( value, MONEY_CLASSES + ( Decimal, ) ) ) : raise ValueError ( ""Missing currency value"" ) if isinstance ( value, BaseExpression ) : if isinstance ( value, Value ) : value = self. prepare_value ( obj, value. value ) elif : validate_money_expression ( obj, value ) prepare_expression ( value ) else : value = self. prepare_value ( obj, value ) obj. __dict__ [ self. field. name ] = value",False,"not isinstance(value, Func)","isinstance(value, MONEY_CLASSES)",0.6532878279685974
1003,"def _collect_peers_of_interest ( self, new_best_path ) : """"""Collect all peers that qualify for sharing a path with given RTs."""""" path_rts = new_best_path. get_rts ( ) qualified_peers = set ( self. _peers. values ( ) ) qualified_peers = self. _rt_manager. filter_by_origin_as ( new_best_path, qualified_peers ) if path_rts : path_rts. append ( RouteTargetMembershipNLRI. DEFAULT_RT ) qualified_peers = set ( self. _get_non_rtc_peers ( ) ) peer_to_rtfilter_map = self. _peer_to_rtfilter_map for peer, rt_filter in peer_to_rtfilter_map. items ( ) : if : continue if rt_filter is None : qualified_peers. add ( peer ) elif rt_filter. intersection ( path_rts ) : qualified_peers. add ( peer ) return qualified_peers",True,peer is None,peer is None,0.6669421195983887
1004,"def get_integration ( integration, * args, ** kwargs ) : """"""Return a integration instance specified by `integration` name"""""" klass = integration_cache. get ( integration, None ) if not klass : integration_filename = ""%s_integration"" % integration integration_module = None for app in settings. INSTALLED_APPS : try : integration_module = import_module ( "".integrations.%s"" % integration_filename, package = app ) break except ImportError : pass if : raise IntegrationModuleNotFound ( ""Missing integration: %s"" % ( integration ) ) integration_class_name = """". join ( integration_filename. title ( ). split ( ""_"" ) ) try : klass = getattr ( integration_module, integration_class_name ) except AttributeError : raise IntegrationNotConfigured ( ""Missing %s class in the integration module."" % integration_class_name ) integration_cache [ integration ] = klass return klass ( * args, ** kwargs )",False,not integration_module,integration_module is None,0.6701433658599854
1005,"def parse_rules ( self, content ) : end_fix = [ ] hosts = [ ] content = utils. to_bytes ( content ) content = content. replace ( b"","", b""\n"" ). replace ( b"";"", b""\n"" ) lines = content. split ( b""\n"" ) for line in lines : line = line. strip ( ) if not line : continue if b""="" in line : lp = line. split ( b""="" ) left = lp [ 0 ]. strip ( ) right = lp [ 1 ]. strip ( ) else : left = line right = None if : left = left [ 7 : ] if left. startswith ( b""https://"" ) : left = left [ 8 : ] if left. startswith ( b""*"" ) : left = left [ 1 : ] if b""/"" in left : p = left. find ( b""/"" ) host = left [ : p ] else : host = left if host. startswith ( b""."" ) : end_fix. append ( host ) elif host. startswith ( b""*."" ) : end_fix. append ( host [ 1 : ] ) else : hosts. append ( host ) return hosts, end_fix",False,left.startswith(b'http://'),right,0.645950436592102
1006,"def execute_and_fetch ( self, q ) : try : if self. show_sql : print ( repr ( q ) ) self. cursor. execute ( q ) if self. cursor. description is not None : if : query_column_names = [ unicode ( c [ 0 ], ""utf-8"" ) for c in self. cursor. description ] else : query_column_names = [ c [ 0 ] for c in self. cursor. description ] else : query_column_names = None result = self. cursor. fetchall ( ) finally : pass return Sqlite3DBResults ( query_column_names, result )",False,six.PY2,"isinstance(self.cursor.description, list)",0.6689985990524292
1007,"def set_xml_text_value ( self, target, xmltarget ) : if ""<"" in target : target = target. replace ( ""&"", ""&"" ) try : newstring = etree. fromstring ( ""%s"" % target ) except : target = target. replace ( ""<"", ""<"" ) newstring = etree. fromstring ( ""%s"" % target ) if newstring. text is None : xmltarget. text = """" else : xmltarget. text = newstring. text for x in xmltarget. iterchildren ( ) : xmltarget. remove ( x ) for x in newstring. iter ( ) : x. text = self. escape ( x. text, False ) if : x. prefix = self. escape ( x. prefix, False ) if x. tail is not None : x. tail = self. escape ( x. tail, False ) for x in newstring. iterchildren ( ) : xmltarget. append ( x ) else : xmltarget. text = self. escape ( target )",True,x.prefix is not None,x.prefix is not None,0.6522507071495056
1008,"def next_frame ( self ) : frame_index = None rate = self. rate time_base = self. time_base self. pts_seen = False for packet in self. file. demux ( self. stream ) : if packet. pts : self. pts_seen = True for frame in packet. decode ( ) : if frame_index is None : if self. pts_seen : pts = frame. pts else : pts = frame. dts if : frame_index = pts_to_frame ( pts, time_base, rate, self. start_time ) elif not frame_index is None : frame_index += 1 if not frame. dts in self. pts_map : secs = None if : secs = pts * time_base self. pts_map [ frame. dts ] = secs yield frame_index, frame",False,not pts is None,self.sec_frame is None,0.6681995391845703
1009,"def get_converter ( in_ext, out_ext = None, templ_ext = None ) : convert_candidates = None if templ_ext : if ( in_ext, templ_ext ) in converters : convert_candidates = converters [ ( in_ext, templ_ext ) ] else : raise UnsupportedConversionError ( in_ext, out_ext, templ_ext ) else : if : convert_candidates = converters [ in_ext ] elif ( in_ext, ) in converters : convert_candidates = converters [ ( in_ext, ) ] else : raise UnsupportedConversionError ( in_ext, out_ext ) convert_fn = None if not out_ext : out_ext, convert_fn = convert_candidates [ 0 ] else : for ext, func in convert_candidates : if ext == out_ext : convert_fn = func break if not convert_fn : raise UnsupportedConversionError ( in_ext, out_ext, templ_ext ) return convert_fn",False,in_ext in converters,in_ext,0.6615919470787048
1010,"def _apply_transformation ( self, image, interp_order = 3 ) : if interp_order < 0 : return image assert self. _rand_zoom is not None full_zoom = np. array ( self. _rand_zoom ) while len ( full_zoom ) < image. ndim : full_zoom = np. hstack ( ( full_zoom, [ 1.0 ] ) ) is_undersampling = all ( full_zoom [ : 3 ] < 1 ) run_antialiasing_filter = self. antialiasing and is_undersampling if : : sigma = self. _get_sigma ( full_zoom [ : 3 ] ) if image. ndim == 4 : output = [ ] for mod in range ( image. shape [ - 1 ] ) : to_scale = ( ndi. gaussian_filter ( image [..., mod ], sigma ) if : else image [..., mod ] ) scaled = ndi. zoom ( to_scale, full_zoom [ : 3 ], order = interp_order ) output. append ( scaled [..., np. newaxis ] ) return np. concatenate ( output, axis = - 1 ) elif image. ndim == 3 : to_scale = ( ndi. gaussian_filter ( image, sigma ) if : else image ) scaled = ndi. zoom ( to_scale, full_zoom [ : 3 ], order = interp_order ) return scaled [..., np. newaxis ] else : raise NotImplementedError ( ""not implemented random scaling"" )",True,run_antialiasing_filter,run_antialiasing_filter,0.6510010957717896
1011,"def _get_split_on_quotes ( self, line ) : doublequotesplits = line. split ( '""' ) quoted = False quotesplits = [ ] if len ( doublequotesplits ) > 1 and ""'"" in doublequotesplits [ 0 ] : singlequotesplits = doublequotesplits [ 0 ]. split ( ""'"" ) doublequotesplits = doublequotesplits [ 1 : ] while len ( singlequotesplits ) % 2 == 0 and len ( doublequotesplits ) : singlequotesplits [ - 1 ] += '""' + doublequotesplits [ 0 ] doublequotesplits = doublequotesplits [ 1 : ] if ""'"" in singlequotesplits [ - 1 ] : singlequotesplits = singlequotesplits [ : - 1 ] + singlequotesplits [ - 1 ]. split ( ""'"" ) quotesplits += singlequotesplits for doublequotesplit in doublequotesplits : if : quotesplits. append ( doublequotesplit ) else : quotesplits += doublequotesplit. split ( ""'"" ) quoted = not quoted return quotesplits",False,quoted,"doublequotesplit == ""'""",0.6908809542655945
append ( doublequotesplit ) else : quotesplits += doublequotesplit. split ( ""'"" ) quoted = not quoted return quotesplits",False,quoted,"doublequotesplit == ""'""",0.6908809542655945 1012,"def extended_noun_chunks ( sentence ) : noun_chunks = { ( np. start, np. end ) for np in sentence. noun_chunks } np_start, cur_np = 0, ""NONE"" for i, token in enumerate ( sentence ) : np_type = token. pos_ if token. pos_ in { ""NOUN"", ""PROPN"" } else ""NONE"" if : if cur_np!= ""NONE"" : noun_chunks. add ( ( np_start, i ) ) if np_type!= ""NONE"" : np_start = i cur_np = np_type if cur_np!= ""NONE"" : noun_chunks. add ( ( np_start, len ( sentence ) ) ) return [ sentence [ s : e ] for ( s, e ) in sorted ( noun_chunks ) ]",False,np_type != cur_np,np_type == 'NONE',0.6510747671127319 1013,"def _loop_writing ( self, f = None, data = None ) : try : assert f is self. _write_fut self. _write_fut = None self. _pending_write = 0 if : f. result ( ) if data is None : data = self. _buffer self. _buffer = None if not data : if self. _closing : self. _loop. call_soon ( self. _call_connection_lost, None ) if self. _eof_written : self. _sock. shutdown ( socket. SHUT_WR ) self. _maybe_resume_protocol ( ) else : self. _write_fut = self. _loop. _proactor. send ( self. _sock, data ) if not self. _write_fut. done ( ) : assert self. _pending_write == 0 self. _pending_write = len ( data ) self. _write_fut. add_done_callback ( self. _loop_writing ) self. _maybe_pause_protocol ( ) else : self. _write_fut. add_done_callback ( self. _loop_writing ) except",False,f,f is not None,0.6815915107727051 1014,"def find_pingback_urls ( self, urls ) : """"""Find the pingback urls of each urls"""""" pingback_urls = { } for url in urls : try : page = urlopen ( url ) headers = page. info ( ) if ""text/"" not in headers. get ( ""Content-Type"", """" ). lower ( ) : continue server_url = headers. get ( ""X-Pingback"" ) if not server_url : server_url = self. find_pingback_href ( page. read ( ) ) if : server_url_splitted = urlsplit ( server_url ) if not server_url_splitted. netloc : url_splitted = urlsplit ( url ) server_url = ""%s://%s%s"" % ( url_splitted. scheme, url_splitted. netloc, server_url, ) pingback_urls [ url ] = server_url except IOError : pass return pingback_urls",True,server_url,server_url,0.667713463306427 1015,"def _check_step ( self, step, step_num ) : """"""Don't try to run steps that include commands or use manifests."""""" super ( SparkMRJobRunner, self ). _check_step ( step, step_num ) if step. get ( ""input_manifest"" ) : raise NotImplementedError ( ""spark runner does not support input manifests"" ) if step [ ""type"" ] == ""streaming"" : if not self. _mrjob_cls : raise ValueError ( ""You must set mrjob_cls to run streaming steps"" ) for mrc in ( ""mapper"", ""combiner"", ""reducer"" ) : if : if ""command"" in step [ mrc ] or ""pre_filter"" in step [ mrc ] : raise NotImplementedError ( ""step %d's %s runs a command, but spark"" "" runner does not support commands"" % ( step_num, mrc ) )",False,step.get(mrc),mrc in step,0.6509639024734497 1016,"def read_http ( sock ) : fd = sock. makefile ( ""rb"" ) try : response_line = bytes_to_str ( fd. readline ( ). rstrip ( b""\r\n"" ) ) except socket. error as exc : if support. get_errno ( exc ) in ( 10053, 54 ) : raise ConnectionClosed raise if not response_line : raise ConnectionClosed ( response_line ) header_lines = [ ] while True : line = fd. readline ( ) if line == b""\r\n"" : break else : header_lines. 
append ( line ) headers_original = { } headers_lower = { } for x in header_lines : x = x. strip ( ) if : continue key, value = bytes_to_str ( x ). split ( "":"", 1 ) key = key. rstrip ( ) value = value. lstrip ( ) key_lower = key. lower ( ) assert key_lower not in headers_lower, ""header duplicated: {0}"". format ( key ) headers_original [ key ] = value headers_lower [ key_lower ] = value content_length_str = headers_lower. get ( CONTENT_LENGTH. lower ( ), """" ) if content_length_str : num = int ( content_length_str ) body = fd. read ( num ) else : body",True,not x,not x,0.6765691637992859 1017,"def migration_10 ( env ) : import datetime system_certificate = os. path. join ( env [ ""STORAGE_ROOT"" ], ""ssl/ssl_certificate.pem"" ) if not os. path. islink ( system_certificate ) : new_path = os. path. join ( env [ ""STORAGE_ROOT"" ], ""ssl"", env [ ""PRIMARY_HOSTNAME"" ] + ""-"" + datetime. datetime. now ( ). date ( ). isoformat ( ). replace ( ""-"", """" ) + "".pem"", ) print ( ""Renamed"", system_certificate, ""to"", new_path, ""and created a symlink for the original location."", ) shutil. move ( system_certificate, new_path ) os. symlink ( new_path, system_certificate ) for sslcert in glob. glob ( os. path. join ( env [ ""STORAGE_ROOT"" ], ""ssl/*/ssl_certificate.pem"" ) ) : d = os. path. dirname ( sslcert ) if : newname = os. path. join ( env [ ""STORAGE_ROOT"" ], ""ssl"", os. path. basename ( d ) + "".pem"" ) if not os. path. exists ( newname ) : <",False,len(os.listdir(d)) == 1,d,0.6512948274612427 1018,def _cleanup ( ) : for inst in _active [ : ] : res = inst. _internal_poll ( _deadstate = sys. maxint ) if : try : _active. remove ( inst ) except ValueError : pass,False,res is not None and res >= 0,res,0.6581555604934692 1019,"def _augment_maps_by_samples ( self, augmentables, arr_attr_name, ks ) : arrs = [ getattr ( map_i, arr_attr_name ) for map_i in augmentables ] arrs_aug = self. _augment_arrays_by_samples ( arrs, ks, self. keep_size, None ) maps_aug = [ ] gen = zip ( augmentables, arrs, arrs_aug, ks ) for augmentable_i, arr, arr_aug, k_i in gen : shape_orig = arr. shape setattr ( augmentable_i, arr_attr_name, arr_aug ) if : augmentable_i = augmentable_i. resize ( shape_orig [ 0 : 2 ] ) elif k_i % 2 == 1 : h, w = augmentable_i. shape [ 0 : 2 ] augmentable_i. shape = tuple ( [ w, h ] + list ( augmentable_i. shape [ 2 : ] ) ) else : pass maps_aug. append ( augmentable_i ) return maps_aug",False,self.keep_size,k_i % 2 == 1,0.6544825434684753 1020,"def _write_opf_spine ( self, root, ncx_id ) : spine_attributes = { ""toc"" : ncx_id or ""ncx"" } if self. book. direction and self. options [ ""spine_direction"" ] : spine_attributes [ ""page-progression-direction"" ] = self. book. direction spine = etree. SubElement ( root, ""spine"", spine_attributes ) for _item in self. book. spine : is_linear = True if isinstance ( _item, tuple ) : item = _item [ 0 ] if len ( _item ) > 1 : if _item [ 1 ] == ""no"" : is_linear = False else : item = _item if isinstance ( item, EpubHtml ) : opts = { ""idref"" : item. get_id ( ) } if not item. is_linear or not is_linear : opts [ ""linear"" ] = ""no"" elif isinstance ( item, EpubItem ) : opts = { ""idref"" : item. get_id ( ) } if not item. is_linear or not is_linear : opts [ ""linear"" ] = ""no"" else : opts = { ""idref"" : item } try : itm = self. book. get_item_with_id ( item ) state_handlers = { } for module in import_submodules ( galaxy. jobs. runners. 
state_handlers, ordered = True ) : for func in getattr ( module, ""__all__"", [ ] ) : if : state_handlers [ func ] = [ ] state_handlers [ func ]. append ( getattr ( module, func ) ) log. debug ( ""Loaded '%s' state handler from module %s"", func, module. __name__ ) return state_handlers",True,func not in state_handlers,func not in state_handlers,0.6623349189758301 1022,"def __init__ ( self, * args, ** kwargs ) : if kwargs. get ( ""bindAddress"", None ) is None : import socket if : raise ValueError ( ""Dynamic FCGI server not available on this platform. "" ""You must use a static or external one by providing a "" ""legal bindAddress."" ) self. args = args self. kwargs = kwargs self. ready = False",False,"not hasattr(socket, 'fromfd')",socket.get_terminal() is None,0.652373731136322 1023,"def ftp_login ( host, port, username = None, password = None, anonymous = False ) : ret = False try : ftp = ftplib. FTP ( ) ftp. connect ( host, port, timeout = 6 ) if : ftp. login ( ) else : ftp. login ( username, password ) ret = True ftp. quit ( ) except Exception : pass return ret",True,anonymous,anonymous,0.6977392435073853 1024,"def _parse_value ( value ) : """"""Internal helper for parsing configuration values into python values"""""" if isinstance ( value, bool ) : return ""true"" if value else ""false"" elif isinstance ( value, six. string_types ) : listparser = re. compile ( r""""""((?:[^,""']|""[^""]*""|'[^']*')+)"""""" ) value = value. strip ( ) if value. startswith ( ""["" ) and value. endswith ( ""]"" ) : return listparser. split ( value [ 1 : - 1 ] ) [ 1 : : 2 ] elif value. startswith ( ""("" ) and value. endswith ( "")"" ) : rval = { } for pair in listparser. split ( value [ 1 : - 1 ] ) [ 1 : : 2 ] : pair = pair. split ( ""="" ) if '""' in pair [ 1 ] : pair [ 1 ] = pair [ 1 ]. replace ( '""', """" ) if pair [ 1 ]. isdigit ( ) : rval [ pair [ 0 ] ] = int ( pair [ 1 ] ) elif pair [ 1 ] == ""true"" : rval [ pair [ 0 ] ] = True elif pair [ 1 ] == ""false"" : rval [ pair [ 0 ] ] = False else : rval [ pair [ 0 ] ] = pair [ 1 ] return rval else : ",False,value.isdigit(),value is None,0.6561739444732666 1025,"def _handle_whq ( self ) : self. assertToken ( self. token ( ), ""("" ) self. assertToken ( self. token ( ), ""["" ) ans_types = [ ] while self. token ( 0 )!= ""]"" : cat = self. token ( ) self. assertToken ( self. token ( ), "":"" ) if cat == ""des"" : ans_types. append ( self. token ( ) ) elif cat == ""num"" : ans_types. append ( ""number"" ) typ = self. token ( ) if : ans_types. append ( ""count"" ) else : ans_types. append ( typ ) else : ans_types. append ( self. token ( ) ) self. token ( ) self. assertToken ( self. token ( ), "","" ) d1 = self. parse_Expression ( None ) self. assertToken ( self. token ( ), "","" ) ref = self. parse_variable ( ) self. assertToken ( self. token ( ), "","" ) d2 = self. parse_Expression ( None ) self. assertToken ( self. token ( ), "")"" ) return lambda sent_index, word_indices : BoxerWhq ( self. discourse_id, sent_index, word_indices, ans_types, d1, ref, d2 )",False,typ == 'cou',typ == 'count',0.659706175327301 1026,"def dgl_mp_batchify_fn ( data ) : if isinstance ( data [ 0 ], tuple ) : data = zip ( * data ) return [ dgl_mp_batchify_fn ( i ) for i in data ] for dt in data : if dt is not None : if : return [ d for d in data if isinstance ( d, dgl. DGLGraph ) ] elif isinstance ( dt, nd. 
NDArray ) : pad = Pad ( axis = ( 1, 2 ), num_shards = 1, ret_length = False ) data_list = [ dt for dt in data if dt is not None ] return pad ( data_list )",True,"isinstance(dt, dgl.DGLGraph)","isinstance(dt, dgl.DGLGraph)",0.6500488519668579 1027,"def test_main ( self, c1 ) : for line in self : try : c1 = 6 except : if : try : c1 = 5 except : pass else : c1 = 1 continue pass",False,c1,c1 == 6,0.6797674894332886 1028,"def _parse ( self, engine ) : """"""Parse the layer."""""" if isinstance ( self. args, dict ) : if ""axis"" in self. args : self. axis = engine. evaluate ( self. args [ ""axis"" ], recursive = True ) if : raise ParsingError ( '""axis"" must be an integer.' ) if ""momentum"" in self. args : self. momentum = engine. evaluate ( self. args [ ""momentum"" ], recursive = True ) if not isinstance ( self. momentum, ( int, float ) ) : raise ParsingError ( '""momentum"" must be numeric.' )",False,"not isinstance(self.axis, int)","not isinstance(self.axis, (int, float))",0.6546884775161743 1029,"def build ( self, settlement_manager, resource_id ) : village_builder = settlement_manager. village_builder building_purpose = self. get_purpose ( resource_id ) building_id = BUILDING_PURPOSE. get_building ( building_purpose ) building_class = Entities. buildings [ building_id ] for coords, ( purpose, ( section, _ ) ) in village_builder. plan. items ( ) : if : continue object = village_builder. land_manager. island. ground_map [ coords ]. object if object is not None and object. id == self. id : continue if building_purpose!= BUILDING_PURPOSE. MAIN_SQUARE : if not self. _need_producer ( settlement_manager, coords, resource_id ) : continue if not village_builder. have_resources ( building_id ) : return ( BUILD_RESULT. NEED_RESOURCES, None ) if ( coords not in village_builder. settlement. buildability_cache. cache [ building_class. size ] ) : position = Rect. init_from_topleft_and_size_tuples ( coords, building_class. size ) return ( BUILD_RESULT. OUT_OF_SETTLEMENT, position ) building = BasicBuilder ( building_id, coords, 0 ). execute ( settlement_manager. land_manager <",False,section > village_builder.current_section or purpose != building_purpose,self.id is None,0.6574970483779907 1030,"def valid_fieldnames ( fieldnames ) : """"""check if fieldnames are valid"""""" for fieldname in fieldnames : if : return True elif fieldname in fieldname_map and fieldname_map [ fieldname ] == ""source"" : return True return False",False,fieldname in canonical_field_names and fieldname == 'source',fieldname == 'password',0.6545164585113525 1031,"def _parse_top_intents ( self, text, top_n, intents = None ) : if isinstance ( intents, str ) : intents = { intents } elif isinstance ( intents, list ) : intents = set ( intents ) if top_n < 1 : raise ValueError ( ""top_n argument must be greater or equal to 1, but got: %s"" % top_n ) results_per_intent = defaultdict ( list ) for text_candidate, entities in self. _get_candidates ( text, intents ) : val = self. _map. get ( hash_str ( text_candidate ) ) if : result = self. _parse_map_output ( text, val, entities, intents ) if result : intent_name = result [ RES_INTENT ] [ RES_INTENT_NAME ] results_per_intent [ intent_name ]. append ( result ) results = [ ] for intent_results in itervalues ( results_per_intent ) : sorted_results = sorted ( intent_results, key = lambda res : len ( res [ RES_SLOTS ] ) ) results. 
append ( sorted_results [ 0 ] ) weights = [ 1.0 / ( 1.0 + len ( res [ RES_SLOTS ] ) ) for res in results ] total_weight = sum ( weights ) for res, weight in zip ( results, weights ) : res [ RES_INTENT ] [ RES_PROBA ] = weight / total_weight results = sorted ( results, key = lambda r : - r [ RES_INTENT ] [ RES_PROBA ] ) return results [ : top_n ]",False,val is not None,val,0.6625601053237915 1032,"def middleware ( self, request, handler ) : try : overrides = { } headers = request. headers forwarded_for = self. get_forwarded_for ( headers ) if forwarded_for : overrides [ ""remote"" ] = str ( forwarded_for [ - self. _num ] ) proto = self. get_forwarded_proto ( headers ) if proto : overrides [ ""scheme"" ] = proto [ - self. _num ] host = self. get_forwarded_host ( headers ) if : overrides [ ""host"" ] = host prefix = self. get_forwarded_path ( headers ) if prefix is not None : prefix = ""/"" + prefix. strip ( ""/"" ) + ""/"" request_path = URL ( request. path. lstrip ( ""/"" ) ) overrides [ ""rel_url"" ] = URL ( prefix ). join ( request_path ) request = request. clone ( ** overrides ) return await handler ( request ) except RemoteError as exc : exc. log ( request ) await self. raise_error ( request )",False,host is not None,host,0.6642715930938721 1033,"def _as_key_indices ( keys, key_names ) : key_names = _as_tuple ( key_names ) keys = _bool_to_indices ( _as_tuple ( keys ), len ( key_names ) ) for key in keys : if : key_index = key if key_index < 0 : key_index += len ( key_names ) if key_index not in range ( 0, len ( key_names ) ) : raise IndexError ( ""index {} is out of bounds for keys with size {}"". format ( key, len ( key_names ) ) ) else : try : key_index = key_names. index ( key ) except ValueError : raise KeyError ( ""{} does not exists"". format ( key ) ) yield key_index",False,"isinstance(key, numbers.Integral)",_is_tab_char(key),0.6479559540748596 1034,"def showUserList ( self, currentUser, rooms ) : for room in rooms : message = ""In room '{}':"". format ( room ) self. showMessage ( message, True ) for user in rooms [ room ] : userflags = """" if user. isController ( ) : userflags += ""({}) "". format ( getMessage ( ""controller-userlist-userflag"" ) ) if user. isReady ( ) : userflags += ""({}) "". format ( getMessage ( ""ready-userlist-userflag"" ) ) username = ( userflags + ""*<{}>*"". format ( user. username ) if user == currentUser else userflags + ""<{}>"". format ( user. username ) ) if : message = getMessage ( ""userlist-playing-notification"" ). format ( username ) self. showMessage ( message, True ) message = "" {}: '{}' ({})"". format ( getMessage ( ""userlist-file-notification"" ), user. file [ ""name"" ], formatTime ( user. file [ ""duration"" ] ), ) if currentUser. file : if ( user. file [",False,user.file,user.isPLAYING(),0.6592985391616821 1035,"def callback ( lexer, match, context ) : text = match. group ( ) extra = """" if start : context. next_indent = len ( text ) if : while context. next_indent < context. indent : context. indent = context. indent_stack. pop ( ) if context. next_indent > context. indent : extra = text [ context. indent : ] text = text [ : context. indent ] else : context. next_indent += len ( text ) if text : yield match. start ( ), TokenClass, text if extra : yield match. start ( ) + len ( text ), TokenClass. Error, extra context. pos = match. end ( )",False,context.next_indent < context.indent,len(text) > context.next_indent,0.6498527526855469 1036,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if : self. 
set_name_space ( d. getPrefixedString ( ) ) continue if tt == 18 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. add_item ( ). TryMerge ( tmp ) continue if tt == 26 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. mutable_override ( ). TryMerge ( tmp ) continue if tt == 0 : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 10,tt == 33554432,0.6911654472351074 1037,"def save_labels ( dataset_list, output_dir ) : if is_main_process ( ) : logger = logging. getLogger ( __name__ ) ids_to_labels = { } for dataset in dataset_list : if hasattr ( dataset, ""categories"" ) : ids_to_labels. update ( dataset. categories ) else : logger. warning ( ""Dataset [{}] has no categories attribute, labels.json file won't be created"". format ( dataset. __class__. __name__ ) ) if : labels_file = os. path. join ( output_dir, ""labels.json"" ) logger. info ( ""Saving labels mapping into {}"". format ( labels_file ) ) with open ( labels_file, ""w"" ) as f : json. dump ( ids_to_labels, f, indent = 2 )",False,ids_to_labels,output_dir is not None,0.6549862623214722 1038,"def _feed ( self, end ) : if self. _current_size < end : if self. checked : raise ReadStreamError ( end - self. _size, self. _size ) a, fa, fs = self. fragments [ - 1 ] while self. stream. sizeGe ( fa + min ( fs, end - a ) ) : a += fs f = self. next if a >= end : self. _current_size = end if a == end and not f : self. _setSize ( ) return False if f : self. next = f. next f = f. getData ( ) if : self. _current_size = a self. _setSize ( ) return True fa = f. absolute_address fs = f. size self. fragments += [ ( a, fa, fs ) ] self. _current_size = a + max ( 0, self. stream. size - fa ) self. _setSize ( ) return True return False",False,not f,a > end,0.6777458786964417 1039,"def check_duplicates ( self, message ) : guild = message. guild author = message. author guild_cache = self. cache. get ( guild. id, None ) if guild_cache is None : repeats = await self. config. guild ( guild ). delete_repeats ( ) if : return False guild_cache = self. cache [ guild. id ] = defaultdict ( lambda : deque ( maxlen = repeats ) ) if not message. content : return False guild_cache [ author ]. append ( message. content ) msgs = guild_cache [ author ] if len ( msgs ) == msgs. maxlen and len ( set ( msgs ) ) == 1 : try : await message. delete ( ) return True except discord. HTTPException : pass return False",False,repeats == -1,guild.id not in self.cache,0.6627511382102966 1040,"def extract ( self ) : self. set2 [ ""total"" ] = [ 0, 0 ] for line in self. fd [ 0 ]. readlines ( ) : l = line. replace ( "" /"", ""/"" ). split ( ) if len ( l )!= 12 : continue if : continue if ( "","". join ( l ) == ""Name,Mtu/TSO,Network,Address,Ipkts,Ierrs,Ibytes,Opkts,Oerrs,Obytes,Coll,Time"" ) : continue if l [ 0 ] == ""Usage:"" : continue name = l [ 0 ] if name in self. vars : self. set2 [ name ] = ( int ( l [ 6 ] ), int ( l [ 9 ] ) ) if name!= ""lo0"" : self. set2 [ ""total"" ] = ( self. set2 [ ""total"" ] [ 0 ] + int ( l [ 6 ] ), self. set2 [ ""total"" ] [ 1 ] + int ( l [ 9 ] ), ) if update : for name in self. set2 : self. val [ name ] = list ( map ( lambda x, y : ( y - x ) * 1.0 / elapsed, self. set1 [ name ], pyfalog. debug ( ""Doing change of module charges according to map {} on fit {}"". format ( self. chargeMap, self. fitID ) ) sFit = Fit. getInstance ( ) fit = sFit. getFit ( self. 
fitID ) container = fit. modules if not self. projected else fit. projectedModules changes = False self. savedChargeMap = { } sMkt = Market. getInstance ( ) for position, chargeItemID in self. chargeMap. items ( ) : mod = container [ position ] if : continue if mod. chargeID is None and chargeItemID is None : continue if mod. chargeID == chargeItemID : continue chargeItem = sMkt. getItem ( chargeItemID ) if chargeItemID is not None else None if chargeItem is not None and not chargeItem. isCharge : continue if not self. ignoreRestriction and not mod. isValidCharge ( chargeItem ) : pyfalog. warning ( ""Invalid charge {} for {}"". format ( chargeItem, mod ) ) continue pyfalog. debug ( ""Setting charge {} for {} on fit {}"". format ( chargeItem, mod, self. fitID ) ) self. savedChargeMap [ position ] = mod. chargeID changes = True mod. charge = chargeItem if not changes : return False if self. recalc : sFit. recalc ( fit ) self. savedStateCheckChanges",False,mod.isEmpty,not mod,0.6595723628997803 1042,"def _prepare_fc_map ( self, fc_map_id, timeout, restore ) : self. ssh. prestartfcmap ( fc_map_id, restore ) mapping_ready = False max_retries = ( timeout // self. WAIT_TIME ) + 1 for try_number in range ( 1, max_retries ) : mapping_attrs = self. _get_flashcopy_mapping_attributes ( fc_map_id ) if mapping_attrs is None or ""status"" not in mapping_attrs : break if mapping_attrs [ ""status"" ] == ""prepared"" : mapping_ready = True break elif mapping_attrs [ ""status"" ] == ""stopped"" : self. ssh. prestartfcmap ( fc_map_id, restore ) elif : msg = _ ( ""Unexecpted mapping status %(status)s for mapping "" ""%(id)s. Attributes: %(attr)s."" ) % { ""status"" : mapping_attrs [ ""status"" ], ""id"" : fc_map_id, ""attr"" : mapping_attrs, } LOG. error ( msg ) raise exception. VolumeBackendAPIException ( data = msg ) greenthread. sleep ( self. WAIT_TIME ) if not mapping_ready : msg = _ ( ""Mapping %(id)s prepare failed to complete within the "" ""allotted %(to)d seconds",False,mapping_attrs['status'] != 'preparing',restore and fc_map_id and (fc_map_id[0] == 'id' or restore),0.6515494585037231 1043,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. message = iprot. readString ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRING,message is not None,0.658729076385498 1044,"def __call__ ( self, input_tensor, index, name ) : inputs = [ input_tensor ] if index is not None : if not isinstance ( index, pd. Index ) : if isinstance ( index, INDEX_TYPE ) : self. _index = index index_value = index. index_value inputs. append ( index ) elif : self. _index = index index = astensor ( index ) if index. ndim!= 1 : raise ValueError ( f""index should be 1-d, got {index.ndim}-d"" ) index_value = parse_index ( pd. Index ( [ ], dtype = index. dtype ), index, type ( self ). __name__ ) inputs. append ( index ) else : index = pd. Index ( index ) index_value = parse_index ( index, store_data = True ) else : index_value = parse_index ( index, store_data = True ) else : index_value = parse_index ( pd. 
RangeIndex ( start = 0, stop = input_tensor. shape [ 0 ] ) ) return self. new_series ( inputs, shape = input_tensor. shape, dtype = self. dtype, index_value = index",False,"isinstance(index, (Base, Entity))","isinstance(index, pd.Series)",0.6572250723838806 1045,"def redirect ( self ) : c = self. c if c. config. getBool ( ""eval-redirect"" ) : self. old_stderr = g. stdErrIsRedirected ( ) self. old_stdout = g. stdOutIsRedirected ( ) if : g. redirectStderr ( ) if not self. old_stdout : g. redirectStdout ( )",True,not self.old_stderr,not self.old_stderr,0.6628884077072144 1046,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. filesAdded = [ ] ( _etype451, _size448 ) = iprot. readListBegin ( ) for _i452 in xrange ( _size448 ) : _elem453 = iprot. readString ( ) self. filesAdded. append ( _elem453 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.LIST,fid == 2,0.660045862197876 1047,"def persist ( self, * _ ) : for key, obj in self. _objects. items ( ) : try : state = obj. get_state ( ) if not state : continue md5 = hashlib. md5 ( state ). hexdigest ( ) if : continue self. _persist_provider. store ( key, state ) except Exception as e : system_log. exception ( ""PersistHelper.persist fail"" ) else : self. _last_state [ key ] = md5",False,self._last_state.get(key) == md5,self._persist_provider is None or md5 < TAB > self.get_state(key),0.6504796743392944 1048,"def _joinrealpath ( path, rest, seen ) : if isabs ( rest ) : rest = rest [ 1 : ] path = sep while rest : name, _, rest = rest. partition ( sep ) if not name or name == curdir : continue if name == pardir : if path : path, name = split ( path ) if name == pardir : path = join ( path, pardir, pardir ) else : path = pardir continue newpath = join ( path, name ) if not islink ( newpath ) : path = newpath continue if newpath in seen : path = seen [ newpath ] if path is not None : continue return join ( newpath, rest ), False seen [ newpath ] = None path, ok = _joinrealpath ( path, os. readlink ( newpath ), seen ) if : return join ( path, rest ), False seen [ newpath ] = path return path, True",False,not ok,ok,0.6789413094520569 1049,"def _find_children ( node, test_func ) : result = [ ] if node in visited : return result else : visited. add ( node ) for field, value in base. iter_fields ( node, include_meta = False ) : field_spec = node. _fields [ field ] if isinstance ( value, ( list, set, frozenset ) ) : for n in value : if : continue try : if not field_spec. hidden and test_func ( n, * args, ** kwargs ) : result. append ( n ) if terminate_early : return result except SkipNode : continue if field_spec. child_traverse or force_traversal : _n = _find_children ( n, test_func ) if _n is not None : result. extend ( _n ) if terminate_early : return result elif base. is_ast_node ( value ) : try : if not field_spec. hidden and",False,not base.is_ast_node(n),n is None,0.6484529972076416 1050,"def get_meta_data ( self, doc ) : data = defaultdict ( dict ) properties = self. parser. 
css_select ( doc, ""meta"" ) for prop in properties : key = prop. attrib. get ( ""property"" ) or prop. attrib. get ( ""name"" ) value = prop. attrib. get ( ""content"" ) or prop. attrib. get ( ""value"" ) if not key or not value : continue key, value = key. strip ( ), value. strip ( ) if value. isdigit ( ) : value = int ( value ) if "":"" not in key : data [ key ] = value continue key = key. split ( "":"" ) key_head = key. pop ( 0 ) ref = data [ key_head ] if isinstance ( ref, str ) : data [ key_head ] = { key_head : ref } ref = data [ key_head ] for idx, part in enumerate ( key ) : if idx == len ( key ) - 1 : ref [ part ] = value break if : ref [ part ] = dict ( ) elif isinstance ( ref. get ( part ), str ) : ref [ part ] = { ""identifier"" : ref [ part ] } ref = ref [ part ] return data",False,not ref.get(part),part is None,0.6537399291992188 1051,"def check_no_overlapping_paths ( paths : Sequence [ str ] ) -> None : """"""Given a list of paths, ensure that all are unique and do not have the same prefix."""""" for path in paths : list_copy_without_path = list ( paths ) list_copy_without_path. remove ( path ) if path in list_copy_without_path : raise ValueError ( ""{} appeared more than once. All paths must be unique."". format ( path ) ) for p in list_copy_without_path : if : raise ValueError ( ""{} and {} have the same prefix. All paths must be unique and cannot overlap."". format ( path, p ) )",False,path in p,p in list_copy_with_path and (not p in list_copy_without_path),0.6779139041900635 1052,"def ensure_slice ( x ) : """"""Try to convert an object into a slice, complain on failure"""""" if not x and x!= 0 : return slice ( None ) elif is_slice ( x ) : return x try : x = int ( x ) if x!= - 1 : s = slice ( x, x + 1 ) else : s = slice ( - 1, None, None ) except ValueError : x = x. strip ( ""[]()"" ) m = SLICE_REG. fullmatch ( x ) if : groups = ( int ( i ) if i else None for i in m. groups ( ) ) s = slice ( * groups ) else : raise ValueError ( ""cannot convert {!r} to slice"". format ( x ) ) except TypeError : try : s = slice ( * ( int ( i ) for i in x ) ) except ( TypeError, ValueError ) : raise ValueError ( ""cannot convert {!r} to slice"". format ( x ) ) return s",True,m,m,0.6932783126831055 1053,"def flickr_download_main ( url, output_dir = ""."", info_only = False, ** kwargs ) : urls = None size = ""o"" title = None if ""stream_id"" in kwargs : size = kwargs [ ""stream_id"" ] if match1 ( url, pattern_url_single_photo ) : url, title = get_single_photo_url ( url ) urls = [ url ] else : urls, title = fetch_photo_url_list ( url, size ) index = 1 for url in urls : mime, ext, size = url_info ( url ) print_info ( site_info, title, mime, size ) if : title = ""{} {}"". format ( title, index ) download_urls ( [ url ], title, ext, size, output_dir, ** kwargs ) index += 1",False,not info_only,info_only,0.6536680459976196 1054,"def generate_breadcrumb ( self ) : if getattr ( self, ""ci"" ) : parent_id = self. ci. id else : return [ ] breadcrumbs = [ ] counter = 0 while parent_id and counter < 100 : try : ci = db. CI. objects. filter ( id = parent_id ). all ( ) [ 0 ] except IndexError : break breadcrumbs. insert ( 0, ci ) try : parent_id = db. CI. objects. filter ( parent__child = parent_id ). all ( ) [ 0 ]. id except IndexError : parent_id = None if : parent_id = None counter += 1 return breadcrumbs",False,parent_id == ci.id,parent_id and counter >= 100,0.6662282943725586 1055,"def test_mode ( self ) -> None : if not has_stat : return dir = self. do_create ( ) try : mode = stat. S_IMODE ( os. 
stat ( dir ). st_mode ) mode &= 0o777 expected = 0o700 if : user = expected >> 6 expected = user * ( 1 + 8 + 64 ) self. assertEqual ( mode, expected ) finally : os. rmdir ( dir )",False,"sys.platform in ('win32', 'os2emx')",mode & 128,0.6534214019775391 1056,"def compose ( self, keys = None ) : composes = [ ] explored = set ( ) keys = set ( keys or [ ] ) graph = self. _graph for v in graph. topological_iter ( ) : if type ( v. op ) not in CP_OP : continue if v in explored : continue if : continue if v. key in keys : continue selected = [ v ] cur_node = graph. successors ( v ) [ 0 ] while ( graph. count_predecessors ( cur_node ) == 1 and type ( cur_node. op ) in CP_OP and cur_node. key not in keys ) : selected. append ( cur_node ) if graph. count_successors ( cur_node )!= 1 : break else : cur_node = graph. successors ( cur_node ) [ 0 ] if len ( selected ) > 1 : explored. update ( selected ) composes. append ( list ( selected ) ) return self. _compose_graph ( composes )",False,graph.count_successors(v) != 1,v.op == CP_OP,0.6556057333946228 1057,"def optimize ( self, graph : Graph ) -> Tuple [ Graph, bool ] : flag_changed = False matches = search_sub_structure ( graph, [ self. pattern [ 0 ], Variable, self. pattern [ 1 ] ] ) while len ( matches ) > 0 : op1, v1, op2 = matches. pop ( ) if len ( v1. input_to ) > 1 : continue if : flag_changed = True matches = search_sub_structure ( graph, [ self. pattern [ 0 ], Variable, self. pattern [ 1 ] ] ) return graph, flag_changed",False,"self.optimize_pair(graph, op1, op2)",op1.input_to == op2 and op1.input_to == op2,0.6514405012130737 1058,"def close ( self ) : """"""Close the window and uninitialize the resources"""""" if self. winHandle. context is None : return if self. _origGammaRamp is not None : self. gammaRamp = self. _origGammaRamp _hw_handle = None try : _hw_handle = self. win. _hw_handle self. winHandle. close ( ) except Exception : pass try : if : from psychopy. iohub. client import ioHubConnection conn = ioHubConnection. ACTIVE_CONNECTION conn. unregisterWindowHandles ( _hw_handle ) except Exception : pass",False,window.IOHUB_ACTIVE and _hw_handle,"hasattr(self.win, 'ACTIVE_CONNECTION')",0.6594565510749817 1059,"def f ( view, s ) : if mode == modes. INTERNAL_NORMAL : if count == 1 : if : eol = view. line ( s. b ). b return R ( s. b, eol ) return s return s",False,view.line(s.b).size() > 0,view.line is not None and s.b is not None,0.6558170318603516 1060,"def __init__ ( self, weight = None, pos_weight = None, ignore_index = 255, edge_label = False ) : super ( ). __init__ ( ) self. weight = weight self. pos_weight = pos_weight self. ignore_index = ignore_index self. edge_label = edge_label self. EPS = 1e-10 if self. weight is not None : if : if self. weight!= ""dynamic"" : raise ValueError ( ""if type of `weight` is str, it should equal to 'dynamic', but it is {}"". format ( self. weight ) ) elif isinstance ( self. weight, paddle. VarBase ) : raise TypeError ( ""The type of `weight` is wrong, it should be Tensor or str, but it is {}"". format ( type ( self. weight ) ) ) if self. pos_weight is not None : if isinstance ( self. pos_weight, str ) : if self. pos_weight!= ""dynamic"" : raise ValueError ( ""if type of `pos_weight` is str, it should equal to 'dynamic', but it is {}"". format ( self. pos_weight ) """"""Trash expired sessions."""""" now = datetime. datetime. now ( ) for item in self. get ( ) : last_visit = item. last_visit_default ( ) try : session = item. get ( ) if : if session. auth. expiration and not self. force : self. 
expiration = session. auth. expiration if session. auth. last_visit : last_visit = session. auth. last_visit except : pass age = 0 if last_visit : age = total_seconds ( now - last_visit ) if age > self. expiration or not self. expiration : item. delete ( ) status = ""trashed"" else : status = ""OK"" if self. verbose > 1 : print ( ""key: %s"" % item ) print ( ""expiration: %s seconds"" % self. expiration ) print ( ""last visit: %s"" % last_visit ) print ( ""age: %s seconds"" % age ) print ( ""status: %s"" % status ) print ( """" ) elif self. verbose > 0 : print ( ""%s %s"" % ( item, status ) )",False,session.auth,self.force,0.665627658367157 1062,"def setup ( self, stage ) : if self. queue_length > 0 : queue_folder = os. path. join ( self. logger. log_dir, self. queue_path ) if not os. path. exists ( queue_folder ) : os. makedirs ( queue_folder ) self. queue_path = os. path. join ( queue_folder, ""queue"" + str ( self. trainer. global_rank ) + "".pth"" ) if : self. queue = torch. load ( self. queue_path ) [ ""queue"" ]",False,os.path.isfile(self.queue_path),stage == 'load',0.6441220045089722 1063,"def convert_to_bytes ( self, BitRateString ) : bit_rate_bytes = 0 s = BitRateString. lower ( ). split ( "" "" ) measurement = ""kb"" try : if len ( s ) >= 2 : raw_number_string = s [ 0 ] raw_measurement = s [ 1 ] raw_number = locale. atof ( raw_number_string ) if ""kb"" in raw_measurement : measurement = ""kb"" bit_rate_bytes = raw_number * 1000.0 elif ""mb"" in raw_measurement : measurement = ""mb"" bit_rate_bytes = raw_number * 1000.0 * 1000.0 elif : measurement = ""crf"" if raw_number > 63 : raw_number = 63 if raw_number < 0 : raw_number = 0 bit_rate_bytes = raw_number except : pass return str ( int ( bit_rate_bytes ) )",True,'crf' in raw_measurement,'crf' in raw_measurement,0.6572733521461487 1064,"def edit ( self ) : trace = False and not g. unitTesting if : g. trace ( ""===== (MultiLine:%s)"" % self. __class__. __name__ ) self. editing = True self. how_exited = None self. display ( ) while self. editing : if : g. trace ( ""(MultiLine:%s) LOOP"" % self. __class__. __name__ ) self. get_and_use_key_press ( ) self. update ( clear = None ) self. parent. refresh ( ) if : g. trace ( ""(MultiLine:%s) DONE"" % self. __class__. __name__ )",False,trace,self.how_exited,0.6919523477554321 1065,"def subscribe ( self, channel, timeout = None ) : event = threading. Event ( ) self. _channels [ channel ] [ ""subs"" ]. add ( event ) try : try : cursor = self. _channels [ channel ] [ ""msgs"" ] [ - 1 ] [ 0 ] except IndexError : cursor = None while True : if not cursor : cursor_found = True else : cursor_found = False if not event. wait ( timeout ) : break event. clear ( ) messages = copy. copy ( self. _channels [ channel ] [ ""msgs"" ] ) for message in messages : if : yield message [ 1 ] elif message [ 0 ] == cursor : cursor_found = True if not cursor_found : for message in messages : yield message [ 1 ] try : cursor = messages [ - 1 ] [ 0 ] except IndexError : cursor = None finally : try : self. _channels [ channel ] [ ""subs"" ]. remove ( event ) except KeyError : pass",False,cursor_found,cursor and cursor_found,0.6654545068740845 1066,"def test_didl_object_inheritance ( ) : """"""Test that DIDL object inheritance is as indicated by the didl class"""""" class_dict = data_structures. _DIDL_CLASS_TO_CLASS. copy ( ) class_dict [ ""object"" ] = data_structures. DidlObject for didl_class, soco_class in data_structures. _DIDL_CLASS_TO_CLASS. 
items ( ) : if : continue if didl_class == ""object"" : continue assert didl_class == soco_class. item_class base_didl_class = ""."". join ( didl_class. split ( ""."" ) [ : - 1 ] ) base_class = data_structures. _DIDL_CLASS_TO_CLASS [ base_didl_class ] assert base_class == soco_class. __bases__ [ 0 ]",False,didl_class == 'object.itemobject.item.sonos-favorite',didl_class == 'class',0.6502702236175537 1067,"def process_lib ( vars_, coreval ) : for d in vars_ : var = d. upper ( ) if : continue value = env [ ""LIBPATH_"" + var ] if value : core = env [ coreval ] accu = [ ] for lib in value : if lib in core : continue accu. append ( lib ) env [ ""LIBPATH_"" + var ] = accu",False,var == 'QTCORE',var not in env,0.6675856113433838 1068,"def generateData ( evaluator, hof1, hof2, symmetric = True ) : assert len ( hof1 ) == len ( hof2 ) gens = len ( hof1 ) res = zeros ( ( gens, gens ) ) for g1, ind1 in enumerate ( hof1 ) : for g2, ind2 in enumerate ( hof2 [ : g1 + 1 ] ) : res [ g1, g2 ] = evaluator ( ind1, ind2 ) if symmetric : res [ g2, g1 ] = res [ g1, g2 ] elif : res [ g1, g2 ] += evaluator ( ind2, ind1 ) else : res [ g2, g1 ] = evaluator ( ind2, ind1 ) return res",False,g1 == g2,ind1 > 0,0.6651709079742432 1069,"def main ( ) : parser = argparse. ArgumentParser ( description = ""Output k-mer abundance distribution."" ) parser. add_argument ( ""hashname"" ) parser. add_argument ( ""seqfile"" ) parser. add_argument ( ""histout"" ) args = parser. parse_args ( ) hashfile = args. hashname seqfile = args. seqfile histout = args. histout outfp = open ( histout, ""w"" ) print ( ""hashtable from"", hashfile ) ht = khmer. load_countgraph ( hashfile ) hist = { } for i in range ( 65536 ) : hist [ i ] = 0 for n, record in enumerate ( screed. open ( seqfile ) ) : if : print ( ""..."", n ) seq = record. sequence. replace ( ""N"", ""A"" ) try : med, _, _ = ht. get_median_count ( seq ) except ValueError : continue hist [ med ] = hist [ med ] + 1 histlist = list ( hist. items ( ) ) histlist. sort ( ) maxk = max ( hist. keys ( ) ) sumk = sum ( hist. values ( ) ) sofar = 0 for n, m in histlist : sofar += m percent = float ( sofar ) / sumk outfp. write ( ""%d %d %d %.3f\n"" % ( n, m, sofar, percent ) ) outfp. close ( )",False,n > 0 and n % 100000 == 0,n % 2 != 0,0.6720070838928223 1070,"def _create_examples ( self, lines, set_type ) : """"""Creates examples for the training and dev sets."""""" examples = [ ] for ( i, line ) in enumerate ( lines ) : if set_type == ""test"" and i == 0 : continue guid = ""%s-%s"" % ( set_type, i ) if : text_a = tokenization. convert_to_unicode ( line [ 1 ] ) label = ""0"" else : text_a = tokenization. convert_to_unicode ( line [ 3 ] ) label = tokenization. convert_to_unicode ( line [ 1 ] ) examples. append ( InputExample ( guid = guid, text_a = text_a, text_b = None, label = label ) ) return examples",False,set_type == 'test',line[0] == 0,0.6525334119796753 1071,"def assertFormFieldValueTransformCorrect ( self, form_field, expected, read_results = None ) : if expected is None : return field_type = expected. type if field_type == ""string"" : self. assertEqual ( form_field. value, expected. value_string ) if field_type == ""number"" : self. assertEqual ( form_field. value, expected. value_number ) if field_type == ""integer"" : self. assertEqual ( form_field. value, expected. value_integer ) if field_type == ""date"" : self. assertEqual ( form_field. value, expected. value_date ) if field_type == ""phoneNumber"" : self. assertEqual ( form_field. value, expected. 
value_phone_number ) if field_type == ""time"" : self. assertEqual ( form_field. value, expected. value_time ) if field_type == ""array"" : for i in range ( len ( expected. value_array ) ) : self. assertFormFieldValueTransformCorrect ( form_field. value [ i ], expected. value_array [ i ], read_results ) if field_type == ""object"" : self. assertFormFieldsTransformCorrect ( form_field. value, expected. value_object, read_results ) if field_type not in [ ""array"", ""object"" ] and form_field. value_data : self. assertBoundingBoxTransformCorrect ( form_field. value_data. bounding_box, expected. bounding_box ) self. assertEqual ( expected. text, form_field. value_data. text ) if not v : return 0 z = [ ( 1000000000, _ ( ""b"" ) ), ( 1000000, _ ( ""m"" ) ), ( 1000, _ ( ""k"" ) ), ] v = int ( v ) for x, y in z : o, p = divmod ( v, x ) if : if len ( str ( o ) ) > 2 or not p : return ""%d%s"" % ( o, y ) return ""%.1f%s"" % ( v / float ( x ), y ) return v",False,o,o > 0,0.7001277804374695 1073,"def run ( self, params = None ) : databus = conpot_core. get_databus ( ) cmd_ok = """" if params : params_split = params. split ( "" "" ) cmd_ok = ""OK"" kap_port = parse_port ( params_split [ 0 ] ) if : databus. set_value ( ""kap_a_server_port"", kap_port ) if len ( params_split ) > 1 : cha_port = parse_port ( params_split [ 1 ] ) if cha_port!= 0 : databus. set_value ( ""channel_a_port"", cha_port ) if len ( params_split ) > 2 : chb_port = parse_port ( params_split [ 2 ] ) if chb_port!= 0 : databus. set_value ( ""channel_b_port"", chb_port ) return self. CMD_OUTPUT. format ( cmd_ok, databus. get_value ( ""kap_a_server_port"" ), databus. get_value ( ""channel_a_port"" ), databus. get_value ( ""channel_b_port"" ), 50100, )",True,kap_port != 0,kap_port != 0,0.6604142189025879 1074,"def help ( self, request ) : if type ( request ) is type ( """" ) : if request == ""help"" : self. intro ( ) elif request == ""keywords"" : self. listkeywords ( ) elif : self. listtopics ( ) elif request == ""modules"" : self. listmodules ( ) elif request [ : 8 ] == ""modules "" : self. listmodules ( split ( request ) [ 1 ] ) elif request in self. keywords : self. showtopic ( request ) elif request in self. topics : self. showtopic ( request ) elif request : doc ( request, ""Help on %s:"" ) elif isinstance ( request, Helper ) : self ( ) else : doc ( request, ""Help on %s:"" ) self. output. write ( ""\n"" )",True,request == 'topics',request == 'topics',0.6826637387275696 1075,"def _grouped_backends ( cls, options, backend ) : ""Group options by backend and filter out output group appropriately"" if options is None : return [ ( backend or Store. current_backend, options ) ] dfltdict = defaultdict ( dict ) for spec, groups in options. items ( ) : if ""output"" not in groups. keys ( ) or len ( groups [ ""output"" ] ) == 0 : dfltdict [ backend or Store. current_backend ] [ spec. strip ( ) ] = groups elif : dfltdict [ groups [ ""output"" ] [ ""backend"" ] ] [ spec. strip ( ) ] = groups elif [ ""backend"" ] == list ( groups [ ""output"" ]. keys ( ) ) : filtered = { k : v for k, v in groups. items ( ) if k!= ""output"" } dfltdict [ groups [ ""output"" ] [ ""backend"" ] ] [ spec. strip ( ) ] = filtered else : raise Exception ( ""The output options group must have the backend keyword"" ) return [ ( bk, bk_opts ) for ( bk, bk_opts ) in dfltdict. 
items ( ) ]",False,set(groups['output'].keys()) - set(['backend']),"[backend""] == list(groups[""output""])",0.6504580974578857 1076,"def index ( request, response_format = ""html"" ) : ""All available tickets"" if request. GET : if : query = _get_filter_query ( request. GET ) else : query = Q ( status__hidden = False ) & _get_filter_query ( request. GET ) tickets = Object. filter_by_request ( request, Ticket. objects. filter ( query ) ) else : tickets = Object. filter_by_request ( request, Ticket. objects. filter ( status__hidden = False ) ) filters = FilterForm ( request. user. profile, """", request. GET ) context = _get_default_context ( request ) context. update ( { ""tickets"" : tickets, ""filters"" : filters, } ) return render_to_response ( ""services/index"", context, context_instance = RequestContext ( request ), response_format = response_format, )",False,'status' in request.GET and request.GET['status'],status__hidden,0.6580737829208374 1077,"def fwd_normalize ( fwd : OptionsIterable ) -> Options : """"""Normalize and convert values extracted from forwarded headers."""""" ret : Dict [ str, Union [ int, str ] ] = { } for key, val in fwd : if : try : if key in ( ""by"", ""for"" ) : ret [ key ] = fwd_normalize_address ( val ) elif key in ( ""host"", ""proto"" ) : ret [ key ] = val. lower ( ) elif key == ""port"" : ret [ key ] = int ( val ) elif key == ""path"" : ret [ key ] = unquote ( val ) else : ret [ key ] = val except ValueError : pass return ret",False,val is not None,key == 'address',0.6647884249687195 1078,"def _well_known_rules ( conf ) : yield iptables. Rule ( protocol = ""ip"", src = ""0.0.0.0/0.0.0.0"", dst = ""0.0.0.0/0.0.0.0"", target = ""PAASTA-COMMON"", matches = ( ), target_parameters = ( ), ) for dep in conf. get_dependencies ( ) or ( ) : resource = dep. get ( ""well-known"" ) if resource == ""internet"" : yield iptables. Rule ( protocol = ""ip"", src = ""0.0.0.0/0.0.0.0"", dst = ""0.0.0.0/0.0.0.0"", target = ""PAASTA-INTERNET"", matches = ( ), target_parameters = ( ), ) elif : raise AssertionError ( resource )",False,resource is not None,resource != 'error',0.6598106622695923 1079,"def binaryPrecedence ( token = None, allowIn = None ) : prec = 0 if ( token. type!= Token. Punctuator ) and ( token. type!= Token. Keyword ) : return 0 while 1 : if token. value == ""||"" : prec = 1 break elif token. value == ""&&"" : prec = 2 break elif token. value == ""|"" : prec = 3 break elif : prec = 4 break elif token. value == ""&"" : prec = 5 break elif ( token. value == ""!=="" ) or ( ( token. value == ""==="" ) or ( ( token. value == ""!="" ) or ( token. value == ""=="" ) ) ) : prec = 6 break elif ( token. value == ""instanceof"" ) or ( ( token. value == "">="" ) or ( ( token. value == ""<="" ) or ( ( token. value == "">"" ) or ( token. value == ""<"" ) ) ) ) : prec = 7 break elif token. value == ""in"" : prec = 7 if allowIn else 0 break elif ( token. value == "">>>"" ) or ( ( token. value == "">>"" ) or ( token. value == ""<<"" ) ) : ",False,token.value == '^',token.value == 'in',0.6612650752067566 1080,"def __init__ ( self, family = None, style = None, weight = None, color = None, size = None, ha = None, va = None, rotation = None, linespacing = None, backgroundcolor = None, margin = None, ** kwargs ) : d = { ""visible"" : True } with suppress ( KeyError ) : linespacing = kwargs. pop ( ""lineheight"" ) with suppress ( KeyError ) : color = color or kwargs. pop ( ""colour"" ) with suppress ( KeyError ) : _face = kwargs. 
pop ( ""face"" ) if _face == ""plain"" : style = ""normal"" elif _face == ""italic"" : style = ""italic"" elif _face == ""bold"" : weight = ""bold"" elif : style = ""italic"" weight = ""bold"" with suppress ( KeyError ) : ha = self. _translate_hjust ( kwargs. pop ( ""hjust"" ) ) with suppress ( KeyError ) : va = self. _translate_vjust ( kwargs. pop ( ""vjust"" ) ) with suppress ( KeyError ) : rotation = kwargs. pop ( ""angle"" ) if margin is not None : margin = Margin ( self, ** margin ) names = ( ""backgroundcolor"", ""color"", ""family"", ""ha"", ""linespacing"", ""rotation",False,_face == 'bold.italic',_face == 'italic',0.6530894041061401 1081,"def wrapper_function ( * args, ** kwargs ) : if fn. __class__ in _simple_constraint_rule_types : value = fn else : value = fn ( * args, ** kwargs ) if value. __class__ in _simple_constraint_rule_types : if value is None : return ConstraintList. End elif : return Constraint. Feasible elif value is False : return Constraint. Infeasible return value",True,value is True,value is True,0.6682366132736206 1082,"def sayText ( text, voice = None, verbose = False ) : if ViewClient. isLinux : if : print ( ""\x1b[{}{}m>> saying: {}\x1b[0m"". format ( 35, """", text ) ) time. sleep ( 2 ) if DEBUG : print ( 'Saying ""%s"" using festival' % text, file = sys. stderr ) pipe = subprocess. Popen ( [ ""/usr/bin/festival"" ] ) pipe. communicate ( '(SayText ""%s"")' % text ) pipe. terminate ( ) time. sleep ( 5 ) elif ViewClient. isDarwin : if : print ( ""\x1b[{}{}m>> saying: {}\x1b[0m"". format ( 35, """", text ) ) time. sleep ( 1 ) if not voice : voice = ""Samantha"" if DEBUG : print ( 'Saying ""%s"" as %s' % ( text, voice ), file = sys. stderr ) subprocess. check_call ( [ ""/usr/bin/say"", ""-v"", voice, text ] ) time. sleep ( 5 ) else : print ( ""sayText: Unsupported OS: {}"". format ( ViewClient. osName ), file = sys. stderr )",True,verbose,verbose,0.690322995185852 1083,"def calc ( self, arg ) : op = arg [ ""op"" ] if op == ""C"" : self. clear ( ) return str ( self. current ) num = decimal. Decimal ( arg [ ""num"" ] ) if self. op : if self. op == ""+"" : self. current += num elif self. op == ""-"" : self. current -= num elif : self. current *= num elif self. op == ""/"" : self. current /= num self. op = op else : self. op = op self. current = num res = str ( self. current ) if op == ""="" : self. clear ( ) return res",False,self.op == '*',self.op == '+',0.664408802986145 1084,"def set_text_from_of ( self, direction ) : """"""Sets the text of the numbers of displayed pages in table."""""" if self. pagination : if : if len ( self. _row_data_parts [ self. _rows_number ] ) < self. _to_value : self. _current_value = self. _current_value + self. rows_num else : self. _current_value = self. _current_value + len ( self. _row_data_parts [ self. _rows_number ] ) self. _to_value = self. _to_value + len ( self. _row_data_parts [ self. _rows_number ] ) if direction == ""back"" : self. _current_value = self. _current_value - len ( self. _row_data_parts [ self. _rows_number ] ) self. _to_value = self. _to_value - len ( self. _row_data_parts [ self. _rows_number ] ) if direction == ""increment"" : self. _current_value = 1 self. _to_value = self. rows_num + self. _current_value - 1 self. pagination. ids. label_rows_per_page. 
text = ( f""{self._current_value}-{self._to_value} of {len(",False,direction == 'forward',self._rows_number >= 0,0.6607716679573059 1085,"def PyJs_anonymous_3997_ ( min, max, this, arguments, var = var ) : var = Scope ( { u""this"" : this, u""max"" : max, u""arguments"" : arguments, u""min"" : min }, var ) var. registers ( [ u""max"", u""folded"", u""$this"", u""min"" ] ) var. put ( u""$this"", var. get ( u""this"" ) ) while 1 : var. put ( u""folded"", var. get ( u""caseFold"" ) ( var. get ( u""min"" ) ) ) if : var. get ( u""$this"" ). callprop ( u""add"", var. get ( u""folded"" ) ) if not ( var. put ( u""min"", Js ( var. get ( u""min"" ). to_number ( ) ) + Js ( 1 ) ) <= var. get ( u""max"" ) ) : break return var. get ( u""$this"" )",False,var.get(u'folded'),"hasattr(var, 'get')",0.6533318758010864 1086,"def check_value_shape ( self, value, slice_ ) : """"""Checks if value can be set to the slice"""""" if None not in self. shape and self. dtype!= ""O"" : if not all ( [ isinstance ( sh, int ) for sh in slice_ ] ) : expected_value_shape = tuple ( [ len ( range ( * slice_shape. indices ( self. shape [ i ] ) ) ) for i, slice_shape in enumerate ( slice_ ) if not isinstance ( slice_shape, int ) ] ) if isinstance ( value, list ) : value = np. array ( value ) if isinstance ( value, np. ndarray ) : value_shape = [ dim for dim in value. shape if dim!= 1 ] expected_shape = [ dim for dim in expected_value_shape if dim!= 1 ] if value_shape!= expected_shape : raise ValueShapeError ( expected_value_shape, value. shape ) else : value = value. reshape ( expected_value_shape ) else : expected_value_shape = ( 1, ) if isinstance ( value, list ) : value = np. array ( value ) if : raise",False,"isinstance(value, np.ndarray) and value.shape != expected_value_shape",value != expected_value_shape,0.6493576765060425 1087,"def _can_serialize_limited_fsim ( theta : float, phi : float ) : if _near_mod_2pi ( phi, 0 ) or isinstance ( phi, sympy. Symbol ) : if isinstance ( theta, sympy. Symbol ) : return True if _near_mod_2pi ( theta, 0 ) : return True if : return True if _near_mod_2pi ( theta, np. pi / 4 ) : return True if ( ( _near_mod_2pi ( theta, np. pi / 2 ) or isinstance ( theta, sympy. Symbol ) ) and ( _near_mod_2pi ( phi, np. pi / 6 ) ) or isinstance ( phi, sympy. Symbol ) ) : return True if ( ( _near_mod_2pi ( theta, 0 ) or isinstance ( theta, sympy. Symbol ) ) and ( _near_mod_2pi ( phi, np. pi ) ) or isinstance ( phi, sympy. Symbol ) ) : return True return False",False,"_near_mod_2pi(theta, -np.pi / 4)",phi is None,0.6483414173126221 1088,"def tokens_to_spans ( ) -> Iterable [ Tuple [ str, Optional [ Style ] ] ] : """"""Convert tokens to spans."""""" tokens = iter ( line_tokenize ( ) ) line_no = 0 _line_start = line_start - 1 while line_no < _line_start : _token_type, token = next ( tokens ) yield ( token, None ) if : line_no += 1 for token_type, token in tokens : yield ( token, _get_theme_style ( token_type ) ) if : line_no += 1 if line_no >= line_end : break",False,token.endswith('\n'),token_type in THRESHOLD_STANDALONE_CLASSES,0.651739239692688 1089,"def getCustomProperties ( self ) : self. fields = { } self. relations = { } self. columns = [ ] self. meta = self. klass. _meta for name in self. meta. get_all_field_names ( ) : x = self. meta. get_field_by_name ( name ) [ 0 ] if isinstance ( x, files. FileField ) : self. readonly_attrs. update ( [ name ] ) if : continue if isinstance ( x, related. ManyToManyField ) : self. relations [ name ] = x elif not isinstance ( x, related. ForeignKey ) : self. 
fields [ name ] = x else : self. relations [ name ] = x parent_fields = [ ] for field in self. meta. parents. values ( ) : parent_fields. append ( field. attname ) del self. relations [ field. name ] self. exclude_attrs. update ( parent_fields ) props = self. fields. keys ( ) self. encodable_properties. update ( props ) self. decodable_properties. update ( props ) self. exclude_attrs. update ( [ ""_state"" ] )",False,"isinstance(x, related.RelatedObject)",not x,0.6547265648841858 1090,"def kwargs ( self ) : kwargs = { } kwargs_started = False for param_name, param in self. _signature. parameters. items ( ) : if not kwargs_started : if param. kind in ( _VAR_KEYWORD, _KEYWORD_ONLY ) : kwargs_started = True else : if param_name not in self. arguments : kwargs_started = True continue if not kwargs_started : continue try : arg = self. arguments [ param_name ] except KeyError : pass else : if : kwargs. update ( arg ) else : kwargs [ param_name ] = arg return kwargs",False,param.kind == _VAR_KEYWORD,arg,0.6594990491867065 1091,"def ki_protection_enabled ( frame ) : while frame is not None : if : return frame. f_locals [ LOCALS_KEY_KI_PROTECTION_ENABLED ] if frame. f_code. co_name == ""__del__"" : return True frame = frame. f_back return True",True,LOCALS_KEY_KI_PROTECTION_ENABLED in frame.f_locals,LOCALS_KEY_KI_PROTECTION_ENABLED in frame.f_locals,0.6562126278877258 1092,"def wrapper ( self, * args, ** kwargs ) : initial_switch_count = getattr ( _get_hub ( ), ""switch_count"", None ) self. switch_expected = getattr ( self, ""switch_expected"", True ) if initial_switch_count is not None : fullname = getattr ( self, ""fullname"", None ) if self. switch_expected == ""default"" and fullname : self. switch_expected = get_switch_expected ( fullname ) result = method ( self, * args, ** kwargs ) if initial_switch_count is not None and self. switch_expected is not None : switch_count = _get_hub ( ). switch_count - initial_switch_count if : assert switch_count >= 0 if not switch_count : raise AssertionError ( ""%s did not switch"" % fullname ) elif self. switch_expected is False : if switch_count : raise AssertionError ( ""%s switched but not expected to"" % fullname ) else : raise AssertionError ( ""Invalid value for switch_expected: %r"" % ( self. switch_expected, ) ) return result",False,self.switch_expected is True,fullname,0.6529824733734131 1093,"def ResolveTarget ( build_file, target, toolset ) : [ parsed_build_file, target, parsed_toolset ] = ParseQualifiedTarget ( target ) if parsed_build_file : if : build_file = os. path. normpath ( os. path. join ( os. path. dirname ( build_file ), parsed_build_file ) ) if not os. path. isabs ( build_file ) : build_file = RelativePath ( build_file, ""."" ) else : build_file = parsed_build_file if parsed_toolset : toolset = parsed_toolset return [ build_file, target, toolset ]",True,build_file,build_file,0.6688035726547241 1094,"def unpack_response ( response ) : try : data = response. task. read ( size = 2 ) if : raise GAE_Exception ( 600, ""get protocol head fail"" ) if len ( data )!= 2 : raise GAE_Exception ( 600, ""get protocol head fail, data:%s, len:%d"" % ( data, len ( data ) ) ) ( headers_length, ) = struct. unpack ( ""!h"", data ) data = response. task. read ( size = headers_length ) if : raise GAE_Exception ( 600, ""get protocol head fail, len:%d"" % headers_length ) raw_response_line, headers_data = inflate ( data ). split ( b""\r\n"", 1 ) rl = raw_response_line. split ( ) response. app_status = int ( rl [ 1 ] ) if len ( rl ) >= 3 : response. 
app_reason = rl [ 2 ]. strip ( ) headers_block, app_msg = headers_data. split ( b""\r\n\r\n"" ) headers_pairs = headers_block. split ( b""\r\n"" ) response. headers = { } for pair in headers_pairs : if not pair : break k, v = pair. split ( b"": "", 1 ) response. headers [ k ] = v response. app_msg = app_msg return response except Exception as e : self, retry, machines_cache, request_executor, method, path, fields = None, ** kwargs ) : if fields is not None : kwargs [ ""fields"" ] = fields some_request_failed = False for i, base_uri in enumerate ( machines_cache ) : if i > 0 : logger. info ( ""Retrying on %s"", base_uri ) try : response = request_executor ( method, base_uri + path, ** kwargs ) response. data. decode ( ""utf-8"" ) if : self. set_base_uri ( base_uri ) self. _refresh_machines_cache ( ) return response except ( HTTPError, HTTPException, socket. error, socket. timeout ) as e : self. http. clear ( ) if not retry and i + 1 < len ( machines_cache ) : self. set_base_uri ( machines_cache [ i + 1 ] ) if ( isinstance ( fields, dict ) and fields. get ( ""wait"" ) == ""true"" and isinstance ( e, ( ReadTimeoutError, ProtocolError ) ) ) : logger. debug ( ""Watch timed out."" ) raise etcd. EtcdWatchTimedOut ( ""Watch timed out: {0}"". format ( e ), cause = e ) ",True,some_request_failed,some_request_failed,0.6598477959632874 1096,"def check ( self, result_info ) : if ""Scored"" not in result_info [ ""status"" ] : raise TestFailure ( ""Expected a successful evaluation, got: %s"" % result_info [ ""status"" ] ) if not result_info [ ""evaluations"" ] : raise TestFailure ( ""No evaluations found."" ) for evaluation in result_info [ ""evaluations"" ] : score = float ( evaluation [ ""outcome"" ] ) text = evaluation [ ""text"" ] if score!= 0.0 : raise TestFailure ( ""Should have %s. Scored %g."" % ( self. short_adjective, score ) ) if : raise TestFailure ( ""Should have %s, got %s"" % ( self. short_adjective, text ) )",False,self.failure_string not in text,text != 0.0,0.6532106399536133 1097,"def _try_passwordless_openssh ( server, keyfile ) : """"""Try passwordless login with shell ssh command."""""" if pexpect is None : raise ImportError ( ""pexpect unavailable, use paramiko"" ) cmd = ""ssh -f "" + server if keyfile : cmd += "" -i "" + keyfile cmd += "" exit"" env = os. environ. copy ( ) env. pop ( ""SSH_ASKPASS"", None ) ssh_newkey = ""Are you sure you want to continue connecting"" p = pexpect. spawn ( cmd, env = env ) while True : try : i = p. expect ( [ ssh_newkey, _password_pat ], timeout = 0.1 ) if : raise SSHException ( ""The authenticity of the host can't be established."" ) except pexpect. TIMEOUT : continue except pexpect. EOF : return True else : return False",False,i == 0,i is None,0.676398515701294 1098,"def process_batch_data ( input_data, settings, mode, color_jitter, rotate ) : batch_data = [ ] for sample in input_data : if os. path. isfile ( sample [ 0 ] ) : tmp_data = process_image ( sample, settings, mode, color_jitter, rotate ) if : continue batch_data. append ( tmp_data ) else : logger. info ( ""File not exist : {0}"". format ( sample [ 0 ] ) ) return batch_data",False,tmp_data is None,len(tmp_data) == 0,0.6562032699584961 1099,"def _GetMSBuildConfigurationDetails ( spec, build_file ) : properties = { } for name, settings in spec [ ""configurations"" ]. iteritems ( ) : msbuild_attributes = _GetMSBuildAttributes ( spec, settings, build_file ) condition = _GetConfigurationCondition ( name, settings ) character_set = msbuild_attributes. 
get ( ""CharacterSet"" ) _AddConditionalProperty ( properties, condition, ""ConfigurationType"", msbuild_attributes [ ""ConfigurationType"" ], ) if : _AddConditionalProperty ( properties, condition, ""CharacterSet"", character_set ) return _GetMSBuildPropertyGroup ( spec, ""Configuration"", properties )",False,character_set,"not property_exists(spec, property_names)",0.663638174533844 1100,"def _get_attr ( sdk_path, mod_attr_path, checked = True ) : try : attr_mod, attr_path = ( mod_attr_path. split ( ""#"" ) if ""#"" in mod_attr_path else ( mod_attr_path, """" ) ) full_mod_path = ""{}.{}"". format ( sdk_path, attr_mod ) if attr_mod else sdk_path op = import_module ( full_mod_path ) if : for part in attr_path. split ( ""."" ) : op = getattr ( op, part ) return op except ( ImportError, AttributeError ) as ex : if checked : return None raise ex",True,attr_path,attr_path,0.6652535200119019 1101,"def _process_sample_weight ( self, interactions, sample_weight ) : if sample_weight is not None : if : raise NotImplementedError ( ""k-OS loss with sample weights "" ""not implemented."" ) if not isinstance ( sample_weight, sp. coo_matrix ) : raise ValueError ( ""Sample_weight must be a COO matrix."" ) if sample_weight. shape!= interactions. shape : raise ValueError ( ""Sample weight and interactions "" ""matrices must be the same shape"" ) if not ( np. array_equal ( interactions. row, sample_weight. row ) and np. array_equal ( interactions. col, sample_weight. col ) ) : raise ValueError ( ""Sample weight and interaction matrix "" ""entries must be in the same order"" ) if sample_weight. data. dtype!= CYTHON_DTYPE : sample_weight_data = sample_weight. data. astype ( CYTHON_DTYPE ) else : sample_weight_data = sample_weight. data else : if np. array_equiv ( interactions. data, 1.0 ) : sample_weight_data = interactions. data else : sample_weight_data = np. ones_like ( interactions. data,",False,self.loss == 'warp-kos',self.k_OS_Loss,0.6573790311813354 1102,"def correct_awareness ( value ) : if isinstance ( value, datetime ) : if settings. USE_TZ : return make_aware ( value ) elif : default_tz = timezone. get_default_timezone ( ) return timezone. make_naive ( value, default_tz ) return value",False,timezone.is_aware(value),"isinstance(value, timezone.timezone)",0.6515957117080688 1103,"def _init_weights ( self, module ) : if isinstance ( module, nn. Linear ) : module. weight. data. normal_ ( mean = 0.0, std = self. config. init_std ) if module. bias is not None : module. bias. data. zero_ ( ) elif isinstance ( module, nn. Embedding ) : module. weight. data. normal_ ( mean = 0.0, std = self. config. init_std ) if : module. weight. data [ module. padding_idx ]. zero_ ( )",True,module.padding_idx is not None,module.padding_idx is not None,0.6553285121917725 1104,"def _create_examples ( self, lines, set_type ) : """"""Creates examples for the training/dev/test sets."""""" examples = [ ] for i, line in enumerate ( lines ) : if : continue guid = ""%s-%s"" % ( set_type, i ) if set_type == ""test"" : text_a = self. process_text_fn ( line [ 1 ] ) label = ""0"" else : text_a = self. process_text_fn ( line [ 3 ] ) label = self. process_text_fn ( line [ 1 ] ) examples. append ( InputExample ( guid = guid, text_a = text_a, text_b = None, label = label ) ) return examples",False,set_type == 'test' and i == 0,i == 0,0.6546224355697632 1105,"def there_are_num_which_tasks ( context, num, which, state, exact ) : context. max_tasks = num app_id = which_id ( context, which ) for _ in range ( 180 ) : app = context. 
current_client. get_app ( app_id, embed_tasks = True ) happy_tasks = get_happy_tasks ( app, context. service, ""fake_nerve_ns"", context. system_paasta_config ) happy_count = len ( happy_tasks ) if state == ""healthy"" : if exact : if happy_count == context. max_tasks : return else : if : return elif state == ""unhealthy"" : if exact : if len ( app. tasks ) - happy_count == context. max_tasks : return else : if len ( app. tasks ) - happy_count >= context. max_tasks : return time. sleep ( 0.5 ) raise Exception ( ""timed out waiting for %d %s tasks on %s; there are %d"" % ( context. max_tasks, state, app_id, len ( app. tasks ) ) )",False,happy_count >= context.max_tasks,len(app.tasks) == context.max_tasks,0.653680682182312 1106,"def _dump_arg_defaults ( kwargs ) : """"""Inject default arguments for dump functions."""""" if current_app : kwargs. setdefault ( ""cls"", current_app. json_encoder ) if : kwargs. setdefault ( ""ensure_ascii"", False ) kwargs. setdefault ( ""sort_keys"", current_app. config [ ""JSON_SORT_KEYS"" ] ) else : kwargs. setdefault ( ""sort_keys"", True ) kwargs. setdefault ( ""cls"", JSONEncoder )",False,not current_app.config['JSON_AS_ASCII'],'JSON_SORT_KEYS' in current_app.config,0.6479122638702393 1107,"def clear_except ( self, retained_segments ) : sn = set ( s. name for s in retained_segments ) try : for n in os. listdir ( self. running ) : if n not in sn and re. match ( storage. SEGMENT_REGEXP, n ) : try : shutil. rmtree ( path. join ( self. running, n ) ) except EnvironmentError as e : if : raise except EnvironmentError as e : if : raise try : for n in os. listdir ( self. prefetched_dir ) : if n not in sn and re. match ( storage. SEGMENT_REGEXP, n ) : try : os. remove ( path. join ( self. prefetched_dir, n ) ) except EnvironmentError as e : if : raise except EnvironmentError as e : if : raise",False,e.errno != errno.ENOENT,e.exit_code,0.6543263792991638 1108,"def _check_extra_fetches ( self, extra_fetches ) : fetch_values = None if extra_fetches is not None : fetch_values = list ( extra_fetches. values ( ) ) if fetch_values is not None : if self. _samples in fetch_values : raise ValueError ( ""`samples` must not be included in `extra_fetches`. "" ""It is added automatically."" ) if self. _sequence_length in fetch_values : raise ValueError ( ""`sequence_length` must not be included in `extra_fetches`."" "" It is added automatically."" ) if : raise ValueError ( ""Key'samples' is preserved and must not be used "" ""in `extra_fetches`."" ) if ""sequence_length"" in extra_fetches : raise ValueError ( ""Key'sequence_length' is preserved and must not be used "" ""in `extra_fetches`."" )",True,'samples' in extra_fetches,'samples' in extra_fetches,0.6581804156303406 1109,"def generate_and_check_random ( ) : random_size = 256 while True : random = os. urandom ( random_size ) a = int. from_bytes ( random, ""big"" ) A = pow ( g, a, p ) if is_good_mod_exp_first ( A, p ) : a_for_hash = big_num_for_hash ( A ) u = int. from_bytes ( sha256 ( a_for_hash, b_for_hash ), ""big"" ) if : return ( a, a_for_hash, u )",False,u > 0,"is_good_mod_exp_first(u, p)",0.6681889295578003 1110,"def get_children ( node ) : result = [ ] if node. _fields is not None : for name in node. _fields : if : continue child = getattr ( node, name ) result. append ( child ) return result",False,"name in ['lineno', 'col_offset']","hasattr(node, name) is False",0.6571792364120483 1111,"def from_unparsed_string ( self, chunk ) : """"""Parse an unknown string into body and prefix."""""" chunk = chunk. 
strip ( ) if not Flag. indicates_flag ( chunk ) : return Flag. Builder ( ) for prefix in Flag. SEPARABLE_PREFIXES : if chunk. startswith ( prefix ) : self. __prefix = prefix rest = chunk [ len ( prefix ) : ] if : self. __separator = rest [ 0 ] rest = rest [ 1 : ] self. __body = rest. strip ( ) return self if not self. __body : self. __body = chunk return self",False,rest and rest[0] in Flag.POSSIBLE_SEPARATORS,rest,0.6559641361236572 1112,"def _format_input_map_as_tensors ( self, input_map ) : """"""Returns a map from string to `tf.Tensor` or `CompositeTensor`."""""" result = { } for key, value in input_map. items ( ) : if : result [ key ] = value else : result [ key ] = tf. convert_to_tensor ( value ) return result",False,"isinstance(value, (tf.Tensor, composite_tensor.CompositeTensor))","isinstance(value, (tf.Tensor, tf.CompositeTensor))",0.6526621580123901 1113,"def _create_win ( self ) : try : key = _winreg. OpenKey ( _winreg. HKEY_LOCAL_MACHINE, r""Software\Microsoft\Windows NT\CurrentVersion\Fonts"", ) except EnvironmentError : try : key = _winreg. OpenKey ( _winreg. HKEY_LOCAL_MACHINE, r""Software\Microsoft\Windows\CurrentVersion\Fonts"", ) except EnvironmentError : raise FontNotFound ( ""Can't open Windows font registry key"" ) try : path = self. _lookup_win ( key, self. font_name, STYLES [ ""NORMAL"" ], True ) self. fonts [ ""NORMAL"" ] = ImageFont. truetype ( path, self. font_size ) for style in ( ""ITALIC"", ""BOLD"", ""BOLDITALIC"" ) : path = self. _lookup_win ( key, self. font_name, STYLES [ style ] ) if path : self. fonts [ style ] = ImageFont. truetype ( path, self. font_size ) else : if : self. fonts [ style ] = self. fonts [ ""BOLD"" ] else : self. fonts [ style ] = self. fonts [ ""NORMAL"" ] finally : _winreg. CloseKey ( key )",False,style == 'BOLDITALIC',self.has_font,0.6627036333084106 1114,"def data ( self, index, role ) : row_offset = self. get_row_offset ( ) if not index. isValid ( ) : return None elif role!= QtCore. Qt. DisplayRole : return None if index. column ( ) == 0 : return ""{:,}"". format ( index. row ( ) + self. row_count_start + row_offset ) else : row = row_offset + index. row ( ) column_name = self. get_column_names ( ) [ index. column ( ) - 1 ] try : value = self. dataset. evaluate ( column_name, row, row + 1 ) except Exception as e : logger. exception ( ""Error evaluating: %s %s"", column_name, row ) return ""Error: %r"" % e try : value = value [ 0 ] if : return str ( value ) else : return ""%s %s"" % ( value. dtype. name, value. shape ) except : pass return str ( value )",False,len(value.shape) == 0,role == QtCore.Qt.DisplayRole,0.655818521976471 1115,"def run ( self ) : self. alive = True if _log. isEnabledFor ( _DEBUG ) : _log. debug ( ""started"" ) while self. alive : task = self. queue. get ( ) if : function, args, kwargs = task assert function try : function ( * args, ** kwargs ) except : _log. exception ( ""calling %s"", function ) if _log. isEnabledFor ( _DEBUG ) : _log. debug ( ""stopped"" )",True,task,task,0.7189342975616455 1116,"def record_error ( e ) : if isinstance ( e, failure. Failure ) : e = e. value with self. lock : if self. _already_closed ( factory. i ) : extra_logger. info ( ""[%s] Ignoring error for already closed connection: %s"", label, e ) elif : extra_logger. info ( ""[%s] Received error for connection which has not been fully initialized: %s"", label, e, ) self. errors [ factory. i ] = e else : extra_logger. info ( ""[%s] Recording fatal error for connection: %s"", label, e ) self. errors [ factory. 
i ] = e",False,factory.i not in self.clients,self._has_error(factory.i),0.6604547500610352 1117,"def _as_key_indices ( keys, key_names ) : if keys is None : return keys key_indices = [ ] for key in keys : if isinstance ( key, numbers. Integral ) : key_index = key if : key_index += len ( key_names ) if key_index < 0 or len ( key_names ) <= key_index : raise IndexError ( ""index {} is out of bounds for keys with size {}"". format ( key, len ( key_names ) ) ) else : try : key_index = key_names. index ( key ) except ValueError : raise KeyError ( ""{} does not exists"". format ( key ) ) key_indices. append ( key_index ) return tuple ( key_indices )",True,key_index < 0,key_index < 0,0.6618554592132568 1118,"def merge_weekdays ( base_wd, icu_wd ) : result = [ ] for left, right in zip ( base_wd, icu_wd ) : if : result. append ( left ) continue left = set ( left. split ( ""|"" ) ) right = set ( right. split ( ""|"" ) ) result. append ( ""|"". join ( left | right ) ) return result",False,left == right,not right,0.6880174875259399 1119,"def _clean_regions ( items, region ) : """"""Intersect region with target file if it exists"""""" variant_regions = bedutils. population_variant_regions ( items, merged = True ) with utils. tmpfile ( ) as tx_out_file : target = subset_variant_regions ( variant_regions, region, tx_out_file, items ) if : if isinstance ( target, six. string_types ) and os. path. isfile ( target ) : target = _load_regions ( target ) else : target = [ target ] return target",True,target,target,0.6975597739219666 1120,"def data_dir ( self ) -> Path : try : from appdirs import user_data_dir except ImportError : path = Path. home ( ) / "".local"" / ""share"" if : return path / ""dephell"" path = Path. home ( ) / ""Library"" / ""Application Support"" if : return path / ""dephell"" self. pip_main ( [ ""install"", ""appdirs"" ] ) from appdirs import user_data_dir return Path ( user_data_dir ( ""dephell"" ) )",False,path.exists(),self.in_app_support(),0.6595215797424316 1121,"def _get_booster_best_score ( self, booster : ""lgb.Booster"" ) -> float : metric = self. _get_metric_for_objective ( ) valid_sets : Optional [ VALID_SET_TYPE ] = self. lgbm_kwargs. get ( ""valid_sets"" ) if self. lgbm_kwargs. get ( ""valid_names"" ) is not None : if type ( self. lgbm_kwargs [ ""valid_names"" ] ) is str : valid_name = self. lgbm_kwargs [ ""valid_names"" ] elif : valid_name = self. lgbm_kwargs [ ""valid_names"" ] [ - 1 ] else : raise NotImplementedError elif type ( valid_sets ) is lgb. Dataset : valid_name = ""valid_0"" elif isinstance ( valid_sets, ( list, tuple ) ) and len ( valid_sets ) > 0 : valid_set_idx = len ( valid_sets ) - 1 valid_name = ""valid_{}"". format ( valid_set_idx ) else : raise NotImplementedError val_score = booster. best_score [ valid_name ] [ metric ] return val_score",False,"type(self.lgbm_kwargs['valid_names']) in [list, tuple]",type(self.lgbm_kwargs['valid_names']) is str,0.6504154801368713 1122,"def get_changed_module ( self ) : source = self. resource. read ( ) change_collector = codeanalyze. ChangeCollector ( source ) if self. replacement is not None : change_collector. add_change ( self. skip_start, self. skip_end, self. replacement ) for occurrence in self. occurrence_finder. find_occurrences ( self. resource ) : start, end = occurrence. get_primary_range ( ) if : self. handle. occurred_inside_skip ( change_collector, occurrence ) else : self. handle. occurred_outside_skip ( change_collector, occurrence ) result = change_collector. 
get_changed ( ) if result is not None and result!= source : return result",False,self.skip_start <= start < self.skip_end,start == end,0.6526960134506226 1123,"def check_send_webhook_message ( request : HttpRequest, user_profile : UserProfile, topic : str, body : str, stream : Optional [ str ] = REQ ( default = None ), user_specified_topic : Optional [ str ] = REQ ( ""topic"", default = None ), unquote_url_parameters : bool = False, ) -> None : if stream is None : assert user_profile. bot_owner is not None check_send_private_message ( user_profile, request. client, user_profile. bot_owner, body ) else : if : stream = unquote ( stream ) if user_specified_topic is not None : topic = user_specified_topic if : topic = unquote ( topic ) try : check_send_stream_message ( user_profile, request. client, stream, topic, body ) except StreamDoesNotExistError : pass",False,unquote_url_parameters,topic is not None,0.6574562788009644 1124,"def log_metrics ( self, metrics : Dict [ str, float ], step : Optional [ int ] = None ) -> None : assert rank_zero_only. rank == 0, ""experiment tried to log from global_rank!= 0"" metrics = self. _add_prefix ( metrics ) timestamp_ms = int ( time ( ) * 1000 ) for k, v in metrics. items ( ) : if : log. warning ( f""Discarding metric with string value {k}={v}."" ) continue new_k = re. sub ( ""[^a-zA-Z0-9_/. -]+"", """", k ) if k!= new_k : rank_zero_warn ( ""MLFlow only allows '_', '/', '.' and'' special characters in metric name."" f"" Replacing {k} with {new_k}."", RuntimeWarning, ) k = new_k self. experiment. log_metric ( self. run_id, k, v, timestamp_ms, step )",False,"isinstance(v, str)",k == '',0.6483108401298523 1125,"def call_init ( self, node, instance ) : for b in instance. bindings : if : continue self. _initialized_instances. add ( b. data ) node = self. _call_init_on_binding ( node, b ) return node",False,b.data in self._initialized_instances,not b.data,0.6558672189712524 1126,"def removeUser ( self, username ) : hideFromOSD = not constants. SHOW_DIFFERENT_ROOM_OSD if username in self. _users : user = self. _users [ username ] if : if self. isRoomSame ( user. room ) : hideFromOSD = not constants. SHOW_SAME_ROOM_OSD if username in self. _users : self. _users. pop ( username ) message = getMessage ( ""left-notification"" ). format ( username ) self. ui. showMessage ( message, hideFromOSD ) self. _client. lastLeftTime = time. time ( ) self. _client. lastLeftUser = username self. userListChange ( )",False,user.room,user.isOpen(),0.6622039079666138 1127,"def content ( self ) : """"""Content of the response, in bytes."""""" if self. _content is False : if : raise RuntimeError ( ""The content for this response was already consumed"" ) if self. status_code == 0 or self. raw is None : self. _content = None else : self. _content = ( bytes ( ). join ( self. iter_content ( CONTENT_CHUNK_SIZE ) ) or bytes ( ) ) self. _content_consumed = True return self. 
_content",True,self._content_consumed,self._content_consumed,0.6681613922119141 1128,"def has_google_credentials ( ) : global _HAS_GOOGLE_CREDENTIALS if _HAS_GOOGLE_CREDENTIALS is None : provider = Provider ( ""google"" ) if : _HAS_GOOGLE_CREDENTIALS = False else : _HAS_GOOGLE_CREDENTIALS = True return _HAS_GOOGLE_CREDENTIALS",False,provider.get_access_key() is None or provider.get_secret_key() is None,provider is None,0.6513177156448364 1129,"def get_order ( self, aBuf ) : if not aBuf : return - 1, 1 first_char = wrap_ord ( aBuf [ 0 ] ) if ( 0x81 <= first_char <= 0x9F ) or ( 0xE0 <= first_char <= 0xFC ) : charLen = 2 else : charLen = 1 if len ( aBuf ) > 1 : second_char = wrap_ord ( aBuf [ 1 ] ) if : return second_char - 0x9F, charLen return - 1, charLen",False,first_char == 202 and 159 <= second_char <= 241,129 <= second_char <= 159,0.6537764072418213 1130,"def _serialize ( self, value, attr, obj, ** kwargs ) : if self. allow_none and value is None : return None for type_, schema_ in self. desc. items ( ) : if _issubclass_safe ( type ( value ), type_ ) : if is_dataclass ( value ) : res = schema_. _serialize ( value, attr, obj, ** kwargs ) res [ ""__type"" ] = str ( type_. __name__ ) return res break elif : return schema_. _serialize ( value, attr, obj, ** kwargs ) else : warnings. warn ( f'The type ""{type(value).__name__}"" (value: ""{value}"")' f""is not in the list of possible types of typing.Union "" f""(dataclass: {self.cls.__name__}, field: {self.field.name}). "" f""Value cannot be serialized properly."" ) return super ( ). _serialize ( value, attr, obj, ** kwargs )",False,"isinstance(value, _get_type_origin(type_))","isinstance(value, list)",0.6508442163467407 1131,"def decode ( self, value, force = False ) : ""Return a unicode string from the bytes-like representation"" if self. decode_responses or force : if isinstance ( value, memoryview ) : value = value. tobytes ( ) if : value = value. decode ( self. encoding, self. encoding_errors ) return value",False,"isinstance(value, bytes)",self.encoding and force,0.6479377746582031 1132,"def repack_pyz ( pyz, obfpath, cipher = None, clean = False ) : code_dict = { } obflist = [ ] n = len ( obfpath ) + 1 for dirpath, dirnames, filenames in os. walk ( obfpath ) : for pyfile in [ x for x in filenames if x. endswith ( "".py"" ) ] : pyfile = os. path. join ( dirpath, pyfile ) logger. info ( ""Compile %s"", pyfile ) name = pyfile [ n : ]. replace ( ""\\"", ""."" ). replace ( ""/"", ""."" ) [ : - 3 ] if : name = name [ : - len ( ""__init__.py"" ) ]. strip ( ""."" ) with open ( pyfile, ""r"" ) as f : source = f. read ( ) logger. debug ( ""Got obfuscated item: %s"", name ) code_dict [ name ] = compile ( source, ""<%s>"" % name, ""exec"" ) obflist. append ( name ) logger. info ( ""Got %d obfuscated items"", len ( obflist ) ) logger. info ( 'Patching PYZ file ""%s""', pyz ) arch = ZlibArchive ( pyz ) logic_toc = [ ] for name in arch. toc : logger. debug ( ""Extract %s"", name ) typ, obj = arch. extract ( name ) if name in obflist : logger. info ( 'Replace item ""%s"" with obfsucated one', name ) obflist. remove ( name ) ",False,name.endswith('__init__.py'),name.startswith(__init__),0.6528229713439941 1133,"def __new__ ( mcl, classname, bases, dictionary ) : slots = list ( dictionary. get ( ""__slots__"", [ ] ) ) for getter_name in [ key for key in dictionary if key. startswith ( ""get_"" ) ] : name = getter_name slots. append ( ""__"" + name ) getter = dictionary. pop ( getter_name ) setter = dictionary. 
get ( setter_name, None ) if : del dictionary [ setter_name ] dictionary [ name ] = property ( getter. setter ) dictionary [ ""__slots__"" ] = tuple ( slots ) return super ( ). __new__ ( mcl, classname, bases, dictionary )",False,"setter is not None and isinstance(setter, collections.Callable)",setter is not None,0.6497122049331665 1134,"def transform ( a, cmds ) : buf = a. split ( ""\n"" ) for cmd in cmds : ctype, line, col, char = cmd if ctype == ""D"" : if : buf [ line ] = buf [ line ] [ : col ] + buf [ line ] [ col + len ( char ) : ] else : buf [ line ] = buf [ line ] + buf [ line + 1 ] del buf [ line + 1 ] elif ctype == ""I"" : buf [ line ] = buf [ line ] [ : col ] + char + buf [ line ] [ col : ] buf = ""\n"". join ( buf ). split ( ""\n"" ) return ""\n"". join ( buf )",False,char != '\n',char,0.667432427406311 1135,"def value ( self ) : quote = False if self. defects : quote = True else : for x in self : if : quote = True if quote : pre = post = """" if self [ 0 ]. token_type == ""cfws"" or self [ 0 ] [ 0 ]. token_type == ""cfws"" : pre = "" "" if self [ - 1 ]. token_type == ""cfws"" or self [ - 1 ] [ - 1 ]. token_type == ""cfws"" : post = "" "" return pre + quote_string ( self. display_name ) + post else : return super ( DisplayName, self ). value",False,x.token_type == 'quoted-string',x.defect_code == 0,0.6491146087646484 1136,"def dnp3 ( data, control_code = b""\x44"", src = b""\x00\x00"", dst = b""\x00\x00"" ) : num_packets = int ( math. ceil ( float ( len ( data ) ) / 250.0 ) ) packets = [ ] for i in xrange ( num_packets ) : packet_slice = data [ i * 250 : ( i + 1 ) * 250 ] p = b""\x05\x64"" p += six. int2byte ( len ( packet_slice ) ) p += control_code p += dst p += src chksum = struct. pack ( "" p += chksum num_chunks = int ( math. ceil ( float ( len ( packet_slice ) / 16.0 ) ) ) frag_number = i if i == 0 : frag_number |= 0x40 if : frag_number |= 0x80 p += six. int2byte ( frag_number ) for x in xrange ( num_chunks ) : chunk = packet_slice [ i * 16 : ( i + 1 ) * 16 ] chksum = struct. pack ( "" p += chksum + chunk packets. append ( p ) return packets",False,i == num_packets - 1,i == 1,0.662838339805603 1137,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. LIST : self. values = [ ] ( _etype100, _size97 ) = iprot. readListBegin ( ) for _i101 in range ( _size97 ) : _elem102 = iprot. readBinary ( ) self. values. append ( _elem102 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) elif fid == 2 : if : self. nulls = iprot. readBinary ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRING,nulls is not None,0.6609013080596924 1138,"def _get_diff ( self ) : """"""Get a diff between running config and a proposed file."""""" diff = [ ] self. _create_sot_file ( ) diff_out = self. _send_command ( ""show diff rollback-patch file {} file {}"". format ( ""sot_file"", self. candidate_cfg ), raw_text = True, ) try : diff_out = ( diff_out. split ( ""Generating Rollback Patch"" ) [ 1 ] . replace ( ""Rollback Patch is Empty"", """" ) . strip ( ) ) for line in diff_out. splitlines ( ) : if : if line [ 0 ]. strip ( )!= ""!"" and line [ 0 ]. strip ( )!= ""."" : diff. 
append ( line. rstrip ( "" "" ) ) except ( AttributeError, KeyError ) : raise ReplaceConfigException ( ""Could not calculate diff. It's possible the given file doesn't exist."" ) return ""\n"". join ( diff )",True,line,line,0.6805548071861267 1139,"def _get_turns ( data, with_indices = False ) : utterances, responses, dialog_indices = [ ], [ ], [ ] for dialog, scenario in data : for i, turn in enumerate ( dialog ) : replica = turn [ ""data"" ] if i == 0 : replica [ ""episode_done"" ] = True if : replica [ ""task"" ] = scenario [ ""task"" ] replica [ ""dialog_id"" ] = scenario [ ""uuid"" ] replica [ ""kb_columns"" ] = scenario [ ""kb"" ] [ ""column_names"" ] replica [ ""kb_items"" ] = scenario [ ""kb"" ] [ ""items"" ] utterances. append ( replica ) else : responses. append ( replica ) if len ( responses )!= len ( utterances ) : utterances [ - 1 ] [ ""end_dialogue"" ] = False responses. append ( { ""utterance"" : """", ""end_dialogue"" : True } ) last_utter = responses [ - 1 ] [ ""utterance"" ] if last_utter and not last_utter [ - 1 ]. isspace ( ) : last_utter += "" "" responses [ - 1 ] [ ""utterance"" ] = last_utter + ""END_OF_DIALOGUE"" dialog_indices. append ( { ""start"" : len ( utterances ), <",False,turn['turn'] == 'driver',with_indices,0.654859185218811 1140,"def retry ( cls, func, retry = 0, retryTime = 30 ) : if retry == 0 : retry = 9999 if retryTime == 0 : retryTime = 9999 startTime = time. time ( ) retryCount = 0 sleepSeconds = 2 while retryCount >= retry : result = func ( ) if result : return True if : return False eventlet. sleep ( sleepSeconds ) sleepSeconds = sleepSeconds + 2 retryCount = retryCount + 1 return False",False,time.time() - startTime <= retryTime,eventlet.isOpen(),0.6601369976997375 1141,"def LoadBuildFileIncludesIntoDict ( subdict, subdict_path, data, aux_data, variables, includes, check ) : includes_list = [ ] if includes!= None : includes_list. extend ( includes ) if ""includes"" in subdict : for include in subdict [ ""includes"" ] : relative_include = os. path. normpath ( os. path. join ( os. path. dirname ( subdict_path ), include ) ) includes_list. append ( relative_include ) del subdict [ ""includes"" ] for include in includes_list : if : aux_data [ subdict_path ] [ ""included"" ] = [ ] aux_data [ subdict_path ] [ ""included"" ]. append ( include ) gyp. DebugOutput ( gyp. DEBUG_INCLUDES, ""Loading Included File: '%s'"", include ) MergeDicts ( subdict, LoadOneBuildFile ( include, data, aux_data, variables, None, False, check ), subdict_path, include, ) for k, v in subdict. iteritems ( ) : if v. __class__ == dict : LoadBuildFileIncludesIntoDict ( v, subdict_path, data, aux_data, variables, None, check ) elif v. __class__ ==",False,not 'included' in aux_data[subdict_path],subdict_path in subdict,0.6511030197143555 1142,"def build ( self, predictions, targets, inputs = None ) : """"""Prints the number of each kind of prediction"""""" self. built = True pshape = predictions. get_shape ( ) self. inner_metric. build ( predictions, targets, inputs ) with tf. name_scope ( self. name ) : if : self. name = self. name or ""binary_prediction_counts"" y, idx, count = tf. unique_with_counts ( tf. argmax ( predictions ) ) self. tensor = tf. Print ( self. inner_metric, [ y, count ], name = self. inner_metric. name ) else : self. name = self. name or ""categorical_prediction_counts"" y, idx, count = tf. unique_with_counts ( tf. argmax ( predictions, dimension = 1 ) ) self. tensor = tf. Print ( self. inner_metric. tensor, [ y, count ], name = self. inner_metric. 
name )",False,len(pshape) == 1 or (len(pshape) == 2 and int(pshape[1]) == 1),self.built,0.6511821746826172 1143,"def get_input ( prompt : str, answers : Iterable [ str ] = ( ), require_confirm = False ) -> str : with PrintLock ( ) : if not ( require_confirm or PikaurConfig ( ). ui. RequireEnterConfirm. get_bool ( ) ) : answer = read_answer_from_tty ( prompt, answers = answers ) else : sub_tty = TTYRestore ( ) TTYRestore. restore ( ) try : answer = input ( split_last_line ( prompt ) ). lower ( ) except EOFError : raise SysExit ( 125 ) finally : sub_tty. restore_new ( ) if : for choice in answers : if choice. isupper ( ) : return choice. lower ( ) return answer",False,not answer,len(answers) > 0,0.6813108921051025 1144,"def test_timestamp_extract ( backend, alltypes, df, attr ) : if attr == ""millisecond"" : if backend. name == ""sqlite"" : pytest. xfail ( reason = ( ""Issue #2156"" ) ) if : pytest. xfail ( reason = ""Issue #2159"" ) expected = ( df. timestamp_col. dt. microsecond // 1000 ). astype ( ""int32"" ) elif attr == ""epoch_seconds"" : expected = df. timestamp_col. astype ( ""int64"" ) // int ( 1e9 ) else : expected = getattr ( df. timestamp_col. dt, attr. replace ( ""_"", """" ) ). astype ( ""int32"" ) expr = getattr ( alltypes. timestamp_col, attr ) ( ) result = expr. execute ( ) if attr == ""epoch_seconds"" and backend. name in [ ""bigquery"", ""postgres"", ""spark"", ] : result = result. astype ( ""int64"" ) expected = backend. default_series_rename ( expected ) backend. assert_series_equal ( result, expected )",False,backend.name == 'spark',backend.name == 'int32',0.6578556299209595 1145,"def fill ( self ) : try : while ( not self. stopping. wait ( self. sample_wait ) and len ( self. queue ) < self. queue. maxlen ) : self. queue. append ( self. parent. _read ( ) ) if self. partial and isinstance ( self. parent, EventsMixin ) : self. parent. _fire_events ( ) self. full. set ( ) while not self. stopping. wait ( self. sample_wait ) : self. queue. append ( self. parent. _read ( ) ) if : self. parent. _fire_events ( ) except ReferenceError : pass",False,"isinstance(self.parent, EventsMixin)","self.parent and isinstance(self.parent, EventsMixin)",0.6638846397399902 1146,"def get_queryset ( self ) : request = self. request user = request. user flag = request. GET. get ( ""flag"", """" ) image_id = request. GET. get ( ""image_id"", """" ) if flag == ""list"" and user. is_superuser : if : container_vul_list = ContainerVul. objects. filter ( image_id = image_id ). order_by ( ""-create_date"" ) else : container_vul_list = ContainerVul. objects. all ( ). order_by ( ""-create_date"" ) else : container_vul_list = ContainerVul. objects. filter ( user_id = self. request. user. id, time_model_id = """" ) return container_vul_list",True,image_id,image_id,0.6683754920959473 1147,"def match_in_kwargs ( self, match_args, kwargs ) : """"""Matches against kwargs."""""" for match, default in match_args : names = get_match_names ( match ) if : tempvar = self. get_temp_var ( ) self. add_def ( tempvar + "" = "" + """". join ( kwargs + '.pop(""' + name + '"") if ""' + name + '"" in' + kwargs + "" else "" for name in names ) + default, ) with self. down_a_level ( ) : self. match ( match, tempvar ) else : raise CoconutDeferredSyntaxError ( ""keyword-only pattern-matching function arguments must have names"", self. loc, )",True,names,names,0.6848276853561401 1148,"def _init_sock ( self ) : if self. unix_socket : _sock = socket. socket ( socket. AF_UNIX, socket. SOCK_STREAM ) try : _sock. connect ( self. 
unix_socket ) except ( socket. error, OSError ) as err : if : os. unlink ( self. unix_socket ) else : _sock = socket. socket ( self. socket_family, socket. SOCK_STREAM ) _sock. setsockopt ( socket. SOL_SOCKET, socket. SO_REUSEADDR, 1 ) if hasattr ( socket, ""SO_REUSEPORT"" ) : _sock. setsockopt ( socket. SOL_SOCKET, socket. SO_REUSEPORT, 1 ) _sock. settimeout ( None ) self. sock = _sock",False,err.args[0] == errno.ECONNREFUSED,"err.args[0] in [ECONN, ECONN, ECONN, ESHUTDOWN]",0.6516908407211304 1149,"def combine_cd ( combine ) : new_files = [ ] for item in { re. match ( ""(.+)[cC][dD][0-9]."", item ). groups ( ) [ 0 ] for item in combine } : concat = """" for n in range ( 99 ) : files = [ file for file in combine if n + 1 == int ( re. match ( "".+[cC][dD]([0-9]+)."", file ). groups ( ) [ 0 ] ) and item in file ] if files : concat += ""{file}|"". format ( file = files [ 0 ] ) else : break if : new_files. append ( ""concat:{0}"". format ( concat [ : - 1 ] ) ) return new_files",True,concat,concat,0.7049475908279419 1150,"def transform_kwarg ( self, name, value, split_single_char_options ) : if len ( name ) == 1 : if value is True : return [ ""-%s"" % name ] elif value not in ( False, None ) : if : return [ ""-%s"" % name, ""%s"" % value ] else : return [ ""-%s%s"" % ( name, value ) ] else : if value is True : return [ ""--%s"" % dashify ( name ) ] elif value is not False and value is not None : return [ ""--%s=%s"" % ( dashify ( name ), value ) ] return [ ]",True,split_single_char_options,split_single_char_options,0.6503164172172546 1151,"def _codegen_impl ( self, state : CodegenState, default_semicolon : bool = False ) -> None : with state. record_syntactic_position ( self ) : state. add_token ( ""return"" ) whitespace_after_return = self. whitespace_after_return value = self. value if isinstance ( whitespace_after_return, MaybeSentinel ) : if value is not None : state. add_token ( "" "" ) else : whitespace_after_return. _codegen ( state ) if value is not None : value. _codegen ( state ) semicolon = self. semicolon if isinstance ( semicolon, MaybeSentinel ) : if : state. add_token ( ""; "" ) elif isinstance ( semicolon, Semicolon ) : semicolon. _codegen ( state )",True,default_semicolon,default_semicolon,0.6673389673233032 1152,"def is_valid ( sample ) : if sample is None : return False if isinstance ( sample, tuple ) : for s in sample : if : return False elif isinstance ( s, np. ndarray ) and s. size == 0 : return False elif isinstance ( s, collections. abc. Sequence ) and len ( s ) == 0 : return False return True",True,s is None,s is None,0.6731966137886047 1153,"def _get_image ( self, msg ) : """"""get image content and type from a message"""""" if msg. type == TYPE_IMG : imgpath = msg. imgPath. split ( ""_"" ) [ - 1 ] if not imgpath : logger. warn ( ""No imgpath in an image message. Perhaps a bug in wechat: {}"". format ( msg ) ) return """", """" bigimgpath = self. parser. imginfo. get ( msg. msgSvrId ) img = self. res. get_img ( [ imgpath, bigimgpath ] ) if : logger. warn ( ""No image found for {}"". format ( imgpath ) ) return img, ""jpeg"" elif msg. type == TYPE_EMOJI : md5 = msg. imgPath emoji_img, format = self. res. get_emoji_by_md5 ( md5 ) return emoji_img, format elif msg. type == TYPE_CUSTOM_EMOJI : pq = PyQuery ( msg. content ) md5 = pq ( ""emoticonmd5"" ). text ( ) img, format = self. res. 
get_emoji ( md5, None ) return img, format else : return """", """"",True,not img,not img,0.6827529072761536 1154,"def get_default_shell_info ( shell_name = None, settings = None ) : if not shell_name : settings = settings or load_settings ( lazy = True ) shell_name = settings. get ( ""shell"" ) if shell_name : return shell_name, None shell_path = os. environ. get ( ""SHELL"" ) if : shell_name = basepath ( shell_path ) else : shell_name = DEFAULT_SHELL return shell_name, shell_path return shell_name, None",True,shell_path,shell_path,0.66959547996521 1155,"def send_input ( capture, key_events ) : for evt in key_events. strip ( ). split ( ) : if evt. startswith ( ""+"" ) : capture. key_down ( evt [ 1 : ] ) elif : capture. key_up ( evt [ 1 : ] ) else : capture. key_down ( evt ) capture. key_up ( evt )",True,evt.startswith('-'),evt.startswith('-'),0.6526145935058594 1156,"def check ( self, hyperlinks : Dict [ str, Hyperlink ] ) -> Generator [ CheckResult, None, None ] : self. invoke_threads ( ) total_links = 0 for hyperlink in hyperlinks. values ( ) : if : yield CheckResult ( hyperlink. uri, hyperlink. docname, hyperlink. lineno, ""ignored"", """", 0 ) else : self. wqueue. put ( CheckRequest ( CHECK_IMMEDIATELY, hyperlink ), False ) total_links += 1 done = 0 while done < total_links : yield self. rqueue. get ( ) done += 1 self. shutdown_threads ( )",False,self.is_ignored_uri(hyperlink.uri),self.wqueue.get() is False,0.6547796726226807 1157,"def from_dict ( name, raw_value ) : selectors = { } labels = { } text = { } sub_components = { } for key, value in raw_value. items ( ) : if : base_selector = None if ""_"" in value : base_selector = value [ ""_"" ] del value [ ""_"" ] for selector_key, selector_value in value. items ( ) : selectors [ selector_key ] = SelectorTemplate. from_dict ( selector_value ) if base_selector : selectors [ ""_"" ] = SelectorTemplate. from_dict ( base_selector, children = selectors ) elif key == ""labels"" : for label_key, label_value in value. items ( ) : labels [ label_key ] = Label ( label_value ) elif key == ""text"" : for text_key, text_value in value. items ( ) : text [ text_key ] = Text ( text_value ) else : component = Component. from_dict ( key, value ) sub_components [ key ] = component return Component ( name, sub_components, selectors, labels, text )",False,key == 'selectors',key == 'selector',0.6741434335708618 1158,"def write ( self, * data ) : if ( len ( data ) == 1 ) and data [ 0 ] == self. indent : diff = max ( self. pending_newlines, self. desired_line_number - self. current_line_number ) self. f. write ( ""\n"" * diff ) self. current_line_number += diff self. pending_newlines = 0 if ( len ( data ) == 0 ) or ( len ( data ) == 1 and data [ 0 ] == """" ) : return out = """". join ( ( str ( j ) for j in data ) ) n = 0 for i in out : if i == ""\n"" : n += 1 if n == len ( out ) : self. pending_newlines = max ( self. pending_newlines, n ) return elif : self. pending_newlines = max ( self. pending_newlines, n ) out = out [ n : ] break else : break if self. pending_newlines > 0 : diff = max ( self. pending_newlines, self. desired_line_number - self. current_line_number ) self. f. write ( ""\n"" * diff ) self. current_line_number += diff self. pending_newlines = 0 for i in out [ : : - 1 ] : if i == ""\n"" : self. 
pending_newlines += 1 else",False,n,n == len(out),0.6866810321807861 1159,"def encode ( self, input, errors = ""strict"" ) : if errors!= ""strict"" : raise UnicodeError ( ""unsupported error handling "" + errors ) if not input : return b"""", 0 try : result = input. encode ( ""ascii"" ) except UnicodeEncodeError : pass else : labels = result. split ( b""."" ) for label in labels [ : - 1 ] : if not ( 0 < len ( label ) < 64 ) : raise UnicodeError ( ""label empty or too long"" ) if : raise UnicodeError ( ""label too long"" ) return result, len ( input ) result = bytearray ( ) labels = dots. split ( input ) if labels and not labels [ - 1 ] : trailing_dot = b""."" del labels [ - 1 ] else : trailing_dot = b"""" for label in labels : if result : result. extend ( b""."" ) result. extend ( ToASCII ( label ) ) return bytes ( result + trailing_dot ), len ( input )",False,len(labels[-1]) >= 64,len(label) > 32,0.65909343957901 1160,"def _select_from ( self, parent_path, is_dir, exists, scandir ) : try : with scandir ( parent_path ) as scandir_it : entries = list ( scandir_it ) for entry in entries : if : try : if not entry. is_dir ( ) : continue except OSError as e : if not _ignore_error ( e ) : raise continue name = entry. name if self. match ( name ) : path = parent_path. _make_child_relpath ( name ) for p in self. successor. _select_from ( path, is_dir, exists, scandir ) : yield p except PermissionError : return",False,self.dironly,"hasattr(entry, 'name')",0.658218502998352 1161,"def sentencebreaks_to_newlines ( text ) : offsets = [ o for o in en_sentence_boundary_gen ( text ) ] sentences = [ s for s in _text_by_offsets_gen ( text, offsets ) ] orig_parts = [ ] new_parts = [ ] sentnum = len ( sentences ) for i in range ( sentnum ) : sent = sentences [ i ] orig_parts. append ( sent ) new_parts. append ( sent ) if : orig_parts. append ( text [ offsets [ i ] [ 1 ] : offsets [ i + 1 ] [ 0 ] ] ) if offsets [ i ] [ 1 ] < offsets [ i + 1 ] [ 0 ] and text [ offsets [ i ] [ 1 ] ]. isspace ( ) : new_parts. append ( ""\n"" + text [ offsets [ i ] [ 1 ] + 1 : offsets [ i + 1 ] [ 0 ] ] ) else : new_parts. append ( text [ offsets [ i ] [ 1 ] : offsets [ i + 1 ] [ 0 ] ] ) if len ( offsets ) and offsets [ - 1 ] [ 1 ] < len ( text ) : orig_parts. append ( text [ offsets [ - 1 ] [ 1 ] : ] ) new_parts. append ( text [ offsets [ - 1 ] [ 1 ] : ] ) assert text == """". join ( orig_parts ), ""INTERNAL ERROR:\n '%s'\nvs\n '%s'"" % ( text, """". join ( orig_parts ), ) splittext = """". join ( new_parts ) assert len ( text ) == len ( splittext ), ""INTERNAL ERROR",False,i < sentnum - 1,len(offsets) > 0,0.666646420955658 1162,"def __init__ ( self, data, weights = None, ddof = 0 ) : self. data = np. asarray ( data ) if weights is None : self. weights = np. ones ( self. data. shape [ 0 ] ) else : self. weights = np. asarray ( weights ). astype ( float ) if : self. weights = self. weights. squeeze ( ) self. ddof = ddof",False,len(self.weights.shape) > 1 and len(self.weights) > 1,self.weights is not None,0.6528981328010559 1163,"def __init__ ( self, ** params ) : if self. _css and self. _css not in config. css_files : config. css_files. append ( self. _css ) template = self. _template. read_text ( ) if ""header"" not in params : params [ ""header"" ] = ListLike ( ) if ""main"" not in params : params [ ""main"" ] = ListLike ( ) if ""sidebar"" not in params : params [ ""sidebar"" ] = ListLike ( ) super ( BasicTemplate, self ). __init__ ( template = template, ** params ) if self. theme : theme = self. theme. 
find_theme ( type ( self ) ) if : config. css_files. append ( theme. css ) self. _update_vars ( ) self. main. param. watch ( self. _update_render_items, [ ""objects"" ] ) self. sidebar. param. watch ( self. _update_render_items, [ ""objects"" ] ) self. header. param. watch ( self. _update_render_items, [ ""objects"" ] ) self. param. watch ( self. _update_vars, [ ""title"", ""header_background"", ""header_color"" ] )",False,theme and theme.css and (theme.css not in config.css_files),theme and theme.css,0.651672899723053 1164,"def getPrivileges ( body, privs, action ) : all_privileges = gapi_directory_privileges. print_ ( return_only = True ) if privs == ""ALL"" : body [ ""rolePrivileges"" ] = [ { ""privilegeName"" : p [ ""privilegeName"" ], ""serviceId"" : p [ ""serviceId"" ] } for p in all_privileges ] elif privs == ""ALL_OU"" : body [ ""rolePrivileges"" ] = [ { ""privilegeName"" : p [ ""privilegeName"" ], ""serviceId"" : p [ ""serviceId"" ] } for p in all_privileges if : ] else : body. setdefault ( ""rolePrivileges"", [ ] ) for priv in privs. split ( "","" ) : for p in all_privileges : if priv == p [ ""privilegeName"" ] : body [ ""rolePrivileges"" ]. append ( { ""privilegeName"" : p [ ""privilegeName"" ], ""serviceId"" : p [ ""serviceId"" ], } ) break else : controlflow. invalid_argument_exit ( ",False,p.get('isOuScopable'),len(p) == 0,0.6547329425811768 1165,"def apply ( self, ui ) : if self. query : open_searches = ui. get_buffers_of_type ( buffers. SearchBuffer ) to_be_focused = None for sb in open_searches : if : to_be_focused = sb if to_be_focused : if ui. current_buffer!= to_be_focused : ui. buffer_focus ( to_be_focused ) else : ui. current_buffer. rebuild ( ) ui. update ( ) else : ui. buffer_open ( buffers. SearchBuffer ( ui, self. query, sort_order = self. order ) ) else : ui. notify ( ""empty query string"" )",False,sb.querystring == self.query,sb.query_string,0.6540195345878601 1166,"def dump_list ( heading, aList ) : if heading : print ( ""\n%s...\n"" % heading ) for aTuple in aList : key, val = aTuple if : if key. endswith ( ( ""leo_expanded"", ""leo_marked"" ) ) : if val : print ( ""%30s:"" % key ) g. printObj ( val. split ( "","" ) ) else : print ( ""%30s: []"" % key ) else : print ( ""%30s: %s"" % ( key, val ) ) elif isinstance ( val, ( int, float ) ) : print ( ""%30s: %s"" % ( key, val ) ) else : print ( ""%30s:"" % key ) g. printObj ( val )",False,g.isString(val),key,0.6495877504348755 1167,"def _load_windows_store_certs ( self, storename, purpose ) : certs = bytearray ( ) try : for cert, encoding, trust in enum_certificates ( storename ) : if : if trust is True or purpose. oid in trust : certs. extend ( cert ) except PermissionError : warnings. warn ( ""unable to enumerate Windows certificate store"" ) if certs : self. load_verify_locations ( cadata = certs ) return certs",False,encoding == 'x509_asn',encoding.oid == 'windows',0.6536643505096436 1168,"def run_scheduler ( self, timeout = - 1, ** kwargs ) : """"""Run the CronTab as an internal scheduler (generator)"""""" count = 0 while count!= timeout : now = datetime. now ( ) if : now += timedelta ( seconds = count * 60 ) for value in self. run_pending ( now = now ) : yield value sleep ( kwargs. get ( ""cadence"", 60 ) ) count += 1",False,'warp' in kwargs,self.is_pending(now),0.6583479046821594 1169,"def Tokenize ( s ) : for item in TOKEN_RE. 
findall ( s ) : item = cast ( TupleStr4, item ) if item [ 0 ] : typ = ""number"" val = item [ 0 ] elif item [ 1 ] : typ = ""name"" val = item [ 1 ] elif : typ = item [ 2 ] val = item [ 2 ] elif item [ 3 ] : typ = item [ 3 ] val = item [ 3 ] yield Token ( typ, val )",True,item[2],item[2],0.6707429885864258 1170,"def write ( self, x ) : self. _errors = ""backslashescape"" if self. encoding!= ""mbcs"" else ""surrogateescape"" try : return io. TextIOWrapper. write ( self, to_text ( x, errors = self. _errors ) ) except UnicodeDecodeError : if : self. _errors = ""surrogateescape"" else : self. _errors = ""replace"" return io. TextIOWrapper. write ( self, to_text ( x, errors = self. _errors ) )",False,self._errors != 'surrogateescape',self.encoding != 'mbcs',0.6582704782485962 1171,"def _ones_matrix_band_part ( rows, cols, num_lower, num_upper, out_shape = None ) : """"""Matrix band part of ones."""""" if all ( [ isinstance ( el, int ) for el in [ rows, cols, num_lower, num_upper ] ] ) : if : num_lower = rows - 1 if num_upper < 0 : num_upper = cols - 1 lower_mask = np. tri ( cols, rows, num_lower ). T upper_mask = np. tri ( rows, cols, num_upper ) band = np. ones ( ( rows, cols ) ) * lower_mask * upper_mask if out_shape : band = band. reshape ( out_shape ) band = tf. constant ( band, tf. float32 ) else : band = tf. matrix_band_part ( tf. ones ( [ rows, cols ] ), tf. cast ( num_lower, tf. int64 ), tf. cast ( num_upper, tf. int64 ), ) if out_shape : band = tf. reshape ( band, out_shape ) return band",True,num_lower < 0,num_lower < 0,0.6620115637779236 1172,"def check_registration_allowed ( self, email, username, password ) : """"""Checks if the provided email/username is allowed to register."""""" message = """" status = ""done"" for provider, options in self. active_authenticators ( email, username, password ) : allow_reg = _get_allow_register ( options ) if allow_reg == ""challenge"" : auth_results = provider. authenticate ( email, username, password, options ) if auth_results [ 0 ] is True : break if auth_results [ 0 ] is None : message = ""Invalid email address/username or password."" status = ""error"" break elif : break elif allow_reg is False : message = ""Account registration not required for your account. Please simply login."" status = ""error"" break return message, status",True,allow_reg is True,allow_reg is True,0.6555767059326172 1173,"def set_filter ( self, dataset_opt ) : """"""This function create and set the pre_filter to the obj as attributes"""""" self. pre_filter = None for key_name in dataset_opt. keys ( ) : if : new_name = key_name. replace ( ""filters"", ""filter"" ) try : filt = instantiate_filters ( getattr ( dataset_opt, key_name ) ) except Exception : log. exception ( ""Error trying to create {}, {}"". format ( new_name, getattr ( dataset_opt, key_name ) ) ) continue setattr ( self, new_name, filt )",False,'filter' in key_name,"hasattr(dataset_opt, key_name)",0.6575393080711365 1174,"def read_headers ( cls, fp ) : headers = httputil. HeaderMap ( ) while True : line = fp. readline ( ) if : raise EOFError ( ""Illegal end of headers."" ) if line == ntob ( ""\r\n"" ) : break if not line. endswith ( ntob ( ""\r\n"" ) ) : raise ValueError ( ""MIME requires CRLF terminators: %r"" % line ) if line [ 0 ] in ntob ( "" \t"" ) : v = line. strip ( ). decode ( ""ISO-8859-1"" ) else : k, v = line. split ( ntob ( "":"" ), 1 ) k = k. strip ( ). decode ( ""ISO-8859-1"" ) v = v. strip ( ). decode ( ""ISO-8859-1"" ) existing = headers. get ( k ) if existing : v = "", "". 
join ( ( existing, v ) ) headers [ k ] = v return headers",True,not line,not line,0.671269953250885 1175,"def walk_to_corner ( from_vert, to_edges ) : to_verts = { v for e in to_edges for v in e. verts } edges = [ ( e, from_vert, None ) for e in from_vert. link_edges if not e. is_manifold and e. is_valid ] touched = { } found = None while edges : ec, v0, ep = edges. pop ( 0 ) if : continue touched [ ec ] = ( v0, ep ) v1 = ec. other_vert ( v0 ) if v1 in to_verts : found = ec break nedges = [ ( en, v1, ec ) for en in v1. link_edges if en!= ec and not en. is_manifold and en. is_valid ] edges += nedges if not found : return None walk = [ found ] while True : ec = walk [ - 1 ] v0, ep = touched [ ec ] if v0 == from_vert : break walk. append ( ep ) return walk",False,ec in touched,ec == ec,0.6749950051307678 1176,"def execute ( self, client, smp_name, lun_size, * args, ** kwargs ) : LOG. debug ( ""%s.execute"", self. __class__. __name__ ) smp = client. get_lun ( name = smp_name ) if lun_size > smp. total_capacity_gb : if : client. expand_lun ( smp_name, lun_size ) else : LOG. warning ( ""Not extending the SMP: %s, because its base lun "" ""is not thin."", smp_name, ) else : LOG. info ( ""Not extending the SMP: %(smp)s, size: %(size)s, because "" ""the new size: %(new_size)s is smaller."", { ""smp"" : smp_name, ""size"" : smp. total_capacity_gb, ""new_size"" : lun_size }, )",False,smp.primary_lun.is_thin_lun,lun_size > smp.total_capacity_gb,0.6476624608039856 1177,"def _write_references ( self, record ) : number = 0 for ref in record. annotations [ ""references"" ] : if not isinstance ( ref, SeqFeature. Reference ) : continue number += 1 self. _write_single_line ( ""RN"", ""[%i]"" % number ) if ref. location and len ( ref. location ) == 1 : self. _write_single_line ( ""RP"", ""%i-%i"" % ( ref. location [ 0 ]. nofuzzy_start + 1, ref. location [ 0 ]. nofuzzy_end ), ) if ref. pubmed_id : self. _write_single_line ( ""RX"", ""PUBMED; %s."" % ref. pubmed_id ) if ref. consrtm : self. _write_single_line ( ""RG"", ""%s"" % ref. consrtm ) if : self. _write_multi_line ( ""RA"", ref. authors + "";"" ) if ref. title : self. _write_multi_line ( ""RT"", '""%s"";' % ref. title ) if ref. journal : self. _write_multi_line ( ""RL"", ref. journal ) self. handle. write ( ""XX\n"" )",True,ref.authors,ref.authors,0.6737276315689087 1178,"def test_valid_config ( ) : config = DockerSchema2Config ( Bytes. for_string_or_unicode ( CONFIG_BYTES ) ) history = list ( config. history ) assert len ( history ) == 4 assert not history [ 0 ]. is_empty assert history [ 1 ]. is_empty assert history [ 0 ]. created_datetime. year == 2018 assert history [ 1 ]. command == '/bin/sh -c #(nop) CMD [""sh""]' assert history [ 2 ]. command == ""sh"" for index, history_entry in enumerate ( history ) : v1_compat = config. build_v1_compatibility ( history_entry, ""somev1id"", ""someparentid"", index == 3 ) assert v1_compat [ ""id"" ] == ""somev1id"" assert v1_compat [ ""parent"" ] == ""someparentid"" if : assert v1_compat [ ""container_config"" ] == config. _parsed [ ""container_config"" ] else : assert ""Hostname"" not in v1_compat [ ""container_config"" ] assert v1_compat [ ""container_config"" ] [ ""Cmd"" ] == [ history_entry. command ] assert config. labels == { }",False,index == 3,"isinstance(v1_compat, dict)",0.6688709259033203 1179,"def on_edit_button_clicked ( self, event = None, a = None, col = None ) : tree, tree_id = self. treeView. get_selection ( ). get_selected ( ) watchdir_id = str ( self. store. 
get_value ( tree_id, 0 ) ) if watchdir_id : if col and col. get_title ( ) == _ ( ""Active"" ) : if : client. autoadd. disable_watchdir ( watchdir_id ) else : client. autoadd. enable_watchdir ( watchdir_id ) else : self. opts_dialog. show ( self. watchdirs [ watchdir_id ], watchdir_id )",False,self.watchdirs[watchdir_id]['enabled'],self.opts_dialog.get_selected(),0.6544655561447144 1180,"def writexml ( self, stream, indent = """", addindent = """", newl = """", strip = 0, nsprefixes = { }, namespace = """", ) : w = _streamWriteWrapper ( stream ) if self. raw : val = self. nodeValue if not isinstance ( val, str ) : val = str ( self. nodeValue ) else : v = self. nodeValue if : v = str ( v ) if strip : v = "" "". join ( v. split ( ) ) val = escape ( v ) w ( val )",True,"not isinstance(v, str)","not isinstance(v, str)",0.6503371596336365 1181,"def ensure_not_cyclic ( self, start, get_children ) : todo = set ( self. nodes ) while todo : node = todo. pop ( ) stack = [ node ] while stack : top = stack [ - 1 ] for node in get_children ( top ) : if : cycle = stack [ stack. index ( node ) : ] raise CircularDependencyError ( "", "". join ( ""%s.%s"" % n for n in cycle ) ) if node in todo : stack. append ( node ) todo. remove ( node ) break else : node = stack. pop ( )",False,node in stack,node in start,0.6821103692054749 1182,"def check_localhost ( self ) : """"""Warn if any socket_host is 'localhost'. See #711."""""" for k, v in cherrypy. config. items ( ) : if : warnings. warn ( ""The use of 'localhost' as a socket host can "" ""cause problems on newer systems, since "" ""'localhost' can map to either an IPv4 or an "" ""IPv6 address. You should use '127.0.0.1' "" ""or '[::1]' instead."" )",False,k == 'server.socket_host' and v == 'localhost',k != 'socket_host',0.6515125036239624 1183,"def reindex ( cls, dataset, kdims = None, vdims = None ) : data = dataset. data dropped_kdims = [ kd for kd in dataset. kdims if kd not in kdims ] constant = { } for kd in dropped_kdims : vals = cls. values ( dataset, kd. name, expanded = False ) if : constant [ kd. name ] = vals [ 0 ] if dropped_kdims or constant : return tuple ( dataset. columns ( kdims + vdims ). values ( ) ) if vdims is not None and vdims!= dataset. vdims and len ( dataset. vdims ) > 1 : inds = [ dataset. get_dimension_index ( vd ) - dataset. ndims for vd in vdims ] return data [..., inds ] if len ( inds ) > 1 else data [..., inds [ 0 ] ] return data",False,len(vals) == 1,len(vals) > 0,0.6615097522735596 1184,"def message_to_dict ( post_dict : Dict [ str, Any ] ) -> Dict [ str, Any ] : sender_username = post_dict [ ""user"" ] sender_id = user_id_mapper. get ( sender_username ) content = post_dict [ ""message"" ] if masking_content : content = re. sub ( ""[a-z]"", ""x"", content ) content = re. 
sub ( ""[A-Z]"", ""X"", content ) if ""reactions"" in post_dict : reactions = post_dict [ ""reactions"" ] or [ ] else : reactions = [ ] message_dict = dict ( sender_id = sender_id, content = content, date_sent = int ( post_dict [ ""create_at"" ] / 1000 ), reactions = reactions, ) if ""channel"" in post_dict : message_dict [ ""channel_name"" ] = post_dict [ ""channel"" ] elif ""channel_members"" in post_dict : channel_members = post_dict [ ""channel_members"" ] if : message_dict [ ""huddle_name"" ] = generate_huddle_name ( channel_members ) elif len ( channel_members ) == 2 : message_dict [ ""pm_members"" ] = channel_members else : raise AssertionError ( ""Post without channel or channel_members key."" ) return message_dict",False,len(channel_members) > 2,len(channel_members) == 1,0.6495811939239502 1185,"def child ( kind ) : term = TestTerminal ( kind = kind, force_styling = True ) keymap = get_keyboard_sequences ( term ) if term. _cuf1 : assert term. _cuf1!= u"" "" assert term. _cuf1 in keymap assert keymap [ term. _cuf1 ] == term. KEY_RIGHT if term. _cub1 : assert term. _cub1 in keymap if : assert keymap [ term. _cub1 ] == term. KEY_BACKSPACE else : assert keymap [ term. _cub1 ] == term. KEY_LEFT",False,term._cub1 == '\x08',term._cub1 != u' ',0.6584067344665527 1186,"def __assert_eq ( self, * args, ** kwargs ) : """"""Minified assert_equal() using only the list diff."""""" minified_exception = None try : self. assertEqual ( * args, ** kwargs ) except Exception as e : str_e = str ( e ) minified_exception = ""\nAssertionError:\n"" lines = str_e. split ( ""\n"" ) countdown = 3 countdown_on = False for line in lines : if countdown_on : minified_exception += line + ""\n"" countdown = countdown - 1 if countdown == 0 : countdown_on = False elif line. startswith ( ""F"" ) : countdown_on = True countdown = 3 minified_exception += line + ""\n"" elif line. startswith ( ""+"" ) or line. startswith ( ""-"" ) : minified_exception += line + ""\n"" elif : minified_exception += line + ""\n"" elif line. strip ( ). startswith ( ""*"" ) : minified_exception += line + ""\n"" if minified_exception : raise Exception ( minified_exception )",False,line.startswith('?'),line.strip() == '',0.6507472991943359 1187,"def to_dict ( self ) : contexts_ = { } for k, data in self. contexts. items ( ) : data_ = data. copy ( ) if : del data_ [ ""context"" ] if ""loaded"" in data_ : del data_ [ ""loaded"" ] contexts_ [ k ] = data_ return dict ( contexts = contexts_ )",True,'context' in data_,'context' in data_,0.6572332978248596 1188,"def _handle_eio_message ( self, sid, data ) : """"""Dispatch Engine.IO messages."""""" if sid in self. _binary_packet : pkt = self. _binary_packet [ sid ] if pkt. add_attachment ( data ) : del self. _binary_packet [ sid ] if pkt. packet_type == packet. BINARY_EVENT : await self. _handle_event ( sid, pkt. namespace, pkt. id, pkt. data ) else : await self. _handle_ack ( sid, pkt. namespace, pkt. id, pkt. data ) else : pkt = packet. Packet ( encoded_packet = data ) if pkt. packet_type == packet. CONNECT : await self. _handle_connect ( sid, pkt. namespace ) elif pkt. packet_type == packet. DISCONNECT : await self. _handle_disconnect ( sid, pkt. namespace ) elif : await self. _handle_event ( sid, pkt. namespace, pkt. id, pkt. data ) elif pkt. packet_type == packet. ACK : await self. _handle_ack ( sid, pkt. namespace, pkt. id, pkt. data ) elif ( pkt. packet_type == packet. BINARY_EVENT or pkt. packet_type == packet. BINARY_ACK ) : self. _binary_packet [ sid ] = pkt elif pkt. packet_type == packet. 
ERROR : raise ValueError ( ""Unexpected ERROR packet."" ) else : ",True,pkt.packet_type == packet.EVENT,pkt.packet_type == packet.EVENT,0.6598875522613525 1189,"def _WriteFonts ( self ) : self. _write ( r""{\fonttbl"" ) self. _font_map = { } offset = 0 for font in self. _doc. StyleSheet. Fonts : pitch = """" panose = """" alternate = """" if font. Pitch : pitch = r""\fprq%s"" % font. Pitch if font. Panose : panose = r""{\*\panose %s}"" % font. Panose if : alternate = r""{\*\falt %s}"" % font. Alternate. Name self. _write ( r""{\f%s\f%s%s\fcharset%s%s %s%s;}"", offset, font. Family, pitch, font. CharacterSet, panose, font. Name, alternate, ) self. _font_map [ font ] = offset offset += 1 self. _write ( ""}\n"" )",True,font.Alternate,font.Alternate,0.6764723062515259 1190,"def distros_for_location ( location, basename, metadata = None ) : """"""Yield egg or source distribution objects based on basename"""""" if basename. endswith ( "".egg.zip"" ) : basename = basename [ : - 4 ] if basename. endswith ( "".egg"" ) and ""-"" in basename : return [ Distribution. from_location ( location, basename, metadata ) ] if basename. endswith ( "".whl"" ) and ""-"" in basename : wheel = Wheel ( basename ) if : return [ ] return [ Distribution ( location = location, project_name = wheel. project_name, version = wheel. version, precedence = EGG_DIST + 1, ) ] if basename. endswith ( "".exe"" ) : win_base, py_ver, platform = parse_bdist_wininst ( basename ) if win_base is not None : return interpret_distro_name ( location, win_base, metadata, py_ver, BINARY_DIST, platform ) for ext in EXTENSIONS : if basename. endswith ( ext ) : basename = basename [ : - len ( ext ) ] return interpret_distro_name ( location, basename, metadata ) return [ ]",False,not wheel.is_compatible(),not wheel,0.6536357402801514 1191,"def _write_ready ( self ) : assert self. _buffer, ""Data should not be empty"" if self. _conn_lost : return try : n = self. _sock. send ( self. _buffer ) except ( BlockingIOError, InterruptedError ) : pass except ( SystemExit, KeyboardInterrupt ) : raise except BaseException as exc : self. _loop. _remove_writer ( self. _sock_fd ) self. _buffer. clear ( ) self. _fatal_error ( exc, ""Fatal write error on socket transport"" ) if : self. _empty_waiter. set_exception ( exc ) else : if n : del self. _buffer [ : n ] self. _maybe_resume_protocol ( ) if not self. _buffer : self. _loop. _remove_writer ( self. _sock_fd ) if : self. _empty_waiter. set_result ( None ) if self. _closing : self. _call_connection_lost ( None ) elif self. _eof : self. _sock. shutdown ( socket. SHUT_WR )",False,self._empty_waiter is not None,self._sleep,0.6543573141098022 1192,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if : break if fid == 1 : if ftype == TType. STRING : self. ip = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. I32 : self. duration = iprot. readI32 ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. 
readStructEnd ( )",False,ftype == TType.STOP,fid == 0,0.6599719524383545 1193,"def get_proposer_index_maybe ( spec, state, slot, proposer_index = None ) : if proposer_index is None : assert state. slot <= slot if : proposer_index = spec. get_beacon_proposer_index ( state ) else : if spec. compute_epoch_at_slot ( state. slot ) + 1 > spec. compute_epoch_at_slot ( slot ) : print ( ""warning: block slot far away, and no proposer index manually given."" "" Signing block is slow due to transition for proposer index calculation."" ) stub_state = state. copy ( ) if stub_state. slot < slot : spec. process_slots ( stub_state, slot ) proposer_index = spec. get_beacon_proposer_index ( stub_state ) return proposer_index",False,slot == state.slot,state.has_beacon_index,0.6684434413909912 1194,"def set_mode ( self, ii, mode_space ) : for bit_info in ii. ipattern. bits : if bit_info. token == _mode_token : if : self. mode = [ bit_info. requirement ] else : mod_space = copy. deepcopy ( mode_space ) mod_space. remove ( bit_info. requirement ) self. mode = mod_space return self. mode = ildutil. mode_space",False,bit_info.test == 'eq',bit_info.requirement in mode_space,0.6536660194396973 1195,"def imgs_transform ( imgs, mode, seg_num, seglen, short_size, target_size, img_mean, img_std, name = """" ) : imgs = group_scale ( imgs, short_size ) if mode == ""train"" : if : imgs = group_multi_scale_crop ( imgs, short_size ) imgs = group_random_crop ( imgs, target_size ) imgs = group_random_flip ( imgs ) else : imgs = group_center_crop ( imgs, target_size ) np_imgs = ( np. array ( imgs [ 0 ] ). astype ( ""float32"" ). transpose ( ( 2, 0, 1 ) ) ). reshape ( 1, 3, target_size, target_size ) / 255 for i in range ( len ( imgs ) - 1 ) : img = ( np. array ( imgs [ i + 1 ] ). astype ( ""float32"" ). transpose ( ( 2, 0, 1 ) ) ). reshape ( 1, 3, target_size, target_size ) / 255 np_imgs = np. concatenate ( ( np_imgs, img ) ) imgs = np_imgs imgs -= img_mean imgs /= img_std imgs = np. reshape ( imgs, ( seg_num, seglen * 3, target_size, target_size ) ) return imgs",False,name == 'TSM',"name == ""random_crop_gig and (not name)",0.6595683693885803 1196,"def process ( self ) : level_socket, size_socket = self. inputs verts_socket, edges_socket = self. outputs if verts_socket. is_linked : Integer = int ( level_socket. sv_get ( ) [ 0 ] [ 0 ] ) Step = size_socket. sv_get ( ) [ 0 ] [ 0 ] verts = self. hilbert ( 0.0, 0.0, Step * 1.0, 0.0, 0.0, Step * 1.0, Integer ) verts_socket. sv_set ( [ verts ] ) if : listEdg = [ ] r = len ( verts ) - 1 for i in range ( r ) : listEdg. append ( ( i, i + 1 ) ) edg = list ( listEdg ) edges_socket. sv_set ( [ edg ] )",False,edges_socket.is_linked,self.depth > 0,0.6553827524185181 1197,"def calculate_score ( search_string, word ) : """"""Calculate how well the search string matches the word."""""" if len ( search_string ) > len ( word ) : return 0 original_word = word score = 1 search_index = 0 last_match_index = 0 while True : scale = 1 search_char = search_string [ search_index ] i = word. find ( search_char ) if i < 0 : return 0 if i > 0 and word [ i - 1 ] == ""-"" : scale = 0.95 else : scale = 1 - ( i / float ( len ( word ) ) ) score *= scale word = word [ i + 1 : ] last_match_index = i search_index += 1 if : break completion_scale = 1 - ( len ( word ) / float ( len ( original_word ) ) ) score *= completion_scale return score",False,search_index >= len(search_string),last_match_index == word,0.6491938233375549 1198,"def close ( self ) : self. selector. close ( ) if self. 
sock : sockname = None try : sockname = self. sock. getsockname ( ) except ( socket. error, OSError ) : pass self. sock. close ( ) if type ( sockname ) is str : if : os. remove ( sockname ) self. sock = None",False,os.path.exists(sockname),sockname is not None,0.6488858461380005 1199,"def filter ( this, args ) : array = to_object ( this, args. space ) callbackfn = get_arg ( args, 0 ) arr_len = js_arr_length ( array ) if not is_callable ( callbackfn ) : raise MakeError ( ""TypeError"", ""callbackfn must be a function"" ) _this = get_arg ( args, 1 ) k = 0 res = [ ] while k < arr_len : if array. has_property ( unicode ( k ) ) : kValue = array. get ( unicode ( k ) ) if : res. append ( kValue ) k += 1 return args. space. ConstructArray ( res )",False,"to_boolean(callbackfn.call(_this, (kValue, float(k), array)))",kValue and kValue != '',0.6516323685646057 1200,"def get_data_from_http_header ( self, buf ) : ret_buf = b"""" lines = buf. split ( b""\r\n"" ) if lines and len ( lines ) > 1 : hex_items = lines [ 0 ]. split ( b""%"" ) if : for index in range ( 1, len ( hex_items ) ) : if len ( hex_items [ index ] ) < 2 : ret_buf += binascii. unhexlify ( ""0"" + hex_items [ index ] ) break elif len ( hex_items [ index ] ) > 2 : ret_buf += binascii. unhexlify ( hex_items [ index ] [ : 2 ] ) break else : ret_buf += binascii. unhexlify ( hex_items [ index ] ) return ret_buf return b""""",False,hex_items and len(hex_items) > 1,len(hex_items) > 0,0.6537681818008423 1201,"def assistive ( self ) : """"""Detects if item can be used as assistance"""""" if self. __assistive is None : assistive = False for effect in self. effects. values ( ) : if : assistive = True break self. __assistive = assistive return self. __assistive",False,effect.isAssistance is True,not assistive,0.6646853685379028 1202,"def __init__ ( self, listeners ) : self. __command_listeners = _LISTENERS. command_listeners [ : ] self. __server_listeners = _LISTENERS. server_listeners [ : ] lst = _LISTENERS. server_heartbeat_listeners self. __server_heartbeat_listeners = lst [ : ] self. __topology_listeners = _LISTENERS. topology_listeners [ : ] if listeners is not None : for lst in listeners : if isinstance ( lst, CommandListener ) : self. __command_listeners. append ( lst ) if isinstance ( lst, ServerListener ) : self. __server_listeners. append ( lst ) if : self. __server_heartbeat_listeners. append ( lst ) if isinstance ( lst, TopologyListener ) : self. __topology_listeners. append ( lst ) self. __enabled_for_commands = bool ( self. __command_listeners ) self. __enabled_for_server = bool ( self. __server_listeners ) self. __enabled_for_server_heartbeat = bool ( self. __server_heartbeat_listeners ) self. __enabled_for_topology = bool ( self. __topology_listeners )",False,"isinstance(lst, ServerHeartbeatListener)","isinstance(lst, ServerheartbeatListener)",0.6616869568824768 1203,"def findtype ( self, variable ) : """""":see Expression.findtype()"""""" assert isinstance ( variable, Variable ), ""%s is not a Variable"" % variable if self. is_atom ( ) : function, args = self. uncurry ( ) else : function = self. function args = [ self. argument ] found = [ arg. findtype ( variable ) for arg in [ function ] + args ] unique = [ ] for f in found : if f!= ANY_TYPE : if unique : for u in unique : if : break else : unique. 
append ( f ) if len ( unique ) == 1 : return list ( unique ) [ 0 ] else : return ANY_TYPE",False,f.matches(u),u != ANY_TYPE,0.660801351070404 1204,"def add_network_rule ( cmd, client, resource_group_name, account_name, action = ""Allow"", subnet = None, vnet_name = None, ip_address = None, ) : sa = client. get_properties ( resource_group_name, account_name ) rules = sa. network_rule_set if subnet : from msrestazure. tools import is_valid_resource_id if : raise CLIError ( ""Expected fully qualified resource ID: got '{}'"". format ( subnet ) ) VirtualNetworkRule = cmd. get_models ( ""VirtualNetworkRule"" ) if not rules. virtual_network_rules : rules. virtual_network_rules = [ ] rules. virtual_network_rules = [ r for r in rules. virtual_network_rules if r. virtual_network_resource_id. lower ( )!= subnet. lower ( ) ] rules. virtual_network_rules. append ( VirtualNetworkRule ( virtual_network_resource_id = subnet, action = action ) ) if ip_address : IpRule = cmd. get_models ( ""IPRule"" ) if not rules. ip_rules : rules. ip_rules = [ ] rules. ip_rules = [ r for r in rules. ip_rules if r. ip_address_or_range!= ip_address ] rules. ip_rules. append ( IpRule ( ip_address_or_range = ip_",False,not is_valid_resource_id(subnet),is_valid_resource_id(subnet),0.6568795442581177 1205,"def to_pandas ( self ) : """"""Returns a pandas DataFrame with the response of the query."""""" if not self. _raw_response : self. _execute_query ( ) return_list = [ ] timelines = { t. index_name : t. name for t in self. _sketch. list_timelines ( ) } return_field_list = [ ] return_fields = self. _return_fields if return_fields : if return_fields. startswith ( ""'"" ) : return_fields = return_fields [ 1 : ] if : return_fields = return_fields [ : - 1 ] return_field_list = return_fields. split ( "","" ) for result in self. _raw_response. get ( ""objects"", [ ] ) : source = result. get ( ""_source"", { } ) if not return_fields or ""_id"" in return_field_list : source [ ""_id"" ] = result. get ( ""_id"" ) if not return_fields or ""_type"" in return_field_list : source [ ""_type"" ] = result. get ( ""_type"" ) if not return_fields or ""_index"" in return_field_list : source [ ""_index"" ] = result. get ( ""_index"" ) if not return_fields or ""_source"" in return_field_list : source [ ""_source"" ] = timelines. get ( result. get ( ""_index"" ) ) return_list. append ( source ) data_frame = pandas. DataFrame ( return_list ) if ""datetime"" in data_frame : try : data_frame [ ""datetime"" ] = pandas. to",False,"return_fields.endswith(""'"")","return_fields.endswith(',')",0.6494438648223877 1206,"def process_extra_fields ( self ) : if self. instance. pk is not None : if self. cleaned_data. get ( ""initialize"", None ) : self. instance. initialize ( ) if : self. instance. 
update_from_templates ( )",False,"self.cleaned_data.get('update', None) or not self.instance.stores.count()","self.cleaned_data.get('update_from_templates', None)",0.6510922312736511 1207,"def load_www ( self, filename, config_dict ) : if ""www"" not in config_dict : return www_cfg = config_dict [ ""www"" ] allowed = set ( [ ""port"", ""debug"", ""json_cache_seconds"", ""rest_minimum_version"", ""allowed_origins"", ""jsonp"", ""plugins"", ""auth"", ""authz"", ""avatar_methods"", ""logfileName"", ""logRotateLength"", ""maxRotatedFiles"", ""versions"", ""change_hook_dialects"", ""change_hook_auth"", ""custom_templates_dir"", ""cookie_expiration_time"", ] ) unknown = set ( list ( www_cfg ) ) - allowed if unknown : error ( ""unknown www configuration parameter(s) %s"" % ( "", "". join ( unknown ), ) ) versions = www_cfg. get ( ""versions"" ) if versions is not None : cleaned_versions = [ ] if not isinstance ( versions, list ) : error ( ""Invalid www configuration value of versions"" ) else : for i, v in enumerate ( versions ) : ",False,"not isinstance(cookie_expiration_time, datetime.timedelta)",self.load_versions and filename not in allowed,0.6454242467880249 1208,"def canonical_custom_headers ( self, headers ) : hoi = [ ] custom_headers = { } for key in headers : lk = key. lower ( ) if : if lk. startswith ( ""x-amz-"" ) : custom_headers [ lk ] = "","". join ( v. strip ( ) for v in headers. get_all ( key ) ) sorted_header_keys = sorted ( custom_headers. keys ( ) ) for key in sorted_header_keys : hoi. append ( ""%s:%s"" % ( key, custom_headers [ key ] ) ) return ""\n"". join ( hoi )",False,headers[key] is not None,lk.startswith('x-amz-'),0.6531437039375305 1209,"def check ( self, value ) : val = self. rule. match ( value ) try : ( h, m, s ) = ( int ( val. group ( ""h"" ) ), 0, 0 ) if not val. group ( ""m"" ) is None : m = int ( val. group ( ""m"" ) ) if not val. group ( ""s"" ) is None : s = int ( val. group ( ""s"" ) ) if : h = h + 12 if val. group ( ""d"" ) == ""am"" and h == 12 : h = 0 if not ( h in range ( 24 ) and m in range ( 60 ) and s in range ( 60 ) ) : raise ValueError ( ""Hours or minutes or seconds are outside of allowed range"" ) val = time ( h, m, s ) return val, None except AttributeError : pass except ValueError : pass return value, translate ( self. message )",False,val.group('d') == 'pm' and 0 < h < 12,val.group('h') and m == 11,0.6507682800292969 1210,"def _fix_up_module ( ns, name, pathname, cpathname = None ) : loader = ns. get ( ""__loader__"" ) spec = ns. get ( ""__spec__"" ) if not loader : if : loader = spec. loader elif pathname == cpathname : loader = SourcelessFileLoader ( name, pathname ) else : loader = SourceFileLoader ( name, pathname ) if not spec : spec = spec_from_file_location ( name, pathname, loader = loader ) try : ns [ ""__spec__"" ] = spec ns [ ""__loader__"" ] = loader ns [ ""__file__"" ] = pathname ns [ ""__cached__"" ] = cpathname except Exception : pass",False,spec,"hasattr(spec, 'loader')",0.6974095106124878 1211,"def parse_ep_str ( anime, grammar ) : episodes = [ ] if not grammar : return split_anime ( anime, parse_episode_range ( anime, grammar ) ) for episode_grammar in grammar. split ( "","" ) : if : start, end = parse_episode_range ( anime, episode_grammar ). split ( "":"" ) episode_grammar = ""%d:%d"" % ( int ( start ), int ( end ) + 1 ) for episode in split_anime ( anime, episode_grammar ) : episodes. append ( episode ) else : from anime_downloader. sites. anime import AnimeEpisode ep = [ x for x in anime. 
_episode_urls if x [ 0 ] == int ( grammar ) ] [ 0 ] ep_cls = AnimeEpisode. subclasses [ anime. sitename ] episodes. append ( ep_cls ( ep [ 1 ], parent = anime, ep_no = ep [ 0 ] ) ) return episodes",False,':' in episode_grammar,grammar,0.6632068157196045 1212,"def parse_messages ( self, analyzer_result ) : """"""Parse the given analyzer result."""""" if not os. path. exists ( analyzer_result ) : LOG. error ( ""Report file does not exist: %s"", analyzer_result ) return try : with open ( analyzer_result, ""r"", encoding = ""utf-8"", errors = ""ignore"" ) as report_f : reports = json. load ( report_f ) except ( IOError, json. decoder. JSONDecodeError ) : LOG. error ( ""Failed to parse the given analyzer result '%s'. Please "" ""give a valid json file generated by Pylint."", analyzer_result, ) return for report in reports : file_path = os. path. join ( os. path. dirname ( analyzer_result ), report. get ( ""path"" ) ) if : LOG. warning ( ""Source file does not exists: %s"", file_path ) continue message = self. __parse_report ( report, file_path ) if message : self. messages. append ( message ) return self. messages",True,not os.path.exists(file_path),not os.path.exists(file_path),0.6452673673629761 1213,"def computeResultAndTermination ( obj, result, previousResult ) : possible_overall_result = result terminate = False if result == FAILURE : if not obj. flunkOnFailure : possible_overall_result = SUCCESS if obj. warnOnFailure : possible_overall_result = WARNINGS if : possible_overall_result = FAILURE if obj. haltOnFailure : terminate = True elif result == WARNINGS : if not obj. warnOnWarnings : possible_overall_result = SUCCESS else : possible_overall_result = WARNINGS if obj. flunkOnWarnings : possible_overall_result = FAILURE elif result in ( EXCEPTION, RETRY, CANCELLED ) : terminate = True result = worst_status ( previousResult, possible_overall_result ) return result, terminate",False,obj.flunkOnFailure,result == SUCCESS,0.6667091250419617 1214,"def parse ( self ) : rargs = list ( self. args ) pos = 0 while pos < len ( rargs ) : arg = rargs [ pos ] if arg == ""--"" : self. passthrough = "" "". join ( rargs [ pos : ] ) break elif arg [ 0 ] == ""-"" : if arg [ 1 ] == ""-"" : self. process_long_opt ( arg [ 2 : ] ) else : value = None if : value = rargs [ pos + 1 ] pos += 1 self. process_short_opt ( arg [ 1 : ], value ) else : self. values. append ( arg ) pos += 1",False,len(rargs) > pos + 1 and rargs[pos + 1][0] != '-',rargs[pos] > 0,0.657852292060852 1215,"def _get_argument ( self, name ) : i = 0 end = len ( name ) while i < end : c = name [ i ] if c == ""["" or c == ""."" : break i += 1 empty = not i if empty : index = - 1 else : index, stop = _parse_int ( name, 0, i ) if : index = - 1 use_numeric = empty or index!= - 1 if self. auto_numbering_state == self. ANS_INIT and use_numeric : if empty : self. auto_numbering_state = self. ANS_AUTO else : self. auto_numbering_state = self. ANS_MANUAL if use_numeric : if self. auto_numbering_state == self. ANS_MANUAL : if empty : msg = ""switching from manual to automatic numbering"" raise ValueError ( msg ) elif not empty : msg = ""switching from automatic to manual numbering"" raise ValueError ( msg ) if empty : index = self. auto_numbering self. auto_numbering += 1 if index == - 1 : kwarg = name [ : i ] arg_key = kwarg try : w_arg = self. kwargs [ arg_key ] except KeyError : raise KeyError ( arg_key ) """"""Returns a list of dictionaries containing each search result."""""" search_results = [ ] for result in soup. 
find_all ( ""div"", class_ = ""question-summary search-result"" ) : title_container = result. find_all ( ""div"", class_ = ""result-link"" ) [ 0 ]. find_all ( ""a"" ) [ 0 ] if : answer_count = int ( result. find_all ( ""div"", class_ = ""status answered"" ) [ 0 ] . find_all ( ""strong"" ) [ 0 ] . text ) elif ( result. find_all ( ""div"", class_ = ""status answered-accepted"" )!= [ ] ) : answer_count = int ( result. find_all ( ""div"", class_ = ""status answered-accepted"" ) [ 0 ] . find_all ( ""strong"" ) [ 0 ] . text ) else : answer_count = 0 search_results. append ( { ""Title"" : title_container [ ""title"" ], ""Answers"" : answer_count, ""URL"" : SO_URL + title_",False,"result.find_all('div', class_='status answered') != []","result.find_all('h', class_)",0.6556181311607361 1217,"def _start_modules ( self ) : if self. available_slots : non_started_executors = [ e for e in self. executors if e not in self. started_modules ] for executor in non_started_executors : self. engine. logging_level_up ( ) if : executor. startup ( ) self. started_modules. append ( executor ) self. available_slots -= 1 msg = ""Starting execution: %s, rest of available slots: %s"" self. log. debug ( msg, executor, self. available_slots ) if not self. available_slots : break self. engine. logging_level_down ( )",False,time.time() >= self.start_time + executor.delay,self.available_slots > 0,0.6526844501495361 1218,"def update ( self, space_id ) : form = SpaceForm ( request. form, csrf = False ) form. set_id ( space_id ) if form. validate_on_submit ( ) : space = SpaceModel ( ). get_by_id ( space_id ) data = form. form2dict ( ) current_app. logger. info ( data ) member_model = MemberModel ( group_id = space_id ) old_owner = space. user_id new_owner = data [ ""user_id"" ] space. update ( data ) if str ( old_owner )!= str ( new_owner ) : member_model. change_owner ( old_owner, new_owner ) current_owner = { ""user_id"" : new_owner, ""role"" : OWNER } if : members = json. loads ( request. form [ ""members"" ] ) members. append ( current_owner ) member_model. update_group ( members = members ) return self. render_json ( data = space. item ( ) ) else : return self. render_error ( code = Code. form_error, message = form. errors )",True,'members' in request.form,'members' in request.form,0.6528851985931396 1219,"def sort_and_validate ( self ) : try : assert len ( self. data ) > 0 except AssertionError : logger. critical ( ""No data series found in plot"" ) raise l = - 1 reference_x = None for series_name, data_points in self. data. iteritems ( ) : if l > 0 : assert l == len ( data_points ) else : l = len ( data_points ) data_points. sort ( key = itemgetter ( 0 ) ) if : assert reference_x == [ seq [ 0 ] for seq in data_points ] else : reference_x = [ seq [ 0 ] for seq in data_points ]",False,reference_x,reference_x is not None,0.6662988662719727 1220,"def getBranches ( self ) : returned = [ ] for git_branch_line in self. _executeGitCommandAssertSuccess ( ""branch"" ). stdout : if : git_branch_line = git_branch_line [ 1 : ] git_branch_line = git_branch_line. strip ( ) if BRANCH_ALIAS_MARKER in git_branch_line : alias_name, aliased = git_branch_line. split ( BRANCH_ALIAS_MARKER ) returned. append ( branch. LocalBranchAlias ( self, alias_name, aliased ) ) else : returned. append ( branch. LocalBranch ( self, git_branch_line ) ) return returned",False,git_branch_line.startswith('*'),git_branch_line.startswith('/'),0.6466782689094543 1221,"def formContourList ( self, prepared ) : cList = [ ] if prepared. 
hasPartLikeStreams ( ) : parts = prepared. parts else : parts = [ prepared ] for p in parts : post = p. findConsecutiveNotes ( skipRests = True, skipChords = False, skipGaps = True, noNone = True ) for i, n in enumerate ( post ) : if : iNext = i + 1 nNext = post [ iNext ] if n. isChord : ps = n. sortDiatonicAscending ( ). pitches [ - 1 ]. midi else : ps = n. pitch. midi if nNext. isChord : psNext = nNext. sortDiatonicAscending ( ). pitches [ - 1 ]. midi else : psNext = nNext. pitch. midi cList. append ( psNext - ps ) return cList",False,i < len(post) - 1,n.hasThasing,0.6607534885406494 1222,"def _pkg_status ( self, info, filepath ) : if not os. path. exists ( filepath ) : return self. NOT_INSTALLED try : filestat = os. stat ( filepath ) except OSError : return self. NOT_INSTALLED if filestat. st_size!= int ( info. size ) : return self. STALE if filepath. endswith ( "".zip"" ) : unzipdir = filepath [ : - 4 ] if not os. path. exists ( unzipdir ) : return self. INSTALLED if not os. path. isdir ( unzipdir ) : return self. STALE unzipped_size = sum ( os. stat ( os. path. join ( d, f ) ). st_size for d, _, files in os. walk ( unzipdir ) for f in files ) if : return self. STALE return self. INSTALLED",False,unzipped_size != info.unzipped_size,info.size > 0 and info.size > 0,0.6507558822631836 1223,"def test_events ( self ) : """"""Tests that events are correctly yielded"""""" from pynput. mouse import Button, Events with Events ( ) as events : self. notify ( ""Move the mouse"" ) for event in events : if isinstance ( event, Events. Move ) : break self. notify ( ""Press the left mouse button"" ) for event in events : if : break self. notify ( ""Press the right mouse button"" ) for event in events : if isinstance ( event, Events. Click ) and event. button == Button. right : break self. notify ( ""Scroll the mouse"" ) for event in events : if isinstance ( event, Events. Scroll ) : break self. notify ( ""Do not touch the mouse"", delay = 2.0 ) self. assertIsNone ( events. get ( 1.0 ) )",False,"isinstance(event, Events.Click) and event.button == Button.left",event.button == Button.left,0.6537449359893799 1224,"def _add_resource_group ( obj ) : if isinstance ( obj, list ) : for array_item in obj : _add_resource_group ( array_item ) elif isinstance ( obj, dict ) : try : if : if obj [ ""id"" ] : obj [ ""resourceGroup"" ] = _parse_id ( obj [ ""id"" ] ) [ ""resource-group"" ] except ( KeyError, IndexError, TypeError ) : pass for item_key in obj : if item_key!= ""sourceVault"" : _add_resource_group ( obj [ item_key ] )",False,'resourcegroup' not in [x.lower() for x in obj.keys()],obj[0] != 'sourceVault',0.6506759524345398 1225,"def _pivot_res_data ( self, res_data ) : res_data_pivot = { } for blobname, toplevelnames_from_ilk in res_data. iteritems ( ) : for ilk, toplevelnames in toplevelnames_from_ilk. iteritems ( ) : pivot_bft = res_data_pivot. setdefault ( ilk, { } ) for toplevelname in toplevelnames : if : pivot_bft [ toplevelname ] = set ( [ blobname ] ) else : pivot_bft [ toplevelname ]. add ( blobname ) return res_data_pivot",True,toplevelname not in pivot_bft,toplevelname not in pivot_bft,0.661950945854187 1226,"def _compare_dirs ( self, dir1 : str, dir2 : str ) -> List [ str ] : diff = [ ] for root, dirs, files in os. walk ( dir1 ) : for file_ in files : path = os. path. join ( root, file_ ) target_path = os. path. join ( dir2, os. path. split ( path ) [ - 1 ] ) if : diff. 
append ( file_ ) return diff",False,not os.path.exists(target_path),os.path.exists(target_path),0.6456561088562012 1227,"def test_object_role_JT_attach ( rando, job_template, workflow_job_template, inventory_source, notification_template, res_role, expect, ) : nt_organization = Organization. objects. create ( name = ""organization just for the notification template"" ) nt_organization. notification_admin_role. members. add ( rando ) notification_template. organization = nt_organization notification_template. save ( ) kwargs = dict ( sub_obj = notification_template, relationship = ""notification_templates_success"", data = { ""id"" : notification_template. id }, ) permissions = { } expected_permissions = { } for resource in ( job_template, workflow_job_template, inventory_source ) : permission_resource = resource if resource == inventory_source : permission_resource = inventory_source. inventory model_name = resource. __class__. __name__ if : if res_role is not None : getattr ( permission_resource, res_role ). members. add ( rando ) permissions [ model_name ] = rando. can_access ( resource. __class__, ""attach"", resource, ** kwargs ) expected_permissions [ model_name ] = expect else : permissions [ model_name ] = None expected_permissions [ model_name ] = None assert permissions == expected_permissions",False,"res_role is None or hasattr(permission_resource, res_role)",model_name in permissions,0.6503365635871887 1228,"def get_dict ( self ) : result = online. bits_to_dict ( self. register ) if self. mintime == 65535 : result. update ( { ""mintime"" : ""MAX"" } ) else : result. update ( { ""mintime"" : ""{:.3f}s"". format ( float ( self. mintime ) / 1000 ) } ) if result [ ""ntp"" ] : if self. offset in ( 32767, - 32768 ) : word = ""MAX"" if : word = ""MIN"" result. update ( { ""ntp-offset"" : word } ) else : result. update ( { ""ntp-offset"" : ""{:.3f}s"". format ( float ( self. offset ) / 1000000 ) } ) else : result. update ( { ""ntp-offset"" : ""N/A"" } ) return result",False,self.offset < 0,self.offset == -1,0.6579854488372803 1229,"def _blend ( x, y ) : """"""Implements the ""blend"" strategy for `deep_merge`."""""" if isinstance ( x, ( dict, OrderedDict ) ) : if not isinstance ( y, ( dict, OrderedDict ) ) : return y return _merge ( x, y, recursion_func = _blend ) if isinstance ( x, ( list, tuple ) ) : if not isinstance ( y, ( list, tuple ) ) : return y result = [ _blend ( * i ) for i in zip ( x, y ) ] if len ( x ) > len ( y ) : result += x [ len ( y ) : ] elif : result += y [ len ( x ) : ] return result return y",False,len(x) < len(y),len(y) > len(x),0.6478318572044373 1230,"def do_shorts ( opts : List [ Tuple [ str, str ] ], optstring : str, shortopts : str, args : List [ str ] ) -> Tuple [ List [ Tuple [ str, str ] ], List [ str ] ] : while optstring!= """" : opt, optstring = optstring [ 0 ], optstring [ 1 : ] if short_has_arg ( opt, shortopts ) : if : if not args : raise GetoptError ( ""option -%s requires argument"" % opt, opt ) optstring, args = args [ 0 ], args [ 1 : ] optarg, optstring = optstring, """" else : optarg = """" opts. append ( ( ""-"" + opt, optarg ) ) return opts, args",False,optstring == '',optstring != optstring,0.6816678047180176 1231,"def update_user ( self ) : user = frappe. get_doc ( ""User"", self. user_id ) user. flags. ignore_permissions = True if ""Employee"" not in user. get ( ""roles"" ) : user. append_roles ( ""Employee"" ) if self. employee_name and not ( user. first_name and user. last_name ) : employee_name = self. employee_name. 
split ( "" "" ) if : user. last_name = "" "". join ( employee_name [ 2 : ] ) user. middle_name = employee_name [ 1 ] elif len ( employee_name ) == 2 : user. last_name = employee_name [ 1 ] user. first_name = employee_name [ 0 ] if self. date_of_birth : user. birth_date = self. date_of_birth if self. gender : user. gender = self. gender if self. image : if not user. user_image : user. user_image = self. image try : frappe. get_doc ( { ""doctype"" : ""File"", ""file_name"" : self. image, ""attached_to_doctype"" : ""User"", ""attached_to_name"" : self. user_id, <",False,len(employee_name) >= 3,len(employee_name) > 2,0.6578024625778198 1232,"def _update_scale ( self, skip ) : if self. dynamic_loss_scale : prev_scale = self. cur_scale if : self. cur_scale = max ( self. cur_scale / self. scale_factor, self. min_loss_scale ) self. last_overflow_iter = self. cur_iter if self. verbose : logger. info ( ""Grad overflow on iteration: %s"", self. cur_iter ) logger. info ( f""Reducing dynamic loss scale from {prev_scale} to {self.cur_scale}"" ) else : stable_interval = ( self. cur_iter - self. last_overflow_iter ) - 1 if ( stable_interval > 0 ) and ( stable_interval % self. scale_window == 0 ) : self. cur_scale *= self. scale_factor if self. verbose : logger. info ( f""No Grad overflow for {self.scale_window} iterations"" ) logger. info ( f""Increasing dynamic loss scale from {prev_scale} to {self.cur_scale}"" ) else : if : logger. info ( ""Grad overflow on iteration %s"", self. cur_iter ) <",True,skip,skip,0.6807229518890381 1233,"def __annotations_bytes ( self ) : if self. annotations : a = [ ] for k, v in self. annotations. items ( ) : if len ( k )!= 4 : raise errors. ProtocolError ( ""annotation key must be of length 4"" ) if : k = k. encode ( ""ASCII"" ) a. append ( struct. pack ( ""!4sH"", k, len ( v ) ) ) a. append ( v ) return b"""". join ( a ) return b""""",False,"sys.version_info >= (3, 0)",k in self.data,0.6506531238555908 1234,"def check_output_command ( file_path, head = None, tail = None ) : """"""call check_output command to read content from a file"""""" if os. path. exists ( file_path ) : if sys. platform == ""win32"" : cmds = [ ""powershell.exe"", ""type"", file_path ] if head : cmds += [ ""|"", ""select"", ""-first"", str ( head ) ] elif : cmds += [ ""|"", ""select"", ""-last"", str ( tail ) ] return check_output ( cmds, shell = True ). decode ( ""utf-8"" ) else : cmds = [ ""cat"", file_path ] if head : cmds = [ ""head"", ""-"" + str ( head ), file_path ] elif : cmds = [ ""tail"", ""-"" + str ( tail ), file_path ] return check_output ( cmds, shell = False ). decode ( ""utf-8"" ) else : print_error ( ""{0} does not exist!"". format ( file_path ) ) exit ( 1 )",True,tail,tail,0.725208044052124 1235,"def show_help ( error = None, topic = None, parser = None ) : """"""Display an error message, or the named topic."""""" assert error or topic or parser program_path = sys. argv [ 0 ] if program_path. endswith ( os. path. sep + ""__main__.py"" ) : program_path = os. path. dirname ( program_path ) program_name = os. path. basename ( program_path ) if env. WINDOWS : auto_suffix = ""-script.py"" if : program_name = program_name [ : - len ( auto_suffix ) ] help_params = dict ( coverage. __dict__ ) help_params [ ""program_name"" ] = program_name if CTracer is not None : help_params [ ""extension_modifier"" ] = ""with C extension"" else : help_params [ ""extension_modifier"" ] = ""without C extension"" if error : print ( error, file = sys. 
stderr ) print ( ""Use '%s help' for help."" % ( program_name, ), file = sys. stderr ) elif parser : print ( parser. format_help ( ). strip ( ) ) print ( ) else : help_msg = textwrap. dedent ( HELP_TOPICS. get ( topic, """" ) ). strip ( ) if help_msg : print ( help_msg. format ( ** help_params ) ) else : print ( ""Don't know topic %r"" % topic ) print ( ""Full documentation is at {__url__}"". format ( ** help_",False,program_name.endswith(auto_suffix),program_name.startswith(auto_suffix),0.6458815336227417 1236,"def generate_app_composes ( ) : composes = [ ] for app in pathlib. Path ( config. APPS_PATH ). iterdir ( ) : if : try : zip_ref = zipfile. ZipFile ( app, ""r"" ) zip_ref. extractall ( config. APPS_PATH ) zip_ref. close ( ) os. remove ( app ) except Exception as e : logger. error ( f""Zip error: {e}"" ) continue for app in pathlib. Path ( config. APPS_PATH ). iterdir ( ) : if app. is_dir ( ) and not re. fullmatch ( r""(__.*)"", app. name ) : for version in app. iterdir ( ) : if re. fullmatch ( r""((\d\.?)+)"", version. name ) : composes. append ( compose_from_app ( version, f""app_{app.name}"" ) ) logger. info ( f""Generated compose for {app.name} version: {version.name}"" ) return composes",False,not app.is_dir(),app.is_dir(),0.6518459320068359 1237,"def async_trace ( * fn_args, ** fn_kwargs ) : span_gen = _span_generator ( scope, sub_scope, trace_kwargs, fn_args = fn_args, fn_kwargs = fn_kwargs ) next ( span_gen ) completed = False try : result = await fn ( * fn_args, ** fn_kwargs ) completed = True try : span_gen. send ( TracedFunctionReturned ( result ) ) except StopIteration : pass return result except : if : error_type, error_value, traceback = sys. exc_info ( ) try : span_gen. send ( TracedFunctionThrew ( error_type, error_value, traceback ) ) except StopIteration : pass raise",True,not completed,not completed,0.7020494937896729 1238,"def check_bounds ( geometry ) : if isinstance ( geometry [ 0 ], ( list, tuple ) ) : return list ( map ( check_bounds, geometry ) ) else : if geometry [ 0 ] > 180 or geometry [ 0 ] < - 180 : raise ValueError ( ""Longitude is out of bounds, check your JSON format or data"" ) if : raise ValueError ( ""Latitude is out of bounds, check your JSON format or data"" )",False,geometry[1] > 90 or geometry[1] < -90,geometry[0] > 90 or geometry[0] < -90,0.6568539142608643 1239,"def openPage ( self, name, ** args ) : if name. strip ( ) in self. plugins. keys ( ) : if self. plugins [ name ]. isWebPlugin ( ) : pageInfo = self. plugins [ name ]. getPage ( ** args ) if : page, content = pageInfo if page : return ( ""plugins/%s"" % page, content ) else : return None else : return ( ""error.html"", { ""status"" : { ""except"" : ""plugin-page-missing"" } } ) else : return ( ""error.html"", { ""status"" : { ""except"" : ""plugin-not-webplugin"" } } ) return ( ""error.html"", { ""status"" : { ""except"" : ""plugin-not-loaded"" } } )",False,type(pageInfo) == tuple,pageInfo,0.6535598039627075 1240,"def visit_UnaryOp ( self, node ) : if isinstance ( node. op, ast. USub ) : if : return ""-{}"". format ( self. visit ( node. operand ) ) else : return ""-({})"". format ( self. visit ( node. operand ) ) if isinstance ( node. op, ast. Invert ) : if : return ""~{}"". format ( self. visit ( node. operand ) ) else : return ""~({})"". format ( self. visit ( node. operand ) ) else : raise ValueError ( ""Unary op not supported: {}"". format ( node. 
op ) )",False,"isinstance(node.operand, (ast.Name, ast.Num))","isinstance(node.op, ast.Call) and node.op in ['+', '-', '*']",0.6477424502372742 1241,"def read_input ( self ) : c = self. stdscr. getch ( ) if is_printable_chr ( c ) : if chr ( c ) == ""Q"" : component. get ( ""ConsoleUI"" ). quit ( ) elif self. inlist : if chr ( c ) == ""q"" : return elif chr ( c ) == ""D"" : host_id = self. popup. current_selection ( ) [ 1 ] self. delete_host ( host_id ) return elif : self. add_popup ( ) return if self. popup : if self. popup. handle_read ( c ) and self. popup. closed ( ) : self. pop_popup ( ) self. refresh ( )",False,chr(c) == 'a',chr(c) == 'C',0.6524997353553772 1242,"def _get_xarrays ( self, element, coords, xtype, ytype ) : x, y = element. kdims dims = [ y. name, x. name ] irregular = any ( element. interface. irregular ( element, d ) for d in dims ) if irregular : coord_dict = { x. name : ( ( ""y"", ""x"" ), coords [ 0 ] ), y. name : ( ( ""y"", ""x"" ), coords [ 1 ] ) } else : coord_dict = { x. name : coords [ 0 ], y. name : coords [ 1 ] } arrays = { } for vd in element. vdims : if element. interface is XArrayInterface : xarr = element. data [ vd. name ] if ""datetime"" in ( xtype, ytype ) : xarr = xarr. copy ( ) if dims!= xarr. dims and not irregular : xarr = xarr. transpose ( * dims ) elif : arr = element. dimension_values ( vd, flat = False ) xarr = xr. DataArray ( arr, coords = coord_dict, dims = [ ""y"", ""x"" ] ) else : arr = element. dimension_values ( vd, flat = False ) xarr = xr. DataArray ( arr, coords = coord_dict, dims = dims ) if xtype == ""datetime"" : xarr [ x. name ] = [ dt_to_int ( v, ""ns"" ) for v in xarr [ x. name ]. values ] if ytype == ""datetime"" : <",False,irregular,element.dimension_values is not None,0.6738534569740295 1243,"def __backgroundUpdate ( self ) : selectedPaths = GafferSceneUI. ContextAlgo. getSelectedPaths ( Gaffer. Context. current ( ) ) parameterInspectors = { } for path in selectedPaths. paths ( ) : if : continue history = GafferScene. SceneAlgo. history ( self. __sceneView [ ""in"" ] [ ""attributes"" ], path ) for attribute, group in self. __groups. items ( ) : attributeHistory = GafferScene. SceneAlgo. attributeHistory ( history, attribute ) if attributeHistory is not None : for parameter in group. parameters ( ) : parameterInspectors. setdefault ( attribute, { } ). setdefault ( parameter, [ ] ). append ( _ParameterInspector ( attributeHistory, parameter, self. __sceneView. editScope ( ) ) ) return parameterInspectors",False,not self.__sceneView['in'].exists(path),"path == '__sceneView[ ""in""] or path == '__sceneView[ ""in""]",0.65535968542099 1244,"def _wait_for_finish ( self ) -> PollExitResponse : while True : if self. _backend : poll_exit_resp = self. _backend. interface. communicate_poll_exit ( ) logger. info ( ""got exit ret: %s"", poll_exit_resp ) if : done = poll_exit_resp. done pusher_stats = poll_exit_resp. pusher_stats if pusher_stats : self. _on_finish_progress ( pusher_stats, done ) if done : return poll_exit_resp time. sleep ( 2 )",True,poll_exit_resp,poll_exit_resp,0.658441424369812 1245,"def convert ( installers, dest_dir, verbose ) : require_pkgresources ( ""wheel convert"" ) from.. wininst2wheel import bdist_wininst2wheel from.. egg2wheel import egg2wheel for pat in installers : for installer in iglob ( pat ) : if os. path. splitext ( installer ) [ 1 ] == "".egg"" : conv = egg2wheel else : conv = bdist_wininst2wheel if : sys. stdout. write ( ""{0}... "". format ( installer ) ) sys. stdout. 
flush ( ) conv ( installer, dest_dir ) if : sys. stdout. write ( ""OK\n"" )",True,verbose,verbose,0.6907249093055725 1246,"def __init__ ( self, image_base_dir, anno_path, with_mask = True, dataset_filter = None, eval = False ) : self. metas = [ ] self. eval = eval self. image_base_dir = image_base_dir self. anno_path = anno_path self. with_mask = with_mask self. coco = COCO ( self. anno_path ) self. get_image_annos ( ) self. image_list = os. listdir ( self. image_base_dir ) if dataset_filter!= None : filter_metas = [ ] for meta in self. metas : if : filter_metas. append ( meta ) self. metas = filter_metas",False,dataset_filter(meta) == True,dataset_filter.get(meta) != None,0.6530323028564453 1247,"def recarray2dict ( arr ) : res = { } tcds_types = [ time_cds_short, time_cds, time_cds_expanded ] for dtuple in arr. dtype. descr : key = dtuple [ 0 ] ntype = dtuple [ 1 ] data = arr [ key ] if ntype in tcds_types : if data. size > 1 : res [ key ] = np. array ( [ timecds2datetime ( item ) for item in data. ravel ( ) ] ). reshape ( data. shape ) else : res [ key ] = timecds2datetime ( data ) elif isinstance ( ntype, list ) : res [ key ] = recarray2dict ( data ) else : if data. size == 1 : data = data [ 0 ] if : try : data = data. decode ( ) except ValueError : pass data = data. split ( "":"" ) [ 0 ]. strip ( ) res [ key ] = data else : res [ key ] = data. squeeze ( ) return res",False,ntype[:2] == '|S',"isinstance(data, np.ndarray)",0.6625974178314209 1248,"def __patch_houdini_env ( self, local_path, mode = ""change"" ) : filepath = HOUDINI_ENV if platform. system ( ) == ""Windows"" : sep = "";"" quote_char = """" else : sep = "":"" quote_char = '""' to_write = [ ] has_houdini_path_defined = False with open ( filepath, ""r"" ) as fp : skip_next_line = False skipped_lines = 0 for line in fp. readlines ( ) : if skip_next_line : skipped_lines += 1 if skipped_lines == 3 : skip_next_line = False skipped_lines = 0 continue if ""# "" + self. repo_name. upper ( ) in line : skip_next_line = True continue if : has_houdini_path_defined = True to_write. append ( line ) if to_write [ - 1 ] [ - 1 ]!= ""\n"" : to_write [ - 1 ] += ""\n"" to_write. append ( ""\n"" ) if to_write [ - 1 ]!= ""\n"" : to_write. append ( ""\n"" ) if mode == ""change"" : to_write. append ( ""# "" + self. repo_name. upper ( ) + ""\n"" ) <",False,'HOUDINI_PATH' in line,skip_next_line,0.6586650609970093 1249,"def _index_from_records ( self, recarr ) : index = recarr. dtype. metadata [ ""index"" ] if len ( index ) == 1 : rtn = Index ( np. copy ( recarr [ str ( index [ 0 ] ) ] ), name = index [ 0 ] ) if : rtn = rtn. tz_localize ( ""UTC"" ). tz_convert ( recarr. dtype. metadata [ ""index_tz"" ] ) else : level_arrays = [ ] index_tz = recarr. dtype. metadata. get ( ""index_tz"", [ ] ) for level_no, index_name in enumerate ( index ) : level = Index ( np. copy ( recarr [ str ( index_name ) ] ) ) if level_no < len ( index_tz ) : tz = index_tz [ level_no ] if tz is not None : if not isinstance ( level, DatetimeIndex ) and len ( level ) == 0 : level = DatetimeIndex ( [ ], tz = tz ) else : level = level. tz_localize ( ""UTC"" ). tz_convert ( tz ) level_arrays. append ( level ) rtn = MultiIndex. from_arrays ( level_arrays, names = index ) return rtn",False,"isinstance(rtn, DatetimeIndex) and 'index_tz' in recarr.dtype.metadata",tz is not None,0.6531420946121216 1250,"def refresh ( self ) : super ( ). refresh ( ) name = self. settlement. get_component ( NamedComponent ). name text = T ( ""Production overview of {settlement}"" ). 
format ( settlement = name ) self. _gui. findChild ( name = ""headline"" ). text = text forward_button = self. _gui. findChild ( name = ""forwardButton"" ) backward_button = self. _gui. findChild ( name = ""backwardButton"" ) if self. current_page == 0 : backward_button. set_inactive ( ) else : backward_button. set_active ( ) max_left_page_idx = self. max_pages - 1 max_left_page_idx -= max_left_page_idx % 2 if self. current_page == max_left_page_idx : forward_button. set_inactive ( ) else : forward_button. set_active ( ) data = self. displayed_resources data = data [ self. current_page * self. LINES_PER_PAGE : ( self. current_page + 2 ) * self. LINES_PER_PAGE ] for idx, ( resource_id, amount ) in enumerate ( data, start = 1 ) : if : container = self. _page_right else : container = self. _page_left self. _add_line_to_gui ( container, resource_id, amount ) self. _page_left. adaptLayout ( ) self. _page_right. adaptLayout ( )",False,idx > self.LINES_PER_PAGE,idx == 0,0.6549789309501648 1251,"def run ( self ) : okay = [ AssertionError ] if self. writing is False : okay. append ( ValueError ) try : self. info = ""Starting: %s"" % self. writing self. _copy_loop ( ) except tuple ( okay ) : pass except : self. exc_info = sys. exc_info ( ) traceback. print_exc ( ) if : try : self. error_callback ( ) except : pass finally : fd, self. my_pipe_fd = self. my_pipe_fd, None if fd is not None : fd. close ( ) self. info = ""Dead""",False,self.error_callback,self.error_callback is not None,0.6547931432723999 1252,"def find_word_bounds ( self, text, index, allowed_chars ) : right = left = index done = False while not done : if left == 0 : done = True elif not self. word_boundary_char ( text [ left - 1 ] ) : left -= 1 else : done = True done = False while not done : if right == len ( text ) : done = True elif : right += 1 else : done = True return left, right",False,not self.word_boundary_char(text[right]),left > right - 1 and allowed_chars[left] in text,0.6508366465568542 1253,"def close ( self ) : if ( self. _entity_stack or self. _parser is None or isinstance ( self. _parser, _ClosedParser ) ) : return try : self. feed ( """", isFinal = 1 ) self. _cont_handler. endDocument ( ) self. _parsing = 0 self. _parser = None finally : self. _parsing = 0 if : parser = _ClosedParser ( ) parser. ErrorColumnNumber = self. _parser. ErrorColumnNumber parser. ErrorLineNumber = self. _parser. ErrorLineNumber self. _parser = parser",True,self._parser is not None,self._parser is not None,0.6698061227798462 1254,"def block2assignblks ( self, block ) : irblocks_list = super ( mipsCGen, self ). block2assignblks ( block ) for irblocks in irblocks_list : for blk_idx, irblock in enumerate ( irblocks ) : has_breakflow = any ( assignblock. instr. breakflow ( ) for assignblock in irblock ) if not has_breakflow : continue irs = [ ] for assignblock in irblock : if : irs. append ( AssignBlock ( assignments, assignblock. instr ) ) continue assignments = dict ( assignblock ) assignments [ self. delay_slot_dst ] = assignblock [ self. ir_arch. pc ] assignments [ self. delay_slot_set ] = m2_expr. ExprInt ( 1, 32 ) dst_loc_key = self. ir_arch. get_next_instr ( assignblock. instr ) assignments [ self. ir_arch. IRDst ] = m2_expr. ExprLoc ( dst_loc_key, 32 ) irs. append ( AssignBlock ( assignments, assignblock. instr ) ) irblocks [ blk_idx ] = IRBlock ( irblock. loc_key, irs ) return irblocks_list",False,self.ir_arch.pc not in assignblock,len(assignblock) == 0,0.6533259749412537 1255,"def get_attribute ( self, selector, attribute, by = By. 
CSS_SELECTOR, timeout = None, hard_fail = True ) : """"""This method uses JavaScript to get the value of an attribute."""""" self. __check_scope ( ) if not timeout : timeout = settings. SMALL_TIMEOUT if self. timeout_multiplier and timeout == settings. SMALL_TIMEOUT : timeout = self. __get_new_timeout ( timeout ) selector, by = self. __recalculate_selector ( selector, by ) self. wait_for_ready_state_complete ( ) time. sleep ( 0.01 ) element = page_actions. wait_for_element_present ( self. driver, selector, by, timeout ) try : attribute_value = element. get_attribute ( attribute ) except ( StaleElementReferenceException, ENI_Exception ) : self. wait_for_ready_state_complete ( ) time. sleep ( 0.14 ) element = page_actions. wait_for_element_present ( self. driver, selector, by, timeout ) attribute_value = element. get_attribute ( attribute ) if attribute_value is not None : return attribute_value else : if : raise Exception ( ""Element {%s} has no attribute {%s}!"" % ( selector, attribute ) ) else : return None",True,hard_fail,hard_fail,0.6700891852378845 1256,"def __init__ ( self, view, syntax = None ) : self. platform = sublime. platform ( ) self. classmap = { } self. st_version = 2 if sublime. version ( ) == """" or int ( sublime. version ( ) ) > 3000 : self. st_version = 3 self. file_name = view. file_name ( ) self. settings = sublime. load_settings ( ""CodeFormatter.sublime-settings"" ) self. packages_path = sublime. packages_path ( ) self. syntax_file = view. settings ( ). get ( ""syntax"" ) self. syntax = syntax or self. get_syntax ( ) map_settings_formatter = [ ( ""codeformatter_php_options"", PhpFormatter ), ( ""codeformatter_js_options"", JsFormatter ), ( ""codeformatter_css_options"", CssFormatter ), ( ""codeformatter_html_options"", HtmlFormatter ), ( ""codeformatter_python_options"", PyFormatter ), ( ""codeformatter_vbscript_options"", VbscriptFormatter ), ( ""codeformatter_scss_options"", ScssFormatter ), ( ""codeformatter_coldfusion_options"", ColdfusionFormatter ), ( ""codeformatter_go_options"", GoFormatter ), ] for name, _class in map_settings_formatter : syntaxes = self. settings. get ( name, { } ). get ( ""syntaxes"" ) if : continue for _formatter in syntaxes. split ( "","" ) : self. classmap [ _formatter. strip ( ) ] = _class",False,"not syntaxes or not isinstance(syntaxes, str)",syntaxes is None,0.6483757495880127 1257,"def register_type_handlers ( connection, ** kwargs ) : if connection. vendor!= ""postgresql"" : return try : if : register_hstore ( connection. connection, globally = True, unicode = True ) else : register_hstore ( connection. connection, globally = True ) except ProgrammingError : pass try : with connection. cursor ( ) as cursor : cursor. execute ( ""SELECT typarray FROM pg_type WHERE typname = 'citext'"" ) oids = tuple ( row [ 0 ] for row in cursor ) array_type = psycopg2. extensions. new_array_type ( oids, ""citext[]"", psycopg2. STRING ) psycopg2. extensions. register_type ( array_type, None ) except ProgrammingError : pass",False,six.PY2,connection.vendor == 'sqlite',0.6716198921203613 1258,"def readexactly ( self, n ) : buf = b"""" while n : yield IORead ( self. s ) res = self. s. read ( n ) assert res is not None if : yield IOReadDone ( self. s ) break buf += res n -= len ( res ) return buf",False,not res,len(res) == 0,0.6899232864379883 1259,"def __ludcmp ( self, index ) : size = self. 
rows vv = [ 0.0 ] * size for i in range ( size ) : big = 0.0 for j in range ( size ) : big = max ( abs ( self [ i ] [ j ] ), big ) if big == 0 : raise Exception ( ""Singular matrix found"" ) vv [ i ] = 1.0 / big for j in range ( size ) : for i in range ( j ) : s = self [ i ] [ j ] for k in range ( i ) : s -= self [ i ] [ k ] * self [ k ] [ j ] self [ i ] [ j ] = s big = 0.0 for i in range ( j, size ) : s = self [ i ] [ j ] for k in range ( j ) : s -= self [ i ] [ k ] * self [ k ] [ j ] self [ i ] [ j ] = s dum = vv [ i ] * abs ( s ) if dum >= big : big = dum imax = i if : for k in range ( size ) : dum = self [ imax ] [ k ] self [ imax ] [ k ] = self [ j ] [ k ] self [ j ] [ k",False,j != imax,i % size > 0,0.7049155235290527 1260,"def check_response ( self, response ) : """"""Specialized version of check_response()."""""" for line in response : if not line. strip ( ) : continue if : return elif line. startswith ( b""Benutzer/Passwort Fehler"" ) : raise BadLogin ( line ) else : raise FailedPost ( ""Server returned '%s'"" % six. ensure_text ( line ) )",False,line.startswith(b'OK'),line.startswith(b'<'),0.6486492156982422 1261,"def attach_to_cluster ( self, cluster ) : client_info = self. client. info ( ) revision = client_info [ ""version"" ] [ ""build_hash"" ] distribution_version = client_info [ ""version"" ] [ ""number"" ] distribution_flavor = client_info [ ""version"" ]. get ( ""build_flavor"", ""oss"" ) cluster. distribution_version = distribution_version cluster. distribution_flavor = distribution_flavor cluster. source_revision = revision for node_stats in self. client. nodes. stats ( metric = ""_all"" ) [ ""nodes"" ]. values ( ) : node_name = node_stats [ ""name"" ] if : cluster_node = cluster. node ( node_name ) else : host = node_stats. get ( ""host"", ""unknown"" ) cluster_node = cluster. add_node ( host, node_name ) self. add_node_stats ( cluster, cluster_node, node_stats ) for node_info in self. client. nodes. info ( node_id = ""_all"" ) [ ""nodes"" ]. values ( ) : self. add_node_info ( cluster, node_info )",False,cluster.has_node(node_name),node_name.startswith('node:'),0.6499686241149902 1262,"def _is_match_rule ( self, flow, rules ) : if not rules : return False for rule_key, pattern in rules. items ( ) : targets = self. _get_rule_targets ( rule_key, flow ) if : return False if not self. _is_target_pattern_matched ( pattern, targets ) : return False return True",True,not targets,not targets,0.6781860589981079 1263,"def sort_key ( self, dupe, crit_value ) : value = self. extract_value ( dupe ) if crit_value in { self. ENDS_WITH_NUMBER, self. DOESNT_END_WITH_NUMBER } : ends_with_digit = value. strip ( ) [ - 1 : ]. isdigit ( ) if crit_value == self. ENDS_WITH_NUMBER : return 0 if ends_with_digit else 1 else : return 1 if ends_with_digit else 0 else : value = len ( value ) if : value *= - 1 return value",False,crit_value == self.LONGEST,value < 0,0.6688231229782104 1264,"def load_annotations ( self ) : """"""Load annotation file to get video information."""""" if self. ann_file. endswith ( "".json"" ) : return self. load_json_annotations ( ) video_infos = [ ] with open ( self. ann_file, ""r"" ) as fin : for line in fin : line_split = line. strip ( ). split ( ) video_dir = line_split [ 0 ] label = int ( line_split [ 1 ] ) num_clips = int ( line_split [ 2 ] ) positive_clip_inds = [ int ( ind ) for ind in line_split [ 3 : ] ] if : video_dir = osp. join ( self. data_prefix, video_dir ) video_infos. 
append ( dict ( video_dir = video_dir, label = label, num_clips = num_clips, positive_clip_inds = positive_clip_inds, ) ) return video_infos",False,self.data_prefix is not None,self.has_data_prefix and self.data_prefix,0.651530385017395 1265,"def claim_ownership ( self, ownership_list ) : result = [ ] for ownership in ownership_list : fully_qualified_namespace = ownership [ ""fully_qualified_namespace"" ] eventhub_name = ownership [ ""eventhub_name"" ] consumer_group = ownership [ ""consumer_group"" ] partition_id = ownership [ ""partition_id"" ] etag = ownership. get ( ""etag"" ) old_ownership_node = self. _ownerships_trie. lookup ( ( fully_qualified_namespace, eventhub_name, consumer_group, partition_id ) ) if : old_ownership = old_ownership_node. value if etag == old_ownership [ ""etag"" ] : ownership [ ""etag"" ] = str ( uuid. uuid4 ( ) ) ownership [ ""last_modified_time"" ] = time. time ( ) old_ownership [ ""etag"" ] = ownership [ ""etag"" ] old_ownership [ ""last_modified_time"" ] = ownership [ ""last_modified_time"" ] old_ownership [ ""owner_id"" ] = ownership [ ""owner_id"" ] result. append ( old_ownership ) else : ownership [ ""etag"" ] = str ( uuid. uuid4 ( ) ) ownership [ ""last_modified_time"" ] = time. time ( ) self. _ownerships_trie. set_ele ( ownership ) result. append ( ownership )",True,old_ownership_node,old_ownership_node,0.6676054000854492 1266,"def run ( self ) : while True : with self. lock : if : return print ( ""\n============= THREAD FRAMES: ================"" ) for id, frame in sys. _current_frames ( ). items ( ) : if id == threading. current_thread ( ). ident : continue try : name = threading. _active. get ( id, None ) except : name = None if name is None : try : name = QtCore. QThread. _names. get ( id ) except : name = None if name is None : name = ""???"" print ( '<< thread %d ""%s"" >>' % ( id, name ) ) traceback. print_stack ( frame ) print ( ""===============================================\n"" ) time. sleep ( self. interval )",False,self._stop is True,self.thread_mode,0.6634032726287842 1267,"def is_test_finished ( self ) : try : if hasattr ( self. volta_core, ""phone"" ) : if hasattr ( self. volta_core. phone, ""test_performer"" ) : if : logger. warning ( ""There is no test performer process on the phone, interrupting test"" ) return 1 if not self. volta_core. phone. test_performer. is_finished ( ) : logger. debug ( ""Waiting for phone test to finish..."" ) return - 1 else : return self. volta_core. phone. test_performer. retcode except : logger. error ( ""Unknown exception of Android plugin. Interrupting test"", exc_info = True ) return 1",False,not self.volta_core.phone.test_performer,self.volta_core.phone.test_performer.retcode is None,0.6532264947891235 1268,"def dump ( self ) : data = b"""" dict_data = self. dfl_dict for key in list ( dict_data. keys ( ) ) : if dict_data [ key ] is None : dict_data. pop ( key ) for chunk in self. chunks : if : self. chunks. remove ( chunk ) break last_app_chunk = 0 for i, chunk in enumerate ( self. chunks ) : if chunk [ ""m_h"" ] & 0xF0 == 0xE0 : last_app_chunk = i dflchunk = { ""name"" : ""APP15"", ""m_h"" : 0xEF, ""data"" : pickle. dumps ( dict_data ), ""ex_data"" : None, } self. chunks. insert ( last_app_chunk + 1, dflchunk ) for chunk in self. chunks : data += struct. pack ( ""BB"", 0xFF, chunk [ ""m_h"" ] ) chunk_data = chunk [ ""data"" ] if chunk_data is not None : data += struct. 
pack ( "">H"", len ( chunk_data ) + 2 ) data += chunk_data chunk_ex_data = chunk [ ""ex_data"" ] if chunk_ex_data is not None : data += chunk_ex_data return data",False,chunk['name'] == 'APP15',chunk[0] & 240 and chunk[1] & 240,0.6562092304229736 1269,"def _parse_hh_mm_ss_ff ( tstr ) : len_str = len ( tstr ) time_comps = [ 0, 0, 0, 0 ] pos = 0 for comp in range ( 0, 3 ) : if ( len_str - pos ) < 2 : raise ValueError ( ""Incomplete time component"" ) time_comps [ comp ] = int ( tstr [ pos : pos + 2 ] ) pos += 2 next_char = tstr [ pos : pos + 1 ] if not next_char or comp >= 2 : break if : raise ValueError ( ""Invalid time separator: %c"" % next_char ) pos += 1 if pos < len_str : if tstr [ pos ]!= ""."" : raise ValueError ( ""Invalid microsecond component"" ) else : pos += 1 len_remainder = len_str - pos if len_remainder not in ( 3, 6 ) : raise ValueError ( ""Invalid microsecond component"" ) time_comps [ 3 ] = int ( tstr [ pos : ] ) if len_remainder == 3 : time_comps [ 3 ] *= 1000 return time_comps",False,next_char != ':',next_char and comp > 0,0.6659705638885498 1270,"def as_proto ( self, ** kwargs ) : from nnabla. utils import nnabla_pb2 from nnabla. utils. save_function import _create_function_nntxt if kwargs : self. arguments. update ( kwargs ) n = nnabla_pb2. Network ( ) n. name = self. name n. batch_size = self. arguments. get ( ""batch_size"", 1 ) variables = OrderedDict ( self. variables ) variables. update ( self. parameters ) functions = self. functions for name, variable in variables. items ( ) : v = n. variable. add ( ) v. name = name v. type = variable. type shape = list ( variable. shape ) v. shape. dim. extend ( shape ) if variable. info : i = v. initializer i. type = variable. info. initializer. __class__. __name__. replace ( ""Initializer"", """" ) i. multiplier = 0.0 if : i. multiplier = variable. info. initializer. value elif i. type == ""Uniform"" : i. multiplier = - variable. info. initializer. lim [ 0 ] elif i. type == ""Normal"" : i. multiplier = variable. info. initializer. sigma else : pass for name, function in functions. items ( ) : f = n. function. add ( ) _create_",False,i.type == 'Constant',variable.type == 'Constant',0.6556774973869324 1271,"def _competing ( self, ctx : commands. Context, *, competing : str = None ) : """"""Sets [botname]'s competing status."""""" status = ( ctx. bot. guilds [ 0 ]. me. status if len ( ctx. bot. guilds ) > 0 else discord. Status. online ) if competing : if : await ctx. send ( _ ( ""The maximum length of competing descriptions is 128 characters."" ) ) return activity = discord. Activity ( name = competing, type = discord. ActivityType. competing ) else : activity = None await ctx. bot. change_presence ( status = status, activity = activity ) if activity : await ctx. send ( _ ( ""Status set to ``Competing in {competing}``."" ). format ( competing = competing ) ) else : await ctx. send ( _ ( ""Competing cleared."" ) )",False,len(competing) > 128,len(ctx.guilds) > 128,0.6695790886878967 1272,"def process ( self, resources ) : results = [ ] client = local_session ( self. manager. session_factory ). client ( ""sns"" ) for r in resources : policy = json. loads ( r. get ( ""Policy"" ) or ""{}"" ) policy_statements = policy. setdefault ( ""Statement"", [ ] ) new_policy, removed = self. remove_statements ( policy_statements, r, CrossAccountAccessFilter. annotation_key ) if : new_policy = policy_statements new_policy, added = self. 
add_statements ( new_policy ) if not removed and not added : continue results += { ""Name"" : r [ ""TopicArn"" ], ""State"" : ""PolicyModified"", ""Statements"" : new_policy, } policy [ ""Statement"" ] = new_policy client. set_topic_attributes ( TopicArn = r [ ""TopicArn"" ], AttributeName = ""Policy"", AttributeValue = json. dumps ( policy ), ) return results",False,new_policy is None,new_policy,0.6592462062835693 1273,"def then ( self, matches, when_response, context ) : if is_iterable ( when_response ) : ret = [ ] when_response = list ( when_response ) for match in when_response : if : if self. match_name : match. name = self. match_name matches. append ( match ) ret. append ( match ) return ret if self. match_name : when_response. name = self. match_name if when_response not in matches : matches. append ( when_response ) return when_response",False,match not in matches,match,0.6602612733840942 1274,"def process_response ( self, request, response ) : now = datetime. datetime. utcnow ( ) response [ ""Date"" ] = now. strftime ( ""%a, %d %b %Y %H:%M:%S GMT"" ) if not response. has_header ( ""Content-Length"" ) : response [ ""Content-Length"" ] = str ( len ( response. content ) ) if response. has_header ( ""ETag"" ) : : if if_none_match == response [ ""ETag"" ] : response. status_code = 304 response. content = """" response [ ""Content-Length"" ] = ""0"" if response. has_header ( ""Last-Modified"" ) : last_mod = response [ ""Last-Modified"" ] if_modified_since = request. META. get ( ""HTTP_IF_MODIFIED_SINCE"", None ) if if_modified_since == response [ ""Last-Modified"" ] : response. status_code = 304 response. content = """" response [ ""Content-Length"" ] = ""0"" if request. method == ""HEAD"" : response. content = """" return response",False,"if_none_match = request.META.get('HTTP_IF_NONE_MATCH', None)","self._is_last_modified(request, response)",0.6515220403671265 1275,"def _on_frame_start ( self, data ) : self. _wire_bytes_in += len ( data ) header, payloadlen = struct. unpack ( ""BB"", data ) self. _final_frame = header & self. FIN reserved_bits = header & self. RSV_MASK self. _frame_opcode = header & self. OPCODE_MASK self. _frame_opcode_is_control = self. _frame_opcode & 0x8 if self. _decompressor is not None and self. _frame_opcode!= 0 : self. _frame_compressed = bool ( reserved_bits & self. RSV1 ) reserved_bits &= ~ self. RSV1 if reserved_bits : self. _abort ( ) return self. _masked_frame = bool ( payloadlen & 0x80 ) payloadlen = payloadlen & 0x7F if self. _frame_opcode_is_control and payloadlen >= 126 : self. _abort ( ) return try : if : self. _frame_length = payloadlen if self. _masked_frame : self. stream. read_bytes ( 4, self. _on_masking_key ) else : self. _read_frame_data ( False ) elif payloadlen == 126 : self. stream. read_bytes ( 2, self. _on_frame_length_16 ) elif payloadlen == 127 : self. stream. read_bytes ( 8, self. _on_frame_length_64 ) except StreamClosedError : <",False,payloadlen < 126,self._frame_compressed,0.6725018620491028 1276,"def _initialize ( bot ) : bot. spawn_lock = asyncio. Lock ( ) config = bot. get_config_option ( ""spawn"" ) if not config : return cmds = config. get ( ""commands"" ) get_location = False for cmd, cnf in cmds. items ( ) : command. register ( _spawn, admin = True, final = True, name = cmd ) if : get_location = True logger. info ( ""spawn - %s"", "", "". join ( [ ""*"" + cmd for cmd in cmds ] ) ) plugins. register_admin_command ( list ( cmds ) ) if get_location : global _MAP_MATCH _MAP_MATCH = re. compile ( config. 
get ( ""map_regex"", _MAP_REGEX ), re. IGNORECASE | re. MULTILINE ) plugins. register_handler ( _location_handler, type = ""message"" )",False,cnf.get('allow_location'),admin,0.6533006429672241 1277,"def stale_possible_simple_keys ( self ) : for level in list ( self. possible_simple_keys ) : key = self. possible_simple_keys [ level ] if : if key. required : raise ScannerError ( ""while scanning a simple key"", key. mark, ""could not found expected ':'"", self. get_mark ( ), ) del self. possible_simple_keys [ level ]",False,key.line != self.line or self.index - key.index > 1024,key.mark is not None,0.6518813371658325 1278,"def add_css ( self, data ) : if data : for medium, paths in data. items ( ) : for path in paths : if : self. _css. setdefault ( medium, [ ] ). append ( path )",False,not self._css.get(medium) or path not in self._css[medium],path,0.6537683606147766 1279,"def parse_dataset ( data_dir : Path ) -> InternalBioNerDataset : entities_per_document = defaultdict ( list ) texts_per_document = { } with ( data_dir / ""S800.tsv"" ). open ( encoding = ""utf8"" ) as f : for line in f : fields = line. strip ( ). split ( ""\t"" ) if not fields : continue fname, pmid = fields [ 1 ]. split ( "":"" ) start, end = int ( fields [ 2 ] ), int ( fields [ 3 ] ) if : continue entities_per_document [ fname ]. append ( Entity ( ( start, end ), ""Species"" ) ) for fname in entities_per_document : with ( data_dir / ""abstracts"" / fname ). with_suffix ( "".txt"" ). open ( encoding = ""utf8"" ) as f : texts_per_document [ fname ] = f. read ( ) return InternalBioNerDataset ( documents = texts_per_document, entities_per_document = entities_per_document )",False,start == end,pmid == 'species',0.6690071225166321 1280,"def remove_participant ( request, thread, user ) : """"""remove thread participant, set ""recound private threads"" flag on user"""""" removed_owner = False remaining_participants = [ ] for participant in thread. participants_list : if participant. user == user : removed_owner = participant. is_owner else : remaining_participants. append ( participant. user ) set_users_unread_private_threads_sync ( participants = thread. participants_list ) if not remaining_participants : thread. delete ( ) else : thread. threadparticipant_set. filter ( user = user ). delete ( ) thread. subscription_set. filter ( user = user ). delete ( ) if removed_owner : thread. is_closed = True if : event_type = ""owner_left"" else : event_type = ""removed_owner"" else : if : event_type = ""participant_left"" else : event_type = ""removed_participant"" record_event ( request, thread, event_type, { ""user"" : { ""id"" : user. id, ""username"" : user. username, dsize = op. opers [ 0 ]. tsize if len ( op. opers ) == 3 : src1 = self. getOperValue ( op, 1 ) src2 = self. getOperValue ( op, 2 ) else : src1 = self. getOperValue ( op, 0 ) src2 = self. getOperValue ( op, 1 ) if src1 is None or src2 is None : self. undefFlags ( ) self. setOperValue ( op, 0, None ) return usrc1 = e_bits. unsigned ( src1, dsize ) usrc2 = e_bits. unsigned ( src2, dsize ) ures = usrc1 ^ usrc2 self. setOperValue ( op, 0, ures ) curmode = self. getProcMode ( ) if op. iflags & IF_PSR_S : if op. opers [ 0 ]. reg == 15 : if : self. setCPSR ( self. getSPSR ( curmode ) ) else : raise Exception ( ""Messed up opcode... adding to r15 from PM_usr or PM_sys"" ) self. setFlag ( PSR_N_bit, e_bits. is_signed ( ures, dsize ) ) self. setFlag ( PSR_Z_bit, not ures ) self. setFlag ( PSR_C_bit, e_bits. is_unsigned_carry ( ures, dsize ) ) self. 
setFlag ( PSR_V_",False,curmode != PM_sys and curmode != PM_usr,curmode,0.6520432233810425 1282,"def handle_facts_hpacu ( facts ) : disks = { } for k, value in facts. iteritems ( ) : m = HPACU_GENERAL_REGEX. match ( k ) if not m : continue n = HPACU_LOGICAL_PHYSICAL_REGEX. match ( m. group ( 2 ) ) physical_disk = n. group ( 1 ) if n else None property = n. group ( 2 ) if n else m. group ( 2 ) if not physical_disk : continue disks. setdefault ( physical_disk, { } ) [ property ] = value. strip ( ) detected_disks = [ ] for disk_handle, disk in disks. iteritems ( ) : if : continue size_value, size_unit = disk [ ""size"" ]. split ( ) detected_disks. append ( { ""serial_number"" : disk [ ""serial_number"" ], ""label"" : ""{} {}"". format ( "" "". join ( disk [ ""model"" ]. split ( ) ), disk [ ""interface_type"" ], ), ""size"" : int ( float ( size_value ) / units. size_divisor [ size_unit ] ), ""family"" : "" "". join ( disk [ ""model"" ]. split ( ) ), } ) return detected_disks",False,not disk.get('serial_number'),disk_handle != 'hPACU',0.6511922478675842 1283,"def is_eligible ( self, t, status, notif_number, in_notif_time, interval ) : small_states = { ""WARNING"" : ""w"", ""UNKNOWN"" : ""u"", ""CRITICAL"" : ""c"", ""RECOVERY"" : ""r"", ""FLAPPING"" : ""f"", ""DOWNTIME"" : ""s"", ""DOWN"" : ""d"", ""UNREACHABLE"" : ""u"", ""OK"" : ""o"", ""UP"" : ""o"", } if not self. time_based : if notif_number < self. first_notification : return False if self. last_notification!= 0 and notif_number > self. last_notification : return False else : if : return False if ( self. last_notification_time!= 0 and in_notif_time > self. last_notification_time * interval ) : return False if status in small_states and small_states [ status ] not in self. escalation_options : return False if self. escalation_period is not None and not self. escalation_period. is_time_valid ( t ) : return False return True",False,in_notif_time < self.first_notification_time * interval,self.last_notification_time and in_notif_time,0.6497511863708496 1284,"def offsets ( self ) : offsets = { } offset_so_far = 0 for name, ty in self. fields. items ( ) : if : l. warning ( ""Found a bottom field in struct %s. Ignore and increment the offset using the default "" ""element size."", self. name, ) continue if not self. _pack : align = ty. alignment if offset_so_far % align!= 0 : offset_so_far += align - offset_so_far % align offsets [ name ] = offset_so_far offset_so_far += ty. size // self. _arch. byte_width return offsets",False,"isinstance(ty, SimTypeBottom)",name in self.fields,0.654800534248352 1285,"def _internal_attach ( self, engine : Engine, usage : MetricUsage ) -> None : self. engine = engine for index, metric in enumerate ( itertools. chain ( self. args, self. kwargs. values ( ) ) ) : if isinstance ( metric, MetricsLambda ) : metric. _internal_attach ( engine, usage ) elif isinstance ( metric, Metric ) : if : engine. add_event_handler ( usage. STARTED, metric. started ) if not engine. has_event_handler ( metric. iteration_completed, usage. ITERATION_COMPLETED ) : engine. add_event_handler ( usage. ITERATION_COMPLETED, metric. iteration_completed )",False,"not engine.has_event_handler(metric.started, usage.STARTED)",not engine.has_event_handler,0.6532597541809082 1286,"def can_read ( self ) : if hasattr ( self. file, ""__iter__"" ) : iterator = iter ( self. file ) head = next ( iterator, None ) if head is None : self. repaired = [ ] return True if : self. repaired = itertools. 
chain ( [ head ], iterator ) return True else : raise IOSourceError ( ""Could not open source: %r (mode: %r)"" % ( self. file, self. options [ ""mode"" ] ) ) return False",False,"isinstance(head, str)",len(head) > 0,0.6563901901245117 1287,"def show_container_cd_url ( cmd, resource_group_name, name, slot = None ) : settings = get_app_settings ( cmd, resource_group_name, name, slot ) docker_enabled = False for setting in settings : if setting [ ""name"" ] == ""DOCKER_ENABLE_CI"" and setting [ ""value"" ] == ""true"" : docker_enabled = True break cd_settings = { } cd_settings [ ""DOCKER_ENABLE_CI"" ] = docker_enabled if docker_enabled : credentials = list_publishing_credentials ( cmd, resource_group_name, name, slot ) if : cd_url = credentials. scm_uri + ""/docker/hook"" cd_settings [ ""CI_CD_URL"" ] = cd_url else : cd_settings [ ""CI_CD_URL"" ] = """" return cd_settings",True,credentials,credentials,0.7043042182922363 1288,"def __lookingForWrite4Where ( self, gadgetsAlreadyTested ) : for gadget in self. __gadgets : if gadget in gadgetsAlreadyTested : continue f = gadget [ ""gadget"" ]. split ( "" ; "" ) [ 0 ] regex = re. search ( ""mov dword ptr \[(?P([(eax)|(ebx)|(ecx)|(edx)|(esi)|(edi)]{3}))\], (?P([(eax)|(ebx)|(ecx)|(edx)|(esi)|(edi)]{3}))$"", f, ) if : lg = gadget [ ""gadget"" ]. split ( "" ; "" ) [ 1 : ] try : for g in lg : if g. split ( ) [ 0 ]!= ""pop"" and g. split ( ) [ 0 ]!= ""ret"" : raise if g!= ""ret"" : if g. split ( ) [ 0 ] == ""ret"" and g. split ( ) [ 1 ]!= """" : raise print ( ""\t[+] Gadget found: 0x%x %s"" % ( gadget [ ""vaddr"" ], gadget [ ""gad",False,regex,gadget in gadgetsAlreadyTested,0.6866661906242371 1289,"def open_ ( self, path, flags, follow_link = True ) : path = self. resolve_path ( path, follow_link = follow_link ) if not os. path. exists ( path ) : return - 1 fd = self. linux_env. next_fd ( ) acc_mode = flags & self. linux_env. O_ACCMODE if os. path. isdir ( path ) : assert flags & self. linux_env. O_DIRECTORY == self. linux_env. O_DIRECTORY if : fdesc = FileDescriptorDirectory ( fd, flags, self, path ) else : raise RuntimeError ( ""Not implemented"" ) elif os. path. isfile ( path ) : if acc_mode == os. O_RDONLY : real_fd = os. open ( path, os. O_RDONLY ) else : raise RuntimeError ( ""Not implemented"" ) fdesc = FileDescriptorRegularFile ( fd, flags, self, real_fd ) elif os. path. islink ( path ) : raise RuntimeError ( ""Not implemented"" ) else : raise RuntimeError ( ""Unknown file type for %r"" % path ) self. linux_env. file_descriptors [ fd ] = fdesc fdesc. cont_device_id = self. device_id fdesc. inode = self. get_path_inode ( path ) fdesc. uid = self. linux_env. user_uid fdesc. gid = self. linux_env. user_gid size = os. path. getsize ( path ) fdesc. size = size fdesc. blksize = self. blocksize fdesc. blocks = ( size + ( ( 512 -",False,acc_mode == self.linux_env.O_RDONLY,os.path.isdir(path),0.6542212963104248 1290,"def numerify_args ( items, evaluation ) : items_sequence = items. get_sequence ( ) all_numeric = all ( item. is_numeric ( ) and item. get_precision ( ) is None for item in items_sequence ) if all_numeric and any ( not isinstance ( item, Number ) for item in items_sequence ) : items = items_sequence n_items = [ ] for item in items : if : n_expr = Expression ( ""N"", item, Integer ( 50 ) ) item = n_expr. evaluate ( evaluation ) n_items. append ( item ) items = n_items else : items = items. numerify ( evaluation ). 
get_sequence ( ) return items",False,"not isinstance(item, Number)",all_numeric,0.6530721783638 1291,"def move_to_next_word ( self, forward = True ) : if forward : match_iterator = re. finditer ( r""(\b\W+|$)"", self. edit_text, flags = re. UNICODE ) match_positions = [ m. start ( ) for m in match_iterator ] op = operator. gt else : match_iterator = re. finditer ( r""(\w+\b|^)"", self. edit_text, flags = re. UNICODE ) match_positions = reversed ( [ m. start ( ) for m in match_iterator ] ) op = operator. lt for pos in match_positions : if : self. set_edit_pos ( pos ) return pos",False,"op(pos, self.edit_pos)",op & pos > 0,0.6509331464767456 1292,"def status ( self ) : app = self. _app info = app. player. info if info : if app. player. paused : state = ""pause"" else : state = ""play"" else : state = ""stop"" status = [ ( ""volume"", int ( app. player. volume * 100 ) ), ( ""repeat"", int ( self. _options. repeat ) ), ( ""random"", int ( self. _options. shuffle ) ), ( ""single"", int ( self. _options. single ) ), ( ""consume"", 0 ), ( ""playlist"", self. _pl_ver ), ( ""playlistlength"", int ( bool ( app. player. info ) ) ), ( ""mixrampdb"", 0.0 ), ( ""state"", state ), ] if info : status. append ( ( ""audio"", ""%d:%d:%d"" % ( info ( ""~#samplerate"" ) or 0, info ( ""~#bitdepth"" ) or 0, info ( ""~#channels"" ) or 0, ), ) ) total_time = int ( info ( ""~#length"" ) ) elapsed_time = int ( app. player. get_",False,state != 'stop',self._options.status,0.6563791036605835 1293,"def main ( argv = None ) : """"""Entry point for script."""""" arg_parser = ArgumentParser ( description = DESCRIPTION ) arg_parser. add_argument ( ""--file"", default = ""galaxy.log"" ) arg_parser. add_argument ( ""--print_lines"", default = False, action = ""store_true"" ) arg_parser. add_argument ( ""--pattern"", default = None ) args = arg_parser. parse_args ( argv ) print_lines = args. print_lines pattern_str = args. pattern filter_pattern = re. compile ( pattern_str ) if pattern_str is not None else None times = [ ] for line in open ( args. file, ""r"" ) : if : continue match = TIMING_LINE_PATTERN. search ( line ) if not match : continue times. append ( float ( match. group ( 1 ) ) ) if print_lines : print ( line. strip ( ) ) template = ""Summary (ms) - Mean: %f, Median: %f, Max: %f, Min: %f, StdDev: %f"" message = template % ( numpy. mean ( times ), numpy. median ( times ), numpy. max ( times ), numpy. min ( times ), numpy. std ( times ), ) print ( message )",False,filter_pattern and (not filter_pattern.search(line)),filter_pattern.search(line) is None,0.6445654630661011 1294,"def get_data_service_client ( cli_ctx, service_type, account_name, account_key, connection_string = None, sas_token = None, socket_timeout = None, token_credential = None, endpoint_suffix = None, location_mode = None, ) : logger. debug ( ""Getting data service client service_type=%s"", service_type. __name__ ) try : client_kwargs = { ""account_name"" : account_name, ""account_key"" : account_key, ""connection_string"" : connection_string, ""sas_token"" : sas_token, } if socket_timeout : client_kwargs [ ""socket_timeout"" ] = socket_timeout if token_credential : client_kwargs [ ""token_credential"" ] = token_credential if endpoint_suffix : client_kwargs [ ""endpoint_suffix"" ] = endpoint_suffix client = service_type ( ** client_kwargs ) if location_mode : client. location_mode = location_mode except ValueError as exc : _ERROR_STORAGE_MISSING_INFO = get_sdk ( cli_ctx, ResourceType. 
DATA_STORAGE, ""common._error#_ERROR_STORAGE_MISSING_INFO"", ) if : raise ValueError ( exc ) raise CLIError ( ""Unable to obtain data client. Check your connection parameters."" ",False,_ERROR_STORAGE_MISSING_INFO in str(exc),exc,0.6544076204299927 1295,"def _do_cmp ( f1, f2 ) : bufsize = BUFSIZE with open ( f1, ""rb"" ) as fp1, open ( f2, ""rb"" ) as fp2 : while True : b1 = fp1. read ( bufsize ) b2 = fp2. read ( bufsize ) if : return False if not b1 : return True",False,b1 != b2,not b2,0.6706905364990234 1296,"def resolve ( self, context, ignore_failures = False ) : try : obj = resolve_variable ( self. var, context ) except VariableDoesNotExist : if ignore_failures : obj = None else : if : return settings. TEMPLATE_STRING_IF_INVALID else : obj = settings. TEMPLATE_STRING_IF_INVALID for func, args in self. filters : arg_vals = [ ] for lookup, arg in args : if not lookup : arg_vals. append ( arg ) else : arg_vals. append ( resolve_variable ( arg, context ) ) obj = func ( obj, * arg_vals ) return obj",False,settings.TEMPLATE_STRING_IF_INVALID,self.filters is None,0.652380108833313 1297,def _setup_ec2 ( self ) : if self. ec2 and self. _instance and self. _reservation : return if self. id : if self. region_name : for region in boto. ec2. regions ( ) : if region. name == self. region_name : self. ec2 = region. connect ( ) if : try : rs = self. ec2. get_all_reservations ( [ self. instance_id ] ) if len ( rs ) >= 1 : for instance in rs [ 0 ]. instances : if instance. id == self. instance_id : self. _reservation = rs [ 0 ] self. _instance = instance except EC2ResponseError : pass,False,self.instance_id and (not self._instance),self.instance_id,0.6545414924621582 1298,"def scatter_add ( x0, indices, x1, axis ) : output = np. copy ( x0 ) if x0. ndim == 2 : for i in range ( indices. shape [ 0 ] ) : for j in range ( indices. shape [ 1 ] ) : if axis == 0 or axis == - 2 : output [ indices [ i ] [ j ] ] [ j ] += x1 [ i ] [ j ] elif : output [ i ] [ indices [ i ] [ j ] ] += x1 [ i ] [ j ] elif x0. ndim == 3 : for i in range ( indices. shape [ 0 ] ) : for j in range ( indices. shape [ 1 ] ) : for k in range ( indices. shape [ 2 ] ) : if axis == 0 or axis == - 3 : output [ indices [ i ] [ j ] [ k ] ] [ j ] [ k ] += x1 [ i ] [ j ] [ k ] elif axis == 1 or axis == - 2 : output [ i ] [ indices [ i ] [ j ] [ k ] ] [ k ] += x1 [ i ] [ j ] [ k ] elif axis == 2 or axis == - 1 : output [ i ] [ j ] [ indices [ i ] [ j ] [ k ] ] += x1 [ i ] [ j ] [ k ] return output",False,axis == 1 or axis == -1,x0.ndim == 1,0.664332389831543 1299,"def cmd_ShowNode ( event ) : c = event. get ( ""c"" ) nd = geotag_Controller. getAttr ( c. p ) try : txt = nd. h. split ( None, 5 ) what = ""dummy"", ""lat"", ""lng"", ""zoom"", ""maptype"", ""description"" data = dict ( zip ( what, txt ) ) data [ ""lat"" ] = float ( data [ ""lat"" ] ) data [ ""lng"" ] = float ( data [ ""lng"" ] ) if : data [ ""zoom"" ] = int ( data [ ""zoom"" ] ) if ""description"" not in data or not data [ ""description"" ]. strip ( ) : data [ ""description"" ] = c. p. h except ( ValueError, TypeError ) : data = { ""description"" : c. p. h } g. pygeotag. show_position ( data )",True,'zoom' in data,'zoom' in data,0.6696645021438599 1300,"def open ( self ) : cmd = self. _check_cmd ( ) params = self. params. copy ( ) params [ ""_bg"" ] = True if : tmpfile = tempfile. NamedTemporaryFile ( prefix = ""livestreamer"", suffix = "".err"", delete = False ) params [ ""_err"" ] = tmpfile else : params [ ""_err"" ] = open ( os. 
devnull, ""wb"" ) with params [ ""_err"" ] : stream = cmd ( ** params ) time. sleep ( 0.5 ) process_alive = stream. process. returncode is None if not process_alive : if : raise StreamError ( ( ""Error while executing subprocess, "" ""error output logged to: {0}"" ). format ( tmpfile. name ) ) else : raise StreamError ( ""Error while executing subprocess"" ) return StreamProcessIO ( self. session, stream. process, timeout = self. timeout )",False,self.errorlog,self.delete_stderr,0.6551488041877747 1301,"def user_can_administer_repository ( self, user, repository ) : """"""Return True if the received user can administer the received repository."""""" if user : if repository : repository_admin_role = repository. admin_role for rra in repository. roles : role = rra. role if role. id == repository_admin_role. id : for ura in role. users : role_member = ura. user if : return True for gra in role. groups : group = gra. group for uga in group. members : member = uga. user if member. id == user. id : return True return False",False,role_member.id == user.id,role_member.id == role_member.id,0.6542470455169678 1302,"def get_all_possible_filenames ( version_str ) : """"""Get a list of all filename on this system."""""" current_system = platform. system ( ) POSSIBLE_FILENAMES = [ ] for suffix in ClangUtils. SUFFIXES [ current_system ] : for name in ClangUtils. POSSIBLE_FILENAMES [ current_system ] : if : name = name. replace ( ""$version"", version_str ) POSSIBLE_FILENAMES. append ( ""{name}{suffix}"". format ( name = name, suffix = suffix ) ) return POSSIBLE_FILENAMES",False,platform.system() == 'Linux',version_str,0.6528959274291992 1303,"def getChartList ( self ) : cache_key = ""bluray.charts"" movie_list = { ""name"" : ""Blu-ray.com - New Releases"", ""url"" : self. display_url, ""order"" : self. chart_order, ""list"" : self. getCache ( cache_key ) or [ ], } if not movie_list [ ""list"" ] : movie_ids = [ ] max_items = 10 rss_movies = self. getRSSData ( self. rss_url ) for movie in rss_movies : name = ( self. getTextElement ( movie, ""title"" ) . lower ( ) . split ( ""blu-ray"" ) [ 0 ] . strip ( ""("" ) . rstrip ( ) ) year = ( self. getTextElement ( movie, ""description"" ) . split ( ""|"" ) [ 1 ] . strip ( ""("" ) . strip ( ) ) if not name. find ( ""/"" ) == - 1 : continue movie = self. search ( name, year ) if movie : if movie. get ( ""imdb"" ) in movie_ids : ",False,len(movie_list['list']) >= max_items,self.getConfig('imdb'),0.6596227288246155 1304,"def get_norm ( norm, out_channels ) : if isinstance ( norm, str ) : if : return None norm = { ""BN"" : BatchNorm2d, ""GN"" : lambda channels : nn. GroupNorm ( 32, channels ), ""nnSyncBN"" : nn. SyncBatchNorm, """" : lambda x : x, } [ norm ] return norm ( out_channels )",False,len(norm) == 0,norm == 'nnSyncBN',0.6588277816772461 1305,"def delete_s3_bucket ( bucket_name, dry_run = True, quiet = False ) : s3client = boto3. client ( ""s3"" ) versions = s3client. list_object_versions ( Bucket = bucket_name ). get ( ""Versions"", [ ] ) objects = [ { ""Key"" : o [ ""Key"" ], ""VersionId"" : o [ ""VersionId"" ] } for o in versions ] if objects : for obj in objects : if : puts ( colored. red ( ""AWS::S3::Key {}/{}"". format ( bucket_name, obj [ ""Key"" ] ) ) ) if not dry_run : s3client. delete_objects ( Bucket = bucket_name, Delete = { ""Objects"" : objects, ""Quiet"" : False } ) if : puts ( colored. red ( ""S3 Bucket: {}"". format ( bucket_name ) ) ) if not dry_run : s3client. 
delete_bucket ( Bucket = bucket_name )",False,not quiet,quiet,0.6698378324508667 1306,"def remove ( self, url ) : try : i = self. items. index ( url ) except ( ValueError, IndexError ) : pass else : was_selected = i in self. selectedindices ( ) self. list. delete ( i ) del self. items [ i ] if not self. items : self. mp. hidepanel ( self. name ) elif : if i >= len ( self. items ) : i = len ( self. items ) - 1 self. list. select_set ( i )",True,was_selected,was_selected,0.6687973141670227 1307,"def get_ndarray_bounds ( self ) -> List [ Tuple [ float, float ] ] : bounds = [ ] final_bound = None for hp in self. config_space. get_hyperparameters ( ) : if isinstance ( hp, CS. CategoricalHyperparameter ) : if not self. _fix_attribute_value ( hp. name ) : bound = [ ( 0.0, 1.0 ) ] * len ( hp. choices ) else : bound = [ ( 0.0, 0.0 ) ] * len ( hp. choices ) bound [ int ( self. value_for_last_pos ) ] = ( 1.0, 1.0 ) else : if not self. _fix_attribute_value ( hp. name ) : bound = [ ( 0.0, 1.0 ) ] else : val_int = float ( hp. _inverse_transform ( np. array ( [ self. value_for_last_pos ] ) ). item ( ) ) bound = [ ( val_int, val_int ) ] if : final_bound = bound else : bounds. extend ( bound ) if final_bound is not None : bounds. extend ( final_bound ) return bounds",False,hp.name == self.name_last_pos,bound is not None and final_bound is not None,0.6517331600189209 1308,"def _find_mountains_mask ( world, factor ) : _mask = [ [ False for x in range ( factor * world. width ) ] for y in range ( factor * world. height ) ] for y in range ( factor * world. height ) : for x in range ( factor * world. width ) : if world. is_mountain ( ( int ( x / factor ), int ( y / factor ) ) ) : v = len ( world. tiles_around ( ( int ( x / factor ), int ( y / factor ) ), radius = 3, predicate = world. is_mountain, ) ) if : _mask [ y ] [ x ] = v / 4 return _mask",False,v > 32,_mask is not None,0.6718873977661133 1309,"def _validate ( self ) : if not self. allow_non_single and ( self. _value [ 0 ] is None or self. _value [ 0 ] is None ) : raise InvalidArgumentValue ( ""Address cannot be unbounded if allow_non_single is not set."" ) if self. _value [ 0 ] : row = int ( self. _value [ 0 ] ) if : raise InvalidArgumentValue ( ""Address coordinates may not be below zero: "" + repr ( self. _value ) ) if self. _value [ 1 ] : col = int ( self. _value [ 1 ] ) if col < 1 : raise InvalidArgumentValue ( ""Address coordinates may not be below zero: "" + repr ( self. _value ) )",True,row < 1,row < 1,0.6835970282554626 1310,"def _filter_imgs ( self, min_size = 32 ) : """"""Filter images too small or without ground truths."""""" valid_inds = [ ] ids_with_ann = set ( _ [ ""image_id"" ] for _ in self. coco. anns. values ( ) ) ids_in_cat = set ( ) for i, class_id in enumerate ( self. cat_ids ) : ids_in_cat |= set ( self. coco. cat_img_map [ class_id ] ) ids_in_cat &= ids_with_ann valid_img_ids = [ ] for i, img_info in enumerate ( self. data_infos ) : img_id = self. img_ids [ i ] if : continue if min ( img_info [ ""width"" ], img_info [ ""height"" ] ) >= min_size : valid_inds. append ( i ) valid_img_ids. append ( img_id ) self. img_ids = valid_img_ids return valid_inds",False,self.filter_empty_gt and img_id not in ids_in_cat,img_id in valid_inds and img_id in ids_in_cat,0.6515251398086548 1311,"def comparisons ( self, predicates, simple_cover ) : compounder = self. Compounder ( simple_cover ) comparison_count = { } for pred in predicates : if : estimate = self. estimate ( compounder ( pred ) ) else : estimate = self. 
estimate ( simple_cover [ pred ] ) comparison_count [ pred ] = estimate return comparison_count",False,len(pred) > 1,pred == pred,0.6681349277496338 1312,"def find_cookie ( line ) : try : line_string = line. decode ( ""utf-8"" ) except UnicodeDecodeError : msg = ""invalid or missing encoding declaration"" if filename is not None : msg = ""{} for {!r}"". format ( msg, filename ) raise SyntaxError ( msg ) match = cookie_re. match ( line_string ) if not match : return None encoding = _get_normal_name ( match. group ( 1 ) ) try : codecs. lookup ( encoding ) except LookupError : if filename is None : msg = ""unknown encoding: "" + encoding else : msg = ""unknown encoding for {!r}: {}"". format ( filename, encoding ) raise SyntaxError ( msg ) if bom_found : if : if filename is None : msg = ""encoding problem: utf-8"" else : msg = ""encoding problem for {!r}: utf-8"". format ( filename ) raise SyntaxError ( msg ) encoding += ""-sig"" return encoding",False,encoding != 'utf-8',encoding is not None,0.6532049775123596 1313,"def getBranches ( self, emu = None ) : ret = [ ] flags = self. iflags & envi. ARCH_MASK addb = False if self. iflags & IF_BRANCH : addb = True if not ( self. iflags & IF_NOFALL ) : flags |= envi. BR_COND if not self. iflags & envi. IF_NOFALL : ret. append ( ( self. va + self. size, flags | envi. BR_FALL ) ) if len ( self. opers ) == 0 : return ret if self. iflags & IF_CALL : flags |= envi. BR_PROC addb = True if addb : oper0 = self. opers [ 0 ] if : flags |= envi. BR_DEREF tova = oper0. getOperAddr ( self, emu = emu ) else : tova = oper0. getOperValue ( self, emu = emu ) ret. append ( ( tova, flags ) ) return ret",False,oper0.isDeref(),addb,0.6546223163604736 1314,"def actor_from_stream ( self, stream : Optional [ StreamT ], *, index : int = None, active_partitions : Set [ TP ] = None, channel : ChannelT = None ) -> ActorRefT : """"""Create new actor from stream."""""" we_created_stream = False actual_stream : StreamT if stream is None : actual_stream = self. stream ( channel = channel, concurrency_index = index, active_partitions = active_partitions, ) we_created_stream = True else : assert stream. concurrency_index == index assert stream. active_partitions == active_partitions actual_stream = stream res = self. fun ( actual_stream ) if isinstance ( res, AsyncIterable ) : if : actual_stream. add_processor ( self. _maybe_unwrap_reply_request ) return cast ( ActorRefT, AsyncIterableActor ( self, actual_stream, res, index = actual_stream. concurrency_index, active_partitions = actual_stream. active_partitions, loop = self. loop, beacon = self. beacon, ), ) else : return cast ( ActorRefT, <",False,we_created_stream,self._create_reply_request,0.6563795804977417 1315,def compiled_query ( self ) : if : self. lazy_init_lock_. acquire ( ) try : if : self. compiled_query_ = CompiledQuery ( ) finally : self. lazy_init_lock_. release ( ) return self. compiled_query_,True,self.compiled_query_ is None,self.compiled_query_ is None,0.6553391218185425 1316,"def _omit_keywords ( self, context ) : omitted_kws = 0 for event, elem in context : omit = elem. tag == ""kw"" and elem. get ( ""type"" )!= ""teardown"" start = event == ""start"" if omit and start : omitted_kws += 1 if : yield event, elem elif not start : elem. clear ( ) if omit and not start : omitted_kws -= 1",False,not omitted_kws,omit_kws == 0,0.6594420671463013 1317,"def __exit__ ( self, exc_type, exc_val, exc_tb ) : if self. _should_meta_profile : end_time = timezone. now ( ) exception_raised = exc_type is not None if : Logger. 
error ( ""Exception when performing meta profiling, dumping trace below"" ) traceback. print_exception ( exc_type, exc_val, exc_tb ) request = getattr ( DataCollector ( ). local, ""request"", None ) if request : curr = request. meta_time or 0 request. meta_time = curr + _time_taken ( self. start_time, end_time )",True,exception_raised,exception_raised,0.6654812097549438 1318,"def correct_for_autogen_constraints ( self, conn_unique_constraints, conn_indexes, metadata_unique_constraints, metadata_indexes, ) : conn_indexes_by_name = dict ( ( c. name, c ) for c in conn_indexes ) doubled_constraints = set ( index for index in conn_indexes if index. info. get ( ""duplicates_constraint"" ) ) for ix in doubled_constraints : conn_indexes. remove ( ix ) for idx in list ( metadata_indexes ) : if idx. name in conn_indexes_by_name : continue exprs = idx. expressions for expr in exprs : while isinstance ( expr, UnaryExpression ) : expr = expr. element if : util. warn ( ""autogenerate skipping functional index %s; "" ""not supported by SQLAlchemy reflection"" % idx. name ) metadata_indexes. discard ( idx )",False,"not isinstance(expr, Column)",not expr.has_function,0.6546345949172974 1319,"def add_model ( self, model, initial = None ) : """"""Register a model with the state machine, initializing triggers and callbacks."""""" models = listify ( model ) if initial is None : if : raise ValueError ( ""No initial state configured for machine, must specify when adding model."" ) else : initial = self. initial for mod in models : mod = self if mod == ""self"" else mod if mod not in self. models : self. _checked_assignment ( mod, ""trigger"", partial ( self. _get_trigger, mod ) ) for trigger in self. events : self. _add_trigger_to_model ( trigger, mod ) for state in self. states. values ( ) : self. _add_model_to_state ( state, mod ) self. set_state ( initial, model = mod ) self. models. append ( mod )",True,self.initial is None,self.initial is None,0.6569606065750122 1320,"def environ_add_POST ( env, data, content_type = None ) : if data is None : return if env [ ""REQUEST_METHOD"" ] not in ( ""POST"", ""PUT"" ) : env [ ""REQUEST_METHOD"" ] = ""POST"" has_files = False if hasattr ( data, ""items"" ) : data = data. items ( ) data. sort ( ) has_files = filter ( lambda _ : isinstance ( _ [ 1 ], ( tuple, list ) ), data ) if content_type is None : content_type = ( ""multipart/form-data"" if has_files else ""application/x-www-form-urlencoded"" ) if content_type. startswith ( ""multipart/form-data"" ) : if not isinstance ( data, str ) : content_type, data = _encode_multipart ( data, content_type ) elif content_type. startswith ( ""application/x-www-form-urlencoded"" ) : if : raise ValueError ( ""Submiting files is not allowed for"" "" content type `%s`"" % content_type ) if not isinstance ( data, str ) : data = urllib. urlencode ( data ) else : if not isinstance ( data, str ) : raise ValueError ( ""Please provide `POST` data as string"" "" for content type `%s`"" % content_type ) env [ ""wsgi.input"" ] = StringIO ( data ) env [ ""webob.is_body_seekable"" ] = True",True,has_files,has_files,0.6598866581916809 1321,"def _transform_init_kwargs ( cls, kwargs ) : transformed = [ ] for field in list ( kwargs. keys ( ) ) : prop = getattr ( cls, field, None ) if : value = kwargs. pop ( field ) _transform_single_init_kwarg ( prop, field, value, kwargs ) transformed. 
append ( ( field, value ) ) return transformed",False,"isinstance(prop, MoneyProperty)",prop is not None,0.6531053781509399 1322,"def map ( callbackfn ) : array = this. to_object ( ) arr_len = array. get ( ""length"" ). to_uint32 ( ) if not callbackfn. is_callable ( ) : raise this. MakeError ( ""TypeError"", ""callbackfn must be a function"" ) T = arguments [ 1 ] A = this. Js ( [ ] ) k = 0 while k < arr_len : Pk = str ( k ) if : kValue = array. get ( Pk ) mappedValue = callbackfn. call ( T, ( kValue, this. Js ( k ), array ) ) A. define_own_property ( Pk, { ""value"" : mappedValue, ""writable"" : True, ""enumerable"" : True, ""configurable"" : True, }, ) k += 1 return A",False,array.has_property(Pk),array.has_key(Pk),0.6559935808181763 1323,"def join ( self, * args ) : rv = self if self. _model_ is not None : joins = [ ] jdata = [ ] auto_select_tables = [ self. _model_. table ] for arg in args : condition, table, rel_type = self. _parse_rjoin ( arg ) joins. append ( condition ) jdata. append ( ( arg, table. _tablename, rel_type ) ) auto_select_tables. append ( table ) if : q = joins [ 0 ] for join in joins [ 1 : ] : q = q & join rv = rv. where ( q, model = self. _model_ ) return self. _join_set_builder ( rv, jdata, auto_select_tables ) return rv",True,joins,joins,0.7149646282196045 1324,"def best_image ( width, height ) : image = images [ 0 ] for img in images : if : return img elif img. width >= width and img. width * img. height > image. width * image. height : image = img return image",False,img.width == width and img.height == height,img.width < width and img.height < height,0.6538563966751099 1325,"def get_full_path ( path ) : if ""://"" not in path : path = os. path. join ( self. AUTO_COLL_TEMPL, path, """" ) if : path = os. path. join ( abs_path, path ) return path",False,abs_path,os.path.exists(abs_path),0.6650346517562866 1326,"def _read_data_from_all_categories ( self, directory, config, categories ) : lines = [ ] for category in categories : data_file = os. path. join ( directory, _DATASET_VERSION, category, config ) if : with open ( data_file ) as f : ls = f. read ( ). split ( ""\n"" ) for l in ls [ : : - 1 ] : if not l : ls. remove ( l ) lines. extend ( ls ) return lines",True,os.path.exists(data_file),os.path.exists(data_file),0.6471824645996094 1327,"def get_bit_length_from_plateau_lengths ( merged_plateau_lengths ) -> int : if len ( merged_plateau_lengths ) == 0 : return 0 if len ( merged_plateau_lengths ) == 1 : return int ( merged_plateau_lengths [ 0 ] ) round_plateau_lengths ( merged_plateau_lengths ) histogram = c_auto_interpretation. get_threshold_divisor_histogram ( merged_plateau_lengths ) if len ( histogram ) == 0 : return 0 else : sorted_indices = np. argsort ( histogram ) [ : : - 1 ] max_count = histogram [ sorted_indices [ 0 ] ] result = sorted_indices [ 0 ] for i in range ( 1, len ( sorted_indices ) ) : if histogram [ sorted_indices [ i ] ] < 0.25 * max_count : break if : result = sorted_indices [ i ] return int ( result )",False,sorted_indices[i] <= 0.5 * result,result == 0.25 * max_count,0.6554892063140869 1328,"def empty_trash ( ) : from app import current_app as app with app. app_context ( ) : events = Event. query. filter ( Event. deleted_at. isnot ( None ) ). all ( ) users = User. query. filter ( User. deleted_at. isnot ( None ) ). all ( ) sessions = Session. query. filter ( Session. deleted_at. isnot ( None ) ). all ( ) pending_orders = Order. query. filter_by ( status = ""pending"" ) for event in events : if datetime. now ( ) - event. 
deleted_at >= timedelta ( days = 30 ) : DataManager. delete_event ( event. id ) for user in users : if datetime. now ( ) - user. deleted_at >= timedelta ( days = 30 ) : transaction = transaction_class ( Event ) transaction. query. filter_by ( user_id = user. id ). delete ( ) delete_from_db ( user, ""User deleted permanently"" ) for session_ in sessions : if : delete_from_db ( session_, ""Session deleted permanently"" ) for pending_order in pending_orders : if datetime. now ( ) - pending_order. created_at >= timedelta ( days = 3 ) : pending_order. status = ""expired"" save_to_db ( pending_order, ""Pending order expired."" )",False,datetime.now() - session_.deleted_at >= timedelta(days=30),session_.status == 'deleted',0.6461283564567566 1329,"def __new__ ( cls, * nodes ) : if not nodes : raise TypeError ( ""DisjunctionNode() requires at least one node"" ) elif len ( nodes ) == 1 : return nodes [ 0 ] self = super ( DisjunctionNode, cls ). __new__ ( cls ) self. __nodes = [ ] for node in nodes : if not isinstance ( node, Node ) : raise TypeError ( ""DisjunctionNode() expects Node instances as arguments;"" "" received a non-Node instance %r"" % node ) if : self. __nodes. extend ( node. __nodes ) else : self. __nodes. append ( node ) return self",False,"isinstance(node, DisjunctionNode)","hasattr(node, '__nodes')",0.6517279148101807 1330,"def main ( argv ) : opts = setup ( ). parse_args ( argv ) vw = vivisect. VivWorkspace ( ) vw. loadWorkspace ( opts. vw ) print ( ""# %s"" % opts. vw ) fnames = { } for fva, etype, ename, fname in vw. getExports ( ) : enamekey = ename. lower ( ) fnamekey = fname. lower ( ) fnames [ fname ] = True if : continue rtype, rname, ccname, funcname, args = vw. getFunctionApi ( fva ) args = tuple ( [ ( type_lookup. get ( t, t ), name_lookup. get ( t ) ) for t, name in args ] ) print ( "" '%s.%s':( %r, None, %r, '%s.%s', %r ),"" % ( fnamekey, enamekey, rtype, ccname, fname, ename, args ) ) for fwdfname in fnames. keys ( ) : for rva, name, fwdname in vw. getFileMeta ( fwdfname, ""forwarders"", ( ) ) : fwdapi = vw. getImpApi ( fwdname ) if not fwdapi : print ( "" # FIXME unresolved %s -> %s"" % ( name, fwdname ) ) continue print ( "" '%s.%s':%r,"" % ( fwdfname. lower ( ), name. lower ( ), fwdapi ) )",False,not vw.isFunction(fva),not rtype,0.6537138223648071 1331,"def email ( request ) : if request. method == ""POST"" : form = EmailForm ( request. POST, request. FILES ) if form. is_valid ( ) : subject = request. POST. get ( ""subject"", """" ) message = request. POST. get ( ""message"", """" ) from_email = request. POST. get ( ""from_email"", """" ) to_email = request. POST. get ( ""to_email"", """" ) file = request. FILES. get ( ""files"", None ) status = request. POST. get ( ""email_draft"", """" ) email = EmailMessage ( subject, message, from_email, [ to_email ] ) email. content_subtype = ""html"" f = form. save ( ) if : email. attach ( file. name, file. read ( ), file. content_type ) f. file = file if status : f. status = ""draft"" else : email. send ( fail_silently = False ) f. save ( ) return HttpResponseRedirect ( reverse ( ""emails:list"" ) ) else : return render ( request, ""create_mail.html"", { ""form"" : form } ) else : form = EmailForm ( ) return render ( request, ""create_mail.html"", { ""form"" : form } )",False,file is not None,file.has_file(),0.655273973941803 1332,"def stop ( self ) : with self. lock : if : return self. task_queue. put ( None ) self. result_queue. put ( None ) process = self. process self. process = None self. task_queue = None self. 
result_queue = None process. join ( timeout = 0.1 ) if process. exitcode is None : os. kill ( process. pid, signal. SIGKILL ) process. join ( )",False,not self.process,self.process is None,0.6593582034111023 1333,"def __init__ ( self, certificate = None, ssl_version = None, certreqs = None, cacerts = None, chatty = True, connectionchatty = False, starttls_server = False, npn_protocols = None, alpn_protocols = None, ciphers = None, context = None, ) : if context : self. context = context else : self. context = ssl. SSLContext ( ssl_version if ssl_version is not None else ssl. PROTOCOL_TLS ) self. context. verify_mode = certreqs if certreqs is not None else ssl. CERT_NONE if : self. context. load_verify_locations ( cacerts ) if certificate : self. context. load_cert_chain ( certificate ) if npn_protocols : self. context. set_npn_protocols ( npn_protocols ) if alpn_protocols : self. context. set_alpn_protocols ( alpn_protocols ) if ciphers : self. context. set_ciphers ( ciphers ) self. chatty = chatty self. connectionchatty = connectionchatty self. starttls_server = starttls_server self. sock = socket. socket ( ) self. port = support. bind_port ( self. sock ) self. flag = None self. active = False self. selected_npn_protocols = [ ] self. selected_alpn_protocols = [ ] self. shared_ciphers = [ ] self. conn_errors = [ ] threading. Thread.",True,cacerts,cacerts,0.6620203256607056 1334,"def to_key ( literal_or_identifier ) : """"""returns string representation of this object"""""" if literal_or_identifier [ ""type"" ] == ""Identifier"" : return literal_or_identifier [ ""name"" ] elif literal_or_identifier [ ""type"" ] == ""Literal"" : k = literal_or_identifier [ ""value"" ] if isinstance ( k, float ) : return unicode ( float_repr ( k ) ) elif ""regex"" in literal_or_identifier : return compose_regex ( k ) elif isinstance ( k, bool ) : return ""true"" if k else ""false"" elif : return ""null"" else : return unicode ( k )",True,k is None,k is None,0.6674331426620483 1335,"def load_package_list ( self, options_file ) : json_wrapper_option_list = JsonWrapper ( options_file ) option_list_json = json_wrapper_option_list. read ( ) options_sorted = option_list_json. items ( ) self. package_menu_items = [ ] base_path = os. path. dirname ( options_file ) package_list = [ ] if len ( options_sorted ) == 1 : self. inactive_screen = True list ( options_sorted ) [ 0 ] [ 1 ] [ ""visible"" ] = True if platform. machine ( ) == ""aarch64"" and ""realtime"" in dict ( options_sorted ) : dict ( options_sorted ) [ ""realtime"" ] [ ""visible"" ] = False default_selected = 0 visible_options_cnt = 0 for install_option in options_sorted : if install_option [ 1 ] [ ""visible"" ] == True : package_list = PackageSelector. get_packages_to_install ( install_option [ 1 ], base_path ) self. package_menu_items. append ( ( install_option [ 1 ] [ ""title"" ], self. exit_function, [ install_option [ 0 ], package_list ], ) ) if : default_selected = visible_options_cnt visible_options_cnt = visible_options_cnt + 1 if self.",False,install_option[0] == 'minimal',visible_options_cnt >= 0,0.6488913297653198 1336,"def pack_identifier ( self ) : """"""Return a combined identifier for the whole pack if this has more than one episode."""""" if self. id_type == ""ep"" : if : return ""S%02dE%02d-E%02d"" % ( self. season, self. episode, self. episode + self. episodes - 1, ) else : return self. identifier else : return self. 
identifier",False,self.episodes > 1,self.identifier is None,0.6666706800460815 1337,"def create_files ( self, problem_id, source_code, * args, ** kwargs ) : super ( ). create_files ( problem_id, source_code, * args, ** kwargs ) try : source_code = utf8text ( source_code ) except UnicodeDecodeError : raise CompileError ( ""Your UTF-8 is bad, and you should feel bad"" ) class_name = find_class ( source_code ) self. _code = self. _file ( ""%s.java"" % class_name. group ( 1 ) ) try : with open ( self. _code, ""wb"" ) as fo : fo. write ( utf8bytes ( source_code ) ) except IOError as e : if : raise CompileError ( ""Why do you need a class name so long? As a judge, I sentence your code to death.\n"" ) raise self. _class_name = class_name. group ( 1 )",False,"e.errno in (errno.ENAMETOOLONG, errno.ENOENT, errno.EINVAL)",e.args[0] not in [TAB > 2,0.6500363349914551 1338,"def get_multi ( self, hrefs ) : hrefs = set ( hrefs ) href_xml = [ ] for href in hrefs : if href!= self. _normalize_href ( href ) : raise exceptions. NotFoundError ( href ) href_xml. append ( ""{}"". format ( href ) ) if not href_xml : return ( ) data = self. get_multi_template. format ( hrefs = ""\n"". join ( href_xml ) ). encode ( ""utf-8"" ) response = self. session. request ( ""REPORT"", """", data = data, headers = self. session. get_default_headers ( ) ) root = _parse_xml ( response. content ) rv = [ ] hrefs_left = set ( hrefs ) for href, etag, prop in self. _parse_prop_responses ( root ) : raw = prop. find ( self. get_multi_data_query ) if raw is None : dav_logger. warning ( ""Skipping {}, the item content is missing."". format ( href ) ) continue raw = raw. text or u"""" if isinstance ( raw, bytes ) : raw = raw. decode ( response. encoding ) if isinstance ( etag, bytes ) : etag = etag. decode ( response. encoding ) try : hrefs_left. remove ( href ) except KeyError : if : dav_logger. warning ( ""Server sent item twice: {}"". format ( href ) ) else : ",False,href in hrefs,href in hrefs_left,0.6747580766677856 1339,"def keyvault_id ( cmd, namespace ) : """"""Validate storage account name"""""" from azure. cli. core. profiles import ResourceType from azure. cli. core. commands. client_factory import get_mgmt_service_client if not namespace. keyvault : return if ""/providers/Microsoft.KeyVault/vaults/"" in namespace. keyvault : resource = namespace. keyvault. split ( ""/"" ) kv_name = resource [ resource. index ( ""Microsoft.KeyVault"" ) + 2 ] kv_rg = resource [ resource. index ( ""resourceGroups"" ) + 1 ] else : kv_name = namespace. keyvault kv_rg = namespace. resource_group_name try : keyvault_client = get_mgmt_service_client ( cmd. cli_ctx, ResourceType. MGMT_KEYVAULT ) vault = keyvault_client. vaults. get ( kv_rg, kv_name ) if : raise ValueError ( ""KeyVault named '{}' not found in the resource group '{}'."". format ( kv_name, kv_rg ) ) namespace. keyvault = vault. id namespace. keyvault_url = vault. properties. vault_uri except Exception as exp : raise ValueError ( ""Invalid KeyVault reference: {}\n{}"". format ( namespace. keyvault, exp ) )",True,not vault,not vault,0.7056794166564941 1340,"def _get_dendrogram_key ( adata, dendrogram_key, groupby ) : if not isinstance ( dendrogram_key, str ) : if isinstance ( groupby, str ) : dendrogram_key = f""dendrogram_{groupby}"" elif : dendrogram_key = f'dendrogram_{""_"".join(groupby)}' if dendrogram_key not in adata. uns : from.. tools. _dendrogram import dendrogram logg. warning ( f""dendrogram data not found (using key={dendrogram_key}). 
"" ""Running `sc.tl.dendrogram` with default parameters. For fine "" ""tuning it is recommended to run `sc.tl.dendrogram` independently."" ) dendrogram ( adata, groupby, key_added = dendrogram_key ) if ""dendrogram_info"" not in adata. uns [ dendrogram_key ] : raise ValueError ( f""The given dendrogram key ({dendrogram_key!r}) does not contain "" ""valid dendrogram information."" ) return dendrogram_key",False,"isinstance(groupby, list)","isinstance(groupby, str)",0.6581349968910217 1341,"def iterate ( self, handle ) : """"""Iterate over the records in the IntelliGenetics file."""""" for line in handle : if : break else : return if line [ 0 ]!= "";"" : raise ValueError ( ""Records should start with ';' and not:\n%r"" % line ) while line : comment_lines = [ ] while line. startswith ( "";"" ) : comment_lines. append ( line [ 1 : ]. strip ( ) ) line = next ( handle ) title = line. rstrip ( ) seq_lines = [ ] for line in handle : if line [ 0 ] == "";"" : break seq_lines. append ( line. rstrip ( ). replace ( "" "", """" ) ) else : line = None seq_str = """". join ( seq_lines ) if seq_str. endswith ( ""1"" ) : seq_str = seq_str [ : - 1 ] if ""1"" in seq_str : raise ValueError ( ""Potential terminator digit one found within sequence."" ) yield SeqRecord ( Seq ( seq_str ), id = title, name = title, <",False,not line.startswith(';;'),line == None,0.6516513228416443 1342,"def generateConstPy ( self ) : log. debug ( ""========== generate_const.py()"" ) fin = open ( os. path. join ( self. build_src, ""const.py.in"" ), ""r"" ) in_lines = fin. readlines ( ) fin. close ( ) fout = open ( os. path. join ( self. build_src, ""const.py"" ), ""w"" ) for line in in_lines : if : corrline = line. replace ( ""@VERSIONSTRING@"", self. gramps_version. replace ( FULL_COLON_SUBST, "":"" ) ) fout. write ( corrline ) else : fout. write ( line ) fout. close ( )",False,'@VERSIONSTRING@' in line,self.gramps_version and line[0] in self.cw,0.6654828786849976 1343,"def prompt ( default = None ) : editor = ""nano"" with tempfile. NamedTemporaryFile ( mode = ""r+"" ) as tmpfile : if default : tmpfile. write ( default ) tmpfile. flush ( ) child_pid = os. fork ( ) is_child = child_pid == 0 if : os. execvp ( editor, [ editor, tmpfile. name ] ) else : os. waitpid ( child_pid, 0 ) tmpfile. seek ( 0 ) return tmpfile. read ( ). strip ( )",True,is_child,is_child,0.6657383441925049 1344,"def replaceClassMethods ( ast, module_name, tree, class_name, class_node ) : old_class_node = None for child in tree. node : if isinstance ( child, ast. Class ) and child. name == class_name : old_class_node = child break if not old_class_node : raise TranslationError ( ""class not found: "" + class_name, class_node, module_name ) for node in class_node. code : if isinstance ( node, ast. Function ) : found = False for child in old_class_node. code : if isinstance ( child, ast. Function ) and child. name == node. name : found = True copyFunction ( child, node ) break if not found : raise TranslationError ( ""class method not found: "" + class_name + ""."" + node. name, node, module_name, ) elif isinstance ( node, ast. Assign ) and isinstance ( node. nodes [ 0 ], ast. AssName ) : found = False for child in old_class_node. code : if : found = True <",False,"isinstance(child, ast.Assign) and eqNodes(child.nodes, node.nodes)",found,0.6480258703231812 1345,"def prepare_request ( next_link = None ) : header_parameters = { } header_parameters [ ""Accept"" ] = self. _serialize. header ( ""accept"", accept, ""str"" ) if not next_link : url = self. list_for_scope. 
metadata [ ""url"" ] path_format_arguments = { ""scope"" : self. _serialize. url ( ""scope"", scope, ""str"", skip_quote = True ), } url = self. _client. format_url ( url, ** path_format_arguments ) query_parameters = { } if : query_parameters [ ""$filter"" ] = self. _serialize. query ( ""filter"", filter, ""str"" ) query_parameters [ ""api-version"" ] = self. _serialize. query ( ""api_version"", api_version, ""str"" ) request = self. _client. get ( url, query_parameters, header_parameters ) else : url = next_link query_parameters = { } request = self. _client. get ( url, query_parameters, header_parameters ) return request",False,filter is not None,filter,0.6641092300415039 1346,"def postprocess ( self, upload_remote = True, handle_log = True, handle_touch = True, handle_temp = True, error = False, ignore_missing_output = False, assume_shared_fs = True, latency_wait = None, keep_metadata = True, ) : if assume_shared_fs : if : self. dag. handle_touch ( self ) if handle_log : self. dag. handle_log ( self ) if not error : self. dag. check_and_touch_output ( self, wait = latency_wait, ignore_missing_output = ignore_missing_output ) self. dag. unshadow_output ( self, only_log = error ) if not error : self. dag. handle_remote ( self, upload = upload_remote ) self. dag. handle_protected ( self ) self. close_remote ( ) else : if not error : self. dag. check_and_touch_output ( self, wait = latency_wait, no_touch = True, force_stay_on_remote = True ) if not error : try : self. dag. workflow. persistence. finished ( self, keep_metadata = keep_metadata ) except IOError as e : logger. warning ( ""Error recording metadata for finished job "" ""({}). Please",False,not error and handle_touch,handle_temp,0.6568975448608398 1347,"def IncrementErrorCount ( self, category ) : """"""Bumps the module's error statistic."""""" self. error_count += 1 if self. counting in ( ""toplevel"", ""detailed"" ) : if : category = category. split ( ""/"" ) [ 0 ] if category not in self. errors_by_category : self. errors_by_category [ category ] = 0 self. errors_by_category [ category ] += 1",False,self.counting != 'detailed',category.startswith('/'),0.6580263376235962 1348,"def _get_sub_path_ranges_and_colors ( self, start : float, end : float ) : sub_path_ranges = [ ] colors = [ ] start = max ( 0, int ( start ) ) end = int ( math. ceil ( end ) ) if not self. protocol. messages : return None, None for message in self. protocol. messages : if message. bit_sample_pos [ - 2 ] < start : continue color = ( None if message. participant is None else settings. PARTICIPANT_COLORS [ message. participant. color_index ] ) if color is None : continue sub_path_ranges. append ( ( start, message. bit_sample_pos [ 0 ] ) ) if : colors. append ( None ) else : colors. append ( color ) if message. bit_sample_pos [ - 2 ] > end : sub_path_ranges. append ( ( message. bit_sample_pos [ 0 ], end ) ) colors. append ( color ) break sub_path_ranges. append ( ( message. bit_sample_pos [ 0 ], message. bit_sample_pos [ - 2 ] + 1 ) ) colors. append ( color ) start = message. bit_sample_pos [ - 2 ] + 1 if sub_path_ranges and sub_path_ranges [ - 1 ] [ 1 ]!= end : idx = 0 ret = [ ] output_size = self. output_size ignore_range = self. ignore_range bsize = self. bsize total_size = len ( bytes ) rappend = ret. append reduce_errors = self. 
reduce_errors while 1 : chunk_size = idx * bsize buf = bytes [ chunk_size : chunk_size + bsize ] char = modsum ( buf ) if reduce_errors : if char!= 255 and char!= 0 : rappend ( chr ( char ) ) else : rappend ( chr ( char ) ) idx += 1 if chunk_size + bsize > total_size : break ret = """". join ( ret ) size = len ( ret ) / output_size size = min ( int ( size ), 1 ) buf = [ ] for c in range ( 0, output_size ) : if aggresive : buf. append ( ret [ c : c + size + 1 ] [ ignore_range : ignore_range + 1 ] ) else : buf. append ( ret [ c : c + size + 1 ] [ 1 : 2 ] ) i = 0 for x in ret [ c : c + size + 1 ] : i += 1 if : continue i = 0 buf += x ",False,i != ignore_range,i == 0,0.6598858833312988 1350,"def _update_scale ( self, skip ) : if self. dynamic_loss_scale : prev_scale = self. cur_scale if : self. cur_scale = max ( self. cur_scale / self. scale_factor, self. min_loss_scale ) self. last_overflow_iter = self. cur_iter if self. verbose : logger. info ( f""\nGrad overflow on iteration {self.cur_iter}"" ) logger. info ( f""Reducing dynamic loss scale from {prev_scale} to {self.cur_scale}"" ) else : stable_interval = ( self. cur_iter - self. last_overflow_iter ) - 1 if ( stable_interval > 0 ) and ( stable_interval % self. scale_window == 0 ) : self. cur_scale *= self. scale_factor if self. verbose : logger. info ( f""No Grad overflow for {self.scale_window} iterations"" ) logger. info ( f""Increasing dynamic loss scale from {prev_scale} to {self.cur_scale}"" ) else : if : logger. info ( ""Grad overflow on iteration: %s"", self. cur_iter ) <",True,skip,skip,0.681114912033081 1351,"def consume ( self, event : Mapping [ str, Any ] ) -> None : user_profile_id = event [ ""user_profile_id"" ] user_profile = get_user_profile_by_id ( user_profile_id ) message : Dict [ str, Any ] = event [ ""message"" ] services = get_bot_services ( user_profile_id ) for service in services : bot_handler = get_bot_handler ( str ( service. name ) ) if bot_handler is None : logging. error ( ""Error: User %s has bot with invalid embedded bot service %s"", user_profile_id, service. name, ) continue try : if : bot_handler. initialize ( self. get_bot_api_client ( user_profile ) ) if event [ ""trigger"" ] == ""mention"" : message [ ""content"" ] = extract_query_without_mention ( message = message, client = cast ( ExternalBotHandler, self. get_bot_api_client ( user_profile ) ), ) assert message [ ""content"" ] is not None bot_handler. handle_message ( message = message, if self. proto < 5 : if : self. save_reduce ( bytearray, ( ), obj = obj ) else : self. save_reduce ( bytearray, ( bytes ( obj ), ), obj = obj ) return n = len ( obj ) if n >= self. framer. _FRAME_SIZE_TARGET : self. _write_large_bytes ( BYTEARRAY8 + pack ( "" else : self. write ( BYTEARRAY8 + pack ( "" self. run_function ( ""saltutil.sync_grains"" ) module = os. path. join ( RUNTIME_VARS. RUNTIME_CONFIGS [ ""minion"" ] [ ""cachedir"" ], ""files"", ""base"", ""_grains"", ""custom_grain2.py"", ) tries = 0 while not os. path. exists ( module ) : tries += 1 if : self. fail ( ""Failed to found custom grains module in cache path {}"". format ( module ) ) break time. sleep ( 1 ) grains = self. run_function ( ""grains.items"" ) self. assertEqual ( { ""k2"" : ""v2"" }, grains [ ""a_custom"" ] )",False,tries > 60,tries > 3,0.689618706703186 1354,"def test_patches ( ) : print ( ""Botocore version: {} aiohttp version: {}"". format ( botocore. __version__, aiohttp. __version__ ) ) success = True for obj, digests in chain ( _AIOHTTP_DIGESTS. items ( ), _API_DIGESTS. 
items ( ) ) : digest = hashlib. sha1 ( getsource ( obj ). encode ( ""utf-8"" ) ). hexdigest ( ) if : print ( ""Digest of {}:{} not found in: {}"". format ( obj. __qualname__, digest, digests ) ) success = False assert success",False,digest not in digests,digest,0.6627613306045532 1355,"def clean ( self ) : super ( ). clean ( ) if self. cluster. site is not None : for device in self. cleaned_data. get ( ""devices"", [ ] ) : if : raise ValidationError ( { ""devices"" : ""{} belongs to a different site ({}) than the cluster ({})"". format ( device, device. site, self. cluster. site ) } )",True,device.site != self.cluster.site,device.site != self.cluster.site,0.6559698581695557 1356,"def _invoke ( self, args, remote, autoraise ) : raise_opt = [ ] if remote and self. raise_opts : autoraise = int ( bool ( autoraise ) ) opt = self. raise_opts [ autoraise ] if opt : raise_opt = [ opt ] cmdline = [ self. name ] + raise_opt + args if remote or self. background : inout = file ( os. devnull, ""r+"" ) else : inout = None setsid = getattr ( os, ""setsid"", None ) if not setsid : setsid = getattr ( os, ""setpgrp"", None ) p = subprocess. Popen ( cmdline, close_fds = True, stdin = inout, stdout = ( self. redirect_stdout and inout or None ), stderr = inout, preexec_fn = setsid, ) if remote : time. sleep ( 1 ) rc = p. poll ( ) if : time. sleep ( 4 ) rc = p. poll ( ) if : return True return not rc elif self. background : if p. poll ( ) is None : return True else : return False else : return not p. wait ( )",False,rc is None,rc,0.6618880033493042 1357,"def send_request_check_version ( self, record ) : assert record. control. req_resp result = wandb_internal_pb2. Result ( uuid = record. uuid ) current_version = record. request. check_version. current_version or wandb. __version__ messages = update. check_available ( current_version ) if messages : upgrade_message = messages. get ( ""upgrade_message"" ) if : result. response. check_version_response. upgrade_message = upgrade_message yank_message = messages. get ( ""yank_message"" ) if yank_message : result. response. check_version_response. yank_message = yank_message delete_message = messages. get ( ""delete_message"" ) if delete_message : result. response. check_version_response. delete_message = delete_message self. _result_q. put ( result )",True,upgrade_message,upgrade_message,0.6672766208648682 1358,"def __eq__ ( self, other : object ) -> bool : if not isinstance ( other, Unk ) : return NotImplemented if not np. allclose ( self. ng, other. ng ) : return False if self. ik!= other. ik : return False if self. is_noncollinear!= other. is_noncollinear : return False if self. nbnd!= other. nbnd : return False for ib in range ( self. nbnd ) : if : if not ( np. allclose ( self. data [ ib, 0 ], other. data [ ib, 0 ], atol = 1e-4 ) and np. allclose ( self. data [ ib, 1 ], other. data [ ib, 1 ], atol = 1e-4 ) ) : return False else : if not np. allclose ( self. data [ ib ], other. data [ ib ], atol = 1e-4 ) : return False return True",False,self.is_noncollinear,len(self.data) != len(other.data),0.6547691822052002 1359,"def _load_test_data ( ) : configs = [ ] datasets = [ ] for _ in range ( NUM_ENVS ) : config = get_config ( CFG_TEST ) if : pytest. skip ( ""Please download Habitat test data to data folder."" ) datasets. append ( habitat. make_dataset ( id_dataset = config. DATASET. TYPE, config = config. DATASET ) ) config. defrost ( ) config. SIMULATOR. SCENE = datasets [ - 1 ]. episodes [ 0 ]. scene_id if not os. path. exists ( config. SIMULATOR. SCENE ) : pytest. 
skip ( ""Please download Habitat test data to data folder."" ) config. freeze ( ) configs. append ( config ) return configs, datasets",False,not PointNavDatasetV1.check_config_paths_exist(config.DATASET),not os.path.exists(config.DATASET.TYPE),0.6497238874435425 1360,"def update_sysconfig_file ( fn, adjustments, allow_empty = False ) : if not adjustments : return ( exists, contents ) = read_sysconfig_file ( fn ) updated_am = 0 for ( k, v ) in adjustments. items ( ) : if : continue v = str ( v ) if len ( v ) == 0 and not allow_empty : continue contents [ k ] = v updated_am += 1 if updated_am : lines = [ str ( contents ), ] if not exists : lines. insert ( 0, util. make_header ( ) ) util. write_file ( fn, ""\n"". join ( lines ) + ""\n"", 0o644 )",False,v is None,k == 'TAB > or exists,0.6659239530563354 1361,"def phpfilter_extract ( content ) : ftemp = """" found = [ ] lines = content. split ( ""\n"" ) for line in lines : ftemp = """" length = len ( line ) for x in range ( 0, length ) : if : ftemp += line [ x ] else : if ( length > 100 and base64_check ( line [ x ] ) is False and len ( ftemp ) >= int ( ( length / 2 ) ) ) : break ftemp = """" if len ( ftemp ) > 0 : found. append ( ftemp ) final = """" if len ( found ) > 50 : maxim = 0 x = """" index = - 1 for x in range ( 0, len ( found ) ) : length = len ( found [ x ] ) if length > maxim : maxim = length index = x final = found [ x ] return final",False,base64_check(line[x]),ftemp,0.6507961750030518 1362,"def __merge_rpc_configs ( self, rpc_params, rpc_config ) : config = { } options = { ""nodeId"" : self. UNKNOWN_ARBITRATION_ID, ""isExtendedId"" : self. DEFAULT_EXTENDED_ID_FLAG, ""isFd"" : self. DEFAULT_FD_FLAG, ""bitrateSwitch"" : self. DEFAULT_BITRATE_SWITCH_FLAG, ""response"" : True, ""dataLength"" : 1, ""dataByteorder"" : self. DEFAULT_BYTEORDER, ""dataSigned"" : self. DEFAULT_SIGNED_FLAG, ""dataExpression"" : None, ""dataEncoding"" : self. DEFAULT_ENCODING, ""dataBefore"" : None, ""dataAfter"" : None, ""dataInHex"" : None, } for option_name, option_value in options. items ( ) : if : config [ option_name ] = rpc_params [ option_name ] elif option_value is not None : config [ option_name ] = rpc_config. get ( option_name, option_value ) return config",True,option_name in rpc_params,option_name in rpc_params,0.663283109664917 1363,"def _form_master_re ( relist, reflags, ldict, toknames ) : if not relist : return [ ] regex = ""|"". join ( relist ) try : lexre = re. compile ( regex, reflags ) lexindexfunc = [ None ] * ( max ( lexre. groupindex. values ( ) ) + 1 ) lexindexnames = lexindexfunc [ : ] for f, i in lexre. groupindex. items ( ) : handle = ldict. get ( f, None ) if type ( handle ) in ( types. FunctionType, types. MethodType ) : lexindexfunc [ i ] = ( handle, toknames [ f ] ) lexindexnames [ i ] = f elif handle is not None : lexindexnames [ i ] = f if f. find ( ""ignore_"" ) > 0 : lexindexfunc [ i ] = ( None, None ) else : lexindexfunc [ i ] = ( None, toknames [ f ] ) return [ ( lexre, lexindexfunc ) ], [ regex ], [ lexindexnames ] except Exception : m = int ( len ( relist ) / 2 ) if : m = 1 llist, lre, lnames = _form_master_re ( relist [ : m ], reflags, ldict, toknames ) rlist, rre, rnames = _form_master_re ( relist [ m : ]",False,m == 0,m > 1,0.6690990924835205 1364,"def remaining_paragraph_count ( self ) : """"""Return the remaining paragraph count for this post (does not include teaser)."""""" if self. _remaining_paragraph_count is None : try : document = lxml. html. fragment_fromstring ( self. 
text ( teaser_only = True, show_read_more_link = False ), ""body"" ) except lxml. etree. ParserError as e : if : return """" raise ( e ) self. _remaining_paragraph_count = self. paragraph_count - int ( document. xpath ( ""count(//p)"" ) ) return self. _remaining_paragraph_count",False,str(e) == 'Document is empty',self.paragraph_count is None,0.6522060036659241 1365,"def get_exe_prefixes ( exe_filename ) : """"""Get exe->egg path translations for a given.exe file"""""" prefixes = [ ( ""PURELIB/"", """" ), ( ""PLATLIB/pywin32_system32"", """" ), ( ""PLATLIB/"", """" ), ( ""SCRIPTS/"", ""EGG-INFO/scripts/"" ), ( ""DATA/lib/site-packages"", """" ), ] z = zipfile. ZipFile ( exe_filename ) try : for info in z. infolist ( ) : name = info. filename parts = name. split ( ""/"" ) if len ( parts ) == 3 and parts [ 2 ] == ""PKG-INFO"" : if parts [ 1 ]. endswith ( "".egg-info"" ) : prefixes. insert ( 0, ( ""/"". join ( parts [ : 2 ] ), ""EGG-INFO/"" ) ) break if len ( parts )!= 2 or not name. endswith ( "".pth"" ) : continue if name. endswith ( ""-nspkg.pth"" ) : continue if parts [ 0 ]. upper ( ) in ( ""PURELIB"", ""PLATLIB"" ) : contents = z. read ( name ) if : contents = contents. decode ( ) for pth in yield_lines ( contents ) : <",False,six.PY3,len(contents) > 0,0.6605470180511475 1366,"def is_move_suicidal ( self, move ) : potential_libs = set ( ) for n in NEIGHBORS [ move ] : neighbor_group_id = self. lib_tracker. group_index [ n ] if : return False neighbor_group = self. lib_tracker. groups [ neighbor_group_id ] if neighbor_group. color == self. to_play : potential_libs |= neighbor_group. liberties elif len ( neighbor_group. liberties ) == 1 : return False potential_libs -= set ( [ move ] ) return not potential_libs",False,neighbor_group_id == MISSING_GROUP_ID,neighbor_group_id == 0,0.6545580625534058 1367,"def get_build_provider_flags ( build_provider : str, ** kwargs ) -> Dict [ str, str ] : """"""Get configured options applicable to build_provider."""""" build_provider_flags : Dict [ str, str ] = dict ( ) if build_provider not in _ALL_PROVIDERS : raise RuntimeError ( f""Invalid build provider: {build_provider}"" ) for option in _PROVIDER_OPTIONS : key : str = option [ ""param_decls"" ] is_flag : bool = option. get ( ""is_flag"", False ) envvar : Optional [ str ] = option. get ( ""envvar"" ) supported_providers : List [ str ] = option [ ""supported_providers"" ] if key == ""--provider"" : continue if : continue key_formatted = _param_decls_to_kwarg ( key ) if is_flag and not kwargs. get ( key_formatted ) : continue if envvar is not None and key_formatted in kwargs : build_provider_flags [ envvar ] = kwargs [ key_formatted ] return build_provider_flags",False,build_provider not in supported_providers,supported_providers,0.6572281122207642 1368,"def find_exception_in_output ( data ) : have_traceback = False for line in data. splitlines ( ) : line = line. decode ( ""ascii"" ) if line. startswith ( ""Traceback ("" ) : have_traceback = True continue if line. startswith ( "" "" ) : continue if have_traceback : m = re_exception. match ( line ) if : exc_type, args = m. groups ( ) bits = exc_type. split ( ""."", 1 ) if len ( bits ) > 1 : mod = __import__ ( bits [ 0 ] ) exc = getattr ( mod, bits [ 1 ] ) else : exc = eval ( bits [ 0 ] ) else : try : exc = eval ( line. strip ( ) ) args = ""()"" except : return None try : args = eval ( args ) if ""header"" in png : self. useHeader ( png [ ""header"" ] ) if ""time"" in png : self. useTime ( png [ ""time"" ] ) if ""physical"" in png : self. 
usePhysical ( png [ ""physical"" ] ) for comment in png. array ( ""text"" ) : if ""text"" not in comment : continue keyword = comment [ ""keyword"" ]. value text = comment [ ""text"" ]. value try : key = self. TEXT_TO_ATTR [ keyword. lower ( ) ] setattr ( self, key, text ) except KeyError : if : self. comment = ""%s=%s"" % ( keyword, text ) else : self. comment = text compr_size = sum ( data. size for data in png. array ( ""data"" ) ) computeComprRate ( self, compr_size )",False,keyword.lower() != 'comment',keyword.lower() in self.TEXT_TO_ATTR,0.6543689966201782 1370,"def post_config_hook ( self ) : servers = [ ""dunst"", ""mako"", ""xfce4-notifyd"", None ] if not self. server : for server in servers : if server : try : if self. py3. command_output ( [ ""pgrep"", ""-x"", server ] ) : self. server = server break except self. py3. CommandError : pass else : self. server = self. py3. check_commands ( servers [ : - 1 ] ) or ""dunst"" elif self. server not in servers : raise Exception ( STRING_INVALID_SERVER. format ( self. server ) ) else : command = self. server. replace ( ""notifyd"", ""notifyd-config"" ) if : raise Exception ( STRING_NOT_INSTALLED. format ( command ) ) if self. server == ""dunst"" : self. backend = Dunst ( self ) elif self. server == ""mako"" : self. backend = Mako ( self ) elif self. server == ""xfce4-notifyd"" : self. backend = Xfce4_notifyd ( self ) if self. state is not None : if self. state == ""last"" : self. state = self. py3. storage_get ( ""state"" ) or 0 if self. state in [ False, True ] : ",False,not self.py3.check_commands(command),command not in self.COMMAND_MAP,0.6521279215812683 1371,"def writer ( self ) : """"""loop forever and copy socket->serial"""""" while self. alive : try : data = self. socket. recv ( 1024 ) if : break self. serial. write ( b"""". join ( self. rfc2217. filter ( data ) ) ) except socket. error as msg : self. log. error ( ""{}"". format ( msg ) ) break self. stop ( )",False,not data,len(data) > 0,0.6720389127731323 1372,"def removedir ( self, path ) : _path = self. validatepath ( path ) if _path == ""/"" : raise errors. RemoveRootError ( ) with ftp_errors ( self, path ) : try : self. ftp. rmd ( _encode ( _path, self. ftp. encoding ) ) except error_perm as error : code, _ = _parse_ftp_error ( error ) if : if self. isfile ( path ) : raise errors. DirectoryExpected ( path ) if not self. isempty ( path ) : raise errors. DirectoryNotEmpty ( path ) raise",False,code == '550',code == 0,0.6604022979736328 1373,"def parse_raw_res ( raw_res ) : """"""this function copied from dagster-airflow.operators.util for now"""""" assert isinstance ( raw_res, list ) res = None lines = [ ] coalesced = [ ] in_split_line = False for line in raw_res : if not in_split_line and line. startswith ( ""{"" ) : if : lines. append ( line ) continue else : coalesced. append ( line ) in_split_line = True continue if in_split_line : coalesced. append ( line ) if : lines. append ( """". join ( coalesced ) ) coalesced = [ ] in_split_line = False for line in reversed ( lines ) : try : res = seven. json. loads ( line ) break except seven. JSONDecodeError as e : print ( ""[parse_raw_res error]"", e ) continue return res",False,line.endswith('}'),coalesced,0.6589871644973755 1374,"def find_songs_that_start_with_word ( word ) : max_titles = 20 max_offset = 200 offset = 0 out = [ ] while offset < max_offset and len ( out ) < max_titles : results = sp. 
search ( q = word, type = ""track"", limit = 50, offset = offset ) if len ( results [ ""tracks"" ] [ ""items"" ] ) == 0 : break for item in results [ ""tracks"" ] [ ""items"" ] : name = item [ ""name"" ]. lower ( ) if name in seen : continue seen. add ( name ) if : continue if ""-"" in name : continue if ""/"" in name : continue words = name. split ( ) if len ( words ) > 1 and words [ 0 ] == word and words [ - 1 ] not in skiplist : out. append ( item ) offset += 50 return out",False,'(' in name,not name,0.6635199785232544 1375,"def handle ( self, * args, ** options ) : app_name = options. get ( ""app_name"" ) job_name = options. get ( ""job_name"" ) if app_name and not job_name : job_name = app_name app_name = None if options. get ( ""list_jobs"" ) : print_jobs ( only_scheduled = False, show_when = True, show_appname = True ) else : if : print ( ""Run a single maintenance job. Please specify the name of the job."" ) return self. runjob ( app_name, job_name, options )",False,not job_name,options.get('list_maintenance_job'),0.6594098806381226 1376,"def _songmeanings ( artist, song ) : service_name = ""Songmeanings"" url = """" try : searchurl = ""http://songmeanings.com/m/query/?q=%s %s"" % ( artist, song ) searchresults = requests. get ( searchurl, proxies = proxy ) soup = BeautifulSoup ( searchresults. text, ""html.parser"" ) url = """" for link in soup. find_all ( ""a"", href = True ) : if : url = ""https:"" + link [ ""href"" ] break elif ""/m/songs/view/"" in link [ ""href"" ] : result = ""http://songmeanings.com"" + link [ ""href"" ] lyricspage = requests. get ( result, proxies = proxy ) soup = BeautifulSoup ( lyricspage. text, ""html.parser"" ) url = ""http://songmeanings.com"" + link [ ""href"" ] [ 2 : ] break else : pass templyrics = soup. find_all ( ""li"" ) [ 4 ] lyrics = templyrics. getText ( ) lyrics = lyrics. split ( ""(r,s)};})();"" ) [ 1 ] except Exception : lyrics = error if lyrics == ""We are currently missing these lyrics."" : lyrics = error return ( lyrics, url,",False,'songmeanings.com/m/songs/view/' in link['href'],link['href'],0.6497451663017273 1377,"def test_find_directive_from_block ( self ) : blocks = self. config. parser_root. find_blocks ( ""virtualhost"" ) found = False for vh in blocks : if : servername = vh. find_directives ( ""servername"" ) self. assertEqual ( servername [ 0 ]. parameters [ 0 ], ""certbot.demo"" ) found = True self. assertTrue ( found )",False,vh.filepath.endswith('sites-enabled/certbot.conf'),vh.name == 'servername',0.6487401723861694 1378,"def readAll ( self, root, force = False ) : """"""Scan positions, looking for @ nodes to read."""""" at, c = self, self. c old_changed = c. changed if force : c. endEditing ( ) t1 = time. time ( ) c. init_error_dialogs ( ) files = at. findFilesToRead ( force, root ) for p in files : at. readFileAtPosition ( force, p ) for p in files : p. v. clearDirty ( ) if not g. unitTesting : if files : t2 = time. time ( ) g. es ( ""read %s files in %2.2f seconds"" % ( len ( files ), t2 - t1 ) ) elif : g. es ( ""no @ nodes in the selected tree"" ) c. changed = old_changed c. raise_error_dialogs ( )",False,force,not old_changed,0.6818838715553284 1379,"def _check_witness_program_v0 ( self, witness_solution_stack, witness_program ) : size = len ( witness_program ) if size == 32 : if len ( witness_solution_stack ) == 0 : raise ScriptError ( ""witness program witness empty"", errno. WITNESS_PROGRAM_WITNESS_EMPTY ) puzzle_script = witness_solution_stack [ - 1 ] if sha256 ( puzzle_script ). 
digest ( )!= witness_program : raise ScriptError ( ""witness program mismatch"", errno. WITNESS_PROGRAM_MISMATCH ) stack = list ( witness_solution_stack [ : - 1 ] ) elif size == 20 : if : raise ScriptError ( ""witness program mismatch"", errno. WITNESS_PROGRAM_MISMATCH ) puzzle_script = self. _puzzle_script_for_len20_segwit ( witness_program ) stack = list ( witness_solution_stack ) else : raise ScriptError ( ""witness program wrong length"", errno. WITNESS_PROGRAM_WRONG_LENGTH ) return stack, puzzle_script",False,len(witness_solution_stack) != 2,sha256(puzzle_script) != witness_program,0.6599516868591309 1380,"def getAllRowTablesWidget ( self ) : """"""dump all settings from table"""""" model = self. TabSettings. model ( ) data, datafilter, self. key = [ ], OrderedDict ( ), None for row in range ( model. rowCount ( ) ) : data. append ( [ ] ) for column in range ( model. columnCount ( ) ) : index = model. index ( row, column ) data [ row ]. append ( str ( model. data ( index ). toString ( ) ) ) datafilter [ ""ESP"" ] = { } datafilter [ ""LinuxIntelx86"" ] = { } datafilter [ ""LinuxIntelx64"" ] = { } datafilter [ ""WindowsIntelx86"" ] = { } datafilter [ ""WindowsIntelx64"" ] = { } datafilter [ ""MachoIntelx86"" ] = { } datafilter [ ""MachoIntelx64"" ] = { } for count, item in enumerate ( data ) : if : if item [ 0 ]!= """" or item [ 1 ]!= """" : datafilter [ ""ESP"" ] [ item [ 0 ] ] = item [ 1 ] else : if item [ 0 ]!= """" or item [ 1 ]!= """" : if item [ 1 ] in datafilter. keys ( ) : self. key = item [ 1 ] else : datafilter [ self. key ] [ item [ 0 ] ] = item [ 1 ] return datafilter",False,count < 5,count > 0,0.6658114790916443 1381,"def start_span ( self, operation_name = None, child_of = None, references = None, tags = None, start_time = None, ignore_active_span = False, ) : start_time = time. time ( ) if start_time is None else start_time parent_ctx = None if child_of is not None : parent_ctx = ( child_of if : else child_of. context ) elif references is not None and len ( references ) > 0 : parent_ctx = references [ 0 ]. referenced_context if not ignore_active_span and parent_ctx is None : scope = self. scope_manager. active if scope is not None : parent_ctx = scope. span. context ctx = SpanContext ( span_id = self. _generate_id ( ) ) if parent_ctx is not None : if parent_ctx. _baggage is not None : ctx. _baggage = parent_ctx. _baggage. copy ( ) ctx. trace_id = parent_ctx. trace_id else : ctx. trace_id = self. _generate_id ( ) return MockSpan ( self, operation_name = operation_name, context = ctx, parent_id = ( None if parent_ctx is None else parent_ctx. span_id ), tags = tags, start_time = start_time, )",False,"isinstance(child_of, opentracing.SpanContext)",parent_ctx is None,0.6485050916671753 1382,"def test_raise_for_status ( server ) : async with httpx. AsyncClient ( ) as client : for status_code in ( 200, 400, 404, 500, 505 ) : response = await client. request ( ""GET"", server. url. copy_with ( path = f""/status/{status_code}"" ) ) if : with pytest. raises ( httpx. HTTPStatusError ) as exc_info : response. raise_for_status ( ) assert exc_info. value. response == response else : assert response. raise_for_status ( ) is None",False,400 <= status_code < 600,response.has_error,0.6672827005386353 1383,"def loads ( self, text, profile = False ) : self. _pattern_confs = { } for line in text. splitlines ( ) : line = line. strip ( ) if not line or line. startswith ( ""#"" ) : continue left, value = line. split ( ""="", 1 ) value = value. strip ( ) tokens = left. strip ( ). 
split ( "":"", 2 ) if len ( tokens ) == 3 : pattern, conf_module, name = tokens else : assert len ( tokens ) == 2 conf_module, name = tokens pattern = None if not _is_profile_module ( conf_module ) : if profile : raise ConanException ( ""[conf] '{}' not allowed in profiles"". format ( line ) ) if : raise ConanException ( ""Conf '{}' cannot have a package pattern"". format ( line ) ) conf = self. _pattern_confs. setdefault ( pattern, Conf ( ) ) conf. set_value ( conf_module, name, value )",False,pattern is not None,not pattern,0.6567394733428955 1384,"def cache_sns_topics_across_accounts ( ) -> bool : function : str = f""{__name__}.{sys._getframe().f_code.co_name}"" accounts_d : list = async_to_sync ( get_account_id_to_name_mapping ) ( ) for account_id in accounts_d. keys ( ) : if config. get ( ""environment"" ) == ""prod"" : cache_sns_topics_for_account. delay ( account_id ) else : if : cache_sns_topics_for_account. delay ( account_id ) stats. count ( f""{function}.success"" ) return True",False,"account_id in config.get('celery.test_account_ids', [])","config.get( ""environment"""") == 'local'",0.649046003818512 1385,"def process ( self, fuzzresult ) : base_url = urljoin ( fuzzresult. url, "".."" ) for line in fuzzresult. history. content. splitlines ( ) : record = line. split ( ""/"" ) if : self. queue_url ( urljoin ( base_url, record [ 1 ] ) ) if record [ 0 ] == ""D"" : self. queue_url ( urljoin ( base_url, record [ 1 ] ) ) self. queue_url ( urljoin ( base_url, ""%s/CVS/Entries"" % ( record [ 1 ] ) ) )",False,len(record) == 6 and record[1],record[0] == 'C',0.6494237184524536 1386,"def _set_parse_context ( self, tag, tag_attrs ) : if not self. _wb_parse_context : if : self. _wb_parse_context = ""style"" elif tag == ""script"" : if self. _allow_js_type ( tag_attrs ) : self. _wb_parse_context = ""script""",True,tag == 'style',tag == 'style',0.6587623953819275 1387,"def download ( request ) : if not ENABLE_DOWNLOAD. get ( ) : return serve_403_error ( request ) try : file_format = ( ""csv"" if ""csv"" == request. POST. get ( ""type"" ) else ""xls"" if ""xls"" == request. POST. get ( ""type"" ) else ""json"" ) facet = json. loads ( request. POST. get ( ""facet"", ""{}"" ) ) json_response = search ( request ) response = json. loads ( json_response. content ) if : response [ ""response"" ] [ ""docs"" ] = response [ ""normalized_facets"" ] [ 0 ] [ ""docs"" ] collection = facet if not collection [ ""template"" ] [ ""fieldsSelected"" ] : facet [ ""fields"" ] = facet [ ""template"" ] [ ""fieldsAttributes"" ] else : collection = json. loads ( request. POST. get ( ""collection"", ""{}"" ) ) if file_format == ""json"" : docs = response [ ""response"" ] [ ""docs"" ] resp = JsonResponse ( docs, safe = False ) resp [ ""Content-Disposition"" ] = 'attachment; filename=""%s.%s""' % ( ""query_result"", file_format, ) return resp else : <",False,facet,response.get('normalized_facets'),0.7092268466949463 1388,"def __get_right_line ( self, widget_output ) : """"""Gets next line for right panel"""""" right_line = """" if widget_output : right_line = widget_output. pop ( 0 ) if len ( right_line ) > self. right_panel_width : right_line_plain = self. markup. clean_markup ( right_line ) if : right_line = right_line [ : self. right_panel_width ] + self. markup. 
RESET return right_line",False,len(right_line_plain) > self.right_panel_width,right_line_plain and len(right_line) > self.right_panel_width,0.6472867131233215 1389,"def _parse_gene ( element ) : for genename_element in element : if : ann_key = ""gene_%s_%s"" % ( genename_element. tag. replace ( NS, """" ), genename_element. attrib [ ""type"" ], ) if genename_element. attrib [ ""type"" ] == ""primary"" : self. ParsedSeqRecord. annotations [ ann_key ] = genename_element. text else : append_to_annotations ( ann_key, genename_element. text )",False,'type' in genename_element.attrib,genename_element.attrib[0] == 'gene',0.6544368267059326 1390,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. req = TRenewDelegationTokenReq ( ) self. req. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRUCT,self.req is None,0.6624554395675659 1391,"def do_endpoints ( conf, endpoints ) : service = { } for endpoint, ( eclass, indexed ) in endpoints. items ( ) : try : servs = [ ] i = 1 for args in conf [ endpoint ] : if : args = { ""location"" : args, ""binding"" : DEFAULT_BINDING [ endpoint ] } elif isinstance ( args, tuple ) or isinstance ( args, list ) : if len ( args ) == 2 : args = { ""location"" : args [ 0 ], ""binding"" : args [ 1 ] } elif len ( args ) == 3 : args = { ""location"" : args [ 0 ], ""binding"" : args [ 1 ], ""index"" : args [ 2 ], } if indexed : if ""index"" not in args : args [ ""index"" ] = ""%d"" % i i += 1 else : try : target_model_item = self. __extractData ( target ) result = [ f for f in sourceItems if f. parent ( ) not in sourceItems ] self. window ( ). app. monitor. suspend ( ) for source in result : self. __removeItem ( source ) source_model_item = self. __extractData ( source ) if : target_model_item. add_folder ( source_model_item ) self. __moveRecurseUpdate ( source_model_item ) else : target_model_item. add_item ( source_model_item ) source_model_item. path = None source_model_item. persist ( ) target. addChild ( source ) self. window ( ). app. monitor. unsuspend ( ) self. treeWidget. sortItems ( 0, Qt. AscendingOrder ) self. window ( ). app. config_altered ( True )",False,"isinstance(source_model_item, model.Folder)",target_model_item,0.6477488279342651 1393,"def _init_choices ( self, choices, default = None ) : self. choices = [ ] searching_first_choice = True for i, c in enumerate ( choices ) : if isinstance ( c, Separator ) : self. choices. append ( ( c, None, None ) ) else : if : self. choices. append ( ( c, c, None ) ) else : name = c. get ( ""name"" ) value = c. get ( ""value"", name ) disabled = c. get ( ""disabled"", None ) self. choices. append ( ( name, value, disabled ) ) if searching_first_choice : self. selected_option_index = i searching_first_choice = False",False,"isinstance(c, basestring)",default and c,0.6527134776115417 1394,"def configure_logger ( verbose ) : from polyaxon import settings from polyaxon. plugins. sentry import set_raven_client if ( verbose or settings. CLIENT_CONFIG. debug or os. environ. get ( POLYAXON_KEYS_DEBUG, False ) ) : log_level = logging. DEBUG settings. CLIENT_CONFIG. 
debug = True else : if not settings. CLIENT_CONFIG. disable_errors_reporting : set_raven_client ( ) log_level = ( logging. DEBUG if : else logging. INFO ) if settings. CLIENT_CONFIG. log_level : try : log_level = logging. getLevelName ( settings. CLIENT_CONFIG. log_level ) except : pass logging. basicConfig ( format = ""%(message)s"", level = log_level, stream = sys. stdout )",False,"os.environ.get(POLYAXON_KEYS_LOG_LEVEL) in ['debug', 'DEBUG']",verbose,0.6599088311195374 1395,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. I64 : self. numTrues = iprot. readI64 ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. I64 : self. numFalses = iprot. readI64 ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. I64 : self. numNulls = iprot. readI64 ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 1,fid == 1,0.6735950708389282 1396,"def update_handler ( self, fd, event_state ) : if event_state & base_connection. BaseConnection. READ : if fd not in self. readers : self. loop. add_reader ( fd, partial ( self. handlers [ fd ], fd = fd, events = base_connection. BaseConnection. READ ), ) self. readers. add ( fd ) else : if fd in self. readers : self. loop. remove_reader ( fd ) self. readers. remove ( fd ) if event_state & base_connection. BaseConnection. WRITE : if fd not in self. writers : self. loop. add_writer ( fd, partial ( self. handlers [ fd ], fd = fd, events = base_connection. BaseConnection. WRITE, ), ) self. writers. add ( fd ) else : if : self. loop. remove_writer ( fd ) self. writers. remove ( fd )",False,fd in self.writers,fd in self.writer,0.6652296781539917 1397,"def replace_field_to_value ( layout, cb ) : for i, lo in enumerate ( layout. fields ) : if : layout. fields [ i ] = ShowField ( cb, * lo. fields, attrs = lo. attrs, wrapper_class = lo. wrapper_class ) elif isinstance ( lo, basestring ) : layout. fields [ i ] = ShowField ( cb, lo ) elif hasattr ( lo, ""get_field_names"" ) : replace_field_to_value ( lo, cb )",False,"isinstance(lo, Field) or issubclass(lo.__class__, Field)","isinstance(lo, ShowField)",0.6551836729049683 1398,"def setDatePattern ( self, pattern ) : if pattern is None : self. dateDetector = None return else : dd = DateDetector ( ) dd. default_tz = self. __logtimezone if : pattern = filter ( bool, map ( str. strip, re. split ( ""\n+"", pattern ) ) ) for pattern in pattern : dd. appendTemplate ( pattern ) self. dateDetector = dd",False,"not isinstance(pattern, (list, tuple))","isinstance(pattern, string_types)",0.6524550914764404 1399,"def _import_hash ( self, operator ) : for key in sorted ( operator. import_hash. keys ( ) ) : module_list = "", "". join ( sorted ( operator. import_hash [ key ] ) ) if : exec ( ""from {} import {}"". format ( key [ 4 : ], module_list ) ) else : exec ( ""from {} import {}"". format ( key, module_list ) ) for var in operator. import_hash [ key ] : self. operators_context [ var ] = eval ( var )",False,key.startswith('tpot.'),len(module_list) > 4,0.6503951549530029 1400,"def iter_event_handlers ( self, resource : resources_. 
Resource, event : bodies. RawEvent, ) -> Iterator [ handlers. ResourceWatchingHandler ] : warnings. warn ( ""SimpleRegistry.iter_event_handlers() is deprecated; use "" ""ResourceWatchingRegistry.iter_handlers()."", DeprecationWarning, ) cause = _create_watching_cause ( resource, event ) for handler in self. _handlers : if not isinstance ( handler, handlers. ResourceWatchingHandler ) : pass elif : yield handler",False,"registries.match(handler=handler, cause=cause, ignore_fields=True)","isinstance(handler, cause)",0.6483447551727295 1401,"def send_packed_command ( self, command, check_health = True ) : if not self. _sock : self. connect ( ) try : if isinstance ( command, str ) : command = [ command ] for item in command : self. _sock. sendall ( item ) except socket. error as e : self. disconnect ( ) if : _errno, errmsg = ""UNKNOWN"", e. args [ 0 ] else : _errno, errmsg = e. args raise ConnectionError ( ""Error %s while writing to socket. %s."" % ( _errno, errmsg ) ) except Exception : self. disconnect ( ) raise",False,len(e.args) == 1,check_health,0.6547377109527588 1402,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if ftype == TType. STRUCT : self. success = Results ( ) self. success. read ( iprot ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRUCT : self. error = QueryNotFoundException ( ) self. error. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRUCT : self. error2 = BeeswaxException ( ) self. error2. read ( iprot ) else : iprot. skip ( ftype ) else : <",True,fid == 1,fid == 1,0.6793604493141174 1403,"def _bc_covmat ( self, cov_naive ) : cov_naive = cov_naive / self. scaling_factor endog = self. endog_li exog = self. exog_li varfunc = self. family. variance cached_means = self. cached_means scale = self. estimate_scale ( ) bcm = 0 for i in range ( self. num_group ) : expval, lpr = cached_means [ i ] resid = endog [ i ] - expval dmat = self. mean_deriv ( exog [ i ], lpr ) sdev = np. sqrt ( varfunc ( expval ) ) rslt = self. cov_struct. covariance_matrix_solve ( expval, i, sdev, ( dmat, ) ) if : return None vinv_d = rslt [ 0 ] vinv_d /= scale hmat = np. dot ( vinv_d, cov_naive ) hmat = np. dot ( hmat, dmat. T ). T f = self. weights_li [ i ] if self. weights is not None else 1.0 aresid = np. linalg. solve ( np. eye ( len ( resid ) ) - hmat, resid ) rslt = self. cov_struct. covariance_matrix_solve ( expval, i, sdev, ( aresid, ) ) if : return None srt = rslt [ 0 ] srt = f * np. dot ( dmat. T, srt ) / scale bcm += np. outer ( srt, srt ) cov_robust_bc = np. dot ( cov_naive, np. dot",False,rslt is None,scale > self.max_scale,0.6712028980255127 1404,"def register_rule_types ( ) : LOG. debug ( ""Start : register default RuleTypes."" ) registered_count = 0 for rule_type in RULE_TYPES : rule_type = copy. deepcopy ( rule_type ) try : rule_type_db = RuleType. get_by_name ( rule_type [ ""name"" ] ) update = True except StackStormDBObjectNotFoundError : rule_type_db = None update = False rule_type_api = RuleTypeAPI ( ** rule_type ) rule_type_api. validate ( ) rule_type_model = RuleTypeAPI. to_model ( rule_type_api ) if rule_type_db : rule_type_model. 
id = rule_type_db. id try : rule_type_db = RuleType. add_or_update ( rule_type_model ) extra = { ""rule_type_db"" : rule_type_db } if : LOG. audit ( ""RuleType updated. RuleType %s"", rule_type_db, extra = extra ) else : LOG. audit ( ""RuleType created. RuleType %s"", rule_type_db, extra = extra ) except Exception : LOG. exception ( ""Unable to register RuleType %s."", rule_type [ ""name"" ] ) else : registered_count += 1 LOG. debug ( ""End : register default RuleTypes."" ) return registered_count",True,update,update,0.6884036660194397 1405,"def check_backward ( self, s_data, i_data, gx_data, gt_data ) : gt_old = gt_data. copy ( ) s = chainer. Variable ( s_data ) i = chainer. Variable ( i_data ) x, t = thin_stack. thin_stack_get ( s, i ) x. grad = gx_data t. grad = gt_data t. backward ( ) for j, ind in enumerate ( i_data ) : for k in range ( self. shape [ 1 ] ) : if : testing. assert_allclose ( s. grad [ j, k ], gt_old [ j, k ] + gx_data [ j ] ) else : testing. assert_allclose ( s. grad [ j, k ], gt_old [ j, k ] ) self. assertIsNone ( i. grad ) self. assertIs ( s. grad, gt_data )",False,k == ind,ind == 0,0.6705917119979858 1406,"def is_notebook ( ) : try : shell = get_ipython ( ). __class__. __name__ if shell == ""ZMQInteractiveShell"" : return True elif : return False else : return False except NameError : return False",True,shell == 'TerminalInteractiveShell',shell == 'TerminalInteractiveShell',0.6718345880508423 1407,"def token_access ( endpoint, client_id, token_info ) : allow = False if endpoint == ""revocation_endpoint"" : if ""azr"" in token_info and client_id == token_info [ ""azr"" ] : allow = True elif len ( token_info [ ""aud"" ] ) == 1 and token_info [ ""aud"" ] == [ client_id ] : allow = True else : if ""azr"" in token_info and client_id == token_info [ ""azr"" ] : allow = True elif ""aud"" in token_info : if : allow = True return allow",False,client_id in token_info['aud'],client_id == token_info['aud'],0.6551052331924438 1408,"def _pair_samples_with_pipelines ( run_info_yaml, config ) : """"""Map samples defined in input file to pipelines to run."""""" samples = config_utils. load_config ( run_info_yaml ) if isinstance ( samples, dict ) : resources = samples. pop ( ""resources"" ) samples = samples [ ""details"" ] else : resources = { } ready_samples = [ ] for sample in samples : if : del sample [ ""files"" ] usample = copy. deepcopy ( sample ) usample. pop ( ""algorithm"", None ) if ""resources"" not in usample : usample [ ""resources"" ] = { } for prog, pkvs in resources. items ( ) : if prog not in usample [ ""resources"" ] : usample [ ""resources"" ] [ prog ] = { } if pkvs is not None : for key, val in pkvs. items ( ) : usample [ ""resources"" ] [ prog ] [ key ] = val config = config_utils. update_w_custom ( config, usample ) sample [ ""resources"" ] = { } ready_samples. append ( sample ) paired = [ ( x, _get_pipeline ( x ) ) for x in ready_samples ] d = defaultdict ( list ) for x in paired : d [ x [ 1 ] ]. append ( [ x [ 0 ] ] ) return d, config",True,'files' in sample,'files' in sample,0.6599195003509521 1409,"def add_permissions ( self ) : for permission in self. permissions : try : kwargs = { ""FunctionName"" : self. name, ""StatementId"" : permission [ ""statement_id"" ], ""Action"" : permission [ ""action"" ], ""Principal"" : permission [ ""principal"" ], } source_arn = permission. get ( ""source_arn"", None ) if source_arn : kwargs [ ""SourceArn"" ] = source_arn source_account = permission. 
get ( ""source_account"", None ) if : kwargs [ ""SourceAccount"" ] = source_account response = self. _lambda_svc. add_permission ( ** kwargs ) LOG. debug ( response ) except Exception : LOG. exception ( ""Unable to add permission"" )",True,source_account,source_account,0.6680928468704224 1410,"def create_tree ( self ) : """"""The vs-create-tree command."""""" c = self. c p = c. p tag = ""valuespace"" if p. h == tag : r = p else : r = p. insertAsLastChild ( ) r. h = tag for k, v in self. d. items ( ) : if : child = r. insertAsLastChild ( ) child. h = ""@@r "" + k self. render_value ( child, v ) c. bodyWantsFocus ( ) c. redraw ( )",False,not k.startswith('__'),r.h == tag,0.6543979644775391 1411,"def generate_supported_providers_table ( api, provider_matrix ) : data = [ ] header = [ ""Provider"", ""Documentation"", ""Provider Constant"", ""Supported Regions"", ""Module"", ""Class Name"", ] data. append ( header ) for provider, values in sorted ( provider_matrix. items ( ) ) : name_str = ""`%s`_"" % ( values [ ""name"" ] ) module_str = "":mod:`%s`"" % ( values [ ""module"" ] ) class_str = "":class:`%s`"" % ( values [ ""class"" ] ) params = { ""api"" : api, ""provider"" : provider. lower ( ) } driver_docs_path = pjoin ( this_dir, ""../docs/%(api)s/drivers/%(provider)s.rst"" % params ) if os. path. exists ( driver_docs_path ) : docs_link = "":doc:`Click `"" % params else : docs_link = """" cls = values [ ""cls"" ] supported_regions = cls. list_regions ( ) if hasattr ( cls, ""list_regions"" ) else None if : supported_regions = sorted ( supported_regions ) supported_regions = "", "". join ( supported_regions ) else : supported_regions = ""single region driver"" row = [ name_str, if not paths : paths = [ ] for p in listdir ( dir_path ) : path = join ( dir_path, p ) if : paths = add_directory_csv_files ( path, paths ) elif isfile ( path ) and path. endswith ( "".csv"" ) : paths. append ( path ) return paths",True,isdir(path),isdir(path),0.6547225713729858 1413,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. STRING : self. guid = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. noteKey = iprot. readString ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 1,fid == 1,0.6750129461288452 1414,"def get_step_query ( request, units_queryset ) : """"""Narrows down unit query to units matching conditions in GET and POST"""""" if ""unit"" in request. GET or ""page"" in request. GET : return units_queryset if ""unitstates"" in request. GET : unitstates = request. GET [ ""unitstates"" ]. split ( "","" ) if unitstates : state_queryset = units_queryset. none ( ) for unitstate in unitstates : if : state_queryset = state_queryset | units_queryset. filter ( state = UNTRANSLATED ) elif unitstate == ""translated"" : state_queryset = state_queryset | units_queryset. filter ( state = TRANSLATED ) elif unitstate == ""fuzzy"" : state_queryset = state_queryset | units_queryset. filter ( state = FUZZY ) units_queryset = state_queryset if ""matchnames"" in request. 
GET : matchnames = request. GET [ ""matchnames"" ]. split ( "","" ) if matchnames : match_queryset = units_queryset. none ( ) if ""hassuggestion"" in matchnames : match_queryset = units_queryset. exclude ( suggestion = None ) if not isinstance ( padding, AsymmetricPadding ) : raise TypeError ( ""Padding must be an instance of AsymmetricPadding."" ) if isinstance ( padding, PKCS1v15 ) : padding_enum = backend. _lib. RSA_PKCS1_PADDING elif isinstance ( padding, OAEP ) : padding_enum = backend. _lib. RSA_PKCS1_OAEP_PADDING if : raise UnsupportedAlgorithm ( ""Only MGF1 is supported by this backend."", _Reasons. UNSUPPORTED_MGF ) if not backend. rsa_padding_supported ( padding ) : raise UnsupportedAlgorithm ( ""This combination of padding and hash algorithm is not "" ""supported by this backend."", _Reasons. UNSUPPORTED_PADDING, ) else : raise UnsupportedAlgorithm ( ""{} is not supported by this backend."". format ( padding. name ), _Reasons. UNSUPPORTED_PADDING, ) return _enc_dec_rsa_pkey_ctx ( backend, key, data, padding_enum, padding )",False,"not isinstance(padding._mgf, MGF1)",not backend.rsa_MGF1_supported(padding),0.6498615741729736 1416,"def regenerate_curve ( self, curve_name : str, vertices : Union [ List [ list ], List [ np. ndarray ] ], spline_type : str = ""POLY"", vertices_radius : Union [ List [ list ], List [ np. ndarray ] ] = None, close_spline : Union [ List [ bool ], List [ int ] ] = None, use_smooth : bool = True, tilt : Union [ List [ list ], List [ np. ndarray ] ] = None, ) : if not self. curve : self. curve = bpy. data. curves. new ( name = curve_name, type = ""CURVE"" ) if len ( self. curve. splines )!= len ( vertices ) or any ( len ( s. points )!= len ( v ) for s, v in zip ( self. curve. splines, vertices ) ) : self. curve. splines. clear ( ) [ self. curve. splines. new ( spline_type ) for _ in range ( len ( vertices ) ) ] [ s. points. add ( len ( v ) - 1 ) for s, v in zip ( self. curve. splines, vertices ) ] for s, v, r, t, c in zip ( self. curve. splines, vertices, repeat_last ( vertices_radius or [ None ] ), repeat_last ( tilt or [ None ] ), repeat_last ( close_spline ), ) : v = np. asarray ( v, dtype = np. float32 ) if : r = np. ones ( len ( v ), dtype = np. float32 ) r =",False,r is None,len(v) == 0,0.664576530456543 1417,"def process ( self ) : if not any ( socket. is_linked for socket in self. outputs ) : return solids_in = self. inputs [ 0 ]. sv_get ( deepcopy = False ) matrixes = self. inputs [ 1 ]. sv_get ( deepcopy = False ) slices = [ ] slices_face = [ ] faces_add = slices_face. extend if self. flat_output else slices_face. append slices_add = slices. extend if self. flat_output else slices. append for solid, matrix in zip ( * mlr ( [ solids_in, matrixes ] ) ) : location = matrix. decompose ( ) [ 0 ] norm = ( matrix @ Vector ( ( 0, 0, 1 ) ) ) - location dist = norm. dot ( location ) wires = solid. slice ( Base. Vector ( norm ), dist ) edges_curves = [ ] faces = [ ] for wire in wires : for edge in wire. Edges : curve = SvSolidEdgeCurve ( edge ) edges_curves. append ( curve ) if wires : face = Part. Face ( wires ) faces. append ( SvSolidFaceSurface ( face ). to_nurbs ( ) ) if faces : faces_add ( faces ) if : slices_add ( edges_curves ) self. outputs [ ""Edges"" ]. sv_set ( slices ) self. outputs [ ""Faces"" ]. sv_set ( slices_face )",True,edges_curves,edges_curves,0.6743174195289612 1418,"def wrapped_f ( * args, ** kwargs ) : if request_method and request_method!= request. method : raise HTTPMethodNotAllowed ( ). 
exception result = f ( * args, ** kwargs ) tmpl = template if tmpl == ""string"" : return result if hasattr ( request, ""override_template"" ) : tmpl = request. override_template if tmpl == ""json"" : if : msg = ( ""JSON responses with Array envelopes are susceptible "" ""to cross-site data leak attacks, see "" ""http://wiki.pylonshq.com/display/pylonsfaq/Warnings"" ) if config [ ""debug"" ] : raise TypeError ( msg ) warnings. warn ( msg, Warning, 2 ) log. warning ( msg ) response. headers [ ""Content-Type"" ] = ""application/json"" return simplejson. dumps ( result ) if request. environ. get ( ""paste.testing"", False ) : request. environ [ ""paste.testing_variables"" ] [ ""tmpl_vars"" ] = result if response. content_type == ""text/html"" : response. content_type = ""application/xhtml+xml"" return render ( tmpl, tmpl_vars = result, method = ""auto"" )",False,"isinstance(result, (list, tuple))",result,0.6519782543182373
1419,"def _put_tasklet ( self, todo, options ) : if not todo : raise RuntimeError ( ""Nothing to do."" ) datastore_entities = [ ] for unused_fut, ent in todo : datastore_entities. append ( ent ) keys = yield self. _conn. async_put ( options, datastore_entities ) for key, ( fut, ent ) in zip ( keys, todo ) : if : if ent. _has_complete_key ( ) : raise datastore_errors. BadKeyError ( ""Entity key differs from the one returned by the datastore. "" ""Expected %r, got %r"" % ( key, ent. _key ) ) ent. _key = key fut. set_result ( key )",True,key != ent._key,key != ent._key,0.6613764762878418
1420,"def _listen_output ( self ) : ""NB! works in background thread"" try : while True : chars = self. _proc. read ( 1 ) if : as_bytes = chars. encode ( self. encoding ) self. _make_output_available ( as_bytes ) else : self. _error = ""EOF"" break except Exception as e : self. _error = str ( e )",True,len(chars) > 0,len(chars) > 0,0.6568546891212463
1421,def encode ( self ) : for char in self. text : if : self. ctext += self. galactic_dict [ char ] else : self. ctext += char return self. ctext,False,char in self.galactic_dict.keys(),char in self.galactic_dict,0.6574978828430176
1422,"def _rpc_request_loop_thread ( self ) : while True : ( peer, data ) = self. _rpc_events. get ( ) msgid, target_method, params = data error = None result = None try : if : result = self. _config ( msgid, params ) elif target_method == b""vrrp_list"" : result = self. _list ( msgid, params ) elif target_method == b""vrrp_config_change"" : result = self. _config_change ( msgid, params ) else : error = ""Unknown method %s"" % ( target_method ) except RPCError as e : error = str ( e ) peer. _endpoint. send_response ( msgid, error = error, result = result )",False,target_method == b'vrrp_config',target_method == b'config',0.652177095413208
1423,"def parse_fqdn_whitelist ( self, fqdn_whitelist_location ) : fqdns = [ ] with open ( fqdn_whitelist_location, ""r"" ) as text_file : for line in text_file : line = line. strip ( ). strip ( ""'"" ). strip ( '""' ) if : fqdns. append ( line ) return fqdns",False,isFQDN(line),line and (not line.startswith('#')),0.6519243121147156
1424,"def _get_sort_map ( tags ) : """"""See TAG_TO_SORT"""""" tts = { } for name, tag in tags. items ( ) : if : if tag. user : tts [ name ] = ""%ssort"" % name if tag. internal : tts [ ""~%s"" % name ] = ""~%ssort"" % name return tts",False,tag.has_sort,name in tts,0.6583970785140991
1425,"def end_a ( self ) : self. doc. do_translation = False if self. a_href : last_write = self. doc. pop_write ( ) last_write = last_write. rstrip ( "" "" ) if : if "":"" in last_write : last_write = last_write. replace ( "":"", r""\:"" ) self. doc. push_write ( last_write ) self. doc. push_write ( "" <%s>`__"" % self. a_href ) elif last_write == ""`"" : self. doc. push_write ( ""`<%s>`__"" % self. a_href ) else : self. doc. push_write ( self. a_href ) self. doc. hrefs [ self. a_href ] = self. a_href self. doc. write ( ""`__"" ) self. a_href = None self. doc. write ( "" "" )",False,last_write and last_write != '`',last_write,0.656735360622406
1426,"def _retrieve_key ( self ) : url = ""http://www.canadapost.ca/cpo/mc/personal/postalcode/fpc.jsf"" text = """" try : r = requests. get ( url, timeout = self. timeout, proxies = self. proxies ) text = r. text except : self. error = ""ERROR - URL Connection"" if text : expression = r""'(....-....-....-....)';"" pattern = re. compile ( expression ) match = pattern. search ( text ) if : self. key = match. group ( 1 ) return self. key else : self. error = ""ERROR - No API Key""",True,match,match,0.6769827604293823
1427,"def execute_spark ( self, cell, output_var, samplemethod, maxrows, samplefraction, session_name, coerce ) : ( success, out, mimetype ) = self. spark_controller. run_command ( Command ( cell ), session_name ) if not success : if conf. shutdown_session_on_spark_statement_errors ( ) : self. spark_controller. cleanup ( ) raise SparkStatementException ( out ) else : if : if mimetype == MIMETYPE_TEXT_HTML : self. ipython_display. html ( out ) else : self. ipython_display. write ( out ) else : self. ipython_display. display ( out ) if output_var is not None : spark_store_command = self. _spark_store_command ( output_var, samplemethod, maxrows, samplefraction, coerce ) df = self. spark_controller. run_command ( spark_store_command, session_name ) self. shell. user_ns [ output_var ] = df",False,"isinstance(out, string_types)",mimetype is not None,0.6489458084106445
1428,"def mode ( self, expression, binby = [ ], limits = None, shape = 256, mode_shape = 64, mode_limits = None, progressbar = False, selection = None, ) : """"""Calculate/estimate the mode."""""" if len ( binby ) == 0 : raise ValueError ( ""only supported with binby argument given"" ) else : try : len ( shape ) shape = tuple ( shape ) except : shape = len ( binby ) * ( shape, ) shape = ( mode_shape, ) + shape subspace = self ( * ( list ( binby ) + [ expression ] ) ) if : subspace = subspace. selected ( ) limits = self. limits ( list ( binby ), limits ) mode_limits = self. limits ( [ expression ], mode_limits ) limits = list ( limits ) + list ( mode_limits ) counts = subspace. histogram ( limits = limits, size = shape, progressbar = progressbar ) indices = np. argmax ( counts, axis = 0 ) pmin, pmax = limits [ - 1 ] centers = np. linspace ( pmin, pmax, mode_shape + 1 ) [ : - 1 ] centers += ( centers [ 1 ] - centers [ 0 ] ) / 2 modes = centers [ indices ] ok = counts. sum ( axis = 0 ) > 0 modes [ ~ ok ] = np. nan return modes",False,selection,mode_limits is not None,0.6832214593887329
1429,"def getMTRoute ( self, order ) : mtroutes = self. mt_routing_table. getAll ( ) for e in mtroutes : if : self. log. debug ( ""getMTRoute [order:%s] returned a MTRoute"", order ) return e [ order ] self. log. debug ( ""getMTRoute [order:%s] returned None"", order ) return None",False,order == list(e)[0],e[order],0.6612941026687622
1430,"def iterate_batches ( env, net, batch_size ) : batch = [ ] episode_reward = 0.0 episode_steps = [ ] obs = env. reset ( ) sm = nn. Softmax ( dim = 1 ) while True : obs_v = torch. FloatTensor ( [ obs ] ) act_probs_v = sm ( net ( obs_v ) ) act_probs = act_probs_v. data. numpy ( ) [ 0 ] action = np. random. choice ( len ( act_probs ), p = act_probs ) next_obs, reward, is_done, _ = env. step ( action ) episode_reward += reward episode_steps. append ( EpisodeStep ( observation = obs, action = action ) ) if : batch. append ( Episode ( reward = episode_reward, steps = episode_steps ) ) episode_reward = 0.0 episode_steps = [ ] next_obs = env. reset ( ) if len ( batch ) == batch_size : yield batch batch = [ ] obs = next_obs",True,is_done,is_done,0.6616964340209961
1431,"def __init__ ( self, test_env, args, stdin, stdout, stderr, returncode, files_before, files_after ) : self. test_env = test_env self. args = args self. stdin = stdin self. stdout = stdout self. stderr = stderr self. returncode = returncode self. files_before = files_before self. files_after = files_after self. files_deleted = { } self. files_updated = { } self. files_created = files_after. copy ( ) for path, f in files_before. items ( ) : if path not in files_after : self. files_deleted [ path ] = f continue del self. files_created [ path ] if : self. files_updated [ path ] = files_after [ path ]",False,f.mtime < files_after[path].mtime,path in files_after,0.6528380513191223
1432,"def wrap_keypress ( self, key ) : """"""Handle confirmation and throw event on bad input."""""" try : key = self. keypress ( key ) except ColumnDeleteEvent as e : if e. letter == COLUMN_KEYS [ 1 ] : return if not self. column_empty ( e. letter ) : if : raise e self. delete_column ( e. letter ) except UpdateParentEvent as e : self. update_parent_columns ( ) return if key is None : return if self. columns. get_focus_column ( ) == 0 : if key not in ( ""up"", ""down"", ""page up"", ""page down"" ) : raise CalcEvent ( E_invalid_in_help_col ) if key not in EDIT_KEYS and key not in MOVEMENT_KEYS : raise CalcEvent ( E_invalid_key % key. upper ( ) )",False,"not isinstance(self.event, ColumnDeleteEvent)",e.letter == 'delete',0.6519345045089722
1433,"def getExtras ( self, ArtistID, newstyle = False, ** kwargs ) : if not newstyle : extras = ""1,2,3,4,5,6,7,8,9,10,11,12,13,14"" else : temp_extras_list = [ ] i = 1 for extra in headphones. POSSIBLE_EXTRAS : if : temp_extras_list. append ( i ) i += 1 extras = "","". join ( str ( n ) for n in temp_extras_list ) myDB = db. DBConnection ( ) controlValueDict = { ""ArtistID"" : ArtistID } newValueDict = { ""IncludeExtras"" : 1, ""Extras"" : extras } myDB. upsert ( ""artists"", newValueDict, controlValueDict ) thread = threading. Thread ( target = importer. addArtisttoDB, args = [ ArtistID, True, False ] ) thread. start ( ) thread. join ( 1 ) raise cherrypy. HTTPRedirect ( ""artistPage?ArtistID=%s"" % ArtistID )",False,extra in kwargs,extra,0.6789532899856567
1434,"def traverse_trees ( node_pos, sample, trees : List [ HeteroDecisionTreeGuest ] ) : if node_pos [ ""reach_leaf_node"" ]. all ( ) : return node_pos for t_idx, tree in enumerate ( trees ) : cur_node_idx = node_pos [ ""node_pos"" ] [ t_idx ] if : continue rs, reach_leaf = HeteroSecureBoostingTreeGuest. traverse_a_tree ( tree, sample, cur_node_idx ) if reach_leaf : node_pos [ ""reach_leaf_node"" ] [ t_idx ] = True node_pos [ ""node_pos"" ] [ t_idx ] = rs return node_pos",False,cur_node_idx == -1,cur_node_idx == 0,0.6567609310150146
1435,"def run ( self ) : for k, v in iteritems ( self. objs ) : if k. startswith ( ""_"" ) : continue if v [ ""_class"" ] == ""Dataset"" : limit = v [ ""time_limit"" ] if limit == 0.0 : limit = None if limit is not None and limit <= 0.0 : logger. warning ( ""Previous time limit %s was updated, "" ""no time limit is enforced now."", limit, ) limit = None v [ ""time_limit"" ] = limit limit = v [ ""memory_limit"" ] if : limit = None if limit is not None and limit <= 0 : logger. warning ( ""Previous memory limit %s was updated, "" ""no memory limit is enforced now."", limit, ) limit = None v [ ""memory_limit"" ] = limit return self. objs",False,limit == 0,limit is None,0.6795771718025208
1436,"def numpy_match_long_cycle ( list_of_arrays ) : """"""match numpy arrays length by cycling over the array"""""" out = [ ] maxl = 0 for array in list_of_arrays : maxl = max ( maxl, array. shape [ 0 ] ) for array in list_of_arrays : length_diff = maxl - array. shape [ 0 ] if length_diff > 0 : if : array = np_concatenate ( ( array, array [ : length_diff ] ) ) else : new_part = np_repeat ( array, ceil ( length_diff / array. shape [ 0 ] ), axis = 0 ) if len ( array. shape ) > 1 : shape = ( ceil ( length_diff / array. shape [ 0 ] ), 1 ) else : shape = ceil ( length_diff / array. shape [ 0 ] ) new_part = np_tile ( array, shape ) array = np_concatenate ( ( array, new_part [ : length_diff ] ) ) out. append ( array ) return out",False,length_diff < array.shape[0],length_diff > array.shape[0],0.6534295082092285
1437,"def connect ( self ) : host = cleanHost ( self. conf ( ""host"" ), protocol = False ). split ( "":"" ) if not isInt ( host [ 1 ] ) : log. error ( ""Config properties are not filled in correctly, port is missing."" ) return False if self. conf ( ""version"" ) == ""v4"" : if not self. conf ( ""api_key"" ) : log. error ( ""Config properties are not filled in correctly, API key is missing."" ) return False url = ""http://"" + str ( host [ 0 ] ) + "":"" + str ( host [ 1 ] ) + ""/jsonrpc"" client = JsonRpcClient ( url, ""Token "" + self. conf ( ""api_key"" ) ) self. hadouken_api = HadoukenAPIv4 ( client ) return True else : auth_type = self. conf ( ""auth_type"" ) header = None if auth_type == ""api_key"" : header = ""Token "" + self. conf ( ""api_key"" ) elif : header = ""Basic "" + b64encode ( self. conf ( ""auth_user"" ) + "":"" + self. conf ( ""auth_pass"" ) ) url = ""http://"" + str ( host [ 0 ] ) + "":"" + str ( host [ 1 ] ) + ""/api"" client = JsonRpcClient ( url, header ) self. hadouken_api = HadoukenAPIv5 ( client ) return True return",False,auth_type == 'user_pass',auth_type == 'auth_user',0.6496493220329285
1438,"def results ( parsed, original_query ) : settings = json. load ( open ( ""preferences.json"" ) ) shortcuts = settings [ ""shortcuts"" ] count = len ( shortcuts ) for i in range ( count ) : url = shortcuts [ i ] [ ""url"" ] shortcut = shortcuts [ i ] [ ""shortcut"" ] if shortcut. lower ( ) == original_query. lower ( ) : if : if not ""//"" in url : url = ""http://"" + url link = ""{0}"". format ( url ) return { ""title"" : u""Open "" + url, ""run_args"" : [ url ], ""html"" : centered_text ( link, hint_text = ""Press enter to launch a browser"" ), ""webview_transparent_background"" : True, }",False,url.startswith('http') == False,url,0.6529065370559692
1439,"def login ( ) : error = None if request. method == ""POST"" : form = await request. form if form [ ""username"" ]!= app. config [ ""USERNAME"" ] : error = ""Invalid username"" elif : error = ""Invalid password"" else : session [ ""logged_in"" ] = True await flash ( ""You were logged in"" ) return redirect ( url_for ( ""posts"" ) ) return await render_template ( ""login.html"", error = error )",True,form['password'] != app.config['PASSWORD'],form['password'] != app.config['PASSWORD'],0.6505357027053833
1440,"def resources_include_url ( name ) : env = self. environment mime_type, encoding = mimetypes. guess_type ( name ) try : data = env. loader. get_source ( env, name ) [ 0 ]. encode ( ""utf8"" ) except UnicodeDecodeError : pieces = split_template_path ( name ) searchpaths = self. get_template_paths ( ) for searchpath in searchpaths : filename = os. path. join ( searchpath, * pieces ) print ( filename, os. path. exists ( filename ) ) if : with open ( filename, ""rb"" ) as f : data = f. read ( ) break else : raise ValueError ( ""No file %r found in %r"" % ( name, searchpaths ) ) data = base64. b64encode ( data ) data = data. replace ( b""\n"", b"""" ). decode ( ""ascii"" ) src = ""data:{mime_type};base64,{data}"". format ( mime_type = mime_type, data = data ) return jinja2. Markup ( src )",True,os.path.exists(filename),os.path.exists(filename),0.6482568979263306
1441,"def value ( self, new_value ) : old_value = self. _value self. _value = new_value for i, [ _, value ] in enumerate ( self. _options ) : if value == new_value : self. _line = i break else : if : self. _line = 0 self. _value = self. _options [ self. _line ] [ 1 ] else : self. _line = - 1 self. _value = None if self. _validator : self. _is_valid = self. _validator ( self. _value ) if old_value!= self. _value and self. _on_change : self. _on_change ( ) self. _start_line = max ( 0, max ( self. _line - self. _h + 1, min ( self. _start_line, self. _line ) ) )",False,len(self._options) > 0,self._line > len(self._options),0.6534853577613831
1442,"def _renderObject ( self, obj, brightness = 0, addSink = True ) : glPushMatrix ( ) if addSink : glTranslate ( obj. getPosition ( ) [ 0 ], obj. getPosition ( ) [ 1 ], obj. getSize ( ) [ 2 ] / 2 - profile. getProfileSettingFloat ( ""object_sink"" ), ) else : glTranslate ( obj. getPosition ( ) [ 0 ], obj. getPosition ( ) [ 1 ], obj. getSize ( ) [ 2 ] / 2 ) if self. tempMatrix is not None and obj == self. _selectedObj : glMultMatrixf ( openglHelpers. convert3x3MatrixTo4x4 ( self. tempMatrix ) ) offset = obj. getDrawOffset ( ) glTranslate ( - offset [ 0 ], - offset [ 1 ], - offset [ 2 ] - obj. getSize ( ) [ 2 ] / 2 ) glMultMatrixf ( openglHelpers. convert3x3MatrixTo4x4 ( obj. getMatrix ( ) ) ) n = 0 for m in obj. _meshList : if m. vbo is None : m. vbo = openglHelpers. GLVBO ( GL_TRIANGLES, m. vertexes, m. normal ) if : glColor4fv ( map ( lambda idx : idx * brightness, self. _objColors [ n ] ) ) n += 1 m. vbo. render ( ) glPopMatrix ( )",False,brightness != 0,bubble,0.6689261198043823
1443,"def __init__ ( self, filename = None ) : try : self. _config = ConfigParser ( strict = False ) except TypeError : self. _config = ConfigParser ( ) if filename : if : self. _config. readfp ( filename ) else : if not os. path. exists ( filename ) : sys. exit ( ""Configuration file '%s' does not exist"" % filename ) self. _config. read ( os. path. expanduser ( filename ) ) else : for path in self. CONFIG_FILES : full_path = os. path. expanduser ( path ) if os. path. exists ( full_path ) and full_path in self. _config. read ( full_path ) : filename = full_path break else : sys. exit ( ""Could not find any configuration file at "" ""default locations.\n"" ""Check Barman's documentation for more help."" ) self. config_file = filename self. _servers = None self. servers_msg_list = [ ] self. _parse_global_config ( )",False,"hasattr(filename, 'read')",os.path.isfile(filename),0.6545233130455017
1444,"def get_files ( d ) : res = [ ] for p in glob. glob ( os. path. join ( d, ""*"" ) ) : if not p : continue ( pth, fname ) = os. path. split ( p ) if skip_file ( fname ) : continue if : continue if os. path. isdir ( p ) : res += get_dir ( p ) else : res. append ( p ) return res",False,os.path.islink(p),not os.path.exists(p),0.648548424243927
1445,"def __init__ ( self, type, object_or_type = None ) : self. __thisclass__ = type if object_or_type is None : self. __self__ = self. __self_class__ = None else : if : debugger ( ) JS ( ""@{{_issubtype}}(@{{object_or_type}}, @{{type}})"" ) raise TypeError ( ""super(type, obj): obj must be an instance or subtype of type"" ) if JS ( ""@{{object_or_type}}['$inst'] === true"" ) : self. __self_class__ = object_or_type. __class__ else : self. __self_class__ = object_or_type self. __self__ = object_or_type",False,"JS('!@{{_issubtype}}(@{{object_or_type}}, @{{type}})')","isinstance(object_or_type, basestring)",0.6665154695510864
1446,"def deleteHold ( ) : v = buildGAPIObject ( ) hold = sys. argv [ 3 ] matterId = None i = 4 while i < len ( sys. argv ) : myarg = sys. argv [ i ]. lower ( ). replace ( ""_"", """" ) if : matterId = getMatterItem ( v, sys. argv [ i + 1 ] ) holdId = convertHoldNameToID ( v, hold, matterId ) i += 2 else : controlflow. invalid_argument_exit ( myarg, ""gam delete hold"" ) if not matterId : controlflow. system_error_exit ( 3, ""you must specify a matter for the hold."" ) print ( f""Deleting hold {hold} / {holdId}"" ) gapi. call ( v. matters ( ). holds ( ), ""delete"", matterId = matterId, holdId = holdId )",False,myarg == 'matter',i + 1 < len(sys.argv),0.6635769605636597
1447,"def _tags_to_preslots ( tags, tokens, is_start_of_slot, is_end_of_slot ) : slots = [ ] current_slot_start = 0 for i, tag in enumerate ( tags ) : if is_start_of_slot ( tags, i ) : current_slot_start = i if : slots. append ( { RANGE : { START : tokens [ current_slot_start ]. start, END : tokens [ i ]. end, }, SLOT_NAME : tag_name_to_slot_name ( tag ), } ) current_slot_start = i return slots",True,"is_end_of_slot(tags, i)","is_end_of_slot(tags, i)",0.6529837846755981
1448,"def pipeline_template_post_save_handler ( sender, instance, created, ** kwargs ) : template = instance if template. is_deleted : TemplateRelationship. objects. filter ( ancestor_template_id = template. template_id ). delete ( ) return with transaction. atomic ( ) : TemplateRelationship. objects. filter ( ancestor_template_id = template. template_id ). delete ( ) acts = list ( template. data [ PE. activities ]. values ( ) ) subprocess_nodes = [ act for act in acts if act [ ""type"" ] == PE. SubProcess ] rs = [ ] for sp in subprocess_nodes : version = ( sp. get ( ""version"" ) or PipelineTemplate. objects. get ( template_id = sp [ ""template_id"" ] ). version ) rs. append ( TemplateRelationship ( ancestor_template_id = template. template_id, descendant_template_id = sp [ ""template_id"" ], subprocess_node_id = sp [ ""id"" ], version = version, ) ) if : TemplateRelationship. objects. bulk_create ( rs ) TemplateVersion. objects. track ( template ) TemplateCurrentVersion. objects. update_current_version",False,rs,created,0.6844005584716797
1449,"def getSimilar ( ) : myDB = db. DBConnection ( ) results = myDB. select ( ""SELECT ArtistID from artists ORDER BY HaveTracks DESC"" ) logger. info ( ""Fetching similar artists from Last.FM for tag cloud"" ) artistlist = [ ] for result in results [ : 12 ] : data = request_lastfm ( ""artist.getsimilar"", mbid = result [ ""ArtistId"" ] ) if : artists = data [ ""similarartists"" ] [ ""artist"" ] for artist in artists : try : artist_mbid = artist [ ""mbid"" ] artist_name = artist [ ""name"" ] except KeyError : continue if not any ( artist_mbid in x for x in results ) : artistlist. append ( ( artist_name, artist_mbid ) ) logger. debug ( ""Fetched %d artists from Last.FM"", len ( artistlist ) ) count = defaultdict ( int ) for artist, mbid in artistlist : count [ artist, mbid ] += 1 items = count. items ( ) top_list = sorted ( items, key = lambda x : x [ 1 ], reverse = True ) [ : 25 ] random. shuffle ( top_list ) myDB. action ( ""DELETE from lastfmcloud"" ) for item in top_list : artist_name, artist_mbid = item [ 0 ] count = item [ 1 ] myDB. action ( <",False,data and 'similarartists' in data,data != {},0.6625669002532959
1450,"def ident_values ( self ) : value = self. _ident_values if value is False : value = None if : wrapped = self. wrapped idents = getattr ( wrapped, ""ident_values"", None ) if idents : value = [ self. _wrap_hash ( ident ) for ident in idents ] self. _ident_values = value return value",False,not self.orig_prefix,"hasattr(self, 'wrapped')",0.6577386260032654
1451,"def xontrib_data ( ns ) : """"""Collects and returns the data about xontribs."""""" meta = get_xontribs ( ) data = { } names : tp. Set [ str ] = set ( ) if not ns else set ( ns. names ) for xo_name in meta : if xo_name not in names : continue spec = find_xontrib ( xo_name ) if spec is None : installed = loaded = False else : installed = True loaded = spec. name in sys. modules data [ xo_name ] = { ""name"" : xo_name, ""installed"" : installed, ""loaded"" : loaded } installed_xontribs = xontrib_installed ( names ) for name in installed_xontribs : if : loaded = f""xontrib.{name}"" in sys. modules data [ name ] = { ""name"" : name, ""installed"" : True, ""loaded"" : loaded } return dict ( sorted ( data. items ( ) ) )",False,name not in data,name in installed_xontribs,0.6616744995117188
1452,"def lines_filter ( self, lines : List [ str ], location : Tuple [ str, int ] = None ) -> List [ str ] : linespec = self. options. get ( ""lines"" ) if linespec : linelist = parselinenos ( linespec, len ( lines ) ) if : logger. warning ( __ ( ""line number spec is out of range(1-%d): %r"" ) % ( len ( lines ), linespec ), location = location, ) if ""lineno-match"" in self. options : first = linelist [ 0 ] if all ( first + i == n for i, n in enumerate ( linelist ) ) : self. lineno_start += linelist [ 0 ] else : raise ValueError ( __ ( 'Cannot use ""lineno-match"" with a disjoint''set of ""lines""' ) ) lines = [ lines [ n ] for n in linelist if n < len ( lines ) ] if lines == [ ] : raise ValueError ( __ ( ""Line spec %r: no lines pulled from include file %r"" ) % ( linespec, self. filename ) ) return lines",False,any((i >= len(lines) for i in linelist)),location and location < linelist[0],0.6610935926437378
1453,"def PyJs_anonymous_1847_ ( a, b, this, arguments, var = var ) : var = Scope ( { u""a"" : a, u""this"" : this, u""b"" : b, u""arguments"" : arguments }, var ) var. registers ( [ u""a"", u""b"", u""result"" ] ) if ( var. get ( u""isObject"" ) ( var. get ( u""a"" ) ) and var. get ( u""isExtensible"" ) ( var. get ( u""a"" ) ). neg ( ) ) : if : var. get ( u""this"" ). put ( u""_f"", var. get ( u""InternalMap"" ). create ( ) ) var. put ( u""result"", var. get ( u""this"" ) . get ( u""_f"" ) . callprop ( var. get ( u""key"" ), var. get ( u""a"" ), var. get ( u""b"" ) ), ) return ( var. get ( u""this"" ) if ( var. get ( u""key"" ) == Js ( u""set"" ) ) else var. get ( u""result"" ) ) return var. get ( u""method"" ). callprop ( u""call"", var. get ( u""this"" ), var. get ( u""a"" ), var. get ( u""b"" ) )",False,var.get(u'this').get(u'_f').neg(),"hasattr(var, '__getitem__')",0.6477598547935486
1454,"def assertTagInTemplateScript ( self, needle, haystack, count = None, msg_prefix = """" ) : needle = assert_and_parse_html ( self, needle, None, ""First argument is not valid HTML:"" ) haystack = assert_and_parse_html ( self, haystack, None, ""Second argument is not valid HTML:"" ) real_count = 0 for script_tag in self. _find_template_script_tags ( haystack ) : if : self. assertEqual ( len ( script_tag. children ), 1 ) script_html = assert_and_parse_html ( self, script_tag. children [ 0 ], None, ""Script tag content is not valid HTML:"", ) real_count += self. _count_tag_occurrences ( needle, script_html ) if count is not None : self. assertEqual ( real_count, count, msg_prefix + ""Found %d instances of '%s' in template script (expected %d)"" % ( real_count, needle, count ), ) else : self. assertTrue ( real_count!= 0, msg_prefix + ""Couldn't find '%s' in template script"" % needle, )",False,script_tag.children,script_tag is not None,0.6564608812332153
1455,"def parse_production ( xml_text ) : """"""Returns a tuple containing two lists."""""" if not xml_text : return None soup = BeautifulSoup ( xml_text, ""html.parser"" ) productions = [ ] datetimes = [ ] for timeseries in soup. find_all ( ""timeseries"" ) : resolution = timeseries. find_all ( ""resolution"" ) [ 0 ]. contents [ 0 ] datetime_start = arrow. get ( timeseries. find_all ( ""start"" ) [ 0 ]. contents [ 0 ] ) is_production = ( len ( timeseries. find_all ( ""inBiddingZone_Domain.mRID"". lower ( ) ) ) > 0 ) psr_type = ( timeseries. find_all ( ""mktpsrtype"" ) [ 0 ]. find_all ( ""psrtype"" ) [ 0 ]. contents [ 0 ] ) for entry in timeseries. find_all ( ""point"" ) : quantity = float ( entry. find_all ( ""quantity"" ) [ 0 ]. contents [ 0 ] ) position = int ( entry. find_all ( ""position"" ) [ 0 ]. contents [ 0 ] ) datetime = datetime_from_position ( datetime_start, position, resolution ) try : i = datetimes. index ( datetime ) if : productions [ i ] [ psr_type ] += quantity else : productions [ i ] [ psr_type ] -= quantity except ValueError : if hasattr ( self, ""venv"" ) : return self. venv = join ( self. buildozer_dir, ""venv"" ) if not self. file_exists ( self. venv ) : self. cmd ( ""virtualenv --python=python2.7./venv"", cwd = self. buildozer_dir ) output = self. cmd ( 'bash -c ""source venv/bin/activate && env""', get_stdout = True, cwd = self. buildozer_dir, ) self. env_venv = copy ( self. environ ) for line in output [ 0 ]. splitlines ( ) : args = line. split ( ""="", 1 ) if len ( args )!= 2 : continue key, value = args if : self. env_venv [ key ] = value if ""PYTHONHOME"" in self. env_venv : del self. env_venv [ ""PYTHONHOME"" ] self. env_venv [ ""CC"" ] = ""/bin/false"" self. env_venv [ ""CXX"" ] = ""/bin/false""",False,"key in ('VIRTUAL_ENV', 'PATH')",key in self.env_venv,0.6636096239089966
1457,"def __call__ ( self, req ) : access_key = str ( req. params [ ""AWSAccessKeyId"" ] ) failures_key = ""authfailures-%s"" % access_key failures = int ( self. mc. get ( failures_key ) or 0 ) if failures >= FLAGS. lockout_attempts : detail = _ ( ""Too many failed authentications."" ) raise webob. exc. HTTPForbidden ( detail = detail ) res = req. get_response ( self. application ) if res. status_int == 403 : failures = self. mc. incr ( failures_key ) if : self. mc. set ( failures_key, ""1"", time = FLAGS. lockout_window * 60 ) elif failures >= FLAGS. lockout_attempts : lock_mins = FLAGS. lockout_minutes msg = ( _ ( ""Access key %(access_key)s has had %(failures)d"" "" failed authentications and will be locked out"" "" for %(lock_mins)d minutes."" ) % locals ( ) ) LOG. warn ( msg ) self. mc. set ( failures_key, str ( failures ), time = FLAGS. lockout_minutes * 60 ) return res",False,failures is None,failures >= FLAGS.lockout_window,0.6843120455741882
1458,"def zapToCharacter ( self, event ) : """"""Kill characters from the insertion point to a given character."""""" k = self. c. k w = self. editWidget ( event ) if not w : return state = k. getState ( ""zap-to-char"" ) if state == 0 : k. setLabelBlue ( ""Zap To Character: "" ) k. setState ( ""zap-to-char"", 1, handler = self. zapToCharacter ) else : ch = event. char if event else "" "" k. resetLabel ( ) k. clearState ( ) s = w. getAllText ( ) ins = w. getInsertPoint ( ) i = s. find ( ch, ins ) if : return self. beginCommand ( w, undoType = ""zap-to-char"" ) self. addToKillBuffer ( s [ ins : i ] ) g. app. gui. replaceClipboardWith ( s [ ins : i ] ) w. setAllText ( s [ : ins ] + s [ i : ] ) w. setInsertPoint ( ins ) self. endCommand ( changed = True, setLabel = True )",False,i == -1,i < 0,0.6703222990036011
1459,"def children ( self, ** kwargs ) : """"""Build a list of treeview nodes from the child nodes."""""" if ""sid"" not in kwargs : return precondition_required ( gettext ( ""Required properties are missing."" ) ) from pgadmin. utils. driver import get_driver manager = get_driver ( PG_DEFAULT_DRIVER ). connection_manager ( sid = kwargs [ ""sid"" ] ) did = None if ""did"" in kwargs : did = kwargs [ ""did"" ] try : conn = manager. connection ( did = did ) if not conn. connected ( ) : status, msg = conn. connect ( ) if : return internal_server_error ( errormsg = msg ) except ( ConnectionLost, SSHTunnelConnectionLost, CryptKeyMissing ) : raise except Exception : return precondition_required ( gettext ( ""Connection to the server has been lost."" ) ) return make_json_response ( data = sorted ( self. get_children_nodes ( manager, ** kwargs ), key = lambda c : c [ ""label"" ] ) )",False,not status,status,0.6787869334220886
1460,"def scan_exist_ip_worker ( self ) : while self. running and self. keep_scan_all_exist_ip : try : ip_str = self. scan_exist_ip_queue. get_nowait ( ) except : break result = self. check_ip ( ip_str ) if : self. ip_lock. acquire ( ) try : if ip_str not in self. ip_dict : continue if self. ip_dict [ ip_str ] [ ""fail_times"" ] == 0 : self. _add_ip_num ( ip_str, - 1 ) self. ip_dict [ ip_str ] [ ""fail_times"" ] += 1 self. ip_dict [ ip_str ] [ ""fail_time"" ] = time. time ( ) finally : self. ip_lock. release ( ) elif result. ok : self. add_ip ( ip_str, result. request_time, result. domain ) else : self. report_connect_fail ( ip_str, force_remove = True )",False,not result,result.ok,0.6645494103431702
1461,"def testParseModel ( self ) : res_dir = os. path. join ( self. results_dir, ""baseml"", ""model"" ) for results_file in os. listdir ( res_dir ) : version = results_file. split ( ""-"" ) [ 1 ]. split ( ""."" ) [ 0 ] model = results_file [ 5 ] version_msg = ""Improper parsing for model %s version %s"" % ( model, version. replace ( ""_"", ""."" ), ) results_path = os. path. join ( res_dir, results_file ) results = baseml. read ( results_path ) self. assertEqual ( len ( results ), 6, version_msg ) self. assertIn ( ""parameters"", results, version_msg ) params = results [ ""parameters"" ] self. assertIn ( ""alpha"", params, version_msg ) self. assertIn ( ""rates"", params, version_msg ) self. assertIn ( ""parameter list"", params, version_msg ) self. assertIn ( ""rate frequencies"", params, version_msg ) if : self. assertIn ( ""kappa"", params, version_msg ) if model in [ ""7"", ""8"" ] : self. assertIn ( ""base frequencies"", params, version_msg ) self. assertIn ( ""rate parameters"", params, version_msg ) self. assertIn ( ""Q matrix"", params, version_msg ) qmat = params [ ""Q matrix"" ] self. assertEqual",False,"model in ['1', '3', '4', '5', '6']","model in ['6', '7']",0.6486180424690247
1462,"def set_version ( self, value ) : value = value. strip ( ). replace ( "" "", """" ) if re. match ( r""^\d+(\.\d+){1,3}$"", value ) is not None : check = True v_i = value. split ( ""."" ) for item in v_i : try : i = int ( item ) except ValueError : check = False break if : self [ ""arm_project_version"" ] = value",True,check,check,0.6907080411911011
1463,"def load_dataset ( self ) : cfg = self. cfg file_name = os. path. join ( self. cfg. project_path, cfg. dataset ) mlab = sio. loadmat ( file_name ) self. raw_data = mlab mlab = mlab [ ""dataset"" ] num_images = mlab. shape [ 1 ] data = [ ] has_gt = True for i in range ( num_images ) : sample = mlab [ 0, i ] item = DataItem ( ) item. image_id = i item. im_path = sample [ 0 ] [ 0 ] item. im_size = sample [ 1 ] [ 0 ] if len ( sample ) >= 3 : joints = sample [ 2 ] [ 0 ] [ 0 ] joint_id = joints [ :, 0 ] if : assert ( joint_id < cfg. num_joints ). any ( ) joints [ :, 0 ] = joint_id item. joints = [ joints ] else : has_gt = False data. append ( item ) self. has_gt = has_gt return data",False,joint_id.size != 0,cfg.num_joints > 0,0.6641768217086792
1464,"def _ReadHereLines ( line_reader, h, delimiter, ) : here_lines = [ ] last_line = None strip_leading_tabs = h. op. id == Id. Redir_DLessDash while True : line_id, line, unused_offset = line_reader. GetLine ( ) if line is None : p_die ( ""Couldn't find terminator for here doc that starts here"", token = h. op ) assert len ( line )!= 0 start_offset = 0 if strip_leading_tabs : n = len ( line ) i = 0 while i < n : if : break i += 1 start_offset = i if line [ start_offset : ]. rstrip ( ) == delimiter : last_line = ( line_id, line, start_offset ) break here_lines. append ( ( line_id, line, start_offset ) ) return here_lines, last_line",False,line[i] != '\t',last_line is None,0.6575617790222168
1465,"def flatten_nest_dict ( d ) : """"""Return the dict with all nested keys flattened joined with '/'."""""" flat_dict = NonMutableDict ( ) for k, v in d. items ( ) : if : flat_dict. update ( { ""{}/{}"". format ( k, k2 ) : v2 for k2, v2 in flatten_nest_dict ( v ). items ( ) } ) else : flat_dict [ k ] = v return flat_dict",True,"isinstance(v, dict)","isinstance(v, dict)",0.6524205207824707
1466,"def export_speaker_csv_task ( event_id ) : try : os. mkdir ( app. config [ ""TEMP_UPLOADS_FOLDER"" ] ) except OSError as exc : if : raise exc filename = ""speaker-{}.csv"". format ( uuid. uuid1 ( ). hex ) file_path = app. config [ ""TEMP_UPLOADS_FOLDER"" ] + ""/"" + filename with open ( file_path, ""w"" ) as temp_file : writer = csv. writer ( temp_file ) content = SpeakerCsv. export ( event_id ) for row in content : row = [ s. encode ( ""utf-8"" ) for s in row ] writer. writerow ( row ) speaker_csv_file = UploadedFile ( file_path = file_path, filename = filename ) speaker_csv_url = upload ( speaker_csv_file, UPLOAD_PATHS [ ""exports"" ] [ ""csv"" ]. format ( event_id = event_id ) ) return speaker_csv_url",False,exc.errno != errno.EEXIST,not exc.has_error,0.6544767618179321
1467,"def test_fit2 ( self ) : scale_param = self. get_scale_param ( ) scale_param. scale_column_idx = [ ] scale_param. feat_upper = [ 2, 2, 2, 2, 2, 2 ] scale_param. feat_lower = [ 1, 1, 1, 1, 1, 1 ] scale_obj = MinMaxScale ( scale_param ) fit_instance = scale_obj. fit ( self. table_instance ) column_min_value = scale_obj. column_min_value column_max_value = scale_obj. column_max_value for i, line in enumerate ( self. test_data ) : for j, value in enumerate ( line ) : if value > 2 : self. test_data [ i ] [ j ] = 2 elif : self. test_data [ i ] [ j ] = 1 scaler = MMS ( ) scaler. fit ( self. test_data ) self. assertListEqual ( self. get_table_instance_feature ( fit_instance ), np. around ( scaler. transform ( self. test_data ), 6 ). tolist ( ), ) data_min = list ( scaler. data_min_ ) data_max = list ( scaler. data_max_ ) self. assertListEqual ( column_min_value, data_min ) self. assertListEqual ( column_max_value, data_max ) transform_data = scale_obj. transform ( self. table_instance ) self. assertListEqual ( self. get_table_instance_feature ( fit_instance ), self. get_table_instance_feature ( transform_data ), )",False,value < 1,value > 1,0.6671333312988281
1468,"def PyJs_normaliseOptions_261_ ( this, arguments, var = var ) : var = Scope ( { u""this"" : this, u""normaliseOptions"" : PyJs_normaliseOptions_261_, u""arguments"" : arguments, }, var, ) var. registers ( [ u""_key3"", u""val"", u""option"", u""opts"" ] ) var. put ( u""opts"", var. get ( u""this"" ). get ( u""options"" ) ) for PyJsTemp in var. get ( u""_config3"" ). get ( u""default"" ) : var. put ( u""_key3"", PyJsTemp ) var. put ( u""option"", var. get ( u""_config3"" ). get ( u""default"" ). get ( var. get ( u""_key3"" ) ) ) var. put ( u""val"", var. get ( u""opts"" ). get ( var. get ( u""_key3"" ) ) ) if : continue if var. get ( u""option"" ). get ( u""alias"" ) : var. get ( u""opts"" ). put ( var. get ( u""option"" ). get ( u""alias"" ), ( var. get ( u""opts"" ). get ( var. get ( u""option"" ). get ( u""alias"" ) ) or var. get ( u""val"" ) ), <",False,var.get(u'val').neg() and var.get(u'option').get(u'optional'),not var,0.6499732732772827
1469,"def kurfile ( request, example_directory, jinja_engine ) : result = Kurfile ( os. path. join ( example_directory, request. param ), jinja_engine ) modify_kurfile ( result. data ) for k in ( ""train"", ""validate"", ""test"", ""evaluate"" ) : if : for data_source in result. data [ k ] [ ""data"" ] : if ( ""speech_recognition"" in data_source and ""normalization"" in data_source [ ""speech_recognition"" ] ) : del data_source [ ""speech_recognition"" ] [ ""normalization"" ] result. parse ( ) return result",False,k in result.data and 'data' in result.data[k],k in result.data,0.6520867347717285
1470,"def convert_core ( capi1_file, capi2_file ) : try : core = open_core ( capi1_file ) if : return coredata = { } coredata [ ""name"" ] = str ( core. name ) if core. main. description : coredata [ ""description"" ] = strip_quotes ( core. main. description ) if core. provider : coredata [ ""provider"" ] = core. provider. config if core. scripts : coredata [ ""scripts"" ] = gather_scripts ( core ) filesets = gather_filesets ( core ) if filesets : coredata [ ""filesets"" ] = filesets targets = gather_targets ( core ) if targets : coredata [ ""targets"" ] = targets if core. parameter : parameters = gather_parameters ( core ) if parameters : coredata [ ""parameters"" ] = parameters if core. vpi : coredata [ ""vpi"" ] = gather_vpi ( core ) write_core ( capi2_file, coredata ) except Exception as e : logger. error ( ""Unable to convert core {}: {}"". format ( capi1_file, str ( e ) ) ) sys. exit ( 1 )",False,not core,core is None,0.7023690938949585
1471,"def _expand_file_annotations ( src_path, dst_path, nas_mode ) : with open ( src_path ) as src, open ( dst_path, ""w"" ) as dst : try : annotated_code = code_generator. parse ( src. read ( ), nas_mode ) if : shutil. copyfile ( src_path, dst_path ) return False dst. write ( annotated_code ) return True except Exception as exc : if exc. args : raise RuntimeError ( src_path + "" "" + ""\n"". join ( str ( arg ) for arg in exc. args ) ) else : raise RuntimeError ( ""Failed to expand annotations for %s: %r"" % ( src_path, exc ) )",False,annotated_code is None,"hasattr(dst, 'read')",0.6705628633499146
1472,"def wrapped ( * args ) : if not manual_session and not use_fixture : s, p = new_session ( mapgen = mapgen, human_player = human_player, ai_players = ai_players ) elif use_fixture : path = os. path. join ( TEST_FIXTURES_DIR, use_fixture + "".sqlite"" ) if : raise Exception ( ""Savegame {} not found"". format ( path ) ) s = load_session ( path ) timelimit = Timer ( handler ) timelimit. start ( timeout ) try : if use_fixture : return func ( s, * args ) elif not manual_session : return func ( s, p, * args ) else : return func ( * args ) finally : try : if use_fixture : s. end ( remove_savegame = False, keep_map = True ) elif not manual_session : s. end ( ) except Exception : pass finally : SPTestSession. cleanup ( ) timelimit. stop ( )",True,not os.path.exists(path),not os.path.exists(path),0.6483596563339233
1473,"def box_nms ( boxes, scores, proposals, thresh, topk, nms_type = ""normal"" ) : assert nms_type in [ ""normal"", ""rotate"" ], ""unknown nms type {}"". format ( nms_type ) order = np. argsort ( - scores ) boxes = boxes [ order ] scores = scores [ order ] proposals = proposals [ order ] nmsed_scores = [ ] nmsed_proposals = [ ] cnt = 0 while boxes. shape [ 0 ] : nmsed_scores. append ( scores [ 0 ] ) nmsed_proposals. append ( proposals [ 0 ] ) cnt += 1 if : break iou = box_iou ( boxes [ 0 ], boxes [ 1 : ], nms_type ) boxes = boxes [ 1 : ] [ iou < thresh ] scores = scores [ 1 : ] [ iou < thresh ] proposals = proposals [ 1 : ] [ iou < thresh ] return nmsed_scores, nmsed_proposals",False,cnt >= topk or boxes.shape[0] == 1,cnt > 3,0.6555474996566772
1474,"def on_activated_async ( self, view ) : if settings [ ""modified_lines_only"" ] : self. freeze_last_version ( view ) if settings [ ""enabled"" ] : match_trailing_spaces ( view ) if : active_views [ view. id ( ) ] = view. visible_region ( ) self. update_on_region_change ( view )",False,not view.id() in active_views,view.id() not in active_views,0.6515470743179321
1475,"def setTopGeometry ( self, w, h, x, y, adjustSize = True ) : x = max ( 10, x ) y = max ( 10, y ) if adjustSize : top = self. top sw = top. winfo_screenwidth ( ) sh = top. winfo_screenheight ( ) w = min ( sw - 10, w ) h = min ( sh - 10, h ) if x + w > sw : x = 10 if : y = 10 geom = ""%dx%d%+d%+d"" % ( w, h, x, y ) self. top. geometry ( geom )",False,y + h > sh,y + y > sw,0.6667530536651611
1476,"def __init__ ( self, classifier, layer_name = None, transpose = None, distance = None, copy_weights = True, ) : super ( ). __init__ ( ) self. copy_weights = copy_weights if layer_name is not None : self. set_weights ( getattr ( classifier, layer_name ) ) else : for x in self. possible_layer_names : layer = getattr ( classifier, x, None ) if : self. set_weights ( layer ) break self. distance = classifier. distance if distance is None else distance self. transpose = transpose",True,layer is not None,layer is not None,0.6652194857597351
1477,"def check_services_health ( self ) : """"""Check connectivity of all services"""""" for name, service in self. _service_map. items ( ) : if : continue try : await Server. from_orm ( service. orm. server ). wait_up ( timeout = 1 ) except TimeoutError : self. log. warning ( ""Cannot connect to %s service %s at %s"", service. kind, name, service. url, ) else : self. log. debug ( ""%s service %s running at %s"", service. kind. title ( ), name, service. url, )",False,not service.url,service.kind == 'server',0.6602045297622681
1478,"def undo_filter_paeth ( filter_unit, scanline, previous, result ) : """"""Undo Paeth filter."""""" ai = - filter_unit for i in range ( len ( result ) ) : x = scanline [ i ] if ai < 0 : a = c = 0 else : a = result [ ai ] c = previous [ ai ] b = previous [ i ] p = a + b - c pa = abs ( p - a ) pb = abs ( p - b ) pc = abs ( p - c ) if : pr = a elif pb <= pc : pr = b else : pr = c result [ i ] = ( x + pr ) & 0xFF ai += 1",False,pa <= pb and pa <= pc,pa <= pc,0.6689061522483826
1479,"def test_errors ( self ) : self. assertRaises ( TypeError, grp. getgrgid ) self. assertRaises ( TypeError, grp. getgrnam ) self. assertRaises ( TypeError, grp. getgrall, 42 ) self. assertRaises ( ValueError, grp. getgrnam, ""a\x00b"" ) bynames = { } bygids = { } for ( n, p, g, mem ) in grp. getgrall ( ) : if not n or n == ""+"" : continue bynames [ n ] = g bygids [ g ] = n allnames = list ( bynames. keys ( ) ) namei = 0 fakename = allnames [ namei ] while fakename in bynames : chars = list ( fakename ) for i in range ( len ( chars ) ) : if chars [ i ] == ""z"" : chars [ i ] = ""A"" break elif : continue else : chars [ i ] = chr ( ord ( chars [ i ] ) + 1 ) break else : namei = namei + 1 try : fakename = allnames [ namei ] except IndexError : break fakename = """". join ( chars ) self. assertRaises ( KeyError,",False,chars[i] == 'Z',chars[i] == 't',0.6575480699539185
1480,"def validate ( self ) : super ( CloudTrailMode, self ). validate ( ) from c7n import query events = self. policy. data [ ""mode"" ]. get ( ""events"" ) assert events, ""cloud trail mode requires specifiying events to subscribe"" for e in events : if isinstance ( e, str ) : assert e in CloudWatchEvents. trail_events, ( ""event shortcut not defined: %s"" % e ) if : jmespath. compile ( e [ ""ids"" ] ) if isinstance ( self. policy. resource_manager, query. ChildResourceManager ) : if not getattr ( self. policy. resource_manager. resource_type, ""supports_trailevents"", False ) : raise ValueError ( ""resource:%s does not support cloudtrail mode policies"" % ( self. policy. resource_type ) )",False,"isinstance(e, dict)",e,0.6523284912109375
1481,"def discover_hdfstore ( f ) : d = dict ( ) for key in f. keys ( ) : d2 = d key2 = key. lstrip ( ""/"" ) while ""/"" in key2 : group, key2 = key2. split ( ""/"", 1 ) if : d2 [ group ] = dict ( ) d2 = d2 [ group ] d2 [ key2 ] = f. get_storer ( key ) return discover ( d )",True,group not in d2,group not in d2,0.6638458371162415
1482,"def callUpdate ( ii ) : if ii % updateMultiply!= 0 : return if updateFunction is True : tasksDone = min ( [ ii, iterLength ] ) print ( f""Done {tasksDone} tasks of {iterLength}"" ) elif updateFunction not in ( False, None ) : for thisPosition in range ( ii - updateMultiply, ii ) : if : continue if thisPosition >= len ( resultsList ) : thisResult = None else : thisResult = resultsList [ thisPosition ] if updateSendsIterable is False : updateFunction ( thisPosition, iterLength, thisResult ) else : updateFunction ( thisPosition, iterLength, thisResult, iterable [ thisPosition ] )",False,thisPosition < 0,thisPosition >= len(resultsList),0.6682397127151489
1483,"def __getitem__ ( self, name ) : nstat = os. stat ( name ) if name in self. cache : stat, pixmap = self. cache [ name ] if : return pixmap else : del self. cache [ name ] pixmap = self. loadImage ( name ) self. cache [ name ] = ( nstat, pixmap ) return pixmap",False,stat.st_size == nstat.st_size and stat.st_mtime == nstat.st_mtime,nstat in [TAB>,0.6516166925430298
1484,"def create_default_site ( app_config, verbosity = 2, interactive = True, using = DEFAULT_DB_ALIAS, apps = global_apps, ** kwargs ) : try : Site = apps. get_model ( ""sites"", ""Site"" ) except LookupError : return if not router. allow_migrate_model ( using, Site ) : return if not Site. objects. using ( using ). exists ( ) : if verbosity >= 2 : print ( ""Creating example.com Site object"" ) Site ( pk = getattr ( settings, ""SITE_ID"", 1 ), domain = ""example.com"", name = ""example.com"" ). save ( using = using ) sequence_sql = connections [ using ]. ops. sequence_reset_sql ( no_style ( ), [ Site ] ) if : if verbosity >= 2 : print ( ""Resetting sequence"" ) with connections [ using ]. cursor ( ) as cursor : for command in sequence_sql : cursor. execute ( command )",False,sequence_sql,verbosity >= 1,0.6666785478591919
1485,"def rename ( checkpoint, op, dry_run ) : import tensorflow as tf tf. compat. v1. reset_default_graph ( ) with tf. compat. v1. Session ( ) as sess : for var_name, _ in tf. compat. v1. train. list_variables ( checkpoint ) : var = tf. compat. v1. train. load_variable ( checkpoint, var_name ) new_name = op ( var_name ) if dry_run : logger. info ( f""{var_name} would be renamed to {new_name}."" ) else : if var_name == new_name : logger. info ( f""No change for {var_name}"" ) else : logger. info ( f""Renaming {var_name} to {new_name}."" ) tf. Variable ( var, name = new_name ) if : saver = tf. compat. v1. train. Saver ( ) sess. run ( tf. compat. v1. global_variables_initializer ( ) ) saver. save ( sess, checkpoint ) tf. compat. v1. reset_default_graph ( )",False,not dry_run,op == tf.compat.v1.train.save_variable and (not dry_run),0.6640776991844177
1486,"def _init_scheme_list ( self, data ) : """"""initialize.handlers and.schemes attributes"""""" handlers = [ ] schemes = [ ] if isinstance ( data, native_string_types ) : data = splitcomma ( data ) for elem in data or ( ) : if hasattr ( elem, ""name"" ) : handler = elem scheme = handler. name _validate_handler_name ( scheme ) elif : handler = get_crypt_handler ( elem ) scheme = handler. name else : raise TypeError ( ""scheme must be name or CryptHandler, "" ""not %r"" % type ( elem ) ) if scheme in schemes : raise KeyError ( ""multiple handlers with same name: %r"" % ( scheme, ) ) handlers. append ( handler ) schemes. append ( scheme ) self. handlers = tuple ( handlers ) self. schemes = tuple ( schemes )",False,"isinstance(elem, native_string_types)","hasattr(elem, 'crypt_handler')",0.6519653797149658
1487,"def to_value ( self, value ) : ret = { } for key, val in value. items ( ) : if key in [ ""attachments"", ""custom_attributes"", ""description_diff"" ] : ret [ key ] = val elif : ret [ key ] = { k : { ""from"" : v [ 0 ], ""to"" : v [ 1 ] } for k, v in val. items ( ) } else : ret [ key ] = { ""from"" : val [ 0 ], ""to"" : val [ 1 ] } return ret",False,key == 'points',"isinstance(val, dict)",0.6632466912269592
1488,"def lint ( ) : """"""Run linter on the provided text and return the results."""""" if ""text"" in request. values : text = unquote ( request. values [ ""text"" ] ) job = q. enqueue ( worker_function, text ) return jsonify ( job_id = job. id ), 202 elif ""job_id"" in request. values : job = q. fetch_job ( request. values [ ""job_id"" ] ) if not job : return jsonify ( status = ""error"", message = ""No job with requested job_id."" ), 404 elif : return jsonify ( status = ""error"", message = ""Job is not yet ready."" ), 202 else : errors = [ ] for i, e in enumerate ( job. result ) : app. logger. debug ( e ) errors. append ( { ""check"" : e [ 0 ], ""message"" : e [ 1 ], ""line"" : e [ 2 ], ""column"" : e [ 3 ], ""start"" : e [ 4 ], ""end"" : e [ 5 ], ""extent"" : e [ 5 ] - e [ 4 ], list : _, raw_results = await self. db. execute_query ( query. get_sql ( ) ) instance_list = [ ] for row in raw_results : if self. select_related_idx : _, current_idx, _, _ = self. select_related_idx [ 0 ] dict_row = dict ( row ) keys = list ( dict_row. keys ( ) ) values = list ( dict_row. values ( ) ) instance : ""Model"" = self. model. _init_from_db ( ** dict ( zip ( keys [ : current_idx ], values [ : current_idx ] ) ) ) instances = [ instance ] for model, index, model_name, parent_model in self. select_related_idx [ 1 : ] : obj = model. _init_from_db ( ** dict ( zip ( map ( lambda x : x. split ( ""."" ) [ 1 ], keys [ current_idx : current_idx + index ], ), values [ current_idx : current_idx + index ], <",False,"isinstance(ins, parent_model)",custom_fields,0.6493746638298035
1490,"def run ( self ) : while True : if not self. initialized : try : with Client ( ) as c : self. disks = c. call ( ""disk.disks_for_temperature_monitoring"" ) self. powermode = c. call ( ""smart.config"" ) [ ""powermode"" ] except Exception as e : print ( f""Failed to query disks for temperature monitoring: {e!r}"" ) else : self. initialized = True if not self. initialized : time. sleep ( self. interval ) continue if not self. disks : return try : with Client ( ) as c : self. temperatures = { disk : temperature * 1000 for disk, temperature in c. call ( ""disk.temperatures"", self. disks, self. powermode ). items ( ) if : } except Exception as e : print ( f""Failed to collect disks temperatures: {e!r}"" ) self. temperatures = { } time. sleep ( self. interval )",False,temperature is not None,self.temperatures,0.6723409295082092
1491,"def _finalize_sv ( solution_file, data ) : """"""Add output files from TitanCNA calling optional solution."""""" out = { ""variantcaller"" : ""titancna"" } with open ( solution_file ) as in_handle : solution = dict ( zip ( in_handle. readline ( ). strip ( ""\r\n"" ). split ( ""\t"" ), in_handle. readline ( ). strip ( ""\r\n"" ). split ( ""\t"" ), ) ) if solution. get ( ""path"" ) : out [ ""purity"" ] = solution [ ""purity"" ] out [ ""ploidy"" ] = solution [ ""ploidy"" ] out [ ""cellular_prevalence"" ] = [ x. strip ( ) for x in solution [ ""cellPrev"" ]. split ( "","" ) ] base = os. path. basename ( solution [ ""path"" ] ) out [ ""plot"" ] = dict ( [ ( n, solution [ ""path"" ] + ext ) for ( n, ext ) in [ ( ""rplots"", "".Rplots.pdf"" ), ( ""cf"", ""/%s_CF.pdf"" % base ), ( ""cna"", ""/%s_CNA.pdf"" % base ), ( ""loh"", ""/%s_LOH.pdf"" % base ), ",False,os.path.exists(solution['path'] + ext),d.get('vq'),0.6477440595626831
1492,"def reconfigure_levels ( self ) : """"""Adjust the log levels for some modules."""""" self. log_level = get_default_level ( ) mapping = dict ( ) modules_config = { ""subliminal"" : app. SUBLIMINAL_LOG, ""tornado"" : app. WEB_LOG } for modname, active in viewitems ( modules_config ) : if : mapping. update ( { modname : CRITICAL } ) for logger in self. loggers : fullname = logger. name basename = fullname. split ( ""."" ) [ 0 ] level = mapping. get ( fullname ) or mapping. get ( basename ) or self. log_level logger. setLevel ( level ) for handler in ( self. console_handler, self. file_handler ) : if handler : handler. setLevel ( self. log_level )",False,not active,active,0.6758536696434021
1493,"def get_all_values ( self, project ) : if isinstance ( project, models. Model ) : project_id = project. id else : project_id = project if project_id not in self. __cache : cache_key = self. _make_key ( project_id ) result = cache. get ( cache_key ) if : result = self. reload_cache ( project_id ) else : self. __cache [ project_id ] = result return self. __cache. get ( project_id, { } )",False,result is None,result == {},0.6683422327041626
1494,"def check_message ( neighbor, message ) : message = message. replace ( "":"", """" ) raw = concat_bytes_i ( character ( int ( _, 16 ) ) for _ in ( message [ i * 2 : ( i * 2 ) + 2 ] for i in range ( len ( message ) // 2 ) ) ) if raw. startswith ( b""\xff"" * 16 ) : kind = ordinal ( raw [ 18 ] ) if kind == 1 : return check_open ( neighbor, raw [ 18 : ] ) elif : return check_update ( neighbor, raw ) elif kind == 3 : return check_notification ( raw ) else : return check_update ( neighbor, raw )",True,kind == 2,kind == 2,0.6761634945869446
1495,"def reload ( self, begin = True ) : """"""Begin or end of reloading resp. refreshing of all parameters"""""" if begin : self. _reload_actions = dict ( ) else : if hasattr ( self, ""_reload_actions"" ) : for name, initOpts in self. _reload_actions. iteritems ( ) : if : self. _actions [ name ]. reload ( ** ( initOpts if initOpts else { } ) ) delacts = OrderedDict ( ( name, action ) for name, action in self. _actions. iteritems ( ) if name not in self. _reload_actions ) if len ( delacts ) : self. __flushBan ( db = False, actions = delacts ) self. stopActions ( actions = delacts ) delattr ( self, ""_reload_actions"" )",True,name in self._actions,name in self._actions,0.6712414622306824
1496,"def merge ( self, other ) : d = self. _name2ft for name, ( f, t ) in other. _name2ft. items ( ) : if : f2, t2 = d [ name ] f = f + f2 t = t + t2 d [ name ] = f, t",True,name in d,name in d,0.6783638000488281
1497,"def __new__ ( meta, name, bases, clsdict ) : if not ( ""__doc__"" in clsdict and clsdict [ ""__doc__"" ] ) : for mro_cls in ( mro_cls for base in bases for mro_cls in base. mro ( ) ) : doc = mro_cls. __doc__ if doc : clsdict [ ""__doc__"" ] = doc break for attr, attribute in listitems ( clsdict ) : if not attribute. __doc__ : for mro_cls in ( mro_cls for base in bases for mro_cls in base. mro ( ) if hasattr ( mro_cls, attr ) ) : doc = getattr ( getattr ( mro_cls, attr ), ""__doc__"" ) if doc : if : clsdict [ attr ] = property ( attribute. fget, attribute. fset, attribute. fdel, doc ) else : attribute. __doc__ = doc break return type. __new__ ( meta, name, bases, clsdict )",False,"isinstance(attribute, property)","hasattr(attribute, '__fget')",0.6502389907836914
1498,"def stop ( self, timeout = 5 ) : for worker in self. _threads : self. _queue. put ( _SHUTDOWNREQUEST ) current = threading. currentThread ( ) if timeout and timeout >= 0 : endtime = time. time ( ) + timeout while self. _threads : worker = self. _threads. pop ( ) if worker is not current and worker. isAlive ( ) : try : if timeout is None or timeout < 0 : worker. join ( ) else : remaining_time = endtime - time. time ( ) if : worker. join ( remaining_time ) if worker. isAlive ( ) : c = worker. conn if c and not c. rfile. closed : try : c. socket. shutdown ( socket. SHUT_RD ) except TypeError : c. socket",False,remaining_time > 0,remaining_time is not None,0.6739023923873901
1499,"def _token_led_lparen ( self, left ) : if left [ ""type"" ]!= ""field"" : prev_t = self. _lookahead_token ( - 2 ) raise exceptions. ParseError ( prev_t [ ""start"" ], prev_t [ ""value"" ], prev_t [ ""type"" ], ""Invalid function name '%s'"" % prev_t [ ""value"" ], ) name = left [ ""value"" ] args = [ ] while not self. _current_token ( ) == ""rparen"" : expression = self. _expression ( ) if : self. _match ( ""comma"" ) args. append ( expression ) self. _match ( ""rparen"" ) function_node = ast. function_expression ( name, args ) return function_node",False,self._current_token() == 'comma',expression,0.6553242206573486
1500,"def example_reading_spec ( self ) : data_fields = { ""targets"" : tf. VarLenFeature ( tf. int64 ) } if : data_fields [ ""inputs"" ] = tf. VarLenFeature ( tf. int64 ) if self. packed_length : if : data_fields [ ""inputs_segmentation"" ] = tf. VarLenFeature ( tf. int64 ) data_fields [ ""inputs_position"" ] = tf. VarLenFeature ( tf. int64 ) data_fields [ ""targets_segmentation"" ] = tf. VarLenFeature ( tf. int64 ) data_fields [ ""targets_position"" ] = tf. VarLenFeature ( tf. int64 ) data_items_to_decoders = None return ( data_fields, data_items_to_decoders )",False,self.has_inputs,self.stochastic_length,0.6544179916381836
1501,"def _augment_images_by_samples ( self, images, samples, image_shapes = None, return_matrices = False ) : nb_images = len ( images ) input_was_array = ia. is_np_array ( images ) input_dtype = None if not input_was_array else images. dtype result = [ ] if : matrices = [ None ] * nb_images for i in sm. xrange ( nb_images ) : image = images [ i ] image_shape = image. shape if image_shapes is None else image_shapes [ i ] matrix, output_shape = samples. to_matrix ( i, image. shape, image_shape, self. fit_output ) cval = samples. cval [ i ] mode = samples. mode [ i ] order = samples. order [ i ] if not _is_identity_matrix ( matrix ) : image_warped = _warp_affine_arr ( image, matrix, order = order, mode = mode, cval = cval, output_shape = output_shape, backend = self. backend, ) result. append ( image_warped ) else : result. append ( image ) if : matrices [ i ] = matrix if input_was_array : for i in range ( self. window ( ). downloads_list. topLevelItemCount ( ) ) : item = self. window ( ). downloads_list. topLevelItem ( i ) if : continue filter_match = ( self. window ( ). downloads_filter_input. text ( ). lower ( ) in item. download_info [ ""name"" ]. lower ( ) ) is_channel = item. download_info [ ""channel_download"" ] if self. filter == DOWNLOADS_FILTER_CHANNELS : item. setHidden ( not is_channel or not filter_match ) else : item. setHidden ( not item. get_raw_download_status ( ) in DOWNLOADS_FILTER_DEFINITION [ self. filter ] or not filter_match or is_channel )",False,"not isinstance(item, DownloadWidgetItem)",item is None,0.6617163419723511
1503,"def step ( self ) -> None : """"""Performs a single optimization step."""""" for group in self. param_groups : for p in group [ ""params"" ] : if : continue p. add_ ( p. grad, alpha = ( - group [ ""lr"" ] * self. num_data ) ) return None",True,p.grad is None,p.grad is None,0.6571645736694336
1504,"def setup_package ( version ) : cmdclass = { ""test"" : PyTest, ""flake8"" : Flake8 } install_requires = [ ] for r in read ( ""requirements.txt"" ). split ( ""\n"" ) : r = r. strip ( ) if : continue extra = False for e, v in EXTRAS_REQUIRE. items ( ) : if v and r. startswith ( v [ 0 ] ) : EXTRAS_REQUIRE [ e ] = ( [ r ] if e!= ""kubernetes"" or sys. version_info < ( 3, 0, 0 ) else [ ] ) extra = True if not extra : install_requires. append ( r ) command_options = { ""test"" : { } } if COVERAGE_XML : command_options [ ""test"" ] [ ""cov_xml"" ] = ""setup.py"", True if COVERAGE_HTML : command_options [ ""test"" ] [ ""cov_html"" ] = ""setup.py"", True setup ( name = NAME, version = version, url = URL, author = AUTHOR, author_email = AUTHOR_EMAIL, description = DESCRIPTION, license = LICENSE, keywords = KEYWORDS, long_description = read ( ""README.rst"" ), classifiers = CLASSIFIERS, packages = find_packages ( exclude = [ ""tests"", ""tests.*",True,r == '',r == '',0.6907327175140381
1505,"def _get_ispdb ( self, email, domain ) : domain = self. _clean_domain ( domain ) if domain in ( ""localhost"", ) : return None self. _progress ( _ ( ""Checking ISPDB for %s"" ) % domain ) settings = self. _get_xml_autoconfig ( self. ISPDB_URL % { ""domain"" : domain }, domain, email ) if settings : self. _log_result ( _ ( ""Found %s in ISPDB"" ) % domain ) return settings dparts = domain. split ( ""."" ) if len ( dparts ) > 2 : domain = ""."". join ( dparts [ 1 : ] ) if : return self. _get_xml_autoconfig ( self. ISPDB_URL % { ""domain"" : domain }, domain, email ) return None",False,"domain not in ('co.uk', 'pagekite.me')",domain,0.6490097641944885
1506,"def __gt__ ( self, other ) : if isinstance ( other, self. __class__ ) : if almost_equal ( self. intersection, other. intersection, self. accuracy ) : if : return False else : return self. product > other. product else : return self. intersection > other. intersection else : if almost_equal ( self. intersection, other, self. accuracy ) : return False else : return self. intersection > other",False,"almost_equal(self.product, other.product, self.accuracy)",self.product is not None and other.product is not None,0.651659369468689
1507,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if : break if fid == 1 : if ftype == TType. STRUCT : self. status = TStatus ( ) self. status. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. delegationToken = ( iprot. readString ( ). decode ( ""utf-8"" ) if sys. version_info [ 0 ] == 2 else iprot. readString ( ) ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STOP,fid == 0,0.6614224910736084
1508,"def short_repr ( obj ) : if isinstance ( obj, ( type, types. ModuleType, types. BuiltinMethodType, types. BuiltinFunctionType ), ) : return obj. __name__ if isinstance ( obj, types. MethodType ) : if : return obj. im_func. __name__ + "" (bound)"" else : return obj. im_func. __name__ if isinstance ( obj, ( tuple, list, dict, set ) ) : return ""%d items"" % len ( obj ) if isinstance ( obj, weakref. ref ) : return ""all_weakrefs_are_one"" return repr ( obj ) [ : 40 ]",False,obj.im_self is not None,"isinstance(obj, types.ParameterType)",0.6586785316467285
1509,"def validate_domain ( domain : Optional [ str ] ) -> None : if domain is None or len ( domain ) == 0 : raise ValidationError ( _ ( ""Domain can't be empty."" ) ) if ""."" not in domain : raise ValidationError ( _ ( ""Domain must have at least one dot (.)"" ) ) if len ( domain ) > 255 : raise ValidationError ( _ ( ""Domain is too long"" ) ) if domain [ 0 ] == ""."" or domain [ - 1 ] == ""."" : raise ValidationError ( _ ( ""Domain cannot start or end with a dot (.)"" ) ) for subdomain in domain. split ( ""."" ) : if not subdomain : raise ValidationError ( _ ( ""Consecutive '.' are not allowed."" ) ) if : raise ValidationError ( _ ( ""Subdomains cannot start or end with a '-'."" ) ) if not re. match ( ""^[a-z0-9-]*$"", subdomain ) : raise ValidationError ( _ ( ""Domain can only have letters, numbers, '.' and '-'s."" ) )",False,subdomain[0] == '-' or subdomain[-1] == '-','-' in domain,0.6548717021942139
1510,"def _write_image ( out, imgName, data, append, icon, catalog, functionCompatible, old_index ) : out. write ( ""#"" + ""-"" * 70 + ""\n"" ) if not append : out. write ( ""# This file was generated by %s\n#\n"" % sys. argv [ 0 ] ) out. write ( ""from wx.lib.embeddedimage import PyEmbeddedImage\n\n"" ) if : out. write ( ""catalog = {}\n"" ) out. write ( ""index = []\n\n"" ) varName = _replace_non_alphanumeric_with_underscore ( imgName ) out. write ( ""%s = PyEmbeddedImage(\n%s\n"" % ( varName, data ) ) if : if imgName in old_index : print ( ""Warning: %s already in catalog."" % imgName ) print ( "" Only the last entry will be accessible.\n"" ) old_index. append ( imgName ) out. write ( ""index.append('%s')\n"" % imgName ) out. write ( ""catalog['%s'] = %s\n"" % ( imgName, varName ) ) if functionCompatible : out. write ( ""get%sData = %s.GetData\n"" % ( varName, varName ) ) out. write ( ""get%sImage = %s.GetImage\n"" % ( varName, varName ) ) out. write ( ""get%sBitmap = %s.GetBitmap\n"" % ( varName, varName ) ) if icon : out. write ( ""get%sIcon = %s.GetIcon\n"" % ( varName, varName ) ) out. write ( ""\n"" )",True,catalog,catalog,0.7033922672271729
1511,"def __init__ ( self, data = None, ** params ) : if isinstance ( data, Element ) : params = dict ( get_param_values ( data ), ** params ) if ""kdims"" not in params : params [ ""kdims"" ] = data. kdims if : params [ ""vdims"" ] = data. vdims data = data. mapping ( ) super ( Collator, self ). __init__ ( data, ** params )",False,'vdims' not in params,"""vdims' not in params",0.6707237362861633
1512,"def tree ( self, media = None, media_id = None ) : db = get_db ( ) if media : result = media elif media_id : result = db. get ( ""id"", media_id, with_doc = True ) else : return None items = db. get_many ( ""media_children"", result [ ""_id"" ], with_doc = True ) keys = [ ] for item in items : key = self. key ( item [ ""doc"" ] [ ""type"" ] ) + ""s"" if : result [ key ] = { } elif type ( result [ key ] ) is not dict : result [ key ] = { } if key not in keys : keys. append ( key ) result [ key ] [ item [ ""_id"" ] ] = fireEvent ( ""library.tree"", item [ ""doc"" ], single = True ) for key in keys : result [ key ] = result [ key ]. values ( ) result [ ""releases"" ] = fireEvent ( ""release.for_media"", result [ ""_id"" ], single = True ) return result",True,key not in result,key not in result,0.6674259305000305
1513,"def start ( self ) : self. logger. debug ( ""Starting..."" ) self. server = ThreadedTCPServer ( ( self. local_ip, int ( self. config [ ""port"" ] ) ), ThreadedTCPRequestHandler ) if self. config. get ( ""usessl"" ) == ""Yes"" : self. logger. debug ( ""Using SSL socket"" ) keyfile_path = ""listeners/ssl_utils/privkey.pem"" keyfile_path = ListenerBase. abs_config_path ( keyfile_path ) if keyfile_path is None : self. logger. error ( ""Could not locate %s"", keyfile_path ) sys. exit ( 1 ) certfile_path = ""listeners/ssl_utils/server.pem"" certfile_path = ListenerBase. abs_config_path ( certfile_path ) if : self. logger. error ( ""Could not locate %s"", certfile_path ) sys. exit ( 1 ) self. server. socket = ssl. wrap_socket ( self. server. socket, keyfile = ""privkey.pem"", certfile = ""server.pem"", server_side = True, ciphers = ""RSA"", ) self. server. logger = self. logger self. server. config = self. config self. server_thread = threading. Thread ( target = self. server. serve_forever ) self. server_thread. daemon = True self. server_thread. start ( )",True,certfile_path is None,certfile_path is None,0.653950035572052
1514,"def test_primitive_options_class_names ( es ) : options1 = { ""mean"" : { ""include_entities"" : [ ""customers"" ] } } options2 = { Mean : { ""include_entities"" : [ ""customers"" ] } } bad_options = { ""mean"" : { ""include_entities"" : [ ""customers"" ] }, Mean : { ""ignore_entities"" : [ ""customers"" ] }, } conflicting_error_text = ""Multiple options found for primitive mean"" primitives = [ [ ""mean"" ], [ Mean ] ] options = [ options1, options2 ] features = [ ] for primitive in primitives : with pytest. raises ( KeyError, match = conflicting_error_text ) : DeepFeatureSynthesis ( target_entity_id = ""cohorts"", entityset = es, agg_primitives = primitive, trans_primitives = [ ], primitive_options = bad_options, ) for option in options : dfs_obj = DeepFeatureSynthesis ( target_entity_id = ""cohorts"", entityset = es, agg_primitives = primitive, trans_primitives = [ ], primitive_options = option, ) features. append ( set ( dfs_obj. build_features ( ) ) ) for f in features [ 0 ] : ",False,"isinstance(f.primitive, Mean)",primitives,0.6532958745956421
1515,"def readAtAutoNodes ( self ) : c = self. c p = c. p after = p. nodeAfterTree ( ) found = False while p and p!= after : if p. isAtAutoNode ( ) : if : g. warning ( ""ignoring"", p. h ) p. moveToThreadNext ( ) else : fileName = p. atAutoNodeName ( ) c. atFileCommands. readOneAtAutoNode ( fileName, p ) found = True p. moveToNodeAfterTree ( ) else : p. moveToThreadNext ( ) if not g. unitTesting : message = ""finished"" if found else ""no @auto nodes in the selected tree"" g. blue ( message ) c. redraw ( )",False,p.isAtIgnoreNode(),found,0.6575652360916138
1516,"def _LeaseMessageHandlerRequests ( self, lease_time, limit ) : """"""Read and lease some outstanding message handler requests."""""" leased_requests = [ ] now = rdfvalue. RDFDatetime. Now ( ) zero = rdfvalue. RDFDatetime. FromSecondsSinceEpoch ( 0 ) expiration_time = now + lease_time leases = self. message_handler_leases for requests in self. message_handler_requests. values ( ) : for r in requests. values ( ) : existing_lease = leases. get ( r. handler_name, { } ). get ( r. request_id, zero ) if : leases. setdefault ( r. handler_name, { } ) [ r. request_id ] = expiration_time r. leased_until = expiration_time r. leased_by = utils. ProcessIdString ( ) leased_requests. append ( r ) if len ( leased_requests ) >= limit : break return leased_requests",False,existing_lease < now,existing_lease,0.6596664786338806
1517,"def inner ( collection = ""test"" ) : if collection is None : return davical_args assert collection. startswith ( ""test"" ) for _ in range ( 4 ) : args = self. storage_class. create_collection ( collection + str ( uuid. uuid4 ( ) ), ** davical_args ) s = self. storage_class ( ** args ) if : request. addfinalizer ( lambda : s. session. request ( ""DELETE"", """" ) ) return args raise RuntimeError ( ""Failed to find free collection."" )",False,not list(s.list()),s.session is not None,0.6495428085327148
1518,"def determine_local_world_size ( nproc_per_node : str ) : try : logging. info ( f""Using nproc_per_node={nproc_per_node}."" ) return int ( nproc_per_node ) except ValueError : if : num_proc = os. cpu_count ( ) device_type = ""cpu"" elif nproc_per_node == ""gpu"" : if not torch. cuda. is_available ( ) : raise ValueError ( ""Cuda is not available."" ) device_type = ""gpu"" num_proc = torch. cuda. device_count ( ) elif nproc_per_node == ""auto"" : if torch. cuda. is_available ( ) : num_proc = torch. cuda. device_count ( ) device_type = ""gpu"" else : num_proc = os. cpu_count ( ) device_type = ""cpu"" else : raise ValueError ( f""Unsupported nproc_per_node value: {nproc_per_node}"" ) log. info ( f""Using nproc_per_node={nproc_per_node},"" f"" seting to {num_proc} since the instance "" f""has {os.cpu_count()} {device_type}"" ) return num_proc",False,nproc_per_node == 'cpu',nproc_per_node == 'auto',0.6565909385681152
1519,"def change_status ( self, enabled_accounts, status_message ) : if not self. interface : try : self. interface = Gio. DBusProxy. new_for_bus_sync ( Gio. BusType. SESSION, Gio. DBusProxyFlags. NONE, None, ""org.gajim.dbus"", ""/org/gajim/dbus/RemoteObject"", ""org.gajim.dbus.RemoteInterface"", None, ) except GLib. Error : self. interface = None if self. interface : try : for account in self. interface. list_accounts ( ) : status = self. interface. get_status ( ""(s)"", account ) if : continue if status in self. statuses : self. interface. change_status ( ""(sss)"", status, status_message, account ) except GLib. Error : self. interface = None",False,enabled_accounts != [] and account not in enabled_accounts,enabled_accounts,0.65614253282547
1520,"def dot ( self, other, sparse = True ) : other_shape = other. shape try : other = naked ( other ) except TypeError : return NotImplemented if not sparse : a = self. toarray ( ) if : other = other. toarray ( ). reshape ( other_shape ) x = a. dot ( other ) else : if len ( other_shape ) == 1 : x = self. spmatrix. dot ( other. T ) else : x = self. spmatrix. dot ( other ) if issparse ( x ) : if x. shape == ( 1, 1 ) : return x. toarray ( ) [ 0, 0 ] shape = ( x.
shape [ 1 ], ) return SparseNDArray ( x, shape = shape ) return get_array_module ( x ). asarray ( x )",False,issparse(other),len(other_shape) == 2,0.6557013988494873 1521,"def test_statvfs_result_pickle ( self ) : result = os. statvfs ( self. fname ) for proto in range ( pickle. HIGHEST_PROTOCOL + 1 ) : p = pickle. dumps ( result, proto ) self. assertIn ( b""statvfs_result"", p ) if : self. assertIn ( b""cos\nstatvfs_result\n"", p ) unpickled = pickle. loads ( p ) self. assertEqual ( result, unpickled )",False,proto < 4,b'cos' in p,0.6694612503051758 1522,"def __new__ ( mcs, name, bases, attrs, ** kw ) : if name in ( ""Event"", ) : return super ( ). __new__ ( mcs, name, bases, attrs, ** kw ) if not ( name. startswith ( ""Evt"" ) or name. startswith ( ""_Evt"" ) or name. startswith ( ""_Msg"" ) ) : raise ValueError ( 'Event class names must begin with ""Evt (%s)""' % name ) if ""__doc__"" not in attrs : raise ValueError ( ""Event classes must have a docstring"" ) props = set ( ) for base in bases : if hasattr ( base, ""_props"" ) : props. update ( base. _props ) newattrs = { ""_internal"" : False } for k, v in attrs. items ( ) : if k [ 0 ] == ""_"" : newattrs [ k ] = v continue if : raise ValueError ( ""Event class %s duplicates property %s defined in superclass"" % ( mcs, k ) ) props. add ( k ) newattrs [ k ] = docstr ( v ) newattrs [ ""_props"" ] = props newattrs [ ""_props_sorted"" ] = sorted ( props ) if name [ 0 ] == ""_"" : newattrs [ ""_internal"" ] = True name = name [ 1 : ] newattrs [ ""event_name"" ] = _rprop ( name ) return super ( ). __new__ ( mcs, name, bases, newattrs, ** kw )",False,k in props,newattrs.has_key(k),0.6743497252464294 1523,"def htmlify ( path, text ) : fname = os. path. basename ( path ) if any ( ( fnmatch. fnmatchcase ( fname, p ) for p in _patterns ) ) : sql = ""SELECT files.id FROM files WHERE path =? LIMIT 1"" row = _conn. execute ( sql, ( path, ) ). fetchone ( ) if : return ClangHtmlifier ( _tree, _conn, path, text, row [ 0 ] ) return None",False,row,row[0],0.6894035935401917 1524,"def on_button_press ( self, target, event, user_data ) : if event. button == 3 : if not event. get_state ( ) & Gdk. ModifierType. SHIFT_MASK : if : return True menu = mk_terminal_context_menu ( self. terminal, self. get_window ( ), self. get_settings ( ), TerminalContextMenuCallbacks ( self. terminal, self. get_window ( ), self. get_settings ( ), self. get_root_box ( ). get_notebook ( ), ), ) menu. connect ( ""hide"", MenuHideCallback ( self. get_window ( ) ). on_hide ) HidePrevention ( self. get_window ( ) ). prevent ( ) try : menu. popup_at_pointer ( event ) except AttributeError : menu. popup ( None, None, None, None, event. button, event. time ) self. terminal. grab_focus ( ) return True self. terminal. grab_focus ( ) return False",False,"Vte.Terminal.do_button_press_event(self.terminal, event)",self.terminal.get_context(),0.6471275091171265 1525,"def validate ( self, name, object ) : if not isinstance ( object, dict ) : yield ""%s is not sourced properties (not a dict)"" % ( name, ) return for k, v in iteritems ( object ) : if : yield ""%s property name %r is not unicode"" % ( name, k ) if not isinstance ( v, tuple ) or len ( v )!= 2 : yield ""%s property value for '%s' is not a 2-tuple"" % ( name, k ) return propval, propsrc = v if not isinstance ( propsrc, text_type ) : yield ""%s[%s] source %r is not unicode"" % ( name, k, propsrc ) try : json. 
loads ( bytes2NativeString ( propval ) ) except ValueError : yield ""%s[%r] value is not JSON-able"" % ( name, k )",False,"not isinstance(k, text_type)","not isinstance(v, text_type)",0.6480089426040649 1526,"def query ( self, qtype, value ) : url = ""https://whois.arin.net/rest/"" if qtype == ""domain"" : url += ""pocs;domain=@"" + value try : if : fname, lname = value. split ( "" "", 1 ) if fname. endswith ( "","" ) : t = fname fname = lname lname = t url += ""pocs;first="" + fname + "";last="" + lname except Exception as e : self. sf. debug ( ""Couldn't process name: "" + value + "" ("" + str ( e ) + "")"" ) return None if qtype == ""contact"" : url = value res = self. fetchRir ( url ) if res [ ""content"" ] is None : self. sf. debug ( ""No info found/available for "" + value + "" at ARIN."" ) return None try : j = json. loads ( res [ ""content"" ] ) return j except Exception as e : self. sf. debug ( f""Error processing JSON response: {e}"" ) return None",False,qtype == 'name',qtype == 'string',0.6622380018234253 1527,"def _parse_display ( display ) : """"""Parse an X11 display value"""""" try : host, dpynum = display. rsplit ( "":"", 1 ) if : host = host [ 1 : - 1 ] idx = dpynum. find ( ""."" ) if idx >= 0 : screen = int ( dpynum [ idx + 1 : ] ) dpynum = dpynum [ : idx ] else : screen = 0 except ( ValueError, UnicodeEncodeError ) : raise ValueError ( ""Invalid X11 display"" ) from None return host, dpynum, screen",False,host.startswith('[') and host.endswith(']'),host.startswith('.'),0.6479363441467285 1528,"def __init__ ( self, selectable, name = None ) : baseselectable = selectable while isinstance ( baseselectable, Alias ) : baseselectable = baseselectable. element self. original = baseselectable self. supports_execution = baseselectable. supports_execution if self. supports_execution : self. _execution_options = baseselectable. _execution_options self. element = selectable if name is None : if : name = getattr ( self. original, ""name"", None ) name = _anonymous_label ( ""%%(%d %s)s"" % ( id ( self ), name or ""anon"" ) ) self. name = name",False,self.original.named_with_column,self.original.name is not None,0.650634229183197 1529,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if ftype == TType. STRUCT : self. success = SyncChunk ( ) self. success. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 1 : if ftype == TType. STRUCT : self. userException = evernote. edam. error. ttypes. EDAMUserException ( ) self. userException. read ( iprot ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRUCT : self. systemException = evernote. edam. error. ttypes. EDAMSystemException ( ) self. systemException. read ( iprot ) else : buffer = self. buffer nonzero_buffer = False if len ( params ) > 1 : offset = 0 for p in params : sz = p. numel ( ) if : buffer [ offset : offset + sz ]. copy_ ( p. grad. data. view ( - 1 ) ) nonzero_buffer = True else : buffer [ offset : offset + sz ]. zero_ ( ) offset += sz else : p = params [ 0 ] if : buffer = p. grad. data nonzero_buffer = True elif p. numel ( ) <= self. buffer. numel ( ) : buffer = buffer [ : p. numel ( ) ] buffer. 
zero_ ( ) else : buffer = torch. zeros_like ( p ) if nonzero_buffer : buffer. div_ ( self. world_size ) distributed_utils. all_reduce ( buffer, self. process_group ) offset = 0 for p in params : sz = p. numel ( ) if : p. grad. data. copy_ ( buffer [ offset : offset + sz ]. view_as ( p ) ) else : p. grad = buffer [ offset : offset + sz ]. view_as ( p ). clone ( ) offset += sz",True,p.grad is not None,p.grad is not None,0.6614325642585754 1531,"def decode_pickle ( sample, mode, seg_num, seglen, short_size, target_size, img_mean, img_std ) : pickle_path = sample [ 0 ] try : if python_ver < ( 3, 0 ) : data_loaded = pickle. load ( open ( pickle_path, ""rb"" ) ) else : data_loaded = pickle. load ( open ( pickle_path, ""rb"" ), encoding = ""bytes"" ) vid, label, frames = data_loaded if : logger. error ( ""{} frame length {} less than 1."". format ( pickle_path, len ( frames ) ) ) return None, None except : logger. info ( ""Error when loading {}"". format ( pickle_path ) ) return None, None if mode == ""train"" or mode == ""valid"" or mode == ""test"" : ret_label = label elif mode == ""infer"" : ret_label = vid imgs = video_loader ( frames, seg_num, seglen, mode ) return ( imgs_transform ( imgs, mode, seg_num, seglen, short_size, target_size, img_mean, img_std ), ret_label, )",False,len(frames) < 1,len(frames) > 1,0.6532013416290283 1532,"def fromutc ( self, dt ) : ""datetime in UTC -> datetime in local time."" if not isinstance ( dt, datetime ) : raise TypeError ( ""fromutc() requires a datetime argument"" ) if dt. tzinfo is not self : raise ValueError ( ""dt.tzinfo is not self"" ) dtoff = dt. utcoffset ( ) if dtoff is None : raise ValueError ( ""fromutc() requires a non-None utcoffset() "" ""result"" ) dtdst = dt. dst ( ) if : raise ValueError ( ""fromutc() requires a non-None dst() result"" ) delta = dtoff - dtdst if delta : dt += delta dtdst = dt. dst ( ) if : raise ValueError ( ""fromutc(): dt.dst gave inconsistent "" ""results; cannot convert"" ) if dtdst : return dt + dtdst else : return dt",True,dtdst is None,dtdst is None,0.6813958883285522 1533,"def _matches_metadata ( *, pattern : filters. MetaFilter, content : Mapping [ str, str ], kwargs : MutableMapping [ str, Any ], cause : causation. ResourceCause, ) -> bool : for key, value in pattern. items ( ) : if value is filters. MetaFilterToken. ABSENT and key not in content : continue elif : continue elif value is None and key in content : continue elif callable ( value ) : if not kwargs : kwargs. update ( invocation. build_kwargs ( cause = cause ) ) if value ( content. get ( key, None ), ** kwargs ) : continue else : return False elif key not in content : return False elif value!= content [ key ] : return False else : continue return True",False,value is filters.MetaFilterToken.PRESENT and key in content,"isinstance(content, str)",0.6558524370193481 1534,"def __str__ ( self ) : t = "" "" if self. _name!= ""root"" : r = f""{t * (self._level-1)}{self._name}:\n"" else : r = """" level = self. _level for i, ( k, v ) in enumerate ( self. _pointer. items ( ) ) : if : r += f""{t * (self._level)}{v}\n"" self. _level += 1 else : r += f""{t * (self._level)}{k}: {v} ({type(v).__name__})\n"" self. _level = level return r [ : - 1 ]",False,"isinstance(v, Config)",i % 2,0.6512355208396912 1535,"def _add_communication_type ( apps, schema_editor, communication_type ) : Worker = apps. get_model ( ""orchestra"", ""Worker"" ) CommunicationPreference = apps. get_model ( ""orchestra"", ""CommunicationPreference"" ) for worker in Worker. objects. 
all ( ) : ( communication_preference, created, ) = CommunicationPreference. objects. get_or_create ( worker = worker, communication_type = communication_type ) if : communication_preference. methods. slack = True communication_preference. methods. email = True communication_preference. save ( )",False,created,communication_type,0.6908230781555176 1536,"def _graph_info ( nn_layers ) : blob_dst = dict ( ) blob_src = dict ( ) for i, layer in enumerate ( nn_layers ) : for inp in layer. input : if : blob_dst [ inp ]. append ( i ) else : blob_dst [ inp ] = [ i ] for out in layer. output : if out in blob_src : raise ValueError ( ""Blob %s has been generated by more than 1 layers"" % ( out ) ) blob_src [ out ] = i return blob_dst, blob_src",True,inp in blob_dst,inp in blob_dst,0.6704934239387512 1537,"def server_udp_post_decrypt ( self, buf ) : uid = buf [ - 8 : - 4 ] if uid in self. server_info. users : user_key = self. hashfunc ( self. server_info. users [ uid ] ). digest ( ) else : uid = None if : user_key = self. server_info. key else : user_key = self. server_info. recv_iv if hmac. new ( user_key, buf [ : - 4 ], self. hashfunc ). digest ( ) [ : 4 ]!= buf [ - 4 : ] : return ( b"""", None ) return ( buf [ : - 8 ], uid )",False,not self.server_info.users,self.server_info.key,0.6496450901031494 1538,"def get_therapy_sessions_to_invoice ( patient, company ) : therapy_sessions_to_invoice = [ ] therapy_plans = frappe. db. get_all ( ""Therapy Plan"", { ""therapy_plan_template"" : ( ""!="", """" ) } ) therapy_plans_created_from_template = [ ] for entry in therapy_plans : therapy_plans_created_from_template. append ( entry. name ) therapy_sessions = frappe. get_list ( ""Therapy Session"", fields = ""*"", filters = { ""patient"" : patient. name, ""invoiced"" : 0, ""company"" : company, ""therapy_plan"" : ( ""not in"", therapy_plans_created_from_template ), }, ) for therapy in therapy_sessions : if : if therapy. therapy_type and frappe. db. get_value ( ""Therapy Type"", therapy. therapy_type, ""is_billable"" ) : therapy_sessions_to_invoice. append ( { ""reference_type"" : ""Therapy Session"", ""reference_name"" : therapy. name,",False,not therapy.appointment,therapy.therapy_id and therapy.therapy_id,0.6551961898803711 1539,"def cb_import_data_from_elem ( self, elem ) : alias = elem. get ( ""alias"" ) symbol = elem. get ( ""symbol"" ) module = elem. get ( ""module"" ) if symbol : if symbol == ""*"" : name = module detail = ""use %s"" % module elif : name = module detail = ""use %s qw(:)"" % module else : name = ""::"". join ( [ module, symbol ] ) detail = ""use %s qw(%s)"" % ( module, symbol ) else : name = module detail = ""require %s"" % module return { ""name"" : name, ""detail"" : detail }",False,symbol == '**',alias == '__tag__',0.6705427169799805 1540,"def parseNode ( self, node ) : for child in node. childNodes : if child. nodeType == ELEMENT_NODE : if : try : self. parseMenuname ( child. childNodes [ 0 ]. nodeValue, child. getAttribute ( ""show_empty"" ) or ""false"", child. getAttribute ( ""inline"" ) or ""false"", child. getAttribute ( ""inline_limit"" ) or 4, child. getAttribute ( ""inline_header"" ) or ""true"", child. getAttribute ( ""inline_alias"" ) or ""false"", ) except IndexError : raise ValidationError ( ""Menuname cannot be empty"", """" ) elif child. tagName == ""Separator"" : self. parseSeparator ( ) elif child. tagName == ""Filename"" : try : self. parseFilename ( child. childNodes [ 0 ]. 
nodeValue ) except IndexError : raise ValidationError ( ""Filename cannot be empty"", """" ) elif child. tagName == ""Merge"" : self. parseMerge ( child. getAttribute (",False,child.tagName == 'Menuname',child.nodeType == NODE_ELEMENT_NODE,0.6599260568618774 1541,"def _connect_job_io ( self, imported_job, job_attrs, _find_hda, _find_hdca, _find_dce ) : for output_key in job_attrs [ ""output_datasets"" ] : output_hda = _find_hda ( output_key ) if output_hda : if not self. dataset_state_serialized : output_hda. state = imported_job. state imported_job. add_output_dataset ( output_hda. name, output_hda ) if ""input_mapping"" in job_attrs : for input_name, input_key in job_attrs [ ""input_mapping"" ]. items ( ) : input_hda = _find_hda ( input_key ) if : imported_job. add_input_dataset ( input_name, input_hda )",True,input_hda,input_hda,0.6629029512405396 1542,"def _init ( self ) : self. row_length = None self. y_direction = False good_x = [ ] good_y = [ ] for vert in self. bmesh. verts : if : for loop in vert. link_loops : other_vert = loop. edge. other_vert ( vert ) if other_vert. co. x > vert. co. x : good_x. append ( loop ) if other_vert. co. y > vert. co. y : good_y. append ( loop ) if good_x : loop = good_x [ 0 ] elif good_y : loop = good_y [ 0 ] self. y_direction = True else : raise Exception ( ""Could not find a vertex to start from"" ) vert = loop. vert self. current_loop = loop self. current_vert = vert self. start_loop = loop self. start_vert = vert",False,len(vert.link_edges) == 2,"hasattr(vert, 'link_loops')",0.6582843661308289 1543,"def update ( self, * event ) : if event : self. _last_keypress = time. time ( ) self. grammar = grammar = self. grammarbox. get ( ""1.0"", ""end"" ) normalized_grammar = self. normalize_grammar ( grammar ) if normalized_grammar == self. normalized_grammar : return else : self. normalized_grammar = normalized_grammar if self. _history_index < len ( self. _history ) - 1 : self. grammarlabel [ ""text"" ] = ""Grammar:"" self. _syntax_highlight_grammar ( grammar ) try : if : rules = [ RegexpChunkRule. parse ( line ) for line in normalized_grammar. split ( ""\n"" ) ] else : rules = [ ] except ValueError as e : self. _grammarcheck ( grammar ) self. chunker = None return self. chunker = RegexpChunkParser ( rules ) self. grammarbox. tag_remove ( ""error"", ""1.0"", ""end"" ) self. grammar_changed = time. time ( ) if self. _showing_trace : self. show_trace ( ) else : self. _highlight_devset ( ) if not self. _eval_demon_running : self. _eval_demon ( )",False,normalized_grammar,self._grammar_in_grammar,0.6662144660949707 1544,"def detect_source_destination ( self, message : Message ) : participants = self. simulator_config. participants source = None if len ( participants ) < 2 else participants [ 0 ] destination = self. simulator_config. broadcast_part if message. participant : source = message. participant dst_address_label = next ( ( lbl for lbl in message. message_type if lbl. field_type and lbl. field_type. function == FieldType. Function. DST_ADDRESS ), None, ) if : start, end = message. get_label_range ( dst_address_label, view = 1, decode = True ) dst_address = message. decoded_hex_str [ start : end ] dst = next ( ( p for p in participants if p. address_hex == dst_address ), None ) if dst is not None and dst!= source : destination = dst return source, destination",True,dst_address_label,dst_address_label,0.6548735499382019 1545,"def _get_orientation ( self ) : if self. 
state : rotation = [ 0 ] * 9 inclination = [ 0 ] * 9 gravity = [ ] geomagnetic = [ ] gravity = self. listener_a. values geomagnetic = self. listener_m. values if gravity [ 0 ] is not None and geomagnetic [ 0 ] is not None : ff_state = SensorManager. getRotationMatrix ( rotation, inclination, gravity, geomagnetic ) if : values = [ 0, 0, 0 ] values = SensorManager. getOrientation ( rotation, values ) return values",True,ff_state,ff_state,0.6766324043273926 1546,"def _get_links ( self, link_to_self ) : links = { } for service, link_name in self. links : for container in service. containers ( ) : links [ link_name or service. name ] = container. name links [ container. name ] = container. name links [ container. name_without_project ] = container. name if link_to_self : for container in self. containers ( ) : links [ self. name ] = container. name links [ container. name ] = container. name links [ container. name_without_project ] = container. name for external_link in self. options. get ( ""external_links"" ) or [ ] : if : link_name = external_link else : external_link, link_name = external_link. split ( "":"" ) links [ link_name ] = external_link return [ ( alias, container_name ) for ( container_name, alias ) in links. items ( ) ]",False,':' not in external_link,"isinstance(external_link, str)",0.655548095703125 1547,"def set_mypy_args ( self, mypy_args = None ) : """"""Set MyPy arguments."""""" if mypy_args is None : self. mypy_args = None else : self. mypy_errs = [ ] self. mypy_args = list ( mypy_args ) if not any ( arg. startswith ( ""--python-version"" ) for arg in mypy_args ) : self. mypy_args += [ ""--python-version"", ""."". join ( str ( v ) for v in get_target_info_len2 ( self. comp. target, mode = ""nearest"" ) ), ] if logger. verbose : for arg in verbose_mypy_args : if : self. mypy_args. append ( arg ) logger. log ( ""MyPy args:"", self. mypy_args )",False,arg not in self.mypy_args,arg,0.6659483313560486 1548,"def __init__ ( self ) : global pymongo import pymongo try : self. m = pymongo. MongoClient ( mongodb_host ) if : self. m. admin. authenticate ( mongodb_user, mongodb_pwd ) self. db = self. m. admin except Exception as e : raise Exception ( ""Cannot interface with MongoDB server: %s"" % e ) self. name = ""mongodb queues"" self. nick = ( ""ar"", ""aw"", ""qt"", ""qw"" ) self. vars = ( ""ar"", ""aw"", ""qt"", ""qw"" ) self. type = ""d"" self. width = 5 self. scale = 2 self. lastVal = { }",False,mongodb_pwd,mongo_user,0.6775995492935181 1549,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : self. set_content ( d. getPrefixedString ( ) ) continue if tt == 16 : self. set_statuscode ( d. getVarInt32 ( ) ) continue if tt == 27 : self. add_header ( ). TryMerge ( d ) continue if : self. set_contentwastruncated ( d. getBoolean ( ) ) continue if tt == 56 : self. set_externalbytessent ( d. getVarInt64 ( ) ) continue if tt == 64 : self. set_externalbytesreceived ( d. getVarInt64 ( ) ) continue if tt == 74 : self. set_finalurl ( d. getPrefixedString ( ) ) continue if tt == 80 : self. set_apicpumilliseconds ( d. getVarInt64 ( ) ) continue if tt == 88 : self. set_apibytessent ( d. getVarInt64 ( ) ) continue if tt == 96 : self. set_apibytesreceived ( d. getVarInt64 ( ) ) continue if tt == 0 : ",False,tt == 48,tt == 32,0.6848076581954956 1550,"def _1_0_cloud_ips_cip_jsjc5_map ( self, method, url, body, headers ) : if method == ""POST"" : body = json. loads ( body ) if : return self. test_response ( httplib. 
ACCEPTED, """" ) else : data = '{""error_name"":""bad destination"", ""errors"": [""Bad destination""]}' return self. test_response ( httplib. BAD_REQUEST, data )",False,'destination' in body,body.find('<') > -1,0.6726816892623901 1551,"def execute ( self ) : self. _cache_factory. get_write_cache ( ) with self. invalidated ( self. context. targets ( ) ) as invalidation : for vt in invalidation. all_vts : if : if self. get_options ( ). regular_file_in_results_dir : regular_file_path = os. path. join ( vt. results_dir, DUMMY_FILE_NAME ) else : with temporary_dir ( cleanup = False ) as tmpdir : regular_file_path = os. path. join ( tmpdir, DUMMY_FILE_NAME ) with safe_open ( regular_file_path, mode = ""wb"" ) as fp : fp. write ( DUMMY_FILE_CONTENT ) else : with temporary_dir ( ) as tmpdir : regular_file_path = os. path. join ( tmpdir, DUMMY_FILE_NAME ) symlink_y = os. path. join ( vt. results_dir, SYMLINK_NAME ) os. symlink ( regular_file_path, symlink_y ) return invalidation. all_vts",False,self.get_options().regular_file,self.get_options().create_out_of_file,0.6490850448608398 1552,"def glyph_to_bzs ( g ) : bzs = [ ] for i in range ( g. numberOfContours ) : beg = 0 if i == 0 else g. endPtsOfContours [ i - 1 ] + 1 end = g. endPtsOfContours [ i ] + 1 n = end - beg pts = g. coordinates [ beg : end ] flags = g. flags [ beg : end ] bz = [ ] for j in range ( n ) : x1, y1 = pts [ ( j + 1 ) % n ] if flags [ j ] and flags [ ( j + 1 ) % n ] : bz. append ( ( pts [ j ], ( x1, y1 ) ) ) elif : if flags [ j - 1 ] : x0, y0 = pts [ j - 1 ] else : x0, y0 = lerppt ( 0.5, pts [ j - 1 ], pts [ j ] ) if not flags [ ( j + 1 ) % n ] : x1, y1 = lerppt ( 0.5, ( x1, y1 ), pts [ j ] ) if pts [ j ] == ( x0, y0 ) or pts [ j ] == ( x1, y1 ) : bz. append ( ( ( x0, y0 ), ( x1, y1 ) ) ) = 0,0.6621302366256714 1553,"def test_reductions ( expr, rdd ) : result = compute ( expr, rdd ) expected = compute ( expr, data ) if not result == expected : print ( result ) print ( expected ) if : assert abs ( result - expected ) < 0.001 else : assert result == expected",False,"isinstance(result, float)",result > expected,0.6520752906799316 1554,"def run_step ( self ) -> None : to_delete = set ( self. watches ) for path in _find_watchdog_paths ( self. extra_files, self. exclude_patterns ) : if : try : self. watches [ path ] = self. observer. schedule ( self. event_handler, path, recursive = True ) except OSError : self. watches [ path ] = None to_delete. discard ( path ) for path in to_delete : watch = self. watches. pop ( path, None ) if watch is not None : self. observer. unschedule ( watch )",False,path not in self.watches,path in self.watchdog_list,0.6600096225738525 1555,"def launch_repl ( self, targets ) : with temporary_dir ( ) as temp_dir : node_paths = self. context. products. get_data ( NodePaths ) package_json_path = os. path. join ( temp_dir, ""package.json"" ) package = { ""name"" : self. SYNTHETIC_NODE_TARGET_NAME, ""version"" : ""0.0.0"", ""dependencies"" : { target. package_name : node_paths. node_path ( target ) if self. is_node_module ( target ) else target. version for target in targets }, } with open ( package_json_path, ""w"" ) as fp : json. dump ( package, fp, indent = 2 ) args = self. get_passthru_args ( ) node_repl = self. node_distribution. node_command ( args = args, node_paths = node_paths. all_node_paths if node_paths else None ) with pushd ( temp_dir ) : result, command = self. install_module ( package_manager = self. node_distribution. 
get_package_manager ( package_manager = PACKAGE_MANAGER_NPM ), workunit_name = self. SYNTHETIC_NODE_TARGET_NAME, component. Component. __init__ ( self, ""PreferencesManager"" ) self. config = deluge. configmanager. ConfigManager ( ""core.conf"", DEFAULT_PREFS ) if ""proxies"" in self. config : log. warning ( 'Updating config file for proxy, using ""peer"" values to fill new ""proxy"" setting' ) self. config [ ""proxy"" ]. update ( self. config [ ""proxies"" ] [ ""peer"" ] ) log. warning ( ""New proxy config is: %s"", self. config [ ""proxy"" ] ) del self. config [ ""proxies"" ] if ""i2p_proxy"" in self. config and self. config [ ""i2p_proxy"" ] [ ""hostname"" ] : self. config [ ""proxy"" ]. update ( self. config [ ""i2p_proxy"" ] ) self. config [ ""proxy"" ] [ ""type"" ] = 6 del self. config [ ""i2p_proxy"" ] if ""anonymous_mode"" in self. config : self. config [ ""proxy"" ] [ ""anonymous_mode"" ] = self. config [ ""anonymous_mode"" ] del self. config [ ""anonymous_mode"" ] if ""proxy"" in self. config : for key in DEFAULT_PREFS [ ""proxy"" ] : if : self. config [ ""proxy"" ] [ key ] = DEFAULT_PREFS [ ""proxy"" ] [ key ] self. core = component. get ( ""Core"" ) self. new_release_timer = None",False,key not in self.config['proxy'],key in self.config['proxy'],0.6520766019821167 1557,"def _send_event_data ( self, timeout_time = None, last_exception = None ) : if self. _unsent_events : self. _open ( ) self. _set_msg_timeout ( timeout_time, last_exception ) self. _handler. queue_message ( * self. _unsent_events ) self. _handler. wait ( ) self. _unsent_events = self. _handler. pending_messages if : if self. _outcome == constants. MessageSendResult. Timeout : self. _condition = OperationTimeoutError ( ""Send operation timed out"" ) if self. _condition : raise self. _condition",False,self._outcome != constants.MessageSendResult.Ok,self._outcome == constants.MessageSendResult.OK,0.6533485651016235 1558,"def get_score ( string : str, query_chars : str ) -> float : best_score : float = float ( len ( string ) ) head, tail = query_chars [ 0 ], query_chars [ 1 : ] for first_index in ( idx for idx, val in enumerate ( string ) if val == head ) : score, last_index = find_end_of_match ( string, tail, first_index ) if : best_score = score best_score = best_score * ( len ( string ) ** 0.5 ) return best_score",False,last_index and score and (score < best_score),score != None,0.6526854634284973 1559,"def _set_rl_property_value ( self, obj, key, val, path = """" ) : """"""Sets a property on obj to val, or to a sub-object within obj if key looks like ""foo.bar"" """""" if key. find ( ""."" ) >= 0 : top_key, sub_keys = key_list = key. split ( ""."", 1 ) if top_key. startswith ( ""__"" ) : raise ValueError ( ""Attempting to set unsafe property name %s"" % top_key ) if isinstance ( obj, dict ) : sub_obj = obj [ top_key ] else : sub_obj = obj. __dict__ [ top_key ] return self. _set_rl_property_value ( sub_obj, sub_keys, val, ""%s.%s"" % ( path, top_key ) ) else : key, val = self. _parse_type ( key, val ) if : raise ValueError ( ""Attempting to set unsafe property name %s"" % key ) if isinstance ( obj, dict ) : obj [ key ] = val else : obj. __dict__ [ key ] = val",False,key.startswith('__'),val is None,0.6516975164413452 1560,"def search ( self, query ) : results = [ ] search = { ""q"" : query } response = requests. session ( ). get ( self. _base_url + ""search"", params = search ) content = parse ( response. content, namespaceHTMLElements = False ) for result in content. 
findall ( "".//*[@class='package-snippet']"" ) : name = result. find ( ""h3/*[@class='package-snippet__name']"" ). text version = result. find ( ""h3/*[@class='package-snippet__version']"" ). text if : continue description = result. find ( ""p[@class='package-snippet__description']"" ). text if not description : description = """" try : result = Package ( name, version, description ) result. description = to_str ( description. strip ( ) ) results. append ( result ) except ParseVersionError : self. _log ( 'Unable to parse version ""{}"" for the {} package, skipping'. format ( version, name ), level = ""debug"", ) return results",False,not name or not version,version is None,0.65926593542099 1561,"def startElement ( self, name, attrs ) : if name == ""regexp"" : self. _regexp = sanitizeStr ( attrs. get ( ""value"" ) ) _ = re. match ( r""\A[A-Za-z0-9]+"", self. _regexp ) if : self. _match = re. search ( self. _regexp, self. _banner, re. I | re. M ) else : self. _match = None if name == ""info"" and self. _match : self. _feedInfo ( ""type"", attrs. get ( ""type"" ) ) self. _feedInfo ( ""distrib"", attrs. get ( ""distrib"" ) ) self. _feedInfo ( ""release"", attrs. get ( ""release"" ) ) self. _feedInfo ( ""codename"", attrs. get ( ""codename"" ) ) self. _dbmsVersion = sanitizeStr ( attrs. get ( ""dbms_version"" ) ) self. _techVersion = sanitizeStr ( attrs. get ( ""tech_version"" ) ) self. _sp = sanitizeStr ( attrs. get ( ""sp"" ) ) if self. _dbmsVersion and self. _dbmsVersion. isdigit ( ) : self. _feedInfo ( ""dbmsVersion"", self. _match. group ( int ( self. _dbmsVersion ) ) ) if self. _techVersion and self. _techVersion. isdigit ( ) : self. _feedInfo ( ""technology"", ""%s %s"" % ( attrs. get ( """,False,_ and self._banner and (_.group(0).lower() in self._banner.lower()) or not _,self._banner,0.65169358253479 1562,def _to_pbs ( self ) : pbs = [ ] if self. _start : if self. _start_incl : op = datastore_pb. Query_Filter. GREATER_THAN_OR_EQUAL else : op = datastore_pb. Query_Filter. GREATER_THAN pb = datastore_pb. Query_Filter ( ) pb. set_op ( op ) pb. add_property ( ). CopyFrom ( self. _start ) pbs. append ( pb ) if self. _end : if : op = datastore_pb. Query_Filter. LESS_THAN_OR_EQUAL else : op = datastore_pb. Query_Filter. LESS_THAN pb = datastore_pb. Query_Filter ( ) pb. set_op ( op ) pb. add_property ( ). CopyFrom ( self. _end ) pbs. append ( pb ) return pbs,False,self._end_incl,self._start_incl,0.6690181493759155 1563,"def _get_matched_layout ( command ) : cmd = command. script. split ( "" "" ) for source_layout in source_layouts : is_all_match = True for cmd_part in cmd : if : is_all_match = False break if is_all_match : return source_layout",False,not all([ch in source_layout or ch in '-_' for ch in cmd_part]),cmd_part.find(' ') >= 0,0.6501979827880859 1564,"def process_bind_param ( self, value, dialect ) : """"""Encrypt a value on the way in."""""" if value is not None : self. _update_key ( ) try : value = self. underlying_type. process_bind_param ( value, dialect ) except AttributeError : type_ = self. underlying_type. python_type if issubclass ( type_, bool ) : value = ""true"" if value else ""false"" elif : value = value. isoformat ( ) elif issubclass ( type_, JSONType ) : value = six. text_type ( json. dumps ( value ) ) return self. engine. encrypt ( value )",False,"issubclass(type_, (datetime.date, datetime.time))","issubclass(type_, datetime)",0.6534264087677002 1565,"def link_pantsrefs ( soups, precomputed ) : """"""Transorm soups: becomes """""" for ( page, soup ) in soups. 
items ( ) : for a in soup. find_all ( ""a"" ) : if not a. has_attr ( ""pantsref"" ) : continue pantsref = a [ ""pantsref"" ] if : raise TaskError ( f'Page {page} has pantsref ""{pantsref}"" and I cannot find pantsmark for it' ) a [ ""href"" ] = rel_href ( page, precomputed. pantsref [ pantsref ] )",False,pantsref not in precomputed.pantsref,pantsref in precomputed.pantsref,0.6592947244644165 1566,"def html_before_write ( self, book, chapter ) : from lxml import etree, html from pygments import highlight from pygments. formatters import HtmlFormatter from ebooklib import epub try : tree = parse_html_string ( chapter. content ) except : return root = tree. getroottree ( ) had_source = False if len ( root. find ( ""body"" ) )!= 0 : body = tree. find ( ""body"" ) for source in body. xpath ( '//pre[contains(@class,""source-"")]' ) : css_class = source. get ( ""class"" ) source_text = ( source. text or """" ) + """". join ( [ html. tostring ( child ) for child in source. iterchildren ( ) ] ) if ""source-python"" in css_class : from pygments. lexers import PythonLexer _text = highlight ( source_text, PythonLexer ( ), HtmlFormatter ( ) ) if : from pygments. lexers import CssLexer _text = highlight ( source_text, CssLexer ( ), HtmlFormatter ( ) ) _parent = source. getparent ( ) _parent. replace ( source, etree. XML ( _text ) ) had_source = True if had_source : chapter. add_link ( href = ""style/code.css"", rel = ""stylesheet"", type = ""text/css""",False,'source-css' in css_class,'css-lexer' in css_class,0.652148962020874 1567,"def command ( self ) : session_id = self. session. ui. html_variables. get ( ""http_session"" ) if self. data. get ( ""_method"", """" ) == ""POST"" : if : with GLOBAL_LOGIN_LOCK : return self. _do_login ( self. data. get ( ""user"", [ None ] ) [ 0 ], self. data [ ""pass"" ] [ 0 ], redirect = True, ) elif not self. data : password = self. session. ui. get_password ( _ ( ""Your password: "" ) ) return self. _do_login ( None, password, load_index = True ) elif ( session_id in SESSION_CACHE and SESSION_CACHE [ session_id ]. auth and ""_method"" in self. data ) : self. _do_redirect ( ) return self. _success ( _ ( ""Please log in"" ) )",False,'pass' in self.data,self.data,0.6540184020996094 1568,"def generate_sitemaps ( filename ) : rows = ( line. strip ( ). split ( ""\t"" ) for line in open ( filename ) ) for sortkey, chunk in itertools. groupby ( rows, lambda row : row [ 0 ] ) : things = [ ] _chunk = list ( chunk ) for segment in _chunk : sortkey = segment. pop ( 0 ) last_modified = segment. pop ( - 1 ) path = """". join ( segment ) things. append ( web. storage ( path = path, last_modified = last_modified ) ) if : write ( ""sitemaps/sitemap_%s.xml.gz"" % sortkey, sitemap ( things ) )",True,things,things,0.6706187725067139 1569,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : self. set_query_kind ( d. getPrefixedString ( ) ) continue if tt == 18 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. mutable_query_ancestor ( ). TryMerge ( tmp ) continue if tt == 25 : self. set_query_thiscursor ( d. get64 ( ) ) continue if : self. set_query_nextcursor ( d. get64 ( ) ) continue if tt == 40 : self. add_get_successful_fetch ( d. getBoolean ( ) ) continue if tt == 50 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. add_keys_read ( ). 
TryMerge ( tmp ) continue if tt == 58 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length",False,tt == 33,tt == 33554432,0.6741406917572021 1570,"def _add_middleware ( self, conf, app ) : errorware = conf [ ""tg.errorware"" ] if errorware. get ( ""enable"", True ) and not asbool ( conf. get ( ""debug"" ) ) : reporters = [ ] if : from backlash. tracing. reporters. mail import EmailReporter reporters. append ( EmailReporter ( ** errorware ) ) if errorware. get ( ""sentry_dsn"" ) : from backlash. tracing. reporters. sentry import SentryReporter reporters. append ( SentryReporter ( ** errorware ) ) if errorware. get ( ""reporters"", [ ] ) : for reporter in errorware [ ""reporters"" ] : reporters. append ( reporter ) try : import backlash except ImportError : log. warning ( ""backlash not installed, email tracebacks won't be available"" ) else : return backlash. TraceErrorsMiddleware ( app, reporters, context_injectors = [ _turbogears_backlash_context ] ) return app",False,errorware.get('error_email'),errorware.get('mail_dsn'),0.650448203086853 1571,"def find_strings ( start_address ) : strings = [ ] import_ea = start_address while import_ea < SegEnd ( start_address ) : import_name = Name ( import_ea ) if : xref_start = import_ea xref_cur = DfirstB ( xref_start ) while xref_cur!= BADADDR : string_arg = get_arguments ( xref_cur ) if string_arg and string_arg not in strings : strings. append ( string_arg ) xref_cur = DnextB ( xref_start, xref_cur ) import_ea += 4 for function_ea in Functions ( SegByName ( "".text"" ), SegEnd ( start_address ) ) : flags = GetFunctionFlags ( function_ea ) if flags & FUNC_LIB : lib_name = GetFunctionName ( function_ea ) if len ( lib_name ) > 1 and ""cmp"" in lib_name : xref_start = function_ea xref_cur = RfirstB ( xref_start ) while xref_cur!= BADADDR : string_arg = get_arguments ( xref_cur ) if string_arg and string_arg not in strings : strings. append ( string_",False,len(import_name) > 1 and 'cmp' in import_name,len(import_ea) > 0,0.6562709808349609 1572,"def run ( self, execution_info ) -> driver_output_pb2. DriverOutput : span = 2 with self. _mlmd_connection as m : previous_output = inputs_utils. resolve_input_artifacts ( m, self. _self_output ) version = 0 if : version = ( max ( [ artifact. get_int_custom_property ( ""version"" ) for artifact in previous_output [ ""examples"" ] if artifact. get_int_custom_property ( ""span"" ) == span ] or [ - 1 ] ) + 1 ) output_example = copy. deepcopy ( execution_info. output_dict [ ""output_examples"" ] [ 0 ]. mlmd_artifact ) output_example. custom_properties [ ""span"" ]. int_value = span output_example. custom_properties [ ""version"" ]. int_value = version result = driver_output_pb2. DriverOutput ( ) result. output_artifacts [ ""output_examples"" ]. artifacts. append ( output_example ) result. exec_properties [ ""span"" ]. int_value = span result. exec_properties [ ""version"" ]. int_value = version return result",False,previous_output,has_artifacts(previous_output),0.667296290397644 1573,"def remove_dead_csv ( csv_node, lives, arg_aliases, alias_map, func_ir, typemap ) : new_df_colnames = [ ] new_out_vars = [ ] new_out_types = [ ] new_usecols = [ ] for i, col_var in enumerate ( csv_node. out_vars ) : if : new_df_colnames. append ( csv_node. df_colnames [ i ] ) new_out_vars. append ( csv_node. out_vars [ i ] ) new_out_types. append ( csv_node. out_types [ i ] ) new_usecols. append ( csv_node. usecols [ i ] ) csv_node. df_colnames = new_df_colnames csv_node. 
out_vars = new_out_vars csv_node. out_types = new_out_types csv_node. usecols = new_usecols if len ( csv_node. out_vars ) == 0 : return None return csv_node",False,col_var.name in lives,col_var.dead_csv,0.6563782691955566 1574,"def _calc_cmrc2018_f1_score ( answers, prediction ) : f1_scores = [ ] for ans in answers : ans_segs = _cn_segmentation ( ans, rm_punc = True ) prediction_segs = _cn_segmentation ( prediction, rm_punc = True ) lcs, lcs_len = _find_lcs ( ans_segs, prediction_segs ) if : f1_scores. append ( 0 ) continue precision = 1.0 * lcs_len / len ( prediction_segs ) recall = 1.0 * lcs_len / len ( ans_segs ) f1 = ( 2 * precision * recall ) / ( precision + recall ) f1_scores. append ( f1 ) return max ( f1_scores )",True,lcs_len == 0,lcs_len == 0,0.6590019464492798 1575,"def add_reversed_tensor ( i, X, reversed_X ) : if X in stop_mapping_at_tensors : return if X not in reversed_tensors : reversed_tensors [ X ] = { ""id"" : ( nid, i ), ""tensor"" : reversed_X } else : tmp = reversed_tensors [ X ] if ""tensor"" in tmp and ""tensors"" in tmp : raise Exception ( ""Wrong order, tensors already aggregated!"" ) if : tmp [ ""tensors"" ] = [ tmp [ ""tensor"" ], reversed_X ] del tmp [ ""tensor"" ] else : tmp [ ""tensors"" ]. append ( reversed_X )",False,'tensor' in tmp,'tensors' in tmp,0.6678755283355713 1576,"def eventFilter ( self, obj, event ) : if event. type ( ) == QEvent. MouseButtonPress : button = event. button ( ) if button == Qt. BackButton : self. _app. browser. back ( ) return True elif : self. _app. browser. forward ( ) return True return False",True,button == Qt.ForwardButton,button == Qt.ForwardButton,0.6688909530639648 1577,"def validate ( self, entry, field_uri = None ) : super ( ). validate ( entry, field_uri ) if entry is None : return field_uri = field_uri or self. field_uri try : path = Path ( entry ) except TypeError : self. raise_error ( entry, field_uri, ""values is expected to be path-like"" ) if self. check_exists and not path. exists ( ) : self. raise_error ( entry, field_uri, ""path does not exist"" ) else : if self. is_directory and not path. is_dir ( ) : self. raise_error ( entry, field_uri, ""is not a directory"" ) if : self. raise_error ( entry, field_uri, ""is a directory, regular file expected"" )",False,self.is_directory is False and (not path.is_file()),self.is_regular_file and (not path.is_dir()),0.6444675922393799 1578,"def host_local_traverse_tree ( data_inst, tree_node, use_missing = True, zero_as_missing = True ) : nid = 0 while True : if : return nid cur_node = tree_node [ nid ] fid, bid = cur_node. fid, cur_node. bid missing_dir = cur_node. missing_dir if use_missing and zero_as_missing : if ( data_inst. features. get_data ( fid ) == NoneType ( ) or data_inst. features. get_data ( fid, None ) is None ) : nid = ( tree_node [ nid ]. right_nodeid if missing_dir == 1 else tree_node [ nid ]. left_nodeid ) elif data_inst. features. get_data ( fid ) <= bid : nid = tree_node [ nid ]. left_nodeid else : nid = tree_node [ nid ]. right_nodeid elif data_inst. features. get_data ( fid ) == NoneType ( ) : nid = ( tree_node [ nid ]. right_nodeid if missing_dir == 1 else tree_node [ nid ]. left_nodeid <",False,tree_node[nid].is_leaf,nid >= len(tree_node),0.6573371887207031 1579,"def _filter_paths ( basename, path, is_dir, exclude ) : """""".gitignore style file filtering."""""" for item in exclude : if : continue match = path if item. startswith ( ""/"" ) else basename if fnmatch. fnmatch ( match, item. 
strip ( ""/"" ) ) : return True return False",False,item.endswith('/') and (not is_dir),is_dir,0.6449323892593384 1580,"def has_dirty_objects ( self ) : language = self. current_lang if self. page : if : dirty = self. page. has_translation ( language ) else : dirty = self. page. is_dirty ( language ) or self. page_is_pending ( self. page, language ) else : dirty = bool ( self. dirty_statics ) return dirty",False,self.dirty_statics,self.page.has_translation(language),0.6614997386932373 1581,"def get_snmp_information ( self ) : snmp_information = { } command = ""show running-config"" output = self. _send_command ( command ) snmp_config = helpers. textfsm_extractor ( self, ""snmp_config"", output ) if not snmp_config : return snmp_information snmp_information = { ""contact"" : str ( """" ), ""location"" : str ( """" ), ""community"" : { }, ""chassis_id"" : str ( """" ), } for snmp_entry in snmp_config : contact = str ( snmp_entry. get ( ""contact"", """" ) ) if contact : snmp_information [ ""contact"" ] = contact location = str ( snmp_entry. get ( ""location"", """" ) ) if location : snmp_information [ ""location"" ] = location community_name = str ( snmp_entry. get ( ""community"", """" ) ) if not community_name : continue if : snmp_information [ ""community"" ] [ community_name ] = { ""acl"" : str ( snmp_entry. get ( ""acl"", """" ) ), ""mode"" : str ( snmp_entry. get ( ""mode"", """" ). lower ( ) ), } else : acl = str ( snmp_entry. get ( ""acl"", """" ) ) if acl : snmp_information [ ""community"" ] [ community_name ] [ ""acl"" ] =",False,community_name not in snmp_information['community'].keys(),community_name in snmp_information['community'],0.6492284536361694 1582,"def docs ( self ) : proc = subprocess. Popen ( [ ""python"", self. wiki_extractor, ""--no-templates"", ""--processes"", str ( self. n_jobs ), ""--output"", ""-"", self. fawiki_dump, ], stdout = subprocess. PIPE, ) doc_pattern = re. compile ( r'' ) doc = [ ] for line in iter ( proc. stdout. readline, """" ) : line = line. strip ( ). decode ( ""utf8"" ) if : doc. append ( line ) if line == """" : del doc [ 1 ] id, url, title = doc_pattern. match ( doc [ 0 ] ). groups ( ) html = ""\n"". join ( doc [ 1 : - 1 ] ) yield { ""id"" : id, ""url"" : url, ""title"" : title, ""html"" : html, ""text"" : html } doc = [ ]",False,line,line != '',0.688806414604187 1583,"def _maybe_namespace ( cls, data : Any, *, preferred_type : Type [ ModelT ] = None, fast_types : Tuple [ Type,... ] = ( bytes, str ), isinstance : Callable = isinstance ) -> Optional [ Type [ ModelT ] ] : if data is None or isinstance ( data, fast_types ) : return None try : ns = data [ cls. _blessed_key ] [ ""ns"" ] except ( KeyError, TypeError ) : pass else : type_is_abstract = ( preferred_type is None or preferred_type is ModelT or preferred_type is Model ) try : model = registry [ ns ] except KeyError : if : raise return None else : if ( type_is_abstract or model. _options. allow_blessed_key or model. _options. polymorphic_fields ) : return model return None",False,type_is_abstract,model is None,0.6619807481765747 1584,"def modify ( cls, profile ) : if : profile. add_overlay ( svcscan_base_x86 ) else : profile. add_overlay ( svcscan_base_x64 ) version = profile. metadata ( ""version"" ) if version < 6.0 : profile. add_classes ( { ""_SERVICE_RECORD"" : _SERVICE_RECORD_LEGACY, ""_SERVICE_HEADER"" : _SERVICE_HEADER, } ) profile. add_constants ( dict ( ServiceTag = b""sErv"" ) ) elif 6.0 <= version <= 6.2 : profile. 
add_classes ( { ""_SERVICE_RECORD"" : _SERVICE_RECORD_RECENT, ""_SERVICE_HEADER"" : _SERVICE_HEADER, } ) profile. add_constants ( dict ( ServiceTag = b""serH"" ) ) if : profile. add_overlay ( _SERVICE_RECORD_VISTA_X86 ) else : profile. add_overlay ( _SERVICE_RECORD_VISTA_X64 ) elif 6.2 <= version : profile. add_classes ( { ""_SERVICE_RECORD"" : _SERVICE_RECORD_RECENT, ""_SERVICE_HEADER"" : _SERVICE_HEADER, } ) <",False,profile.metadata('arch') == 'I386',version > 6.2,0.6534208059310913 1585,"def _entries_kraken ( self, channel_name, broadcast_type, sort ) : access_token = self. _download_access_token ( channel_name ) channel_id = self. _extract_channel_id ( access_token [ ""token"" ], channel_name ) offset = 0 counter_override = None for counter in itertools. count ( 1 ) : response = self. _call_api ( ""kraken/channels/%s/videos/"" % channel_id, channel_id, ""Downloading video JSON page %s"" % ( counter_override or counter ), query = { ""offset"" : offset, ""limit"" : self. _PAGE_LIMIT, ""broadcast_type"" : broadcast_type, ""sort"" : sort, }, ) videos = response. get ( ""videos"" ) if : break for video in videos : if not isinstance ( video, dict ) : continue video_url = url_or_none ( video. get ( ""url"" ) ) if not video_url : continue yield { ""_type"" : ""url_transparent"", ""ie_key"" : TwitchVodIE. ie_key ( ), ""id"" : video. get ( ""_id"" ),",False,"not isinstance(videos, list)",videos,0.657272219657898 1586,"def process_package_tag_set ( self, elem, message, skip_actions_tags = True ) : elem_altered = False new_elem = copy. deepcopy ( elem ) for sub_index, sub_elem in enumerate ( elem ) : altered = False error_message = """" if sub_elem. tag == ""install"" : altered, new_sub_elem, error_message = self. process_install_tag_set ( elem = sub_elem, message = message, skip_actions_tags = skip_actions_tags ) elif sub_elem. tag == ""repository"" : altered, new_sub_elem, error_message = self. process_repository_tag_set ( parent_elem = elem, elem_index = sub_index, elem = sub_elem, message = message ) if error_message and error_message not in message : message += error_message if : if not self. altered : self. altered = True if not elem_altered : elem_altered = True new_elem [ sub_index ] = new_sub_elem return elem_altered, new_elem, message",False,altered,skip_actions_tags,0.6898657083511353 1587,"def on_api_command ( self, command, data ) : if command == ""select"" : if not Permissions. PLUGIN_ACTION_COMMAND_PROMPT_INTERACT. can ( ) : return flask. abort ( 403, ""Insufficient permissions"" ) if self. _prompt is None : return flask. abort ( 409, ""No active prompt"" ) choice = data [ ""choice"" ] if : return flask. abort ( 400, ""{!r} is not a valid value for choice"". format ( choice ) ) self. _answer_prompt ( choice )",False,"not isinstance(choice, int) or not self._prompt.validate_choice(choice)",not choice.valid(),0.6513665914535522 1588,"def workflow_tag_handler ( view, left, operator, right ) : if operator == ""="" : view. do_query = True view. query = view. query. filter ( StoredWorkflow. id == StoredWorkflowTagAssociation. stored_workflow_id ) tmp = right. split ( "":"" ) view. query = view. query. filter ( StoredWorkflowTagAssociation. user_tname == tmp [ 0 ] ) if : view. query = view. query. filter ( StoredWorkflowTagAssociation. user_value == tmp [ 1 ] ) else : raise GalaxyParseError ( ""Invalid comparison operator: %s"" % ( operator ) )",False,len(tmp) > 1,operator == 'delete',0.6611274480819702 1589,"def _wrap_df ( cls, op, value, index = None ) : xdf = cudf if op. 
gpu else pd axis = op. axis ndim = op. inputs [ 0 ]. ndim if ndim == 2 : dtype = None if : value = xdf. DataFrame ( [ value ], columns = index ) elif not isinstance ( value, xdf. DataFrame ) : new_index = None if not op. gpu else getattr ( value, ""index"", None ) dtype = getattr ( value, ""dtype"", None ) value = xdf. DataFrame ( value, columns = index, index = new_index ) else : return value value = value. T if axis == 0 else value if ( dtype == np. dtype ( ""O"" ) and getattr ( op. outputs [ 0 ], ""dtypes"", None ) is not None ) : value = value. astype ( op. outputs [ 0 ]. dtypes ) return value else : if : value = xdf. Series ( [ value ], index = index ) elif isinstance ( value, np. ndarray ) : value = xdf. Series ( value. tolist ( ), index = index ) return value",False,"isinstance(value, (np.generic, int, float, complex))","isinstance(value, xdf.Series)",0.6513369083404541 1590,"def process ( self ) : if not self. outputs [ ""Angles"" ]. is_linked : return vertices_s = self. inputs [ ""Vertices"" ]. sv_get ( default = [ [ ] ] ) edges_s = self. inputs [ ""Edges"" ]. sv_get ( default = [ [ ] ] ) faces_s = self. inputs [ ""Polygons"" ]. sv_get ( default = [ [ ] ] ) result_angles = [ ] meshes = match_long_repeat ( [ vertices_s, edges_s, faces_s ] ) for vertices, edges, faces in zip ( * meshes ) : new_angles = [ ] bm = bmesh_from_pydata ( vertices, edges, faces ) bm. normal_update ( ) for edge in bm. edges : if self. signed : angle = edge. calc_face_angle_signed ( 180 ) else : angle = edge. calc_face_angle ( 180 ) if self. complement : angle = math. copysign ( math. pi, angle ) - angle if self. is_degenerated ( edge ) : angle = self. get_degenerated_angle ( angle ) if : angle = math. degrees ( angle ) new_angles. append ( angle ) if edges : new_angles = untangle_edges ( edges, bm. edges, new_angles )",False,self.angles_mode == 'degrees' and angle is not None,self.is_tangle,0.656069278717041 1591,"def _reinit_optimizers_with_oss ( self ) : optimizers = self. lightning_module. trainer. optimizers for x, optimizer in enumerate ( optimizers ) : if : optimizer = optimizer. _optimizer if not isinstance ( optimizer, OSS ) : optim_class = type ( optimizer ) zero_optimizer = OSS ( params = optimizer. param_groups, optim = optim_class, ** optimizer. defaults ) optimizers [ x ] = zero_optimizer del optimizer trainer = self. lightning_module. trainer trainer. optimizers = optimizers trainer. convert_to_lightning_optimizers ( )",False,is_lightning_optimizer(optimizer),"hasattr(optimizer, '_optimizer')",0.652013897895813 1592,"def OnKey ( self, evt ) : key = evt. GetKeyCode ( ) if key >= 32 and key <= 127 : self. typedText = self. typedText + chr ( key ) item = self. FindPrefix ( self. typedText ) if : self. SetSelection ( item ) elif key == wx. WXK_BACK : self. typedText = self. typedText [ : - 1 ] if not self. typedText : self. SetSelection ( 0 ) else : item = self. FindPrefix ( self. typedText ) if : self. SetSelection ( item ) else : self. typedText = """" evt. Skip ( )",False,item != -1,not self.typedText,0.670193076133728 1593,"def write ( self, key, value = None, ttl = None, dir = False ) : key = _normalize_path ( key ) segments = key. lstrip ( ""/"" ). split ( ""/"" ) path = """" parent = ""/"" event_paths = [ ] for s in segments : if parent not in self. _children : self. _children [ parent ] = set ( ) self. _children [ parent ]. add ( s ) event_paths. append ( parent ) path += ""/"" + s if path!= key and path in self. 
_store : raise KeyError ( f""Not a directory: {key}"" ) parent = path if dir : self. _children [ key ] = set ( ) else : self. _store [ key ] = value if ttl : self. _expire_time [ key ] = datetime. now ( ) + timedelta ( seconds = ttl ) event_paths. append ( key ) for p in event_paths : if : [ e. set ( ) for e in list ( self. _watch_event_r [ p ] ) ] if key in self. _watch_event : [ e. set ( ) for e in list ( self. _watch_event [ key ] ) ]",True,p in self._watch_event_r,p in self._watch_event_r,0.657435417175293 1594,"def _add_select ( self, table_set ) : to_select = [ ] has_select_star = False for expr in self. select_set : if isinstance ( expr, ir. ValueExpr ) : arg = self. _translate ( expr, named = True ) elif : if expr. equals ( self. table_set ) : cached_table = self. context. get_table ( expr ) if cached_table is None : has_select_star = True continue else : arg = table_set else : arg = self. context. get_table ( expr ) if arg is None : raise ValueError ( expr ) to_select. append ( arg ) if has_select_star : if table_set is None : raise ValueError ( ""table_set cannot be None here"" ) clauses = [ table_set ] + to_select else : clauses = to_select if self. exists : result = sa. exists ( clauses ) else : result = sa. select ( clauses ) if self. distinct : result = result. distinct ( ) if not has_select_star : if table_set is not None : return result. select_",False,"isinstance(expr, ir.TableExpr)",self.table_set is not None,0.6515713930130005 1595,"def _finish_closing ( self, _ ) : if self. socket_type == SERVER_SOCKET : log. debug ( ""Shutting down server socket parent group"", extra = { ""sock"" : self } ) self. parent_group. shutdownGracefully ( 0, 100, TimeUnit. MILLISECONDS ) self. accepted_children -= 1 while True : child = self. child_queue. poll ( ) if : break log. debug ( ""Closed child socket %s not yet accepted"", child, extra = { ""sock"" : self } ) child. close ( ) else : msgs = [ ] self. incoming. drainTo ( msgs ) for msg in msgs : if msg is not _PEER_CLOSED : msg. release ( ) log. debug ( ""Closed socket"", extra = { ""sock"" : self } )",True,child is None,child is None,0.6680395603179932 1596,"def format_ret ( self, arg ) : """"""Format arg, the value returned by a ""return"" statement."""""" try : if isinstance ( arg, types. GeneratorType ) : ret = """" elif isinstance ( arg, ( tuple, list ) ) : ret = ""[%s]"" % "","". join ( [ self. show ( z ) for z in arg ] ) if len ( ret ) > 40 : ret = ""[\n%s]"" % ( ""\n,"". join ( [ self. show ( z ) for z in arg ] ) ) elif : ret = self. show ( arg ) if len ( ret ) > 40 : ret = ""\n %s"" % ret else : ret = """" if arg is None else repr ( arg ) except Exception : exctype, value = sys. exc_info ( ) [ : 2 ] s = ""<**exception: %s,%s arg: %r**>"" % ( exctype. __name__, value, arg ) ret = "" ->\n %s"" % ( s ) if len ( s ) > 40 else "" -> %s"" % ( s ) return "" -> %s"" % ret",False,arg,"isinstance(arg, string_types)",0.7060309648513794 1597,"def get_sink_args_which_propagate ( sink, ast_node ) : sink_args_with_positions = CallVisitor. get_call_visit_results ( sink. trigger. call, ast_node ) sink_args = [ ] kwargs_present = set ( ) for i, vars in enumerate ( sink_args_with_positions. args ) : kwarg = sink. trigger. get_kwarg_from_position ( i ) if kwarg : kwargs_present. add ( kwarg ) if : sink_args. extend ( vars ) for keyword, vars in sink_args_with_positions. kwargs. items ( ) : kwargs_present. add ( keyword ) if sink. trigger. kwarg_propagates ( keyword ) : sink_args. extend ( vars ) if ( not sink. trigger. arg_list_propagates or sink. 
trigger. kwarg_list - kwargs_present ) : sink_args. extend ( sink_args_with_positions. unknown_args ) sink_args. extend ( sink_args_with_positions. unknown_kwargs ) return sink_args",False,sink.trigger.kwarg_propagates(kwarg),vars,0.6532992124557495 1598,"def read ( self, item, recursive = False, sort = False ) : item = _normalize_path ( item ) if item in self. _store : if : del self. _store [ item ] raise KeyError ( item ) return PathResult ( item, value = self. _store [ item ] ) else : return self. _read_dir ( item, recursive = recursive, sort = sort )",False,item in self._expire_time and self._expire_time[item] < datetime.now(),recursive,0.653357744216919 1599,"def _get_new_version ( config_file : str = ""./setup.cfg"", current_version : str = None, micro_release : bool = False, ) : if micro_release : if : return _change_micro_version ( current_version ) elif config_file : return _change_micro_version ( _fetch_current_version ( config_file ) ) else : return _fetch_default_calendar_release_version ( ) else : return _fetch_default_calendar_release_version ( )",True,current_version,current_version,0.6658474206924438 1600,"def test_list ( self ) : self. _create_locations ( ) response = self. client. get ( self. geojson_boxedlocation_list_url ) self. assertEqual ( response. status_code, 200 ) self. assertEqual ( len ( response. data [ ""features"" ] ), 2 ) for feature in response. data [ ""features"" ] : self. assertIn ( ""bbox"", feature ) fid = feature [ ""id"" ] if : self. assertEqual ( feature [ ""bbox"" ], self. bl1. bbox_geometry. extent ) elif fid == 2 : self. assertEqual ( feature [ ""bbox"" ], self. bl2. bbox_geometry. extent ) else : self. fail ( ""Unexpected id: {0}"". format ( fid ) ) BoxedLocation. objects. all ( ). delete ( )",True,fid == 1,fid == 1,0.6808070540428162 1601,"def fee_amount_in_quote ( self, trading_pair : str, price : Decimal, order_amount : Decimal ) : fee_amount = Decimal ( ""0"" ) if self. percent > 0 : fee_amount = ( price * order_amount ) * self. percent base, quote = trading_pair. split ( ""-"" ) for flat_fee in self. flat_fees : if : fee_amount += flat_fee [ 1 ] * price elif interchangeable ( flat_fee [ 0 ], quote ) : fee_amount += flat_fee [ 1 ] return fee_amount",False,"interchangeable(flat_fee[0], base)",base,0.6503372192382812 1602,"def _ProcessName ( self, name, dependencies ) : """"""Retrieve a module name from a node name."""""" module_name, dot, base_name = name. rpartition ( ""."" ) if dot : if : if module_name in dependencies : dependencies [ module_name ]. add ( base_name ) else : dependencies [ module_name ] = { base_name } else : logging. warning ( ""Empty package name: %s"", name )",False,module_name,base_name,0.6689913868904114 1603,"def _build_args ( self ) : queue = Queue ( ) for i, arg in enumerate ( self. real_args ) : if isinstance ( arg, ( list, tuple ) ) : self. real_args [ i ] = self. data_addr + len ( self. payload ) + queue. size ( ) queue. extend ( arg ) elif isinstance ( arg, bytes ) : self. real_args [ i ] = self. data_addr + len ( self. payload ) + queue. size ( ) queue. append ( MarkedBytes ( arg ) ) while len ( queue ) > 0 : top = queue [ 0 ] if isinstance ( top, ( list, tuple ) ) : top = pack ( self. data_addr + len ( self. payload ) + queue. size ( ) ) queue. extend ( queue [ 0 ] ) elif isinstance ( top, MarkedBytes ) : pass elif : top = pack ( self. data_addr + len ( self. payload ) + queue. size ( ) ) queue. append ( MarkedBytes ( queue [ 0 ] ) ) elif isinstance ( top, six. integer_types ) : top = pack ( top ) self. 
payload += top queue. pop ( 0 )",False,"isinstance(top, bytes)",top == 0,0.6495963335037231 1604,"def __init__ ( self, debugger ) : infos = [ ] frame = debugger. currentframe while frame : cc = debugger. codeContainerProvider. FromFileName ( frame. f_code. co_filename ) if : try : address = frame. f_locals [ ""__axstack_address__"" ] except KeyError : address = axdebug. GetStackAddress ( ) frameInfo = ( DebugStackFrame ( frame, frame. f_lineno - 1, cc ), address, address + 1, 0, None, ) infos. append ( frameInfo ) frame = frame. f_back gateways. EnumDebugStackFrames. __init__ ( self, infos, 0 )",False,cc is not None,"hasattr(frame, 'f_locals')",0.6673362851142883 1605,"def generate_png ( self, info, png_filename, svg_data ) : with tempfile. NamedTemporaryFile ( ""w+"", encoding = ""utf-8"", suffix = "".svg"", delete = False ) as tmpfile : tmpfile. write ( svg_data ) tmpfile. flush ( ) command = [ self. inkscape, ""--without-gui"", ""-f"", tmpfile. name, ""-e"", png_filename, ""--export-area-drawing"", ""--export-area-snap"", ] if info. get ( ""background"" ) : command. append ( ""--export-background=%s"" % info [ ""background"" ] ) if info. get ( ""dpi"" ) : command. append ( ""--export-dpi=%s"" % info [ ""dpi"" ] ) if : command. append ( ""--export-width=%s"" % info [ ""width"" ] ) if info. get ( ""height"" ) : command. append ( ""--export-height=%s"" % info [ ""height"" ] ) subprocess. check_call ( command )",True,info.get('width'),info.get('width'),0.6529386043548584 1606,"def start ( self, * args, ** kwargs ) : try : super ( ). start ( * args, ** kwargs ) except testprocess. ProcessExited : is_dl_inconsistency = str ( self. captured_log [ - 1 ] ). endswith ( ""_dl_allocate_tls_init: Assertion "" ""`listp->slotinfo[cnt].gen <= GL(dl_tls_generation)' failed!"" ) if : self. captured_log = [ ] self. _log ( ""NOTE: Restarted after libc DL inconsistency!"" ) self. clear_data ( ) super ( ). start ( * args, ** kwargs ) else : raise",False,testutils.ON_CI and is_dl_inconsistency,is_dl_inconsistency,0.6467252969741821 1607,"def _execute_combine ( cls, xp, a, v, op ) : inp_indices, inp_data = a if np. isscalar ( v ) : ind = xp. searchsorted ( inp_data, v, side = op. side ) if : ind -= 1 return inp_indices [ ind ], inp_data [ ind ] else : ret_indices = np. empty ( v. shape, dtype = np. intp ) ret_data = np. empty ( v. shape, dtype = inp_data. dtype ) for idx in itertools. product ( * ( range ( s ) for s in v. shape ) ) : ind = xp. searchsorted ( inp_data [ ( slice ( None ), ) + idx ], v [ idx ], side = op. side ) if ind >= len ( inp_indices ) : ind -= 1 ret_indices [ idx ] = inp_indices [ ( ind, ) + idx ] ret_data [ idx ] = inp_data [ ( ind, ) + idx ] return ret_indices, ret_data",False,ind >= len(inp_data),ind >= len(inp_indices),0.6530814170837402 1608,"def close ( self ) : self. _callbacks. clear ( ) if self. _saved_sighandler is not None : handler = signal. getsignal ( signal. SIGCHLD ) if : logger. warning ( ""SIGCHLD handler was changed by outside code"" ) else : signal. signal ( signal. SIGCHLD, self. _saved_sighandler ) self. _saved_sighandler = None",False,handler != self._sig_chld,handler != None,0.6657562255859375 1609,"def emitSubDomainData ( self, subDomainData, event ) : self. emitRawRirData ( subDomainData, event ) for subDomainElem in subDomainData : if self. checkForStop ( ) : return None subDomain = subDomainElem. get ( ""subdomain"", """" ). strip ( ) if : self. 
emitHostname ( subDomain, event )",True,subDomain,subDomain,0.6842691898345947 1610,"def create_xxh_env ( self ) : home = p ( self. local_xxh_home ) check_dirs = [ home, home / "".xxh/shells"", home / "".xxh/plugins"" ] for d in check_dirs : if not d. exists ( ) : self. S ( f""mkdir -p {d}"" ) xxh_version_file = home / "".xxh/xxh_version"" if not xxh_version_file. exists ( ) : self. S ( f""echo {__version__} > {xxh_version_file}"" ) config_file = p ( self. config_file ) sample_config_file = self. package_dir_path / ""config.xxhc"" if not config_file. exists ( ) and sample_config_file. exists ( ) : if : eprint ( f""Create sample config file in {config_file}"" ) self. S ( f""mkdir -p {config_file.parent} && cp {sample_config_file} {config_file}"" )",False,not self.quiet,config_file.exists(),0.6715287566184998 1611,"def validate_directory ( d, msg = ""{} {}"" ) : if not os. path. exists ( d ) : try : os. makedirs ( d ) except OSError as e : logger. error ( e ) if : raise PatroniException ( msg. format ( d, ""couldn't create the directory"" ) ) elif os. path. isdir ( d ) : try : fd, tmpfile = tempfile. mkstemp ( dir = d ) os. close ( fd ) os. remove ( tmpfile ) except OSError : raise PatroniException ( msg. format ( d, ""the directory is not writable"" ) ) else : raise PatroniException ( msg. format ( d, ""is not a directory"" ) )",False,e.errno != errno.EEXIST,not os.path.isdir(d),0.6558647155761719 1612,"def _remove_obsolete_leafs ( input_dict ) : if not isinstance ( input_dict, dict ) : return if input_dict [ LEAF_MARKER ] : bottom_leafs = input_dict [ LEAF_MARKER ] for leaf in bottom_leafs : if : input_dict [ LEAF_MARKER ]. remove ( leaf ) for subtree in input_dict. keys ( ) : _remove_obsolete_leafs ( input_dict [ subtree ] )",False,leaf in input_dict,leaf in input_dict[LEAF_MARKER],0.6701984405517578 1613,"def fix_e712 ( self, result ) : """"""Fix (trivial case of) comparison with boolean."""""" ( line_index, offset, target ) = get_index_offset_contents ( result, self. source ) if re. match ( r'^\s*if [\w.""\'\[\]]+ == False:$', target ) : self. source [ line_index ] = re. sub ( r'if ([\w.""\'\[\]]+) == False:', r""if not \1:"", target, count = 1 ) elif re. match ( r'^\s*if [\w.""\'\[\]]+!= True:$', target ) : self. source [ line_index ] = re. sub ( r'if ([\w.""\'\[\]]+)!= True:', r""if not \1:"", target, count = 1 ) else : right_offset = offset + 2 if right_offset >= len ( target ) : return [ ] left = target [ : offset ]. rstrip ( ) center = target [ offset : right_offset ] right = target [ right_offset : ]. lstrip ( ) new_right = None if center. strip ( ) == ""=="" : if re. match ( r""\bTrue\b"", right ) : new_right = re. sub ( r""\bTrue\b *"", """", right, count = 1 ) elif center. strip ( ) == ""!="" : if re. match ( r""\bFalse\b"", right ) : new_right = re. sub ( r""\bFalse\b *"", """",",False,new_right is None,target[count] == True,0.6584903597831726 1614,"def __init__ ( self, ** kwargs ) : dfl = get_model_label ( self. default_model_class ) if ""to"" in kwargs. keys ( ) : old_to = get_model_label ( kwargs. pop ( ""to"" ) ) if : msg = ""%s can only be a ForeignKey to %s; %s passed"" % ( self. __class__. __name__, dfl, old_to, ) warnings. warn ( msg, SyntaxWarning ) kwargs [ ""to"" ] = dfl super ( ). __init__ ( ** kwargs )",False,old_to.lower() != dfl.lower(),old_to and dfl < self.default_model_class,0.6516293287277222 1615,"def _list_outputs ( self ) : outputs = self. _outputs ( ). 
get ( ) outputs [ ""outlier_files"" ] = [ ] outputs [ ""intensity_files"" ] = [ ] outputs [ ""statistic_files"" ] = [ ] outputs [ ""mask_files"" ] = [ ] if : outputs [ ""norm_files"" ] = [ ] if self. inputs. bound_by_brainmask : outputs [ ""displacement_files"" ] = [ ] if isdefined ( self. inputs. save_plot ) and self. inputs. save_plot : outputs [ ""plot_files"" ] = [ ] for i, f in enumerate ( ensure_list ( self. inputs. realigned_files ) ) : ( outlierfile, intensityfile, statsfile, normfile, plotfile, displacementfile, maskfile, ) = self. _get_output_filenames ( f, os. getcwd ( ) ) outputs [ ""outlier_files"" ]. insert ( i, outlierfile ) outputs [ ""intensity_files"" ]. insert ( i, intensityfile ) outputs [ ""statistic_files"" ]. insert ( i, statsfile ) outputs [ ""mask_files"" ]. insert ( i, maskfile ) if : outputs [ ""norm_files"" ]. insert ( i, normfile ) if self. inputs. bound_by_brainmask : outputs [ ""displacement_files"" ]. insert ( i, displacementfile ) <",False,isdefined(self.inputs.use_norm) and self.inputs.use_norm,self.inputs.save_norm,0.6469634771347046 1616,"def generate_io ( chart_type, race_configs, environment ) : structures = [ ] for race_config in race_configs : if : title = chart_type. format_title ( environment, race_config. track, es_license = race_config. es_license, suffix = ""%s-io"" % race_config. label, ) structures. append ( chart_type. io ( title, environment, race_config ) ) return structures",False,'io' in race_config.charts,race_config.label and race_config.es_license,0.6538827419281006 1617,"def _check_fit_params ( X : TwoDimArrayLikeType, fit_params : Dict, indices : OneDimArrayLikeType ) -> Dict : fit_params_validated = { } for key, value in fit_params. items ( ) : if : fit_params_validated [ key ] = value else : fit_params_validated [ key ] = _make_indexable ( value ) fit_params_validated [ key ] = _safe_indexing ( fit_params_validated [ key ], indices ) return fit_params_validated",False,not _is_arraylike(value) or _num_samples(value) != _num_samples(X),key == 'TAB > ,0.6508034467697144 1618,"def acquire ( self, timeout = None, check_interval = None, fail_when_locked = None ) : if self. _lock : if : raise exceptions. LockException ( ) if self. _pid!= os. getpid ( ) : self. _pid = os. getpid ( ) self. _acquire_count = 0 if self. fh : try : portalocker. unlock ( self. fh ) self. fh. close ( ) except Exception : pass self. fh = None if self. _acquire_count >= 1 : fh = self. fh else : fh = super ( RLock, self ). acquire ( timeout, check_interval, fail_when_locked ) self. _acquire_count += 1 return fh",False,"not self._lock.acquire(block=timeout != 0, timeout=timeout)",self._acquire_count >= 2,0.6550785303115845 1619,"def _GetMSBuildConfigurationDetails ( spec, build_file ) : properties = { } for name, settings in spec [ ""configurations"" ]. iteritems ( ) : msbuild_attributes = _GetMSBuildAttributes ( spec, settings, build_file ) condition = _GetConfigurationCondition ( name, settings ) character_set = msbuild_attributes. 
get ( ""CharacterSet"" ) _AddConditionalProperty ( properties, condition, ""ConfigurationType"", msbuild_attributes [ ""ConfigurationType"" ], ) if : if ""msvs_enable_winrt"" not in spec : _AddConditionalProperty ( properties, condition, ""CharacterSet"", character_set ) return _GetMSBuildPropertyGroup ( spec, ""Configuration"", properties )",False,character_set,'msvs_enable_stdrt' in spec,0.6597793102264404 1620,"def test_build_root_config_overwrite ( self ) : cfg = build_root_config ( ""tests.files.settings_overwrite"" ) for key, val in DEFAULT_SPIDER_GLOBAL_CONFIG. items ( ) : if : self. assertEqual ( cfg [ ""global"" ] [ key ], [ ""zzz"" ] ) else : self. assertEqual ( cfg [ ""global"" ] [ key ], val )",False,key == 'spider_modules',key in cfg,0.6519770622253418 1621,"def sha_update ( sha_info, buffer ) : if isinstance ( buffer, str ) : raise TypeError ( ""Unicode strings must be encoded before hashing"" ) count = len ( buffer ) buffer_idx = 0 clo = ( sha_info [ ""count_lo"" ] + ( count << 3 ) ) & 0xFFFFFFFF if clo < sha_info [ ""count_lo"" ] : sha_info [ ""count_hi"" ] += 1 sha_info [ ""count_lo"" ] = clo sha_info [ ""count_hi"" ] += count >> 29 if sha_info [ ""local"" ] : i = SHA_BLOCKSIZE - sha_info [ ""local"" ] if i > count : i = count for x in enumerate ( buffer [ buffer_idx : buffer_idx + i ] ) : sha_info [ ""data"" ] [ sha_info [ ""local"" ] + x [ 0 ] ] = x [ 1 ] count -= i buffer_idx += i sha_info [ ""local"" ] += i if : sha_transform ( sha_info ) sha_info [ ""local"" ] = 0 else : return while count >= SHA_BLOCKSIZE : sha_info [ ""data"" ] = list ( buffer [ buffer_idx : buffer_idx + SHA_BLOCKSIZE ] ) count -= SHA_BLOCKSIZE buffer_idx += SHA_BLOCKSIZE sha_transform ( sha_info ) pos = sha_info [ ""local"" ] sha_info [ ""data"" ] [ pos : pos + count ] = list ( buffer [ buffer_idx : buffer",False,sha_info['local'] == SHA_BLOCKSIZE,sha_info['local'] == 0,0.6547240018844604 1622,"def _check_load_bbox ( self, coco, entry ) : """"""Check and load ground-truth labels"""""" entry_id = entry [ ""id"" ] entry_id = [ entry_id ] if not isinstance ( entry_id, ( list, tuple ) ) else entry_id ann_ids = coco. getAnnIds ( imgIds = entry_id, iscrowd = None ) objs = coco. loadAnns ( ann_ids ) valid_objs = [ ] width = entry [ ""width"" ] height = entry [ ""height"" ] for obj in objs : if obj [ ""area"" ] < self. _min_object_area : continue if obj. get ( ""ignore"", 0 ) == 1 : continue if not self. _use_crowd and obj. get ( ""iscrowd"", 0 ) : continue xmin, ymin, xmax, ymax = bbox_clip_xyxy ( bbox_xywh_to_xyxy ( obj [ ""bbox"" ] ), width, height ) if obj [ ""area"" ] > 0 and xmax > xmin and ymax > ymin : contiguous_cid = self. json_id_to_contiguous [ obj [ ""category_id"" ] ] valid_objs. append ( [ xmin, ymin, xmax, ymax, contiguous_cid ] ) if not valid_objs : if : valid_objs. append ( [ - 1, - 1, - 1, - 1, - 1 ] ) return valid_objs",False,not self._skip_empty,self.norm_width > 0.0 or self.norm_height > 0.0,0.6570522785186768 1623,"def check_build_dir ( build_dir ) : profiles_per_benchmark = defaultdict ( list ) for path in glob. glob ( os. path. join ( build_dir, ""ssg-*-ds.xml"" ) ) : gather_profiles_from_datastream ( path, build_dir, profiles_per_benchmark ) for bench_short_id in STABLE_PROFILE_IDS. 
keys ( ) : if respective_datastream_absent ( bench_short_id, build_dir ) : continue if bench_short_id not in profiles_per_benchmark : raise RuntimeError ( ""Expected benchmark ID '%s' has to be "" ""prefixed with '%s'."" % ( bench_short_id, BENCHMARK_ID_PREFIX ) ) for profile_id in STABLE_PROFILE_IDS [ bench_short_id ] : if : raise RuntimeError ( ""Profile '%s' is required to be in the "" ""'%s' benchmark. It is a stable profile "" ""that can't be renamed or removed!"" % ( profile_id, bench_short_id ) )",False,profile_id not in profiles_per_benchmark[bench_short_id],profile_id not in profiles_per_benchmark,0.6485183238983154 1624,"def candidates ( ) -> Generator [ ""Symbol"", None, None ] : s = self if : Symbol. debug_print ( ""searching in self:"" ) print ( s. to_string ( Symbol. debug_indent + 1 ), end = """" ) while True : if matchSelf : yield s if recurseInAnon : yield from s. children_recurse_anon else : yield from s. _children if s. siblingAbove is None : break s = s. siblingAbove if : Symbol. debug_print ( ""searching in sibling:"" ) print ( s. to_string ( Symbol. debug_indent + 1 ), end = """" )",False,Symbol.debug_lookup,s.siblingAbove is None,0.6518721580505371 1625,"def tagSelected ( tag ) : tag = tag. lower ( ). strip ( ) if tag == ""bold"" : app. textAreaToggleFontSelected ( ""ta"", ""BOLD"" ) if tag == ""underline"" : app. textAreaToggleFontSelected ( ""ta"", ""UNDERLINE"" ) if tag == ""italic"" : app. textAreaToggleFontSelected ( ""ta"", ""ITALIC"" ) if tag == ""boldItalic"" : app. textAreaToggleFontSelected ( ""ta"", ""BOLD_ITALIC"" ) else : op = app. radio ( ""operation"" ) if : app. textAreaToggleTagSelected ( ""ta"", tag ) elif op == ""remove"" : app. textAreaUntagSelected ( ""ta"", tag ) elif op == ""delete"" : app. textAreaDeleteTag ( ""ta"", tag )",False,op == 'add',"op == 'open""",0.6601884961128235 1626,"def _get_field_actual ( cant_be_number, raw_string, field_names ) : for line in raw_string. splitlines ( ) : for field_name in field_names : field_name = field_name. lower ( ) if "":"" in line : left, right = line. split ( "":"", 1 ) left = left. strip ( ). lower ( ) right = right. strip ( ) if : if cant_be_number : if not right. isdigit ( ) : return right else : return right return None",False,left == field_name and len(right) > 0,left.count(),0.6519061326980591 1627,"def _format_unencoded_with_lineno ( self, tokensource, outfile ) : self. _write_lineno ( outfile ) for ttype, value in tokensource : if value. endswith ( ""\n"" ) : self. _write_lineno ( outfile ) value = value [ : - 1 ] color = self. colorscheme. get ( ttype ) while color is None : ttype = ttype [ : - 1 ] color = self. colorscheme. get ( ttype ) if color : color = color [ self. darkbg ] spl = value. split ( ""\n"" ) for line in spl [ : - 1 ] : self. _write_lineno ( outfile ) if : outfile. write ( ircformat ( color, line [ : - 1 ] ) ) if spl [ - 1 ] : outfile. write ( ircformat ( color, spl [ - 1 ] ) ) else : outfile. write ( value ) outfile. write ( ""\n"" )",False,line,spl,0.6757024526596069 1628,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRUCT : self. operationId = THandleIdentifier ( ) self. operationId. read ( iprot ) else : iprot. 
skip ( ftype ) elif fid == 2 : if ftype == TType. I32 : self. operationType = iprot. readI32 ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. BOOL : self. hasResultSet = iprot. readBool ( ) else : iprot. skip ( ftype ) elif fid == 4 : if ftype == TType. DOUBLE : self. modifiedRowCount = iprot. readDouble ( ) else : ",True,fid == 3,fid == 3,0.6749258041381836 1629,"def _check_redirects ( self, result ) : if result. history and self. _base_url. startswith ( ""http:"" ) : for item in result. history : if item. status_code not in ( 301, 302 ) : continue if item. request. method == ""GET"" : continue location = item. headers. get ( ""Location"", None ) if : raise RedirectError ( REDIRECT_MSG )",False,location and location.startswith('https://'),location and location.startswith(self._base_url),0.6451317667961121 1630,"def _check_seed ( self, seed ) : if seed is not None : if : self. _raise_error ( ""The random number generator seed value, seed, should be integer type or None."" ) if seed < 0 : self. _raise_error ( ""The random number generator seed value, seed, should be non-negative integer or None."" )",False,type(seed) != int,"seed < 0 and isinstance(seed, int) or seed > 65535",0.6684019565582275 1631,"def __add__ ( self, f ) : if isinstance ( f. value, dict ) : for field in f. fields : self. slice [ field ] = f. value if : self. fields = f. fields el if : self. fields = f. fields self. value = f. value self. slice = { } elif self. value is self. ONLY and f. value is self. ONLY : self. _clean_slice ( ) if self. _only_called : self. fields = self. fields. union ( f. fields ) else : self. fields = f. fields elif self. value is self. EXCLUDE and f. value is self. EXCLUDE : self. fields = self. fields. union ( f. fields ) self. _clean_slice ( ) elif self. value is self. ONLY and f. value is self. EXCLUDE : self. fields -= f. fields self. _clean_slice ( ) elif self. value is self. EXCLUDE and f. value is self. ONLY : self. value = self. ONLY self. fields = f. fields - self. fields self. _clean_slice ( ) if ""_id"" in f. fields : self. _id = f. value if self. always_include : if self. value is self. ONLY and self. fields : if sorted ( self. slice. keys ( ) )!= sorted ( self. fields ) : self. fields = self. fields. union ( self. always_include ) else : self. DEBUG_SENDBACK = False self. DEBUG_SHOW_GUI = True self. previous_grey_img = None self. cv2_major = None self. cv2_minor = None self. MOTION_DETECTION_THRESHOLD = 1000 self. NO_MOTION_DURATION = 5 self. no_motion_start_time = None self. no_motion_image = None for value in cv2. __version__. split ( ""."" ) : if self. cv2_major is None : self. cv2_major = value elif : self. cv2_minor = value break print ( ""self.cv2_major={}, self.cv2_minor={}"". format ( self. cv2_major, self. cv2_minor ) )",True,self.cv2_minor is None,self.cv2_minor is None,0.6618829965591431 1633,"def _lint ( self ) : lints = { } lint_errors = set ( ) for name, test in self. _config. config. tests. items ( ) : lints [ name ] = { ""regions"" : self. _filter_unsupported_regions ( test. regions ) } lints [ name ] [ ""template"" ] = self. _templates [ name ]. template_path lints [ name ] [ ""results"" ] = { } templates = [ ] for template in self. _templates. values ( ) : templates. append ( template ) templates += list ( template. descendents ) templates = set ( templates ) for template in templates : self. _run_checks ( template, name, lint_errors, lints ) for err in lint_errors : LOG. 
error ( err ) for test in lints : for result in lints [ test ] [ ""results"" ] : if : if self. _is_error ( lints [ test ] [ ""results"" ] [ result ] ) : lint_errors. add ( result ) return lints, lint_errors",False,lints[test]['results'][result],self._is_error(result),0.6587398052215576 1634,"def _unsubscribe ( self, subscription, session ) : was_subscribed, was_last_subscriber = self. _subscription_map. drop_observer ( session, subscription ) if was_subscribed : self. _session_to_subscriptions [ session ]. discard ( subscription ) if self. _router. _realm : service_session = self. _router. _realm. session if service_session and not subscription. uri. startswith ( u""wamp."" ) : if was_subscribed : service_session. publish ( u""wamp.subscription.on_unsubscribe"", session. _session_id, subscription. id, ) if : service_session. publish ( u""wamp.subscription.on_delete"", session. _session_id, subscription. id ) return was_subscribed, was_last_subscriber",True,was_last_subscriber,was_last_subscriber,0.6567608118057251 1635,"def _create_examples ( cls, lines, set_type ) : examples = [ ] for ( i, line ) in enumerate ( lines ) : if ""gold_label"" in line : if : continue examples. append ( Example ( guid = ""%s-%s"" % ( set_type, i ), input_premise = line [ ""sentence1"" ], input_hypothesis = line [ ""sentence2"" ], label = line [ ""gold_label"" ] if set_type!= ""test"" else cls. LABELS [ - 1 ], ) ) else : if line [ ""label"" ] == - 1 : continue examples. append ( Example ( guid = ""%s-%s"" % ( set_type, i ), input_premise = line [ ""premise"" ], input_hypothesis = line [ ""hypothesis"" ], label = line [ ""label"" ] if set_type!= ""test"" else cls. LABELS [ - 1 ], ) ) return examples",False,line['gold_label'] == '-',line['label'] == None,0.6572644710540771 1636,"def _parse_args ( cls, args ) : parts = [ ] for a in args : if isinstance ( a, PurePath ) : parts += a. _parts else : if : a = os. fspath ( a ) else : if hasattr ( a, ""__fspath__"" ) : a = a. __fspath__ ( ) if isinstance ( a, str ) : parts. append ( str ( a ) ) elif six. PY2 and isinstance ( a, six. text_type ) : parts. append ( a. encode ( sys. getfilesystemencoding ( ) ) ) else : raise TypeError ( ""argument should be a str object or an os.PathLike "" ""object returning str, not %r"" % type ( a ) ) return cls. _flavour. parse_parts ( parts )",False,"sys.version_info >= (3, 6)","hasattr(a, '__fspath__')",0.652714729309082 1637,"def enrichTarget ( self, target ) : ret = list ( ) self. sf. info ( ""Identifying aliases for specified target(s)"" ) ret = self. sf. resolveTargets ( target, self. opts [ ""validatereverse"" ] ) if not ret : return target for host in ret : self. sf. debug ( ""Found an alias: "" + host ) if self. sf. validIP ( host ) : target. setAlias ( host, ""IP_ADDRESS"" ) elif self. sf. validIP6 ( host ) : target. setAlias ( host, ""IPV6_ADDRESS"" ) else : target. setAlias ( host, ""INTERNET_NAME"" ) idnahost = host. encode ( ""idna"" ) if : target. setAlias ( idnahost. decode ( ""ascii"", errors = ""replace"" ), ""INTERNET_NAME"" ) self. sf. info ( ""Aliases identified: "" + str ( target. targetAliases ) ) return target",False,idnahost != host,idnahost,0.6686146855354309 1638,"def get_states ( idxs, labels, cls_num, weights = None ) : ins_num = idxs. shape [ 0 ] states = np. zeros ( ( cls_num, 4 ) ). 
astype ( ""float32"" ) for i in range ( ins_num ) : w = weights [ i ] if weights is not None else 1.0 idx = idxs [ i ] [ 0 ] label = labels [ i ] [ 0 ] if : states [ idx ] [ 0 ] += w for j in range ( cls_num ) : states [ j ] [ 2 ] += w states [ idx ] [ 2 ] -= w else : states [ label ] [ 3 ] += w states [ idx ] [ 1 ] += w for j in range ( cls_num ) : states [ j ] [ 2 ] += w states [ label ] [ 2 ] -= w states [ idx ] [ 2 ] -= w return states",False,idx == label,label == None,0.6789555549621582 1639,"def write_index ( docs ) : for chunk in web. group ( docs, 1000 ) : chunk = list ( chunk ) thing_ids = [ doc [ ""id"" ] for doc in chunk ] t = db. transaction ( ) db. query ( ""DELETE FROM work_ref WHERE thing_id IN $thing_ids"", vars = locals ( ) ) data = [ ] for doc in chunk : thing_id = doc [ ""id"" ] type = doc [ ""type"" ] [ ""key"" ] if : for name, value in doc [ ""_refs"" ] : key_id = get_property_id ( type, name ) data. append ( dict ( thing_id = thing_id, key_id = key_id, value = value ) ) if data : db. multiple_insert ( ""work_ref"", data, seqname = False ) t. commit ( )",False,type == '/type/work',doc[_refs],0.6525618433952332 1640,"def question_vote ( request, question_id ) : """"""I have this problem too."""""" question = get_object_or_404 ( Question, pk = question_id, is_spam = False ) if not question. editable : raise PermissionDenied if not question. has_voted ( request ) : vote = QuestionVote ( question = question ) if : vote. creator = request. user else : vote. anonymous_id = request. anonymous. anonymous_id if not request. limited : vote. save ( ) if ""referrer"" in request. REQUEST : referrer = request. REQUEST. get ( ""referrer"" ) vote. add_metadata ( ""referrer"", referrer ) if referrer == ""search"" and ""query"" in request. REQUEST : vote. add_metadata ( ""query"", request. REQUEST. get ( ""query"" ) ) ua = request. META. get ( ""HTTP_USER_AGENT"" ) if ua : vote. add_metadata ( ""ua"", ua ) statsd. incr ( ""questions.votes.question"" ) if request. is_ajax ( ) : tmpl = ""questions/includes/question_vote_thanks.html"" form = _init_watch_form ( request ) html = render_to_string ( tmpl, { ",False,request.user.is_authenticated(),is_spam,0.6517141461372375 1641,"def __getitem__ ( self, k ) -> ""SimMemView"" : if isinstance ( k, slice ) : if : raise ValueError ( ""Slices with strides are not supported"" ) elif k. start is None : raise ValueError ( ""Must specify start index"" ) elif k. stop is not None : raise ValueError ( ""Slices with stop index are not supported"" ) else : addr = k. start elif self. _type is not None and self. _type. _can_refine_int : return self. _type. _refine ( self, k ) else : addr = k return self. _deeper ( addr = addr )",False,k.step is not None,k.stop is not None,0.6621825695037842 1642,"def create_and_return_ids ( self, tags = None ) : tags_list = [ ] if type ( tags ) is dict : for group, tag in tags. items ( ) : group_id = tag_groups_model. get_or_create_by_name ( group ) _id = self. get_or_create ( name = tag, group_id = group_id ) tags_list. append ( _id ) if type ( tags ) is list : for t in tags : try : group, tag = t. split ( "":"" ) except ValueError : tag = t group = False if : group_id = tag_groups_model. get_or_create_by_name ( group ) _id = self. get_or_create ( name = tag, group_id = group_id ) else : _id = self. get_or_create_by_name ( name = tag ) tags_list. append ( _id ) return tags_list",False,group,group is not None,0.6836246252059937 1643,"def general ( metadata, value ) : if metadata. 
get ( ""commands"" ) and value : if : v = quote ( value ) else : v = value return u""{0} {1}"". format ( metadata [ ""commands"" ] [ 0 ], v ) else : if not value : return None el if : return quote ( value ) else : return value",False,not metadata.get('nargs'),metadata.get('el') and (not metadata.get('el'),0.6477402448654175 1644,"def _pick ( self, cum ) : if self. _isleaf ( ) : return self. bd [ 0 ], self. s else : if : return self. left. _pick ( cum ) else : return self. right. _pick ( cum - self. left. s )",False,cum < self.left.s,self.abs(cum),0.667629599571228 1645,"def _key_response_head ( self, bucket_name, query, key_name, headers ) : response_headers = { } version_id = query. get ( ""versionId"", [ None ] ) [ 0 ] part_number = query. get ( ""partNumber"", [ None ] ) [ 0 ] if part_number : part_number = int ( part_number ) if_modified_since = headers. get ( ""If-Modified-Since"", None ) if_match = headers. get ( ""If-Match"", None ) if_none_match = headers. get ( ""If-None-Match"", None ) if_unmodified_since = headers. get ( ""If-Unmodified-Since"", None ) key = self. backend. get_object ( bucket_name, key_name, version_id = version_id, part_number = part_number ) if key : response_headers. update ( key. metadata ) response_headers. update ( key. response_dict ) if : if_unmodified_since = str_to_rfc_1123_datetime ( if_unmodified_since ) if key. last_modified > if_unmodified_since : return 412, response_headers, """" if if_match and key. etag!= if_match : return 412, response_headers, """" if if_modified_since : if_modified_since = str_to_rfc_1123_datetime ( if_modified_since ) if key. last_modified < if_modified_since : return 304, response_headers, ""Not Modified"" if if_none_match and key. etag == if_none",True,if_unmodified_since,if_unmodified_since,0.6518108248710632 1646,"def draw_markers ( self, gc, marker_path, marker_trans, path, trans, rgbFace = None ) : write = self. _svgwriter. write key = self. _convert_path ( marker_path, marker_trans + Affine2D ( ). scale ( 1.0, - 1.0 ) ) name = self. _markers. get ( key ) if name is None : name = ""m%s"" % md5 ( key ). hexdigest ( ) write ( '\n' % ( name, key ) ) self. _markers [ key ] = name clipid = self. _get_gc_clip_svg ( gc ) if clipid is None : clippath = """" else : clippath = 'clip-path=""url(#%s)""' % clipid write ( """" % clippath ) trans_and_flip = self. _make_flip_transform ( trans ) tpath = trans_and_flip. transform_path ( path ) for vertices, code in tpath. iter_segments ( ) : if : x, y = vertices [ - 2 : ] details = 'xlink:href=""#%s"" x=""%f"" y=""%f""' % ( name, x, y ) style = self. _get_style ( gc, rgbFace ) self. _svgwriter. write ( '\n' % ( style, details ) ) write ( """" )",False,len(vertices),code.startswith('<') and vertices[-2:] == 'true',0.6615938544273376 1647,"def scan ( self ) : """"""Scan source and grab tokens."""""" self. pre_scan ( ) token = None end = len ( self. source ) while self. pos < end : best_pat = None best_pat_len = 0 for p, regexp in self. patterns : m = regexp. match ( self. source, self. pos ) if : best_pat = p best_pat_len = len ( m. group ( 0 ) ) break if best_pat is None : raise SyntaxError ( ""SyntaxError[@char {0}: {1}]"". format ( self. pos, ""Bad token."" ) ) if best_pat in self. ignore : self. pos += best_pat_len continue token = ( best_pat, self. source [ self. pos : self. pos + best_pat_len ], self. pos, self. pos + best_pat_len, ) self. pos = token [ - 1 ] self. tokens. append ( token )",False,m,m is not None,0.7003533840179443 1648,"def get_version ( self ) : fh = self. 
_data_file ( ADJ ) for line in fh : match = re. search ( r""WordNet (\d+\.\d+) Copyright"", line ) if : version = match. group ( 1 ) fh. seek ( 0 ) return version",False,match is not None,match,0.6580902934074402 1649,"def get_recipe_env ( self, arch = None, with_flags_in_cc = True ) : env = super ( PythonRecipe, self ). get_recipe_env ( arch, with_flags_in_cc ) env [ ""PYTHONNOUSERSITE"" ] = ""1"" env [ ""LANG"" ] = ""en_GB.UTF-8"" if not self. call_hostpython_via_targetpython : python_name = self. ctx. python_recipe. name env [ ""CFLAGS"" ] += "" -I{}"". format ( self. ctx. python_recipe. include_root ( arch. arch ) ) env [ ""LDFLAGS"" ] += "" -L{} -lpython{}"". format ( self. ctx. python_recipe. link_root ( arch. arch ), self. ctx. python_recipe. major_minor_version_string, ) if : env [ ""LDFLAGS"" ] += ""m"" hppath = [ ] hppath. append ( join ( dirname ( self. hostpython_location ), ""Lib"" ) ) hppath. append ( join ( hppath [ 0 ], ""site-packages"" ) ) builddir = join ( dirname ( self. hostpython_location ), ""build"" ) if exists ( builddir ) : hppath += [ join ( builddir, d ) for d in listdir ( builddir ) if isdir ( join ( builddir, d ) ) ] if len ( hppath ) > 0 : if ""PYTHONPATH"" in env : <",False,python_name == 'python3',"hasattr(self, 'hostpython_location')",0.6552754044532776 1650,"def on_accounts ( accounts, owner ) : log. debug ( ""Got Accounts"" ) selected_iter = None for account in accounts : acc_iter = self. accounts. append ( ) self. accounts. set_value ( acc_iter, 0, account [ ""username"" ] ) if : selected_iter = acc_iter self. builder. get_object ( ""OwnerCombobox"" ). set_active_iter ( selected_iter )",False,account['username'] == owner,owner,0.661219596862793 1651,"def _get_columns_text ( self, context : Context, candidate : Candidate ) -> typing. Tuple [ str, Highlights ] : texts : typing. List [ str ] = [ ] variable_texts : typing. List [ str ] = [ ] ret_highlights : typing. List [ typing. Tuple [ str, int, int ] ] = [ ] start = 0 for column in self. _columns : if self. _ns > 0 : column. start = start if column. is_stop_variable : if variable_texts : variable_texts. append ( """" ) ( text, highlights ) = column. get_with_variable_text ( context, "" "". join ( variable_texts ), candidate ) texts. append ( text ) ret_highlights += highlights variable_texts = [ ] else : if column. has_get_with_highlights : ( text, highlights ) = column. get_with_highlights ( context, candidate ) ret_highlights += highlights else : text = column. get ( context, candidate ) if column. is_start_variable or column. is_within_variable : if : variable_texts. append ( text ) else : texts. append ( text ) module. success ( ""waiting for a connection from the DLL..."" ) for x in xrange ( timeout ) : c = has_proc_migrated ( module. client, pid ) if : module. success ( ""got a connection from migrated DLL!"" ) c. pupsrv. move_id ( c, module. client ) time. sleep ( 0.5 ) try : module. success ( ""exiting old connection"" ) module. client. conn. exit ( ) module. success ( ""exited old connection"" ) except Exception : pass break time. sleep ( 1 )",True,c,c,0.6910111308097839 1653,"def get_first_film ( soup, section, year = None, session = None ) : tag_part = SectionsParts [ section ] tag = None headers = soup. find ( ""div"", ""search-result"" ). find_all ( ""h2"" ) for header in headers : if tag_part in header. text : tag = header break if not tag : return url = None url = SITE_DOMAIN + tag. findNext ( ""ul"" ). find ( ""li"" ). div. a. get ( ""href"" ) for t in tag. findNext ( ""ul"" ). 
findAll ( ""li"" ) : if isinstance ( t, NavigableString ) or not t. div : continue if : url = SITE_DOMAIN + t. div. a. get ( ""href"" ) break return Film. from_url ( url, session = session )",False,str(year) in t.div.a.string,url is None,0.6508709192276001 1654,"def update_topic_attr_as_not ( modeladmin, request, queryset, attr ) : for topic in queryset : if : topic. sticky = not topic. sticky elif attr == ""closed"" : topic. closed = not topic. closed elif attr == ""hidden"" : topic. hidden = not topic. hidden topic. save ( )",False,attr == 'sticky',attr == 'stst',0.6646889448165894 1655,"def visit_Constant ( self, node ) : value = node. value type_name = _const_node_type_names. get ( type ( value ) ) if type_name is None : for cls, name in _const_node_type_names. items ( ) : if : type_name = name break if type_name is not None : method = ""visit_"" + type_name try : visitor = getattr ( self, method ) except AttributeError : pass else : import warnings warnings. warn ( f""{method} is deprecated; add visit_Constant"", PendingDeprecationWarning, 2, ) return visitor ( node ) return self. generic_visit ( node )",False,"isinstance(value, cls)",cls == node.type,0.6539881825447083 1656,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 8 : self. set_mime_type ( d. getVarInt32 ( ) ) continue if : self. set_quality ( d. getVarInt32 ( ) ) continue if tt == 0 : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 16,tt == 8,0.6876143217086792 1657,"def search_deep_keys ( search_text, arm_dict, path ) : """"""Search deep for keys and get their values"""""" keys = [ ] if isinstance ( arm_dict, dict ) : for key in arm_dict : pathprop = path [ : ] pathprop. append ( key ) if : pathprop. append ( arm_dict [ key ] ) keys. append ( pathprop ) pathprop = pathprop [ : - 1 ] if isinstance ( arm_dict [ key ], dict ) : keys. extend ( ContextParser. search_deep_keys ( search_text, arm_dict [ key ], pathprop ) ) elif isinstance ( arm_dict [ key ], list ) : for index, item in enumerate ( arm_dict [ key ] ) : pathproparr = pathprop [ : ] pathproparr. append ( index ) keys. extend ( ContextParser. search_deep_keys ( search_text, item, pathproparr ) ) elif isinstance ( arm_dict, list ) : for index, item in enumerate ( arm_dict ) : pathprop = path [ : ] ",False,key == search_text,"isinstance(arm_dict[key], dict)",0.6612805128097534 1658,"def expr ( self, arg_type ) : pass self. prec0_expr ( arg_type ) while True : if self. LA ( 1 ) >= EQ and self. LA ( 1 ) <= LE : pass la1 = self. LA ( 1 ) if False : pass elif la1 and la1 in [ EQ ] : pass self. match ( EQ ) op = struct. pack ( ""B"", ptgEQ ) elif la1 and la1 in [ NE ] : pass self. match ( NE ) op = struct. pack ( ""B"", ptgNE ) elif la1 and la1 in [ GT ] : pass self. match ( GT ) op = struct. pack ( ""B"", ptgGT ) elif la1 and la1 in [ LT ] : pass self. match ( LT ) op = struct. pack ( ""B"", ptgLT ) elif : pass self. match ( GE ) op = struct. pack ( ""B"", ptgGE ) elif la1 and la1 in [ LE ] : <",False,la1 and la1 in [GE],ge in [GE],0.6673319339752197 1659,"def _load_config ( ) : from thirdparty. configobj import ConfigObj from thirdparty. validate import Validator rcfileloc = osp. join ( osp. expanduser ( ""~/.cgtrc"" ) ) specfilename = osp. join ( get_cgt_src_root ( ), ""cgtrc_spec.ini"" ) config = ConfigObj ( rcfileloc, configspec = specfilename ) val = Validator ( ) test = config. validate ( val, preserve_errors = True ) if test is not True : for ( k, v ) in test. items ( ) : if v is not True : utils. 
error ( ""%s: %s in %s"" % ( k, v. message, rcfileloc ) ) raise ValueError envflags = os. getenv ( ""CGT_FLAGS"" ) if envflags : pairs = envflags. split ( "","" ) for pair in pairs : lhs, rhs = pair. split ( ""="" ) assert lhs in config, ""Unrecognized config option %s provided"" % lhs oldrhs = config [ lhs ] config [ lhs ] = rhs assert isinstance ( rhs, ( str, bool, int, float, list ) ), ""You set %s=%s but rhs is invalid"" % ( lhs, rhs ) if isinstance ( oldrhs, str ) : pass elif isinstance ( oldrhs, bool ) : config [ lhs ] = config. as_bool ( lhs ) elif isinstance ( oldrhs, int ) : args : CommandLineArguments, loopdev : str ) -> List [ str ] : params = [ f""--bind-ro={loopdev}"", f""--bind-ro=/dev/block"", f""--bind-ro=/dev/disk"", f""--property=DeviceAllow={loopdev}"", ] for partno in ( args. esp_partno, args. bios_partno, args. root_partno, args. xbootldr_partno, ) : if partno is not None : p = partition ( loopdev, partno ) if : params += [ f""--bind-ro={p}"", f""--property=DeviceAllow={p}"" ] return params",False,os.path.exists(p),p is not None,0.6503428220748901 1661,"def handle ( self, filename, fields = """", delimiter = b"","", ** options ) : fields = [ field. strip ( ) for field in fields. split ( "","" ) ] with open ( filename, ""rb"" ) as f : for row in UnicodeReader ( f, delimiter = delimiter ) : data = dict ( zip ( fields, row ) ) try : dev_id = int ( data [ ""id"" ] ) except ( ValueError, KeyError ) : continue if not dev_id : continue try : dev = Device. objects. get ( id = dev_id ) except Device. DoesNotExist : sys. stderr. write ( ""Device with id=%r doesn't exist!\n"" % dev_id ) continue for field, value in data. iteritems ( ) : if field in ( ""id"", """" ) : continue if : value = None if value is None and field in ( ""remarks"", ) : value = """" print ( ""%r.%s = %r"" % ( dev, field, value ) ) setattr ( dev, field, value ) dev. save ( priority = 50 )",False,"value in ('None', '')","field in (undefined, False)",0.6525683403015137 1662,"def serve ( q : Q ) : if q. client. plot_added : example = q. page [ ""example"" ] if : example. title = ( ""Plot (Log Scale)"", ) example. specification = spec_log_scale example. commands = [ linear_scale_command ] else : example. title = ( ""Plot (Linear Scale)"", ) example. specification = spec_linear_scale example. commands = [ log_scale_command ] else : q. page [ ""example"" ] = ui. vega_card ( box = ""1 1 2 4"", title = ""Plot (Linear Scale)"", specification = spec_linear_scale, data = plot_data, commands = [ log_scale_command ], ) q. client. plot_added = True await q. page. save ( )",False,q.args.to_log_scale,q.client.plot_added,0.6536799073219299 1663,"def get_files ( d ) : res = [ ] for p in glob. glob ( os. path. join ( d, ""*"" ) ) : if not p : continue ( pth, fname ) = os. path. split ( p ) if fname == ""output"" : continue if : continue if fname [ - 4 : ] == "".pyc"" : continue if os. path. isdir ( p ) : get_dir ( p ) else : res. append ( p ) return res",False,fname == 'PureMVC_Python_1_0',fname[-4:] == '.pyc',0.6494544148445129 1664,"def get ( self ) : if ""twitter"" in self. get_user_social ( self. current_user. id ) : enabled = self. get_argument ( ""enabled"", ""a"" ) if : self. redirect ( ""/account/setting"" ) return q = self. db. query ( Social ). filter_by ( service = ""twitter"" ) t = q. filter_by ( user_id = self. current_user. id ). first ( ) t. enabled = enabled self. db. add ( t ) self. db. commit ( ) self. cache. delete ( ""social:%s"" % self. current_user. id ) self. 
redirect ( ""/account/setting"" ) return if self. get_argument ( ""oauth_token"", None ) : self. get_authenticated_user ( self. _on_auth ) return self. authorize_redirect ( )",False,"enabled not in ('y', 'n')",enabled,0.6513879895210266 1665,"def _get_nlu_target_format ( export_path : Text ) -> Text : guessed_format = loading. guess_format ( export_path ) if guessed_format not in { MARKDOWN, RASA, RASA_YAML } : if rasa. shared. data. is_likely_json_file ( export_path ) : guessed_format = RASA elif : guessed_format = MARKDOWN elif rasa. shared. data. is_likely_yaml_file ( export_path ) : guessed_format = RASA_YAML return guessed_format",True,rasa.shared.data.is_likely_markdown_file(export_path),rasa.shared.data.is_likely_markdown_file(export_path),0.6502915024757385 1666,"def _cmd_add ( self, ctx, command ) : table_name = command. args [ 0 ] record_id = command. args [ 1 ] column = command. args [ 2 ] column_key_value_strings = [ ] for value in command. args [ 3 : ] : if : column_key_value_strings. append ( ""%s:%s"" % ( column, value ) ) else : column_key_value_strings. append ( ""%s=%s"" % ( column, value ) ) table_schema = self. schema. tables [ table_name ] column_values = [ ctx. parse_column_key_value ( table_schema, column_key_value_string ) for column_key_value_string in column_key_value_strings ] self. _add ( ctx, table_name, record_id, column_values )",False,'=' in value,"isinstance(value, str)",0.6678308248519897 1667,"def callback ( actions, form, tablename = None ) : if actions : if : actions = actions. get ( tablename, [ ] ) if not isinstance ( actions, ( list, tuple ) ) : actions = [ actions ] [ action ( form ) for action in actions ]",False,"tablename and isinstance(actions, dict)",tabname,0.6579087972640991 1668,"def processMovie ( self, atom ) : for field in atom : if : self. processTrack ( field [ ""track"" ] ) if ""movie_hdr"" in field : self. processMovieHeader ( field [ ""movie_hdr"" ] )",True,'track' in field,'track' in field,0.6639220118522644 1669,"def replaceRackPosition ( self, rackPosition, mod ) : listPositions = [ ] for currPos in range ( len ( self ) ) : currMod = self [ currPos ] if currMod. slot == mod. slot : listPositions. append ( currPos ) listPositions. sort ( ) try : modListPosition = listPositions [ rackPosition ] except IndexError : self. appendIgnoreEmpty ( mod ) else : oldMod = self [ modListPosition ] if mod. isEmpty : self. __toDummy ( modListPosition ) else : self. __toModule ( modListPosition, mod ) if mod. isInvalid : if : self. __toDummy ( modListPosition ) else : self. __toModule ( modListPosition, oldMod )",True,oldMod.isEmpty,oldMod.isEmpty,0.6644895076751709 1670,"def _check_load_coco_bbox ( coco, entry, min_object_area = 0, use_crowd = False ) : """"""Check and load ground-truth labels"""""" entry_id = entry [ ""id"" ] entry_id = [ entry_id ] if not isinstance ( entry_id, ( list, tuple ) ) else entry_id ann_ids = coco. getAnnIds ( imgIds = entry_id, iscrowd = None ) objs = coco. loadAnns ( ann_ids ) valid_objs = [ ] width = entry [ ""width"" ] height = entry [ ""height"" ] for obj in objs : if obj [ ""area"" ] < min_object_area : continue if obj. get ( ""ignore"", 0 ) == 1 : continue is_crowd = obj. get ( ""iscrowd"", 0 ) if not use_crowd and is_crowd : continue xmin, ymin, xmax, ymax = bbox_clip_xyxy ( bbox_xywh_to_xyxy ( obj [ ""bbox"" ] ), width, height ) if : cname = coco. loadCats ( obj [ ""category_id"" ] ) [ 0 ] [ ""name"" ] valid_objs. 
append ( { ""xmin"" : xmin / width, ""ymin"" : ymin / height, ""xmax"" : xmax / width, ",False,obj['area'] > 0 and xmax > xmin and (ymax > ymin),obj.has_key('category_id'),0.6565530300140381 1671,"def table_entry ( mode1, bind_type1, mode2, bind_type2 ) : with sock ( mode1 ) as sock1 : bind ( sock1, bind_type1 ) try : with sock ( mode2 ) as sock2 : bind ( sock2, bind_type2 ) except OSError as exc : if : return ""INUSE"" elif exc. winerror == errno. WSAEACCES : return ""ACCESS"" raise else : return ""Success""",False,exc.winerror == errno.WSAEADDRINUSE,exc.winerror == errno.ENOENT,0.6603638529777527 1672,"def pretty ( self, n, comment = True ) : if isinstance ( n, ( str, bytes, list, tuple, dict ) ) : r = repr ( n ) if not comment : r = r. replace ( ""*/"", r""\x2a/"" ) return r if not isinstance ( n, six. integer_types ) : return n if isinstance ( n, constants. Constant ) : if : return ""%s /* %s */"" % ( n, self. pretty ( int ( n ) ) ) else : return ""%s (%s)"" % ( n, self. pretty ( int ( n ) ) ) elif abs ( n ) < 10 : return str ( n ) else : return hex ( n )",False,comment,n < 0,0.6837930679321289 1673,"def __repr__ ( self ) : r_repr = [ ] if len ( self ) > 0 : nb_col = len ( self [ 0 ] ) if : r_repr. append ( self. headers ) else : r_repr. append ( [ ""Field"" ] * nb_col ) for r in self : r1_repr = [ ] for r1 in r : if isinstance ( r1, bytes ) : try : r1 = r1. decode ( ""utf-8"" ) except UnicodeDecodeError : pass r1 = repr ( r1 ) r1_repr. append ( r1 ) r_repr. append ( r1_repr ) cs = list ( zip ( * r_repr ) ) c_ws = [ max ( len ( value ) for value in c ) for c in cs ] line = [ ""-"" * w for w in c_ws ] r_repr. insert ( 1, line ) r_repr. append ( line ) format = "" | "". join ( [ ""%%-%ds"" % w for w in c_ws ] ) result = [ ( format % tuple ( r ) ) for r in r_repr ] return ""\n"". join ( result )",False,self.headers is not None and len(self.headers) == nb_col,nb_col > 0,0.6543200016021729 1674,"def __init__ ( self, filename ) : DatasetArrays. __init__ ( self, filename ) self. filename = filename self. path = filename votable = astropy. io. votable. parse ( self. filename ) self. first_table = votable. get_first_table ( ) self. description = self. first_table. description for field in self. first_table. fields : name = field. name data = self. first_table. array [ name ] type = self. first_table. array [ name ]. dtype clean_name = _python_save_name ( name, self. columns. keys ( ) ) if field. ucd : self. ucds [ clean_name ] = field. ucd if field. unit : unit = _try_unit ( field. unit ) if : self. units [ clean_name ] = unit if field. description : self. descriptions [ clean_name ] = field. description if type. kind in ""fiubSU"" : self. add_column ( clean_name, data ) if type. kind == ""O"" : print ( ""column %r is of unsupported object type, will try to convert it to string"" % ( name, ) ) try : data = data. astype ( ""S"" ) self. add_column ( name, data ) except Exception as e : self, img_feats : Tensor, question_feats : Tensor, actions_in : Tensor, action_lengths : Tensor, hidden : bool = False, ) -> Union [ Tuple [ Tensor, Tensor ], Tuple [ Tensor, Tensor, Tensor ] ] : T = False if self. image_input is True : N, T, _ = img_feats. size ( ) input_feats = img_feats if self. question_input is True : N, D = question_feats. size ( ) question_feats = question_feats. view ( N, 1, D ) if T is False : T = actions_in. size ( 1 ) question_feats = question_feats. repeat ( 1, T, 1 ) if : input_feats = question_feats else : input_feats = torch. cat ( [ input_feats, question_feats ], 2 ) if self. 
action_input is True : if : input_feats = self. action_embed ( actions_in ) else : input_feats = torch. cat ( [ input_feats, self. action_embed ( actions_in. long ( ) ) ], 2 ) packed_input_feats = pack_padded_sequence ( input_feats, action_lengths, batch_first = True ) packed_output, hidden = self. rnn ( packed_input_feats ) rnn_output, _ = pad_packed_sequence ( packed_output, batch_first = True ) output = self. decoder ( rnn_output. contiguous (",False,len(input_feats) == 0,hidden,0.6537646055221558 1676,"def build ( opt ) : dpath = os. path. join ( opt [ ""datapath"" ], ""QA-ZRE"" ) version = None if not build_data. built ( dpath, version_string = version ) : print ( ""[building data: "" + dpath + ""]"" ) if : build_data. remove_dir ( dpath ) build_data. make_dir ( dpath ) for downloadable_file in RESOURCES : downloadable_file. download_file ( dpath ) build_data. mark_done ( dpath, version_string = version )",False,build_data.built(dpath),dpath in RESOURCES,0.6493850350379944 1677,"def _executables_in_windows ( path ) : if not os. path. isdir ( path ) : return extensions = builtins. __xonsh__. env [ ""PATHEXT"" ] if PYTHON_VERSION_INFO < ( 3, 5, 0 ) : for fname in os. listdir ( path ) : fpath = os. path. join ( path, fname ) if : base_name, ext = os. path. splitext ( fname ) if ext. upper ( ) in extensions : yield fname else : for x in scandir ( path ) : try : is_file = x. is_file ( ) except OSError : continue if is_file : fname = x. name else : continue base_name, ext = os. path. splitext ( fname ) if ext. upper ( ) in extensions : yield fname",False,os.path.exists(fpath) and (not os.path.isdir(fpath)),os.path.isdir(fpath),0.646652340888977 1678,"def printStatus ( self ) : try : names = sorted ( self. buildRequests. keys ( ) ) for n in names : if n not in self. outstanding : code, text = self. results [ n ] t = builder. Results [ code ] if text : t += "" (%s)"" % "" "". join ( text ) elif self. builds [ n ] : t = self. currentStep [ n ] or ""building"" if : t += "" [ETA %ds]"" % ( self. ETA [ n ] - now ( ) ) else : t = ""no build"" self. announce ( ""%s: %s"" % ( n, t ) ) self. announce ( """" ) except Exception : log. err ( None, ""printing status"" )",True,self.ETA[n],self.ETA[n],0.6654027104377747 1679,"def main ( self ) -> None : if self. _pool is None : self. _pool = await create_pool ( self. redis_settings ) logger. info ( ""Starting worker for %d functions: %s"", len ( self. functions ), "", "". join ( self. functions ), ) await log_redis_info ( self. pool, logger. info ) self. ctx [ ""redis"" ] = self. pool if self. on_startup : await self. on_startup ( self. ctx ) async for _ in poll ( self. poll_delay_s ) : await self. _poll_iteration ( ) if : if 0 <= self. max_burst_jobs <= self. _jobs_started ( ) : await asyncio. gather ( * self. tasks ) return None queued_jobs = await self. pool. zcard ( self. queue_name ) if queued_jobs == 0 : await asyncio. gather ( * self. tasks ) return None",False,self.burst,self.max_burst_jobs > 0,0.6701306104660034 1680,"def endElement ( self, name, value, connection ) : if name == ""CreationTime"" : try : self. creation_time = datetime. strptime ( value, ""%Y-%m-%dT%H:%M:%SZ"" ) except ValueError : self. creation_time = datetime. strptime ( value, ""%Y-%m-%dT%H:%M:%S.%fZ"" ) elif name == ""Description"" : self. description = value elif name == ""DisableRollback"" : if : self. disable_rollback = True else : self. disable_rollback = False elif name == ""StackId"" : self. stack_id = value elif name == ""StackName"" : self. 
stack_name = value elif name == ""StackStatus"" : self. stack_status = value elif name == ""StackStatusReason"" : self. stack_status_reason = value elif name == ""TimeoutInMinutes"" : self. timeout_in_minutes = int ( value ) elif name == ""member"" : pass else : setattr ( self, name, value )",False,str(value).lower() == 'true',name == 'Enablerollback',0.6523566246032715 1681,"def tz_from_string ( _option, _opt_str, value, parser ) : """"""Stores a tzinfo object from a string"""""" if value is not None : if : valarray = [ value [ i : i + 2 ] for i in range ( 1, len ( value ), 2 ) ] multipliers = [ 3600, 60 ] offset = 0 for i in range ( min ( len ( valarray ), len ( multipliers ) ) ) : offset += int ( valarray [ i ] ) * multipliers [ i ] if value [ 0 ] == ""-"" : offset = - offset timezone = OffsetTzInfo ( offset = offset ) else : if tz_pytz : try : timezone = pytz. timezone ( value ) except pytz. UnknownTimeZoneError : debug. error ( ""Unknown display timezone specified"" ) else : if not hasattr ( time, ""tzset"" ) : debug. error ( ""This operating system doesn't support tzset, please either specify an offset (eg. +1000) or install pytz"" ) timezone = value parser. values. tz = timezone",False,"value[0] in ['+', '-']",parser.values.tz is None,0.6626071929931641 1682,"def extract ( self, url, ** kwargs ) : if ""163.fm"" in url : url = get_location ( url ) if ""music.163.com"" in url : self. need_download = False self. netease_cloud_music_download ( url, ** kwargs ) else : html = get_content ( url ) title = match1 ( html, ""movieDescription='([^']+)'"" ) or match1 ( html, ""(.+)"" ) if : title = title [ 1 : ] src = match1 ( html, r' html, r' ) if src : url = src _, ext, size = url_info ( src ) else : url = ( match1 ( html, r'[""\'](.+)-list.m3u8[""\']' ) or match1 ( html, r'[""\'](.+).m3u8[""\']' ) ) + "".mp4"" _, _, size = url_info ( url ) ext = ""mp4"" return { ""urls"" : [ url ], ""title"" : title, ""file_format"" : ext, ""size"" : size, }",False,title[0] == '',title,0.6579124331474304 1683,"def DecodeRepeatedField ( buffer, pos, end, message, field_dict ) : value = field_dict. get ( key ) if value is None : value = field_dict. setdefault ( key, new_default ( message ) ) while 1 : ( element, new_pos ) = _DecodeSignedVarint32 ( buffer, pos ) if element in enum_type. values_by_number : value. append ( element ) else : if : message. _unknown_fields = [ ] message. _unknown_fields. append ( ( tag_bytes, buffer [ pos : new_pos ] ) ) pos = new_pos + tag_len if buffer [ new_pos : pos ]!= tag_bytes or new_pos >= end : if new_pos > end : raise _DecodeError ( ""Truncated message."" ) return new_pos",False,not message._unknown_fields,new_pos > 0 and tag_bytes in buffer,0.6551991701126099 1684,"def test_connection_grouping ( self ) : """"""Make sure group_connections returns list of (lang, connections)"""""" connections = ( self. create_lang_connection ( ""1000000000"", ""en"" ), self. create_lang_connection ( ""1000000001"", ""en"" ), self. create_lang_connection ( ""1000000002"", ""en"" ), self. create_lang_connection ( ""1000000003"", ""es"" ), self. create_lang_connection ( ""1000000004"", ""es"" ), self. create_lang_connection ( ""1000000005"", ""fr"" ), ) grouped_conns = list ( trans_helpers. group_connections ( connections ) ) for lang, conns in grouped_conns : if : self. assertEqual ( 3, len ( conns ) ) elif lang == ""es"" : self. assertEqual ( 2, len ( conns ) ) elif lang == ""fr"" : self. 
assertEqual ( 1, len ( conns ) )",True,lang == 'en',lang == 'en',0.6592690944671631 1685,"def process ( self, In, display = True ) : if display and self. plot is not None : items = set ( ) for name, vals in In. items ( ) : if : continue if type ( vals ) is not list : vals = [ vals ] for val in vals : vid = id ( val ) if vid in self. items and self. items [ vid ]. scene ( ) is self. plot. scene ( ) : items. add ( vid ) else : if isinstance ( val, QtGui. QGraphicsItem ) : self. plot. addItem ( val ) item = val else : item = self. plot. plot ( val ) self. items [ vid ] = item items. add ( vid ) for vid in list ( self. items. keys ( ) ) : if vid not in items : <",False,vals is None,name in items,0.6844977140426636 1686,"def set_history ( self, history ) : with self. update_lock : self. draw_times = [ ( entry [ ""timestamp"" ], entry [ ""drift"" ] ) for entry in history if : ] if self. draw_times : drifts = [ entry [ 1 ] for entry in self. draw_times ] self. median_drift = round ( statistics. median ( drifts ), 5 ) self. mean_drift = round ( statistics. mean ( drifts ), 5 ) if len ( drifts ) > 1 : self. walk_interval_target = round ( self. draw_times [ - 1 ] [ 0 ] - self. draw_times [ - 2 ] [ 0 ] - self. draw_times [ - 1 ] [ 1 ], 4, ) else : self. walk_interval_target = ""?"" else : self. median_drift = ""?"" self. mean_drift = ""?"" self. walk_interval_target = ""?""",False,entry['timestamp'] > time.time() - 11.0,self.print_times,0.6541649103164673 1687,"def parse_struc ( img ) : nbs = neighbors ( img. shape ) acc = np. cumprod ( ( 1, ) + img. shape [ : : - 1 ] [ : - 1 ] ) [ : : - 1 ] img = img. ravel ( ) pts = np. array ( np. where ( img == 2 ) ) [ 0 ] buf = np. zeros ( 131072, dtype = np. int64 ) num = 10 nodes = [ ] for p in pts : if img [ p ] == 2 : nds = fill ( img, p, num, nbs, acc, buf ) num += 1 nodes. append ( nds ) edges = [ ] for p in pts : for dp in nbs : if : edge = trace ( img, p + dp, nbs, acc, buf ) edges. append ( edge ) return nodes, edges",False,img[p + dp] == 1,img[p] == 1,0.6583623886108398 1688,"def parse_config_v2 ( self, skip_broken = True ) : for command_type, command in self. _config. items ( ) : if command_type in [ ""version"", ""renderer"" ] : continue try : handler = self. command_handlers [ command_type ] except KeyError as e : raise RuntimeError ( ""No handler found for command '%s'"" % command_type ) from e try : handler ( self, command ) self. _v2_common ( command ) except InvalidCommand : if : raise else : LOG. warning ( ""Skipping invalid command: %s"", command, exc_info = True ) LOG. debug ( self. dump_network_state ( ) )",False,not skip_broken,skip_broken,0.6610915660858154 1689,"def config_dict ( filename ) : """"""Convert content of config-file into dictionary."""""" with open ( filename, ""r"" ) as f : cfglines = f. readlines ( ) cfgdict = { } for line in cfglines : line = line. strip ( ) if not line or line. startswith ( ""#"" ) : continue try : key, value = line. split ( ""="" ) except ValueError : print ( ""Bad line in config-file %s:\n%s"" % ( filename, line ) ) continue key = key. strip ( ) value = value. strip ( ) if value in [ ""True"", ""False"", ""None"", ""''"", '""""' ] : value = eval ( value ) else : try : if : value = float ( value ) else : value = int ( value ) except ValueError : pass cfgdict [ key ] = value return cfgdict",False,'.' in value,value in [False],0.6701300740242004 1690,"def run ( self ) : while True : context_id_list_tuple = self. _inflated_addresses. 
get ( block = True ) if context_id_list_tuple is _SHUTDOWN_SENTINEL : break c_id, inflated_address_list = context_id_list_tuple inflated_value_map = dict ( inflated_address_list ) if : self. _contexts [ c_id ]. set_from_tree ( inflated_value_map )",True,c_id in self._contexts,c_id in self._contexts,0.659544825553894 1691,"def _handlewebError ( self, msg ) : print ( """" ) print ( "" ERROR: %s"" % msg ) if not self. interactive : raise self. failureException ( msg ) p = "" Show: [B]ody [H]eaders [S]tatus [U]RL; [I]gnore, [R]aise, or sys.e[X]it >> "" sys. stdout. write ( p ) sys. stdout. flush ( ) while True : i = getchar ( ). upper ( ) if not isinstance ( i, type ( """" ) ) : i = i. decode ( ""ascii"" ) if i not in ""BHSUIRX"" : continue print ( i. upper ( ) ) if i == ""B"" : for x, line in enumerate ( self. body. splitlines ( ) ) : if ( x + 1 ) % self. console_height == 0 : sys. stdout. write ( ""<-- More -->\r"" ) m = getchar ( ). lower ( ) sys. stdout. write ( "" \r"" ) if m == ""q"" : break print ( line ) elif i == ""H"" : pprint. pprint ( self. headers ) elif i == ""S"" : ",False,i == 'R',self.console_height == 0,0.6609758138656616 1692,"def app_check ( self ) : """"""Check if a target app has been selected, otherwise launch a wizard. Then retrieve its metadata."""""" app = self. _global_options [ ""app"" ] if not app : self. printer. info ( ""Target app not selected. Launching wizard..."" ) self. device. _list_apps ( self. _global_options [ ""hide_system_apps"" ] ) app = self. device. select_target_app ( ) self. _global_options [ ""app"" ] = app if app is None : self. printer. error ( ""Error selecting app. Please retry."" ) return None self. printer. notify ( ""Target app: %s"" % app ) if not self. APP_METADATA or self. APP_METADATA [ ""bundle_id"" ]!= app : self. printer. info ( ""Retrieving app's metadata..."" ) if : self. device. _list_apps ( self. _global_options [ ""hide_system_apps"" ] ) self. APP_METADATA = Framework. APP_METADATA = self. device. app. get_metadata ( app ) return app",False,self.device._applist is None,self.device.app is not None,0.6613767743110657 1693,"def _limit_value ( key, value, config ) : if config [ key ]. get ( ""upper_limit"" ) : limit = config [ key ] [ ""upper_limit"" ] if isinstance ( value, datetime ) and isinstance ( limit, timedelta ) : if config [ key ] [ ""inverse"" ] is True : if ( datetime. now ( ) - limit ) > value : value = datetime. now ( ) - limit else : if ( datetime. now ( ) + limit ) < value : value = datetime. now ( ) + limit elif : value = limit return value",True,value > limit,value > limit,0.6721374988555908 1694,"def do_action ( self, context : UserContext, action_name : str, targets : Candidates ) -> bool : action = self. _get_action_targets ( context, action_name, targets ) if not action : return True for target in targets : source = self. _current_sources [ int ( target [ ""source_index"" ] ) ] target [ ""source_context"" ] = ( { k : v for k, v in source. context. items ( ) if k. startswith ( ""__"" ) } if : else { } ) context [ ""targets"" ] = targets index = action [ ""kind"" ] + "",source/"" + action [ ""source"" ] new_context = ( action [ ""func"" ] ( context ) if action [ ""func"" ] else self. _vim. call ( ""denite#custom#_call_action"", index, action [ ""name"" ], context ) ) if new_context : context. update ( new_context ) return False",False,source.is_public_context,target['targets'],0.6525206565856934 1695,"def test_flow_register_uses_default_storage ( self, monkeypatch, storage ) : monkeypatch. 
setattr ( ""prefect.Client"", MagicMock ( ) ) f = Flow ( name = ""test"" ) assert f. storage is None with set_temporary_config ( { ""flows.defaults.storage.default_class"" : storage } ) : if : f. register ( ""My-project"", registry_url = ""FOO"", image_name = ""BAR"", image_tag = ""BIG"", no_url = True, ) else : f. register ( ""My-project"" ) assert isinstance ( f. storage, from_qualified_name ( storage ) ) assert f. result == from_qualified_name ( storage ) ( ). result",False,'Docker' in storage,storage,0.6646945476531982 1696,"def _create_profile_cache ( ssg_root ) : profile_cache = { } product_list = [ ""debian9"", ""debian10"", ""fedora"", ""ol7"", ""opensuse"", ""rhel7"", ""sle12"", ""ubuntu1604"", ""ubuntu1804"", ""ubuntu2004"", ""wrlinux"", ] for product in product_list : found_obj_name = False prod_profiles_dir = os. path. join ( ssg_root, product, ""profiles"" ) for _, _, files in os. walk ( prod_profiles_dir ) : for filename in files : profile_path = os. path. join ( prod_profiles_dir, filename ) parsed_profile = yaml. load ( open ( profile_path, ""r"" ) ) for _obj in parsed_profile [ ""selections"" ] : obj = _obj if ""="" in obj : obj = _obj [ : _obj. index ( ""="" ) ] if not obj [ 0 ]. isalpha ( ) : obj = obj [ 1 : ] if : <",False,obj not in profile_cache,found_obj_name,0.6528384685516357 1697,"def test_memory_maps ( self ) : p = psutil. Process ( ) maps = p. memory_maps ( ) paths = [ x for x in maps ] self. assertEqual ( len ( paths ), len ( set ( paths ) ) ) ext_maps = p. memory_maps ( grouped = False ) for nt in maps : if not nt. path. startswith ( ""["" ) : assert os. path. isabs ( nt. path ), nt. path if POSIX : assert os. path. exists ( nt. path ), nt. path else : if ""64"" not in os. path. basename ( nt. path ) : assert os. path. exists ( nt. path ), nt. path for nt in ext_maps : for fname in nt. _fields : value = getattr ( nt, fname ) if fname == ""path"" : continue elif : assert value, value else : self. assertIsInstance ( value, ( int, long ) ) assert value >= 0, value",False,"fname in ('addr', 'perms')",fname == 'instance',0.654965877532959 1698,"def load_sys ( paths ) : src, tgt, hypos, log_probs = { }, { }, { }, { } for path in paths : with open ( path ) as f : for line in f : line = line. rstrip ( ) if line. startswith ( ( ""S-"", ""T-"", ""D-"" ) ) : i = int ( line [ line. find ( ""-"" ) + 1 : line. find ( ""\t"" ) ] ) if line. startswith ( ""S-"" ) : src [ i ] = line. split ( ""\t"" ) [ 1 ] if line. startswith ( ""T-"" ) : tgt [ i ] = line. split ( ""\t"" ) [ 1 ] if : if i not in hypos : hypos [ i ] = [ ] log_probs [ i ] = [ ] hypos [ i ]. append ( line. split ( ""\t"" ) [ 2 ] ) log_probs [ i ]. append ( float ( line. split ( ""\t"" ) [ 1 ] ) ) return dictolist ( src ), dictolist",False,line.startswith('D-'),i >= 0,0.6493213176727295 1699,"def shutdown ( sups ) : global SHOULD_STOP SHOULD_STOP = True LOG. warn ( ""Supervisor shutting down!"" ) for pid in CHILD_PIDS : try : os. kill ( pid, signal. SIGINT ) except OSError : pass LOG. warn ( ""Waiting for children to exit for %d seconds..."" % WAIT_FOR_DEATH ) t = time. time ( ) still_alive = False while time. time ( ) < t + WAIT_FOR_DEATH : still_alive = False for sup in sups : sup. join ( 0.2 ) still_alive = still_alive or sup. isAlive ( ) if : break if still_alive : LOG. warn ( ""Children have not exited after %d seconds. Killing them with SIGKILL."" % WAIT_FOR_DEATH ) for pid in CHILD_PIDS : try : os. kill ( pid, signal. SIGKILL ) except OSError : pass sys. 
exit ( 1 )",False,not still_alive,SHOULD_STOP,0.6603798866271973 1700,"def post_mortem ( t = None ) : if : t = sys. exc_info ( ) [ 2 ] if : raise ValueError ( ""A valid traceback must be passed if no exception is being handled."" ) p = BPdb ( ) p. reset ( ) p. interaction ( None, t )",True,t is None,t is None,0.6743518114089966 1701,"def filtered_tooltip ( options, filter ) : """"""Returns tooltip for the filter icon if the filter matches one of the filter options"""""" for option in options : if : return ""Showing only %s"" % option [ 0 ] if ( ""daterange"" == option [ 1 ] ) and filter. startswith ( option [ 4 ] ) : return ""Showing only %s"" % option [ 0 ] return """"",False,filter == option[1],'daterange' == option[0] and filter.startswith(option[2]),0.6566956043243408 1702,"def _get_documented_completions ( self, table, startswith = None ) : names = [ ] for key, command in table. items ( ) : if : continue if startswith is not None and not key. startswith ( startswith ) : continue if getattr ( command, ""positional_arg"", False ) : continue names. append ( key ) return names",False,"getattr(command, '_UNDOCUMENTED', False)",command == 'document',0.6609865427017212 1703,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : self. set_app_id ( d. getPrefixedString ( ) ) continue if tt == 16 : self. set_max_rows ( d. getVarInt32 ( ) ) continue if : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 0,tt > 255,0.681990385055542 1704,"def _get_fp ( self, config_file ) : if not os. path. isdir ( static. STARCLUSTER_CFG_DIR ) : os. makedirs ( static. STARCLUSTER_CFG_DIR ) cfg_file = config_file or static. STARCLUSTER_CFG_FILE log. debug ( ""Loading file: %s"" % cfg_file ) if os. path. exists ( cfg_file ) : if : raise exception. ConfigError ( ""config %s exists but is not a regular file"" % cfg_file ) else : raise exception. ConfigNotFound ( ( ""config file %s does not exist\n"" ) % cfg_file, cfg_file, ) return open ( cfg_file )",False,not os.path.isfile(cfg_file),os.path.isfile(cfg_file),0.6476112604141235 1705,"def get_default_queryset ( self ) : qs = [ ] for addon in ADDONS_OAUTH : obj = self. get_addon_settings ( provider = addon, fail_if_absent = False, check_object_permissions = False ) if : qs. append ( obj ) sorted ( qs, key = lambda addon : addon. id, reverse = True ) return qs",True,obj,obj,0.6900901794433594 1706,"def fetch_ssl_details ( paths = None ) : ssl_details = { } ssl_cert_paths = [ ""/var/lib/cloud/data/ssl"", ""/var/lib/cloud/instance/data/ssl"", ] if paths : ssl_cert_paths. extend ( [ os. path. join ( paths. get_ipath_cur ( ""data"" ), ""ssl"" ), os. path. join ( paths. get_cpath ( ""data"" ), ""ssl"" ), ] ) ssl_cert_paths = uniq_merge ( ssl_cert_paths ) ssl_cert_paths = [ d for d in ssl_cert_paths if d and os. path. isdir ( d ) ] cert_file = None for d in ssl_cert_paths : if : cert_file = os. path. join ( d, ""cert.pem"" ) break key_file = None for d in ssl_cert_paths : if os. path. isfile ( os. path. join ( d, ""key.pem"" ) ) : key_file = os. path. join ( d, ""key.pem"" ) break if cert_file and key_file : ssl_details [ ""cert_file"" ] = cert_file ssl_details [ ""key_file"" ] = key_file elif cert_file : ssl_details [ ""cert_file"" ] = cert_file return ssl_details",False,"os.path.isfile(os.path.join(d, 'cert.pem'))",os.path.isfile(d),0.6437021493911743 1707,"def handle ( self, * args, ** options ) : if options [ ""missing"" ] : self. 
verify_references ( options ) if options [ ""delete_missing"" ] : if : msg = ( ""\nThis will delete entries from your database. Are you sure you want to do this?\n\n"" ""Type 'yes' to continue, or 'no' to cancel: "" ) if input ( msg )!= ""yes"" : self. stdout. write ( ""Aborted: Delete missing file entries from database."" ) return self. verify_references ( options ) if options [ ""orphans"" ] : self. verify_storages ( options ) if options [ ""delete_orphans"" ] : if : msg = ( ""\nThis will delete orphaned files from your storage. Are you sure you want to do this?\n\n"" ""Type 'yes' to continue, or 'no' to cancel: "" ) if input ( msg )!= ""yes"" : self. stdout. write ( ""Aborted: Delete orphaned files from storage."" ) return self. verify_storages ( options )",False,options['interactive'],self.delete_from_storage(),0.6622346043586731 1708,"def _HasPrecedence ( tok ) : """"""Whether a binary operation has precedence within its context."""""" node = tok. node ancestor = node. parent. parent while ancestor is not None : predecessor_type = pytree_utils. NodeName ( ancestor ) if : return True if predecessor_type!= ""atom"" : return False ancestor = ancestor. parent",False,"predecessor_type in ['arith_expr', 'term']",predecess_type == 'atom',0.654313325881958 1709,"def build ( self ) : max_matched = 0 for head in self. heads : for chain in self. combine_chain ( head ) : if : max_matched = chain. num_matched self. chains. append ( chain ) for chain in self. chains : chain. weights. append ( chain. num_matched / float ( max_matched or chain. num_matched or 1 ) ) chain. finish ( ) self. chains. sort ( key = lambda chain : chain. weight, reverse = True ) for chain in self. chains : Logr. debug ( ""chain weight: %.02f"", chain. weight ) Logr. debug ( ""\tInfo: %s"", chain. info ) Logr. debug ( ""\tWeights: %s"", chain. weights ) Logr. debug ( ""\tNumber of Fragments Matched: %s"", chain. num_matched )",False,chain.num_matched > max_matched,len(self.chain) > max_matched,0.6527489423751831 1710,"def test_source_address ( self ) : for addr, is_ipv6 in VALID_SOURCE_ADDRESSES : if : warnings. warn ( ""No IPv6 support: skipping."", NoIPv6Warning ) continue pool = HTTPConnectionPool ( self. host, self. port, source_address = addr, retries = False ) self. addCleanup ( pool. close ) r = pool. request ( ""GET"", ""/source_address"" ) self. assertEqual ( r. data, b ( addr [ 0 ] ) )",False,is_ipv6 and (not HAS_IPV6_AND_DNS),is_ipv6,0.6554077863693237 1711,"def setLabel ( self, s, protect = False ) : """"""Set the label of the minibuffer."""""" c, k, w = self. c, self, self. w if w : if : g. app. gui. set_minibuffer_label ( c, s ) w. setAllText ( s ) n = len ( s ) w. setSelectionRange ( n, n, insert = n ) if protect : k. mb_prefix = s",False,"hasattr(g.app.gui, 'set_minibuffer_label')",c,0.6540514230728149 1712,"def get_setup_script ( cls ) : script = """" if cls. INTERNAL_TESTMODE : script += ""\nCONFIGURE SESSION SET __internal_testmode := true;"" schema = [ ""\nmodule test {}"" ] for name in dir ( cls ) : m = re. match ( r""^SCHEMA(?:_(\w+))?"", name ) if m : module_name = ( m. group ( 1 ) or ""test"" ). lower ( ). replace ( ""__"", ""."" ) schema_fn = getattr ( cls, name ) if : with open ( schema_fn, ""r"" ) as sf : module = sf. read ( ) schema. append ( f""\nmodule {module_name} {{ {module} }}"" ) script += f""\nSTART MIGRATION"" script += f' TO {{ {"""".join(schema)} }};' script += f""\nPOPULATE MIGRATION;"" script += f""\nCOMMIT MIGRATION;"" if cls. SETUP : if not isinstance ( cls. 
SETUP, ( list, tuple ) ) : scripts = [ cls. SETUP ] else : scripts = cls. SETUP for scr in scripts : if ""\n"" not in scr and os. path. exists ( scr ) : with open ( scr, ""rt"" ) as f : setup = f. read ( ) else : setup = scr <",False,schema_fn is not None,schema_fn,0.6590206027030945 1713,"def do_sub ( m ) : c = m. groupdict ( ) if c [ ""htmlchars"" ] : return cgi. escape ( c [ ""htmlchars"" ] ) if c [ ""lineend"" ] : return ""<br>"" elif c [ ""space"" ] : t = m. group ( ). replace ( ""\t"", u""&nbsp;"" * tabstop ) t = t. replace ( "" "", ""&nbsp;"" ) return t elif c [ ""space"" ] == ""\t"" : return ""&nbsp;"" * tabstop else : url = m. group ( ""protocol"" ) if : prefix = "" "" url = url [ 1 : ] else : prefix = """" last = m. groups ( ) [ - 1 ] if last in [ ""\n"", ""\r"", ""\r\n"" ] : last = ""<br>"" return u'{0}<a href=""{1}"">
    {2}{3}'. format ( prefix, url, url, last )",False,url.startswith(' '),url.startswith('/'),0.6510301828384399 1714,"def validate_email ( self, data ) : email = data. get ( ""email"" ) if email is None : return existing_team = Teams. query. filter_by ( email = email ). first ( ) if is_admin ( ) : team_id = data. get ( ""id"" ) if team_id : if : raise ValidationError ( ""Email address has already been used"", field_names = [ ""email"" ] ) else : if existing_team : raise ValidationError ( ""Email address has already been used"", field_names = [ ""email"" ] ) else : current_team = get_current_team ( ) if email == current_team. email : return data else : if existing_team : raise ValidationError ( ""Email address has already been used"", field_names = [ ""email"" ] )",False,existing_team and existing_team.id != team_id,email == team_id,0.6556707620620728 1715,"def process ( self ) : if not self. outputs [ 0 ]. is_linked : return var_names = self. get_variables ( ) inputs = self. get_input ( ) results = [ ] if var_names : input_values = [ inputs. get ( name, [ [ 0 ] ] ) for name in var_names ] parameters = match_long_repeat ( input_values ) else : parameters = [ [ [ None ] ] ] for objects in zip ( * parameters ) : object_results = [ ] for values in zip_long_repeat ( * objects ) : variables = dict ( zip ( var_names, values ) ) vector = [ ] for formula in self. formulas ( ) : if formula : value = safe_eval ( formula, variables ) vector. append ( value ) if : object_results. append ( vector ) else : object_results. extend ( vector ) results. append ( object_results ) if self. wrap : results = [ results ] self. outputs [ ""Result"" ]. sv_set ( results )",False,self.separate,self.wrap,0.660106360912323 1716,"def user_line ( self, frame ) : traceenter ( ""user_line"", _dumpf ( frame ) ) if frame. f_lineno!= 0 : breakReason = self. breakReason if : breakReason = axdebug. BREAKREASON_STEP self. _HandleBreakPoint ( frame, None, breakReason )",True,breakReason is None,breakReason is None,0.6683458685874939 1717,"def startDemo ( self ) : self. refreshCanvas ( ) self. dirty = True turtle. TurtleScreen. _RUNNING = True self. configGUI ( DISABLED, DISABLED, NORMAL, DISABLED, ""demo running..."", ""black"" ) self. screen. clear ( ) self. screen. mode ( ""standard"" ) self. state = RUNNING try : result = self. module. main ( ) if result == ""EVENTLOOP"" : self. state = EVENTDRIVEN else : self. state = DONE except turtle. Terminator : if : return self. state = DONE result = ""stopped!"" if self. state == DONE : self. configGUI ( NORMAL, NORMAL, DISABLED, NORMAL, result ) elif self. state == EVENTDRIVEN : self. exitflag = True self. configGUI ( DISABLED, DISABLED, NORMAL, DISABLED, ""use mouse/keys or STOP"", ""red"" )",False,self.root is None,self.state == STATE_STOPPED,0.6546293497085571 1718,"def get_filter_based_on_pattern ( pattern : str, fields : Sequence [ str ] = ( "".name"", ), flag : str = """", *, filter_and : str = """", filter_or : str = """", ) -> Tuple [ str, Dict [ str, str ] ] : if flag : flag = f""(?{flag})"" qkw : Dict [ str, str ] = { } filter_cond = """" if pattern and fields : qkw = { ""re_filter"" : flag + pattern } filters = [ ] for field in fields : filters. 
append ( f""re_test($re_filter, {field})"" ) filter_cond = f'({"" OR "".join(filters)})' filter_clause = """" if filter_cond : filter_clause += filter_cond if filter_and : if : filter_clause += "" AND "" filter_clause += filter_and if filter_or : if : filter_clause += "" OR "" filter_clause += filter_or if : filter_clause = ""FILTER "" + filter_clause return filter_clause, qkw return """", qkw",False,filter_clause,filter_and and filter_or,0.661453902721405 1719,"def __init__ ( self, isofile ) : try : f = open ( isofile, ""rb"" ) except ( IOError ) : sys. stderr. write ( ""can't open {0}"". format ( isofile ) ) raise if os. path. getsize ( isofile ) == 0 : raise IOError ( ""File {0} appears to be empty"". format ( isofile ) ) self. isoFile = f self. priVol = None self. rootDir = None self. rripOffset = - 1 desc_nr = 0 while True : desc_nr = desc_nr + 1 try : self. isoFile. seek ( BLOCK_SIZE * ( 15 + desc_nr ) ) volume_dsc = self. isoFile. read ( BLOCK_SIZE ) flag = struct. unpack ( ""B"", volume_dsc [ 0 : 1 ] ) [ 0 ] if flag == 1 : self. __readPrimaryVolume__ ( volume_dsc ) continue if : break except Exception as e : gen. log ( ""Got exception when init iso file:"", sys. exc_info ( ) [ 0 ] ) self. priVol = None self. rootDir = None break",False,flag == 255,flag == False,0.6714748740196228 1720,"def text_to_sequence ( self, text, inference = False ) : if inference : pinyin = self. pinyin_parser ( text, style = Style. TONE3, errors = ""ignore"" ) new_pinyin = [ ] for x in pinyin : x = """". join ( x ) if : new_pinyin. append ( x ) phonemes = self. get_phoneme_from_char_and_pinyin ( text, new_pinyin ) text = "" "". join ( phonemes ) print ( f""phoneme seq: {text}"" ) sequence = [ ] for symbol in text. split ( ) : idx = self. symbol_to_id [ symbol ] sequence. append ( idx ) sequence += [ self. eos_id ] return sequence",False,'#' not in x,inclan,0.675324022769928 1721,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. LIST : self. names = [ ] ( _etype360, _size357 ) = iprot. readListBegin ( ) for _i361 in xrange ( _size357 ) : _elem362 = iprot. readString ( ) self. names. append ( _elem362 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. LIST : self. exprs = [ ] ( _etype366, _size363 ) = iprot. readListBegin ( ) for _i367 in xrange ( _size363 ) : <",True,fid == 2,fid == 2,0.6812487840652466 1722,"def __init__ ( self, slither, logger ) : super ( ). __init__ ( slither, logger ) inheritance = [ x. inheritance for x in slither. contracts ] self. inheritance = { item for sublist in inheritance for item in sublist } self. overshadowing_state_variables = { } shadows = detect_state_variable_shadowing ( slither. contracts ) for overshadowing_instance in shadows : overshadowing_state_var = overshadowing_instance [ 1 ] overshadowed_state_var = overshadowing_instance [ 3 ] if : self. overshadowing_state_variables [ overshadowing_state_var ] = set ( ) self. overshadowing_state_variables [ overshadowing_state_var ]. 
add ( overshadowed_state_var )",True,overshadowing_state_var not in self.overshadowing_state_variables,overshadowing_state_var not in self.overshadowing_state_variables,0.653820276260376 1723,"def reduce_NodeName_LPAREN_OptFuncArgList_RPAREN ( self, * kids ) : module = kids [ 0 ]. val. module func_name = kids [ 0 ]. val. name name = func_name if not module else ( module, func_name ) last_named_seen = None args = [ ] kwargs = { } for argname, argname_ctx, arg in kids [ 2 ]. val : if argname is not None : if : raise EdgeQLSyntaxError ( f""duplicate named argument `{argname}`"", context = argname_ctx ) last_named_seen = argname kwargs [ argname ] = arg else : if last_named_seen is not None : raise EdgeQLSyntaxError ( f""positional argument after named "" f""argument `{last_named_seen}`"", context = arg. context, ) args. append ( arg ) self. val = qlast. FunctionCall ( func = name, args = args, kwargs = kwargs )",False,argname in kwargs,argname in last_named_seen,0.6746421456336975 1724,"def find_prefix_cce ( directory ) : results = find_rules ( directory, has_prefix_cce ) print ( ""Number of rules with prefixed CCEs: %d"" % len ( results ) ) for result in results : rule_path = result [ 0 ] product_yaml_path = result [ 1 ] product_yaml = None if : product_yaml = yaml. open_raw ( product_yaml_path ) fix_file ( rule_path, product_yaml, fix_prefix_cce )",False,product_yaml_path is not None,yaml is not None,0.6574790477752686 1725,"def set_json_body ( cls, request_builder ) : old_body = request_builder. info. pop ( ""data"", { } ) if isinstance ( old_body, abc. Mapping ) : body = request_builder. info. setdefault ( ""json"", { } ) for path in old_body : if : cls. _sequence_path_resolver ( path, old_body [ path ], body ) else : body [ path ] = old_body [ path ] else : request_builder. info. setdefault ( ""json"", old_body )",False,"isinstance(path, tuple)","isinstance(path, abc.Mapping)",0.6489537954330444 1726,"def __init__ ( self, sequence : Optional [ Sequence ] = None, np_histogram : Optional [ ""NumpyHistogram"" ] = None, num_bins : int = 64, ) -> None : if np_histogram : if len ( np_histogram ) == 2 : self. histogram = ( np_histogram [ 0 ]. tolist ( ) if hasattr ( np_histogram [ 0 ], ""tolist"" ) else np_histogram [ 0 ] ) self. bins = ( np_histogram [ 1 ]. tolist ( ) if : else np_histogram [ 1 ] ) else : raise ValueError ( ""Expected np_histogram to be a tuple of (values, bin_edges) or sequence to be specified"" ) else : np = util. get_module ( ""numpy"", required = ""Auto creation of histograms requires numpy"" ) self. histogram, self. bins = np. histogram ( sequence, bins = num_bins ) self. histogram = self. histogram. tolist ( ) self. bins = self. bins. tolist ( ) if len ( self. histogram ) > self. MAX_LENGTH : raise ValueError ( ""The maximum length of a histogram is %i"" % self. MAX_LENGTH ) if len ( self. histogram ) + 1!= len ( self. bins ) : raise ValueError ( ""len(bins) must be len(histogram) +",False,"hasattr(np_histogram[1], 'tolist')","hasattr(np_histogram[1], 'bin_edges')",0.6648555994033813 1727,"def extract ( self ) : for l in self. splitlines ( ) : if len ( l ) < 2 : continue l [ 0 ]. split ( ) name = l [ 0 ] if : self. set2 [ name ] = int ( l [ 2 ] ) for i, name in enumerate ( self. vars ) : if self. counter [ i ] : self. val [ name ] = ( self. set2 [ name ] - self. set1 [ name ] ) * 1.0 / elapsed else : self. val [ name ] = self. set2 [ name ] self. val [ ""total"" ] = self. val [ ""hits"" ] + self. val [ ""misses"" ] if self. val [ ""total"" ] > 0 : self. 
val [ ""hit_rate"" ] = self. val [ ""hits"" ] / self. val [ ""total"" ] * 100.0 else : self. val [ ""hit_rate"" ] = 0 if step == op. delay : self. set1. update ( self. set2 )",True,name in self.vars,name in self.vars,0.6574908494949341 1728,"def __init__ ( self, env, reward_scale = 1.0, obs_mean = None, obs_std = None, ) : ProxyEnv. __init__ ( self, env ) self. _should_normalize = not ( obs_mean is None and obs_std is None ) if self. _should_normalize : if obs_mean is None : obs_mean = np. zeros_like ( env. observation_space. low ) else : obs_mean = np. array ( obs_mean ) if : obs_std = np. ones_like ( env. observation_space. low ) else : obs_std = np. array ( obs_std ) self. _reward_scale = reward_scale self. _obs_mean = obs_mean self. _obs_std = obs_std ub = np. ones ( self. _wrapped_env. action_space. shape ) self. action_space = Box ( - 1 * ub, ub )",True,obs_std is None,obs_std is None,0.6698172092437744 1729,"def run_actor ( agent : Agent, rng_key : jnp. ndarray, get_params : Callable [ [ ], hk. Params ], enqueue_traj : Callable [ [ Transition ], None ], unroll_len : int, num_trajectories : int, ) : """"""Runs an actor to produce num_trajectories trajectories."""""" env = catch. Catch ( ) state = env. reset ( ) traj = [ ] for i in range ( num_trajectories ) : params = get_params ( ) for _ in range ( unroll_len + int ( i == 0 ) ) : rng_key, step_key = jax. random. split ( rng_key ) state = preprocess_step ( state ) action, logits = agent. step ( params, step_key, state ) transition = Transition ( state, action, logits ) traj. append ( transition ) state = env. step ( action ) if : logging. log_every_n ( logging. INFO, ""Episode ended with reward: %s"", 5, state. reward ) stacked_traj = jax. tree_multimap ( lambda * ts : np. stack ( ts ), * traj ) enqueue_traj ( stacked_traj ) traj = traj [ - 1 : ]",False,state.step_type == dm_env.StepType.LAST,state.reward is not None,0.6495782136917114 1730,"def dump ( self ) : for field in self. _fields_ : if : print ( ""%s: 0x%x"" % ( field [ 0 ], getattr ( self, field [ 0 ] ). value ) ) elif isinstance ( getattr ( self, field [ 0 ] ), sysctl_oid_t. slist_entry ) : print ( ""%s: Struct( 0x%x )"" % ( field [ 0 ], getattr ( self, field [ 0 ] ). sle_next. value ) ) else : print ( ""%s: 0x%x"" % ( field [ 0 ], getattr ( self, field [ 0 ] ) ) )",False,"isinstance(getattr(self, field[0]), POINTER64)","isinstance(field[0], six.string_types)",0.6474175453186035 1731,"def _set_uid ( self, val ) : if val is not None : if : self. bus. log ( ""pwd module not available; ignoring uid."", level = 30 ) val = None elif isinstance ( val, text_or_bytes ) : val = pwd. getpwnam ( val ) [ 2 ] self. _uid = val",False,pwd is None,not pwd,0.6692845821380615 1732,"def compute_work_statistics ( self ) : """"""Computes statistics from all work pieces stored in this class."""""" result = { } for v in itervalues ( self. work ) : submission_id = v [ ""submission_id"" ] if submission_id not in result : result [ submission_id ] = { ""completed"" : 0, ""num_errors"" : 0, ""error_messages"" : set ( ), ""eval_times"" : [ ], ""min_eval_time"" : None, ""max_eval_time"" : None, ""mean_eval_time"" : None, ""median_eval_time"" : None, } if : continue result [ submission_id ] [ ""completed"" ] += 1 if ""error"" in v and v [ ""error"" ] : result [ submission_id ] [ ""num_errors"" ] += 1 result [ submission_id ] [ ""error_messages"" ]. add ( v [ ""error"" ] ) else : result [ submission_id ] [ ""eval_times"" ]. 
append ( float ( v [ ""elapsed_time"" ] ) ) for v in itervalues ( result ) : if v [ ""eval_times"" ] : v [ ""min_eval_time"" ] = np. min ( v [ ""eval_times"" ] ) v [ ""max_eval_time""",False,not v['is_completed'],'result' in result and result[submission_id],0.6580171585083008 1733,"def _find_base ( self, ptr ) : page_size = 0x1000 page_mask = ~ ( page_size - 1 ) ptr &= page_mask w = None while True : if self. leak. compare ( ptr, b""\x7fELF"" ) : break fast = self. _find_base_optimized ( ptr ) if fast : ptr = fast continue ptr -= page_size if : raise ValueError ( ""Address is negative, something is wrong!"" ) w = w or self. waitfor ( ""Finding base address"" ) self. status ( ""%#x"" % ptr ) if w : self. success ( ""%#x"" % ptr ) return ptr",True,ptr < 0,ptr < 0,0.6740406155586243 1734,"def _run ( args, fallback = None, only_first_line = False ) : try : lines = ( subprocess. Popen ( args, stdout = subprocess. PIPE ). communicate ( ) [ 0 ]. splitlines ( ) ) result_lines = [ line. decode ( ""utf-8"" ) for line in lines ] if : return result_lines [ 0 ] else : return result_lines except : return fallback",True,only_first_line,only_first_line,0.6492031812667847 1735,"def substitute_out ( self, outvar, expr, subject = None, solver = None ) : multiplier = self. terms. pop ( outvar ) self. constant = self. constant + multiplier * expr. constant for clv, coeff in expr. terms. items ( ) : old_coefficient = self. terms. get ( clv ) if old_coefficient : new_coefficient = old_coefficient + multiplier * coeff if approx_equal ( new_coefficient, 0 ) : solver. note_removed_variable ( clv, subject ) del self. terms [ clv ] else : self. set_variable ( clv, new_coefficient ) else : self. set_variable ( clv, multiplier * coeff ) if : solver. note_added_variable ( clv, subject )",False,solver,"approx_equal(clv, 0)",0.7109112739562988 1736,"def get ( self, resultSpec, kwargs ) : if ""buildid"" in kwargs : dbdict = yield self. master. db. builds. getBuild ( kwargs [ ""buildid"" ] ) else : bldr = yield self. getBuilderId ( kwargs ) if bldr is None : return num = kwargs [ ""number"" ] dbdict = yield self. master. db. builds. getBuildByNumber ( bldr, num ) data = yield self. db2data ( dbdict ) if dbdict else None if data : filters = ( resultSpec. popProperties ( ) if hasattr ( resultSpec, ""popProperties"" ) else [ ] ) if : try : props = yield self. master. db. builds. getBuildProperties ( data [ ""buildid"" ] ) except ( KeyError, TypeError ) : props = { } filtered_properties = self. _generate_filtered_properties ( props, filters ) if filtered_properties : data [ ""properties"" ] = filtered_properties defer. returnValue ( data )",False,filters,data,0.7000559568405151 1737,"def _gethostbyname_ex ( self, hostname_bytes, family ) : while True : ares = self. cares try : waiter = Waiter ( self. hub ) ares. gethostbyname ( waiter, hostname_bytes, family ) result = waiter. get ( ) if not result [ - 1 ] : raise herror ( EAI_NONAME, self. EAI_NONAME_MSG ) return result except herror as ex : if : if ex. args [ 0 ] == 1 : raise gaierror ( EAI_NONAME, self. EAI_NONAME_MSG ) raise",False,ares is self.cares,len(ex.args) > 0,0.6622622013092041 1738,"def get_context_data ( self, ** kwargs ) : if not self. device. verified : messages. error ( self. request, ""{} - is not verified, you cannot "" ""deploy this device"". format ( self. device ), ) next_hostname = None first_free_ip_addresses = [ ] rack = self. device. find_rack ( ) if rack : networks = rack. network_set. filter ( environment__isnull = False, ). 
order_by ( ""name"" ) for network in networks : next_hostname = get_next_free_hostname ( network. environment ) if next_hostname : break for network in networks : first_free_ip = get_first_free_ip ( network. name ) if : first_free_ip_addresses. append ( { ""network_name"" : network. name, ""first_free_ip"" : first_free_ip, } ) return { ""form"" : kwargs [ ""form"" ], ""device"" : self. device, ""next_hostname"" : next_hostname, ""first_free_ip_addresses"" : first_free_ip_addresses, }",True,first_free_ip,first_free_ip,0.6588336229324341 1739,"def assertValidTree ( self, expected_tree ) : root = Category. objects. root_category ( ) queryset = Category. objects. filter ( tree_id = root. tree_id ). order_by ( ""lft"" ) current_tree = [ ] for category in queryset : current_tree. append ( ( category, category. get_level ( ), category. lft, category. rght ) ) for i, category in enumerate ( expected_tree ) : _category = current_tree [ i ] if category [ 0 ]!= _category [ 0 ] : self. fail ( ( ""expected category at index #%s to be %s, "" ""found %s instead"" ) % ( i, category [ 0 ], _category [ 0 ] ) ) if : self. fail ( ( ""expected level at index #%s to be %s, "" ""found %s instead"" ) % ( i, category [ 1 ], _category [ 1 ] ) ) if category [ 2 ]!= _category [ 2 ] : self. fail ( ( ""expected lft at index #%s to be %s, "" ""found %s instead"" ) % ( i, category [ 2 ], _category [ 2 ] ) ) if category [ 3 ]!= _category [ 3 ] : self. fail ( ( ""expected lft at index #%s to be %s, "" ""found %s instead",True,category[1] != _category[1],category[1] != _category[1],0.6555683016777039 1740,"def evaluate_attribute_node ( self, node, to_string = False ) : identifier = node. value. id + ""."" + node. attr if isinstance ( self. simulator_config. item_dict [ identifier ], SimulatorProtocolLabel ) : label = self. simulator_config. item_dict [ identifier ] message = label. parent ( ) start, end = message. get_label_range ( label, 2 if : else 0, False ) return ( message. plain_ascii_str [ start : end ] if : else int ( message. plain_bits_str [ start : end ], 2 ) ) elif isinstance ( self. simulator_config. item_dict [ identifier ], SimulatorCounterAction ) : return self. simulator_config. item_dict [ identifier ]. value elif isinstance ( self. simulator_config. item_dict [ identifier ], SimulatorTriggerCommandAction ) : return self. simulator_config. item_dict [ identifier ]. return_code",True,to_string,to_string,0.6586556434631348 1741,"def _make_headers ( self ) : libraries = self. _df. columns. to_list ( ) columns = [ ] for library in libraries : version = self. _package_versions [ library ] library_description = self. _libraries_description. get ( library ) if : library += "" {}"". format ( library_description ) columns. append ( ""{library}
    {version}"". format ( library = library, version = version ) ) return [ """" ] + columns",True,library_description,library_description,0.6712574362754822 1742,"def test_binary_grid_unaries ( ) : for ds in binary : X, Y = ds ( n_samples = 1 ) x, y = X [ 0 ], Y [ 0 ] for inference_method in get_installed ( ) : if : continue crf = GridCRF ( inference_method = inference_method ) crf. initialize ( X, Y ) w_unaries_only = np. zeros ( 7 ) w_unaries_only [ : 4 ] = np. eye ( 2 ). ravel ( ) inf_unaries = crf. inference ( x, w_unaries_only ) assert_array_equal ( inf_unaries, np. argmax ( x, axis = 2 ), ""Wrong unary inference for %s"" % inference_method, ) assert np. mean ( inf_unaries == y ) > 0.5 X, Y = ds ( n_samples = 1, noise = 0 ) inf_unaries = crf. inference ( X [ 0 ], w_unaries_only ) assert_array_equal ( inf_unaries, Y [ 0 ], ""Wrong unary result for %s"" % inference_method )",False,inference_method == 'ad3+',"hasattr(i2, 'train_grid_unaries')",0.6619477868080139 1743,"def user_delete ( request, user_id, response_format = ""html"" ) : ""User delete"" profile = get_object_or_404 ( User, pk = user_id ) message = """" if profile == request. user. profile : message = _ ( ""This is you!"" ) else : if : if ""delete"" in request. POST : profile. delete ( ) return HttpResponseRedirect ( reverse ( ""core_admin_index_users"" ) ) elif ""cancel"" in request. POST : return HttpResponseRedirect ( reverse ( ""core_admin_user_view"", args = [ profile. id ] ) ) return render_to_response ( ""core/administration/user_delete"", { ""profile"" : profile, ""message"" : message }, context_instance = RequestContext ( request ), response_format = response_format, )",False,request.POST,profile.id in request.POST,0.6697162985801697 1744,"def gradients_X ( self, dL_dK, X, X2 = None ) : if hasattr ( X, ""values"" ) : X = X. values index = np. int_ ( X [ :, 1 ] ) index = index. reshape ( index. size, ) X_flag = index [ 0 ] >= self. output_dim gX = np. zeros ( X. shape ) if X2 is None : if X_flag : index -= self. output_dim gX [ :, 0 ] = 2.0 * ( dL_dK * self. _gkuu_X ( X, index ) ). sum ( 0 ) return gX else : raise NotImplementedError else : if hasattr ( X2, ""values"" ) : X2 = X2. values index2 = np. int_ ( X2 [ :, 1 ] ) index2 = index2. reshape ( index2. size, ) X2_flag = index2 [ 0 ] >= self. output_dim if : index -= self. output_dim gX [ :, 0 ] = ( dL_dK * self. _gkfu_z ( X2, index2, X, index ). T ). sum ( 1 ) return gX else : raise NotImplementedError",False,X_flag and (not X2_flag),X2_flag,0.651221752166748 1745,"def json ( self ) : try : if self. is_json ( ) : raw_data = self. raw_data ( ) if : raw_data = raw_data. decode ( ""utf-8"" ) return json. loads ( raw_data ) except ValueError : pass",False,"not isinstance(raw_data, text_type)",type(raw_data) == bytes,0.6492528915405273 1746,"def __getstate__ ( self ) : prefs = { } for id, ( val, typ ) in self. prefs. items ( ) : if : try : val = UnwrapObject ( val ) except COMException : pass if val. _is_shadow : continue prefs [ id ] = val, typ return ( self. id, self. preftype, self. idref, prefs )",False,typ == 'object',id == 0,0.661811351776123 1747,"def r_verb ( self ) : self. ket = self. cursor among_var = self. find_among_b ( RussianStemmer. a_4, 46 ) if among_var == 0 : return False self. bra = self. cursor if among_var == 0 : return False elif among_var == 1 : try : v_1 = self. limit - self. cursor try : if not self. eq_s_b ( 1, u""\u0430"" ) : raise lab1 ( ) raise lab0 ( ) except lab1 : pass self. cursor = self. 
limit - v_1 if : return False except lab0 : pass if not self. slice_del ( ) : return False elif among_var == 2 : if not self. slice_del ( ) : return False return True",False,"not self.eq_s_b(1, u'я')",not self.has_l_h(),0.6593855619430542 1748,"def mlist_delete ( request, mlist_id, response_format = ""html"" ) : ""Delete mlist page"" mlist = get_object_or_404 ( MailingList, pk = mlist_id ) if not request. user. profile. has_permission ( mlist, mode = ""w"" ) : return user_denied ( request, message = ""You don't have access to this Mailing List"", response_format = response_format, ) if request. POST : if ""delete"" in request. POST : if : mlist. trash = True mlist. save ( ) else : mlist. delete ( ) return HttpResponseRedirect ( ""/messaging/"" ) elif ""cancel"" in request. POST : return HttpResponseRedirect ( reverse ( ""messaging_mlist_view"", args = [ mlist. id ] ) ) context = _get_default_context ( request ) context. update ( { ""mlist"" : mlist } ) return render_to_response ( ""messaging/mlist_delete"", context, context_instance = RequestContext ( request ), response_format = response_format, )",False,'trash' in request.POST,mlist.trash,0.6564432382583618 1749,"def fragment_count ( self ) : table = self. fragmentruntable. payload. fragment_run_entry_table first_fragment, end_fragment = None, None for i, fragmentrun in enumerate ( table ) : if : if fragmentrun. discontinuity_indicator == 0 : break elif fragmentrun. discontinuity_indicator > 0 : continue if first_fragment is None : first_fragment = fragmentrun. first_fragment end_fragment = fragmentrun. first_fragment fragment_duration = ( fragmentrun. first_fragment_timestamp + fragmentrun. fragment_duration ) if self. timestamp > fragment_duration : offset = ( self. timestamp - fragment_duration ) / fragmentrun. fragment_duration end_fragment += int ( offset ) if first_fragment is None : first_fragment = 1 if end_fragment is None : end_fragment = 1 return first_fragment, end_fragment",False,fragmentrun.discontinuity_indicator is not None,i % 2 == 2,0.6555056571960449 1750,"def get_branch_name ( directory, config_file, get_func, create_watcher ) : global branch_name_cache with branch_lock : fw = branch_watcher ( create_watcher ) is_watched = fw. is_watching ( directory ) try : changed = fw ( directory ) except OSError as e : if getattr ( e, ""errno"", None )!= errno. ENOENT : raise changed = True if changed : branch_name_cache. pop ( config_file, None ) if is_watched : fw. unwatch ( directory ) fw. unwatch ( config_file ) else : try : changed = fw ( config_file ) except OSError as e : if getattr ( e, ""errno"", None )!= errno. ENOENT : raise if : branch_name_cache [ config_file ] = out_u ( get_func ( directory, config_file ) ) if changed : branch_name_cache [ config_file ] = out_u (",True,config_file not in branch_name_cache,config_file not in branch_name_cache,0.651280403137207 1751,"def follow_view ( request ) : if request. method == ""GET"" : from auth. view_utils import render_template from auth. views import after return render_template ( request, ""twitter/follow"", { ""user_to_follow"" : USER_TO_FOLLOW, ""reason_to_follow"" : REASON_TO_FOLLOW }, ) if request. method == ""POST"" : follow_p = bool ( request. POST. get ( ""follow_p"", False ) ) if : from auth. security import get_user user = get_user ( request ) twitter_client = _get_client_by_token ( user. token ) result = twitter_client. 
oauth_request ( ""http://api.twitter.com/1/friendships/create.json"", args = { ""screen_name"" : USER_TO_FOLLOW }, method = ""POST"", ) from auth. views import after_intervention return HttpResponseRedirect ( reverse ( after_intervention ) )",True,follow_p,follow_p,0.6676169633865356 1752,"def _merged_column_names_from ( self, dataset_list ) : elements = [ ] for idx_dataset, dataset in enumerate ( dataset_list ) : code = self. __dataset_objects__ ( ) [ idx_dataset ]. code for index, column_name in enumerate ( dataset. column_names ) : if self. _include_column ( dataset, index ) : if : elements. append ( self. _rename_columns ( code, column_name ) ) else : elements. append ( column_name ) return list ( unique_everseen ( elements ) )",False,index > 0,code is not None,0.6690303087234497 1753,"def safe_repr ( val ) : try : if : val = _obj_with_safe_repr ( val ) ret = repr ( val ) if six. PY2 : ret = ret. decode ( ""utf-8"" ) except UnicodeEncodeError : ret = red ( ""a %r that cannot be represented"" % type ( val ) ) else : ret = green ( ret ) return ret",False,"isinstance(val, dict)","hasattr(val, '__iter__')",0.6536378264427185 1754,"def extract ( self, obj, k ) : if isinstance ( obj, Mapping ) : return obj. get ( k ) elif isinstance ( obj, MutableSequence ) : cp = [ ] metadata = [ ] for l in obj : if isinstance ( l, tuple ) : cp. append ( self. extract ( l [ 0 ], k ) ) metadata. append ( self. extract ( l [ 1 ], k ) ) else : result = self. extract ( l, k ) if : cp. append ( result [ 0 ] ) metadata. append ( result [ 1 ] ) else : cp. append ( result ) return cp, metadata elif isinstance ( obj, tuple ) : return self. extract ( obj [ 0 ], k ) else : return [ ]",False,"isinstance(result, tuple)","isinstance(result, list)",0.6535873413085938 1755,"def skip_to_close_match ( self ) : nestedCount = 1 while 1 : tok = self. tokenizer. get_next_token ( ) ttype = tok [ ""style"" ] if ttype == SCE_PL_UNUSED : return elif : tval = tok [ ""text"" ] if self. opHash. has_key ( tval ) : if self. opHash [ tval ] [ 1 ] == 1 : nestedCount += 1 else : nestedCount -= 1 if nestedCount <= 0 : break",False,self.classifier.is_index_op(tok),ttype == TCE_PL_LONG,0.6507596373558044 1756,"def _read_pidfiles ( self, pidfile_paths ) : pidfiles = { } for pidfile_path in pidfile_paths : if : continue try : file_object = open ( pidfile_path, ""r"" ) pidfiles [ pidfile_path ] = file_object. read ( ) file_object. close ( ) except IOError : continue return pidfiles",True,not os.path.exists(pidfile_path),not os.path.exists(pidfile_path),0.6514861583709717 1757,"def apply_extractors ( descriptor, template_extractors, extractors ) : type_processor_class = FieldTypeManager ( ). type_processor_class if isinstance ( template_extractors, dict ) : template_extractors = template_extractors. items ( ) attribute_map = descriptor. attribute_map for field_name, field_extractors in template_extractors : equeue = [ ] for eid in field_extractors : e_doc = extractors. get ( eid, { } ) if ""regular_expression"" in e_doc : equeue. append ( create_regex_extractor ( e_doc [ ""regular_expression"" ] ) ) elif ""type_extractor"" in e_doc : try : display_name = attribute_map [ field_name ]. 
description except KeyError : display_name = field_name field_type = type_processor_class ( e_doc [ ""type_extractor"" ] ) ( ) attribute_map [ field_name ] = SlybotFieldDescriptor ( field_name, display_name, field_type ) if field_name not in attribute_map : attribute_map [ field_name ] = SlybotFieldDescriptor ( field_name, field_name, type_processor_class ( ""text"" ) ( ) ) if",False,equeue,extractors,0.687211811542511 1758,"def process ( data ) : if self. decryptor is not None : eof = not data if : self. buffered += """". join ( [ c for c in data if c not in ( "" "", ""\t"", ""\r"", ""\n"" ) ] ) else : self. buffered += data or """" data = """" for i in ( 256, 64, 8, 1 ) : batch = max ( 1, i * self. decoder_data_bytes ) while eof or ( len ( self. buffered ) >= batch ) : if self. decoder_data_bytes : d = self. buffered [ : batch ] b = self. buffered [ batch : ] self. buffered = b else : d, self. buffered = self. buffered, """" try : data += self. decryptor ( self. decoder ( d ) ) eof = False except TypeError : raise IOError ( ""%s: Bad data, failed to decode"" % self. name ) return data or """"",False,self.decoder_data_bytes and data,self.decoder_data_bytes,0.6579269170761108 1759,"def query_encrypted_roots_keys ( self, filters = None ) : datasets = self. middleware. call_sync ( ""datastore.query"", self. dataset_store, filters ) zfs_keys = self. middleware. call_sync ( ""kmip.retrieve_zfs_keys"" ) keys = { } for ds in datasets : if : keys [ ds [ ""name"" ] ] = ds [ ""encryption_key"" ] elif ds [ ""name"" ] in zfs_keys : keys [ ds [ ""name"" ] ] = zfs_keys [ ds [ ""name"" ] ] return keys",False,ds['encryption_key'],ds['encryption_key'] in keys,0.656036376953125 1760,"def capture ( self, run_info_dict ) : logger. info ( f""Capturing repro information to {self._path}"" ) with open_tar ( self. _path, ""w:gz"", dereference = True, compresslevel = 6 ) as tarout : for relpath in os. listdir ( self. _buildroot ) : if : tarout. add ( os. path. join ( self. _buildroot, relpath ), relpath ) with temporary_file ( binary_mode = False ) as tmpfile : tmpfile. write ( ""# Pants repro captured for the following build:\n"" ) for k, v in sorted ( run_info_dict. items ( ) ) : tmpfile. write ( f""# {k}: {v}\n"" ) cmd_line = list ( sys. argv ) cmd_line [ 0 ] = ""pants"" cmd_line = [ x for x in cmd_line if not x. startswith ( ""--repro-"" ) ] tmpfile. write ( ""'"" + ""' '"". join ( cmd_line ) + ""'\n"" ) tmpfile. flush ( ) chmod_plus_x ( tmpfile. name ) tarout. add ( tmpfile. name, ""repro.sh"" )",False,relpath not in self._ignore,relpath != '',0.659044086933136 1761,"def _AdjustSashPosition ( self, idx, newPos1, newPos2 = - 1, adjustNeighbor = False ) : total = newPos1 + newPos2 win1 = self. _windows [ idx ] win2 = self. _windows [ idx + 1 ] minSize = self. _GetWindowMin ( win1 ) if minSize == - 1 or self. _minimumPaneSize > minSize : minSize = self. _minimumPaneSize minSize += self. _GetBorderSize ( ) if newPos1 < minSize : newPos1 = minSize newPos2 = total - newPos1 if adjustNeighbor : minSize = self. _GetWindowMin ( win2 ) if minSize == - 1 or self. _minimumPaneSize > minSize : minSize = self. _minimumPaneSize minSize += self. _GetBorderSize ( ) if : newPos2 = minSize newPos1 = total - newPos2 return ( newPos1, newPos2 )",False,newPos2 < minSize,newPos1 < newPos2,0.6815242767333984 1762,"def get ( self ) : threshold = int ( self. get_argument ( ""threshold"", - 1 ) ) m = MetricTransaction. get_tr_manager ( ) self. write ( """" ) transactions = m. get_transactions ( ) for tr in transactions : self. 
write ( """" % ( tr. get_id ( ), tr. get_size ( ), tr. get_error_count ( ), tr. get_next_flush ( ) ) ) self. write ( ""
    IdSizeError countNext flush
    %s%s%s%s
    "" ) if threshold >= 0 : if : self. set_status ( 503 )",False,len(transactions) > threshold,self.set_status() >= 5,0.6573941111564636 1763,"def _parseOffer ( self, offer ) : cores = 0 memory = 0 disk = 0 preemptable = None for attribute in offer. attributes : if : assert preemptable is None, ""Attribute 'preemptable' occurs more than once."" preemptable = strict_bool ( attribute. text. value ) if preemptable is None : log. debug ( ""Agent not marked as either preemptable or not. Assuming non-preemptable."" ) preemptable = False for resource in offer. resources : if resource. name == ""cpus"" : cores += resource. scalar. value elif resource. name == ""mem"" : memory += resource. scalar. value elif resource. name == ""disk"" : disk += resource. scalar. value return cores, memory, disk, preemptable",False,attribute.name == 'preemptable',attribute.text.value != 'preemptable',0.6523033380508423 1764,"def test ( self, text, s, e ) : ret = { } t = text [ s : e ] if self. settings == ""atx"" : if not re. match ( self. ratx, t ) or re. match ( self. ratxc, t ) : ret [ s ] = ""expected atx"" elif self. settings == ""atx_closed"" : if not re. match ( self. ratxc, t ) : ret [ s ] = ""expected atx_closed"" elif self. settings == ""setext"" : if : ret [ s ] = ""expected setext"" elif self. settings == ""any"" : if re. match ( self. ratx, t ) : if re. match ( self. ratxc, t ) : self. settings = ""atx_closed"" else : self. settings = ""atx"" return self. test ( text, s, e ) elif re. match ( self. rsetext, t ) : self. settings = ""setext"" return self. test ( text, s, e ) return ret",True,"not re.match(self.rsetext, t)","not re.match(self.rsetext, t)",0.648151159286499 1765,"def get_all_subnets ( self, subnet_ids = None, filters = None ) : matches = itertools. chain ( * [ x. values ( ) for x in self. subnets. values ( ) ] ) if subnet_ids : matches = [ sn for sn in matches if sn. id in subnet_ids ] if : unknown_ids = set ( subnet_ids ) - set ( matches ) raise InvalidSubnetIdError ( unknown_ids ) if filters : matches = generic_filter ( filters, matches ) return matches",False,len(subnet_ids) > len(matches),subnet_ids,0.6546308398246765 1766,"def _get_item_columns_panel ( items, rows ) : hbox = Gtk. HBox ( False, 4 ) n_item = 0 col_items = 0 vbox = Gtk. VBox ( ) hbox. pack_start ( vbox, False, False, 0 ) while n_item < len ( items ) : item = items [ n_item ] vbox. pack_start ( item, False, False, 0 ) n_item += 1 col_items += 1 if : vbox = Gtk. VBox ( ) hbox. pack_start ( vbox, False, False, 0 ) col_items = 0 return hbox",False,col_items > rows,n_item == len(items) - 1,0.672930896282196 1767,"def _cleanup ( self, exc = None ) : """"""Clean up this channel"""""" if self. _open_waiter : if not self. _open_waiter. cancelled ( ) : self. _open_waiter. set_exception ( ChannelOpenError ( OPEN_CONNECT_FAILED, ""SSH connection closed"" ) ) self. _open_waiter = None if self. _request_waiters : for waiter in self. _request_waiters : if not waiter. cancelled ( ) : if : waiter. set_exception ( exc ) else : waiter. set_result ( False ) self. _request_waiters = [ ] if self. _session : self. _session. connection_lost ( exc ) self. _session = None self. _close_event. set ( ) if self. _conn : self. logger. info ( ""Channel closed%s"", "": "" + str ( exc ) if exc else """" ) self. _conn. remove_channel ( self. _recv_chan ) self. _recv_chan = None self. _conn = None",True,exc,exc,0.6987417340278625 1768,"def _recursive_fx_apply ( input : dict, fx ) : for k, v in input. items ( ) : if : v = torch. 
tensor ( v ) if isinstance ( v, torch. Tensor ) : v = fx ( v. float ( ) ) input [ k ] = v else : _recursive_fx_apply ( v, fx )",False,"isinstance(v, list)","isinstance(v, dict)",0.6514344215393066 1769,"def get_student_courses ( self, student_id ) : if student_id == ""*"" : student_id = ""{authenticated_user}"" response = None try : response = _query_jupyterhub_api ( ""GET"", ""/users/%s"" % student_id ) except JupyterhubEnvironmentError : self. log. info ( ""Not running on Jupyterhub, not able to GET Jupyterhub user"" ) raise except JupyterhubApiError : self. log. error ( ""Error: Not able to get Jupyterhub user: "" + student_id ) self. log. error ( ""Make sure you start your service with a valid admin_user 'api_token' in your Jupyterhub config"" ) raise courses = set ( ) for group in response [ ""groups"" ] : if : course = group. split ( ""-"", 1 ) [ 1 ] if course : courses. add ( course ) return list ( courses )",False,group.startswith('nbgrader-') or group.startswith('formgrade-'),group,0.6459988355636597 1770,"def get_source ( self, fullname ) : if fullname in self. _cache : compressed = self. _cache [ fullname ] [ 3 ] if : raise ModuleNotFoundError ( self. absent_msg % ( fullname, ) ) source = zlib. decompress ( self. _cache [ fullname ] [ 3 ] ) if PY3 : return to_text ( source ) return source",False,compressed is None,compressed,0.6722270250320435 1771,def run ( self ) : tid = self. ident try : with self. _lock : _GUIS [ tid ] = self self. _state ( True ) self. new_mail_notifications ( summarize = True ) loop_count = 0 while self. _sock : loop_count += 1 self. _select_sleep ( 1 ) self. change_state ( ) if : self. new_mail_notifications ( ) finally : del _GUIS [ tid ],False,loop_count % 5 == 0,loop_count >= 2,0.6665794849395752 1772,"def configure_create_table_epilogue ( store ) : for val in [ """", "" ENGINE=InnoDB"" ] : store. config [ ""create_table_epilogue"" ] = val store. _set_sql_flavour ( ) if : store. log. info ( ""create_table_epilogue='%s'"", val ) return raise Exception ( ""Can not create a transactional table."" )",False,store._test_transaction(),store.sql_flavour,0.6588602066040039 1773,"def search ( self, query ) : query = query. strip ( ). lower ( ) results = [ ] for provider in SidebarItemProvider. all ( self. context ) : for item in provider. provide ( ) : if ""url"" in item : search_source = ""$"". join ( [ item. get ( ""id"", """" ), item. get ( ""name"", """" ) ] ). lower ( ) if : results. append ( { ""title"" : item [ ""name"" ], ""icon"" : item [ ""icon"" ], ""url"" : item [ ""url"" ], } ) return results",False,query in search_source,query.find(search_source) >= 0,0.661675214767456 1774,"def list_revision ( cmd, key = None, fields = None, name = None, label = None, datetime = None, connection_string = None, top = None, all_ = False, auth_mode = ""key"", endpoint = None, ) : azconfig_client = get_appconfig_data_client ( cmd, name, connection_string, auth_mode, endpoint ) key = key if key else SearchFilterOptions. ANY_KEY label = label if label else SearchFilterOptions. ANY_LABEL label = prep_label_filter_for_url_encoding ( label ) try : revisions_iterable = azconfig_client. list_revisions ( key_filter = key, label_filter = label, accept_datetime = datetime, fields = fields ) retrieved_revisions = [ ] count = 0 if all_ : top = float ( ""inf"" ) elif : top = 100 for revision in revisions_iterable : kv_revision = convert_configurationsetting_to_keyvalue ( revision ) if fields : partial_revision = { } for field in fields : partial_revision [ field. name. 
lower ( ) ] = kv_revision. __dict__ [ field. name. lower ( ) ] retrieved_revisions. append ( partial_revision ) else : = top,0.675089955329895 1775,"def read ( self ) : if not os. path. exists ( self. filename ) : raise IOError ( ""Cannot find file '%s'"" % self. filename ) self. FILE = open ( self. filename, ""r"" ) tmp = [ ] for tokens in csv. reader ( self. FILE ) : if tokens!= [ """" ] : tmp. append ( tokens ) self. FILE. close ( ) if len ( tmp ) == 0 : raise IOError ( ""Empty *.csv file"" ) elif len ( tmp ) == 1 : if : if type ( self. options. param ) in ( list, tuple ) : p = self. options. param [ 0 ] else : p = self. options. param if isinstance ( p, Param ) : self. options. model = p. model ( ) p = p. name self. _info = [ ""param"", p, "":="", tmp [ 0 ] [ 0 ] ] elif len ( self. options. symbol_map ) == 1 : self. _info = [ ""param"", self. options. symbol_map [ self. options. symbol_map. keys ( ) [ 0 ] ], "":="", tmp [ 0 ] [ 0 ], ] else : raise IOError ( ""Data looks like a parameter, but multiple parameter names have",False,not self.options.param is None,self.options.param,0.6543909311294556 1776,"def _addItemToLayout ( self, sample, label ) : col = self. layout. columnCount ( ) row = self. layout. rowCount ( ) if row : row -= 1 nCol = self. columnCount * 2 if col == nCol : for col in range ( 0, nCol, 2 ) : if : break if col + 2 == nCol : col = 0 row += 1 self. layout. addItem ( sample, row, col ) self. layout. addItem ( label, row, col + 1 )",False,"not self.layout.itemAt(row, col)",col + 2 == nCol,0.6500959396362305 1777,"def flush_file ( self, key, f ) : f. flush ( ) if : f. compress = zlib. compressobj ( 9, zlib. DEFLATED, - zlib. MAX_WBITS, zlib. DEF_MEM_LEVEL, 0 ) if len ( self. files ) > self. MAX_OPEN_FILES : if : open_files = sum ( 1 for f in self. files. values ( ) if f. fileobj is not None ) if open_files > self. MAX_OPEN_FILES : f. fileobj. close ( ) f. fileobj = None else : f. close ( ) self. files. pop ( key )",False,self.compress,key in self.files,0.6655057668685913 1778,"def update_stack ( self, full_name, template_url, parameters, tags ) : """"""Updates an existing stack in CloudFormation."""""" try : logger. info ( ""Attempting to update stack %s."", full_name ) self. conn. cloudformation. update_stack ( full_name, template_url = template_url, parameters = parameters, tags = tags, capabilities = [ ""CAPABILITY_IAM"" ], ) return SUBMITTED except BotoServerError as e : if : logger. info ( ""Stack %s did not change, not updating."", full_name ) return SKIPPED raise",False,'No updates are to be performed.' in e.message,e.response['errorCode'] in ERROR_NO_UPDATE,0.6547338962554932 1779,"def load_modules ( self, modules, config ) : """"""Load plugin modules."""""" for pluginclass in get_plugin_classes ( modules ) : name = pluginclass. __name__ if : if issubclass ( pluginclass, _ConnectionPlugin ) : log. debug ( LOG_PLUGIN, ""Enable connection plugin %s"", name ) self. connection_plugins. append ( pluginclass ( config [ name ] ) ) elif issubclass ( pluginclass, _ContentPlugin ) : log. debug ( LOG_PLUGIN, ""Enable content plugin %s"", name ) self. content_plugins. append ( pluginclass ( config [ name ] ) ) elif issubclass ( pluginclass, _ParserPlugin ) : log. debug ( LOG_PLUGIN, ""Enable parser plugin %s"", name ) self. parser_plugins. append ( pluginclass ( config [ name ] ) ) else : raise ValueError ( ""Invalid plugin class %s"" % pluginclass )",False,name in config['enabledplugins'],name in config,0.6623169183731079 1780,"def _move_cursor ( self, event ) : Scrollbar. 
_move_cursor ( self, event ) pos = event. clientY - self. line. getY ( ) y = pos - self. cursor. getHeight ( ) / 2 y = max ( y, 0 ) y = min ( y, self. line. getHeight ( ) - self. cursor. getHeight ( ) - 2 ) value = ( y / ( self. line. getHeight ( ) - self. cursor. getHeight ( ) - 2 ) ) * ( self. adjustment. upper - self. adjustment. page_size ) if event. type == ""click"" : old_value = self. adjustment. get_value ( ) if pos < 0 : incr = self. adjustment. step_incr self. adjustment. set_value ( old_value - incr ) elif : incr = self. adjustment. step_incr self. adjustment. set_value ( old_value + incr ) else : incr = self. adjustment. page_incr if value > old_value : self. adjustment. set_value ( old_value + incr ) elif value < old_value : self. adjustment. set_value ( old_value - incr ) else : self. adjustment. set_value ( value )",False,pos > self.line.getHeight(),old_value < 0,0.6561996936798096 1781,"def scan_resource_conf ( self, conf ) : if ""properties"" in conf : if ""logs"" in conf [ ""properties"" ] : if conf [ ""properties"" ] [ ""logs"" ] : storage = { } for log in conf [ ""properties"" ] [ ""logs"" ] : if : if str ( log [ ""enabled"" ] ). lower ( ) == ""true"" : storage [ log [ ""category"" ] ] = True if ( ""StorageRead"" in storage. keys ( ) and ""StorageWrite"" in storage. keys ( ) and ""StorageDelete"" in storage. keys ( ) ) : if ( storage [ ""StorageRead"" ] and storage [ ""StorageWrite"" ] and storage [ ""StorageDelete"" ] ) : return CheckResult. PASSED return CheckResult. FAILED",False,'category' in log and 'enabled' in log,log['category'] in conf,0.657490611076355 1782,"def get_doc_object ( obj, what = None, doc = None, name = None, config = { } ) : if what is None : if : what = ""class"" elif inspect. ismodule ( obj ) : what = ""module"" elif callable ( obj ) : what = ""function"" else : what = ""object"" if what == ""class"" : return SphinxClassDoc ( obj, func_doc = SphinxFunctionDoc, doc = doc, name = name, config = config ) elif what in ( ""function"", ""method"" ) : return SphinxFunctionDoc ( obj, doc = doc, config = config ) else : if doc is None : doc = pydoc. getdoc ( obj ) return SphinxObjDoc ( obj, doc, config = config )",True,inspect.isclass(obj),inspect.isclass(obj),0.6542889475822449 1783,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRING : self. ip = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. I32 : self. start_time = iprot. readI32 ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRUCT : self. file = PacketCaptureFile ( ) self. file. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 4 : if ftype == TType. I32 : self. pid = iprot. readI32 ( )",True,fid == 3,fid == 3,0.6759360432624817 1784,"def load_data ( self ) : from GPy. util. datasets import download_url, data_path if : download_url ( Housing. url, self. datapath, messages = True ) if : return False data = np. loadtxt ( os. path. join ( data_path, self. datapath, self. filename ) ) self. data = data data_train = data [ : 250, : - 1 ] label_train = data [ : 250, - 1 : ] self. 
train = ( data_train, label_train ) data_test = data [ 250 :, : - 1 ] label_test = data [ 250 :, - 1 : ] self. test = ( data_test, label_test ) return True",False,"not os.path.exists(os.path.join(data_path, self.datapath, self.filename))",not os.path.exists(data_path),0.6485799551010132 1785,"def printsumfp ( fp, filename, out = sys. stdout ) : m = md5 ( ) try : while 1 : data = fp. read ( bufsize ) if : break if isinstance ( data, str ) : data = data. encode ( fp. encoding ) m. update ( data ) except IOError as msg : sys. stderr. write ( ""%s: I/O error: %s\n"" % ( filename, msg ) ) return 1 out. write ( ""%s %s\n"" % ( m. hexdigest ( ), filename ) ) return 0",True,not data,not data,0.6703860759735107 1786,"def FileHacks ( self ) : """"""Hacks to make the filesystem look normal."""""" if sys. platform == ""win32"" : import win32api if self. path == ""/"" : self. files = win32api. GetLogicalDriveStrings ( ). split ( ""\x00"" ) self. files = [ drive. rstrip ( ""\\"" ) for drive in self. files if drive ] elif : self. size = 0x7FFFFFFFFFFFFFFF self. path = self. path. rstrip ( ""\\"" ) self. alignment = 512 elif sys. platform == ""darwin"" : if re. match ( ""/dev/r?disk.*"", self. path ) : self. size = 0x7FFFFFFFFFFFFFFF self. alignment = 512",False,"re.match('/*\\\\\\\\.\\\\[^\\\\]+\\\\?$', self.path) is not None",sys.platform == 'win32',0.6573024988174438 1787,"def write ( self, command ) : setting_multiline = get_setting ( ""format_multiline"", True ) setting_trimwhitespace = get_setting ( ""format_trim_whitespace"", True ) setting_injectlet = get_setting ( ""format_inject_let"", True ) new_cmd = """" if command. isspace ( ) or ( not setting_multiline and not setting_trimwhitespace ) : new_cmd = command else : lines = command. splitlines ( True ) if : lines = ghci_remove_whitespace ( lines ) if setting_injectlet : lines = ghci_inject_let ( lines ) if setting_multiline : lines = ghci_wrap_multiline_syntax ( lines ) new_cmd = """". join ( lines ) return super ( SublimeHaskellRepl, self ). write ( new_cmd )",True,setting_trimwhitespace,setting_trimwhitespace,0.6628965735435486 1788,"def validate ( self, value, model_instance ) : super ( ). validate ( value, model_instance ) for l in value : if : raise exceptions. ValidationError ( self. error_messages [ ""delimiter_found"" ], code = ""delimiter_found"", )",False,DELIMITER in l,l not in self.tables,0.6832408905029297 1789,"def delete ( self, repo, user = None ) : if not user : user = self. username try : self. gg. delete_repository ( user, repo ) except ApiFailure as err : if : raise ResourceNotFoundError ( ""Cannot delete: repository {}/{} does not exists."". format ( user, repo ) ) from err elif err. status_code == 403 : raise ResourcePermissionError ( ""You don't have enough permissions for deleting the repository. Check the namespace or the private token's privileges"" ) from err elif err. status_code == 422 : raise ResourceNotFoundError ( ""Cannot delete repository {}/{}: user {} does not exists."". format ( user, repo, user ) ) from err raise ResourceError ( ""Unhandled error: {}"". format ( err ) ) from err except Exception as err : raise ResourceError ( ""Unhandled exception: {}"". format ( err ) ) from err",True,err.status_code == 404,err.status_code == 404,0.6534093022346497 1790,"def display_failures_for_single_test ( result : TestResult ) -> None : """"""Display a failure for a single method / endpoint."""""" display_subsection ( result ) checks = _get_unique_failures ( result. 
checks ) for idx, check in enumerate ( checks, 1 ) : message : Optional [ str ] if check. message : message = f""{idx}. {check.message}"" else : message = None example = cast ( Case, check. example ) display_example ( example, check. name, message, result. seed ) if : click. echo ( ""\n"" )",False,idx != len(checks),result.print_errors,0.6549217700958252 1791,"def main ( ) : pop = toolbox. population ( n = 5 ) stats = tools. Statistics ( lambda ind : ind. fitness. values ) stats. register ( ""avg"", numpy. mean ) stats. register ( ""std"", numpy. std ) stats. register ( ""min"", numpy. min ) stats. register ( ""max"", numpy. max ) logbook = tools. Logbook ( ) logbook. header = [ ""gen"", ""evals"" ] + stats. fields GEN = 1000 best = None for g in range ( GEN ) : for part in pop : part. fitness. values = toolbox. evaluate ( part ) if : part. best = creator. Particle ( part ) part. best. fitness. values = part. fitness. values if best is None or best. fitness < part. fitness : best = creator. Particle ( part ) best. fitness. values = part. fitness. values for part in pop : toolbox. update ( part, best ) logbook. record ( gen = g, evals = len ( pop ), ** stats. compile ( pop ) ) print ( logbook. stream ) return pop, logbook, best",False,part.best is None or part.best.fitness < part.fitness,creator is not None,0.6480889916419983 1792,"def batch_iterator ( inputs, targets = None, batch_size = None, shuffle = False, allow_smaller_batch = False, repeat = True, ) : """"""A generator that provides batches of samples from the provided inputs."""""" if isinstance ( inputs, set ) : inputs = np. asarray ( list ( inputs ) ) if not isinstance ( inputs, ( np. ndarray, list ) ) : raise TypeError ( ""Unsupported data type %s encountered."" % type ( inputs ) ) if targets is not None and not isinstance ( targets, ( np. ndarray, list ) ) : raise TypeError ( ""Unsupported data type %s encountered."" % type ( targets ) ) num_samples = len ( inputs ) if batch_size is None : batch_size = num_samples if batch_size > num_samples : allow_smaller_batch = True keep_going = True while keep_going : indexes = np. arange ( 0, num_samples ) if : np. random. shuffle ( indexes ) shuffled_inputs = inputs [ indexes ] if targets is not None : shuffled_targets = targets [ indexes ] for start_index in range ( 0, num_samples, batch_size ) : if allow_smaller_batch : end_index = min ( start_index + batch_size, num_samples ) else : end_index = start_index + batch_size if end_index > num_samples : break batch_inputs = shuffled",True,shuffle,shuffle,0.7065909504890442 1793,"def get_system_stats ( proc_path = None ) : systemStats = { ""machine"" : platform. machine ( ), ""platform"" : sys. platform, ""processor"" : platform. processor ( ), ""pythonV"" : platform. python_version ( ), } platf = sys. platform try : if Platform. is_linux ( platf ) : if not proc_path : proc_path = ""/proc"" proc_cpuinfo = os. path. join ( proc_path, ""cpuinfo"" ) output, _, _ = get_subprocess_output ( [ ""grep"", ""model name"", proc_cpuinfo ], log ) systemStats [ ""cpuCores"" ] = len ( output. splitlines ( ) ) if Platform. is_darwin ( platf ) or Platform. is_freebsd ( platf ) : output, _, _ = get_subprocess_output ( [ ""sysctl"", ""hw.ncpu"" ], log ) systemStats [ ""cpuCores"" ] = int ( output. split ( "": "" ) [ 1 ] ) except SubprocessOutputEmptyError as e : log. warning ( ""unable to retrieve number of cpuCores. Failed with error %s"", e ) if Platform. is_linux ( platf ) : name, version, codename = distro. 
linux_distribution ( full_distribution_name = False ) if : name = ""amazon"" systemStats [ ""nixV"" ] = ( name, version,",False,name == 'amzn',codename,0.653171181678772 1794,"def everythingIsUnicode ( d ) : """"""Takes a dictionary, recursively verifies that every value is unicode"""""" for k, v in d. iteritems ( ) : if isinstance ( v, dict ) and k!= ""headers"" : if not everythingIsUnicode ( v ) : return False elif : for i in v : if isinstance ( i, dict ) and not everythingIsUnicode ( i ) : return False elif isinstance ( i, _bytes ) : return False elif isinstance ( v, _bytes ) : return False return True",True,"isinstance(v, list)","isinstance(v, list)",0.654982328414917 1795,"def execute ( self, host, port = ""513"", luser = ""root"", user = """", password = None, prompt_re = ""\w+:"", timeout = ""10"", persistent = ""1"", ) : fp, _ = self. bind ( host, port, timeout = int ( timeout ) ) trace = """" timeout = int ( timeout ) with Timing ( ) as timing : if self. need_handshake : fp. write ( ""\x00%s\x00%s\x00vt100/9600\x00"" % ( luser, user ) ) self. need_handshake = False else : fp. write ( ""%s\r"" % user ) _, _, resp = fp. expect ( [ prompt_re ], timeout = timeout ) trace += resp if : fp. write ( ""%s\r"" % password ) _, _, resp = fp. expect ( [ prompt_re ], timeout = timeout ) trace += resp if persistent == ""0"" : self. reset ( ) mesg = repr ( resp. strip ( ) ) [ 1 : - 1 ] return self. Response ( 0, mesg, timing, trace )",False,password is not None,password != None,0.6597955226898193 1796,"def move_stdout_to_stderr ( self ) : to_remove = [ ] to_add = [ ] for consumer_level, consumer in self. consumers : if : to_remove. append ( ( consumer_level, consumer ) ) to_add. append ( ( consumer_level, sys. stderr ) ) for item in to_remove : self. consumers. remove ( item ) self. consumers. extend ( to_add )",False,consumer == sys.stdout,consumer,0.6693646311759949 1797,"def import_submodules ( package_name ) : package = sys. modules [ package_name ] results = { } for loader, name, is_pkg in pkgutil. iter_modules ( package. __path__ ) : full_name = package_name + ""."" + name module = importlib. import_module ( full_name ) setattr ( sys. modules [ __name__ ], name, module ) results [ full_name ] = module if is_pkg : valid_pkg = import_submodules ( full_name ) if : results. update ( valid_pkg ) return results",True,valid_pkg,valid_pkg,0.6771539449691772 1798,"def fix_e712 ( self, result ) : """"""Fix comparison with boolean."""""" line_index = result [ ""line"" ] - 1 target = self. source [ line_index ] offset = result [ ""column"" ] - 1 if re. match ( r""^\s*if \w+ == False:$"", target ) : self. source [ line_index ] = re. sub ( r""if (\w+) == False:"", r""if not \1:"", target, count = 1 ) elif re. match ( r""^\s*if \w+!= True:$"", target ) : self. source [ line_index ] = re. sub ( r""if (\w+)!= True:"", r""if not \1:"", target, count = 1 ) else : right_offset = offset + 2 if right_offset >= len ( target ) : return [ ] left = target [ : offset ]. rstrip ( ) center = target [ offset : right_offset ] right = target [ right_offset : ]. lstrip ( ) new_right = None if center. strip ( ) == ""=="" : if re. match ( r""\bTrue\b"", right ) : new_right = re. sub ( r""\bTrue\b *"", """", right, count = 1 ) elif : if re. match ( r""\bFalse\b"", right ) : new_right = re. sub ( r""\bFalse\b *"", """", right, count = 1 ) if new_right is None : if isinstance ( info, ast. FunctionDef ) : for arg in info. args. args : if : if arg. id in script. modelVars : self. 
problem ( ""Function {0} may shadow model variable {1}"". format ( info. name, arg. id ), lineno = info. lineno, )",False,"isinstance(arg, ast.Name)","hasattr(script, 'modelVars')",0.6494197249412537 1800,"def encryptBlock ( self, plainTextBlock ) : """"""CBC block encryption, IV is set with 'encrypt'"""""" auto_IV = """" if self. encryptBlockCount == 0 : if : self. iv = """". join ( [ chr ( self. r. randrange ( 256 ) ) for i in range ( self. blockSize ) ] ) self. prior_encr_CT_block = self. iv auto_IV = self. prior_encr_CT_block else : assert len ( self. iv ) == self. blockSize, ""IV must be same length as block"" self. prior_encr_CT_block = self. iv """""" encrypt the prior CT XORed with the PT """""" ct = self. baseCipher. encryptBlock ( xor ( self. prior_encr_CT_block, plainTextBlock ) ) self. prior_encr_CT_block = ct return auto_IV + ct",False,self.iv == None,self.iv_len == 0,0.6667077541351318 1801,"def _extract_config ( self, project, test_suites, target_test_case = None ) : execution = [ ] scenarios = { } project_properties = self. _extract_properties ( project, key_prefix = ""#Project#"" ) project_name = project. get ( ""name"" ) interface_exec, interface_scen = self. _extract_interface ( project_name, self. interface ) execution. append ( interface_exec ) scenarios. update ( interface_scen ) for suite in test_suites : suite_props = BetterDict. from_dict ( project_properties ) suite_props. merge ( self. _extract_properties ( suite, key_prefix = ""#TestSuite#"" ) ) test_cases = suite. findall ( "".//con:testCase"", namespaces = self. NAMESPACES ) for case in test_cases : case_name = case. get ( ""name"" ) scenario_name, scenario = self. _extract_test_case ( case, suite, suite_props ) load_exec = self. _extract_execution ( case ) load_exec [ ""scenario"" ] = scenario_name self. log. debug ( ""Extracted execution for scenario %s"", scenario_name ) if : self. log. warning ( ""No requests extracted for scenario %s, skipping it"" % scenario_name ) continue if target_test_case is None or target_test_case == case_name : self. log. debug ( ""Extracted scenario: %s"", scenario",False,not scenario['requests'],load_exec['scenario'] is None,0.6616487503051758 1802,"def make_node ( self, x, y, ilist ) : ctx_name = infer_context_name ( x, y ) x_ = as_gpuarray_variable ( x, ctx_name ) y_ = as_gpuarray_variable ( y, ctx_name ) ilist_ = tensor. as_tensor_variable ( ilist ) assert x_. type. ndim >= y_. type. ndim if ilist_. type. dtype not in tensor. integer_dtypes : raise TypeError ( ""index must be integers"" ) if ilist_. type. ndim!= 1 : raise TypeError ( ""index must be vector"" ) if x_. type. ndim == 0 : raise TypeError ( ""cannot index into a scalar"" ) if y_. type. ndim > x_. type. ndim : if : opname = ""set"" else : opname = ""increment"" raise TypeError ( ""cannot %s x subtensor with ndim=%s by y with ndim=%s "" % ( opname, x_. type. ndim, y_. type. ndim ) ) return gof. Apply ( self, [ x_, y_, ilist_ ], [ x_. type ( ) ] )",False,self.set_instead_of_inc,x_.type.dtype == tensor.integer_dtypes,0.6508128643035889 1803,"def pre_make ( self, data, ** kwargs ) : schema = data. get ( ""schema"" ) kind = data. get ( ""kind"" ) if schema and kind : schema [ ""kind"" ] = ""custom"" if kind in V1ConnectionKind. BLOB_VALUES : schema [ ""kind"" ] = V1BucketConnection. IDENTIFIER if kind == V1ConnectionKind. VOLUME_CLAIM : schema [ ""kind"" ] = V1ClaimConnection. IDENTIFIER if : schema [ ""kind"" ] = V1HostPathConnection. IDENTIFIER if kind == V1ConnectionKind. 
REGISTRY : schema [ ""kind"" ] = V1HostConnection. IDENTIFIER if kind == V1ConnectionKind. GIT : schema [ ""kind"" ] = V1GitConnection. IDENTIFIER return data",False,kind == V1ConnectionKind.HOST_PATH,kind == V1ConnectionKind.PATH,0.662108838558197 1804,"def _sniff ( filename, oxlitype ) : try : with open ( filename, ""rb"" ) as fileobj : header = fileobj. read ( 4 ) if : fileobj. read ( 1 ) ftype = fileobj. read ( 1 ) if binascii. hexlify ( ftype ) == oxlitype : return True return False except OSError : return False",False,header == b'OXLI',len(header) > 4,0.6647782921791077 1805,"def close ( self, * args, ** kwargs ) : super ( mytqdm, self ). close ( * args, ** kwargs ) if hasattr ( self, ""sp"" ) : if : self. sp ( bar_style = ""danger"" ) else : if self. leave : self. sp ( bar_style = ""success"" ) else : self. sp ( close = True )",False,self.total and self.n < self.total,self.leave,0.6521770358085632 1806,"def check ( self ) : response = b"""" payload = b""\x00"" * 8 udp_client = self. udp_create ( ) udp_client. send ( payload ) if udp_client : response = udp_client. recv ( 1024 ) if : if response. endswith ( b""\xD0\xA5Login:"" ) : return True elif response. endswith ( b""\x00\x00\x00\x05\x00\x01\x00\x00\x00\x00\x01\x00\x00"" ) : return True return False",True,response,response,0.693464994430542 1807,"def visit_Attribute ( self, node ) : self. generic_visit ( node ) reserved = ( ""next"", ""posedge"", ""negedge"", ""max"", ""min"", ""val"", ""signed"", ""verilog_code"", ""vhdl_code"", ) if node. attr in reserved : return node if not isinstance ( node. value, ast. Name ) : return node if node. value. id not in self. data. symdict : return node obj = self. data. symdict [ node. value. id ] if isinstance ( obj, ( EnumType, FunctionType ) ) : return node elif isinstance ( obj, SignalType ) : if : return node attrobj = getattr ( obj, node. attr ) orig_name = node. value. id + ""."" + node. attr if orig_name not in self. name_map : base_name = node. value. id + ""_"" + node. attr self. name_map [ orig_name ] = _suffixer ( base_name, self. data. symdict ) new_name = self. name_map [ orig_name ] self. data. symdict [ new_name ] = attrobj self. data. objlist. append ( new_name ) new_node = ast. Name ( id = new_name, ctx = node. value. ctx ) return ast. copy_location ( new_node, node )",False,"hasattr(SignalType, node.attr)","hasattr(obj, node.attr)",0.651077389717102 1808,"def get_items ( cls, *, html : str = """", url : str = """", html_etree : etree. _Element = None, ** kwargs, ) : if html_etree is None : html_etree = await cls. _get_html ( html, url, ** kwargs ) items_field = getattr ( cls, ""__fields"", { } ). get ( ""target_item"", None ) if items_field : items_field. many = True items_html_etree = items_field. extract ( html_etree = html_etree, is_source = True ) if : for each_html_etree in items_html_etree : item = await cls. _parse_html ( html_etree = each_html_etree ) if not item. ignore_item : yield item else : value_error_info = "" if url : value_error_info = f""{value_error_info} url: {url}.>"" if html : value_error_info = f""{value_error_info} html.>"" raise ValueError ( value_error_info ) else : raise ValueError ( f"""" )",True,items_html_etree,items_html_etree,0.6595181226730347 1809,"def poll ( timeout = 0.0, map = None ) : if map is None : map = socket_map if map : r = [ ] w = [ ] e = [ ] for fd, obj in list ( map. items ( ) ) : is_r = obj. readable ( ) is_w = obj. writable ( ) if is_r : r. append ( fd ) if is_w and not obj. accepting : w. append ( fd ) if is_r or is_w : e. 
append ( fd ) if [ ] == r == w == e : time. sleep ( timeout ) return r, w, e = select. select ( r, w, e, timeout ) for fd in r : obj = map. get ( fd ) if : continue read ( obj ) for fd in w : obj = map. get ( fd ) if : continue write ( obj ) for fd in e : obj = map. get ( fd ) if : continue _exception ( obj )",False,obj is None,obj == w,0.6680967807769775 1810,"def _execute_combine ( cls, ctx, op : ""DataFrameEwmAgg"" ) : try : cls. _exec_cache [ op. key ] = dict ( ) if len ( op. inputs )!= 1 : pred_data = ctx [ op. inputs [ 1 ]. key ] if : pred_exponent = pred_data [ - 2 ]. shift ( - 1 ) [ : : - 1 ]. cumsum ( ) [ : : - 1 ]. fillna ( 0 ) else : succ_counts = pred_data [ - 1 ]. shift ( - 1 ) succ_counts. iloc [ - 1 ] = 0 pred_exponent = pred_data [ - 2 ]. add ( succ_counts [ : : - 1 ]. cumsum ( ) [ : : - 1 ], axis = op. axis ) cls. _exec_cache [ op. key ] [ ""pred_exponent"" ] = pred_exponent super ( ). _execute_combine ( ctx, op ) finally : cls. _exec_cache. pop ( op. key, None )",False,op.alpha_ignore_na,len(pred_data.shape) == 3,0.660312294960022 1811,def _get_error_file ( self ) -> Optional [ str ] : error_file = None min_timestamp = sys. maxsize for replicas in self. role_replicas. values ( ) : for replica in replicas : if : continue mtime = os. path. getmtime ( replica. error_file ) if mtime < min_timestamp : min_timestamp = mtime error_file = replica. error_file return error_file,False,not os.path.exists(replica.error_file),replica.error_file is None,0.6474065780639648 1812,"def _infer_return_type ( * args ) : """"""Look at the type of all args and divine their implied return type."""""" return_type = None for arg in args : if arg is None : continue if isinstance ( arg, bytes ) : if return_type is str : raise TypeError ( ""Can't mix bytes and non-bytes in "" ""path components."" ) return_type = bytes else : if : raise TypeError ( ""Can't mix bytes and non-bytes in "" ""path components."" ) return_type = str if return_type is None : return str return return_type",False,return_type is bytes,return_type is str,0.6595941781997681 1813,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if : break if fid == 1 : if ftype == TType. I32 : self. protocol_version = iprot. readI32 ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. requestorUserName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. STRING : self. roleName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 4 : if ftype == TType. STRING : self. component = iprot. readString ( ) else : iprot",False,ftype == TType.STOP,fid == 0,0.6630985736846924 1814,"def get_token ( self ) : ""Get a token from the input stream (or from stack if it's nonempty)"" if self. pushback : tok = self. pushback. popleft ( ) if self. debug >= 1 : print ( ""shlex: popping token "" + repr ( tok ) ) return tok raw = self. read_token ( ) if self. source is not None : while raw == self. source : spec = self. sourcehook ( self. read_token ( ) ) if spec : ( newfile, newstream ) = spec self. push_source ( newstream, newfile ) raw = self. get_token ( ) while raw == self. eof : if not self. filestack : return self. eof else : self. 
pop_source ( ) raw = self. get_token ( ) if self. debug >= 1 : if : print ( ""shlex: token="" + repr ( raw ) ) else : print ( ""shlex: token=EOF"" ) return raw",False,raw != self.eof,raw,0.6653341054916382 1815,"def assert_open ( self, sock, * rest ) : if isinstance ( sock, fd_types ) : self. __assert_fd_open ( sock ) else : fileno = sock. fileno ( ) assert isinstance ( fileno, fd_types ), fileno sockname = sock. getsockname ( ) assert isinstance ( sockname, tuple ), sockname if : self. __assert_fd_open ( fileno ) else : self. _assert_sock_open ( sock ) if rest : self. assert_open ( rest [ 0 ], * rest [ 1 : ] )",False,not WIN,"isinstance(sock, fd_types)",0.7034542560577393 1816,"def palindromic_substrings ( s ) : if not s : return [ [ ] ] results = [ ] for i in range ( len ( s ), 0, - 1 ) : sub = s [ : i ] if : for rest in palindromic_substrings ( s [ i : ] ) : results. append ( [ sub ] + rest ) return results",False,sub == sub[::-1],sub,0.6545841097831726 1817,"def inner ( self, * iargs, ** ikwargs ) : try : return getattr ( super ( VEXResilienceMixin, self ), func ) ( * iargs, ** ikwargs ) except excs as e : for exc, handler in zip ( excs, handlers ) : if : v = getattr ( self, handler ) ( * iargs, ** ikwargs ) if v is raiseme : raise return v assert False, ""this should be unreachable if Python is working correctly""",False,"isinstance(e, exc)","hasattr(self, handler)",0.6545758843421936 1818,"def _create_network_filters ( self, instance, network_info, instance_secgroup_filter_name ) : if instance [ ""image_ref"" ] == str ( FLAGS. vpn_image_id ) : base_filter = ""nova-vpn"" else : base_filter = ""nova-base"" result = [ ] for ( _n, mapping ) in network_info : nic_id = mapping [ ""mac"" ]. replace ( "":"", """" ) instance_filter_name = self. _instance_filter_name ( instance, nic_id ) instance_filter_children = [ base_filter, ""nova-provider-rules"", instance_secgroup_filter_name, ] if : instance_filter_children. append ( ""nova-project"" ) if FLAGS. use_ipv6 : instance_filter_children. append ( ""nova-project-v6"" ) result. append ( ( instance_filter_name, instance_filter_children ) ) return result",False,FLAGS.allow_same_net_traffic,FLAGS.project,0.6486634016036987 1819,"def select_singularity ( self ) : """"""Set singularity executable and related variables"""""" conf = Config ( ) self. executable = conf. use_singularity_executable if self. executable!= ""UDOCKER"" and not self. executable : self. executable = FileUtil ( ""singularity"" ). find_exec ( ) if self. executable == ""UDOCKER"" or not self. executable : self. executable = """" arch = HostInfo ( ). arch ( ) image_list = [ ] if arch == ""amd64"" : image_list = [ ""singularity-x86_64"", ""singularity"" ] elif arch == ""i386"" : image_list = [ ""singularity-x86"", ""singularity"" ] elif arch == ""arm64"" : image_list = [ ""singularity-arm64"", ""singularity"" ] elif : image_list = [ ""singularity-arm"", ""singularity"" ] f_util = FileUtil ( self. localrepo. bindir ) self. executable = f_util. find_file_in_dir ( image_list ) if not os. path. exists ( self. executable ) : Msg ( ). err ( ""Error: singularity executable not found"" ) sys. exit ( 1 )",False,arch == 'arm',arch == 'locallocal',0.6654393672943115 1820,"def check_weights ( x, y, key : str, index = None ) : for i in [ 2, 0 ] : if : continue left = x [ i ]. get_master_weight ( ) right = y [ i ]. weight. data if not torch. allclose ( left, right, atol = 1.0e-6 ) or index is not None : print ( f""check_weights {key}-{i}: left = {left}, \nright = {right}"" ) if not torch. 
equal ( left, right ) : print ( f""check_weights NOT_EQUAL {key}-{i}: left = {left}, \nright = {right}"" ) assert torch. allclose ( left, right, atol = 1.0e-6 )",False,index is not None and i != index,x[i] == y[i],0.6566314697265625 1821,"def configuration_install ( app, uri, verify_ssl, config_type = None, args = None, source_folder = None, target_folder = None, ) : cache, output, requester = app. cache, app. out, app. requester configs = [ ] configs_file = cache. config_install_file if os. path. isfile ( configs_file ) : configs = _load_configs ( configs_file ) if uri is None : if config_type or args or not verify_ssl : if not configs : raise ConanException ( ""Called config install without arguments"" ) config = configs [ - 1 ] config. config_type = config_type or config. type config. args = args or config. args config. verify_ssl = verify_ssl or config. verify_ssl _process_config ( config, cache, output, requester ) _save_configs ( configs_file, configs ) else : if not configs : raise ConanException ( ""Called config install without arguments"" ) for config in configs : output. info ( ""Config install: %s"" % _hide_password ( config. uri ) ) _process_config ( config, cache, output, requester ) touch ( cache. config_install_file ) else : config = _ConfigOrigin. from_item ( ",False,config not in configs,source_folder,0.6635862588882446 1822,"def main ( argv = None ) : if not argv : argv = sys. argv s = Store ( ) if argv [ 1 ] in ( ""help"", ""--help"", ""h"", ""-h"" ) : help ( ) elif argv [ 1 ] == ""whoami"" : if os. path. exists ( storefn ) : print ( list ( s. who ( ) ) [ 0 ] ) else : s. who ( argv [ 2 ] ) elif argv [ 1 ]. startswith ( ""http://www.imdb.com/title/tt"" ) : if : raise else : i = imdb. IMDb ( ) movie = i. get_movie ( argv [ 1 ] [ len ( ""http://www.imdb.com/title/tt"" ) : - 1 ] ) print ( ""%s (%s)"" % ( movie [ ""title"" ]. encode ( ""utf-8"" ), movie [ ""year"" ] ) ) for director in movie [ ""director"" ] : print ( ""directed by: %s"" % director [ ""name"" ]. encode ( ""utf-8"" ) ) for writer in movie [ ""writer"" ] : print ( ""written by: %s"" % writer [ ""name"" ]. encode ( ""utf-8"" ) ) s. new_movie ( movie ) rating = None while not rating or ( rating > 5 or rating <= 0 ) : try : rating",False,s.movie_is_in(argv[1]),len(argv) < 3,0.6518588066101074 1823,"def test_assert_set_equal ( estimate : tp. Iterable [ int ], message : str ) -> None : reference = { 1, 2, 3 } try : testing. assert_set_equal ( estimate, reference ) except AssertionError as error : if not message : raise AssertionError ( ""An error has been raised while it should not."" ) from error np. testing. assert_equal ( error. args [ 0 ]. split ( ""\n"" ) [ 1 : ], message ) else : if : raise AssertionError ( ""An error should have been raised."" )",False,message,not estimate,0.694068193435669 1824,"def _buffered_generator ( self, size ) : buf = [ ] c_size = 0 push = buf. append while 1 : try : while c_size < size : c = next ( self. _gen ) push ( c ) if c : c_size += 1 except StopIteration : if : return yield concat ( buf ) del buf [ : ] c_size = 0",False,not c_size,len(buf) == 0,0.6614516973495483 1825,"def getExecutionCode ( self, required ) : yield ""if material is not None:"" yield "" if not material.is_grease_pencil:"" yield "" bpy.data.materials.create_gpencil_data(material)"" s = self. inputs isShowStroke = s [ ""Show Stroke"" ]. isUsed isStrokeDrawMode = s [ ""Stroke Draw Mode"" ]. isUsed isStrokeColor = s [ ""Stroke Color"" ]. isUsed isShowFill = s [ ""Show Fill"" ]. 
isUsed isFillColor = s [ ""Fill Color"" ]. isUsed isPassIndex = s [ ""Pass Index"" ]. isUsed if any ( [ isShowStroke, isStrokeDrawMode, isStrokeColor, isShowFill, isFillColor, isPassIndex, ] ) : yield "" gpMaterial = material.grease_pencil"" if isShowStroke : yield "" gpMaterial.show_stroke = showStroke"" if isStrokeDrawMode : yield "" self.setStrokeDrawMode(gpMaterial, strokeDrawMode)"" if isStrokeColor : yield "" gpMaterial.color = strokeColor"" if isShowFill : yield "" gpMaterial.show_fill = showFill"" if isFillColor : yield "" gpMaterial.fill_color = fillColor"" if : yield "" gpMaterial.pass_index = passIndex""",True,isPassIndex,isPassIndex,0.66862952709198 1826,"def _load_data ( self, addr, size, endness ) : if isinstance ( addr, SpOffset ) : v = self. state. load_local_variable ( addr. offset, size ) return v elif isinstance ( addr, int ) : if : if self. base_state is not None : _l. debug ( ""Loading %d bytes from %x."", size, addr ) data = self. base_state. memory. load ( addr, size, endness = endness ) if not data. symbolic : return self. base_state. solver. eval ( data ) else : try : val = self. project. loader. memory. unpack_word ( addr, size = size, endness = endness ) return val except KeyError : return None return None",False,"self._allow_loading(addr, size)",size > 0,0.651053249835968 1827,"def _sample_translation ( reference, max_len ) : translation = reference [ : ] while np. random. uniform ( ) < 0.8 and 1 < len ( translation ) < max_len : trans_len = len ( translation ) ind = np. random. randint ( trans_len ) action = np. random. choice ( actions ) if : del translation [ ind ] elif action == ""replacement"" : ind_rep = np. random. randint ( trans_len ) translation [ ind ] = translation [ ind_rep ] else : ind_insert = np. random. randint ( trans_len ) translation. insert ( ind, translation [ ind_insert ] ) return translation",False,action == 'deletion',action == 'replacement',0.6649367809295654 1828,"def backward_impl ( self, inputs, outputs, prop_down, accum ) : x0 = inputs [ 0 ]. data w0 = inputs [ 1 ]. data dy = inputs [ 2 ]. data dx0 = outputs [ 0 ]. data dw0 = outputs [ 1 ]. data g_x0 = inputs [ 0 ]. grad g_w0 = inputs [ 1 ]. grad g_dy = inputs [ 2 ]. grad g_dx0 = outputs [ 0 ]. grad g_dw0 = outputs [ 1 ]. grad if prop_down [ 2 ] : g_dy_ = F. embed ( x0, g_dw0 ) if : g_dy += g_dy_ else : g_dy. copy_from ( g_dy_ )",False,accum[2],prop_down[1],0.6642348766326904 1829,"def convert_ids_to_tokens ( self, ids, skip_special_tokens = False ) : """"""Converts a sequence of ids in BPE tokens using the vocab."""""" tokens = [ ] for i in ids : if : if not skip_special_tokens : tokens. append ( self. special_tokens_decoder [ i ] ) else : tokens. append ( self. decoder [ i ] ) return tokens",False,i in self.special_tokens_decoder,i in self.decoder,0.6585270166397095 1830,"def rerun_failed_tests ( self ) : self. ns. verbose = True self. ns. failfast = False self. ns. verbose3 = False self. first_result = self. get_tests_result ( ) self. log ( ) self. log ( ""Re-running failed tests in verbose mode"" ) self. rerun = self. bad [ : ] for test_name in self. rerun : self. log ( f""Re-running {test_name} in verbose mode"" ) self. ns. verbose = True result = runtest ( self. ns, test_name ) self. accumulate_result ( result, rerun = True ) if : break if self. bad : print ( count ( len ( self. bad ), ""test"" ), ""failed again:"" ) printlist ( self. bad ) self. 
display_result ( )",False,result.result == INTERRUPTED,self.first_result,0.656733512878418 1831,"def imports_as_stmts ( self, expr ) : """"""Convert the Result's imports to statements"""""" ret = Result ( ) for module, names in self. imports. items ( ) : if : ret += self. compile ( [ HyExpression ( [ HySymbol ( ""import"" ), HySymbol ( module ), ] ). replace ( expr ) ] ) names = sorted ( name for name in names if name ) if names : ret += self. compile ( [ HyExpression ( [ HySymbol ( ""import"" ), HyList ( [ HySymbol ( module ), HyList ( [ HySymbol ( name ) for name in names ] ), Box. _redraw ( self ) count = 0 fix_width = 0 if not self. homogeneous : for child in self. children : if : count += 1 else : fix_width += ( child. minwidth + self. spacing + child. padding + 2 * child. margin ) else : count = len ( self. children ) container = self. widget_cont left = self. margin for child in self. children : if len ( self. children )!= 1 : if child. minheight + 2 * self. margin > self. minheight : self. minheight = child. minheight + 2 * self. margin self. minwidth += ( child. minwidth + 2 * child. margin + self. spacing + child. padding ) container. setPxStyle ( ""minHeight"", self. minheight ) container. setPxStyle ( ""minWidth"", self. minwidth ) count = max ( count, 1 ) horiz_inc = ( container. getWidth ( ) - 2 * self. margin - fix_width ) / count for child in self. children : child_container = child. widget_cont child_container. setPxStyle ( ""height"", container. getHeight ( ) - 2 * self. margin ) child_container. setPxStyle ( ""left"", left + self. spacing / 2 + child. padding / 2 ) if : ",False,child.expand,count > 0,0.6714144945144653 1833,"def set_template_dict ( self, template_dict ) : template_dict [ ""self_name"" ] = self. name template_dict [ ""self_type"" ] = self. parser_type kind = self. function. kind cls = self. function. cls if ( kind in ( METHOD_NEW, METHOD_INIT ) ) and cls and cls. typedef : if : passed_in_type = self. name else : passed_in_type = ""Py_TYPE({})"". format ( self. name ) line = ""({passed_in_type} == {type_object}) &&\n "" d = { ""type_object"" : self. function. cls. type_object, ""passed_in_type"" : passed_in_type, } template_dict [ ""self_type_check"" ] = line. format_map ( d )",False,kind == METHOD_NEW,kind == METHOD_CHECK,0.6639688014984131 1834,"def parseImpl ( self, instring, loc, doActions = True ) : maxExcLoc = - 1 maxException = None matches = [ ] for e in self. exprs : try : loc2 = e. tryParse ( instring, loc ) except ParseException as err : err. __traceback__ = None if err. loc > maxExcLoc : maxException = err maxExcLoc = err. loc except IndexError : if : maxException = ParseException ( instring, len ( instring ), e. errmsg, self ) maxExcLoc = len ( instring ) else : matches. append ( ( loc2, e ) ) if matches : matches. sort ( key = lambda x : - x [ 0 ] ) for _, e in matches : try : return e. _parse ( instring, loc, doActions ) except ParseException as err : err. __traceback__ = None if err. loc > maxExcLoc : maxException = err maxExcLoc = err. loc if maxException is not None : maxException. msg = self. errmsg raise maxException else : raise ParseException ( instring, loc, ""no defined alternatives to match"", self )",False,len(instring) > maxExcLoc,maxException is None,0.6602396368980408 1835,"def get_messages ( self, timeout = 0.1, count = 1 ) : started = time ( ) sleep_time = timeout / 10.0 while count : try : msg = self. subscriber. recv_multipart ( copy = True, flags = zmq. NOBLOCK ) except zmq. 
Again : if time ( ) - started > timeout : break sleep ( sleep_time ) else : partition_seqno, global_seqno = unpack ( "">II"", msg [ 2 ] ) seqno = global_seqno if self. count_global else partition_seqno if not self. counter : self. counter = seqno elif self. counter!= seqno : if : self. logger. warning ( ""Sequence counter mismatch: expected %d, got %d. Check if system "" ""isn't missing messages."" % ( self. counter, seqno ) ) self. counter = None yield msg [ 1 ] count -= 1 if self. counter : self. counter += 1 self. stats [ self. stat_key ] += 1",False,self.seq_warnings,self.logger,0.6623049974441528 1836,"def run ( self, ** kwargs : Any ) -> None : for node in self. document. traverse ( nodes. title ) : if : for i, index in enumerate ( node. traverse ( addnodes. index ) ) : node. remove ( index ) node. parent. insert ( i + 1, index )",False,"isinstance(node.parent, nodes.section)","hasattr(node, 'parent')",0.649115800857544 1837,"def _instrument_model ( self, model ) : for key, value in list ( model. __dict__. items ( ) ) : if isinstance ( value, tf. keras. layers. Layer ) : new_layer = self. _instrument ( value ) if new_layer is not value : setattr ( model, key, new_layer ) elif isinstance ( value, list ) : for i, item in enumerate ( value ) : if : value [ i ] = self. _instrument ( item ) return model",False,"isinstance(item, tf.keras.layers.Layer)",value[i] is not None,0.649843692779541 1838,"def emitIpToDomainsData ( self, data, event ) : self. emitRawRirData ( data, event ) domains = data. get ( ""domains"" ) if isinstance ( domains, list ) : for domain in domains : if self. checkForStop ( ) : return None domain = domain. strip ( ) if : self. emitHostname ( domain, event )",True,domain,domain,0.6864097118377686 1839,"def delete_item ( self, path ) : config = get_config ( ) headers = { } if self. event. auth_token : headers [ ""Authorization"" ] = f""Bearer {self.event.auth_token}"" try : res = requests. delete ( path, headers = headers, verify = False, timeout = config. network_timeout ) if : parsed_content = json. loads ( res. content ) return parsed_content [ ""metadata"" ] [ ""deletionTimestamp"" ] except ( requests. exceptions. ConnectionError, KeyError ) : pass return None",False,"res.status_code in [200, 201, 202]",res.status_code == 200,0.6555154323577881 1840,"def add_cells ( self, cells ) : for cell in cells : if : id = len ( self. cell_id_map ) self. cell_id_map [ cell ] = id self. id_cell_map [ id ] = cell",False,cell not in self.cell_id_map,cell in self.cell_list,0.6568676829338074 1841,"def _do_directory ( self, make_name, chdir_name, encoded ) : if os. path. isdir ( make_name ) : os. rmdir ( make_name ) os. mkdir ( make_name ) try : with change_cwd ( chdir_name ) : if : cwd_result = os. getcwdu ( ) name_result = make_name else : cwd_result = os. getcwd ( ). decode ( TESTFN_ENCODING ) name_result = make_name. decode ( TESTFN_ENCODING ) cwd_result = unicodedata. normalize ( ""NFD"", cwd_result ) name_result = unicodedata. normalize ( ""NFD"", name_result ) self. assertEqual ( os. path. basename ( cwd_result ), name_result ) finally : os. rmdir ( make_name )",False,not encoded,encoded,0.6740992069244385 1842,"def find_file_copyright_notices ( fname ) : ret = set ( ) f = open ( fname ) lines = f. readlines ( ) for l in lines [ : 80 ] : idx = l. lower ( ). find ( ""copyright"" ) if : continue copyright = l [ idx + 9 : ]. strip ( ) if not copyright : continue copyright = sanitise ( copyright ) if not copyright. find ( ""200"" ) >= 0 and not copyright. 
find ( ""199"" ) >= 0 : continue ret. add ( copyright ) return ret",True,idx < 0,idx < 0,0.6759262084960938 1843,"def __closest ( widget, compare, x, y ) : closest = None dc2 = 10000000 if widget is None : return closest, dc2 for child in widget. winfo_children ( ) : for class_ in Page. _motionClasses : if isinstance ( child, class_ ) : if child [ ""state"" ] == DISABLED : continue xw = child. winfo_rootx ( ) yw = child. winfo_rooty ( ) if compare ( x, y, xw, yw ) : d2 = ( xw - x ) ** 2 + ( yw - y ) ** 2 if : closest = child dc2 = d2 break else : c, d2 = Page. __closest ( child, compare, x, y ) if : closest = c dc2 = d2 return closest, dc2",False,d2 < dc2,closest is None,0.6674395799636841 1844,"def SetChildMenuBar ( self, pChild ) : if not pChild : if : self. SetMenuBar ( self. _pMyMenuBar ) else : self. SetMenuBar ( self. GetMenuBar ( ) ) self. _pMyMenuBar = None else : if pChild. GetMenuBar ( ) is None : return if self. _pMyMenuBar is None : self. _pMyMenuBar = self. GetMenuBar ( ) self. SetMenuBar ( pChild. GetMenuBar ( ) )",False,self._pMyMenuBar,self._pMyMenuBar is None,0.6639725565910339 1845,"def OnRadioSelect ( self, event ) : fitID = self. mainFrame. getActiveFit ( ) if fitID is not None : self. mainFrame. command. Submit ( cmd. GuiChangeImplantLocationCommand ( fitID = fitID, source = ImplantLocation. FIT if : else ImplantLocation. CHARACTER, ) )",False,self.rbFit.GetValue(),source == ImplantLocation.EXCEPTION,0.6545650959014893 1846,"def _Lookup ( self, node ) : """"""Look up a node by name."""""" module, _, _ = node. name. rpartition ( ""."" ) if module : modules_to_try = [ ( """", module ) ] else : modules_to_try = [ ( """", """" ), ( """", ""builtins"" ), ( ""builtins."", ""builtins"" ) ] modules_to_try += [ ( """", ""*"" ), ( ""builtins."", ""*"" ) ] for prefix, module in modules_to_try : mod_ast = self. _lookup_map. get ( module ) if : try : item = mod_ast. Lookup ( prefix + node. name ) except KeyError : pass else : yield prefix, item",True,mod_ast,mod_ast,0.6672509908676147 1847,"def _apply_base_delta ( self, schema : s_schema. Schema, context : sd. CommandContext, scls : so. InheritingObjectT, ) -> so. ObjectList [ so. InheritingObjectT ] : bases = list ( scls. get_bases ( schema ). objects ( schema ) ) default_base_name = scls. get_default_base_name ( ) if default_base_name : default_base : Optional [ so. InheritingObjectT ] = self. get_object ( schema, context, name = default_base_name ) if bases == [ default_base ] : bases = [ ] else : default_base = None removed_bases = { b. name for b in self. removed_bases } existing_bases = set ( ) for b in bases : if b. get_name ( schema ) in removed_bases : bases. remove ( b ) else : existing_bases. add ( b. get_name ( schema ) ) index = { b. get_name ( schema ) : i for i, b in enumerate ( bases ) } for new_bases, pos in self. added_bases : if isinstance ( pos, tuple ) : pos, ref = pos if pos is None or pos == ""LAST"" : idx = len ( bases ) elif pos == ""FIRST"" : idx = 0 else : idx = index [ ref. name ] bases [ idx : idx ] = [ self. get_object ( schema, context, name = b. name ) LOGGER. debug ( ""Broadcasting events: %s"", events ) with self. _subscribers_cv : subscribers = { conn : sub. copy ( ) for conn, sub in self. _subscribers. items ( ) } if subscribers : for connection_id, subscriber in subscribers. items ( ) : if : subscriber_events = [ event for event in events if subscriber. is_subscribed ( event ) ] event_list = EventList ( events = subscriber_events ) self. _send ( connection_id, event_list. 
SerializeToString ( ) )",False,subscriber.is_listening(),events,0.6530421376228333 1849,"def continuation_tokens ( self, width, line_number, is_soft_wrap = False ) : """"""Displays dots in multiline prompt"""""" if is_soft_wrap : return """" width = width - 1 dots = builtins. __xonsh__. env. get ( ""MULTILINE_PROMPT"" ) dots = dots ( ) if callable ( dots ) else dots if not dots : return """" basetoks = self. format_color ( dots ) baselen = sum ( len ( t [ 1 ] ) for t in basetoks ) if baselen == 0 : return [ ( Token, "" "" * ( width + 1 ) ) ] toks = basetoks * ( width // baselen ) n = width % baselen count = 0 for tok in basetoks : slen = len ( tok [ 1 ] ) newcount = slen + count if slen == 0 : continue elif newcount <= n : toks. append ( tok ) else : toks. append ( ( tok [ 0 ], tok [ 1 ] [ : n - count ] ) ) count = newcount if : break toks. append ( ( Token, "" "" ) ) return PygmentsTokens ( toks )",False,n <= count,count > line_number,0.6874635815620422 1850,"def s_style_master_page ( self, tag, attrs ) : """"""Collect the formatting for the page layout style."""""" name = attrs [ ( STYLENS, ""name"" ) ] name = name. replace ( ""."", ""_"" ) self. currentstyle = "".MP-"" + name self. stylestack. append ( self. currentstyle ) self. styledict [ self. currentstyle ] = { ( """", ""position"" ) : ""relative"" } pagelayout = attrs. get ( ( STYLENS, ""page-layout-name"" ), None ) if pagelayout : pagelayout = "".PL-"" + pagelayout if : styles = self. styledict [ pagelayout ] for style, val in list ( styles. items ( ) ) : self. styledict [ self. currentstyle ] [ style ] = val else : self. styledict [ self. currentstyle ] [ ""__parent-style-name"" ] = pagelayout self. s_ignorexml ( tag, attrs )",False,pagelayout in self.styledict,pagelayout and self.currentstyle in self.stledict,0.6599200963973999 1851,"def resolve_xref ( self, env : BuildEnvironment, fromdocname : str, builder : ""Builder"", typ : str, target : str, node : pending_xref, contnode : Element, ) -> Element : assert typ in ( ""eq"", ""numref"" ) docname, number = self. equations. get ( target, ( None, None ) ) if docname : node_id = make_id ( ""equation-%s"" % target ) if : if docname in env. toc_fignumbers : numbers = env. toc_fignumbers [ docname ] [ ""displaymath"" ]. get ( node_id, ( ) ) eqno = ""."". join ( map ( str, numbers ) ) else : eqno = """" else : eqno = str ( number ) try : eqref_format = env. config. math_eqref_format or ""({number})"" title = nodes. Text ( eqref_format. format ( number = eqno ) ) except KeyError as exc : logger. warning ( __ ( ""Invalid math_eqref_format: %r"" ), exc, location = node ) title = nodes. Text ( ""(%d)"" % number ) title = nodes. Text ( ""(%d)"" % number ) return make_refnode ( builder, fromdocname, docname, node_id, title ) else : return None",False,env.config.math_numfig and env.config.numfig,number,0.651963472366333 1852,"def __get_limits ( self ) : dimension = len ( self. __tree. get_root ( ). data ) nodes = self. __get_all_nodes ( ) max, min = [ float ( ""-inf"" ) ] * dimension, [ float ( ""+inf"" ) ] * dimension for node in nodes : for d in range ( dimension ) : if : max [ d ] = node. data [ d ] if min [ d ] > node. data [ d ] : min [ d ] = node. data [ d ] return min, max",False,max[d] < node.data[d],max[d] > node.data[d],0.6522320508956909 1853,def identify_page_at_cursor ( self ) : for region in self. view. sel ( ) : text_on_cursor = None pos = region. begin ( ) scope_region = self. view. extract_scope ( pos ) if : text_on_cursor = self. view. 
substr ( scope_region ) return text_on_cursor. strip ( string. punctuation ) return None,False,not scope_region.empty(),scope_region,0.6508708000183105 1854,"def set_transaction_execution_result ( self, txn_signature, is_valid, context_id, state_changes = None, events = None, data = None, error_message = """", error_data = b"""", ) : with self. _condition : if txn_signature not in self. _scheduled : raise SchedulerError ( ""transaction not scheduled: {}"". format ( txn_signature ) ) if txn_signature not in self. _batches_by_txn_id : return self. _set_least_batch_id ( txn_signature = txn_signature ) if not is_valid : self. _remove_subsequent_result_because_of_batch_failure ( txn_signature ) is_rescheduled = self. _reschedule_if_outstanding ( txn_signature ) if : self. _txn_results [ txn_signature ] = TxnExecutionResult ( signature = txn_signature, is_valid = is_valid, context_id = context_id if is_valid else None, state_hash = self. _first_state_hash if is_valid else None, state_changes = state_changes, events = events, data = data, error_message = error_message, error_data = error_data, ) self. _condition. notify_all ( )",False,not is_rescheduled,is_rescheduled,0.6538586616516113 1855,"def replace_ending_white_space ( self, name, newStr, lineNum ) : if not newStr : return numChars = len ( newStr ) ending_spaces_re = re. compile ( r""[ \t]{1,%d}\Z"" % numChars ) m = ending_spaces_re. search ( self. _pendingWhiteSpace [ name ] ) if m : mglen = len ( m. group ( ) ) if : self [ name ]. append ( self. _pendingWhiteSpace [ name ] [ : - numChars ] ) else : self [ name ]. append ( self. _pendingWhiteSpace [ name ] [ : - mglen ] ) self. _pendingWhiteSpace [ name ] = """" self [ name ]. append ( newStr ) else : if ( self [ name ] and not self. _pendingWhiteSpace [ name ] and not self [ name ] [ - 1 ]. isspace ( ) ) : self. _pendingWhiteSpace [ name ] += "" "" self [ name ] = newStr",False,mglen >= numChars,mglen > 0,0.6648894548416138 1856,"def __cmp__ ( self, y ) : a_start = 0 if self. start is None else self. start a_step = 1 if self. step is None else self. step b_start = 0 if y. start is None else y. start b_step = 1 if y. step is None else y. step if a_start < b_start : return - 1 if a_start > b_start : return 1 if self. stop is not y. stop : if self. stop is None : return 1 if : return - 1 if self. stop < y. stop : return - 1 return 1 if a_step < b_step : return - 1 if a_step > b_step : return 1 return 0",False,y.stop is None,y.stop,0.6589339375495911 1857,"def onMESSAGE ( self, hwnd, msg, wp, lp ) : if msg == fw. WND_WM_NOTIFY : if wp == fw. WND_NM_MSGREFLECT : msgr = fw. WND_MSGREFLECT. from_address ( lp ) msgr. fReturn = self. _base_fMsgReflect if msgr. msg == self. Msg. WM_NOTIFY : nm = NMHDR. from_address ( msgr. lParam ) if : if self. onMSG ( hwnd, ""selchanging"", self. GetSelected ( ), 0 ) == False : return 1 elif nm. code == self. Msg. TCN_SELCHANGE : self. onMSG ( hwnd, ""selchanged"", self. GetSelected ( ), 0 ) elif nm. code == self. Msg. NM_RELEASEDCAPTURE : self. onMSG ( hwnd, ""releasedcapture"", 0, 0 ) elif nm. code == self. Msg. NM_CLICK : self. onMSG ( hwnd, ""click"", 0, 0 ) elif nm. code == self. Msg. NM_RCLICK : self. onMSG ( hwnd, ""rclick"", 0, 0 ) elif nm. code == self. Msg. TCN_KEYDOWN : indices = [ ] for i, token in enumerate ( tokens ) : if : current_index = indices [ - 1 ] + len ( tokens [ i - 1 ] ) indices. append ( current_index + text [ current_index : ]. find ( token ) ) else : indices. append ( text. 
find ( token ) ) return indices",False,1 <= i,i > 0,0.6778904795646667 1859,"def _cookies_for_domain ( self, domain, request ) : cookies = [ ] if not self. _policy. domain_return_ok ( domain, request ) : return [ ] _debug ( ""Checking %s for cookies to return"", domain ) cookies_by_path = self. _cookies [ domain ] for path in cookies_by_path. keys ( ) : if not self. _policy. path_return_ok ( path, request ) : continue cookies_by_name = cookies_by_path [ path ] for cookie in cookies_by_name. values ( ) : if : _debug ( "" not returning cookie"" ) continue _debug ( "" it's a match"" ) cookies. append ( cookie ) return cookies",False,"not self._policy.return_ok(cookie, request)",cookie not in cookies,0.6499903202056885 1860,"def mine_pow_nonce ( block_number : int, mining_hash : Hash32, difficulty : int ) -> Tuple [ bytes, bytes ] : cache = get_cache ( block_number ) for nonce in range ( MAX_TEST_MINE_ATTEMPTS ) : mining_output = hashimoto_light ( block_number, cache, mining_hash, nonce ) result = big_endian_to_int ( mining_output [ b""result"" ] ) result_cap = 2 ** 256 // difficulty if : return nonce. to_bytes ( 8, ""big"" ), mining_output [ b""mix digest"" ] raise Exception ( ""Too many attempts at POW mining, giving up"" )",False,result <= result_cap,result_cap >= result,0.6631282567977905 1861,"def get_order_taxes ( shopify_order, shopify_settings ) : taxes = [ ] for tax in shopify_order. get ( ""tax_lines"" ) : taxes. append ( { ""charge_type"" : _ ( ""On Net Total"" ), ""account_head"" : get_tax_account_head ( tax ), ""description"" : ""{0} - {1}%"". format ( tax. get ( ""title"" ), tax. get ( ""rate"" ) * 100.0 ), ""rate"" : tax. get ( ""rate"" ) * 100.00, ""included_in_print_rate"" : 1 if : else 0, ""cost_center"" : shopify_settings. cost_center, } ) taxes = update_taxes_with_shipping_lines ( taxes, shopify_order. get ( ""shipping_lines"" ), shopify_settings ) return taxes",False,shopify_order.get('taxes_included'),shopify_settings.cost_center is not None,0.6526694297790527 1862,"def analyze_slots ( self ) : self. __slots = { } for s in Slots : if : meth = self. __methods. get ( s. special ) if meth is not None : self. __slots [ s ] = meth self. __slots [ TP_NAME ] = '""%s.%s""' % ( self. __module, self. __name__ ) if self. __doc__ : self. __slots [ TP_DOC ] = ""%s_doc"" % self. name if self. __struct is not None : self. __slots [ TP_BASICSIZE ] = ""sizeof(%s)"" % self. __struct. name self. __slots [ TP_DEALLOC ] = ""%s_dealloc"" % self. name if self. __methods : self. __slots [ TP_METHODS ] = ""%s_methods"" % self. name if self. __members : self. __slots [ TP_MEMBERS ] = ""%s_members"" % self. name",False,s.special is not None,s.special,0.6569875478744507 1863,"def capitalize_utterances ( utterances, entities, language, ratio, resources, random_state ) : capitalized_utterances = [ ] for utterance in utterances : capitalized_utterance = deepcopy ( utterance ) for i, chunk in enumerate ( capitalized_utterance [ DATA ] ) : capitalized_utterance [ DATA ] [ i ] [ TEXT ] = chunk [ TEXT ]. lower ( ) if ENTITY not in chunk : continue entity_label = chunk [ ENTITY ] if : continue if not entities [ entity_label ] [ CAPITALIZE ] : continue if random_state. rand ( ) > ratio : continue capitalized_utterance [ DATA ] [ i ] [ TEXT ] = capitalize ( chunk [ TEXT ], language, resources ) capitalized_utterances. 
append ( capitalized_utterance ) return capitalized_utterances",False,is_builtin_entity(entity_label),entity_label not in entities,0.6504583358764648 1864,"def _optimization_function ( self, objective_function : tp. Callable [ [ tp. ArrayLike ], float ] ) -> tp. ArrayLike : budget = np. inf if self. budget is None else self. budget best_res = np. inf best_x : np. ndarray = self. current_bests [ ""average"" ]. x if self. initial_guess is not None : best_x = np. array ( self. initial_guess, copy = True ) remaining = budget - self. _num_ask while remaining > 0 : options : tp. Dict [ str, int ] = ( { } if self. budget is None else { ""maxiter"" : remaining } ) res = scipyoptimize. minimize ( objective_function, best_x if not self. random_restart else self. _rng. normal ( 0.0, 1.0, self. dimension ), method = self. method, options = options, tol = 0, ) if : best_res = res. fun best_x = res. x remaining = budget - self. _num_ask return best_x",False,res.fun < best_res,method in self.method,0.6599200963973999 1865,"def set_defaults ( opt ) : init_model = None if opt. get ( ""init_model"" ) and PathManager. exists ( opt [ ""init_model"" ] ) : init_model = opt [ ""init_model"" ] if opt. get ( ""model_file"" ) and PathManager. exists ( opt [ ""model_file"" ] ) : init_model = opt [ ""model_file"" ] if init_model is None : opt [ ""embedding_file"" ] = modelzoo_path ( opt. get ( ""datapath"" ), opt [ ""embedding_file"" ] ) if : if not PathManager. exists ( opt [ ""embedding_file"" ] ) : raise IOError ( ""No such file: %s"" % opt [ ""embedding_file"" ] ) with PathManager. open ( opt [ ""embedding_file"" ] ) as f : dim = len ( f. readline ( ). strip ( ). split ( "" "" ) ) - 1 if dim == 1 : dim = len ( f. readline ( ). strip ( ). split ( "" "" ) ) - 1 opt [ ""embedding_dim"" ] = dim elif not opt. get ( ""embedding_dim"" ) : raise RuntimeError ( ( ""Either embedding_file or embedding_dim "" ""needs to be specified."" ) ) if opt [ ""tune_partial"" ] > 0 and opt [ ""fix_embeddings""",True,opt.get('embedding_file'),opt.get('embedding_file'),0.6556334495544434 1866,"def read ( self, size = None ) : if size == 0 : return """" data = list ( ) while size is None or size > 0 : line = self. readline ( size or - 1 ) if : break if size is not None : size -= len ( line ) data. append ( line ) return """". join ( data )",True,not line,not line,0.6676050424575806 1867,"def interpolate ( self, mobject1, mobject2, alpha, path_func = straight_path ) : for key in self. data : if key in self. locked_data_keys : continue if : continue if key not in mobject1. data or key not in mobject2. data : continue if key in ( ""points"", ""bounding_box"" ) : func = path_func else : func = interpolate self. data [ key ] [ : ] = func ( mobject1. data [ key ], mobject2. data [ key ], alpha ) for key in self. uniforms : self. uniforms [ key ] = interpolate ( mobject1. uniforms [ key ], mobject2. uniforms [ key ], alpha ) return self",False,len(self.data[key]) == 0,path_func is None,0.6571762561798096 1868,"def text_to_tokens_mask ( self, pair, Y = None, context = None ) : out_gen = self. _text_to_ids ( pair, pad_token = self. config. pad_token ) for i, out in enumerate ( out_gen ) : if context is None : feats = { ""tokens"" : out. token_ids, ""mask"" : out. mask } else : out_forward = ArrayEncodedOutput ( token_ids = out. token_ids [ 0 ], tokens = out. token_ids [ 0 ], labels = None, char_locs = out. char_locs, mask = out. mask [ 0 ], ) out_backward = ArrayEncodedOutput ( token_ids = out. token_ids [ 1 ], tokens = out. 
token_ids [ 1 ], labels = None, char_locs = out. char_locs, mask = out. mask [ 1 ], ) tokenized_context_forward = tokenize_context ( context [ 0 ], out_forward, self. config ) tokenized_context_backward = tokenize_context ( context [ 1 ], out_backward, self. config ) tokenized_context = [ tokenized_context_forward",False,Y is None,i == 0,0.6638524532318115 1869,"def watch_directory ( self, dir_path, callback = None, recursive = False, ignore_extensions = None, require_extensions = None, ) : dir_path = os. path. abspath ( dir_path ) if dir_path not in self. monitored_dirs : if callback is not None : self. dir_callbacks [ dir_path ] = callback if : self. ignore_extensions [ dir_path ] = ignore_extensions if require_extensions : self. require_extensions [ dir_path ] = require_extensions self. monitor ( dir_path, recursive = recursive ) log. debug ( ""Watching for changes in directory%s: %s"", "" (recursively)"" if recursive else """", dir_path, )",True,ignore_extensions,ignore_extensions,0.6629427075386047 1870,"def _validate_vm_create_nics ( cmd, namespace ) : from msrestazure. tools import resource_id from azure. cli. core. commands. client_factory import get_subscription_id nics_value = namespace. nics nics = [ ] if not nics_value : namespace. nic_type = ""new"" logger. debug ( ""new NIC will be created"" ) return if not isinstance ( nics_value, list ) : nics_value = [ nics_value ] for n in nics_value : nics. append ( { ""id"" : n if : else resource_id ( name = n, resource_group = namespace. resource_group_name, namespace = ""Microsoft.Network"", type = ""networkInterfaces"", subscription = get_subscription_id ( cmd. cli_ctx ), ), ""properties"" : { ""primary"" : nics_value [ 0 ] == n }, } ) namespace. nics = nics namespace. nic_type = ""existing"" namespace. public_ip_address_type = None logger. debug ( ""existing NIC(s) will be used"" )",False,'/' in n,not namespace.resource_group,0.6751554012298584 1871,def timeout ( self ) : now = ptime. time ( ) dt = now - self. lastPlayTime if dt < 0 : return n = int ( self. playRate * dt ) if n!= 0 : self. lastPlayTime += float ( n ) / self. playRate if : self. play ( 0 ) self. jumpFrames ( n ),False,self.currentIndex + n > self.image.shape[self.axes['t']],n == 1,0.6506556868553162 1872,"def __call__ ( self, parser, namespace, values, option_string ) : instance, settings = get_instance ( namespace ) if values : for setting in values : if : if isinstance ( settings [ setting ], dict ) : setting_format = ""\n{}:\n{}"" else : setting_format = ""\n{}: {}"" print ( setting_format. format ( setting, pprint. pformat ( settings [ setting ] ) ) ) else : print ( ""\n{} is not a recognized setting."". format ( setting ) ) break else : pprint. pprint ( settings ) parser. exit ( )",True,setting in settings,setting in settings,0.6679763793945312 1873,"def _restore_freeze ( self, new ) : size_change = [ ] for k, v in six. iteritems ( self. _freeze_backup ) : newv = new. get ( k, [ ] ) if : size_change. append ( ( self. _key_name ( k ), len ( v ), len ( newv ) ) ) if size_change : logger. info ( ""These collections were modified but restored in {}: {}"". format ( self. _name, "", "". join ( map ( lambda t : ""({}: {}->{})"". format ( * t ), size_change ) ), ) ) restore_collection ( self. _freeze_backup )",False,len(v) != len(newv),newv,0.6495283842086792 1874,"def _find_closest_regex_forwards ( self, regexlist, getGroupPos = None ) : sm = self. scimoz ( ) closestPos = None endpos = startpos = curpos = sm. currentPos lastEndPos = sm. 
length while closestPos is None and endpos < lastEndPos : endpos += 500 endpos = min ( lastEndPos, endpos ) text = sm. getTextRange ( startpos, endpos ) for r in regexlist : match = re. search ( r, text ) if : if getGroupPos : foundPos = startpos + match. start ( getGroupPos ) else : foundPos = startpos + match. start ( ) if closestPos is None or foundPos < closestPos : closestPos = foundPos sm. selectionStart = match. start ( ) sm. selectionEnd = match. end ( ) return closestPos",True,match,match,0.6760464310646057 1875,"def mutated ( self, indiv ) : """"""mutate some genes of the given individual"""""" res = indiv. copy ( ) for i in range ( self. numParameters ) : if : if self. xBound is None : res [ i ] = indiv [ i ] + gauss ( 0, self. mutationStdDev ) else : res [ i ] = max ( min ( indiv [ i ] + gauss ( 0, self. mutationStdDev ), self. maxs [ i ] ), self. mins [ i ], ) return res",False,random() < self.mutationProb,i in indiv.keys(),0.6597429513931274 1876,"def read_stanza ( self ) : while True : try : stanza_end = self. _buffer. index ( b""\n"" ) stanza = self. decoder. decode ( self. _buffer [ : stanza_end ] ) self. _buffer = self. _buffer [ stanza_end + 1 : ] colon = stanza. index ( "":"" ) return stanza [ : colon ], stanza [ colon + 1 : ] except ValueError : bytes = self. read_bytes ( ) if : return None else : self. _buffer += bytes",True,not bytes,not bytes,0.6754790544509888 1877,"def validate_transaction_reference ( self ) : bank_account = self. paid_to if self. payment_type == ""Receive"" else self. paid_from bank_account_type = frappe. db. get_value ( ""Account"", bank_account, ""account_type"" ) if bank_account_type == ""Bank"" : if : frappe. throw ( _ ( ""Reference No and Reference Date is mandatory for Bank transaction"" ) )",False,not self.reference_no or not self.reference_date,self.last_transaction_date is None,0.6513961553573608 1878,"def arith_expr ( self, nodelist ) : node = self. com_node ( nodelist [ 0 ] ) for i in range ( 2, len ( nodelist ), 2 ) : right = self. com_node ( nodelist [ i ] ) if : node = Add ( node, right, lineno = nodelist [ 1 ]. context ) elif nodelist [ i - 1 ]. type == token. MINUS : node = Sub ( node, right, lineno = nodelist [ 1 ]. context ) else : raise ValueError ( ""unexpected token: %s"" % nodelist [ i - 1 ] [ 0 ] ) return node",False,nodelist[i - 1].type == token.PLUS,nodelist[i - 1].type == token.MAXUS,0.6558165550231934 1879,"def _catch_revision_errors ( self, ancestor = None, multiple_heads = None, start = None, end = None, resolution = None ) : try : yield except RangeNotAncestorError as rna : if start is None : start = rna. lower if : end = rna. upper if not ancestor : ancestor = ( ""Requested range %(start)s:%(end)s does not refer to "" ""ancestor/descendant revisions along the same branch"" ) ancestor = ancestor % { ""start"" : start, ""end"" : end } raise Exception ( ancestor ) except MultipleHeads as mh : if not multiple_heads : multiple_heads = ( ""Multiple head revisions are present for given "" ""argument '%(head_arg)s'; please "" ""specify a specific target revision, "" ""'@%(head_arg)s' to "" ""narrow to a specific head, or 'heads' for all heads"" ) multiple_heads = multiple_heads % { ""head_arg"" : end or mh. argument, ""heads"" : str ( mh. 
heads ), } raise Exception ( multiple_heads ) except ResolutionError as re : if resolution is None : resolution = ""Can't locate revision identified by '%s",True,end is None,end is None,0.6718732118606567 1880,"def doc_help ( doc_lines ) : """"""print formated command's docstring"""""" if len ( doc_lines ) < 2 : return False doc_lines. pop ( 0 ) while not doc_lines [ 0 ]. strip ( ) : doc_lines. pop ( 0 ) trash = len ( doc_lines [ 0 ] ) - len ( doc_lines [ 0 ]. lstrip ( ) ) doc_lines = [ line [ trash : ]. rstrip ( ) for line in doc_lines ] result = """" for line in doc_lines : if line == line. lstrip ( ) : line = colorize ( ""%BoldWhite"", line ) elif line. startswith ( "" * "" ) : line = colorize ( "" * "", ""%Yellow"", line [ 6 : ] ) elif line. startswith ( "" > "" ) : line = colorize ( "" > "", ""%Cyan"", line [ 6 : ] ) elif : line = colorize ( ""%Dim"", line ) elif line. startswith ( "" -"" ) and line [ 5 ]!= "" "" : line = colorize ( ""%Green"", line ) result += line + ""\n"" print ( result ) return True",False,line.startswith(' # '),line.startswith(' -'),0.6523780822753906 1881,"def plot_training_curves ( self, file_name : Union [ str, Path ], plot_values : List [ str ] = [ ""loss"", ""F1"" ] ) : if type ( file_name ) is str : file_name = Path ( file_name ) fig = plt. figure ( figsize = ( 15, 10 ) ) for plot_no, plot_value in enumerate ( plot_values ) : training_curves = self. _extract_evaluation_data ( file_name, plot_value ) plt. subplot ( len ( plot_values ), 1, plot_no + 1 ) if training_curves [ ""train"" ] [ ""score"" ] : x = np. arange ( 0, len ( training_curves [ ""train"" ] [ ""score"" ] ) ) plt. plot ( x, training_curves [ ""train"" ] [ ""score"" ], label = f""training {plot_value}"" ) if : x = np. arange ( 0, len ( training_curves [ ""dev"" ] [ ""score"" ] ) ) plt. plot ( x, training_curves [ ""dev"" ] [ ""score"" ], label = f""validation {plot_value}"" ) if training_curves [ ""test"" ] [ ""score"" ] : x = np. arange ( 0, len ( training_curves [ ""test"" ] [ ""score"" ] ) ) plt. plot ( x, training_curves [ ""test"" ] [ ""score"" ], label = f""test {plot_value}"" ) plt. legend ( bbox_to_anchor = ( 1.04, 0 )",False,training_curves['dev']['score'],training_curves['dev'],0.6562501192092896 1882,"def step_forward ( self, img_feats : Tensor, question_feats : Tensor, actions_in : Tensor, hidden : Tensor, ) -> Tuple [ Tensor, Tensor ] : T = False if self. image_input is True : N, T, _ = img_feats. size ( ) input_feats = img_feats if self. question_input is True : N, D = question_feats. size ( ) question_feats = question_feats. view ( N, 1, D ) if : T = actions_in. size ( 1 ) question_feats = question_feats. repeat ( 1, T, 1 ) if len ( input_feats ) == 0 : input_feats = question_feats else : input_feats = torch. cat ( [ input_feats, question_feats ], 2 ) if self. action_input is True : if len ( input_feats ) == 0 : input_feats = self. action_embed ( actions_in ) else : actions_in = actions_in. long ( ) input_feats = torch. cat ( [ input_feats, self. action_embed ( actions_in ) ], 2 ) output, hidden = self. rnn ( input_feats, hidden ) output = self. decoder ( output. contiguous ( ). view ( output. size ( 0 ) * output. size ( 1 ), output. size ( 2 ) ) ) return output, hidden",False,T is False,self.t_in is True,0.6669957637786865 1883,"def align_comments ( tlist ) : tidx, token = tlist. token_next_by ( i = sql. Comment ) while token : pidx, prev_ = tlist. token_prev ( tidx ) if : tlist. group_tokens ( sql. TokenList, pidx, tidx, extend = True ) tidx = pidx tidx, token = tlist. 
token_next_by ( i = sql. Comment, idx = tidx )",False,"isinstance(prev_, sql.TokenList)",prev_ and token == prev_,0.6487141251564026 1884,"def _check_with_bulk_schema ( self, field, value ) : if isinstance ( value, _str_type ) : if : return else : self. known_rules_set_refs. add ( value ) definition = self. target_validator. rules_set_registry. get ( value ) if definition is None : self. _error ( field, ""Rules set definition %s not found."" % value ) return else : value = definition _hash = ( mapping_hash ( { ""turing"" : value } ), mapping_hash ( self. target_validator. types_mapping ), ) if _hash in self. target_validator. _valid_schemas : return validator = self. _get_child_validator ( document_crumb = field, allow_unknown = False, schema = self. target_validator. rules, ) validator ( value, normalize = False ) if validator. _errors : self. _error ( validator. _errors ) else : self. target_validator. _valid_schemas. add ( _hash )",True,value in self.known_rules_set_refs,value in self.known_rules_set_refs,0.6493253707885742 1885,"def get_volume_image_metadata ( image_id, image_meta ) : base_metadata = { ""image_id"" : image_id, } name = image_meta. get ( ""name"", None ) if name : base_metadata [ ""image_name"" ] = name for key in IMAGE_ATTRIBUTES : if : continue value = image_meta. get ( key, None ) if value is not None : base_metadata [ key ] = value property_metadata = { } image_properties = image_meta. get ( ""properties"", { } ) for ( key, value ) in image_properties. items ( ) : if value is not None : property_metadata [ key ] = value volume_metadata = dict ( property_metadata ) volume_metadata. update ( base_metadata ) return volume_metadata",False,key not in image_meta,key in base_metadata,0.65754634141922 1886,"def get_data ( row ) : data = [ ] for field_name, field_xpath in fields : result = row. xpath ( field_xpath ) if : result = "" "". join ( text for text in map ( six. text_type. strip, map ( six. text_type, map ( unescape, result ) ) ) if text ) else : result = None data. append ( result ) return data",True,result,result,0.6867662668228149 1887,"def _update_label ( self ) : diff_num = self. table. get_reference_difference ( ) if diff_num is None : if self. table. version_index == 0 : if : txt = ""the only package"" else : txt = ""the latest package"" else : nth = positional_number_string ( self. table. version_index + 1 ) txt = ""the %s latest package"" % nth if self. table. num_versions > 1 : txt += "" of %d packages"" % self. table. num_versions txt = ""%s is %s"" % ( self. variant. qualified_package_name, txt ) else : adj = ""ahead"" if diff_num > 0 else ""behind"" diff_num = abs ( diff_num ) unit = ""version"" if diff_num == 1 else ""versions"" txt = ""Package is %d %s %s"" % ( diff_num, unit, adj ) self. label. setText ( txt )",False,self.table.num_versions == 1,self.variant.qualified_package_name,0.6535567045211792 1888,"def finalize_options ( self ) : self. set_undefined_options ( ""bdist"", ( ""bdist_base"", ""bdist_base"" ) ) if self. rpm_base is None : if : raise DistutilsOptionError ( ""you must specify --rpm-base in RPM 2 mode"" ) self. rpm_base = os. path. join ( self. bdist_base, ""rpm"" ) if self. python is None : if self. fix_python : self. python = sys. executable else : self. python = ""python3"" elif self. fix_python : raise DistutilsOptionError ( ""--python and --fix-python are mutually exclusive options"" ) if os. name!= ""posix"" : raise DistutilsPlatformError ( ""don't know how to create RPM "" ""distributions on platform %s"" % os. name ) if self. 
binary_only and self. source_only : raise DistutilsOptionError ( ""cannot supply both '--source-only' and '--binary-only'"" ) if not self. distribution. has_ext_modules ( ) : self. use_rpm_opt_flags = 0 self. set_undefined_options ( ""bdist"", ( ""dist_dir"", ""dist_dir"" ) ) self. finalize_package_data ( )",False,not self.rpm3_mode,self.bdist_base is None,0.6567996740341187 1889,"def duplicate_shared_dir_validator ( section_key, section_label, pcluster_config ) : errors = [ ] warnings = [ ] config_parser = pcluster_config. config_parser section = pcluster_config. get_section ( section_key, section_label ) if config_parser : shared_dir_in_cluster = config_parser. has_option ( get_file_section_name ( ""cluster"", section_label ), ""shared_dir"" ) ebs_settings_in_cluster = config_parser. has_option ( get_file_section_name ( ""cluster"", section_label ), ""ebs_settings"" ) if : list_of_ebs_sections = [ ] for ebs_section_label in section. get_param_value ( ""ebs_settings"" ). split ( "","" ) : ebs_section = pcluster_config. get_section ( ""ebs"", ebs_section_label. strip ( ) ) list_of_ebs_sections. append ( ebs_section ) if len ( list_of_ebs_sections ) == 1 and list_of_ebs_sections [ 0 ]. get_param_value ( ""shared_dir"" ) : errors. append ( ""'shared_dir' can not be specified both in cluster section and EBS section"" )",False,shared_dir_in_cluster and ebs_settings_in_cluster,ebs_settings_in_cluster,0.6518061757087708 1890,"def Main ( argv ) : set_profile = set_home = trust_os_path = False for arg in argv : if : os. environ [ ""MAILPILE_PROFILE"" ] = arg. split ( ""="", 1 ) [ - 1 ] if ""MAILPILE_HOME"" in os. environ : del os. environ [ ""MAILPILE_HOME"" ] set_profile = True elif arg. startswith ( ""--home="" ) : os. environ [ ""MAILPILE_HOME"" ] = arg. split ( ""="", 1 ) [ - 1 ] if ""MAILPILE_PROFILE"" in os. environ : del os. environ [ ""MAILPILE_PROFILE"" ] set_home = True elif arg == ""--trust-os-path"" : trust_os_path = True if set_home and set_profile : raise ValueError ( ""Please only use one of --home and --profile"" ) state = MailpileState ( ). discover ( argv ) ActivateTranslation ( None, state. pub_config, None ) script = [ GenerateConfig ( state ), GenerateBootstrap ( state, trust_os_path = trust_os_path ), ] if ""--script"" in argv : print ( ""\n"". join ( script ) ) else : from mailpile. safe_popen import MakePopenUnsafe MakePopenUnsafe ( ) from gui_o_matic. control import GUIPipeControl gpc = GUIPipeControl ( StringIO ( ""\n"". join",False,arg.startswith('--profile='),arg.startswith('--load-password -CLUNCHER-HOME'),0.6500493884086609 1891,"def font_variant_numeric ( tokens ) : if len ( tokens ) == 1 : keyword = get_keyword ( tokens [ 0 ] ) if keyword == ""normal"" : return keyword values = [ ] couples = ( ( ""lining-nums"", ""oldstyle-nums"" ), ( ""proportional-nums"", ""tabular-nums"" ), ( ""diagonal-fractions"", ""stacked-fractions"" ), ( ""ordinal"", ), ( ""slashed-zero"", ), ) all_values = [ ] for couple in couples : all_values. extend ( couple ) for token in tokens : if token. type!= ""ident"" : return None if token. value in all_values : concurrent_values = [ couple for couple in couples if token. value in couple ] [ 0 ] if : return None else : values. append ( token. value ) else : return None if values : return tuple ( values )",False,any((value in values for value in concurrent_values)),len(concurrent_values) == 0,0.6551509499549866 1892,"def find_process_imports ( self, task ) : task_space = task. get_process_address_space ( ) all_mods = list ( task. 
get_load_modules ( ) ) apis = self. _enum_apis ( all_mods ) if not all_mods : self. session. logging. error ( ""Cannot load DLLs in process AS"" ) return base_address = int ( all_mods [ 0 ]. DllBase ) size_to_read = int ( all_mods [ 0 ]. SizeOfImage ) calls_imported = { } for address, iat, destination in self. call_scan ( task_space, base_address, size_to_read ) : self. session. report_progress ( ""Resolving import %s->%s"" % ( address, iat ) ) calls_imported [ iat ] = ( address, destination ) self. _iat_scan ( task_space, calls_imported, apis, base_address, base_address + size_to_read ) for iat, ( _, func_pointer ) in sorted ( calls_imported. items ( ) ) : tmp = apis. get ( func_pointer. obj_offset ) if : module, func_pointer, func_name = tmp yield iat, func_pointer, module, func_name",True,tmp,tmp,0.689765214920044 1893,"def output_package_listing ( self, packages, options ) : packages = sorted ( packages, key = lambda dist : dist. project_name. lower ( ), ) if options. list_format == ""columns"" and packages : data, header = format_for_columns ( packages, options ) self. output_package_listing_columns ( data, header ) elif options. list_format == ""freeze"" : for dist in packages : if : logger. info ( ""%s==%s (%s)"", dist. project_name, dist. version, dist. location ) else : logger. info ( ""%s==%s"", dist. project_name, dist. version ) elif options. list_format == ""json"" : logger. info ( format_for_json ( packages, options ) )",False,options.verbose >= 1,options.list_format == 'location',0.6556912064552307 1894,"def convertDict ( obj ) : obj = dict ( obj ) for k, v in obj. items ( ) : del obj [ k ] if not ( isinstance ( k, str ) or isinstance ( k, unicode ) ) : k = dumps ( k ) if : obj [ Types. KEYS ] = [ ] obj [ Types. KEYS ]. append ( k ) obj [ k ] = convertObjects ( v ) return obj",False,Types.KEYS not in obj,k not in obj,0.6762058734893799 1895,"def deleteEditor ( self, event = None ) : """"""Delete the presently selected body text editor."""""" c, d = self. c, self. editorWidgets wrapper = c. frame. body. wrapper w = wrapper. widget assert g. isTextWrapper ( wrapper ), wrapper assert g. isTextWidget ( w ), w if len ( list ( d. keys ( ) ) ) <= 1 : return name = w. leo_name if hasattr ( w, ""leo_name"" ) else ""1"" if name == ""1"" : g. warning ( ""can not delete leftmost editor"" ) return c. p. b = wrapper. getAllText ( ) del d [ name ] f = c. frame. top. leo_body_inner_frame layout = f. layout ( ) for z in ( w, w. leo_label ) : if z : self. unpackWidget ( layout, z ) w. leo_label = None new_wrapper = list ( d. values ( ) ) [ 0 ] self. numberOfEditors -= 1 if self. numberOfEditors == 1 : w = new_wrapper. widget if : self. unpackWidget ( layout, w. leo_label ) w. leo_label = None self. selectEditor ( new_wrapper )",False,"getattr(w, 'leo_label', None)",w,0.6484655737876892 1896,"def match_var_against_type ( self, var, other_type, subst, node, view ) : """"""Match a variable against a type."""""" if var. bindings : return self. _match_value_against_type ( view [ var ], other_type, subst, node, view ) else : if : other_type = other_type. get_formal_type_parameter ( abstract_utils. T ) if isinstance ( other_type, abstract. Union ) : right_side_options = other_type. options else : right_side_options = [ other_type ] for right in right_side_options : if isinstance ( right, abstract. TypeParameter ) : if right. full_name not in subst : subst = subst. copy ( ) subst [ right. full_name ] = var. program. 
NewVariable ( ) return subst",False,"isinstance(other_type, abstract.TupleClass)","isinstance(other_type, abstract_utils.Variable)",0.6496050357818604 1897,"def get ( self ) : """"""return a secret by name"""""" results = self. _get ( ""secrets"", self. name ) results [ ""decoded"" ] = { } results [ ""exists"" ] = False if results [ ""returncode"" ] == 0 and results [ ""results"" ] [ 0 ] : results [ ""exists"" ] = True if : if ""data"" in results [ ""results"" ] [ 0 ] : for sname, value in results [ ""results"" ] [ 0 ] [ ""data"" ]. items ( ) : results [ ""decoded"" ] [ sname ] = base64. b64decode ( value ) if results [ ""returncode"" ]!= 0 and '""%s"" not found' % self. name in results [ ""stderr"" ] : results [ ""returncode"" ] = 0 return results",False,self.decode,results['exists'],0.6638282537460327 1898,"def spawnProcess ( pp, cmd, argv, path, usePTY, env ) : self. assertEqual ( [ cmd, argv, path, usePTY, env ], [ exp_cmd, exp_argv, exp_path, exp_usePTY, exp_env ], ) for output in outputs : if output [ 0 ] == ""out"" : pp. outReceived ( output [ 1 ] ) elif output [ 0 ] == ""err"" : pp. errReceived ( output [ 1 ] ) elif : if output [ 1 ]!= 0 : so = error. ProcessTerminated ( exitCode = output [ 1 ] ) else : so = error. ProcessDone ( None ) pp. processEnded ( failure. Failure ( so ) )",False,output[0] == 'rc',output[0] == 'exit',0.6548707485198975 1899,"def _obj_ref_action ( _notify_usage, LOG, obj_ref, extra_info, admin_context, begin, end, notify_about_usage, type_id_str, type_name, ) : _notify_usage ( LOG, obj_ref, extra_info, admin_context ) if CONF. send_actions : if begin < obj_ref. created_at < end : _create_action ( obj_ref, admin_context, LOG, notify_about_usage, type_id_str, type_name ) if : _delete_action ( obj_ref, admin_context, LOG, notify_about_usage, type_id_str, type_name )",False,obj_ref.deleted_at and begin < obj_ref.deleted_at < end,end < obj_ref.created_at < end,0.6494174003601074 1900,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if : break if fid == 0 : if ftype == TType. LIST : self. success = [ ] ( _etype841, _size838 ) = iprot. readListBegin ( ) for _i842 in xrange ( _size838 ) : _elem843 = Partition ( ) _elem843. read ( iprot ) self. success. append ( _elem843 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) elif fid == 1 : if ftype == TType. STRUCT : self. o1 = MetaException ( ) self. o1. read ( iprot ) else : axis = self. forward_func. info. args [ ""axis"" ] if prop_down [ - 1 ] : g_dy = inputs [ - 1 ]. grad g_dy_ = F. stack ( * [ o. grad for o in outputs ], axis = axis ) if : g_dy += g_dy_ else : g_dy. copy_from ( g_dy_ )",False,accum[-1],accum[0],0.6641381978988647 1902,"def dispose ( cls ) : for inst in cls. instances : old_session = inst. session inst. session = None if old_session : try : old_session. close ( ) except : pass if old_session. bind : try : old_session. bind. dispose ( ) except Exception : pass for attr in list ( Books. __dict__. keys ( ) ) : if attr. startswith ( ""custom_column_"" ) : setattr ( Books, attr, None ) for db_class in cc_classes. values ( ) : Base. metadata. remove ( db_class. __table__ ) cc_classes. clear ( ) for table in reversed ( Base. metadata. 
sorted_tables ) : name = table. key if name. startswith ( ""custom_column_"" ) or name. startswith ( ""books_custom_column_"" ) : if : Base. metadata. remove ( table )",False,table is not None,table.delete_table,0.6606956720352173 1903,"def _get_annotated_template ( self, template ) : changed = False if template. get ( ""version"", ""0.12.0"" ) >= ""0.13.0"" : using_js = self. spider. _filter_js_urls ( template [ ""url"" ] ) body = ""rendered_body"" if using_js else ""original_body"" if : template [ ""body"" ] = body changed = True if changed or not template. get ( ""annotated"" ) : _build_sample ( template ) return template",False,template.get('body') != body,body and body.get('annotated'),0.6574987769126892 1904,"def _count_split ( cls, input_file, chunk_size, subdir_generator_function ) : """"""Split a FASTA file into chunks based on counting records."""""" log. debug ( ""Attemping to split FASTA file %s into chunks of %i sequences"" % ( input_file, chunk_size ) ) f = open ( input_file ) part_file = None try : part_dir = subdir_generator_function ( ) part_path = os. path. join ( part_dir, os. path. basename ( input_file ) ) part_file = open ( part_path, ""w"" ) log. debug ( ""Writing {} part to {}"". format ( input_file, part_path ) ) rec_count = 0 while True : line = f. readline ( ) if not line : break if : rec_count += 1 if rec_count > chunk_size : part_file. close ( ) part_dir = subdir_generator_function ( ) part_path = os. path. join ( part_dir, os. path. basename ( input_file ) ) part_file = open ( part_path, ""w"" ) log. debug ( ""Writing {} part to {}"". format ( input_file, part_path ) )",False,line[0] == '>',line == '',0.658064603805542 1905,"def _gfal ( self, cmd, * args, retry = None, raise_workflow_error = True ) : if retry is None : retry = self. provider. retry _cmd = [ ""gfal-"" + cmd ] + list ( args ) for i in range ( retry + 1 ) : try : logger. debug ( _cmd ) return sp. run ( _cmd, check = True, stderr = sp. PIPE, stdout = sp. PIPE ). stdout. decode ( ) except sp. CalledProcessError as e : if : if raise_workflow_error : raise WorkflowError ( ""Error calling gfal-{}:\n{}"". format ( cmd, e. stderr. decode ( ) ) ) else : raise e else : time. sleep ( 1 ) continue",False,i == retry,e.returncode == 0,0.674399733543396 1906,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. STRUCT : self. req = TGetFunctionsReq ( ) self. req. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,fid == 1,fid == TType.START,0.6731555461883545 1907,"def include_file ( name, fdir = tmp_dir, b64 = False ) : try : if fdir is None : fdir = """" if : with io. open ( os. path. join ( fdir, name ), ""rb"" ) as f : return base64. b64encode ( f. read ( ) ). decode ( ""utf-8"" ) else : with io. open ( os. path. join ( fdir, name ), ""r"", encoding = ""utf-8"" ) as f : return f. read ( ) except ( OSError, IOError ) as e : logger. error ( ""Could not include file '{}': {}"". 
format ( name, e ) )",True,b64,b64,0.6819143295288086 1908,"def get_overdue_evergreen_documents ( *, db_session ) -> List [ Optional [ Document ] ] : """"""Returns all documents that have need had a recent evergreen notification."""""" documents = ( db_session. query ( Document ). filter ( Document. evergreen == True ) ). all ( ) overdue_documents = [ ] now = datetime. utcnow ( ) for d in documents : next_reminder = d. evergreen_last_reminder_at + timedelta ( days = d. evergreen_reminder_interval ) if : overdue_documents. append ( d ) return overdue_documents",False,now > next_reminder,next_reminder >= now and days > 1,0.6637026071548462 1909,"def build ( self, input_shape ) : assert len ( input_shape ) >= 2 if isinstance ( input_shape, list ) and len ( input_shape ) == 2 : self. data_mode = ""disjoint"" F = input_shape [ 0 ] [ - 1 ] else : if : self. data_mode = ""single"" else : self. data_mode = ""batch"" F = input_shape [ - 1 ] self. attn_kernel = self. add_weight ( shape = ( F, 1 ), initializer = self. attn_kernel_initializer, regularizer = self. attn_kernel_regularizer, constraint = self. attn_kernel_constraint, name = ""attn_kernel"", ) self. built = True",False,len(input_shape) == 2,"isinstance(input_shape[1], list)",0.6554309129714966 1910,"def get ( self, * args, ** kwargs ) : show = self. get_argument ( ""show"" ) show_name = None show_obj = find_show ( int ( show ) ) if show_obj : show_name = quote_plus ( show_obj. name. encode ( ) ) if show_name : if : host = sickrage. app. config. kodi_host. split ( "","" ) [ 0 ]. strip ( ) else : host = sickrage. app. config. kodi_host if sickrage. app. notifier_providers [ ""kodi"" ]. update_library ( showName = show_name ) : sickrage. app. alerts. message ( _ ( ""Library update command sent to KODI host(s): "" ) + host ) else : sickrage. app. alerts. error ( _ ( ""Unable to contact one or more KODI host(s): "" ) + host ) if show_obj : return self. redirect ( ""/home/displayShow?show="" + str ( show_obj. indexer_id ) ) else : return self. redirect ( ""/home/"" )",False,sickrage.app.config.kodi_update_onlyfirst,is_kodi_host,0.6498985886573792 1911,"def pytest_runtest_setup ( item ) : if is_potential_nosetest ( item ) : if isinstance ( item. parent, pytest. Generator ) : gen = item. parent if not hasattr ( gen, ""_nosegensetup"" ) : call_optional ( gen. obj, ""setup"" ) if : call_optional ( gen. parent. obj, ""setup"" ) gen. _nosegensetup = True if not call_optional ( item. obj, ""setup"" ) : call_optional ( item. parent. obj, ""setup"" ) item. session. _setupstate. addfinalizer ( ( lambda : teardown_nose ( item ) ), item )",False,"isinstance(gen.parent, pytest.Instance)",gen._nosegensetup,0.647995114326477 1912,"def find_end_of_match ( to_match : str, chars : str, first_index : int ) -> typing. Tuple [ typing. Optional [ float ], typing. Optional [ int ] ] : score, last_index, last_type = 1.0, first_index, None for char in chars : try : index = to_match. index ( char, last_index + 1 ) except ValueError : return None, None if not index : return None, None if index == last_index + 1 : if last_type!= ""sequential"" : last_type = ""sequential"" score += 1 elif : if last_type!= ""boundary"" : last_type = ""boundary"" score += 1 elif ( char in string. ascii_uppercase and to_match [ index - 1 ] in string. 
ascii_lowercase ) : if last_type!= ""camelcase"" : last_type = ""camelcase"" score += 1 else : last_type = ""normal"" score += index - last_index last_index = index return ( score, last_index )",False,to_match[index - 1] in BOUNDARY_CHARS,first_index == 0,0.654887318611145 1913,"def _get_torch_exploration_action ( self, action_dist : ActionDistribution, timestep : Union [ TensorType, int ], explore : Union [ TensorType, bool ], ) : self. last_timestep = timestep if timestep is not None else self. last_timestep + 1 if explore : if : action, logp = self. random_exploration. get_torch_exploration_action ( action_dist, explore = True ) else : action = action_dist. sample ( ) logp = action_dist. sampled_action_logp ( ) else : action = action_dist. deterministic_sample ( ) logp = torch. zeros_like ( action_dist. sampled_action_logp ( ) ) return action, logp",False,self.last_timestep < self.random_timesteps,self.random_exploration is not None,0.6579583287239075 1914,"def _on_change ( self ) : changed = False self. save ( ) for key, value in self. data. items ( ) : if isinstance ( value, bool ) : if value : changed = True break if isinstance ( value, int ) : if : changed = True break elif value is None : continue elif len ( value )!= 0 : changed = True break self. _reset_button. disabled = not changed",False,value != 1,len(value) == 0,0.6750129461288452 1915,"def gen_partitions_from_args ( partition_set, kwargs ) : partition_selector_args = [ bool ( kwargs. get ( ""all"" ) ), bool ( kwargs. get ( ""partitions"" ) ), ( bool ( kwargs. get ( ""from"" ) ) or bool ( kwargs. get ( ""to"" ) ) ), ] if sum ( partition_selector_args ) > 1 : raise click. UsageError ( ""error, cannot use more than one of: `--all`, `--partitions`, `--from/--to`"" ) partitions = partition_set. get_partitions ( ) if kwargs. get ( ""all"" ) : return partitions if kwargs. get ( ""partitions"" ) : selected_args = [ s. strip ( ) for s in kwargs. get ( ""partitions"" ). split ( "","" ) if s. strip ( ) ] selected_partitions = [ partition for partition in partitions if partition. name in selected_args ] if : selected_names = [ partition. name for partition in selected_partitions ] unknown = [ selected for selected in selected_args if selected not in selected_names ] raise click. UsageError ( ""Unknown partitions: {}"". format ( unknown. join ( "", "" ) ) ) return selected_partitions start = validate_partition_slice ( partitions, ""from"", kwargs. get ( ""from"" ) ) end = validate_partition_slice ( partitions, ""to"", kwargs. get ( ""to"" ) ) return partitions [ start : end ]",False,len(selected_partitions) < len(selected_args),selected_partitions,0.6476808786392212 1916,"def read ( self ) : """"""Reads the robots.txt URL and feeds it to the parser."""""" try : f = urllib. request. urlopen ( self. url ) except urllib. error. HTTPError as err : if err. code in ( 401, 403 ) : self. disallow_all = True elif : self. allow_all = True else : raw = f. read ( ) self. parse ( raw. decode ( ""utf-8"" ). 
splitlines ( ) )",False,err.code >= 400 and err.code < 500,"err.code in (502, 404)",0.6559237241744995 1917,"def vi_pos_back_short ( line, index = 0, count = 1 ) : line = vi_list ( line ) try : for i in range ( count ) : index -= 1 while vi_is_space ( line [ index ] ) : index -= 1 in_word = vi_is_word ( line [ index ] ) if : while vi_is_word ( line [ index ] ) : index -= 1 else : while not vi_is_word_or_space ( line [ index ] ) : index -= 1 return index + 1 except IndexError : return 0",True,in_word,in_word,0.6719998717308044 1918,"def _get_headers ( self, headers = None ) : request_headers = headers or { } if self. _client. client. config : config = self. _client. client. config if ""Authorization"" not in request_headers and config. token : request_headers. update ( { ""Authorization"" : ""{} {}"". format ( config. authentication_type, config. token ) } ) if : request_headers. update ( { config. header : config. header_service } ) return request_headers",True,config.header and config.header_service,config.header and config.header_service,0.6502023339271545 1919,"def result ( metrics : Dict [ metric_types. MetricKey, Any ] ) -> Dict [ metric_types. AttributionsKey, Dict [ Text, Union [ float, np. ndarray ] ] ] : """"""Returns mean attributions."""""" total_attributions = metrics [ total_attributions_key ] weighted_count = metrics [ weighted_example_count_key ] attributions = { } for k, v in total_attributions. items ( ) : if : attributions [ k ] = float ( ""nan"" ) else : attributions [ k ] = v / weighted_count return { key : attributions }",False,"np.isclose(weighted_count, 0.0)",weighted_count == 0,0.6566169857978821 1920,"def listing_items ( method ) : marker = None once = True items = [ ] while once or items : for i in items : yield i if once or marker : if marker : items = method ( parms = { ""marker"" : marker } ) else : items = method ( ) if : marker = items [ - 1 ] else : marker = None once = False else : items = [ ]",False,len(items) == 10000,items and items > 1,0.6743246912956238 1921,"def byte_WITH_CLEANUP ( self ) : v = w = None u = self. top ( ) if u is None : exit_func = self. pop ( 1 ) elif isinstance ( u, str ) : if : exit_func = self. pop ( 2 ) else : exit_func = self. pop ( 1 ) u = None elif issubclass ( u, BaseException ) : w, v, u = self. popn ( 3 ) exit_func = self. pop ( ) self. push ( w, v, u ) else : raise VirtualMachineError ( ""Confused WITH_CLEANUP"" ) exit_ret = exit_func ( u, v, w ) err = ( u is not None ) and bool ( exit_ret ) if err : self. popn ( 3 ) self. push ( None )",False,"u in ('return', 'continue')",u == None,0.6562206149101257 1922,"def remove ( self, name ) : for s in [ self. __storage ( self. __category ), self. __storage ( None ) ] : for i, b in enumerate ( s ) : if b. name == name : del s [ i ] if : self. __save ( ) return raise KeyError ( name )",False,b.persistent,self.__save(),0.6736152172088623 1923,"def _get_blade_interfaces ( self, chassis_number, blade_number, ucsm_ip, ucsm_username, ucsm_password ) : """"""Create command"""""" data = self. _get_blade_interfaces_post_data ( chassis_number, blade_number ) response = self. _post_data ( ucsm_ip, ucsm_username, ucsm_password, data ) elements = et. XML ( response ). find ( ""outConfigs"" ). findall ( ""adaptorHostEthIf"" ) blade_interfaces = { } for element in elements : dist_name = element. get ( ""dn"", default = None ) if : order = element. get ( ""order"", default = None ) blade_interface = { const. BLADE_INTF_DN : dist_name, const. BLADE_INTF_ORDER : order, const. 
BLADE_INTF_LINK_STATE : None, const. BLADE_INTF_OPER_STATE : None, const. BLADE_INTF_INST_TYPE : None, const. BLADE_INTF_RHEL_DEVICE_NAME : self. _get_rhel_device_name ( order ), } blade_interfaces [ dist_name ] = blade_interface return blade_interfaces",True,dist_name,dist_name,0.6642109751701355 1924,"def add_ntds_hash ( ntds_hash, host_id ) : add_ntds_hash. ntds_hashes += 1 self. logger. highlight ( ntds_hash ) if ntds_hash. find ( ""$"" ) == - 1 : if ntds_hash. find ( ""\\"" )!= - 1 : domain, hash = ntds_hash. split ( ""\\"" ) else : domain = self. domain hash = ntds_hash try : username, _, lmhash, nthash, _, _, _ = hash. split ( "":"" ) parsed_hash = "":"". join ( ( lmhash, nthash ) ) if : self. db. add_credential ( ""hash"", domain, username, parsed_hash, pillaged_from = host_id ) add_ntds_hash. added_to_db += 1 return raise except : logging. debug ( ""Dumped hash is not NTLM, not adding to db for now ;)"" ) else : logging. debug ( ""Dumped hash is a computer account, not adding to db"" )",False,validate_ntlm(parsed_hash),ntds_hash.find(parsed_hash) != -1,0.6490787267684937 1925,"def feed ( self, byte_str, num_bytes ) : if self. _done : return i = self. _need_to_skip_char_num while i < num_bytes : order, char_len = self. get_order ( byte_str [ i : i + 2 ] ) i += char_len if i > num_bytes : self. _need_to_skip_char_num = i - num_bytes self. _last_char_order = - 1 else : if ( order!= - 1 ) and ( self. _last_char_order!= - 1 ) : self. _total_rel += 1 if : self. _done = True break self. _rel_sample [ jp2CharContext [ self. _last_char_order ] [ order ] ] += 1 self. _last_char_order = order",False,self._total_rel > self.MAX_REL_THRESHOLD,order >= self._total_rel,0.6545836329460144 1926,"def _repr_info ( self ) : info = [ self. _state. lower ( ) ] if self. _state == _FINISHED : if : info. append ( ""exception={!r}"". format ( self. _exception ) ) else : result = reprlib. repr ( self. _result ) info. append ( ""result={}"". format ( result ) ) if self. _callbacks : info. append ( self. _format_callbacks ( ) ) if self. _source_traceback : frame = self. _source_traceback [ - 1 ] info. append ( ""created at %s:%s"" % ( frame [ 0 ], frame [ 1 ] ) ) return info",False,self._exception is not None,self._exception,0.6636278033256531 1927,"def get ( self, k ) : with self. _lock : if : self. _data1 [ k ] = self. _data2 [ k ] del self. _data2 [ k ] return self. _data1. get ( k )",False,k not in self._data1 and k in self._data2,k in self._data1 and k in self._data2,0.6591451168060303 1928,"def string_record_contents ( self, data ) : bv = self. biff_version bk = self. book lenlen = ( bv >= 30 ) + 1 nchars_expected = unpack ( ""<"" + ""BH"" [ lenlen - 1 ], data [ : lenlen ] ) [ 0 ] offset = lenlen if bv < 80 : enc = bk. encoding or bk. derive_encoding ( ) nchars_found = 0 result = UNICODE_LITERAL ( """" ) while 1 : if bv >= 80 : flag = BYTES_ORD ( data [ offset ] ) & 1 enc = ( ""latin_1"", ""utf_16_le"" ) [ flag ] offset += 1 chunk = unicode ( data [ offset : ], enc ) result += chunk nchars_found += len ( chunk ) if : return result if nchars_found > nchars_expected : msg = ""STRING/CONTINUE: expected %d chars, found %d"" % ( nchars_expected, nchars_found, ) raise XLRDError ( msg ) rc, _unused_len, data = bk. 
get_record_parts ( ) if rc!= XL_CONTINUE : raise XLRDError ( ""Expected CONTINUE record; found record-type 0x%04X"" % rc ) offset = 0",False,nchars_found == nchars_expected,nchars_found == len(data),0.6623801589012146 1929,"def _load_dataset_area ( self, dsid, file_handlers, coords ) : """"""Get the area for *dsid*."""""" try : return self. _load_area_def ( dsid, file_handlers ) except NotImplementedError : if any ( x is None for x in coords ) : logger. warning ( ""Failed to load coordinates for '{}'"". format ( dsid ) ) return None area = self. _make_area_from_coords ( coords ) if : logger. debug ( ""No coordinates found for %s"", str ( dsid ) ) return area",True,area is None,area is None,0.6651685833930969 1930,"def __reduce__ ( self ) : mod = self. __fn. __module__ name = self. __fn. __name__ try : obj = load_back ( mod, name ) except ( ImportError, KeyError, AttributeError ) : raise pickle. PicklingError ( ""Can't pickle as_op(), not found as %s.%s"" % ( mod, name ) ) else : if : raise pickle. PicklingError ( ""Can't pickle as_op(), not the object "" ""at %s.%s"" % ( mod, name ) ) return load_back, ( mod, name )",False,obj is not self,obj is None,0.6646921038627625 1931,"def _unquote_to_bytes ( string, unsafe = """" ) : if isinstance ( string, text_type ) : string = string. encode ( ""utf-8"" ) if isinstance ( unsafe, text_type ) : unsafe = unsafe. encode ( ""utf-8"" ) unsafe = frozenset ( bytearray ( unsafe ) ) groups = iter ( string. split ( b""%"" ) ) result = bytearray ( next ( groups, b"""" ) ) try : hex_to_byte = _unquote_maps [ unsafe ] except KeyError : hex_to_byte = _unquote_maps [ unsafe ] = { h : b for h, b in _hextobyte. items ( ) if b not in unsafe } for group in groups : code = group [ : 2 ] if : result. append ( hex_to_byte [ code ] ) result. extend ( group [ 2 : ] ) else : result. append ( 37 ) result. extend ( group ) return bytes ( result )",True,code in hex_to_byte,code in hex_to_byte,0.6571916341781616 1932,"def establish_connection ( self, p ) : params = self. get_params ( p. h ) host = params [ ""hostname"" ] port = params [ ""port"" ] user = params [ ""username"" ] passwd = self. get_password ( user, host ) if passwd is None : return ( None, None ) t = paramiko. Transport ( ( host, port ) ) t. connect ( username = user, password = passwd ) hostkey = t. get_remote_server_key ( ) cached_hostkey = self. get_hostkey ( host ) if cached_hostkey is None : store = self. confirm_hostkey ( ""Unknown host: %s"" % host, ""Add the server key for host '%s' to the trusted host list?"" % host, ) if : self. set_hostkey ( host, hostkey ) else : return ( None, None ) elif cached_hostkey!= hostkey : store = self. confirm_hostkey ( ""Hostkey does not match!"", ""The remote host '%s' provided a key that does not match the stored key. "" + "" This could indicate a man-in-the-middle attack. Continue anyway?"" % host, ) if : self. set_hostkey ( host, hostkey ) else : return ( None, None ) sftp = paramiko. SFTPClient. from_transport ( t ) return ( t, sftp )",False,store,hostkey,0.6729249954223633 1933,"def _main_actor ( cls, env_name, env_email, config_reader = None ) : actor = Actor ( """", """" ) default_email = get_user_id ( ) default_name = default_email. split ( ""@"" ) [ 0 ] for attr, evar, cvar, default in ( ( ""name"", env_name, cls. conf_name, default_name ), ( ""email"", env_email, cls. conf_email, default_email ), ) : try : val = os. environ [ evar ] setattr ( actor, attr, val ) except KeyError : if : setattr ( actor, attr, config_reader. 
get_value ( ""user"", cvar, default ) ) if not getattr ( actor, attr ) : setattr ( actor, attr, default ) return actor",False,config_reader is not None,config_reader,0.6565282344818115 1934,"def _iter_excel_instances ( self ) : asn = subprocess. check_output ( [ ""lsappinfo"", ""visibleprocesslist"", ""-includehidden"" ] ). decode ( ""utf-8"" ) for asn in asn. split ( "" "" ) : if ""Microsoft_Excel"" in asn : pid_info = subprocess. check_output ( [ ""lsappinfo"", ""info"", ""-only"", ""pid"", asn ] ). decode ( ""utf-8"" ) if : yield int ( pid_info. split ( ""="" ) [ 1 ] )",False,"pid_info != '""pid""=[ NULL ] \n'",'Microsoft_Excel' in pid_info,0.6530991196632385 1935,"def _sendExceptionResponse ( self, connection, seq, serializer_id, exc_value, tbinfo, flags = 0, annotations = None ) : """"""send an exception back including the local traceback info"""""" exc_value. _pyroTraceback = tbinfo if : util. fixIronPythonExceptionForPickle ( exc_value, True ) serializer = util. get_serializer_by_id ( serializer_id ) try : data, compressed = serializer. serializeData ( exc_value ) except : xt, xv, tb = sys. exc_info ( ) msg = ""Error serializing exception: %s. Original exception: %s: %s"" % ( str ( xv ), type ( exc_value ), str ( exc_value ), ) exc_value = errors. PyroError ( msg ) exc_value. _pyroTraceback = tbinfo if : util. fixIronPythonExceptionForPickle ( exc_value, True ) data, compressed = serializer. serializeData ( exc_value ) flags |= message. FLAGS_EXCEPTION if compressed : flags |= message. FLAGS_COMPRESSED annotations = dict ( annotations or { } ) annotations. update ( self. annotations ( ) ) msg = message. Message ( message. MSG_RESULT, data, serializer. serializer_id, flags, seq, annotations = annotations, hmac_key = self. _pyroHmacKey, ) if config.",False,sys.platform == 'cli',exc_value._pyroTraceback,0.6609688997268677 1936,"def step ( self, action ) : self. env. step ( action [ self. env. agent_selection ] ) obs_d = { } rew_d = { } done_d = { } info_d = { } while self. env. agents : obs, rew, done, info = self. env. last ( ) a = self. env. agent_selection obs_d [ a ] = obs rew_d [ a ] = rew done_d [ a ] = done info_d [ a ] = info if : self. env. step ( None ) else : break all_done = not self. env. agents done_d [ ""__all__"" ] = all_done return obs_d, rew_d, done_d, info_d",False,self.env.dones[self.env.agent_selection],self.env.agents == 0,0.6482844948768616 1937,"def __init__ ( self, buffer, encoding = None, errors = None, newline = None, line_buffering = False ) : if newline not in ( None, """", ""\n"", ""\r"", ""\r\n"" ) : raise ValueError ( ""illegal newline value: %r"" % ( newline, ) ) if encoding is None : try : encoding = os. device_encoding ( buffer. fileno ( ) ) except ( AttributeError, UnsupportedOperation ) : pass if encoding is None : try : import locale except ImportError : encoding = ""ascii"" else : encoding = locale. getpreferredencoding ( ) if not isinstance ( encoding, basestring ) : raise ValueError ( ""invalid encoding: %r"" % encoding ) if errors is None : errors = ""strict"" else : if : raise ValueError ( ""invalid errors: %r"" % errors ) self. buffer = buffer self. _line_buffering = line_buffering self. _encoding = encoding self. _errors = errors self. _readuniversal = not newline self. _readtranslate = newline is None self. _readnl = newline self. _writetranslate = newline!= """" self. _writenl = newline or os. linesep self. _encoder = None self. _decoder = None self. _decoded_chars = """" self. _decoded_chars_used = 0 self. _snapshot = None self. 
_seekable = self. _telling",False,"not isinstance(errors, basestring)",not line_buffering,0.6512417197227478 1938,"def app ( scope, receive, send ) : while True : message = await receive ( ) if message [ ""type"" ] == ""websocket.connect"" : await send ( { ""type"" : ""websocket.accept"" } ) elif message [ ""type"" ] == ""websocket.receive"" : pass elif : break",False,message['type'] == 'websocket.disconnect',message['type'] == 'websocket.error',0.6558740139007568 1939,"def verify_model_vm ( input_model, ishapes, idtype = torch. float, idata = None, targets = [ ""llvm"" ] ) : input_names = [ ""i{}"". format ( idx ) for idx, ish in enumerate ( ishapes ) ] input_shapes = list ( zip ( input_names, ishapes ) ) input_data = ( idata if idata else [ torch. randn ( shape, dtype = idtype ) for shape in ishapes ] ) mod, params = relay. frontend. from_pytorch ( input_model, input_shapes ) for tgt in targets : print ( ""Running on target"", tgt ) ctx = tvm. context ( tgt, 0 ) executor = relay. create_executor ( ""vm"", mod = mod, ctx = ctx, target = tgt ) evaluator = executor. evaluate ( ) for name, inp in zip ( input_names, input_data ) : params [ name ] = inp. numpy ( ) vm_res = evaluator ( ** params ) with torch. no_grad ( ) : pt_result = input_model ( * input_data ) if : tvm_res = vm_res. asnumpy ( ). item ( ) assert pt_result == tvm_res else : tvm. testing. assert_allclose ( vm_res. asnumpy ( ), pt_result. numpy ( ), rtol = 1e-5, atol = 1e-5 )",False,"not isinstance(pt_result, torch.Tensor)",vm_res.has_numpy(),0.6462855339050293 1940,"def __open__ ( filename, * args, ** kwargs ) : if os. path. isfile ( filename ) : return __realopen__ ( filename, * args, ** kwargs ) if not os. path. isabs ( filename ) : datafilename = __papplet__. dataPath ( filename ) if : return __realopen__ ( datafilename, * args, ** kwargs ) sketchfilename = __papplet__. sketchPath ( filename ) if os. path. isfile ( sketchfilename ) : return __realopen__ ( sketchfilename, * args, ** kwargs ) return __realopen__ ( filename, * args, ** kwargs )",True,os.path.isfile(datafilename),os.path.isfile(datafilename),0.6473559141159058 1941,"def sql_select ( explain = False ) : statement, params = load_query ( request. args [ ""query"" ] ) engine = SQLAlchemy ( ). get_engine ( current_app ) if explain : if : statement = ""EXPLAIN QUERY PLAN\n%s"" % statement else : statement = ""EXPLAIN\n%s"" % statement result = engine. execute ( statement, params ) return g. debug_toolbar. render ( ""panels/sqlalchemy_select.html"", { ""result"" : result. fetchall ( ), ""headers"" : result. keys ( ), ""sql"" : format_sql ( statement, params ), ""duration"" : float ( request. args [ ""duration"" ] ), }, )",False,engine.driver == 'pysqlite',engine.name == 'sql',0.6601969003677368 1942,"def populate_dest_combo ( self ) : combo = self. widget ( ""migrate-dest"" ) model = combo. get_model ( ) idx = combo. get_active ( ) idxconn = None if idx!= - 1 : idxconn = model [ idx ] [ 1 ] rows = [ [ _ ( ""No connections available."" ), None, False, None ] ] if self. destconn_rows : rows = self. destconn_rows model. clear ( ) for r in rows : if r [ 1 ] == self. conn : continue model. append ( r ) idx = - 1 for i in range ( len ( model ) ) : row = model [ i ] conn = row [ 1 ] if idxconn : if : idx = i break else : if row [ 2 ] : idx = i break combo. set_active ( idx )",False,conn == idxconn and row[2],row[2],0.6590996980667114 1943,"def _make_entry ( filename, rpath = None, relative = None, shell = None, suffix = """", advanced = 0 ) : pkg = os. path. 
basename ( filename ) == ""__init__.py"" entry_code = entry_lines [ 0 ] % ( ""."" if ( relative is True ) or ( ( relative is None ) and pkg ) else """", suffix, ) with open ( filename, ""r"" ) as f : lines = f. readlines ( ) n = 0 for n in range ( len ( lines ) ) : if lines [ n ]. strip ( ) == """" or lines [ n ]. find ( ""__future__"" ) > 0 : continue if not lines [ n ] [ 0 ] == ""#"" : break for line in lines [ n : ] : if line. strip ( ) == entry_code. strip ( ) : return with open ( filename, ""w"" ) as f : f. write ( """". join ( lines [ : n ] ) ) if shell : f. write ( shell ) f. write ( entry_code ) paras = [ ] if rpath is not None : paras. append ( repr ( rpath ) ) if suffix : paras. append ( ""suffix=%s"" % repr ( suffix ) ) if : paras. append ( ""advanced=1"" ) f. write ( entry_lines [ 1 ] % "", "". join ( paras ) ) f. write ( """". join ( lines [ n : ] ) )",False,advanced,advanced > 0,0.7367694973945618 1944,"def _load_from_pytorch ( self, filename, ctx = None ) : import torch from mxnet import nd loaded = torch. load ( filename ) params = self. _collect_params_with_prefix ( ) new_params = { } for name in loaded : if ""bn"" in name or ""batchnorm"" in name or "".downsample.1."" in name : if : mxnet_name = name. replace ( ""weight"", ""gamma"" ) elif ""bias"" in name : mxnet_name = name. replace ( ""bias"", ""beta"" ) else : mxnet_name = name new_params [ mxnet_name ] = nd. array ( loaded [ name ]. cpu ( ). data. numpy ( ) ) else : new_params [ name ] = nd. array ( loaded [ name ]. cpu ( ). data. numpy ( ) ) for name in new_params : if name not in params : print ( ""==={}==="". format ( name ) ) raise Exception if name in params : params [ name ]. _load_init ( new_params [ name ], ctx = ctx )",True,'weight' in name,'weight' in name,0.6620233058929443 1945,"def evaluate_update_notification ( session, state, latest_version ) : priv_fact = ConfigFactory ( session, 1 ) stored_latest = priv_fact. get_val ( ""latest_version"" ) if parse_version ( stored_latest ) < parse_version ( latest_version ) : Cache. invalidate ( ) priv_fact. set_val ( ""latest_version"", latest_version ) if : return for user_desc in db_get_users ( session, 1, ""admin"" ) : lang = user_desc [ ""language"" ] template_vars = { ""type"" : ""software_update_available"", ""latest_version"" : latest_version, ""node"" : db_admin_serialize_node ( session, 1, lang ), ""notification"" : db_get_notification ( session, 1, lang ), ""user"" : user_desc, } state. format_and_send_mail ( session, 1, user_desc, template_vars )",False,parse_version(__version__) != parse_version(stored_latest),latest_version == latest_version,0.6465925574302673 1946,"def create_author_dict ( source ) : authors = [ ] with open ( source, ""rt"", encoding = ""utf-8-sig"" ) as csv_file : csv_reader = csv. reader ( csv_file, delimiter = "","" ) header_row = next ( csv_reader ) normalized_header_row = [ col_header. strip ( ) for col_header in header_row ] logger. info ( ""Debug data"" ) logger. info ( ""Header row: {}"". format ( header_row ) ) logger. info ( ""Normalized header row: {}"". format ( normalized_header_row ) ) name_index = normalized_header_row. index ( ""Name"" ) email_index = normalized_header_row. index ( ""Email"" ) for line in csv_reader : row = [ cell for cell in line ] logger. info ( ""Adding user: "" + row [ name_index ] ) if : author_dict = { ""name"" : row [ name_index ]. strip ( ), ""email"" : row [ email_index ], } else : author_dict = { ""name"" : row [ name_index ]. strip ( ) } authors. 
append ( author_dict ) return authors",False,row[email_index] != '',email_index > 0,0.6603578329086304 1947,"def AddIcon ( self, icon, mask = wx. NullBitmap ) : """"""Add an icon to the image list, or get the index if already there"""""" index = self. __magicImageListMapping. get ( id ( icon ) ) if index is None : if isinstance ( icon, wxIconPtr ) : index = self. __magicImageList. AddIcon ( icon ) elif isinstance ( icon, wx. BitmapPtr ) : if : index = self. __magicImageList. AddWithColourMask ( icon, mask ) else : index = self. __magicImageList. Add ( icon, mask ) else : raise ValueError ( ""Unexpected icon object %s, "" ""expected wx.Icon or wx.Bitmap"" % ( icon ) ) self. __magicImageListMapping [ id ( icon ) ] = index return index",False,"isinstance(mask, wx.Colour)","isinstance(icon, wx.Icon)",0.6660116314888 1948,"def resolve_common_type ( parser, commontype ) : try : return _CACHE [ commontype ] except KeyError : cdecl = COMMON_TYPES. get ( commontype, commontype ) if : result, quals = cdecl, 0 elif cdecl in model. PrimitiveType. ALL_PRIMITIVE_TYPES : result, quals = model. PrimitiveType ( cdecl ), 0 elif cdecl == ""set-unicode-needed"" : raise FFIError ( ""The Windows type %r is only available after "" ""you call ffi.set_unicode()"" % ( commontype, ) ) else : if commontype == cdecl : raise FFIError ( ""Unsupported type: %r. Please look at "" ""http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "" ""and file an issue if you think this type should really "" ""be supported."" % ( commontype, ) ) result, quals = parser. parse_type_and_quals ( cdecl ) assert isinstance ( result, model. BaseTypeByIdentity ) _CACHE [ commontype ] = result, quals return result, quals",False,"not isinstance(cdecl, str)",cdecl in model.PrimitiveType.COMMON_TYPES,0.6520634889602661 1949,"def moveToThreadNext ( self ) : """"""Move a position to threadNext position."""""" p = self if p. v : if : p. moveToFirstChild ( ) elif p. hasNext ( ) : p. moveToNext ( ) else : p. moveToParent ( ) while p : if p. hasNext ( ) : p. moveToNext ( ) break p. moveToParent ( ) return p",False,p.v.children,p.firstChild(),0.6624227166175842 1950,"def _roll ( self, mapper ) : t = time. time ( ) dt, self. _lastTime = t - self. _lastTime, t self. _xvel_dq. clear ( ) self. _yvel_dq. clear ( ) _hyp = sqrt ( ( self. _xvel ** 2 ) + ( self. _yvel ** 2 ) ) if _hyp!= 0.0 : _ax = self. _a * ( abs ( self. _xvel ) / _hyp ) _ay = self. _a * ( abs ( self. _yvel ) / _hyp ) else : _ax = self. _a _ay = self. _a _dvx = min ( abs ( self. _xvel ), _ax * dt ) _dvy = min ( abs ( self. _yvel ), _ay * dt ) _xvel = self. _xvel - copysign ( _dvx, self. _xvel ) _yvel = self. _yvel - copysign ( _dvy, self. _yvel ) dx = ( ( ( _xvel + self. _xvel ) / 2 ) * dt ) / self. _radscale dy = ( ( ( _yvel + self. _yvel ) / 2 ) * dt ) / self. _radscale self. _xvel = _xvel self. _yvel = _yvel self. action. add ( mapper, dx * self. speed [ 0 ], dy * self. speed [ 1 ] ) if dx or dy : if : WholeHapticAction. add ( self, mapper, dx, dy ) self. _roll_task = mapper. schedule ( 0.02, self. _roll )",False,self.haptic,self.action is not None,0.6579887270927429 1951,"def allocate_buffers ( self ) : if self. child : self. child. allocate_buffers ( ) for ind in range ( len ( self. buffers ) ) : if : if self. max_shape > 0 : self. buffers [ ind ] = self. be. iobuf ( self. 
max_shape, persist_values = False, parallelism = ""Data"" )",False,self.buffers[ind] is None,self.buffers[ind],0.6524829864501953 1952,"def test_all ( self ) : if type ( self ) is BaseTestConv : raise SkipTest ( ""base class"" ) ds = self. default_subsamples db = self. default_border_mode dflip = self. default_filter_flip dprovide_shape = self. default_provide_shape for ( i, f ) in zip ( self. inputs_shapes, self. filters_shapes ) : for provide_shape in self. provide_shape : yield ( self. tcase, i, f, ds, db, dflip, provide_shape ) if : for fd in self. filters_dilations : for s in self. subsamples : for b in self. border_modes : yield ( self. tcase, i, f, s, b, dflip, dprovide_shape, fd ) for flip in self. filter_flip : yield ( self. tcase, i, f, ds, db, flip, dprovide_shape )",False,min(i) > 0 and min(f) > 0,self.border_modes is not None,0.6507933139801025 1953,"def serialize ( self ) : data = { } if not self. ranges : raise RuntimeError ( ""Invalid ranges"" ) data [ ""ranges"" ] = self. ranges if self. field : data [ ""field"" ] = self. field elif self. key_field : data [ ""key_field"" ] = self. key_field if self. value_field : data [ ""value_field"" ] = self. value_field else : raise RuntimeError ( ""Invalid key_field: value_field required"" ) elif self. key_script : data [ ""key_script"" ] = self. key_script if : data [ ""value_script"" ] = self. value_script else : raise RuntimeError ( ""Invalid key_script: value_script required"" ) if self. params : data [ ""params"" ] = self. params params = self. _base_parameters ( ) params [ self. _internal_name ] = data return { self. name : params }",True,self.value_script,self.value_script,0.6647767424583435 1954,"def _get_level ( levels, level_ref ) : if level_ref in levels : return levels. index ( level_ref ) if isinstance ( level_ref, six. integer_types ) : if level_ref < 0 : level_ref += len ( levels ) if : raise PatsyError ( ""specified level %r is out of range"" % ( level_ref, ) ) return level_ref raise PatsyError ( ""specified level %r not found"" % ( level_ref, ) )",False,not 0 <= level_ref < len(levels),level_ref > len(levels),0.6495049595832825 1955,"def build_fingerprints ( targets, creds, config ) : fingerprints = list ( ) logger = logging. getLogger ( ""changeme"" ) for target in targets : for c in creds : if : continue if not config. portoverride and ( target. port and not c [ ""default_port"" ] == target. port ) : continue fp = c [ ""fingerprint"" ] for url in fp. get ( ""url"" ) : t = Target ( host = target. host, port = target. port, protocol = target. protocol ) if c. get ( ""ssl"" ) or config. ssl : t. protocol = ""https"" else : t. protocol = ""http"" if not t. port : t. port = c [ ""default_port"" ] t. url = url hfp = HttpFingerprint ( t, fp. get ( ""headers"", None ), fp. get ( ""cookie"", None ), config ) logger. debug ( ""Adding %s to fingerprint list"" % hfp. target ) fingerprints. append ( hfp ) return fingerprints",False,not c['protocol'] == 'http',c[target.target],0.6550403833389282 1956,"def available ( self ) : myenv = os. environ. copy ( ) myenv [ ""LANG"" ] = ""C"" try : ( out, _err ) = subp. subp ( [ ""growpart"", ""--help"" ], env = myenv ) if : return True except subp. ProcessExecutionError : pass return False",False,"re.search('--update\\s+', out)",self.has_help(),0.6482461094856262 1957,"def get_package_info ( self, pre = False ) : from. vendor. pip_shims. shims import pip_version, parse_version, CandidateEvaluator dependency_links = [ ] packages = self. 
get_installed_packages ( ) if parse_version ( pip_version ) < parse_version ( ""19.0"" ) : for dist in packages : if dist. has_metadata ( ""dependency_links.txt"" ) : dependency_links. extend ( dist. get_metadata_lines ( ""dependency_links.txt"" ) ) with self. get_finder ( ) as finder : if parse_version ( pip_version ) < parse_version ( ""19.0"" ) : finder. add_dependency_links ( dependency_links ) for dist in packages : typ = ""unknown"" all_candidates = finder. find_all_candidates ( dist. key ) if not self. pipfile. get ( ""pre"", finder. allow_all_prereleases ) : all_candidates = [ candidate for candidate in all_candidates if not candidate. version. is_prerelease ] if not all_candidates : continue candidate_evaluator = finder. make_candidate_evaluator ( project_name = dist. key ) best_candidate_result = candidate_evaluator. compute_best_candidate ( all_",False,best_candidate_result.best_candidate.link.is_wheel,pre,0.6498900651931763 1958,"def _run ( self ) : try : empty_sock, empty_chan = None, None while not self. finished. is_set ( ) : r, w, x = select. select ( [ self. sock, self. channel ], [ ], [ ], 1 ) if self. sock in r : empty_sock = self. read_and_write ( self. sock, self. channel, self. socket_chunk_size ) if : empty_chan = self. read_and_write ( self. channel, self. sock, self. channel_chunk_size ) if empty_sock or empty_chan : break finally : self. channel. close ( ) self. sock. close ( )",False,self.channel in r,empty_chan is None,0.6621233224868774 1959,"def readfifo ( data ) : nonlocal fifobuffer fifobuffer. extend ( data ) while fifobuffer : message, token, nextmsg = fifobuffer. partition ( b""\00"" ) if token : splitval = message. split ( b"" "", 1 ) cmd = splitval [ 0 ]. decode ( ""utf-8"" ) if len ( splitval ) > 1 : value = splitval [ 1 ]. decode ( ""utf-8"" ) else : value = """" if cmd == ""bbplain"" : bb. plain ( value ) elif cmd == ""bbnote"" : bb. note ( value ) elif cmd == ""bbwarn"" : bb. warn ( value ) elif cmd == ""bberror"" : bb. error ( value ) elif cmd == ""bbfatal"" : bb. error ( value ) elif : bb. error ( value, forcelog = True ) elif cmd == ""bbdebug"" : splitval = value. split ( "" "", 1 ) level = int ( splitval [ 0 ] ) value = splitval [ 1 ] if self. bin : if obj >= 0 : if obj <= 0xFF : self. write ( BININT1 + pack ( "" return if obj <= 0xFFFF : self. write ( BININT2 + pack ( "" return if - 0x80000000 <= obj <= 0x7FFFFFFF : self. write ( BININT + pack ( "" return if self. proto >= 2 : encoded = encode_long ( obj ) n = len ( encoded ) if : self. write ( LONG1 + pack ( "" else : self. write ( LONG4 + pack ( "" return self. write ( LONG + repr ( obj ). encode ( ""ascii"" ) + b""L\n"" )",False,n < 256,n > 0,0.6833459138870239 1961,"def _decimal_places_for_asset ( self, asset, reference_date ) : if isinstance ( asset, Future ) and asset. tick_size : return number_of_decimal_places ( asset. tick_size ) elif isinstance ( asset, ContinuousFuture ) : oc = self. _asset_finder. get_ordered_contracts ( asset. root_symbol ) contract_sid = oc. contract_before_auto_close ( reference_date. value ) if : contract = self. _asset_finder. retrieve_asset ( contract_sid ) if contract. tick_size : return number_of_decimal_places ( contract. tick_size ) return DEFAULT_ASSET_PRICE_DECIMALS",False,contract_sid is not None,contract_sid,0.6590975522994995 1962,"def __get_photo ( self, person_or_marriage ) : """"""returns the first photo in the media list or None"""""" media_list = person_or_marriage. get_media_list ( ) for media_ref in media_list : media_handle = media_ref. 
get_reference_handle ( ) media = self. database. get_media_from_handle ( media_handle ) mime_type = media. get_mime_type ( ) if : return media return None",False,mime_type and mime_type.startswith('image'),mime_type in self.photo_types,0.6461145281791687 1963,"def extract_range ( self ) : use_206 = False start = None end = None url = self. url range_h = self. env. get ( ""HTTP_RANGE"" ) if range_h : m = self. RANGE_HEADER. match ( range_h ) if : start = m. group ( 1 ) end = m. group ( 2 ) use_206 = True else : m = self. RANGE_ARG_RX. match ( url ) if : start = m. group ( 2 ) end = m. group ( 3 ) url = url [ : m. start ( 1 ) ] + url [ m. end ( 1 ) : ] use_206 = False if not start : return None start = int ( start ) if end : end = int ( end ) else : end = """" result = ( url, start, end, use_206 ) return result",True,m,m,0.6960936784744263 1964,"def draged ( self, ox, oy, nx, ny, i ) : self. update, self. infoupdate ( ), True if i [ 0 ] == True : for j in range ( len ( i [ 1 ] ) ) : i [ 1 ] [ j ] = ( i [ 1 ] [ j ] [ 0 ] + ( nx - ox ), i [ 1 ] [ j ] [ 1 ] + ( ny - oy ) ) else : i [ 0 ] [ i [ 1 ] ] = ( nx, ny ) if : i [ 0 ] [ - 1 ] = ( nx, ny ) if i [ 1 ] == len ( i [ 0 ] ) - 1 : i [ 0 ] [ 0 ] = ( nx, ny )",False,i[1] == 0,i[1] == len(i[0]) - 1,0.6664236783981323 1965,"def _GetObjectAtPos ( self, pos = - 1, bAllowCalls = 0 ) : left, right = self. _GetWordSplit ( pos, bAllowCalls ) if left : namespace = sys. modules. copy ( ) namespace. update ( __main__. __dict__ ) try : from pywin. framework import interact if interact. edit is not None and interact. edit. currentView is not None : globs, locs = interact. edit. currentView. GetContext ( ) [ : 2 ] if globs : namespace. update ( globs ) if : namespace. update ( locs ) except ImportError : pass try : return eval ( left, namespace ) except : pass return None",True,locs,locs,0.7155642509460449 1966,"def do_button_press_event ( self, event ) : if event. button > 3 : if event. button == self. preferences [ ""mouse_nav_button_back"" ] : self. open_page_back ( ) elif : self. open_page_forward ( ) else : logger. debug ( ""Unused mouse button %i"", event. button )",False,event.button == self.preferences['mouse_nav_button_forw'],event.button == self.preferences['mouse_nav_button_forward'],0.6502617001533508 1967,"def _initPosScale ( self, pos, size, stretch, log = True ) : """"""position (x,y) and size (magnification) of the rating scale"""""" if pos : if : offsetHoriz, offsetVert = pos elif log and self. autoLog : msg = ""RatingScale %s: pos expects a tuple (x,y)"" logging. warning ( msg % self. name ) try : self. offsetHoriz = float ( offsetHoriz ) except Exception : if self. savedWinUnits == ""pix"" : self. offsetHoriz = 0 else : self. offsetHoriz = 0.0 try : self. offsetVert = float ( offsetVert ) except Exception : if self. savedWinUnits == ""pix"" : self. offsetVert = int ( self. win. size [ 1 ] / - 5.0 ) else : self. offsetVert = - 0.4 if self. savedWinUnits == ""pix"" : self. offsetHoriz = float ( self. offsetHoriz ) / self. win. size [ 0 ] / 0.5 self. offsetVert = float ( self. offsetVert ) / self. win. size [ 1 ] / 0.5 self. pos = [ self. offsetHoriz, self. offsetVert ] try : self. stretch = float ( stretch ) except ValueError : self. stretch = 1.0 try : self. size = float ( size ) * 0.6 except ValueError : self. size = 0",False,len(list(pos)) == 2,size,0.6567631363868713 1968,"def store ( self, addr, data, size = None, condition = None, ** kwargs ) : if ( self. state. solver. symbolic ( addr ) and options. AVOID_MULTIVALUED_WRITES in self. 
state. options ) : return try : concrete_addrs = self. _interleave_ints ( sorted ( self. concretize_write_addr ( addr ) ) ) except SimMemoryError : if : return else : raise trivial = type ( addr ) is int or ( len ( concrete_addrs ) == 1 and ( addr == concrete_addrs [ 0 ] ). is_true ( ) ) if not trivial : constraint_options = [ addr == concrete_addr for concrete_addr in concrete_addrs ] conditional_constraint = self. state. solver. Or ( * constraint_options ) self. _add_constraints ( conditional_constraint, condition = condition, ** kwargs ) if len ( concrete_addrs ) == 1 : super ( ). store ( concrete_addrs [ 0 ], data, size = size, ** kwargs ) return for concrete_addr in concrete_addrs : if trivial : sub_condition = condition else : sub_condition = addr == concrete_addr if condition is not None : sub_condition = condition & sub_condition super ( ). store ( concrete_addr, data, size = size, condition = sub_condition",False,options.CONSERVATIVE_WRITE_STRATEGY in self.state.options,addr == concrete_addr,0.6553521156311035 1969,"def getnotes ( self, origin = None ) : if origin is None : result = self. translator_comments if : if result : result += ""\n"" + self. developer_comments else : result = self. developer_comments return result elif origin == ""translator"" : return self. translator_comments elif origin in ( ""programmer"", ""developer"", ""source code"" ) : return self. developer_comments else : raise ValueError ( ""Comment type not valid"" )",False,self.developer_comments,origin == 'developer',0.6638951897621155 1970,"def _from_word2vec_text ( fname ) : with _open ( fname, ""rb"" ) as fin : words = [ ] header = unicode ( fin. readline ( ) ) vocab_size, layer1_size = list ( map ( int, header. split ( ) ) ) vectors = [ ] for line_no, line in enumerate ( fin ) : try : parts = unicode ( line, encoding = ""utf-8"" ). strip ( ). split ( ) except TypeError as e : parts = line. strip ( ). split ( ) except Exception as e : logger. warning ( ""We ignored line number {} because of erros in parsing"" ""\n{}"". format ( line_no, e ) ) continue if len ( parts ) == layer1_size + 1 : word, weights = parts [ 0 ], list ( map ( float32, parts [ 1 : ] ) ) elif : word, weights = parts [ : 2 ], list ( map ( float32, parts [ 2 : ] ) ) word = u"" "". join ( word ) else : logger. warning ( <",False,len(parts) == layer1_size + 2,len(parts) == layer1_size,0.652237057685852 1971,"def run ( self, app, editor, args ) : line_nums = [ ] for cursor in editor. cursors : if : line_nums. append ( cursor. y ) data = editor. lines [ cursor. y ]. get_data ( ). upper ( ) editor. lines [ cursor. y ]. set_data ( data )",False,cursor.y not in line_nums,cursor.y >= 0 and cursor.y < line_nums,0.6618049144744873 1972,"def check_action ( self ) : params = BorgCheckThread. prepare ( self. profile ( ) ) if not params [ ""ok"" ] : self. _set_status ( params [ ""message"" ] ) return row_selected = self. archiveTable. selectionModel ( ). selectedRows ( ) if row_selected : archive_cell = self. archiveTable. item ( row_selected [ 0 ]. row ( ), 4 ) if : archive_name = archive_cell. text ( ) params [ ""cmd"" ] [ - 1 ] += f""::{archive_name}"" thread = BorgCheckThread ( params [ ""cmd"" ], params, parent = self. app ) thread. updated. connect ( self. _set_status ) thread. result. connect ( self. check_result ) self. _toggle_all_buttons ( False ) thread. start ( )",True,archive_cell,archive_cell,0.6667417287826538 1973,"def GetLogHandlers ( ) : formatter = logging. Formatter ( LOG_FORMAT ) engines = config. 
CONFIG [ ""Logging.engines"" ] logging. debug ( ""Will use logging engines %s"", engines ) for engine in engines : try : if engine == ""stderr"" : handler = logging. StreamHandler ( ) handler. setFormatter ( formatter ) yield handler elif engine == ""event_log"" : handler = handlers. NTEventLogHandler ( ""GRR"" ) handler. setFormatter ( formatter ) yield handler elif engine == ""syslog"" : socket_name = config. CONFIG [ ""Logging.syslog_path"" ] if "":"" in socket_name : addr, port = socket_name. split ( "":"", 1 ) handler = RobustSysLogHandler ( ( addr, int ( port ) ) ) else : handler = RobustSysLogHandler ( socket_name ) handler. setFormatter ( formatter ) yield handler elif engine == ""file"" : path = config. CONFIG [ ""Logging.filename"" ] logging. info ( ""Writing log file to %s"", path ) <",False,not os.path.isdir(os.path.dirname(path)),engine != 'global',0.6487890481948853 1974,"def best_match ( self, supported_content_types ) : best_quality = - 1 best_content_type = None best_params = { } best_match = ""*/*"" for content_type in supported_content_types : for content_mask, params in self. _content_types : try : quality = float ( params. get ( ""q"", 1 ) ) except ValueError : continue if quality < best_quality : continue elif best_quality == quality : if : continue if self. _match_mask ( content_mask, content_type ) : best_quality = quality best_content_type = content_type best_params = params best_match = content_mask return best_content_type, best_params",False,best_match.count('*') <= content_mask.count('*'),best_match == content_type,0.6548671126365662 1975,"def test_field_attr_existence ( self ) : for name, item in ast. __dict__. items ( ) : if self. _is_ast_node ( name, item ) : if : continue x = item ( ) if isinstance ( x, ast. AST ) : self. assertEqual ( type ( x. _fields ), tuple )",False,name == 'Index',"isinstance(item, ast.Enter)",0.6626070737838745 1976,"def check_buffer ( self ) : for i, pkt in enumerate ( self. buffer ) : last = self. packets [ - 1 : ] [ 0 ] if Raw in last : next_seq = self. seq + len ( last [ Raw ]. load ) else : next_seq = self. seq if next_seq == pkt [ TCP ]. seq : self. packets += self. buffer. pop ( i ) self. seq = pkt [ TCP ]. seq if : self. payload += str ( pkt [ Raw ]. load ) self. data_transfered += len ( pkt [ Raw ]. load ) return True return False",True,Raw in pkt,Raw in pkt,0.6689813137054443 1977,"def _process ( self ) : while True : try : process = await asyncio. create_subprocess_exec ( ""iostat"", ""-d"", ""-x"", ""-z"", str ( self. interval ), stdout = subprocess. PIPE, stderr = subprocess. DEVNULL, ) lines = [ ] is_first_reading = True while True : try : line = await asyncio. wait_for ( process. stdout. readline ( ), 5 ) except asyncio. TimeoutError : if lines : if : self. _on_iostat_output ( lines ) else : is_first_reading = False lines = [ ] else : if not line : break lines. append ( line. decode ( ""utf-8"", ""ignore"" ) )",False,not is_first_reading,is_first_reading,0.6502025127410889 1978,"def get_result ( self ) : try : sz = len ( self. dataset ) except NotImplementedError : sz = 0 with get_tqdm ( total = sz, disable = ( sz == 0 ) ) as pbar : die_cnt = 0 while True : res = self. result_queue. get ( ) pbar. update ( ) if res [ 0 ]!= DIE : yield res [ 1 ] else : die_cnt += 1 if : break self. inqueue_proc. join ( ) self. inqueue_proc. terminate ( ) if self. ordered : self. result_queue. join ( ) self. result_queue. terminate ( ) for p in self. workers : p. join ( ) p. 
terminate ( )",False,die_cnt == self.nr_proc,die_cnt > 10,0.6527413725852966 1979,"def stream_docker_log ( log_stream ) : async for line in log_stream : if : logger. debug ( line [ ""stream"" ]. strip ( ) ) elif ""status"" in line : logger. debug ( line [ ""status"" ]. strip ( ) ) elif ""error"" in line : logger. error ( line [ ""error"" ]. strip ( ) ) raise DockerBuildError",False,'stream' in line and line['stream'].strip(),'stream' in line,0.6532998085021973 1980,"def as_dict ( path = """", version = ""latest"", section = ""meta-data"" ) : result = { } dirs = dir ( path, version, section ) if not dirs : return None for item in dirs : if item. endswith ( ""/"" ) : records = as_dict ( path + item, version, section ) if records : result [ item [ : - 1 ] ] = records elif : idx, name = is_dict. match ( item ). groups ( ) records = as_dict ( path + idx + ""/"", version, section ) if records : result [ name ] = records else : result [ item ] = valueconv ( get ( path + item, version, section ) ) return result",False,is_dict.match(item),"isinstance(item, dict)",0.6471608877182007 1981,"def _collects_refs_to_upload ( self, package_id, reference_or_pattern, confirm ) : """"""validate inputs and compute the refs (without revisions) to be uploaded"""""" if package_id and not check_valid_ref ( reference_or_pattern, strict_mode = False ) : raise ConanException ( ""-p parameter only allowed with a valid recipe reference, "" ""not with a pattern"" ) if package_id or check_valid_ref ( reference_or_pattern ) : ref = ConanFileReference. loads ( reference_or_pattern ) if ref. revision and not self. _cache. config. revisions_enabled : raise ConanException ( ""Revisions not enabled in the client, specify a "" ""reference without revision"" ) refs = [ ref, ] confirm = True else : refs = search_recipes ( self. _cache, reference_or_pattern ) if : raise NotFoundException ( ( ""No packages found matching pattern '%s'"" % reference_or_pattern ) ) return refs, confirm",True,not refs,not refs,0.6721539497375488 1982,"def unlink_asset_reference ( self ) : for d in self. get ( ""accounts"" ) : if : asset = frappe. get_doc ( ""Asset"", d. reference_name ) for s in asset. get ( ""schedules"" ) : if s. journal_entry == self. name : s. db_set ( ""journal_entry"", None ) idx = cint ( s. finance_book_id ) or 1 finance_books = asset. get ( ""finance_books"" ) [ idx - 1 ] finance_books. value_after_depreciation += s. depreciation_amount finance_books. db_update ( ) asset. set_status ( )",False,d.reference_type == 'Asset' and d.reference_name,d.reference_name,0.6489284634590149 1983,"def read_oclc ( rec ) : found = [ ] tag_001 = rec. get_fields ( ""001"" ) tag_003 = rec. get_fields ( ""003"" ) if tag_001 and tag_003 and re_ocolc. match ( tag_003 [ 0 ] ) : oclc = tag_001 [ 0 ] m = re_ocn_or_ocm. match ( oclc ) if m : oclc = m. group ( 1 ) if oclc. isdigit ( ) : found. append ( oclc ) for f in rec. get_fields ( ""035"" ) : for k, v in f. get_subfields ( [ ""a"" ] ) : m = re_oclc. match ( v ) if not m : m = re_ocn_or_ocm. match ( v ) if m and not m. group ( 1 ). isdigit ( ) : m = None if m : oclc = m. group ( 1 ) if : found. append ( oclc ) return remove_duplicates ( found )",False,oclc not in found,oclc,0.6690476536750793 1984,"def substitute ( substitutions, fname ) : import re var_re = re. compile ( r""\$\{([A-Za-z_0-9]+)\}"" ) string_var_re = re. compile ( r""\$str\{([A-Za-z_0-9]+)\}"" ) fname_in = fname + "".in"" lines = open ( fname_in, ""r"" ). 
readlines ( ) new_lines = [ ] for line in lines : made_change = True while made_change : made_change = False match = var_re. search ( line ) if : varname = match. group ( 1 ) line = ( line [ : match. start ( ) ] + str ( substitutions [ varname ] ) + line [ match. end ( ) : ] ) made_change = True match = string_var_re. search ( line ) if : varname = match. group ( 1 ) subst = substitutions [ varname ] if subst is None : subst = """" else : subst = '""%s""' % subst line = line [ : match. start ( ) ] + subst + line [ match. end ( ) : ] <",True,match,match,0.6830191612243652 1985,"def translation ( domain, localedir = None, languages = None, class_ = None, fallback = False, codeset = None ) : if class_ is None : class_ = GNUTranslations mofiles = find ( domain, localedir, languages, all = True ) if not mofiles : if fallback : return NullTranslations ( ) raise OSError ( ENOENT, ""No translation file found for domain"", domain ) result = None for mofile in mofiles : key = ( class_, os. path. abspath ( mofile ) ) t = _translations. get ( key ) if t is None : with open ( mofile, ""rb"" ) as fp : t = _translations. setdefault ( key, class_ ( fp ) ) t = copy. copy ( t ) if : t. set_output_charset ( codeset ) if result is None : result = t else : result. add_fallback ( t ) return result",False,codeset,codeset is not None,0.6781377792358398 1986,def is_all_qud ( world ) : m = True for obj in world : if obj. blond : if : m = m and True else : m = m and False else : m = m and True return m,False,obj.nice,m,0.6619960069656372 1987,"def sendCommand ( self, cmd, cmd_type = None, part_of_job = False, processed = False, force = False, on_sent = None, tags = None, ) : if not isinstance ( cmd, QueueMarker ) : cmd = to_unicode ( cmd, errors = ""replace"" ) if : cmd = process_gcode_line ( cmd ) if not cmd : return False gcode = gcode_command_for_cmd ( cmd ) force = force or gcode in self. _emergency_commands if tags is None : tags = set ( ) if part_of_job : self. _job_queue. put ( ( cmd, cmd_type, on_sent, tags | { ""source:job"" } ) ) return True elif ( self. isPrinting ( ) and not self. isSdFileSelected ( ) and not self. job_on_hold and not force ) : try : self. _command_queue. put ( ( cmd, cmd_type, on_sent, tags ), item_type = cmd_type ) return True except TypeAlreadyInQueue as e : self. _logger. debug ( ""Type already in command queue: "" + e. type ) return False elif self. isOperational ( ) or force : return self. _sendCommand ( cmd, cmd_type = cmd_type, on_sent = on_sent, tags = tags )",False,not processed,processed,0.6793094873428345 1988,"def initial_form_count ( self ) : """"""Returns the number of forms that are required in this FormSet."""""" if self. is_bound : return self. management_form. cleaned_data [ INITIAL_FORM_COUNT ] else : initial_forms = self. initial and len ( self. initial ) or 0 if : initial_forms = self. max_num return initial_forms",False,initial_forms > self.max_num >= 0,self.max_num is not None,0.654896080493927 1989,"def _setVolume ( self, value, setClient = True ) : if value is None : self. _volume = None elif hasattr ( value, ""getDynamicContext"" ) : if setClient : if : value = copy. deepcopy ( value ) value. client = self self. _volume = value elif common. isNum ( value ) and setClient : vol = self. _getVolume ( ) if value < 1 : vol. velocityScalar = value else : vol. 
velocity = value else : raise Exception ( f""this must be a Volume object, not {value}"" )",False,value.client is not None,common.isNum(value),0.6574002504348755 1990,"def get_url ( token, base_url ) : """"""Parse an token."""""" if token. type == ""url"" : return _get_url_tuple ( token. value, base_url ) elif token. type == ""function"" : if token. name == ""attr"" : return check_attr_function ( token, ""url"" ) elif : return _get_url_tuple ( token. arguments [ 0 ]. value, base_url )",False,"token.name == 'url' and len(token.arguments) in (1, 2)",token.name == 'arg',0.6497083902359009 1991,"def initialize ( ) : global args, term_mode if ""READTHEDOCS"" in os. environ : os. environ [ ""PWNLIB_NOTERM"" ] = ""1"" for k, v in os. environ. items ( ) : if not k. startswith ( env_prefix ) : continue k = k [ len ( env_prefix ) : ] if k in hooks : hooks [ k ] ( v ) elif isident ( k ) : args [ k ] = v argv = sys. argv [ : ] for arg in sys. argv [ : ] : orig = arg value = ""True"" if ""="" in arg : arg, value = arg. split ( ""="", 1 ) if : sys. argv. remove ( orig ) hooks [ arg ] ( value ) elif free_form and isident ( arg ) : sys. argv. remove ( orig ) args [ arg ] = value if term_mode : term. init ( )",False,arg in hooks,free_form and isident(arg),0.6889987587928772 1992,"def test_training_script_with_max_history_set ( tmpdir ) : train_dialogue_model ( DEFAULT_DOMAIN_PATH, DEFAULT_STORIES_FILE, tmpdir. strpath, interpreter = RegexInterpreter ( ), policy_config = ""data/test_config/max_hist_config.yml"", kwargs = { }, ) agent = Agent. load ( tmpdir. strpath ) for policy in agent. policy_ensemble. policies : if hasattr ( policy. featurizer, ""max_history"" ) : if : assert policy. featurizer. max_history == 2 else : assert policy. featurizer. max_history == 5",False,type(policy) == FormPolicy,policy.featurizer.max_history == 1 5,0.6545754075050354 1993,"def test_evname_in_mp_events_testcases ( ) : ok = True for evname in ins. mp_events : if evname == ""version"" : continue for i, args in enumerate ( ins. mp_events [ evname ] [ ""test_cases"" ] ) : if : msg = ""Error, for evname %s the testase #%d does not match evname"" print ( msg % ( evname, i ) ) ok = False if ok : print ( ""test_evname_in_mp_events_testcases: passed"" )",False,evname != args[0],not args[0],0.6629520654678345 1994,"def split_curve ( curve, splits, rescale = False ) : if hasattr ( curve, ""split_at"" ) : result = [ ] for split in splits : head, tail = curve. split_at ( split ) if rescale : head = reparametrize_curve ( head, 0, 1 ) result. append ( head ) curve = tail if rescale : tail = reparametrize_curve ( tail, 0, 1 ) result. append ( tail ) return result else : t_min, t_max = curve. get_u_bounds ( ) if : splits. insert ( 0, t_min ) if splits [ - 1 ]!= t_max : splits. append ( t_max ) pairs = zip ( splits, splits [ 1 : ] ) result = [ ] for start, end in pairs : segment = SvCurveSegment ( curve, start, end, rescale ) result. append ( segment ) return result",False,splits[0] != t_min,t_min != t_max,0.663965106010437 1995,"def _ensure_header_written ( self, datasize ) : if not self. _headerwritten : if not self. _nchannels : raise Error ( ""# channels not specified"" ) if not self. _sampwidth : raise Error ( ""sample width not specified"" ) if : raise Error ( ""sampling rate not specified"" ) self. _write_header ( datasize )",False,not self._framerate,not self._samprate,0.6710440516471863 1996,"def backend_supported ( module, manager, ** kwargs ) : if CollectionNodeModule. 
backend_supported ( module, manager, ** kwargs ) : if : return True conn = manager. connection ( did = kwargs [ ""did"" ] ) template_path = ""partitions/sql/{0}/#{0}#{1}#"". format ( manager. server_type, manager. version ) SQL = render_template ( ""/"". join ( [ template_path, ""backend_support.sql"" ] ), tid = kwargs [ ""tid"" ] ) status, res = conn. execute_scalar ( SQL ) if not status : return internal_server_error ( errormsg = res ) return res",False,'tid' not in kwargs,not kwargs,0.6611295938491821 1997,"def getTTGlyphList ( font ) : if isinstance ( font, str ) : font = ttLib. TTFont ( font ) if not ""cmap"" in font : raise Exception ( ""missing cmap table"" ) gl = { } bestCodeSubTable = None bestCodeSubTableFormat = 0 for st in font [ ""cmap"" ]. tables : if : if st. format > bestCodeSubTableFormat : bestCodeSubTable = st bestCodeSubTableFormat = st. format if bestCodeSubTable is not None : for cp, glyphname in bestCodeSubTable. cmap. items ( ) : if glyphname in gl : gl [ glyphname ]. append ( cp ) else : gl [ glyphname ] = [ cp ] return gl, font",False,st.platformID == 0,st.format > bestCodeSubTableFormat,0.6589378118515015 1998,"def get_type_and_shape ( cls, bitmap ) : w = _FI. FreeImage_GetWidth ( bitmap ) h = _FI. FreeImage_GetHeight ( bitmap ) fi_type = _FI. FreeImage_GetImageType ( bitmap ) if not fi_type : raise ValueError ( ""mahotas.freeimage: unknown image pixel type"" ) dtype = cls. dtypes [ fi_type ] if fi_type == cls. FIT_BITMAP : bpp = _FI. FreeImage_GetBPP ( bitmap ) if bpp == 1 : return ""bit"", None elif bpp == 8 : extra_dims = [ ] elif bpp == 16 : extra_dims = [ ] dtype = np. uint16 elif bpp == 24 : extra_dims = [ 3 ] elif : extra_dims = [ 4 ] else : raise ValueError ( ""mahotas.freeimage: cannot convert %d BPP bitmap"" % bpp ) else : extra_dims = cls. extra_dims [ fi_type ] return np. dtype ( dtype ), extra_dims + [ w, h ]",True,bpp == 32,bpp == 32,0.6772236824035645 1999,"def Decorator ( * args, ** kwargs ) : delay = 0.2 num_attempts = 15 cur_attempt = 0 while True : try : return f ( * args, ** kwargs ) except exceptions. WebDriverException as e : logging. warning ( ""Selenium raised %s"", utils. SmartUnicode ( e ) ) cur_attempt += 1 if : raise time. sleep ( delay )",False,cur_attempt == num_attempts,cur_attempt > num_attempts,0.6543393135070801 2000,"def cleanLinks ( self, links ) : returnLinks = dict ( ) for link in links : linkBase = self. sf. urlBaseUrl ( link ) linkFQDN = self. sf. urlFQDN ( link ) if : continue if self. opts [ ""nosubs"" ] and not self. getTarget ( ). matches ( linkFQDN, includeChildren = False ) : continue if not self. getTarget ( ). matches ( linkFQDN, includeParents = False ) : continue if self. opts [ ""filterusers"" ] and ""/~"" in link : continue if linkBase in self. robotsRules and self. opts [ ""robotsonly"" ] : if list ( filter ( lambda blocked : type ( blocked ). lower ( blocked ) in link. lower ( ) or blocked == ""*"", self. robotsRules [ linkBase ], ) ) : continue self. sf. debug ( ""Adding URL for spidering: "" + link ) returnLinks [ link ] = links [ link ] return returnLinks",False,not self.getTarget().matches(linkFQDN),linkBase in self.opts,0.6576526165008545 2001,"def subscriptions_cancel ( s_id ) : os. system ( ""sudo chown admin:admin {0}"". format ( SUBSCRIPTIONS_FILE ) ) subs = toml. load ( SUBSCRIPTIONS_FILE ) new_list = [ ] removed_cert = None for idx, sub in enumerate ( subs [ ""subscriptions_letsencrypt"" ] ) : if sub [ ""id"" ]!= s_id : new_list. 
append ( sub ) else : removed_cert = sub subs [ ""subscriptions_letsencrypt"" ] = new_list if removed_cert : acme_result = subprocess. Popen ( [ ""/home/admin/config.scripts/bonus.letsencrypt.sh"", ""remove-cert"", removed_cert [ ""id"" ], removed_cert [ ""target"" ], ], stdout = subprocess. PIPE, stderr = subprocess. STDOUT, encoding = ""utf8"", ) out, err = acme_result. communicate ( ) if : time. sleep ( 6 ) raise BlitzError ( ""letsencrypt acme failed"", out ) with open ( SUBSCRIPTIONS_FILE, ""w"" ) as writer : writer. write ( toml. dumps ( subs ) ) writer. close ( ) print ( json. dumps ( subs, indent = 2 ) )",False,out.find('error=') > -1,err,0.6472088098526001 2002,"def nq ( t ) : p = t [ 0 ] if ( t and t [ 0 ] in ""-+"" ) else """" t = t [ len ( p ) : ] if t. startswith ( ""tag:"" ) or t. startswith ( ""in:"" ) : try : raw_tag = session. config. get_tag ( t. split ( "":"" ) [ 1 ] ) if : t = ""in:%s"" % raw_tag. slug except ( IndexError, KeyError, TypeError ) : pass return p + t",False,raw_tag and raw_tag.hasattr(slug),raw_tag,0.6510717272758484 2003,"def nodes_action ( self, action, node_id, username, ** kwargs ) : if not self. has_node ( node_id ) : message = ""node[node_id={node_id}] not found in task[task_id={task_id}]"". format ( node_id = node_id, task_id = self. id ) return { ""result"" : False, ""message"" : message } if action not in NODE_ACTIONS : return { ""result"" : False, ""message"" : ""task action is invalid"" } try : if action == ""callback"" : action_result = NODE_ACTIONS [ action ] ( node_id, kwargs [ ""data"" ] ) elif : action_result = NODE_ACTIONS [ action ] ( node_id, kwargs [ ""flow_id"" ] ) elif action == ""retry"" : action_result = NODE_ACTIONS [ action ] ( node_id, kwargs [ ""inputs"" ] ) elif action == ""forced_fail"" : action_result = NODE_ACTIONS [ action ] ( node_id, ex_data = ""forced fail by {}"". format ( username ) ) else : action_result = NODE_ACTIONS [ action ] ( node_id ) except Exception as e : message = ""task[id=%s] node[id=%s] action failed:%s"" % ( self. id, node_id, e ) logger. exception ( traceback. format_exc ( ) ) return { ""result"" : False, ""message"" : message } if action_result. result : return { ""result""",False,action == 'skip_exg',action == 'flow',0.6521509885787964 2004,"def execute ( self, quals, columns ) : gc. collect ( ) result = [ ] for obj in gc. get_objects ( ) : tobj = type ( obj ) if isinstance ( obj, bytes ) : obj = obj. decode ( ""utf8"" ) elif : pass else : try : obj = bytes ( obj ). decode ( ""utf8"" ) except ( UnicodeEncodeError, UnicodeDecodeError ) : try : obj = unicode_ ( obj ) except ( UnicodeEncodeError, UnicodeDecodeError ) : obj = unicode_ ( """" ) result. append ( { ""object"" : obj, ""type"" : unicode_ ( tobj ), ""id"" : unicode_ ( id ( obj ) ), ""refcount"" : unicode_ ( sys. getrefcount ( obj ) ), } ) return result",False,"isinstance(obj, unicode_)","isinstance(obj, six.string_types)",0.6486866474151611 2005,"def events_eventId ( dbsession, request_inputs, eventId ) : user_auth = request_inputs [ ""auth"" ] method = request_inputs [ ""method"" ] params = request_inputs [ ""params"" ] userId = request_inputs [ ""userId"" ] return_object = { } httpcode = 500 try : if : ret = db_events. get_byevent_id ( userId = userId, eventId = eventId, session = dbsession ) if not ret : httpcode = 404 raise Exception ( ""Event not found"" ) else : return_object = ret httpcode = 200 elif method == ""DELETE"" : ret = db_events. 
delete_byevent_id ( userId = userId, eventId = eventId, session = dbsession ) if not ret : httpcode = 404 raise Exception ( ""Event not found"" ) else : return_object = True httpcode = 200 except Exception as err : return_object = anchore_engine. common. helpers. make_response_error ( err, in_httpcode = httpcode ) return return_object, httpcode",True,method == 'GET',method == 'GET',0.6685078740119934 2006,"def get_all_topic_src_files ( self ) : """"""Retrieves the file paths of all the topics in directory"""""" topic_full_paths = [ ] topic_names = os. listdir ( self. topic_dir ) for topic_name in topic_names : if not topic_name. startswith ( ""."" ) : topic_full_path = os. path. join ( self. topic_dir, topic_name ) if : topic_full_paths. append ( topic_full_path ) return topic_full_paths",False,topic_full_path != self.index_file,os.path.exists(topic_full_path),0.6495609283447266 2007,"def generate_primary_file ( self, dataset = None ) : rval = [ ""<html><head><title>Spaln Database</title></head><p/>"" ] rval. append ( ""<div>This composite dataset is composed of the following files:<p/><ul>"" ) for composite_name, composite_file in self. get_composite_files ( dataset = dataset ). items ( ) : fn = composite_name opt_text = """" if : rval. append ( '<li><a href=""%s"" type=""application/binary"">%s (%s)</a>%s</li>' % ( fn, fn, composite_file. get ( ""description"" ), opt_text ) ) else : rval. append ( '<li><a href=""%s"" type=""application/binary"">%s</a>%s</li>' % ( fn, fn, opt_text ) ) rval. append ( ""</ul></div></html>
    "" ) return ""\n"". join ( rval )",False,composite_file.get('description'),"hasattr(composite_file, 'get')",0.653140127658844 2008,"def _load ( self, name ) : image_glob = os. path. join ( self. data_root, ""images"", name, ""*.jpg"" ) image_files = glob. glob ( image_glob ) gt_dir = os. path. join ( self. data_root, ""groundTruth"", name ) self. data = np. zeros ( ( len ( image_files ), IMG_H, IMG_W, 3 ), dtype = ""uint8"" ) self. label = np. zeros ( ( len ( image_files ), IMG_H, IMG_W ), dtype = ""float32"" ) for idx, f in enumerate ( image_files ) : im = cv2. imread ( f, cv2. IMREAD_COLOR ) assert im is not None if im. shape [ 0 ] > im. shape [ 1 ] : im = np. transpose ( im, ( 1, 0, 2 ) ) assert im. shape [ : 2 ] == ( IMG_H, IMG_W ), ""{}!= {}"". format ( im. shape [ : 2 ], ( IMG_H, IMG_W ) ) imgid = os. path. basename ( f ). split ( ""."" ) [ 0 ] gt_file = os. path. join ( gt_dir, imgid ) gt = loadmat ( gt_file ) [ ""groundTruth"" ] [ 0 ] n_annot = gt. shape [ 0 ] gt = sum ( gt [ k ] [ ""Boundaries"" ] [ 0 ] [ 0 ] for k in range ( n_annot ) ) gt = gt. astype ( ""float32"" ) gt *= 1.0 / n_annot if : gt = gt. transpose ( ) assert gt. shape == ( IMG_H,",False,gt.shape[0] > gt.shape[1],gt.shape[0] > 0,0.6514050960540771 2009,"def get_connection ( self ) : if self. config. proxy_host!= """" : return httplib. HTTPConnection ( self. config. proxy_host, self. config. proxy_port ) else : if : return httplib. HTTPSConnection ( self. config. simpledb_host ) else : return httplib. HTTPConnection ( self. config. simpledb_host )",False,self.config.use_https,self.config.simpledb_host != '',0.6504498720169067 2010,"def __init__ ( self, fileobj ) : """"""Raises MonkeysAudioHeaderError"""""" header = fileobj. read ( 76 ) if len ( header )!= 76 or not header. startswith ( b""MAC "" ) : raise MonkeysAudioHeaderError ( ""not a Monkey's Audio file"" ) self. version = cdata. ushort_le ( header [ 4 : 6 ] ) if self. version >= 3980 : ( blocks_per_frame, final_frame_blocks, total_frames, self. bits_per_sample, self. channels, self. sample_rate, ) = struct. unpack ( "" else : compression_level = cdata. ushort_le ( header [ 6 : 8 ] ) self. channels, self. sample_rate = struct. unpack ( "" total_frames, final_frame_blocks = struct. unpack ( "" if : blocks_per_frame = 73728 * 4 elif self. version >= 3900 or ( self. version >= 3800 and compression_level == 4 ) : blocks_per_frame = 73728 else : blocks_per_frame = 9216 self. bits_per_sample = 0 if header [ 48 : ]. startswith ( b""WAVEfmt"" ) : self. bits_per_sample = struct. unpack ( "" self. version /= 1000.0 self",False,self.version >= 3950,self.version >= 900,0.6752477884292603 2011,"def extract_line_count ( filename, target_dir ) : example_file = os. path. join ( target_dir, filename ) if six. PY2 : lines = open ( example_file ). readlines ( ) else : lines = open ( example_file, encoding = ""utf-8"" ). readlines ( ) start_row = 0 if lines and lines [ 0 ]. startswith ( ""#!"" ) : lines. pop ( 0 ) start_row = 1 line_iterator = iter ( lines ) tokens = tokenize. generate_tokens ( lambda : next ( line_iterator ) ) check_docstring = True erow_docstring = 0 for tok_type, _, _, ( erow, _ ), _ in tokens : tok_type = token. 
tok_name [ tok_type ] if tok_type in ( ""NEWLINE"", ""COMMENT"", ""NL"", ""INDENT"", ""DEDENT"" ) : continue elif : erow_docstring = erow check_docstring = False return erow_docstring + 1 + start_row, erow + 1 + start_row",False,tok_type == 'STRING' and check_docstring,check_docstring,0.6520117521286011 2012,"def init_errorhandler ( ) : for ex in default_exceptions : if : app. register_error_handler ( ex, error_http ) elif ex == 500 : app. register_error_handler ( ex, internal_error ) if services. ldap : @ app. errorhandler ( services. ldap. LDAPException ) def handle_exception ( e ) : log. debug ( ""LDAP server not accessible while trying to login to opds feed"" ) return error_http ( FailedDependency ( ) )",False,ex < 500,ex == 500,0.7078532576560974 2013,"def _process_checkpoint_store_for_checklist ( self ) : config_commented_map : CommentedMap = self. data_context. get_config ( ). commented_map checkpoint_store_name : Optional [ str ] = config_commented_map. get ( ""checkpoint_store_name"" ) stores : dict = config_commented_map [ ""stores"" ] if checkpoint_store_name : if stores. get ( checkpoint_store_name ) : self. upgrade_log [ ""skipped_upgrade"" ] = True else : self. upgrade_checklist [ ""stores"" ] = { checkpoint_store_name : DataContextConfigDefaults. DEFAULT_STORES. value [ DataContextConfigDefaults. DEFAULT_CHECKPOINT_STORE_NAME. value ] } else : checkpoint_store_name = ( DataContextConfigDefaults. DEFAULT_CHECKPOINT_STORE_NAME. value ) self. upgrade_checklist [ ""checkpoint_store_name"" ] = checkpoint_store_name if : self. upgrade_checklist [ ""stores"" ] = { checkpoint_store_name : DataContextConfigDefaults. DEFAULT_STORES. value [ checkpoint_store_name ] }",False,not stores.get(checkpoint_store_name),stores.get(checkpoint_store_name),0.6544827222824097 2014,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 8 : self. set_value ( d. getVarInt32 ( ) ) continue if : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 0,tt > 255,0.6846723556518555 2015,"def update_actions ( self, items, changed ) : if len ( items ) > 0 : can_create = isinstance ( items [ 0 ], model. Folder ) and len ( items ) == 1 can_copy = True for item in items : if : can_copy = False break self. action_new_top_folder. setEnabled ( True ) self. action_new_sub_folder. setEnabled ( can_create ) self. action_new_phrase. setEnabled ( can_create ) self. action_new_script. setEnabled ( can_create ) self. action_copy_item. setEnabled ( can_copy ) self. action_clone_item. setEnabled ( can_copy ) self. action_paste_item. setEnabled ( can_create and len ( self. central_widget. cutCopiedItems ) > 0 ) self. action_record_script. setEnabled ( isinstance ( items [ 0 ], model. Script ) and len ( items ) == 1 ) self. action_run_script. setEnabled ( isinstance ( items [ 0 ], model. Script ) and len ( items ) == 1 ) self. menu_insert_macros. setEnabled ( isinstance ( items [ 0 ], model. Phrase ) and len ( items ) == 1 ) if changed : self. action_save. setEnabled ( False ) self. action_undo. setEnabled ( False ) self. action_redo. setEnabled ( False )",True,"isinstance(item, model.Folder)","isinstance(item, model.Folder)",0.6504238247871399 2016,"def __str__ ( self ) : wires_print = lambda ob : ""'"". join ( map ( str, ob. wires. tolist ( ) ) ) terms_ls = [ ] for i, obs in enumerate ( self. ops ) : if : obs_strs = [ f""{OBS_MAP.get(ob.name, ob.name)}{wires_print(ob)}"" for ob in obs. obs ] ob_str = "" "". 
join ( obs_strs ) elif isinstance ( obs, Observable ) : ob_str = f""{OBS_MAP.get(obs.name, obs.name)}{wires_print(obs)}"" term_str = f""({self.coeffs[i]}) [{ob_str}]"" terms_ls. append ( term_str ) return ""\n+ "". join ( terms_ls )",False,"isinstance(obs, Tensor)","isinstance(obs, Observable)",0.6577979326248169 2017,"def _moments ( self, inputs : tf. Tensor, use_batch_stats : types. BoolLike ) -> Tuple [ tf. Tensor, tf. Tensor ] : if use_batch_stats : if : mean = self. _fused_constant variance = self. _fused_constant else : mean, variance = tf. nn. moments ( inputs, self. _axis, keepdims = True ) else : mean = self. moving_mean. value variance = self. moving_variance. value if : mean = tf. squeeze ( mean, self. _axis ) variance = tf. squeeze ( variance, self. _axis ) return mean, variance",False,self._fused,self.moving_mean is None,0.6724948287010193 2018,"def show_message ( self, message, title = None, important = False, widget = None ) : if important : dlg = Gtk. MessageDialog ( self. main_window, Gtk. DialogFlags. MODAL, Gtk. MessageType. INFO, Gtk. ButtonsType. OK, ) if : dlg. set_title ( str ( title ) ) dlg. set_markup ( '%s\n\n%s' % ( title, message ) ) else : dlg. set_markup ( '%s' % ( message ) ) dlg. run ( ) dlg. destroy ( ) else : gpodder. user_extensions. on_notification_show ( title, message )",True,title,title,0.707625150680542 2019,"def devices ( self ) : """"""Wait for new DS4 devices to appear."""""" context = Context ( ) existing_devices = context. list_devices ( subsystem = ""hidraw"" ) future_devices = self. _get_future_devices ( context ) for hidraw_device in itertools. chain ( existing_devices, future_devices ) : hid_device = hidraw_device. parent if hid_device. subsystem!= ""hid"" : continue cls = HID_DEVICES. get ( hid_device. get ( ""HID_NAME"" ) ) if not cls : continue for child in hid_device. parent. children : event_device = child. get ( ""DEVNAME"", """" ) if event_device. startswith ( ""/dev/input/event"" ) : break else : continue try : device_addr = hid_device. get ( ""HID_UNIQ"", """" ). upper ( ) if : device_name = ""{0} {1}"". format ( device_addr, hidraw_device. sys_name ) else : device_name = hidraw_device. sys_name yield cls ( name = device_name, addr = device_addr, type = cls. __type__, hidraw_device = hidraw_device. device_node,",False,device_addr,device_addr and hidraw_device.sys_name,0.6695295572280884 2020,"def iterfieldselect ( source, field, where, complement, missing ) : it = iter ( source ) hdr = next ( it ) yield tuple ( hdr ) indices = asindices ( hdr, field ) getv = operator. itemgetter ( * indices ) for row in it : try : v = getv ( row ) except IndexError : v = missing if : yield tuple ( row )",False,bool(where(v)) != complement,v != None and complement and (v != missing),0.6525516510009766 2021,"def test_connect ( ipaddr, port, device, partition, method, path, headers = None, query_string = None ) : if path == ""/a"" : for k, v in headers. iteritems ( ) : if : break else : test_errors. append ( ""%s: %s not in %s"" % ( test_header, test_value, headers ) )",False,k.lower() == test_header.lower() and v == test_value,test_header in v,0.6469519138336182 2022,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. 
readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. I64 : self. txn_high_water_mark = iprot. readI64 ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. SET : self. open_txns = set ( ) ( _etype416, _size413 ) = iprot. readSetBegin ( ) for _i417 in xrange ( _size413 ) : _elem418 = iprot. readI64 ( ) self. open_txns. add ( _elem418 ) iprot. readSetEnd ( ) else : iprot. skip ( ftype ) elif : ",True,fid == 3,fid == 3,0.6739218831062317 2023,"def _get_dbutils ( ) : try : import IPython ip_shell = IPython. get_ipython ( ) if : raise _NoDbutilsError return ip_shell. ns_table [ ""user_global"" ] [ ""dbutils"" ] except ImportError : raise _NoDbutilsError except KeyError : raise _NoDbutilsError",False,ip_shell is None,'user_global' not in ip_shell.ns_table,0.6661030054092407 2024,"def split_artists ( self, json ) : if len ( json ) == 0 : ( [ ], [ ] ) elif len ( json ) == 1 : artist = Artist. query. filter_by ( name = json [ 0 ] [ ""name"" ] ). first ( ) return ( [ artist ], [ ] ) my_artists = [ ] other_artists = [ ] for artist_dict in json : artist = Artist. query. filter_by ( name = artist_dict [ ""name"" ] ) if : my_artists. append ( artist. first ( ) ) else : del artist_dict [ ""thumb_url"" ] other_artists. append ( artist_dict ) return ( my_artists, other_artists )",False,artist.count(),artist.first(),0.6635749340057373 2025,"def _get_image ( self, image_list, source ) : if source. startswith ( ""wx"" ) : img = wx. ArtProvider_GetBitmap ( source, wx. ART_OTHER, _SIZE ) else : path = os. path. join ( _BASE, source ) if : img = wx. Image ( path, wx. BITMAP_TYPE_GIF ). ConvertToBitmap ( ) else : img = wx. Image ( path, wx. BITMAP_TYPE_PNG ). ConvertToBitmap ( ) return image_list. Add ( img )",False,source.endswith('gif'),os.path.isdir(path),0.6591418981552124 2026,"def _load_windows_store_certs ( self, storename, purpose ) : certs = bytearray ( ) try : for cert, encoding, trust in enum_certificates ( storename ) : if : if trust is True or purpose. oid in trust : certs. extend ( cert ) except OSError : warnings. warn ( ""unable to enumerate Windows certificate store"" ) if certs : self. load_verify_locations ( cadata = certs ) return certs",False,encoding == 'x509_asn',encoding.oid == 'windows',0.6530070304870605 2027,"def process ( self ) : objects = self. inputs [ ""Object"" ]. sv_get ( ) if not objects : return include_vertex = self. outputs [ ""Vertex Mask"" ]. is_linked include_edges = self. outputs [ ""Edge Mask"" ]. is_linked include_faces = self. outputs [ ""Face Mask"" ]. is_linked face_mask, edge_mask, vertex_mask = [ ], [ ], [ ] get_selected_from = lambda data : [ i. select for i in data ] for obj in objects : mesh = obj. data if obj == bpy. context. edit_object : bm = bmesh. from_edit_mesh ( mesh ) face_data = bm. faces edge_data = bm. edges vertex_data = bm. verts else : face_data = mesh. polygons edge_data = mesh. edges vertex_data = mesh. vertices if include_faces : face_mask. append ( get_selected_from ( face_data ) ) if : edge_mask. append ( get_selected_from ( edge_data ) ) if include_vertex : vertex_mask. append ( get_selected_from ( vertex_data ) ) self. outputs [ ""Vertex Mask"" ]. sv_set ( vertex_mask ) self. outputs [ ""Edge Mask"" ]. sv_set ( edge_mask ) self. outputs [ ""Face Mask"" ]. 
sv_set ( face_mask )",True,include_edges,include_edges,0.6661674380302429 2028,"def PyMemoryView_GetContiguous ( obj, buffertype, order_int ) : PyBUF_READ = 0x100 PyBUF_WRITE = 0x200 assert buffertype == PyBUF_READ or buffertype == PyBUF_WRITE order = chr ( order_int ) assert order == ""C"" or order == ""F"" or order == ""A"" mv = memoryview ( obj ) release = True try : if buffertype == PyBUF_WRITE and mv. readonly : raise BufferError ( ""underlying buffer is not writable"" ) if mv. contiguous : release = False return mv if buffertype == PyBUF_WRITE : raise BufferError ( ""writable contiguous buffer requested for a non-contiguous object."" ) mv_bytes = memoryview ( mv. tobytes ( order ) ) if : return mv_bytes else : try : return mv_bytes. cast ( mv. format ) finally : mv_bytes. release ( ) finally : if release : mv. release ( )",False,mv.format == 'B',mv.format is None,0.656246542930603 2029,"def monitor_filter ( self ) : """"""Return filtered running container objects list"""""" running_containers = self. running_filter ( ) monitored_containers = [ ] for container in running_containers : ouro_label = container. labels. get ( ""com.ouroboros.enable"", False ) if self. config. label_enable and ouro_label : if : monitored_containers. append ( container ) else : continue elif not self. config. labels_only : if self. config. monitor : if ( container. name in self. config. monitor and container. name not in self. config. ignore ) : monitored_containers. append ( container ) elif container. name not in self. config. ignore : monitored_containers. append ( container ) self. data_manager. monitored_containers [ self. socket ] = len ( monitored_containers ) self. data_manager. set ( self. socket ) return monitored_containers",False,"ouro_label.lower() in ['true', 'yes']",self.config.test_mode,0.6508021950721741 2030,"def get_source ( self, fullname = None ) : fullname = self. _fix_name ( fullname ) if self. source is None : mod_type = self. etc [ 2 ] if mod_type == imp. PY_SOURCE : self. _reopen ( ) try : self. source = self. file. read ( ) finally : self. file. close ( ) elif : if os. path. exists ( self. filename [ : - 1 ] ) : f = open ( self. filename [ : - 1 ], ""rU"" ) self. source = f. read ( ) f. close ( ) elif mod_type == imp. PKG_DIRECTORY : self. source = self. _get_delegate ( ). get_source ( ) return self. source",False,mod_type == imp.PY_COMPILED,mod_type == imp.FILE_FILE,0.6570153832435608 2031,"def scan_address ( ip_address, ** kwargs ) : snmp_name = kwargs. get ( ""snmp_name"", """" ) or """" http_family = kwargs. get ( ""http_family"", """" ) or """" if ""nx-os"" in snmp_name. lower ( ) : raise NoMatchError ( ""Incompatible Nexus found."" ) if ""xen"" not in snmp_name and ""xen"" not in http_family. lower ( ) : raise NoMatchError ( ""XEN not found."" ) auths = SETTINGS. get ( ""xen_auths"" ) messages = [ ] result = get_base_result_template ( ""ssh_xen"", messages ) if not auths : result [ ""status"" ] = ""error"" messages. append ( ""Not configured. Set XEN_AUTHS in your configuration file."", ) else : for user, password in auths : if user is None or password is None : continue try : ssh = _connect_ssh ( ip_address, user, password ) except AuthError : ssh = None continue else : break if : result [ ""status"" ] = ""error"" messages. append ( ""Authorization failed."" ) else : try : device_info = _ssh_xen ( ssh, ip_address ) except ( Error,",False,not ssh,ssh is None,0.6746110320091248 2032,"def get_location ( device ) : location = [ ] node = device while node : position = node. 
get_position ( ) or """" if : position = "" [%s]"" % position location. append ( node. name + position ) node = node. parent return "" / "". join ( reversed ( location ) )",True,position,position,0.6921048760414124 2033,"def _analyze ( self ) : region = self. _region. recursive_copy ( ) parent_map = { } stack = [ region ] while stack : current_region = stack [ - 1 ] has_region = False for node in networkx. dfs_postorder_nodes ( current_region. graph, current_region. head ) : subnodes = [ ] if type ( node ) is GraphRegion : if : subnodes. append ( node ) else : subnodes. insert ( 0, node ) parent_map [ node ] = current_region has_region = True stack. extend ( subnodes ) if not has_region : stack. pop ( ) parent_region = parent_map. get ( current_region, None ) st = self. project. analyses. Structurer ( current_region, parent_map = parent_map, condition_processor = self. cond_proc, ) if not parent_region : self. result = st. result if not self. fails : return for fail in self. fails : if : break else : raise AssertionError ( ""Unexpected success for '%s' (%s)"" % ( name, "" and "". join ( fail. _as_string ( config ) for fail in self. fails ) ) )",False,not fail(config),fail.fail_code == 0,0.6516794562339783 2035,"def test_mix_alloc_dealloc5_3_7 ( ) : allocs = mixed_alloc_dealloc_list ( [ 5, 3, 7 ], count = 50 ) capacity = sum ( [ a for a in allocs if a > 0 ] ) allocator = RegionAllocator ( capacity ) regions = [ ] for alloc in allocs : if : regions. append ( allocator. alloc ( alloc ) ) else : region = regions [ abs ( alloc ) ] allocator. dealloc ( region ) assert allocator. get_free_size ( ) == allocator. capacity",False,alloc > 0,count > 10,0.6734553575515747 2036,"def __save_ledger_entry_as_je ( self, ledger_entry, quickbooks_id ) : try : accounts = [ ] for line in ledger_entry [ ""lines"" ] : account_line = { ""account"" : line [ ""account"" ], ""cost_center"" : self. default_cost_center, } if : account_line [ ""credit_in_account_currency"" ] = line [ ""credit"" ] else : account_line [ ""debit_in_account_currency"" ] = line [ ""debit"" ] accounts. append ( account_line ) posting_date = ledger_entry [ ""date"" ] self. __save_journal_entry ( quickbooks_id, accounts, posting_date ) except Exception as e : self. _log_error ( e, ledger_entry )",False,line['credit'],"isinstance(line, dict)",0.6667290925979614 2037,def stimulus ( ) : stopped. next = 0 yield delay ( 10 ) exp = intbv ( 0 ) [ W : ] val = intbv ( 0 ) [ W : ] random_word = intbv ( 0 ) [ 32 : ] random_word [ : ] = 93 for i in range ( 2 ** 18 ) : exp [ : ] = 0 for s in range ( L ) : random_word [ : ] = glibc_random ( random_word ) val [ : ] = random_word [ W : ] if : exp [ : ] = val a [ s ]. next = val yield clock. negedge assert z == exp stopped. next = 1 yield delay ( 10 ),False,exp < val,s == 0,0.6713956594467163 2038,"def __init__ ( self, coverage ) : self. coverage = coverage self. config = self. coverage. config self. source_paths = set ( ) if self. config. source : for src in self. config. source : if : if not self. config. relative_files : src = files. canonical_filename ( src ) self. source_paths. add ( src ) self. packages = { } self. xml_out = None",False,os.path.exists(src),src and src.endswith('.git'),0.6563808917999268 2039,"def _parse_tag_specifications ( self, spec ) : try : tag_spec_num = max ( [ int ( key. 
split ( ""."" ) [ 1 ] ) for key in spec if : ] ) except ValueError : return { } tag_specifications = { } for si in range ( 1, tag_spec_num + 1 ) : resource_type = spec [ ""tag_specification_set.{si}._resource_type"". format ( si = si ) ] tags = [ key for key in spec if key. startswith ( ""tag_specification_set.{si}._tag"". format ( si = si ) ) ] tag_num = max ( [ int ( key. split ( ""."" ) [ 3 ] ) for key in tags ] ) tag_specifications [ resource_type ] = dict ( ( spec [ ""tag_specification_set.{si}._tag.{ti}._key"". format ( si = si, ti = ti ) ], spec [ ""tag_specification_set.{si}._tag.{ti}._value"". format ( si = si, ti = ti ) ], ) for ti in range ( 1, tag_num + 1 ) ) return tag_specifications",False,key.startswith('tag_specification_set'),tag_spec_num == 0,0.6500779390335083 2040,"def _build_index ( self ) : self. _index = { } for start_char, sorted_offsets in self. _offsets. items ( ) : self. _index [ start_char ] = { } for i, offset in enumerate ( sorted_offsets. get_offsets ( ) ) : identifier = sorted_offsets. get_identifier_by_offset ( offset ) if : self. _index [ start_char ] [ identifier [ 0 : self. index_depth ] ] = i",False,identifier[0:self.index_depth] not in self._index[start_char],identifier[self.index_depth] > 0,0.6565222144126892 2041,"def custom_generator ( use_weights = False ) : batch_size = 10 n_samples = 50 while True : batch_index = np. random. randint ( 0, n_samples - batch_size ) start = batch_index end = start + batch_size X = arr_data [ start : end ] y = arr_labels [ start : end ] if : w = arr_weights [ start : end ] yield X, y, w else : yield X, y",True,use_weights,use_weights,0.6656997203826904 2042,"def _output_step ( step_input_def, step_output_def ) : multiple = False if step_input_def in [ ""dataset"", ""dataset_multiple"" ] : input_type = ""dataset"" collection_types = None if : multiple = True else : input_type = ""dataset_collection"" collection_types = ( step_input_def if isinstance ( step_input_def, list ) else [ step_input_def ] ) output = { ""name"" : ""output"", ""extensions"" : [ ""data"" ] } if step_output_def!= ""dataset"" : output [ ""collection"" ] = True output [ ""collection_type"" ] = step_output_def input_connection_input = [ { ""id"" : 0, ""output_name"" : ""output"", ""input_type"" : input_type } ] if step_input_def == ""dataset"" : input_connection_input = input_connection_input [ 0 ] return { ""id"" : 1, ""type"" : ""tool"", ""inputs"" : [ { ""name"" : ""input"", ""multiple"" : multiple, ""input_type"" : input_type, ""collection_types"" : collection_types, ""extensions"" : [ ""data"" ], } ], ""input_connections"" : { """,False,step_input_def == 'dataset_multiple',step_input_def == 'multiple',0.6507815718650818 2043,"def test_set_get_priority ( self ) : base = os. getpriority ( os. PRIO_PROCESS, os. getpid ( ) ) os. setpriority ( os. PRIO_PROCESS, os. getpid ( ), base + 1 ) try : new_prio = os. getpriority ( os. PRIO_PROCESS, os. getpid ( ) ) if : raise unittest. SkipTest ( ""unable to reliably test setpriority "" ""at current nice level of %s"" % base ) else : self. assertEqual ( new_prio, base + 1 ) finally : try : os. setpriority ( os. PRIO_PROCESS, os. getpid ( ), base ) except OSError as err : if err. errno!= errno. EACCES : raise",False,base >= 19 and new_prio <= 19,"new_prio == os.getpriority(os.PRIO_PROCESS, os.getpid(base))",0.6617233753204346 2044,"def convert_to_py2 ( ) : global PY2_CONVERTED if ""+"" not in version : return if source_dir == ""python2_source"" and not PY2_CONVERTED : try : subprocess. 
check_output ( [ ""3to2"", ""--help"" ] ) subprocess. check_output ( [ ""pasteurize"", ""--help"" ] ) except OSError as e : if : raise if not os. path. exists ( os. path. join ( source_dir, ""pylatex"" ) ) : raise ImportError ( ""3to2 and future need to be installed "" ""before installing when PyLaTeX for Python "" ""2.7 when it is not installed using one of "" ""the pip releases."" ) else : converter = ( os. path. dirname ( os. path. realpath ( __file__ ) ) + ""/convert_to_py2.sh"" ) subprocess. check_call ( [ converter ] ) PY2_CONVERTED = True",False,e.errno != errno.ENOENT,e.args[0] != 0,0.6606593132019043 2045,"def __handle_death_before_start ( self, args ) : if self. _exc_info is None and self. dead : if len ( args ) == 1 : arg = args [ 0 ] if : args = ( arg, arg ( ), None ) else : args = ( type ( arg ), arg, None ) elif not args : args = ( GreenletExit, GreenletExit ( ), None ) assert issubclass ( args [ 0 ], BaseException ) self. __report_error ( args )",False,"issubclass(arg, BaseException)",callable(arg),0.6623687744140625 2046,"def check_for_elastic_ip ( ec2_info ) : elastic_ips = [ ] for region in ec2_info [ ""regions"" ] : if ""elastic_ips"" in ec2_info [ ""regions"" ] [ region ] : for eip in ec2_info [ ""regions"" ] [ region ] [ ""elastic_ips"" ] : elastic_ips. append ( eip ) new_items = [ ] new_macro_items = [ ] for i, item in enumerate ( ec2_info [ ""violations"" ] [ ""non-elastic-ec2-public-ip-whitelisted"" ]. items ) : ip = netaddr. IPNetwork ( item ) found = False for eip in elastic_ips : eip = netaddr. IPNetwork ( eip ) if ip in eip : found = True break if : new_items. append ( ec2_info [ ""violations"" ] [ ""non-elastic-ec2-public-ip-whitelisted"" ]. items [ i ] ) new_macro_items. append ( ec2_info [ ""violations"" ] [ ""non-elastic-ec2-public-ip-whitelisted"" ]. macro_items [ i ] ) ec2_info [ ""violations"" ] [ ""non-elastic-ec2-public-ip-whitelisted"" ]",False,not found,found,0.6775128245353699 2047,"def testTokenProcessMetadata ( self ) : from music21. abcFormat import testFiles for ( tf, titleEncoded, meterEncoded, keyEncoded ) in [ ( testFiles. fyrareprisarn, ""Fyrareprisarn"", ""3/4"", ""F"" ), ( testFiles. mysteryReel, ""Mystery Reel"", ""C|"", ""G"" ), ( testFiles. aleIsDear, ""Ale is Dear, The"", ""4/4"", ""D"", ), ( testFiles. kitchGirl, ""Kitchen Girl"", ""4/4"", ""D"" ), ( testFiles. williamAndNancy, ""William and Nancy"", ""6/8"", ""G"" ), ] : handler = ABCHandler ( ) handler. tokenize ( tf ) handler. tokenProcess ( ) tokens = handler. tokens for t in tokens : if isinstance ( t, ABCMetadata ) : if t. tag == ""T"" : self. assertEqual ( t. data, titleEncoded ) elif t. tag == ""M"" : self. assertEqual ( t. data, meterEncoded ) elif : self. assertEqual ( t. data, keyEncoded )",True,t.tag == 'K',t.tag == 'K',0.658808708190918 2048,"def _to_group_name ( self, field ) : group = field. replace ( ""."", ""_"" ). replace ( ""["", ""_"" ). replace ( ""]"", ""_"" ) n = 1 while group in self. _group_to_name_map : n += 1 if : group = field. replace ( ""."", ""_"" * n ) elif ""_"" in field : group = field. replace ( ""_"", ""_"" * n ) else : raise KeyError ( ""duplicated group name %r"" % ( field, ) ) self. _group_to_name_map [ group ] = field self. _name_to_group_map [ field ] = group return group",False,'.' in field,'' in field,0.6703853011131287 2049,"def _build_for_eval ( self, ds ) : ds. name = ""eval"" program = F. Program ( ) startup_prog = F. Program ( ) with F. program_guard ( program, startup_prog ) : log. info ( ""Building Eval Graph"" ) fea = ds. 
features ( ) fea = unflatten ( fea, ds. data_schema ) model_spec = _build_net ( self. model_fn, fea, RunMode. EVAL, self. params, self. run_config ) log. info ( ""Done"" ) optimizer_ops = { ""sgd"", ""adam"", ""adagrad"" } for op in program. global_block ( ). ops : if op. type == ""dropout"" : op. _set_attr ( ""is_test"", True ) if op. type == ""batch_norm"" : op. _set_attr ( ""is_test"", True ) if : raise RuntimeError ( ""Found optimizer op in eval graph, op: %s"" % repr ( op ) ) log. info ( ""Eval with: \n> Run_config: %s\n> Params: %s\n> Train_model_spec: %s\n"" % ( repr ( self. run_config ), repr ( self. params ), repr ( model_spec ) ) ) return ProgramPair ( train_program = program, startup_program = startup_prog ), model_spec",False,op.type in optimizer_ops,op.type == 'eval_graph',0.6562021970748901 2050,"def create_unix_server ( self, protocol_factory, path = None, *, sock = None, backlog = 100, ssl = None ) : if isinstance ( ssl, bool ) : raise TypeError ( ""ssl argument must be an SSLContext or None"" ) if path is not None : if sock is not None : raise ValueError ( ""path and sock can not be specified at the same time"" ) sock = socket. socket ( socket. AF_UNIX, socket. SOCK_STREAM ) try : sock. bind ( path ) except OSError as exc : sock. close ( ) if exc. errno == errno. EADDRINUSE : msg = ""Address {!r} is already in use"". format ( path ) raise OSError ( errno. EADDRINUSE, msg ) from None else : raise except : sock. close ( ) raise else : if sock is None : raise ValueError ( ""path was not specified, and no sock specified"" ) if : raise ValueError ( ""A UNIX Domain Socket was expected, got {!r}"". format ( sock ) ) server = base_events. Server ( self, [ sock ] ) sock. listen ( backlog ) sock. setblocking ( False ) self. _start_serving ( protocol_factory, sock, ssl, server ) return server",False,sock.family != socket.AF_UNIX,sock is None,0.6491050124168396 2051,"def test_fvalue ( self ) : if not getattr ( self, ""skip_f"", False ) : rtol = getattr ( self, ""rtol"", 1e-10 ) assert_allclose ( self. res1. fvalue, self. res2. F, rtol = rtol ) if : assert_allclose ( self. res1. f_pvalue, self. res2. Fp, rtol = rtol ) else : raise pytest. skip ( ""TODO: document why this test is skipped"" )",False,"hasattr(self.res2, 'Fp')","getattr(self, 'skip_pvalue', False)",0.6479915976524353 2052,"def can_delete ( conn, vol, path ) : """"""Is the passed path even deleteable"""""" ret = True msg = None if vol : if vol. get_pool ( ). get_type ( ) == virtinst. Storage. StoragePool. TYPE_ISCSI : msg = _ ( ""Cannot delete iscsi share."" ) else : if conn. is_remote ( ) : msg = _ ( ""Cannot delete unmanaged remote storage."" ) elif not os. path. exists ( path ) : msg = _ ( ""Path does not exist."" ) elif : msg = _ ( ""No write access to parent directory."" ) elif stat. S_ISBLK ( os. stat ( path ) [ stat. ST_MODE ] ) : msg = _ ( ""Cannot delete unmanaged block device."" ) if msg : ret = False return ( ret, msg )",False,"not os.access(os.path.dirname(path), os.W_OK)",not os.path.isdir(path),0.6497271656990051 2053,"def __print_receiver ( self, package, receiver, prefix, include_intent_filters = False ) : self. stdout. write ( ""%s%s\n"" % ( prefix, receiver. name ) ) if include_intent_filters : intent_filters = self. find_intent_filters ( receiver, ""receiver"" ) if : for intent_filter in intent_filters : self. stdout. write ( ""%s Intent Filter:\n"" % ( prefix ) ) if len ( intent_filter. actions ) > 0 : self. stdout. write ( ""%s Actions:\n"" % ( prefix ) ) for action in intent_filter. actions : self. 
stdout. write ( ""%s - %s\n"" % ( prefix, action ) ) if len ( intent_filter. categories ) > 0 : self. stdout. write ( ""%s Categories:\n"" % ( prefix ) ) for category in intent_filter. categories : self. stdout. write ( ""%s - %s\n"" % ( prefix, category ) ) if len ( intent_filter. datas ) > 0 : self. stdout. write ( ""%s Data:\n"" % ( prefix ) ) for data in intent_filter. datas : self. stdout. write ( ""%s - %s\n"" % ( prefix, data ) ) self",False,len(intent_filters) > 0,intent_filters,0.6563029289245605 2054,"def filter_out_test_code ( file_handle ) : found_test_code = False for line in file_handle. readlines ( ) : if line. startswith ( "".. testcode:"" ) : found_test_code = True continue if : if line. startswith ( "" "" ) : continue else : empty_line = line. strip ( ) if len ( empty_line ) == 0 : continue else : found_test_code = False yield line else : for keyword in [ ""|version|"", ""|today|"" ] : if keyword in line : break else : yield line",False,found_test_code is True,found_test_code,0.6561294794082642 2055,"def dataspec ( config ) : master = yield fakemaster. make_master ( ) data = connector. DataConnector ( ) data. setServiceParent ( master ) if config [ ""out"" ]!= ""--"" : dirs = os. path. dirname ( config [ ""out"" ] ) if : os. makedirs ( dirs ) f = open ( config [ ""out"" ], ""w"" ) else : f = sys. stdout if config [ ""global"" ] is not None : f. write ( ""window."" + config [ ""global"" ] + ""="" ) f. write ( json. dumps ( data. allEndpoints ( ), indent = 2 ) ) f. close ( ) defer. returnValue ( 0 )",False,dirs and (not os.path.exists(dirs)),not os.path.exists(dirs),0.6531575918197632 2056,"def _condition ( ct ) : for qobj in args : if : for child in qobj. children : kwargs. update ( dict ( [ child ] ) ) else : raise NotImplementedError ( ""Unsupported Q object"" ) for attr, val in kwargs. items ( ) : if getattr ( ct, attr )!= val : return False return True",False,qobj.connector == 'AND' and (not qobj.negated),"isinstance(qobj, QObject)",0.6548824310302734 2057,"def get_filtering ( self ) : """"""Return filering as a dict for submissions queryset"""""" self. select_date_form = SelectDateForm ( self. request. GET ) result = dict ( ) if self. select_date_form. is_valid ( ) : date_from = self. select_date_form. cleaned_data. get ( ""date_from"" ) date_to = self. select_date_form. cleaned_data. get ( ""date_to"" ) if date_to : date_to += datetime. timedelta ( days = 1 ) if : result [ ""submit_time__range"" ] = [ date_from, date_to ] else : result [ ""submit_time__lte"" ] = date_to el if : result [ ""submit_time__gte"" ] = date_from return result",False,date_from,self.request.POST,0.660735547542572 2058,"def test_bgr2hls ( ) : in_img = np. random. rand ( 10, 10, 3 ). astype ( np. float32 ) out_img = mmcv. bgr2hls ( in_img ) argmax = in_img. argmax ( axis = 2 ) computed_hls = np. empty_like ( in_img ) for i in range ( in_img. shape [ 0 ] ) : for j in range ( in_img. shape [ 1 ] ) : b, g, r = in_img [ i, j ] maxc = max ( r, g, b ) minc = min ( r, g, b ) _l = ( minc + maxc ) / 2.0 if minc == maxc : h = 0.0 s = 0.0 if _l <= 0.5 : s = ( maxc - minc ) / ( maxc + minc ) else : s = ( maxc - minc ) / ( 2.0 - maxc - minc ) if argmax [ i, j ] == 2 : h = 60 * ( g - b ) / ( maxc - minc ) elif argmax [ i, j ] == 1 : h = 60 * ( 2.0 + ( b - r ) / ( maxc - minc ) ) else : h = 60 * ( 4.0 + ( r - g ) / ( maxc - minc ) ) ",False,h < 0,np.random.random() < 0.6,0.67082279920578 2059,"def _fixture_setup ( self ) : for db_name in self. _databases_names ( include_mirrors = False ) : if : self. 
_reset_sequences ( db_name ) if self. serialized_rollback and hasattr ( connections [ db_name ], ""_test_serialized_contents"" ) : if self. available_apps is not None : apps. unset_available_apps ( ) connections [ db_name ]. creation. deserialize_db_from_string ( connections [ db_name ]. _test_serialized_contents ) if self. available_apps is not None : apps. set_available_apps ( self. available_apps ) if self. fixtures : call_command ( ""loaddata"", * self. fixtures, ** { ""verbosity"" : 0, ""database"" : db_name } )",False,self.reset_sequences,self.has_sequences,0.6590416431427002 2060,"def _parse_json_track ( self, json ) : formats = [ ] file_ = json. get ( ""file"" ) if isinstance ( file_, dict ) : for format_id, format_url in file_. items ( ) : if : continue ext, abr_str = format_id. split ( ""-"", 1 ) formats. append ( { ""format_id"" : format_id, ""url"" : self. _proto_relative_url ( format_url, ""http:"" ), ""ext"" : ext, ""vcodec"" : ""none"", ""acodec"" : ext, ""abr"" : int_or_none ( abr_str ), } ) return { ""duration"" : float_or_none ( json. get ( ""duration"" ) ), ""id"" : str_or_none ( json. get ( ""track_id"" ) or json. get ( ""id"" ) ), ""title"" : json. get ( ""title"" ), ""title_link"" : json. get ( ""title_link"" ), ""number"" : int_or_none ( json. get ( ""track_num"" ) ), ""formats"" : formats, }",False,not url_or_none(format_url),format_url is None,0.6458032131195068 2061,"def _indent ( self, row, step ) : line, level, bullet = self [ row ] self. _indent_row ( row, step ) if row == 0 : for i in range ( 1, len ( self ) ) : if self [ i ] [ self. INDENT_COL ] >= level : self. _indent_row ( i, step ) else : break else : for i in range ( row + 1, len ( self ) ) : if : self. _indent_row ( i, step ) else : break self. buffer. renumber_list_after_indent ( line, level )",False,self[i][self.INDENT_COL] > level,bullet,0.6587383151054382 2062,"def process_style_box ( self, options ) : if self. has_form ( ""StyleBox"", 1, None ) : rules = self. _leaves [ 1 : ] for rule in rules : if rule. has_form ( ""Rule"", 2 ) : name = rule. _leaves [ 0 ]. get_name ( ) value = rule. _leaves [ 1 ] if name == ""System`ShowStringCharacters"" : value = value. is_true ( ) options = options. copy ( ) options [ ""show_string_characters"" ] = value elif : if value. has_form ( ""List"", 2 ) : m1 = value. _leaves [ 0 ]. round_to_float ( ) m2 = value. _leaves [ 1 ]. round_to_float ( ) if m1 is not None and m2 is not None : options = options. copy ( ) options [ ""image_size_multipliers"" ] = ( m1, m2 ) return True, options else : return False, options",False,name == 'System`ImageSizeMultipliers',"has_form('List', 1)",0.6658504009246826 2063,"def add_actors ( self ) : """"""Adds `self.actors` to the scene."""""" if not self. _actors_added : self. reader. render_window = self. scene. render_window self. _update_reader ( ) self. _actors_added = True if : self. _visible_changed ( self. visible ) self. scene. render ( )",False,not self.visible,self.visible is not None,0.6624195575714111 2064,"def file_fetch ( fh, fromTime, untilTime, now = None, archiveToSelect = None ) : header = __readHeader ( fh ) if now is None : now = int ( time. 
time ( ) ) if untilTime is None : untilTime = now fromTime = int ( fromTime ) untilTime = int ( untilTime ) if fromTime > untilTime : raise InvalidTimeInterval ( ""Invalid time interval: from time '%s' is after until time '%s'"" % ( fromTime, untilTime ) ) oldestTime = now - header [ ""maxRetention"" ] if fromTime > now : return None if untilTime < oldestTime : return None if fromTime < oldestTime : fromTime = oldestTime if untilTime > now : untilTime = now diff = now - fromTime if archiveToSelect : retentionStr = str ( archiveToSelect ) + "":1"" archiveToSelect = parseRetentionDef ( retentionStr ) [ 0 ] for archive in header [ ""archives"" ] : if archiveToSelect : if archive [ ""secondsPerPoint"" ] == archiveToSelect : break archive = None else : if : break if archiveToSelect and not archive : raise ValueError ( ""Invalid granularity: %s"" % ( archiveToSelect ) ) return __archive_fetch ( fh, archive, fromTime, untilTime )",False,archive['retention'] >= diff,not archive,0.6581726670265198 2065,"def get_operation_order_from_stack ( state ) : stack_items = list ( reversed ( stack ( ) ) ) if state. current_deploy_filename : for i, stack_item in enumerate ( stack_items ) : frame = getframeinfo ( stack_item [ 0 ] ) if : break else : i = 0 line_numbers = [ ] for stack_item in stack_items [ i : ] : frame = getframeinfo ( stack_item [ 0 ] ) if frame. filename. startswith ( PYINFRA_API_DIR ) : continue if state. loop_filename and frame. filename == state. loop_filename : line_numbers. extend ( [ state. loop_line, state. loop_counter ] ) line_numbers. append ( frame. lineno ) del stack_items return line_numbers",False,frame.filename == state.current_deploy_filename,frame.filename.startswith(PYINFRA_API_DIR),0.6496627330780029 2066,"def parseXmlNode ( node ) : for element in node. findall ( ""boundary"" ) : boundary = AttribDict ( ) for child in element : if : values = cleanupVals ( child. text, child. tag ) boundary [ child. tag ] = values else : boundary [ child. tag ] = None conf. boundaries. append ( boundary ) for element in node. findall ( ""test"" ) : test = AttribDict ( ) for child in element : if child. text and child. text. strip ( ) : values = cleanupVals ( child. text, child. tag ) test [ child. tag ] = values else : if len ( child. findall ( ""*"" ) ) == 0 : test [ child. tag ] = None continue else : test [ child. tag ] = AttribDict ( ) for gchild in child : if gchild. tag in test [ child. tag ] : prevtext = test [ child. tag ] [ gchild. tag ] test [ child. tag ] [ gchild. tag ] = [ prevtext, gchild. text ] else : ",False,child.text,child.tag == 'con',0.6656943559646606 2067,"def usgs_eros ( self, scene, path ) : """"""Downloads the image from USGS"""""" if self. usgs_user and self. usgs_pass : try : api_key = api. login ( self. usgs_user, self. usgs_pass ) except USGSError as e : error_tree = ElementTree. fromstring ( str ( e. message ) ) error_text = error_tree. find ( ""SOAP-ENV:Body/SOAP-ENV:Fault/faultstring"", api. NAMESPACES ). text raise USGSInventoryAccessMissing ( error_text ) download_url = api. download ( ""LANDSAT_8"", ""EE"", [ scene ], api_key = api_key ) if : self. output ( ""Source: USGS EarthExplorer"", normal = True, arrow = True ) return self. 
fetch ( download_url [ 0 ], path ) raise RemoteFileDoesntExist ( ""%s is not available on AWS S3, Google or USGS Earth Explorer"" % scene ) raise RemoteFileDoesntExist ( ""%s is not available on AWS S3 or Google Storage"" % scene )",True,download_url,download_url,0.6673058271408081 2068,"def _unbyte ( d ) : if d is None : return for k, v in list ( d. items ( ) ) : if : del d [ k ] d [ k. decode ( ""utf8"" ) ] = v if isinstance ( v, dict ) : _unbyte ( v ) for k, v in d. items ( ) : if isinstance ( v, ( list, tuple ) ) : l = [ ] for sub in v : if isinstance ( sub, dict ) : l. append ( _unbyte ( sub ) ) elif isinstance ( sub, bytes ) : l. append ( sub. decode ( ""utf8"" ) ) else : l. append ( sub ) d [ k ] = tuple ( l ) elif isinstance ( v, bytes ) : try : d [ k ] = v. decode ( ""utf8"" ) except UnicodeDecodeError : d [ k ] = v return d",False,"isinstance(k, bytes)",k in d,0.6499552726745605 2069,"def generateRandomConfigurations ( parser, randomizer ) : while True : randomArgs = randomizer. getRandomSubset ( ) if : randomArgs. append ( ""--run-with-valgrind"" ) build_options = parser. parse_args ( randomArgs ) if areArgsValid ( build_options ) [ 0 ] : build_options. build_options_str = "" "". join ( randomArgs ) build_options. enableRandom = ( True ) return build_options",False,'--build-with-valgrind' in randomArgs and chance(0.95),len(randomArgs) > 0,0.6532500982284546 2070,"def _get_activity ( self, request, group, num ) : activity_items = set ( ) activity = [ ] activity_qs = ( Activity. objects. filter ( group = group, ) . order_by ( ""-datetime"" ) . select_related ( ""user"" ) ) for item in activity_qs [ : num * 2 ] : sig = ( item. type, item. ident, item. user_id ) if : activity. append ( item ) elif sig not in activity_items : activity_items. add ( sig ) activity. append ( item ) activity. append ( Activity ( project = group. project, group = group, type = Activity. FIRST_SEEN, datetime = group. first_seen, ) ) return activity [ : num ]",False,item.type == Activity.NOTE,sig == -datetime,0.6628049612045288 2071,def _exitfunc ( cls ) : reenable_gc = False try : if cls. _registry : import gc if : reenable_gc = True gc. disable ( ) pending = None while True : if pending is None or finalize. _dirty : pending = cls. _select_for_exit ( ) finalize. _dirty = False if not pending : break f = pending. pop ( ) try : f ( ) except Exception : sys. excepthook ( * sys. exc_info ( ) ) assert f not in cls. _registry finally : finalize. _shutdown = True if reenable_gc : gc. enable ( ),False,gc.isenabled(),gc is False,0.6595113277435303 2072,"def _performance_by_month ( user_id, months = 12, end_month = None, end_year = None ) : monthly_data = OrderedDict ( ) now = datetime. now ( ) if not end_month : end_month = now. month if not end_year : end_year = now. year end_time = time. mktime ( ( end_year, end_month + 1, 1, 0, 0, 0, 0, 0, - 1 ) ) start_time = time. mktime ( ( end_year, end_month + 1 - months, 1, 0, 0, 0, 0, 0, - 1 ) ) sql = PerformanceGraph. objects. filter_raw ( ""log_activity.created >="", date. fromtimestamp ( start_time ). isoformat ( ) ). filter_raw ( ""log_activity.created <"", date. fromtimestamp ( end_time ). isoformat ( ) ) for row in sql. all ( ) : label = row. approval_created. isoformat ( ) [ : 7 ] if : xaxis = row. approval_created. 
strftime ( ""%b %Y"" ) monthly_data [ label ] = dict ( teamcount = 0, usercount = 0, teamamt = 0, label = xaxis ) monthly_data [ label ] [ ""teamamt"" ] = monthly_data [ label ] [ ""teamamt"" ] + 1 monthly_data_count = monthly_data [ label ] [ ""teamcount"" ] monthly_data [ label ] [ ""teamcount"" ] = monthly_data_count + row. total if row. user_id == user_id : user_count = monthly_data [ label ] [ ""usercount"" ] monthly_data [ label ] [ ""usercount"" ] = user_count + row. total if DEV : cmd = ShellCommand ( ""docker"", ""ps"", ""-qf"", ""name=%s"" % self. path. k8s ) if not cmd. check ( f""docker check for {self.path.k8s}"" ) : if : log_cmd = ShellCommand ( ""docker"", ""logs"", self. path. k8s, stderr = subprocess. STDOUT ) if log_cmd. check ( f""docker logs for {self.path.k8s}"" ) : print ( cmd. stdout ) pytest. exit ( f""container failed to start for {self.path.k8s}"" ) return ( )",False,not cmd.stdout.strip(),not cmd.check(f 'docker run < /dev/null 2>&1' + self.path.k8s),0.6573504209518433 2074,"def __init__ ( self, backend, key, algorithm, ctx = None ) : self. _algorithm = algorithm self. _backend = backend if ctx is None : ctx = self. _backend. _lib. Cryptography_HMAC_CTX_new ( ) self. _backend. openssl_assert ( ctx!= self. _backend. _ffi. NULL ) ctx = self. _backend. _ffi. gc ( ctx, self. _backend. _lib. Cryptography_HMAC_CTX_free ) evp_md = self. _backend. _lib. EVP_get_digestbyname ( algorithm. name. encode ( ""ascii"" ) ) if : raise UnsupportedAlgorithm ( ""{0} is not a supported hash on this backend."". format ( algorithm. name ), _Reasons. UNSUPPORTED_HASH, ) res = self. _backend. _lib. Cryptography_HMAC_Init_ex ( ctx, key, len ( key ), evp_md, self. _backend. _ffi. NULL ) self. _backend. openssl_assert ( res!= 0 ) self. _ctx = ctx self. _key = key",False,evp_md == self._backend._ffi.NULL,evp_md and evp_md < self._backend.hash,0.6596512794494629 2075,"def format_args ( self, args, has_variadic, *, include_defaults = True ) : if not args : return """" args_buf = [ ] for argi, arg in enumerate ( args, 1 ) : vararg = has_variadic and ( len ( args ) == argi ) arg_expr = ""VARIADIC "" if vararg else """" if isinstance ( arg, tuple ) : if arg [ 0 ] is not None : arg_expr += qn ( arg [ 0 ] ) if len ( arg ) > 1 : arg_expr += "" "" + qt ( arg [ 1 ] ) if : if len ( arg ) > 2 and arg [ 2 ] is not None : arg_expr += "" = "" + arg [ 2 ] else : arg_expr = arg args_buf. append ( arg_expr ) return "", "". join ( args_buf )",True,include_defaults,include_defaults,0.6723951101303101 2076,"def _get_chunks ( self, shape, max_shape, chunks, dtype, chunksize ) : if chunks is None : prod = _tuple_product ( max_shape [ 1 : ] ) if dtype == ""object"" : return ( self. _object_chunking, ) + max_shape [ 1 : ] if : sz = dtype. itemsize chunks = int ( math. ceil ( chunksize / ( prod * sz ) ) ) return ( chunks, ) + max_shape [ 1 : ] else : return ( 1, ) + self. _determine_chunksizes ( max_shape [ 1 : ], dtype, chunksize ) elif isinstance ( chunks, int ) : assert chunks > 0 if chunks > 1 : return ( chunks, ) + tuple ( [ s or ms for s, ms in zip ( shape [ 1 : ], max_shape [ 1 : ] ) ] ) else : return ( 1, ) + self. _determine_chunksizes ( max_shape [ 1 : ], dtype, chunksize ) else : chunks = tuple ( chunks ) if len ( chunks ) == 1 : return self. 
_get_chunks ( shape, max_shape, chunks [ 0 ], dtype, chunksize ) else : assert len ( chunks ) == len ( shape ) assert chunks [ 0 ] == 1 return chunks",False,prod <= 2 * chunksize,dtype == 'float32',0.6815425753593445 2077,"def _test ( ) : testfiles = [ arg for arg in sys. argv [ 1 : ] if arg and arg [ 0 ]!= ""-"" ] if not testfiles : name = os. path. basename ( sys. argv [ 0 ] ) if ""__loader__"" in globals ( ) : name, _ = os. path. splitext ( name ) print ( ""usage: {0} [-v] file..."". format ( name ) ) return 2 for filename in testfiles : if filename. endswith ( "".py"" ) : dirname, filename = os. path. split ( filename ) sys. path. insert ( 0, dirname ) m = __import__ ( filename [ : - 3 ] ) del sys. path [ 0 ] failures, _ = testmod ( m ) else : failures, _ = testfile ( filename, module_relative = False ) if : return 1 return 0",True,failures,failures,0.7307915687561035 2078,"def OnPopup ( self, form, popup_handle ) : for num, action_name, menu_name, shortcut in self. actions : if : ida_kernwin. attach_action_to_popup ( form, popup_handle, None ) else : handler = command_handler_t ( self, num, 2 ) desc = ida_kernwin. action_desc_t ( action_name, menu_name, handler, shortcut ) ida_kernwin. attach_dynamic_action_to_popup ( form, popup_handle, desc )",False,menu_name is None,num == 0,0.6513662338256836 2079,"def _init_auxiliary_head ( self, auxiliary_head ) : """"""Initialize ``auxiliary_head``"""""" if auxiliary_head is not None : if : self. auxiliary_head = nn. ModuleList ( ) for head_cfg in auxiliary_head : self. auxiliary_head. append ( builder. build_head ( head_cfg ) ) else : self. auxiliary_head = builder. build_head ( auxiliary_head )",True,"isinstance(auxiliary_head, list)","isinstance(auxiliary_head, list)",0.6627303957939148 2080,"def check ( self, xp, nout ) : input = xp. asarray ( self. x ). astype ( numpy. float32 ) with warnings. catch_warnings ( ) : if : warnings. simplefilter ( ""ignore"", self. ignore_warning ) if self. result : self. check_positive ( xp, self. func, input, self. eps, nout ) else : self. check_negative ( xp, self. func, input, self. eps, nout )",True,self.ignore_warning,self.ignore_warning,0.6564961671829224 2081,"def compute ( self, split ) : for t in self. rdd. iterator ( split ) : for k, v in six. iteritems ( self. filters ) : value = getattr ( t, k ) if isinstance ( v, types. FunctionType ) : if not v ( value ) : break else : if : v = [ v ] if value not in v : break else : yield t",False,"not isinstance(v, list)","isinstance(v, types.StringTypes)",0.6533823609352112 2082,"def convertstore ( self, mydict ) : targetheader = self. mypofile. header ( ) targetheader. addnote ( ""extracted from web2py"", ""developer"" ) for source_str in mydict. keys ( ) : target_str = mydict [ source_str ] if target_str == source_str : target_str = u"""" elif : target_str = u"""" pounit = self. convertunit ( source_str, target_str ) self. mypofile. addunit ( pounit ) return self. mypofile",False,target_str.startswith(u'*** '),target_str == target_str,0.6509428024291992 2083,"def sequence_list ( self ) : ""Returns a list of information about all DB sequences for all models in all apps."" from django. apps import apps from django. db import models, router sequence_list = [ ] for app_config in apps. get_app_configs ( ) : for model in router. get_migratable_models ( app_config, self. connection. alias ) : if : continue if model. _meta. swapped : continue for f in model. _meta. local_fields : if isinstance ( f, models. AutoField ) : sequence_list. append ( { ""table"" : model. 
_meta. db_table, ""column"" : f. column } ) break for f in model. _meta. local_many_to_many : if f. remote_field. through is None : sequence_list. append ( { ""table"" : f. m2m_db_table ( ), ""column"" : None } ) return sequence_list",False,not model._meta.managed,"hasattr(model, '_meta')",0.6582242250442505 2084,"def _extract_constants_from_irs ( irs : List [ Operation ], all_cst_used : List [ ConstantValue ], all_cst_used_in_binary : Dict [ str, List [ ConstantValue ] ], context_explored : Set [ Node ], ) : for ir in irs : if isinstance ( ir, Binary ) : for r in ir. read : if isinstance ( r, Constant ) : all_cst_used_in_binary [ str ( ir. type ) ]. append ( ConstantValue ( str ( r. value ), str ( r. type ) ) ) if isinstance ( ir, TypeConversion ) : if isinstance ( ir. variable, Constant ) : all_cst_used. append ( ConstantValue ( str ( ir. variable. value ), str ( ir. type ) ) ) continue for r in ir. read : if isinstance ( ir, Member ) : continue if isinstance ( r, Constant ) : all_cst_used. append ( ConstantValue ( str ( r. value ), str ( r. type ) ) ) if isinstance ( r, StateVariable ) : if : if r. node_initialization. irs : if r. node_initialization in context_explored",False,r.node_initialization,r.node_initialization is not None,0.658049464225769 2085,"def _galaxy_loc_iter ( loc_file, galaxy_dt, need_remap = False ) : """"""Iterator returning genome build and references from Galaxy *.loc file."""""" if ""column"" in galaxy_dt : dbkey_i = galaxy_dt [ ""column"" ]. index ( ""dbkey"" ) path_i = galaxy_dt [ ""column"" ]. index ( ""path"" ) else : dbkey_i = None if os. path. exists ( loc_file ) : with open ( loc_file ) as in_handle : for line in in_handle : if line. strip ( ) and not line. startswith ( ""#"" ) : parts = [ x. strip ( ) for x in line. strip ( ). split ( ""\t"" ) ] if len ( parts ) == 1 : parts = [ x. strip ( ) for x in line. strip ( ). split ( "" "" ) if x. strip ( ) ] if len ( parts ) > 1 : raise IOError ( ""Galaxy location file uses spaces instead of "" ""tabs to separate fields: %s"" % loc_file SupportStatus : GVISOR_SECURE_RUNTIME = ""runsc"" if is_linux ( ) : if not shutil. which ( GVISOR_SECURE_RUNTIME ) : return SupportStatus. err ( { UnsupportReason. ENVIRONMENT_NOT_SECURE : self. ENV_ID } ) if : logger. warning ( ""Unable to start GLambda app. Setting "" ""`cgroup.cpuset.cpus` does not match `docker."" ""cpuset.cpus`. Potential fix: `cat /sys/fs/"" ""cgroup/cpuset/cpuset.cpus > /sys/fs/cgroup/"" ""cpuset/docker/cpuset.cpus`."" ) return SupportStatus. err ( { UnsupportReason. ENVIRONMENT_MISCONFIGURED : self. ENV_ID } ) return super ( ). check_support ( )",False,not GLambdaTaskEnvironment._is_cgroup_cpuset_cfg_correct(),not self.has_docker,0.6538712978363037 2087,"def __init__ ( self, f ) : self. _check_type ( f ) self. func = can ( f. func ) self. args = [ can ( a ) for a in f. args ] self. keywords = { k : can ( v ) for k, v in f. keywords. items ( ) } self. buffers = [ ] self. arg_buffer_counts = [ ] self. keyword_buffer_counts = { } for canned_arg in self. args : if : self. arg_buffer_counts. append ( 0 ) continue self. arg_buffer_counts. append ( len ( canned_arg. buffers ) ) self. buffers. extend ( canned_arg. buffers ) canned_arg. buffers = [ ] for key in sorted ( self. keywords ) : canned_kwarg = self. keywords [ key ] if not isinstance ( canned_kwarg, CannedObject ) : continue self. keyword_buffer_counts [ key ] = len ( canned_kwarg. buffers ) self. buffers. extend ( canned_kwarg. buffers ) canned_kwarg. 
buffers = [ ]",False,"not isinstance(canned_arg, CannedObject)","isinstance(canned_arg, CannedObject)",0.6588249802589417 2088,"def select_from_partition_request ( self, batch_definition_list = None ) : if batch_definition_list is None : return [ ] filter_function : Callable if self. custom_filter_function : filter_function = self. custom_filter_function else : filter_function = self. best_effort_partition_matcher ( ) selected_batch_definitions = list ( filter ( lambda batch_definition : filter_function ( partition_definition = batch_definition. partition_definition, ), batch_definition_list, ) ) if self. index is None : selected_batch_definitions = selected_batch_definitions [ : self. limit ] else : if : selected_batch_definitions = [ selected_batch_definitions [ self. index ] ] else : selected_batch_definitions = list ( itertools. chain. from_iterable ( [ selected_batch_definitions [ self. index ] ] ) ) return selected_batch_definitions",False,"isinstance(self.index, int)",self.limit is None,0.6496145725250244 2089,"def run ( self ) : hs = sublime. load_settings ( ""TextPastryHistory.sublime-settings"" ) item = hs. get ( ""last_command"", { } ) if ( item and ""command"" in item and ""text"" in item and item [ ""command"" ] and item [ ""text"" ] ) : text = item. get ( ""text"" ) separator = item. get ( ""separator"", None ) command = item. get ( ""command"", None ) if text and command : sublime. status_message ( ""Running last command"" ) if command == ""insert_nums"" : ( start, step, padding ) = map ( str, text. split ( "" "" ) ) self. window. active_view ( ). run_command ( ""text_pastry_range"", { ""start"" : start, ""step"" : step, ""padding"" : padding }, ) elif : self. window. active_view ( ). run_command ( command, { ""text"" : text, ""separator"" : separator } ) else : pass",False,command == 'text_pastry_insert_text',command == 'delete_nums',0.6481184959411621 2090,"def model ( data ) : with pyro. plate ( ""plate_0"", data. shape [ - 1 ] ) : alpha = ( pyro. sample ( ""alpha"", dist. HalfCauchy ( 1.0 ) ) if : else torch. tensor ( [ 1.0, 1.0 ] ) ) beta = ( pyro. sample ( ""beta"", dist. HalfCauchy ( 1.0 ) ) if : else torch. tensor ( [ 1.0, 1.0 ] ) ) beta_binom = BetaBinomialPair ( ) with pyro. plate ( ""plate_1"", data. shape [ - 2 ] ) : probs = pyro. sample ( ""probs"", beta_binom. latent ( alpha, beta ) ) with pyro. plate ( ""data"", data. shape [ 0 ] ) : pyro. sample ( ""binomial"", beta_binom. conditional ( probs = probs, total_count = total_count ), obs = data, )",False,hyperpriors,data.shape[1] == 0,0.7024195194244385 2091,"def _get_build_status ( self, job_name, build_number ) : try : build_info = self. server. get_build_info ( job_name, build_number ) if : return ""building"" else : return ""built"" except jenkins. NotFoundException : return ""not found""",False,build_info['building'],build_info,0.6538980007171631 2092,"def get_open_shards ( ) : database_hosts = config. get_required ( ""DATABASE_HOSTS"" ) open_shards = [ ] for host in database_hosts : open_shards. extend ( shard [ ""ID"" ] for shard in host [ ""SHARDS"" ] if : ) return open_shards",False,shard['OPEN'] and (not shard.get('DISABLED')),config.get_option('SHOW_SQL_shards'),0.6628351807594299 2093,"def validate_name ( self, type, name ) : if type == InterfaceType. BRIDGE : if : raise ValueError ( 'Bridge interface must start with ""bridge"" followed by an unique number.' ) if type == InterfaceType. LINK_AGGREGATION : if not ( name. startswith ( ""lagg"" ) and name [ 4 : ]. 
isdigit ( ) ) : raise ValueError ( 'Link aggregation interface must start with ""lagg"" followed by an unique number.' ) else : if len ( name ) > 5 and name [ 4 ] == ""0"" : raise ValueError ( 'Link aggregation interface name cannot start with ""lagg0"".' ) if type == InterfaceType. VLAN : if not ( name. startswith ( ""vlan"" ) and name [ 4 : ]. isdigit ( ) ) : raise ValueError ( 'VLAN interface must start with ""vlan"" followed by an unique number.' )",False,not (name.startswith('bridge') and name[6:].isdigit()),name[0] == name[0],0.6521009206771851 2094,"def _process_enum_definition ( self, tok ) : fields = [ ] for field in tok. fields : if : expression = self. expression_parser. parse ( field. expression ) else : expression = None fields. append ( c_ast. CEnumField ( name = field. name. first, value = expression ) ) name = tok. enum_name if name : name = ""enum %s"" % tok. enum_name. first else : name = self. _make_anonymous_type ( ""enum"" ) return c_ast. CTypeDefinition ( name = name, type_definition = c_ast. CEnum ( attributes = tok. attributes, fields = fields, name = name ), )",True,field.expression,field.expression,0.6620014309883118 2095,"def __init__ ( self, uuid = None, cluster_state = None, children = None, ** kwargs ) : self. uuid = uuid self. cluster_state = cluster_state if self. cluster_state is not None : self. children = WeakSet ( self. cluster_state. tasks. get ( task_id ) for task_id in children or ( ) if : ) else : self. children = WeakSet ( ) self. _serializer_handlers = { ""children"" : self. _serializable_children, ""root"" : self. _serializable_root, ""parent"" : self. _serializable_parent, } if kwargs : self. __dict__. update ( kwargs )",False,task_id in self.cluster_state.tasks,task_id is None,0.6590691208839417 2096,"def LoadFromFile ( cls ) : """"""Loads a service principal from a file."""""" with open ( object_storage_service. FindCredentialFile ( azure_credentials. AZURE_CREDENTIAL_PROFILE_FILE ), encoding = ""utf-8-sig"", ) as profile_fp, open ( object_storage_service. FindCredentialFile ( azure_credentials. AZURE_CREDENTIAL_TOKENS_FILE ) ) as tokens_fp : subscriptions = json. load ( profile_fp ) [ ""subscriptions"" ] subscription = [ sub for sub in subscriptions if sub [ ""isDefault"" ] ] [ 0 ] subscription_type = subscription [ ""user"" ] [ ""type"" ] if subscription_type!= ""servicePrincipal"" : logging. info ( ""Azure credentials are of type '%s'. "" ""Will try to create a new service principal."", subscription_type, ) return cls ( ) name = subscription [ ""id"" ] app_id = subscription [ ""user"" ] [ ""name"" ] for token in json. load ( tokens_fp ) : if : logging. info ( ""Azure credentials are of type'servicePrincipal'. "" ""Will reuse them for benchmarking."" ) data = [ ] for d in accounts : if : prepare_opening_closing ( d ) has_value = False row = { ""account"" : d. name, ""parent_account"" : d. parent_account, ""indent"" : d. indent, ""from_date"" : filters. from_date, ""to_date"" : filters. to_date, ""currency"" : company_currency, ""account_name"" : ( ""{} - {}"". format ( d. account_number, d. account_name ) if d. account_number else d. account_name ), } for key in value_fields : row [ key ] = flt ( d. get ( key, 0.0 ), 3 ) if abs ( row [ key ] ) >= 0.005 : has_value = True row [ ""has_value"" ] = has_value data. append ( row ) data. 
extend ( [ { }, total_row ] ) return data",False,parent_children_map.get(d.account),"d.get(parent_children_map, company_currency)",0.6483578681945801 2098,"def modify_bottle_params ( self, output_stride = None ) : if output_stride is not None and output_stride % 2!= 0 : raise Exception ( ""output stride must to be even number"" ) if output_stride is None : return else : stride = 2 for i, layer_setting in enumerate ( self. bottleneck_params_list ) : t, c, n, s = layer_setting stride = stride * s if : s = 1 self. bottleneck_params_list [ i ] = ( t, c, n, s )",False,stride > output_stride,stride == 2,0.673632025718689 2099,"def _Determine_Do ( self ) : self. applicable = 1 configTokens = black. configure. items [ ""configTokens"" ]. Get ( ) buildFlavour = black. configure. items [ ""buildFlavour"" ]. Get ( ) if buildFlavour == ""full"" : self. value = False else : self. value = True for opt, optarg in self. chosenOptions : if opt == ""--with-tests"" : if not self. value : configTokens. append ( ""tests"" ) self. value = True elif opt == ""--without-tests"" : if : configTokens. append ( ""notests"" ) self. value = False self. determined = 1",False,self.value,not self.value,0.662090539932251 2100,"def test_out_of_bounds ( self ) : projection = ccrs. TransverseMercator ( central_longitude = 0 ) rings = [ ( [ ( 86, 1 ), ( 86, - 1 ), ( 88, - 1 ), ( 88, 1 ) ], - 1 ), ( [ ( 86, 1 ), ( 86, - 1 ), ( 130, - 1 ), ( 88, 1 ) ], 1 ), ( [ ( 86, 1 ), ( 86, - 1 ), ( 130, - 1 ), ( 130, 1 ) ], 1 ), ( [ ( 120, 1 ), ( 120, - 1 ), ( 130, - 1 ), ( 130, 1 ) ], 0 ), ] for coords, expected_n_lines in rings : linear_ring = sgeom. LinearRing ( coords ) rings, mlinestr = projection. project_geometry ( linear_ring ) if : assert rings assert not mlinestr else : assert len ( mlinestr ) == expected_n_lines if expected_n_lines == 0 : assert mlinestr. is_empty",False,expected_n_lines == -1,len(mlinestr) == 0,0.6579124927520752 2101,"def get_attached_nodes ( self, external_account ) : for node in self. get_nodes_with_oauth_grants ( external_account ) : if node is None : continue node_settings = node. get_addon ( self. oauth_provider. short_name ) if node_settings is None : continue if : yield node",False,node_settings.external_account == external_account,self._has_attached_nodes(node_settings),0.6480094194412231 2102,"def GetRemote ( self ) : pars = { ""method"" : ""list"", ""path"" : self. rpath, ""by"" : ""name"", ""order"" : ""asc"" } result = self. byp. _ByPy__get ( const. PcsUrl + ""file"", pars, self. GetRemoteAct ) if result == const. ENoError : self. title ( self. rpath ) else : if : err = ( ""Can't retrieve Baidu PCS directories!\n"" + ""Maybe the network is down?\n"" + ""You can still manually input the remote path though"" + ""(But, I doubt it will work)"" ) tkMessageBox. showerror ( GuiTitle, err ) self. Bye ( ) else : self. rpath = const. get_pcs_path ( """" ) self. GetRemote ( )",False,self.rpath.strip('/') == const.AppPcsPath.strip('/'),result == const.SC_DOWN,0.6502629518508911 2103,"def set_presets ( * args, ** kwargs ) : if ""presets"" in kwargs : presets = kwargs [ ""presets"" ] if : return kwargs if not isinstance ( presets, list ) : presets = [ presets ] preset_kwargs = { } for preset in presets : if isinstance ( preset, str ) : preset_orig = preset preset = preset_dict. get ( preset, None ) if preset is None : raise ValueError ( f""Preset '{preset_orig}' was not found. 
Valid presets: {list(preset_dict.keys())}"" ) if isinstance ( preset, dict ) : for key in preset : preset_kwargs [ key ] = preset [ key ] else : raise TypeError ( f""Preset of type {type(preset)} was given, but only presets of type [dict, str] are valid."" ) for key in preset_kwargs : if key not in kwargs : kwargs [ key ] = preset_kwargs [ key ] return args, kwargs",True,presets is None,presets is None,0.6846630573272705 2104,"def _Determine_Do ( self ) : if sys. platform. startswith ( ""linux"" ) or sys. platform == ""darwin"" : self. applicable = 1 self. value = [ ] applicable = black. configure. items [ ""mozBin"" ]. Determine ( ) if : d = black. configure. items [ ""mozBin"" ]. Get ( ) if not self. Contains ( d ) : self. value. append ( d ) if sys. platform. startswith ( ""linux"" ) : pythonExecutable = black. configure. items [ ""siloedPython"" ]. Get ( ) pythonLibPath = join ( dirname ( dirname ( pythonExecutable ) ), ""lib"" ) if not self. Contains ( pythonLibPath ) : self. value. append ( pythonLibPath ) else : self. applicable = 0 self. determined = 1",False,applicable,applied,0.687721848487854 2105,"def _consume_field ( self, parse_type : bool = True, prefer_type : bool = False ) -> Tuple [ str, str, List [ str ] ] : line = next ( self. _line_iter ) before, colon, after = self. _partition_field_on_colon ( line ) _name, _type, _desc = before, """", after if parse_type : match = _google_typed_arg_regex. match ( before ) if : _name = match. group ( 1 ). strip ( ) _type = match. group ( 2 ) _name = self. _escape_args_and_kwargs ( _name ) if prefer_type and not _type : _type, _name = _name, _type if _type and self. _config. napoleon_preprocess_types : _type = _convert_type_spec ( _type, self. _config. napoleon_type_aliases or { } ) indent = self. _get_indent ( line ) + 1 _descs = [ _desc ] + self. _dedent ( self. _consume_indented_block ( indent ) ) _descs = self. __class__ ( _descs, self. _config ). lines ( ) return _name, _type, _descs",True,match,match,0.6744592189788818 2106,"def run ( self ) : """"""Run the import task."""""" self. logger. info ( u""import started {0}"", time. asctime ( ) ) self. set_config ( config [ ""import"" ] ) if self. query is None : stages = [ read_tasks ( self ) ] else : stages = [ query_tasks ( self ) ] if self. config [ ""pretend"" ] : stages += [ log_files ( self ) ] else : if : stages += [ group_albums ( self ) ] if self. config [ ""autotag"" ] : stages += [ lookup_candidates ( self ), user_query ( self ) ] else : stages += [ import_asis ( self ) ] for stage_func in plugins. early_import_stages ( ) : stages. append ( plugin_stage ( self, stage_func ) ) for stage_func in plugins. import_stages ( ) : stages. append ( plugin_stage ( self, stage_func ) ) stages += [ manipulate_files ( self ) ] pl = pipeline. Pipeline ( stages ) plugins. send ( ""import_begin"", session = self ) try : if config [ ""threaded"" ] : pl. run_parallel ( QUEUE_SIZE ) else : pl. run_sequential ( ) except ImportAbort : pass",False,self.config['group_albums'] and (not self.config['singletons']),self.config['groupalbums'],0.6511656045913696 2107,"def put_secret ( _, request ) : updated_secret = json. loads ( request. body ) for filepath, value in updated_secret [ ""data"" ]. items ( ) : if filepath not in secret [ ""data"" ] : write_file ( config_dir, filepath, base64. b64decode ( value. encode ( ""utf-8"" ) ). decode ( ""ascii"" ), ) for filepath in secret [ ""data"" ] : if : normalized_path = normalize_path ( filepath ) os. remove ( str ( config_dir. 
join ( normalized_path ) ) ) secret [ ""data"" ] = updated_secret [ ""data"" ] return { ""status_code"" : 200, ""content"" : json. dumps ( secret ) }",False,filepath not in updated_secret['data'],path.exists(str(config_dir.join(filepath)),0.6500364542007446 2108,"def remove_organization_member ( org, user_obj ) : org_admins = [ u. username for u in __get_org_admin_users ( org ) ] if len ( org_admins ) == 1 and user_obj. username in org_admins : raise DataModelException ( ""Cannot remove user as they are the only organization admin"" ) with db_transaction ( ) : permissions = list ( RepositoryPermission. select ( RepositoryPermission. id ) . join ( Repository ) . where ( Repository. namespace_user == org, RepositoryPermission. user == user_obj ) ) if permissions : RepositoryPermission. delete ( ). where ( RepositoryPermission. id << permissions ). execute ( ) members = list ( TeamMember. select ( TeamMember. id ) . join ( Team ) . where ( Team. organization == org, TeamMember. user == user_obj ) ) if : TeamMember. delete ( ). where ( TeamMember. id << members ). execute ( )",True,members,members,0.6883749961853027 2109,"def run ( ip_list, path, rate ) : try : ip_file = open ( ""target.log"", ""w"" ) ip_file. write ( ""\n"". join ( ip_list ) ) ip_file. close ( ) path = str ( path ). translate ( None, "";|&"" ) rate = str ( rate ). translate ( None, "";|&"" ) if not os. path. exists ( path ) : return os. system ( ""%s -p1-65535 -iL target.log -oL tmp.log --randomize-hosts --rate=%s"" % ( path, rate ) ) result_file = open ( ""tmp.log"", ""r"" ) result_json = result_file. readlines ( ) result_file. close ( ) del result_json [ 0 ] del result_json [ - 1 ] open_list = { } for res in result_json : try : ip = res. split ( ) [ 3 ] port = res. split ( ) [ 2 ] if : open_list [ ip ]. append ( port ) else : open_list [ ip ] = [ port ] except : pass os. remove ( ""target.log"" ) os. remove ( ""tmp.",True,ip in open_list,ip in open_list,0.6653150320053101 2110,"def build_vertices ( self, ulines ) : vertex_idx = 0 vertices = collections. OrderedDict ( ) for line in ulines : for vt in line : if vt. replacement is not None : continue new_vertex = ( vt. u, vt. v, 0.0 ) if : continue vt. index = vertex_idx vertex_idx += 1 vertices [ new_vertex ] = 1 return vertex_idx, list ( vertices. keys ( ) )",False,new_vertex in vertices,new_vertex == -1,0.6830917596817017 2111,"def build_campaign_parameters ( self, params ) : campaign = self. tracker. campaign if campaign : params. _utmz = ""%s.%s.%s.%s."" % ( self. _generate_domain_hash ( ), calendar. timegm ( campaign. creation_time. timetuple ( ) ), self. visitor. visit_count, campaign. response_count, ) param_map = { ""utmcid"" : campaign. id, ""utmcsr"" : campaign. source, ""utmgclid"" : campaign. g_click_id, ""utmdclid"" : campaign. d_click_id, ""utmccn"" : campaign. name, ""utmcmd"" : campaign. medium, ""utmctr"" : campaign. term, ""utmcct"" : campaign. content, } for k, v in param_map. items ( ) : if : params. _utmz = ""%s%s=%s%s"" % ( params. _utmz, k, v. replace ( ""+"", ""%20"" ). replace ( "" "", ""%20"" ), Campaign. CAMPAIGN_DELIMITER, ) ",False,v,self.tracker.campaign == 'utmCtl',0.6908220052719116 2112,"def __on_item_activated ( self, event ) : if self. __module_view : module = self. get_event_module ( event ) self. __module_view. set_selection ( module. module_num ) if : self. input_list_ctrl. deactivate_active_item ( ) else : self. list_ctrl. deactivate_active_item ( ) for index in range ( self. list_ctrl. GetItemCount ( ) ) : if self. list_ctrl. 
IsSelected ( index ) : self. list_ctrl. Select ( index, False ) self. __controller. enable_module_controls_panel_buttons ( )",False,event.EventObject is self.list_ctrl,self.input_list_ctrl.IsActive(),0.6541434526443481 2113,"def zipTest ( self, f, compression ) : zipfp = zipfile. ZipFile ( f, ""w"", compression, allowZip64 = True ) filecount = 6 * 1024 ** 3 // len ( self. data ) next_time = time. time ( ) + _PRINT_WORKING_MSG_INTERVAL for num in range ( filecount ) : zipfp. writestr ( ""testfn%d"" % num, self. data ) if : next_time = time. time ( ) + _PRINT_WORKING_MSG_INTERVAL print >> sys. __stdout__, ( "" zipTest still writing %d of %d, be patient..."" % ( num, filecount ) ) sys. __stdout__. flush ( ) zipfp. close ( ) zipfp = zipfile. ZipFile ( f, ""r"", compression ) for num in range ( filecount ) : self. assertEqual ( zipfp. read ( ""testfn%d"" % num ), self. data ) if : next_time = time. time ( ) + _PRINT_WORKING_MSG_INTERVAL print >> sys. __stdout__, ( "" zipTest still reading %d of %d, be patient..."" % ( num, filecount ) ) sys. __stdout__. flush ( ) zipfp. close ( )",False,next_time <= time.time(),next_time < 0,0.652493953704834 2114,"def err_handler ( out ) : if out. status_code == 500 : print ( out. traceback ) else : print ( out ) if out. status_code == 404 and self. _check_refer_redirect ( ) : return if self. is_content_request ( ) : if self. content_error_redirect : err_context = { ""status"" : out. status_code, ""error"" : out. body } response. status = 303 redirect_url = self. content_error_redirect + ""?"" + urlencode ( err_context ) response. set_header ( ""Location"", redirect_url ) return else : if : return return error_view ( out ) if isinstance ( out. exception, dict ) : return json_error ( out. exception ) else : return json_error ( { ""error"" : ""not_found"" } )",False,self._wrong_content_session_redirect(),"isinstance(out.exception, dict)",0.6515780687332153 2115,"def collect ( self ) : for nickname in self. squid_hosts. keys ( ) : squid_host = self. squid_hosts [ nickname ] fulldata = self. _getData ( squid_host [ ""host"" ], squid_host [ ""port"" ] ) if fulldata is not None : fulldata = fulldata. splitlines ( ) for data in fulldata : matches = self. stat_pattern. match ( data ) if : self. publish_counter ( ""%s.%s"" % ( nickname, matches. group ( 1 ) ), float ( matches. group ( 2 ) ) )",False,matches,matches is not None,0.6868793964385986 2116,"def act_and_mul ( in_deltas, hs, activation ) : if iscontiguous ( in_deltas. _tensor ) and iscontiguous ( hs. _tensor ) : if : _in_deltas = ffi. cast ( ""float *"", ffi. from_buffer ( in_deltas. _tensor ) ) _hs = ffi. cast ( ""float *"", ffi. from_buffer ( hs. _tensor ) ) NervanaObject. be. mathlib. cmath_act_and_mul ( _in_deltas, _hs, in_deltas. size, activation. xcut ) return True else : return False else : return False",False,"isinstance(activation, Rectlinclip) and activation.slope == 0",hs.size > 0,0.6571342945098877 2117,"def _unpack_scales ( scales, vidxs ) : scaleData = [ None, None, None ] for i in range ( 3 ) : if : break scale = scales [ i ] if not math. isnan ( scale ) : vidx1, vidx2 = vidxs [ i * 2 ], vidxs [ i * 2 + 1 ] scaleData [ i ] = ( int ( vidx1 ), int ( vidx2 ), float ( scale ) ) return scaleData",False,"i >= min(len(scales), len(vidxs) // 2)",i == len(scales) - 1,0.655652642250061 2118,"def response_dict ( self ) : res = { ""ETag"" : self. etag, ""last-modified"" : self. last_modified_RFC1123, ""content-length"" : str ( self. size ), } if self. 
encryption is not None : res [ ""x-amz-server-side-encryption"" ] = self. encryption if : res [ ""x-amz-server-side-encryption-aws-kms-key-id"" ] = self. kms_key_id if self. bucket_key_enabled is not None : res [ ""x-amz-server-side-encryption-bucket-key-enabled"" ] = self. bucket_key_enabled if self. _storage_class!= ""STANDARD"" : res [ ""x-amz-storage-class"" ] = self. _storage_class if self. _expiry is not None : rhdr = 'ongoing-request=""false"", expiry-date=""{0}""' res [ ""x-amz-restore"" ] = rhdr. format ( self. expiry_date ) if self. _is_versioned : res [ ""x-amz-version-id"" ] = str ( self. version_id ) if self. website_redirect_location : res [ ""x-amz-website-redirect-location"" ] = self. website_redirect_location return res",False,self.encryption == 'aws:kms' and self.kms_key_id is not None,self.kms_key_id is not None,0.6521943211555481 2119,"def isSegInside ( self, seg ) : if len ( self ) == 0 : return 2 nbInter = 0 i1 = None i2 = None for segpath in self : a, b = segpath. intersect ( seg ) if a is not None and not eq ( a, i1 ) and not eq ( a, i2 ) : nbInter += 1 i1 = a if b is not None and not eq ( b, i1 ) and not eq ( b, i2 ) : nbInter += 1 i2 = a if nbInter == 0 : result = 1 if self. isInside ( seg. A ) else - 1 if nbInter == 1 : if self. isOnPath ( seg. A ) : if self. isOnPath ( seg. B ) : result = 0 else : result = 1 if self. isInside ( seg. B ) else - 1 elif self. isOnPath ( seg. B ) : result = 1 if self. isInside ( seg. A ) else - 1 else : result = 0 if nbInter >= 2 : if self. hasSeg ( seg ) : result = 0 else : if : result = 1 if self. isInside ( seg. midPoint ( ) ) else - 1 else : result = - 1 return result",False,self.isOnPath(seg.A) and self.isOnPath(seg.B),self.hasSeg(seg),0.6507209539413452 2120,"def __init__ ( self, * args, ** kwargs ) : self. _options_values = { ** kwargs } for name, value in kwargs. items ( ) : setattr ( self, name, value ) position = 0 for name, option in self. __options__ : if not option. positional : break maybe_value = getattr ( type ( self ), name ) if not isoption ( maybe_value ) : continue if len ( args ) <= position : break if : raise ValueError ( ""Already got a value for option {}"". format ( name ) ) setattr ( self, name, args [ position ] ) position += 1",False,name in self._options_values,maybe_value is not None and args[position],0.6568104028701782 2121,"def _codegen_impl ( self, state : CodegenState ) -> None : for ll in self. leading_lines : ll. _codegen ( state ) state. add_indent_tokens ( ) end_node = self. body if len ( self. handlers ) > 0 : end_node = self. handlers [ - 1 ] orelse = self. orelse end_node = end_node if orelse is None else orelse finalbody = self. finalbody end_node = end_node if finalbody is None else finalbody with state. record_syntactic_position ( self, end_node = end_node ) : state. add_token ( ""try"" ) self. whitespace_before_colon. _codegen ( state ) state. add_token ( "":"" ) self. body. _codegen ( state ) for handler in self. handlers : handler. _codegen ( state ) if : orelse. _codegen ( state ) if finalbody is not None : finalbody. _codegen ( state )",True,orelse is not None,orelse is not None,0.6602383852005005 2122,"def _findInTree ( t, n ) : ret = [ ] if type ( t ) is dict : if : ret. append ( t ) for k, v in t. 
items ( ) : ret += _findInTree ( v, n ) if type ( t ) is list : for v in t : ret += _findInTree ( v, n ) return ret",False,'_name' in t and t['_name'] == n,t.keys() is not None,0.6632386445999146 2123,"def test_cwl_rnaseq ( self, install_test_files ) : with install_cwl_test_files ( ) as work_dir : with utils. chdir ( os. path. join ( work_dir, ""rnaseq"" ) ) : if : shutil. rmtree ( ""cromwell_work"" ) subprocess. check_call ( [ ""bcbio_vm.py"", ""cwlrun"", ""cromwell"", ""rnaseq-workflow"" ] )",True,os.path.exists('cromwell_work'),os.path.exists('cromwell_work'),0.6478400230407715 2124,"def _discard ( self, node, current_klass ) : if isinstance ( node. expr, self. ast. CallFunc ) : expr = self. _callfunc ( node. expr, current_klass, is_statement = True, optlocal_var = isinstance ( node. expr. node, self. ast. Name ), ) if isinstance ( node. expr. node, self. ast. Name ) : name_type, pyname, jsname, depth, is_local = self. lookup ( node. expr. node. name ) if : self. w ( expr ) return self. w ( self. spacing ( ) + expr + "";"" ) elif isinstance ( node. expr, self. ast. Const ) : if node. expr. value in [ ""@"" + ""CONSTANT_DECLARATION@"", ""@"" + ""ATTRIB_REMAP_DECLARATION@"", ] : self. w ( node. expr. value ) return elif isinstance ( node. expr, self. ast. Yield ) : self. _yield ( node. expr, current_klass ) else : expr = self. expr ( node. expr, current_klass ) self. w ( self. spacing ( ) + expr + "";"" )",False,name_type == '__pyjamas__' and jsname in __pyjamas__.native_js_funcs,is_local,0.6506012678146362 2125,"def find_module_pre_py33 ( string, path = None, fullname = None ) : try : module_file, module_path, description = imp. find_module ( string, path ) module_type = description [ 2 ] return module_file, module_path, module_type is imp. PKG_DIRECTORY except ImportError : pass if path is None : path = sys. path for item in path : loader = pkgutil. get_importer ( item ) if loader : try : loader = loader. find_module ( string ) if loader : is_package = loader. is_package ( string ) is_archive = hasattr ( loader, ""archive"" ) try : module_path = loader. get_filename ( string ) except AttributeError : try : module_path = loader. _get_filename ( string ) except AttributeError : continue if : module_path = os. path. dirname ( module_path ) if is_archive :",False,is_package,is_package and module_path,0.657867431640625 2126,"def ql_syscall_fcntl64 ( ql, fcntl_fd, fcntl_cmd, fcntl_arg, * args, ** kw ) : F_GETFD = 1 F_SETFD = 2 F_GETFL = 3 F_SETFL = 4 if fcntl_cmd == F_GETFL : regreturn = 2 elif fcntl_cmd == F_SETFL : if : ql. os. fd [ fcntl_fd ]. fcntl ( fcntl_cmd, fcntl_arg ) regreturn = 0 elif fcntl_cmd == F_GETFD : regreturn = 2 elif fcntl_cmd == F_SETFD : regreturn = 0 else : regreturn = 0 return regreturn",False,"isinstance(ql.os.fd[fcntl_fd], ql_socket)",ll.os.fd[fcntl_fd] is not None,0.6535757780075073 2127,"def __call__ ( self, form, field ) : l = field. data and len ( field. data ) or 0 if l < self. min or self. max!= - 1 and l > self. max : if : if self. max == - 1 : self. message = field. ngettext ( ""Field must be at least %(min)d character long."", ""Field must be at least %(min)d characters long."", self. min, ) elif self. min == - 1 : self. message = field. ngettext ( ""Field cannot be longer than %(max)d character."", ""Field cannot be longer than %(max)d characters."", self. max, ) else : self. message = field. gettext ( ""Field must be between %(min)d and %(max)d characters long."" ) raise ValidationError ( self. message % dict ( min = self. min, max = self. 
max ) )",False,self.message is None,self.min == 0,0.6602815389633179 2128,"def topic_exists ( self, arn ) : response = self. _conn. get_all_topics ( ) topics = response [ ""ListTopicsResponse"" ] [ ""ListTopicsResult"" ] [ ""Topics"" ] current_topics = [ ] if len ( topics ) > 0 : for topic in topics : topic_arn = topic [ ""TopicArn"" ] current_topics. append ( topic_arn ) if : return True return False",True,arn in current_topics,arn in current_topics,0.6723469495773315 2129,"def refresh ( token = None, fail_silent = False ) : """"""Use self or provided JWT token to get a fresher one. If self token, internalize upon refresh."""""" using_self_token = token is None try : if PyGraphistry. store_token_creds_in_memory ( ) : logger. debug ( ""JWT refresh via creds"" ) return PyGraphistry. relogin ( ) logger. debug ( ""JWT refresh via token"" ) if : PyGraphistry. _is_authenticated = False token = ( ArrowUploader ( server_base_path = PyGraphistry. protocol ( ) + ""://"" + PyGraphistry. server ( ), certificate_validation = PyGraphistry. certificate_validation ( ), ) . refresh ( PyGraphistry. api_token ( ) if using_self_token else token ) . token ) if : PyGraphistry. api_token ( token ) PyGraphistry. _is_authenticated = True return PyGraphistry. api_token ( ) except Exception as e : if not fail_silent : util. error ( ""Failed to refresh token: %s"" % str ( e ) )",False,using_self_token,token and token != None,0.6603431701660156 2130,"def sort ( self ) : sorted_models = [ ] concrete_models = set ( ) models = list ( self. data ) while len ( sorted_models ) < len ( models ) : found = False for model in models : if model in sorted_models : continue dependencies = self. dependencies. get ( model. _meta. concrete_model ) if : sorted_models. append ( model ) concrete_models. add ( model. _meta. concrete_model ) found = True if not found : return self. data = SortedDict ( [ ( model, self. data [ model ] ) for model in sorted_models ] )",False,not (dependencies and dependencies.difference(concrete_models)),dependencies in concrete_models,0.6552073359489441 2131,"def set ( self, new, * args, ** kwargs ) : """"""Change the contents of the default scale"""""" if type ( new ) == str : self. scale = Scale. get_scale ( new ) if : self. scale. tuning = kwargs [ ""tuning"" ] self. pentatonic. update ( self. scale. pentatonic ) elif isinstance ( new, ( list, Pattern, TimeVar ) ) : self. scale = ScalePattern ( new, * args, ** kwargs ) if self. scale. name is not None and self. scale. name not in Scale. names ( ) : Scale [ self. scale. name ] = self. scale self. pentatonic. update ( self. scale. pentatonic ) else : print ( ""Warning: {!r} is not a valid scale"". format ( new ) ) return self",True,'tuning' in kwargs,'tuning' in kwargs,0.6624324321746826 2132,"def _stop ( self, close_output_streams = False ) : if self. is_stopped ( ) : return self. _status = ""stopping"" logger. debug ( ""stopping the %s watcher"" % self. name ) logger. debug ( ""gracefully stopping processes [%s] for %ss"" % ( self. name, self. graceful_timeout ) ) self. call_hook ( ""before_stop"" ) yield self. kill_processes ( ) self. reap_processes ( ) if self. stdout_redirector is not None : self. stdout_redirector. stop ( ) self. stdout_redirector = None if self. stderr_redirector is not None : self. stderr_redirector. stop ( ) self. stderr_redirector = None if close_output_streams : if self. stdout_stream and hasattr ( self. stdout_stream [ ""stream"" ], ""close"" ) : self. stdout_stream [ ""stream"" ]. close ( ) if : self. 
stderr_stream [ ""stream"" ]. close ( ) if self. evpub_socket is not None : self. notify_event ( ""stop"", { ""time"" : time. time ( ) } ) self. _status = ""stopped"" self. call_hook ( ""after_stop"" ) logger. info ( ""%s stopped"", self. name )",False,"self.stderr_stream and hasattr(self.stderr_stream['stream'], 'close')","self.stderr_stream and hasattr(self.stderr_stream[0], 'close')",0.6488653421401978 2133,"def done ( result ) : reply, result, ct = result if result : data = { ""text/plain"" : result if isinstance ( result, str ) else str ( result ), } if isinstance ( result, BinaryCapsule ) : if : data [ result. content_type ] = result. as_b64 ( ) self. _publish_execute_result ( parent, data, { }, self. execution_count ) super ( SplashKernel, self ). send_execute_reply ( stream, ident, parent, md, reply )",False,"result.content_type in {'image/png', 'image/jpeg'}",result.content_type is not None,0.6495629549026489 2134,"def logic ( ) : count = intbv ( 0, min = 0, max = MAXVAL + 1 ) while True : yield clock. posedge, reset. posedge if reset == 1 : count [ : ] = 0 else : flag. next = 0 if : flag. next = 1 count [ : ] = 0 else : count += 1",False,count == MAXVAL,reset == max,0.6796265244483948 2135,"def sources ( ) : for d in os. listdir ( base ) : if d. endswith ( ""old"" ) : continue if d == ""indcat"" : continue if : continue yield d",False,not os.path.isdir(base + d),d == 'notcat',0.6456693410873413 2136,"def LogURLsFromStr ( self, RawOutput ) : plugin_output = dict ( PLUGIN_OUTPUT ) self. timer. start_timer ( ""LogURLsFromStr"" ) URLList = import_urls ( RawOutput. strip ( ). split ( ""\n"" ) ) NumFound = 0 VisitURLs = False if True : VisitURLs = True for Transaction in self. requester. get_transactions ( True, get_urls_to_visit ( ) ) : if : NumFound += 1 TimeStr = self. timer. get_elapsed_time_as_str ( ""LogURLsFromStr"" ) logging. info ( ""Spider/URL scraper time=%s"", TimeStr ) plugin_output [ ""type"" ] = ""URLsFromStr"" plugin_output [ ""output"" ] = { ""TimeStr"" : TimeStr, ""VisitURLs"" : VisitURLs, ""URLList"" : URLList, ""NumFound"" : NumFound, } return [ plugin_output ]",False,Transaction is not None and Transaction.found,True,0.662529706954956 2137,"def test_object___format___errors ( self ) : errors = [ ( ""+"", ""Sign not allowed in string format specifier"" ), ( ""=+"", ""Sign not allowed in string format specifier"" ), ( ""=10"", ""'=' alignment not allowed in string format specifier"" ), ( ""10r"", ""Unknown format code 'r' for object of type'str'"" ), ( ""=+r"", ""Unknown format code 'r' for object of type'str'"" ), ( ""."", ""Format specifier missing precision"" ), ( "".a"", ""Format specifier missing precision"" ), ] for char in allChars : if : if char == "","" : errors. append ( ( ""10"" + char, ""Cannot specify ',' with's'."" ) ) else : errors. append ( ( ""10"" + char, ""Unknown format code '%s' for object of type'str'"" % char, ) ) for errorFmt, errorMsg in errors : self. assertRaisesMessage ( ValueError, errorMsg, object ( ). __format__, errorFmt ) self. assertRaisesMessage ( TestException, ""booh"", bad_str ( ). __format__, ""+"" ) self. assertRaisesMessage ( TestException, ""booh"", bad_str ( ). __format__, ""=10"" ) self. assertRaisesMessage ( TestException, ""booh"", bad_str ( ). __format__, "".""",False,char != 's' and (char < '0' or char > '9'),char >= 0,0.6499617099761963 2138,"def create_image_upload ( ) : if request. method == ""POST"" : image = request. 
form [ ""image"" ] if : image_file = uploaded_file ( file_content = image ) image_url = upload_local ( image_file, UPLOAD_PATHS [ ""temp"" ] [ ""image"" ]. format ( uuid = uuid4 ( ) ) ) return jsonify ( { ""status"" : ""ok"", ""image_url"" : image_url } ) else : return jsonify ( { ""status"" : ""no_image"" } )",True,image,image,0.6921366453170776 2139,"def _record_shell ( ex, files, bind_rez = True, print_msg = False ) : if bind_rez and package_commands_sourced_first : ex. source ( context_file ) for file_ in files : if : ex. source ( file_ ) if bind_rez and not package_commands_sourced_first : ex. source ( context_file ) if envvar : ex. unsetenv ( envvar ) if add_rez and bind_rez : ex. interpreter. _bind_interactive_rez ( ) if print_msg and add_rez and not quiet : ex. info ( """" ) ex. info ( ""You are now in a rez-configured environment."" ) ex. info ( """" ) if system. is_production_rez_install : ex. command ( ""rezolve context"" )",False,os.path.exists(os.path.expanduser(file_)),os.path.isfile(file_),0.648218035697937 2140,"def convert_file ( input_file_path ) : print ( ""Reading gzip file {}."". format ( input_file_path ) ) with gzip. open ( input_file_path ) as f : records = json. load ( f ) n = len ( records [ ""passages"" ]. keys ( ) ) for i in tqdm. tqdm ( range ( n ), ""Converting"" ) : newline_dict = { } index = str ( i ) if : newline_dict [ ""answers"" ] = records [ ""answers"" ] [ index ] newline_dict [ ""wellFormedAnswers"" ] = records [ ""wellFormedAnswers"" ] [ index ] newline_dict [ ""passages"" ] = records [ ""passages"" ] [ index ] newline_dict [ ""query"" ] = records [ ""query"" ] [ index ] newline_dict [ ""query_id"" ] = records [ ""query_id"" ] [ index ] newline_dict [ ""query_type"" ] = records [ ""query_type"" ] [ index ] yield newline_dict",False,'test' not in input_file_path,index in records,0.6498531103134155 2141,"def _get_tracks_compositors_list ( ) : tracks_list = [ ] tracks = current_sequence ( ). tracks compositors = current_sequence ( ). compositors for track_index in range ( 1, len ( tracks ) - 1 ) : track_compositors = [ ] for j in range ( 0, len ( compositors ) ) : comp = compositors [ j ] if : track_compositors. append ( comp ) tracks_list. append ( track_compositors ) return tracks_list",False,comp.transition.b_track == track_index,comp,0.6489219665527344 2142,"def delete_stream ( transaction, stream_hash, sd_hash, blob_hashes, blob_dir ) : transaction. execute ( ""delete from content_claim where stream_hash=? "", ( stream_hash, ) ) transaction. execute ( ""delete from file where stream_hash=? "", ( stream_hash, ) ) transaction. execute ( ""delete from stream_blob where stream_hash=?"", ( stream_hash, ) ) transaction. execute ( ""delete from stream where stream_hash=? "", ( stream_hash, ) ) transaction. execute ( ""delete from blob where blob_hash=?"", ( sd_hash, ) ) for blob_hash in blob_hashes : transaction. execute ( ""delete from blob where blob_hash=?"", ( blob_hash, ) ) file_path = os. path. join ( blob_dir, blob_hash ) if : os. unlink ( file_path )",False,os.path.isfile(file_path),os.path.exists(file_path),0.6448550224304199 2143,"def simp_ext ( _, expr ) : if expr. op. startswith ( ""zeroExt_"" ) : arg = expr. args [ 0 ] if : return arg return ExprCompose ( arg, ExprInt ( 0, expr. size - arg. size ) ) if expr. op. startswith ( ""signExt_"" ) : arg = expr. args [ 0 ] add_size = expr. size - arg. size new_expr = ExprCompose ( arg, ExprCond ( arg. 
msb ( ), ExprInt ( size2mask ( add_size ), add_size ), ExprInt ( 0, add_size ) ), ) return new_expr return expr",False,expr.size == arg.size,expr.op.startswith('arg'),0.6592988967895508 2144,"def cmp ( self, other ) : v_is_ptr = not isinstance ( self, CTypesGenericPrimitive ) w_is_ptr = isinstance ( other, CTypesData ) and not isinstance ( other, CTypesGenericPrimitive ) if v_is_ptr and w_is_ptr : return cmpfunc ( self. _convert_to_address ( None ), other. _convert_to_address ( None ) ) elif v_is_ptr or w_is_ptr : return NotImplemented else : if isinstance ( self, CTypesGenericPrimitive ) : self = self. _value if : other = other. _value return cmpfunc ( self, other )",False,"isinstance(other, CTypesGenericPrimitive)","isinstance(other, CTypesData)",0.651056170463562 2145,"def _analytic_forecast ( self, parameters : NDArray, resids : NDArray, backcast : Union [ float, NDArray ], var_bounds : NDArray, start : int, horizon : int, ) -> VarianceForecast : omega, aw, gw, resids2, indicator = self. _common_forecast_components ( parameters, resids, backcast, horizon ) m = self. m resids2 [ : start ] = np. nan aw_rev = aw [ : : - 1 ] gw_rev = gw [ : : - 1 ] for i in range ( horizon ) : resids2 [ :, m + i ] = omega + resids2 [ :, i : ( m + i ) ]. dot ( aw_rev ) if : resids2_ind = resids2 [ :, i : ( m + i ) ] * indicator [ :, i : ( m + i ) ] resids2 [ :, m + i ] += resids2_ind. dot ( gw_rev ) indicator [ :, m + i ] = 0.5 return VarianceForecast ( resids2 [ :, m : ]. copy ( ) )",False,self._asym,indicator is not None,0.6705334186553955 2146,"def write ( self, data ) : self. size -= len ( data ) passon = None if self. size > 0 : self. data. append ( data ) else : if self. size : data, passon = data [ : self. size ], data [ self. size : ] else : passon = b"""" if : self. data. append ( data ) return passon",False,data,self.size > 0 and passon,0.6889404058456421 2147,"def make_attribute ( key : str, value ) -> IAttributeProto : attr = AttributeProto ( ) attr. name = key is_iterable = isinstance ( value, Iterable ) bytes_or_false = _to_bytes_or_false ( value ) if isinstance ( value, float ) : attr. f = value elif isinstance ( value, numbers. Integral ) : attr. i = value elif bytes_or_false : attr. s = bytes_or_false elif isinstance ( value, TensorProto ) : attr. t. CopyFrom ( value ) elif isinstance ( value, GraphProto ) : attr. g. CopyFrom ( value ) elif is_iterable : byte_array = [ _to_bytes_or_false ( v ) for v in value ] if all ( isinstance ( v, float ) for v in value ) : attr. floats. extend ( value ) elif all ( isinstance ( v, numbers. Integral ) for v in value ) : attr. ints. extend ( int ( v ) for v in value ) elif all ( byte_array ) : attr. strings. extend ( byte_array ) elif : attr. tensors. extend ( value ) elif all ( isinstance ( v, GraphProto ) for v in value ) : attr. graphs. extend ( value ) else : raise ValueError ( ""You passed in an iterable attribute but I cannot figure out "" ""its applicable type."" ) ",False,"all((isinstance(v, TensorProto) for v in value))",is_iterable,0.6574083566665649 2148,"def find_package_modules ( self, package, package_dir ) : self. check_package ( package, package_dir ) module_files = glob. glob ( os. path. join ( glob. escape ( package_dir ), ""*.py"" ) ) modules = [ ] setup_script = os. path. abspath ( self. distribution. script_name ) for f in module_files : abs_f = os. path. abspath ( f ) if : module = os. path. splitext ( os. path. basename ( f ) ) [ 0 ] modules. append ( ( package, module, f ) ) else : self. 
debug_print ( ""excluding %s"" % setup_script ) return modules",False,abs_f != setup_script,abs_f == setup_script,0.6582242250442505 2149,"def write_track ( outfile, track ) : data = bytearray ( ) running_status_byte = None for msg in fix_end_of_track ( track ) : if not isinstance ( msg. time, Integral ) : raise ValueError ( ""message time must be int in MIDI file"" ) if : raise ValueError ( ""realtime messages are not allowed in MIDI files"" ) data. extend ( encode_variable_int ( msg. time ) ) if msg. is_meta : data. extend ( msg. bytes ( ) ) running_status_byte = None elif msg. type == ""sysex"" : data. append ( 0xF0 ) data. extend ( encode_variable_int ( len ( msg. data ) + 1 ) ) data. extend ( msg. data ) data. append ( 0xF7 ) running_status_byte = None else : msg_bytes = msg. bytes ( ) status_byte = msg_bytes [ 0 ] if status_byte == running_status_byte : data. extend ( msg_bytes [ 1 : ] ) else : data. extend ( msg_bytes ) if status_byte < 0xF0 : running_status_byte = status_byte else : running_status_byte = None write_chunk ( outfile, b",False,msg.is_realtime,msg.time > 0,0.6588345766067505 2150,"def get_proxy_ip ( identifier ) : """"""Get Proxy IP."""""" proxy_ip = None try : if not identifier : return proxy_ip ips = get_network ( ) if : return proxy_ip device_ip = identifier. split ( "":"", 1 ) [ 0 ] ip_range = device_ip. rsplit ( ""."", 1 ) [ 0 ] guess_ip = ip_range + "".1"" if guess_ip in ips : return guess_ip for ip_addr in ips : to_check = ip_addr. rsplit ( ""."", 1 ) [ 0 ] if to_check == ip_range : return ip_addr except Exception : logger. error ( ""Error getting Proxy IP"" ) return proxy_ip",False,':' not in identifier or not ips,identifier,0.6739150285720825 2151,"def Children ( self ) : """"""Returns a list of all of this object's owned (strong) children."""""" children = [ ] for property, attributes in self. _schema. iteritems ( ) : ( is_list, property_type, is_strong ) = attributes [ 0 : 3 ] if : if not is_list : children. append ( self. _properties [ property ] ) else : children. extend ( self. _properties [ property ] ) return children",False,is_strong and property in self._properties,property in self._properties and is_strong,0.6682225465774536 2152,"def build_paths ( obj ) : paths = [ ] if obj [ ""type"" ] == ""Polygon"" : polygons = [ obj [ ""arcs"" ] ] else : polygons = obj [ ""arcs"" ] for polygon in polygons : for ring in polygon : path = [ ] for i, arc in enumerate ( ring ) : if : if arc >= 0 : path. extend ( arcs [ arc ] ) else : path. extend ( ( arcs [ ~ arc ] ) [ : : - 1 ] ) else : if arc >= 0 : path. extend ( arcs [ arc ] [ 1 : ] ) else : path. extend ( ( arcs [ ~ arc ] [ : - 1 ] ) [ : : - 1 ] ) if len ( path ) > 2 : V = np. zeros ( ( len ( path ), 3 ), dtype = np. float32 ) V [ :, : 2 ] = np. array ( path ) paths. append ( V ) return paths",False,i == 0,i % 2,0.6855310201644897 2153,"def _thd_cleanup_instance ( self ) : container_name = self. getContainerName ( ) instances = self. client. containers ( all = 1, filters = dict ( name = container_name ) ) for instance in instances : if : continue try : self. client. remove_container ( instance [ ""Id"" ], v = True, force = True ) except NotFound : pass except docker. errors. APIError as e : if ""Conflict operation on container"" not in str ( e ) : raise",False,''.join(instance['Names']).strip('/') != container_name,'Id' not in instance,0.6511694192886353 2154,def decodeattrs ( attrs ) : names = [ ] for bit in range ( 16 ) : mask = 1 << bit if attrs & mask : if : names. append ( attrnames [ mask ] ) else : names. 
append ( hex ( mask ) ) return names,True,attrnames.has_key(mask),attrnames.has_key(mask),0.6512173414230347 2155,"def modify_address ( self, name, address, domain ) : if not self. get_entries_by_name ( name, domain ) : raise exception. NotFound infile = open ( self. filename, ""r"" ) outfile = tempfile. NamedTemporaryFile ( ""w"", delete = False ) for line in infile : entry = self. parse_line ( line ) if : outfile. write ( ""%s %s %s\n"" % ( address, self. qualify ( name, domain ), entry [ ""type"" ] ) ) else : outfile. write ( line ) infile. close ( ) outfile. close ( ) shutil. move ( outfile. name, self. filename )",False,"entry and entry['name'].lower() == self.qualify(name, domain).lower()",entry,0.64753657579422 2156,"def pixbufrenderer ( self, column, crp, model, it ) : tok = model. get_value ( it, 0 ) if tok. type == ""class"" : icon = ""class"" else : if : icon = ""method_priv"" elif tok. visibility == ""protected"" : icon = ""method_prot"" else : icon = ""method"" crp. set_property ( ""pixbuf"", imagelibrary. pixbufs [ icon ] )",True,tok.visibility == 'private',tok.visibility == 'private',0.656152606010437 2157,"def host_selection_algorithm ( self, request_spec, all_hosts, selected_hosts, unique ) : size = request_spec [ ""size"" ] drive_type = request_spec [ ""drive_type"" ] best_host = None best_qoscap = None best_cap = None max_avail = 0 for ( host, capabilities ) in all_hosts : for qosgrp, qos_values in capabilities. iteritems ( ) : if : if size == 0 : available = qos_values [ ""FullDrive"" ] [ ""NumFreeDrives"" ] else : available = qos_values [ ""AvailableCapacity"" ] if available > max_avail and self. _allowed_to_use_host ( host, selected_hosts, unique ) : max_avail = available best_host = host best_qoscap = qos_values best_cap = capabilities break if best_host : self. _add_hostcap_to_list ( selected_hosts, best_host, best_cap ) type_str = ""drives"" if size == 0 else ""bytes"" LOG. debug ( _ ( ""\t MostAvailCap: Best host: %(best_host)s",False,"self._qosgrp_match(drive_type, qos_values)",drive_type == 'host',0.6497954726219177 2158,"def _pick_error ( self, log_interpretation, step_type ) : """"""Pick probable cause of failure (only call this if job fails)."""""" logs_needed = self. _logs_needed_to_pick_error ( step_type ) if self. _read_logs ( ) and not all ( log_type in log_interpretation for log_type in logs_needed ) : log. info ( ""Scanning logs for probable cause of failure..."" ) if ""step"" in logs_needed : self. _interpret_step_logs ( log_interpretation, step_type ) if ""history"" in logs_needed : self. _interpret_history_log ( log_interpretation ) if : error_attempt_ids = _pick_error_attempt_ids ( log_interpretation ) self. _interpret_task_logs ( log_interpretation, step_type, error_attempt_ids ) return _pick_error ( log_interpretation )",False,'task' in logs_needed,'tasks' in logs_needed,0.65485018491745 2159,"def _post_sub_clone_resize ( self, path ) : """"""Try post sub clone resize in a transactional manner."""""" st_tm_mv, st_nw_mv, st_del_old = None, None, None seg = path. split ( ""/"" ) LOG. info ( ""Post clone resize LUN %s"", seg [ - 1 ] ) new_lun = ""new-%s"" % ( seg [ - 1 ] ) tmp_lun = ""tmp-%s"" % ( seg [ - 1 ] ) tmp_path = ""/vol/%s/%s"" % ( seg [ 2 ], tmp_lun ) new_path = ""/vol/%s/%s"" % ( seg [ 2 ], new_lun ) try : st_tm_mv = self. zapi_client. move_lun ( path, tmp_path ) st_nw_mv = self. zapi_client. move_lun ( new_path, path ) st_del_old = self. zapi_client. 
destroy_lun ( tmp_path ) except Exception as e : if : msg = _ ( ""Failure staging LUN %s to tmp."" ) raise exception. VolumeBackendAPIException ( data = msg % ( seg [ - 1 ] ) ) else : if st_nw_mv is None : self. zapi_client. move_lun ( tmp_path, path ) msg = _ ( ""Failure moving new cloned LUN to %s."" ) raise exception. VolumeBackendAPIException ( data = msg % ( seg [ - 1 ] ) ) elif st_del_old is None : LOG. error ( ""Failure deleting staged tmp LUN %s.""",False,st_tm_mv is None,e.args[0] == e.args[0],0.6566102504730225 2160,"def parse_qresult ( self ) : """"""Parse a HMMER2 query block."""""" while self. read_next ( ) : if not self. line. startswith ( ""Query"" ) : return _, id_ = self. parse_key_value ( ) self. qresult = QueryResult ( id = id_ ) description = None while self. read_next ( ) and not self. line. startswith ( ""Scores"" ) : if : self. qresult. accession = self. parse_key_value ( ) [ 1 ] if self. line. startswith ( ""Description"" ) : description = self. parse_key_value ( ) [ 1 ] hit_placeholders = self. parse_hits ( ) if len ( hit_placeholders ) > 0 : self. parse_hsps ( hit_placeholders ) self. parse_hsp_alignments ( ) while not self. line. startswith ( ""Query"" ) : self. read_next ( ) if not self. line : break self. buf. append ( self. line ) if description is not None : self. qresult. description = description yield self. qresult",False,self.line.startswith('Accession'),"self.line.startswith( ""A)",0.6513302326202393 2161,"def test_exist_ok_s_isgid_directory ( self ) : path = os. path. join ( support. TESTFN, ""dir1"" ) S_ISGID = stat. S_ISGID mode = 0o777 old_mask = os. umask ( 0o022 ) try : existing_testfn_mode = stat. S_IMODE ( os. lstat ( support. TESTFN ). st_mode ) try : os. chmod ( support. TESTFN, existing_testfn_mode | S_ISGID ) except PermissionError : raise unittest. SkipTest ( ""Cannot set S_ISGID for dir."" ) if : raise unittest. SkipTest ( ""No support for S_ISGID dir mode."" ) os. makedirs ( path, mode | S_ISGID ) os. makedirs ( path, mode, exist_ok = True ) os. chmod ( path, stat. S_IMODE ( os. lstat ( path ). st_mode ) & ~ S_ISGID ) os. makedirs ( path, mode | S_ISGID, exist_ok = True ) finally : os. umask ( old_mask )",False,os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID,S_ISGID is None,0.6544977426528931 2162,"def _perform_check ( self ) : if not self. _enabled : return with self. _check_mutex : self. _logger. debug ( ""Checking against {}:{} if we are online..."". format ( self. _host, self. _port ) ) old_value = self. _online for _ in range ( 3 ) : connection_working = server_reachable ( self. _host, port = self. _port ) if self. _name : if : self. _logger. debug ( ""Checking if we can resolve {}..."". format ( self. _name ) ) resolution_working = len ( resolve_host ( self. _name ) ) > 0 else : resolution_working = False else : resolution_working = True if not ( connection_working and resolution_working ) : time. sleep ( 1.0 ) continue self. _connection_working = connection_working self. _resolution_working = resolution_working if old_value!= self. _online : self. _trigger_change ( old_value, self. _online )",False,connection_working,self._connection_working and port and (self._resolution_working is False),0.6717939376831055 2163,"def expected_forward_with_reduce ( self, x_data, t_data, class_weight ) : loss_expect = 0.0 count = 0 x = numpy. rollaxis ( x_data, 1, x_data. ndim ). reshape ( ( t_data. size, x_data. shape [ 1 ] ) ) t = t_data. ravel ( ) for xi, ti in six. moves. zip ( x, t ) : if ti == - 1 : continue log_z = numpy. ufunc. 
reduce ( numpy. logaddexp, xi ) if class_weight is None : loss_expect -= ( xi - log_z ) [ ti ] else : loss_expect -= ( xi - log_z ) [ ti ] * class_weight [ ti ] count += 1 if self. normalize : if count == 0 : loss_expect = 0.0 else : loss_expect /= count else : if : loss_expect = 0.0 else : loss_expect /= len ( t_data ) return numpy. asarray ( loss_expect, dtype = x. dtype )",False,len(t_data) == 0,count == 0,0.6535935401916504 2164,"def _create_3par_vlun ( self, volume, hostname, nsp, lun_id = None ) : try : location = None auto = True if lun_id is not None : auto = False if : location = self. client. createVLUN ( volume, hostname = hostname, auto = auto, lun = lun_id ) else : port = self. build_portPos ( nsp ) location = self. client. createVLUN ( volume, hostname = hostname, auto = auto, portPos = port, lun = lun_id ) vlun_info = None if location : vlun = location. split ( "","" ) vlun_info = { ""volume_name"" : vlun [ 0 ], ""lun_id"" : int ( vlun [ 1 ] ), ""host_name"" : vlun [ 2 ], } if len ( vlun ) > 3 : vlun_info [ ""nsp"" ] = vlun [ 3 ] return vlun_info except hpeexceptions. HTTPBadRequest as e : if ""must be in the same domain"" in e. get_description ( ) : LOG. error ( e. get_description ( ) ) raise exception. Invalid3PARDomain",False,nsp is None,volume,0.6635576486587524 2165,"def as_dict ( self ) -> Dict [ str, Any ] : od : Dict [ str, Any ] = { ""_errors"" : self. errors, ""_notices"" : self. notices, ""_fast_validation_disagreements"" : self. fast_validation_disagreements, ""_sources"" : { }, } if self. helm_chart : od [ ""_helm_chart"" ] = self. helm_chart for k, v in self. sources. items ( ) : sd = dict ( v ) if : sd [ ""_errors"" ] = [ x. as_dict ( ) for x in v. _errors ] od [ ""_sources"" ] [ k ] = sd for kind, configs in self. config. items ( ) : od [ kind ] = { } for rkey, config in configs. items ( ) : od [ kind ] [ rkey ] = config. as_dict ( ) return od",False,'_errors' in v,self._errors,0.667580246925354 2166,"def expire_connections ( now, mux ) : remove = [ ] for chan, timeout in dnsreqs. items ( ) : if : debug3 ( ""expiring dnsreqs channel=%d\n"" % chan ) remove. append ( chan ) del mux. channels [ chan ] for chan in remove : del dnsreqs [ chan ] debug3 ( ""Remaining DNS requests: %d\n"" % len ( dnsreqs ) ) remove = [ ] for peer, ( chan, timeout ) in udp_by_src. items ( ) : if : debug3 ( ""expiring UDP channel channel=%d peer=%r\n"" % ( chan, peer ) ) mux. send ( chan, ssnet. CMD_UDP_CLOSE, b"""" ) remove. append ( peer ) del mux. channels [ chan ] for peer in remove : del udp_by_src [ peer ] debug3 ( ""Remaining UDP channels: %d\n"" % len ( udp_by_src ) )",False,timeout < now,timeout > 0,0.6669130325317383 2167,"def format ( self, record ) : """"""Customize the message format based on the log level."""""" if isinstance ( self. fmt, dict ) : self. _fmt = self. fmt [ record. levelname ] if sys. version_info > ( 3, 2 ) : if : raise ValueError ( ""Style must be one of: %s"" % "","". join ( logging. _STYLES. keys ( ) ) ) self. _style = logging. _STYLES [ self. style ] [ 0 ] ( self. _fmt ) if sys. version_info > ( 2, 7 ) : message = super ( LevelFormatter, self ). format ( record ) else : message = ColoredFormatter. format ( self, record ) return message",True,self.style not in logging._STYLES,self.style not in logging._STYLES,0.6563971042633057 2168,"def remove_from_index ( self, type, tag, id ) : tag_to_object = ""tag-{}s"". format ( type ) object_to_tag = ""{}-tags"". format ( type ) if tag in self. indices [ tag_to_object ] : if id in self. 
indices [ tag_to_object ] [ tag ] : self. indices [ tag_to_object ] [ tag ]. remove ( id ) if len ( self. indices [ tag_to_object ] [ tag ] ) == 0 : del self. indices [ tag_to_object ] [ tag ] if id in self. indices [ object_to_tag ] : if : self. indices [ object_to_tag ] [ id ]. remove ( tag ) if len ( self. indices [ object_to_tag ] [ id ] ) == 0 : del self. indices [ object_to_tag ] [ id ]",False,tag in self.indices[object_to_tag][id],tag in self.indices[object_to_tag],0.6559015512466431 2169,"def process_request ( self, request ) : for exemption in self. exemptions : if : return None if not request. user. is_authenticated ( ) : path = urlquote ( request. get_full_path ( ) ) tup = ( self. login_url, self. redirect_field_name, path ) return HttpResponseRedirect ( ""%s?%s=%s"" % tup )",True,"re.match(exemption, request.path)","re.match(exemption, request.path)",0.6481633186340332 2170,"def check_apns_certificate ( ss ) : mode = ""start"" for s in ss. split ( ""\n"" ) : if mode == ""start"" : if ""BEGIN RSA PRIVATE KEY"" in s or ""BEGIN PRIVATE KEY"" in s : mode = ""key"" elif mode == ""key"" : if : mode = ""end"" break elif s. startswith ( ""Proc-Type"" ) and ""ENCRYPTED"" in s : raise ImproperlyConfigured ( ""Encrypted APNS private keys are not supported"" ) if mode!= ""end"" : raise ImproperlyConfigured ( ""The APNS certificate doesn't contain a private key"" )",False,'END RSA PRIVATE KEY' in s or 'END PRIVATE KEY' in s,s.startswith('APNS private key'),0.6740132570266724 2171,"def _extract_video_id ( data, lesson_id ) : if not data : return groups = try_get ( data, lambda x : x [ ""groups"" ], list ) or [ ] if not groups : return for group in groups : if not isinstance ( group, dict ) : continue contents = try_get ( data, lambda x : x [ ""contents"" ], list ) or [ ] for content in contents : if not isinstance ( content, dict ) : continue ordinal = int_or_none ( content. get ( ""ordinal"" ) ) if ordinal!= lesson_id : continue video_id = content. get ( ""identifier"" ) if : return compat_str ( video_id )",True,video_id,video_id,0.6645613312721252 2172,"def throttle_status ( server = None ) : result = AmonStruct ( ) result. allow = False last_check = server. get ( ""last_check"" ) server_check_period = server. get ( ""check_every"", 60 ) if last_check : period_since_last_check = unix_utc_now ( ) - last_check period_since_last_check = period_since_last_check + 15 if : result. allow = True else : result. allow = True return result",False,period_since_last_check >= server_check_period,period_since_last_check > server_check_period,0.646797776222229 2173,"def parse_body ( f, headers ) : """"""Return HTTP body parsed from a file object, given HTTP header dict."""""" if headers. get ( ""transfer-encoding"", """" ). lower ( ) == ""chunked"" : l_ = [ ] found_end = False while 1 : try : sz = f. readline ( ). split ( None, 1 ) [ 0 ] except IndexError : raise dpkt. UnpackError ( ""missing chunk size"" ) n = int ( sz, 16 ) if : found_end = True buf = f. read ( n ) if f. readline ( ). strip ( ) : break if n and len ( buf ) == n : l_. append ( buf ) else : break if not found_end : raise dpkt. NeedData ( ""premature end of chunked body"" ) body = b"""". join ( l_ ) elif ""content-length"" in headers : n = int ( headers [ ""content-length"" ] ) body = f. read ( n ) if len ( body )!= n : raise dpkt. 
NeedData ( ""short body (missing %d bytes)"" % ( n - len ( body ) ) ) elif ""content-type"" in headers : ",False,n == 0,n > 0,0.673944354057312 2174,"def search ( self, string, pos = 0, endpos = None ) : if not endpos is None : string = string [ : endpos ] if pos == 0 : groups = self. search_code. Exec ( string ) if : return None _groups = [ ] for i in list ( groups ) : if JS ( ""@{{i}} === null"" ) : _groups. append ( None ) else : _groups. append ( str ( i ) ) groups = _groups elif pos >= len ( string ) : return None else : groups = self. search_code. Exec ( string [ pos : ] ) if : return None _groups = [ ] for i in list ( groups ) : if JS ( ""@{{i}} === null"" ) : _groups. append ( None ) else : _groups. append ( str ( i ) ) groups = _groups return SRE_Match ( self, string, pos, endpos, groups [ 2 : ], pos + len ( groups [ 1 ] ), None, None )",False,JS('@{{groups}} === null'),groups == [],0.6661504507064819 2175,"def find_rulers ( self ) : on_first_line = False on_message_body = False subject_near_limit = ( len ( self. view. substr ( self. view. line ( sublime. Region ( 0 ) ) ). rstrip ( ) ) >= 40 ) for region in self. view. sel ( ) : first_line = self. view. rowcol ( region. begin ( ) ) [ 0 ] last_line = self. view. rowcol ( region. end ( ) ) [ 0 ] if first_line == 0 and subject_near_limit : on_first_line = True if : if first_line in range ( 2, self. first_comment_line ) or last_line in range ( 2, self. first_comment_line ) : on_message_body = True else : if first_line >= 2 or last_line >= 2 : on_message_body = True new_rulers = [ ] if on_first_line : new_rulers. append ( self. first_line_limit ) if on_message_body : new_rulers. append ( self. body_line_limit ) return new_rulers",False,self.first_comment_line,self.first_comment_line and (not on_message_body),0.6497775316238403 2176,"def main ( ) : """"""Main processing."""""" from optparse import OptionParser parser = OptionParser ( usage = ""%%prog [options]\nVersion: %s"" % VERSION ) parser. add_option ( ""-f"", ""--force"", action = ""store_true"", dest = ""force"", default = False, help = ( ""Ignore session expiration. "" ""Force expiry based on -x option or auth.settings.expiration."" ), ) parser. add_option ( ""-o"", ""--once"", action = ""store_true"", dest = ""once"", default = False, help = ""Delete sessions, then exit."", ) parser. add_option ( ""-s"", ""--sleep"", dest = ""sleep"", default = SLEEP_MINUTES * 60, type = ""int"", help = ""Number of seconds to sleep between executions. Default 300."", ) parser. add_option ( ""-v"", ""--verbose"", default = 0, action = ""count"", help = ""print verbose output, a second -v increases verbosity"", ) parser. add_option ( ""-x"", ""--expiration"", dest = ""expiration"", default = None, type = ""int"", help = ""Expiration value for sessions without expiration (in seconds)"", <",False,options.once,default is False,0.669709324836731 2177,"def identifier_to_cached_target ( identifier, hash_func, namespace = None ) : if "":"" in identifier : image_name, version = identifier. rsplit ( "":"", 1 ) else : image_name = identifier version = None if not version or version == ""latest"" : version = None image = None prefix = """" if namespace is not None : prefix = ""quay.io/%s/"" % namespace if image_name. startswith ( prefix + ""mulled-v1-"" ) : if hash_func == ""v2"" : return None hash = image_name build = None if version and version. isdigit ( ) : build = version image = CachedV1MulledImageMultiTarget ( hash, build, identifier ) elif image_name. 
startswith ( prefix + ""mulled-v2-"" ) : if hash_func == ""v1"" : return None version_hash = None build = None if version and ""-"" in version : version_hash, build = version. rsplit ( ""-"", 1 ) elif version. isdigit ( ) : version_hash, build = None, version elif : log. debug ( ""Unparsable mulled image tag encountered [%s]"" % version ) image = CachedV2MulledImageMultiTarget ( image_name, version_hash, build, identifier ) else : build = None if version and ""--"" in version : version, build = split_",False,version,version and version_hash,0.6854219436645508 2178,"def test_interestingness_1_1_0 ( global_var ) : df = pytest. car_df df [ ""Year"" ] = pd. to_datetime ( df [ ""Year"" ], format = ""%Y"" ) df. set_intent ( [ lux. Clause ( attribute = ""Horsepower"" ), lux. Clause ( attribute = ""Year"" ) ] ) df. _repr_html_ ( ) assert interestingness ( df. recommendation [ ""Enhance"" ] [ 0 ], df )!= None assert interestingness ( df. recommendation [ ""Filter"" ] [ 0 ], df )!= None rank1 = - 1 rank2 = - 1 rank3 = - 1 for f in range ( 0, len ( df. recommendation [ ""Filter"" ] ) ) : vis = df. recommendation [ ""Filter"" ] [ f ] if len ( vis. get_attr_by_attr_name ( ""Cylinders"" ) ) > 0 : if int ( vis. _inferred_intent [ 2 ]. value ) == 6 : rank1 = f if : rank2 = f if len ( vis. get_attr_by_attr_name ( ""Origin"" ) ) > 0 : if str ( vis. _inferred_intent [ 2 ]. value ) == ""Europe"" : rank3 = f assert rank1 < rank2 and rank1 < rank3 and rank2 < rank3 assert interestingness ( df. recommendation [ ""Filter"" ] [ 0 ], df )!= None df. clear_intent ( )",False,int(vis._inferred_intent[2].value) == 8,"len( vis.get_attr_by_attr_name( ""Origin"") > 0",0.6497321128845215 2179,"def __init__ ( self, * args, ** kwargs ) : assert len ( kwargs ) in ( 0, 1, 2 ), ( type ( self ). __name__ + ': expected 0 to 2 extra parameters (""ctype"", ""cname"").' ) ctype = kwargs. pop ( ""ctype"", ""int"" ) cname = kwargs. pop ( ""cname"", None ) for arg_rank, arg in enumerate ( args ) : if isinstance ( arg, ( list, tuple ) ) : if len ( arg )!= 2 : raise TypeError ( ""%s: when using a tuple to define a constant, your tuple should contain 2 values: "" ""constant name followed by constant alias."" % type ( self ). __name__ ) constant_name, constant_alias = arg if not isinstance ( constant_alias, str ) : raise TypeError ( '%s: constant alias should be a string, got ""%s"".' % ( type ( self ). __name__, constant_alias ) ) constant_value = ( constant_alias, arg_rank ) else : constant_name = arg constant_value = arg_rank if not isinstance ( constant_name, str ) : raise TypeError ( '%s: constant name should be a string, got ""%s"".' % ( type ( self ). __name__, constant_name",False,constant_name in kwargs,self.constant_name is None,0.6623759269714355 2180,"def setup_data ( self, path ) : print ( ""loading: "" + path ) tree = ET. parse ( path ) root = tree. getroot ( ) for child in root : asks_for = child. attrib [ ""asks-for"" ] answer = child. attrib [ ""most-plausible-alternative"" ] premise = child [ 0 ]. text alternative_one = child [ 1 ]. text alternative_two = child [ 2 ]. text if asks_for == ""cause"" : premise += "" "" + COPA_CAUSE_SUFFIX else : premise += "" "" + COPA_RESULT_SUFFIX if : answer = [ alternative_one ] else : answer = [ alternative_two ] answer_options = [ alternative_one, alternative_two ] yield ( premise, answer, None, answer_options ), True",False,answer == '1',asks_for == 'e',0.6719765067100525 2181,"def run ( self, parent, blocks ) : """"""Find, set, and remove footnote definitions."""""" block = blocks. 
pop ( 0 ) m = self. RE. search ( block ) if m : id = m. group ( 1 ) fn_blocks = [ m. group ( 2 ) ] therest = block [ m. end ( ) : ]. lstrip ( ""\n"" ) m2 = self. RE. search ( therest ) if m2 : before = therest [ : m2. start ( ) ]. rstrip ( ""\n"" ) fn_blocks [ 0 ] = ""\n"". join ( [ fn_blocks [ 0 ], self. detab ( before ) ] ). lstrip ( ""\n"" ) blocks. insert ( 0, therest [ m2. start ( ) : ] ) else : fn_blocks [ 0 ] = ""\n"". join ( [ fn_blocks [ 0 ], self. detab ( therest ) ] ). strip ( ""\n"" ) fn_blocks. extend ( self. detectTabbed ( blocks ) ) footnote = ""\n\n"". join ( fn_blocks ) self. footnotes. setFootnote ( id, footnote. rstrip ( ) ) if : blocks. insert ( 0, block [ : m. start ( ) ]. rstrip ( ""\n"" ) ) return True blocks. insert ( 0, block ) return False",False,block[:m.start()].strip(),len(block) > 0,0.6504757404327393 2182,"def needs_appliance ( test_item ) : import json test_item = _mark_test ( ""appliance"", test_item ) if os. getenv ( ""TOIL_SKIP_DOCKER"", """" ). lower ( ) == ""true"" : return unittest. skip ( ""Skipping docker test."" ) ( test_item ) if which ( ""docker"" ) : image = applianceSelf ( ) try : images = subprocess. check_output ( [ ""docker"", ""inspect"", image ] ). decode ( ""utf-8"" ) except subprocess. CalledProcessError : images = [ ] else : images = { i [ ""Id"" ] for i in json. loads ( images ) if image in i [ ""RepoTags"" ] } if : return unittest. skip ( ""Cannot find appliance image %s. Use'make test' target to "" ""automatically build appliance, or just run'make docker' "" ""prior to running this test."" % image ) ( test_item ) elif len ( images ) == 1 : return test_item else : assert False, ""Expected `docker inspect` to return zero or one image."" else : return unittest. skip ( ""Install Docker to include this test."" ) ( test_item )",True,len(images) == 0,len(images) == 0,0.6541647911071777 2183,"def pytest_runtestloop ( self, session ) : self. log ( ""entering main loop"" ) torun = [ ] while 1 : try : name, kwargs = self. channel. receive ( ) except EOFError : return True self. log ( ""received command"", name, kwargs ) if name == ""runtests"" : torun. extend ( kwargs [ ""indices"" ] ) elif name == ""runtests_all"" : torun. extend ( range ( len ( session. items ) ) ) self. log ( ""items to run:"", torun ) while len ( torun ) >= 2 : self. run_one_test ( torun ) if : if torun : self. run_one_test ( torun ) break return True",False,name == 'shutdown',session,0.6624027490615845 2184,"def _generate_measurement_ids ( self ) -> Tuple [ Dict [ str, str ], Dict [ str, Optional [ str ] ] ] : meas_key_id_map = { } meas_comments = { } meas_i = 0 for meas in self. measurements : key = protocols. measurement_key ( meas ) if : continue meas_id = ""m_{}"". format ( key ) if self. is_valid_qasm_id ( meas_id ) : meas_comments [ key ] = None else : meas_id = ""m{}"". format ( meas_i ) meas_i += 1 meas_comments [ key ] = "" "". join ( key. split ( ""\n"" ) ) meas_key_id_map [ key ] = meas_id return meas_key_id_map, meas_comments",True,key in meas_key_id_map,key in meas_key_id_map,0.6538110971450806 2185,"def ns_provides ( self ) : ans = [ ] logTabWidget = self. get_top_splitter ( ). find_child ( QtWidgets. QWidget, ""logTabWidget"" ) for n in range ( logTabWidget. count ( ) ) : text = str ( logTabWidget. tabText ( n ) ) if : continue if text == ""Log"" : continue ans. append ( ( text, ""_leo_tab:"" + text ) ) ans. append ( ( ""Tree"", ""_leo_pane:outlineFrame"" ) ) ans. append ( ( ""Body"", ""_leo_pane:bodyFrame"" ) ) ans. 
append ( ( ""Tab pane"", ""_leo_pane:logFrame"" ) ) return ans",False,"text in ('Body', 'Tree')","text == ""None'",0.6501554846763611 2186,"def apply_figure ( self, figure ) : super ( legend_text_legend, self ). apply_figure ( figure ) properties = self. properties. copy ( ) with suppress ( KeyError ) : del properties [ ""margin"" ] with suppress ( KeyError ) : texts = figure. _themeable [ ""legend_text_legend"" ] for text in texts : if : text = text. _text text. set ( ** properties )",False,"not hasattr(text, '_x')",properties,0.6529051065444946 2187,"def from_doh_simple ( simple, add_qr = False ) : message = dns. message. QueryMessage ( ) flags = 0 for f in dns. flags. Flag : if simple. get ( f. name, False ) : flags |= f if add_qr : flags |= dns. flags. QR message. flags = flags message. set_rcode ( simple. get ( ""Status"", 0 ) ) for i, sn in enumerate ( dns. message. MessageSection ) : rr_list = simple. get ( sn. name. title ( ), [ ] ) for rr in rr_list : rdtype = dns. rdatatype. RdataType ( rr [ ""type"" ] ) rrs = message. find_rrset ( i, dns. name. from_text ( rr [ ""name"" ] ), dns. rdataclass. IN, rdtype, create = True, ) if : rrs. add ( dns. rdata. from_text ( dns. rdataclass. IN, rdtype, rr [ ""data"" ] ), rr. get ( ""TTL"", 0 ), ) return message",False,'data' in rr,rrs,0.6665961742401123 2188,"def main ( args = None ) : if args is None : args = sys. argv [ 1 : ] import argparse parser = ConfigBackedParser ( ""hg-nbdiffweb"", description = __doc__, formatter_class = argparse. RawDescriptionHelpFormatter, ) nbdifftool. build_arg_parser ( parser ) opts = parser. parse_args ( args ) if not os. path. isfile ( opts. local ) or not os. path. isfile ( opts. remote ) : local, remote = opts. local, opts. remote for a, b in diff_directories ( local, remote ) : opts. local, opts. remote = a, b ret = nbdifftool. main_parsed ( opts ) if : return ret return ret else : return nbdifftool. main_parsed ( opts )",False,ret != 0,ret is not None,0.6697608232498169 2189,"def version_dict ( self ) : self. _version_dict = defaultdict ( list ) for finder_name, finder in self. __finders. items ( ) : for version, entry in finder. versions. items ( ) : if : if entry not in self. _version_dict [ version ] : self. _version_dict [ version ]. append ( entry ) continue if entry not in self. _version_dict [ version ] and entry. is_python : self. _version_dict [ version ]. append ( entry ) for p, entry in self. python_executables. items ( ) : version = entry. as_python if not version : continue if not isinstance ( version, tuple ) : version = version. version_tuple if version and entry not in self. _version_dict [ version ] : self. _version_dict [ version ]. append ( entry ) return self. _version_dict",False,finder_name == 'windows',finder_name == version,0.6603221297264099 2190,"def serialize_bytes ( data ) : """"""Write bytes by using Telegram guidelines"""""" if not isinstance ( data, bytes ) : if isinstance ( data, str ) : data = data. encode ( ""utf-8"" ) else : raise TypeError ( ""bytes or str expected, not {}"". format ( type ( data ) ) ) r = [ ] if len ( data ) < 254 : padding = ( len ( data ) + 1 ) % 4 if : padding = 4 - padding r. append ( bytes ( [ len ( data ) ] ) ) r. append ( data ) else : padding = len ( data ) % 4 if : padding = 4 - padding r. append ( bytes ( [ 254, len ( data ) % 256, ( len ( data ) >> 8 ) % 256, ( len ( data ) >> 16 ) % 256 ] ) ) r. append ( data ) r. append ( bytes ( padding ) ) return b"""". 
join ( r )",False,padding != 0,padding > 0,0.6864440441131592 2191,"def check_strings ( self ) : """"""Check that all strings have been consumed."""""" for i, aList in enumerate ( self. string_tokens ) : if : g. trace ( ""warning: line %s. unused strings"" % i ) for z in aList : print ( self. dump_token ( z ) )",False,aList,i % self.debug_interval == 0,0.6724624037742615 2192,"def _serve ( self ) : self. _conn = self. manager. request ( REQUEST_DNS_LISTENER, self. domain ) conn = MsgPackMessages ( self. _conn ) while self. active : request = conn. recv ( ) if not request : logger. warning ( ""DNS: Recieved empty request. Shutdown"" ) self. stop ( ) break now = time. time ( ) response = self. handler. process ( request ) if not response : response = [ ] used = time. time ( ) - now if : logger. warning ( ""DNS: Slow processing speed (%s)s"", used ) conn. send ( response )",False,used > 1,used > 0,0.6664760112762451 2193,"def forward ( self, hidden_states, attention_mask = None, head_mask = None, encoder_hidden_states = None, encoder_attention_mask = None, ) : all_hidden_states = ( ) all_attentions = ( ) for i, layer_module in enumerate ( self. layer ) : if self. output_hidden_states : all_hidden_states = all_hidden_states + ( hidden_states, ) layer_outputs = layer_module ( hidden_states, attention_mask, head_mask [ i ], encoder_hidden_states, encoder_attention_mask, ) hidden_states = layer_outputs [ 0 ] if : all_attentions = all_attentions + ( layer_outputs [ 1 ], ) if self. output_hidden_states : all_hidden_states = all_hidden_states + ( hidden_states, ) outputs = ( hidden_states, ) if self. output_hidden_states : outputs = outputs + ( all_hidden_states, ) if : outputs = outputs + ( all_attentions, ) return outputs",False,self.output_attentions,self.attention_mask,0.6525454521179199 2194,"def _find_remote_inputs ( metadata ) : out = [ ] for fr_key in metadata. keys ( ) : if isinstance ( fr_key, ( list, tuple ) ) : frs = fr_key else : frs = [ fr_key ] for fr in frs : if : out. append ( fr ) return out",False,objectstore.is_remote(fr),not fr in out,0.6518054008483887 2195,"def __get ( self, base_call, key, * args, ** kwargs ) : if key == ""title"" and ""title"" not in self and ""organization"" in self : return base_call ( ""organization"", * args, ** kwargs ) if ( not self. multisong and key in ( ""title"", ""artist"" ) and ""title"" in self and ""artist"" not in self ) : title = base_call ( ""title"" ). split ( "" - "", 1 ) if : return ( key == ""title"" and title [ - 1 ] ) or title [ 0 ] if ( key in ( ""artist"", TAG_TO_SORT [ ""artist"" ] ) and not base_call ( key, * args ) and ""website"" in self ) : return base_call ( ""website"", * args ) if key == ""~format"" and ""audio-codec"" in self : return ""%s (%s)"" % ( self. format, base_call ( ""audio-codec"", * args, ** kwargs ) ) return base_call ( key, * args, ** kwargs )",False,len(title) > 1,len(title) > 0,0.6565163731575012 2196,"def manage_comment ( self, webhook_data ) : if webhook_data is None : return { ""message"" : ""Nothing for me"" } message = re. search ( ""@{} (.*)"". format ( self. robot_name ), webhook_data. text, re. I ) response = None if message : command = message. group ( 1 ) split_text = command. lower ( ). split ( ) orderstr = split_text. pop ( 0 ) if orderstr == ""help"" : response = self. help_order ( ) elif : try : webhook_data. comment. create_reaction ( ""+1"" ) except GithubException : pass with exception_to_github ( webhook_data. issue ) : response = getattr ( self. handler, orderstr ) ( webhook_data. 
issue, * split_text ) else : response = ""I didn't understand your command:\n```bash\n{}\n```\nin this context, sorry :(\n"". format ( command ) response += self. help_order ( ) if response : webhook_data. issue. create_comment ( response ) return { ""message"" : response } return { ""message"" : ""Nothing for me or exception"" }",False,orderstr in self.orders(),response,0.6614961624145508 2197,"def _setup_output_metrics ( self, engine : Engine ) -> Dict [ str, Any ] : """"""Helper method to setup metrics to log"""""" metrics = { } if self. metric_names is not None : if : metrics = engine. state. metrics else : for name in self. metric_names : if name not in engine. state. metrics : warnings. warn ( f""Provided metric name '{name}' is missing "" f""in engine's state metrics: {list(engine.state.metrics.keys())}"" ) continue metrics [ name ] = engine. state. metrics [ name ] if self. output_transform is not None : output_dict = self. output_transform ( engine. state. output ) if not isinstance ( output_dict, dict ) : output_dict = { ""output"" : output_dict } metrics. update ( { name : value for name, value in output_dict. items ( ) } ) return metrics",False,"isinstance(self.metric_names, str) and self.metric_names == 'all'",engine.state.metrics,0.653809666633606 2198,"def _infinite_indices ( self ) : np. random. seed ( self. _seed ) while True : avl_pids = copy. deepcopy ( self. pids ) batch_idxs_dict = { } batch_indices = [ ] while len ( avl_pids ) >= self. num_pids_per_batch : selected_pids = np. random. choice ( avl_pids, self. num_pids_per_batch, replace = False ). tolist ( ) for pid in selected_pids : if pid not in batch_idxs_dict : idxs = copy. deepcopy ( self. pid_index [ pid ] ) if len ( idxs ) < self. num_instances : idxs = np. random. choice ( idxs, size = self. num_instances, replace = True ). tolist ( ) np. random. shuffle ( idxs ) batch_idxs_dict [ pid ] = idxs avl_idxs = batch_idxs_dict [ pid ] for _ in range ( self. num_instances ) : batch_indices. append ( avl_idxs. pop ( 0 ) ) if : <",False,len(avl_idxs) < self.num_instances,len(batch_indices) > 0,0.6497932076454163 2199,"def extract_package ( package ) : if VERSION >= 3006 : package_location = os. path. join ( sublime. installed_packages_path ( ), package + "".sublime-package"" ) if : package_location = os. path. join ( os. path. dirname ( sublime. executable_path ( ) ), ""Packages"", package + "".sublime-package"", ) if : package_location = None if package_location : with zipfile. ZipFile ( package_location ) as zip_file : extract_location = os. path. join ( sublime. packages_path ( ), package ) zip_file. extractall ( extract_location )",False,not os.path.exists(package_location),os.path.exists(package_location),0.6460158824920654 2200,"def __delitem__ ( self, key ) : ""Deleting tag[key] deletes all 'key' attributes for the tag."" for item in self. attrs : if : self. attrs. remove ( item ) self. _getAttrMap ( ) if self. attrMap. has_key ( key ) : del self. attrMap [ key ]",False,item[0] == key,item in self.attrs,0.662900447845459 2201,"def parse_message ( message ) : message = gtp. pre_engine ( message ). strip ( ) first, rest = ( message. split ( "" "", 1 ) + [ None ] ) [ : 2 ] if first. isdigit ( ) : message_id = int ( first ) if : command, arguments = ( rest. split ( "" "", 1 ) + [ None ] ) [ : 2 ] else : command, arguments = None, None else : message_id = None command, arguments = first, rest command = command. 
replace ( ""-"", ""_"" ) return message_id, command, arguments",False,rest is not None,rest.digit(),0.6679273247718811 2202,"def __exit__ ( self, type, value, traceback ) : try : if : return self. exception_handler ( type, value, traceback ) finally : final_contexts = _state. contexts _state. contexts = self. old_contexts if final_contexts is not self. new_contexts : raise StackContextInconsistentError ( ""stack_context inconsistency (may be caused by yield "" 'within a ""with StackContext"" block)' ) self. new_contexts = None",False,type is not None,self.exception_handler is not None,0.6561484336853027 2203,"def test_15_verify_jail_started ( ) : global freeze, freeze_msg if freeze is True : pytest. skip ( freeze_msg ) freeze = False job_status = wait_on_job ( JOB_ID, 20 ) if job_status [ ""state"" ] in [ ""TIMEOUT"", ""FAILED"" ] : freeze = True freeze_msg = f""Failed to start jail: {JAIL_NAME}"" assert job_status [ ""state"" ] == ""SUCCESS"", str ( job_status [ ""results"" ] ) for run in range ( 10 ) : results = GET ( f""/jail/id/{JAIL_NAME}/"" ) assert results. status_code == 200, results. text if : break time. sleep ( 1 ) else : assert results. json ( ) [ ""state"" ] == ""up"", results. text",False,results.json()['state'] == 'up',run == 0,0.6483564972877502 2204,"def _write_assets ( self, asset_type, assets, txn, chunk_size, mapping_data = None ) : if asset_type == ""future"" : tbl = futures_contracts_table if : raise TypeError ( ""no mapping data expected for futures"" ) elif asset_type == ""equity"" : tbl = equities_table if mapping_data is None : raise TypeError ( ""mapping data required for equities"" ) self. _write_df_to_table ( equity_symbol_mappings, mapping_data, txn, chunk_size, idx_label = ""sid"", ) else : raise ValueError ( ""asset_type must be in {'future', 'equity'}, got: %s"" % asset_type, ) self. _write_df_to_table ( tbl, assets, txn, chunk_size ) pd. DataFrame ( { asset_router. c. sid. name : assets. index. values, asset_router. c. asset_type. name : asset_type, } ). to_sql ( asset_router. name, txn. connection, if_exists = ""append"", index = False, chunksize = chunk_size, )",False,mapping_data is not None,mapping_data is None,0.6581676006317139 2205,"def unfulfilled_items ( self ) : unfulfilled_items = 0 for order_item in self. items. all ( ) : if : aggr = order_item. deliver_item. aggregate ( delivered = Sum ( ""quantity"" ) ) unfulfilled_items += order_item. quantity - ( aggr [ ""delivered"" ] or 0 ) return unfulfilled_items",False,not order_item.canceled,order_item.deliver_item,0.6553289890289307 2206,"def _quotesplit ( line ) : inquote = None inescape = None wordstart = 0 word = """" for i in range ( len ( line ) ) : c = line [ i ] if : if inquote == q and c!= q : word += ""\\"" word += c inescape = False elif c == ""\\"" : inescape = True elif c == inquote : inquote = None yield ( wordstart, word ) word = """" wordstart = i + 1 elif not inquote and not word and ( c == q or c == qq ) : inquote = c wordstart = i elif not inquote and c in [ "" "", ""\n"", ""\r"", ""\t"" ] : if word : yield ( wordstart, word ) word = """" wordstart = i + 1 else : word += c if word : yield ( wordstart, word ) if inquote or inescape or word : raise QuoteError ( )",False,inescape,i == 0,0.6868882775306702 2207,"def _modify_config_data ( max_seq_length, num_train_data, num_classes ) : config_data_exists = os. path. isfile ( ""./config_data.py"" ) if config_data_exists : with open ( ""./config_data.py"", ""r"" ) as file : filedata = file. read ( ) filedata_lines = filedata. 
split ( ""\n"" ) idx = 0 while True : if : break line = filedata_lines [ idx ] if ( line. startswith ( ""num_classes ="" ) or line. startswith ( ""num_train_data ="" ) or line. startswith ( ""max_seq_length ="" ) ) : filedata_lines. pop ( idx ) idx -= 1 idx += 1 if len ( filedata_lines ) > 0 : insert_idx = 1 else : insert_idx = 0 filedata_lines. insert ( insert_idx, ""{} = {}"". format ( ""num_train_data"", num_train_data ) ) filedata_lines",False,idx >= len(filedata_lines),num_classes > 0,0.6521614789962769 2208,"def new ( obj : bpy. types. Object ) : assert ( type ( obj ) is bpy. types. Object and type ( obj. data ) is bpy. types. Mesh ), ""obj must be mesh object"" rfsource = None if False : if : rfsource = RFSource. __cache [ obj. data. name ] hashed = hash_object ( obj ) if rfsource. hash!= hashed : rfsource = None if not rfsource : RFSource. creating = True rfsource = RFSource ( ) del RFSource. creating rfsource. __setup__ ( obj ) RFSource. __cache [ obj. data. name ] = rfsource else : rfsource = RFSource. __cache [ obj. data. name ] else : RFSource. creating = True rfsource = RFSource ( ) del RFSource. creating rfsource. __setup__ ( obj ) return rfsource",False,obj.data.name in RFSource.__cache,"hasattr(obj, 'data')",0.6579772233963013 2209,"def test_set_classy_state_weight_inflation ( self ) : model_2d_config, model_3d_config = self. _get_model_config_weight_inflation ( ) model_2d = build_model ( model_2d_config ) model_2d_state = model_2d. get_classy_state ( ) model_3d = build_model ( model_3d_config ) model_3d. set_classy_state ( model_2d_state ) model_3d_state = model_3d. get_classy_state ( ) for name, weight_2d in model_2d_state [ ""model"" ] [ ""trunk"" ]. items ( ) : weight_3d = model_3d_state [ ""model"" ] [ ""trunk"" ] [ name ] if weight_2d. dim ( ) == 5 : self. assertEqual ( weight_3d. dim ( ), 5 ) if : weight_2d_inflated = ( weight_2d. repeat ( 1, 1, weight_3d. shape [ 2 ], 1, 1 ) / weight_3d. shape [ 2 ] ) self. assertTrue ( torch. equal ( weight_3d, weight_2d_inflated ) )",False,weight_2d.shape[2] == 1 and weight_3d.shape[2] > 1,weight_3d.dim() == 3,0.6482073068618774 2210,"def _hints_to_binary ( pkg ) : pkg_type = anchore_engine. utils. ensure_str ( pkg. get ( ""type"", ""binary"" ) ). lower ( ) pkg_name = anchore_engine. utils. ensure_str ( pkg. get ( ""name"", """" ) ) pkg_version = anchore_engine. utils. ensure_str ( pkg. get ( ""version"", """" ) ) pkg_location = anchore_engine. utils. ensure_str ( pkg. get ( ""location"", ""/virtual/binarypkg/{}-{}"". format ( pkg_name, pkg_version ) ) ) pkg_license = anchore_engine. utils. ensure_str ( pkg. get ( ""license"", """" ) ) pkg_origin = anchore_engine. utils. ensure_str ( pkg. get ( ""origin"", """" ) ) pkg_files = pkg. get ( ""files"", [ ] ) pkg_metadata = json. dumps ( pkg. get ( ""metadata"", { } ) ) if not pkg_name or not pkg_version or not pkg_type : raise Exception ( ""bad hints record, all hints records must supply at least a name, version and type"" ) for inp in [ pkg_files ] : if : raise Exception ( ""bad hints record ({}), versions, licenses, origins, and files if specified must be list types"". format ( pkg_name ) ) el = { ""name"" : pkg_name, ""version"" : pkg_version, ""origin"" : pkg_origin, ""license"" : pkg_license, ""location"" :",False,type(inp) is not list,not inp[0] or inp[1] or (not pkg_type and pkg_origin and pkg_type),0.6540470123291016 2211,"def evaluate ( env, net, device = ""cpu"" ) : obs = env. reset ( ) reward = 0.0 steps = 0 while True : obs_v = ptan. agent. 
default_states_preprocessor ( [ obs ] ). to ( device ) action_v = net ( obs_v ) action = action_v. data. cpu ( ). numpy ( ) [ 0 ] obs, r, done, _ = env. step ( action ) reward += r steps += 1 if : break return reward, steps",True,done,done,0.6859234571456909 2212,"def _should_cleanup ( self, config_filename ) : """"""Return True if `config_filename` does not exist or if modtime and hash have changes, else return False."""""" if not os. path. exists ( config_filename ) : return True new_mtime = os. path. getmtime ( config_filename ) tool_hash = self. _hash_by_tool_paths. get ( config_filename ) if tool_hash. modtime < new_mtime : if md5_hash_file ( config_filename )!= tool_hash. hash : return True tool = self. _tools_by_path [ config_filename ] for macro_path in tool. _macro_paths : new_mtime = os. path. getmtime ( macro_path ) if : return True return False",False,self._hash_by_tool_paths.get(macro_path).modtime < new_mtime,new_mtime > md5_hash_file(config_filename),0.6468406915664673 2213,"def __gt__ ( self, other ) : """"""Return True if self appears after other in outline order."""""" stack1, stack2 = self. stack, other. stack n1, n2 = len ( stack1 ), len ( stack2 ) n = min ( n1, n2 ) for item1, item2 in zip ( stack1, stack2 ) : v1, x1 = item1 v2, x2 = item2 if x1 > x2 : return True elif : return False if n1 == n2 : x1, x2 = self. _childIndex, other. _childIndex return x1 > x2 elif n1 < n2 : x1 = self. _childIndex v2, x2 = other. stack [ n ] return x1 > x2 else : x1 = other. _childIndex v2, x2 = self. stack [ n ] return x2 >= x1",False,x1 < x2,v1 > x2,0.6643438339233398 2214,"def add_nets ( self, * nets ) : """"""Add some Net objects to the circuit. Assign a net name if necessary."""""" for net in nets : if net. circuit!= self : if net. is_movable ( ) : if : net. circuit -= net net. circuit = self net. name = net. name net. hierarchy = self. hierarchy self. nets. append ( net ) else : log_and_raise ( logger, ValueError, ""Can't add unmovable net {} to this circuit."". format ( net. name ), )",False,"isinstance(net.circuit, Circuit)","hasattr(self, 'circuit')",0.6605138778686523 2215,"def test_authorize_nocsrf_ratelimiting ( self ) : form = { ""client_id"" : ""deadbeef"", ""redirect_uri"" : ""http://localhost:8000/o2c.html"", ""scope"" : ""user:admin"", } headers = dict ( authorization = gen_basic_auth ( ""devtable"", ""invalidpassword"" ) ) self. postResponse ( ""web.authorize_application"", headers = headers, form = form, with_csrf = False, expected_code = 401, ) counter = 0 while True : r = self. postResponse ( ""web.authorize_application"", headers = headers, form = form, with_csrf = False, expected_code = None, ) self. assertNotEqual ( 200, r. status_code ) counter = counter + 1 if : self. fail ( ""Exponential backoff did not fire"" ) if r. status_code == 429 : break",False,counter > 5,counter == 0,0.6693984270095825 2216,"def __call__ ( self, pymodule, node ) : pyname = self. _evaluate_node ( pymodule, node ) if pyname is None or self. expected is None : return self. unsure if self. _unsure_pyname ( pyname, unbound = self. kind == ""name"" ) : return True if self. kind == ""name"" : return self. _same_pyname ( self. expected, pyname ) else : pyobject = pyname. get_object ( ) if self. kind == ""object"" : objects = [ pyobject ] if self. kind == ""type"" : objects = [ pyobject. get_type ( ) ] if : objects = [ pyobject ] objects. extend ( self. _get_super_classes ( pyobject ) ) objects. extend ( self. _get_super_classes ( pyobject. get_type ( ) ) ) for pyobject in objects : if self. 
_same_pyobject ( self. expected. get_object ( ), pyobject ) : return True return False",False,self.kind == 'instance',self.kind == 'classed',0.6566417813301086 2217,"def drop ( self ) : sql = ""if object_id('%s') is not null drop table %s"" % ( self. tname, self. tname ) try : self. execute ( sql ) except Exception as e : self. conn. rollback ( ) if : raise sql = ""drop table if exists %s"" % self. tname self. execute ( sql )",False,'syntax error' not in str(e),"e.args[0] not in [E.SUCCESS, E.CMD]",0.6630998849868774 2218,"def starts_block ( self, i, lines, new_state, prev_state ) : """"""True if the line starts a block."""""" if prev_state. context : return False else : line = lines [ i ] for pattern in self. pascal_pattern_table : m = pattern. match ( line ) if : return True return False",True,m,m,0.6917235255241394 2219,"def __cut ( sentence ) : global emit_P prob, pos_list = viterbi ( sentence, ""BMES"", start_P, trans_P, emit_P ) begin, nexti = 0, 0 for i, char in enumerate ( sentence ) : pos = pos_list [ i ] if pos == ""B"" : begin = i elif pos == ""E"" : yield sentence [ begin : i + 1 ] nexti = i + 1 elif : yield char nexti = i + 1 if nexti < len ( sentence ) : yield sentence [ nexti : ]",False,pos == 'S',pos == 'F',0.66117262840271 2220,"def grad ( self, inputs, gout ) : ( x, ) = inputs ( gz, ) = gout if x. dtype not in continuous_dtypes : return [ x. zeros_like ( dtype = theano. config. floatX ) ] if self. structured : if : r = gz * theano. sparse. sp_ones_like ( x ) elif self. axis == 0 : r = col_scale ( theano. sparse. sp_ones_like ( x ), gz ) elif self. axis == 1 : r = row_scale ( theano. sparse. sp_ones_like ( x ), gz ) else : raise ValueError ( ""Illegal value for self.axis."" ) else : o_format = x. format x = dense_from_sparse ( x ) if _is_sparse_variable ( gz ) : gz = dense_from_sparse ( gz ) if : r = tensor. second ( x, gz ) else : ones = tensor. ones_like ( x ) if self. axis == 0 : r = tensor. addbroadcast ( gz. dimshuffle ( ""x"", 0 ), 0 ) * ones elif self. axis == 1 : r = tensor. addbroadcast ( gz. dimshuffle ( 0, ""x"" ), 1 ) * ones else : raise ValueError ( ""Illegal value for self.axis."" ) r = SparseFromDense ( o_format ) ( r",False,self.axis is None,_is_sparse_variable(gz),0.6636162400245667 2221,"def _resolve ( d ) : all_keys = frozenset ( d. keys ( ) ) result = [ ] resolved_keys = set ( ) while d : resolved_this_round = set ( ) for name, links in list ( d. items ( ) ) : if not links or links <= resolved_keys : result. append ( name ) resolved_this_round. add ( name ) del d [ name ] unknown = links - all_keys if len ( unknown ) == 1 : raise BlockadeConfigError ( ""container %s links to unknown container %s"" % ( name, list ( unknown ) [ 0 ] ) ) elif len ( unknown ) > 1 : raise BlockadeConfigError ( ""container %s links to unknown containers %s"" % ( name, unknown ) ) if : raise BlockadeConfigError ( ""containers have circular links!"" ) resolved_keys. update ( resolved_this_round ) return result",False,not resolved_this_round,self._has_circular_links(unknown),0.6526947021484375 2222,"def getBoundsCenter ( uv_layer ) : min_x = getCenter ( uv_layer ) [ 0 ] max_x = getCenter ( uv_layer ) [ 0 ] min_y = getCenter ( uv_layer ) [ 1 ] max_y = getCenter ( uv_layer ) [ 1 ] len = 0 for uv_verts in uv_layer. data : if uv_verts. uv [ 0 ] < min_x : min_x = uv_verts. uv [ 0 ] if : max_x = uv_verts. uv [ 0 ] if uv_verts. uv [ 1 ] < min_y : min_y = uv_verts. uv [ 1 ] if uv_verts. uv [ 1 ] > max_y : max_y = uv_verts. 
uv [ 1 ] center_x = ( max_x - min_x ) / 2 + min_x center_y = ( max_y - min_y ) / 2 + min_y return ( center_x, center_y )",False,uv_verts.uv[0] > max_x,uv_verts.uv[0] < max_x,0.6599746942520142 2223,"def _populate_map ( self ) : try : with open ( self. filename ) as fh : line_number = 0 for line in fh : line_number += 1 line = line. rstrip ( ""\r\n"" ) if not line. startswith ( self. comment_chars ) : elems = line. split ( self. delimiter ) if len ( elems ) <= self. key_column : die ( ""Location file %s line %d: less than %d columns"" % ( self. filename, line_number, self. key_column + 1 ) ) else : key = elems. pop ( self. key_column ) if : if self. _map [ key ]!= elems : die ( 'Location file %s line %d: duplicate key ""%s""' % ( self. filename, line_number, key ) filtered_extensions = [ ] for ext in extensions : should_include = True for suppression in ext_suppressions : if : should_include = False if should_include : filtered_extensions. append ( ext ) return filtered_extensions,False,should_include and suppression.handle_suppress(ext),suppression not in ['TAB>,0.6457030773162842 2225,"def setup_sampler ( sampler_type, num_iters, batch_size ) : if sampler_type is None : return None, batch_size if sampler_type == ""weighted"" : from torch. utils. data. sampler import WeightedRandomSampler w = torch. ones ( num_iters * batch_size, dtype = torch. float ) for i in range ( num_iters ) : w [ batch_size * i : batch_size * ( i + 1 ) ] += i * 1.0 return ( WeightedRandomSampler ( w, num_samples = num_iters * batch_size, replacement = True ), batch_size, ) if sampler_type == ""distributed"" : import torch. distributed as dist from torch. utils. data. distributed import DistributedSampler num_replicas = 1 rank = 0 if : num_replicas = dist. get_world_size ( ) rank = dist. get_rank ( ) dataset = torch. zeros ( num_iters * batch_size ) return ( DistributedSampler ( dataset, num_replicas = num_replicas, rank = rank ), batch_size // num_replicas, )",False,dist.is_available() and dist.is_initialized(),dist.is_initialized(),0.6488051414489746 2226,"def _try_increase_batch_size ( self, current_batch_size ) : if current_batch_size * 2 <= self. max_batch_size : current_time = time. time ( ) latest_batch_size_change_time = self. latest_batch_size_change_time seconds_since_last_change = ( current_time - latest_batch_size_change_time if : else 0 ) if seconds_since_last_change > BATCH_CHANGE_COOLDOWN_PERIOD_SECONDS : new_batch_size = current_batch_size * 2 self. logger. info ( ""Increasing batch size to {}."". format ( new_batch_size ) ) self. batch_size = new_batch_size self. latest_batch_size_change_time = current_time",False,latest_batch_size_change_time is not None,current_time < self.max_batch_size,0.6491739749908447 2227,"def activate_css ( targetnode ) : scriptnodes = list ( targetnode. getElementsByTagName ( ""link"" ) ) for LC in range ( len ( scriptnodes ) ) : sn = scriptnodes [ LC ] sn. parentNode. removeChild ( sn ) fileref = DOM. createElement ( ""link"" ) if : fileref. href = sn. href else : fileref. text = sn. text fileref. rel = ""stylesheet"" fileref. type = ""text/css"" doc ( ). getElementsByTagName ( ""head"" ). item ( 0 ). appendChild ( fileref )",False,"hassattr(sn, 'href')","sys.version_info >= (3, 0)",0.6528770923614502 2228,"def visit_simple_stmt ( self, node : Node ) -> Iterator [ Line ] : """"""Visit a statement without nested statements."""""" is_suite_like = node. parent and node. parent. type in STATEMENT if is_suite_like : if : yield from self. 
visit_default ( node ) else : yield from self. line ( + 1 ) yield from self. visit_default ( node ) yield from self. line ( - 1 ) else : if not self. is_pyi or not node. parent or not is_stub_suite ( node. parent ) : yield from self. line ( ) yield from self. visit_default ( node )",False,self.is_pyi and is_stub_body(node),node.parent,0.651760458946228 2229,"def validate_party_details ( self ) : if self. party : if not frappe. db. exists ( self. party_type, self. party ) : frappe. throw ( _ ( ""Invalid {0}: {1}"" ). format ( self. party_type, self. party ) ) if : self. validate_account_type ( self. party_account, [ erpnext. get_party_account_type ( self. party_type ) ] )",False,"self.party_account and self.party_type in ('Customer', 'Supplier')",self.party_account,0.6509919166564941 2230,"def __call__ ( self, x, uttid = None ) : if self. utt2spk is not None : spk = self. utt2spk [ uttid ] else : spk = uttid if not self. reverse : if : x = np. add ( x, self. bias [ spk ] ) if self. norm_vars : x = np. multiply ( x, self. scale [ spk ] ) else : if self. norm_vars : x = np. divide ( x, self. scale [ spk ] ) if : x = np. subtract ( x, self. bias [ spk ] ) return x",False,self.norm_means,self.norm_bias,0.6563746929168701 2231,"def __init__ ( self, operation_def, inputs ) : self. _operation_def = operation_def self. _inputs = inputs if not isinstance ( operation_def, OperationDef ) : raise TypeError ( ""operation_def must be an OperationDef, got {} of type {}"". format ( operation_def, type ( operation_def ) ) ) if not isinstance ( inputs, tuple ) : raise TypeError ( ""inputs must be a tuple, got {} of type {}"". format ( inputs, type ( inputs ) ) ) for value_node in inputs : if : raise TypeError ( ""Inputs to Operation must be a ValueNode, got {} of type {}"". format ( value_node, type ( value_node ) ) )",True,"not isinstance(value_node, ValueNode)","not isinstance(value_node, ValueNode)",0.6510245203971863 2232,"def instantiate_lambda ( transform, lambda_transforms = None ) : if transform. get ( ""__type__"" ) == ""Lambda"" : name = transform [ ""__name__"" ] if : raise ValueError ( ""To deserialize a Lambda transform with name {name} you need to pass a dict with this transform "" ""as the `lambda_transforms` argument"". format ( name = name ) ) transform = lambda_transforms. get ( name ) if transform is None : raise ValueError ( ""Lambda transform with {name} was not found in `lambda_transforms`"". format ( name = name ) ) return transform return None",True,lambda_transforms is None,lambda_transforms is None,0.6757447719573975 2233,"def on_paint ( self, event ) : dc = wx. BufferedPaintDC ( self. panel ) dc. SetBackground ( wx. Brush ( wx. SystemSettings. GetColour ( wx. SYS_COLOUR_WINDOW ) ) ) dc. Clear ( ) dc. SetPen ( wx. Pen ( wx. SystemSettings. GetColour ( wx. SYS_COLOUR_GRAYTEXT ) ) ) sizer = self. panel. Sizer _, panel_width = self. panel. GetClientSize ( ) assert isinstance ( sizer, wx. lib. rcsizer. RowColSizer ) bottom_choice_name = self. get_choice_control_name ( self. n_items ) bottom_choice = self. panel. FindWindowByName ( bottom_choice_name ) if bottom_choice is not None : r = bottom_choice. GetRect ( ) dc. DrawLine ( r. Left - 2, 1, r. Left - 2, r. Bottom ) for i in range ( 1, self. n_items + 1 ) : choice_name = self. get_choice_control_name ( i ) choice = self. panel. FindWindowByName ( choice_name ) if : r = choice. GetRect ( ) dc. DrawLine ( 1, r. Top - 2, panel_width - 1, r. Top - 2 ) event. 
Skip ( )",True,choice is not None,choice is not None,0.6647061109542847 2234,"def validate_dict ( d ) : for n, v in d. items ( ) : if : continue if n [ 0 : 2 ] == ""t_"" : continue if n [ 0 : 2 ] == ""p_"" : sys. stderr. write ( ""yacc: Warning. '%s' not defined as a function\n"" % n ) if 1 and isinstance ( v, types. FunctionType ) and v. func_code. co_argcount == 1 : try : doc = v. __doc__. split ( "" "" ) if doc [ 1 ] == "":"" : sys. stderr. write ( ""%s:%d: Warning. Possible grammar rule '%s' defined without p_ prefix.\n"" % ( v. func_code. co_filename, v. func_code. co_firstlineno, n ) ) except Exception : pass",False,"n[0:2] == 'p_' and type(v) in (types.FunctionType, types.MethodType)",n[0:2] == 'e_',0.6508049964904785 2235,"def client_post_decrypt ( self, buf ) : if self. raw_trans : return buf self. recv_buf += buf out_buf = b"""" while len ( self. recv_buf ) > 2 : length = struct. unpack ( "">H"", self. recv_buf [ : 2 ] ) [ 0 ] if : self. raw_trans = True self. recv_buf = b"""" raise Exception ( ""client_post_decrypt data error"" ) if length > len ( self. recv_buf ) : break if ( struct. pack ( "" != self. recv_buf [ length - 4 : length ] ) : self. raw_trans = True self. recv_buf = b"""" raise Exception ( ""client_post_decrypt data uncorrect checksum"" ) pos = common. ord ( self. recv_buf [ 2 ] ) + 2 out_buf += self. recv_buf [ pos : length - 4 ] self. recv_buf = self. recv_buf [ length : ] if out_buf : self. decrypt_packet_num += 1 return out_buf",False,length >= 8192 or length < 7,length > len(self.recv_buf),0.660135805606842 2236,"def visit_Enum ( self, node ) : if not node. values : return generator = c_generator. CGenerator ( ) for i, elem in enumerate ( node. values. enumerators ) : if not elem. value : continue try : raw_val = generator. visit ( elem. value ) for item in node. values. enumerators : try : if : raw_val = raw_val. replace ( item. name, item. value. value ) except : pass cooked_value = eval ( raw_val ) elem. value = Constant ( type = ""int"", value = str ( cooked_value ) ) except : pass",False,item.value and item.value.type == 'int',item.name,0.6530643701553345 2237,"def get_palette_for_custom_classes ( self, class_names, palette = None ) : if self. label_map is not None : palette = [ ] for old_id, new_id in sorted ( self. label_map. items ( ), key = lambda x : x [ 1 ] ) : if : palette. append ( self. PALETTE [ old_id ] ) palette = type ( self. PALETTE ) ( palette ) elif palette is None : if self. PALETTE is None : palette = np. random. randint ( 0, 255, size = ( len ( class_names ), 3 ) ) else : palette = self. PALETTE return palette",False,new_id != -1,new_id in class_names,0.6611730456352234 2238,"def save ( self ) : updates = self. cinder_obj_get_changes ( ) if updates : if : metadata = updates. pop ( ""metadata"", None ) self. metadata = db. backup_metadata_update ( self. _context, self. id, metadata, True ) updates. pop ( ""parent"", None ) db. backup_update ( self. _context, self. id, updates ) self. obj_reset_changes ( )",False,'metadata' in updates,self.metadata,0.6707595586776733 2239,"def export_addresses ( self ) : exports = self. _cached_exports_addresses if exports is None : exports = [ ] for export_spec in getattr ( self, ""export_specs"", tuple ( ) ) : if isinstance ( export_spec, Target ) : exports. append ( export_spec. address ) else : exports. append ( Address. parse ( export_spec, relative_to = self. address. spec_path ) ) exports = tuple ( exports ) dep_addresses = { d. address for d in self. dependencies } invalid_export_specs = [ a. 
spec for a in exports if a not in dep_addresses ] if : raise TargetDefinitionException ( self, ""Invalid exports: these exports must also be dependencies\n {}"". format ( ""\n "". join ( invalid_export_specs ) ), ) self. _cached_exports_addresses = exports return exports",False,len(invalid_export_specs) > 0,invalid_export_specs,0.6557188034057617 2240,"def _set_value ( self, value ) : if self. _module is None : for module in ( _eui48, _eui64 ) : try : self. _value = module. str_to_int ( value ) self. _module = module break except AddrFormatError : try : if : self. _value = int ( value ) self. _module = module break except ValueError : pass if self. _module is None : raise AddrFormatError ( ""failed to detect EUI version: %r"" % value ) else : if _is_str ( value ) : try : self. _value = self. _module. str_to_int ( value ) except AddrFormatError : raise AddrFormatError ( ""address %r is not an EUIv%d"" % ( value, self. _module. version ) ) else : if 0 <= int ( value ) <= self. _module. max_int : self. _value = int ( value ) out_path = argv [ 1 ] mode = zipfile. ZIP_STORED z = zipfile. ZipFile ( out_path, ""w"", mode ) seen = { } for line in sys. stdin : line = line. strip ( ) try : full_path, rel_path = line. split ( None, 1 ) except ValueError : raise RuntimeError ( ""Invalid line %r"" % line ) if : expected = seen [ rel_path ] if expected!= full_path : print >> sys. stderr, ""WARNING: expected %r, got %r"" % ( expected, full_path, ) continue z. write ( full_path, rel_path ) seen [ rel_path ] = full_path",True,rel_path in seen,rel_path in seen,0.6614975333213806 2242,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if : break if fid == 1 : if ftype == TType. STRUCT : self. type = Type ( ) self. type. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STOP,fid == 0,0.6585721373558044 2243,"def __init__ ( self, options, columns ) : super ( TestForeignDataWrapper, self ). __init__ ( options, columns ) self. columns = columns self. test_type = options. get ( ""test_type"", None ) self. test_subtype = options. get ( ""test_subtype"", None ) self. tx_hook = options. get ( ""tx_hook"", False ) self. _row_id_column = options. get ( ""row_id_column"", list ( self. columns. keys ( ) ) [ 0 ] ) log_to_postgres ( str ( sorted ( options. items ( ) ) ) ) log_to_postgres ( str ( sorted ( [ ( key, column. type_name ) for key, column in columns. items ( ) ] ) ) ) for column in columns. values ( ) : if : log_to_postgres ( ""Column %s options: %s"" % ( column. column_name, column. options ) ) if self. test_type == ""logger"" : log_to_postgres ( ""An error is about to occur"", WARNING ) log_to_postgres ( ""An error occured"", ERROR )",False,column.options,self.test_type == 'table',0.6561008095741272 2244,"def filter_command ( self, event = None ) : dir, pat = self. get_filter ( ) try : names = os. listdir ( dir ) except OSError : self. master. bell ( ) return self. directory = dir self. set_filter ( dir, pat ) names. sort ( ) subdirs = [ os. pardir ] matchingfiles = [ ] for name in names : fullname = os. path. join ( dir, name ) if os. path. 
isdir ( fullname ) : subdirs. append ( name ) elif : matchingfiles. append ( name ) self. dirs. delete ( 0, END ) for name in subdirs : self. dirs. insert ( END, name ) self. files. delete ( 0, END ) for name in matchingfiles : self. files. insert ( END, name ) head, tail = os. path. split ( self. get_selection ( ) ) if tail == os. curdir : tail = """" self. set_selection ( tail )",False,"fnmatch.fnmatch(name, pat)",os.path.isfile(name),0.6435481905937195 2245,"def action ( self, line ) : if line. strip ( ). startswith ( ""#"" ) and self. state!= ""multiline"" : if self. state!= ""init"" or self. tags or self. variant!= ""feature"" : return line = line. strip ( ) [ 1 : ]. strip ( ) if : language = line [ 9 : ]. strip ( ) self. language = language self. keywords = i18n. languages [ language ] return func = getattr ( self, ""action_"" + self. state, None ) if func is None : line = line. strip ( ) msg = u""Parser in unknown state %s;"" % self. state raise ParserError ( msg, self. line, self. filename, line ) if not func ( line ) : line = line. strip ( ) msg = u'\nParser failure in state %s, at line %d: ""%s""\n' % ( self. state, self. line, line, ) reason = self. ask_parse_failure_oracle ( line ) if reason : msg += u""REASON: %s"" % reason raise ParserError ( msg, None, self. filename )",False,line.lstrip().lower().startswith('language:'),len(line) > 9,0.647869348526001 2246,"def OnMacKeyDown ( self, e ) : code = e. GetKeyCode ( ) stopPropagation = True if e. CmdDown ( ) : if code == wx. _core. WXK_LEFT : self. GotoLine ( self. GetCurrentLine ( ) ) elif code == wx. _core. WXK_RIGHT : self. GotoPos ( self. GetLineEndPosition ( self. GetCurrentLine ( ) ) ) elif code == wx. _core. WXK_UP : self. GotoPos ( 0 ) elif code == wx. _core. WXK_DOWN : self. GotoPos ( self. GetLength ( ) ) else : stopPropagation = False elif e. GetModifiers ( ) & 0xF0 : if : self. GotoLine ( self. GetCurrentLine ( ) ) elif code == 69 : self. GotoPos ( self. GetLineEndPosition ( self. GetCurrentLine ( ) ) ) else : stopPropagation = False else : stopPropagation = False if stopPropagation : e. StopPropagation ( ) else : e. Skip ( )",False,code == 65,code == wx._core.WXK_RIGHT,0.6755229234695435 2247,"def _postproc_phase3 ( self ) : try : if : self. end_online ( ) if self. _user. token and self. engine. stopping_reason : exc_class = self. engine. stopping_reason. __class__. __name__ note = ""%s: %s"" % ( exc_class, str ( self. engine. stopping_reason ) ) self. append_note_to_session ( note ) if self. _master : self. append_note_to_master ( note ) except KeyboardInterrupt : raise except BaseException as exc : self. log. debug ( ""Failed to finish online: %s"", traceback. format_exc ( ) ) self. log. warning ( ""Failed to finish online: %s"", exc )",False,self.send_data,self.engine.is_online,0.6547809839248657 2248,"def _setPupilDetection ( self, pmode ) : try : if : self. _eyelink. sendCommand ( ""use_ellipse_fitter = YES"" ) elif pmode. upper ( ) == ""CENTROID_FIT"" : self. _eyelink. sendCommand ( ""force_ellipse_fitter -1"" ) self. _eyelink. sendCommand ( ""use_ellipse_fitter = NO"" ) else : print2err ( ""** EyeLink Warning: _setPupilDetection: Unrecofnized pupil fitting type: "", pmode, ) return EyeTrackerConstants. EYETRACKER_ERROR return EyeTrackerConstants. EYETRACKER_OK except Exception : print2err ( ""EYELINK Error during _setPupilDetection:"" ) printExceptionDetailsToStdErr ( ) return EyeTrackerConstants. 
EYETRACKER_ERROR",False,pmode.upper() == 'ELLIPSE_FIT',pmode.upper() == 'YES',0.6553326845169067 2249,"def runGenerator ( self ) : generatorrunner = ""shiboken"" for name in ( ""shiboken"", ""generatorrunner"" ) : if PLAT_WINDOWS : name += "".exe"" name = os. path. join ( self. PySideBase, ""bin"", name ) if : generatorrunner = name break args = [ generatorrunner, ""--generator-set="" + self. ShibokenGenerator, ""global.h "", ""--avoid-protected-hack"", ""--enable-pyside-extensions"", ""--include-paths="" + self. AllIncludes, ""--typesystem-paths="" + self. PySideTypeSystem, ""--output-directory=."", ""typesystem_ScintillaEdit.xml"", ] print ( "" "". join ( args ) ) retcode = subprocess. call ( "" "". join ( args ), shell = True, stderr = subprocess. STDOUT ) if retcode : print ( ""Failed in generatorrunner"", retcode ) sys. exit ( )",False,os.path.exists(name),os.path.isdir(name),0.6492661833763123 2250,"def _align_column_choose_padfn ( strings, alignment, has_invisible ) : if alignment == ""right"" : if not PRESERVE_WHITESPACE : strings = [ s. strip ( ) for s in strings ] padfn = _padleft elif alignment == ""center"" : if not PRESERVE_WHITESPACE : strings = [ s. strip ( ) for s in strings ] padfn = _padboth elif alignment == ""decimal"" : if : decimals = [ _afterpoint ( _strip_invisible ( s ) ) for s in strings ] else : decimals = [ _afterpoint ( s ) for s in strings ] maxdecimals = max ( decimals ) strings = [ s + ( maxdecimals - decs ) * "" "" for s, decs in zip ( strings, decimals ) ] padfn = _padleft elif not alignment : padfn = _padnone else : if not PRESERVE_WHITESPACE : strings = [ s. strip ( ) for s in strings ] padfn = _padright return strings, padfn",True,has_invisible,has_invisible,0.663918673992157 2251,"def yview ( self, mode = None, value = None, units = None ) : if type ( value ) == str : value = float ( value ) if mode is None : return self. vsb. get ( ) elif mode == ""moveto"" : frameHeight = self. innerframe. winfo_reqheight ( ) self. _startY = value * float ( frameHeight ) else : clipperHeight = self. _clipper. winfo_height ( ) if : jump = int ( clipperHeight * self. _jfraction ) else : jump = clipperHeight self. _startY = self. _startY + value * jump self. reposition ( )",False,units == 'units',clipperHeight > 0,0.6600906252861023 2252,"def iter_fields ( node, *, include_meta = True, exclude_unset = False ) : exclude_meta = not include_meta for field_name, field in node. _fields. items ( ) : if exclude_meta and field. meta : continue field_val = getattr ( node, field_name, _marker ) if field_val is _marker : continue if exclude_unset : if : default = field. default ( ) else : default = field. default if field_val == default : continue yield field_name, field_val",False,callable(field.default),field_val == _marker,0.6510690450668335 2253,"def _build_coverage_data ( executed_commands ) : coverage_data = { } for command in executed_commands : command_tokens = [ ] param_tokens = [ ] is_command = True for token in command. split ( ) : if : param_tokens. append ( token ) is_command = False elif is_command : command_tokens. append ( token ) else : pass command_name = "" "". join ( command_tokens ) if command_name in coverage_data : coverage_data [ command_name ] = list ( set ( coverage_data [ command_name ] ). union ( set ( param_tokens ) ) ) else : coverage_data [ command_name ] = param_tokens return coverage_data",False,token.startswith('-'),is_command,0.6484431028366089 2254,"def __init__ ( self, name, * sub_params ) : self. name = name types = self. 
struct_types = OrderedDict ( ) values = self. struct_values = { } for sub in sub_params : if isinstance ( sub, self. __class__ ) : types [ sub. name ] = ""STRUCT"" values [ sub. name ] = sub elif : types [ sub. name ] = ""ARRAY"" values [ sub. name ] = sub else : types [ sub. name ] = sub. type_ values [ sub. name ] = sub. value",False,"isinstance(sub, ArrayQueryParameter)","isinstance(sub, self.__class__)",0.6559240818023682 2255,"def update ( self, content = None ) : if content is not None : self. content = content try : root = ET. fromstring ( self. content ) self. size = len ( self. content ) self. last_modified = int ( ( datetime. datetime. now ( ) - datetime. datetime ( 1970, 1, 1 ) ). total_seconds ( ) ) self. lexemes_count = len ( root. findall ( ""."" ) ) for key, value in root. attrib. items ( ) : if : self. alphabet = value elif key. endswith ( ""lang"" ) : self. language_code = value except Exception as err : raise ValueError ( ""Failure parsing XML: {0}"". format ( err ) )",False,key.endswith('alphabet'),key.endswith('ui_language'),0.651118278503418 2256,"def nextEditable ( self ) : """"""Moves focus of the cursor to the next editable window"""""" if self. currentEditable is None : if len ( self. _editableChildren ) : self. _currentEditableRef = self. _editableChildren [ 0 ] else : for ref in weakref. getweakrefs ( self. currentEditable ) : if : cei = self. _editableChildren. index ( ref ) nei = cei + 1 if nei >= len ( self. _editableChildren ) : nei = 0 self. _currentEditableRef = self. _editableChildren [ nei ] return self. currentEditable",True,ref in self._editableChildren,ref in self._editableChildren,0.6591984033584595 2257,"def buildSearchTrie ( self, choices ) : searchtrie = trie. Trie ( ) for choice in choices : for token in self. tokenizeChoice ( choice ) : if : searchtrie [ token ] = [ ] searchtrie [ token ]. append ( choice ) return searchtrie",False,not searchtrie.has_key(token),token not in searchtrie,0.6557948589324951 2258,"def test_upload_guest ( self ) : response = requests. post ( ""/"". join ( [ server_url, ""data"", ""upload"" ] ), json = self. upload_config ) self. assertTrue ( response. status_code in [ 200, 201 ] ) self. assertTrue ( int ( response. json ( ) [ ""retcode"" ] ) == 0 ) job_id = response. json ( ) [ ""jobId"" ] for i in range ( 60 ) : response = requests. post ( ""/"". join ( [ server_url, ""job"", ""query"" ] ), json = { ""job_id"" : job_id } ) self. assertTrue ( int ( response. json ( ) [ ""retcode"" ] ) == 0 ) if : break time. sleep ( 1 ) self. assertTrue ( response. json ( ) [ ""data"" ] [ 0 ] [ ""f_status"" ] == JobStatus. SUCCESS ) response = test_table_info ( ) self. assertTrue ( response. status_code in [ 200, 201 ] ) self. assertTrue ( int ( response. json ( ) [ ""retcode"" ] ) == 0 ) response = test_table_delete ( ) self. assertTrue ( response. status_code in [ 200, 201 ] ) self. assertTrue ( int ( response. json ( ) [ ""retcode"" ] ) == 0 )",False,response.json()['data'][0]['f_status'] == JobStatus.SUCCESS,5 < 5,0.6520617008209229 2259,"def _get_pid_port_udp ( self, port ) : for item in self. get_extended_udp_table ( ) : lPort = socket. ntohs ( item. dwLocalPort ) lAddr = socket. inet_ntoa ( struct. pack ( ""L"", item. dwLocalAddr ) ) pid = item. dwOwningPid if : return pid else : return None",False,lPort == port,pid and port == pid,0.6643568277359009 2260,"def _process_service_request ( self, pkttype, pktid, packet ) : """"""Process a service request"""""" service = packet. get_string ( ) packet. 
check_end ( ) if service == self. _next_service : self. logger. debug2 ( ""Accepting request for service %s"", service ) self. _next_service = None self. send_packet ( MSG_SERVICE_ACCEPT, String ( service ) ) if : self. _auth_in_progress = True self. _send_deferred_packets ( ) else : raise DisconnectError ( DISC_SERVICE_NOT_AVAILABLE, ""Unexpected service request received"" )",False,self.is_server() and service == _USERAUTH_SERVICE,self._auth_in_progress,0.6530213356018066 2261,"def __init__ ( self, library, binary = None ) : self. results = { } try : lib = SIM_LIBRARIES [ library ] except KeyError : raise AngrValueError ( ""No such library %s"" % library ) if binary is None : binary = self. project. loader. main_object for func in binary. symbols : if not func. is_function : continue if self. project. is_hooked ( func. rebased_addr ) : l. debug ( ""Skipping %s at %#x, already hooked"", func. name, func. rebased_addr ) continue if : proc = lib. get ( func. name, self. project. arch ) self. results [ func. rebased_addr ] = proc if self. project. is_hooked ( func. rebased_addr ) : l. debug ( ""Skipping %s at %#x, already hooked"", func. name, func. rebased_addr ) else : self. project. hook ( func. rebased_addr, proc ) l. info ( ""Hooked %s at %#x"", func. name, func. rebased_addr ) else : l. debug ( ""Failed to hook %s at %#x"", func. name, func. rebased_addr )",False,lib.has_implementation(func.name),self.project.arch != None,0.6505863666534424 2262,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. I64 : self. lockid = iprot. readI64 ( ) else : iprot. skip ( ftype ) elif fid == 2 : if : self. state = iprot. readI32 ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.I32,fid == 3,0.658821702003479 2263,"def insert ( self, index, item ) : if len ( self. lists ) == 1 : self. lists [ 0 ]. insert ( index, item ) self. _balance_list ( 0 ) else : list_idx, rel_idx = self. _translate_index ( index ) if : raise IndexError ( ) self. lists [ list_idx ]. insert ( rel_idx, item ) self. _balance_list ( list_idx ) return",False,list_idx is None,list_idx not in len(self.lists),0.6620573401451111 2264,def output_handles_from_execution_plan ( execution_plan ) : output_handles_for_current_run = set ( ) for step_level in execution_plan. execution_step_levels ( ) : for step in step_level : for step_input in step. step_inputs : if : output_handles_for_current_run. update ( step_input. source_handles ) return output_handles_for_current_run,True,step_input.source_handles,step_input.source_handles,0.6585800647735596 2265,"def update_metas ( self, schema = None, count = None, part_of_data = None, description = None, partitions = None, in_serialized = None, ** kwargs ) : meta_info = { } for k, v in locals ( ). items ( ) : if k not in [ ""self"", ""kwargs"", ""meta_info"" ] and v is not None : meta_info [ k ] = v meta_info. update ( kwargs ) meta_info [ ""name"" ] = meta_info. get ( ""name"", self. name ) meta_info [ ""namespace"" ] = meta_info. get ( ""namespace"", self. 
namespace ) update_filters = [ ] primary_keys = StorageTableMetaModel. _meta. primary_key. field_names for p_k in primary_keys : update_filters. append ( operator. attrgetter ( p_k ) ( StorageTableMetaModel ) == meta_info [ p_k. lstrip ( ""f_"" ) ] ) table_meta = StorageTableMetaModel ( ) update_fields = { } for k, v in meta_info. items ( ) : attr_name = ""f_%s"" % k if hasattr ( StorageTableMetaModel, attr_name ) and attr_name not in primary_keys : if : if len ( v ) < 100 : tmp = v else : tmp = v [ : 100 ] update_fields [ ",False,k == 'part_of_data',len(v) > 0,0.6513879895210266 2266,"def use_index ( self, term : Union [ str, Index ], * terms : Union [ str, Index ] ) -> ""QueryBuilder"" : for t in ( term, * terms ) : if : self. _use_indexes. append ( t ) elif isinstance ( t, str ) : self. _use_indexes. append ( Index ( t ) )",True,"isinstance(t, Index)","isinstance(t, Index)",0.6632080674171448 2267,"def generate_self_block ( self, block : PLBlock ) -> Optional [ PLBlock ] : if not self. commands : return None self_block = block. add_block ( ) prefix_code = self. prefix_code ( ) actions = [ ] dynamic_actions = [ ] for cmd in self. commands : if isinstance ( cmd, tuple ) and ( cmd [ 1 ] or cmd [ 2 ] ) : action = cmd [ 0 ]. code ( self_block ) if : subcommand = f""EXECUTE {ql(prefix_code)} ||'' || {action}"" else : subcommand = prefix_code + "" "" + action self_block. add_command ( subcommand, conditions = cmd [ 1 ], neg_conditions = cmd [ 2 ] ) else : action = cmd. code ( self_block ) if : subcommand = f""EXECUTE {ql(prefix_code)} ||'' || {action}"" dynamic_actions. append ( subcommand ) else : actions. append ( action ) if actions : command = prefix_code + "" "" + "", "". join ( actions ) self_block. add_command ( command ) if dynamic_actions : for action in dynamic_actions : self_block. add_command ( action ) extra_block = self_block. add_block ( ) for cmd in self. commands : if isinstance ( cmd, tuple ) and ( cmd [ 1 ] or cmd [ 2 ] ) : files = [ ] commands = [ [ ""mtn"", ""ls"", ""unknown"" ] ] if not ignore_ignored : commands. append ( [ ""mtn"", ""ls"", ""ignored"" ] ) for cmd in commands : stdout = yield self. _dovccmd ( cmd, workdir = self. workdir, collectStdout = True ) if not stdout : continue for filename in stdout. strip ( ). split ( ""\n"" ) : filename = self. workdir + ""/"" + str ( filename ) files. append ( filename ) if not files : rc = 0 else : if : rc = yield self. removeFiles ( files ) else : rc = yield self. runRmdir ( files, abandonOnFailure = False ) if rc!= 0 : log. msg ( ""Failed removing files"" ) raise buildstep. BuildStepFailed ( )",False,"self.workerVersionIsOlderThan('rmdir', '2.14')",self.removeFiles(files),0.6504614949226379 2269,"def _execute ( self, undoinfo ) : if undoinfo is None : return None msg, func, args = UndoRedo. _split ( undoinfo ) if isinstance ( func, list ) : redolist = [ ] while func : redolist. append ( self. _execute ( func. pop ( ) ) ) if msg : return msg, redolist else : return redolist else : redoinfo = func ( * args ) if : return redoinfo elif msg : return ( msg, ) + redoinfo else : return redoinfo",False,"isinstance(redoinfo[0], str)",redoinfo,0.657508909702301 2270,"def __focusDefaultButton ( self ) : defaultButton = self. __defaultButton if defaultButton == self. DefaultButton. FromUserData : defaultButton = self. DefaultButton. OK d = None with IECore. IgnoredExceptions ( KeyError ) : d = self. __node. getParameterised ( ) [ 0 ]. userData ( ) [ ""UI"" ] [ ""defaultButton"" ] if : for v in self. DefaultButton. 
values ( ) : if str ( v ). lower ( ) == d. value. lower ( ) : defaultButton = v break if defaultButton == self. DefaultButton. None_ : self. _qtWidget ( ). setFocus ( ) elif defaultButton == self. DefaultButton. Cancel : self. __backButton. _qtWidget ( ). setFocus ( ) else : self. __forwardButton. _qtWidget ( ). setFocus ( )",False,d is not None,d,0.6591241359710693 2271,"def sysctlTestAndSet ( name, limit ) : ""Helper function to set sysctl limits"" if ""/"" not in name : name = ""/proc/sys/"" + name. replace ( ""."", ""/"" ) with open ( name, ""r"" ) as readFile : oldLimit = readFile. readline ( ) if : if int ( oldLimit ) < limit : with open ( name, ""w"" ) as writeFile : writeFile. write ( ""%d"" % limit ) else : with open ( name, ""w"" ) as writeFile : writeFile. write ( limit )",False,"isinstance(limit, int)",oldLimit,0.6571223735809326 2272,"def add_listen ( self, addr ) : ip = addr [ 0 ] port = addr [ 1 ] if isinstance ( ip, str ) : ip = ip. encode ( ""ascii"" ) if b"":"" in ip : sock = socket. socket ( socket. AF_INET6, socket. SOCK_STREAM ) else : sock = socket. socket ( socket. AF_INET, socket. SOCK_STREAM ) sock. setsockopt ( socket. SOL_SOCKET, socket. SO_REUSEADDR, 1 ) sock. setsockopt ( socket. SOL_TCP, socket. TCP_NODELAY, True ) addr = tuple ( ( ip, port ) ) try : sock. bind ( addr ) except Exception as e : err_string = ""bind to %s:%d fail:%r"" % ( addr [ 0 ], addr [ 1 ], e ) self. logger. error ( err_string ) raise Exception ( err_string ) if self. use_https : import OpenSSL if : ssl_version = OpenSSL. SSL. TLSv1_2_METHOD elif hasattr ( OpenSSL. SSL, ""TLSv1_1_METHOD"" ) : ssl_version = OpenSSL. SSL. TLSv1_1_METHOD elif hasattr ( OpenSSL. SSL, ""TLSv1_METHOD"" ) : ssl_version = OpenSSL. SSL. TLSv1_METHOD ctx = OpenSSL. SSL. Context ( ssl_version ) fpem = self. cert ctx. use_privatekey_file ( fpem ) ctx. use_certificate_file ( fpem ) sock = OpenSSL. SSL. Connection ( ctx, sock ) sock. listen ( 200 )",True,"hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD')","hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD')",0.646664023399353 2273,"def set_dir_modes ( self, dirname, mode ) : if not self. is_chmod_supported ( ) : return for dirpath, dirnames, fnames in os. walk ( dirname ) : if : continue log. info ( ""changing mode of %s to %o"", dirpath, mode ) if not self. dry_run : os. chmod ( dirpath, mode )",False,os.path.islink(dirpath),"mode in ('w', 'w')",0.647996187210083 2274,def clean_data ( data ) : for k in list ( data. keys ( ) ) : delete_key = False new_k = collector. cached_mapped_file ( k ) if not new_k. startswith ( source_path ) : delete_key = True else : for omit_pattern in omit_patterns : if : delete_key = True break if delete_key : del data [ k ] else : v = data [ k ] del data [ k ] data [ new_k ] = v,False,"fnmatch(new_k, omit_pattern)",new_k.startswith(oml_pattern),0.6461020708084106 2275,"def init_sequence ( self, coll_name, seq_config ) : if not isinstance ( seq_config, list ) : raise Exception ( '""sequence"" config must be a list' ) handlers = [ ] for entry in seq_config : if : raise Exception ( '""sequence"" entry must be a dict' ) name = entry. get ( ""name"", """" ) handler = self. load_coll ( name, entry ) handlers. append ( handler ) return HandlerSeq ( handlers )",True,"not isinstance(entry, dict)","not isinstance(entry, dict)",0.6548457145690918 2276,"def get_dhcp_opts ( context, network_ref ) : """"""Get network's hosts config in dhcp-opts format."""""" hosts = [ ] ips_ref = db. 
network_get_associated_fixed_ips ( context, network_ref [ ""id"" ] ) if ips_ref : instance_set = set ( [ fixed_ip_ref [ ""instance_id"" ] for fixed_ip_ref in ips_ref ] ) default_gw_network_node = { } for instance_id in instance_set : vifs = db. virtual_interface_get_by_instance ( context, instance_id ) if : default_gw_network_node [ instance_id ] = vifs [ 0 ] [ ""network_id"" ] for fixed_ip_ref in ips_ref : instance_id = fixed_ip_ref [ ""instance_id"" ] try : instance_ref = db. instance_get ( context, instance_id ) except exception. InstanceNotFound : msg = _ ( ""Instance %(instance_id)s not found"" ) LOG. debug ( msg % { ""instance_id"" : instance_id } ) continue if instance_id in default_gw_network_node : target_network_id = default_gw_network_node [ instance_id ] if target_network_id!= fixed_ip_ref [ ""network_id"" ] : <",False,vifs,len(vifs,0.6847608089447021 2277,"def stats ( self, parent ) : nvar = parent. exog. shape [ 1 ] rv = parent. endog. copy ( ) vl = [ ( i, parent. exog [ :, i ] ) for i in range ( nvar ) ] z = np. empty ( nvar ) past = [ ] for i in range ( nvar ) : dp = np. r_ [ [ np. abs ( np. dot ( rv, x [ 1 ] ) ) for x in vl ] ] j = np. argmax ( dp ) z [ vl [ j ] [ 0 ] ] = nvar - i - 1 x = vl [ j ] [ 1 ] del vl [ j ] if : for v in past : x -= np. dot ( x, v ) * v past. append ( x ) rv -= np. dot ( rv, x ) * x z1 = z [ 0 : nvar // 2 ] z2 = z [ nvar // 2 : ] st = np. where ( z1 > z2, z1, z2 ) * np. sign ( z1 - z2 ) return st",False,self.pursuit,past,0.6620579957962036 2278,"def reward ( self ) : """"""Returns a tuple of sum of raw and processed rewards."""""" raw_rewards, processed_rewards = 0, 0 for ts in self. time_steps : if ts. raw_reward is not None : raw_rewards += ts. raw_reward if : processed_rewards += ts. processed_reward return raw_rewards, processed_rewards",True,ts.processed_reward is not None,ts.processed_reward is not None,0.6585056185722351 2279,"def do_uninstall ( self, name ) : """"""Uninstall a plugin."""""" for ( plugin ) in self. site. plugin_manager. getAllPlugins ( ) : if name == plugin. name : p = plugin. path if : p = p + os. sep p = os. path. abspath ( os. path. join ( p, os. pardir ) ) else : p = os. path. dirname ( p ) LOGGER. warning ( ""About to uninstall plugin: {0}"". format ( name ) ) LOGGER. warning ( ""This will delete {0}"". format ( p ) ) sure = utils. ask_yesno ( ""Are you sure?"" ) if sure : LOGGER. warning ( ""Removing {0}"". format ( p ) ) shutil. rmtree ( p ) return 0 return 1 LOGGER. error ( ""Unknown plugin: {0}"". format ( name ) ) return 1",False,os.path.isdir(p),p and os.path.exists(p),0.6460264921188354 2280,"def test_positions_on_track ( self ) -> None : gpx = mod_gpx. GPX ( ) track = mod_gpx. GPXTrack ( ) gpx. tracks. append ( track ) segment = mod_gpx. GPXTrackSegment ( ) track. segments. append ( segment ) location_to_find_on_track = None for i in range ( 1000 ) : latitude = 45 + i * 0.001 longitude = 45 + i * 0.001 elevation = 100 + i * 2 point = mod_gpx. GPXTrackPoint ( latitude = latitude, longitude = longitude, elevation = elevation ) segment. points. append ( point ) if : location_to_find_on_track = mod_gpx. GPXWaypoint ( latitude = latitude, longitude = longitude ) result = gpx. get_nearest_locations ( location_to_find_on_track ) self. assertTrue ( len ( result ) == 1 )",False,i == 500,gpx.get_distance() > 0,0.6912835836410522 2281,"def updateNodeStatistics ( self ) : scenario_solutions = [ ( scenario. _probability, scenario. _x [ self. _name ] ) for scenario in self. _scenarios ] for variable_id in self. 
_variable_ids : stale = False values = [ ] avg_value = 0.0 for probability, var_values in scenario_solutions : val = var_values [ variable_id ] if val is not None : avg_value += probability * val values. append ( val ) else : stale = True break if : self. _minimums [ variable_id ] = None self. _maximums [ variable_id ] = None self. _averages [ variable_id ] = None else : avg_value /= self. _probability self. _minimums [ variable_id ] = min ( values ) self. _maximums [ variable_id ] = max ( values ) self. _averages [ variable_id ] = avg_value",True,stale,stale,0.7056097984313965 2282,"def __init__ ( self, * args, ** kwargs ) : self. shp = None self. shx = None self. dbf = None self. shapeName = ""Not specified"" self. _offsets = [ ] self. shpLength = None self. numRecords = None self. fields = [ ] self. __dbfHdrLength = 0 if len ( args ) > 0 : if type ( args [ 0 ] ) is type ( ""stringTest"" ) : self. load ( args [ 0 ] ) return if ""shp"" in kwargs. keys ( ) : if : self. shp = kwargs [ ""shp"" ] if hasattr ( self. shp, ""seek"" ) : self. shp. seek ( 0 ) if ""shx"" in kwargs. keys ( ) : if hasattr ( kwargs [ ""shx"" ], ""read"" ) : self. shx = kwargs [ ""shx"" ] if hasattr ( self. shx, ""seek"" ) : self. shx. seek ( 0 ) if ""dbf"" in kwargs. keys ( ) : if hasattr ( kwargs [ ""dbf"" ], ""read"" ) : self. dbf = kwargs [ ""dbf"" ] if hasattr ( self. dbf, ""seek"" ) : self. dbf. seek ( 0 ) if self. shp or self. dbf : self. load ( ) else : raise ShapefileException ( if not PatchPyTorchModelIO. __main_task : return original_fn ( obj, f, * args, ** kwargs ) try : if : filename = f elif hasattr ( f, ""as_posix"" ) : filename = f. as_posix ( ) elif hasattr ( f, ""name"" ) : filename = f. name else : filename = None except Exception : filename = None empty = _Empty ( ) if False and running_remotely ( ) : filename = WeightsFileHandler. restore_weights_file ( empty, filename, Framework. pytorch, PatchPyTorchModelIO. __main_task ) model = original_fn ( obj, filename or f, * args, ** kwargs ) else : model = original_fn ( obj, f, * args, ** kwargs ) WeightsFileHandler. restore_weights_file ( empty, filename, Framework. pytorch, PatchPyTorchModelIO. __main_task ) if empty. trains_in_model : try : model. trains_in_model = empty. trains_in_model except Exception : pass return model",False,"isinstance(f, six.string_types)","hasattr(f, '__load_weights_file')",0.6473408937454224 2284,"def _request_data ( self ) : """"""Return a tuple (url, data, headers)."""""" method = self. method. upper ( ) parts = self. _urlparse ( self. action ) rest, ( query, frag ) = parts [ : - 2 ], parts [ - 2 : ] if method == ""GET"" : self. enctype = ""application/x-www-form-urlencoded"" parts = rest + ( urlencode ( self. _pairs ( ) ), None ) uri = self. _urlunparse ( parts ) return uri, None, [ ] elif method == ""POST"" : parts = rest + ( query, None ) uri = self. _urlunparse ( parts ) if self. enctype == ""application/x-www-form-urlencoded"" : return ( uri, urlencode ( self. _pairs ( ) ), [ ( ""Content-Type"", self. enctype ) ] ) elif : return ( uri, self. _pairs ( ), [ ( ""Content-Type"", self. enctype ) ] ) elif self. enctype == ""multipart/form-data"" : data = _cStringIO ( ) http_hdrs = [ ] mw = MimeWriter ( data, http_hdrs ) f = mw. startmultipartbody ( ""form-data"", add_to_http_hdrs = True, prefix = 0 ) for ii, k, v, control_index in self. _pairs_and_controls ( ) : self. controls [ control_index ]. _write_mime_data ( mw, k, v ) mw. 
lastpart ( ) <",False,self.enctype == 'text/plain',self.enctype == 'application/x-www-form-data',0.6497485637664795 2285,"def _readline_from_keyboard ( self ) : c = self. console while 1 : self. _update_line ( ) event = c. getkeypress ( ) if : self. next_meta = False control, meta, shift, code = event. keyinfo event. keyinfo = ( control, True, shift, code ) if event. keyinfo in self. exit_dispatch : if lineobj. EndOfLine ( self. l_buffer ) == 0 : raise EOFError dispatch_func = self. key_dispatch. get ( event. keyinfo. tuple ( ), self. vi_key ) log ( ""readline from keyboard:%s->%s"" % ( event. keyinfo. tuple ( ), dispatch_func ) ) r = None if dispatch_func : r = dispatch_func ( event ) self. l_buffer. push_undo ( ) self. previous_func = dispatch_func if r : self. _update_line ( ) break",True,self.next_meta,self.next_meta,0.6536632776260376 2286,"def format_sql ( sql, params ) : rv = [ ] if isinstance ( params, dict ) : conv = _FormatConverter ( params ) if params : sql = sql_to_string ( sql ) sql = sql % conv params = conv. params else : params = ( ) for param in params or ( ) : if : rv. append ( ""NULL"" ) param = safe_repr ( param ) rv. append ( param ) return sql, rv",True,param is None,param is None,0.6670687198638916 2287,"def f ( self, info ) : for k in keys : if callable ( k ) : for k2 in list ( info. keys ( ) ) : if : info. pop ( k2 ) else : info. pop ( k, None )",False,k(k2),k2,0.6592751741409302 2288,"def kdt_closest_edges ( verts, socket_inputs ) : """"""Join verts pairs by defining distance range and number of connections"""""" mindist, maxdist, maxNum, skip = socket_inputs kd = create_kdt ( verts ) maxNum = max ( maxNum, 1 ) skip = max ( skip, 0 ) edges = set ( ) edges_add = edges. add max_dist = abs ( maxdist ) min_dist = abs ( mindist ) for i, vtx in enumerate ( verts ) : num_edges = 0 for edge_idx, ( _, index, dist ) in enumerate ( kd. find_range ( vtx, max_dist ) ) : if skip > 0 : if edge_idx < skip : continue if ( dist <= min_dist ) or ( i == index ) : continue edge = tuple ( sorted ( [ i, index ] ) ) if not edge in edges : edges_add ( edge ) num_edges += 1 if : break return list ( edges )",False,num_edges == maxNum,num_edges >= max_dist,0.6677591800689697 2289,"def _populate_class_variables ( ) : lookup = { } reverse_lookup = { } characters_for_re = [ ] for codepoint, name in list ( codepoint2name. items ( ) ) : character = chr ( codepoint ) if : characters_for_re. append ( character ) lookup [ character ] = name reverse_lookup [ name ] = character re_definition = ""[%s]"" % """". join ( characters_for_re ) return lookup, reverse_lookup, re. compile ( re_definition )",False,codepoint != 34,character >= 32,0.6735028028488159 2290,"def main ( ) : docs = ""Common AST"" docs += ""\n"" + ""="" * len ( docs ) + ""\n\n"" docs += "".. automodule:: commonast\n\n"" docs += "".. autofunction:: commonast.parse\n\n"" docs += ""----\n\n"" docs += ""The nodes\n---------\n\n"" docs += "".. autoclass:: commonast.Node\n :members:\n\n"" code = open ( commonast. __file__, ""rb"" ). read ( ). decode ( ) status = 0 for line in code. splitlines ( ) : if status == 0 : if line. startswith ( ""## --"" ) : status = 1 elif : if line. startswith ( ""## --"" ) : break elif line. startswith ( ""## "" ) : title = line [ 3 : ]. strip ( ) docs += ""%s\n%s\n\n"" % ( title, ""-"" * len ( title ) ) elif line. startswith ( ""class "" ) : clsname = line [ 6 : ]. split ( ""("" ) [ 0 ] docs += "".. autoclass:: %s\n\n"" % ( ""commonast."" + clsname ) cls = getattr ( commonast, clsname ) cls. 
__doc__ = ""%s()\n%s"" % ( clsname, cls. __doc__ ) filename = os. path. join ( OUTPUT_DIR, """,False,status == 1,status == 2,0.6859836578369141 2291,"def __init__ ( self, input_chunk ) : self. qualifier = None self. may_must = None self. flag_actions = [ ] m = flags_rec_t. _flag_pattern. search ( input_chunk ) if m : flags_input = m. group ( ""flags"" ). strip ( ). split ( ) qualifiers = m. group ( ""qualifiers"" ). strip ( ). split ( ) else : die ( ""Could not find flags in %s"" % input_chunk ) first = qualifiers [ 0 ] if first in flags_rec_t. valid_flags_qualifiers : self. qualifier = first qualifiers. pop ( 0 ) self. may_must = qualifiers [ 0 ] if self. may_must not in flags_rec_t. valid_flags_semantics_specifiers : die ( ""Invalid flags specification: %s"" % input_chunk ) self. read_set = flag_set_t ( ) self. write_set = flag_set_t ( ) self. undefined_set = flag_set_t ( ) self. flag_action_index = - 1 self. simple_id = - 1 for flag_action_str in flags_input : fa = flag_action_t ( flag_action_str ) self. flag_actions. append ( fa ) if fa. flag : if fa. reads_flag ( ) : self. read_set. set ( fa. flag ) if : self. write_set. set ( fa. flag ) if fa. makes_flag_undefined ( ) : self. undefined_set. set ( fa",False,fa.writes_flag(),fa.makes_flag_out(),0.6603126525878906 2292,"def enable ( self ) : log. debug ( ""\n\nEnabling %s"", self. __class__. __name__ ) for event in self. events : if self. __imp == ""core"" : component. get ( ""Notifications"" ). register_custom_email_notification ( event. __class__. __name__, self. custom_email_message_provider ) elif : notifications_component = component. get ( ""Notifications"" ) notifications_component. register_custom_popup_notification ( event. __class__. __name__, self. custom_popup_message_provider ) notifications_component. register_custom_blink_notification ( event. __class__. __name__, self. custom_blink_message_provider ) notifications_component. register_custom_sound_notification ( event. __class__. __name__, self. custom_sound_message_provider ) self. lc. start ( 60, False )",False,self.__imp == 'gtk',self.__imp == 'popup',0.6548651456832886 2293,"def _get_filter ( self, tag, user_id, include_draft, conn ) : filters = [ ] if tag : tag = tag. upper ( ) tag_statement = sqla. select ( [ self. _tag_table. c. id ] ). where ( self. _tag_table. c. text == tag ) tag_result = conn. execute ( tag_statement ). fetchone ( ) if : tag_id = tag_result [ 0 ] tag_filter = sqla. and_ ( self. _tag_posts_table. c. tag_id == tag_id, self. _post_table. c. id == self. _tag_posts_table. c. post_id, ) filters. append ( tag_filter ) if user_id : user_filter = sqla. and_ ( self. _user_posts_table. c. user_id == user_id, self. _post_table. c. id == self. _user_posts_table. c. post_id, ) filters. append ( user_filter ) draft_filter = ( self. _post_table. c. draft == 1 if include_draft else self. _post_table. c. draft == 0 ) filters. append ( draft_filter ) sql_filter = sqla. and_ ( * filters ) return sql_filter",False,tag_result is not None,tag_result,0.6548498868942261 2294,"def generateMapItemNode ( self, node ) : try : self. mappingItem = True fieldname, value = node transformed_fieldname = self. fieldNameMapping ( fieldname, value ) has_wildcard = re. search ( r""((\\(\*|\?|\\))|\*|\?|_|%)"", self. generateNode ( value ) ) if : return self. mapMulti % ( transformed_fieldname, self. generateNode ( value ) ) elif ""LENGTH"" in transformed_fieldname : return self. mapLength % ( transformed_fieldname, value ) elif type ( value ) == list : return self. 
generateMapItemListNode ( transformed_fieldname, value ) elif ( self. mapListsSpecialHandling == False and type ( value ) in ( str, int, list ) or self. mapListsSpecialHandling == True and type ( value ) in ( str, int ) ) : if has_wildcard : return self. mapWildcard % ( transformed_fieldname, self. generateNode ( value ), ) else : return self. mapExpression % ( transformed_fieldname, self. generateNode ( value ), ) elif ""sourcetype",False,"',' in self.generateNode(value) and (not has_wildcard)","""MULTI_TAB > 0",0.6534509658813477 2295,"def depth_first_search ( split_bin_str, n, l, sol = None, cur_sum = 0 ) : """"""Partition an integer value of n into l bins each with min 1"""""" sol = sol or [ ] cur_idx = len ( sol ) if cur_idx < l : m = len ( split_bin_str [ cur_idx ] ) n_avail = n - cur_sum for j in range ( 1, min ( m, n_avail - ( l - 1 - cur_idx ) ) + 1 ) : depth_first_search ( split_bin_str, n, l, sol = sol + [ j ], cur_sum = cur_sum + j ) elif cur_idx == l : if : partition_list = sol context. coeff += _coeff_monomial_with_partition ( split_bin_str, partition_list )",False,cur_sum == n,n - cur_sum > 0,0.669865608215332 2296,"def _get_omega ( self ) : if self. _omega is None : n = self. get_drift_dim ( ) // 2 omg = sympl. calc_omega ( n ) if : self. _omega = Qobj ( omg, dims = self. dyn_dims ) self. _omega_qobj = self. _omega elif self. oper_dtype == sp. csr_matrix : self. _omega = sp. csr_matrix ( omg ) else : self. _omega = omg return self. _omega",False,self.oper_dtype == Qobj,self.oper_dtype == sp.Qobj,0.6642476320266724 2297,"def HeaderPrintMUADetails ( message, mta = None ) : """"""Summarize what the message tells us directly about the MUA."""""" details = [ ] for header in MUA_ID_HEADERS : value = message. get ( header ) if value : value = "" "". join ( [ v for v in HP_MUA_ID_SPLIT. split ( value. strip ( ) ) if not HP_MUA_ID_IGNORE. search ( v ) ] ) details. extend ( [ header, value. strip ( ) ] ) if not details : if : details. extend ( [ ""Guessed"", ""GMail"" ] ) elif ""x-ms-tnef-correlator"" in message or ""x-ms-has-attach"" in message : details. extend ( [ ""Guessed"", ""Exchange"" ] ) elif ""@mailpile"" in message. get ( ""message-id"", """" ) : details. extend ( [ ""Guessed"", ""Mailpile"" ] ) return details",False,mta and mta[0].startswith('Received by google.com'),'x-ms-mail' in message,0.6513311862945557 2298,"def _renew_registration ( self, serverToRegister, registrationClient, period = DEF_REGINT ) : try : with self. _lock : if : return self. _register_server ( serverToRegister, registrationClient ) except ( BrokenPipeError, OSError ) as e : self. logger. info ( ""Discovery server registration failure: {:s}"". format ( str ( e ) ) ) return except TimeoutError : self. logger. info ( ""Discovery server registration timeout: {:s}"". format ( str ( e ) ) ) if period == 0 : return elif not serverToRegister. iserver. is_running ( ) : return else : self. _schedule_registration ( serverToRegister, registrationClient, period )",False,registrationClient not in self._registration_clients,serverToRegister.isserver.is_running(),0.658024787902832 2299,"def get_cursor_position ( fragment : str = ""[SetCursorPosition]"", ) -> Optional [ Point ] : for y, line in enumerate ( fragment_lines ) : x = 0 for style_str, text, * _ in line : if : return Point ( x = x, y = y ) x += len ( text ) return None",False,fragment in style_str,style_str.startswith(text),0.6661649942398071 2300,"def resize ( self, * e ) : bold = ( ""helvetica"", - self. _size. 
get ( ), ""bold"" ) helv = ( ""helvetica"", - self. _size. get ( ) ) xspace = self. _size. get ( ) yspace = self. _size. get ( ) for widget in self. _widgets : widget [ ""node_font"" ] = bold widget [ ""leaf_font"" ] = helv widget [ ""xspace"" ] = xspace widget [ ""yspace"" ] = yspace if self. _size. get ( ) < 20 : widget [ ""line_width"" ] = 1 elif : widget [ ""line_width"" ] = 2 else : widget [ ""line_width"" ] = 3 self. _layout ( )",False,self._size.get() < 30,self._size.get() < 50,0.6579011082649231 2301,"def get_generators ( self ) : """"""Get a dict with all registered generators, indexed by name"""""" generators = { } for core in self. db. find ( ) : if : _generators = core. get_generators ( { } ) if _generators : generators [ str ( core. name ) ] = _generators return generators",True,"hasattr(core, 'get_generators')","hasattr(core, 'get_generators')",0.660944938659668 2302,"def transition ( self, context, token, value ) : if context. type == ""from"" : if token == ""NAME"" : if context. expect == ""source"" : if value == ""import"" and context. level : context. expect = ""names"" else : context. source += value context. expect = ""."" elif : context. expect = ""names"" elif context. expect == ""names"" : context. names. append ( value ) context. expect = "","" elif token == ""OP"" : if value == "","" and context. expect == "","" : context. expect = ""names"" elif value == ""."" and context. expect == ""."" : context. source += ""."" context. expect = ""source"" elif value == ""."" and context. expect == ""source"" : context. level += 1 elif context. type == ""import"" : if token == ""NAME"" : if context. expect == ""module"" : if context. modules and context. modules [ - 1 ]. endswith ( ""."" ) : context. modules [ - 1 ] += value params = self. _verify [ ""params"" ] in_port = self. _verify [ ""in_port"" ] timeout = self. _verify [ ""timeout"" ] if timeout : duration_nsec = ( msg. duration_sec * 10 ** 9 ) + msg. duration_nsec timeout_nsec = timeout * 10 ** 9 l = ( timeout - 0.5 ) * 10 ** 9 h = ( timeout + 1.5 ) * 10 ** 9 if : return ""bad duration time. set=%s(nsec), duration=%s(nsec)"" % ( timeout_nsec, duration_nsec, ) for name, val in params. items ( ) : r_val = getattr ( msg, name ) if val!= r_val : return ""%s is mismatched. verify=%s, reply=%s"" % ( name, val, r_val ) for f in msg. match. fields : if f. header == ofproto_v1_2. OXM_OF_IN_PORT : if f. value!= in_port : return ""in_port is mismatched. verify=%s, reply=%s"" % ( in_port, f. value ) return True",False,not l < duration_nsec < h,l > timeout_nsec or h > timeout_nsec,0.6586206555366516 2304,"def display_prompt ( self ) : self. caret_x = - 1 self. caret_y = - 1 self. do_display_prompt ( self. RPROMPT, y_offset = self. PROMPT_OFFSET_V, x_align = ""right"" ) self. do_display_prompt ( self. PROMPT, y_offset = self. PROMPT_OFFSET_V ) try : if : self. screen. move ( self. caret_y, self. caret_x + display. screen_len ( self. model. query, 0, self. model. caret ), ) except curses. error : pass",False,self.caret_x >= 0 and self.caret_y >= 0,self.model.caret,0.6545965075492859 2305,"def _update ( model, lun, value ) : if isinstance ( model, dict ) : luns = model. keys ( ) if lun is None else [ lun ] for lun_item in luns : if : raise CLIError ( ""data disk with lun of '{}' doesn't exist"". format ( lun_item ) ) model [ lun_item ] [ ""writeAcceleratorEnabled"" ] = value else : if lun is None : disks = [ model. os_disk ] + ( model. data_disks or [ ] ) elif lun == ""os"" : disks = [ model. 
os_disk ] else : disk = next ( ( d for d in model. data_disks if d. lun == lun ), None ) if not disk : raise CLIError ( ""data disk with lun of '{}' doesn't exist"". format ( lun ) ) disks = [ disk ] for disk in disks : disk. write_accelerator_enabled = value",False,lun_item not in model,not model.data_disks,0.6596503853797913 2306,"def device_iter ( ** kwargs ) : for dev in backend. enumerate_devices ( ) : d = Device ( dev, backend ) tests = ( val == _try_getattr ( d, key ) for key, val in kwargs. items ( ) ) if : yield d",False,_interop._all(tests) and (custom_match is None or custom_match(d)),tests,0.6481562852859497 2307,"def test_time_series_intraday_date_indexing ( self, mock_request ) : """"""Test that api call returns a pandas data frame with a date as index"""""" ts = TimeSeries ( key = TestAlphaVantage. _API_KEY_TEST, output_format = ""pandas"", indexing_type = ""date"" ) url = ""https://www.alphavantage.co/query?function=TIME_SERIES_INTRADAY&symbol=MSFT&interval=1min&outputsize=full&apikey=test&datatype=json"" path_file = self. get_file_from_url ( ""mock_time_series"" ) with open ( path_file ) as f : mock_request. get ( url, text = f. read ( ) ) data, _ = ts. get_intraday ( ""MSFT"", interval = ""1min"", outputsize = ""full"" ) if : assert isinstance ( data. index [ 0 ], Timestamp ) else : if sys. version_info [ 0 ] == 3 : assert isinstance ( data. index [ 0 ], str ) else : assert isinstance ( data. index [ 0 ], basestring )",False,ts.indexing_type == 'date',sys.version_info[0] == 2,0.6527478694915771 2308,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. sessionHandle = TSessionHandle ( ) self. sessionHandle. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. I32 : self. infoType = iprot. readI32 ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRUCT,fid == 2,0.6616673469543457 2309,"def _line_ranges ( statements, lines ) : """"""Produce a list of ranges for `format_lines`."""""" statements = sorted ( statements ) lines = sorted ( lines ) pairs = [ ] start = None lidx = 0 for stmt in statements : if lidx >= len ( lines ) : break if stmt == lines [ lidx ] : lidx += 1 if : start = stmt end = stmt elif start : pairs. append ( ( start, end ) ) start = None if start : pairs. append ( ( start, end ) ) return pairs",False,not start,stmt > lines[lidx],0.6763074398040771 2310,"def _build_auth_record ( response ) : """"""Build an AuthenticationRecord from the result of an MSAL ClientApplication token request"""""" try : id_token = response [ ""id_token_claims"" ] if : client_info = json. loads ( _decode_client_info ( response [ ""client_info"" ] ) ) home_account_id = ""{uid}.{utid}"". format ( ** client_info ) else : home_account_id = id_token [ ""sub"" ] issuer = six. moves. urllib_parse. urlparse ( id_token [ ""iss"" ] ) tenant_id = id_token. get ( ""tid"" ) or issuer. path. strip ( ""/"" ) username = id_token. get ( ""preferred_username"" ) or id_token [ ""upn"" ] return AuthenticationRecord ( authority = issuer. 
netloc, client_id = id_token [ ""aud"" ], home_account_id = home_account_id, tenant_id = tenant_id, username = username, ) except ( KeyError, ValueError ) as ex : auth_error = ClientAuthenticationError ( message = ""Failed to build AuthenticationRecord from unexpected identity token"" ) six. raise_from ( auth_error, ex )",False,'client_info' in response,id_token['client_info'],0.6563567519187927 2311,"def publish ( self, dashboard_id : int ) -> FlaskResponse : """"""Gets and toggles published status on dashboards"""""" logger. warning ( ""This API endpoint is deprecated and will be removed in version 1.0.0"" ) session = db. session ( ) Role = ab_models. Role dash = session. query ( Dashboard ). filter ( Dashboard. id == dashboard_id ). one_or_none ( ) admin_role = session. query ( Role ). filter ( Role. name == ""Admin"" ). one_or_none ( ) if request. method == ""GET"" : if : return json_success ( json. dumps ( { ""published"" : dash. published } ) ) return json_error_response ( f""ERROR: cannot find dashboard {dashboard_id}"", status = 404 ) edit_perm = is_owner ( dash, g. user ) or admin_role in get_user_roles ( ) if not edit_perm : return json_error_response ( f'ERROR: ""{g.user.username}"" cannot alter' f'dashboard ""{dash.dashboard_title}""', status = 403, ) dash. published = str ( request. form [ ""published"" ] ). lower ( ) == ""true"" session. commit ( ) return json_success ( json. dumps ( { ""published"" : dash. published } ) )",False,dash,dash.published,0.682155191898346 2312,"def buildGAPIServiceObject ( api, soft_errors = False ) : global extra_args auth_as = options. use_admin if options. use_admin else options. email scopes = getAPIScope ( api ) credentials = getSvcAcctCredentials ( scopes, auth_as ) if options. debug : extra_args [ ""prettyPrint"" ] = True if os. path. isfile ( os. path. join ( getProgPath ( ), ""extra-args.txt"" ) ) : config = configparser. ConfigParser ( ) config. optionxform = str config. read ( getGamPath ( ) + ""extra-args.txt"" ) extra_args. update ( dict ( config. items ( ""extra-args"" ) ) ) httpc = _createHttpObj ( ) request = google_auth_httplib2. Request ( httpc ) credentials. refresh ( request ) version = getAPIVer ( api ) try : service = googleapiclient. discovery. build ( api, version, http = httpc, cache_discovery = False ) service. _http = google_auth_httplib2. AuthorizedHttp ( credentials, http = httpc ) return service except ( httplib2. ServerNotFoundError, RuntimeError ) as e : systemErrorExit ( 4, e ) except google. auth. exceptions. RefreshError as e : if : e = e. args [ 0 ] systemErrorExit ( 5, e )",False,"isinstance(e.args, tuple)",e.args,0.6462751626968384 2313,"def create_wallet_source ( wallet : Wallet, include_worth = True ) : exchange_name = wallet. exchange. name symbol = wallet. instrument. symbol with Module ( exchange_name + "":/"" + symbol ) as wallet_ds : free_balance = Lambda ( ""free"", lambda w : w. balance. size, wallet ) locked_balance = Lambda ( ""locked"", lambda w : w. locked_balance. size, wallet ) total_balance = Lambda ( ""total"", lambda w : w. total_balance. size, wallet ) nodes = [ free_balance, locked_balance, total_balance ] if : price = Select ( lambda node : node. name. endswith ( symbol ) ) ( wallet. exchange ) worth = BinOp ( ""worth"", operator. mul ) ( price, total_balance ) nodes += [ worth ] return wallet_ds",True,include_worth,include_worth,0.6667280793190002 2314,"def asset ( * paths ) : for path in paths : fspath = www_root + ""/assets/"" + path etag = """" try : if env. 
cache_static : etag = asset_etag ( fspath ) else : os. stat ( fspath ) except FileNotFoundError as e : if path == paths [ - 1 ] : if : tell_sentry ( e, { } ) else : continue except Exception as e : tell_sentry ( e, { } ) return asset_url + path + ( etag and ""?etag="" + etag )",False,not os.path.exists(fspath + '.spt'),e,0.652911901473999 2315,"def apply ( self ) : self. maxdepth = self. startnode. details. get ( ""depth"", None ) self. startvalue = self. startnode. details. get ( ""start"", 1 ) self. prefix = self. startnode. details. get ( ""prefix"", """" ) self. suffix = self. startnode. details. get ( ""suffix"", """" ) self. startnode. parent. remove ( self. startnode ) if self. document. settings. sectnum_xform : if : self. maxdepth = sys. maxint self. update_section_numbers ( self. document ) else : self. document. settings. sectnum_depth = self. maxdepth self. document. settings. sectnum_start = self. startvalue self. document. settings. sectnum_prefix = self. prefix self. document. settings. sectnum_suffix = self. suffix",True,self.maxdepth is None,self.maxdepth is None,0.6597217321395874 2316,"def parse ( self, response ) : try : content = response. content. decode ( ""utf-8"", ""ignore"" ) content = json. loads ( content, strict = False ) except : self. logger. error ( ""Fail to parse the response in json format"" ) return for item in content [ ""data"" ] : if ""objURL"" in item : img_url = self. _decode_url ( item [ ""objURL"" ] ) elif : img_url = item [ ""hoverURL"" ] else : continue yield dict ( file_url = img_url )",True,'hoverURL' in item,'hoverURL' in item,0.6621360778808594 2317,"def do_rollout ( agent, env, num_steps, render = False ) : total_rew = 0 ob = env. reset ( ) for t in range ( num_steps ) : a = agent. act ( ob ) ( ob, reward, done, _info ) = env. step ( a ) total_rew += reward if : env. render ( ) if done : break return total_rew, t + 1",False,render and t % 3 == 0,render,0.6644103527069092 2318,"def pickle_to_file ( obj, file_path, gzip = False ) : """"""Pickle obj to file_path with gzipping and failure protection."""""" tmp_file_path = file_path + ""._tmp_"" with tf. io. gfile. GFile ( tmp_file_path, ""wb"" ) as f : if : pickle. dump ( obj, f, protocol = pickle. HIGHEST_PROTOCOL ) else : with gzip_lib. GzipFile ( fileobj = f, compresslevel = 2 ) as gzipf : pickle. dump ( obj, gzipf, protocol = pickle. HIGHEST_PROTOCOL ) tf. io. gfile. rename ( tmp_file_path, file_path, overwrite = True )",False,not gzip,gzip is False,0.6882905960083008 2319,"def _do_load ( self, row ) : values = dict ( ( c. name, row [ c ] ) for c in self. columns if c in row ) if all ( ( v is None ) for v in values. values ( ) ) : return None rv = self. model ( ) for c in self. columns : if : instance_key = self. model. _column_name_map. invert_get ( c. name ) rv. __values__ [ instance_key ] = row [ c ] return rv",True,c in row,c in row,0.681359052658081 2320,"def get_django_comment ( text : str, i : int ) -> str : end = i + 4 unclosed_end = 0 while end <= len ( text ) : if : return text [ i : end ] if not unclosed_end and text [ end ] == ""<"" : unclosed_end = end end += 1 raise TokenizationException ( ""Unclosed comment"", text [ i : unclosed_end ] )",False,text[end - 2:end] == '#}',"text[end] == "" < / ",0.6616141200065613 2321,"def repl ( m, base_path, rel_path = None ) : if m. group ( ""comments"" ) : tag = m. group ( ""comments"" ) else : tag = m. group ( ""open"" ) if : tag += RE_TAG_LINK_ATTR. sub ( lambda m2 : repl_absolute ( m2, base_path ), m. 
group ( ""attr"" ) ) else : tag += RE_TAG_LINK_ATTR. sub ( lambda m2 : repl_relative ( m2, base_path, rel_path ), m. group ( ""attr"" ) ) tag += m. group ( ""close"" ) return tag",True,rel_path is None,rel_path is None,0.6548975706100464 2322,"def fake_query ( * args ) : kwargs = args [ 1 ] start_key = kwargs. get ( EXCLUSIVE_START_KEY, None ) if start_key : item_idx = 0 for query_item in BATCH_GET_ITEMS. get ( RESPONSES ). get ( UserModel. Meta. table_name ) : item_idx += 1 if : break query_items = BATCH_GET_ITEMS. get ( RESPONSES ). get ( UserModel. Meta. table_name ) [ item_idx : item_idx + 1 ] else : query_items = BATCH_GET_ITEMS. get ( RESPONSES ). get ( UserModel. Meta. table_name ) [ : 1 ] data = { CAMEL_COUNT : len ( query_items ), ITEMS : query_items, SCANNED_COUNT : 2 * len ( query_items ), LAST_EVALUATED_KEY : query_items [ - 1 ] if len ( query_items ) else None, } return data",False,query_item == start_key,query_item,0.6555781364440918 2323,"def n_weights ( self ) : """"""Return the number of weights (parameters) in this network."""""" n_weights = 0 for i, w in enumerate ( self. all_weights ) : n = 1 for s in w. get_shape ( ) : try : s = int ( s ) except : s = 1 if : n = n * s n_weights = n_weights + n return n_weights",True,s,s,0.6920080184936523 2324,"def calculate ( self ) : addr_space = utils. load_as ( self. _config ) regapi = registryapi. RegistryApi ( self. _config ) regapi. reset_current ( ) version = ( addr_space. profile. metadata. get ( ""major"", 0 ), addr_space. profile. metadata. get ( ""minor"", 0 ), ) for value, data_raw in regapi. reg_yield_values ( ""security"", ""Policy\\PolAdtEv"", thetype = ""REG_NONE"" ) : bufferas = addrspace. BufferAddressSpace ( self. _config, data = data_raw ) if : ap = obj. Object ( ""AuditPolDataXP"", offset = 0, vm = bufferas ) elif version <= ( 6, 0 ) : ap = obj. Object ( ""AuditPolDataVista"", offset = 0, vm = bufferas ) elif version == ( 6, 1 ) : ap = obj. Object ( ""AuditPolData7"", offset = 0, vm = bufferas ) elif version == ( 6, 2 ) or version == ( 6, 3 ) : ap = obj. Object ( ""AuditPolData8"", offset = 0, vm = bufferas ) else : ap = obj. Object ( ""AuditPolData10"", offset = 0, vm = bufferas ) if ap == None : debug. error ( ""No AuditPol data found"" ) yield data_raw, ap",False,"version <= (5, 1)","version == (6, 0)",0.654232382774353 2325,"def cdn_ip ( address ) : if not address : return False try : _ = addr_to_int ( address ) for prefix, mask in CDN_RANGES. get ( address. split ( ""."" ) [ 0 ], { } ) : if : return True except ( IndexError, ValueError ) : pass return False",False,_ & mask == prefix,mask.find(prefix) >= 0,0.6695114374160767 2326,"def start ( self ) : self. on_config_change ( ) self. start_config_watch ( ) try : if self. config [ ""MITMf"" ] [ ""DNS"" ] [ ""tcp"" ]. lower ( ) == ""on"" : self. startTCP ( ) else : self. startUDP ( ) except socket. error as e : if : shutdown ( ""\n[DNS] Unable to start DNS server on port {}: port already in use"". format ( self. config [ ""MITMf"" ] [ ""DNS"" ] [ ""port"" ] ) )",False,'Address already in use' in e,e.args[0] in [TAB > 0,0.6601972579956055 2327,"def find_volume_by_name ( volume_name : str, not_found_msg : Optional [ str ] = None, found_several_msg : Optional [ str ] = None, parent = None, ) -> Optional [ str ] : from thonny. languages import tr if not_found_msg is None : not_found_msg = tr ( ""Could not find disk '%s'. Do you want to locate it yourself?"" ) if found_several_msg is None : found_several_msg = tr ( ""Found several '%s' disks. 
Do you want to choose one yourself?"" ) volumes = find_volumes_by_name ( volume_name ) if len ( volumes ) == 1 : return volumes [ 0 ] else : if len ( volumes ) == 0 : msg = not_found_msg % volume_name else : msg = found_several_msg % volume_name import tkinter as tk from tkinter. messagebox import askyesno from thonny. ui_utils import askdirectory if askyesno ( tr ( ""Can't find suitable disk"" ), msg, master = parent ) : path = askdirectory ( parent = parent ) if : return path return None",False,path,len(path) > 0,0.6868901252746582 2328,"def __init__ ( self, height = 20, width = 20, density = 0.8, minority_pc = 0.2, homophily = 3 ) : """""" """""" self. height = height self. width = width self. density = density self. minority_pc = minority_pc self. homophily = homophily self. schedule = RandomActivation ( self ) self. grid = SingleGrid ( height, width, torus = True ) self. happy = 0 self. datacollector = DataCollector ( { ""happy"" : ""happy"" }, { ""x"" : lambda a : a. pos [ 0 ], ""y"" : lambda a : a. pos [ 1 ] }, ) for cell in self. grid. coord_iter ( ) : x = cell [ 1 ] y = cell [ 2 ] if : if self. random. random ( ) < self. minority_pc : agent_type = 1 else : agent_type = 0 agent = SchellingAgent ( ( x, y ), self, agent_type ) self. grid. position_agent ( agent, ( x, y ) ) self. schedule. add ( agent ) self. running = True self. datacollector. collect ( self )",False,self.random.random() < self.density,self.has_agent,0.6485270261764526 2329,"def cache_cable_devices ( apps, schema_editor ) : Cable = apps. get_model ( ""dcim"", ""Cable"" ) if ""test"" not in sys. argv : print ( ""\nUpdating cable device terminations..."" ) cable_count = Cable. objects. count ( ) for i, cable in enumerate ( Cable. objects. all ( ), start = 1 ) : if not i % 1000 and ""test"" not in sys. argv : print ( ""[{}/{}]"". format ( i, cable_count ) ) termination_a_model = apps. get_model ( cable. termination_a_type. app_label, cable. termination_a_type. model ) termination_a_device = None if hasattr ( termination_a_model, ""device"" ) : termination_a = termination_a_model. objects. get ( pk = cable. termination_a_id ) termination_a_device = termination_a. device termination_b_model = apps. get_model ( cable. termination_b_type. app_label, cable. termination_b_type. model ) termination_b_device = None if : termination_b = termination_b_model. objects. get ( pk = cable. termination_b_id ) termination_b_device = termination_b. device Cable. objects. filter ( pk = cable. pk ). update ( _termination_a_device = termination_a_device, _termination_b_device = termination_b_device, """"""Accept authority or URI and extract only the authority and path."""""" parts = urlparse. urlsplit ( uri ) if parts [ 1 ] : scheme = parts [ 0 ] authority = parts [ 1 ] path = parts [ 2 ] or ""/"" else : scheme = None authority = uri path = ""/"" host, port = splitport ( authority ) if default_port and port is None and scheme is not None : dport = { ""http"" : 80, ""https"" : 443, }. get ( scheme ) if : authority = ""%s:%d"" % ( host, dport ) return authority, path",False,dport is not None,dport,0.6581437587738037 2331,"def read_config ( args, parser ) : """"""Read both user configuration and local configuration."""""" config = SafeConfigParser ( ) try : config. read ( args. global_config ) if : parent = tail = args. files and os. path. abspath ( os. path. commonprefix ( args. files ) ) while tail : if config. read ( [ os. path. join ( parent, fn ) for fn in PROJECT_CONFIG ] ) : break ( parent, tail ) = os. path. 
split ( parent ) defaults = { } option_list = { o. dest : o. type or type ( o. default ) for o in parser. _actions } for section in [ ""pep8"", ""pycodestyle"", ""flake8"" ] : if not config. has_section ( section ) : continue for norm_opt, k, value in _get_normalize_options ( config, section, option_list ) : if args. verbose : print ( ""enable config: section={}, key={}, value={}"". format ( section, k, value ) ) defaults [ norm_opt",False,not args.ignore_local_config,args.files,0.6543875932693481 2332,"def find_distribution_modules ( name = __name__, file = __file__ ) : current_dist_depth = len ( name. split ( ""."" ) ) - 1 current_dist = os. path. join ( os. path. dirname ( file ), * ( [ os. pardir ] * current_dist_depth ) ) abs = os. path. abspath ( current_dist ) dist_name = os. path. basename ( abs ) for dirpath, dirnames, filenames in os. walk ( abs ) : package = ( dist_name + dirpath [ len ( abs ) : ] ). replace ( ""/"", ""."" ) if : yield package for filename in filenames : if filename. endswith ( "".py"" ) and filename!= ""__init__.py"" : yield ""."". join ( [ package, filename ] ) [ : - 3 ]",False,'__init__.py' in filenames,len(filenames) > 0,0.6513705253601074 2333,"def main ( docs_dir, outfile ) : keys = { } for fname in os. listdir ( docs_dir ) : if : logging. warning ( ""Ignored path: %s"", fname ) continue keywords = index_file ( os. path. join ( docs_dir, fname ) ) for kwrd in keywords : if kwrd not in keys : keys [ kwrd ] = set ( ) keys [ kwrd ]. add ( fname ) with open ( outfile ) as fhr : items = [ ] for kwrd in sorted ( keys. keys ( ) ) : pages = "", "". join ( [ ""[%s](%s)"" % ( x. replace ( "".md"", """" ), x. replace ( "".md"", """" ) ) for x in sorted ( keys [ kwrd ] ) ] ) items. append ( ""* `%s`: %s"" % ( kwrd, pages ) ) text = fhr. read ( ) + ""\n"". join ( items ) with open ( outfile, ""wt"" ) as fhw : fhw. write ( text )",False,not fname.endswith('.md') or fname == 'YAMLTutorial.md',not os.path.isfile(fname),0.6517249345779419 2334,"def wrapper ( * args, ** kwargs ) : start = time. time ( ) try : return f ( * args, ** kwargs ) finally : timing = ( time. time ( ) - start ) * 1000.0 func = fqfn ( f ) lt = logtarget if expand_logtarget : lt += ""."" + func logger = logging. getLogger ( lt ) if : data = { ""func"" : func, ""timing"" : timing } if incl_func_args : data. update ( func_args = "","". join ( map ( repr, args ) ), func_kwargs = "","". join ( map ( lambda x : ""{}={!r}"". format ( x [ 0 ], x [ 1 ] ), kwargs. items ( ) ) ), ) logger. debug ( message. format ( ** data ), extra = data )",False,logger.isEnabledFor(logging.DEBUG),logger is not None,0.6503386497497559 2335,"def serialize_config ( self, session, key, tid, language ) : cache_key = gen_cache_key ( key, tid, language ) cache_obj = None if cache_key not in self. cache : if : cache_obj = db_admin_serialize_node ( session, tid, language ) elif key == ""notification"" : cache_obj = db_get_notification ( session, tid, language ) self. cache [ cache_key ] = cache_obj return self. cache [ cache_key ]",False,key == 'node',key == 'admin',0.662927508354187 2336,"def parse_search_response ( json_data ) : """"""Construct response for any input"""""" if json_data is None : return { ""error"" : ""Error parsing empty search engine response"" } try : return json. loads ( json_data ) except json. JSONDecodeError : logger. exception ( ""Error parsing search engine response"" ) m = re_pre. search ( json_data ) if : return { ""error"" : ""Error parsing search engine response"" } error = web. htmlunquote ( m. 
group ( 1 ) ) solr_error = ""org.apache.lucene.queryParser.ParseException: "" if error. startswith ( solr_error ) : error = error [ len ( solr_error ) : ] return { ""error"" : error }",True,m is None,m is None,0.6705228090286255 2337,"def _render_xlsx ( self, form_data, output_file = None ) : wb = Workbook ( write_only = True ) n_sheets = len ( self. sheets ) for i_sheet, ( s, l ) in enumerate ( self. sheets ) : ws = wb. create_sheet ( str ( l ) ) total = 0 counter = 0 for i, line in enumerate ( self. iterate_sheet ( form_data, sheet = s ) ) : if isinstance ( line, self. ProgressSetTotal ) : total = line. total continue ws. append ( [ str ( val ) if not isinstance ( val, KNOWN_TYPES ) else val for val in line ] ) if : counter += 1 if counter % max ( 10, total // 100 ) == 0 : self. progress_callback ( counter / total * 100 / n_sheets + 100 / n_sheets * i_sheet ) if output_file : wb. save ( output_file ) return ( self. get_filename ( ) + "".xlsx"", ""application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"", None, ) else : with tempfile. NamedTemporaryFile ( suffix = "".xlsx"" ) as f : wb. save",True,total,total,0.6983985900878906 2338,"def recvcb_actions ( self, l ) : if l. startswith ( ""!!"" ) : self. do_pause ( None ) msg = l. split ( "" "", 1 ) if len ( msg ) > 1 and self. silent is False : self. logError ( msg [ 1 ]. ljust ( 15 ) ) sys. stdout. write ( self. promptf ( ) ) sys. stdout. flush ( ) return True elif l. startswith ( ""//"" ) : command = l. split ( "" "", 1 ) if len ( command ) > 1 : command = command [ 1 ] self. log ( _ ( ""Received command %s"" ) % command ) command = command. split ( "":"" ) if len ( command ) == 2 and command [ 0 ] == ""action"" : command = command [ 1 ] if : self. do_pause ( None ) sys. stdout. write ( self. promptf ( ) ) sys. stdout. flush ( ) return True elif command == ""resume"" : self. do_resume ( None ) sys. stdout. write ( self. promptf ( ) ) sys. stdout. flush ( ) return True elif command ==",True,command == 'pause',command == 'pause',0.6651689410209656 2339,"def __contains__ ( self, item ) : if isinstance ( item, Distribution ) : if item. key!= self. key : return False if self. index : item = item. parsed_version elif isinstance ( item, basestring ) : item = parse_version ( item ) last = None for parsed, trans, op, ver in self. index : action = trans [ cmp ( item, parsed ) ] if action == ""F"" : return False elif action == ""T"" : return True elif : last = True elif action == ""-"" or last is None : last = False if last is None : last = True return last",False,action == '+',action == 'o',0.7027027606964111 2340,"def limit_rate ( self, response : Response ) -> Optional [ float ] : next_check = None retry_after = response. headers. get ( ""Retry-After"" ) if retry_after : try : delay = float ( retry_after ) except ValueError : try : until = parsedate_to_datetime ( retry_after ) except ( TypeError, ValueError ) : pass else : next_check = datetime. timestamp ( until ) delay = ( until - datetime. now ( timezone. utc ) ). total_seconds ( ) else : next_check = time. time ( ) + delay netloc = urlparse ( response. url ). netloc if next_check is None : max_delay = self. config. linkcheck_rate_limit_timeout try : rate_limit = self. rate_limits [ netloc ] except KeyError : delay = DEFAULT_DELAY else : last_wait_time = rate_limit. delay delay = 2.0 * last_wait_time if : delay = max_delay if delay > max_delay : return None next_check = time. time ( ) + delay self. 
rate",False,delay > max_delay and last_wait_time < max_delay,delay > 0.0,0.653852105140686 2341,"def _get_x_for_y ( self, xValue, x, y ) : x_value = str ( xValue ) for anime in self. xmlMap. findall ( ""anime"" ) : try : if : return int ( anime. get ( y, 0 ) ) except ValueError as e : continue return 0",False,"anime.get(x, False) == x_value","anime.get(x_value, 0) == x_value",0.6519744396209717 2342,"def metadata ( environ, start_response ) : try : path = args. path if path is None or len ( path ) == 0 : path = os. path. dirname ( os. path. abspath ( __file__ ) ) if : path += ""/"" metadata = create_metadata_string ( path + args. config, IDP. config, args. valid, args. cert, args. keyfile, args. id, args. name, args. sign, ) start_response ( ""200 OK"", [ ( ""Content-Type"", ""text/xml"" ) ] ) return metadata except Exception as ex : logger. error ( ""An error occured while creating metadata:"", ex. message ) return not_found ( environ, start_response )",True,path[-1] != '/',path[-1] != '/',0.6730730533599854 2343,"def set_caller_information ( doc, state ) : """"""Called from hooks on creation of Lead or Contact"""""" if doc. doctype not in [ ""Lead"", ""Contact"" ] : return numbers = [ doc. get ( ""phone"" ), doc. get ( ""mobile_no"" ) ] fieldname = doc. doctype. lower ( ) display_name_field = ""{}_name"". format ( fieldname ) if doc. doctype == ""Contact"" : numbers = [ d. phone for d in doc. phone_nos ] for number in numbers : number = strip_number ( number ) if : continue filters = frappe. _dict ( { ""from"" : [ ""like"", ""%{}"". format ( number ) ], fieldname : """" } ) logs = frappe. get_all ( ""Call Log"", filters = filters ) for log in logs : frappe. db. set_value ( ""Call Log"", log. name, { fieldname : doc. name, display_name_field : doc. get_title ( ) }, update_modified = False, )",False,not number,number is None,0.6707401275634766 2344,"def _roll_random ( n ) : """"""returns a random # from 0 to N-1"""""" bits = util. bit_length ( n - 1 ) byte_count = ( bits + 7 ) // 8 hbyte_mask = pow ( 2, bits % 8 ) - 1 while True : x = os. urandom ( byte_count ) if : x = byte_mask ( x [ 0 ], hbyte_mask ) + x [ 1 : ] num = util. inflate_long ( x, 1 ) if num < n : break return num",False,hbyte_mask > 0,len(x) > 0,0.6614553928375244 2345,"def OutputString ( self, attrs = None ) : result = [ ] append = result. append append ( ""%s=%s"" % ( self. key, self. coded_value ) ) if attrs is None : attrs = self. _reserved items = sorted ( self. items ( ) ) for key, value in items : if value == """" : continue if key not in attrs : continue if key == ""expires"" and isinstance ( value, int ) : append ( ""%s=%s"" % ( self. _reserved [ key ], _getdate ( value ) ) ) elif key == ""max-age"" and isinstance ( value, int ) : append ( ""%s=%d"" % ( self. _reserved [ key ], value ) ) elif key == ""secure"" : append ( str ( self. _reserved [ key ] ) ) elif : append ( str ( self. _reserved [ key ] ) ) else : append ( ""%s=%s"" % ( self. _reserved [ key ], value ) ) return _semispacejoin ( result )",False,key == 'httponly',key == 'domain',0.657065212726593 2346,"def raffle_notify ( self, func, value, id = None ) : await asyncio. sleep ( 0 ) if id is None : list_tasks = [ ] for i, user in enumerate ( self. _observers ) : if self. check_status ( func, i ) : task = asyncio. ensure_future ( user. update ( func, value ) ) list_tasks. append ( task ) if : await asyncio. wait ( list_tasks, return_when = asyncio. ALL_COMPLETED ) list_tasks = [ ] if list_tasks : await asyncio. wait ( list_tasks, return_when = asyncio. 
ALL_COMPLETED ) elif id >= 0 : user = self. _observers [ id ] if self. check_status ( func, id ) : asyncio. ensure_future ( user. update ( func, value ) ) else : user = self. _var_super_user answer = await user. update ( func, value ) return answer",False,not (i + 1) % 100,list_tasks,0.659375786781311 2347,"def print_po_snippet ( en_loc_old_lists, context ) : for m, localized, old in zip ( * en_loc_old_lists ) : if : continue if m == localized : localized = old print ( ""#: {file}:{line}\n"" 'msgid ""{context}{en_month}""\n' 'msgstr ""{localized_month}""\n'. format ( context = context, file = filename, line = print_po_snippet. line, en_month = m, localized_month = localized, ) ) print_po_snippet. line += 1",False,m == '',m == None,0.6893705129623413 2348,"def topology_change_notify ( self, port_state ) : notice = False if port_state is PORT_STATE_FORWARD : for port in self. ports. values ( ) : if : notice = True break else : notice = True if notice : self. send_event ( EventTopologyChange ( self. dp ) ) if self. is_root_bridge : self. _transmit_tc_bpdu ( ) else : self. _transmit_tcn_bpdu ( )",False,port.role is DESIGNATED_PORT,port in self.ports,0.6570047736167908 2349,"def _internal_tell_not_asked ( self, candidate : p. Parameter, loss : tp. FloatLoss ) -> None : discardable : tp. Optional [ str ] = None if len ( self. population ) >= self. llambda : if : uid, worst = max ( self. population. items ( ), key = lambda p : base. _loss ( p [ 1 ] ) ) if loss < base. _loss ( worst ) : discardable = uid else : pareto_uids = { c. uid for c in self. pareto_front ( ) } if candidate. uid in pareto_uids : non_pareto_pop = { c. uid for c in self. population. values ( ) } - pareto_uids if non_pareto_pop : nonpareto = { c. uid : c for c in self. population. values ( ) } [ list ( non_pareto_pop ) [ 0 ] ] discardable = nonpareto. heritage [ ""lineage"" ] if discardable is not None : del self. population [ discardable ] self. _uid_queue. discard ( discardable ) if len ( self. population ) < self. llambda : self. population [ candidate. uid ] = candidate self. _uid_queue. tell ( candidate. uid )",False,self.num_objectives == 1,len(self.population) == 1,0.6569133996963501 2350,"def get_cycle_path ( self, curr_node, goal_node_index ) : for dep in curr_node [ ""deps"" ] : if dep == goal_node_index : return [ curr_node [ ""address"" ] ] for dep in curr_node [ ""deps"" ] : path = self. get_cycle_path ( self. get_by_address ( dep ), goal_node_index ) if : path. insert ( 0, curr_node [ ""address"" ] ) return path return [ ]",False,len(path) > 0,path,0.655383825302124 2351,"def save_plugin_options ( self ) : for name, option_widgets in self. _plugin_option_widgets. items ( ) : if : self. config [ ""plugins"" ] [ name ] = { } plugin_config = self. config [ ""plugins"" ] [ name ] for option_name, option_widget in option_widgets. items ( ) : plugin_config [ option_name ] = option_widget. option. get_widget_value ( option_widget. widget )",False,name not in self.config['plugins'],"name not in self.config[""plugins']",0.6581972241401672 2352,"def __init__ ( self, sources, sinks, effect ) : self. effect = OrderedDict ( ) for lvalue, rvalue in re. findall ( ""(.*?)=([^=]*)(?:,|$)"", effect ) : sink = lvalue. strip ( ) if sink not in sinks : raise SpaParseError ( ""Left-hand module '%s' from effect '%s=%s' "" ""is not defined."" % ( lvalue, lvalue, rvalue ) ) if : raise SpaParseError ( ""Left-hand module '%s' from effect '%s=%s' "" ""is assigned to multiple times in '%s'."" % ( lvalue, lvalue, rvalue, effect ) ) self. 
effect [ sink ] = Expression ( sources, rvalue )",False,sink in self.effect,lvalue != lvalue or rvalue != lvalue,0.6681115031242371 2353,"def _upload_file_aws_cli ( local_fname, bucket, keyname, config = None, mditems = None ) : """"""Streaming upload via the standard AWS command line interface."""""" s3_fname = ""s3://%s/%s"" % ( bucket, keyname ) args = [ ""--sse"", ""--expected-size"", str ( os. path. getsize ( local_fname ) ) ] if config : if config. get ( ""region"" ) : args += [ ""--region"", config. get ( ""region"" ) ] if : args += [ ""--storage-class"", ""REDUCED_REDUNDANCY"" ] cmd = ( [ os. path. join ( os. path. dirname ( sys. executable ), ""aws"" ), ""s3"", ""cp"" ] + args + [ local_fname, s3_fname ] ) do. run ( cmd, ""Upload to s3: %s %s"" % ( bucket, keyname ) )",False,config.get('reduced_redundancy'),mditems,0.6555967330932617 2354,"def _known_types ( self, config ) : msg = ( ""The config entry %r in section %r is of type %r, "" ""which does not match the expected type %r."" ) for section, conf in config. items ( ) : if : for k, v in conf. items ( ) : if v is not None : expected_type = self. known_config_types. get ( k, None ) vtype = type ( v ) if expected_type and vtype!= expected_type : warnings. warn ( msg % ( k, section, vtype. __name__, expected_type. __name__ ) ) else : k, v = section, conf if v is not None : expected_type = self. known_config_types. get ( k, None ) vtype = type ( v ) if expected_type and vtype!= expected_type : warnings. warn ( msg % ( k, section, vtype. __name__, expected_type. __name__ ) )",False,"isinstance(conf, dict)",conf is not None,0.6516681909561157 2355,"def set_values ( self, vals ) : vals = np. array ( vals ) if vals. ndim == 0 : self. values = np. ones ( self. n_points ) * vals else : if : raise ValueError ( ""Values should be a scalar or a 1D ndarray of the grid size."" ) self. values = vals",False,"vals.shape != (self.n_points,)",not np.isscalar(vals),0.6555777192115784 2356,"def test_run_read_write ( self, mock_writer, mock_select, mock_read ) : mock_select. side_effect = [ ( [ self. in_fd ], [ ], [ ] ), ( [ self. in_fd ], [ ], [ ] ) ] mock_read. side_effect = [ b""A"" * 300, b"""" ] inc = 0 with self. writer. running ( ) : while self. writer. is_alive ( ) : time. sleep ( 0.01 ) inc += 1 if : raise Exception ( ""waited too long."" ) self. assertFalse ( self. writer. is_alive ( ) ) mock_read. assert_called_with ( - 1, io. DEFAULT_BUFFER_SIZE ) self. assertEqual ( mock_read. call_count, 2 ) mock_writer. assert_has_calls ( [ unittest. mock. call ( unittest. mock. ANY, ChunkType. STDIN, b""A"" * 300 ), unittest. mock. call ( unittest. mock. ANY, ChunkType. STDIN_EOF ), ] )",False,inc >= 1000,inc > self.wait_size,0.683684766292572 2357,"def add_msg_info ( self, mid, msg_info, full_threads = False, idxs = None ) : self [ ""data"" ] [ ""metadata"" ] [ mid ] = self. _metadata ( msg_info ) thread_mid = parent_mid = msg_info [ MailIndex. MSG_THREAD_MID ] if ""/"" in thread_mid : thread_mid, parent_mid = thread_mid. split ( ""/"" ) if thread_mid not in self [ ""data"" ] [ ""threads"" ] : thread = self. _thread ( thread_mid ) self [ ""data"" ] [ ""threads"" ] [ thread_mid ] = thread if : idxs. extend ( [ int ( t, 36 ) for t, bar, kids in thread if t not in self [ ""data"" ] [ ""metadata"" ] ] ) for cid in self. _msg_addresses ( msg_info ) : if cid not in self [ ""data"" ] [ ""addresses"" ] : self [ ""data"" ] [ ""addresses"" ] [ cid ] = self. _address ( cid = cid ) if ""tags"" in self. session. config : for tid in self. 
_msg_tags ( msg_info ) : if tid not in self [ ""data"" ] [ ""tags"" ] : self [ ""data"" ] [ ""tags"" ] [ tid ] = self. _tag ( tid, { ""searched"" : False } )",False,full_threads and idxs,full_threads,0.6638230085372925 2358,"def __init__ ( self, functions = None, type_definitions = None ) : if functions is None : functions = { } elif isinstance ( functions, dict ) : mapped_funcs = { } for k, v in functions. items ( ) : if : k = _expr. GlobalVar ( k ) if not isinstance ( k, _expr. GlobalVar ) : raise TypeError ( ""Expect functions to be Dict[GlobalVar, Function]"" ) mapped_funcs [ k ] = v functions = mapped_funcs if type_definitions is None : type_definitions = { } elif isinstance ( type_definitions, dict ) : mapped_type_defs = { } for k, v in type_definitions. items ( ) : if : k = _ty. GlobalTypeVar ( k ) if not isinstance ( k, _ty. GlobalTypeVar ) : raise TypeError ( ""Expect type_definitions to be Dict[GlobalTypeVar, Type]"" ) mapped_type_defs [ k ] = v type_definitions = mapped_type_defs self. __init_handle_by_constructor__ ( _ffi_api. IRModule, functions, type_definitions )",False,"isinstance(k, string_types)",k in mapped_type_defs,0.651375412940979 2359,"def _get_dimensions ( cls, json, props ) : if json is None : return if ""config"" in json and ""view"" in json [ ""config"" ] : size_config = json [ ""config"" ] [ ""view"" ] else : size_config = json view = { } for w in ( ""width"", ""continuousWidth"" ) : if w in size_config : view [ ""width"" ] = size_config [ w ] for h in ( ""height"", ""continuousHeight"" ) : if h in size_config : view [ ""height"" ] = size_config [ h ] for p in ( ""width"", ""height"" ) : if p not in view or isinstance ( view [ p ], string_types ) : continue if : v = view [ p ] props [ p ] = v + 22 if isinstance ( v, int ) else v responsive_height = json. get ( ""height"" ) == ""container"" responsive_width = json. get ( ""width"" ) == ""container"" if responsive_height and responsive_width : props [ ""sizing_mode"" ] = ""stretch_both"" elif responsive_width : props [ ""sizing_mode"" ] = ""stretch_width"" elif responsive_height : props [ ""sizing_mode"" ] = ""stretch_height""",False,props.get(p) is None or (p in view and props.get(p) < view[p]),p in view,0.651118278503418 2360,"def _check_good_input ( self, X, y = None ) : if isinstance ( X, dict ) : lengths = [ len ( X1 ) for X1 in X. values ( ) ] if : raise ValueError ( ""Not all values of X are of equal length."" ) x_len = lengths [ 0 ] else : x_len = len ( X ) if y is not None : if len ( y )!= x_len : raise ValueError ( ""X and y are not of equal length."" ) if self. regression and y is not None and y. ndim == 1 : y = y. reshape ( - 1, 1 ) return X, y",False,len(set(lengths)) > 1,len(lengths) != len(X),0.6554114818572998 2361,"def _path ( task, opt ) : build ( opt ) suffix = """" dt = opt [ ""datatype"" ]. split ( "":"" ) [ 0 ] if : suffix = ""train"" elif dt == ""test"" : suffix = ""test"" elif dt == ""valid"" : suffix = ""dev"" datafile = os. path. join ( opt [ ""datapath"" ], ""MovieDialog"", ""movie_dialog_dataset"", ""{t}{s}.txt"". format ( t = tasks [ int ( task ) ], s = suffix ), ) if int ( task ) == 4 : if : candpath = None else : candpath = datafile. replace ( suffix + "".txt"", ""cand-{dt}.txt"". format ( dt = dt ) ) else : candpath = os. path. 
join ( opt [ ""datapath"" ], ""MovieDialog"", ""movie_dialog_dataset"", ""entities.txt"" ) return datafile, candpath",True,dt == 'train',dt == 'train',0.6694294214248657 2362,"def test_label_plate_by_row ( ) : """"""Label one complete plate"""""" nsites = 6 nimagesets = 96 * nsites workspace, module = make_workspace ( nimagesets ) measurements = workspace. measurements assert isinstance ( measurements, cellprofiler_core. measurement. Measurements ) assert isinstance ( module, cellprofiler. modules. labelimages. LabelImages ) module. row_count. value = 8 module. column_count. value = 12 module. order. value = cellprofiler. modules. labelimages. O_ROW module. site_count. value = nsites for i in range ( nimagesets ) : if : measurements. next_image_set ( ) module. run ( workspace ) sites = measurements. get_all_measurements ( ""Image"", M_SITE ) rows = measurements. get_all_measurements ( ""Image"", M_ROW ) columns = measurements. get_all_measurements ( ""Image"", M_COLUMN ) plates = measurements. get_all_measurements ( ""Image"", M_PLATE ) wells = measurements. get_all_measurements ( ""Image"", M_WELL ) for i in range ( nimagesets ) : assert sites [ i ] == ( i % 6 ) + 1 this_row = ""ABCDEFGH"" [ int ( i / 6 / 12 ) ] this_column = ( int ( i / 6 ) % 12 ) + 1 assert rows [ i ] == this_row assert columns [ i ] == this_column assert wells [ i ] == ""%s%02d"" % ( this_row, this_column ) assert plates [ i ] == 1",False,i != 0,i % 6 == 0,0.679051399230957 2363,"def use_params ( self, params ) : key_prefix = f""tensorflow_{self.id}_"" state_dict = { } for k, v in params. items ( ) : if : if cupy is None : assert isinstance ( v, numpy. ndarray ) else : if isinstance ( v, cupy. core. core. ndarray ) : v = cupy. asnumpy ( v ) assert isinstance ( v, numpy. ndarray ) state_dict [ k. replace ( key_prefix, """" ) ] = v if state_dict : backup = self. _create_state_dict ( ) self. _load_weights_from_state_dict ( state_dict ) yield self. _load_weights_from_state_dict ( backup ) else : yield",False,"hasattr(k, 'startswith') and k.startswith(key_prefix)",k.startswith(key_prefix),0.6483956575393677 2364,"def send_request_to_server ( request ) : sock = socket. socket ( socket. AF_INET, socket. SOCK_STREAM ) try : sock. connect ( ( expand_host, expand_port ) ) logger. info ( ""sending request"" ) req_packet = pickle. dumps ( request ) sock. sendall ( req_packet ) data = b"""" ctr = 0 while True : packet = sock. recv ( 134217728 ) logger. info ( ""%s. received: %s"", str ( ctr ), str ( len ( packet ) ) ) ctr += 1 if : break data += packet logger. info ( ""got response, uncompressing"" ) received = pickle. loads ( data ) return received except EOFError : logger. info ( ""No data received"" ) finally : sock. close ( )",False,not packet,ctr > 10,0.676325261592865 2365,"def _update_split ( self, _event, direction ) : self. split_count [ direction ] = self. spinbuttons [ direction ]. get_value_as_int ( ) if self. even_splits [ direction ] : self. model [ direction ]. clear ( ) count = self. split_count [ direction ] frac = 100 // count partition = [ frac ] * ( count - 1 ) partition. append ( 100 - ( count - 1 ) * frac ) for i, frac in enumerate ( partition, start = 1 ) : self. model [ direction ]. append ( [ i, frac ] ) else : delta = self. split_count [ direction ] - len ( self. model [ direction ] ) if : idx = len ( self. model [ direction ] ) + 1 for i in range ( delta ) : self. model [ direction ]. append ( [ idx + i, 0 ] ) if delta < 0 : s = 0 for i in range ( abs ( delta ) ) : s += self. 
model [ direction ] [ - 1 ] [ 1 ] del self. model [ direction ] [ - 1 ] self. model [ direction ] [ - 1 ] [ 1 ] += s",True,delta > 0,delta > 0,0.6855176687240601 2366,"def _dump_section ( self, name, values, f ) : doc = ""__doc__"" if doc in values : print ( ""# %s"" % values [ doc ], file = f ) print ( ""%s("" % name, file = f ) for k, v in values. items ( ) : if : continue doc = k + ""__doc__"" if doc in values : print ( "" # %s"" % values [ doc ], file = f ) print ( "" %s = %s,"" % ( k, pprint. pformat ( v, indent = 8 ) ), file = f ) print ( "")\n"", file = f )",False,k.endswith('__doc__'),k == '__doc__',0.6502742767333984 2367,"def parse_command ( self, text ) : result = None builtin_commands = sublime. load_settings ( ""TextPastryCommands.json"" ) cmd_shortcuts = global_settings ( ""commands"", [ ] ) cmd_shortcuts. extend ( builtin_commands. get ( ""commands"", [ ] ) ) for item in cmd_shortcuts : if ""parser"" in item : class_ = globals ( ) [ item [ ""parser"" ] ] result = class_ ( text ). parse ( ) elif ""match"" in item : comp = re. compile ( item [ ""match"" ] ) match = comp. match ( text ) if : refs = { } for ( key, value ) in enumerate ( match. groups ( ) ) : refs [ ""$"" + str ( key + 1 ) ] = value refs [ ""$0"" ] = match. group ( 0 ) refs [ ""$clipbord"" ] = sublime. get_clipboard ( ) result = self. create_command ( item, refs ) if result : break return result",True,match,match,0.6823798418045044 2368,"def __computeTagMaps ( self, unique ) : presentTypes = { } skipTypes = { } defaultType = None for namedType in self. __namedTypes : tagMap = namedType. asn1Object. tagMap if isinstance ( tagMap, NamedTypes. PostponedError ) : return tagMap for tagSet in tagMap : if unique and tagSet in presentTypes : return NamedTypes. PostponedError ( ""Non-unique tagSet %s of %s at %s"" % ( tagSet, namedType, self ) ) presentTypes [ tagSet ] = namedType. asn1Object skipTypes. update ( tagMap. skipTypes ) if defaultType is None : defaultType = tagMap. defaultType elif : return NamedTypes. PostponedError ( ""Duplicate default ASN.1 type at %s"" % ( self, ) ) return tagmap. TagMap ( presentTypes, skipTypes, defaultType )",False,tagMap.defaultType is not None,self.defaultType != tagMap.defaultType,0.6499348282814026 2369,"def write ( self, buffer ) : if self. mode == MODE_NUMBER : for i in xrange ( 0, len ( self. data ), 3 ) : chars = self. data [ i : i + 3 ] bit_length = NUMBER_LENGTH [ len ( chars ) ] buffer. put ( int ( chars ), bit_length ) elif self. mode == MODE_ALPHA_NUM : for i in xrange ( 0, len ( self. data ), 2 ) : chars = self. data [ i : i + 2 ] if len ( chars ) > 1 : buffer. put ( ALPHA_NUM. find ( chars [ 0 ] ) * 45 + ALPHA_NUM. find ( chars [ 1 ] ), 11 ) else : buffer. put ( ALPHA_NUM. find ( chars ), 6 ) else : if : data = self. data else : data = [ ord ( c ) for c in self. data ] for c in data : buffer. put ( c, 8 )",False,six.PY3,self.data,0.6646746397018433 2370,"def _process ( self, to_process, batch_size ) : for model, history_model in to_process : if history_model. objects. count ( ) : self. stderr. write ( ""{msg} {model}\n"". format ( msg = self. EXISTING_HISTORY_FOUND, model = model ) ) continue if : self. stdout. write ( self. START_SAVING_FOR_MODEL. format ( model = model ) ) self. _bulk_history_create ( model, batch_size ) if : self. stdout. write ( self. DONE_SAVING_FOR_MODEL. 
format ( model = model ) )",False,self.verbosity >= 1,history_model.objects.count(),0.6581740379333496 2371,"def _extract_subtitles ( src ) : subtitles = { } for caption in try_get ( src, lambda x : x [ ""captions"" ], list ) or [ ] : subtitle_url = url_or_none ( caption. get ( ""uri"" ) ) if : lang = caption. get ( ""language"", ""deu"" ) subtitles. setdefault ( lang, [ ] ). append ( { ""url"" : subtitle_url, } ) return subtitles",False,subtitle_url,subtitles_url,0.6614885926246643 2372,"def generate_tag_1_data ( ids ) : if len ( ids )!= SAMPLE_NUM : raise ValueError ( ""len ids should equal to sample number"" ) counter = 0 for sample_i in range ( SAMPLE_NUM ) : one_data = [ ids [ sample_i ] ] valid_set = [ x for x in range ( TAG_INTERVAL [ 0 ], TAG_INTERVAL [ 1 ] ) ] features = np. random. choice ( valid_set, FEATURE_NUM, replace = False ) one_data += [ "":"". join ( [ x, ""1.0"" ] ) for x in features ] counter += 1 if : print ( ""generate data {}"". format ( counter ) ) yield one_data",False,counter % 10000 == 0,counter % 10 == 0,0.6736827492713928 2373,"def onZoomFit ( self, * args ) : if 0 : for axisIndex in range ( self. dimensions ) : linkButton = self. linkButtons [ axisIndex ] link = linkButton. link if : logger. debug ( ""sending link messages"" ) link. sendRanges ( self. dialog. ranges [ axisIndex ], linkButton ) link. sendRangesShow ( self. dialog. state. ranges_viewport [ axisIndex ], linkButton ) action = undo. ActionZoom ( self. dialog. undoManager, ""zoom to fit"", self. dialog. set_ranges, list ( range ( self. dialog. dimensions ) ), self. dialog. state. ranges_viewport, self. dialog. state. range_level_show, list ( range ( self. dialog. dimensions ) ), ranges_viewport = [ None ] * self. dialog. dimensions, range_level_show = None, ) action. do ( ) self. dialog. checkUndoRedo ( ) self. dialog. queue_history_change ( ""zoom to fit"" ) if 0 : linked_buttons = [ button for button in self. linkButtons if button. link is not None ] links = [ button. link for button in linked_buttons ] if len ( linked_buttons ) > 0 : logger. debug ( ""sending",False,link,len(link) > 0,0.6946277618408203 2374,"def find ( self, back = False ) : flags = 0 if back : flags = QTextDocument. FindBackward if self. csBox. isChecked ( ) : flags = flags | QTextDocument. FindCaseSensitively text = self. searchEdit. text ( ) if not self. findMain ( text, flags ) : if : cursor = self. editBoxes [ self. ind ]. textCursor ( ) if back : cursor. movePosition ( QTextCursor. End ) else : cursor. movePosition ( QTextCursor. Start ) self. editBoxes [ self. ind ]. setTextCursor ( cursor ) self. findMain ( text, flags )",False,text in self.editBoxes[self.ind].toPlainText(),self.ind < len(self.editBoxes),0.6578768491744995 2375,"def _str_index ( self ) : idx = self [ ""index"" ] out = [ ] if len ( idx ) == 0 : return out out += [ "".. index:: %s"" % idx. get ( ""default"", """" ) ] for section, references in idx. iteritems ( ) : if section == ""default"" : continue elif : out += [ "" single: %s"" % ( "", "". join ( references ) ) ] else : out += [ "" %s: %s"" % ( section, "","". join ( references ) ) ] return out",False,section == 'refguide',reference is not None,0.6575438976287842 2376,"def bufferSaveAs ( self ) : """"""Save buffer to a new filename."""""" if self. bufferHasChanged ( ) and self. buffer. doc. filepath : cancel = self. bufferSuggestSave ( ) if : return cancel filedir = """" if self. buffer and self. buffer. doc. filedir : filedir = self. buffer. doc. filedir result = editor. 
saveSingle ( title = ""Save PySlices File"", directory = filedir, wildcard = ""PySlices Files (*.pyslices)|*.pyslices"", ) if result. path not in [ """", None ] : if result. path [ - 9 : ]!= "".pyslices"" : result. path += "".pyslices"" self. buffer. doc = document. Document ( result. path ) self. buffer. name = self. buffer. doc. filename self. buffer. modulename = self. buffer. doc. filebase self. simpleSave ( confirmed = True ) cancel = False else : cancel = True return cancel",True,cancel,cancel,0.7059651613235474 2377,"def update ( self ) : for plugin_type in self. PLUGIN_TYPES : if : continue entrypoint_type = ""plover.%s"" % plugin_type for entrypoint in pkg_resources. iter_entry_points ( entrypoint_type ) : if ""gui_qt"" in entrypoint. extras and not HAS_GUI_QT : continue self. register_plugin_from_entrypoint ( plugin_type, entrypoint ) if PLUGINS_PLATFORM is not None : entrypoint_type = ""plover.%s.%s"" % ( PLUGINS_PLATFORM, plugin_type ) for entrypoint in pkg_resources. iter_entry_points ( entrypoint_type ) : self. register_plugin_from_entrypoint ( plugin_type, entrypoint )",False,plugin_type.startswith('gui.qt.') and (not HAS_GUI_QT),plugin_type is None,0.6496763229370117 2378,"def _run_response_middleware ( self, request, response, request_name = None ) : named_middleware = self. named_response_middleware. get ( request_name, deque ( ) ) applicable_middleware = self. response_middleware + named_middleware if applicable_middleware : for middleware in applicable_middleware : _response = middleware ( request, response ) if isawaitable ( _response ) : _response = await _response if : response = _response break return response",False,_response,isawaitable(response),0.6782186031341553 2379,"def _wait_for_positive ( condition, locators, wait_timeout ) : start_time = time. time ( ) while True : locator = None try : locator = get_locator ( locators, ignore_implicit_wait = True, raise_exception = True ) except NoSuchElementException : pass if locator : element = None try : element = WebDriverWait ( _get_driver ( ), wait_timeout ). until ( _get_until_cond ( condition, locator ) ) except TimeoutException : pass if element : return elapsed_time = time. time ( ) - start_time if : raise NoSuchElementException ( ""Timeout occurred while waiting for '%s' condition"" % condition )",False,elapsed_time > wait_timeout,elapsed_time > 5,0.6553369760513306 2380,"def generate ( self, length = 10000, prefix = False ) : replacements2 = { "","" : "","", "" \."" : "".\n"", "" :"" : "":"", "" ;"" : "";"", ""\n\s+"" : ""\n"" } keys = list ( self. db. keys ( ) ) key = keys [ random. randint ( 0, len ( keys ) - 1 ) ] words = key words = words. capitalize ( ) regex = re. compile ( ""[a-z]+"" ) for i in range ( length ) : okey = key if : break db = self. db [ key ] s = sum ( db. values ( ) ) i = random. randint ( 0, s - 1 ) for key, value in db. items ( ) : if i < value : break else : i -= value if okey == ""."" : key1 = key. capitalize ( ) else : key1 = key if prefix and regex. findall ( key1 ) and random. random ( ) < 0.01 : key1 = '%s' % ( prefix, key1, key1 ) words += "" "" + key1 text = words for key, value in replacements2. items ( ) : text = re. sub ( key, value, text ) return text + "".\n""",False,not key in self.db,okey == False,0.6579463481903076 2381,"def ConstructDpbService ( self ) : """"""Create the dpb_service object and create groups for its vms."""""" if self. config. dpb_service is None : return dpb_service_spec = self. config. dpb_service dpb_service_cloud = dpb_service_spec. worker_group. 
cloud providers. LoadProvider ( dpb_service_cloud ) dpb_service_type = dpb_service_spec. service_type dpb_service_class = dpb_service. GetDpbServiceClass ( dpb_service_type ) self. dpb_service = dpb_service_class ( dpb_service_spec ) if dpb_service_type == dpb_service. UNMANAGED_DPB_SVC_YARN_CLUSTER : if : raise Exception ( ""Invalid Non cluster vm group {0} when benchmarking "" ""unmanaged dpb service"". format ( self. vms_to_boot ) ) base_vm_spec = dpb_service_spec. worker_group base_vm_spec. vm_spec. zone = self. dpb_service. dpb_service_zone worker_group_spec = copy. copy ( base_vm_spec ) worker_group_spec. vm_count = dpb_service_spec. worker_count self. vms_to_boot [ ""worker_group"" ] = worker_group_spec master_group_spec = copy. copy ( base_vm_spec ) master_group_spec. vm_count = 1 self. vms_to_boot [ ""master_group"" ] = master_group_spec logging. info ( str (",False,self.vms_to_boot,dpb_service_type != dpb_service.UNMANAGED_DPB_SVC_YARN_CLUSTER,0.659541130065918 2382,"def _write_inputs ( node ) : lines = [ ] nodename = node. fullname. replace ( ""."", ""_"" ) for key, _ in list ( node. inputs. items ( ) ) : val = getattr ( node. inputs, key ) if : if isinstance ( val, ( str, bytes ) ) : try : func = create_function_from_source ( val ) except RuntimeError : lines. append ( ""%s.inputs.%s = '%s'"" % ( nodename, key, val ) ) else : funcname = [ name for name in func. __globals__ if name!= ""__builtins__"" ] [ 0 ] lines. append ( pickle. loads ( val ) ) if funcname == nodename : lines [ - 1 ] = lines [ - 1 ]. replace ( "" %s("" % funcname, "" %s_1("" % funcname ) funcname = ""%s_1"" % funcname lines. append ( ""from nipype.utils.functions import getsource"" ) lines. append ( """"""This tests that echo may be toggled off."""""" p = pexpect. spawn ( ""cat"", echo = True, timeout = 5 ) try : self. _expect_echo_toggle ( p ) except IOError : if : if hasattr ( unittest, ""SkipTest"" ) : raise unittest. SkipTest ( ""Not supported on this platform."" ) return ""skip"" raise",False,sys.platform.lower().startswith('sunos'),"hasattr(unittest, 'getecho')",0.6518650650978088 2384,"def _compute_substitution_score ( aln1_chars, aln2_chars, substitution_matrix, gap_substitution_score, gap_chars ) : substitution_score = 0 for aln1_char, aln2_char in product ( aln1_chars, aln2_chars ) : if : substitution_score += gap_substitution_score else : try : substitution_score += substitution_matrix [ aln1_char ] [ aln2_char ] except KeyError : offending_chars = [ c for c in ( aln1_char, aln2_char ) if c not in substitution_matrix ] raise ValueError ( ""One of the sequences contains a character that is "" ""not contained in the substitution matrix. Are you "" ""using an appropriate substitution matrix for your "" ""sequence type (e.g., a nucleotide substitution "" ""matrix does not make sense for aligning protein "" ""sequences)? Does your sequence contain invalid "" ""characters? The offending character(s) is: "" "" %s."" % "", "". join ( offending_chars ) ) substitution_score /= len ( aln1_chars ) * len ( aln2_chars ) return substitution_score",False,aln1_char in gap_chars or aln2_char in gap_chars,gap_substitution_score,0.6586174964904785 2385,"def merge_lz_operations ( lzops ) : """"""Merge consecutive LZ operations into single ops if possible."""""" lzops = iter ( lzops ) try : prev = lzops. next ( ) while True : cur = lzops. next ( ) if isinstance ( cur, LZLiteral ) : if isinstance ( prev, LZLiteral ) : prev. data += cur. 
data else : yield prev prev = cur else : if isinstance ( prev, LZLiteral ) : yield prev prev = cur else : if : prev. length += cur. length else : yield prev prev = cur except StopIteration : pass",False,prev.distance == cur.distance,prev is not None,0.6532402038574219 2386,"def get_output_for ( self, input, ** kwargs ) : out = T. tensordot ( self. W, input, axes = [ [ 1 ], [ 0 ] ] ) if self. b is None : activation = out else : if : bias_axes = range ( input. ndim - 1 ) + [ ""x"" ] else : bias_axes = [ 0 ] + ( [ ""x"" ] * ( input. ndim - 1 ) ) b_shuffled = self. b. dimshuffle ( bias_axes ) activation = out + b_shuffled return self. nonlinearity ( activation )",False,self.untie_biases,self.bias_axes is None,0.6513996124267578 2387,"def compile ( self, filename, obfuscate = False, raw = False, magic = ""\x00"" * 8 ) : body = marshal. dumps ( compile ( self. visit ( self. _source_ast ), filename, ""exec"" ) ) if obfuscate : body_len = len ( body ) offset = 0 if raw else 8 output = bytearray ( body_len + 8 ) for i, x in enumerate ( body ) : output [ i + offset ] = ord ( x ) ^ ( ( 2 ** ( ( 65535 - i ) % 65535 ) ) % 251 ) if : for i in xrange ( 8 ) : output [ i ] = 0 return output el if : return body else : return magic + body",True,raw,raw,0.6872165203094482 2388,"def apply ( self, basket, condition, offer, discount_percent = None, max_total_discount = None, ** kwargs ) : if discount_percent is None : discount_percent = self. value discount_amount_available = max_total_discount line_tuples = self. get_applicable_lines ( offer, basket ) discount_percent = min ( discount_percent, D ( ""100.0"" ) ) discount = D ( ""0.00"" ) affected_items = 0 max_affected_items = self. _effective_max_affected_items ( ) affected_lines = [ ] for price, line in line_tuples : if affected_items >= max_affected_items : break if : break quantity_affected = min ( line. quantity_without_offer_discount ( offer ), max_affected_items - affected_items, ) if quantity_affected <= 0 : break line_discount = self. round ( discount_percent / D ( ""100.0"" ) * price * int ( quantity_affected ) ) if discount_amount_available is not None : line_discount = min ( line_discount, discount_amount_available ) discount_amount_available -= line_discount apply_discount ( line, line_discount, quantity_affected, offer ) affected_lines. append ( ( line, line_discount, quantity_affected ) ) affected_items += quantity_affected discount += line_discount return BasketDiscount ( discount",False,discount_amount_available == 0,line.has_offer_discount,0.6559979915618896 2389,"def __pow__ ( self, power ) : if power == 1 : return self if power == - 1 : from cirq. devices import line_qubit decomposed = protocols. decompose_once_with_qubits ( self, qubits = line_qubit. LineQid. for_gate ( self ), default = None ) if : return NotImplemented inverse_decomposed = protocols. inverse ( decomposed, None ) if inverse_decomposed is None : return NotImplemented return _InverseCompositeGate ( self ) return NotImplemented",True,decomposed is None,decomposed is None,0.6663508415222168 2390,"def detect_object ( inference, camera, classes, threshold, out_dir, range_x = [ 0, 1 ], range_y = [ 0, 1 ] ) : """"""Detects objects belonging to given classes in camera stream."""""" stream = io. BytesIO ( ) camera. capture ( stream, format = ""jpeg"" ) stream. seek ( 0 ) image = Image. open ( stream ) rgb_histogram = np. array ( image. histogram ( ) ). reshape ( ( 3, 256 ) ) green_peak = np. argmax ( rgb_histogram [ 1, : ] ) if green_peak < 3 : time. 
sleep ( 1.0 ) return False, None, None debug_data = [ ] detection = False max_accumulator = 0.0 print ( ""Inferring..."" ) for p in crop_parameters ( image, range_x, range_y ) : im_crop = image. crop ( p ) accumulator = 0.0 infer_classes = image_classification. get_classes ( inference. run ( im_crop ), top_k = 5, threshold = 0.05 ) corner = [ p [ 0 ], p [ 1 ] ] print ( corner ) for idx, ( label, score ) in enumerate ( infer_classes ) : debug_data. append ( ( corner, im_crop. size, idx, label, score ) ) if : accumulator += score if accumulator > max_accumulator : max_accumulator = accumulator if accumulator >= threshold : detection = True break if out_dir : <",False,label in classes,debug_data,0.670791745185852 2391,"def _handle ( self, environ ) : path = environ [ ""bottle.raw_path"" ] = environ [ ""PATH_INFO"" ] if py3k : try : environ [ ""PATH_INFO"" ] = path. encode ( ""latin1"" ). decode ( ""utf8"" ) except UnicodeError : return HTTPError ( 400, ""Invalid path string. Expected UTF-8"" ) try : environ [ ""bottle.app"" ] = self request. bind ( environ ) response. bind ( ) try : self. trigger_hook ( ""before_request"" ) route, args = self. router. match ( environ ) environ [ ""route.handle"" ] = route environ [ ""bottle.route"" ] = route environ [ ""route.url_args"" ] = args return route. call ( ** args ) finally : self. trigger_hook ( ""after_request"" ) except HTTPResponse : return _e ( ) except RouteReset : route. reset ( ) return self. _handle ( environ ) except ( KeyboardInterrupt, SystemExit, MemoryError ) : raise except Exception : if : raise stacktrace = format_exc ( ) environ [ ""wsgi.errors"" ]. write ( stacktrace ) return HTTPError ( 500, ""Internal Server Error"", _e ( ), stacktrace )",False,not self.catchall,environ['wsgi.errors'] is None,0.659212589263916 2392,"def upload_object_via_stream ( self, iterator, container, object_name, callback = None, extra = None, ** kwargs ) : import boto3. s3. transfer stream = _Stream ( iterator ) try : container. bucket. upload_fileobj ( stream, object_name, Config = boto3. s3. transfer. TransferConfig ( use_threads = container. config. multipart, max_concurrency = self. _max_multipart_concurrency if : else 1, num_download_attempts = container. config. retries, ), Callback = callback, ) except Exception as ex : log. error ( ""Failed uploading: %s"" % ex ) return False return True",False,container.config.multipart,max_concurrency <= 0,0.6499392986297607 2393,"def delete_user ( request ) : if not is_admin ( request. user ) : request. audit = { ""operation"" : ""DELETE_USER"", ""operationText"" : _get_failed_operation_text ( request. user. username, ""DELETE_USER"" ), ""allowed"" : False, } raise PopupException ( _ ( ""You must be a superuser to delete users."" ), error_code = 401 ) if request. method!= ""POST"" : raise PopupException ( _ ( ""A POST request is required."" ) ) ids = request. POST. getlist ( ""user_ids"" ) global __users_lock __users_lock. acquire ( ) try : if : raise PopupException ( _ ( ""You cannot remove yourself."" ), error_code = 401 ) usernames = list ( User. objects. filter ( id__in = ids ). values_list ( ""username"", flat = True ) ) UserProfile. objects. filter ( user__id__in = ids ). delete ( ) User. objects. filter ( id__in = ids ). delete ( ) request. audit = { ""operation"" : ""DELETE_USER"", ""operationText"" : ""Deleted User(s): %s"" % "", "". join ( usernames ), } finally : __users_lock. release ( ) is_embeddable = request. GET. get ( ""is_embeddable"", request. 
POST",False,str(request.user.id) in ids,ids[0] in self.superuser,0.6517447233200073 2394,"def check_multiple_inheritance ( self, typ : TypeInfo ) -> None : """"""Check for multiple inheritance related errors."""""" if len ( typ. bases ) <= 1 : return mro = typ. mro [ 1 : ] for i, base in enumerate ( mro ) : non_overridden_attrs = base. names. keys ( ) - typ. names. keys ( ) for name in non_overridden_attrs : if is_private ( name ) : continue for base2 in mro [ i + 1 : ] : if : self. check_compatibility ( name, base, base2, typ )",False,name in base2.names and base2 not in base.mro,is_private(name),0.6535847187042236 2395,"def change_args_to_dict ( string ) : if string is None : return None ans = [ ] strings = string. split ( ""\n"" ) ind = 1 start = 0 while ind <= len ( strings ) : if ind < len ( strings ) and strings [ ind ]. startswith ( "" "" ) : ind += 1 else : if start < ind : ans. append ( ""\n"". join ( strings [ start : ind ] ) ) start = ind ind += 1 d = { } for line in ans : if : lines = line. split ( "":"" ) d [ lines [ 0 ] ] = lines [ 1 ]. strip ( ) return d",False,':' in line and len(line) > 0,line.startswith(b''),0.6525073051452637 2396,def reader_leaves ( self ) : self. mutex. acquire ( ) try : self. active_readers -= 1 if : self. active_writers += 1 self. waiting_writers -= 1 self. can_write. release ( ) finally : self. mutex. release ( ),False,self.active_readers == 0 and self.waiting_writers != 0,self.waiting_writers > 0,0.6549704074859619 2397,"def number_headings ( toc, maindoc ) : mdlines = [ ] skip = False for line in maindoc. splitlines ( ) : if line. strip ( ) == ""# Introduction"" : toc. start_numbering = True toc. numbering = [ 0 ] if line == ""```"" : skip = not skip if not skip : m = re. match ( r""^(#+) (.*)"", line ) if : num = toc. add_entry ( len ( m. group ( 1 ) ), m. group ( 2 ) ) line = ""%s %s %s"" % ( m. group ( 1 ), num, m. group ( 2 ) ) line = re. sub ( r""^(https?://\S+)"", r""[\1](\1)"", line ) mdlines. append ( line ) maindoc = ""\n"". join ( mdlines ) return maindoc",True,m,m,0.6910536885261536 2398,"def _tune ( kmeans_estimator, kmeans_train_set, tuner = None, hyperparameter_ranges = None, job_name = None, warm_start_config = None, wait = True, max_jobs = 2, max_parallel_jobs = 2, early_stopping_type = ""Off"", ) : with timeout ( minutes = TUNING_DEFAULT_TIMEOUT_MINUTES ) : if : tuner = HyperparameterTuner ( estimator = kmeans_estimator, objective_metric_name = ""test:msd"", hyperparameter_ranges = hyperparameter_ranges, objective_type = ""Minimize"", max_jobs = max_jobs, max_parallel_jobs = max_parallel_jobs, warm_start_config = warm_start_config, early_stopping_type = early_stopping_type, ) records = kmeans_estimator. record_set ( kmeans_train_set [ 0 ] [ : 100 ] ) test_record_set = kmeans_estimator. record_set ( kmeans_train_set [ 0 ] [ : 100 ], channel = ""test"" ) print ( ""Started hyperparameter tuning job with name: {}"". format ( job_name ) ) tuner. fit ( [ records, test_record_set ], job_name = job_name, wait = wait ) return tuner",False,not tuner,tuner is None,0.6938213109970093 2399,"def writeout ( self, fd, output ) : if isinstance ( output, ( str, unicode ) ) : total = len ( output ) output = StringIO. StringIO ( output ) else : total = 0 try : while True : self. state = ""read"" line = output. read ( BLOCKSIZE ) if : break self. state = ""write"" fd. write ( line ) total -= len ( line ) output. close ( ) except : if not self. partial_write_ok : print ( ""%s: %s bytes left"" % ( self, total ) ) traceback. print_exc ( ) finally : self. 
state = ""done"" fd. close ( )",False,line == '',total > 0,0.6722662448883057 2400,"def visit_decorator ( self, o : Decorator ) -> None : if not self. use_logical_deps ( ) : if : self. add_dependency ( make_trigger ( o. func. fullname ) ) else : for d in o. decorators : tname = None if isinstance ( d, RefExpr ) and d. fullname is not None : tname = d. fullname if ( isinstance ( d, CallExpr ) and isinstance ( d. callee, RefExpr ) and d. callee. fullname is not None ) : tname = d. callee. fullname if tname is not None : self. add_dependency ( make_trigger ( tname ), make_trigger ( o. func. fullname ) ) super ( ). visit_decorator ( o )",False,not o.func.is_overload and self.scope.current_function_name() is None,o.func.fullname is not None,0.6494070887565613 2401,"def _parse_images ( self, is_train ) : image_ids = self. COCO. getImgIds ( ) image_ids. sort ( ) imgs = copy. deepcopy ( self. COCO. loadImgs ( image_ids ) ) for img in imgs : img [ ""image"" ] = os. path. join ( self. img_dir, img [ ""file_name"" ] ) assert os. path. exists ( img [ ""image"" ] ), ""image {} not found."". format ( img [ ""image"" ] ) box_num = cfg. max_box_num img [ ""gt_boxes"" ] = np. zeros ( ( cfg. max_box_num, 4 ), dtype = np. float32 ) img [ ""gt_labels"" ] = np. zeros ( ( cfg. max_box_num ), dtype = np. int32 ) for k in [ ""date_captured"", ""url"", ""license"", ""file_name"" ] : if k in img : del img [ k ] if : self. _parse_gt_annotations ( img ) print ( ""Loaded {0} images from {1}."". format ( len ( imgs ), cfg. dataset ) ) return imgs",True,is_train,is_train,0.6663854122161865 2402,"def search ( self, search_strings, age = 0, show_id = None, season = None, episode = None, ** kwargs ) : results = [ ] if not self. login ( ) : return results for mode in search_strings : sickrage. app. log. debug ( ""Search Mode: %s"" % mode ) for search_string in search_strings [ mode ] : if : sickrage. app. log. debug ( ""Search string: %s"" % search_string ) searchURL = self. urls [ ""search"" ] % ( quote_plus ( search_string. replace ( ""."", "" "" ) ), ) else : searchURL = self. urls [ ""search"" ] % """" try : data = self. session. get ( searchURL ). text results += self. parse ( data, mode ) except Exception : sickrage. app. log. debug ( ""No data returned from provider"" ) continue return results",False,mode != 'RSS',search_string,0.6759482026100159 2403,"def on_cboSearchDirectoryEntry_changed ( self, entry ) : text = entry. get_text ( ) if text and self. _autoCompleteList!= None : path = os. path. dirname ( text ) start = os. path. basename ( text ) self. _autoCompleteList. clear ( ) try : files = dircache. listdir ( path ) [ : ] except OSError : return dircache. annotate ( path, files ) for f in files : if f. startswith ( ""."" ) and not ( start. startswith ( ""."" ) ) : continue if : if path == ""/"" : match = path + f else : match = path + os. sep + f self. _autoCompleteList. append ( [ match ] )",False,f.startswith(start) and f.endswith('/'),path == '/',0.6463853120803833 2404,"def should_execute ( file_name : str, file_text : str ) -> Tuple [ str, bool ] : if dont_execute_re. search ( file_text ) : return dont_execute_re. sub ( """", file_text ), False m = required_py_re. search ( file_text ) if m : if : return required_py_re. sub ( """", file_text ), True else : v = ""."". join ( m. groups ( ) ) print ( f""WARNING: {file_name} requires python {v}, not running"" ) return ( required_py_re. 
sub ( f""# requires python {v}, NOT EXECUTED!"", file_text ), False, ) else : return file_text, True",False,sys.version_info >= tuple((int(v) for v in m.groups())),m.groups() is None,0.6552332639694214 2405,"def main ( ) : assert sys. version_info [ 0 ] == 3 moduleset_versions = get_moduleset_versions ( ) pool = ThreadPool ( 20 ) pool_iter = pool. imap_unordered ( _fetch_version, moduleset_versions. keys ( ) ) arch_versions = { } for i, some_versions in enumerate ( pool_iter ) : arch_versions. update ( some_versions ) for name, version in sorted ( moduleset_versions. items ( ) ) : arch_name = fix_name ( name ) if : arch_version, arch_url = arch_versions [ arch_name ] arch_version = arch_version. split ( ""+"", 1 ) [ 0 ] if arch_name == ""readline"" : arch_version = ""."". join ( arch_version. split ( ""."" ) [ : 2 ] ) else : arch_version = ""???"" arch_url = """" if is_maybe_newer ( arch_version, version ) : print ( ""%-30s %-20s %-20s %s"" % ( name, version, arch_version, arch_url ) )",True,arch_name in arch_versions,arch_name in arch_versions,0.6474865674972534 2406,"def load_template ( self, template_path, template_type ) : """"""Load a package info template in Info.plist or PackageInfo format."""""" if template_path. endswith ( "".plist"" ) : try : with open ( self. env [ ""template_path"" ], ""rb"" ) as f : info = plistlib. load ( f ) except Exception : raise ProcessorError ( f""Malformed Info.plist template {self.env['template_path']}"" ) if : return info else : return self. convert_bundle_info_to_flat ( info ) else : try : info = ElementTree. parse ( template_path ) except Exception : raise ProcessorError ( f""Malformed PackageInfo template {self.env['template_path']}"" ) if template_type == ""flat"" : return info else : return self. convert_flat_info_to_bundle ( info )",True,template_type == 'bundle',template_type == 'bundle',0.6578556299209595 2407,"def get_common_timezones ( ) -> Dict [ str, Union [ int, Any ] ] : tzdata = { } normal = datetime. datetime ( 2009, 9, 1 ) for str in pytz. all_timezones : tz = pytz. timezone ( str ) timedelta = tz. utcoffset ( normal ) if not timedelta : continue offset = timedelta. seconds tz_name = tz. tzname ( normal ) tzdata [ tz_name ] = offset if tz_name == ""IST"" : tzdata [ tz_name ] = 19800 if : tzdata [ tz_name ] = - 68400 if tz_name == ""CST"" : tzdata [ tz_name ] = - 64800 return tzdata",False,tz_name == 'CDT',"tz_name == 'S""",0.6602140665054321 2408,"def do_STOU ( self, line ) : """"""Store a file on the server with a unique name."""""" try : if : self. respond ( b""450 Can't STOU while REST request is pending."" ) return _, _file_name = os. path. split ( tempfile. NamedTemporaryFile ( ). name ) if line : line = self. ftp_path ( line ) basedir, prefix = os. path. split ( line ) _file_name = ""."" + _file_name else : basedir = self. working_dir if self. config. stou_suffix : _file_name = _file_name + self. config. stou_suffix if self. config. stou_prefix : _file_name = self. config. stou_prefix + _file_name with self. config. vfs. check_access ( path = basedir, user = self. _uid, perms = ""w"" ) : self. respond ( b""150 FILE: %a"" % _file_name ) self. recv_file ( os. path. join ( basedir, _file_name ), 0, cmd = ""STOR"" ) except FSOperationNotPermitted : self. respond ( b""500 Operation not permitted."" )",False,self._restart_position,self.state & 32768,0.6574013829231262 2409,"def _read_passphrase ( self, buf, size, rwflag, userdata ) : try : if : result = self. _passphrase ( size, rwflag, userdata ) else : result = self. 
_passphrase ( rwflag ) if not isinstance ( result, bytes ) : raise ValueError ( ""String expected"" ) if len ( result ) > size : if self. _truncate : result = result [ : size ] else : raise ValueError ( ""passphrase returned by callback is too long"" ) for i in range ( len ( result ) ) : buf [ i ] = result [ i : i + 1 ] return len ( result ) except Exception as e : self. _problems. append ( e ) return 0",False,self._more_args,self._passphrase,0.662316083908081 2410,"def get_children ( self, element_name ) : child_spec = self. _check_valid_child ( element_name ) if child_spec. repeated : return _ElementListView ( spec = child_spec, parent = self ) else : for child in self. _children : if child. tag == element_name : return child if : return None else : raise RuntimeError ( ""Cannot find the non-repeated child <{}> of <{}>. "" ""This should never happen, as we pre-create these in __init__. "" ""Please file an bug report. Thank you."". format ( element_name, self. _spec. name ) )",False,child_spec.on_demand,child.tag == element_name,0.6486592292785645 2411,"def _get_field_value ( self, test, key, match ) : if test. ver == ofproto_v1_0. OFP_VERSION : members = inspect. getmembers ( match ) for member in members : if member [ 0 ] == key : field_value = member [ 1 ] elif : wildcards = member [ 1 ] if key == ""nw_src"" : field_value = test. nw_src_to_str ( wildcards, field_value ) elif key == ""nw_dst"" : field_value = test. nw_dst_to_str ( wildcards, field_value ) else : field_value = match [ key ] return field_value",False,member[0] == 'wildcards',member[0] == match[TAB],0.6560980677604675 2412,"def release_dict_file ( ) : """"""Try to gather release information manually when other methods fail"""""" data = { } try : if os. path. exists ( ""/etc/lsb-release"" ) : data = { } with open ( ""/etc/lsb-release"" ) as f : for line in f : key, value = line. split ( ""="", 1 ) data [ key ] = value. strip ( ) elif : data = { } with open ( ""/etc/redhat-release"" ) as f : distro = f. readline ( ). strip ( ) import re match = re. match ( r""(.*) release (.*) \((.*)\)"", distro ) if match : data [ ""DISTRIB_ID"" ] = match. group ( 1 ) data [ ""DISTRIB_RELEASE"" ] = match. group ( 2 ) elif os. path. exists ( ""/etc/SuSE-release"" ) : data = { } data [ ""DISTRIB_ID"" ] = ""SUSE LINUX"" with open ( ""/etc/SuSE-release"" ) as f : for line in f : if line. startswith ( ""VERSION = "" ) : data [ ""DISTRIB_RELEASE"" ] = line [ 10 : ]. rstrip ( ) ",False,os.path.exists('/etc/redhat-release'),"os.path.exists(/etc/redhat-release"")",0.6474566459655762 2413,"def delete ( self, option ) : """"""Deletes media. ""last"", ""all"" or an integer are accepted values for option"""""" if self. whichCam ( ) == constants. Camera. Interface. GPControl : if isinstance ( option, int ) : for _ in range ( option ) : return self. gpControlCommand ( ""storage/delete/"" + ""last"" ) else : return self. gpControlCommand ( ""storage/delete/"" + option ) else : if : for _ in range ( option ) : return self. sendCamera ( ""DL"" ) else : if option == ""last"" : return self. sendCamera ( ""DL"" ) if option == ""all"" : return self. sendCamera ( ""DA"" )",False,"isinstance(option, int) == True",type(option) == int,0.6565436124801636 2414,"def _get_daily_spot_value ( self, asset, column, dt ) : reader = self. _get_pricing_reader ( ""daily"" ) if column == ""last_traded"" : last_traded_dt = reader. get_last_traded_dt ( asset, dt ) if : return pd. NaT else : return last_traded_dt elif column in OHLCV_FIELDS : try : return reader. 
get_value ( asset, dt, column ) except NoDataOnDate : return np. nan elif column == ""price"" : found_dt = dt while True : try : value = reader. get_value ( asset, found_dt, ""close"" ) if not isnull ( value ) : if dt == found_dt : return value else : return self. get_adjusted_value ( asset, column, found_dt, dt, ""minute"", spot_value = value ) else : found_dt -= self. trading_calendar. day except NoDataOnDate :",False,isnull(last_traded_dt),last_traded_dt is not None,0.6548629403114319 2415,"def get_conditions ( filters ) : conditions = { ""docstatus"" : ( ""="", 1 ) } if filters. get ( ""from_date"" ) and filters. get ( ""to_date"" ) : conditions [ ""result_date"" ] = ( ""between"", ( filters. get ( ""from_date"" ), filters. get ( ""to_date"" ) ), ) filters. pop ( ""from_date"" ) filters. pop ( ""to_date"" ) for key, value in filters. items ( ) : if : conditions [ key ] = value return conditions",False,filters.get(key),value,0.6526401042938232 2416,"def gen_code ( self, phase = ""test"" ) : self. phase = phase self. add_body ( 0, self. header_code ) for layer in self. IR_graph. topological_sort : current_node = self. IR_graph. get_node ( layer ) node_type = current_node. type if : func = getattr ( self, ""emit_"" + node_type ) func ( current_node ) else : print ( ""CaffeEmitter has not supported operator [%s]."" % ( node_type ) ) self. emit_UNKNOWN ( current_node ) self. add_body ( 0, """" ) self. add_body ( 0, self. end_code ) return self. body_code",False,"hasattr(self, 'emit_' + node_type)",node_type != 'const',0.6518499255180359 2417,"def fit ( self ) : print ( ""Start training..."" ) train_acc = 0.0 for i in range ( self. max_epoch ) : train_loss, train_acc = self. _train_step ( ) if self. device!= ""cpu"" : torch. cuda. empty_cache ( ) print ( f""#epoch {i} : train_loss: {train_loss}, train_acc: {train_acc}"" ) if not self. output_model_file == """" : if not os. path. exists ( ""./saved"" ) : os. mkdir ( ""./saved"" ) if isinstance ( self. model, GNNPred ) : model = self. model. gnn else : model = self. model if : torch. save ( model. state_dict ( ), self. output_model_file + ""_ft.pth"" ) else : torch. save ( model. state_dict ( ), self. output_model_file + "".pth"" ) return dict ( Acc = train_acc. item ( ) )",False,self.finetune,self.model.model_type == 'NHWC',0.6575861573219299 2418,"def annotation_specific ( self, mode, annotation_type, chapter, cursor_position ) : try : chapter_annotations = self. pw. annotation_dict [ chapter ] except KeyError : return False for i in chapter_annotations : if annotation_type == ""text"" : cursor_start = i [ ""cursor"" ] [ 0 ] cursor_end = i [ ""cursor"" ] [ 1 ] if cursor_start <= cursor_position <= cursor_end : if mode == ""check"" : return True if mode == ""delete"" : self. pw. annotation_dict [ chapter ]. remove ( i ) if : note = i [ ""note"" ] self. pw. parent. annotationNoteDock. set_annotation ( i ) self. pw. parent. annotationNoteEdit. setText ( note ) self. pw. parent. annotationNoteDock. show ( ) if mode == ""check"" : return False if mode == ""delete"" : scroll_position = self. pw. verticalScrollBar ( ). value ( ) self. clear_annotations ( ) self. load_annotations ( chapter ) self. pw. verticalScrollBar ( ). setValue ( scroll_position )",False,mode == 'note','note' in i,0.6607382297515869 2419,"def _parse_vhosts ( details ) : for service_alias, attr in details. iteritems ( ) : virtual_host_str = attr [ ""virtual_host_str"" ] = attr [ ""virtual_host"" ] parsed_virtual_host = [ ] if virtual_host_str : for h in [ h. 
strip ( ) for h in virtual_host_str. strip ( ). split ( "","" ) ] : pr = urlparse. urlparse ( h ) if not pr. netloc : pr = urlparse. urlparse ( ""http://%s"" % h ) port = ""443"" if pr. scheme. lower ( ) in [ ""https"", ""wss"" ] else ""80"" host = pr. netloc if "":"" in pr. netloc : host_port = pr. netloc. split ( "":"" ) host = host_port [ 0 ] port = host_port [ 1 ] parsed_virtual_host. append ( { ""scheme"" : pr. scheme, ""host"" : host, ""port"" : port, ""path"" : pr. path } ) details [ service_alias ] [ ""virtual_host"" ] = parsed_virtual_host vhosts = [ ] for service_alias, attr in details. iteritems ( ) : virtual_hosts = attr [ ""virtual_host"" ] if : for v in virtual_hosts : ",True,virtual_hosts,virtual_hosts,0.666537880897522 2420,"def func ( x, y ) : try : if : z = x + 2 * math. sin ( y ) return z ** 2 elif x == y : return 4 else : return 2 ** 3 except ValueError : foo = 0 for i in range ( 4 ) : foo += i return foo except TypeError : return 42 else : return 33 finally : print ( ""finished"" )",False,x > y,"isinstance(x, float) and isinstance(y, int)",0.6868847608566284 2421,"def get_string_width ( self, s ) : ""Get width of a string in the current font"" s = self. normalize_text ( s ) cw = self. current_font [ ""cw"" ] w = 0 l = len ( s ) if self. unifontsubset : for char in s : char = ord ( char ) if len ( cw ) > char : w += cw [ char ] elif : w += self. current_font [ ""desc"" ] [ ""MissingWidth"" ] else : w += 500 else : for i in range ( 0, l ) : w += cw. get ( s [ i ], 0 ) if self. font_stretching!= 100 : w = w * self. font_stretching / 100.0 return w * self. font_size / 1000.0",False,self.current_font['desc']['MissingWidth'],l > 0,0.6554661989212036 2422,"def editSize ( self, rpcObjects = None ) : subs = self. _getSelected ( rpcObjects ) if subs : current = max ( [ sub. data. size for sub in subs ] ) title = ""Edit Subscription Size"" body = ( ""Please enter the new subscription size value:\nThis "" ""should only be changed by administrators.\nPlease "" ""contact the resource department."" ) minSize = 0 decimalPlaces = 0 ( value, choice ) = QtWidgets. QInputDialog. getDouble ( self. _caller, title, body, current / 100.0, minSize, cuegui. Constants. QT_MAX_INT, decimalPlaces, ) if choice : msg = QtWidgets. QMessageBox ( ) msg. setText ( ""You are about to modify a number that can affect a show's billing. Are you "" ""sure you want to do this?"" ) msg. setStandardButtons ( QtWidgets. QMessageBox. Yes | QtWidgets. QMessageBox. No ) msg. setDefaultButton ( QtWidgets. QMessageBox. No ) if : return for sub in subs : self. cuebotCall ( sub. setSize",False,msg.exec_() == QtWidgets.QMessageBox.No,self.clanDialog,0.6532444357872009 2423,"def init_environment ( installdir ) : env_config_file = os. path. join ( installdir, ""environ.ini"" ) if os. path. exists ( env_config_file ) : import configparser env_config = configparser. ConfigParser ( allow_no_value = True, interpolation = configparser. ExtendedInterpolation ( ) ) env_config. optionxform = lambda option : option env_config [ ""DEFAULT"" ]. update ( ( k, v. replace ( ""$"", ""$$"" ) ) for k, v in os. environ. items ( ) ) env_config. read ( env_config_file ) for k, v in env_config [ ""Environment"" ]. items ( ) : os. environ [ k ] = _parse_environment_param ( v, installdir ) data_dir = os. path. normpath ( os. path. join ( installdir, ""share"" ) ) if os. path. exists ( data_dir ) : dirs = os. environ. get ( ""XDG_DATA_DIRS"" ) if : os. environ [ ""XDG_DATA_DIRS"" ] = dirs + os. pathsep + data_dir else : os. 
environ [ ""XDG_DATA_DIRS"" ] = data_dir",True,dirs,dirs,0.6916582584381104 2424,"def new_func ( self, * args, ** kwargs ) : obj = self. obj_ref ( ) attr = self. attr if obj is not None : args = tuple ( TrackedValue. make ( obj, attr, arg ) for arg in args ) if : kwargs = { key : TrackedValue. make ( obj, attr, value ) for key, value in iteritems ( kwargs ) } result = func ( self, * args, ** kwargs ) self. _changed_ ( ) return result",False,kwargs,len(kwargs),0.6956216096878052 2425,"def _run_somatic ( paired, ref_file, assoc_files, region, out_file, work_dir ) : if not utils. file_exists ( out_file ) : with file_transaction ( paired. tumor_data, work_dir ) as tx_work_dir : workflow_file = _configure_somatic ( paired, ref_file, region, out_file, tx_work_dir ) if : has_variants = True _run_workflow ( paired. tumor_data, workflow_file, tx_work_dir ) else : has_variants = False vcfutils. write_empty_vcf ( out_file, paired. tumor_data [ ""config"" ], [ dd. get_sample_name ( d ) for d in [ paired. tumor_data, paired. normal_data ] ], ) if has_variants : var_dir = os. path. join ( work_dir, ""results"", ""variants"" ) vcfutils. combine_variant_files ( [ _postprocess_somatic ( os. path. join ( var_dir, f ), paired ) ",False,workflow_file,paired.has_sample_name,0.6739505529403687 2426,"def _invoke_async_task ( invocation, planner ) : job_id = ""%016x"" % random. randint ( 0, 2 ** 64 ) context = invocation. connection. spawn_isolated_child ( ) _propagate_deps ( invocation, planner, context ) with mitogen. core. Receiver ( context. router ) as started_recv : call_recv = context. call_async ( ansible_mitogen. target. run_module_async, job_id = job_id, timeout_secs = invocation. timeout_secs, started_sender = started_recv. to_sender ( ), kwargs = planner. get_kwargs ( ), ) for msg in mitogen. select. Select ( [ started_recv, call_recv ] ) : if : raise msg. unpickle ( ) break return { ""stdout"" : json. dumps ( { ""changed"" : True, ""started"" : 1, ""finished"" : 0, ""ansible_job_id"" : job_id, } ) }",False,msg.receiver is call_recv,"hasattr(msg, 'unpickle')",0.6591400504112244 2427,"def onMESSAGE ( self, hwnd, msg, wp, lp ) : if msg == fw. WND_WM_NOTIFY : if wp == fw. WND_NM_MSGREFLECT : msgr = fw. WND_MSGREFLECT. from_address ( lp ) msgr. fReturn = self. _base_fMsgReflect if msgr. msg == self. Msg. WM_HSCROLL : return self. _base_HandleScroll ( hwnd, msgr. msg, msgr. wParam, msgr. lParam ) elif : return self. _base_HandleScroll ( hwnd, msgr. msg, msgr. wParam, msgr. lParam ) return 0 elif msg == self. Msg. WM_DESTROY : self. onMSG ( hwnd, ""destroy"", 0, 0 )",False,msgr.msg == self.Msg.WM_VSCROLL,msgr.msg == self.Msg.WM_SCROLL,0.6620676517486572 2428,"def __getitem__ ( self, index ) : img = Image. open ( self. images [ index ] ). convert ( ""RGB"" ) if self. mode == ""test"" : img = self. _img_transform ( img ) if : img = self. transform ( img ) return img, os. path. basename ( self. images [ index ] ) mask = Image. open ( self. masks [ index ] ) if self. mode == ""train"" : img, mask = self. _sync_transform ( img, mask ) elif self. mode == ""val"" : img, mask = self. _val_sync_transform ( img, mask ) else : assert self. mode == ""testval"" img, mask = self. _img_transform ( img ), self. _mask_transform ( mask ) if : img = self. transform ( img ) return img, mask",False,self.transform is not None,self.mode == 'extract',0.651931881904602 2429,"def _v2_common ( self, cfg ) : LOG. debug ( ""v2_common: handling config:\n%s"", cfg ) if ""nameservers"" in cfg : search = cfg. get ( ""nameservers"" ). 
get ( ""search"", [ ] ) dns = cfg. get ( ""nameservers"" ). get ( ""addresses"", [ ] ) name_cmd = { ""type"" : ""nameserver"" } if len ( search ) > 0 : name_cmd. update ( { ""search"" : search } ) if : name_cmd. update ( { ""addresses"" : dns } ) LOG. debug ( ""v2(nameserver) -> v1(nameserver):\n%s"", name_cmd ) self. handle_nameserver ( name_cmd )",True,len(dns) > 0,len(dns) > 0,0.655834436416626 2430,"def parse_key_equal_value ( text ) : """"""Parse a string of the form 'key1=value1 key2=value2'"""""" text = text. strip ( ) if not text : return { } last_space_pos = text. rfind ( "" "" ) if not text. startswith ( ""--"" ) and isidentifier ( text [ last_space_pos + 1 : ] ) : key = text [ last_space_pos + 1 : ] value = None result = { key : value } if last_space_pos > 0 : result. update ( parse_key_equal_value ( text [ : last_space_pos ] ) ) return result equal_sign_pos = None while True : equal_sign_pos = text. rfind ( ""="", None, equal_sign_pos ) if equal_sign_pos < 0 : return incorrectly_encoded_metadata ( text ) prev_whitespace = text [ : equal_sign_pos ]. rstrip ( ). rfind ( "" "" ) key = text [ prev_whitespace + 1 : equal_sign_pos ]. strip ( ) if : continue try : value = relax_json_loads ( text [ equal_sign_pos + 1 : ] ) except ( ValueError, SyntaxError ) : continue metadata = ( parse_key_equal_value ( text [ : prev_whitespace ] ) if prev_whitespace > 0 else { } ) metadata [ key ] = value util. ensure_dir ( dest ) for relpath, src, template in self. _file_templates : file_dest = os. path. join ( dest, relpath ) util. ensure_dir ( os. path. dirname ( file_dest ) ) if : shutil. copyfile ( src, file_dest ) else : _render_template ( template, vars, file_dest )",False,template is None,os.path.isfile(src),0.659921407699585 2432,"def parse_resources ( resources_args, fallback = None ) : """"""Parse resources from args."""""" resources = dict ( ) if resources_args is not None : valid = re. compile ( r""[a-zA-Z_]\w*$"" ) for res in resources_args : try : res, val = res. split ( ""="" ) except ValueError : raise ValueError ( ""Resources have to be defined as name=value pairs."" ) if not valid. match ( res ) : raise ValueError ( ""Resource definition must start with a valid identifier."" ) try : val = int ( val ) except ValueError : if : val = fallback ( val ) else : raise ValueError ( ""Resource definiton must contain an integer after the identifier."" ) if res == ""_cores"" : raise ValueError ( ""Resource _cores is already defined internally. Use a different name."" ) resources [ res ] = val return resources",False,fallback is not None,fallback,0.6737782955169678 2433,"def _read_value ( self, item ) : item = _normalize_path ( item ) if item in self. _store : if : del self. _store [ item ] raise KeyError ( item ) return PathResult ( item, value = self. _store [ item ] ) elif item in self. _children : return PathResult ( item, dir = True ) else : raise KeyError ( item )",False,item in self._expire_time and self._expire_time[item] < datetime.now(),self._children[item],0.6545792818069458 2434,"def splitwords ( str, minlength ) : words = [ ] i = 0 n = len ( str ) while i < n : while i < n and str [ i ] in "" \t\n"" : i = i + 1 if : break start = i i = findwordend ( str, i, n ) words. append ( str [ start : i ] ) while len ( words ) < minlength : words. append ( """" ) return words",False,i >= n,i == n,0.689909815788269 2435,"def _strided_slice_shape_func_input_shape ( data_shape, begin, end, strides, slice_mode ) : ndim = data_shape. 
shape [ 0 ] out = output_tensor ( ( ndim, ), ""int64"" ) for i in const_range ( ndim ) : cbegin = int64 ( 0 ) cend = int64 ( data_shape [ i ] ) cstride = int64 ( 1 ) if : cstride = int64 ( strides [ i ] ) if len ( begin ) > i : cbegin = int64 ( begin [ i ] ) if cbegin < 0 : cbegin += int64 ( data_shape [ i ] ) if len ( end ) <= i : cend = int64 ( data_shape [ i ] ) elif slice_mode!= 0 : cstride = int64 ( 1 ) if end [ i ] < 0 : cend = int64 ( data_shape [ i ] ) else : cend = cbegin + int64 ( end [ i ] ) else : if end [ i ] > data_shape [ i ] : cend = int64 ( data_shape [ i ] ) else : cend = int64 ( end [ i ] ) if cend < 0 : cend += int64 ( data_shape [ i ] ) i,stride != None,0.6640341281890869 2436,"def remove_dot_segments ( path ) : r = [ ] while path : if path. startswith ( ""../"" ) : path = path [ 3 : ] continue if : path = path [ 2 : ] continue if path. startswith ( ""/./"" ) : path = path [ 2 : ] continue if path == ""/."" : path = ""/"" continue if path. startswith ( ""/../"" ) : path = path [ 3 : ] if r : r. pop ( ) continue if path == ""/.."" : path = ""/"" if r : r. pop ( ) continue if path == ""."" : path = path [ 1 : ] continue if path == "".."" : path = path [ 2 : ] continue start = 0 if path. startswith ( ""/"" ) : start = 1 ii = path. find ( ""/"", start ) if ii < 0 : ii = None r. append ( path [ : ii ] ) ",False,path.startswith('./'),"path.startswith(../"")",0.6520379185676575 2437,"def PyJsHoisted__interopRequireWildcard_ ( obj, this, arguments, var = var ) : var = Scope ( { u""this"" : this, u""obj"" : obj, u""arguments"" : arguments }, var ) var. registers ( [ u""obj"", u""key"", u""newObj"" ] ) if var. get ( u""obj"" ) and var. get ( u""obj"" ). get ( u""__esModule"" ) : return var. get ( u""obj"" ) else : PyJs_Object_842_ = Js ( { } ) var. put ( u""newObj"", PyJs_Object_842_ ) if : for PyJsTemp in var. get ( u""obj"" ) : var. put ( u""key"", PyJsTemp ) if ( var. get ( u""Object"" ) . get ( u""prototype"" ) . get ( u""hasOwnProperty"" ) . callprop ( u""call"", var. get ( u""obj"" ), var. get ( u""key"" ) ) ) : var. get ( u""newObj"" ). put ( var. get ( u""key"" ), var. get ( u""obj"" ). get ( var. get ( u""key"" ) ) ) var. get ( u""newObj"" ). put ( u""default"", var. get ( u""obj"" ) ) <",False,var.get(u'obj') != var.get(u'null'),PYJsHoisted__.has_wildcard_(),0.6511889696121216 2438,"def _validate_options ( self ) : for option in self. options : if not type ( self. options [ option ] ) in [ bool, int ] : if self. options. required [ option ] is True and not self. options [ option ] : if : option = ""password"". upper ( ) raise FrameworkException ( ""Value required for the '%s' option."" % ( option. upper ( ) ) ) return",False,option == Constants.PASSWORD_CLEAR,self.options.required[option] is False,0.662002682685852 2439,"def iterscan ( self, string, idx = 0, context = None ) : """"""Yield match, end_idx for each match"""""" match = self. scanner. scanner ( string, idx ). match actions = self. actions lastend = idx end = len ( string ) while True : m = match ( ) if m is None : break matchbegin, matchend = m. span ( ) if lastend == matchend : break action = actions [ m. lastindex ] if action is not None : rval, next_pos = action ( m, context ) if : matchend = next_pos match = self. scanner. scanner ( string, matchend ). 
match yield rval, matchend lastend = matchend",False,next_pos is not None and next_pos != matchend,next_pos is not None,0.6482467651367188 2440,"def from_darknet ( self ) : """"""To convert the darknet symbol to relay functions."""""" for i in range ( self. _net. n ) : layer = self. _net. layers [ i ] need_skip, sym = self. _preproc_layer ( layer, i ) if need_skip : continue processed, sym = self. _handle_darknet_rnn_layers ( i, sym ) if : continue attr = self. _get_darknet_attrs ( layer, i ) op_name = self. _get_opname ( layer ) prefix = _get_params_prefix ( op_name, i ) params = self. _get_darknet_params ( self. _net. layers [ i ], prefix ) sym = _darknet_convert_symbol ( op_name, _as_list ( sym ), params, attr, prefix ) if params : self. _tvmparams. update ( params ) self. _sym_array [ i ] = sym self. _make_outlist ( sym, prefix, layer, i ) outputs = _as_list ( sym ) + self. _outs outputs = outputs [ 0 ] if len ( outputs ) == 1 else _expr. Tuple ( outputs ) sym = _function. Function ( analysis. free_vars ( outputs ), outputs ) return IRModule. from_expr ( sym ), self. _tvmparams",True,processed,processed,0.7120829820632935 2441,"def load_weights_from_unsupervised ( self, unsupervised_model ) : update_state_dict = copy. deepcopy ( self. network. state_dict ( ) ) for param, weights in unsupervised_model. network. state_dict ( ). items ( ) : if : new_param = ""tabnet."" + param else : new_param = param if self. network. state_dict ( ). get ( new_param ) is not None : update_state_dict [ new_param ] = weights self. network. load_state_dict ( update_state_dict )",False,param.startswith('encoder'),"isinstance(param, str)",0.6528903245925903 2442,"def is_key_allowed_v1 ( self, api_resource : str, api_operation : str, api_name : str, extra_data : Optional [ Dict [ str, Any ] ] = None, ) -> bool : """"""Checks if a key is allowed with v1 permissions"""""" allowed = self. permissions_document [ ""default_allow"" ] try : validation_function = ENDPOINT_MAP [ api_name ] except Exception : _log. exception ( f""Error api_name {api_name} is wrong. This should never happen!"" ) raise RuntimeError ( f""'{api_name}' is not a valid know api_name"" ) group_allow = _process_api_resource ( self. permissions_document [ ""permissions"" ], api_operation ) if group_allow is not None : allowed = group_allow api_resource_permissions = self. permissions_document [ ""permissions"" ]. get ( api_resource ) if api_resource_permissions : group_allow = _process_api_resource ( api_resource_permissions, api_operation ) if group_allow is not None : allowed = group_allow api_name_permissions = api_resource_permissions. get ( api_name ) if api_name_permissions : endpoint_allow = validation_function ( api_name_permissions, extra_data ) if : allowed = endpoint_allow return allowed",True,endpoint_allow is not None,endpoint_allow is not None,0.6556025743484497 2443,"def _serialize ( self, value, attr, obj ) : usages = value. _usages usage_list = { } for usage in usages : if usage == x509. oid. ExtendedKeyUsageOID. CLIENT_AUTH : usage_list [ ""useClientAuthentication"" ] = True elif : usage_list [ ""useServerAuthentication"" ] = True elif usage == x509. oid. ExtendedKeyUsageOID. CODE_SIGNING : usage_list [ ""useCodeSigning"" ] = True elif usage == x509. oid. ExtendedKeyUsageOID. EMAIL_PROTECTION : usage_list [ ""useEmailProtection"" ] = True elif usage == x509. oid. ExtendedKeyUsageOID. TIME_STAMPING : usage_list [ ""useTimestamping"" ] = True elif usage == x509. oid. ExtendedKeyUsageOID. 
OCSP_SIGNING : usage_list [ ""useOCSPSigning"" ] = True elif usage. dotted_string == ""1.3.6.1.5.5.7.3.14"" : usage_list [ ""useEapOverLAN"" ] = True elif usage. dotted_string == ""1.3.6.1.5.5.7.3.13"" : usage_list [ ""useEapOverPPP"" ] = True elif usage. dotted_string == ""1.3.6.1.4.1.311.20.2.2"" : usage_list [ ""useSmartCardLogon"" ] = True else : current_app. logger. warning ( ""Unable to serialize ExtendedKeyUsage with",True,usage == x509.oid.ExtendedKeyUsageOID.SERVER_AUTH,usage == x509.oid.ExtendedKeyUsageOID.SERVER_AUTH,0.6573565602302551 2444,"def realizeElementExpressions ( innerElement ) : elementHasBeenRealized = False for exp in innerElement. expressions : if : continue before, during, after = exp. realize ( innerElement ) elementHasBeenRealized = True for n in before : newStream. append ( n ) if during is not None : newStream. append ( during ) for n in after : newStream. append ( n ) if elementHasBeenRealized is False : newStream. append ( innerElement )",False,"not hasattr(exp, 'realize')","isinstance(exp, EnerElement)",0.6537826061248779 2445,"def parse_extra_frontend_settings ( envvars ) : settings_dict = { } if isinstance ( envvars, os. _Environ ) or isinstance ( envvars, dict ) : frontend_settings_pattern = re. compile ( r""^EXTRA_FRONTEND_SETTINGS_(\d{1,5})$"" ) frontend_settings_file_pattern = re. compile ( r""^EXTRA_FRONTEND_SETTINGS_FILE_(\d{1,5})$"" ) for k, v in envvars. iteritems ( ) : settings = [ ] match = frontend_settings_pattern. match ( k ) file_match = frontend_settings_file_pattern. match ( k ) if match : port = match. group ( 1 ) settings. extend ( [ x. strip ( ). replace ( ""\,"", "","" ) for x in re. split ( r""(? ] ) elif file_match : port = file_match. group ( 1 ) try : with open ( v ) as file : for line in file : settings. append ( line. strip ( ) ) 0,settings_dict,0.6594579219818115 2446,"def __call__ ( self, message ) : with self. _lock : self. _pending_ack += 1 self. max_pending_ack = max ( self. max_pending_ack, self. _pending_ack ) self. seen_message_ids. append ( int ( message. attributes [ ""seq_num"" ] ) ) time. sleep ( self. _processing_time ) with self. _lock : self. _pending_ack -= 1 message. ack ( ) self. completed_calls += 1 if : if not self. done_future. done ( ) : self. done_future. set_result ( None )",False,self.completed_calls >= self._resolve_at_msg_count,self.done_future is None,0.6495327949523926 2447,"def parametrize ( self, *, x, conditions ) : log_epsilon = tf_util. constant ( value = np. log ( util. epsilon ), dtype = ""float"" ) shape = ( - 1, ) + self. action_spec. shape mean = self. mean. apply ( x = x ) if : mean = tf. reshape ( tensor = mean, shape = shape ) if self. global_stddev : multiples = ( tf. shape ( input = x ) [ 0 ], ) + tuple ( 1 for _ in range ( self. action_spec. rank ) ) log_stddev = tf. tile ( input = self. log_stddev, multiples = multiples ) else : log_stddev = self. log_stddev. apply ( x = x ) if : log_stddev = tf. reshape ( tensor = log_stddev, shape = shape ) if ( self. action_spec. min_value is not None and self. action_spec. max_value is not None ) : log_stddev += tf_util. constant ( value = np. log ( 0.1 ), dtype = ""float"" ) log_stddev = tf. clip_by_value ( t = log_stddev, clip_value_min = log_epsilon, clip_value_max = - log_epsilon ) stddev = tf. math. exp ( x = log_stddev ) return TensorDict ( mean = mean, stddev = stddev, log_stddev = log_stddev )",False,len(self.input_spec.shape) == 1,conditions,0.6470438838005066 2448,"def get ( self ) : name = request. args. 
get ( ""filename"" ) if name is not None : opts = dict ( ) opts [ ""type"" ] = ""episode"" result = guessit ( name, options = opts ) res = dict ( ) if : res [ ""episode"" ] = result [ ""episode"" ] else : res [ ""episode"" ] = 0 if ""season"" in result : res [ ""season"" ] = result [ ""season"" ] else : res [ ""season"" ] = 0 if ""subtitle_language"" in result : res [ ""subtitle_language"" ] = str ( result [ ""subtitle_language"" ] ) return jsonify ( data = res ) else : return """", 400",True,'episode' in result,'episode' in result,0.6783090233802795 2449,"def set_message_type_visibility ( self, message_type : MessageType ) : try : rows = { i for i, msg in enumerate ( self. proto_analyzer. messages ) if msg. message_type == message_type } if : self. ui. tblViewProtocol. show_rows ( rows ) else : self. ui. tblViewProtocol. hide_rows ( rows ) except Exception as e : logger. exception ( e )",False,message_type.show,self.ui.is_show_rows,0.6577572226524353 2450,"def __setLoadCmd ( self ) : base = self. __rawLoadCmd for _ in range ( self. __machHeader. ncmds ) : command = LOAD_COMMAND. from_buffer_copy ( base ) if : segment = SEGMENT_COMMAND. from_buffer_copy ( base ) self. __setSections ( segment, base [ 56 : ], 32 ) elif command. cmd == MACHOFlags. LC_SEGMENT_64 : segment = SEGMENT_COMMAND64. from_buffer_copy ( base ) self. __setSections ( segment, base [ 72 : ], 64 ) base = base [ command. cmdsize : ]",False,command.cmd == MACHOFlags.LC_SEGMENT,command.cmd == MACHOFlags.LC_SEGMENT_ADDRESS,0.6592795848846436 2451,"def test_node_label_pull_scenarios ( graph ) : label_sets = [ set ( ), { ""Foo"" }, { ""Foo"", ""Bar"" }, { ""Spam"" } ] for old_labels in label_sets : for new_labels in label_sets : node = Node ( * old_labels ) graph. create ( node ) node_id = node. identity assert set ( node. labels ) == old_labels if old_labels : remove_clause = ""REMOVE a:%s"" % "":"". join ( old_labels ) else : remove_clause = """" if : set_clause = ""SET a:%s"" % "":"". join ( new_labels ) else : set_clause = """" if remove_clause or set_clause : graph. run ( ""MATCH (a) WHERE id(a)=$x %s %s"" % ( remove_clause, set_clause ), x = node_id, ) graph. pull ( node ) assert ( set ( node. labels ) == new_labels ), ""Failed to pull new labels %r over old labels %r"" % ( new_labels, <",True,new_labels,new_labels,0.6749863624572754 2452,"def process_request ( self, request ) : force_host = settings. SSL_FORCE_HOST response = None if force_host and request. get_host ( ). split ( "":"" ) [ 0 ]!= force_host : url = ""http://%s%s"" % ( force_host, request. get_full_path ( ) ) response = HttpResponsePermanentRedirect ( url ) elif settings. SSL_ENABLED and not settings. DEV_SERVER : url = ""%s%s"" % ( request. get_host ( ), request. get_full_path ( ) ) path = request. path if settings. USE_I18N and path [ 1 : 3 ] in self. languages ( ) : path = path [ 3 : ] if path. startswith ( settings. SSL_FORCE_URL_PREFIXES ) : if not request. is_secure ( ) : response = HttpResponseRedirect ( ""https://%s"" % url ) elif : response = HttpResponseRedirect ( ""http://%s"" % url ) if response and request. method == ""POST"" : if resolve ( request. get_full_path ( ) ). url_name == ""fb_do_upload"" : return response. status_code = 307 return response",False,request.is_secure() and settings.SSL_FORCED_PREFIXES_ONLY,request.method == 'GET',0.6512200832366943 2453,"def print_tools ( self, buf = sys. stdout ) : data = self. get_tools ( ) if not data : return _pr = Printer ( buf ) conflicts = set ( self. get_conflicting_tools ( ). 
keys ( ) ) rows = [ [ ""TOOL"", ""PACKAGE"", """" ], [ ""----"", ""-------"", """" ] ] colors = [ None, None ] for _, ( variant, tools ) in sorted ( data. items ( ) ) : pkg_str = variant. qualified_package_name for tool in sorted ( tools ) : col = None row = [ tool, pkg_str, """" ] if : col = critical row [ - 1 ] = ""(in conflict)"" rows. append ( row ) colors. append ( col ) for col, line in zip ( colors, columnise ( rows ) ) : _pr ( line, col )",False,tool in conflicts,conflicting,0.675499439239502 2454,"def process_all ( self, lines, times = 1 ) : gap = False for _ in range ( times ) : for line in lines : if : self. write ( """" ) self. process ( line ) if not is_command ( line ) : gap = True return 0",True,gap,gap,0.6850862503051758 2455,"def test_load_with_missing ( self ) : with open ( ""state"", ""w"" ) as state_file : print ( ""!GlobalState"", file = state_file ) print ( ""assets: "", file = state_file ) if self. build_packages : print ( "" build-packages: {}"". format ( self. build_packages ), file = state_file ) if : print ( "" build-snaps: {}"". format ( self. build_snaps ), file = state_file ) if self. required_grade : print ( "" required-grade: {}"". format ( self. required_grade ), file = state_file ) global_state = GlobalState. load ( filepath = ""state"" ) self. assertThat ( global_state. get_build_packages ( ), Equals ( self. build_packages ) ) self. assertThat ( global_state. get_build_snaps ( ), Equals ( self. build_snaps ) ) self. assertThat ( global_state. get_required_grade ( ), Equals ( self. required_grade ) )",True,self.build_snaps,self.build_snaps,0.656694769859314 2456,"def test_len ( self ) : eq = self. assertEqual eq ( base64MIME. base64_len ( ""hello"" ), len ( base64MIME. encode ( ""hello"", eol = """" ) ) ) for size in range ( 15 ) : if size == 0 : bsize = 0 elif size <= 3 : bsize = 4 elif : bsize = 8 elif size <= 9 : bsize = 12 elif size <= 12 : bsize = 16 else : bsize = 20 eq ( base64MIME. base64_len ( ""x"" * size ), bsize )",False,size <= 6,size <= 8,0.6720942854881287 2457,"def wrapped ( * args, ** kwargs ) : count = 0 while True : try : return func ( * args, ** kwargs ) except ( HTTPException, HTTPError, socket. error, socket. timeout ) : if : raise logger. info ( ""Throttling API requests..."" ) time. sleep ( 2 ** count * 0.5 ) count += 1",False,count >= 10,count > 5,0.6895406246185303 2458,"def __define_authform_utils ( self ) : settings = { ""form_registration_rw"" : { ""writable"" : [ ], ""readable"" : [ ] }, ""form_profile_rw"" : { ""writable"" : [ ], ""readable"" : [ ] }, } for config_dict in settings. keys ( ) : rw_data = self. __base_visibility ( ) rw_data. update ( ** self. fields_rw ) rw_data. update ( ** getattr ( self, config_dict ) ) for key, value in rw_data. items ( ) : if isinstance ( value, ( tuple, list ) ) : readable, writable = value else : readable = writable = value if : settings [ config_dict ] [ ""readable"" ]. append ( key ) if writable : settings [ config_dict ] [ ""writable"" ]. append ( key ) setattr ( self, ""_merged_form_rw_"", { ""registration"" : settings [ ""form_registration_rw"" ], ""profile"" : settings [ ""form_profile_rw"" ], }, )",True,readable,readable,0.7143868207931519 2459,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. 
readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRING : self. dbName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. tblName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. STRING : self. expr = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 4 : if ftype == TType. STRING : self. defaultPartitionName = iprot. readString ( ) else : i",False,ftype == TType.I16,fid == 2,0.6591702699661255 2460,"def hadoop_fs_rmr ( stdout, stderr, environ, * args ) : yarn = mock_hadoop_uses_yarn ( environ ) if : print ( ""rmr: DEPRECATED: Please use 'rm -r' instead."", file = stderr ) if args and args [ 0 ] == ""-skipTrash"" : path_args = args [ 1 : ] else : path_args = args if not path_args : if : print ( ""-rmr: Not enough arguments: expected 1 but got 0"", file = stderr ) print ( ""Usage: hadoop fs [generic options] -rmr"" "" ..."", file = stderr ) else : print ( ""Usage: java FsShell [-rmr [-skipTrash] ]"", file = stderr ) return - 1 return _hadoop_fs_rm ( ""rmr"", stdout, stderr, environ, path_args = path_args, recursive = True, force = False )",False,yarn,len(args) != 1,0.6881247758865356 2461,"def _parse_plugins_key ( cls, key ) : if not key : return None, None plugins = key. split ( ""+"" ) result = [ ] for plugin_key in plugins : plugin_name, plugin_module = cls. _parse_plugin_key ( plugin_key ) if : continue result. append ( ( plugin_name, plugin_module ) ) result. append ( cls. _parse_plugin_key ( key. replace ( ""+"", ""-"" ) ) ) return result",False,not plugin_name or not plugin_module,plugin_name is None,0.6530880928039551 2462,"def canvas_size ( self ) : """"""Return the width and height for this sprite canvas"""""" width = height = 0 for image in self. images : x = image. x + image. absolute_width y = image. y + image. absolute_height if : width = x if height < y : height = y return round_up ( width ), round_up ( height )",True,width < x,width < x,0.679709792137146 2463,"def _get_season_search_strings ( self, show, season = None ) : if not show : return [ { } ] to_return = [ ] name_exceptions = scene_exceptions. get_scene_exceptions ( show. tvdbid ) + [ show. name ] for cur_exception in name_exceptions : cur_params = { } if show. tvrid : cur_params [ ""rid"" ] = show. tvrid else : cur_params [ ""q"" ] = helpers. sanitizeSceneName ( cur_exception ) if season!= None : if : cur_params [ ""season"" ] = season. split ( ""-"" ) [ 0 ] if ""q"" in cur_params : cur_params [ ""q"" ] += ""."" + season. replace ( ""-"", ""."" ) else : cur_params [ ""q"" ] = season. replace ( ""-"", ""."" ) else : cur_params [ ""season"" ] = season if not ( ""rid"" in cur_params and to_return ) : to_return. append ( cur_params ) return to_return",False,show.air_by_date,season != None,0.6524279117584229 2464,"def ensure_connection ( engine ) : remaining_attempts = FLAGS. sql_max_retries while True : try : engine. connect ( ) return except sqlalchemy. exc. OperationalError : if : raise LOG. warning ( _ ( ""SQL connection failed (%(connstring)s). "" ""%(attempts)d attempts left."" ), { ""connstring"" : FLAGS. sql_connection, ""attempts"" : remaining_attempts }, ) time. sleep ( FLAGS. sql_retry_interval ) remaining_attempts -= 1",False,remaining_attempts == 0,remaining_attempts <= 0,0.6710488200187683 2465,"def _download ( dset ) : for url in dset. urls : file = os. path. basename ( urlparse ( url ). 
path ) out_path = os. path. join ( DATA_DIR, file ) if : print ( ""Downloading - {}."". format ( url ) ) urlretrieve ( url, out_path ) print ( ""Download complete."" )",True,not os.path.exists(out_path),not os.path.exists(out_path),0.646681547164917 2466,"def init_author_file ( self ) : self. author_map = { } if self. ui. config ( ""git"", ""authors"" ) : f = open ( self. repo. wjoin ( self. ui. config ( ""git"", ""authors"" ) ) ) try : for line in f : line = line. strip ( ) if : continue from_, to = RE_AUTHOR_FILE. split ( line, 2 ) self. author_map [ from_ ] = to finally : f. close ( )",False,not line or line.startswith('#'),not line,0.6481633186340332 2467,"def get_indexes ( self, cursor, table_name ) : cursor. execute ( ""SHOW INDEX FROM {0}"" """". format ( self. connection. ops. quote_name ( table_name ) ) ) rows = list ( cursor. fetchall ( ) ) multicol_indexes = set ( ) for row in rows : if row [ 3 ] > 1 : multicol_indexes. add ( row [ 2 ] ) indexes = { } for row in rows : if row [ 2 ] in multicol_indexes : continue if : indexes [ row [ 4 ] ] = { ""primary_key"" : False, ""unique"" : False } if row [ 2 ] == ""PRIMARY"" : indexes [ row [ 4 ] ] [ ""primary_key"" ] = True if not row [ 1 ] : indexes [ row [ 4 ] ] [ ""unique"" ] = True return indexes",True,row[4] not in indexes,row[4] not in indexes,0.6560623049736023 2468,"def when ( self, matches, context ) : to_remove = [ ] to_ignore = set ( ) remove = False for filepart in matches. markers. named ( ""path"" ) : year = matches. range ( filepart. start, filepart. end, predicate = lambda m : m. name == ""year"", index = 0 ) if : remove = True next_match = matches. range ( year. end, filepart. end, predicate = lambda m : m. private, index = 0 ) if ( next_match and not matches. holes ( year. end, next_match. start, predicate = lambda m : m. raw. strip ( seps ) ) and not matches. at_match ( next_match, predicate = lambda m : m. name == ""year"" ) ) : to_ignore. add ( next_match. initiator ) to_ignore. update ( matches. range ( filepart. start, filepart. end, predicate = lambda m : len ( m. children. named ( ""episode"" ) ) > 1, ) <",False,year,remove,0.7028327584266663 2469,"def send_push ( self, api_key, title, body, url = None, destination = None, destination_type = None ) : push_type = ""link"" if url else ""note"" notification = { ""type"" : push_type, ""title"" : title, ""body"" : body } if url : notification [ ""url"" ] = url if destination : notification [ destination_type ] = destination headers = { ""Authorization"" : b""Basic "" + base64. b64encode ( api_key. encode ( ""ascii"" ) ), ""Content-Type"" : ""application/json"", ""Accept"" : ""application/json"", ""User-Agent"" : ""Flexget"", } try : response = requests. post ( PUSHBULLET_URL, headers = headers, json = notification ) except RequestException as e : if : if e. response. status_code == 429 : reset_time = datetime. datetime. fromtimestamp ( int ( e. response. headers [ ""X-Ratelimit-Reset"" ] ) ). strftime ( ""%Y-%m-%d %H:%M:%S"" ) message = ( ""Monthly Pushbullet database operations limit reached. Next reset: %s"" % reset_time ) else : message = e. response. json ( ) [ ""error"" ] [ ""message"" ] else : message = str ( e",False,e.response is not None,e.response_code != 200,0.6556862592697144 2470,"def ParseFile ( self, knowledge_base : rdf_client. KnowledgeBase, pathspec : rdf_paths. PathSpec, filedesc : IO [ bytes ], ) -> Iterator [ rdf_webhistory. BrowserHistoryItem ] : del knowledge_base chrome = ChromeParser ( ) path = pathspec. 
CollapsePath ( ) for timestamp, entry_type, url, data1, _, _ in chrome. Parse ( path, filedesc ) : if entry_type == ""CHROME_DOWNLOAD"" : yield rdf_webhistory. BrowserHistoryItem ( url = url, domain = urlparse. urlparse ( url ). netloc, access_time = timestamp, program_name = ""Chrome"", source_path = pathspec. CollapsePath ( ), download_path = data1, ) elif : yield rdf_webhistory. BrowserHistoryItem ( url = url, domain = urlparse. urlparse ( url ). netloc, access_time = timestamp, program_name = ""Chrome"", source_path = pathspec. CollapsePath ( ), title = data1, )",False,entry_type == 'CHROME_VISIT',"entry_type == ""CHROME_READ_URL'",0.6542778015136719 2471,"def test_read_value ( self, replicator_fn, cross_replica ) : replicator = replicator_fn ( ) if replicator is None : self. skipTest ( ""No replicator supplied."" ) with replicator. scope ( ) : v = tf. Variable ( 0.0 ) if cross_replica : values = [ v. read_value ( ) ] else : if : read_value_fn = tf. function ( v. read_value ) else : read_value_fn = v. read_value values = replicator. run ( read_value_fn ) values = replicator. experimental_local_results ( values ) for component in v. _values : for value in values : self. assertAllEqual ( component. read_value ( ), value )",False,"isinstance(replicator, snt_replicator.TpuReplicator)",tf.function is not None,0.6506149768829346 2472,"def runas ( args = sys. argv, executable = sys. executable, cwd = None, nShow = 1, waitClose = True, waitTimeout = - 1, ) : if not 0 <= nShow <= 10 : nShow = 1 err = None try : if : args = subprocess. list2cmdline ( args ) pExecInfo = ShellExecuteInfo ( ) pExecInfo. cbSize = ctypes. sizeof ( pExecInfo ) pExecInfo. fMask |= SEE_MASK_NOCLOSEPROCESS pExecInfo. lpVerb = b""open"" if is_admin ( ) else b""runas"" pExecInfo. lpFile = encode_for_locale ( executable ) pExecInfo. lpParameters = encode_for_locale ( args ) pExecInfo. lpDirectory = encode_for_locale ( cwd ) pExecInfo. nShow = nShow if ShellExecuteEx ( pExecInfo ) : if waitClose : WaitForSingleObject ( pExecInfo. hProcess, waitTimeout ) return True else : return pExecInfo. hProcess else : err = SE_ERR_CODES. get ( pExecInfo. hInstApp, ""unknown"" ) except Exception as e : err = e if err : print ( ( ""runas failed! error: %r"" % err ) )",False,"args is not None and (not isinstance(args, str))",subprocess,0.653452455997467 2473,"def __init__ ( self, num_spherical, num_radial, cutoff = 5.0, envelope_exponent = 5 ) : super ( SphericalBasisLayer, self ). __init__ ( ) assert num_radial <= 64 self. num_spherical = num_spherical self. num_radial = num_radial self. cutoff = cutoff self. envelope = Envelope ( envelope_exponent ) bessel_forms = bessel_basis ( num_spherical, num_radial ) sph_harm_forms = real_sph_harm ( num_spherical ) self. sph_funcs = [ ] self. bessel_funcs = [ ] x, theta = sym. symbols ( ""x theta"" ) modules = { ""sin"" : torch. sin, ""cos"" : torch. cos } for i in range ( num_spherical ) : if : sph1 = sym. lambdify ( [ theta ], sph_harm_forms [ i ] [ 0 ], modules ) ( 0 ) self. sph_funcs. append ( lambda x : torch. zeros_like ( x ) + sph1 ) else : sph = sym. lambdify ( [ theta ], sph_harm_forms [ i ] [ 0 ], modules ) self. sph_funcs. append ( sph ) for j in range ( num_radial ) : bessel = sym. lambdify ( [ x ], bessel_forms [ i ] [ j ], modules ) self. bessel_funcs. append ( bessel )",False,i == 0,num_radial == 64,0.677891731262207 2474,"def builder ( ) : try : res = self. _svnwithrev ( ""ls"", ""-v"" ) except process. cmdexec. Error : e = sys. exc_info ( ) [ 1 ] if e. err. 
find ( ""non-existent in that revision"" )!= - 1 : raise py. error. ENOENT ( self, e. err ) elif : raise py. error. ENOENT ( self, e. err ) elif e. err. find ( ""File not found"" )!= - 1 : raise py. error. ENOENT ( self, e. err ) elif e. err. find ( ""not part of a repository"" )!= - 1 : raise py. error. ENOENT ( self, e. err ) elif e. err. find ( ""Unable to open"" )!= - 1 : raise py. error. ENOENT ( self, e. err ) elif e. err. lower ( ). find ( ""method not allowed"" )!= - 1 : raise py. error. EACCES ( self, e. err ) raise py. error. Error ( e. err ) lines = res. split ( ""\n"" ) nameinfo_seq = [ ] for lsline in lines : if lsline : info = InfoSvnCommand ( lsline ) if info. _name!= ""."" : nameinfo_seq. append ( ( info. _name, info ) ) nameinfo_seq. sort ( ) return nameinfo_seq",False,e.err.find('E200009:') != -1,e.err.find('File found') != -1,0.6513532400131226 2475,"def star_op ( self ) : """"""Put a '*' op, with special cases for *args."""""" val = ""*"" if self. paren_level : i = len ( self. code_list ) - 1 if self. code_list [ i ]. kind == ""blank"" : i -= 1 token = self. code_list [ i ] if : self. op_no_blanks ( val ) elif token. value == "","" : self. blank ( ) self. add_token ( ""op-no-blanks"", val ) else : self. op ( val ) else : self. op ( val )",False,token.kind == 'lt',token.kind == 'no-blanks',0.6594384908676147 2476,"def test_values_extended ( self ) : entries = grp. getgrall ( ) if len ( entries ) > 1000 : self. skipTest ( ""huge group file, extended test skipped"" ) for e in entries : e2 = grp. getgrgid ( e. gr_gid ) self. check_value ( e2 ) self. assertEqual ( e2. gr_gid, e. gr_gid ) name = e. gr_name if : continue e2 = grp. getgrnam ( name ) self. check_value ( e2 ) self. assertEqual ( e2. gr_name. lower ( ), name. lower ( ) )",False,name.startswith('+') or name.startswith('-'),name is None,0.644282877445221 2477,"def _presolve ( self, * args, ** kwds ) : if not isinstance ( args [ 0 ], six. string_types ) : self. _instance = args [ 0 ] xfrm = TransformationFactory ( ""mpec.nl"" ) xfrm. apply_to ( self. _instance ) if : self. _instance = None else : args = ( self. _instance, ) else : self. _instance = None SystemCallSolver. _presolve ( self, * args, ** kwds )",False,len(self._instance._transformation_data['mpec.nl'].compl_cuids) == 0,len(args) == 0,0.6532272100448608 2478,"def update_or_create_direct_relations ( self, attrs, relations ) : for field_name, ( field, field_source ) in relations. items ( ) : obj = None data = self. get_initial ( ) [ field_name ] model_class = field. Meta. model pk = self. _get_related_pk ( data, model_class ) if : obj = model_class. objects. filter ( pk = pk, ). first ( ) serializer = self. _get_serializer_for_field ( field, instance = obj, data = data, ) try : serializer. is_valid ( raise_exception = True ) attrs [ field_source ] = serializer. save ( ** self. _get_save_kwargs ( field_name ) ) except ValidationError as exc : raise ValidationError ( { field_name : exc. detail } )",True,pk,pk,0.7025264501571655 2479,"def _get_subtype ( dtype : torch. 
dtype, format : str, encoding : str, bits_per_sample : int ) : if format == ""wav"" : return _get_subtype_for_wav ( dtype, encoding, bits_per_sample ) if format == ""flac"" : if encoding : raise ValueError ( ""flac does not support encoding."" ) if not bits_per_sample : return ""PCM_24"" if : raise ValueError ( ""flac does not support bits_per_sample > 24."" ) return ""PCM_S8"" if bits_per_sample == 8 else f""PCM_{bits_per_sample}"" if format in ( ""ogg"", ""vorbis"" ) : if encoding or bits_per_sample : raise ValueError ( ""ogg/vorbis does not support encoding/bits_per_sample."" ) return ""VORBIS"" if format == ""sph"" : return _get_subtype_for_sphere ( encoding, bits_per_sample ) if format in ( ""nis"", ""nist"" ) : return ""PCM_16"" raise ValueError ( f""Unsupported format: {format}"" )",False,bits_per_sample > 24,not bits_per_sample,0.6492035388946533 2480,"def _parse_preamble ( self ) : """"""Parse metadata about query (PRIVATE)."""""" meta = { } while self. line : regx = re. search ( _RE_QUERY, self. line ) if : self. query_id = regx. group ( 1 ) if self. line. startswith ( ""Match_columns"" ) : self. seq_len = int ( self. line. strip ( ). split ( ) [ 1 ] ) self. line = self. handle. readline ( ). strip ( ) return meta",True,regx,regx,0.6682659387588501 2481,"def test_non_uniform_probabilities_over_elements ( self ) : param = iap. Choice ( [ 0, 1 ], p = [ 0.25, 0.75 ] ) samples = param. draw_samples ( ( 10000, ) ) unique, counts = np. unique ( samples, return_counts = True ) assert len ( unique ) == 2 for val, count in zip ( unique, counts ) : if val == 0 : assert 2500 - 500 < count < 2500 + 500 elif : assert 7500 - 500 < count < 7500 + 500 else : assert False",True,val == 1,val == 1,0.673570990562439 2482,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. LIST : self. partitions = [ ] ( _etype367, _size364 ) = iprot. readListBegin ( ) for _i368 in xrange ( _size364 ) : _elem369 = Partition ( ) _elem369. read ( iprot ) self. partitions. append ( _elem369 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,fid == 1,fid == TType.PUSH,0.6745845079421997 2483,"def onMessage ( self, message, metadata ) : if ""details"" in message : if ""http_user_agent"" in message [ ""details"" ] : if : message = None return message return ( message, metadata )",False,message['details']['http_user_agent'] == 'ELB-HealthChecker/1.0',message['http_user_agent'] != message['details'],0.6523468494415283 2484,"def _blob_name_from_object_path ( cls, name, container_name ) : scheme = urlparse ( name ). scheme if scheme : if scheme!= cls. scheme : raise StorageError ( ""When using a URL, only the `{}` scheme is supported for Azure storage: {}"", cls. scheme, name, ) f = furl ( name ) if not f. path. segments : raise StorageError ( ""Missing container name in URL {}"", name, ) parsed_container_name = f. path. segments [ 0 ] if : raise StorageError ( ""Container name mismatch (expected {}, found {}) in {}"", container_name, parsed_container_name, name, ) if len ( f. path. 
segments ) == 1 : raise StorageError ( ""No path found following container name {} in {}"", container_name, name, ) return f. path. segments [ 0 ], os. path. join ( * f. path. segments [ 1 : ] ) return name",True,parsed_container_name != container_name,parsed_container_name != container_name,0.6523797512054443 2485,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if : self. success = TCancelOperationResp ( ) self. success. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRUCT,self.success is not None,0.6601253747940063 2486,"def get_quarantine_count ( self ) : """"""get obj/container/account quarantine counts"""""" qcounts = { ""objects"" : 0, ""containers"" : 0, ""accounts"" : 0 } qdir = ""quarantined"" for device in os. listdir ( self. devices ) : for qtype in qcounts : qtgt = os. path. join ( self. devices, device, qdir, qtype ) if : linkcount = os. lstat ( qtgt ). st_nlink if linkcount > 2 : qcounts [ qtype ] += linkcount - 2 return qcounts",False,os.path.exists(qtgt),qtgt and os.lstat(qtgt),0.6544297933578491 2487,"def test_attributes_bad_port ( self ) : """"""Check handling of invalid ports."""""" for bytes in ( False, True ) : for parse in ( urllib. parse. urlsplit, urllib. parse. urlparse ) : for port in ( ""foo"", ""1.5"", ""-1"", ""0x10"" ) : with self. subTest ( bytes = bytes, parse = parse, port = port ) : netloc = ""www.example.net:"" + port url = ""http://"" + netloc if : netloc = netloc. encode ( ""ascii"" ) url = url. encode ( ""ascii"" ) p = parse ( url ) self. assertEqual ( p. netloc, netloc ) with self. assertRaises ( ValueError ) : p. port",False,bytes,url is not None,0.6866328120231628 2488,"def get_file_sources ( ) : global _file_sources if _file_sources is None : from galaxy. files import ConfiguredFileSources file_sources = None if os. path. exists ( ""file_sources.json"" ) : file_sources_as_dict = None with open ( ""file_sources.json"", ""r"" ) as f : file_sources_as_dict = json. load ( f ) if : file_sources = ConfiguredFileSources. from_dict ( file_sources_as_dict ) if file_sources is None : ConfiguredFileSources. from_dict ( [ ] ) _file_sources = file_sources return _file_sources",False,file_sources_as_dict is not None,file_sources_as_dict,0.6550328135490417 2489,"def search ( a, b, desired ) : if a == b : return a if abs ( b - a ) < 0.005 : ca = count ( a ) cb = count ( b ) dista = abs ( desired - ca ) distb = abs ( desired - cb ) if : return a else : return b m = ( a + b ) / 2.0 cm = count ( m ) if desired < cm : return search ( m, b, desired ) else : return search ( a, m, desired )",False,dista < distb,dista == distb,0.6647661924362183 2490,"def _handleLogError ( self, msg, data, marker, pattern ) : print ( """" ) print ( "" ERROR: %s"" % msg ) if not self. interactive : raise self. failureException ( msg ) p = "" Show: "" ""[L]og [M]arker [P]attern; "" ""[I]gnore, [R]aise, or sys.e[X]it >> "" sys. stdout. write ( p + "" "" ) sys. stdout. flush ( ) while True : i = getchar ( ). upper ( ) if i not in ""MPLIRX"" : continue print ( i. upper ( ) ) if i == ""L"" : for x, line in enumerate ( data ) : if ( x + 1 ) % self. 
console_height == 0 : sys. stdout. write ( ""<-- More -->\r "" ) m = getchar ( ). lower ( ) sys. stdout. write ( "" \r "" ) if m == ""q"" : break print ( line. rstrip ( ) ) elif i == ""M"" : print ( repr ( marker or self. lastmarker ) ) elif i == ""P"" : print ( repr ( pattern ) ) elif i == ""I"" : 0,0.6602107882499695 2491,"def consume_buf ( ) : ty = state [ ""ty"" ] - 1 for i in xrange ( state [ ""buf"" ]. shape [ 1 ] // N ) : tx = x // N + i src = state [ ""buf"" ] [ :, i * N : ( i + 1 ) * N, : ] if : with self. tile_request ( tx, ty, readonly = False ) as dst : mypaintlib. tile_convert_rgba8_to_rgba16 ( src, dst, self. EOTF ) if state [ ""progress"" ] : try : state [ ""progress"" ]. completed ( ty - ty0 ) except Exception : logger. exception ( ""Progress.completed() failed"" ) state [ ""progress"" ] = None",False,"src[:, :, 3].any()",state['tile'],0.6509518623352051 2492,"def using_user_docutils_conf ( confdir : str ) -> Generator [ None, None, None ] : """"""Let docutils know the location of ``docutils.conf`` for Sphinx."""""" try : docutilsconfig = os. environ. get ( ""DOCUTILSCONFIG"", None ) if : os. environ [ ""DOCUTILSCONFIG"" ] = path. join ( path. abspath ( confdir ), ""docutils.conf"" ) yield finally : if docutilsconfig is None : os. environ. pop ( ""DOCUTILSCONFIG"", None ) else : os. environ [ ""DOCUTILSCONFIG"" ] = docutilsconfig",False,confdir,path is not None,0.6801047921180725 2493,"def evaluate_batch_e2e ( args, rag_model, questions ) : with torch. no_grad ( ) : inputs_dict = rag_model. retriever. question_encoder_tokenizer. batch_encode_plus ( questions, return_tensors = ""pt"", padding = True, truncation = True ) input_ids = inputs_dict. input_ids. to ( args. device ) attention_mask = inputs_dict. attention_mask. to ( args. device ) outputs = rag_model. generate ( input_ids, attention_mask = attention_mask, num_beams = args. num_beams, min_length = args. min_length, max_length = args. max_length, early_stopping = False, num_return_sequences = 1, bad_words_ids = [ [ 0, 0 ] ], ) answers = rag_model. retriever. generator_tokenizer. batch_decode ( outputs, skip_special_tokens = True ) if : for q, a in zip ( questions, answers ) : logger. info ( ""Q: {} - A: {}"". format ( q, a ) ) return answers",False,args.print_predictions,len(answers) > 0,0.6567527055740356 2494,"def compute_timer_precision ( timer ) : precision = None points = 0 timeout = timeout_timer ( ) + 1.0 previous = timer ( ) while timeout_timer ( ) < timeout or points < 5 : for _ in XRANGE ( 10 ) : t1 = timer ( ) t2 = timer ( ) dt = t2 - t1 if 0 < dt : break else : dt = t2 - previous if : continue if precision is not None : precision = min ( precision, dt ) else : precision = dt points += 1 previous = timer ( ) return precision",False,dt <= 0.0,0 < dt,0.672704815864563 2495,"def forward_pass ( self, buffers, training_pass = True ) : _h = self. handler W, R, bias, timing = buffers. parameters inputs = buffers. inputs. default outputs = buffers. outputs. default Ha = buffers. internals. Ha flat_inputs = flatten_time_and_features ( inputs ) flat_H = flatten_time ( Ha [ : - 1 ] ) _h. dot_mm ( flat_inputs, W, flat_H, transb = True ) _h. add_mv ( flat_H, bias. reshape ( ( 1, self. size ) ), flat_H ) tmp = _h. zeros ( timing. shape ) cond = _h. zeros ( outputs [ 0 ]. shape ) for t in range ( inputs. shape [ 0 ] ) : _h. dot_add_mm ( outputs [ t - 1 ], R, Ha [ t ], transb = True ) _h. act_func [ self. activation ] ( Ha [ t ], outputs [ t ] ) if : _h. fill ( tmp, t ) _h. modulo_tt ( tmp, timing, tmp ) _h. 
broadcast_t ( tmp. reshape ( ( 1, tmp. shape [ 0 ] ) ), 0, cond ) _h. copy_to_if ( outputs [ t - 1 ], outputs [ t ], cond )",False,t > 0,training_pass,0.6696994304656982 2496,"def _is_static_shape ( self, shape ) : if shape is None or not isinstance ( shape, list ) : return False for dim_value in shape : if not isinstance ( dim_value, int ) : return False if : raise Exception ( ""Negative dimension is illegal: %d"" % dim_value ) return True",False,dim_value < 0,dim_value <= 0,0.663841962814331 2497,"def _update_balancer ( self, params ) : try : return self. connection. request ( ""/api/grid/loadbalancer/edit"", method = ""POST"", params = params ) except Exception as e : if : raise LibcloudLBImmutableError ( ""Balancer is immutable"", GoGridLBDriver ) raise LibcloudError ( value = ""Exception: %s"" % str ( e ), driver = self )",False,'Update already pending' in str(e),self.is_mutable,0.6577302813529968 2498,"def read_raw_data ( self, filename, ordered = False, lower_case = True, delimiter = None, add_eos = True, add_double_eos = False, ) : assert os. path. exists ( filename ), ""%s is not exist. "" % filename data = [ ] with open ( filename, ""r"", encoding = ""utf-8"" ) as f : for line in f : tokens = LMDataset. tokenize ( line = line, delimiter = delimiter, lower_case = lower_case ) if : tokens = ( [ self. vocab. _identifiers_to_tokens [ ""bos_token"" ] ] + tokens + [ self. vocab. _identifiers_to_tokens [ ""bos_token"" ] ] ) elif add_eos : tokens = tokens + [ self. vocab. _identifiers_to_tokens [ ""eos_token"" ] ] data. append ( np. asarray ( self. get_indices ( tokens ) ). astype ( ""int64"" ) ) if ordered : data = np. concatenate ( data ) return data",True,add_double_eos,add_double_eos,0.6560062766075134 2499,"def _EvalCondition ( self, cond, spid ) : b = False UP_cond = cond with tagswitch ( cond ) as case : if case ( condition_e. Shell ) : cond = cast ( condition__Shell, UP_cond ) self. _StrictErrExitList ( cond. commands ) with state. ctx_ErrExit ( self. mutable_opts, False, spid ) : cond_status = self. _ExecuteList ( cond. commands ) b = cond_status == 0 elif case ( condition_e. Oil ) : if : cond = cast ( condition__Oil, UP_cond ) obj = self. expr_ev. EvalExpr ( cond. e ) b = bool ( obj ) return b",False,mylib.PYTHON,case case case,0.6687970161437988 2500,"def sina_download ( url, info_only = False, ** kwargs ) : """"""Downloads Sina videos by URL."""""" if ""news.sina.com.cn/zxt"" in url : sina_zxt ( url, info_only = info_only, ** kwargs ) return vid = match1 ( url, r""vid=(\d+)"" ) if vid is None : video_page = get_content ( url ) vid = hd_vid = match1 ( video_page, r""hd_vid\s*:\s*\'([^\']+)\'"" ) if : vids = match1 ( video_page, r""[^\w]vid\s*:\s*\'([^\']+)\'"" ). split ( ""|"" ) vid = vids [ - 1 ] if vid is None : vid = match1 ( video_page, r'vid:""?(\d+)""?' ) if vid : sina_download_by_vid ( vid, info_only = info_only, ** kwargs ) else : vkey = match1 ( video_page, r'vkey\s*:\s*""([^""]+)""' ) if vkey is None : vid = match1 ( url, r""#(\d+)"" ) sina_download_by_vid ( vid, info_only = info_only, ** kwargs ) return title = match1 ( video_page, r'title\s*:\s*""([^""]+)""' ) sina_download_by_vkey ( vkey, title = title, info_only = info_only, ** kwargs )",False,hd_vid == '0',vid == 0,0.6591084003448486 2501,"def set_merge_cells ( self, mergecells ) : if not mergecells : return if not self. filedata : self. filedata = self. filehandle. read ( ) data = str ( self. filedata ) start = data. find ( "" if start < 0 : return end = data. 
find ( "">"", start ) worksheet = data [ start : end + 1 ] start = data. find ( "" if start < 0 : return end = data. find ( """" ) data = data [ start : end + 13 ] doc = minidom. parseString ( worksheet + data + """" ). firstChild if doc. namespaceURI : mergeCells = doc. getElementsByTagNameNS ( doc. namespaceURI, ""mergeCell"" ) else : mergeCells = doc. getElementsByTagName ( ""mergeCell"" ) for mergeCell in mergeCells : attrs = mergeCell. _attrs if : rangeStr = attrs [ ""ref"" ]. value rng = rangeStr. split ( "":"" ) if len ( rng ) > 1 : for cell in self. _range ( rangeStr ) : self. mergeCells [ cell ] = { } self. mergeCells [ cell ] [ ""copyFrom"" ] = rng [ 0 ]",False,'ref' in attrs.keys(),attrs,0.6532999277114868 2502,"def parseFunctionDeclaration ( self, node, identifierIsOptional = None ) : d = null params = [ ] defaults = [ ] message = None firstRestricted = None self. expectKeyword ( ""function"" ) if identifierIsOptional or not self. match ( ""("" ) : token = self. lookahead d = self. parseVariableIdentifier ( ) if : if isRestrictedWord ( token [ ""value"" ] ) : self. tolerateUnexpectedToken ( token, Messages. StrictFunctionName ) else : if isRestrictedWord ( token [ ""value"" ] ) : firstRestricted = token message = Messages. StrictFunctionName elif isStrictModeReservedWord ( token [ ""value"" ] ) : firstRestricted = token message = Messages. StrictReservedWord tmp = self. parseParams ( firstRestricted ) params = tmp [ ""params"" ] defaults = tmp [ ""defaults"" ] stricted = tmp. get ( ""stricted"" ) firstRestricted = tmp [ ""firstRestricted"" ] if tmp. get ( ""message"" ) : message = tmp [ ""message"" ] previousStrict = self. strict body = self. parseFunctionSourceElements ( ) if self. strict and firstRestricted : self. throwUnexpectedToken ( firstRestricted, message ) if self. strict and stricted : self. tolerateUnexpectedToken ( stricted, message ) self. strict = previousStrict return node. finishFunctionDeclaration ( d, params, defaults, body )",False,self.strict,d == None,0.6624323129653931 2503,"def _parse_service_catalog_auth_v3 ( self, service_catalog ) : entries = [ ] for item in service_catalog : service_type = item [ ""type"" ] service_name = item. get ( ""name"", None ) entry_endpoints = [ ] for endpoint in item [ ""endpoints"" ] : region = endpoint. get ( ""region"", None ) url = endpoint [ ""url"" ] endpoint_type = endpoint [ ""interface"" ] if endpoint_type == ""internal"" : endpoint_type = OpenStackIdentityEndpointType. INTERNAL elif : endpoint_type = OpenStackIdentityEndpointType. EXTERNAL elif endpoint_type == ""admin"" : endpoint_type = OpenStackIdentityEndpointType. ADMIN entry_endpoint = OpenStackServiceCatalogEntryEndpoint ( region = region, url = url, endpoint_type = endpoint_type ) entry_endpoints. append ( entry_endpoint ) entry = OpenStackServiceCatalogEntry ( service_type = service_type, service_name = service_name, endpoints = entry_endpoints, ) entries. append ( entry ) return entries",False,endpoint_type == 'public',endpoint_type == 'external',0.6567357778549194 2504,"def run ( self, bar_dict ) : conf = self. _env. config. base for event in self. _env. event_source. events ( conf. start_date, conf. end_date, conf. frequency ) : if event. event_type == EVENT. TICK : if self. _ensure_before_trading ( event ) : self. _split_and_publish ( event ) elif : if self. _ensure_before_trading ( event ) : bar_dict. update_dt ( event. calendar_dt ) event. bar_dict = bar_dict self. _split_and_publish ( event ) elif event. event_type == EVENT. OPEN_AUCTION : if self. 
_ensure_before_trading ( event ) : bar_dict. update_dt ( event. calendar_dt ) event. bar_dict = bar_dict self. _split_and_publish ( event ) elif event. event_type == EVENT. BEFORE_TRADING : self. _ensure_before_trading ( event ) elif event. event_type == EVENT. AFTER_TRADING : self. _split_and_publish ( event ) else : self. _env. event_bus. publish_event ( event ) self. _split_and_publish ( Event ( EVENT. SETTLEMENT ) )",False,event.event_type == EVENT.BAR,event.event_type == EVENT.SPACE,0.6555917263031006 2505,"def _pipe_relay ( stopped, fd, name, cb, tee, output_writer ) : while True : try : data = os. read ( fd, 4096 ) except OSError : return if len ( data ) == 0 : break if : if data. endswith ( _LAST_WRITE_TOKEN. encode ( ) ) : logger. info ( ""relay done saw last write: %s"", name ) break if tee : os. write ( tee, data ) if output_writer : output_writer. write ( data ) if cb : try : cb ( name, data ) except Exception : logger. exception ( ""problem in pipe relay"" ) cb = None logger. info ( ""relay done done: %s"", name )",False,stopped.isSet(),stopped,0.6573858261108398 2506,"def _get_sources ( self ) : servers = self. config [ ""servers"" ] url = """" for i in servers : params = { ""s"" : i, ""episode_id"" : self. url. split ( ""id="" ) [ - 1 ], } api = helpers. post ( self. _episode_list_url, params = params, referer = self. url ). json ( ) if api. get ( ""status"", False ) : : url = re. search ( iframe_regex, api [ ""value"" ] ). group ( 1 ) if url. startswith ( ""//"" ) : url = ""https:"" + url if url. endswith ( ""mp4upload.com/embed-.html"" ) or url. endswith ( ""yourupload.com/embed/"" ) : url = """" continue break extractor = ""streamx"" extractor_urls = { ""mp4upload.com"" : ""mp4upload"", ""yourupload.com"" : ""yourupload"", } for i in extractor_urls : if i in url : extractor = extractor_urls [ i ] return [ ( extractor, url ) ]",False,"iframe_regex = '' ) return html + """"",False,"isinstance(self.app_config[i], dict)",i in self.app_config,0.6510339975357056 3479,"def compare_detections ( detection1 : dict, detection2 : dict ) -> bool : if len ( detection1 )!= len ( detection2 ) : return False for named_condition in detection1 : if named_condition == ""condition"" : if detection1 [ ""condition"" ]!= detection2 [ ""condition"" ] : return False else : continue if named_condition not in detection2 : return False if len ( detection1 [ named_condition ] )!= len ( detection2 [ named_condition ] ) : return False for condition in detection1 [ named_condition ] : if type ( condition )!= str : return False if : return False condition_value1 = detection1 [ named_condition ] [ condition ] condition_value2 = detection2 [ named_condition ] [ condition ] if condition_value1!= condition_value2 : return False return True",False,condition not in detection2[named_condition],named_condition not in detection1,0.656897783279419 3480,"def move ( self, color ) : global TIMESTAMP, MOVES TIMESTAMP += 1 MOVES += 1 self. board. zobrist. update ( self, color ) self. color = color self. reference = self self. ledges = 0 self. used = True for neighbour in self. neighbours : neighcolor = neighbour. color if neighcolor == EMPTY : self. ledges += 1 else : neighbour_ref = neighbour. find ( update = True ) if neighcolor == color : if : self. ledges += neighbour_ref. ledges neighbour_ref. reference = self self. ledges -= 1 else : neighbour_ref. ledges -= 1 if neighbour_ref. ledges == 0 : neighbour. remove ( neighbour_ref ) self. board. zobrist. 
add ( )",False,neighbour_ref.reference.pos != self.pos,neighby_ref == EMPTY,0.6536405086517334 3481,"def skip_tactics_or_techniques ( self, src_technics, src_tactics ) : tactics = set ( ) technics = set ( ) local_storage_techniques = { item [ ""technique_id"" ] : item for item in self. find_technique ( src_technics ) } for key_id in src_technics : src_tactic = local_storage_techniques. get ( key_id, { } ). get ( ""tactic"" ) if : continue src_tactic = set ( src_tactic ) for item in src_tactics : if item in src_tactic : technics. add ( key_id ) tactics. add ( item ) return sorted ( tactics ), sorted ( technics )",True,not src_tactic,not src_tactic,0.6721090078353882 3482,"def proc_minute ( d ) : if expanded [ 0 ] [ 0 ]!= ""*"" : diff_min = nearest_diff_method ( d. minute, expanded [ 0 ], 60 ) if diff_min is not None and diff_min!= 0 : if : d += relativedelta ( minutes = diff_min, second = 59 ) else : d += relativedelta ( minutes = diff_min, second = 0 ) return True, d return False, d",False,is_prev,"isinstance(diff_min, relativedelta)",0.6632629632949829 3483,"def date_to_format ( value, target_format ) : """"""Convert date to specified format"""""" if target_format == str : if isinstance ( value, datetime. date ) : ret = value. strftime ( ""%d/%m/%y"" ) elif : ret = value. strftime ( ""%d/%m/%y"" ) elif isinstance ( value, datetime. time ) : ret = value. strftime ( ""%H:%M:%S"" ) else : ret = value return ret",False,"isinstance(value, datetime.datetime)","isinstance(value, datetime.date)",0.6477208137512207 3484,def wait_til_ready ( cls ) : while True : now = time. time ( ) next_iteration = now // 1.0 + 1 if : break else : await cls. _clock. run_til ( next_iteration ) await asyncio. sleep ( 1.0 ),False,cls.connector.ready,next_iteration == 0,0.6594887971878052 3485,"def execute ( cls, ctx, op ) : inputs, device_id, xp = as_same_device ( [ ctx [ c. key ] for c in op. inputs ], device = op. device, ret_extra = True ) with device ( device_id ) : kw = { ""casting"" : op. casting } inputs_iter = iter ( inputs ) input = next ( inputs_iter ) if : out1 = next ( inputs_iter ) else : out1 = None if op. out2 is not None : out2 = next ( inputs_iter ) else : out2 = None if op. where is not None : where = kw [ ""where"" ] = next ( inputs_iter ) else : where = None kw [ ""order"" ] = op. order try : args = [ input ] if out1 is not None : args. append ( out1. copy ( ) ) if out2 is not None : args. append ( out2. copy ( ) ) y1, y2 = xp. modf ( * args, ** kw ) except TypeError : if where is None : raise y1, y2 = xp. modf ( input ) y1, y2 = xp. where ( where, y1, out1 ), xp.",True,op.out1 is not None,op.out1 is not None,0.6578171253204346 3486,"def test_stream_index ( self ) : output = av. open ( self. sandboxed ( ""output.mov"" ), ""w"" ) vstream = output. add_stream ( ""mpeg4"", 24 ) vstream. pix_fmt = ""yuv420p"" vstream. width = 320 vstream. height = 240 astream = output. add_stream ( ""mp2"", 48000 ) astream. channels = 2 astream. format = ""s16"" self. assertEqual ( vstream. index, 0 ) self. assertEqual ( astream. index, 1 ) vframe = VideoFrame ( 320, 240, ""yuv420p"" ) vpacket = vstream. encode ( vframe ) [ 0 ] self. assertIs ( vpacket. stream, vstream ) self. assertEqual ( vpacket. stream_index, 0 ) for i in range ( 10 ) : aframe = AudioFrame ( ""s16"", ""stereo"", samples = astream. frame_size ) aframe. rate = 48000 apackets = astream. encode ( aframe ) if : apacket = apackets [ 0 ] break self. assertIs ( apacket. stream, astream ) self. assertEqual ( apacket. 
stream_index, 1 )",False,apackets,len(apackets) > 0,0.6742250323295593 3487,"def wait_for_initial_conf ( self, timeout = 1.0 ) : logger. info ( ""Waiting for initial configuration"" ) cur_timeout = timeout while not self. new_conf and not self. interrupted : elapsed, _, _ = self. handleRequests ( cur_timeout ) if : cur_timeout -= elapsed if cur_timeout > 0 : continue cur_timeout = timeout sys. stdout. write ( ""."" ) sys. stdout. flush ( )",False,elapsed,elapsed > 0,0.6952182650566101 3488,"def MultiReadClientMetadata ( self, client_ids ) : """"""Reads ClientMetadata records for a list of clients."""""" res = { } for client_id in client_ids : md = self. metadatas. get ( client_id, None ) if : continue res [ client_id ] = rdf_objects. ClientMetadata ( certificate = md. get ( ""certificate"" ), fleetspeak_enabled = md. get ( ""fleetspeak_enabled"" ), first_seen = md. get ( ""first_seen"" ), ping = md. get ( ""ping"" ), clock = md. get ( ""clock"" ), ip = md. get ( ""ip"" ), last_foreman_time = md. get ( ""last_foreman_time"" ), last_crash_timestamp = md. get ( ""last_crash_timestamp"" ), startup_info_timestamp = md. get ( ""startup_info_timestamp"" ), ) return res",True,md is None,md is None,0.6636156439781189 3489,"def get_celery_request_tags ( ** kwargs ) : request = kwargs. get ( ""request"" ) sender_hostname = ""unknown"" sender = kwargs. get ( ""sender"" ) if sender : try : sender_hostname = sender. hostname except AttributeError : sender_hostname = vars ( sender. request ). get ( ""origin"", ""unknown"" ) if request and not isinstance ( request, Context ) : task_name = request. name task_id = request. id receiver_hostname = request. hostname else : try : task_name = sender. name except AttributeError : task_name = kwargs. pop ( ""name"", """" ) try : task_id = sender. request. id except AttributeError : task_id = kwargs. pop ( ""id"", """" ) try : receiver_hostname = sender. request. hostname except AttributeError : receiver_hostname = """" tags = { ""task_name"" : task_name, ""task_id"" : task_id, ""sender_hostname"" : sender_hostname, ""receiver_hostname"" : receiver_hostname, } tags [ ""expired"" ] = kwargs. get ( ""expired"", False ) exception = kwargs. get ( ""exception"" ) if not exception : exception = kwargs. get ( ""exc"" ) if exception : tags [ ""error"" ] = repr ( exception ) if",False,"isinstance(exception, SoftTimeLimitExceeded)",len(tags) > 0,0.6621091961860657 3490,"def __lt__ ( self, other ) : try : if self == other : return False for i in range ( len ( self. versions ) ) : if : return True if self. versions [ i ] > other. versions [ i ] : return False if len ( self. versions ) < len ( other. versions ) : return True if len ( self. versions ) > len ( other. versions ) : return False if len ( other. numpart ) > 0 and len ( self. numpart ) == 0 : return False if len ( self. numpart ) > 0 and len ( other. numpart ) == 0 : return True return self. rest < other. 
rest except : return False",False,self.versions[i] < other.versions[i],self.versions[i] > other.versions[i],0.6519798636436462 3491,"def select ( self, tester, do_request, callback = None, select_params = None, byte_range = None ) : sql = ""select * from ossobject limit 10"" resp_content = b""a,b,c,d,e,f,,n,g,l,o,p"" if select_params is not None and ""Json_Type"" in select_params : if : resp_content = b'{contacts:[{""firstName"":""John"", ""lastName"":""Smith""}]}' else : resp_content = b'{""firstName"":""John"", ""lastName"":""Smith""}' output_raw = False if ( select_params is not None and ""OutputRawData"" in select_params and select_params [ ""OutputRawData"" ] ) : output_raw = True req, resp = make_select_object ( sql, resp_content, select_params, output_raw ) req_info = mock_response ( do_request, resp ) result = bucket ( ). select_object ( ""select-test.txt"", sql, callback, select_params, byte_range ) tester. assertEqual ( result. status, 206 ) tester. assertRequest ( req_info, req ) if result. status // 100 == 2 : content = result. read ( ) tester. assertEqual ( content, resp_content )",False,select_params['Json_Type'] == 'DOCUMENT',contacts,0.6502504348754883 3492,"def PrintFooter ( self ) : if self. draw == False : return footer_pos = ( self. parent. page_height * self. pheight - self. pfooter_margin + self. vertical_offset ) for val in self. parent. footer : self. SetPrintFont ( val [ ""Font"" ] ) footer_indent = val [ ""Indent"" ] * self. pwidth text = val [ ""Text"" ] ftype = val [ ""Type"" ] if : addtext = ""Page "" + str ( self. page ) + "" of "" + str ( self. total_pages ) elif ftype == ""Page"" : addtext = ""Page "" + str ( self. page ) elif ftype == ""Num"" : addtext = str ( self. page ) elif ftype == ""Date"" : addtext = self. GetDate ( ) elif ftype == ""Date & Time"" : addtext = self. GetDateTime ( ) else : addtext = """" self. OutTextPageWidth ( text + addtext, footer_pos, val [ ""Align"" ], footer_indent, True )",False,ftype == 'Pageof',ftype == 'Num',0.6611014604568481 3493,"def test_merge_documents_existing ( self, api_key, endpoint, index_name, ** kwargs ) : client = SearchClient ( endpoint, index_name, AzureKeyCredential ( api_key ) ) async with client : results = await client. merge_documents ( [ { ""hotelId"" : ""3"", ""rating"" : 1 }, { ""hotelId"" : ""4"", ""rating"" : 2 } ] ) assert len ( results ) == 2 assert set ( x. status_code for x in results ) == { 200 } if : time. sleep ( TIME_TO_SLEEP ) assert await client. get_document_count ( ) == 10 result = await client. get_document ( key = ""3"" ) assert result [ ""rating"" ] == 1 result = await client. get_document ( key = ""4"" ) assert result [ ""rating"" ] == 2",False,self.is_live,len(results) == 3,0.6547102928161621 3494,"def _getdescriptions ( self, group_pattern, return_all ) : line_pat = re. compile ( ""^(?P[^ \t]+)[ \t]+(.*)$"" ) resp, lines = self. _longcmdstring ( ""LIST NEWSGROUPS "" + group_pattern ) if not resp. startswith ( ""215"" ) : resp, lines = self. _longcmdstring ( ""XGTITLE "" + group_pattern ) groups = { } for raw_line in lines : match = line_pat. search ( raw_line. strip ( ) ) if : name, desc = match. group ( 1, 2 ) if not return_all : return desc groups [ name ] = desc if return_all : return resp, groups else : return """"",True,match,match,0.6726937294006348 3495,"def isValidDateString ( config_param_name, value, valid_value ) : try : if value == ""DD-MM-YYYY"" : return value day, month, year = value. 
split ( ""-"" ) if int ( day ) < 1 or int ( day ) > 31 : raise DateStringValueError ( config_param_name, value ) if int ( month ) < 1 or int ( month ) > 12 : raise DateStringValueError ( config_param_name, value ) if : raise DateStringValueError ( config_param_name, value ) return value except Exception : raise DateStringValueError ( config_param_name, value )",False,int(year) < 1900 or int(year) > 2013,valid_value and value != 'NULL',0.6595080494880676 3496,"def packet ( self, pktid, packet, msg, * args, ** kwargs ) : """"""Write a control packet debug log message"""""" if self. _debug_level >= 3 : kwargs. setdefault ( ""extra"", { } ) if : kwargs [ ""extra"" ]. update ( context = ""pktid=%d"" % pktid ) kwargs [ ""extra"" ]. update ( packet = packet ) self. debug ( msg, * args, ** kwargs )",False,pktid is not None,self._debug_level >= 6,0.6709017157554626 3497,"def _close_cloth ( self, ctx, parent ) : rootstack = ctx. protocol. rootstack close_until = rootstack. back cureltstack = ctx. protocol. eltstack [ close_until ] curctxstack = ctx. protocol. ctxstack [ close_until ] for elt, elt_ctx in reversed ( tuple ( zip ( cureltstack, curctxstack ) ) ) : if elt_ctx is not None : self. event_manager. fire_event ( ( ""before_exit"", elt ), ctx, parent ) elt_ctx. __exit__ ( None, None, None ) logger_c. debug ( ""exit %s close"", elt. tag ) if : parent. write ( elt. tail ) for sibl in elt. itersiblings ( preceding = False ) : logger_c. debug ( ""write %s nextsibl"", sibl. tag ) parent. write ( sibl ) if sibl. tail is not None : parent. write ( sibl. tail ) if elt is close_until : logger_c. debug ( ""closed until %r, breaking out"", close_until ) break del ctx. protocol. eltstack [ close_until ] del ctx. protocol. ctxstack [ close_until ] if len ( rootstack ) > 0 : rootstack. pop ( )",False,elt.tail is not None,parent is not None,0.6612796783447266 3498,"def _get_before_insertion_node ( self ) : if self. _nodes_stack. is_empty ( ) : return None line = self. _nodes_stack. parsed_until_line + 1 node = self. _new_module. get_last_leaf ( ) while True : parent = node. parent if : assert node. end_pos [ 0 ] <= line assert node. end_pos [ 1 ] == 0 or ""\n"" in self. _prefix return node node = parent",False,"parent.type in ('suite', 'file_input')",parent.end_pos is not None and parent.end_pos[1] == line,0.6527286767959595 3499,"def _visit ( self, expr ) : if is_scalar_reduction ( expr ) and not has_multiple_bases ( expr ) : key = self. _key ( expr ) if key not in self. memo : agg_expr, name = reduction_to_aggregation ( expr ) self. memo [ key ] = agg_expr, name self. tables. append ( agg_expr ) else : agg_expr, name = self. memo [ key ] return agg_expr [ name ] elif not isinstance ( expr, ir. Expr ) : return expr node = expr. op ( ) subbed_args = [ ] for arg in node. args : if : subbed_arg = [ self. _visit ( x ) for x in arg ] else : subbed_arg = self. _visit ( arg ) subbed_args. append ( subbed_arg ) subbed_node = type ( node ) ( * subbed_args ) if isinstance ( expr, ir. ValueExpr ) : result = expr. _factory ( subbed_node, name = expr. _name ) else : result = expr. _factory ( subbed_node ) return result",False,"isinstance(arg, (tuple, list))","isinstance(arg, ir.SubbedArgs)",0.6564469933509827 3500,"def format_outer_frames ( context = 5, stack_start = None, stack_end = None, ignore_ipython = True ) : LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 records = inspect. getouterframes ( inspect. 
currentframe ( ) ) output = list ( ) for i, ( frame, filename, line_no, func_name, lines, index ) in enumerate ( records ) : better_fn = frame. f_globals. get ( ""__file__"", None ) if : filename = better_fn if filename. endswith ( "".pyc"" ) : filename = filename [ : - 4 ] + "".py"" if ignore_ipython : if os. path. basename ( filename ) in ( ""iplib.py"", ""py3compat.py"", ) and func_name in ( ""execfile"", ""safe_execfile"", ""runcode"" ) : break maybe_start = line_no - 1 - context // 2 start = max ( maybe_start, 0 ) end = start + context lines = linecache. getlines ( filename ) [ start : end ] buf = list ( records [ i ] ) buf [ LNUM_POS ] = line_no buf [ INDEX_POS ] = line_no - 1 - start buf [ LINES_POS ] = lines ",False,"isinstance(better_fn, str)",better_fn,0.6525474786758423 3501,"def copyTokenToRepeater ( s, l, t ) : if t : if : rep << t [ 0 ] else : tflat = _flatten ( t. asList ( ) ) rep << And ( Literal ( tt ) for tt in tflat ) else : rep << Empty ( )",True,len(t) == 1,len(t) == 1,0.6657356023788452 3502,"def _checkout ( self, local_dir ) : logger. warning ( ""The coregen provider is deprecated and will be removed. Consider using a generator for this instead"" ) script_file = self. config. get ( ""script_file"" ) project_file = self. config. get ( ""project_file"" ) extra_files = self. config. get ( ""extra_files"" ) logger. info ( ""Using Coregen to generate project "" + project_file ) if not os. path. isdir ( local_dir ) : os. makedirs ( local_dir ) src_files = [ script_file, project_file ] if extra_files : src_files += extra_files. split ( ) for f in src_files : f_src = os. path. join ( self. core_root, f ) f_dst = os. path. join ( local_dir, f ) if : d_dst = os. path. dirname ( f_dst ) if not os. path. exists ( d_dst ) : os. makedirs ( d_dst ) shutil. copyfile ( f_src, f_dst ) else : logger. error ( ""Cannot find file %s"" % f_src ) args = [ ""-r"", ""-b"", script_file, ""-p"", project_file ] Launcher ( ""coregen"", args, cwd = local_dir ). run ( )",False,os.path.exists(f_src),os.path.exists(f_dst),0.6492385268211365 3503,"def backward_impl ( self, inputs, outputs, prop_down, accum ) : pad_width = self. forward_func. info. args [ ""pad_width"" ] mode = self. forward_func. info. args [ ""mode"" ] constant_value = self. forward_func. info. args [ ""constant_value"" ] x0 = inputs [ 0 ]. data dy = inputs [ 1 ]. data dx0 = outputs [ 0 ]. data g_x0 = inputs [ 0 ]. grad g_dy = inputs [ 1 ]. grad g_dx0 = outputs [ 0 ]. grad if prop_down [ 1 ] : g_dy_ = F. pad ( g_dx0, pad_width, mode, constant_value ) if : g_dy += g_dy_ else : g_dy. copy_from ( g_dy_ )",False,accum[1],prop_down[0],0.6616615056991577 3504,"def try_open_completions_event ( self, event = None ) : ""(./) Open completion list after pause with no movement."" lastchar = self. text. get ( ""insert-1c"" ) if lastchar in TRIGGERS : args = TRY_A if lastchar == ""."" else TRY_F self. _delayed_completion_index = self. text. index ( ""insert"" ) if : self. text. after_cancel ( self. _delayed_completion_id ) self. _delayed_completion_id = self. text. after ( self. popupwait, self. _delayed_open_completions, args )",False,self._delayed_completion_id is not None,self._delayed_completion_index >= 0,0.6517032384872437 3505,"def log_status_change_thread ( log_queue, request_iterator ) : std_handler = StdStreamHandler ( log_queue ) current_handler = None root_logger = logging. getLogger ( ""ray"" ) default_level = root_logger. getEffectiveLevel ( ) try : for req in request_iterator : if : root_logger. 
setLevel ( default_level ) root_logger. removeHandler ( current_handler ) std_handler. unregister_global ( ) if not req. enabled : current_handler = None continue current_handler = LogstreamHandler ( log_queue, req. loglevel ) std_handler. register_global ( ) root_logger. addHandler ( current_handler ) root_logger. setLevel ( req. loglevel ) except grpc. RpcError as e : logger. debug ( f""closing log thread "" f""grpc error reading request_iterator: {e}"" ) finally : if : root_logger. setLevel ( default_level ) root_logger. removeHandler ( current_handler ) std_handler. unregister_global ( ) log_queue. put ( None )",False,current_handler is not None,current_handler,0.6581913232803345 3506,"def module_list ( target, fast ) : """"""Find the list of modules to be compiled"""""" modules = [ ] native = native_modules ( target ) basedir = os. path. join ( ouroboros_repo_folder ( ), ""ouroboros"" ) for name in os. listdir ( basedir ) : module_name, ext = os. path. splitext ( name ) if ext == "".py"" or ext == """" and os. path. isdir ( os. path. join ( basedir, name ) ) : if : if not ( fast and module_name in KNOWN_PROBLEM_MODULES ) : modules. append ( module_name ) return set ( modules )",False,module_name not in IGNORE_MODULES and module_name not in native,native,0.6537770628929138 3507,"def _set_suffix_to_test_databases ( suffix ) : from django. conf import settings for db_settings in settings. DATABASES. values ( ) : test_name = db_settings. get ( ""TEST"", { } ). get ( ""NAME"" ) if not test_name : if db_settings [ ""ENGINE"" ] == ""django.db.backends.sqlite3"" : continue test_name = ""test_{}"". format ( db_settings [ ""NAME"" ] ) if : continue db_settings. setdefault ( ""TEST"", { } ) db_settings [ ""TEST"" ] [ ""NAME"" ] = ""{}_{}"". format ( test_name, suffix )",False,test_name == ':memory:',test_name,0.6544523239135742 3508,"def read_line ( self, line ) : """"""Read a new line"""""" if self. ignore : return if ( not self. is_quoted ( ) and self. comment is not None and line. startswith ( self. comment ) ) : return self. triple_start = - 1 for i, char in enumerate ( line ) : if char not in [ '""', ""'"" ] : continue if line [ i - 1 : i ] == ""\\"" : continue if : self. single = None continue if self. single is not None : continue if not self. python : continue if line [ i - 2 : i + 1 ] == 3 * char and i >= self. triple_start + 3 : if self. triple == char : self. triple = None self. triple_start = i continue if self. triple is not None : continue self. triple = char self. triple_start = i continue if self. triple is not None : continue self. single = char if self. python : if job : if : try : if [ hda. dependent_jobs for hda in [ jtod. dataset for jtod in job. output_datasets ] if hda. dependent_jobs ] : return True elif job. output_dataset_collection_instances : return ""job_produced_collection_elements"" except Exception as exception : log. error ( str ( exception ) ) return False",False,job.state == job.states.ERROR,self.has_job_dataset_collection_instances,0.6555456519126892 3510,"def leading_whitespace ( self, inputstring ) : """"""Get leading whitespace."""""" leading_ws = [ ] for i, c in enumerate ( inputstring ) : if c in legal_indent_chars : leading_ws. append ( c ) else : break if self. indchar is None : self. indchar = c elif : self. strict_err_or_warn ( ""found mixing of tabs and spaces"", inputstring, i ) return """". 
join ( leading_ws )",False,c != self.indchar,c in legal_indent_chars,0.657207727432251 3511,"def process_static_data ( cls, moves ) : ret = { } by_type = { } by_name = { } fast = cls is FastAttacks for attack in moves : attack = Attack ( attack ) if fast else ChargedAttack ( attack ) ret [ attack. id ] = attack by_name [ attack. name ] = attack attack_type = str ( attack. type ) if : by_type [ attack_type ] = [ ] by_type [ attack_type ]. append ( attack ) for t in by_type. iterkeys ( ) : attacks = sorted ( by_type [ t ], key = lambda m : m. dps, reverse = True ) min_dps = attacks [ - 1 ]. dps max_dps = attacks [ 0 ]. dps - min_dps if max_dps > 0.0 : for attack in attacks : attack. rate_in_type = ( attack. dps - min_dps ) / max_dps by_type [ t ] = attacks cls. BY_NAME = by_name cls. BY_TYPE = by_type cls. BY_DPS = sorted ( ret. values ( ), key = lambda m : m. dps, reverse = True ) return ret",True,attack_type not in by_type,attack_type not in by_type,0.6561830043792725 3512,"def test_record_quest_from_commands ( play_the_game = False ) : M = GameMaker ( ) commands = [ ""go east"", ""insert ball into chest"" ] R1 = M. new_room ( ""bedroom"" ) R2 = M. new_room ( ""kitchen"" ) M. set_player ( R1 ) path = M. connect ( R1. east, R2. west ) path. door = M. new ( type = ""d"", name = ""wooden door"" ) path. door. add_property ( ""open"" ) ball = M. new ( type = ""o"", name = ""ball"" ) M. inventory. add ( ball ) chest = M. new ( type = ""c"", name = ""chest"" ) chest. add_property ( ""open"" ) R2. add ( chest ) M. set_quest_from_commands ( commands ) game = M. build ( ) with make_temp_directory ( prefix = ""test_record_quest_from_commands"" ) as tmpdir : game_file = _compile_game ( game, folder = tmpdir ) if : textworld. play ( game_file ) else : agent = textworld. agents. WalkthroughAgent ( commands ) textworld. play ( game_file, agent = agent, silent = True )",True,play_the_game,play_the_game,0.6547797918319702 3513,"def _test_markets_exchange ( self, exchange, attempts = 0 ) : assets = None try : exchange. init ( ) if not exchange. markets : raise ValueError ( ""no markets found"" ) if not exchange. assets : raise ValueError ( ""no assets derived from markets"" ) assets = exchange. assets except ExchangeRequestError as e : sleep ( 5 ) if : handle_exchange_error ( exchange, e ) else : print ( ""re-trying an exchange request {} {}"". format ( exchange. name, attempts ) ) self. _test_markets_exchange ( exchange, attempts + 1 ) except Exception as e : handle_exchange_error ( exchange, e ) return assets",False,attempts > 5,attempts == 0,0.6684436798095703 3514,"def _parse_optical_transition ( elem ) : for va in elem. findall ( ""varray"" ) : if : oscillator_strength = np. array ( _parse_varray ( va ) ) [ 0 :, ] probability_transition = np. array ( _parse_varray ( va ) ) [ 0 :, 1 ] return oscillator_strength, probability_transition",False,va.attrib.get('name') == 'opticaltransitions',len(va) > 0,0.6501792669296265 3515,"def close ( self ) : if self. _closed : return self. _closed = True for proto in self. _pipes. values ( ) : if proto is None : continue proto. pipe. close ( ) if ( self. _proc is not None and self. _returncode is None and self. _proc. poll ( ) is None ) : if : logger. warning ( ""Close running child process: kill %r"", self ) try : self. _proc. kill ( ) except ProcessLookupError : pass",False,self._loop.get_debug(),self._proc is not None,0.6547896862030029 3516,"def _toplevelTryFunc ( func, * args, status = status, ** kwargs ) : with ThreadProfiler ( threading. 
current_thread ( ) ) as prof : t = threading. current_thread ( ) t. name = func. __name__ try : t. status = func ( * args, ** kwargs ) except EscapeException as e : t. status = ""aborted by user"" if status : status ( ""%s aborted"" % t. name, priority = 2 ) except Exception as e : t. exception = e t. status = ""exception"" vd. exceptionCaught ( e ) if : t. sheet. currentThreads. remove ( t )",True,t.sheet,t.sheet,0.6659993529319763 3517,"def _errorFields ( expression, expressionFields, expressionFieldsList, num = None, emptyFields = None, suppressOutput = False, ) : values = [ ] origExpr = None threadData = getCurrentThreadData ( ) for field in expressionFieldsList : output = None if field. startswith ( ""ROWNUM "" ) : continue if isinstance ( num, int ) : origExpr = expression expression = agent. limitQuery ( num, expression, field, expressionFieldsList [ 0 ] ) if ""ROWNUM"" in expressionFieldsList : expressionReplaced = expression else : expressionReplaced = expression. replace ( expressionFields, field, 1 ) output = ( NULL if emptyFields and field in emptyFields else _oneShotErrorUse ( expressionReplaced, field ) ) if : return None if not suppressOutput : if kb. fileReadMode and output and output. strip ( ) : print elif ( output is not None and not ( threadData. resumed and kb. suppressResumeInfo ) and not ( emptyFields and field in emptyFields ) ) : dataToStdout ( <",False,not kb.threadContinue,not origExpr,0.6656907796859741 3518,"def p_load_imm_off ( opval, va, psize = 4 ) : pubwl = ( opval >> 20 ) & 0x1F Rn = ( opval >> 16 ) & 0xF Rd = ( opval >> 12 ) & 0xF imm = opval & 0xFFF mnem, opcode = ldr_mnem [ pubwl & 1 ] iflags = 0 tsize = 4 if pubwl & 4 : iflags = IF_B tsize = 1 if ( pubwl & 0x12 ) == 2 : : if Rd == REG_PC : iflags |= envi. IF_BRANCH if ( opval & 0xFFF0FFF ) == 0x52D0004 : mnem = ""push"" olist = ( ArmRegOper ( Rd, va = va ), ) elif ( opval & 0xFFF0FFF ) == 0x49D0004 : mnem = ""pop"" olist = ( ArmRegOper ( Rd, va = va ), ) else : olist = ( ArmRegOper ( Rd, va = va ), ArmImmOffsetOper ( Rn, imm, va, pubwl = pubwl, psize = psize, tsize = tsize ), ) return ( opcode, mnem, olist, iflags, 0 )",False,iflags |= IF_T,opval & 24,0.6639662981033325 3519,"def __init__ ( self ) : super ( MultiqcModule, self ). __init__ ( name = ""Sickle"", anchor = ""sickle"", href = ""https://github.com/najoshi/sickle"", info = ""A windowed adaptive trimming tool for FASTQ files using quality."", ) self. sickle_data = dict ( ) for f in self. find_log_files ( ""sickle"" ) : parsed_data = self. parse_logs ( f [ ""f"" ] ) if len ( parsed_data ) : if : log. debug ( ""Duplicate sample name found! Overwriting: {}"". format ( f [ ""s_name"" ] ) ) self. sickle_data [ f [ ""s_name"" ] ] = parsed_data self. add_data_source ( f ) self. sickle_data = self. ignore_samples ( self. sickle_data ) if len ( self. sickle_data ) == 0 : raise UserWarning self. write_data_file ( self. sickle_data, ""multiqc_sickle"" ) self. sickle_general_stats_table ( ) self. read_count_plot ( )",False,f['s_name'] in self.sickle_data,self.has_sample(f[s_name]),0.6507534384727478 3520,"def get_tests ( args, pkg_path ) : """"""Extract test cases given a recipe's meta.yaml file."""""" recipes_dir = args. recipes_dir tests = [ ] input_dir = os. path. dirname ( os. path. join ( recipes_dir, pkg_path ) ) recipe_meta = MetaData ( input_dir ) tests_commands = recipe_meta. get_value ( ""test/commands"" ) tests_imports = recipe_meta. get_value ( ""test/imports"" ) requirements = recipe_meta. 
get_value ( ""requirements/run"" ) if tests_imports or tests_commands : if tests_commands : tests. append ( "" && "". join ( tests_commands ) ) if : tests. append ( "" && "". join ( 'python -c ""import %s""' % imp for imp in tests_imports ) ) elif tests_imports and ( ""perl"" in requirements or ""perl-threaded"" in requirements ) : tests. append ( "" && "". join ( '''perl -e ""use %s;""''' % imp for imp in tests_imports ) ) tests = "" && "". join ( tests ) tests = tests. replace ( ""$R "", ""Rscript "" ) return tests",False,tests_imports and 'python' in requirements,requirements,0.6565852165222168 3521,"def _check_dsl_runner ( self ) -> None : """"""Checks if runner in dsl is Kubeflow V2 runner."""""" with open ( self. flags_dict [ labels. PIPELINE_DSL_PATH ], ""r"" ) as f : dsl_contents = f. read ( ) if : raise RuntimeError ( ""KubeflowV2DagRunner not found in dsl."" )",False,'KubeflowV2DagRunner' not in dsl_contents,not dsl_contents,0.665368914604187 3522,"def u ( v ) : if PYTHON3 : if isinstance ( v, bytes ) : return v. decode ( ""utf-8"" ) elif : return v else : raise Exception ( ""Unknown input type"" ) else : return v",True,"isinstance(v, str)","isinstance(v, str)",0.6554979085922241 3523,"def _validate_reports ( value, * args, ** kwargs ) : from osf. models import OSFUser for key, val in value. items ( ) : if not OSFUser. load ( key ) : raise ValidationValueError ( ""Keys must be user IDs"" ) if : raise ValidationTypeError ( ""Values must be dictionaries"" ) if ( ""category"" not in val or ""text"" not in val or ""date"" not in val or ""retracted"" not in val ) : raise ValidationValueError ( ( ""Values must include `date`, `category`, "", ""`text`, `retracted` keys"" ) )",True,"not isinstance(val, dict)","not isinstance(val, dict)",0.6552532911300659 3524,"def _redirect ( self, chan ) : while chan. active : rqst, _, _ = select ( [ self. request, chan ], [ ], [ ], 5 ) if self. request in rqst : data = self. request. recv ( 1024 ) if not data : self. logger. log ( TRACE_LEVEL, "">>> OUT {0} recv empty data >>>"". format ( self. info ) ) break self. logger. log ( TRACE_LEVEL, "">>> OUT {0} send to {1}: {2} >>>"". format ( self. info, self. remote_address, hexlify ( data ) ), ) chan. sendall ( data ) if chan in rqst : if : self. logger. log ( TRACE_LEVEL, ""<<< IN {0} recv is not ready <<<"". format ( self. info ) ) break data = chan. recv ( 1024 ) self. logger. log ( TRACE_LEVEL, ""<<< IN {0} recv: {1} <<<"". format ( self. info, hexlify ( data ) ) ) self. request.",False,not chan.recv_ready(),not self.logger,0.660319447517395 3525,"def moveWithinLineHelper ( self, event, spot, extend ) : w = self. editWidget ( event ) if not w : return spots = ( ""end-line"", ""finish-line"", ""start-line"" ) if hasattr ( w, ""leoMoveCursorHelper"" ) and spot not in spots : extend = extend or self. extendMode w. leoMoveCursorHelper ( kind = spot, extend = extend ) else : s = w. getAllText ( ) ins = w. getInsertPoint ( ) i, j = g. getLine ( s, ins ) line = s [ i : j ] if spot == ""begin-line"" : self. moveToHelper ( event, i, extend = extend ) elif spot == ""end-line"" : if g. match ( s, j - 1, ""\n"" ) : j -= 1 self. moveToHelper ( event, j, extend = extend ) elif : if not line. isspace ( ) : if g. match ( s, j - 1, ""\n"" ) : j -= 1 while j >= 0 and s [ j ]. isspace ( ) : j -= 1 self. moveToHelper ( event, j, extend = extend ) elif spot == ""start-line"" : if not line. 
isspace ( ) : while i < j and",False,spot == 'finish-line',spot == 'start-line',0.66028892993927 3526,"def _sync_get ( self, identifier, * args, ** kw ) : self. _mutex. acquire ( ) try : try : if : return self. _values [ identifier ] else : self. _values [ identifier ] = value = self. creator ( identifier, * args, ** kw ) return value except KeyError : self. _values [ identifier ] = value = self. creator ( identifier, * args, ** kw ) return value finally : self. _mutex. release ( )",False,identifier in self._values,self.has_get(identifier),0.6636478900909424 3527,"def next_part_utts ( self, part_size ) : """"""Return next part of utts."""""" if self. select_by_spk : for index in range ( part_size ) : try : spk = self. spk_keys. pop ( ) except IndexError : break spk_utts = self. meta. spks [ spk ]. utts if : continue random_idx = random. randint ( 0, len ( spk_utts ) - 1 ) if random_idx < 0 : continue utt_key = spk_utts [ random_idx ] utt_meta = self. meta. utts [ utt_key ] yield ( utt_key, utt_meta ) else : for index in range ( part_size ) : try : utt_key = self. utt_keys. pop ( ) except IndexError : break utt_meta = self. meta. utts [ utt_key ] yield ( utt_key, utt_meta )",False,not spk_utts,utt_utts,0.6674712300300598 3528,"def update_encodings ( self, force_utf8 = False ) : if self. ui. write_id3v23. isChecked ( ) : if self. ui. enc_utf8. isChecked ( ) : self. ui. enc_utf16. setChecked ( True ) self. ui. enc_utf8. setEnabled ( False ) self. ui. label_id3v23_join_with. setEnabled ( True ) self. ui. id3v23_join_with. setEnabled ( True ) else : self. ui. enc_utf8. setEnabled ( True ) if : self. ui. enc_utf8. setChecked ( True ) self. ui. label_id3v23_join_with. setEnabled ( False ) self. ui. id3v23_join_with. setEnabled ( False )",True,force_utf8,force_utf8,0.6653125286102295 3529,"def build_json_schema_object ( cls, parent_builder = None ) : builder = builders. ObjectBuilder ( cls, parent_builder ) if builder. count_type ( builder. type ) > 1 : return builder for _, name, field in cls. iterate_with_name ( ) : if isinstance ( field, fields. EmbeddedField ) : builder. add_field ( name, field, _parse_embedded ( field, builder ) ) elif : builder. add_field ( name, field, _parse_list ( field, builder ) ) else : builder. add_field ( name, field, _create_primitive_field_schema ( field ) ) return builder",True,"isinstance(field, fields.ListField)","isinstance(field, fields.ListField)",0.6506459712982178 3530,"def main ( ) : app = QApplication ( sys. argv ) app. setOrganizationName ( ""ReText project"" ) app. setApplicationName ( ""ReText"" ) if hasattr ( app, ""setApplicationDisplayName"" ) : app. setApplicationDisplayName ( ""ReText"" ) RtTranslator = QTranslator ( ) for path in datadirs : if : break QtTranslator = QTranslator ( ) QtTranslator. load ( ""qt_"" + QLocale. system ( ). name ( ), QLibraryInfo. location ( QLibraryInfo. TranslationsPath ), ) app. installTranslator ( RtTranslator ) app. installTranslator ( QtTranslator ) if globalSettings. appStyleSheet : sheetfile = QFile ( globalSettings. appStyleSheet ) sheetfile. open ( QIODevice. ReadOnly ) app. setStyleSheet ( QTextStream ( sheetfile ). readAll ( ) ) sheetfile. close ( ) webSettings = QWebSettings. globalSettings ( ) webSettings. setFontFamily ( QWebSettings. FixedFont, ""monospace"" ) window = ReTextWindow ( ) window. show ( ) fileNames = [ QFileInfo ( arg ). canonicalFilePath ( ) for arg in sys. argv [ 1 : ] ] for fileName in fileNames : if QFile. exists ( fileName ) : window. openFileWrapper ( fileName ) signal. 
signal ( signal. SIGINT, lambda sig, frame : window. close ( ) ) sys. exit ( app. exec_ ( ) )",False,"RtTranslator.load('retext_' + QLocale.system().name(), path + '/locale')",QFile.exists(path),0.6528704166412354 3531,"def __call__ ( self, name = None ) : with self. _cache_lock : rv = self. __instances. get ( name, None ) if rv is None : rv = self. nocache ( name = name ) if : self. __instances [ name ] = rv return rv",False,"not (name is None or isinstance(rv, tzlocal_classes))",rv,0.652646541595459 3532,"def get_comment_thumbnail ( self, comment ) : try : x = int ( comment. extra_data [ ""x"" ] ) y = int ( comment. extra_data [ ""y"" ] ) width = int ( comment. extra_data [ ""width"" ] ) height = int ( comment. extra_data [ ""height"" ] ) except ( KeyError, ValueError ) : return None image_url = crop_image ( comment. file_attachment. file, x, y, width, height ) if not urlparse ( image_url ). netloc : image_url = build_server_url ( image_url ) image_html = ( '<img src=""%s"" width=""%s"" height=""%s"" ' 'alt=""%s"" />' % ( image_url, width, height, escape ( comment. text ) ) ) if comment. diff_against_file_attachment_id : diff_against_image_url = crop_image ( comment. diff_against_file_attachment. file, x, y, width, height ) if : diff_against_image_url = build_server_url ( diff_against_image_url ) diff_against_image_html = ( '<img src=""%s"" width=""%s"" ' 'height=""%s"" alt=""%s"" />' % ( diff_against_image_url, width, height, escape ( comment. text ) ) ) return '
    %s%s
    ' % ( """"""Get the downloaded and/or snatched history"""""" data = History ( ). get ( self. limit, self. type ) results = [ ] for row in data : status, quality = Quality. split_composite_status ( int ( row [ ""action"" ] ) ) status = _get_status_strings ( status ) if : continue row [ ""status"" ] = status row [ ""quality"" ] = get_quality_string ( quality ) row [ ""date"" ] = row [ ""date"" ]. strftime ( dateTimeFormat ) del row [ ""action"" ] row [ ""indexerid"" ] = row. pop ( ""show_id"" ) row [ ""resource_path"" ] = os. path. dirname ( row [ ""resource"" ] ) row [ ""resource"" ] = os. path. basename ( row [ ""resource"" ] ) row [ ""tvdbid"" ] = row [ ""indexerid"" ] results. append ( row ) return await _responds ( RESULT_SUCCESS, results )",False,self.type and (not status.lower() == self.type),len(status) < 6,0.6507427096366882 3534,"def replacefunc ( elt ) : text = elt. attrib [ ""href"" ] if link_type ( text )!= ""page"" : raise zim. formats. VisitorSkip href = HRef. new_from_wiki_link ( text ) if href. rel == HREF_REL_RELATIVE : raise zim. formats. VisitorSkip elif href. rel == HREF_REL_ABSOLUTE : oldtarget = self. pages. resolve_link ( page, href ) if oldtarget == oldroot : return self. _update_link_tag ( elt, page, newroot, href ) elif oldtarget. ischild ( oldroot ) : newtarget = newroot + oldtarget. relname ( oldroot ) return self. _update_link_tag ( elt, page, newtarget, href ) else : raise zim. formats. VisitorSkip else : assert href. rel == HREF_REL_FLOATING newtarget = self. pages. resolve_link ( page, href ) oldtarget = self. pages. resolve_link ( oldpath, href ) if oldtarget == oldroot : return self. _update_link_tag ( elt, page, newroot, href ) elif oldtarget. ischild ( oldroot ) : oldanchor = self. pages. resolve_link ( oldpath, HRef ( HREF_REL_FLOATING, href. parts ( ) [ 0 ] ) ) if oldanchor. ischild ( oldroot ) : raise zim. formats. VisitorSkip else :",False,newtarget != oldtarget,target.root(),0.6791239976882935 3535,"def get_all_plugins_source ( self ) : plugins_path = os. path. join ( ROOT_PATH, ""plugins"" ) vuln_template_path = os. path. join ( ROOT_PATH, ""core"", ""data"", ""kb"", ""vuln_templates"" ) all_plugin_sources = """" for dir_name, subdir_list, file_list in os. walk ( plugins_path ) : if dir_name in ( ""test"", ""tests"" ) : continue for fname in file_list : if : continue if fname. startswith ( ""test_"" ) : continue if fname == ""__init__.py"" : continue full_path = os. path. join ( plugins_path, dir_name, fname ) ignores = { ""/attack/db/sqlmap/"", ""/attack/payloads/"", ""/plugins/tests/"" } should_continue = False for ignore in ignores : if ignore in full_path : should_continue = True break if should_continue : continue all_plugin_sources += file ( full_path ). read ( ) for dir_name, subdir_list, file_list in os. walk ( vuln_template_path ) : for fname in file_list : if : continue nb_checked = 0 augs = iaa. SomeOf ( ( 1, None ), [ iaa. Resize ( { ""height"" : ( 1, 100 ), ""width"" : ( 1, 100 ) } ), iaa. Affine ( scale = ( 0.01, 2.0 ), rotate = ( - 360, 360 ), shear = ( - 360, 360 ), translate_px = { ""x"" : ( - 50, 50 ), ""y"" : ( - 50, 50 ) }, ), iaa. PerspectiveTransform ( ( 0.01, 0.2 ) ), ], ) height, width = 100, 200 while True : poly = create_random_polygon ( height, width, nb_checked ) psoi = PolygonsOnImage ( [ poly ], shape = ( height, width, 3 ) ) psoi_aug = augs. augment_polygons ( psoi ) if not poly. is_valid or not psoi_aug. polygons [ 0 ]. is_valid : print ( ""poly: "", poly, poly. is_valid ) print ( ""poly_aug: "", psoi_aug. 
polygons [ 0 ], psoi_aug. polygons [ 0 ]. is_valid ) assert poly. is_valid assert psoi_aug. polygons [ 0 ]. is_valid nb_checked += 1 if : print ( ""Checked",False,nb_checked % 100 == 0,nb_checked > 3,0.6664198637008667 3537,"def poll_subprocess ( self ) : clt = self. rpcclt if clt is None : return try : response = clt. pollresponse ( self. active_seq, wait = 0.05 ) except ( EOFError, IOError, KeyboardInterrupt ) : if self. tkconsole. closing : return response = None self. restart_subprocess ( ) if response : self. tkconsole. resetoutput ( ) self. active_seq = None how, what = response console = self. tkconsole. console if how == ""OK"" : if what is not None : print >> console, repr ( what ) elif : if self. tkconsole. getvar ( ""<<toggle-jit-stack-viewer>>"" ) : self. remote_stack_viewer ( ) elif how == ""ERROR"" : errmsg = ""PyShell.ModifiedInterpreter: Subprocess ERROR:\n"" print >> sys. __stderr__, errmsg, what print >> console, errmsg, what try : self. tkconsole. endexecuting ( ) except AttributeError : pass if not self. tkconsole. closing : self. tkconsole. text. after ( self. tkconsole. pollinterval, self. poll_subprocess )",False,how == 'EXCEPTION',"how == ""ZERO_STACK'",0.6686041355133057 3538,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : self. set_socket_descriptor ( d. getPrefixedString ( ) ) continue if tt == 18 : self. set_data ( d. getPrefixedString ( ) ) continue if tt == 24 : self. set_stream_offset ( d. getVarInt64 ( ) ) continue if tt == 32 : self. set_flags ( d. getVarInt32 ( ) ) continue if tt == 42 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. mutable_send_to ( ). TryMerge ( tmp ) continue if : self. set_timeout_seconds ( d. getDouble ( ) ) continue if tt == 0 : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 49,tt == 1,0.6955125331878662 3539,"def test_error_stream ( environ, start_response ) : writer = start_response ( ""200 OK"", [ ] ) wsgi_errors = environ [ ""wsgi.errors"" ] error_msg = None for method in [ ""flush"", ""write"", ""writelines"", ] : if : error_msg = ""wsgi.errors has no '%s' attr"" % method if not error_msg and not callable ( getattr ( wsgi_errors, method ) ) : error_msg = ""wsgi.errors.%s attr is not callable"" % method if error_msg : break return_msg = error_msg or ""success"" writer ( return_msg ) return [ ]",False,"not hasattr(wsgi_errors, method)",method in wsgi_errors,0.6513174772262573 3540,"def _construct_train_dsl ( self ) : self. _train_dsl [ ""components"" ] = { } for name, component in self. _components. items ( ) : component_dsl = { ""module"" : component. module } if name in self. _components_input : component_dsl [ ""input"" ] = self. _components_input [ name ] if hasattr ( component, ""output"" ) : component_dsl [ ""output"" ] = { } if hasattr ( component. output, ""data_output"" ) : component_dsl [ ""output"" ] [ ""data"" ] = component. output. data_output if : component_dsl [ ""output"" ] [ ""model"" ] = component. output. model_output self. _train_dsl [ ""components"" ] [ name ] = component_dsl if not self. _train_dsl : raise ValueError ( ""there are no components to train"" ) LOGGER. debug ( f""train_dsl: {self._train_dsl}"" )",False,"hasattr(component.output, 'model')","hasattr(component.output, 'model_output')",0.6484242081642151 3541,"def addPythonModules ( self ) : self. message ( ""Adding Python modules"", 1 ) if self. 
use_zipimport : import zipfile relpath = pathjoin ( ""Contents"", ""Resources"", ZIP_ARCHIVE ) abspath = pathjoin ( self. bundlepath, relpath ) zf = zipfile. ZipFile ( abspath, ""w"", zipfile. ZIP_DEFLATED ) for name, code, ispkg in self. pymodules : self. message ( ""Adding Python module %s"" % name, 2 ) path, pyc = getPycData ( name, code, ispkg ) zf. writestr ( path, pyc ) zf. close ( ) sitepath = pathjoin ( self. bundlepath, ""Contents"", ""Resources"", ""site"" + PYC_EXT ) writePyc ( self. _getSiteCode ( ), sitepath ) else : for name, code, ispkg in self. pymodules : if : name += "".__init__"" path = name. split ( ""."" ) path = pathjoin ( ""Contents"", ""Resources"", * path ) + PYC_EXT if : self. message ( ""Adding Python package %s"" % path, 2 ) else : self. message ( ""Adding Python module %s"" % path, 2 ) abspath = pathjoin ( self. bundlepath, path ) makedirs ( os. path. dirname ( abspath ) ) if self. _lastToken == None or self. _lastToken. type == self. OpenBrace : text = super ( JavaScriptBaseLexer, self ). text if : if len ( self. _scopeStrictModes ) > 0 : self. _scopeStrictModes. pop ( ) self. _useStrictCurrent = True self. _scopeStrictModes. append ( self. _useStrictCurrent )",False,"text == '""use strict""' or text == ""'use strict'""",text == '',0.6557522416114807 3543,"def _handle_autocomplete_request_for_text ( text ) : if not hasattr ( text, ""autocompleter"" ) : if : if isinstance ( text, CodeViewText ) : text. autocompleter = Completer ( text ) elif isinstance ( text, ShellText ) : text. autocompleter = ShellCompleter ( text ) text. bind ( ""<1>"", text. autocompleter. on_text_click ) else : return text. autocompleter. handle_autocomplete_request ( )",False,"isinstance(text, (CodeViewText, ShellText)) and text.is_python_text()",text.autocomplete_request is False,0.6501612067222595 3544,"def setUpModule ( ) : try : with open ( ""tests.json"", ""r"" ) as tests_fp : DB_OVERRIDES. update ( json. load ( tests_fp ) ) except FileNotFoundError : print ( ""'tests.json' file not found, will use defaults"" ) if not aiopg : print ( ""aiopg is not installed, ignoring PostgreSQL tests"" ) for key in list ( DB_CLASSES. keys ( ) ) : if : DB_CLASSES. pop ( key ) if not aiomysql : print ( ""aiomysql is not installed, ignoring MySQL tests"" ) for key in list ( DB_CLASSES. keys ( ) ) : if key. startswith ( ""mysql"" ) : DB_CLASSES. pop ( key ) loop = asyncio. new_event_loop ( ) all_databases = load_databases ( only = None ) for key, database in all_databases. items ( ) : connect = database. connect_async ( loop = loop ) loop. run_until_complete ( connect ) if database. _async_conn is not None : disconnect = database. close_async ( ) loop. run_until_complete ( disconnect ) else : print ( ""Can't setup connection for %s"" % key ) DB_CLASSES. pop ( key )",False,key.startswith('postgres'),key.startswith('mysql'),0.6501309871673584 3545,"def get_path ( current_path, initial_path ) : if not self. _path_exists ( current_path ) and not os. path. isabs ( initial_path ) : new_path = self. _in_root_dir ( initial_path ) if : resolves_to = self. schema. paths_to_resolve. get ( key ) log. warning ( ""Paths for the '{0}' option should be relative to '{1}'. To suppress this warning, "" ""move '{0}' into '{1}', or set it's value to an absolute path."". format ( key, resolves_to ) ) return new_path return current_path",False,self._path_exists(new_path),new_path is not None,0.6529514789581299 3546,"def forget_old_txs ( ) : new_known_txs = { } if self. p2p_node is not None : for peer in self. p2p_node. 
peers. itervalues ( ) : new_known_txs. update ( peer. remembered_txs ) new_known_txs. update ( self. mining_txs_var. value ) for share in self. tracker. get_chain ( self. best_share_var. value, min ( 120, self. tracker. get_height ( self. best_share_var. value ) ), ) : for tx_hash in share. new_transaction_hashes : if : new_known_txs [ tx_hash ] = self. known_txs_var. value [ tx_hash ] self. known_txs_var. set ( new_known_txs )",True,tx_hash in self.known_txs_var.value,tx_hash in self.known_txs_var.value,0.6525777578353882 3547,"def get_class_name ( item ) : class_name, module_name = None, None for parent in reversed ( item. listchain ( ) ) : if : class_name = parent. name elif isinstance ( parent, pytest. Module ) : module_name = parent. module. __name__ break if class_name and "".tasks."" not in module_name : return ""{}.{}"". format ( module_name, class_name ) else : return module_name",False,"isinstance(parent, pytest.Class)","isinstance(parent, pytest.Name)",0.6493468880653381 3548,"def _test_flow_unmatching_check ( self, before_stats, pkt ) : rcv_msgs = self. _test_get_match_count ( ) lookup = False for target_tbl_id in pkt [ KEY_TBL_MISS ] : before = before_stats [ target_tbl_id ] after = rcv_msgs [ target_tbl_id ] if : lookup = True if before [ ""matched"" ] < after [ ""matched"" ] : raise TestFailure ( self. state ) if not lookup : raise TestError ( self. state )",False,before['lookup'] < after['lookup'],before and after,0.6524045467376709 3549,"def parseImpl ( self, instring, loc, doActions = True ) : loc, resultlist = self. exprs [ 0 ]. _parse ( instring, loc, doActions, callPreParse = False ) errorStop = False for e in self. exprs [ 1 : ] : if : errorStop = True continue if errorStop : try : loc, exprtokens = e. _parse ( instring, loc, doActions ) except ParseSyntaxException : raise except ParseBaseException as pe : pe. __traceback__ = None raise ParseSyntaxException ( pe ) except IndexError : raise ParseSyntaxException ( ParseException ( instring, len ( instring ), self. errmsg, self ) ) else : loc, exprtokens = e. _parse ( instring, loc, doActions ) if exprtokens or exprtokens. haskeys ( ) : resultlist += exprtokens return loc, resultlist",False,"isinstance(e, And._ErrorStop)",callPreParse,0.6523852944374084 3550,"def ParseMultiple ( self, result_dicts ) : """"""Parse WMI Event Consumers."""""" for result_dict in result_dicts : wmi_dict = result_dict. ToDict ( ) try : creator_sid_bytes = bytes ( wmi_dict [ ""CreatorSID"" ] ) wmi_dict [ ""CreatorSID"" ] = BinarySIDtoStringSID ( creator_sid_bytes ) except ValueError : wmi_dict [ ""CreatorSID"" ] = compatibility. Repr ( wmi_dict [ ""CreatorSID"" ] ) except KeyError : pass for output_type in self. output_types : anomalies = [ ] output = rdfvalue. RDFValue. classes [ output_type. __name__ ] ( ) for k, v in wmi_dict. items ( ) : try : output. Set ( k, v ) except AttributeError as e : anomalies. append ( ""Unknown field %s, with value %s"" % ( k, v ) ) except ValueError as e : anomalies. append ( ""Invalid value %s for field %s: %s"" % ( v, k, e ) ) if anomalies : yield rdf_anomaly. Anomaly ( type =",False,wmi_dict and (not output),self.type == 'wmi_event_consumers',0.6525367498397827 3551,"def depart_title ( self, node ) : close_tag = self. context [ - 1 ] if ( self. add_permalinks and self. builder. add_permalinks and node. parent. hasattr ( ""ids"" ) and node. parent [ ""ids"" ] ) : aname = node. parent [ ""ids"" ] [ 0 ] if close_tag. startswith ( ""</h"" ) : self. body. 
append ( u'<a class=""headerlink"" href=""#%s"" ' % aname + u'title=""%s"">\u00B6</a>' % _ ( ""Permalink to this headline"" ) ) elif : self. body. append ( u'</a><a class=""headerlink"" href=""#%s"" ' % aname + u'title=""%s"">\u00B6</a>' % _ ( ""Permalink to this headline"" ) ) BaseTranslator. depart_title ( self, node )",False,close_tag.startswith(' signature = inspect. signature ( function ) arguments = copy. deepcopy ( arguments ) kwargs = { } for name, info in signature. parameters. items ( ) : if : kwargs [ name ] = arguments [ name ] del arguments [ name ] continue if info. default is not inspect. Parameter. empty : kwargs [ name ] = info. default continue raise ConfigError ( ""Need a value for {}"". format ( name ) ) if not allow_unused and len ( arguments ) > 0 : raise ConfigError ( ""Unused configuration parameters {}"". format ( arguments. keys ( ) ) ) return function ( ** kwargs )",True,name in arguments,name in arguments,0.678224503993988 3553,"def wrapper ( self : RequestHandler, * args, ** kwargs ) -> Optional [ Awaitable [ None ] ] : if self. request. path. endswith ( ""/"" ) : if self. request. method in ( ""GET"", ""HEAD"" ) : uri = self. request. path. rstrip ( ""/"" ) if uri : if : uri += ""?"" + self. request. query self. redirect ( uri, permanent = True ) return None else : raise HTTPError ( 404 ) return method ( self, * args, ** kwargs )",True,self.request.query,self.request.query,0.6592823266983032 3554,"def _decode_pattern_dict ( data ) : rv = { } for key, value in data. items ( ) : if isinstance ( key, bytes ) : key = key. encode ( ""utf-8"" ) if isinstance ( key, str ) : if key in [ ""$in"", ""$gt"", ""$gte"", ""$lt"", ""$lte"", ""$exists"" ] : return 1 if key == ""$nin"" : value = 1 if : try : return _decode_pattern_dict ( value ) except : return value if isinstance ( value, list ) : value = _decode_pattern_list ( value ) elif isinstance ( value, dict ) : value = _decode_pattern_dict ( value ) else : value = 1 rv [ key ] = value return rv",False,"key in ['query', '$query']","isinstance(value, dict)",0.6591883897781372 3555,"def server_operation_put ( server_id, operation ) : if settings. app. demo_mode : return utils. demo_blocked ( ) svr = server. get_by_id ( server_id, fields = server. operation_fields ) try : if operation == START : svr. start ( ) logger. LogEntry ( message = 'Started server ""%s"".' % svr. name ) if : svr. stop ( ) logger. LogEntry ( message = 'Stopped server ""%s"".' % svr. name ) elif operation == RESTART : svr. restart ( ) logger. LogEntry ( message = 'Restarted server ""%s"".' % svr. name ) except : event. Event ( type = SERVERS_UPDATED ) raise event. Event ( type = SERVERS_UPDATED ) event. Event ( type = SERVER_HOSTS_UPDATED, resource_id = svr. id ) for org in svr. iter_orgs ( ) : event. Event ( type = USERS_UPDATED, resource_id = org. id ) svr. send_link_events ( ) return utils. jsonify ( svr. dict ( ) )",True,operation == STOP,operation == STOP,0.6889662742614746 3556,"def should_keep_alive ( commit_msg ) : result = False ci = get_current_ci ( ) or """" for line in commit_msg. splitlines ( ) : parts = line. strip ( ""# "" ). split ( "":"", 1 ) ( key, val ) = parts if len ( parts ) > 1 else ( parts [ 0 ], """" ) if key == ""CI_KEEP_ALIVE"" : ci_names = val. replace ( "","", "" "" ). lower ( ). split ( ) if val else [ ] if : result = True return result",False,len(ci_names) == 0 or ci.lower() in ci_names,len(ci_names) > 1 and ci_names[0] in result,0.6553505659103394 3557,"def air_quality ( self ) : aqi_data = self. _get_aqi_data ( ) if aqi_data : if aqi_data. get ( ""status"" ) == ""ok"" : aqi_data = self. 
_organize ( aqi_data ) aqi_data = self. _manipulate ( aqi_data ) elif : self. py3. error ( aqi_data. get ( ""data"" ) ) return { ""cached_until"" : self. py3. time_in ( self. cache_timeout ), ""full_text"" : self. py3. safe_format ( self. format, aqi_data ), }",False,aqi_data.get('status') == 'error',aqi_data and aqi_data.get('data'),0.6501069068908691 3558,"def findDepth ( self, insts, debug = 0 ) : depth = 0 maxDepth = 0 for i in insts : opname = i [ 0 ] if : print ( i ), delta = self. effect. get ( opname, None ) if delta is not None : depth = depth + delta else : for pat, pat_delta in self. patterns : if opname [ : len ( pat ) ] == pat : delta = pat_delta depth = depth + delta break if delta is None : meth = getattr ( self, opname, None ) if meth is not None : depth = depth + meth ( i [ 1 ] ) if depth > maxDepth : maxDepth = depth if : print ( depth, maxDepth ) return maxDepth",True,debug,debug,0.6841456890106201 3559,"def update ( self ) : while self. running : command = self. get_command ( ) if self. debug : self. log ( ""Command = {}"". format ( command ) ) elif command is not None : self. log ( ""Command = {}"". format ( command ) ) if command == ""autopilot"" : self. ctr. mode = ""local"" elif command == ""speedup"" : self. cfg. AI_THROTTLE_MULT += 0.05 elif command == ""slowdown"" : self. cfg. AI_THROTTLE_MULT -= 0.05 elif : self. ctr. mode = ""user"" self. cfg. AI_THROTTLE_MULT = self. DEFAULT_AI_THROTTLE_MULT if self. debug : self. log ( ""mode = {}, cfg.AI_THROTTLE_MULT={}"". format ( self. ctr. mode, self. cfg. AI_THROTTLE_MULT ) ) time. sleep ( 0.25 )",False,command == 'stop',command == 'slowup_mode',0.662871778011322 3560,"def get_shadows_zip ( filename ) : import zipfile shadow_pkgs = set ( ) with zipfile. ZipFile ( filename ) as lib_zip : already_test = [ ] for fname in lib_zip. namelist ( ) : pname, fname = os. path. split ( fname ) if fname or ( pname and fname ) : continue if pname not in already_test and ""/"" not in pname : already_test. append ( pname ) if : shadow_pkgs. add ( pname ) return shadow_pkgs",False,is_shadowing(pname),pname and pname not in shadow_pkgs,0.6541174650192261 3561,"def _parse_fill ( fill, img, min_pil_version, name = ""fillcolor"" ) : major_found, minor_found = ( int ( v ) for v in PILLOW_VERSION. split ( ""."" ) [ : 2 ] ) major_required, minor_required = ( int ( v ) for v in min_pil_version. split ( ""."" ) [ : 2 ] ) if major_found < major_required or ( major_found == major_required and minor_found < minor_required ) : if fill is None : return { } else : msg = ( ""The option to fill background area of the transformed image, "" ""requires pillow>={}"" ) raise RuntimeError ( msg. format ( min_pil_version ) ) num_bands = len ( img. getbands ( ) ) if fill is None : fill = 0 if isinstance ( fill, ( int, float ) ) and num_bands > 1 : fill = tuple ( [ fill ] * num_bands ) if isinstance ( fill, ( list, tuple ) ) : if : msg = ( ""The number of elements in 'fill' does not match the number of "" ""bands of the image ({}!= {})"" ) raise ValueError ( msg. format ( len ( fill ), num_bands ) ) fill = tuple ( fill ) return { name : fill }",True,len(fill) != num_bands,len(fill) != num_bands,0.661264181137085 3562,"def _getValueHist ( self, version, pkgarch, checksum ) : data = self. _execute ( ""SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;"" % self. table, ( version, pkgarch, checksum ), ) row = data. fetchone ( ) if : return row [ 0 ] else : try : self. 
_execute ( ""INSERT INTO %s VALUES (?,?,?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"" % ( self. table, self. table ), ( version, pkgarch, checksum, version, pkgarch ), ) except sqlite3. IntegrityError as exc : logger. error ( str ( exc ) ) self. dirty = True data = self. _execute ( ""SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;"" % self. table, ( version, pkgarch, checksum ), ) row = data. fetchone ( ) if : return row [ 0 ] else : raise prserv. NotFoundError",False,row != None,row,0.6841013431549072 3563,"def main ( client ) : placement_service = client. GetService ( ""PlacementService"", version = ""v202011"" ) statement = ( ad_manager. StatementBuilder ( version = ""v202011"" ) . Where ( ""status = :status"" ) . WithBindVariable ( ""status"", ""ACTIVE"" ) ) while True : response = placement_service. getPlacementsByStatement ( statement. ToStatement ( ) ) if : for placement in response [ ""results"" ] : print ( 'Placement with ID ""%d"" and name ""%s"" was found.\n' % ( placement [ ""id"" ], placement [ ""name"" ] ) ) statement. offset += statement. limit else : break print ( ""\nNumber of results found: %s"" % response [ ""totalResultSetSize"" ] )",False,'results' in response and len(response['results']),response[0],0.6574869155883789 3564,"def authorize_application ( registration_app_id : UUID, onefuzz_app_id : UUID, permissions : List [ str ] = [ ""user_impersonation"" ], ) -> None : try : onefuzz_app = get_application ( onefuzz_app_id ) if : logger. error ( ""Application '%s' not found"", onefuzz_app_id ) return scopes = seq ( onefuzz_app [ ""api"" ] [ ""oauth2PermissionScopes"" ] ). filter ( lambda scope : scope [ ""value"" ] in permissions ) existing_preAuthorizedApplications = ( seq ( onefuzz_app [ ""api"" ] [ ""preAuthorizedApplications"" ] ) . map ( lambda paa : seq ( paa [ ""delegatedPermissionIds"" ] ). map ( lambda permission_id : ( paa [ ""appId"" ], permission_id ) ) ) . flatten ( ) ) preAuthorizedApplications = ( scopes. map ( lambda s : ( str ( registration_app_id ), s [ ""id"" ] ) ) . union ( existing_preAuthorizedApplications ) . distinct ( ) . group_by_key ( ) . map ( lambda data : { ""appId"" : data [ 0 ], ""delegatedPermissionIds"" : data [ 1 ] } ) ) query_microsoft_graph ( <",False,onefuzz_app is None,not onefuzz_app,0.6636962294578552 3565,"def _canonicalGlyphName ( name, localName2ucs, localUc2Names, altName2ucs ) : ucs = localName2ucs. get ( name ) if ucs : return name, list ( ucs ) [ 0 ] ucs = altName2ucs. get ( name ) if ucs : for uc in ucs : localNames = localUc2Names. get ( uc ) if : return localNames [ 0 ], uc return None, None",False,localNames and len(localNames),localNames,0.6551178693771362 3566,"def parse_hgsub ( lines ) : """"""Fills OrderedDict with hgsub file content passed as list of lines"""""" rv = OrderedDict ( ) for l in lines : ls = l. strip ( ) if : continue name, value = l. split ( ""="", 1 ) rv [ name. strip ( ) ] = value. strip ( ) return rv",False,not ls or ls[0] == '#',not ls,0.6596238017082214 3567,"def match_delta ( self, value ) : """"""Search for timedelta information in the string"""""" m = self. REGEX_DELTA. search ( value ) delta = datetime. timedelta ( days = 0 ) if m : d = int ( m. group ( 1 ) ) if : d = - d if m. group ( 2 ) == ""minute"" : delta = datetime. timedelta ( minutes = d ) elif m. group ( 2 ) == ""hour"" : delta = datetime. timedelta ( hours = d ) elif m. group ( 2 ) == ""day"" : delta = datetime. timedelta ( days = d ) elif m. 
group ( 2 ) == ""week"" : delta = datetime. timedelta ( weeks = d ) value = self. REGEX_DELTA. sub ( """", value ) return ( delta, value )",False,m.group(3) == 'ago' or m.group(3) == 'before',d,0.6476140022277832 3568,"def _find_unicode_literals_frame ( ) : import __future__ if not hasattr ( sys, ""_getframe"" ) : return 0 frm = sys. _getframe ( 1 ) idx = 1 while frm is not None : if : frm = frm. f_back idx += 1 elif frm. f_code. co_flags & __future__. unicode_literals. compiler_flag : return idx else : break return 0",False,"frm.f_globals.get('__name__', '').startswith('click.')",__future__.unicode_literals.compiler_flag,0.6529810428619385 3569,"def search_new_server ( self ) : """"""Search for a new server for this article"""""" sabnzbd. BPSMeter. register_server_article_failed ( self. fetcher. id ) self. add_to_try_list ( self. fetcher ) for server in sabnzbd. Downloader. servers : if : if server. priority >= self. fetcher. priority : self. tries = 0 sabnzbd. NzbQueue. reset_try_lists ( self, article_reset = False ) return True logging. info ( T ( ""%s => missing from all servers, discarding"" ) % self ) self. nzf. nzo. increase_bad_articles_counter ( ""missing_articles"" ) return False",False,server.active and (not self.server_in_try_list(server)),self.has_server,0.6507188081741333 3570,"def emit_properties_changed ( self, interface, properties, path = ""/"" ) : """"""Emits PropertiesChanged for the specified properties"""""" combos = { } for prop in properties : iface = self. get_interface ( interface, prop ) if iface is None : raise ValueError ( ""Property %s not registered"" % prop ) combos. setdefault ( iface, [ ] ). append ( prop ) for iface, props in combos. items ( ) : values = { } inval = [ ] for prop in props : emit = self. __props [ iface ] [ prop ] [ ""emit"" ] if : raise ValueError ( ""Can't emit changed signal for %s"" % prop ) elif emit == ""true"" : values [ prop ] = self. get_value ( iface, prop, path ) elif emit == ""invalidates"" : inval. append ( prop ) if self. SUPPORTS_MULTIPLE_OBJECT_PATHS : self. PropertiesChanged ( iface, values, inval, rel = path ) else : self. PropertiesChanged ( iface, values, inval )",True,emit == 'false',emit == 'false',0.6633288860321045 3571,"def addClasses ( self, name ) : for n in name. split ( ) : try : k, method = n. split ( ""."" ) except ValueError : k = n method = None self. classes [ k ] = 1 if : self. methods. setdefault ( k, { } ) [ method ] = 1",True,method is not None,method is not None,0.663976788520813 3572,"def pause ( self ) : if self. is_playing : self. state = MusicPlayerState. PAUSED if : self. _current_player. pause ( ) self. emit ( ""pause"", player = self, entry = self. current_entry ) return elif self. is_paused : return raise ValueError ( ""Cannot pause a MusicPlayer in state %s"" % self. state )",False,self._current_player,self.state == MusicPlayerState.PAused,0.6601255536079407 3573,"def _make_slices ( shape : tp. Tuple [ int,... ], axes : tp. Tuple [ int,... ], size : int, rng : np. random. RandomState, ) -> tp. List [ slice ] : slices = [ ] for a, s in enumerate ( shape ) : if a in axes : if : raise ValueError ( ""Cannot crossover on axis with size 1"" ) start = rng. randint ( s - size ) slices. append ( slice ( start, start + size ) ) else : slices. append ( slice ( None ) ) return slices",False,s <= 1,s - size > 1,0.6794853210449219 3574,"def _pop_waiting_trial_id ( self ) -> Optional [ int ] : for trial in self. _storage. get_all_trials ( self. _study_id, deepcopy = False ) : if : continue if not self. 
_storage. set_trial_state ( trial. _trial_id, TrialState. RUNNING ) : continue _logger. debug ( ""Trial {} popped from the trial queue."". format ( trial. number ) ) return trial. _trial_id return None",False,trial.state != TrialState.WAITING,trial.number == 0,0.6566871404647827 3575,"def __get_file_by_num ( self, num, file_list, idx = 0 ) : for element in file_list : if : return element if element [ 3 ] and element [ 4 ] : i = self. __get_file_by_num ( num, element [ 3 ], idx + 1 ) if not isinstance ( i, int ) : return i idx = i else : idx += 1 return idx",False,idx == num,element[0] and element[0],0.6793323159217834 3576,"def _handle_redirects ( self, response, ** extra ) : ""Follows any redirects by requesting responses from the server using GET."" response. redirect_chain = [ ] while response. status_code in ( 301, 302, 303, 307 ) : url = response [ ""Location"" ] scheme, netloc, path, query, fragment = urlsplit ( url ) redirect_chain = response. redirect_chain redirect_chain. append ( ( url, response. status_code ) ) if : extra [ ""wsgi.url_scheme"" ] = scheme response = self. get ( path, QueryDict ( query ), follow = False, ** extra ) response. redirect_chain = redirect_chain if response. redirect_chain [ - 1 ] in response. redirect_chain [ 0 : - 1 ] : break return response",False,scheme,extra,0.6865355968475342 3577,"def get_category_items ( self, context ) : category_items = None category_items = [ ( GENERAL, ""General"", ""Uncategorized presets"", 0 ) ] node_category_items = [ ] for idx, category in enumerate ( get_category_names ( ) ) : node_class = get_node_class_reference ( category ) if : title = ""/Node/ {}"". format ( node_class. bl_label ) node_category_items. append ( ( category, title, category, idx + 1 ) ) else : title = category category_items. append ( ( category, title, category, idx + 1 ) ) include_node_categories = ( not hasattr ( self, ""include_node_categories"" ) or self. include_node_categories ) if node_category_items and include_node_categories : category_items = category_items + [ None ] + node_category_items return category_items",False,"node_class and hasattr(node_class, 'bl_label')",node_class and node_class.bl_label,0.6485708951950073 3578,"def decoration_helper ( self, patched, args, keywargs ) : extra_args = [ ] with contextlib. ExitStack ( ) as exit_stack : for patching in patched. patchings : arg = exit_stack. enter_context ( patching ) if : keywargs. update ( arg ) elif patching. new is DEFAULT : extra_args. append ( arg ) args += tuple ( extra_args ) yield ( args, keywargs )",False,patching.attribute_name is not None,patching.new is True,0.6531013250350952 3579,"def test_function ( self ) : for start_method, redirs in product ( start_methods ( ), redirects ( ) ) : with self. subTest ( start_method = start_method, redirs = redirs ) : pc = start_processes ( name = ""echo"", entrypoint = echo1, args = { 0 : ( ""hello"", ), 1 : ( ""hello"", ) }, envs = { 0 : { ""RANK"" : ""0"" }, 1 : { ""RANK"" : ""1"" } }, log_dir = self. log_dir ( ), start_method = start_method, redirects = redirs, ) results = pc. wait ( period = 0.1 ) nprocs = pc. nprocs self. assert_pids_noexist ( pc. pids ( ) ) self. assertEqual ( { i : f""hello_{i}"" for i in range ( nprocs ) }, results. return_values ) for i in range ( nprocs ) : if : self. assertFalse ( results. stdouts [ i ] ) if redirs & Std. ERR!= Std. ERR : self. assertFalse ( results. stderrs [ i ] ) if redirs & Std. 
OUT",False,redirs & Std.OUT != Std.OUT,results.return_values[i] != 0,0.6815295219421387 3580,"def __init__ ( self, path : typing. Union [ str, ""os.PathLike[str]"" ], status_code : int = 200, headers : dict = None, media_type : str = None, background : BackgroundTask = None, filename : str = None, stat_result : os. stat_result = None, method : str = None, ) -> None : assert aiofiles is not None, ""'aiofiles' must be installed to use FileResponse"" self. path = path self. status_code = status_code self. filename = filename self. send_header_only = method is not None and method. upper ( ) == ""HEAD"" if media_type is None : media_type = guess_type ( filename or path ) [ 0 ] or ""text/plain"" self. media_type = media_type self. background = background self. init_headers ( headers ) if self. filename is not None : content_disposition_filename = quote ( self. filename ) if : content_disposition = ""attachment; filename*=utf-8''{}"". format ( content_disposition_filename ) else : content_disposition = 'attachment; filename=""{}""'. format ( self. filename ) self. headers. setdefault ( ""content-disposition"", content_disposition ) self. stat_result = stat_result if stat_result is not None : self. set_stat_headers ( stat_result )",False,content_disposition_filename != self.filename,self.filename_in_a_filename,0.6603981256484985 3581,"def _check_systemd_reval ( self, args ) : penv = { ""stdout"" : self. _devnull, ""stderr"" : self. _devnull, } if not os. getenv ( ""DBUS_SESSION_BUS_ADDRESS"" ) and self. _user : penv. update ( { ""env"" : { ""DBUS_SESSION_BUS_ADDRESS"" : ""unix:path=/var/run/user/{}/dbus/user_bus_socket"". format ( self. _uid ) }, } ) try : cmd = [ ""systemctl"" ] if : cmd. append ( ""--user"" ) cmd = cmd + args return subprocess. check_call ( cmd, ** penv ) == 0 except ( subprocess. CalledProcessError, OSError ) : return None",False,self._user,args,0.6727558374404907 3582,"def did_evm_read_storage_callback ( self, state, address, offset, value ) : m = self. manticore world = state. platform tx = world. all_transactions [ - 1 ] md = m. get_metadata ( tx. address ) if md : offsets = state. solve_n ( offset, 3000 ) with self. locked_context ( ""storage_reads"", dict ) as storage_reads : contract_function = ( md. name, md. get_func_name ( state. solve_one ( tx. data [ 0 : 4 ] ) ), ) if : storage_reads [ contract_function ] = set ( ) for off in offsets : storage_reads [ contract_function ]. add ( off )",True,contract_function not in storage_reads,contract_function not in storage_reads,0.6535273790359497 3583,"def _update_percent_field_in_targets ( self, args, update_modified = True ) : """"""Update percent field in parent transaction"""""" if args. get ( ""percent_join_field_parent"" ) : args [ ""name"" ] = self. get ( args [ ""percent_join_field_parent"" ] ) self. _update_percent_field ( args, update_modified ) else : distinct_transactions = set ( [ d. get ( args [ ""percent_join_field"" ] ) for d in self. get_all_children ( args [ ""source_dt"" ] ) ] ) for name in distinct_transactions : if : args [ ""name"" ] = name self. _update_percent_field ( args, update_modified )",True,name,name,0.6797094941139221 3584,"def contact_lists ( request ) : tags = Tag. objects. all ( ) if request. user. role == ""ADMIN"" : queryset = ContactList. objects. all ( ). order_by ( ""-created_on"" ) else : queryset = ContactList. objects. filter ( Q ( created_by = request. user ) | Q ( visible_to = request. user ) ). order_by ( ""-created_on"" ) users = User. objects. filter ( id__in = queryset. 
values_list ( ""created_by_id"", flat = True ) ) if request. GET. get ( ""tag"" ) : queryset = queryset. filter ( tags = request. GET. get ( ""tag"" ) ) search = False if request. method == ""POST"" : post_tags = request. POST. getlist ( ""tag"" ) if : queryset = queryset. filter ( name__icontains = request. POST. get ( ""contact_list_name"" ) ) if request. POST. get ( ""created_by"" ) : queryset = queryset. filter ( created_by = request. POST. get ( ""created_by"" ) ) if request. POST. get ( ""tag"" ) : queryset = queryset. filter ( tags__id__in = post_tags ) request_tags = request. POST. getlist ( ""tag"" ) if ( request. POST. get ( ""contact_list_name"" ) or request. POST. get ( ""created_by"" ) or request. POST. get ( ""created_by"" ) ) :",False,request.POST.get('contact_list_name'),post_tags,0.6476304531097412 3585,"def enableCtrls ( self ) : for data in self. storySettingsData : name = data [ ""name"" ] if name in self. ctrls : if : set = self. getSetting ( data [ ""requires"" ] ) for i in self. ctrls [ name ] : i. Enable ( set not in [ ""off"", ""false"", ""0"" ] )",False,'requires' in data,data['requires'] != '',0.6880241632461548 3586,"def main ( argv = [ ""worker_setup.py"" ] ) : """"""Completely set everything up from a fresh ec2 instance"""""" _, ubuntu_arch = check_ubuntu_version ( ) opts = get_options ( argv ) opts. arch = ubuntu_arch with Environ ( ""DEBIAN_FRONTEND"", ""noninteractive"" ) : if opts. update_system : run_cmd ( ""apt-get update"" ) run_cmd ( ""apt-get upgrade -y"" ) if opts. install_required : install_required_packages ( ) if : install_utility_packages ( ) if opts. install_pkg_languages : install_packaged_languages ( ) if opts. install_languages : install_all_languages ( opts ) if opts. install_jailguard : install_jailguard ( opts ) if opts. create_jails : setup_base_chroot ( opts ) if opts. packages_only : return setup_contest_files ( opts ) if opts. create_jails : setup_base_jail ( opts ) setup_jailusers ( opts ) start_script = os. path. join ( opts. root_dir, opts. local_repo, ""worker/start_worker.sh"" ) if opts. install_cronjob : cron_file = ""/etc/cron.d/ai-contest"" if not file_contains ( cron_file, start_script ) : append_line ( ",False,opts.install_utilities,opts.install_utility_packages,0.6536614298820496 3587,"def _add_noise ( tokens, lengths, params, subword_token, is_spacer = None ) : if not isinstance ( params, list ) : raise ValueError ( ""Expected a list of noise modules"" ) noises = [ ] for module in params : noise_type, args = next ( iter ( module. items ( ) ) ) if : args = [ args ] noise_type = noise_type. lower ( ) if noise_type == ""dropout"" : noise_class = noise. WordDropout elif noise_type == ""replacement"" : noise_class = noise. WordReplacement elif noise_type == ""permutation"" : noise_class = noise. WordPermutation else : raise ValueError ( ""Invalid noise type: %s"" % noise_type ) noises. append ( noise_class ( * args ) ) noiser = noise. WordNoiser ( noises = noises, subword_token = subword_token, is_spacer = is_spacer ) return noiser ( tokens, lengths, keep_shape = True )",False,"not isinstance(args, list)",len(args) > 0,0.6509168744087219 3588,"def test_is_open_close_time ( ) : timestamps = iterate_timestamps ( entity_type = ""coin"", exchange = ""binance"", level = IntervalLevel. 
LEVEL_1MIN, start_timestamp = ""2019-05-01"", end_timestamp = ""2019-05-01"", ) assert is_open_time ( entity_type = ""coin"", exchange = ""binance"", timestamp = timestamps [ 0 ] ) assert is_close_time ( entity_type = ""coin"", exchange = ""binance"", timestamp = timestamps [ - 1 ] ) timestamps = iterate_timestamps ( entity_type = ""coin"", exchange = ""binance"", level = IntervalLevel. LEVEL_5MIN, start_timestamp = ""2019-05-01"", end_timestamp = ""2019-05-01"", ) assert is_open_time ( entity_type = ""coin"", exchange = ""binance"", timestamp = timestamps [ 0 ] ) assert is_close_time ( entity_type = ""coin"", exchange = ""binance"", timestamp = timestamps [ - 1 ] ) timestamps = iterate_timestamps ( entity_type = ""coin"", exchange = ""binance"", level = IntervalLevel. LEVEL_5MIN, start_timestamp = ""2019-05-01"", end_timestamp = ""2019-05-02"", ) open = [ ] close = [ ] for timestamp in timestamps : if : open. append ( timestamp ) if is_close_time ( entity_type = ""coin"",",False,"is_open_time(entity_type='coin', exchange='binance', timestamp=timestamp)",timestamp not in open,0.6470658779144287 3589,"def _parse_fixits ( message, titer, line ) : """"""Parses fixit messages."""""" while ( OutputParser. message_line_re. match ( line ) is None and OutputParser. note_line_re. match ( line ) is None ) : message_text = line. strip ( ) if : message. fixits. append ( Note ( message. path, message. line, line. find ( message_text ) + 1, message_text, ) ) line = next ( titer ) return line",False,message_text != '',message_text,0.6673606634140015 3590,"def __attempt_add_to_linked_match ( self, input_name, hdca, collection_type_description, subcollection_type ) : structure = get_structure ( hdca, collection_type_description, leaf_subcollection_type = subcollection_type ) if not self. linked_structure : self. linked_structure = structure self. collections [ input_name ] = hdca self. subcollection_types [ input_name ] = subcollection_type else : if : raise exceptions. MessageException ( CANNOT_MATCH_ERROR_MESSAGE ) self. collections [ input_name ] = hdca self. subcollection_types [ input_name ] = subcollection_type",False,not self.linked_structure.can_match(structure),leaf_subcollection_type,0.6484894752502441 3591,"def _FormatContainerContents ( self, node ) : """"""Print out the last type parameter of a container. Used for *args/**kw."""""" assert isinstance ( node, pytd. Parameter ) if isinstance ( node. type, pytd. GenericType ) : container_name = node. type. name. rpartition ( ""."" ) [ 2 ] assert container_name in ( ""tuple"", ""dict"" ) self. _typing_import_counts [ container_name. capitalize ( ) ] -= 1 if : self. _typing_import_counts [ ""Any"" ] -= 1 return node. Replace ( type = node. type. parameters [ - 1 ], optional = False ). Visit ( PrintVisitor ( ) ) else : return node. Replace ( type = pytd. AnythingType ( ), optional = False ). Visit ( PrintVisitor ( ) )",False,"isinstance(node.type.parameters[-1], pytd.AnythingType)","container_name in (tuple, dict)",0.6497973203659058 3592,"def remove ( self, conv_id ) : if self. bot. memory. exists ( [ ""convmem"", conv_id ] ) : _cached = self. bot. memory. get_by_path ( [ ""convmem"", conv_id ] ) if : logger. info ( ""removing conv: {} {}"". format ( conv_id, _cached [ ""title"" ] ) ) self. bot. memory. pop_by_path ( [ ""convmem"", conv_id ] ) del self. catalog [ conv_id ] else : logger. warning ( ""cannot remove conv: {} {} {}"". format ( _cached [ ""type"" ], conv_id, _cached [ ""title"" ] ) ) else : logger. 
warning ( ""cannot remove: {}, not found"". format ( conv_id ) ) self. bot. memory. save ( )",False,_cached['type'] == 'GROUP',_cached,0.65423583984375 3593,"def append ( self, * values ) : for value in values : if self. AcceptedType : assert isinstance ( value, self. AcceptedType ) self. _append ( value ) name = getattr ( value, ""Name"", None ) if : name = MakeAttributeName ( value. Name ) setattr ( self, name, value )",False,name,name is None,0.6858710050582886 3594,"def download_and_unpack ( target_dir, url ) : wget_args = ""-q -l 1 -N -nd -c -e robots=off -A tgz -r -np"" tgz_dir = os. path. join ( target_dir, ""tgz"" ) exit_code = download_multi ( url, tgz_dir, wget_args ) if exit_code!= 0 : print ( ""Download tgz audio files failed with exit code %d."" % exit_code ) else : print ( ""Download done, start unpacking..."" ) audio_dir = os. path. join ( target_dir, ""audio"" ) for root, dirs, files in os. walk ( tgz_dir ) : for file in files : print ( file ) if : unpack ( os. path. join ( root, file ), audio_dir )",False,file.endswith('.tgz'),os.path.isfile(file),0.6473191976547241 3595,"def get_profile_dir ( ) : """"""Return path where all profiles of current user are stored."""""" if os. name == ""nt"" : if : basedir = unicode ( os. environ [ ""LOCALAPPDATA"" ], nt_filename_encoding ) else : from.. winutil import get_shell_folder try : basedir = get_shell_folder ( ""Local AppData"" ) except EnvironmentError : basedir = os. path. join ( os. environ [ ""USERPROFILE"" ], ""Local Settings"", ""Application Data"" ) dirpath = os. path. join ( basedir, u""Google"", u""Chrome"", u""User Data"" ) elif os. name == ""posix"" : basedir = unicode ( os. environ [ ""HOME"" ] ) if sys. platform == ""darwin"" : dirpath = os. path. join ( basedir, u""Library"", u""Application Support"" ) else : dirpath = os. path. join ( basedir, u"".config"" ) dirpath = os. path. join ( dirpath, u""Google"", u""Chrome"" ) return dirpath",False,'LOCALAPPDATA' in os.environ,"hasattr(os, 'environ')",0.6537001132965088 3596,"def set_logger_config ( ) : config = _GLOBAL_CONTEXT [ ""config"" ] if config. get ( ""quiet"" ) : log. disabled = True else : log_format = config [ ""log_format"" ] logging. basicConfig ( format = log_format ) log. setLevel ( getattr ( logging, config [ ""log_level"" ] ) ) handlers = ( config [ ""log_handlers"" ]. keys ( ) if : else [ config [ ""log_handler"" ] ] ) for handler in handlers : handler_class = load_class_by_path ( handler ) handler_config = config [ ""log_handlers"" ]. get ( handler, { } ) handler_format = handler_config. pop ( ""format"", log_format ) handler_level = getattr ( logging, handler_config. pop ( ""level"", config [ ""log_level"" ] ) ) log_handler = handler_class ( ** handler_config ) formatter = logging. Formatter ( handler_format ) log_handler. setFormatter ( formatter ) log_handler. setLevel ( handler_level ) log. addHandler ( log_handler )",False,config['log_handlers'],config.get('log_handler') is None,0.6573575735092163 3597,"def _meter_worker ( qin, qout, meter, is_train, world_size, rank, filename ) : backend = ""gloo"" torch. distributed. init_process_group ( backend = backend, init_method = ""file://{filename}"". format ( filename = filename ), world_size = world_size, rank = rank, ) while True : try : signal, val = qin. get ( ) except queue. Empty : continue if : meter. update ( val [ 0 ], val [ 1 ], is_train = is_train ) elif signal == VALUE_SIGNAL : meter. sync_state ( ) qout. put ( meter. 
value ) elif signal == SHUTDOWN_SIGNAL : break else : raise NotImplementedError ( ""Bad signal value"" ) return",False,signal == UPDATE_SIGNAL,signal == SIGNAL_VALUE,0.6671662926673889 3598,"def check_syntax ( filename, raise_error = False ) : """"""Return True if syntax is okay."""""" with autopep8. open_with_encoding ( filename ) as input_file : try : compile ( input_file. read ( ), """", ""exec"", dont_inherit = True ) return True except ( SyntaxError, TypeError, UnicodeDecodeError ) : if : raise else : return False",True,raise_error,raise_error,0.6604596376419067 3599,"def export_sessions_csv_task ( self, event_id ) : sessions = db. session. query ( Session ). filter_by ( event_id = event_id ) try : filedir = os. path. join ( current_app. config. get ( ""BASE_DIR"" ), ""static/uploads/temp/"" ) if : os. makedirs ( filedir ) filename = f""sessions-{uuid.uuid1().hex}.csv"" file_path = os. path. join ( filedir, filename ) with open ( file_path, ""w"" ) as temp_file : writer = csv. writer ( temp_file ) from app. api. helpers. csv_jobs_util import export_sessions_csv content = export_sessions_csv ( sessions ) for row in content : writer. writerow ( row ) sessions_csv_file = UploadedFile ( file_path = file_path, filename = filename ) sessions_csv_url = upload ( sessions_csv_file, UPLOAD_PATHS [ ""exports-temp"" ] [ ""csv"" ]. format ( event_id = event_id, identifier = """" ), ) result = { ""download_url"" : sessions_csv_url } except Exception as e : result = { ""__error"" : True, ""result"" : str ( e ) } logging. exception ( ""Error in exporting sessions as CSV"" ) return result",False,not os.path.isdir(filedir),not os.path.exists(filedir),0.647394061088562 3600,"def testTokenizer ( ) : for filename in get_data_files ( ""tokenizer"", ""*.test"" ) : with open ( filename ) as fp : tests = json. load ( fp ) if : for index, test in enumerate ( tests [ ""tests"" ] ) : if ""initialStates"" not in test : test [ ""initialStates"" ] = [ ""Data state"" ] if ""doubleEscaped"" in test : test = unescape ( test ) if test [ ""input"" ] is None : continue for initialState in test [ ""initialStates"" ] : test [ ""initialState"" ] = capitalize ( initialState ) yield runTokenizerTest, test",False,'tests' in tests,len(tests) > 0,0.6723448038101196 3601,"def find_url_region ( view, event = None, pt = None ) : if event : pt = view. window_to_text ( ( event [ ""x"" ], event [ ""y"" ] ) ) line = view. line ( pt ) line. a = max ( line. a, pt - 1024 ) line. b = pt + 1024 text = view. substr ( line ) original_text = text text = text. replace ( CONTINUATION + ""\n"", """" ) for match in rex. finditer ( text ) : if match. start ( ) <= ( pt - line. a ) and match. end ( ) >= ( pt - line. a ) : a = match. start ( ) b = match. end ( ) for marker in re. finditer ( CONTINUATION + ""\n"", original_text ) : if : b += len ( CONTINUATION ) + 1 return ( line. a + a, line. a + b ) return None",False,a <= marker.start() and b >= marker.start(),marker,0.6513715982437134 3602,"def drawSelected ( self, qp ) : qp. setFont ( self. font ) cursorX, cursorY = self. cursor. getPosition ( ) if len ( self. OPCODES ) - 1 < cursorY : return asm = self. OPCODES [ cursorY ] _, width, text = asm. getSelectedToken ( cursorX ) for i, asm in enumerate ( self. OPCODES ) : for idx, length, value in asm. tokens ( ) : if cursorY == i and cursorX == idx : continue if : qp. setOpacity ( 0.4 ) brush = QtGui. QBrush ( QtGui. QColor ( 0, 255, 0 ) ) qp. fillRect ( idx * self. fontWidth, i * self. fontHeight + 2, width * self. fontWidth, self. 
fontHeight, brush, ) qp. setOpacity ( 1 )",False,value == text,value,0.6705994009971619 3603,"def get_ignored_test ( self ) : self. _cleanup ( ) for i in range ( 1, 10 ) : process_data = { u""colord"" : { u""memory_mb"" : u""11.3"", u""kb_read"" : u""-1.00"", u""cpu"" : u""25.00"", u""kb_write"" : u""-1.00"", }, u""acpid"" : { u""memory_mb"" : u""0.78"", u""kb_read"" : u""-1.00"", u""cpu"" : u""0.00"", u""kb_write"" : u""-1.00"", }, } process_model. save_data ( server = self. server, data = process_data, time = i ) result = process_model. get_ignored_process_check ( self. server, 9 ) for r in result : name = r. get ( ""name"" ) if : assert float ( r [ ""memory"" ] ) == 11.3 assert float ( r [ ""cpu"" ] ) == 25.00 self. _cleanup ( )",False,name == 'colord',name,0.6595348119735718 3604,"def _lemmatize ( self, word : str, tag : Optional [ str ] = None ) -> str : lemma = self. memo. get ( ( word, tag ) ) if lemma is not None : return lemma parses = self. analyzer. parse ( word ) best_lemma, best_distance = word, np. inf for i, parse in enumerate ( parses ) : curr_tag = self. converter ( str ( parse. tag ) ) distance = get_tag_distance ( tag, curr_tag ) for feat in self. RARE_FEATURES : if feat in parse. tag : distance += self. rare_grammeme_penalty break if len ( word ) == 1 and len ( parse. normal_form ) > 1 : distance += self. long_lemma_penalty if : best_lemma, best_distance = self. _extract_lemma ( parse ), distance if distance == 0 : break self. memo [ ( word, tag ) ] = best_lemma return best_lemma",False,distance < best_distance,distance > best_distance,0.6554915904998779 3605,"def _get_convergence_plans ( self, services, strategy, always_recreate_deps = False, one_off = None ) : plans = { } for service in services : updated_dependencies = [ name for name in service. get_dependency_names ( ) if : ] is_one_off = one_off and service. name in one_off if updated_dependencies and strategy. allows_recreate : log. debug ( ""%s has upstream changes (%s)"", service. name, "", "". join ( updated_dependencies ), ) containers_stopped = any ( service. containers ( stopped = True, filters = { ""status"" : [ ""created"", ""exited"" ] } ) ) service_has_links = any ( service. get_link_names ( ) ) container_has_links = any ( c. get ( ""HostConfig.Links"" ) for c in service. containers ( ) ) should_recreate_for_links = service_has_links ^ container_has_links if always_recreate_deps or containers_stopped or should_recreate_for_links : plan = service. convergence_plan ( ConvergenceStrategy. always, is_one_off ) <",False,"name in plans and plans[name].action in ('recreate', 'create')",plans,0.6556882858276367 3606,"def set_type ( self, line ) : if ""Type:"" in line : ele_type = line [ line. find ( ""Type:"" ) + len ( ""Type:"" ) : ] ele_type = ele_type. strip ( ) if ""Default"" in ele_type : poses = [ ele_type. find ( ""Default"" ), ele_type. find ( "".Default"" ), ele_type. find ( "". Default"" ), ele_type. find ( "",Default"" ), ele_type. find ( "", Default"" ), ] else : poses = [ max ( [ ele_type. find ( "". "" ), ele_type. find ( ""."" ), ele_type. find ( '.""' ) ] ), ele_type. find ( "" "" ), ele_type. find ( "", "" ), ] poses = [ ele for ele in poses if ele > 0 ] if : end_pos = min ( poses ) ele_type = ele_type [ : end_pos ] if ele_type : self. ele_type = ele_type",True,poses,poses,0.6933566331863403 3607,"def distinct ( expr, * on ) : fields = frozenset ( expr. fields ) _on = [ ] append = _on. append for n in on : if isinstance ( n, Field ) : if : n = n. 
_name else : raise ValueError ( ""{0} is not a field of {1}"". format ( n, expr ) ) if not isinstance ( n, _strtypes ) : raise TypeError ( ""on must be a name or field, not: {0}"". format ( n ) ) elif n not in fields : raise ValueError ( ""{0} is not a field of {1}"". format ( n, expr ) ) append ( n ) return Distinct ( expr, tuple ( _on ) )",False,n._child.isidentical(expr),n._name,0.6504287719726562 3608,"def process ( self, resources ) : client = local_session ( self. manager. session_factory ). client ( ""ecs"" ) update = self. data. get ( ""update"" ) for r in resources : param = { } net_update = update. get ( ""networkConfiguration"", { } ). get ( ""awsvpcConfiguration"" ) if : net_param = dict ( r [ ""networkConfiguration"" ] [ ""awsvpcConfiguration"" ] ) param [ ""networkConfiguration"" ] = { ""awsvpcConfiguration"" : net_param } for k, v in net_update. items ( ) : net_param [ k ] = v for k, v in update. items ( ) : if k == ""networkConfiguration"" : continue elif r. get ( k )!= v : param [ k ] = v if not param : continue client. update_service ( cluster = r [ ""clusterArn"" ], service = r [ ""serviceName"" ], ** param )",True,net_update,net_update,0.6756992340087891 3609,"def get ( quality_name ) : """"""Returns a quality object based on canonical quality name."""""" found_components = { } for part in quality_name. lower ( ). split ( ) : component = _registry. get ( part ) if not component : raise ValueError ( ""`%s` is not a valid quality string"" % part ) if : raise ValueError ( ""`%s` cannot be defined twice in a quality"" % component. type ) found_components [ component. type ] = component if not found_components : raise ValueError ( ""No quality specified"" ) result = Quality ( ) for type, component in found_components. items ( ) : setattr ( result, type, component ) return result",False,component.type in found_components,component.type in _registry.get_components(),0.6544457077980042 3610,"def get_vrf_tables ( self, vrf_rf = None ) : vrf_tables = { } for ( scope_id, table_id ), table in self. _tables. items ( ) : if : continue if vrf_rf is not None and table_id!= vrf_rf : continue vrf_tables [ ( scope_id, table_id ) ] = table return vrf_tables",True,scope_id is None,scope_id is None,0.6559327840805054 3611,"def make_script_substitutions_in_headline ( self, p ) : """"""Make scripting substitutions in p.h."""""" c = self. c pattern = re. compile ( ""^(.*)%s(.+)%s(.*)$"" % ( re. escape ( c. abbrev_subst_start ), re. escape ( c. abbrev_subst_end ), ) ) changed = False m = pattern. match ( p. h ) if m : content = m. group ( 2 ) c. abbrev_subst_env [ ""x"" ] = """" try : exec ( content, c. abbrev_subst_env, c. abbrev_subst_env ) x = c. abbrev_subst_env. get ( ""x"" ) if : p. h = ""%s%s%s"" % ( m. group ( 1 ), x, m. group ( 3 ) ) changed = True except Exception : g. trace ( ""scripting error in"", p. h ) g. es_exception ( ) return changed",True,x,x,0.6866368055343628 3612,"def _get_mpl_patches ( json_content, transform = None, invert_color = False, ** kwargs ) : """"""Walks over the json content and builds a list of matplotlib patches"""""" mpl_patches = [ ] kwargs_edgecolor = kwargs. pop ( ""edgecolor"", None ) kwargs_linewidth = kwargs. 
pop ( ""linewidth"", None ) for path in json_content [ ""paths"" ] : if : edgecolor = kwargs_edgecolor else : edgecolor = path [ ""edgecolor"" ] if invert_color : edgecolor = _invert_color ( edgecolor ) linewidth = kwargs_linewidth or path [ ""linewidth"" ] path_id = path [ ""id"" ] for item in path [ ""items"" ] : type = item [ ""type"" ] pts = item [ ""pts"" ] codes = _codes ( type, pts ) path = Path ( pts, codes ) patch = patches. PathPatch ( path, edgecolor = edgecolor, linewidth = linewidth, facecolor = ""none"", gid = path_id, transform = transform, ** kwargs ) mpl_patches. append ( patch ) return mpl_patches",False,kwargs_edgecolor is not None,"isinstance(path, Path)",0.665928065776825 3613,"def project ( hidden_states, proj_layer, key_value_states, past_key_value ) : """"""projects hidden states correctly to key/query states"""""" if : hidden_states = shape ( proj_layer ( hidden_states ) ) elif past_key_value is None : hidden_states = shape ( proj_layer ( key_value_states ) ) if past_key_value is not None : if : hidden_states = tf. concat ( [ past_key_value, hidden_states ], axis = 2 ) else : hidden_states = past_key_value return hidden_states",True,key_value_states is None,key_value_states is None,0.6521288156509399 3614,"def get ( self, name ) : entry, wb_class = self. _get_obj_entry ( name ) if entry is not None : if self. _manifest_entry_is_artifact_reference ( entry ) : artifact = self. _get_ref_artifact_from_entry ( entry ) return artifact. get ( util. uri_from_path ( entry. ref ) ) if : self. download ( recursive = True ) item = self. get_path ( entry. path ) item_path = item. download ( ) result = None json_obj = { } with open ( item_path, ""r"" ) as file : json_obj = json. load ( file ) result = wb_class. from_json ( json_obj, self ) result. set_artifact_source ( self, name ) return result",False,wb_class == wandb.Table,entry.path is None,0.6618028283119202 3615,"def collect ( self, paths ) : for path in paths or ( ) : relpath = os. path. relpath ( path, self. _artifact_root ) dst = os. path. join ( self. _directory, relpath ) safe_mkdir ( os. path. dirname ( dst ) ) if : shutil. copytree ( path, dst ) else : shutil. copy ( path, dst ) self. _relpaths. add ( relpath )",False,os.path.isdir(path),os.path.isdir(dst),0.6449061036109924 3616,"def __get__ ( self, instance, owner = None ) : if instance is None : return self if self. attrname is None : raise TypeError ( ""Cannot use cached_property instance without calling __set_name__ on it."" ) try : cache = instance. __dict__ except AttributeError : msg = ( f""No '__dict__' attribute on {type(instance).__name__!r} "" f""instance to cache {self.attrname!r} property."" ) raise TypeError ( msg ) from None val = cache. get ( self. attrname, _NOT_FOUND ) if : with self. lock : val = cache. get ( self. attrname, _NOT_FOUND ) if : val = self. func ( instance ) try : cache [ self. attrname ] = val except TypeError : msg = ( f""The '__dict__' attribute on {type(instance).__name__!r} instance "" f""does not support item assignment for caching {self.attrname!r} property."" ) raise TypeError ( msg ) from None return val",True,val is _NOT_FOUND,val is _NOT_FOUND,0.659093976020813 3617,"def _cleanup_inactive_receivexlogs ( self, site ) : if site in self. receivexlogs : if not self. receivexlogs [ site ]. running : if : self. receivexlogs [ site ]. join ( ) del self. receivexlogs [ site ]",False,self.receivexlogs[site].is_alive(),self.receivexlogs[site].join,0.6542024612426758 3618,"def _query ( self ) : if self. _mongo_query is None : self. 
_mongo_query = self. _query_obj. to_query ( self. _document ) if self. _cls_query : if : self. _mongo_query = { ""$and"" : [ self. _cls_query, self. _mongo_query ] } else : self. _mongo_query. update ( self. _cls_query ) return self. _mongo_query",False,'_cls' in self._mongo_query,"hasattr(self._mongo_query, '__and__')",0.661358118057251 3619,"def process_path ( self, subpath, inner_schema_obj, nums, offsets, squeeze_dims ) : """"""Checks if a subpath is valid or not. Does not repeat computation done in a previous ObjectView object"""""" paths = subpath. split ( ""/"" ) [ 1 : ] try : if : if isinstance ( inner_schema_obj, Sequence ) : schema_obj = inner_schema_obj. dtype. dict_ [ paths [ 0 ] ] elif isinstance ( inner_schema_obj, SchemaDict ) : schema_obj = inner_schema_obj. dict_ [ paths [ 0 ] ] else : raise KeyError ( ) else : schema_obj = self. schema [ paths [ 0 ] ] except ( KeyError, AttributeError ) : raise KeyError ( f""{paths[0]} is an invalid key"" ) self. num_process ( schema_obj, nums, offsets, squeeze_dims ) for path in paths [ 1 : ] : try : if isinstance ( schema_obj, Sequence ) : schema_obj = schema_obj. dtype. dict_ [ path ] elif isinstance ( schema_obj, SchemaDict ) : schema_obj = schema_obj. dict_ [ path ] else : raise KeyError ( ) self. num_process ( schema_obj, nums, offsets, squeeze_dims ) except ( KeyError, AttributeError ) : raise KeyError ( f""{path} is an invalid key"" ) return schema_",False,inner_schema_obj,len(paths) > 0,0.6610968112945557 3620,"def create_snapshot ( self, snapshot ) : LOG. debug ( ""Create Snapshot\n%s"", pprint. pformat ( snapshot ) ) try : snap_name = self. _get_3par_snap_name ( snapshot [ ""id"" ] ) vol_name = self. _get_3par_vol_name ( snapshot [ ""volume"" ] ) extra = { ""volume_name"" : snapshot [ ""volume_name"" ], ""volume_id"" : snapshot. get ( ""volume_id"" ), } self. _add_name_id_to_comment ( extra, snapshot [ ""volume"" ] ) try : extra [ ""display_name"" ] = snapshot [ ""display_name"" ] except AttributeError : pass try : extra [ ""description"" ] = snapshot [ ""display_description"" ] except AttributeError : pass optional = { ""comment"" : json. dumps ( extra ), ""readOnly"" : True } if self. config. hpe3par_snapshot_expiration : optional [ ""expirationHours"" ] = int ( self. config. hpe3par_snapshot_expiration ) if : optional [ ""retentionHours"" ] = int ( self. config. hpe3par_snapshot_retention ) self. client. createSnapshot ( snap_name, vol_name, optional ) except hpeexceptions. HTTPForbidden as ex : LOG. error ( ""Exception: %s"", ex ) raise exception. NotAuthorized ( ) <",True,self.config.hpe3par_snapshot_retention,self.config.hpe3par_snapshot_retention,0.6487696170806885 3621,"def _highlight_do ( self ) : new_hl_text = self. highlight_text. text ( ) if new_hl_text!= self. hl_text : self. hl_text = new_hl_text if : self. hl. setDocument ( None ) self. hl = None if self. hl_text : self. hl = Highlighter ( self. hl_text, parent = self. doc ) self. clear_highlight_button. setEnabled ( bool ( self. hl ) )",False,self.hl is not None,self.hl,0.6611143350601196 3622,"def chopCobraUri ( uri ) : purl = urllib. parse. urlparse ( uri ) scheme = purl. scheme host = purl. hostname name = purl. path. strip ( ""/"" ) port = purl. port if not port : port = COBRA_PORT urlparams = { } for urlopt in purl. query. split ( ""&"" ) : urlval = 1 if : urlopt, urlval = urlopt. split ( ""="", 1 ) urlopt = urlopt. 
lower ( ) urlparams [ urlopt ] = urlval return scheme, host, port, name, urlparams",False,urlopt.find('=') != -1,urlopt,0.6585469245910645 3623,"def _get_pod_to_ep ( self, service_type ) : query = ( self. _pool. spawn ( self. _client. list_namespaced_pod, namespace = self. _k8s_namespace, label_selector = self. _get_label_selector ( service_type ), ) . result ( ) . to_dict ( ) ) result = dict ( ) for el in query [ ""items"" ] : name, pod_ep = self. _extract_pod_name_ep ( el ) if : pod_ep = None result [ name ] = pod_ep return result",False,pod_ep is not None and (not self._extract_pod_ready(el)),pod_ep is not None,0.6557104587554932 3624,"def after_process_message ( self, broker, message, *, result = None, exception = None ) : from.. message import Message if exception is None : group_completion_uuid = message. options. get ( ""group_completion_uuid"" ) group_completion_callbacks = message. options. get ( ""group_completion_callbacks"" ) if : barrier = Barrier ( self. rate_limiter_backend, group_completion_uuid, ttl = GROUP_CALLBACK_BARRIER_TTL, ) if barrier. wait ( block = False ) : for message in group_completion_callbacks : broker. enqueue ( Message ( ** message ) )",True,group_completion_uuid and group_completion_callbacks,group_completion_uuid and group_completion_callbacks,0.653144121170044 3625,"def fitting ( self, value ) : self. _fitting = value if self. _fitting is not None : if not os. path. exists ( dirname ( self. checkpoint_path ( ) ) ) : try : os. makedirs ( dirname ( self. checkpoint_path ( ) ) ) except FileExistsError as ex : pass if : try : os. makedirs ( dirname ( self. tensorboard_path ( ) ) ) except FileExistsError as ex : pass",False,not os.path.exists(dirname(self.tensorboard_path())),self.tensorboard_path() is not None,0.6508133411407471 3626,"def setup_logger ( ) : """"""Set up logger and add stdout handler"""""" logging. setLoggerClass ( IPDLogger ) logger = logging. getLogger ( ""icloudpd"" ) has_stdout_handler = False for handler in logger. handlers : if : has_stdout_handler = True if not has_stdout_handler : formatter = logging. Formatter ( fmt = ""%(asctime)s %(levelname)-8s %(message)s"", datefmt = ""%Y-%m-%d %H:%M:%S"" ) stdout_handler = logging. StreamHandler ( stream = sys. stdout ) stdout_handler. setFormatter ( formatter ) stdout_handler. name = ""stdoutLogger"" logger. addHandler ( stdout_handler ) return logger",False,handler.name == 'stdoutLogger',handler.hasHandlers(),0.6605055332183838 3627,"def pollpacket ( self, wait ) : self. _stage0 ( ) if len ( self. buffer ) < self. bufneed : r, w, x = select. select ( [ self. sock. fileno ( ) ], [ ], [ ], wait ) if : return None try : s = self. sock. recv ( BUFSIZE ) except socket. error : raise EOFError if len ( s ) == 0 : raise EOFError self. buffer += s self. _stage0 ( ) return self. _stage1 ( )",False,len(r) == 0,len(self.sock) == 0,0.6551221609115601 3628,"def _run_split_on_punc ( self, text, never_split = None ) : """"""Splits punctuation on a piece of text."""""" if never_split is not None and text in never_split : return [ text ] chars = list ( text ) i = 0 start_new_word = True output = [ ] while i < len ( chars ) : char = chars [ i ] if : output. append ( [ char ] ) start_new_word = True else : if start_new_word : output. append ( [ ] ) start_new_word = False output [ - 1 ]. append ( char ) i += 1 return [ """". 
join ( x ) for x in output ]",True,_is_punctuation(char),_is_punctuation(char),0.649880051612854 3629,"def parse_move ( self, node ) : old, new = """", """" for child in node : tag, text = child. tag, child. text text = text. strip ( ) if text else None if : old = text elif tag == ""New"" and text : new = text return Move ( old, new )",True,tag == 'Old' and text,tag == 'Old' and text,0.657818078994751 3630,"def calculate_length ( segments, min_segment_length, max_segment_length ) : current_point = segments [ 0 ] index = 1 total = 0 any_change = False while index < len ( segments ) : next_point = segments [ index ] distance = distance_between ( current_point, next_point ) if distance < min_segment_length and 1 < index < ( len ( segments ) - 2 ) : any_change = True current_point = ( current_point + next_point ) / 2 total += 1 index += 2 elif : any_change = True points = int ( ceil ( distance / ( ( max_segment_length + min_segment_length ) / 2 ) ) ) total += points current_point = next_point index += 1 else : total += 1 current_point = next_point index += 1 total += 1 return any_change, total",False,distance > max_segment_length,distance > max_segment_length and current_point + next_point,0.6548234224319458 3631,"def set_new_lr ( step_num, batch_id ) : """"""set new learning rate"""""" if accumulate : if : step_num += 1 else : step_num += 1 if step_num < num_warmup_steps : new_lr = lr * step_num / num_warmup_steps else : offset = ( ( step_num - num_warmup_steps ) * lr / ( num_train_steps - num_warmup_steps ) ) new_lr = lr - offset trainer. set_learning_rate ( new_lr ) return step_num",False,batch_id % accumulate == 0,step_num < num_train_steps,0.6660550832748413 3632,"def filter ( self, left, operator, right ) : if operator == ""and"" : self. filter ( left. left, left. operator, left. right ) self. filter ( right. left, right. operator, right. right ) else : left_base = left. split ( ""."" ) [ 0 ] if left_base in self. FIELDS : self. do_query = True field = self. FIELDS [ left_base ] if field. sqlalchemy_field is not None : clazz, attribute = field. sqlalchemy_field sqlalchemy_field_value = getattr ( clazz, attribute ) if : self. query = self. query. filter ( sqlalchemy_field_value == right ) elif operator == ""!="" : self. query = self. query. filter ( sqlalchemy_field_value!= right ) elif operator == ""like"" : self. query = self. query. filter ( sqlalchemy_field_value. like ( right ) ) else : raise GalaxyParseError ( ""Invalid comparison operator: %s"" % ( operator ) ) elif field. handler is not None : field. handler ( self, left, operator, right ) elif field. post_filter is not None : ",False,operator == '=',operator == '==',0.6886146664619446 3633,"def _get ( self, domain ) : with self. lock : try : record = self. cache [ domain ] time_now = time. time ( ) if : record = None except KeyError : record = None if not record : record = { ""r"" : ""unknown"", ""dns"" : { }, ""g"" : 1, ""query_count"" : 0 } return record",False,time_now - record['update'] > self.ttl,time_now - self.time_of > TAB_TIMEOUT,0.6601200103759766 3634,"def callback ( lexer, match, context ) : text = match. group ( ) if context. block_scalar_indent is None or len ( text ) <= context. block_scalar_indent : if : yield match. start ( ), indent_token_class, text else : indentation = text [ : context. block_scalar_indent ] content = text [ context. block_scalar_indent : ] yield match. start ( ), indent_token_class, indentation yield ( match. start ( ) + context. block_scalar_indent, content_token_class, content, ) context. 
pos = match. end ( )",False,text,context.block_scalar_indent == 0,0.6741372346878052 3635,"def _fullSync ( self ) : if self. col. isEmpty ( ) : f = ""download"" else : self. fullSyncChoice = False self. fireEvent ( ""fullSync"" ) while not self. fullSyncChoice : time. sleep ( 0.1 ) f = self. fullSyncChoice if f == ""cancel"" : return self. client = FullSyncer ( self. col, self. hkey, self. server. con ) if f == ""upload"" : if : self. fireEvent ( ""upbad"" ) else : self. client. download ( ) self. col. reopen ( ) self. _syncMedia ( )",False,not self.client.upload(),self.client.close(),0.6534979343414307 3636,"def test_wrap ( self ) : none = """" stdout_log = os. path. join ( self. test_dir, ""stdout.log"" ) stderr_log = os. path. join ( self. test_dir, ""stderr.log"" ) redirs = [ ( none, none ), ( none, stderr_log ), ( stdout_log, none ), ( stdout_log, stderr_log ), ] for stdout_redir, stderr_redir in redirs : queue = multiprocessing. SimpleQueue ( ) _wrap ( local_rank = 0, fn = echo1, args = { 0 : ( ""hello"", ) }, envs = { 0 : { ""RANK"" : ""0"" } }, stdout_redirects = { 0 : stdout_redir }, stderr_redirects = { 0 : stderr_redir }, ret_vals = { 0 : queue }, ) self. assertEqual ( ""hello_0"", queue. get ( ) ) if : self. assert_in_file ( [ ""hello stdout from 0"" ], stdout_log ) if stderr_redir : self. assert_in_file ( [ ""hello stderr from 0"" ], stderr_log )",True,stdout_redir,stdout_redir,0.6591173410415649 3637,"def test_02_profiler ( self ) : self. client. get ( ""/api/people/foo"" ) self. client. get ( ""/api/people/foo"" ) self. client. get ( ""/api/with/profiler/hello?q=2"" ) measurements = list ( flask_profiler. collection. filter ( ) ) self. assertEqual ( len ( measurements ), 3 ) test_flag = False for list_element in measurements : if : test_flag = True self. assertEqual ( list_element [ ""name"" ], ""/api/with/profiler/"" ) self. assertEqual ( list_element [ ""method"" ], ""GET"" ) self. assertEqual ( list_element [ ""kwargs"" ], { ""message"" : ""hello"" } ) self. assertEqual ( list_element [ ""context"" ] [ ""args"" ], { ""q"" : ""2"" } ) self. assertEqual ( True, test_flag )",False,list_element['name'] == '/api/with/profiler/',list_element[0],0.6491895914077759 3638,"def get_complete_position ( self, context : UserContext ) -> int : for prefix_pattern in convert2list ( self. get_filetype_var ( context [ ""filetype"" ], ""prefix_patterns"" ) ) : m = re. search ( self. _object_pattern + prefix_pattern + r""\w*$"", context [ ""input"" ] ) if : continue self. _prefix = re. sub ( r""\w*$"", """", m. group ( 0 ) ) m = re. search ( r""\w*$"", context [ ""input"" ] ) if m : return m. start ( ) return - 1",False,m is None or prefix_pattern == '',m and m.group(0) > 0,0.650195300579071 3639,"def _try_except_filenotfounderror ( try_func, except_func ) : if sys. version_info >= ( 3, 3 ) : try : try_func ( ) except FileNotFoundError as exc : except_func ( exc ) elif os. name!= ""nt"" : try : try_func ( ) except EnvironmentError as exc : if exc. errno!= ENOENT : raise else : except_func ( exc ) else : try : try_func ( ) except WindowsError as exc : if : raise else : except_func ( exc ) except EnvironmentError as exc : if exc. errno!= ENOENT : raise else : except_func ( exc )",False,"exc.errno not in (2, 3)",exc.errno != errno.ENOENT,0.6522950530052185 3640,"def handle ( self, * app_labels, ** options ) : include_deployment_checks = options [ ""deploy"" ] if options [ ""list_tags"" ] : self. stdout. write ( ""\n"". join ( sorted ( registry. 
tags_available ( include_deployment_checks ) ) ) ) return if app_labels : app_configs = [ apps. get_app_config ( app_label ) for app_label in app_labels ] else : app_configs = None tags = options [ ""tags"" ] if tags : try : invalid_tag = next ( tag for tag in tags if : ) except StopIteration : pass else : raise CommandError ( 'There is no system check with the ""%s"" tag.' % invalid_tag ) self. check ( app_configs = app_configs, tags = tags, display_num_errors = True, include_deployment_checks = include_deployment_checks, fail_level = getattr ( checks, options [ ""fail_level"" ] ), )",False,"not checks.tag_exists(tag, include_deployment_checks)",invalid_tag,0.6475452184677124 3641,"def internal_gen ( ) : current_epoch = ( self. epoch - 1 ) % self. config. n_epochs + 1 it = iter ( gen ) if train : if : desc = ""Initialization Epoch {}/{}"". format ( current_epoch, self. config. n_epochs ) else : desc = ""Epoch {}/{}"". format ( current_epoch, self. config. n_epochs ) else : desc = ""Validation"" for _, i in zip ( range ( self. _skip_tqdm ), it ) : yield i for i in ProgressBar ( it, desc = desc, total = total, miniters = 1, leave = current_epoch == self. config. n_epochs and train, update_hook = update_hook, silent = self. config. debugging_logs, ) : yield i if train : self. epoch += 1",False,self.config.prefit_init and self.epoch <= self.config.n_epochs,current_epoch == self.config.n_epochs,0.6488403081893921 3642,"def __recv_null ( self ) : """"""Receive a null byte."""""" while 1 : c = self. sock. recv ( 1 ) if c == """" : self. close ( ) raise EOFError ( ""Socket Closed"" ) if : return",False,c == '\x00',c != '',0.6582741737365723 3643,"def sample_pos_items_for_u ( u, num ) : pos_items = self. train_items [ u ] n_pos_items = len ( pos_items ) pos_batch = [ ] while True : if len ( pos_batch ) == num : break pos_id = np. random. randint ( low = 0, high = n_pos_items, size = 1 ) [ 0 ] pos_i_id = pos_items [ pos_id ] if : pos_batch. append ( pos_i_id ) return pos_batch",False,pos_i_id not in pos_batch,np.random.random() < 0.5,0.6526123285293579 3644,"def get_report_to_platform ( self, args, scan_reports ) : if self. bc_api_key : if : repo_id = self. get_repository ( args ) self. setup_bridgecrew_credentials ( bc_api_key = self. bc_api_key, repo_id = repo_id ) if self. is_integration_configured ( ) : self. _upload_run ( args, scan_reports )",False,args.directory,self.is_bridgecrew_configured(),0.6583524346351624 3645,"def getSinglePairs ( self ) : for pairPos in self. pairPosList : if pairPos. Format == 1 : firstGlyphsList = pairPos. Coverage. glyphs for pairSetIndex, pairSetInstance in enumerate ( pairPos. PairSet ) : for pairValueRecordItem in pairPos. PairSet [ pairSetIndex ]. PairValueRecord : secondGlyph = pairValueRecordItem. SecondGlyph valueFormat = pairPos. ValueFormat1 if valueFormat == 5 : kernValue = ""<%d 0 %d 0>"" % ( pairValueRecordItem. Value1. XPlacement, pairValueRecordItem. Value1. XAdvance, ) elif valueFormat == 0 : kernValue = ""<0 0 0 0>"" elif : kernValue = pairValueRecordItem. Value1. XAdvance else : print ( ""\tValueFormat1 = %d"" % valueFormat, file = sys. stdout ) <",False,valueFormat == 4,valueFormat == 6,0.66883385181427 3646,"def parseMessage ( self, toAddress, fromAddress, subject, message ) : super ( GatewayAccount, self ). parseMessage ( toAddress, fromAddress, subject, message ) if fromAddress == self. relayAddress : matches = self. regExpIncoming. search ( subject ) if not matches is None : self. subject = """" if not matches. group ( 1 ) is None : self. 
subject += matches. group ( 1 ) if not matches. group ( 3 ) is None : self. subject += matches. group ( 3 ) if : self. fromLabel = matches. group ( 2 ) self. fromAddress = matches. group ( 2 ) if toAddress == self. relayAddress : matches = self. regExpOutgoing. search ( subject ) if not matches is None : if : self. subject = matches. group ( 2 ) if not matches. group ( 1 ) is None : self. toLabel = matches. group ( 1 ) self. toAddress = matches. group ( 1 )",False,not matches.group(2) is None,len(matches) == 3,0.6473790407180786 3647,"def test_fuzz_required ( ) : for _ in range ( 1000 ) : n_hidden = random. randint ( 10, 100 ) n_in = random. randint ( 1, 10 ) n_out = random. randint ( 1, 10 ) nodes = list ( set ( random. randint ( 0, 1000 ) for _ in range ( n_in + n_out + n_hidden ) ) ) random. shuffle ( nodes ) inputs = nodes [ : n_in ] outputs = nodes [ n_in : n_in + n_out ] connections = [ ] for _ in range ( n_hidden * 2 ) : a = random. choice ( nodes ) b = random. choice ( nodes ) if : continue if a in inputs and b in inputs : continue if a in outputs and b in outputs : continue connections. append ( ( a, b ) ) required = required_for_output ( inputs, outputs, connections ) for o in outputs : assert o in required",False,a == b,b in outputs,0.6708201766014099 3648,"def _add_defaults_data_files ( self ) : if self. distribution. has_data_files ( ) : for item in self. distribution. data_files : if : item = convert_path ( item ) if os. path. isfile ( item ) : self. filelist. append ( item ) else : dirname, filenames = item for f in filenames : f = convert_path ( f ) if os. path. isfile ( f ) : self. filelist. append ( f )",False,"isinstance(item, str)","isinstance(item, basestring)",0.6552574634552002 3649,"def expected_forward_without_reduce ( self, x_data, t_data, class_weight ) : x = numpy. rollaxis ( x_data, 1, x_data. ndim ). reshape ( ( t_data. size, x_data. shape [ 1 ] ) ) t = t_data. ravel ( ) loss_shape = x_data. shape [ 0 : 1 ] + x_data. shape [ 2 : ] loss_expect = numpy. zeros ( loss_shape, x_data. dtype ) for i, ( ti, loss_idx ) in enumerate ( zip ( t, numpy. ndindex ( * loss_shape ) ) ) : xi = x [ i ] if : continue log_z = numpy. ufunc. reduce ( numpy. logaddexp, xi ) if class_weight is None : loss_expect [ loss_idx ] = - ( xi - log_z ) [ ti ] else : loss_expect [ loss_idx ] = - ( xi - log_z ) [ ti ] * class_weight [ ti ] return numpy. asarray ( loss_expect, dtype = x. dtype )",False,ti == -1,numpy.abs(xi) > 0.0,0.6640071272850037 3650,"def handler ( chan, host, port ) : sock = socket ( ) try : sock. connect ( ( host, port ) ) except Exception as e : if verbose == True : print ( e ) return while True : r, w, x = select. select ( [ sock, chan ], [ ], [ ] ) if sock in r : data = sock. recv ( 1024 ) if len ( data ) == 0 : break chan. send ( data ) if : data = chan. recv ( 1024 ) if len ( data ) == 0 : break sock. send ( data ) chan. close ( ) sock. close ( )",False,chan in r,sock in r,0.6814488172531128 3651,"def extend ( self, tasks ) : """"""Add tasks to this particular shovel"""""" self. _tasks. extend ( tasks ) for task in tasks : current = self. map modules = task. fullname. split ( ""."" ) for module in modules [ : - 1 ] : if : logger. warn ( ""Overriding task %s with a module"" % current [ module ]. file ) shovel = Shovel ( ) shovel. overrides = current [ module ] current [ module ] = shovel current = current [ module ]. map name = modules [ - 1 ] if name in current : logger. warn ( ""Overriding %s with %s"" % ( ""."". join ( modules ), task. file ) ) task. 
overrides = current [ name ] current [ name ] = task",False,"not isinstance(current[module], Shovel)",module in current,0.6566530466079712 3652,"def init ( self, * args, ** kwargs ) : if ""_state"" not in kwargs : state = { } for arg in ( ""children"", ""windowState"", ""detachedPanels"" ) : if : state [ arg ] = kwargs [ arg ] del kwargs [ arg ] if state : kwargs [ ""_state"" ] = state originalInit ( self, * args, ** kwargs )",True,arg in kwargs,arg in kwargs,0.677591860294342 3653,"def update_render_status ( self ) : Gdk. threads_enter ( ) if mltxmlheadless. session_render_complete ( self. get_container_program_id ( ) ) == True : job_msg = self. get_completed_job_message ( ) jobs. update_job_queue ( job_msg ) GLib. idle_add ( self. create_producer_and_do_update_edit, None ) else : status = mltxmlheadless. get_session_status ( self. get_container_program_id ( ) ) if : fraction, elapsed = status if self. container_data. render_data. do_video_render == True : msg = _ ( ""Rendering Video"" ) elif step == ""2"" : msg = _ ( ""Rendering Image Sequence"" ) job_msg = self. get_job_queue_message ( ) job_msg. progress = float ( fraction ) job_msg. elapsed = float ( elapsed ) job_msg. text = msg jobs. update_job_queue ( job_msg ) else : pass Gdk. threads_leave ( )",False,status != None,status != 'ACTIVE',0.6671301126480103 3654,"def search ( path, prefix = """" ) : loader = TestLoader ( ) for _, name, is_pkg in iter_modules ( path ) : full_name = ""{}.{}"". format ( prefix, name ) module_path = os. path. join ( path [ 0 ], name ) if is_pkg : search ( [ module_path ], full_name ) if : test_module = import_module ( full_name ) for suite in loader. loadTestsFromModule ( test_module ) : for test in suite. _tests : path = ""{}.{}.{}"". format ( full_name, test. __class__. __name__, test. _testMethodName ) rec = { ""ver"" : ""1.0"", ""execution"" : { ""command"" : ""python -m unittest {}"". format ( path ), ""recording"" : find_recording_file ( path ), }, ""classifier"" : { ""identifier"" : path, ""type"" : get_test_type ( test",False,not is_pkg and name.startswith('test'),"hasattr(loader, 'loadTests')",0.6488199830055237 3655,"def visit_Assign ( self, node ) : for i in node. targets : err = _not_assignable ( i ) if : msg = ""can't assign to {}"". format ( err ) self. error = msg, i. lineno, i. col_offset break",False,err is not None,err,0.666167676448822 3656,"def _maybeRebuildAtlas ( self, threshold = 4, minlen = 1000 ) : n = len ( self. fragmentAtlas ) if ( n > minlen ) and ( n > threshold * len ( self. data ) ) : self. fragmentAtlas. rebuild ( list ( zip ( * self. _style ( [ ""symbol"", ""size"", ""pen"", ""brush"" ] ) ) ) ) self. data [ ""sourceRect"" ] = 0 if : self. _sourceQRect. clear ( ) self. updateSpots ( )",False,_USE_QRECT,self._sourceQRect.size() > 0,0.6639128923416138 3657,"def splitProcess ( path, mode ) : output = [ ] currentSize = 0 currentTarget = path if options. webtoon : targetSize = 104857600 else : targetSize = 419430400 if options. batchsplit == 2 and mode == 2 : mode = 3 if mode < 3 : for root, dirs, files in walkLevel ( path, 0 ) : for name in files if mode == 1 else dirs : if mode == 1 : size = os. path. getsize ( os. path. join ( root, name ) ) else : size = getDirectorySize ( os. path. join ( root, name ) ) if currentSize + size > targetSize : currentTarget, pathRoot = createNewTome ( ) output. append ( pathRoot ) currentSize = size else : currentSize += size if path!= currentTarget : move ( os. path. join ( root, name ), os. path. 
join ( currentTarget, name ) ) else : firstTome = True for root, dirs, _ in walkLevel ( path, 0 ) : for name in dirs : if : currentTarget, pathRoot = createNewTome",False,not firstTome,firstTome,0.6656038761138916 3658,"def native_color ( c ) : try : color = CACHE [ c ] except KeyError : if : c = NAMED_COLOR [ c ] color = Color. FromArgb ( int ( c. rgba. a * 255 ), int ( c. rgba. r ), int ( c. rgba. g ), int ( c. rgba. b ) ) CACHE [ c ] = color return color",False,"isinstance(c, str)",c >= len(NAMED_COLOR),0.6585873961448669 3659,"def fmt ( console : Console, targets : HydratedTargets ) -> Fmt : results = yield [ Get ( FmtResult, FmtTarget, target. adaptor ) for target in targets if isinstance ( target. adaptor, ( PythonAppAdaptor, PythonTargetAdaptor, PythonTestsAdaptor, PythonBinaryAdaptor, ), ) and hasattr ( target. adaptor, ""sources"" ) ] for result in results : files_content = yield Get ( FilesContent, Digest, result. digest ) for file_content in files_content : with Path ( get_buildroot ( ), file_content. path ). open ( ""wb"" ) as f : f. write ( file_content. content ) if : console. print_stdout ( result. stdout ) if result. stderr : console. print_stderr ( result. stderr ) exit_code = 0 yield Fmt ( exit_code )",True,result.stdout,result.stdout,0.6825716495513916 3660,"def expect_block_mapping_key ( self, first = False ) : if not first and isinstance ( self. event, MappingEndEvent ) : self. indent = self. indents. pop ( ) self. state = self. states. pop ( ) else : self. write_indent ( ) if : self. states. append ( self. expect_block_mapping_simple_value ) self. expect_node ( mapping = True, simple_key = True ) else : self. write_indicator ( u""?"", True, indention = True ) self. states. append ( self. expect_block_mapping_value ) self. expect_node ( mapping = True )",False,self.check_simple_key(),first,0.6507017016410828 3661,"def _url_encode_impl ( obj, charset, encode_keys, sort, key ) : from. datastructures import iter_multi_items iterable = iter_multi_items ( obj ) if sort : iterable = sorted ( iterable, key = key ) for key, value in iterable : if : continue if not isinstance ( key, bytes ) : key = text_type ( key ). encode ( charset ) if not isinstance ( value, bytes ) : value = text_type ( value ). encode ( charset ) yield _fast_url_quote_plus ( key ) + ""="" + _fast_url_quote_plus ( value )",True,value is None,value is None,0.6580206155776978 3662,"def get_waiting ( self ) -> List [ str ] : tasks = self. get_running_tasks_checked ( ) waiting = [ ] for task in tasks : state_msg = task. state. name if task. state in TaskState. has_started ( ) : task = self. onefuzz. tasks. get ( task. task_id ) if : continue state_msg = ""waiting-for-heartbeat"" waiting. append ( f""{task.config.task.type.name}:{state_msg}"" ) return waiting",False,task.events,task.config.task.type == TaskState.cancelled and (not task.config.task.state.has_started()) and (not task.config.task.state.has_started()),0.6749505996704102 3663,"def migrate_common_facts ( facts ) : """"""Migrate facts from various roles into common"""""" params = { ""node"" : ( ""portal_net"" ), ""master"" : ( ""portal_net"" ) } if ""common"" not in facts : facts [ ""common"" ] = { } for role in params. keys ( ) : if : for param in params [ role ] : if param in facts [ role ] : facts [ ""common"" ] [ param ] = facts [ role ]. 
pop ( param ) return facts",False,role in facts,role in params,0.6870690584182739 3664,"def build ( opt ) : dpath, version = download ( opt ) if ""light_use_speech_prefix"" not in opt : opt [ ""light_use_speech_prefix"" ] = True fields = [ ""taskname"", ""setting"", ""objects"", ""person_names"", ""persona"", ""emote"", ""speech"", ""action"", ""affordances"", ""repeat"", ""cands"", ""current_self_output"", ""clip_cands"", ""speech_prefix"", ] fpath = """" for f in fields : fpath += f + str ( opt [ ""light_use_"" + f ] ) + ""_"" dpath2 = os. path. join ( opt [ ""datapath"" ], ""light_dialogue"", fpath [ : - 1 ] ) if not build_data. built ( dpath2, version ) : if : build_data. remove_dir ( dpath2 ) build_data. make_dir ( dpath2 ) fname = ""light_data.pkl"" fname2 = ""light_unseen_data.pkl"" build_from_db ( opt, dpath, dpath2, fname, fname2 ) build_data. mark_done ( dpath2, version )",False,build_data.built(dpath2),dpath2 != dpath2,0.6462545394897461 3665,"def capture_server ( evt, buf, serv ) : try : serv. listen ( 5 ) conn, addr = serv. accept ( ) except socket. timeout : pass else : n = 200 while n > 0 : r, w, e = select. select ( [ conn ], [ ], [ ] ) if r : data = conn. recv ( 10 ) buf. write ( data. replace ( ""\n"", """" ) ) if : break n -= 1 time. sleep ( 0.01 ) conn. close ( ) finally : serv. close ( ) evt. set ( )",False,'\n' in data,n > 0,0.6613521575927734 3666,"def check_and_apply_update ( ) : check_releases ( ) if not args. release_update : gitconfig ( ) branch = settings. general. branch g = git. cmd. Git ( current_working_directory ) g. fetch ( ""origin"" ) result = g. diff ( ""--shortstat"", ""origin/"" + branch ) if len ( result ) == 0 : logging. info ( ""BAZARR No new version of Bazarr available."" ) else : g. reset ( ""--hard"", ""HEAD"" ) g. checkout ( branch ) g. reset ( ""--hard"", ""origin/"" + branch ) g. pull ( ) logging. info ( ""BAZARR Updated to latest version. Restart required. "" + result ) updated ( ) else : url = ""https://api.github.com/repos/morpheus65535/bazarr/releases/latest"" release = request_json ( url, timeout = 20, whitelist_status_code = 404, validator = lambda x : type ( x ) == list, ) if release is None : logging. warning ( ""BAZARR Could not get releases from GitHub."" ) return else : latest_release = release [ ""tag_name"" ] if : <",False,'v' + os.environ['BAZARR_VERSION'] != latest_release,latest_release is not None,0.6523812413215637 3667,"def get_data ( self, path ) : """"""Gross hack to contort loader to deal w/ load_*()'s bad API."""""" if self. file and path == self. path : if : file = self. file else : self. file = file = open ( self. path, ""r"" ) with file : return file. read ( ) else : return super ( ). get_data ( path )",False,not self.file.closed,"hasattr(self.file, 'read')",0.6608824133872986 3668,"def run ( self, input, generator ) : complexities = { } piter = generator. orderedEnumeration ( ) maxLevin = 1 while True : c = 0 while c <= maxLevin : try : c, p = next ( piter ) except StopIteration : break if : complexities [ c ] = [ p ] else : complexities [ c ]. append ( p ) for c in range ( maxLevin ) : for p in complexities [ c ] : boundP = timeBoundExecution ( p, 2 ** ( maxLevin - c ) / maxLevin ) res = boundP. run ( input ) if self. stoppingCriterion ( res ) : return res maxLevin += 1",True,c not in complexities,c not in complexities,0.6749194860458374 3669,"def get_user_context ( request, escape = False ) : if isinstance ( request, HttpRequest ) : user = getattr ( request, ""user"", None ) result = { ""ip_address"" : request. 
META [ ""REMOTE_ADDR"" ] } if : result. update ( { ""email"" : user. email, ""id"" : user. id, } ) if user. name : result [ ""name"" ] = user. name else : result = { } return mark_safe ( json. dumps ( result ) )",False,user and user.is_authenticated(),escape,0.6531888246536255 3670,"def _transform_count_encode ( self, X_in, y ) : """"""Perform the transform count encoding."""""" X = X_in. copy ( deep = True ) X. loc [ :, self. cols ] = X. fillna ( value = np. nan ) for col in self. cols : if : if col in self. _min_group_categories. keys ( ) : X [ col ] = X [ col ]. map ( self. _min_group_categories [ col ] ). fillna ( X [ col ] ) X [ col ] = X [ col ]. map ( self. mapping [ col ] ) if isinstance ( self. _handle_unknown [ col ], ( int, np. integer ) ) : X [ col ] = X [ col ]. fillna ( self. _handle_unknown [ col ] ) elif self. _handle_unknown [ col ] == ""error"" and X [ col ]. isnull ( ). any ( ) : raise ValueError ( ""Missing data found in column %s at transform time."" % ( col, ) ) return X, self. mapping",False,self._min_group_size is not None,self.mapping[col],0.6533434391021729 3671,"def mouse ( self, button, mods, x, y ) : if button == 1 : for i in range ( 4 ) : if : self. hit = i elif button == - 1 : self. hit = None elif self. hit!= None : self. coords [ self. hit ] = ( x, y ) self. view. dirty ( )",False,"hypot(x - self.coords[i][0], y - self.coords[i][1]) < 4",i > 0,0.6549968719482422 3672,"def _process_watch ( self, watched_event ) : logger. debug ( ""process_watch: %r"", watched_event ) with handle_exception ( self. _tree. _error_listeners ) : if watched_event. type == EventType. CREATED : assert self. _parent is None, ""unexpected CREATED on non-root"" self. on_created ( ) elif watched_event. type == EventType. DELETED : self. on_deleted ( ) elif watched_event. type == EventType. CHANGED : self. _refresh_data ( ) elif : self. _refresh_children ( )",False,watched_event.type == EventType.CHILD,watched_event.type == EventType.CHANGING,0.6578012704849243 3673,"def _reauthenticate_and_retry ( self, func = None ) : import kubernetes logger. info ( ""Trying to reauthenticate"" ) kubernetes. config. load_kube_config ( ) subprocess. run ( [ ""kubectl"", ""get"", ""nodes"" ] ) self. kubeapi = kubernetes. client. CoreV1Api ( ) self. batchapi = kubernetes. client. BatchV1Api ( ) try : self. register_secret ( ) except kubernetes. client. rest. ApiException as e : if : logger. warning ( ""409 conflict ApiException when registering secrets"" ) logger. warning ( e ) else : raise WorkflowError ( e, ""This is likely a bug in "" ""https://github.com/kubernetes-client/python."", ) if func : return func ( )",False,e.status == 409 and e.reason == 'Conflict',e.args.error_code == 5,0.657464325428009 3674,"def create_fts_parser ( server, db_name, schema_name, fts_parser_name ) : """"""This function will add the fts_parser under test schema."""""" try : connection = get_db_connection ( db_name, server [ ""username"" ], server [ ""db_password"" ], server [ ""host"" ], server [ ""port"" ], server [ ""sslmode"" ], ) pg_cursor = connection. cursor ( ) query = ( ""DROP TEXT SEARCH PARSER IF EXISTS "" + schema_name + ""."" + fts_parser_name ) pg_cursor. execute ( query ) query = ( ""CREATE TEXT SEARCH PARSER "" + schema_name + ""."" + fts_parser_name + ""(START=prsd_start, GETTOKEN=prsd_nexttoken, "" ""END=prsd_end, LEXTYPES=dispell_init)"" ) pg_cursor. execute ( query ) connection. commit ( ) pg_cursor. 
execute ( ""select oid from pg_catalog.pg_ts_parser where "" ""prsname = '%s' order by oid ASC limit 1"" % fts_parser_name ) oid = pg_cursor. fetchone ( ) fts_parser_id = """" if :",True,oid,oid,0.7210830450057983 3675,"def wrapper ( * args, spec : Spec, ** kw ) : available_configs = set ( configs ) if spec. CONFIG_NAME not in available_configs : message = f""doesn't support this config: {spec.CONFIG_NAME}."" if : message = f""{message} Reason: {reason}"" dump_skipping_message ( message ) return None return fn ( * args, spec = spec, ** kw )",False,reason is not None,message,0.664722740650177 3676,"def _run_jdeps_analysis ( self, target, target_artifact_classpaths, potential_deps_classpaths, jdeps_output_json, ) : with self. aliased_classpaths ( potential_deps_classpaths ) as classpaths_by_alias : with open ( jdeps_output_json, ""w"" ) as f : if len ( target_artifact_classpaths ) : jdeps_stdout, jdeps_stderr = self. _spawn_jdeps_command ( target, target_artifact_classpaths, classpaths_by_alias. keys ( ) ). communicate ( ) deps_classpaths = set ( ) for line in io. StringIO ( jdeps_stdout. decode ( ""utf-8"" ) ) : match = self. _jdeps_summary_line_regex. fullmatch ( line. strip ( ) ) if : dep_name = match. group ( 1 ) deps_classpaths. add ( classpaths_by_alias. get ( dep_name, dep_name ) ) else : deps_classpaths = [ ] json. dump ( list ( deps_classpaths ), f )",False,match is not None,match,0.6587874889373779 3677,"def toxml ( self ) : text = self. value self. parent. setBidi ( getBidiType ( text ) ) if not text. startswith ( HTML_PLACEHOLDER_PREFIX ) : if : text = text. replace ( ""\n"", ""\n "" ) elif self. parent. nodeName == ""li"" and self. parent. childNodes [ 0 ] == self : text = ""\n "" + text. replace ( ""\n"", ""\n "" ) text = self. doc. normalizeEntities ( text ) return text",False,self.parent.nodeName == 'p',self.parent.nodeName == 'li',0.654312014579773 3678,"def runTest ( self, define_g = True ) : """"""Run a Leo GeneralTestCase test."""""" trace_time = False tm = self c = tm. c p = tm. p. copy ( ) if trace_time : t1 = time. clock ( ) script = g. getScript ( c, p ). strip ( ) if self. setup_script : script = self. setup_script + ""\n"" + script tm. assertTrue ( script ) if c. shortFileName ( ) == ""dynamicUnitTest.leo"" : c. write_script_file = True g. app. unitTestDict = { ""c"" : c, ""g"" : g, ""p"" : p and p. copy ( ) } if define_g : d = { ""c"" : c, ""g"" : g, ""p"" : p and p. copy ( ), ""self"" : tm, } else : d = { ""self"" : tm, } script = script + ""\n"" if c. write_script_file : scriptFile = c. writeScriptFile ( script ) if g. isPython3 : exec ( compile ( script, scriptFile, ""exec"" ), d ) else : builtins. execfile ( scriptFile, d ) else : exec ( script, d ) if trace_time : t2 = time. clock ( ) if : <",False,t2 - t1 > 3.0,t2 and t1 > 0 and (t2 > 0),0.6568703651428223 3679,"def setup_custom_resources ( kube_client : KubeClient, kind : KubeKind, version : str, crd : CustomResourceDefinition, config_dicts : Mapping [ str, Mapping [ str, Any ] ], group : str, cluster : str, service : str = None, instance : str = None, ) -> bool : succeded = True if config_dicts : crs = list_custom_resources ( kube_client = kube_client, kind = kind, version = version, group = group ) for svc, config in config_dicts. 
items ( ) : if : continue if not reconcile_kubernetes_resource ( kube_client = kube_client, service = svc, instance = instance, instance_configs = config, kind = kind, custom_resources = crs, version = version, group = group, cluster = cluster, crd = crd, ) : succeded = False return succeded",False,service is not None and service != svc,not instance or instance or kind or group or (service == 'cluster'),0.6587128639221191 3680,"def __init__ ( self, sample_function, batch_size, ids, targets = None, shuffle = True ) : if not is_real_iterable ( ids ) : raise TypeError ( ""IDs must be an iterable or numpy array of graph node IDs"" ) if targets is not None : if : raise TypeError ( ""Targets must be None or an iterable or numpy array "" ) if len ( ids )!= len ( targets ) : raise ValueError ( ""The length of the targets must be the same as the length of the ids"" ) self. targets = np. asanyarray ( targets ) else : self. targets = None if targets is not None and len ( ids )!= len ( targets ) : raise ValueError ( ""Length of link ids must match length of link targets"" ) if isinstance ( sample_function, collections. Callable ) : self. _sample_features = sample_function else : raise TypeError ( ""({}) The sampling function expects a callable function."". format ( type ( self ). __name__ ) ) self. batch_size = batch_size self. ids = list ( ids ) self. data_size = len ( self. ids ) self. shuffle = shuffle self. on_epoch_end ( )",False,not is_real_iterable(targets),"not isinstance(targets, np.ndarray)",0.6524567604064941 3681,"def _level_up_logging ( self ) : for handler in self. log. handlers : if issubclass ( handler. __class__, logging. FileHandler ) : if : handler. setLevel ( logging. DEBUG ) self. log. debug ( ""Leveled up log file verbosity"" )",False,handler.level != logging.DEBUG,"hasattr(handler, 'setLevel')",0.6558986902236938 3682,"def cmd_execution_alter_shell ( separator, cmd, OUTPUT_TEXTFILE ) : if settings. TARGET_OS == ""win"" : if : payload = separator + cmd + "" "" else : python_payload = ( settings. WIN_PYTHON_DIR + "" -c \""import os; os.system('"" + cmd + "">"" + OUTPUT_TEXTFILE + ""')\"""" ) payload = ( separator + 'for /f """"t""""o""""k""""e""""n""""s""=*"" %i in (\'cmd /c' + python_payload + ""') do @set /p =%i< nul"" ) else : payload = ( separator + ""$(python -c \""f=open('"" + settings. WEB_ROOT + OUTPUT_TEXTFILE + ""','w')\nf.write('$(echo $("" + cmd + ""))')\nf.close()\n\"")"" ) if ( settings. USER_AGENT_INJECTION == True or settings. REFERER_INJECTION == True or settings. HOST_INJE",False,settings.REVERSE_TCP,"hasattr(os, 'system')",0.6607925891876221 3683,"def visit_list ( self, items : Sequence [ base. AST ], *, separator : str = "","", terminator : Optional [ str ] = None, newlines : bool = True, ** kwargs : Any ) -> None : separator = terminator if terminator is not None else separator size = len ( items ) for i, item in enumerate ( items ) : self. visit ( item, ** kwargs ) if i < size - 1 or terminator is not None : self. write ( separator ) if : self. new_lines = 1 else : self. 
write ( "" "" )",True,newlines,newlines,0.6980547308921814 3684,"def get_version ( version_file = STATIC_VERSION_FILE ) : version_info = get_static_version_info ( version_file ) version = version_info [ ""version"" ] if version == ""__use_git__"" : version = get_version_from_git ( ) if : version = get_version_from_git_archive ( version_info ) if : version = Version ( ""unknown"", None, None ) return pep440_format ( version ) else : return version",False,not version,version == '__None__',0.6803073883056641 3685,"def generateAggregation ( self, agg, where_clausel ) : if not agg : return self. table, where_clausel if ( agg. aggfunc == SigmaAggregationParser. AGGFUNC_COUNT or agg. aggfunc == SigmaAggregationParser. AGGFUNC_MAX or agg. aggfunc == SigmaAggregationParser. AGGFUNC_MIN or agg. aggfunc == SigmaAggregationParser. AGGFUNC_SUM or agg. aggfunc == SigmaAggregationParser. AGGFUNC_AVG ) : if agg. groupfield : group_by = "" GROUP BY {0}"". format ( self. fieldNameMapping ( agg. groupfield, None ) ) else : group_by = """" if agg. aggfield : select = ""{}({}) AS agg"". format ( agg. aggfunc_notrans, self. fieldNameMapping ( agg. aggfield, None ) ) else : if : select = ""{}(*) AS agg"". format ( agg. aggfunc_notrans ) else : raise SigmaParseError ( ""For {} aggregation a fieldname needs to be specified"". format ( agg. aggfunc_notrans ) ) temp_table = ""(SELECT {} FROM {} WHERE {}{})"".",False,agg.aggfunc == SigmaAggregationParser.AGGFUNC_COUNT,agg.aggfunc_notrans,0.6619846820831299 3686,def _use_full_params ( self ) -> None : for p in self. params : if not p. _is_sharded : if : assert p. _fp16_shard. storage ( ). size ( )!= 0 p. data = p. _fp16_shard else : assert p. _full_param_padded. storage ( ). size ( )!= 0 p. data = p. _full_param_padded [ : p. _orig_size. numel ( ) ]. view ( p. _orig_size ),False,self.mixed_precision,p._fp16_shard is not None,0.6638258099555969 3687,"def test_read1 ( self ) : self. test_write ( ) blocks = [ ] nread = 0 with gzip. GzipFile ( self. filename, ""r"" ) as f : while True : d = f. read1 ( ) if : break blocks. append ( d ) nread += len ( d ) self. assertEqual ( f. tell ( ), nread ) self. assertEqual ( b"""". join ( blocks ), data1 * 50 )",True,not d,not d,0.6705198287963867 3688,"def _table_schema ( self, table ) : rows = self. db. execute_sql ( ""PRAGMA table_info('%s')"" % table ). fetchall ( ) result = { } for _, name, data_type, not_null, _, primary_key in rows : parts = [ data_type ] if primary_key : parts. append ( ""PRIMARY KEY"" ) if : parts. append ( ""NOT NULL"" ) result [ name ] = "" "". join ( parts ) return result",True,not_null,not_null,0.662692666053772 3689,"def pytest_collection_modifyitems ( config, items ) : if platform. python_implementation ( ) == ""PyPy"" : skip_marker = pytest. mark. skip ( reason = ""FeatureHasher is not compatible with PyPy"" ) for item in items : if item. name. endswith ( ( ""_hash.FeatureHasher"", ""text.HashingVectorizer"" ) ) : item. add_marker ( skip_marker ) if config. getoption ( ""--skip-network"" ) : skip_network = pytest. mark. skip ( reason = ""test requires internet connectivity"" ) for item in items : if : item. add_marker ( skip_network ) skip_doctests = False try : import numpy as np if LooseVersion ( np. __version__ ) < LooseVersion ( ""1.14"" ) : reason = ""doctests are only run for numpy >= 1.14"" skip_doctests = True elif _IS_32BIT : reason = ""doctest are only run when the default numpy int is "" ""64 bits."" skip_doctests = True elif sys. platform. 
startswith ( ""win32"" ) : reason = ( ""doctests are not run for Windows because numpy arrays "" ""repr is inconsistent across platforms."" ) skip_doctests = True except ImportError : if self. limit_query_param : try : limit = int ( request. query_params [ self. limit_query_param ] ) if : raise ValueError ( ) if settings. MAX_PAGE_SIZE : if limit == 0 : return settings. MAX_PAGE_SIZE else : return min ( limit, settings. MAX_PAGE_SIZE ) return limit except ( KeyError, ValueError ) : pass return self. default_limit",True,limit < 0,limit < 0,0.6877421140670776 3691,"def repoquery ( self ) : """"""perform a repoquery"""""" if self. ignore_excluders : self. tmp_file = tempfile. NamedTemporaryFile ( ) with open ( ""/etc/yum.conf"", ""r"" ) as file_handler : yum_conf_lines = file_handler. readlines ( ) yum_conf_lines = [ ""exclude="" if l. startswith ( ""exclude="" ) else l for l in yum_conf_lines ] with open ( self. tmp_file. name, ""w"" ) as file_handler : file_handler. writelines ( yum_conf_lines ) file_handler. flush ( ) repoquery_cmd = self. build_cmd ( ) rval = self. _repoquery_cmd ( repoquery_cmd, True, ""raw"" ) if rval [ ""results"" ] : processed_versions = Repoquery. process_versions ( rval [ ""results"" ]. strip ( ) ) formatted_versions = self. format_versions ( processed_versions ) rval [ ""package_found"" ] = True rval [ ""versions"" ] = formatted_versions rval [ ""package_name"" ] = self. name if : rval [ ""raw_versions"" ] = processed_versions else : del rval [ ""results"" ] else : rval [ ""package_found"" ] = False if self. ignore_excluders : self. tmp_file. close ( ) return rval",False,self.verbose,rval['raw_versions'],0.6639021039009094 3692,"def haslayer ( self, cls ) : """"""true if self has a layer that is an instance of cls. Superseded by ""cls in self"" syntax."""""" if self. __class__ == cls or self. __class__. __name__ == cls : return 1 for f in self. packetfields : fvalue_gen = self. getfieldval ( f. name ) if fvalue_gen is None : continue if : fvalue_gen = SetGen ( fvalue_gen, _iterpacket = 0 ) for fvalue in fvalue_gen : if isinstance ( fvalue, Packet ) : ret = fvalue. haslayer ( cls ) if ret : return ret return self. payload. haslayer ( cls )",False,not f.islist,"isinstance(fvalue_gen, SetGen)",0.6595672369003296 3693,"def reset_parameters ( self ) : for m in self. modules ( ) : if : continue elif isinstance ( m, nn. LayerNorm ) : nn. init. constant_ ( m. weight, 0.1 ) nn. init. constant_ ( m. bias, 0 ) else : for p in m. parameters ( ) : nn. init. normal_ ( p, 0, 0.1 )",False,"isinstance(m, nn.Embedding)","isinstance(m, nn.Linear)",0.6592777371406555 3694,"def test_large_headers ( self ) : with ExpectLog ( gen_log, ""Unsatisfiable read"", required = False ) : try : self. fetch ( ""/"", headers = { ""X-Filler"" : ""a"" * 1000 }, raise_error = True ) self. fail ( ""did not raise expected exception"" ) except HTTPError as e : if : self. assertIn ( e. response. code, ( 431, 599 ) )",False,e.response is not None,e.response.code == 200,0.6538652181625366 3695,"def time_left ( self ) : """"""Return how many seconds are left until the timeout expires"""""" if self. is_non_blocking : return 0 elif self. is_infinite : return None else : delta = self. target_time - self. TIME ( ) if : self. target_time = self. TIME ( ) + self. duration return self. 
duration else : return max ( 0, delta )",False,delta > self.duration,delta < 0,0.6713412404060364 3696,"def __init__ ( self, data, n_bins ) : bin_width = span / n_bins bins = [ 0 ] * n_bins for x in data : b = int ( mpfloor ( ( x - minimum ) / bin_width ) ) if b < 0 : b = 0 elif : b = n_bins - 1 bins [ b ] += 1 self. bins = bins self. bin_width = bin_width",False,b >= n_bins,b > n_bins - 1,0.6899993419647217 3697,"def _parse_subtitles ( self, video_data, url_key ) : subtitles = { } for translation in video_data. get ( ""translations"", [ ] ) : vtt_path = translation. get ( url_key ) if : continue lang = translation. get ( ""language_w3c"" ) or ISO639Utils. long2short ( translation [ ""language_medium"" ] ) subtitles. setdefault ( lang, [ ] ). append ( { ""ext"" : ""vtt"", ""url"" : vtt_path, } ) return subtitles",True,not vtt_path,not vtt_path,0.659387469291687 3698,"def parse_flow_sequence_entry ( self, first = False ) : if not self. check_token ( tokens. FlowSequenceEndToken ) : if not first : if : self. get_token ( ) if self. check_token ( tokens. FlowSequenceEndToken ) : token = self. peek_token ( ) self. echoerr ( ""While parsing a flow sequence"", self. marks [ - 1 ], ( ""expected sequence value, but got %r"" % token. id ), token. start_mark, ) else : token = self. peek_token ( ) raise ParserError ( ""while parsing a flow sequence"", self. marks [ - 1 ], ( ""expected ',' or ']', but got %r"" % token. id ), token. start_mark, ) if not self. check_token ( tokens. FlowSequenceEndToken ) : self. states. append ( self. parse_flow_sequence_entry ) return self. parse_node ( ) token = self. get_token (",False,self.check_token(tokens.FlowEntryToken),self.check_token(tokens.FlowSequenceEndToken),0.6497704982757568 3699,"def purge_snapshots ( self ) : for table in tuple ( self. snapshots ) : for _ in range ( MAX_ATTEMPTS ) : try : if table. startswith ( ""ifinfo_"" ) : try : self. execute ( ""DROP VIEW %s"" % table [ 7 : ] ) except Exception : self. log. warning ( ""purge_snapshots: %s"" % traceback. format_exc ( ) ) if self. mode == ""sqlite3"" : self. execute ( ""DROP TABLE %s"" % table ) elif : self. execute ( ""DROP TABLE %s CASCADE"" % table ) del self. snapshots [ table ] break except sqlite3. OperationalError : time. sleep ( random. random ( ) ) else : raise Exception ( ""DB snapshot error"" )",False,self.mode == 'psycopg2',self.mode == 'sqlite4.sql_snapshot',0.6523697972297668 3700,def _get_inputs ( out ) : inputs = [ ] queue = [ out ] hash_set = set ( ) while queue : t = queue. pop ( 0 ) if : inputs. append ( t ) else : input_tensors = [ t for t in t. op. input_tensors if t not in hash_set ] queue. extend ( input_tensors ) hash_set. update ( input_tensors ) return inputs,False,"isinstance(t.op, tensor.PlaceholderOp)",t.op is None,0.6528328657150269 3701,"def parse_edges ( self, pcb ) : edges = [ ] drawings = list ( pcb. GetDrawings ( ) ) bbox = None for m in pcb. GetModules ( ) : for g in m. GraphicalItems ( ) : drawings. append ( g ) for d in drawings : if : parsed_drawing = self. parse_drawing ( d ) if parsed_drawing : edges. append ( parsed_drawing ) if bbox is None : bbox = d. GetBoundingBox ( ) else : bbox. Merge ( d. GetBoundingBox ( ) ) if bbox : bbox. Normalize ( ) return edges, bbox",False,d.GetLayer() == pcbnew.Edge_Cuts,d,0.6519386768341064 3702,"def iter_chars_to_words ( self, chars ) : current_word = [ ] for char in chars : if not self. keep_blank_chars and char [ ""text"" ]. isspace ( ) : if : yield current_word current_word = [ ] elif current_word and self. 
char_begins_new_word ( current_word, char ) : yield current_word current_word = [ char ] else : current_word. append ( char ) if : yield current_word",False,current_word,not current_word,0.6678227186203003 3703,"def invoice_details ( request, invoice_id ) : invoice = get_object_or_404 ( Invoice. objects. select_related ( ""from_address"", ""to_address"" ), pk = invoice_id ) if invoice. company!= request. company : raise PermissionDenied user_assigned_account = False user_assigned_accounts = set ( request. user. account_assigned_users. values_list ( ""id"", flat = True ) ) invoice_accounts = set ( invoice. accounts. values_list ( ""id"", flat = True ) ) if user_assigned_accounts. intersection ( invoice_accounts ) : user_assigned_account = True if not ( ( request. user. role == ""ADMIN"" ) or ( request. user. is_superuser ) or ( invoice. created_by == request. user ) or ( request. user in invoice. assigned_to. all ( ) ) or user_assigned_account ) : raise PermissionDenied if request. method == ""GET"" : context = { } context [ ""invoice"" ] = invoice context [ ""attachments"" ] = invoice. invoice_attachment. all ( ) context [ ""comments"" ] = invoice. invoice_comments. all ( ) context [ ""invoice_history"" ] = invoice. invoice_history. all ( ) if : context [ ""users_mention"" ] = list ( User. objects. filter ( is_active = True, company = request. company ). values ( ""username"" ) ) super ( ). dropEvent ( event ) if self. count ( ) > 0 : item = self. itemAt ( event. pos ( ) ) if : index = self. indexFromItem ( item ). row ( ) self. setCurrentRow ( index ) else : self. setCurrentRow ( self. count ( ) - 1 )",False,item is not None,item.isValid(),0.6612706184387207 3705,"def _server_paste_to_uwsgi ( app_desc, server_config, applied_filters ) : uwsgi_dict = OrderedDict ( ) port = server_config. get ( ""port"", app_desc. default_port ) host = server_config. get ( ""host"", ""127.0.0.1"" ) if server_config. get ( ""use"", ""egg:Paste#http"" )!= ""egg:Paste#http"" : raise Exception ( ""Unhandled paste server 'use' value [%s], file must be manually migrate."" ) uwsgi_dict [ ""http"" ] = ""{}:{}"". format ( host, port ) uwsgi_dict [ ""threads"" ] = int ( server_config. get ( ""threadpool_workers"", 8 ) ) uwsgi_dict [ ""http-raw-body"" ] = True uwsgi_dict [ ""offload-threads"" ] = 8 prefix = None for applied_filter in applied_filters : if isinstance ( applied_filter, PrefixFilter ) : prefix = applied_filter. prefix break elif : uwsgi_dict [ ""http-auto-gzip"" ] = True if prefix : uwsgi_dict [ ""mount"" ] = ""{}={}"". format ( prefix, app_desc. uwsgi_module ) uwsgi_dict [ ""manage-script-name"" ] = True else : uwsgi_dict [ ""module"" ] = app_desc. uwsgi_module return uwsgi_dict",False,"isinstance(applied_filter, GzipFilter)","isinstance(applied_filter, gzip.BaseGZFilter)",0.6476679444313049 3706,"def _get_addons ( request, addons, addon_id, action ) : """"""Create a list of ``MenuItem``s for the activity feed."""""" items = [ ] a = MenuItem ( ) a. selected = not addon_id ( a. text, a. url ) = ( gettext ( ""All My Add-ons"" ), reverse ( ""devhub.feed_all"" ) ) if : a. url += ""?action="" + action items. append ( a ) for addon in addons : item = MenuItem ( ) try : item. selected = addon_id and addon. id == int ( addon_id ) except ValueError : pass url = reverse ( ""devhub.feed"", args = [ addon. slug ] ) if : url += ""?action="" + action item. text, item. url = addon. name, url items. append ( item ) return items",False,action,addons,0.7036648988723755 3707,"def modify_urls ( self ) : save = True if self. 
urlwatch_config. delete is not None : job = self. _find_job ( self. urlwatch_config. delete ) if : self. urlwatcher. jobs. remove ( job ) print ( ""Removed %r"" % ( job, ) ) else : print ( ""Not found: %r"" % ( self. urlwatch_config. delete, ) ) save = False if self. urlwatch_config. add is not None : items = [ item. split ( ""="", 1 ) for item in self. urlwatch_config. add. split ( "","" ) ] filters = [ v for k, v in items if k == ""filter"" ] items = [ ( k, v ) for k, v in items if k!= ""filter"" ] d = { k : v for k, v in items } if filters : d [ ""filter"" ] = "","". join ( filters ) job = JobBase. unserialize ( d ) print ( ""Adding %r"" % ( job, ) ) self. urlwatcher. jobs. append ( job ) if save : self. urlwatcher. urls_storage. save ( self. urlwatcher. jobs ) return 0",False,job is not None,job,0.6591392159461975 3708,"def configure_formatter ( self, config ) : """"""Configure a formatter from a dictionary."""""" if ""()"" in config : factory = config [ ""()"" ] try : result = self. configure_custom ( config ) except TypeError as te : if : raise config [ ""fmt"" ] = config. pop ( ""format"" ) config [ ""()"" ] = factory result = self. configure_custom ( config ) else : fmt = config. get ( ""format"", None ) dfmt = config. get ( ""datefmt"", None ) style = config. get ( ""style"", ""%"" ) cname = config. get ( ""class"", None ) if not cname : c = logging. Formatter else : c = _resolve ( cname ) result = c ( fmt, dfmt, style ) return result",False,"""'format'"" not in str(te)",'format' not in config,0.6636060476303101 3709,"def _get_notify ( self, action_node ) : if action_node. name not in self. _skip_notify_tasks : if : task_notify = NotificationsHelper. to_model ( action_node. notify ) return task_notify elif self. _chain_notify : return self. _chain_notify return None",False,action_node.notify,self.notify,0.6621562242507935 3710,"def _encrypt ( self ) : """"""Use your key thing to encrypt things."""""" from M2Crypto import BIO, SMIME, X509 plaintext = ""cert_id=%s\n"" % self. cert_id for name, field in self. fields. items ( ) : value = None if name in self. initial : value = self. initial [ name ] elif : value = field. initial if value is not None : plaintext += u""%s=%s\n"" % ( name, value ) plaintext = plaintext. encode ( ""utf-8"" ) s = SMIME. SMIME ( ) s. load_key_bio ( BIO. openfile ( self. private_cert ), BIO. openfile ( self. public_cert ) ) p7 = s. sign ( BIO. MemoryBuffer ( plaintext ), flags = SMIME. PKCS7_BINARY ) x509 = X509. load_cert_bio ( BIO. openfile ( self. paypal_cert ) ) sk = X509. X509_Stack ( ) sk. push ( x509 ) s. set_x509_stack ( sk ) s. set_cipher ( SMIME. Cipher ( ""des_ede3_cbc"" ) ) tmp = BIO. MemoryBuffer ( ) p7. write_der ( tmp ) p7 = s. encrypt ( tmp, flags = SMIME. PKCS7_BINARY ) out = BIO. MemoryBuffer ( ) p7. write ( out ) return out. read ( ). decode ( )",False,field.initial is not None,"hasattr(field, 'initial')",0.6518455743789673 3711,"def get_request_headers ( ) -> Dict : url = urlparse ( uri ) candidates = [ ""%s://%s"" % ( url. scheme, url. netloc ), ""%s://%s/"" % ( url. scheme, url. netloc ), uri, ""*"", ] for u in candidates : if : headers = dict ( DEFAULT_REQUEST_HEADERS ) headers. update ( self. config. linkcheck_request_headers [ u ] ) return headers return { }",True,u in self.config.linkcheck_request_headers,u in self.config.linkcheck_request_headers,0.6553332805633545 3712,"def handle_data ( self, data ) : ds = data. split ( "" "" ) lds = len ( ds ) if ds [ lds - 1 ]. 
isdigit ( ) : if lds == 3 or lds == 4 : day = str ( ds [ lds - 3 ] ) month = str ( ds [ lds - 2 ] ) year = str ( ds [ lds - 1 ] ) if : day = ""0"" + day month = month [ 0 : 3 ] monthNum = str ( list ( calendar. month_abbr ). index ( month ) ) if len ( monthNum ) == 1 : monthNum = ""0"" + monthNum newDate = year + monthNum + day self. dates. append ( str ( newDate ) )",False,len(day) == 1,len(month) == 1,0.658234715461731 3713,"def filter_queryset ( self, request, queryset, view ) : Membership = apps. get_model ( ""projects"", ""Membership"" ) operations = { ""filter"" : self. _prepare_filter_query, ""exclude"" : self. _prepare_exclude_query, } for mode, qs_method in operations. items ( ) : query = self. _get_queryparams ( request. QUERY_PARAMS, mode = mode ) if query : memberships = ( Membership. objects. filter ( query ) . exclude ( user__isnull = True ) . values_list ( ""user_id"", flat = True ) ) if : user_story_model = apps. get_model ( ""userstories"", ""UserStory"" ) queryset = queryset. filter ( qs_method ( Q ( self. get_assigned_users_filter ( user_story_model, memberships ) ) ) ) return FilterBackend. filter_queryset ( self, request, queryset, view )",False,memberships,not self.use_user_story,0.6871935129165649 3714,"def update_forum_nums_topic_post ( modeladmin, request, queryset ) : for forum in queryset : forum. num_topics = forum. count_nums_topic ( ) forum. num_posts = forum. count_nums_post ( ) if : forum. last_post = forum. topic_set. order_by ( ""-last_reply_on"" ) [ 0 ]. last_post else : forum. last_post = """" forum. save ( )",False,forum.num_topics,forum.num_topics > 0,0.674964189529419 3715,"def handle_starttag ( self, tag, attrs ) : if tag == ""pre"" : self. in_pre = True if self. is_block_tag ( tag ) : self. output = self. output. rstrip ( ) self. output += ""<"" + tag if attrs : attrs. sort ( ) for ( k, v ) in attrs : self. output += "" "" + k if v in [ ""href"", ""src"" ] : self. output += ( ""="" + '""' + urllib. quote ( urllib. unquote ( v ), safe = ""/"" ) + '""' ) elif : self. output += ""="" + '""' + cgi. escape ( v, quote = True ) + '""' self. output += "">"" self. last_tag = tag self. last = ""starttag""",False,v != None,"v in ['value', 'type']",0.6725598573684692 3716,"def _prepare ( self ) : from_text = str ( self. from_entry. get_text ( ) ) self. set_total ( self. db. get_number_of_media ( ) ) with self. db. get_media_cursor ( ) as cursor : for handle, data in cursor : obj = Media ( ) obj. unserialize ( data ) if : self. handle_list. append ( handle ) self. path_list. append ( obj. path ) self. update ( ) self. reset ( ) self. prepared = True",False,obj.get_path().find(from_text) != -1,obj.from_text == from_text,0.6493019461631775 3717,"def f ( view, s ) : if mode == modes. INTERNAL_NORMAL : x_limit = max ( view. line ( s. b ). a, s. b - count ) return sublime. Region ( s. a, x_limit ) elif mode in ( modes. VISUAL, modes. VISUAL_BLOCK ) : if : if mode == modes. VISUAL_BLOCK and self. view. rowcol ( s. b - 1 ) [ 1 ] == baseline : return s x_limit = max ( view. line ( s. b - 1 ). a + 1, s. b - count ) if view. line ( s. a ) == view. line ( s. b - 1 ) and count >= s. size ( ) : x_limit = max ( view. line ( s. b - 1 ). a, s. b - count - 1 ) return sublime. Region ( s. a + 1, x_limit ) return sublime. Region ( s. a, x_limit ) if s. a > s. b : x_limit = max ( view. line ( s. b ). a, s. b - count ) return sublime. Region ( s. a, x_limit ) elif mode == modes. NORMAL : x_limit = max ( view. line ( s. b ). a, s. b - count ) return sublime. 
Region ( x_limit, x_limit ) return s",False,s.a < s.b,self.baseline,0.6604595184326172 3718,"def populate_obj ( self, obj, name ) : field = getattr ( obj, name, None ) if field is not None : if : field. delete ( ) return if isinstance ( self. data, FileStorage ) and not is_empty ( self. data. stream ) : if not field. grid_id : func = field. put else : func = field. replace func ( self. data. stream, filename = self. data. filename, content_type = self. data. content_type, )",False,self._should_delete,field.grid_id,0.6581511497497559 3719,"def generator ( self, data ) : for sock in data : if : offset = sock. obj_offset else : offset = sock. obj_vm. vtop ( sock. obj_offset ) yield ( 0, [ Address ( offset ), int ( sock. Pid ), int ( sock. LocalPort ), int ( sock. Protocol ), str ( protos. protos. get ( sock. Protocol. v ( ), ""-"" ) ), str ( sock. LocalIpAddress ), str ( sock. CreateTime ), ], )",False,not self._config.PHYSICAL_OFFSET,"hasattr(sock, 'obj_vm')",0.6564657688140869 3720,"def _skip_trivial ( constraint_data ) : if skip_trivial_constraints : if isinstance ( constraint_data, LinearCanonicalRepn ) : if constraint_data. variables is None : return True else : if : return True return False",False,constraint_data.body.polynomial_degree() == 0,constraint_data.constraints is None,0.6509370803833008 3721,"def get_filestream_file_items ( self ) : data = { } fs_file_updates = self. get_filestream_file_updates ( ) for k, v in six. iteritems ( fs_file_updates ) : l = [ ] for d in v : offset = d. get ( ""offset"" ) content = d. get ( ""content"" ) assert offset is not None assert content is not None assert offset == 0 or offset == len ( l ), ( k, v, l, d ) if : l = [ ] l. extend ( map ( json. loads, content ) ) data [ k ] = l return data",False,not offset,content is not None,0.6812525391578674 3722,"def handle_membership ( self, cmd ) : if self. _listen_interfaces is None : mreq = struct. pack ( str ( ""4sI"" ), socket. inet_aton ( self. _multicast_address [ 0 ] ), socket. INADDR_ANY ) self. socket. setsockopt ( socket. IPPROTO_IP, cmd, mreq ) else : for interface in self. _listen_interfaces : try : if_addr = socket. inet_aton ( interface ) except socket. error : : mreq = socket. inet_aton ( self. _multicast_address [ 0 ] ) + if_addr self. socket. setsockopt ( socket. IPPROTO_IP, cmd, mreq )",False,if_addr = GetInterfaceAddress(interface),self._multicast_address is not None,0.6487302780151367 3723,"def suckfont ( data ) : import re m = re. search ( r""/FontName\s+/([^ \t\n\r]+)\s+def"", data ) if m : fontName = m. group ( 1 ) else : fontName = None interpreter = PSInterpreter ( ) interpreter. interpret ( ""/Helvetica 4 dict dup /Encoding StandardEncoding put definefont pop"" ) interpreter. interpret ( data ) fontdir = interpreter. dictstack [ 0 ] [ ""FontDirectory"" ]. value if fontdir. has_key ( fontName ) : rawfont = fontdir [ fontName ] else : fontNames = fontdir. keys ( ) if : fontNames. remove ( ""Helvetica"" ) fontNames. sort ( ) rawfont = fontdir [ fontNames [ 0 ] ] interpreter. close ( ) return unpack_item ( rawfont )",False,len(fontNames) > 1,'Helvetica' in fontNames,0.658035159111023 3724,"def parse_flow_sequence_entry ( self, first = False ) : if not self. check_token ( FlowSequenceEndToken ) : if not first : if self. check_token ( FlowEntryToken ) : self. get_token ( ) else : token = self. peek_token ( ) raise ParserError ( ""while parsing a flow sequence"", self. marks [ - 1 ], ""expected ',' or ']', but got %r"" % token. id, token. start_mark, ) if self. 
check_token ( KeyToken ) : token = self. peek_token ( ) event = MappingStartEvent ( None, None, True, token. start_mark, token. end_mark, flow_style = True ) self. state = self. parse_flow_sequence_entry_mapping_key return event elif : self. states. append ( self. parse_flow_sequence_entry ) return self. parse_flow_node ( ) token = self. get_token ( ) event = SequenceEndEvent ( token. start_mark, token. end_mark ) self. state = self. states. pop ( ) self. marks. pop ( ) return event",False,not self.check_token(FlowSequenceEndToken),self.check_token(FlowSequenceEndToken),0.6517918109893799 3725,"def interpret ( self, expr ) : self. _cg. reset_state ( ) self. _reset_reused_expr_cache ( ) args = [ ( True, self. _feature_array_name ) ] if : args += [ ( True, ""output"" ) ] with self. _cg. function_definition ( name = self. function_name, args = args, is_scalar_output = expr. output_size == 1 ) : last_result = self. _do_interpret ( expr ) if : self. _cg. add_assign_array_statement ( last_result, ""output"", expr. output_size ) else : self. _cg. add_return_statement ( last_result ) if self. with_linear_algebra : filename = os. path. join ( os. path. dirname ( __file__ ), ""linear_algebra.c"" ) self. _cg. prepend_code_lines ( utils. get_file_content ( filename ) ) if self. with_vectors : self. _cg. add_dependency ( """" ) if self. with_math_module : self. _cg. add_dependency ( """" ) return self. _cg. finalize_and_get_generated_code ( )",False,expr.output_size > 1,is_scalar_output,0.6547500491142273 3726,"def _text ( bitlist ) : out = """" for typ, text in bitlist : if not typ : out += text elif typ == ""em"" : out += ""\\fI%s\\fR"" % text elif : out += ""\\fB%s\\fR"" % text else : raise ValueError ( ""unexpected tag %r inside text"" % ( typ, ) ) out = out. strip ( ) out = re. sub ( re. compile ( r""^\s+"", re. M ), """", out ) return out",False,"typ in ['strong', 'code']",typ == 'b',0.6679772138595581 3727,"def _configTabs ( self ) : for key in list ( self. widgetStore. keys ( ) ) : if self. widgetStore [ key ] [ 0 ]. disabled : if not useTtk : self. widgetStore [ key ] [ 0 ]. config ( bg = self. disabledBg, fg = self. disabledFg, cursor = """" ) else : self. widgetStore [ key ] [ 0 ]. config ( style = ""DisabledTab.TLabel"", cursor = """" ) else : if : if not useTtk : self. widgetStore [ key ] [ 0 ]. config ( bg = self. widgetStore [ key ] [ 1 ]. cget ( ""bg"" ), fg = self. activeFg, cursor = """", ) else : self. widgetStore [ key ] [ 0 ]. config ( style = ""SelectedTab.TLabel"", cursor = """" ) self. widgetStore [ key ] [ 1 ]. lift ( ) else : if not useTtk : ",False,key == self.selectedTab,self.widgetStore[key][0].state & 128,0.6616818904876709 3728,"def testMismatchedDataWindow ( self ) : main = self. __constantLayer ( """", imath. Color4f ( 1 ), size = imath. V2i ( 64 ) ) diffuse = self. __constantLayer ( ""diffuse"", imath. Color4f ( 0.5 ), size = imath. V2i ( 60 ) ) copy = GafferImage. CopyChannels ( ) copy [ ""in"" ] [ 0 ]. setInput ( main [ ""out"" ] ) copy [ ""in"" ] [ 1 ]. setInput ( diffuse [ ""out"" ] ) copy [ ""channels"" ]. setValue ( ""*"" ) self. assertEqual ( copy [ ""out"" ] [ ""format"" ]. getValue ( ), main [ ""out"" ] [ ""format"" ]. getValue ( ) ) self. assertEqual ( copy [ ""out"" ] [ ""dataWindow"" ]. getValue ( ), main [ ""out"" ] [ ""dataWindow"" ]. getValue ( ) ) for channel in ( ""R"", ""G"", ""B"", ""A"" ) : diffuseDW = diffuse [ ""out"" ] [ ""dataWindow"" ]. getValue ( ) copyDW = copy [ ""out"" ] [ ""dataWindow"" ]. getValue ( ) sampler = GafferImage. 
Sampler ( copy [ ""out"" ], ""diffuse."" + channel, copyDW ) for x in range ( copyDW. min ( ). x, copyDW. max ( ). x ) : for y in range ( copyDW. min ( ). y, copyDW. max ( ). y ) : if : self. assertEqual ( sampler. sample ( x, y ), 0.5 ) ",False,"GafferImage.BufferAlgo.contains(diffuseDW, imath.V2i(x, y))",sampler.isDefined(),0.6530857086181641 3729,"def upgrade_state_dict_named ( self, state_dict ) : items_to_add = { } keys_to_remove = [ ] for k in state_dict. keys ( ) : if : dim = int ( state_dict [ k ]. shape [ 0 ] / 3 ) items_to_add [ k. replace ( ""in_proj_weight"", ""q_proj.weight"" ) ] = state_dict [ k ] [ : dim ] items_to_add [ k. replace ( ""in_proj_weight"", ""k_proj.weight"" ) ] = state_dict [ k ] [ dim : 2 * dim ] items_to_add [ k. replace ( ""in_proj_weight"", ""v_proj.weight"" ) ] = state_dict [ k ] [ 2 * dim : ] keys_to_remove. append ( k ) if k. endswith ( ""in_proj_bias"" ) : dim = int ( state_dict [ k ]. shape [ 0 ] / 3 ) items_to_add [ k. replace ( ""in_proj_bias"", ""q_proj.bias"" ) ] = state_dict [ k ] [ : dim ] items_to_add [ k. replace ( ""in_proj_bias"", ""k_proj.bias"" ) ] = state_dict [ k ] [ dim : 2 * dim ] items_to_add [ k.",False,k.endswith('in_proj_weight'),k.endswith('.q_proj_bias'),0.651056170463562 3730,"def pytest_deselected ( items ) : if sb_config. dashboard : sb_config. item_count -= len ( items ) for item in items : test_id, display_id = _get_test_ids_ ( item ) if : sb_config. _results. pop ( test_id )",False,test_id in sb_config._results.keys(),test_id in sb_config._results,0.6494655609130859 3731,"def find_go_files_mtime ( app_files ) : files, mtime = [ ], 0 for f, mt in app_files. items ( ) : if not f. endswith ( "".go"" ) : continue if : continue files. append ( f ) mtime = max ( mtime, mt ) return files, mtime",False,APP_CONFIG.nobuild_files.match(f),"max(mtime, mt) < mt",0.648460865020752 3732,"def check ( self ) : user_agent = ""Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1)"" headers = { ""User-Agent"" : user_agent, ""Accept"" : ""text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"", ""Accept-language"" : ""sk,cs;q=0.8,en-US;q=0.5,en;q,0.3"", ""Connection"" : ""keep-alive"", ""Accept-Encoding"" : ""gzip, deflate"", ""Cache-Control"" : ""no-cache"", ""Cookie"" : ""C107373883=/omg1337hax"", } response = self. http_request ( method = ""GET"", path = ""/test"", headers = headers ) if response is None : return False if response. status_code!= 404 : return False else : if ""server"" in response. headers : server = response. headers. get ( ""server"" ) if re. search ( ""RomPager"", server ) is not None : if : return True else : return None return False",False,"re.search('omg1337hax', response.text) is not None","re.search((""Rom pager is using: > or re.search((""Rom pager is using: > >)",0.6472072601318359 3733,"def computeLeadingWhitespaceWidth ( s, tab_width ) : w = 0 for ch in s : if : w += 1 elif ch == ""\t"" : w += abs ( tab_width ) - ( w % abs ( tab_width ) ) else : break return w",False,ch == '',ch == '\n',0.6630555391311646 3734,"def fuse_module ( m ) : last_conv = None last_conv_name = None for name, child in m. named_children ( ) : if : if last_conv is None : continue fused_conv = fuse_conv_bn ( last_conv, child ) m. _modules [ last_conv_name ] = fused_conv m. _modules [ name ] = nn. Identity ( ) last_conv = None elif isinstance ( child, nn. 
Conv2d ) : last_conv = child last_conv_name = name else : fuse_module ( child ) return m",False,"isinstance(child, (nn.BatchNorm2d, nn.SyncBatchNorm))",name == last_conv_name,0.6539161205291748
3735,"def htmlify ( self, htmlbuffer, htmlcontext ) : stripped = self. _strip ( ) if not stripped : return lastIndent = htmlcontext. currentIndentWidth currentIndent = self [ 0 ]. indentWidths [ - 1 ] dontCompareIndents = False if self [ 0 ]. iscode and not htmlcontext. inPRE : htmlbuffer. write ( ""<pre>\n"" ) htmlbuffer. write ( """". join ( self ) ) htmlcontext. inPRE = True return if not self [ 0 ]. iscode and htmlcontext. inPRE : htmlbuffer. write ( ""</pre>\n"" ) htmlcontext. inPRE = False if self [ 0 ]. bulleted and not htmlcontext. inUL : htmlbuffer. write ( ""<ul>"" ) dontCompareIndents = True htmlcontext. inUL = True elif not self [ 0 ]. bulleted and htmlcontext. inUL : htmlbuffer. write ( ""</ul>"" ) htmlcontext. inUL = False if not dontCompareIndents : if lastIndent < currentIndent : htmlbuffer. write ( ""<dl>"" ) htmlcontext. inDL = True if : htmlbuffer. write ( ""</dl>"" ) htmlcontext. inDL = False htmlcontext. currentIndentWidth = currentIndent if htmlcontext. inUL : htmlbuffer. write ( ""<li>"" ) if htmlcontext. inDL : htmlbuffer. write ( ""<dd>"" ) htmlbuffer. write ( ""<p>"" + stripped + ""</p>"" )",False,lastIndent > currentIndent,htmlcontext.inDL,0.6664372682571411
3736,"def desc ( self ) : bits = [ ] for type, item in self. items : if type == ""path"" : bits. append ( basename ( item ) ) elif type == ""file"" : bits. append ( basename ( item. url ) ) elif : part_type = item. type if part_type == ""project"" : bits. append ( basename ( item. url ) ) elif part_type == ""folder"" : bits. append ( item. getStringAttribute ( ""name"" ) ) elif part_type == ""livefolder"" : bits. append ( basename ( item. liveDirectory ) ) else : log. warn ( ""unexpected container koIPart type: %r"", part_type ) return "", "". join ( bits )",False,type == 'container',type == 'part',0.6596482992172241
3737,"def __init__ ( self, config, scenario, engine ) : super ( HierarchicHTTPRequest, self ). __init__ ( config, scenario, engine, pure_body_file = True ) self. upload_files = self. config. get ( ""upload-files"", [ ] ) if self. method == ""PUT"" and len ( self. upload_files ) > 1 : self. upload_files = self. upload_files [ : 1 ] for file_dict in self. upload_files : param = file_dict. get ( ""param"", None ) if self. method == ""PUT"" : file_dict [ ""param"" ] = """" if : raise TaurusConfigError ( ""Items from upload-files must specify parameter name"" ) path_exc = TaurusConfigError ( ""Items from upload-files must specify path to file"" ) path = str ( file_dict. get ( ""path"", path_exc ) ) if not has_variable_pattern ( path ) : path = self. engine. find_file ( path ) else : msg = ""Path '%s' contains variable and can't be expanded. Don't use relative paths in 'upload-files'!"" self. log. warning ( msg % path ) file_dict [ ""path"" ] = path mime = mimetypes. guess_type ( file_dict [ ""path"" ] ) [ 0 ] or ""application/octet-stream"" file_dict. get ( ""mime-type"", mime, force_set = True ) self. content_encoding = self. config. get (",False,self.method == 'POST' and (not param),param is None,0.6490879654884338
3738,"def after_insert ( self ) : if self. prescription : frappe. db. set_value ( ""Lab Prescription"", self. prescription, ""lab_test_created"", 1 ) if : self. invoiced = True if not self. lab_test_name and self. template : self. load_test_from_template ( ) self. reload ( )",False,"frappe.db.get_value('Lab Prescription', self.prescription, 'invoiced')",self.invoiced,0.6584774255752563
3739,"def has_scheme ( self, inp ) : if ""://"" in inp : return True else : authority = inp. replace ( ""/"", ""#"" ). replace ( ""?"", ""#"" ). split ( ""#"" ) [ 0 ] if "":"" in authority : _, host_or_port = authority. split ( "":"", 1 ) if : return False else : return False return True",False,"re.match('^\\d+$', host_or_port)",host_or_port == '0',0.6455568075180054
3740,"def __init__ ( self ) : """"""Check for errors in the constructor."""""" if self. rejected_users and self. allowed_users : raise AuthorizerError ( ""rejected_users and allowed_users options "" ""are mutually exclusive"" ) users = self. _get_system_users ( ) for user in self. allowed_users or self. rejected_users : if : raise AuthorizerError ( 'invalid username ""anonymous""' ) if user not in users : raise AuthorizerError ( ""unknown user %s"" % user ) if self. anonymous_user is not None : if not self. has_user ( self. anonymous_user ) : raise AuthorizerError ( ""no such user %s"" % self. anonymous_user ) home = self. get_home_dir ( self. anonymous_user ) if not os. path. isdir ( home ) : raise AuthorizerError ( ""no valid home set for user %s"" % self.
anonymous_user )",False,user == 'anonymous',self.anonymous_user is None and user not in self.users,0.664953351020813 3741,"def hard_nms ( box_scores, iou_threshold, top_k = - 1, candidate_size = 200 ) : scores = box_scores [ :, - 1 ] boxes = box_scores [ :, : - 1 ] picked = [ ] indexes = np. argsort ( scores ) indexes = indexes [ - candidate_size : ] while len ( indexes ) > 0 : current = indexes [ - 1 ] picked. append ( current ) if : break current_box = boxes [ current, : ] indexes = indexes [ : - 1 ] rest_boxes = boxes [ indexes, : ] iou = iou_of ( rest_boxes, np. expand_dims ( current_box, axis = 0 ) ) indexes = indexes [ iou <= iou_threshold ] return box_scores [ picked, : ]",False,0 < top_k == len(picked) or len(indexes) == 1,len(picked) > top_k,0.6510955095291138 3742,"def get_value ( self, trans, grid, repository_metadata ) : datatype_list = [ ] if repository_metadata : metadata = repository_metadata. metadata if metadata : datatype_dicts = metadata. get ( ""datatypes"", [ ] ) if datatype_dicts : datatype_tups = [ ] for datatype_dict in datatype_dicts : extension = datatype_dict. get ( ""extension"", """" ) dtype = datatype_dict. get ( ""dtype"", """" ) if : datatype_tups. append ( ( extension, dtype ) ) sorted_datatype_tups = sorted ( datatype_tups, key = lambda datatype_tup : datatype_tup [ 0 ] ) for datatype_tup in sorted_datatype_tups : extension, datatype = datatype_tup [ : 2 ] datatype_str = ( '' % trans. security. encode_id ( repository_metadata. id ) ",False,extension and dtype,extension,0.6790333986282349 3743,"def next_idx ( self ) : if not self. is_index_valid : return self. stop ( ) playlist_len = len ( self. list ) if self. mode == Player. MODE_ORDERED : if self. info [ ""idx"" ] < playlist_len : self. info [ ""idx"" ] += 1 elif self. mode == Player. MODE_ORDERED_LOOP : self. info [ ""idx"" ] = ( self. index + 1 ) % playlist_len elif self. mode == Player. MODE_SINGLE_LOOP : self. info [ ""idx"" ] = self. info [ ""idx"" ] else : playing_order_len = len ( self. order ) if self. _need_to_shuffle ( ) : self. shuffle_order ( ) self. _swap_song ( ) playing_order_len = len ( self. order ) self. info [ ""random_index"" ] += 1 if : self. info [ ""random_index"" ] %= playing_order_len if self. info [ ""random_index"" ] >= playing_order_len : self. info [ ""idx"" ] = playlist_len else : self. info [ ""idx"" ] = self. order [ self. info [ ""random_index"" ] ] if self. playing_song_changed_callback is not None : self. playing_song_changed_callback ( )",False,self.mode == Player.MODE_RANDOM_LOOP,self.info['random_index'] %= playing_order_len,0.6558465957641602 3744,"def _setProcessPriority ( process, nice_val, disable_gc ) : org_nice_val = Computer. _process_original_nice_value try : process. nice ( nice_val ) Computer. in_high_priority_mode = nice_val!= org_nice_val if : gc. disable ( ) else : gc. enable ( ) return True except psutil. AccessDenied : print2err ( ""WARNING: Could not set process {} priority "" ""to {}"". format ( process. pid, nice_val ) ) return False",True,disable_gc,disable_gc,0.6794929504394531 3745,"def raise_if_unsafe ( self ) : if self. existing and len ( self. existing. records ) >= self. MIN_EXISTING_RECORDS : existing_record_count = len ( self. existing. records ) update_pcent = self. change_counts [ ""Update"" ] / existing_record_count delete_pcent = self. change_counts [ ""Delete"" ] / existing_record_count if : raise UnsafePlan ( ""Too many updates, {:.2f} is over {:.2f} %"" ""({}/{})"". format ( update_pcent * 100, self. 
update_pcent_threshold * 100, self. change_counts [ ""Update"" ], existing_record_count, ) ) if delete_pcent > self. delete_pcent_threshold : raise UnsafePlan ( ""Too many deletes, {:.2f} is over {:.2f} %"" ""({}/{})"". format ( delete_pcent * 100, self. delete_pcent_threshold * 100, self. change_counts [ ""Delete"" ], existing_record_count, ) )",True,update_pcent > self.update_pcent_threshold,update_pcent > self.update_pcent_threshold,0.6516165733337402 3746,"def __applyCustomFormat ( self, unused ) : with Gaffer. UndoScope ( self. getPlug ( ). ancestor ( Gaffer. ScriptNode ) ) : with self. getContext ( ) : if : self. getPlug ( ). setValue ( GafferImage. FormatPlug. getDefaultFormat ( self. getContext ( ) ) ) Gaffer. Metadata. registerValue ( self. getPlug ( ), ""formatPlugValueWidget:mode"", ""custom"" )",False,self.getPlug().getValue() == GafferImage.Format(),self.getPlug() is not None,0.6514966487884521 3747,"def flush ( ) : if not ptrbuffer : return cline = """" for line in ptrbuffer : for token, value in line : if token == ""TXT"" : cline += repr ( value ) elif : cline += ""_str(%s)"" % value elif token == ""CMD"" : cline += ""_escape(%s)"" % value cline += "", "" cline = cline [ : - 2 ] + ""\\\n"" cline = cline [ : - 2 ] if cline [ : - 1 ]. endswith ( ""\\\\\\\\\\n"" ) : cline = cline [ : - 7 ] + cline [ - 1 ] cline = ""_printlist(["" + cline + ""])"" del ptrbuffer [ : ] code ( cline )",False,token == 'RAW',token == 'STRING',0.6661654710769653 3748,"def command ( self, slowly = False ) : try : if : mailpile. util. LAST_USER_ACTIVITY = 0 self. _idx ( ). save ( self. session ) GlobalPostingList. Optimize ( self. session, self. _idx ( ), force = ( ""harder"" in self. args ) ) return self. _success ( _ ( ""Optimized search engine"" ) ) except KeyboardInterrupt : return self. _error ( _ ( ""Aborted"" ) )",False,not slowly,slowly,0.6730985641479492 3749,"def __init__ ( self, description, stochastic = None, custom_check = None ) : self. _description = description self. stochastic = stochastic self. custom_check = custom_check try : parsed = ExplicitStateUpdater. DESCRIPTION. parseString ( description, parseAll = True ) except ParseException as p_exc : ex = SyntaxError ( ""Parsing failed: "" + str ( p_exc. msg ) ) ex. text = str ( p_exc. line ) ex. offset = p_exc. column ex. lineno = p_exc. lineno raise ex self. statements = [ ] self. symbols = SYMBOLS. copy ( ) for element in parsed : expression = str_to_sympy ( element. expression ) expression = expression. subs ( sympy. Function ( ""f"" ), self. symbols [ ""__f"" ] ) expression = expression. subs ( sympy. Function ( ""g"" ), self. symbols [ ""__g"" ] ) symbols = list ( expression. atoms ( sympy. Symbol ) ) unique_symbols = [ ] for symbol in symbols : if symbol. name == ""dt"" : unique_symbols. append ( symbol ) else : unique_symbols. append ( _symbol ( ""__"" + symbol. name ) ) for symbol, unique_symbol in zip ( symbols, unique_symbols ) : expression = expression. subs ( symbol, unique_symbol ) self. symbols. update ( dict ( ( ( symbol. name, symbol ) for symbol in unique_symbols ) ) ) xml_text, only_inBiddingZone_Domain = False, only_outBiddingZone_Domain = False ) : """"""Returns a tuple containing two lists."""""" if not xml_text : return None soup = BeautifulSoup ( xml_text, ""html.parser"" ) values = [ ] datetimes = [ ] for timeseries in soup. find_all ( ""timeseries"" ) : resolution = timeseries. find_all ( ""resolution"" ) [ 0 ]. contents [ 0 ] datetime_start = arrow. get ( timeseries. find_all ( ""start"" ) [ 0 ]. 
contents [ 0 ] ) if only_inBiddingZone_Domain : if : continue elif only_outBiddingZone_Domain : if not len ( timeseries. find_all ( ""outBiddingZone_Domain.mRID"". lower ( ) ) ) : continue for entry in timeseries. find_all ( ""point"" ) : position = int ( entry. find_all ( ""position"" ) [ 0 ]. contents [ 0 ] ) value = float ( entry. find_all ( ""quantity"" ) [ 0 ]. contents [ 0 ] ) datetime = datetime_from_position ( datetime_start, position, resolution ) values. append ( value ) datetimes. append ( datetime ) return values, datetimes",False,not len(timeseries.find_all('inBiddingZone_Domain.mRID'.lower())),"not len( timeseries.find_all(""point"".lower())",0.6568495035171509 3751,"def value_to_db_datetime ( self, value ) : if value is None : return None if timezone. is_aware ( value ) : if : value = value. astimezone ( timezone. utc ). replace ( tzinfo = None ) else : raise ValueError ( ""Oracle backend does not support timezone-aware datetimes when USE_TZ is False."" ) return six. text_type ( value )",False,settings.USE_TZ,self.use_tz,0.6705401539802551 3752,"def _load_plugins ( self, plugin ) : logger. info ( 'loading plugin ""%s""', plugin ) path_name = None if PY2 : import imp for mod in plugin. split ( ""."" ) : if : path_name = [ path_name ] _, path_name, _ = imp. find_module ( mod, path_name ) else : from importlib. util import find_spec as importlib_find path_name = importlib_find ( plugin ) try : path_name = path_name. submodule_search_locations [ 0 ] except TypeError : path_name = path_name. origin module_list = [ plugin ] if not path_name. endswith ( "".py"" ) : module_list = glob ( ""{}/[!_]*.py"". format ( path_name ) ) module_list = [ ""."". join ( ( plugin, os. path. split ( f ) [ - 1 ] [ : - 3 ] ) ) for f in module_list ] for module in module_list : try : import_module ( module ) except Exception : logger. exception ( ""Failed to import %s"", module )",False,path_name is not None,path_name,0.6558045148849487 3753,"def _frameRangeSelectionFilterUpdate ( self ) : if not self. frameMonitorTree. getJob ( ) : self. frameRangeSelection. setFrameRange ( [ ""1"", ""10000"" ] ) else : layers = self. frameMonitorTree. getJob ( ). getLayers ( ) _min = None _max = None for layer in layers : seq = FileSequence. FrameSet ( layer. range ( ) ) seq. normalize ( ) frameList = seq. getAll ( ) if _min is not None : _min = min ( _min, int ( frameList [ 0 ] ) ) else : _min = int ( frameList [ 0 ] ) if : _max = max ( _max, int ( frameList [ - 1 ] ) ) else : _max = int ( frameList [ - 1 ] ) if _min == _max : _max += 1 self. frameRangeSelection. default_select_size = 1000 // len ( layers ) self. frameRangeSelection. setFrameRange ( [ str ( _min ), str ( _max ) ] )",True,_max is not None,_max is not None,0.6593604683876038 3754,"def get_detections ( im, det, thresh, names, classes ) : ""Draw the markings around the detected region"" labelstr = [ ] category = - 1 detection = None valid = False for j in range ( classes ) : if det [ ""prob"" ] [ j ] > thresh : if category == - 1 : category = j labelstr. append ( names [ j ] + "" "" + str ( round ( det [ ""prob"" ] [ j ], 4 ) ) ) if category > - 1 : valid = True imc, imh, imw = im. shape width = int ( imh * 0.006 ) offset = category * 123457 % classes red = _get_color ( 2, offset, classes ) green = _get_color ( 1, offset, classes ) blue = _get_color ( 0, offset, classes ) rgb = [ red, green, blue ] b = det [ ""bbox"" ] left = int ( ( b. x - b. w / 2.0 ) * imw ) right = int ( ( b. x + b. w / 2.0 ) * imw ) top = int ( ( b. y - b. 
h / 2.0 ) * imh ) bot = int ( ( b. y + b. h / 2.0 ) * imh ) if left < 0 : left = 0 if right > imw - 1 : right = imw - 1 if : top = 0 if bot > imh - 1 : <",True,top < 0,top < 0,0.6721189022064209
3755,"def expect_first_flow_mapping_key ( self ) : if isinstance ( self. event, MappingEndEvent ) : self. indent = self. indents. pop ( ) self. flow_level -= 1 self. write_indicator ( ""}"", False ) self. state = self. states. pop ( ) else : if : self. write_indent ( ) if not self. canonical and self. check_simple_key ( ) : self. states. append ( self. expect_flow_mapping_simple_value ) self. expect_node ( mapping = True, simple_key = True ) else : self. write_indicator ( ""?"", True ) self. states. append ( self. expect_flow_mapping_value ) self. expect_node ( mapping = True )",False,self.canonical or self.column > self.best_width,self.flow_level > 0,0.6515256762504578
3756,"def language ( self ) : if self. lang_data : lang_data = [ s if s!= ""None"" else None for s in self. lang_data ] if : return Language ( lang_data [ 0 ], country = lang_data [ 1 ], script = lang_data [ 2 ] )",False,lang_data[0],len(lang_data),0.6593518257141113
3757,"def event_processor ( event, hint ) : request = weak_request ( ) if request is None : return event if ""transaction"" not in event : try : if : event [ ""transaction"" ] = request. matched_route. name elif integration. transaction_style == ""route_pattern"" : event [ ""transaction"" ] = request. matched_route. pattern except Exception : pass with capture_internal_exceptions ( ) : PyramidRequestExtractor ( request ). extract_into_event ( event ) if _should_send_default_pii ( ) : with capture_internal_exceptions ( ) : user_info = event. setdefault ( ""user"", { } ) if ""id"" not in user_info : user_info [ ""id"" ] = request. authenticated_userid return event",True,integration.transaction_style == 'route_name',integration.transaction_style == 'route_name',0.6518987417221069
3758,"def _convert_example ( example, use_bfloat16 ) : """"""Cast int64 into int32 and float32 to bfloat16 if use_bfloat16."""""" for key in list ( example. keys ( ) ) : val = example [ key ] if tf. keras. backend. is_sparse ( val ) : val = tf. sparse. to_dense ( val ) if val. dtype == tf. int64 : val = tf. cast ( val, tf. int32 ) if : val = tf. cast ( val, tf. bfloat16 ) example [ key ] = val",False,use_bfloat16 and val.dtype == tf.float32,val.dtype == tf.float32 and use_bfloat16,0.6499889492988586
3759,"def filterChecker ( url, params, headers, GET, delay, occurences, timeout, encoding ) : positions = occurences. keys ( ) sortedEfficiencies = { } environments = set ( [ ""<"", "">"" ] ) for i in range ( len ( positions ) ) : sortedEfficiencies [ i ] = { } for i in occurences : occurences [ i ] [ ""score"" ] = { } context = occurences [ i ] [ ""context"" ] if context == ""comment"" : environments. add ( ""-->"" ) elif context == ""script"" : environments. add ( occurences [ i ] [ ""details"" ] [ ""quote"" ] ) environments. add ( ""</scRipT/>"" ) elif context == ""attribute"" : if : if ( occurences [ i ] [ ""details"" ] [ ""name"" ] == ""srcdoc"" ) : environments. add ( ""<"" ) environments. add ( "">"" ) if occurences [ i ] [ ""details"" ] [ ""quote"" ] : environments.
add ( occurences [ i ] [ ""details"" ] [ ""quote"" ] ) for environment in environments : if environment : efficiencies = checker ( url, params, headers, ",False,occurences[i]['details']['type'] == 'value',len(sortedEfficiencies) > 0,0.6617074608802795 3760,"def on_startup ( dispatcher, url = None, cert = None ) : setup_handlers ( dispatcher ) bot = dispatcher. bot webhook = await bot. get_webhook_info ( ) if url : if webhook. url!= url : if not webhook. url : await bot. delete_webhook ( ) if : with open ( cert, ""rb"" ) as cert_file : await bot. set_webhook ( url, certificate = cert_file ) else : await bot. set_webhook ( url ) elif webhook. url : await bot. delete_webhook ( )",True,cert,cert,0.6862301230430603 3761,"def _get_compressor ( compress_type, compresslevel = None ) : if compress_type == ZIP_DEFLATED : if : return zlib. compressobj ( compresslevel, zlib. DEFLATED, - 15 ) return zlib. compressobj ( zlib. Z_DEFAULT_COMPRESSION, zlib. DEFLATED, - 15 ) elif compress_type == ZIP_BZIP2 : if : return bz2. BZ2Compressor ( compresslevel ) return bz2. BZ2Compressor ( ) elif compress_type == ZIP_LZMA : return LZMACompressor ( ) else : return None",True,compresslevel is not None,compresslevel is not None,0.6605966091156006 3762,"def button_release ( self, mapper ) : self. pressed = False if self. waiting_task and self. active is None and not self. action : mapper. cancel_task ( self. waiting_task ) self. waiting_task = None if : self. normalaction. button_press ( mapper ) mapper. schedule ( 0.02, self. normalaction. button_release ) elif self. active : self. active. button_release ( mapper ) self. active = None",False,self.normalaction,self.pressed,0.6700454950332642 3763,"def parse_plugins_list ( data : str ) -> List [ str ] : plugins : List [ str ] = [ ] modules : Dict [ str, Tuple [ int, str ] ] = { } for line, entry in enumerate ( data. splitlines ( ) ) : plugin = entry. strip ( ) if ""#"" in plugin : comment_start = plugin. find ( ""#"" ) plugin = plugin [ : comment_start ]. strip ( ) if not plugin : continue if ""@"" in plugin : module, path = map ( lambda x : x. strip ( ), plugin. split ( ""@"", 1 ) ) plugin = f""{module}@{path}"" validate_local_plugin ( line, module, path ) else : module = plugin if : first_line, first_entry = modules [ module ] raise ValueError ( f""plugin '{module}' is listed more than once: "" f""at line {first_line} ('{first_entry}') and at {line} ('{entry}')"" ) modules [ module ] = line, entry plugins. append ( plugin ) return plugins",True,module in modules,module in modules,0.672839343547821 3764,"def __codeanalysis_settings_changed ( self, current_finfo ) : if self. data : run_pyflakes, run_pep8 = self. pyflakes_enabled, self. pep8_enabled for finfo in self. data : self. __update_editor_margins ( finfo. editor ) finfo. cleanup_analysis_results ( ) if : if current_finfo is not finfo : finfo. run_code_analysis ( run_pyflakes, run_pep8 )",False,(run_pyflakes or run_pep8) and current_finfo is not None,self.has_key(finfo),0.6502747535705566 3765,"def __call__ ( self, model_output : ModelOutput ) -> ModelOutput : for model_output_i in model_output : instances = model_output_i [ ""instances"" ] if : continue instances_filtered = instances [ instances. scores >= self. 
min_score ] model_output_i [ ""instances"" ] = instances_filtered return model_output",False,not instances.has('scores'),instances is None,0.6536625623703003 3766,"def randomly_choose_false_edges ( nodes, true_edges, num ) : true_edges_set = set ( true_edges ) tmp_list = list ( ) all_flag = False for _ in range ( num ) : trial = 0 while True : x = nodes [ random. randint ( 0, len ( nodes ) - 1 ) ] y = nodes [ random. randint ( 0, len ( nodes ) - 1 ) ] trial += 1 if : all_flag = True break if x!= y and ( x, y ) not in true_edges_set and ( y, x ) not in true_edges_set : tmp_list. append ( ( x, y ) ) break if all_flag : break return tmp_list",False,trial >= 1000,trial == true_edges,0.682117223739624 3767,"def _toplevel_window_state_event_cb ( self, toplevel, event ) : """"""Handle transitions between fullscreen and windowed."""""" if event. changed_mask & Gdk. WindowState. FULLSCREEN : fullscreen = event. new_window_state & Gdk. WindowState. FULLSCREEN if : if self. autohide_enabled : self. _connect_autohide_events ( ) self. _start_autohide_timeout ( ) for floating in self. _floating : floating. show_all ( ) else : self. _disconnect_autohide_events ( ) self. _show_autohide_widgets ( ) self. _is_fullscreen = bool ( fullscreen ) self. _update_canvas_scrolledwindow ( ) if event. changed_mask & Gdk. WindowState. MAXIMIZED : maximized = event. new_window_state & Gdk. WindowState. MAXIMIZED self. _is_maximized = bool ( maximized )",True,fullscreen,fullscreen,0.6715191006660461 3768,"def main ( client ) : cms_metadata_service = client. GetService ( ""CmsMetadataService"", version = ""v202005"" ) statement = ad_manager. StatementBuilder ( version = ""v202005"" ) while True : response = cms_metadata_service. getCmsMetadataValuesByStatement ( statement. ToStatement ( ) ) if : for cms_metadata_value in response [ ""results"" ] : print ( ( 'CMS metadata value with Id %d and name ""%s"", associated with' 'the CmsMetadataKey with id %d and name ""%s"", was found.\n' ) % ( cms_metadata_value [ ""cmsMetadataValueId"" ], cms_metadata_value [ ""valueName"" ], cms_metadata_value [ ""key"" ] [ ""id"" ], cms_metadata_value [ ""key"" ] [ ""name"" ], ) ) statement. offset += statement. limit else : break print ( ""\nNumber of results found: %s"" % response [ ""totalResultSetSize"" ] )",False,'results' in response and len(response['results']),response[0] == 'cmsMetadataValueId',0.6575781106948853 3769,"def validate ( self, attrs ) : credentials = { self. username_field : attrs. get ( self. username_field ), ""password"" : attrs. get ( ""password"" ), } if all ( credentials. values ( ) ) : user = authenticate ( ** credentials ) if : if not user. is_active : msg = _ ( ""User account is disabled."" ) raise serializers. ValidationError ( msg ) payload = jwt_payload_handler ( user ) return { ""token"" : jwt_encode_handler ( payload ), ""user"" : user } else : msg = _ ( ""Unable to login with provided credentials."" ) raise serializers. ValidationError ( msg ) else : msg = _ ( 'Must include ""{username_field}"" and ""password"".' ) msg = msg. format ( username_field = self. username_field ) raise serializers. ValidationError ( msg )",False,user,self.username_field,0.6883484721183777 3770,"def __getitem__ ( self, index ) : if self. _check ( ) : if : if index < 0 or index >= len ( self. features ) : raise IndexError ( index ) if self. features [ index ] is None : feature = self. device. feature_request ( FEATURE. FEATURE_SET, 0x10, index ) if feature : ( feature, ) = _unpack ( ""!H"", feature [ : 2 ] ) self. 
features [ index ] = FEATURE [ feature ] return self. features [ index ] elif isinstance ( index, slice ) : indices = index. indices ( len ( self. features ) ) return [ self. __getitem__ ( i ) for i in range ( * indices ) ]",False,"isinstance(index, int)",self.features is not None,0.65706866979599
    3771,"def getNextInvalidLink ( ) : for originID, targetID, originType, targetType in getAllDataLinkIDs ( ) : if ( originType, targetType ) in approvedLinkTypes : continue origin = idToSocket ( originID ) target = idToSocket ( targetID ) if : approvedLinkTypes. add ( ( originType, targetType ) ) else : return origin, target return None, None",False,"isConnectionValid(origin, target)",targetType,0.6510434150695801
    3772,"def main ( ) : setupLogging ( ) if ( platform. system ( ) == ""Linux"" and os. getuid ( )!= 0 and rqd. rqconstants. RQD_BECOME_JOB_USER ) : logging. critical ( ""Please run launch as root"" ) sys. exit ( 1 ) try : opts, argv = getopt. getopt ( sys. argv [ 1 : ], ""hdc:"", [ ""help"", ""daemon"", ""nimbyoff"", ""update"" ] ) except getopt. GetoptError : usage ( ) sys. exit ( 1 ) optNimbyOff = False for o, a in opts : if o in [ ""-h"", ""--help"" ] : usage ( ) sys. exit ( 0 ) if : pass if o in [ ""--nimbyoff"" ] : optNimbyOff = True rqd. rqutil. permissionsLow ( ) logging. warning ( ""RQD Starting Up"" ) rqCore = rqd. rqcore. RqCore ( optNimbyOff ) rqCore. start ( )",False,"o in ['-d', '--daemon']","a in [--h"", '--help']",0.6554223895072937
    3773,"def on_batch_end ( self, training_state, snapshot = False ) : if snapshot & ( self. snapshot_step is not None ) : self. save ( training_state. step ) if None not in ( self. best_snapshot_path, self. best_val_accuracy, training_state. val_acc, ) : if : self. best_val_accuracy = training_state. val_acc self. save_best ( int ( 10000 * round ( training_state. val_acc, 4 ) ) )",False,training_state.val_acc > self.best_val_accuracy,not self.best_snapshot_path,0.6495373249053955
    3774,"def test_update_topic ( self ) : async with self. chat_client : await self. _create_thread ( ) topic = ""update topic"" async with self. chat_thread_client : await self. chat_thread_client. update_topic ( topic = topic ) if : await self. chat_client. delete_chat_thread ( self. thread_id )",False,not self.is_playback(),self.thread_id,0.6489672660827637
    3775,"def save_all_changed_extensions ( self ) : """"""Save configuration changes to the user config file."""""" has_changes = False for ext_name in self. extensions : options = self. extensions [ ext_name ] for opt in options : if : has_changes = True if has_changes : self. ext_userCfg. Save ( )",False,"self.set_extension_value(ext_name, opt)",opt in self.extensions,0.645179033279419
    3776,"def get_help_width ( ) : """"""Returns the integer width of help lines that is used in TextWrap."""""" if not sys. stdout. isatty ( ) or termios is None or fcntl is None : return _DEFAULT_HELP_WIDTH try : data = fcntl. ioctl ( sys. stdout, termios. TIOCGWINSZ, ""1234"" ) columns = struct. unpack ( ""hh"", data ) [ 1 ] if : return columns return int ( os. getenv ( ""COLUMNS"", _DEFAULT_HELP_WIDTH ) ) except ( TypeError, IOError, struct. error ) : return _DEFAULT_HELP_WIDTH",False,columns >= _MIN_HELP_WIDTH,len(columns),0.6583073139190674
    3777,"def __init__ ( self, host, port = None, username = None, password = None, protocol = ""https"" ) : if not port : if protocol == ""https"" : port = 443 elif : port = 80 else : raise Exception ( ""Can't determine port from protocol. "" ""Please specifiy a port."" ) self. cwd = ""/"" self. baseurl = ""%s://%s:%d"" % ( protocol, host, port ) self. host = host self. port = port self. protocol = protocol self. username = username self. password = password if username and password : self. auth = base64. encodestring ( ""%s:%s"" % ( username, password ) ). replace ( ""\n"", """" ) else : self. auth = None",True,protocol == 'http',protocol == 'http',0.6666197776794434
    3778,"def tamper ( payload, ** kwargs ) : junk_chars = ""!#$%&()*~+-_.,:;?@[/|\]^`"" retval = """" for i, char in enumerate ( payload, start = 1 ) : amount = random. randint ( 10, 15 ) if char == "">"" : retval += "">"" for _ in range ( amount ) : retval += random. choice ( junk_chars ) elif char == ""<"" : retval += ""<"" for _ in range ( amount ) : retval += random. choice ( junk_chars ) elif : for _ in range ( amount ) : retval += random. choice ( junk_chars ) else : retval += char return retval",False,char == '',char == '>',0.6676473021507263
    3779,"def process_cookie_options ( self, grab, req ) : if grab. config [ ""cookiefile"" ] : try : grab. cookies. load_from_file ( grab. config [ ""cookiefile"" ] ) except IOError as ex : logging. error ( ex ) request_host = urlsplit ( req. url ). hostname if request_host : if request_host. startswith ( ""www."" ) : request_host_no_www = request_host [ 4 : ] else : request_host_no_www = request_host if grab. config [ ""cookies"" ] : if : raise error. GrabMisuseError ( ""cookies option should"" "" be a dict"" ) for name, value in grab. config [ ""cookies"" ]. items ( ) : grab. cookies. set ( name = name, value = value, domain = request_host_no_www ) cookie_hdr = grab. cookies. get_cookie_header ( req ) if cookie_hdr : req. headers [ ""Cookie"" ] = cookie_hdr",False,"not isinstance(grab.config['cookies'], dict)","hasattr(grab.config['cookies']) and hasattr(grab.config['cookies'], 'keys')",0.6539105772972107
    3780,"def update_files ( data, python = True ) : """"""Update files with new version number."""""" if len ( sys. argv )!= 2 : e = Exception ( 'Specify PEP440 version: ""%s 1.2.3""' % sys. argv [ 0 ] ) raise ( e ) version = verify_pep440 ( sys. argv [ 1 ] ) if not python : version = version. base_version for filename, regex in data. items ( ) : filename = os. path. join ( BASE_DIR, filename ) matched = False pattern = re. compile ( regex ) for line in fileinput. input ( filename, inplace = True ) : if pattern. match ( line. rstrip ( ) ) : matched = True line = re. sub ( regex, r""\g<1>%s\g<2>"" % version, line. rstrip ( ) )    print ( line )   if :    raise Exception ( 'In file ""%s"", did not find regex ""%s""' % ( filename, regex ) )",True,not matched,not matched,0.6663120985031128
    3781,"def _compute_operands ( self, v ) :  expl_operand_list = [ ]  impl_operand_list = [ ]  for op in v. parsed_operands :   s = None   if op. name in [ ""MEM0"", ""MEM1"" ] :    s = ""MEM""   elif op. name in [ ""IMM0"", ""IMM1"" ] :    s = ""IMM""   elif op. type == ""nt_lookup_fn"" :    s = op. lookupfn_name    s = re. sub ( r""[()]*"", """", s )    s = re. sub ( r""_[RBN].*"", """", s )    s = re. sub ( r""FINAL_.*"", """", s )   elif op. type == ""reg"" :    s = op. bits    s = re. sub ( r""XED_REG_"", """", s )   elif op. type == ""imm_const"" :    if :     continue    s = op. name   else :    msgb ( ""UNHANDLED"", ""{}"". format ( op ) )   if s :    if op. visibility in [ ""IMPLICIT"", ""SUPPRESSED"" ] :     impl_operand_list. append ( s )    if op. visibility in [ ""EXPLICIT"", ""DEFAULT"" ] :     expl_operand_list. append ( s )  return expl",False,"op.name in ['BCAST', 'SCALE']",s,0.6584795713424683
    3782,"def _real_extract ( self, url ) :  display_id = self. _match_id ( url )  webpage = self. _download_webpage ( url, display_id )  drupal_settings = self. _parse_json (   self. _search_regex (    r""jQuery\.extend\(Drupal\.settings\s*,\s*({.+?})\);"",    webpage,    ""drupal settings"",   ),   display_id,  )  entries = [ ]  for config_profile in drupal_settings. get ( ""ren_jwplayer"", { } ). values ( ) :   media_id = config_profile. get ( ""mediaid"" )   if :    continue   media_id = compat_str ( media_id )   entries. append ( self. url_result ( ""rentv:"" + media_id, ""RENTV"", media_id ) )  return self. playlist_result ( entries, display_id )",True,not media_id,not media_id,0.6625657081604004
    3783,"def whole ( self, mapper, x, y, what ) :  distance = sqrt ( x * x + y * y )  if distance < STICK_PAD_MAX_HALF :     self. angle = None   if mapper. was_touched ( what ) :    self. action. change ( mapper, 0, 0, what )  else :     angle = atan2 ( x, y )     if :       self. angle, angle = angle, 0    self. _haptic_counter = 0   else :    self. angle, angle = angle, self. angle - angle       if angle > PI :         angle -= 2 * PI       elif angle < - PI :         angle += 2 * PI     angle *= 10000.0     if self. haptic :    self. _haptic_counter += angle * self. speed / self. haptic. frequency    if abs ( self. _haptic_counter ) > 0.5 :     if self. _haptic_counter > 0.5 :      self. _haptic_counter -= 0.5     else :      self. _haptic_counter += 0.",False,self.angle is None,self.angle > 0,0.6592676639556885
    3784,"def recent_results ( self, items ) :  """"""Set recent results from provider.""""""  if not recent_results. get ( self. get_id ( ) ) :   recent_results. update ( { self. get_id ( ) : [ ] } )  if items :   add_to_list = [ ]   for item in items :    if :     add_to_list += [ item ]   results = add_to_list + recent_results [ self. get_id ( ) ]   recent_results [ self. get_id ( ) ] = results [ : self. max_recent_items ]",False,item not in recent_results[self.get_id()],self._recent_items(item),0.6495741009712219
    3785,"def find_test_functions ( collections ) :  if not isinstance ( collections, list ) :   collections = [ collections ]  functions = [ ]  for collection in collections :   if :    collection = vars ( collection )   keys = collection. keys ( )   keys. sort ( )   for key in keys :    value = collection [ key ]    if isinstance ( value, types. FunctionType ) and hasattr ( value, ""unittest"" ) :     functions. append ( value )  return functions",False,"not isinstance(collection, dict)","isinstance(collection, types.Dict)",0.656400203704834
    3786,"def test_chunkcoding ( self ) :  tstring_lines = [ ]  for b in self. tstring :   lines = b. split ( b""\n"" )   last = lines. pop ( )   assert last == b""""   lines = [ line + b""\n"" for line in lines ]   tstring_lines. append ( lines )  for native, utf8 in zip ( * tstring_lines ) :   u = self. decode ( native ) [ 0 ]   self. assertEqual ( u, utf8. decode ( ""utf-8"" ) )   if :    self. assertEqual ( native, self. encode ( u ) [ 0 ] )",False,self.roundtriptest,u != native,0.6564186811447144
    3787,"def assertionHelper ( left_type, left_op, op, right_type, right_op, expected ) :  """"""Helper function used to figure out which test cases fail without blowing up the rest of the test.""""""  import clr  import System  expression_str = ""{0}({1}) {2} {3}({4})"". format (   left_type, left_op, str ( op ), right_type, right_op  )   if unsupported_operands. count ( left_type + op + right_type ) > 0 :   with self. assertRaises ( TypeError ) :    eval ( expression_str )   return  try :   expression = eval ( expression_str )  except TypeError as e :   self. fail ( ""TYPE BUG: %s"" % expression_str )  try :   self. assertEqual ( expression, expected )   if :    self. fail ( ""NO BUG FOR: %s"" % expression_str )  except :   if :    return   self. fail ( expression_str )",False,known_bugs.count(left_type + op + right_type) > 0,not expected,0.6478638648986816
    3788,"def _should_include_path ( path, includes, excludes ) :  """"""Return True iff the given path should be included.""""""  from os. path import basename  from fnmatch import fnmatch  base = basename ( path )  if includes :   for include in includes :    if :     try :      log. debug ( ""include `%s' (matches `%s')"", path, include )     except ( NameError, AttributeError ) :      pass     break   else :    try :     log. debug ( ""exclude `%s' (matches no includes)"", path )    except ( NameError, AttributeError ) :     pass    return False  for exclude in excludes :   if fnmatch ( base, exclude ) :    try :     log. debug ( ""exclude `%s' (matches `%s')"", path, exclude )    except ( NameError, AttributeError ) :     pass    return False  return True",True,"fnmatch(base, include)","fnmatch(base, include)",0.6508344411849976
    3789,"def _try_make_reader ( self ) :  if self. reader :   return  if not self. data_file :   files = glob ( self. _data_file_pattern )   if not files :    return   files. sort ( )   self. log. debug ( ""Files found by pattern: %s"", files )   if not os. path. getsize ( files [ - 1 ] ) :    return   self. data_file = files [ - 1 ]  self. data_file = self. engine. find_file ( self. data_file )  if not os. path. exists ( self. data_file ) :   if :    self. log. debug ( ""File not exists yet: %s"", self. data_file )    return   else :    msg = ""File has not appeared within %ss: %s"" % (     self. _file_exists_wait,     self. data_file,    )    raise TaurusInternalException ( msg )  self. log. info ( ""Will load external results from file: %s"", self. data_file )  self. label = self. data_file  self. reader = self. _get_reader ( )  if isinstance ( self. engine. aggregator, ConsolidatingAggregator ) :   self. engine. aggregator. add_underling ( self. reader )   self. engine. aggregator. add_listener ( self )",False,time.time() - self._file_check_ts < self._file_exists_wait,os.path.exists(self.data_file),0.6542251110076904
    3790,"def check_package ( self, package, package_dir ) :  """"""Check namespace packages' __init__ for declare_namespace""""""  try :   return self. packages_checked [ package ]  except KeyError :   pass  init_py = _build_py. check_package ( self, package, package_dir )  self. packages_checked [ package ] = init_py  if not init_py or not self. distribution. namespace_packages :   return init_py  for pkg in self. distribution. namespace_packages :   if :    break  else :   return init_py  f = open ( init_py, ""rU"" )  if ""declare_namespace"" not in f. read ( ) :   from distutils import log   log. warn (    ""WARNING: %s is a namespace package, but its __init__.py does\n""    ""not declare_namespace(); setuptools 0.7 will REQUIRE this!\n""    '(See the setuptools manual under ""Namespace Packages"" for'   ""details.)\n"",    package,   )  f. close ( )  return init_py",False,pkg == package or pkg.startswith(package + '.'),pkg in self.packages_checked,0.6520081162452698
    3791,"def test_invalid_mountinfo ( self ) :  line = (   ""20 1 252:1 / / rw,relatime - ext4 /dev/mapper/vg0-root""   ""rw,errors=remount-ro,data=ordered""  )  elements = line. split ( )  for i in range ( len ( elements ) + 1 ) :   lines = [ "" "". join ( elements [ 0 : i ] ) ]   if :    expected = None   else :    expected = ( ""/dev/mapper/vg0-root"", ""ext4"", ""/"" )   self. assertEqual ( expected, util. parse_mount_info ( ""/"", lines ) )",False,i < 10,has_mountinfo(),0.6805335283279419
    3792,"def get_cluster_config ( cluster = None ) :  if cluster and cluster. get ( ""connector"" ) :   cluster_config = cluster  elif cluster and cluster. get ( ""id"" )!= CLUSTER_ID. get ( ) :   if :    compute_end_point = (     cluster [ ""compute_end_point"" ] [ 0 ]     if type ( cluster [ ""compute_end_point"" ] ) == list     else cluster [ ""compute_end_point"" ]    )    cluster_config = {     ""server_host"" : compute_end_point,     ""name"" : cluster [ ""name"" ],    }   else :    cluster_config = Cluster ( user = None ). get_config (     cluster [ ""id"" ]    )  else :   cluster_config = None  return cluster_config",False,'altus:dataware:k8s' in cluster['id'],cluster and cluster['compute_end_point'],0.6575764417648315
    3793,"def it_should_undrain_and_drain ( context, num_undrain_expected, num_drain_expected ) :  num_undrain_expected = int ( num_undrain_expected )  num_drain_expected = int ( num_drain_expected )  for _ in range ( 10 ) :   print ( ""currently drained: %r"" % drain_lib. TestDrainMethod. downed_task_ids )   print ( ""drained previously: %r"" % context. drained_tasks )   num_drained = len (    drain_lib. TestDrainMethod. downed_task_ids - context. drained_tasks   )   num_undrained = len (    context. drained_tasks - drain_lib. TestDrainMethod. downed_task_ids   )   if :    return   time. sleep ( 1 )  else :   raise Exception (    ""Expected %d tasks to drain and %d to undrain, saw %d and %d""    % ( num_drain_expected, num_undrain_expected, num_drained, num_undrained )   )",False,num_drained >= num_drain_expected and num_undrained >= num_undrain_expected,num_undrained == 0,0.6489992737770081
    3794,"def require_access_to_dropdown_queries ( user, query_def ) :  parameters = query_def. get ( ""options"", { } ). get ( ""parameters"", [ ] )  dropdown_query_ids = set (   [ str ( p [ ""queryId"" ] ) for p in parameters if p [ ""type"" ] == ""query"" ]  )  if dropdown_query_ids :   groups = models. Query. all_groups_for_query_ids ( dropdown_query_ids )   if :    abort (     400,     message = ""You are trying to associate a dropdown query that does not have a matching group. ""     ""Please verify the dropdown query id you are trying to associate with this query."",    )   require_access ( dict ( groups ), user, view_only )",False,len(groups) < len(dropdown_query_ids),groups and len(groups) > 0,0.6494700908660889
    3795,"def _yield_accessible_unix_file_names ( path ) :  """"""yield file names of executable files in path.""""""  if not os. path. exists ( path ) :   return  for file_ in scandir ( path ) :   try :    if :     yield file_. name   except OSError :       pass",False,"file_.is_file() and os.access(file_.path, os.X_OK)",os.path.exists(file_),0.6489675045013428
    3796,"def _find_ttinfo ( self, dt, laststd = 0 ) :  timestamp = (   ( dt. toordinal ( ) - EPOCHORDINAL ) * 86400   + dt. hour * 3600   + dt. minute * 60   + dt. second  )  idx = 0  for trans in self. _trans_list :   if timestamp < trans :    break   idx += 1  else :   return self. _ttinfo_std  if idx == 0 :   return self. _ttinfo_before  if laststd :   while idx > 0 :    tti = self. _trans_idx [ idx - 1 ]    if :     return tti    idx -= 1   else :    return self. _ttinfo_std  else :   return self. _trans_idx [ idx - 1 ]",False,not tti.isdst,tti > 0,0.6638116836547852
    3797,"def __call__ ( self, path = ""."" ) :  l = os. listdir ( path )  l. sort ( )  for f in l :   st = os. stat ( ""%s/%s"" % ( path, f ) )   if :    print ( ""    %s"" % f )   else :    print ( ""% 8d %s"" % ( st [ 6 ], f ) )",False,st[0] & 16384,st[6] == 6,0.6641519069671631
    3798,"def _pyro_sample ( self, msg ) :  is_collapsible = getattr ( msg [ ""fn"" ], ""collapsible"", False )   if is_collapsible :   conj_node, parent = None, None   for site_name in self. trace. observation_nodes + self. trace. stochastic_nodes :    parent = getattr ( self. trace. nodes [ site_name ] [ ""fn"" ], ""parent"", None )    if parent is not None and parent. _latent. site_name == msg [ ""name"" ] :     conj_node = self. trace. nodes [ site_name ]     break   assert (    conj_node is not None   ), ""Collapsible latent site `{}` with no corresponding conjugate site."". format (    msg [ ""name"" ]   )   msg [ ""fn"" ] = parent. posterior ( conj_node [ ""value"" ] )   msg [ ""value"" ] = msg [ ""fn"" ]. sample ( )   else :   name = msg [ ""name"" ]   if name in self. trace :    guide_msg = self. trace. nodes [ name ]    if :     return None    if guide_msg [ ""type"" ]!= ""sample"" :     raise RuntimeError ( ""site {} must be sample in trace"". format ( name ) )    msg [ ""done"" ] = True    msg [ ""value"" ] = guide_msg [ ""value"" ]    msg [ ""infer"" ]",False,msg['is_observed'],guide_msg is None,0.6555825471878052
    3799,"def authdebug_view ( context, request ) :  view_name = getattr ( request, ""view_name"", None )  if authn_policy and authz_policy :   if permission is NO_PERMISSION_REQUIRED :    msg = ""Allowed (NO_PERMISSION_REQUIRED)""   elif :    msg = ""Allowed (no permission registered)""   else :    principals = authn_policy. effective_principals ( request )    msg = str ( authz_policy. permits ( context, principals, permission ) )  else :   msg = ""Allowed (no authorization policy in use)""  view_name = getattr ( request, ""view_name"", None )  url = getattr ( request, ""url"", None )  msg = ""debug_authorization of url %s (view name %r against "" ""context %r): %s"" % (   url,   view_name,   context,   msg,  )  if logger :   logger. debug ( msg )  if request is not None :   request. authdebug_message = msg  return view ( context, request )",True,permission is None,permission is None,0.6677500009536743
    3800,"def __setitem__ ( self, ndx, val ) :     exprdata = None  if ndx in self. _data :   exprdata = self. _data [ ndx ]  else :   _ndx = normalize_index ( ndx )   if :    exprdata = self. _data [ _ndx ]  if exprdata is None :   raise KeyError (    ""Cannot set the value of Expression '%s' with ""    ""invalid index '%s'"" % ( self. cname ( True ), str ( ndx ) )   )     exprdata. set_value ( val )",True,_ndx in self._data,_ndx in self._data,0.6684638261795044
    3801,"def _validate_cors_for_route ( route_url, route_methods ) :   entries_with_cors = [ entry for entry in route_methods. values ( ) if entry. cors ]  if entries_with_cors :         if :    raise ValueError (     ""Route entry cannot have both cors=True and ""     ""methods=['OPTIONS',...] configured.  When ""     ""CORS is enabled, an OPTIONS method is automatically ""     ""added for you.  Please remove 'OPTIONS' from the list of ""     ""configured HTTP methods for: %s"" % route_url    )   if not all (    entries_with_cors [ 0 ]. cors == entry. cors for entry in entries_with_cors   ) :    raise ValueError (     ""Route may not have multiple differing CORS configurations. ""     'Please ensure all views for ""%s"" that have CORS configured'    ""have the same CORS configuration."" % route_url    )",False,'OPTIONS' in route_methods,route_url.startswith('http') and (not route.methods),0.6583787202835083
    3802,"def func_std_string ( func_name ) :  if func_name [ : 2 ] == ( ""~"", 0 ) :     name = func_name [ 2 ]   if :    return ""{%s}"" % name [ 1 : - 1 ]   else :    return name  else :   return ""%s:%d(%s)"" % func_name",False,name.startswith('<') and name.endswith('>'),"name[0] == (StringType, 0) and name[1:-1] == (StringType, 0)",0.6496857404708862
    3803,"def del_ ( self, key ) :  initial_hash = hash_ = self. hash ( key )  while True :   if self. _keys [ hash_ ] is self. _empty :       return None   elif self. _keys [ hash_ ] == key :       self. _keys [ hash_ ] = self. _deleted    self. _values [ hash_ ] = self. _deleted    self. _len -= 1    return   hash_ = self. _rehash ( hash_ )   if :       return None",False,initial_hash == hash_,hash_ == initial_hash,0.6572629809379578
    3804,"def parse ( self, date_string, parse_method, settings = None ) :  date_string = str ( date_string )  if not date_string. strip ( ) :   raise ValueError ( ""Empty string"" )  date_string = strip_braces ( date_string )  date_string, ptz = pop_tz_offset_from_string ( date_string )  date_obj, period = parse_method ( date_string, settings = settings )  _settings_tz = settings. TIMEZONE. lower ( )  if ptz :   if hasattr ( ptz, ""localize"" ) :    date_obj = ptz. localize ( date_obj )   else :    date_obj = date_obj. replace ( tzinfo = ptz )   if :    date_obj = apply_timezone ( date_obj, settings. TIMEZONE )  else :   if ""local"" in _settings_tz :    stz = get_localzone ( )    if hasattr ( stz, ""localize"" ) :     date_obj = stz. localize ( date_obj )    else :     date_obj = date_obj. replace ( tzinfo = stz )   else :    date_obj = localize_timezone ( date_obj, settings. TIMEZONE )  if settings. TO_TIMEZONE :   date_obj = apply_timezone ( date_obj, settings. TO_TIMEZONE )  if not settings. RETURN_AS_TIMEZONE_AWARE or (   settings. RETURN_AS_TIMEZONE_AWARE   and ""default"" == settings. RETURN_AS_TIMEZONE_AWARE   and not ptz <",False,'local' not in _settings_tz,settings.TIMEZONE,0.6483238935470581
    3805,"def close ( self ) :  """"""Close the session. This will release any borrowed resources, such as connections, and will roll back any outstanding transactions.""""""  if self. _connection :   if self. _autoResult :    if :     try :      self. _autoResult. consume ( )      self. _collect_bookmark ( self. _autoResult. _bookmark )     except Exception as error :           self. _autoResult = None      self. _state_failed = True   if self. _transaction :    if self. _transaction. closed ( ) is False :     self. _transaction. rollback ( )    self. _transaction = None   try :    if self. _connection :     self. _connection. send_all ( )     self. _connection. fetch_all ( )       except Neo4jError :    pass   except TransactionError :    pass   except ServiceUnavailable :    pass   except SessionExpired :    pass   finally :    self. _disconnect ( )   self. _state_failed = False   self. _closed = True",False,self._state_failed is False,self._bookmark,0.6607035994529724
    3806,"def display ( self, custom_tz = None, utc_shift = None ) :  try :   arw = self. as_arrow ( )   if custom_tz :    try :     arw = arw. to ( custom_tz )    except RuntimeError :     pass   elif utc_shift is not None :    arw = arw. to ( ShiftedTimezone ( int ( utc_shift ) ) )   else :    arw = arw. to ( self. obj_session. GetParameter ( ""timezone"", ""UTC"" ) )     formatted_date = arw. format ( self. timeformat )   formatted_tz = arw. format ( ""Z"" )   if :    formatted_tz = ""Z""   return formatted_date + formatted_tz  except ValueError as e :   return obj. NoneObject ( ""Error: %s"", e )",False,"formatted_tz in ('-0000', '+0000')",formatted_date and formatted_tz,0.6527063846588135
    3807,"def create_primary_collection_for_provider ( sender, instance, created, ** kwargs ) :  if created :   Collection = apps. get_model ( ""osf.Collection"" )   user = getattr ( instance, ""_creator"", None )   if :    c = Collection (     title = ""{}'s Collection"". format ( instance. name ),     creator = user,     provider = instance,     is_promoted = True,     is_public = True,    )    c. save ( )    instance. primary_collection = c    instance. save ( )   else :       sentry. log_message (     ""Unable to create primary_collection for {}Provider {}"". format (      instance. readable_type. capitalize ( ), instance. name     )    )",True,user,user,0.6853420734405518
    3808,"def __init__ (  self,  url,  version = None,  data = None,  full_response = False,  http = None,  timeout = None,  sync = False,  loop = None,  encoding = ""ascii"",  ** kw ) :  self. sync = sync  self. _url = url  self. _version = version or self. __class__. default_version  self. _full_response = full_response  self. _data = data if data is not None else { }  if not http :   timeout = timeout if timeout is not None else self. default_timeout   if :    loop = new_event_loop ( )   http = HttpClient ( timeout = timeout, loop = loop, ** kw )  http. headers [ ""accept"" ] = ""application/json, text/*; q=0.5""  http. headers [ ""content-type"" ] = ""application/json""  self. _http = http  self. _encoding = encoding",False,sync and (not loop),not loop,0.6537997722625732
    3809,"def handle_addl_headers ( self, headers ) :  for key, value in headers :   if key == ""x-goog-hash"" :    for hash_pair in value. split ( "","" ) :     alg, b64_digest = hash_pair. strip ( ). split ( ""="", 1 )     self. cloud_hashes [ alg ] = binascii. a2b_base64 ( b64_digest )   elif :    self. component_count = int ( value )   elif key == ""x-goog-generation"" :    self. generation = value           elif key == ""x-goog-stored-content-encoding"" :    self. content_encoding = value   elif key == ""x-goog-stored-content-length"" :    self. size = int ( value )   elif key == ""x-goog-storage-class"" :    self. storage_class = value",True,key == 'x-goog-component-count',key == 'x-goog-component-count',0.6445107460021973
    3810,"def test_commit_msg_hook_fail ( self, rw_repo ) :  index = rw_repo. index  hp = _make_hook (   index. repo. git_dir, ""commit-msg"", ""echo stdout; echo stderr 1>&2; exit 1""  )  try :   index. commit ( ""This should fail"" )  except HookExecutionError as err :   if :    self. assertIsInstance ( err. status, OSError )    self. assertEqual ( err. command, [ hp ] )    self. assertEqual ( err. stdout, """" )    self. assertEqual ( err. stderr, """" )    assert str ( err )   else :    self. assertEqual ( err. status, 1 )    self. assertEqual ( err. command, [ hp ] )    self. assertEqual ( err. stdout, ""\n  stdout:'stdout\n'"" )    self. assertEqual ( err. stderr, ""\n  stderr:'stderr\n'"" )    assert str ( err )  else :   raise AssertionError ( ""Should have cought a HookExecutionError"" )",False,is_win,"hasattr(err, 'status')",0.663103461265564
    3811,"def _reset ( self ) :  self. _handle_connect ( )  if self. rewarder_session :   if :    env_id = random. choice ( self. _sample_env_ids )    logger. info ( ""Randomly sampled env_id={}"". format ( env_id ) )   else :    env_id = None   self. rewarder_session. reset ( env_id = env_id )  else :   logger. info (    ""No rewarder session exists, so cannot send a reset via the rewarder channel""   )  self. _reset_mask ( )  return [ None ] * self. n",False,self._sample_env_ids,len(self._sample_env_ids) > 0,0.6583123207092285
    3812,def __next__ ( self ) :  try :   data = next ( self. iter_loader )  except StopIteration :   self. _epoch += 1   if :    self. _dataloader. sampler. set_epoch ( self. _epoch )   self. iter_loader = iter ( self. _dataloader )   data = next ( self. iter_loader )  return data,False,"hasattr(self._dataloader.sampler, 'set_epoch')","hasattr(self, '_dataloader')",0.6535096168518066
    3813,"def unicode_argv ( ) :  if iswindows :               from ctypes import POINTER, byref, cdll, c_int, windll   from ctypes. wintypes import LPCWSTR, LPWSTR   GetCommandLineW = cdll. kernel32. GetCommandLineW   GetCommandLineW. argtypes = [ ]   GetCommandLineW. restype = LPCWSTR   CommandLineToArgvW = windll. shell32. CommandLineToArgvW   CommandLineToArgvW. argtypes = [ LPCWSTR, POINTER ( c_int ) ]   CommandLineToArgvW. restype = POINTER ( LPWSTR )   cmd = GetCommandLineW ( )   argc = c_int ( 0 )   argv = CommandLineToArgvW ( cmd, byref ( argc ) )   if :       start = argc. value - len ( sys. argv )    return [ argv [ i ] for i in range ( start, argc. value ) ]       return [ ""ignoblekeyfetch.py"" ]  else :   argvencoding = sys. stdin. encoding or ""utf-8""   return [    arg if isinstance ( arg, str ) else str ( arg, argvencoding ) for arg in sys. argv   ]",False,argc.value > 0,"hasattr(argc, 'value')",0.6623032689094543
    3814,"def plugins_end_test ( self, retcode ) :  """"""Call end_test() on all plugins""""""  logger. info ( ""Finishing test..."" )  self. publish ( ""core"", ""stage"", ""end"" )  self. publish ( ""generator"", ""test_end"", time. time ( ) )  logger. info ( ""Stopping load generator and aggregator"" )  retcode = self. job. aggregator. end_test ( retcode )  logger. debug ( ""RC after: %s"", retcode )  logger. info ( ""Stopping monitoring"" )  for plugin in self. job. monitoring_plugins :   logger. info ( ""Stopping %s"", plugin )   retcode = plugin. end_test ( retcode ) or retcode   logger. info ( ""RC after: %s"", retcode )  for plugin in [   p   for p in self. plugins. values ( )   if p is not self. job. generator_plugin and p not in self. job. monitoring_plugins  ] :   logger. debug ( ""Finalize %s"", plugin )   try :    logger. debug ( ""RC before: %s"", retcode )    retcode = plugin. end_test ( retcode )    logger. debug ( ""RC after: %s"", retcode )   except Exception :    logger. error ( ""Failed finishing plugin %s"", plugin, exc_info = True )    if :     retcode = 1  return retcode",False,not retcode,retcode == 0,0.6817899942398071
    3815,"def _verify_endpoint_tests_conf ( endpoint_tests ) :  for t in endpoint_tests :   _check_all_keys_are_present_in_dict ( t, [ ""tests"", ""type"" ] )   _verify_type_specification ( t [ ""type"" ] )   assert 0 < len ( t [ ""tests"" ]. keys ( ) ) < 8   if ""is_endpoint_redirecting_properly"" in t [ ""tests"" ] :    _verify_is_endpoint_redirecting_properly (     t [ ""tests"" ] [ ""is_endpoint_redirecting_properly"" ]    )   if ""is_location_header_rewritten"" in t [ ""tests"" ] :    _verify_is_location_header_rewritten (     t [ ""tests"" ] [ ""is_location_header_rewritten"" ]    )   if ""is_upstream_correct"" in t [ ""tests"" ] :    _verify_is_upstream_correct_test_conf ( t [ ""tests"" ] [ ""is_upstream_correct"" ] )   if ""is_upstream_req_ok"" in t [ ""tests"" ] :    _verify_is_upstream_req_ok_test_conf ( t [ ""tests"" ] [ ""is_upstream_req_ok"" ] )   if ""are_upstream_req_headers_ok"" in t [ ""tests"" ] :    _verify_are_upstream_req_headers_ok (     t [ ""tests"" ] [ ""are_upstream_req_headers_ok"" ]    )   if ""is_response_",False,'is_unauthed_access_permitted' in t['tests'],'is_response_' is False,0.6493114233016968
    3816,"def import_modules ( directory, include = None, exclude = None ) :  base_name = os. path. relpath ( directory, shared_dir ). replace ( os. path. sep, ""."" )  log. debug ( ""Importing modules from: %s"", base_name )  for name in os. listdir ( directory ) :     if :    continue   if exclude and name in exclude :    continue     path = os. path. join ( directory, name )     if not is_module ( path, name ) :    continue     if not import_module ( path, base_name, name ) :    log. warn ( ""Unable to import module: %r"", name )",True,include and name not in include,include and name not in include,0.6586755514144897
    3817,"def get_v2_image_tags ( self, imagerepo, tags_only = False ) :  """"""Get list of tags in a repo from Docker Hub""""""  if ""/"" not in imagerepo :   imagerepo = ""library/"" + imagerepo  url = self. registry_url + ""/v2/"" + imagerepo + ""/tags/list""  Msg ( ). err ( ""tags url:"", url, l = Msg. DBG )  ( dummy, buf ) = self. _get_url ( url )  tags = [ ]  try :   if :    for tag in json. loads ( buf. getvalue ( ) ) [ ""tags"" ] :     tags. append ( tag )    return tags   else :    return json. loads ( buf. getvalue ( ) )  except ( IOError, OSError, AttributeError, ValueError, TypeError ) :   return [ ]",True,tags_only,tags_only,0.663990318775177
    3818,"def determine_encoding ( self ) :  while not self. eof and len ( self. raw_buffer ) < 2 :   self. update_raw ( )  if not isinstance ( self. raw_buffer, unicode ) :   if self. raw_buffer. startswith ( codecs. BOM_UTF16_LE ) :    self. raw_decode = codecs. utf_16_le_decode    self. encoding = ""utf-16-le""   elif :    self. raw_decode = codecs. utf_16_be_decode    self. encoding = ""utf-16-be""   else :    self. raw_decode = codecs. utf_8_decode    self. encoding = ""utf-8""  self. update ( 1 )",True,self.raw_buffer.startswith(codecs.BOM_UTF16_BE),self.raw_buffer.startswith(codecs.BOM_UTF16_BE),0.6479853391647339
    3819,"def main ( self ) :  self. clear_text ( )  active_handle = self. get_active ( ""Place"" )  if active_handle :   active = self. dbstate. db. get_place_from_handle ( active_handle )   if :    self. get_notes ( active )   else :    self. set_has_data ( False )  else :   self. set_has_data ( False )",True,active,active,0.690086841583252
    3820,"def handle ( type ) :  callback_field = ""_{}_callback"". format ( type )  code_string = getattr ( self. _args, type + ""_callback"" )  if code_string :   if :    raise SystemExit (     _ (      ""Error: Cannot pass a %s_callback to RepoFilter ""      ""AND pass --%s-callback"" % ( type, type )     )    )   if ""return "" not in code_string and type not in (    ""blob"",    ""commit"",    ""tag"",    ""reset"",   ) :    raise SystemExit (     _ ( ""Error: --%s-callback should have a return statement"" ) % type    )   setattr ( self, callback_field, make_callback ( type, code_string ) )",False,"getattr(self, callback_field)",callback_field in self._args,0.6506005525588989
    3821,"def list ( self, arg, opts ) :  counter = 0  if ( len ( self. filters ) ) > 0 :   self. protocol. sendData (    ""#%s %s %s %s""    % (     ""Filter id"". ljust ( 16 ),     ""Type"". ljust ( 22 ),     ""Routes"". ljust ( 6 ),     ""Description"". ljust ( 32 ),    ),    prompt = False,   )   for fid, _filter in self. filters. items ( ) :    counter += 1    routes = """"    if :     routes += ""MO ""    if _filter. __class__. __name__ in MTFILTERS :     routes += ""MT""    self. protocol. sendData (     ""#%s %s %s %s""     % (      str ( fid ). ljust ( 16 ),      str ( _filter. __class__. __name__ ). ljust ( 22 ),      routes. ljust ( 6 ),      repr ( _filter ). ljust ( 32 ),     ),     prompt = False,    )  ",False,_filter.__class__.__name__ in MOFILTERS,_filter.__class__.__name__ in PHMFILTERS,0.6573112607002258
    3822,"def _get_extended_color ( numbers ) :  n = numbers. pop ( 0 )  if n == 2 and len ( numbers ) >= 3 :     r = numbers. pop ( 0 )   g = numbers. pop ( 0 )   b = numbers. pop ( 0 )   if not all ( 0 <= c <= 255 for c in ( r, g, b ) ) :    raise ValueError ( )  elif n == 5 and len ( numbers ) >= 1 :     idx = numbers. pop ( 0 )   if idx < 0 :    raise ValueError ( )   elif :       return idx   elif idx < 232 :       r = ( idx - 16 ) // 36    r = 55 + r * 40 if r > 0 else 0    g = ( ( idx - 16 ) % 36 ) // 6    g = 55 + g * 40 if g > 0 else 0    b = ( idx - 16 ) % 6    b = 55 + b * 40 if b > 0 else 0   elif idx < 256 :       r = g = b = ( idx - 232 ) * 10 + 8   else :    raise ValueError ( )  else :   raise ValueError ( )  return r, g, b",True,idx < 16,idx < 16,0.6788617372512817
    3823,"def parse_converter_args ( argstr : str ) -> t. Tuple [ t. Tuple, t. Dict [ str, t. Any ] ] :  argstr += "",""  args = [ ]  kwargs = { }  for item in _converter_args_re. finditer ( argstr ) :   value = item. group ( ""stringval"" )   if value is None :    value = item. group ( ""value"" )   value = _pythonize ( value )   if :    args. append ( value )   else :    name = item. group ( ""name"" )    kwargs [ name ] = value  return tuple ( args ), kwargs",False,not item.group('name'),value <        """"""Set all body text.""""""   if self. test :     bkeys = sorted ( gnx2body. keys ( ) )   vkeys = sorted ( gnx2vnode. keys ( ) )   if :    g. trace ( ""KEYS MISMATCH"" )    g. printObj ( bkeys )    g. printObj ( vkeys )    if self. test :     sys. exit ( 1 )     for key in vkeys :    v = gnx2vnode. get ( key )    body = gnx2body. get ( key )    v. _bodyString = """". join ( body )  else :   assert root_v. gnx in gnx2vnode, root_v   assert root_v. gnx in gnx2body, root_v     for key in gnx2body :    body = gnx2body. get ( key )    v = gnx2vnode. get ( key )    assert v, ( key, v )    v. _bodyString = g. toUnicode ( """". join ( body ) )",False,bkeys != vkeys,len(bkeys) > 0,0.6657154560089111
    3825,"def iter_filters ( filters, block_end = False ) :  queue = deque ( filters )  while queue :   f = queue. popleft ( )   if f is not None and f. type in ( ""or"", ""and"", ""not"" ) :    if :     queue. appendleft ( None )    for gf in f. filters :     queue. appendleft ( gf )   yield f",True,block_end,block_end,0.6628148555755615
    3826,"def writeLinks ( fp, skel, config ) :  if skel is None :   return  ooLink ( fp, ""Model::%s"" % skel. name, ""Model::RootNode"", config )  for bone in skel. getBones ( ) :   if :    parentName = bone. parent. name if bone. parent else None    ooLink ( fp, ""Model::%s"" % bone. name, ""Model::%s"" % parentName, config )   else :    ooLink ( fp, ""Model::%s"" % bone. name, ""Model::%s"" % skel. name, config )   ooLink ( fp, ""NodeAttribute::%s"" % bone. name, ""Model::%s"" % bone. name, config )",True,bone.parent,bone.parent,0.664536714553833
    3827,"def process_element ( e ) :  if e. ref is not None :   tn = e. ref   name = e. ref. split ( "":"", 1 ) [ - 1 ]  elif e. name is not None :   tn = e. type   name = e. name   if :             self. debug2 ( ""  skipped: %s ur-type"", e. name )    return  else :   raise Exception ( ""dunno"" )  process_type ( tn, name, element = e )",False,tn is None,tn == e.type,0.6808267831802368
    3828,"def forwards ( self, orm ) :  ""Migrate the Users and Groups so they extend AccessEntity""  for user in orm [ ""core.User"" ]. objects. all ( ) :   entity = orm [ ""core.AccessEntity"" ]. objects. create ( )   entity. id = user. id   entity. save ( )   user. accessentity_ptr = entity   user. save ( )  for group in orm [ ""core.Group"" ]. objects. all ( ) :   group. accessentity_ptr = orm [ ""core.AccessEntity"" ]. objects. create ( )   group. accessentity_ptr. save ( )   if :    parent = orm [ ""core.Group"" ]. objects. get ( id = group. parent_id )    group. parent_id = parent. accessentity_ptr_id   group. save ( )  for user in orm [ ""core.User"" ]. objects. all ( ) :   group = orm [ ""core.Group"" ]. objects. get ( pk = user. default_group_id )   user. default_group = group. accessentity_ptr   user. save ( )  for obj in orm [ ""core.Object"" ]. objects. all ( ) :   obj. creator = obj. user   obj. save ( )",False,group.parent,self.parent_id,0.6618846654891968
    3829,"def _real_extract ( self, url ) :  mobj = re. match ( self. _VALID_URL, url )  track_id = mobj. group ( ""track_id"" )  query = { }  if track_id :   info_json_url = self. _API_V2_BASE + ""tracks/"" + track_id   full_title = track_id   token = mobj. group ( ""secret_token"" )   if :    query [ ""secret_token"" ] = token  else :   full_title = resolve_title = ""%s/%s"" % mobj. group ( ""uploader"", ""title"" )   token = mobj. group ( ""token"" )   if :    resolve_title += ""/%s"" % token   info_json_url = self. _resolv_url ( self. _BASE_URL + resolve_title )  info = self. _download_json (   info_json_url,   full_title,   ""Downloading info JSON"",   query = query,   headers = self. _HEADERS,  )  return self. _extract_info_dict ( info, full_title, token )",True,token,token,0.6830238699913025
    3830,"def read ( self, iprot ) :  if (   iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated   and isinstance ( iprot. trans, TTransport. CReadableTransport )   and self. thrift_spec is not None   and fastbinary is not None  ) :   fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) )   return  iprot. readStructBegin ( )  while True :   ( fname, ftype, fid ) = iprot. readFieldBegin ( )   if ftype == TType. STOP :    break   if fid == 1 :    if ftype == TType. STRING :     self. message = iprot. readString ( )    else :     iprot. skip ( ftype )   elif fid == 2 :    if ftype == TType. STRING :     self. log_context = iprot. readString ( )    else :     iprot. skip ( ftype )   elif fid == 3 :    if :     self. handle = QueryHandle ( )     self. handle. read ( iprot )    else :     iprot. skip ( ftype )   elif fid == 4 :    if ftype == TType. I32 :     self. errorCode = iprot. readI32 ( )    for native, utf8 in zip ( * [ StringIO ( f ). readlines ( ) for f in self. tstring ] ) :   u = self. decode ( native ) [ 0 ]   self. assertEqual ( u, utf8. decode ( ""utf-8"" ) )   if :    self. assertEqual ( native, self. encode ( u ) [ 0 ] )",False,self.roundtriptest,utf8.upper() == 'utf-8',0.6563947200775146
    3832,"def walk_depth_first ( name ) :  stack = [ name ]  while stack :   name = stack. pop ( )   if name in levels_by_name :    continue   if name not in graph or not graph [ name ] :    level = 0    add_level_to_name ( name, level )    continue   children = graph [ name ]   children_not_calculated = [    child for child in children if child not in levels_by_name   ]   if :    stack. append ( name )    stack. extend ( children_not_calculated )    continue   level = 1 + max ( levels_by_name [ lname ] for lname in children )   add_level_to_name ( name, level )",True,children_not_calculated,children_not_calculated,0.6556819081306458
    3833,"def added_variables ( self ) :  variables = set ( )  for task in self. tasks_data :   if ""tags"" not in task :    next   if ""when"" not in task :    task [ ""when"" ] = [ ]   elif isinstance ( task [ ""when"" ], str ) :    task [ ""when"" ] = [ task [ ""when"" ] ]   variables_to_add = {    tag for tag in task [ ""tags"" ] if self. _tag_is_valid_variable ( tag )   }   task [ ""when"" ] = [    ""{varname} | bool"". format ( varname = v ) for v in variables_to_add   ] + task [ ""when"" ]   variables. update ( variables_to_add )   if :    del task [ ""when"" ]  return variables",False,not task['when'],'when' in task,0.6660057306289673
    3834,"def get_selectable_values ( self, request ) :  shop = lfs. core. utils. get_default_shop ( request )  countries = [ ]  for country in shop. shipping_countries. all ( ) :   if :    selected = True   else :    selected = False   countries. append (    {     ""id"" : country. id,     ""name"" : country. name,     ""selected"" : selected,    }   )  return countries",False,country in self.value.all(),country.selected,0.6514890193939209
    3835,"def parse_voc_xml ( self, node : ET. Element ) -> Dict [ str, Any ] :  voc_dict : Dict [ str, Any ] = { }  children = list ( node )  if children :   def_dic : Dict [ str, Any ] = collections. defaultdict ( list )   for dc in map ( self. parse_voc_xml, children ) :    for ind, v in dc. items ( ) :     def_dic [ ind ]. append ( v )   if node. tag == ""annotation"" :    def_dic [ ""object"" ] = [ def_dic [ ""object"" ] ]   voc_dict = {    node. tag : { ind : v [ 0 ] if len ( v ) == 1 else v for ind, v in def_dic. items ( ) }   }  if node. text :   text = node. text. strip ( )   if :    voc_dict [ node. tag ] = text  return voc_dict",False,not children,text,0.6756261587142944
    3836,"def get_backward_connection ( start, stop, layer ) :  start_layer, start_category, start_buffer = start. split ( ""."", 2 )  stop_layer, stop_category, stop_buffer = stop. split ( ""."", 2 )  back_buffer_name = {   ""parameters"" : ""gradients"",   ""inputs"" : ""input_deltas"",   ""outputs"" : ""output_deltas"",  }  new_end = ""."". join ( [ stop_layer, back_buffer_name [ stop_category ], stop_buffer ] )  if start_category == ""internals"" :   dstart_buffer = ""d"" + start_buffer   if :    raise KeyError (     ""Missing delta buffer {} for the internal buffer {}""     ""."". format ( dstart_buffer, start_buffer )    )   new_start = ""."". join ( [ start_layer, ""internals"", dstart_buffer ] )  else :   new_start = ""."". join (    [ start_layer, back_buffer_name [ start_category ], start_buffer ]   )  return new_start, new_end",False,dstart_buffer not in layer.internal_shapes,dstart_buffer not in dstart_buffer,0.6518374681472778
    3837,"def _convert_word_to_char_ids ( self, word ) :  code = np. zeros ( [ self. _max_word_length ], dtype = np. int32 )  if self. _pad_special_char_use :   code [ : ] = self. pad_char  if :   word_encoded = word. encode ( ""utf-8"", ""ignore"" ) [ : self. _max_word_length - 2 ]   code [ 0 ] = self. bow_char   for k, chr_id in enumerate ( word_encoded, start = 1 ) :    code [ k ] = chr_id   code [ len ( word_encoded ) + 1 ] = self. eow_char  else :   word_encoded = word. encode ( ""utf-8"", ""ignore"" ) [ : self. _max_word_length ]   for k, chr_id in enumerate ( word_encoded ) :    code [ k ] = chr_id  if not self. _pad_special_char_use :   if :    code = code [ : len ( word_encoded ) + 2 ]   else :    code = code [ : len ( word_encoded ) ]  return code",False,self._word_boundary_special_char_use,self._max_word_length > 0,0.6493944525718689
    3838,"def _parse_fields ( cls, read ) :  read = unicode_to_str ( read )  if type ( read ) is not str :   _wrong_type_for_arg ( read, ""str"", ""read"" )  fields = { }  while read and read [ 0 ]!= "";"" :   if :    DeserializeError ( read, ""does not separate fields with commas"" )   read = read [ 1 : ]   key, _type, value, read = cls. _parse_field ( read )   fields [ key ] = ( _type, value )  if read :     read = read [ 1 : ]  return fields, read",False,"read and read[0] != ','",fields,0.6585848331451416
    3839,"def __str__ ( self, prefix = """", printElemNumber = 0 ) :  res = """"  cnt = 0  for e in self. scope_ :   elm = """"   if :    elm = ""(%d)"" % cnt   res += prefix + ( ""scope%s: %s\n"" % ( elm, self. DebugFormatString ( e ) ) )   cnt += 1  if self. has_service_account_id_ :   res += prefix + (    ""service_account_id: %s\n"" % self. DebugFormatInt64 ( self. service_account_id_ )   )  return res",True,printElemNumber,printElemNumber,0.6857435703277588
    3840,"def visit_Macro ( self, node, frame ) :  macro_frame, macro_ref = self. macro_body ( node, frame )  self. newline ( )  if frame. toplevel :   if :    self. write ( ""context.exported_vars.add(%r)"" % node. name )   ref = frame. symbols. ref ( node. name )   self. writeline ( ""context.vars[%r] = "" % node. name )  self. write ( ""%s = "" % frame. symbols. ref ( node. name ) )  self. macro_def ( macro_ref, macro_frame )",False,not node.name.startswith('_'),node.name,0.6528820991516113
    3841,"def fetch_scatter_outputs ( self, task ) :  scatteroutputs = [ ]  for var in task [ ""body"" ] :     if var. startswith ( ""call"" ) :    if :     for output in self. tasks_dictionary [ task [ ""body"" ] [ var ] [ ""task"" ] ] [      ""outputs""     ] :      scatteroutputs. append (       { ""task"" : task [ ""body"" ] [ var ] [ ""alias"" ], ""output"" : output [ 0 ] }      )  return scatteroutputs",False,'outputs' in self.tasks_dictionary[task['body'][var]['task']],var in self.tasks_dictionary,0.6666173934936523
    3842,"def read ( self, iprot ) :  if (   iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated   and isinstance ( iprot. trans, TTransport. CReadableTransport )   and self. thrift_spec is not None   and fastbinary is not None  ) :   fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) )   return  iprot. readStructBegin ( )  while True :   ( fname, ftype, fid ) = iprot. readFieldBegin ( )   if :    break   if fid == 1 :    if ftype == TType. LIST :     self. new_parts = [ ]     ( _etype302, _size299 ) = iprot. readListBegin ( )     for _i303 in xrange ( _size299 ) :      _elem304 = Partition ( )      _elem304. read ( iprot )      self. new_parts. append ( _elem304 )     iprot. readListEnd ( )    else :     iprot. skip ( ftype )   else :    iprot. skip ( ftype )   iprot. readFieldEnd ( )  iprot. readStructEnd ( )",False,ftype == TType.STOP,fid == 0,0.6589915752410889
    3843,"def _load_tracker_file_etag ( self ) :  f = None  try :   f = open ( self. tracker_file_name, ""r"" )   self. etag_value_for_current_download = f. readline ( ). rstrip ( ""\n"" )         if :    print (     ""Couldn't read etag in tracker file (%s). Restarting ""     ""download from scratch."" % self. tracker_file_name    )  except IOError as e :       if e. errno!= errno. ENOENT :          print (     ""Couldn't read URI tracker file (%s): %s. Restarting ""     ""download from scratch."" % ( self. tracker_file_name, e. strerror )    )  finally :   if f :    f. close ( )",False,len(self.etag_value_for_current_download) < self.MIN_ETAG_LEN,self.etag_value_for_current_download,0.6532106995582581
    3844,"def confirm ( request ) :  details = request. session. get ( ""reauthenticate"" )  if not details :   return redirect ( ""home"" )   request. user = User. objects. get ( pk = details [ ""user_pk"" ] )  if request. method == ""POST"" :   confirm_form = PasswordConfirmForm ( request, request. POST )   if :    request. session. pop ( ""reauthenticate"" )    request. session [ ""reauthenticate_done"" ] = True    return redirect ( ""social:complete"", backend = details [ ""backend"" ] )  else :   confirm_form = PasswordConfirmForm ( request )  context = { ""confirm_form"" : confirm_form }  context. update ( details )  return render ( request, ""accounts/confirm.html"", context )",False,confirm_form.is_valid(),reauthenticate_done and 'reauthenticate' in request.session,0.6494632363319397
    3845,"def prepare ( self, size = None ) :  if _is_seekable ( self. file ) :   start_pos = self. file. tell ( )   self. file. seek ( 0, 2 )   end_pos = self. file. tell ( )   self. file. seek ( start_pos )   fsize = end_pos - start_pos   if :    self. remain = fsize   else :    self. remain = min ( fsize, size )  return self. remain",True,size is None,size is None,0.66523277759552
    3846,"def _get_path_rvar (  stmt : pgast. Query,  path_id : irast. PathId,  *,  aspect : str,  ctx : context. CompilerContextLevel, ) -> Tuple [ pgast. PathRangeVar, irast. PathId ] :  qry : Optional [ pgast. Query ] = stmt  while qry is not None :   rvar = pathctx. maybe_get_path_rvar ( qry, path_id, aspect = aspect, env = ctx. env )   if rvar is not None :    if qry is not stmt :         pathctx. put_path_rvar ( stmt, path_id, rvar, aspect = aspect, env = ctx. env )    return rvar, path_id   if :    path_id = pathctx. reverse_map_path_id ( path_id, qry. view_path_id_map )   qry = ctx. rel_hierarchy. get ( qry )  raise LookupError ( f""there is no range var for {path_id} in {stmt}"" )",False,qry.view_path_id_map,path_id is not None,0.65373295545578
    3847,"def onMouseWheel ( self, event ) :  if self. selectedHuman. isVisible ( ) :   zoomOut = event. wheelDelta > 0   if self. getSetting ( ""invertMouseWheel"" ) :    zoomOut = not zoomOut   if event. x is not None :    self. modelCamera. mousePickHumanCenter ( event. x, event. y )   if :    self. zoomOut ( )   else :    self. zoomIn ( )",True,zoomOut,zoomOut,0.702865481376648
    3848,"def _remove_iptables_rule ( self, rule, ipv6 = False, tables = None ) :  if not isinstance ( rule, tuple ) :   return self. _remove_iptables_rule_cmd ( rule, ipv6 )  _global_lock. acquire ( )  try :   if :    if rule [ 0 ] == ""POSTROUTING"" :     if tables :      table = tables [ ""nat6"" ]     else :      table = iptc. Table6 ( iptc. Table. NAT )    else :     if tables :      table = tables [ ""filter6"" ]     else :      table = iptc. Table6 ( iptc. Table. FILTER )   else :    if rule [ 0 ] == ""POSTROUTING"" :     if tables :      table = tables [ ""nat"" ]     else :      table = iptc. Table ( iptc. Table. NAT )    else :     if tables :      table = tables [ ""filter"" ]     else :      table = iptc. Table ( iptc. Table. FILTER )   chain = iptc. Chain ( table, rule [ 0 ] )  <",True,ipv6,ipv6,0.6762512922286987
    3849,"def expect_document_start ( self, first = False ) :  if isinstance ( self. event, DocumentStartEvent ) :   if ( self. event. version or self. event. tags ) and self. open_ended :    self. write_indicator ( ""..."", True )    self. write_indent ( )   if self. event. version :    version_text = self. prepare_version ( self. event. version )    self. write_version_directive ( version_text )   self. tag_prefixes = self. DEFAULT_TAG_PREFIXES. copy ( )   if :    handles = sorted ( self. event. tags. keys ( ) )    for handle in handles :     prefix = self. event. tags [ handle ]     self. tag_prefixes [ prefix ] = handle     handle_text = self. prepare_tag_handle ( handle )     prefix_text = self. prepare_tag_prefix ( prefix )     self. write_tag_directive ( handle_text, prefix_text )   implicit = (    first    and not self. event. explicit    and not self. canonical    and not self. event. version    and not self. event. tags    and not self. check_empty_document ( )   )   if not implicit :    self. write_indent ( )    self. write_indicator ( ""---"", True )    if self.",False,self.event.tags,first,0.66018146276474
    3850,"def send_slack_msg ( self, key, message_payload ) :  if key. startswith ( ""https://hooks.slack.com/"" ) :   response = requests. post (    url = key, data = message_payload, headers = { ""Content-Type"" : ""application/json"" }   )  else :   response = requests. post (    url = ""https://slack.com/api/chat.postMessage"",    data = message_payload,    headers = {     ""Content-Type"" : ""application/json;charset=utf-8"",     ""Authorization"" : ""Bearer %s"" % self. config. get ( ""slack_token"" ),    },   )  if response. status_code == 429 and ""Retry-After"" in response. headers :   self. logger. info (    ""Slack API rate limiting. Waiting %d seconds"",    int ( response. headers [ ""Retry-After"" ] ),   )   time. sleep ( int ( response. headers [ ""Retry-After"" ] ) )   return  elif response. status_code!= 200 :   self. logger. info (    ""Error in sending Slack message status:%s response: %s"",    response. status_code,    response. text,   )   return  if ""text/html"" in response. headers [ ""content-type"" ] :   if response. text!= ""ok"" :    self. logger. info (  ",False,not response_json['ok'],self.config.get('slack_token'),0.6493945121765137
    3851,"def _discover_formatters ( ) :  import inspect  from pygments. formatters import get_all_formatters   default_exts = { }  exts = { }   default_names = { }  names = { }  formatters = { ""exts"" : exts, ""names"" : names }  if DEBUG :   from collections import defaultdict   duplicates = defaultdict ( set )  for cls in get_all_formatters ( ) :   mod = inspect. getmodule ( cls )   val = ( mod. __name__, cls. __name__ )     for filename in cls. filenames :    if filename. startswith ( ""*."" ) :     filename = filename [ 1 : ]    if :     continue    if (     DEBUG     and filename in exts     and exts [ filename ]!= val     and filename not in default_exts    ) :     duplicates [ filename ]. add ( val )     duplicates [ filename ]. add ( exts [ filename ] )    exts [ filename ] = val     names [ cls. name ] = val   for alias in cls. aliases :    if (     DEBUG     and alias in names     and names [ alias ]!= val     and alias not in default_names",False,'*' in filename,filename in formatters,0.679736852645874
    3852,"def _get_frame_value ( frame, keyframes ) :  for i in range ( 0, len ( keyframes ) ) :   kf_frame, kf_value = keyframes [ i ]   if kf_frame == frame :    return kf_value   try :       frame_n, value_n = keyframes [ i + 1 ]    if :     time_fract = float ( ( frame - kf_frame ) ) / float ( ( frame_n - kf_frame ) )     value_range = value_n - kf_value     return kf_value + time_fract * value_range   except :    return kf_value",False,kf_frame < frame and frame < frame_n,kf_value == frame_n,0.6589030623435974
    3853,"def validate ( self, entry, field_uri = None ) :  super ( ). validate ( entry, field_uri )  if entry is None :   return  field_uri = field_uri or self. field_uri  if not isinstance ( entry, dict ) :   raise ConfigError ( ""{} is expected to be dict"". format ( field_uri ) )  if not entry and not self. allow_empty :   self. raise_error ( entry, field_uri, ""value is empty"" )  for k, v in entry. items ( ) :   if self. validate_keys :    uri = ""{}.keys.{}"". format ( field_uri, k )    self. key_type. validate ( k, uri )   if :    uri = ""{}.{}"". format ( field_uri, k )    self. value_type. validate ( v, uri )",False,self.validate_values,self.validate_value,0.6518567800521851
    3854,"def ack ( self, sha ) :  """"""Ack that a revision and its ancestors are present in the source.""""""  if len ( sha )!= 40 :   raise ValueError ( ""unexpected sha %r received"" % sha )  ancestors = set ( [ sha ] )   while self. heads :   for a in ancestors :    if :     self. heads. remove ( a )     new_ancestors = set ( )   for a in ancestors :    ps = self. parents. get ( a )    if ps is not None :     new_ancestors. update ( ps )    self. parents [ a ] = None     if not new_ancestors :    break   ancestors = new_ancestors",False,a in self.heads,a in self.parents,0.6693235039710999
    3855,"def _onFrameNavigated ( self, framePayload : dict ) -> None :  isMainFrame = not framePayload. get ( ""parentId"" )  if isMainFrame :   frame = self. _mainFrame  else :   self. _frames. get ( framePayload. get ( ""id"", """" ) )  if not ( isMainFrame or frame ) :   raise PageError (    ""We either navigate top level or have old version "" ""of the navigated frame""   )   if :   for child in frame. childFrames :    self. _removeFramesRecursively ( child )   _id = framePayload. get ( ""id"", """" )  if isMainFrame :   if :       self. _frames. pop ( frame. _id, None )    frame. _id = _id   else :       frame = Frame ( self. _client, self. _mouse, self. _touchscreen, None, _id )   self. _frames [ _id ] = frame   self. _mainFrame = frame   frame. _navigated ( framePayload )  self. emit ( FrameManager. Events. FrameNavigated, frame )",False,frame,frame.childFrames,0.6889786720275879
    3856,"def __call__ ( self, event, data = None ) :  datatype, delta = event  self. midi_ctrl. delta += delta  if TIMING_CLOCK in datatype and not self. played :   self. midi_ctrl. pulse += 1   if :    t_master = 60.0    self. midi_ctrl. bpm = round ( 60.0 / self. midi_ctrl. delta, 0 )    self. midi_ctrl. pulse = 0    self. midi_ctrl. delta = 0.0",False,self.midi_ctrl.pulse == self.midi_ctrl.ppqn,self.midi_ctrl.pulse >= 1.0,0.6615312695503235
    3857,"def IndexDocuments ( self, documents, response ) :  """"""Indexes an iterable DocumentPb.Document.""""""  for document in documents :   doc_id = document. id ( )   if :    doc_id = str ( uuid. uuid4 ( ) )    document. set_id ( doc_id )   response. add_doc_id ( doc_id )   if doc_id in self. _documents :    old_document = self. _documents [ doc_id ]    self. _inverted_index. RemoveDocument ( old_document )   self. _documents [ doc_id ] = document   new_status = response. add_status ( )   new_status. set_code ( search_service_pb. SearchServiceError. OK )   self. _inverted_index. AddDocument ( doc_id, document )",False,not doc_id,doc_id is None,0.6599835157394409
    3858,"def __load_protos ( ) :  g = globals ( )  for k, v in g. items ( ) :   if :    name = k [ 4 : ]    modname = name. lower ( )    try :     mod = __import__ ( modname, g, level = 1 )     PPP. set_p ( v, getattr ( mod, name ) )    except ( ImportError, AttributeError ) :     continue",False,k.startswith('PPP_'),k.startswith('          < / / >,0.6541726589202881
    3859,"def get_meminfo ( ) -> Dict [ str, Any ] :  res = { }  try :   import psutil  except ImportError :   res [ ""memory_psutil_missing"" ] = (    ""psutil not found, run pip install mypy[dmypy] ""    ""to install the needed components for dmypy""   )  else :   process = psutil. Process ( )   meminfo = process. memory_info ( )   res [ ""memory_rss_mib"" ] = meminfo. rss / MiB   res [ ""memory_vms_mib"" ] = meminfo. vms / MiB   if :    res [ ""memory_maxrss_mib"" ] = meminfo. peak_wset / MiB   else :       import resource    rusage = resource. getrusage ( resource. RUSAGE_SELF )    if sys. platform == ""darwin"" :     factor = 1    else :     factor = 1024    res [ ""memory_maxrss_mib"" ] = rusage. ru_maxrss * factor / MiB  return res",True,sys.platform == 'win32',sys.platform == 'win32',0.66079181432724
    3860,"def get_key ( self, view_instance, view_method, request, args, kwargs ) :  if :   memoization_key = self. _get_memoization_key (    view_instance = view_instance,    view_method = view_method,    args = args,    kwargs = kwargs,   )   if not hasattr ( request, ""_key_constructor_cache"" ) :    request. _key_constructor_cache = { }  if self. memoize_for_request and memoization_key in request. _key_constructor_cache :   return request. _key_constructor_cache. get ( memoization_key )  else :   value = self. _get_key (    view_instance = view_instance,    view_method = view_method,    request = request,    args = args,    kwargs = kwargs,   )   if :    request. _key_constructor_cache [ memoization_key ] = value   return value",False,self.memoize_for_request,memoization_key is not None,0.654608964920044
    3861,"def find ( self, path ) :  if os. path. isfile ( path ) or os. path. islink ( path ) :   self. num_files = self. num_files + 1   if self. match_function ( path ) :    self. files. append ( path )  elif os. path. isdir ( path ) :   for content in os. listdir ( path ) :    file = os. path. join ( path, content )    if :     self. num_files = self. num_files + 1     if self. match_function ( file ) :      self. files. append ( file )    else :     self. find ( file )",False,os.path.isfile(file) or os.path.islink(file),os.path.isfile(file),0.6462186574935913
    3862,"def __init__ ( self, section, source = None, lineno = None ) :  msg = [ repr ( section ), "" already exists"" ]  if source is not None :   message = [ ""While reading from "", repr ( source ) ]   if :    message. append ( "" [line {0:2d}]"". format ( lineno ) )   message. append ( "": section "" )   message. extend ( msg )   msg = message  else :   msg. insert ( 0, ""Section "" )  Error. __init__ ( self, """". join ( msg ) )  self. section = section  self. source = source  self. lineno = lineno  self. args = ( section, source, lineno )",True,lineno is not None,lineno is not None,0.6798129081726074
    3863,"def do_list ( self, q ) :  marker = q. params [ ""marker"" ] [ 0 ] if ""marker"" in q. params else None  max_keys = int ( q. params [ ""max_keys"" ] [ 0 ] ) if ""max_keys"" in q. params else 1000  prefix = q. params [ ""prefix"" ] [ 0 ] if ""prefix"" in q. params else """"  resp = [   '',   '' % self. xml_ns,   ""%d"" % max_keys,   ""false"",  ]  count = 0  for key in sorted ( self. server. data ) :   if not key. startswith ( prefix ) :    continue   if marker and key <= marker :    continue   resp. append ( ""%s"" % xml_escape ( key ) )   count += 1   if :    resp [ 3 ] = ""true""    break  resp. append ( """" )  body = ""\n"". join ( resp ). encode ( )  self. send_response ( 200 )  self. send_header ( ""Content-Type"", ""text/xml"" )  self. send_header ( ""Content-Length"", str ( len ( body ) ) )  self. end_headers ( )  self. wfile. write ( body )",False,count == max_keys,count > max_keys,0.6624726057052612
    3864,"def insert ( self, pack_id, data ) :  if ( pack_id not in self. queue ) and pack_id > self. begin_id :   self. queue [ pack_id ] = PacketInfo ( data )   if :    self. end_id = pack_id + 1   elif self. end_id < pack_id :    eid = self. end_id    while eid < pack_id :     self. miss_queue. add ( eid )     eid += 1    self. end_id = pack_id + 1   else :    self. miss_queue. remove ( pack_id )",False,self.end_id == pack_id,self.begin_id > pack_id,0.6509692668914795
    3865,"def walks_generator ( ) :  if filelist is not None :   bucket = [ ]   for filename in filelist :    with io. open ( filename ) as inf :     for line in inf :      walk = [ int ( x ) for x in line. strip ( ""\n"" ). split ( "" "" ) ]      bucket. append ( walk )      if :       yield bucket       bucket = [ ]   if len ( bucket ) :    yield bucket  else :   for _ in range ( epoch ) :    for nodes in graph. node_batch_iter ( batch_size ) :     walks = graph. random_walk ( nodes, walk_len )     yield walks",False,len(bucket) == batch_size,len(bucket) > 0,0.651907205581665
    3866,"def reloadCols ( self ) :  self. columns = [ ]  for i, ( name, fmt, * shape ) in enumerate ( self. npy. dtype. descr ) :   if shape :    t = anytype   elif ""M"" in fmt :    self. addColumn ( Column ( name, type = date, getter = lambda c, r, i = i : str ( r [ i ] ) ) )    continue   elif :    t = int   elif ""f"" in fmt :    t = float   else :    t = anytype   self. addColumn ( ColumnItem ( name, i, type = t ) )",False,'i' in fmt,'st' in fmt,0.667656421661377
    3867,"def test_createFile ( self ) :  text = ""This is a test!""  path = tempfile. mktemp ( )  try :   koDoc = self. _koDocFromPath ( path, load = False )   koDoc. buffer = text   koDoc. save ( 0 )   del koDoc   koDoc2 = self. _koDocFromPath ( path )   assert koDoc2. buffer == text  finally :   if :    os. unlink ( path )",True,os.path.exists(path),os.path.exists(path),0.6471022367477417
    3868,"def get_selection ( self ) :  if self. interface. multiple_select :   selection = [ ]   current_index = self. tree. selectedRowIndexes. firstIndex   for i in range ( self. tree. selectedRowIndexes. count ) :    selection. append ( self. tree. itemAtRow ( current_index ). attrs [ ""node"" ] )    current_index = self. tree. selectedRowIndexes. indexGreaterThanIndex (     current_index    )   return selection  else :   index = self. tree. selectedRow   if :    return self. tree. itemAtRow ( index ). attrs [ ""node"" ]   else :    return None",False,index != -1,index is not None and index.isValid(),0.6681556701660156
    3869,"def detect_ssl_option ( self ) :  for option in self. ssl_options ( ) :   if scan_argv ( self. argv, option ) is not None :    for other_option in self. ssl_options ( ) :     if :      if scan_argv ( self. argv, other_option ) is not None :       raise ConfigurationError (        ""Cannot give both %s and %s"" % ( option, other_option )       )    return option",False,option != other_option,option == other_option,0.6600449085235596
    3870,"def save_map ( world, path, prefix ) :  map_file = os. path. join ( path, prefix + "".sqlite"" )  db = DbReader ( map_file )  read_savegame_template ( db )  db ( ""BEGIN"" )  for island in world. islands :   island_name = ""{}_island_{:d}_{:d}.sqlite"". format (    prefix, island. origin. x, island. origin. y   )   island_db_path = os. path. join ( path, island_name )   if :    os. unlink ( island_db_path )   db (    ""INSERT INTO island (x, y, file) VALUES(?,?,?)"",    island. origin. x,    island. origin. y,    ""content/islands/"" + island_name,   )   island_db = DbReader ( island_db_path )   island. save_map ( island_db )   island_db. close ( )  db ( ""COMMIT"" )  db. close ( )",False,os.path.exists(island_db_path),os.path.isfile(island_db_path),0.6468785405158997
    3871,"def _getItemHeight ( self, item, ctrl = None ) :  """"""Returns the full height of the item to be inserted in the form""""""  if type ( ctrl ) == psychopy. visual. TextBox2 :   return ctrl. size [ 1 ]  if type ( ctrl ) == psychopy. visual. Slider :     if item [ ""layout"" ] == ""horiz"" :    return 0.03 + ctrl. labelHeight * 3   elif :       return ctrl. labelHeight * len ( item [ ""options"" ] )",False,item['layout'] == 'vert',item['layout'] == 'group',0.6513952016830444
    3872,"def decrypt_hash ( edata, nlkm, ch, xp = True ) :  if xp :   hmac_md5 = HMAC. new ( nlkm, ch )   rc4key = hmac_md5. digest ( )   rc4 = ARC4. new ( rc4key )   data = rc4. encrypt ( edata )  else :     aes = AES. new ( nlkm [ 16 : 32 ], AES. MODE_CBC, ch )   data = """"   for i in range ( 0, len ( edata ), 16 ) :    buf = edata [ i : i + 16 ]    if :     buf += ( 16 - len ( buf ) ) * ""\00""    data += aes. decrypt ( buf )  return data",True,len(buf) < 16,len(buf) < 16,0.6589879989624023
    3873,"def draw_left_axis ( self, bounds, y_ticks, y_tick_text ) :  ( left, top, right, bottom ) = bounds  stats = { }  for stat in self. stat_info :   if :    stats [ stat ] = self. stat_info [ stat ]    stats [ stat ] [ ""values"" ] = self. stats [ stat ]    stats [ stat ] [ ""fill_color"" ] = change_opacity ( stats [ stat ] [ ""color"" ], 0.5 )    stats [ stat ] [ ""color"" ] = change_opacity ( stats [ stat ] [ ""color"" ], 0.8 )  height = bottom - top  max_value = y_ticks [ - 1 ]  ratio = height / max_value  for i, y_val in enumerate ( y_ticks ) :   y = int ( bottom - y_val * ratio ) - 0.5   if i!= 0 :    self. draw_dotted_line ( gray, left, y, right, y )   self. draw_y_text ( y_tick_text [ i ], left, y )  self. draw_line ( gray, left, top, left, bottom )  for stat, info in stats. items ( ) :   if len ( info [ ""values"" ] ) > 0 :    self. draw_value_poly ( info [ ""values"" ], info [ ""color"" ], max_value, bounds )    self. draw_value_poly (     info [ ""values"" ], info [ ""fill_color"" ], max_value, bounds, info [ ""fill"" ]    )",False,self.stat_info[stat]['axis'] == 'left',stat in self.stats,0.6550924777984619
    3874,"def visit_symbol_table ( self, symtab : SymbolTable, table_fullname : str ) -> None :   for key, value in list ( symtab. items ( ) ) :   cross_ref = value. cross_ref   if cross_ref is not None :    value. cross_ref = None    if cross_ref in self. modules :     value. node = self. modules [ cross_ref ]    else :     stnode = lookup_qualified_stnode (      self. modules, cross_ref, self. allow_missing     )     if :      assert stnode. node is not None, (       table_fullname + ""."" + key,       cross_ref,      )      value. node = stnode. node     elif not self. allow_missing :      assert False, ""Could not find cross-ref %s"" % ( cross_ref, )     else :           value. node = missing_info ( self. modules )   else :    if isinstance ( value. node, TypeInfo ) :         self. visit_type_info ( value. node )",True,stnode is not None,stnode is not None,0.6625056266784668
    3875,"def prepare_verify ( verify, verify_fingerprint ) :  if isinstance ( verify, ( str, bytes ) ) :   verify = expand_path ( verify )  elif not isinstance ( verify, bool ) :   raise exceptions. UserError (    ""Invalid value for verify ({}), ""    ""must be a path to a PEM-file or boolean."". format ( verify )   )  if verify_fingerprint is not None :   if :    raise exceptions. UserError (     ""Invalid value for verify_fingerprint ""     ""({}), must be a string or null."". format ( verify_fingerprint )    )  elif not verify :   raise exceptions. UserError (    ""Disabling all SSL validation is forbidden. Consider setting ""    ""verify_fingerprint if you have a broken or self-signed cert.""   )  return {   ""verify"" : verify,   ""verify_fingerprint"" : verify_fingerprint,  }",False,"not isinstance(verify_fingerprint, (bytes, str))","not isinstance(verify_fingerprint, str)",0.6545051336288452
    3876,"def show_image ( self, wnd_name, img ) :  if wnd_name in self. named_windows :   if self. named_windows [ wnd_name ] == 0 :    self. named_windows [ wnd_name ] = 1    self. on_create_window ( wnd_name )    if :     self. capture_mouse ( wnd_name )   self. on_show_image ( wnd_name, img )  else :   print ( ""show_image: named_window "", wnd_name, "" not found."" )",False,wnd_name in self.capture_mouse_windows,self.capture_mouse and wnd_name in self.named_windows,0.6506874561309814
    3877,"def replace ( self, state ) :  if state. key in self. _dict :   existing = self. _dict [ state. key ]   existing = attributes. instance_state ( existing )   if :    self. _manage_removed_state ( existing )   else :    return  self. _dict [ state. key ] = state. obj ( )  self. _manage_incoming_state ( state )",False,existing is not state,existing.has_state(),0.6668130159378052
    3878,"def _cleanupSocket ( self ) :  """"""Close the Connection's socket.""""""  try :   self. _sock. shutdown ( socket. SHUT_WR )  except :   return  try :   while True :    r, w, e = select. select ( [ self. _sock ], [ ], [ ] )    if :     break  except :   pass  self. _sock. close ( )",False,not r or not self._sock.recv(1024),r,0.6514097452163696
    3879,"def native_device_prefix ( prefixes ) :  log. debug (   ""Getting the OS-native device prefix from potential prefixes: {0}"". format (    prefixes   )  )  for prefix in prefixes :   if :    log. debug ( ""Native prefix is {0}"". format ( prefix ) )    return prefix  else :   log. debug ( ""{0} contains no native device prefixes"". format ( prefixes ) )   return None",False,any((device.startswith(prefix) for device in os.listdir('/sys/block'))),lib.PCI_DEVICE_PREFIX.match(prefix),0.6466257572174072
    3880,"def _handle_class_and_struct ( self, class_type ) :  if self. _handling_typedef :   return self. _get_class ( class_type, None )  name_tokens, var_token = self. get_name ( )  if var_token. token_type == tokenize. NAME or var_token. name in ""*&"" :   tokens, last = self. _get_var_tokens_up_to ( False, ""("", "";"", ""{"" )   tokens. insert ( 0, var_token )   tokens = name_tokens + tokens   if :    self. _add_back_token ( last )    self. _add_back_tokens ( tokens )    return self. _get_class ( class_type, None )   if last. name == ""("" :    return self. _get_method ( tokens, 0, None, False )   return self. _get_variable ( tokens )  self. _add_back_token ( var_token )  self. _add_back_tokens ( name_tokens )  return self. _get_class ( class_type, None )",False,last.name == '{',last != None,0.6577915549278259
    3881,"def add_lines_into_order ( self, order, lines ) :   order_line_by_source = {   id ( order_line. source_line ) : order_line for order_line in lines  }   for index, order_line in enumerate ( lines ) :   order_line. order = order   order_line. ordering = index   parent_src_line = order_line. parent_source_line   if :    parent_order_line = order_line_by_source [ id ( parent_src_line ) ]    assert parent_order_line. pk, ""Parent line should be saved""    order_line. parent_line = parent_order_line   order_line. save ( )  self. add_line_taxes ( lines )   for order_line in lines :   self. process_saved_order_line ( order = order, order_line = order_line )",False,parent_src_line,parent_src_line and parent_src_line.pk,0.6582622528076172
    3882,"def _get_event_for_message ( self, message_id ) :  with self. event_lock :   if :    raise RuntimeError (     ""Event for message[{}] should have been created before accessing"". format (      message_id     )    )   return self. _events [ message_id ]",True,message_id not in self._events,message_id not in self._events,0.6641417741775513
    3883,"def _cross_replica_average ( self, t, num_shards_per_group ) :  """"""Calculates the average value of input tensor across TPU replicas.""""""  num_shards = tpu_function. get_tpu_context ( ). number_of_shards  group_assignment = None  if num_shards_per_group > 1 :   if :    raise ValueError (     ""num_shards: %d mod shards_per_group: %d, should be 0""     % ( num_shards, num_shards_per_group )    )   num_groups = num_shards // num_shards_per_group   group_assignment = [    [ x for x in range ( num_shards ) if x // num_shards_per_group == y ]    for y in range ( num_groups )   ]  return tpu_ops. cross_replica_sum ( t, group_assignment ) / tf. cast (   num_shards_per_group, t. dtype  )",False,num_shards % num_shards_per_group != 0,num_shards_per_group == 0,0.6530584692955017
    3884,"def run ( self ) :  self. _spawn ( )  while True :   ( rds, _, _ ) = select. select ( [ self. child. stdout, self. child. stderr ], [ ], [ ], 1 )   if self. child. stdout in rds :    line = self. child. stdout. readline ( )    self. captured_stdout. append ( line. decode ( ""utf-8"" ). rstrip ( ) )   if :    line = self. child. stderr. readline ( )    self. captured_stderr. append ( line. decode ( ""utf-8"" ). rstrip ( ) )   if self. child. poll ( ) is not None :    self. dump_logs ( )    break   if self. should_die. is_set ( ) :    self. _despawn ( )    break",True,self.child.stderr in rds,self.child.stderr in rds,0.6546010971069336
    3885,"def tearDown ( self ) :  """"""Shutdown the server.""""""  try :   if self. server :    self. server. stop ( 2.0 )   if :    self. root_logger. removeHandler ( self. sl_hdlr )    self. sl_hdlr. close ( )  finally :   BaseTest. tearDown ( self )",False,self.sl_hdlr,self.root_logger,0.662135899066925
    3886,"def __init__ ( self, handler ) :  self. handler = handler  self. headers = handler. headers  self. path = handler. path  self. head = False  self. url = urlparse ( self. path )  try :   length = int ( self. headers [ ""Content-Length"" ] )   if :    self. post = None   else :    self. post = handler. rfile. read ( length )     except BaseException :   self. post = None  self. bits = [ urllib. parse. unquote ( x ) for x in self. url. path. split ( ""/"" ) if x ]  self. query = parse_qs ( self. url. query )  try :   for k, v in self. query. items ( ) :    self. query [ k ] = v [ 0 ]  except BaseException :   pass  self. user = None",False,not length,length > 0,0.6768758296966553
    3887,"def _parse_version ( version : str ) -> PythonVersionInfo :  match = re. match ( r""(\d+)(?:\.(\d+)(?:\.\d+)?)?$"", version )  if match is None :   raise ValueError (    (     ""The given version is not in the right format. ""     + 'Use something like ""3.2"" or ""3"".'    )   )  major = int ( match. group ( 1 ) )  minor = match. group ( 2 )  if minor is None :       if major == 2 :    minor = ""7""   elif :    minor = ""6""   else :    raise NotImplementedError (     ""Sorry, no support yet for those fancy new/old versions.""    )  minor = int ( minor )  return PythonVersionInfo ( major, minor )",False,major == 3,major == 6,0.6767346858978271
    3888,"def escape ( text, newline = False ) :  """"""Escape special html characters.""""""  if isinstance ( text, str ) :   if ""&"" in text :    text = text. replace ( ""&"", ""&"" )   if "">"" in text :    text = text. replace ( "">"", "">"" )   if ""<"" in text :    text = text. replace ( ""<"", ""<"" )   if '""' in text :    text = text. replace ( '""', """"" )   if :    text = text. replace ( ""'"", """"" )   if newline :    if ""\n"" in text :     text = text. replace ( ""\n"", ""
    "" ) return text",False,"""'"" in text","'""' in text",0.6692028045654297 3889,"def _process_out_of_bounds ( self, value, start, end ) : ""Sets out of bounds values to None"" if isinstance ( value, np. datetime64 ) : v = dt64_to_dt ( value ) if isinstance ( start, ( int, float ) ) : start = convert_timestamp ( start ) if isinstance ( end, ( int, float ) ) : end = convert_timestamp ( end ) s, e = start, end if isinstance ( s, np. datetime64 ) : s = dt64_to_dt ( s ) if : e = dt64_to_dt ( e ) else : v, s, e = value, start, end if v < s or v > e : value = None return value",True,"isinstance(e, np.datetime64)","isinstance(e, np.datetime64)",0.6484342813491821 3890,"def _get_initialized_app ( app ) : """"""Returns a reference to an initialized App instance."""""" if app is None : return firebase_admin. get_app ( ) if isinstance ( app, firebase_admin. App ) : initialized_app = firebase_admin. get_app ( app. name ) if : raise ValueError ( ""Illegal app argument. App instance not "" ""initialized via the firebase module."" ) return app raise ValueError ( ""Illegal app argument. Argument must be of type "" 'firebase_admin.App, but given ""{0}"".'. format ( type ( app ) ) )",False,app is not initialized_app,initialized_app is None,0.6588306427001953 3891,"def _iter_lines ( path = path, response = response, max_next = options. http_max_next ) : path. responses = [ ] n = 0 while response : path. responses. append ( response ) yield from response. iter_lines ( decode_unicode = True ) src = response. links. get ( ""next"", { } ). get ( ""url"", None ) if : break n += 1 if n > max_next : vd. warning ( f""stopping at max {max_next} pages"" ) break vd. status ( f""fetching next page from {src}"" ) response = requests. get ( src, stream = True )",True,not src,not src,0.6894989013671875 3892,"def train ( config, inputs, args ) : gan = setup_gan ( config, inputs, args ) sampler = TrainingVideoFrameSampler ( gan ) gan. selected_sampler = """" samples = 0 for i in range ( args. steps ) : gan. step ( ) if : print ( ""saving "" + save_file ) gan. save ( save_file ) if i % args. sample_every == 0 : sample_file = ""samples/"" + args. config + ""/%06d.png"" % ( samples ) os. makedirs ( os. path. expanduser ( os. path. dirname ( sample_file ) ), exist_ok = True ) samples += 1 sampler. sample ( sample_file, args. save_samples ) return [ ]",False,args.action == 'train' and i % args.save_every == 0 and (i > 0),args.save_samples == False,0.6525763273239136 3893,"def convertunit ( self, unit, prefix ) : if self. ignorefunc : if self. ignorefunc ( unit ) : return unit if prefix. find ( ""@hash_placeholder@"" )!= - 1 : if unit. getlocations ( ) : hashable = unit. getlocations ( ) [ 0 ] else : hashable = unit. source prefix = prefix. replace ( ""@hash_placeholder@"", md5 ( hashable ). hexdigest ( ) [ : self. hash_len ] ) if unit. istranslated ( ) : rich_string = unit. rich_target else : rich_string = unit. rich_source if not isinstance ( rich_string, StringElem ) : rich_string = [ rich_parse ( string, podebug_parsers ) for string in rich_string ] if self. rewritefunc : rewritten = [ self. rewritefunc ( string ) for string in rich_string ] if : rich_string = rewritten unit. rich_target = add_prefix ( prefix, rich_string ) return unit",False,rewritten,"hasattr(prefix, 'rich_target')",0.6894533634185791 3894,"def add_target_and_index ( self, name_obj : Tuple [ str, str ], sig : str, signode : desc_signature ) -> None : mod_name = self. env. ref_context. 
get ( ""js:module"" ) fullname = ( mod_name + ""."" if mod_name else """" ) + name_obj [ 0 ] node_id = make_id ( self. env, self. state. document, """", fullname ) signode [ ""ids"" ]. append ( node_id ) old_node_id = self. make_old_id ( fullname ) if old_node_id not in self. state. document. ids and old_node_id not in signode [ ""ids"" ] : signode [ ""ids"" ]. append ( old_node_id ) self. state. document. note_explicit_target ( signode ) domain = cast ( JavaScriptDomain, self. env. get_domain ( ""js"" ) ) domain. note_object ( fullname, self. objtype, node_id, location = signode ) if ""noindexentry"" not in self. options : indextext = self. get_index_text ( mod_name, name_obj ) if : self. indexnode [ ""entries"" ]. append ( ( ""single"", indextext, node_id, """", None ) )",True,indextext,indextext,0.6693501472473145 3895,"def assertMultiLineEqual ( self, first, second, msg = None ) : """"""Assert that two multi-line strings are equal."""""" self. assertIsInstance ( first, str, ""First argument is not a string"" ) self. assertIsInstance ( second, str, ""Second argument is not a string"" ) if first!= second : if len ( first ) > self. _diffThreshold or len ( second ) > self. _diffThreshold : self. _baseAssertEqual ( first, second, msg ) firstlines = first. splitlines ( keepends = True ) secondlines = second. splitlines ( keepends = True ) if : firstlines = [ first + ""\n"" ] secondlines = [ second + ""\n"" ] standardMsg = ""%s!= %s"" % _common_shorten_repr ( first, second ) diff = ""\n"" + """". join ( difflib. ndiff ( firstlines, secondlines ) ) standardMsg = self. _truncateMessage ( standardMsg, diff ) self. fail ( self. _formatMessage ( msg, standardMsg ) )",False,len(firstlines) == 1 and first.strip('\r\n') == first,keepends,0.6496038436889648 3896,"def check ( conf, token, prev, next, nextnext, context ) : if ""stack"" not in context : context [ ""stack"" ] = [ ] if isinstance ( token, ( yaml. BlockMappingStartToken, yaml. FlowMappingStartToken ) ) : context [ ""stack"" ]. append ( Parent ( MAP ) ) elif isinstance ( token, ( yaml. BlockSequenceStartToken, yaml. FlowSequenceStartToken ) ) : context [ ""stack"" ]. append ( Parent ( SEQ ) ) elif isinstance ( token, ( yaml. BlockEndToken, yaml. FlowMappingEndToken, yaml. FlowSequenceEndToken ) ) : context [ ""stack"" ]. pop ( ) elif isinstance ( token, yaml. KeyToken ) and isinstance ( next, yaml. ScalarToken ) : if len ( context [ ""stack"" ] ) > 0 and context [ ""stack"" ] [ - 1 ]. type == MAP : if : yield LintProblem ( next. start_mark. line + 1, next. start_mark. column + 1, 'wrong ordering of key ""%s"" in mapping' % next. value, ) else : context [ ""stack"" ] [ - 1 ]. keys. append ( next. value )",False,"any((strcoll(next.value, key) < 0 for key in context['stack'][-1].keys))",next is not None,0.6503562927246094 3897,"def gen_cli ( docs_dir ) : with open ( os. path. join ( docs_dir, ""CLI_template.md"" ), ""r"" ) as cli_temp_file : temp_lines = cli_temp_file. readlines ( ) lines = [ ] for line in temp_lines : matched = re. match ( r""{onnx-tf.*}"", line ) if : command = matched. string. strip ( ) [ 1 : - 1 ] output = subprocess. check_output ( command. split ( "" "" ) ). decode ( ""UTF-8"" ) lines. append ( output ) else : lines. append ( line ) with open ( os. path. join ( docs_dir, ""CLI.md"" ), ""w"" ) as cli_file : cli_file. writelines ( lines )",True,matched,matched,0.6802220344543457 3898,"def __init__ ( self, file ) : logger. load_obj ( Dummy ( ) ) self. conf = Config ( ) buf = self. conf. 
read_config ( [ file ] ) raw_objects = self. conf. read_config_buf ( buf ) self. conf. create_objects_for_type ( raw_objects, ""arbiter"" ) self. conf. create_objects_for_type ( raw_objects, ""module"" ) self. conf. early_arbiter_linking ( ) self. conf. create_objects ( raw_objects ) for mod in self. conf. modules : if : self. mod_sqlite = get_instance_sqlite ( mod ) self. mod_sqlite. init ( ) if mod. module_type == ""logstore_mongodb"" : self. mod_mongodb = get_instance_mongodb ( mod )",True,mod.module_type == 'logstore_sqlite',mod.module_type == 'logstore_sqlite',0.6535748243331909 3899,"def attrgetter ( item ) : items = [ None ] * len ( attribute ) for i, attribute_part in enumerate ( attribute ) : item_i = item for part in attribute_part : item_i = environment. getitem ( item_i, part ) if : item_i = postprocess ( item_i ) items [ i ] = item_i return items",True,postprocess is not None,postprocess is not None,0.6614655256271362 3900,"def set_related_perm ( _mapper : Mapper, _connection : Connection, target : Slice ) -> None : src_class = target. cls_model id_ = target. datasource_id if id_ : ds = db. session. query ( src_class ). filter_by ( id = int ( id_ ) ). first ( ) if : target. perm = ds. perm target. schema_perm = ds. schema_perm",True,ds,ds,0.6816925406455994 3901,"def simulate ( self, data, asset, orders_for_asset ) : self. _volume_for_bar = 0 volume = data. current ( asset, ""volume"" ) if volume == 0 : return price = data. current ( asset, ""close"" ) if isnull ( price ) : return dt = data. current_dt for order in orders_for_asset : if order. open_amount == 0 : continue order. check_triggers ( price, dt ) if not order. triggered : continue txn = None try : execution_price, execution_volume = self. process_order ( data, order ) if execution_price is not None : txn = create_transaction ( order, data. current_dt, execution_price, execution_volume ) except LiquidityExceeded : break if : self. _volume_for_bar += abs ( txn. amount ) yield order, txn",True,txn,txn,0.698900580406189 3902,"def _handle ( self ) : try : if : self. send_response ( 200 ) self. set_common_headers ( ) self. wfile. write ( json. dumps ( self. received_requests ). encode ( ""utf-8"" ) ) return if self. is_valid_token ( ) and self. is_valid_user_agent ( ) : self. send_response ( HTTPStatus. OK ) self. set_common_headers ( ) self. wfile. close ( ) else : self. send_response ( HTTPStatus. BAD_REQUEST ) self. set_common_headers ( ) self. wfile. close ( ) except Exception as e : self. logger. error ( str ( e ), exc_info = True ) raise",False,self.path == '/received_requests.json',self.received_requests is not None,0.6521310806274414 3903,"def format_listing ( listing, json_output = False, human_readable = False, recursive = False, summary = False ) : if json_output : for node in listing : yield json. dumps ( node ) else : nodes = [ ] last_dir = None try : while True : node = listing. next ( ) dir_name = os. path. dirname ( node [ ""path"" ] ) if dir_name!= last_dir : if : yield _create_dir_listing ( nodes, human_readable, recursive, summary ) last_dir = dir_name nodes = [ ] nodes. append ( node ) except StopIteration : yield _create_dir_listing ( nodes, human_readable, recursive, summary )",False,last_dir,nodes,0.6665686964988708 3904,"def hash_path_recursively ( path, ignorer = None, hasher = hashlib. sha1 ) : checksum = hasher ( ) size = 0 if os. path. isdir ( path ) : tp = ""dir"" checksum. update ( b""DIR:\n"" ) for item in sorted ( os. listdir ( path ) ) : fullpath = os. path. 
join ( path, item ) if : continue item_res = hash_path_recursively ( fullpath, ignorer, hasher ) if item_res [ ""type"" ] == ""dir"" and item_res [ ""size"" ] == 0 : continue digest = item_res [ ""checksum"" ]. digest ( ) line = digest + b"" "" + item. encode ( ""utf-8"" ) + b""\n"" checksum. update ( line ) size += 1 else : tp = ""file"" with open ( path, ""rb"" ) as fp : data = b""FILE:\n"" while data : checksum. update ( data ) data = fp. read ( 65536 ) size += len ( data ) return { ""checksum"" : checksum, ""size"" : size, ""type"" : tp }",False,ignorer and ignorer(fullpath),not fullpath,0.662824273109436 3905,"def generic_info_hook ( state ) : addr = state. solver. eval ( state. regs. ip ) chall_resp_plugin = state. get_plugin ( ""chall_resp_info"" ) format_info = chall_resp_plugin. format_infos [ addr ]. copy ( ) if format_info. get_type ( ) == ""DontConstrain"" : arg_num = format_info. check_symbolic_arg arg = angr. calling_conventions. SimCCCdecl ( state. arch ). arg ( state, arg_num ) if : l. warning ( ""symbolic arg not hooking"" ) return if chall_resp_plugin. pending_info is not None : chall_resp_plugin. backup_pending_info. append ( ( chall_resp_plugin. ret_addr_to_unhook, chall_resp_plugin. pending_info ) ) state. project. unhook ( chall_resp_plugin. ret_addr_to_unhook ) chall_resp_plugin. ret_addr_to_unhook = None chall_resp_plugin. pending_info = None ret_addr = state. solver. eval ( state. memory. load ( state. regs. sp, 4, endness = ""Iend_LE"" ) ) chall_resp_plugin. ret_addr_to_unhook = ret_addr state. project. hook ( ret_addr, end_info_hook, length = 0 ) format_info. compute ( state ) chall_resp_plugin. pending_info = format_info l. debug ( ""starting hook for %s at %#x"", format_info. func_name, format_info. addr )",False,state.mem[arg].string.resolved.symbolic,arg.l.debug(),0.6489238142967224 3906,"def handle_query ( self, query : str ) -> BaseAction : if query == ""~"" : return SetUserQueryAction ( ""~/"" ) path = Path ( query ) result_items = [ ] try : existing_dir = path. get_existing_dir ( ) if : file_names = self. list_files ( path. get_abs_path ( ), sort_by_usage = True ) for name in self. filter_dot_files ( file_names ) [ : self. RESULT_LIMIT ] : file = os. path. join ( existing_dir, name ) result_items. append ( self. create_result_item ( file ) ) else : file_names = self. list_files ( existing_dir ) search_for = path. get_search_part ( ) if not search_for. startswith ( ""."" ) : file_names = self. filter_dot_files ( file_names ) files = [ os. path. join ( existing_dir, name ) for name in file_names ] result_items = SortedList ( search_for, min_score = 40, limit = self. RESULT_LIMIT ) result_items. extend ( [ self. create_result_item ( name ) for name in reversed ( files ) ] ) except ( InvalidPathError, OSError ) : result_items = [ ] return RenderResultListAction ( result_items )",False,existing_dir == path.get_abs_path(),existing_dir,0.650510311126709 3907,"def test_calculate_all_ctc_probs ( module, mtlalpha ) : m = importlib. import_module ( module ) args = make_arg ( mtlalpha = mtlalpha, asr_weight = 0.3 ) if ""pytorch"" in module : batch = prepare_inputs ( ""pytorch"" ) else : batch = prepare_inputs ( ""chainer"" ) model = m. E2E ( 40, 5, args ) with chainer. no_backprop_mode ( ) : if ""pytorch"" in module : ctc_probs = model. calculate_all_ctc_probs ( * batch ) if : print ( ctc_probs. 
shape ) else : assert ctc_probs is None else : raise NotImplementedError",False,mtlalpha > 0,ctc_probs is not None,0.6656359434127808 3908,"def _render_all_change_lines ( self, differ, old_lines, new_lines ) : for tag, i1, i2, j1, j2 in differ. get_opcodes ( ) : if : lines = self. _render_change_lines ( differ, tag, None, None, i1, i2, old_lines ) elif tag == ""insert"" : lines = self. _render_change_lines ( differ, tag, None, ""+"", j1, j2, new_lines ) elif tag == ""delete"" : lines = self. _render_change_lines ( differ, tag, ""-"", None, i1, i2, old_lines ) elif tag == ""replace"" : lines = self. _render_change_replace_lines ( differ, i1, i2, j1, j2, old_lines, new_lines ) else : raise ValueError ( 'Unexpected tag ""%s""' % tag ) for line in lines : yield line",False,tag == 'equal',tag == 'expand',0.6578370332717896 3909,"def apply_mask ( self, mask, data_t, data_f ) : ind_t, ind_f = 0, 0 out = [ ] for m in cycle ( mask ) : if m : if ind_t == len ( data_t ) : return out out. append ( data_t [ ind_t ] ) ind_t += 1 else : if : return out out. append ( data_f [ ind_f ] ) ind_f += 1 return out",True,ind_f == len(data_f),ind_f == len(data_f),0.6546093225479126 3910,"def _on_frame_data ( self, data ) : handled_future = None self. _wire_bytes_in += len ( data ) if self. _frame_opcode_is_control : if not self. _final_frame : self. _abort ( ) return opcode = self. _frame_opcode elif self. _frame_opcode == 0 : if self. _fragmented_message_buffer is None : self. _abort ( ) return self. _fragmented_message_buffer += data if self. _final_frame : opcode = self. _fragmented_message_opcode data = self. _fragmented_message_buffer self. _fragmented_message_buffer = None else : if self. _fragmented_message_buffer is not None : self. _abort ( ) return if self. _final_frame : opcode = self. _frame_opcode else : self. _fragmented_message_opcode = self. _frame_opcode self. _fragmented_message_buffer = data if self. _final_frame : handled_future = self. _handle_message ( opcode, data ) if not self. client_terminated : if : ",False,handled_future,handled_future is not None,0.6683714389801025 3911,"def add_prop_to_socket ( self, socket, default_value ) : try : self. halt_updates = True if default_value is not None : if isinstance ( default_value, float ) : if not socket. use_prop or socket. default_property_type!= ""float"" : socket. use_prop = True socket. default_property_type = ""float"" socket. default_float_property = default_value elif : if not socket. use_prop or socket. default_property_type!= ""int"" : socket. use_prop = True socket. default_property_type = ""int"" socket. default_int_property = default_value else : socket. use_prop = False else : socket. use_prop = False except : print ( ""some failure in the add_props_to_sockets function. ouch."" ) self. halt_updates = False",False,"isinstance(default_value, int)","isinstance(default_value, str)",0.6497331857681274 3912,"def __saveCache ( self, file ) : cache_file = None try : temp = RopperService. CACHE_FOLDER if : os. makedirs ( temp ) cache_file = temp + os. path. sep + self. __getCacheFileName ( file ) count = RopperService. CACHE_FILE_COUNT if not isWindows ( ) and len ( file. allGadgets ) > 1000 : if os. path. exists ( cache_file ) : os. remove ( cache_file ) length = len ( file. allGadgets ) step = int ( length / count ) for i in range ( count - 1 ) : gadgets = file. allGadgets [ i * step : ( i + 1 ) * step ] with open ( cache_file + ""_%d"" % ( i + 1 ), ""wb"" ) as f : f. write ( encode ( repr ( gadgets ). 
encode ( ""ascii"" ), ""zip"" ) ) gadgets = file. allGadgets [ ( count - 1 ) * step : ] with open ( cache_file + ""_%d"" % ( count ), ""wb"" ) as f : f. write ( encode ( repr ( gadgets ). encode ( ""ascii"" ), ""zip"" ) ) return with open ( cache_file, ""wb"" ) as f : f. write ( encode ( repr ( file. allGadgets ). encode ( ""ascii"" ), ""zip"" ) ) ",True,not os.path.exists(temp),not os.path.exists(temp),0.6488783359527588 3913,"def _draw_number ( screen, x_offset, y_offset, number, token = Token. Clock, transparent = False ) : ""Write number at position."" fg = Char ( "" "", token ) bg = Char ( "" "", Token ) for y, row in enumerate ( _numbers [ number ] ) : screen_row = screen. data_buffer [ y + y_offset ] for x, n in enumerate ( row ) : if : screen_row [ x + x_offset ] = fg elif not transparent : screen_row [ x + x_offset ] = bg",False,n == '#',n,0.6756603717803955 3914,"def add ( self, tag, values ) : if tag not in self. different : if tag not in self : self [ tag ] = values elif : self. different. add ( tag ) self [ tag ] = [ """" ] self. counts [ tag ] += 1",False,self[tag] != values,tag in values,0.6605939865112305 3915,"def readframes ( self, nframes ) : if self. _data_seek_needed : self. _data_chunk. seek ( 0, 0 ) pos = self. _soundpos * self. _framesize if pos : self. _data_chunk. seek ( pos, 0 ) self. _data_seek_needed = 0 if nframes == 0 : return """" if self. _sampwidth in ( 2, 4 ) and sys. byteorder == ""big"" : import array chunk = self. _data_chunk data = array. array ( _array_fmts [ self. _sampwidth ] ) assert data. itemsize == self. _sampwidth nitems = nframes * self. _nchannels if nitems * self. _sampwidth > chunk. chunksize - chunk. size_read : nitems = ( chunk. chunksize - chunk. size_read ) // self. _sampwidth data. fromfile ( chunk. file. file, nitems ) chunk. size_read = chunk. size_read + nitems * self. _sampwidth chunk = chunk. file chunk. size_read = chunk. size_read + nitems * self. _sampwidth data. byteswap ( ) data = data. tostring ( ) else : data = self. _data_chunk. read ( nframes * self. _framesize ) if : data = _byteswap3 ( data ) if self. _convert and data : data =",False,self._sampwidth == 3 and sys.byteorder == 'big',self._convert and data,0.6538362503051758 3916,"def transform ( self, X, y = None ) : if isinstance ( X, dict ) : for col, col_dict in self. column_ranges. items ( ) : if col in X : X [ col ] = scale_val ( val = X [ col ], min_val = col_dict [ ""min_val"" ], total_range = col_dict [ ""inner_range"" ], truncate_large_values = self. truncate_large_values, ) else : if : X = utils. safely_drop_columns ( X, self. cols_to_ignore ) for col, col_dict in self. column_ranges. items ( ) : if col in X. columns : min_val = col_dict [ ""min_val"" ] inner_range = col_dict [ ""inner_range"" ] X [ col ] = X [ col ]. apply ( lambda x : scale_val ( x, min_val, inner_range, self. truncate_large_values ) ) return X",False,len(self.cols_to_ignore) > 0,self.cols_to_ignore,0.650653600692749 3917,"def load_session ( dic ) : inst = bilibili. instance for i in dic. keys ( ) : inst. dic_bilibili [ i ] = dic [ i ] if : inst. dic_bilibili [ ""pcheaders"" ] [ ""cookie"" ] = dic [ i ] inst. dic_bilibili [ ""appheaders"" ] [ ""cookie"" ] = dic [ i ]",False,i == 'cookie',i in 'pcheaders',0.672454833984375 3918,"def test_identity ( self ) : for x in ( None, False, True, 12345, 123.45, ""abcde"", b""abcde"", datetime. datetime ( 2004, 10, 26, 10, 33, 33 ), plistlib. Data ( b""abcde"" ), bytearray ( b""abcde"" ), [ 12, 345 ], ( 12, 345 ), { ""12"" : 345 }, ) : with self. 
subTest ( x = x ) : data = plistlib. dumps ( [ x ] * 2, fmt = plistlib. FMT_BINARY ) a, b = plistlib. loads ( data ) if : x = list ( x ) self. assertEqual ( a, x ) self. assertEqual ( b, x ) self. assertIs ( a, b )",False,"isinstance(x, tuple)",len(x) > 0,0.6495096683502197 3919,"def main ( league, time, standings, team, live, use12hour, players, output_format, output_file, upcoming, ) : """"""A CLI for live and past football scores from various football leagues"""""" try : if output_format == ""stdout"" and output_file : raise IncorrectParametersException ( ""Printing output to stdout and "" ""saving to a file are mutually exclusive"" ) writer = get_writer ( output_format, output_file ) if live : get_live_scores ( writer, use12hour ) return if standings : if : raise IncorrectParametersException ( ""Please specify a league. "" ""Example --standings --league=EPL"" ) get_standings ( league, writer ) return if team : if players : get_team_players ( team, writer ) return else : get_team_scores ( team, time, writer, upcoming, use12hour ) return get_league_scores ( league, time, writer, upcoming, use12hour ) except IncorrectParametersException as e : ",False,not league,league is None,0.7011781334877014 3920,"def _handle_raise ( self, values, is_NAs, origins ) : for is_NA, origin in zip ( is_NAs, origins ) : if : msg = ( ""Missing values detected. If you want rows with missing "" ""values to be automatically deleted in a list-wise "" ""manner (not recommended), please set dropna=True in "" ""the Bambi Model initialization."" ) raise PatsyError ( msg, origin ) return values",False,np.any(is_NA),values[0] is None or is_NA is False,0.6508210897445679 3921,"def parseArrayPattern ( self ) : node = Node ( ) elements = [ ] self. expect ( ""["" ) while not self. match ( ""]"" ) : if : self. lex ( ) elements. append ( null ) else : if self. match ( ""..."" ) : restNode = Node ( ) self. lex ( ) rest = self. parseVariableIdentifier ( ) elements. append ( restNode. finishRestElement ( rest ) ) break else : elements. append ( self. parsePatternWithDefault ( ) ) if not self. match ( ""]"" ) : self. expect ( "","" ) self. expect ( ""]"" ) return node. finishArrayPattern ( elements )",False,"self.match(',')",self.match(null),0.6488161087036133 3922,"def extract_within_coref ( self, mention : MentionData ) -> List [ str ] : tokens = mention. tokens_number within_coref_token = [ ] for token_id in tokens : token_x_id = MentionData. static_gen_token_unique_id ( str ( mention. doc_id ), str ( mention. sent_id ), str ( token_id ) ) if token_x_id in self. within_doc_coref_chain : token_coref_chain = self. within_doc_coref_chain [ token_x_id ] if : within_coref_token. append ( token_coref_chain ) else : within_coref_token. append ( ""-"" ) break return within_coref_token",False,token_coref_chain,token_coref_chain is not None,0.6555942296981812 3923,"def do_schedule ( self ) : if ( self. cur_thread. is_stop ( ) or self. ins_count % QlWindowsThreadManagement. TIME_SLICE == 0 ) : if : return else : for i in range ( 1, len ( self. threads ) ) : next_id = ( self. cur_thread. id + i ) % len ( self. threads ) next_thread = self. threads [ next_id ] if next_thread. status == QlWindowsThread. RUNNING and ( not next_thread. has_waitfor ( ) ) : if self. cur_thread. is_stop ( ) : pass else : self. cur_thread. suspend ( ) next_thread. resume ( ) self. cur_thread = next_thread break",False,len(self.threads) <= 1,self.cur_thread.is_alive(),0.6619405746459961 3924,"def done ( self, result ) : logger. 
debug ( ""Done"" ) if result == 1 : page = self. currentPage ( ) if type ( page ) == PageProjectProperties : venv = page. vtxtPlace. text ( ) if venv : if sys. platform == ""win32"" : venv = os. path. join ( venv, ""Scripts"", ""python.exe"" ) else : venv = os. path. join ( venv, ""bin"", ""python"" ) if : btnPressed = QMessageBox. information ( self, self. tr ( ""Virtualenv Folder"" ), self. tr ( ""Folder don't exists or this is not a "" ""valid Folder.\n If you want to set "" ""or modify, go to project properties"" ), self. tr ( ""Back"" ), self. tr ( ""Continue"" ), ) """"""Customized load."""""" self. language_model. load_state_dict ( state_dict [ self. _language_model_key ], strict = strict ) if mpu. is_pipeline_last_stage ( ) : if : self. multichoice_head. load_state_dict ( state_dict [ self. _multichoice_head_key ], strict = strict ) else : print_rank_last ( ""***WARNING*** could not find {} in the checkpoint, "" ""initializing to random"". format ( self. _multichoice_head_key ) )",False,self._multichoice_head_key in state_dict,self.multichoice_head is not None,0.6564022302627563 3926,"def closeEvent ( self, event : QCloseEvent ) : if self. device. backend is not Backends. none : self. emit_editing_finished_signals ( ) self. timer. stop ( ) self. device. stop ( ""Dialog closed. Killing recording process."" ) logger. debug ( ""Device stopped successfully."" ) if not self. testing_mode : if : event. ignore ( ) return time. sleep ( 0.1 ) if self. device. backend not in ( Backends. none, Backends. network ) : logger. debug ( ""Cleaning up device"" ) self. device. cleanup ( ) logger. debug ( ""Successfully cleaned up device"" ) self. device_settings_widget. emit_device_parameters_changed ( ) settings. write ( ""{}/geometry"". format ( self. __class__. __name__ ), self. saveGeometry ( ) ) if self. device is not None : self. device. free_data ( ) self. scene_manager. eliminate ( ) self. _eliminate_graphic_view ( ) super ( ). closeEvent ( event )",False,not self.save_before_close(),self.device is not None,0.6529676914215088 3927,"def create ( self, defn, check, allow_reboot, allow_recreate ) : self. no_subscription_id_change ( defn ) self. no_property_change ( defn, ""dns_zone"" ) self. no_property_change ( defn, ""record_type"" ) self. copy_mgmt_credentials ( defn ) self. dns_record_set_name = defn. dns_record_set_name self. dns_zone = defn. dns_zone self. record_type = defn. record_type if check : rset = self. get_settled_resource ( ) if : self. warn_missing_resource ( ) elif self. state == self. UP : self. handle_changed_property ( ""tags"", rset [ ""tags"" ] ) self. handle_changed_property ( ""properties"", rset [ ""properties"" ] ) else : self. warn_not_supposed_to_exist ( ) self. confirm_destroy ( ) if self. state!= self. UP : if self. get_settled_resource ( ) : raise Exception ( ""tried creating a DNS record set that already exists; "" ""please run 'deploy --check' to fix this"" ) self. log ( ""creating {0}..."". format ( self. full_name ) ) self. _create_or_update ( defn ) if self. properties_changed ( defn ) : self. log ( ""updating properties of {0}..."". format ( self. full_name ) ) ",False,not rset,rset is None,0.6759865283966064 3928,"def test_wdi_download_w_retired_indicator ( self ) : cntry_codes = [ ""CA"", ""MX"", ""US"" ] inds = [ ""GDPPCKD"" ] with pytest. raises ( ValueError ) : result = download ( country = cntry_codes, indicator = inds, start = 2003, end = 2004, errors = ""ignore"", ) if : pytest. 
skip ( ""Invalid results"" )",False,len(result) > 0,len(result) != 0,0.6564669609069824 3929,"def canonicalize_instruction_name ( instr ) : name = instr. insn_name ( ). upper ( ) if name == ""MOV"" : if instr. mnemonic. startswith ( ""lsr"" ) : return ""LSR"" elif instr. mnemonic. startswith ( ""lsl"" ) : return ""LSL"" elif : return ""ASR"" return OP_NAME_MAP. get ( name, name )",False,instr.mnemonic.startswith('asr'),name == 'ASR',0.6596343517303467 3930,"def validate_pk ( self ) : try : self. _key = serialization. load_pem_private_key ( self. key, password = None, backend = default_backend ( ) ) if : AWSValidationException ( ""The private key length is not supported. Only 1024-bit and 2048-bit are allowed."" ) except Exception as err : if isinstance ( err, AWSValidationException ) : raise raise AWSValidationException ( ""The private key is not PEM-encoded or is not valid."" )",False,self._key.key_size > 2048,len(self.key) > 1024 or len(self.key) > 2048,0.6550278663635254 3931,def tickframe ( self ) : self. frame = ( self. frame + 1 ) % 8 if self. uselen and self. frame & 1 == 0 and self. lengthtimer > 0 : self. lengthtimer -= 1 if self. lengthtimer == 0 : self. enable = False if self. frame == 7 and self. envelopetimer!= 0 : self. envelopetimer -= 1 if : newvolume = self. volume + ( self. envdir or - 1 ) if newvolume < 0 or newvolume > 15 : self. envelopetimer = 0 else : self. envelopetimer = self. envper self. volume = newvolume,False,self.envelopetimer == 0,self.volume,0.663364827632904 3932,"def cart_number_checksum_validation ( cls, number ) : digits = [ ] even = False if not number. isdigit ( ) : return False for digit in reversed ( number ) : digit = ord ( digit ) - ord ( ""0"" ) if even : digit *= 2 if : digit = digit % 10 + digit // 10 digits. append ( digit ) even = not even return sum ( digits ) % 10 == 0 if digits else False",False,digit >= 10,even,0.6977077126502991 3933,"def getRenderingFor ( self, build ) : value = build. render ( self. value ) index = build. render ( self. index ) value, index = yield defer. gatherResults ( [ value, index ] ) if index not in value : rv = yield build. render ( self. default ) else : if : rv = yield build. render ( value [ index ] ) if not rv : rv = yield build. render ( self. default ) elif self. hasKey!= _notHasKey : rv = yield build. render ( self. hasKey ) elif self. hasKey!= _notHasKey : rv = yield build. render ( self. hasKey ) else : rv = yield build. render ( value [ index ] ) if rv is None : rv = yield build. render ( self. elideNoneAs ) defer. returnValue ( rv )",False,self.defaultWhenFalse,index in value,0.6545971035957336 3934,"def read_until ( self, min_num_bytes, ending, timeout = 10, data_consumer = None ) : assert data_consumer is None or len ( ending ) == 1 data = self. serial. read ( min_num_bytes ) if : data_consumer ( data ) timeout_count = 0 while True : if data. endswith ( ending ) : break elif self. serial. inWaiting ( ) > 0 : new_data = self. serial. read ( 1 ) if : data_consumer ( new_data ) data = new_data else : data = data + new_data timeout_count = 0 else : timeout_count += 1 if timeout is not None and timeout_count >= 100 * timeout : break time. sleep ( 0.01 ) return data",False,data_consumer,data_consumer is not None,0.6642252206802368 3935,"def clean_file_name ( self, c, ext, p ) : """"""Compute the file name when subdirectories mirror the node's hierarchy in Leo."""""" use_extentions = c. config. getBool ( ""open-with-uses-derived-file-extensions"" ) ancestors, found = [ ], False for p2 in p. 
self_and_parents ( copy = False ) : h = p2. anyAtFileNodeName ( ) if not h : h = p2. h elif use_extentions and not found : found = True base, ext2 = g. os_path_splitext ( h ) if p2 == p : h = base if : ext = ext2 ancestors. append ( g. sanitize_filename ( h ) ) ancestors. append ( ""Leo"" + str ( id ( p. v ) ) ) td = os. path. abspath ( tempfile. gettempdir ( ) ) while len ( ancestors ) > 1 : td = os. path. join ( td, ancestors. pop ( ) ) if not os. path. exists ( td ) : os. mkdir ( td ) name = ancestors. pop ( ) + ext path = os. path. join ( td, name ) return path",False,ext2,ext2 != None,0.6726727485656738 3936,"def get_usage_list ( self, start, end ) : show_terminated = self. request. GET. get ( ""show_terminated"", self. show_terminated ) instances = [ ] terminated_instances = [ ] usage = api. usage_get ( self. request, self. tenant_id, start, end ) if hasattr ( usage, ""server_usages"" ) : now = datetime. datetime. now ( ) for server_usage in usage. server_usages : server_uptime = server_usage [ ""uptime"" ] total_uptime = now - datetime. timedelta ( seconds = server_uptime ) server_usage [ ""uptime_at"" ] = total_uptime if : terminated_instances. append ( server_usage ) else : instances. append ( server_usage ) usage. server_usages = instances return ( usage, )",False,server_usage['ended_at'] and (not show_terminated),show_terminated,0.6477519273757935 3937,"def get_domain_ssl_files ( domain, ssl_certificates, env, allow_missing_cert = False, use_main_cert = True ) : if use_main_cert or not allow_missing_cert : ssl_private_key = os. path. join ( os. path. join ( env [ ""STORAGE_ROOT"" ], ""ssl"", ""ssl_private_key.pem"" ) ) ssl_certificate = os. path. join ( os. path. join ( env [ ""STORAGE_ROOT"" ], ""ssl"", ""ssl_certificate.pem"" ) ) system_certificate = { ""private-key"" : ssl_private_key, ""certificate"" : ssl_certificate, ""primary-domain"" : env [ ""PRIMARY_HOSTNAME"" ], ""certificate_object"" : load_pem ( load_cert_chain ( ssl_certificate ) [ 0 ] ), } if use_main_cert : if : return system_certificate wildcard_domain = re. sub ( ""^[^\.]+"", ""*"", domain ) if domain in ssl_certificates : return ssl_certificates [ domain ] elif wildcard_domain in ssl_certificates : return ssl_certificates [ wildcard_domain ] elif not allow_missing_cert : return system_certificate else : return None",False,domain == env['PRIMARY_HOSTNAME'],domain in ssl_certificates,0.6521974802017212 3938,"def accept_handler ( fd : socket. socket, events : int ) -> None : for i in range ( _DEFAULT_BACKLOG ) : if : return try : connection, address = sock. accept ( ) except BlockingIOError : return except ConnectionAbortedError : continue callback ( connection, address )",False,removed[0],events & 1,0.6726330518722534 3939,"def __repr__ ( self ) : attrs = [ ] for k in self. keydata : if k == ""p"" : attrs. append ( ""p(%d)"" % ( self. size ( ) + 1, ) ) elif : attrs. append ( k ) if self. has_private ( ) : attrs. append ( ""private"" ) return ""<%s @0x%x %s>"" % ( self. __class__. __name__, id ( self ), "","". join ( attrs ) )",False,"hasattr(self.key, k)",k == 'k',0.6577895879745483 3940,"def _verifySubs ( self ) : for inst in self. subs : if not isinstance ( inst, ( _Block, _Instantiator, Cosimulation ) ) : raise BlockError ( _error. ArgType % ( self. name, ) ) if : if not inst. modctxt : raise BlockError ( _error. InstanceError % ( self. name, inst. callername ) )",False,"isinstance(inst, (_Block, _Instantiator))",inst.argid,0.6546672582626343 3941,"def sleep ( ) : if isinstance ( seconds, float ) : time. 
sleep ( seconds ) elif isinstance ( seconds, basestring ) : if : time. sleep ( random. uniform ( float ( seconds. split ( ""-"" ) [ 0 ] ), float ( seconds. split ( ""-"" ) [ 1 ] ) ) ) else : time. sleep ( float ( seconds ) )",False,'-' in seconds,seconds >= '-',0.679764986038208 3942,"def _tab_focus_stack ( self, mode : str, *, show_error : bool = True ) -> None : """"""Select the tab which was last focused."""""" tab_deque = self. _tabbed_browser. tab_deque cur_tab = self. _cntwidget ( ) try : if : tab = tab_deque. last ( cur_tab ) elif mode == ""stack-prev"" : tab = tab_deque. prev ( cur_tab ) elif mode == ""stack-next"" : tab = tab_deque. next ( cur_tab ) else : raise utils. Unreachable ( ""Missing implementation for stack mode!"" ) except IndexError : if not show_error : return raise cmdutils. CommandError ( ""Could not find requested tab!"" ) idx = self. _tabbed_browser. widget. indexOf ( tab ) if idx == - 1 : raise cmdutils. CommandError ( ""Requested tab vanished!"" ) self. _set_current_index ( idx )",False,mode == 'last',mode == 'stack-last',0.6606627106666565 3943,"def __pathToEditor ( self, editor ) : path = [ ] child = editor parent = child. parent ( ) while parent is not None : if : path. append ( ""c"" ) break elif isinstance ( parent, _DetachedPanel ) : path. append ( str ( self. __detachedPanels. index ( parent ) ) ) path. append ( ""p"" ) break path. append ( str ( parent. index ( child ) ) ) child = parent parent = child. parent ( ) path. reverse ( ) return ""-"". join ( path )",False,"isinstance(parent, GafferUI.CompoundEditor)",parent == child,0.6511542201042175 3944,"def _temp_connection_check ( self, rid, temp_conn, db_row, types, dependents ) : if temp_conn. connected ( ) : query = render_template ( ""/"". join ( [ self. sql_path, ""dependents.sql"" ] ), fetch_dependents = True, rid = rid, lastsysoid = db_row [ ""datlastsysoid"" ], ) status, result = temp_conn. execute_dict ( query ) if : current_app. logger. error ( result ) RoleView. _handle_dependents_data ( result, types, dependents, db_row )",False,not status,status != 0,0.6738032698631287 3945,"def __init__ ( self, data = None, dataset = None, device = None ) : """"""Create a Batch from a list of examples."""""" if data is not None : self. batch_size = len ( data ) self. dataset = dataset self. fields = dataset. fields. keys ( ) self. input_fields = [ k for k, v in dataset. fields. items ( ) if v is not None and not v. is_target ] self. target_fields = [ k for k, v in dataset. fields. items ( ) if v is not None and v. is_target ] for ( name, field ) in dataset. fields. items ( ) : if : batch = [ getattr ( x, name ) for x in data ] setattr ( self, name, field. process ( batch, device = device ) )",False,field is not None,data is not None,0.662406861782074 3946,"def iter_GEN ( name ) : st = 0 for line in io. open ( name, ""rt"", encoding = ""utf-8"" ) : line = line. strip ( ) splitted = line. split ( ""#"", 1 ) if : if splitted [ 0 ]!= """" : yield ( False, splitted [ 0 ], st ) else : testcase, comment = splitted is_trivial = comment. startswith ( "" "" ) testcase = testcase. strip ( ) comment = comment. strip ( ) testcase_detected = len ( testcase ) > 0 copy_testcase_detected = comment. startswith ( ""COPY:"" ) subtask_detected = comment. 
startswith ( ""ST:"" ) flags = [ testcase_detected, copy_testcase_detected, subtask_detected ] flags_count = len ( [ x for x in flags if x ] ) if flags_count > 1 : raise Exception ( ""No testcase and command in"" "" the same line allowed"" ) if flags_count == 0 and not is_trivial : raise Exception ( ""Unrecognized non-trivial line"" ) if testcase_detected : yield ( False, testcase, st ) if copy_testcase_detected : yield ( True, comment [ 5 : ]. strip",False,len(splitted) == 1,len(splitted) > 0,0.6603318452835083 3947,"def __iadd__ ( self, addend ) : if isinstance ( addend, COEFFICIENT_TYPES ) : self. constant += addend return self if not issubclass ( type ( addend ), PolynomialTensor ) : raise TypeError ( ""Invalid type."" ) if self. n_qubits!= addend. n_qubits : raise TypeError ( ""Invalid tensor shape."" ) for key in addend. n_body_tensors : if : self. n_body_tensors [ key ] = numpy. add ( self. n_body_tensors [ key ], addend. n_body_tensors [ key ] ) else : self. n_body_tensors [ key ] = addend. n_body_tensors [ key ] return self",True,key in self.n_body_tensors,key in self.n_body_tensors,0.6566682457923889 3948,"def rollback ( self ) : for operation, values in self. current_transaction_state [ : : - 1 ] : if operation == ""insert"" : values. remove ( ) elif operation == ""update"" : old_value, new_value = values if : os. unlink ( new_value. full_filename ) old_value. write ( ) self. _post_xact_cleanup ( )",False,new_value.full_filename != old_value.full_filename,new_value.is_file(),0.6495531797409058 3949,"def _copy_device_array_to_device ( x : DeviceArray, device : Optional [ xc. Device ] ) -> DeviceArray : if device is None : return x elif is_device_constant ( x ) : return DeviceArray ( x. aval, device, x. _lazy_expr, DeviceConstant ( device ) ) elif xb. get_device_backend ( device ). platform == x. device_buffer. platform ( ) : if x. device_buffer. device ( ) == device : if : return x else : moved_buf = x. device_buffer else : moved_buf = x. device_buffer. copy_to_device ( device ) else : backend = xb. get_device_backend ( device ) moved_buf = backend. buffer_from_pyval ( x. device_buffer. to_py ( ), device ) return DeviceArray ( x. aval, device, x. _lazy_expr, moved_buf )",False,x._device == device,moved_buf is None,0.6686573028564453 3950,"def handle ( self, * args, ** options ) : try : role_names = [ settings. ROLE_PROJECT_ADMIN, settings. ROLE_ANNOTATOR, settings. ROLE_ANNOTATION_APPROVER, ] except KeyError as key_error : self. stderr. write ( self. style. ERROR ( f'Missing Key: ""{key_error}""' ) ) for role_name in role_names : if : continue role = Role ( ) role. name = role_name try : role. save ( ) except DatabaseError as db_error : self. stderr. write ( self. style. ERROR ( f'Database Error: ""{db_error}""' ) ) else : self. stdout. write ( self. style. SUCCESS ( f'Role created successfully ""{role_name}""' ) )",False,Role.objects.filter(name=role_name).exists(),role_name == '',0.651489794254303 3951,"def ls ( self, * path ) : success = True cur = self for name in path : if not name or name == ""."" : continue elif name == "".."" : if cur. parent : cur = cur. parent else : child = cur. find_child ( name ) if : cur = child else : success = False break if success : for node in sorted ( cur. children ) : yield node",True,child,child,0.6960253715515137 3952,"def _method_events_callback ( self, values ) : try : previous_echoed = ( values [ ""child_result_list"" ] [ - 1 ]. decode ( ). split ( ""\n"" ) [ - 2 ]. strip ( ) ) if previous_echoed. 
endswith ( ""foo1"" ) : return ""echo foo2\n"" elif : return ""echo foo3\n"" elif previous_echoed. endswith ( ""foo3"" ) : return ""exit\n"" else : raise Exception ( ""Unexpected output {0!r}"". format ( previous_echoed ) ) except IndexError : return ""echo foo1\n""",False,previous_echoed.endswith('foo2'),"previous_echoed.ends( ""foo2')",0.6497566103935242 3953,"def main ( ) : ( filename, start_line_str, start_col_str, end_line_str, end_col_str, * mypy_and_args, ) = sys. argv [ 1 : ] start_line = int ( start_line_str ) start_col = int ( start_col_str ) end_line = int ( end_line_str ) end_col = int ( end_col_str ) with open ( filename, ""r"" ) as f : lines = f. readlines ( ) lines [ end_line - 1 ] = update_line ( lines [ end_line - 1 ], REVEAL_TYPE_END, end_col ) lines [ start_line - 1 ] = update_line ( lines [ start_line - 1 ], REVEAL_TYPE_START, start_col ) with tempfile. NamedTemporaryFile ( mode = ""w"", prefix = ""mypy"" ) as tmp_f : tmp_f. writelines ( lines ) tmp_f. flush ( ) output = run_mypy ( mypy_and_args, filename, tmp_f. name ) revealed_type, error = process_output ( output, filename, start_line ) if revealed_type : print ( revealed_type ) if : print ( output ) exit ( int ( error ) )",True,error,error,0.6858798265457153 3954,"def get_param ( node ) : edges = defaultdict ( list ) for edge in node. findall ( ""Edge"" ) : if edge. find ( ""Terminal"" ) is not None : edges [ edge. get ( ""val"" ) ] = edge. find ( ""Terminal"" ). text elif edge. find ( ""Node"" ) is not None : node_cpd = defaultdict ( list ) node_cpd [ edge. find ( ""Node"" ). get ( ""var"" ) ] = get_param ( edge. find ( ""Node"" ) ) edges [ edge. get ( ""val"" ) ] = node_cpd elif : subdag_attribute = defaultdict ( list ) subdag_attribute [ ""type"" ] = edge. find ( ""SubDAG"" ). get ( ""type"" ) if subdag_attribute [ ""type"" ] == ""template"" : subdag_attribute [ ""idref"" ] = edge. find ( ""SubDAG"" ). get ( ""idref"" ) if edge. find ( ""SubDAG"" ). get ( ""var"" ) : subdag_attribute [ ""var"" ] = edge. find ( ""SubDAG"" ). get ( ""var"" ) if edge. find ( ""SubDAG"" ). get ( ""val"" ) : subdag_attribute [ ""val"" ] = edge. find ( ""SubDAG"" ). get ( ""val"" ) edges [ edge. get ( ""val"" ) ] = subdag_attribute return edges",True,edge.find('SubDAG') is not None,edge.find('SubDAG') is not None,0.652458906173706 3955,"def _find_mini_cluster_jar ( self, path ) : for dirpath, dirnames, filenames in os. walk ( path ) : for files in filenames : if : return os. path. join ( dirpath, files )",False,"re.match('.*hadoop-mapreduce-client-jobclient.+-tests.jar', files)",len(files) > 0,0.6438804864883423 3956,"def WriteMacBundleResources ( self, resources, bundle_depends ) : """"""Writes ninja edges for'mac_bundle_resources'."""""" xcassets = [ ] extra_env = self. xcode_settings. GetPerTargetSettings ( ) env = self. GetSortedXcodeEnv ( additional_settings = extra_env ) env = self. ComputeExportEnvString ( env ) isBinary = self. xcode_settings. IsBinaryOutputFormat ( self. config_name ) for output, res in gyp. xcode_emulation. GetMacBundleResources ( generator_default_variables [ ""PRODUCT_DIR"" ], self. xcode_settings, map ( self. GypPathToNinja, resources ), ) : output = self. ExpandSpecial ( output ) if : self. ninja. build ( output, ""mac_tool"", res, variables = [ ( ""mactool_cmd"", ""copy-bundle-resource"" ), ( ""env"", env ), ( ""binary"", isBinary ), ], ) bundle_depends. append ( output ) else : xcassets. 
append ( res ) return xcassets",False,os.path.splitext(output)[-1] != '.xcassets',self.hasMacBundle,0.6487319469451904 3957,"def _doc_module ( self, module, filters, exclusive ) : """"""Extract config options from module."""""" options = [ ] try : mod = importlib. import_module ( module ) for prop in dir ( mod ) : if exclusive and prop not in exclusive : continue if : continue thing = getattr ( mod, prop ) if isinstance ( thing, cfg. Opt ) and thing not in options : options. append ( thing ) elif ( isinstance ( thing, list ) and len ( thing ) > 0 and isinstance ( thing [ 0 ], cfg. Opt ) ) : options. extend ( thing ) except Exception as e : self. error ( ""Unable to import {}: {}"". format ( module, e ) ) return options",False,prop in filters,filters and prop not in filters,0.681423544883728 3958,"def summary ( self ) -> str : recorded_stats = { } output_string = """" local_rank = ""0"" if self. local_rank is None else self. local_rank if not self. enabled : return output_string for action_name, function_events in self. profiled_actions. items ( ) : function_events. populate_cpu_children = lambda : None if self. export_to_chrome : filename = f""{action_name}_{local_rank}_trace.json"" path_to_trace = ( filename if : else os. path. join ( self. path_to_export_trace, filename ) ) function_events. export_chrome_trace ( path_to_trace ) if self. emit_nvtx : return output_string else : data = function_events. key_averages ( group_by_input_shapes = self. group_by_input_shapes ) table = data. table ( sort_by = self. sort_by_key, row_limit = self. row_limit ) recorded_stats [ action_name ] = table output_string = f""{os.linesep}Profiler Report{os.linesep}"" for action, stats in recorded_stats. items ( ) : output_string += f""{os.linesep}Profile stats for: {action} rank: {local_rank} {os.linesep}{stats}""",False,self.path_to_export_trace is None,self.path_to_export_trace,0.6506867408752441 3959,"def parse ( self ) -> _NodeT : if self. __was_parse_called : raise Exception ( ""Each parser object may only be used to parse once."" ) self. __was_parse_called = True for token in self. tokens : self. _add_token ( token ) while True : tos = self. stack [ - 1 ] if not tos. dfa. is_final : expected_str = get_expected_str ( EOFSentinel. EOF, tos. dfa. transitions. keys ( ) ) raise ParserSyntaxError ( f""Incomplete input. {expected_str}"", lines = self. lines, raw_line = len ( self. lines ), raw_column = len ( self. lines [ - 1 ] ), ) if : self. _pop ( ) else : return self. convert_nonterminal ( tos. nonterminal, tos. nodes )",False,len(self.stack) > 1,self.has_last_token(),0.6496908664703369 3960,"def _parse_apt_operations ( help_text_lines ) : is_commands_list = False for line in help_text_lines : line = line. decode ( ). strip ( ) if : yield line. split ( ) [ 0 ] elif line. startswith ( ""Basic commands:"" ) or line. startswith ( ""Most used commands:"" ) : is_commands_list = True",False,is_commands_list and line,is_commands_list,0.6552774310112 3961,"def split_requests ( model ) : structs = [ ] for struct in model. structs : structtype = None if : structtype = ""Request"" elif struct. name. endswith ( ""Response"" ) or struct. name == ""ServiceFault"" : structtype = ""Response"" if structtype : struct. needconstructor = True field = Field ( ) field. name = ""TypeId"" field. uatype = ""NodeId"" struct. fields. insert ( 0, field ) if structtype and not struct. name in NoSplitStruct : paramstruct = Struct ( ) if structtype == ""Request"" : basename = struct. name. 
replace ( ""Request"", """" ) + ""Parameters"" paramstruct. name = basename else : basename = struct. name. replace ( ""Response"", """" ) + ""Result"" paramstruct. name = basename paramstruct. fields = struct. fields [ 2 : ] paramstruct. bits = struct. bits struct. fields = struct. fields [ : 2 ] structs. append ( paramstruct ) interval = 5 if not self. p. online : self. logError ( _ ( ""Printer is not online. Please connect to it first."" ) ) return if not ( self. p. printing or self. sdprinting ) : self. logError ( _ ( ""Printer is not printing. Please print something before monitoring."" ) ) return self. log ( _ ( ""Monitoring printer, use ^C to interrupt."" ) ) if len ( l ) : try : interval = float ( l ) except : self. logError ( _ ( ""Invalid period given."" ) ) self. log ( _ ( ""Updating values every %f seconds."" ) % ( interval, ) ) self. monitoring = 1 prev_msg_len = 0 try : while True : self. p. send_now ( ""M105"" ) if self. sdprinting : self. p. send_now ( ""M27"" ) time. sleep ( interval ) if self. p. printing : preface = _ ( ""Print progress: "" ) progress = 100 * float ( self. p. queueindex ) / len ( self. p. mainqueue ) elif : preface = _ ( ""SD print progress: "" ) progress = self. percentdone prev_msg = preface + ""%.1f%%"" % progress if self. silent is False",False,self.sdprinting,self.silent,0.657619059085846 3963,"def _post_order ( op ) : if isinstance ( op, tvm. tir. Allocate ) : lift_stmt [ - 1 ]. append ( op ) return op. body if isinstance ( op, tvm. tir. AttrStmt ) : if : lift_stmt [ - 1 ]. append ( op ) return op. body if op. attr_key == ""virtual_thread"" : return _merge_block ( lift_stmt. pop ( ) + [ op ], op. body ) return op if isinstance ( op, tvm. tir. For ) : return _merge_block ( lift_stmt. pop ( ) + [ op ], op. body ) raise RuntimeError ( ""not reached"" )",False,op.attr_key == 'storage_scope',op.attr_key == 'lift_stmt',0.6514294743537903 3964,"def train_prog ( exe, program, loss, node2vec_pyreader, args, train_steps ) : trainer_id = int ( os. getenv ( ""PADDLE_TRAINER_ID"", ""0"" ) ) step = 0 if not os. path. exists ( args. save_path ) : os. makedirs ( args. save_path ) while True : try : begin_time = time. time ( ) ( loss_val, ) = exe. run ( program, fetch_list = [ loss ] ) log. info ( ""step %s: loss %.5f speed: %.5f s/step"" % ( step, np. mean ( loss_val ), time. time ( ) - begin_time ) ) step += 1 except F. core. EOFException : node2vec_pyreader. reset ( ) if step % args. steps_per_save == 0 or step == train_steps : save_path = args. save_path if trainer_id == 0 : model_path = os. path. join ( save_path, ""%s"" % step ) fleet. save_persistables ( exe, model_path ) if : break",False,step == train_steps,trainer_id == 1,0.6636180877685547 3965,"def _test_reshape ( data, out_shape, wrap_shape ) : """"""One iteration of reshape operation with given data and out shape"""""" with tf. Graph ( ). as_default ( ) : in_data = array_ops. placeholder ( shape = data. shape, dtype = data. dtype ) out_shape = out_shape if : else np. array ( out_shape, dtype = np. int32 ) in_shape = ( out_shape if : else array_ops. placeholder ( shape = out_shape. shape, dtype = out_shape. dtype, name = ""Newshape"" ) ) out = array_ops. 
reshape ( in_data, in_shape ) compare_tflite_with_tvm ( [ data, out_shape ] if wrap_shape else [ data ], [ ""Placeholder:0"", ""Newshape:0"" ] if wrap_shape else [ ""Placeholder:0"" ], [ in_data, in_shape ] if wrap_shape else [ in_data ], [ out ], mode = ""vm"", )",False,not wrap_shape,out_shape is None,0.65927654504776 3966,"def _power_exact ( y, xc, yc, xe ) : yc, ye = y. int, y. exp while yc % 10 == 0 : yc //= 10 ye += 1 if xc == 1 : xe *= yc while xe % 10 == 0 : xe //= 10 ye += 1 if ye < 0 : return None exponent = xe * 10 ** ye if : xc = exponent else : xc = 0 return 5",False,y and xe,exponent < 0,0.7027629613876343 3967,"def upload_recipe_files ( self ) : path = self. job. resultdir tests = self. get_processed_tests ( ) logging. debug ( ""Recipe filtering following tests: %s"" % tests ) for root, dirnames, files in os. walk ( path ) : """"""do not upload previously uploaded results files"""""" for d in dirnames : if d in tests : dirnames. remove ( d ) for name in files : remotepath = re. sub ( path, """", root ) localfile = os. path. join ( root, name ) if : continue self. bkr_proxy. recipe_upload_file ( localfile, remotepath )",False,os.path.getsize(localfile) == 0,not localpath,0.6478428840637207 3968,"def _add_constant_node ( self, source_node ) : parent_ids = range ( len ( source_node. in_edges ) ) for idx in parent_ids : parent_node = self. tf_graph. get_node ( source_node. in_edges [ idx ] ) if : self. _rename_Const ( parent_node )",False,parent_node.type == 'Const',parent_node,0.6516399383544922 3969,"def __init__ ( self, ec2_backend, subnet, private_ip_address, private_ip_addresses = None, device_index = 0, public_ip_auto_assign = True, group_ids = None, description = None, ) : self. ec2_backend = ec2_backend self. id = random_eni_id ( ) self. device_index = device_index self. private_ip_address = private_ip_address or random_private_ip ( ) self. private_ip_addresses = private_ip_addresses self. subnet = subnet self. instance = None self. attachment_id = None self. description = description self. public_ip = None self. public_ip_auto_assign = public_ip_auto_assign self. start ( ) self. attachments = [ ] self. _group_set = [ ] group = None if group_ids : for group_id in group_ids : group = self. ec2_backend. get_security_group_from_id ( group_id ) if : group = SecurityGroup ( self. ec2_backend, group_id, group_id, group_id, vpc_id = subnet. vpc_id, ) self. ec2_backend. groups [ subnet. vpc_id ] [ group",False,not group,group,0.6749142408370972 3970,"def df_index_expr ( self, length_expr = None, as_range = False ) : """"""Generate expression to get or create index of DF"""""" if isinstance ( self. index, types. NoneType ) : if : length_expr = df_length_expr ( self ) if as_range : return f""range({length_expr})"" else : return f""numpy.arange({length_expr})"" return ""self._index""",True,length_expr is None,length_expr is None,0.6605559587478638 3971,"def tag_export_edited ( ) : tag_enabled_misp = request. form. getlist ( ""tag_enabled_misp"" ) tag_enabled_hive = request. form. getlist ( ""tag_enabled_hive"" ) list_export_tags = list ( r_serv_db. smembers ( ""list_export_tags"" ) ) r_serv_db. delete ( ""whitelist_misp"" ) r_serv_db. delete ( ""whitelist_hive"" ) for tag in tag_enabled_misp : if : r_serv_db. sadd ( ""whitelist_misp"", tag ) else : return ""invalid input"" for tag in tag_enabled_hive : if : r_serv_db. 
sadd ( ""whitelist_hive"", tag ) else : return ""invalid input"" return redirect ( url_for ( ""PasteSubmit.edit_tag_export"" ) )",False,"r_serv_db.sismember('list_export_tags', tag)",tag in list_export_tags,0.6452752947807312 3972,"def _update_and_return ( layer : nn. Module, key : str ) : if memory is None : out = layer ( queries ) if cache is not None : res : MaybeList [ torch. Tensor ] = cache [ key ] if : res. append ( out. squeeze ( 1 ) ) out = torch. stack ( res, dim = 1 ) else : res = torch. cat ( [ res, out ], dim = 1 ) out = res cache [ key ] = res else : if cache is not None : res : MaybeList [ torch. Tensor ] = cache [ key ] if : if len ( res ) == 0 : out = layer ( memory ) else : out = torch. stack ( res, dim = 1 ) else : if res. size ( 1 ) == 0 : out = layer ( memory ) else : out = res else : out =",False,"isinstance(res, list)",len(res) > 0,0.6555722951889038 3973,"def update_gstin ( context ) : dirty = False for key, value in iteritems ( frappe. form_dict ) : if key!= ""party"" : address_name = frappe. get_value ( ""Address"", key ) if : address = frappe. get_doc ( ""Address"", address_name ) address. gstin = value. upper ( ) address. save ( ignore_permissions = True ) dirty = True if dirty : frappe. db. commit ( ) context. updated = True",True,address_name,address_name,0.6615316867828369 3974,"def sql_indexes_for_field ( self, model, f, style ) : ""Return any spatial index creation SQL for the field."" from django. contrib. gis. db. models. fields import GeometryField output = super ( SpatiaLiteCreation, self ). sql_indexes_for_field ( model, f, style ) if isinstance ( f, GeometryField ) : gqn = self. connection. ops. geo_quote_name qn = self. connection. ops. quote_name db_table = model. _meta. db_table output. append ( style. SQL_KEYWORD ( ""SELECT "" ) + style. SQL_TABLE ( ""AddGeometryColumn"" ) + ""("" + style. SQL_TABLE ( gqn ( db_table ) ) + "", "" + style. SQL_FIELD ( gqn ( f. column ) ) + "", "" + style. SQL_FIELD ( str ( f. srid ) ) + "", "" + style. SQL_COLTYPE ( gqn ( f. geom_type ) ) + "", "" + style. SQL_KEYWORD ( str ( f. dim ) ) + "", "" + style. SQL_KEYWORD ( str ( int ( not f. null ) ) ) + "");"" ) if : output. append ( style. SQL_KEYWORD ( ""SELECT "" ) + style. SQL_TABLE ( ""CreateSpatialIndex"" ) <",False,f.spatial_index,f.spatial_index is not None,0.6597077250480652 3975,"def __init__ ( self, dst, table, include = [ ], exclude = [ ], autobatch = 0, executor = executor ) : self. dst = dst self. table = table self. total = 0 self. rows = [ ] self. autobatch = autobatch self. bindings = { } include = map ( lambda x : x. lower ( ), include ) exclude = map ( lambda x : x. lower ( ), exclude ) _verbose = self. dst. verbose self. dst. verbose = 0 try : self. dst. table ( self. table ) if : colmap = { } for a in self. dst. results : colmap [ a [ 3 ]. lower ( ) ] = a [ 4 ] cols = self. __filter__ ( colmap. keys ( ), include, exclude ) for a in zip ( range ( len ( cols ) ), cols ) : self. bindings [ a [ 0 ] ] = colmap [ a [ 1 ] ] colmap = None else : cols = self. __filter__ ( include, include, exclude ) finally : self. dst. verbose = _verbose self. executor = executor ( table, cols )",True,self.dst.results,self.dst.results,0.6548646688461304 3976,"def check_database ( ) : if len ( EmailAddress. objects. all ( ) ) > 0 : print ( ""Are you sure you want to wipe the existing development database and reseed it? 
(Y/N)"" ) if : destroy_database ( ) else : return False else : return True",False,raw_input().lower() == 'y',EmailAddress.objects.filter(email_address=self.email).exists(),0.6506912708282471 3977,"def _parse_rosdep_resolve_dependencies ( dependency_name : str, output : str ) -> Dict [ str, Set [ str ] ] : delimiters = re. compile ( r""\n|\s"" ) lines = delimiters. split ( output ) dependencies : Dict [ str, Set [ str ] ] = { } dependency_set = None for line in lines : line = line. strip ( ) if line. startswith ( ""#"" ) : key = line. strip ( ""# "" ) dependencies [ key ] = set ( ) dependency_set = dependencies [ key ] elif line : if : raise RosdepUnexpectedResultError ( dependency_name, output ) else : dependency_set. add ( line ) return dependencies",True,dependency_set is None,dependency_set is None,0.6681257486343384 3978,"def _test_blob ( self, obj = 0 ) : assert self. has_table ( ""blobtable"" ), ""no blob table"" tabname, sql = self. table ( ""blobtable"" ) fn = tempfile. mktemp ( ) fp = None c = self. cursor ( ) try : hello = ( ""hello"", ) * 1024 c. execute ( sql ) self. db. commit ( ) from java. io import ( FileOutputStream, FileInputStream, ObjectOutputStream, ObjectInputStream, ByteArrayInputStream, ) fp = FileOutputStream ( fn ) oos = ObjectOutputStream ( fp ) oos. writeObject ( hello ) fp. close ( ) fp = FileInputStream ( fn ) blob = ObjectInputStream ( fp ) value = blob. readObject ( ) fp. close ( ) assert hello == value, ""unable to serialize properly"" if obj == 1 : fp = open ( fn, ""rb"" ) else : fp = FileInputStream ( fn ) c. execute ( ""insert into %s (a, b) values (?,?)"" % ( tabname ), [ ( 0, fp ) ], { 1 : zxJDBC. BLOB }, ) self. db. commit ( ) c. execute ( ""select * from %s"" % ( tabname ) ) f = c. fetchall ( ) bytes = f [ 0 ] [ 1",False,os.path.exists(fn),obj == 0,0.6480526924133301 3979,"def __getitem__ ( self, key ) : self. _update ( ) dx = c_double ( ) dy = c_double ( ) dz = c_double ( ) m = self. __len__ ( ) has_z = self. _ndim == 3 if isinstance ( key, int ) : if key + m < 0 or key >= m : raise IndexError ( ""index out of range"" ) if key < 0 : i = m + key else : i = key lgeos. GEOSCoordSeq_getX ( self. _cseq, i, byref ( dx ) ) lgeos. GEOSCoordSeq_getY ( self. _cseq, i, byref ( dy ) ) if : lgeos. GEOSCoordSeq_getZ ( self. _cseq, i, byref ( dz ) ) return ( dx. value, dy. value, dz. value ) else : return ( dx. value, dy. value ) elif isinstance ( key, slice ) : res = [ ] start, stop, stride = key. indices ( m ) for i in range ( start, stop, stride ) : lgeos. GEOSCoordSeq_getX ( self. _cseq, i, byref ( dx ) ) lgeos. GEOSCoordSeq_getY ( self. _cseq, i, byref ( dy ) ) if : lgeos. GEOSCoordSeq_getZ ( self. _cseq, i, byref ( dz ) ) filtered = [ ] escaped = [ ] for c in chars : try : if : pos += 1 continue elif body [ pos ] == ""\\"" and body [ pos + 1 ] == c : escaped += [ c ] pos += 2 else : filtered += [ c ] pos += 1 except IndexError : filtered += [ c ] continue return filtered, escaped",False,body[pos] == c,pos >= len(body),0.6669749021530151 3981,"def removeSentinelLines ( self, s, line_delim, start_delim, unused_end_delim ) : """"""Properly remove all sentinle lines in s."""""" delim = ( line_delim or start_delim or """" ) + ""@"" verbatim = delim + ""verbatim"" verbatimFlag = False result = [ ] lines = g. splitLines ( s ) for line in lines : i = g. skip_ws ( line, 0 ) if not verbatimFlag and g. match ( line, i, delim ) : if : verbatimFlag = True else : result. append ( line ) verbatimFlag = False result = """". 
join ( result ) return result",False,"g.match(line, i, verbatim)",unused_end_delim,0.6471948623657227 3982,"def save ( self ) : for var_name in self. default_config : if getattr ( self, var_name, None ) == self. default_config [ var_name ] : if : del self. file_config [ var_name ] else : self. file_config [ var_name ] = getattr ( self, var_name ) with open ( self. config_path, ""w"" ) as f : f. write ( json. dumps ( self. file_config, indent = 2 ) )",False,var_name in self.file_config,"hasattr(self, var_name)",0.6539607644081116 3983,"def _draw ( self, context, opacity ) : """"""draw accumulated instructions in context"""""" fresh_draw = len ( self. __new_instructions or [ ] ) > 0 if fresh_draw : self. paths = [ ] self. __instruction_cache = self. __new_instructions self. __new_instructions = [ ] else : if not self. __instruction_cache : return for instruction, args in self. __instruction_cache : if fresh_draw : if instruction in ( ""new_path"", ""stroke"", ""fill"", ""clip"" ) : self. paths. append ( ( instruction, ""path"", context. copy_path ( ) ) ) elif instruction in ( ""save"", ""restore"", ""translate"", ""scale"", ""rotate"" ) : self. paths. append ( ( instruction, ""transform"", args ) ) if instruction == ""set_color"" : self. _set_color ( context, args [ 0 ], args [ 1 ], args [ 2 ], args [ 3 ] * opacity ) elif instruction == ""show_layout"" : self. _show_layout ( context, * args ) elif : context. paint_with_alpha ( opacity ) else : getattr ( context, instruction ) ( * args )",False,opacity < 1 and instruction == 'paint',instruction == 'paint_with_alpha',0.6563804149627686 3984,"def semanticTags ( self, semanticTags ) : if semanticTags is None : self. __semanticTags = OrderedDict ( ) for key, value in list ( semanticTags. items ( ) ) : if not isinstance ( key, int ) : raise TypeError ( ""At least one key is not a valid int position"" ) if : raise TypeError ( ""At least one value of the provided dict is not a list of string"" ) for x in value : if not isinstance ( x, str ) : raise TypeError ( ""At least one value of the provided dict is not a list of string"" ) self. __semanticTags = semanticTags",False,"not isinstance(value, list)","not isinstance(value, str)",0.6548378467559814 3985,"def get_value_threshold ( self ) : total_values = [ ] for ( col_name, col_results, ) in self. binning_obj. binning_obj. bin_results. all_cols_results. items ( ) : if : total_values. append ( col_results. iv ) if not self. local_only : LOGGER. debug ( ""host_results: {}, host_selection_properties: {}"". format ( self. binning_obj. host_results, self. host_selection_properties ) ) for host_id, host_binning_obj in enumerate ( self. binning_obj. host_results ) : host_select_param = self. host_selection_properties [ host_id ] for ( col_name, col_results, ) in host_binning_obj. bin_results. all_cols_results. items ( ) : if col_name in host_select_param. select_col_names : total_values. append ( col_results. iv ) sorted_value = sorted ( total_values, reverse = True ) thres_idx = int ( math. floor ( self. percentile_threshold * len ( sorted_value ) - consts. FLOAT_ZERO ) ) return sorted_value [ thres_idx ]",False,col_name in self.selection_properties.select_col_names,col_results.iv,0.6507352590560913 3986,"def _get_check_overlays ( self, force = False ) : if self. _check_overlays_stale or force : if self. _connectivity_checker. online : url = self. _settings. get ( [ ""check_overlay_url"" ] ) self. _logger. info ( ""Fetching check overlays from {}"". format ( url ) ) try : r = requests. 
get ( url, timeout = 3.1 ) r. raise_for_status ( ) data = r. json ( ) except Exception as exc : self. _logger. error ( ""Could not fetch check overlay from {}: {}"". format ( url, exc ) ) self. _overlay_cache = { } else : self. _overlay_cache = data else : self. _logger. info ( ""Not fetching check overlays, we are offline"" ) self. _overlay_cache = { } self. _overlay_cache_timestamp = time. time ( ) default_overlay = { } defaults = self. get_settings_defaults ( ) for key in defaults [ ""checks"" ] : if key in self. _overlay_cache : default_overlay [ key ] = self. _overlay_cache [ key ] self. _settings. remove_overlay ( self. CHECK_OVERLAY_",False,default_overlay,self._overlay_cache_timestamp is None or force,0.6626017093658447 3987,"def _set_peer_statuses ( self ) : """"""Set peer statuses."""""" cutoff = time. time ( ) - STALE_SECS for peer in self. peers : if : peer. status = PEER_BAD elif peer. last_good > cutoff : peer. status = PEER_GOOD elif peer. last_good : peer. status = PEER_STALE else : peer. status = PEER_NEVER",True,peer.bad,peer.bad,0.6627126932144165 3988,"def credentials ( self ) : """"""The session credentials as a dict"""""" creds = { } if self. _creds : if self. _creds. access_key : creds [ ""aws_access_key_id"" ] = self. _creds. access_key if self. _creds. secret_key : creds [ ""aws_secret_access_key"" ] = self. _creds. secret_key if : creds [ ""aws_session_token"" ] = self. _creds. token if self. _session. region_name : creds [ ""aws_region"" ] = self. _session. region_name if self. requester_pays : creds [ ""aws_request_payer"" ] = ""requester"" return creds",False,self._creds.token,self._session,0.6739875078201294 3989,"def __init__ ( self, ** values ) : self. start_date = values. pop ( ""start_date"", None ) if self. start_date : self. start_date = convert_to_datetime ( self. start_date ) for key, value in list ( iteritems ( values ) ) : if : raise TypeError ( ""Invalid field name: %s"" % key ) if value is None : del values [ key ] self. fields = [ ] assign_defaults = False for field_name in self. FIELD_NAMES : if field_name in values : exprs = values. pop ( field_name ) is_default = False assign_defaults = not values elif assign_defaults : exprs = DEFAULT_VALUES [ field_name ] is_default = True else : exprs = ""*"" is_default = True field_class = self. FIELDS_MAP [ field_name ] field = field_class ( field_name, exprs, is_default ) self. fields. append ( field )",True,key not in self.FIELD_NAMES,key not in self.FIELD_NAMES,0.6679651737213135 3990,"def load ( self ) : if self. is_resource : self. loaded = True return buf = self. _buf buf. seek ( self. _buf_ofs ) buf. endian = "">"" self. metadata_size = buf. read_uint ( ) self. file_size = buf. read_uint ( ) self. format = buf. read_uint ( ) self. data_offset = buf. read_uint ( ) if self. format >= 9 : self. endianness = buf. read_uint ( ) if self. endianness == 0 : buf. endian = ""<"" self. tree. load ( buf ) if 7 <= self. format <= 13 : self. long_object_ids = bool ( buf. read_uint ( ) ) num_objects = buf. read_uint ( ) for i in range ( num_objects ) : if : buf. align ( ) obj = ObjectInfo ( self ) obj. load ( buf ) self. register_object ( obj ) if self. format >= 11 : num_adds = buf. read_uint ( ) for i in range ( num_adds ) : if : buf. align ( ) id = self. read_id ( buf ) self. adds. append ( ( id, buf. read_int ( ) ) ) if self. format >= 6 : num_refs = buf. 
read_uint ( ) for i in range ( num_refs ) : ref = AssetRef ( self ) ",False,self.format >= 14,i > 0,0.6619637608528137 3991,"def _is_perf_file ( file_path ) : f = get_file ( file_path ) for line in f : if line [ 0 ] == ""#"" : continue r = event_regexp. search ( line ) if : f. close ( ) return True f. close ( ) return False",True,r,r,0.6941646933555603 3992,"def command ( self, args = None ) : config = self. session. config args = args if ( args is not None ) else list ( self. args ) now = int ( time. time ( ) ) if args : job = args. pop ( 0 ) while args : op = args. pop ( 0 ). lower ( ). replace ( ""-"", """" ) if : interval = int ( args. pop ( 0 ) ) config. cron_worker. schedule [ job ] [ 1 ] = interval elif op == ""trigger"" : interval = config. cron_worker. schedule [ job ] [ 1 ] config. cron_worker. schedule [ job ] [ 3 ] = now - interval elif op == ""postpone"" : hours = float ( args. pop ( 0 ) ) config. cron_worker. schedule [ job ] [ 3 ] += int ( hours * 3600 ) else : raise NotImplementedError ( ""Unknown op: %s"" % op ) return self. _success ( _ ( ""Displayed CRON schedule"" ), result = { ""last_run"" : config. cron_worker. last_run, ""jobs"" : config. cron_worker. schedule. values ( ), }, )",False,op == 'interval',op == 'set',0.662510871887207 3993,"def test_raxml ( self ) : """"""Run RAxML using the wrapper."""""" cmd = RaxmlCommandline ( raxml_exe, sequences = EX_PHYLIP, model = ""PROTCATWAG"", name = ""test"" ) self. assertIn ( ""-p"", str ( cmd ) ) try : out, err = cmd ( ) self. assertGreater ( len ( out ), 0 ) self. assertEqual ( len ( err ), 0 ) tree = Phylo. read ( ""RAxML_result.test"", ""newick"" ) self. assertEqual ( tree. count_terminals ( ), 4 ) finally : for fname in [ ""RAxML_info.test"", ""RAxML_log.test"", ""RAxML_parsimonyTree.test"", ""RAxML_result.test"", ""RAxML_bestTree.test"", ] : if : os. remove ( fname )",False,os.path.isfile(fname),os.path.exists(fname),0.6483091115951538 3994,"def test_order_discount_price ( self ) : """"""Tests the price of the discount within an order."""""" order = add_order ( self. request ) for order_item in order. items. all ( ) : if : self. assertEqual ( ""%.2f"" % order_item. price_net, ""-8.40"" ) self. assertEqual ( ""%.2f"" % order_item. product_price_net, ""-8.40"" )",False,order_item.product_name == 'Summer',order_item.price_net > 0,0.651544451713562 3995,"def help ( cls, task = None ) : """"""Describe available tasks or one specific task"""""" if task is None : usage_list = [ ] for task in iter ( cls. _tasks ) : task_func = getattr ( cls, task ) usage_string = "" %s %s"" % ( cls. _prog, task_func. usage ) desc = task_func. __doc__. splitlines ( ) [ 0 ] usage_list. append ( ( usage_string, desc ) ) max_len = functools. reduce ( lambda m, item : max ( m, len ( item [ 0 ] ) ), usage_list, 0 ) print ( ""Tasks:"" ) cols = int ( os. environ. get ( ""COLUMNS"", 80 ) ) for line, desc in usage_list : task_func = getattr ( cls, task ) if desc : line = ""%s%s # %s"" % ( line, "" "" * ( max_len - len ( line ) ), desc ) if : line = line [ : cols - 3 ] + ""..."" print ( line ) else : task_func = getattr ( cls, task ) print ( ""Usage:"" ) print ( "" %s %s"" % ( cls. _prog, task_func. usage ) ) print ( """" ) print ( task_func. __doc__ )",False,len(line) > cols,cols > 3,0.6632709503173828 3996,"def readline ( self, size = None ) : if size is not None : data = self. rfile. readline ( size ) self. bytes_read += len ( data ) self. _check_length ( ) return data res = [ ] while True : data = self. rfile. readline ( 256 ) self. 
bytes_read += len ( data ) self. _check_length ( ) res. append ( data ) if : return EMPTY. join ( res )",False,len(data) < 256 or data[-1:] == '\n',len(res) > 0,0.6516119241714478 3997,def setup ( level = None ) : from pipeline. logging import pipeline_logger as logger from pipeline. log. handlers import EngineLogHandler if level in set ( logging. _levelToName. values ( ) ) : logger. setLevel ( level ) logging. _acquireLock ( ) try : for hdl in logger. handlers : if : break else : hdl = EngineLogHandler ( ) hdl. setLevel ( logger. level ) logger. addHandler ( hdl ) finally : logging. _releaseLock ( ),False,"isinstance(hdl, EngineLogHandler)",hdl is None,0.6565418243408203 3998,"def user_agent_header ( ) : if menu. options. random_agent : if : err_msg = ""The option '--random-agent' is incompatible with option '--user-agent' or switch '--mobile'."" print ( settings. print_critical_msg ( err_msg ) ) raise SystemExit ( ) else : if settings. VERBOSITY_LEVEL >= 1 : debug_msg = ""Fetching random HTTP User-Agent header. "" sys. stdout. write ( settings. print_debug_msg ( debug_msg ) ) sys. stdout. flush ( ) else : pass try : menu. options. agent = random. choice ( settings. USER_AGENT_LIST ) if settings. VERBOSITY_LEVEL >= 1 : print ( settings. SUCCESS_STATUS ) info_msg = ( ""The fetched random HTTP User-Agent header value is '"" + menu. options. agent + ""'."" ) print ( settings. print_info_msg ( info_msg ) ) except : print ( settings. FAIL_STATUS )",False,menu.options.agent != settings.DEFAULT_USER_AGENT or menu.options.mobile,settings.user_agent_string,0.653887927532196 3999,"def makeFootnotesDiv ( self, root ) : """"""Return div of footnotes as et Element."""""" if not list ( self. footnotes. keys ( ) ) : return None div = util. etree. Element ( ""div"" ) div. set ( ""class"", ""footnote"" ) util. etree. SubElement ( div, ""hr"" ) ol = util. etree. SubElement ( div, ""ol"" ) surrogate_parent = util. etree. Element ( ""div"" ) for id in self. footnotes. keys ( ) : li = util. etree. SubElement ( ol, ""li"" ) li. set ( ""id"", self. makeFootnoteId ( id ) ) self. parser. parseChunk ( surrogate_parent, self. footnotes [ id ] ) for el in list ( surrogate_parent ) : li. append ( el ) surrogate_parent. remove ( el ) backlink = util. etree. Element ( ""a"" ) backlink. set ( ""href"", ""#"" + self. makeFootnoteRefId ( id ) ) if self. md. output_format not in [ ""html5"", ""xhtml5"" ] : backlink. set ( ""rev"", ""footnote"" ) backlink. set ( ""class"", ""footnote-backref"" ) backlink. set ( ""title"", self. getConfig ( ""BACKLINK_TITLE"" ) % ( self. footnotes. index ( id ) + 1 ) ) backlink. text = FN_BACKLINK_TEXT if : node = li [ - 1 ] if node.",False,len(li),li > 0,0.6595869064331055 4000,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRING : self. dbname = iprot. readString ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. LIST : self. tbl_names = [ ] ( _etype690, _size687 ) = iprot. readListBegin ( ) for _i691 in xrange ( _size687 ) : _elem692 = iprot. readString ( ) self. tbl_names. append ( _elem692 ) iprot. readListEnd ( ) else : iprot. skip ( ftype ) else : iprot. 
skip ( ftype ) if not arg : return lst = self. get_list ( ) if not lst : return do_get_attr = isinstance ( attr, str ) good = arg if do_get_attr : try : good = getattr ( arg, attr ) except ( TypeError, AttributeError ) : pass for obj, i in zip ( lst, range ( len ( lst ) ) ) : if do_get_attr : try : test = getattr ( obj, attr ) except AttributeError : continue else : test = obj if : self. move ( absolute = i ) return True return self. move ( absolute = self. pointer )",False,test == good,good and test and (i > 0),0.6686276197433472 4002,"def two_mat_relative_entropy ( mat_1, mat_2, logbase = 2, diag = diagALL ) : """"""Return relative entropy of two matrices."""""" rel_ent = 0.0 key_list_1 = sorted ( mat_1 ) key_list_2 = sorted ( mat_2 ) key_list = [ ] sum_ent_1 = 0.0 sum_ent_2 = 0.0 for i in key_list_1 : if i in key_list_2 : key_list. append ( i ) if len ( key_list_1 )!= len ( key_list_2 ) : sys. stderr. write ( ""Warning: first matrix has more entries than the second\n"" ) if key_list_1!= key_list_2 : sys. stderr. write ( ""Warning: indices not the same between matrices\n"" ) for key in key_list : if diag == diagNO and key [ 0 ] == key [ 1 ] : continue if : continue if mat_1 [ key ] > EPSILON and mat_2 [ key ] > EPSILON : sum_ent_1 += mat_1 [ key ] sum_ent_2 += mat_2 [ key ] for key in key_list : if diag == diagNO and key [ 0 ] == key [ 1 ] : continue if : continue if mat_1 [ key ] > EPSILON and mat_2 [ key ] > EPSILON : val_1 = mat_1 [ key ] / sum_ent_1 val_2 = mat_2 [ key ] / sum",False,diag == diagONLY and key[0] != key[1],mat_1[key] > rel_ent or mat_2[key] > sum_ent_1,0.656577467918396 4003,"def calc_person ( self, index, indi_handle, fams_handle ) : working_lines = """" if index [ 1 ] % 2 == 0 or ( index [ 1 ] == 1 and self. center_use == 0 ) : if : working_lines = self. __calc_l. calc_lines ( None, None, self. _gui. get_val ( ""father_disp"" ) ) else : working_lines = self. disp_father else : if : working_lines = self. __calc_l. calc_lines ( None, None, self. _gui. get_val ( ""mother_disp"" ) ) else : working_lines = self. disp_mother if : return working_lines else : return self. __calc_l. calc_lines ( indi_handle, fams_handle, working_lines )",False,indi_handle == fams_handle == None,self.center_use == 1,0.6546370387077332 4004,"def file_update_many ( fh, points, now = None ) : if LOCK : fcntl. flock ( fh. fileno ( ), fcntl. LOCK_EX ) header = __readHeader ( fh ) if now is None : now = int ( time. time ( ) ) archives = iter ( header [ ""archives"" ] ) currentArchive = next ( archives ) currentPoints = [ ] for point in points : age = now - point [ 0 ] while ( currentArchive [ ""retention"" ] < age ) : if : currentPoints. reverse ( ) __archive_update_many ( fh, header, currentArchive, currentPoints ) currentPoints = [ ] try : currentArchive = next ( archives ) except StopIteration : currentArchive = None break if not currentArchive : break currentPoints. append ( point ) if currentArchive and currentPoints : currentPoints. reverse ( ) __archive_update_many ( fh, header, currentArchive, currentPoints ) if AUTOFLUSH : fh. flush ( ) os. fsync ( fh. fileno ( ) )",True,currentPoints,currentPoints,0.6865641474723816 4005,"def testCoreInterfaceIntInputData ( ) : result_testing = False for _ in range ( 10 ) : hsyncnet_instance = hsyncnet ( [ [ 1 ], [ 2 ], [ 3 ], [ 20 ], [ 21 ], [ 22 ] ], 2, initial_type. EQUIPARTITION, ccore = True ) analyser = hsyncnet_instance. 
process ( ) if : result_testing = True break assert result_testing",False,len(analyser.allocate_clusters(0.1)) == 2,analyser.get_num_rows() > 0,0.6531205177307129 4006,"def set_default ( self, line ) : if ""Default:"" in line : default_val = line [ line. find ( ""Default:"" ) + len ( ""Default:"" ) : ] default_val = default_val. strip ( ) float_mode = default_val [ 0 ]. isnumeric ( ) list_mode = default_val [ 0 ] == ""["" end_pos = - 1 first_point = True if float_mode or list_mode : for index, ele in enumerate ( default_val ) : end_pos = index if float_mode and ele. isnumeric ( ) : continue if float_mode and ele == ""."" and first_point : first_point = False continue elif float_mode : break elif list_mode and ele == ""]"" : end_pos += 1 break else : poses = [ max ( [ default_val. find ( "". "" ), default_val. find ( ""."" ), default_val. find ( '.""' ), ",False,end_pos != -1,len(line) > 0,0.6596670150756836 4007,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 10 : self. set_id ( d. getPrefixedString ( ) ) continue if tt == 18 : self. set_language ( d. getPrefixedString ( ) ) continue if tt == 26 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. add_field ( ). TryMerge ( tmp ) continue if : self. set_order_id ( d. getVarInt32 ( ) ) continue if tt == 40 : self. set_storage ( d. getVarInt32 ( ) ) continue if tt == 50 : length = d. getVarInt32 ( ) tmp = ProtocolBuffer. Decoder ( d. buffer ( ), d. pos ( ), d. pos ( ) + length ) d. skip ( length ) self. mutable_acl ( ). TryMerge ( tmp ) continue if tt == 56 : self. set_version ( d. getVarInt64 ( ) ) continue if tt == 0 : raise ProtocolBuffer. ProtocolBufferDecodeError ",True,tt == 32,tt == 32,0.6817759275436401 4008,"def __init__ ( self, pattern, flags = 0 ) : """"""The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""""" super ( Regex, self ). __init__ ( ) if isinstance ( pattern, basestring ) : if : warnings. warn ( ""null string passed to Regex; use Empty() instead"", SyntaxWarning, stacklevel = 2, ) self. pattern = pattern self. flags = flags try : self. re = re. compile ( self. pattern, self. flags ) self. reString = self. pattern except sre_constants. error : warnings. warn ( ""invalid pattern (%s) passed to Regex"" % pattern, SyntaxWarning, stacklevel = 2, ) raise elif isinstance ( pattern, Regex. compiledREtype ) : self. re = pattern self. pattern = self. reString = str ( pattern ) self. flags = flags else : raise ValueError ( ""Regex may only be constructed with a string or a compiled RE object"" ) self. name = _ustr ( self ) self. errmsg = ""Expected "" + self. name self. mayIndexError = False self. mayReturnEmpty = True",True,len(pattern) == 0,len(pattern) == 0,0.6583430767059326 4009,"def process_pipeline ( collection, database, pipeline, session ) : if session : raise NotImplementedError ( ""Mongomock does not handle sessions yet"" ) for stage in pipeline : for operator, options in six. iteritems ( stage ) : try : handler = _PIPELINE_HANDLERS [ operator ] except KeyError as err : raise_from ( NotImplementedError ( ""%s is not a valid operator for the aggregation pipeline. 
"" ""See http://docs.mongodb.org/manual/meta/aggregation-quick-reference/ "" ""for a complete list of valid operators."" % operator ), err, ) if : raise NotImplementedError ( ""Although '%s' is a valid operator for the aggregation pipeline, it is "" ""currently not implemented in Mongomock."" % operator ) collection = handler ( collection, database, options ) return command_cursor. CommandCursor ( collection )",False,not handler,database,0.6782671213150024 4010,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. STRUCT : self. status = TStatus ( ) self. status. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. delegationToken = iprot. readString ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",True,fid == 1,fid == 1,0.6761176586151123 4011,"def set ( self, key, data, timeout = None ) : filename = self. _key_to_filename ( key ) dirname = os. path. dirname ( filename ) if timeout is None : timeout = self. _default_timeout try : if : os. makedirs ( dirname ) f = os. open ( filename, os. O_EXCL | os. O_WRONLY | os. O_CREAT ) try : os. write ( f, pickle. dumps ( data, pickle. HIGHEST_PROTOCOL ) ) finally : os. close ( f ) os. utime ( filename, ( 0, time. time ( ) + timeout ) ) except ( IOError, OSError ) : pass",True,not os.path.exists(dirname),not os.path.exists(dirname),0.6489837169647217 4012,"def _on_protocol_state_changed ( self, state ) : if state == WANoiseProtocol. STATE_TRANSPORT : if : config = self. _config_manager. load ( self. _username ) config. server_static_public = self. _wa_noiseprotocol. rs self. _config_manager. save ( config ) self. _rs = self. _wa_noiseprotocol. rs self. _flush_incoming_buffer ( )",False,self._rs != self._wa_noiseprotocol.rs,self._username is not None,0.6569958925247192 4013,"def wait ( self ) : while True : return_code = self. _process. poll ( ) if return_code is not None : line = self. _process. stdout. readline ( ). decode ( ""utf-8"" ) if : break log. debug ( line. strip ( ""\n"" ) ) return True",False,line == '',not line,0.6751984357833862 4014,def has_price ( self ) : if self. can_decode_claim : claim = self. claim if : stream = claim. stream return stream. has_fee and stream. fee. amount and stream. fee. amount > 0 return False,False,claim.is_stream,claim,0.6609058380126953 4015,"def _in_out_vector_helper ( self, name1, name2, ceil ) : vector = [ ] stats = self. record if ceil is None : ceil = self. _get_max_rate ( name1, name2 ) maxlen = self. config. get_stats_history_length ( ) for n in [ name1, name2 ] : for i in range ( maxlen + 1 ) : if : vector. append ( float ( stats [ i ] [ n ] ) / ceil ) else : vector. append ( 0.0 ) return vector",False,i < len(stats),stats[i] is not None,0.6611990928649902 4016,"def check_path_owner ( path ) : if not hasattr ( os, ""geteuid"" ) : return True previous = None while path!= previous : if os. path. lexists ( path ) : if : try : path_uid = get_path_uid ( path ) except OSError : return False return path_uid == 0 else : return os. access ( path, os. W_OK ) else : previous, path = path, os. path. 
dirname ( path ) return False",False,os.geteuid() == 0,os.path.isdir(path),0.662284255027771 4017,"def Tool ( self, tool, toolpath = None, ** kw ) : if SCons. Util. is_String ( tool ) : tool = self. subst ( tool ) if : toolpath = self. get ( ""toolpath"", [ ] ) toolpath = list ( map ( self. _find_toolpath_dir, toolpath ) ) tool = SCons. Tool. Tool ( tool, toolpath, ** kw ) tool ( self )",True,toolpath is None,toolpath is None,0.6637915968894958 4018,"def _init_nvml ( ) : global _nvml_lib, _no_device_warned if _init_pid == os. getpid ( ) : return nvml_paths = [ ""libnvidia-ml.so"", ""libnvidia-ml.so.1"", ""libnvidia-ml.dylib"", ""nvml.dll"", ] if sys. platform [ : 3 ] == ""win"" : nvml_paths. append ( os. path. join ( os. getenv ( ""ProgramFiles"", ""C:/Program Files"" ), ""NVIDIA Corporation/NVSMI/nvml.dll"", ) ) _nvml_lib = _load_nv_library ( * nvml_paths ) if _nvml_lib is None : return try : _nvml_check_error ( _nvml_lib. nvmlInit_v2 ( ) ) except NVMLAPIError as ex : if ex. errno == NVML_DRIVER_NOT_LOADED : _nvml_lib = None if : logger. warning ( ""Failed to load libnvidia-ml: %s, no CUDA device will be enabled"", ex. message, ) _no_device_warned = True else : logger. exception",False,not _no_device_warned,_no_device_warned,0.6536405086517334 4019,"def get_tokens_unprocessed ( self, text ) : buffered = """" insertions = [ ] lng_buffer = [ ] for i, t, v in self. language_lexer. get_tokens_unprocessed ( text ) : if t is self. needle : if : insertions. append ( ( len ( buffered ), lng_buffer ) ) lng_buffer = [ ] buffered += v else : lng_buffer. append ( ( i, t, v ) ) if : insertions. append ( ( len ( buffered ), lng_buffer ) ) return do_insertions ( insertions, self. root_lexer. get_tokens_unprocessed ( buffered ) )",False,lng_buffer,i == 0,0.680876612663269 4020,"def _add_auth_information ( self ) : if self. message_type == ""logstash"" : if : return logstash_auth = requests. auth. HTTPBasicAuth ( self. username, self. password ) self. session. auth = logstash_auth elif self. message_type == ""splunk"" : auth_header = ""Splunk %s"" % self. password headers = { ""Authorization"" : auth_header, ""Content-Type"" : ""application/json"" } self. session. headers. update ( headers )",False,not self.username,self.username and self.password,0.6592991352081299 4021,"def _table_reprfunc ( self, row, col, val ) : if self. _table. column_names [ col ]. endswith ( ""Size"" ) : if : return "" %s"" % val elif val < 1024 ** 2 : return "" %.1f KB"" % ( val / 1024.0 ** 1 ) elif val < 1024 ** 3 : return "" %.1f MB"" % ( val / 1024.0 ** 2 ) else : return "" %.1f GB"" % ( val / 1024.0 ** 3 ) if col in ( 0, """" ) : return str ( val ) else : return "" %s"" % val",False,"isinstance(val, compat.string_types)",val < 1024,0.6479079127311707 4022,"def loadalbum ( albumname ) : song_ids = [ ] album = str ( albumname ) if os. path. isfile ( ""{}/songs.json"". format ( USER_PATH ) ) : with open ( ""{}/songs.json"". format ( USER_PATH ), ""r"" ) as input_file : songs_list = json. load ( input_file ) else : songs_list = api. get_all_songs ( ) with open ( ""{}/songs.json"". format ( USER_PATH ), ""w"" ) as output_file : json. dump ( songs_list, output_file ) for i in range ( 0, len ( songs_list ) ) : if : song_ids. 
append ( songs_list [ i ] [ ""id"" ] ) else : print ( ""Album not found"" ) songsnum = len ( song_ids ) return song_ids, songsnum",False,album.lower() in songs_list[i]['album'].lower(),songs_list[i],0.654258131980896 4023,"def event_handler ( event ) : if isinstance ( event, EndEpochEvent ) : test_reader = paddle. batch ( paddle. dataset. mnist. test ( ), batch_size = BATCH_SIZE ) avg_cost, acc = trainer. test ( reader = test_reader, feed_order = [ ""img"", ""label"" ] ) print ( ""avg_cost: %s"" % avg_cost ) print ( ""acc : %s"" % acc ) if : trainer. save_params ( params_dirname ) else : print ( ""BatchID {0}, Test Loss {1:0.2}, Acc {2:0.2}"". format ( event. epoch + 1, avg_cost, acc ) ) if math. isnan ( avg_cost ) : sys. exit ( ""got NaN loss, training failed."" ) elif isinstance ( event, EndStepEvent ) : print ( ( ""Step {0}, Epoch {1} Metrics {2}"". format ( event. step, event. epoch, list ( map ( numpy. array, event. metrics ) ) ) ) )",False,acc > 0.2,"hasattr(trainer, 'save_params')",0.6596013307571411 4024,"def get_spec ( spec_file, spec_name, lab_mode, pre_ ) : """"""Get spec using args processed from inputs"""""" if lab_mode in TRAIN_MODES : if : spec = spec_util. get ( spec_file, spec_name ) else : predir = pre_ if predir == ""latest"" : predir = sorted ( glob ( f""data/{spec_name}*/"" ) ) [ - 1 ] _, _, _, _, experiment_ts = util. prepath_split ( predir ) logger. info ( f""Resolved to train@{predir}"" ) spec = spec_util. get ( spec_file, spec_name, experiment_ts ) elif lab_mode == ""enjoy"" : session_spec_file = pre_ assert ( session_spec_file is not None ), ""enjoy mode must specify a `enjoy@{session_spec_file}`"" spec = util. read ( f""{session_spec_file}"" ) else : raise ValueError ( f""Unrecognizable lab_mode not of {TRAIN_MODES} or {EVAL_MODES}"" ) return spec",False,pre_ is None,spec_name in spec_util,0.6618254780769348 4025,"def __setattr__ ( self, option_name, option_value ) : if option_name in self. _options : sort = self. OPTIONS [ self. arch. name ] [ option_name ] [ 0 ] if : self. _options [ option_name ] = option_value else : raise ValueError ( 'Value for option ""%s"" must be of type %s' % ( option_name, sort ) ) else : super ( CFGArchOptions, self ). __setattr__ ( option_name, option_value )",False,"sort is None or isinstance(option_value, sort)","sort in (TAB > or isinstance(option_value, CFGArchOptions)",0.6525413393974304 4026,"def __str__ ( self ) : """"""Only keeps the True values."""""" result = [ ""SlicingSpec("" ] if self. entire_dataset : result. append ( "" Entire dataset,"" ) if self. by_class : if isinstance ( self. by_class, Iterable ) : result. append ( "" Into classes %s,"" % self. by_class ) elif : result. append ( "" Up to class %d,"" % self. by_class ) else : result. append ( "" By classes,"" ) if self. by_percentiles : result. append ( "" By percentiles,"" ) if self. by_classification_correctness : result. append ( "" By classification correctness,"" ) result. append ( "")"" ) return ""\n"". join ( result )",False,"isinstance(self.by_class, int)","isinstance(self.by_class, Number)",0.6511653065681458 4027,"def checkFilename ( filename ) : while True : if filename [ 0 ] == ""'"" : filename = filename [ 1 : ] if filename [ len ( filename ) - 1 ] == ""'"" : filename = filename [ : - 1 ] if : return filename filename = input ( ""[!] 
Cannot find '%s'.\n[*] Enter a valid name of the file containing the paths to test -> "" % filename )",False,os.path.exists(filename),"filename[len(filename) == ""'""",0.6533585786819458 4028,"def fact ( n ) : if n < 1 : raise ValueError ( ""fact() argument should be >= 1"" ) if n == 1 : return [ ] res = [ ] while n % 2 == 0 : res. append ( 2 ) n //= 2 limit = sqrt ( n + 1 ) i = 3 while i <= limit : if : res. append ( i ) n //= i limit = sqrt ( n + 1 ) else : i += 2 if n!= 1 : res. append ( n ) return res",False,n % i == 0,n % 2 == 0,0.6830201148986816 4029,"def _getString ( self ) : c = self. _getChar ( ) if c is SyntaxToken. EOF or c!= '""' : raise Exception ( ""Internal: parsing string?"" ) res = """" escape = False c = self. _peekChar ( ) while True : if c is SyntaxToken. EOF : raise Exception ( ""Hit EOF in string literal."" ) elif c == ""\n"" or c == ""\r"" : raise Exception ( ""Hit newline in string literal"" ) elif c == ""\\"" and not escape : self. _getChar ( ) escape = True elif c == '""' and not escape : self. _getChar ( ) return StringToken ( res ) elif escape : escape = False self. _getChar ( ) if c == ""n"" : res = res + ""\n"" elif c == ""t"" : res = res + ""\t"" elif : res = res = ""\r"" elif c == '""' : res = res + c elif c == ""\\"" : res = res + c else : self. _getChar ( ) res = res + c c = self. _peekChar ( )",True,c == 'r',c == 'r',0.665661096572876 4030,"def main ( ) : testmergeinto ( ) try : by = bypy. ByPy ( configdir = ConfigDir, debug = 1, verbose = 1 ) if ""refresh"" in sys. argv : by. refreshtoken ( ) if ""many"" in sys. argv : testmanyfiles ( by ) return runTests ( by ) if ""1"" in sys. argv : return time. sleep ( 10 ) by = bypy. ByPy ( configdir = ConfigDir, processes = 2, debug = 1, verbose = 1 ) runTests ( by ) if : return by = bypy. ByPy ( configdir = ConfigDir, downloader = ""aria2"", debug = 1, verbose = 1 ) downdir ( by ) by = bypy. ByPy ( configdir = ConfigDir, downloader = ""aria2"", processes = 2, debug = 1, verbose = 1 ) downdir ( by ) except KeyboardInterrupt : print ( ""User cancelled, cleaning up..."" ) finally : cleanup ( ) print ( ""Clean up done."" )",False,'2' in sys.argv,'1' in sys.argv,0.6544889211654663 4031,"def _wait_for_multipass_ready ( cls, *, echoer ) : echoer. wrapped ( ""Waiting for multipass..."" ) retry_count = 60 while retry_count : try : output = subprocess. check_output ( [ cls. provider_cmd, ""version"" ] ). decode ( ) except subprocess. CalledProcessError : output = """" except FileNotFoundError : raise errors. ProviderStartError ( provider_name = cls. provider_name, error_message = ""multipass not found - please check that it"" "" can be found in the configured PATH"", ) if : break retry_count -= 1 sleep ( 1 )",False,'multipassd' in output,retry_count == 0,0.6611419916152954 4032,"def i_rol ( self, op ) : dstSize = op. opers [ 0 ]. tsize count = self. getOperValue ( op, 1 ) tempCount = shiftMask ( count, dstSize ) if tempCount > 0 : while tempCount : val = self. getOperValue ( op, 0 ) tempCf = e_bits. msb ( val, dstSize ) self. setOperValue ( op, 0, ( val * 2 ) + tempCf ) tempCount -= 1 val = self. getOperValue ( op, 0 ) self. setFlag ( EFLAGS_CF, e_bits. lsb ( val ) ) if : val = self. getOperValue ( op, 0 ) cf = self. getFlag ( EFLAGS_CF ) self. setFlag ( EFLAGS_OF, e_bits. msb ( val, dstSize ) ^ cf ) else : self. 
setFlag ( EFLAGS_OF, False )",False,count == 1,dstSize > 0,0.6736946105957031 4033,"def _scrub_generated_timestamps ( self, target_workdir ) : """"""Remove the first line of comment from each file if it contains a timestamp."""""" for root, _, filenames in safe_walk ( target_workdir ) : for filename in filenames : source = os. path. join ( root, filename ) with open ( source, ""r"" ) as f : lines = f. readlines ( ) if len ( lines ) < 1 : return with open ( source, ""w"" ) as f : if : f. write ( lines [ 0 ] ) for line in lines [ 1 : ] : f. write ( line )",False,not self._COMMENT_WITH_TIMESTAMP_RE.match(lines[0]),len(lines) > 1,0.6501803398132324 4034,"def _encode_numpy ( values, uniques = None, encode = False, check_unknown = True ) : if uniques is None : if encode : uniques, encoded = np. unique ( values, return_inverse = True ) return uniques, encoded else : return np. unique ( values ) if encode : if : diff = _encode_check_unknown ( values, uniques ) if diff : raise ValueError ( ""y contains previously unseen labels: %s"" % str ( diff ) ) encoded = np. searchsorted ( uniques, values ) return uniques, encoded else : return uniques",True,check_unknown,check_unknown,0.6630774736404419 4035,"def check_command ( self ) : exit_status = self. p. poll ( ) if exit_status is not None : output = self. p. stdout. read ( ) lines = output. split ( ""\n"" ) if : lines = lines [ : - 1 ] command = os. path. basename ( self. command ) if exit_status : logging. warn ( ""%s: command has finished with non-zero exit status: %s"" % ( command, exit_status ) ) for line in lines : logging. warn ( ""%s: %s"" % ( command, line ) ) else : logging. debug ( ""%s: command has finished"" % command ) for line in lines : logging. debug ( ""%s: %s"" % ( command, line ) ) self. finish_json ( { ""status"" : exit_status } ) else : self. io_loop. add_timeout ( datetime. timedelta ( milliseconds = 100 ), self. check_command )",False,not lines[-1],lines[-1] > 0,0.6581934690475464 4036,"def resolve_value ( par : str, domain : Domain ) -> Dict : sampler = domain. get_sampler ( ) if isinstance ( sampler, Quantized ) : logger. warning ( ""Dragonfly search does not support quantization. "" ""Dropped quantization."" ) sampler = sampler. get_sampler ( ) if isinstance ( domain, Float ) : if : logger. warning ( ""Dragonfly does not support specific sampling methods."" "" The {} sampler will be dropped."". format ( sampler ) ) return { ""name"" : par, ""type"" : ""float"", ""min"" : domain. lower, ""max"" : domain. upper } raise ValueError ( ""Dragonfly does not support parameters of type "" ""`{}`"". format ( type ( domain ). __name__ ) )",False,domain.sampler is not None,sampler.has_sampling_method(domain),0.6656016111373901 4037,"def walk ( self, path : path_type ) -> Iterator [ Tuple [ str, List [ str ], List [ str ] ] ] : path = self. _process_path ( path ) q = [ path ] while q : curr = q. pop ( 0 ) file_selector : FileSelector = FileSelector ( curr ) dirs, files = [ ], [ ] for info in self. _arrow_fs. get_file_info ( file_selector ) : if : files. append ( info. base_name ) elif info. type == FileType. Directory : dirs. append ( info. base_name ) q. append ( info. path ) else : continue yield curr, dirs, files",True,info.type == FileType.File,info.type == FileType.File,0.6694533824920654 4038,"def _clean ( self ) : logger. info ( ""Cleaning up..."" ) if self. _process is not None : if self. _process. poll ( ) is None : for _ in range ( 3 ) : self. _process. terminate ( ) time. sleep ( 0.5 ) if : break else : self. _process. 
kill ( ) self. _process. wait ( ) logger. error ( ""KILLED"" ) if os. path. exists ( self. _tmp_dir ) : shutil. rmtree ( self. _tmp_dir ) self. _process = None self. _ws = None logger. info ( ""Cleanup complete"" )",False,self._process.poll() is not None,self._ws is None,0.6496672630310059 4039,"def __jpeg ( self, stream ) : if stream. read ( 2 ) == ""\xFF\xD8"" : while True : ( marker, code, length ) = struct. unpack ( ""!BBH"", stream. read ( 4 ) ) if marker!= 0xFF : break elif : return tuple ( reversed ( struct. unpack ( ""!xHH"", stream. read ( 5 ) ) ) ) else : stream. read ( length - 2 ) return - 1, - 1",False,code >= 192 and code <= 195,length > 2,0.6562279462814331 4040,"def describe_cluster_snapshots ( self, cluster_identifier = None, snapshot_identifier = None ) : if cluster_identifier : cluster_snapshots = [ ] for snapshot in self. snapshots. values ( ) : if : cluster_snapshots. append ( snapshot ) if cluster_snapshots : return cluster_snapshots if snapshot_identifier : if snapshot_identifier in self. snapshots : return [ self. snapshots [ snapshot_identifier ] ] raise ClusterSnapshotNotFoundError ( snapshot_identifier ) return self. snapshots. values ( )",False,snapshot.cluster.cluster_identifier == cluster_identifier,snapshot,0.6535824537277222 4041,"def addBanTicket ( self, ticket, reason = { } ) : eob = ticket. getEndOfBanTime ( self. __banTime ) with self. __lock : fid = ticket. getID ( ) oldticket = self. __banList. get ( fid ) if oldticket : reason [ ""ticket"" ] = oldticket if eob > oldticket. getEndOfBanTime ( self. __banTime ) : reason [ ""prolong"" ] = 1 btm = ticket. getBanTime ( self. __banTime ) if : diftm = ticket. getTime ( ) - oldticket. getTime ( ) if diftm > 0 : btm += diftm oldticket. setBanTime ( btm ) return False self. __banList [ fid ] = ticket self. __banTotal += 1 if self. __nextUnbanTime > eob : self. __nextUnbanTime = eob return True",False,btm != -1,btm,0.6780818700790405 4042,"def update_angles ( self, context ) : """"""Convert angle values to selected angle units"""""" if self. last_angle_units == ""RAD"" : if self. angle_units == ""RAD"" : au = 1.0 elif : au = 180.0 / pi elif self. angle_units == ""UNI"" : au = 0.5 / pi elif self. last_angle_units == ""DEG"" : if self. angle_units == ""RAD"" : au = pi / 180 elif : au = 1.0 elif self. angle_units == ""UNI"" : au = 1.0 / 360 elif self. last_angle_units == ""UNI"" : if self. angle_units == ""RAD"" : au = 2 * pi elif : au = 360 elif self. angle_units == ""UNI"" : au = 1.0 self. last_angle_units = self. angle_units self. updating = True self. twist = au * self. twist self. phase = au * self. phase self. updating = False updateNode ( self, context )",True,self.angle_units == 'DEG',self.angle_units == 'DEG',0.6523205041885376 4043,"def intersect_two ( f1, f2, work_dir, data ) : """"""Intersect two regions, handling cases where either file is not present."""""" bedtools = config_utils. get_program ( ""bedtools"", data, default = ""bedtools"" ) f1_exists = f1 and utils. file_exists ( f1 ) f2_exists = f2 and utils. file_exists ( f2 ) if not f1_exists and not f2_exists : return None elif f1_exists and not f2_exists : return f1 elif f2_exists and not f1_exists : return f2 else : out_file = os. path. join ( work_dir, ""%s-merged.bed"" % ( utils. splitext_plus ( os. path. basename ( f1 ) ) [ 0 ] ) ) if : with file_transaction ( data, out_file ) as tx_out_file : cmd = ""{bedtools} intersect -a {f1} -b {f2} > {tx_out_file}"" do. run ( cmd. 
format ( ** locals ( ) ), ""Intersect BED files"", data ) return out_file",False,not utils.file_exists(out_file),os.path.exists(out_file),0.6449155807495117 4044,"def save ( self, order ) : for a in getattr ( order, ""_answers"", [ ] ) : a. orderposition = ( a. orderposition ) a. save ( ) if : a. options. add ( * a. _options )",False,"hasattr(a, '_options')",a.options,0.6534874439239502 4045,"def main ( self ) : with open ( self. args. cachefile [ 0 ], ""rb"" ) as cachefile : pickled = pickle. Unpickler ( cachefile ) while True : try : key = pickled. load ( ) val = pickled. load ( ) except Exception : break if isinstance ( val, CoreRecipeInfo ) : pn = val. pn if self. args. recipe and self. args. recipe!= pn : continue if self. args. skip and val. skipped : continue if : out = key for member in self. args. members. split ( "","" ) : out += "": %s"" % val. __dict__. get ( member ) print ( ""%s"" % out ) else : print ( ""%s: %s"" % ( key, val. __dict__ ) ) elif not self. args. recipe : print ( ""%s %s"" % ( key, val ) )",True,self.args.members,self.args.members,0.6567422151565552 4046,"def conj ( self ) : dtype = self. dtype if issubclass ( self. dtype. type, np. complexfloating ) : if not self. flags. forc : raise RuntimeError ( ""only contiguous arrays may "" ""be used as arguments to this operation"" ) if : order = ""F"" else : order = ""C"" result = self. _new_like_me ( order = order ) func = elementwise. get_conj_kernel ( dtype ) func. prepared_async_call ( self. _grid, self. _block, None, self. gpudata, result. gpudata, self. mem_size ) return result else : return self",False,self.flags.f_contiguous,self.flags.f_in_conj,0.6529178619384766 4047,"def _process_message_line ( lineno, line ) : if line. startswith ( ""msgid_plural"" ) : in_msgid [ 0 ] = True msg = line [ 12 : ]. lstrip ( ) messages. append ( msg ) elif line. startswith ( ""msgid"" ) : in_msgid [ 0 ] = True offset [ 0 ] = lineno txt = line [ 5 : ]. lstrip ( ) if : _add_message ( ) messages. append ( txt ) elif line. startswith ( ""msgstr"" ) : in_msgid [ 0 ] = False in_msgstr [ 0 ] = True msg = line [ 6 : ]. lstrip ( ) if msg. startswith ( ""["" ) : idx, msg = msg [ 1 : ]. split ( ""]"", 1 ) translations. append ( [ int ( idx ), msg. lstrip ( ) ] ) else : translations. append ( [ 0, msg ] ) elif line. startswith ( '""' ) : if in_msgid [ 0 ] : messages [ - 1 ] += u""\n"" + line. rstrip ( ) elif in_msgstr [ 0 ] : translations [ - 1 ] [ 1 ] += u""\n"" + line. rstrip ( )",False,messages,txt.startswith(message),0.698043704032898 4048,"def M ( b, d ) : c = [ ] g = 0 while g < len ( b ) : i = 0 if ""a"" <= b [ g ] and ""z"" >= b [ g ] : i = ord ( b [ g ] [ 0 ] ) - 97 else : i = int ( b [ g ] ) + 26 f = 0 while f < 36 and f < len ( d ) : if ( isinstance ( d [ f ], int ) or d [ f ]. isnumeric ( ) ) and int ( d [ f ] ) == i : i = f break f += 1 if : c. append ( i - 26 ) else : c. append ( chr ( i + 97 ) ) g += 1 tmp = """" for x in c : tmp += str ( x ) return tmp",False,25 < i,i >= 34,0.6914723515510559 4049,"def visit_Module ( self, mod : ast27. Module ) -> MypyFile : self. type_ignores = { } for ti in mod. type_ignores : parsed = parse_type_ignore_tag ( ti. tag ) if : self. type_ignores [ ti. lineno ] = parsed else : self. fail ( INVALID_TYPE_IGNORE, ti. lineno, - 1 ) body = self. fix_function_overloads ( self. translate_stmt_list ( mod. body, module = True ) ) return MypyFile ( body, self. imports, False, self. 
type_ignores, )",False,parsed is not None,parsed,0.6679314374923706 4050,"def ListMessages ( viewed = None ) : if type ( viewed ) is str : if : viewed = None else : viewed = viewed == ""True"" messages = list ( List ( viewed = viewed ). order_by ( Message. last_logged_at. desc ( ) ). limit ( 50 ) ) total_messages = List ( ). count ( ) oc = ObjectContainer ( title2 = _ ( ""Messages"" ) ) for m in messages : if m. type is None or m. summary is None : continue thumb = None if m. type == Message. Type. Exception : thumb = ( R ( ""icon-exception-viewed.png"" ) if m. viewed else R ( ""icon-exception.png"" ) ) elif m. type == Message. Type. Info : thumb = ( R ( ""icon-notification-viewed.png"" ) if m. viewed else R ( ""icon-notification.png"" ) ) elif m. type in ERROR_TYPES : thumb = R ( ""icon-error-viewed.png"" ) if m. viewed else R ( ""icon-error.png"" ) oc. add ( DirectoryObject ( key = Callback ( ViewMessage, error_id = m. id ), title = pad_title",False,viewed == 'None',viewed == False,0.6621967554092407 4051,"def streamErrorHandler ( self, conn, error ) : name, text = ""error"", error. getData ( ) for tag in error. getChildren ( ) : if tag. getNamespace ( ) == NS_XMPP_STREAMS : if : text = tag. getData ( ) else : name = tag. getName ( ) if name in stream_exceptions. keys ( ) : exc = stream_exceptions [ name ] else : exc = StreamError raise exc ( ( name, text ) )",False,tag.getName() == 'text',tag.getNamespace() == NS_XMPP_STREAMS,0.6582554578781128 4052,"def work ( ) : """"""work"""""" _graph_wrapper = copy. copy ( graph_wrapper ) _graph_wrapper. node_feat_tensor_dict = { } for batch_train_samples, batch_train_labels in batch_info : start_nodes = batch_train_samples nodes = start_nodes edges = [ ] for max_deg in samples : pred_nodes = graph. sample_predecessor ( start_nodes, max_degree = max_deg ) for dst_node, src_nodes in zip ( start_nodes, pred_nodes ) : for src_node in src_nodes : edges. append ( ( src_node, dst_node ) ) last_nodes = nodes nodes = [ nodes, pred_nodes ] nodes = flat_node_and_edge ( nodes ) start_nodes = list ( set ( nodes ) - set ( last_nodes ) ) if : break subgraph = graph. subgraph ( nodes = nodes, edges = edges, with_node_feat = True, with_edge_feat = True ) sub_node_index = subgraph. reindex_from_parrent_nodes ( batch_train_samples ) feed_dict = _graph_wrapper. to_feed ( subgraph ) feed_dict [ ""node_label"" ] = batch_train_labels feed_dict [ ""node_index"" ] = sub_node_index feed_dict [ ""parent_node_index"" ] = np. array ( nodes,",False,len(start_nodes) == 0,len(batch_info) == 0,0.6545653343200684 4053,"def parse_hicup_logs ( self, f ) : """"""Parse a HiCUP summary report"""""" if not f [ ""fn"" ]. endswith ( "".txt"" ) : return None header = [ ] lines = f [ ""f"" ]. splitlines ( ) for l in lines : s = l. split ( ""\t"" ) if : if s [ 0 ]!= ""File"" : return None header = s [ 1 : ] else : s_name = self. clean_s_name ( s [ 0 ], f [ ""root"" ] ) if s_name. startswith ( ""HiCUP_output/"" ) : s_name = s_name [ 13 : ] parsed_data = { } for idx, num in enumerate ( s [ 1 : ] ) : try : parsed_data [ header [ idx ] ] = float ( num ) except : parsed_data [ header [ idx ] ] = num parsed_data [ ""Duplicate_Read_Pairs"" ] = ( parsed_data [ ""Valid_Pairs"" ] - parsed_data [ ""Deduplication_Read_Pairs_Uniques"" ] ) if s_name in self. hicup_data : log. debug ( ""Duplicate sample name found! Overwriting: {}"". format ( s_name ) )",False,len(header) == 0,len(s) > 0,0.6548022031784058 4054,"def test_despine_with_offset ( self ) : f, ax = plt. subplots ( ) for side in self. sides : nt. 
assert_equal ( ax. spines [ side ]. get_position ( ), self. original_position ) utils. despine ( ax = ax, offset = self. offset ) for side in self. sides : is_visible = ax. spines [ side ]. get_visible ( ) new_position = ax. spines [ side ]. get_position ( ) if : nt. assert_equal ( new_position, self. offset_position ) else : nt. assert_equal ( new_position, self. original_position ) plt. close ( ""all"" )",True,is_visible,is_visible,0.6615877747535706 4055,"def inner ( obj, p, cycle ) : typ = type ( obj ) if ( basetype is not None and typ is not basetype and typ. __repr__!= basetype. __repr__ ) : return p. text ( typ. __repr__ ( obj ) ) if cycle : return p. text ( ""{...}"" ) p. begin_group ( 1, start ) keys = obj. keys ( ) if not ( p. max_seq_length and len ( obj ) >= p. max_seq_length ) : try : keys = sorted ( keys ) except Exception : pass for idx, key in p. _enumerate ( keys ) : if : p. text ( "","" ) p. breakable ( ) p. pretty ( key ) p. text ( "": "" ) p. pretty ( obj [ key ] ) p. end_group ( 1, end )",False,idx,idx == len(obj) - 1,0.6968404054641724 4056,"def export_assets ( self, asset_dir, asset_prefix = """" ) : assets = { } config = self. _config. copy ( ) for key, value in config. items ( ) : if : basename = os. path. basename ( value ) config [ key ] = basename assets [ basename ] = value config_name = ""%stokenizer_config.yml"" % asset_prefix config_path = os. path. join ( asset_dir, config_name ) assets [ config_name ] = config_path with tf. io. gfile. GFile ( config_path, ""w"" ) as config_file : yaml. dump ( config, stream = config_file, default_flow_style = False ) return assets",False,"isinstance(value, str) and tf.io.gfile.exists(value)",key in assets,0.647707998752594 4057,"def assert_counts ( res, lang, files, blank, comment, code ) : for line in res : fields = line. split ( ) if len ( fields ) >= 5 : if : self. assertEqual ( files, int ( fields [ 1 ] ) ) self. assertEqual ( blank, int ( fields [ 2 ] ) ) self. assertEqual ( comment, int ( fields [ 3 ] ) ) self. assertEqual ( code, int ( fields [ 4 ] ) ) return self. fail ( ""Found no output line for {}"". format ( lang ) )",False,fields[0] == lang,len(fields) == 4,0.6669728755950928 4058,"def decide_file_icon ( file ) : if file. state == File. ERROR : return FileItem. icon_error elif isinstance ( file. parent, Track ) : if file. state == File. NORMAL : return FileItem. icon_saved elif : return FileItem. match_pending_icons [ int ( file. similarity * 5 + 0.5 ) ] else : return FileItem. match_icons [ int ( file. similarity * 5 + 0.5 ) ] elif : return FileItem. icon_file_pending else : return FileItem. icon_file",True,file.state == File.PENDING,file.state == File.PENDING,0.663223922252655 4059,"def do_request_ ( self, request ) : host = request. host if not host : raise URLError ( ""no host given"" ) if request. data is not None : data = request. data if isinstance ( data, str ) : msg = ( ""POST data should be bytes, an iterable of bytes, "" ""or a file object. It cannot be of type str."" ) raise TypeError ( msg ) if : request. add_unredirected_header ( ""Content-type"", ""application/x-www-form-urlencoded"" ) if not request. has_header ( ""Content-length"" ) and not request. has_header ( ""Transfer-encoding"" ) : content_length = self. _get_content_length ( request ) if content_length is not None : request. add_unredirected_header ( ""Content-length"", str ( content_length ) ) else : request. add_unredirected_header ( ""Transfer-encoding"", ""chunked"" ) sel_host = host if request. 
has_proxy ( ) : scheme, sel = _splittype ( request. selector ) sel_host, sel_path = _splithost ( sel ) if not request. has_header ( ""Host"" ) : request. add_unredirected_header ( ""Host"", sel_host ) for name, value in self.",False,not request.has_header('Content-type'),self.redirect_empty_tags,0.6466919183731079 4060,"def print_topics ( self, header, cmds, cmdlen, maxcol ) : """"""make help menu more readable"""""" if cmds : self. stdout. write ( header ) self. stdout. write ( ""\n"" ) if : self. stdout. write ( self. ruler * len ( header ) ) self. stdout. write ( ""\n"" ) for cmd in cmds : help_msg = getattr ( self, ""do_{}"". format ( cmd ) ). __doc__ self. stdout. write ( ""{:<16}"". format ( cmd ) ) self. stdout. write ( help_msg ) self. stdout. write ( ""\n"" ) self. stdout. write ( ""\n"" )",False,self.ruler,cmdlen,0.6640629172325134 4061,"def _authorized_sid ( self, jid, sid, ifrom, iq ) : with self. _preauthed_sids_lock : if : del self. _preauthed_sids [ ( jid, sid, ifrom ) ] return True return False",False,"(jid, sid, ifrom) in self._preauthed_sids",jid in self._preauthed_sids,0.6584668159484863 4062,"def post ( self, forum_id = None, slug = None ) : if forum_id is not None : forum_instance = Forum. query. filter_by ( id = forum_id ). first_or_404 ( ) forumsread = ForumsRead. query. filter_by ( user_id = real ( current_user ). id, forum_id = forum_instance. id ). first ( ) TopicsRead. query. filter_by ( user_id = real ( current_user ). id, forum_id = forum_instance. id ). delete ( ) if : forumsread = ForumsRead ( ) forumsread. user = real ( current_user ) forumsread. forum = forum_instance forumsread. last_read = time_utcnow ( ) forumsread. cleared = time_utcnow ( ) db. session. add ( forumsread ) db. session. commit ( ) flash ( _ ( ""Forum %(forum)s marked as read."", forum = forum_instance. title ), ""success"" ) return redirect ( forum_instance. url ) ForumsRead. query. filter_by ( user_id = real ( current_user ). id ). delete ( ) TopicsRead. query. filter_by ( user_id = real ( current_user ). id ). delete ( ) forums = Forum. query. all ( ) forumsread_list = [ ] for forum_instance in forums : forumsread = ForumsRead ( ) forumsread. user",False,not forumsread,forum_id,0.6758328676223755 4063,"def generate_lorem_ipsum ( n = 5, html = True, min = 20, max = 100 ) : """"""Generate some lorem impsum for the template."""""" from jinja2. constants import LOREM_IPSUM_WORDS from random import choice, randrange words = LOREM_IPSUM_WORDS. split ( ) result = [ ] for _ in range ( n ) : next_capitalized = True last_comma = last_fullstop = 0 word = None last = None p = [ ] for idx, _ in enumerate ( range ( randrange ( min, max ) ) ) : while True : word = choice ( words ) if word!= last : last = word break if next_capitalized : word = word. capitalize ( ) next_capitalized = False if idx - randrange ( 3, 8 ) > last_comma : last_comma = idx last_fullstop += 2 word += "","" if idx - randrange ( 10, 20 ) > last_fullstop : last_comma = last_fullstop = idx word += ""."" next_capitalized = True p.",False,not p.endswith('.'),html,0.6513152122497559 4064,"def writeLibraryGeometry ( fp, meshes, config, shapes = None ) : progress = Progress ( len ( meshes ), None ) fp. write ( ""\n \n"" ) for mIdx, mesh in enumerate ( meshes ) : if : shape = None else : shape = shapes [ mIdx ] writeGeometry ( fp, mesh, config, shape ) progress. step ( ) fp. write ( "" \n"" )",True,shapes is None,shapes is None,0.6831676363945007 4065,"def test_pad_cval_is_tuple ( self ) : aug = iaa. 
Pad ( px = ( 0, 1, 0, 0 ), pad_mode = ""constant"", pad_cval = ( 50, 52 ), keep_size = False ) image = np. zeros ( ( 1, 1 ), dtype = np. uint8 ) seen = [ 0, 0, 0 ] for _ in sm. xrange ( 300 ) : observed = aug. augment_image ( image ) if observed [ 0, 1 ] == 50 : seen [ 0 ] += 1 elif observed [ 0, 1 ] == 51 : seen [ 1 ] += 1 elif : seen [ 2 ] += 1 else : assert False assert np. all ( [ 100 - 50 < v < 100 + 50 for v in seen ] )",False,"observed[0, 1] == 52","observed[0, 2] == 50",0.652206301689148 4066,"def find_widget_by_id ( self, id, parent = None ) : """"""Recursively searches for widget with specified ID"""""" if parent == None : if id in self : return self [ id ] parent = self [ ""editor"" ] for c in parent. get_children ( ) : if hasattr ( c, ""get_id"" ) : if c. get_id ( ) == id : return c if : r = self. find_widget_by_id ( id, c ) if not r is None : return r return None",False,"isinstance(c, Gtk.Container)",id in self,0.6492255926132202 4067,"def setupterm ( term = None, fd = - 1 ) : if fd == - 1 : fd = sys. stdout. fileno ( ) if _initialised_setupterm : return None if term is None : term = ffi. NULL err = ffi. new ( ""int *"" ) if lib. setupterm ( term, fd, err ) == lib. ERR : err = err [ 0 ] if : raise error ( ""setupterm: could not find terminal"" ) elif err == - 1 : raise error ( ""setupterm: could not find terminfo database"" ) else : raise error ( ""setupterm: unknown error"" ) globals ( ) [ ""_initialised_setupterm"" ] = True return None",False,err == 0,err == -2,0.6795936822891235 4068,"def testShows5LatestHunts ( self ) : timestamp = rdfvalue. RDFDatetime. Now ( ) - rdfvalue. Duration. From ( 1, rdfvalue. DAYS ) for i in range ( 20 ) : with test_lib. FakeTime ( timestamp + rdfvalue. Duration. From ( 1000 * i, rdfvalue. SECONDS ) ) : if : descr = ""foo-%d"" % i creator = ""another"" else : descr = ""bar-%d"" % i creator = self. token. username self. CreateSampleHunt ( descr, creator = creator ) self. Open ( ""/"" ) for i in range ( 11, 20, 2 ) : self. WaitUntil ( self. IsElementPresent, ""css=grr-user-dashboard "" ""div[name=RecentlyCreatedHunts]:contains('bar-%d')"" % i, ) self. WaitUntilNot ( self. IsElementPresent, ""css=grr-user-dashboard "" ""div[name=RecentlyCreatedHunts]:contains('foo')"", )",False,i % 2 == 0,self.token.username,0.673223614692688 4069,"def after_end ( session, transaction ) : caller_info = find_caller ( inspect. stack ( ) [ 1 : ] ) with open_transactions_lock : if : return open_time = time. time ( ) - open_transactions [ transaction ] [ 0 ] msg = ""Transaction 0x%08X closed %s (open time %s)"" % ( id ( transaction ), caller_info, open_time, ) if open_time > 2 : log. warning ( msg ) else : log. debug ( msg ) del open_transactions [ transaction ]",True,transaction not in open_transactions,transaction not in open_transactions,0.6662943363189697 4070,"def _decode_field ( message, field, value ) : """"""Decode optional or required field."""""" if field. type == FieldDescriptor. TYPE_MESSAGE : decode ( getattr ( message, field. name ), value ) else : try : if : value = base64. b64decode ( value ) setattr ( message, field. name, value ) except ( ValueError, TypeError ) as e : logger. warning ( ""Message %r ignoring field %s: %s"", message. __class__. __name__, field. name, e, )",False,field.type == FieldDescriptor.TYPE_BYTES,field.type == FieldDescriptor.TYPE_BASE64,0.6631757020950317 4071,"def __call__ ( cls, * args, ** kwargs ) : session = kwargs. pop ( ""session"", None ) instance = object. __new__ ( cls, * args, ** kwargs ) instance. 
session = session instance. config = config. Config ( [ ] ) config_section = getattr ( instance, ""configSection"", None ) switch = getattr ( instance, ""commandLineSwitch"", None ) if session is not None and config_section is not None : instance. config = session. get ( config_section ) always_on = instance. config. as_bool ( ""always-on"", default = instance. alwaysOn ) instance. __init__ ( * args, ** kwargs ) if always_on : instance. register ( ) else : if : short_opt, long_opt, help = switch instance. addOption ( instance. _register_cb, short_opt, long_opt, help ) return instance",False,switch is not None,switch,0.6584217548370361 4072,"def getApiLevel ( self ) : if not self. api_level : try : data = self. call ( ""app/apilevel"", auth = False ) self. api_level = float ( data. get ( ""apilevel"" ) ) except HTTPError as e : sc = e. response. status_code if : log. error ( ""This version of NZBVortex isn't supported. Please update to 2.8.6 or higher"" ) else : log. error ( ""NZBVortex doesn't seem to be running or maybe the remote option isn't enabled yet: %s"", traceback. format_exc ( 1 ), ) return self. api_level",False,sc == 403,sc.status_code == 200,0.6735562086105347 4073,"def close ( self, wait = True, abort = False ) : """"""Close the socket connection."""""" if not self. closed and not self. closing : self. closing = True self. server. _trigger_event ( ""disconnect"", self. sid, run_async = False ) if not abort : self. send ( packet. Packet ( packet. CLOSE ) ) self. closed = True self. queue. put ( None ) if : self. queue. join ( )",True,wait,wait,0.6937553882598877 4074,"def test_k_is_stochastic_parameter ( self ) : aug = iaa. MedianBlur ( k = iap. Choice ( [ 3, 5 ] ) ) seen = [ False, False ] for i in sm. xrange ( 100 ) : observed = aug. augment_image ( self. base_img ) if np. array_equal ( observed, self. blur3x3 ) : seen [ 0 ] += True elif : seen [ 1 ] += True else : raise Exception ( ""Unexpected result in MedianBlur@2"" ) if all ( seen ) : break assert np. all ( seen )",False,"np.array_equal(observed, self.blur5x5)","np.array_equal(observed, self.blur2x3)",0.6499741077423096 4075,"def parse ( self, s ) : self. tokens = self. lexer. tokenize ( s ) self. _current_token = 0 self. _tokens_len = len ( self. tokens ) self. _mark_locations = [ ] if self. _tokens_len < 0 : raise SyntaxError ( ""could not find any entries"" ) self. database = database = Database ( ) while True : try : self. _advance ( ) except IndexError : self. unexpected_token ( ""preamble, string, entry_start, or eof"" ) token_type = self. token_type if token_type == ""PREAMBLE"" : preamble = self. preamble ( ) database. add_preamble ( self. _handle_value ( preamble. contents ) ) elif : string = self. string ( ) database. add_macro ( string. key, self. _handle_value ( string. value ) ) elif token_type == ""ENTRY_START"" : entry_node = self. entry ( ) entry = Entry ( entry_node. entry_type, entry_node. key. value ) for field in entry_node. fields : entry [ field. key ] = self. _handle_value ( field. value ) if field. key in Name. NAME_FIELDS : entry [ field. key ] = "" and "". join ( ( unicode ( Name ( s ) ) for s in tokenize_list ( entry [ field. key ] )",True,token_type == 'STRING',token_type == 'STRING',0.659516453742981 4076,"def tax_edit ( request, tax_id, response_format = ""html"" ) : ""Tax edit"" tax = get_object_or_404 ( Tax, pk = tax_id ) if not request. user. profile. has_permission ( tax, mode = ""w"" ) and not request. user. profile. 
is_admin ( ""treeio_finance"" ) : return user_denied ( request, ""You don't have access to this Tax"", response_format ) if request. POST : if : form = TaxForm ( request. user. profile, request. POST, instance = tax ) if form. is_valid ( ) : tax = form. save ( ) return HttpResponseRedirect ( reverse ( ""finance_tax_view"", args = [ tax. id ] ) ) else : return HttpResponseRedirect ( reverse ( ""finance_tax_view"", args = [ tax. id ] ) ) else : form = TaxForm ( request. user. profile, instance = tax ) return render_to_response ( ""finance/tax_edit"", { ""form"" : form, ""tax"" : tax }, context_instance = RequestContext ( request ), response_format = response_format, )",False,'cancel' not in request.POST,tax.id > 0,0.6586582064628601 4077,"def printConnections ( switches ) : ""Compactly print connected nodes to each switch"" for sw in switches : output ( ""%s: "" % sw ) for intf in sw. intfList ( ) : link = intf. link if : intf1, intf2 = link. intf1, link. intf2 remote = intf1 if intf1. node!= sw else intf2 output ( ""%s(%s) "" % ( remote. node, sw. ports [ intf ] ) ) output ( ""\n"" )",False,link,link and link.ports,0.707377016544342 4078,"def _get_vector ( self, word = ""house"" ) : try : import lmdb with self. env. begin ( ) as txn : vector = txn. get ( word. encode ( encoding = ""UTF-8"" ) ) if : word_vector = pickle. loads ( vector ) vector = None else : word_vector = np. zeros ( ( self. k, ), dtype = np. float32 ) except lmdb. Error : self. env. close ( ) self. env = lmdb. open ( self. store_path, readonly = True, max_readers = 2048, max_spare_txns = 2, lock = False, ) return self. _get_vector ( word ) except ModuleNotFoundError : logger. warning ( ""-"" * 100 ) logger. warning ( 'ATTENTION! The library ""lmdb"" is not installed!' ) logger. warning ( 'To use LMDB, please first install with ""pip install lmdb""' ) logger. warning ( ""-"" * 100 ) word_vector = np. zeros ( ( self. k, ), dtype = np. float32 ) return word_vector",True,vector,vector,0.6913517713546753 4079,"def shutdownAutoSave ( self ) : if not self. shutdown : self. shutdown = 1 self. _cv. acquire ( ) self. _cv. notify ( ) self. _cv. release ( ) if : log. debug ( ""waiting for thread to terminate"" ) self. _thread. join ( 3 ) log. debug ( ""thread has terminated"" ) self. _thread = None",True,self._thread,self._thread,0.6864792108535767 4080,"def _iter_source_val_assigns ( src ) : for node in _iter_source_assigns ( src ) : try : val = python_util. ast_param_val ( node. value ) except TypeError : pass else : target = node. targets [ - 1 ] if : yield node, target, node. value, val",False,"isinstance(target, ast.Name)",target,0.6488354206085205 4081,"def __call__ ( self, X, y, net ) : if self. shuffle : _shuffle_arrays ( [ X, y ] if y is not None else [ X ], self. random ) if self. eval_size : if : kf = KFold ( y. shape [ 0 ], round ( 1.0 / self. eval_size ) ) else : kf = StratifiedKFold ( y, round ( 1.0 / self. eval_size ) ) train_indices, valid_indices = next ( iter ( kf ) ) X_train = _sldict ( X, train_indices ) y_train = _sldict ( y, train_indices ) X_valid = _sldict ( X, valid_indices ) y_valid = _sldict ( y, valid_indices ) else : X_train, y_train = X, y X_valid, y_valid = _sldict ( X, slice ( 0, 0 ) ), _sldict ( y, slice ( 0, 0 ) ) return X_train, X_valid, y_train, y_valid",False,net.regression or not self.stratify,y.dim() == 2,0.6510807275772095 4082,def scope ( self ) : if : self. lazy_init_lock_. acquire ( ) try : if : self. scope_ = Scope ( ) finally : self. lazy_init_lock_. release ( ) return self. 
scope_,True,self.scope_ is None,self.scope_ is None,0.6571006774902344 4083,"def finish_task ( task ) : if task. stream : if task. stream [ ""args"" ]. get ( ""progress"" ) : update_stream ( task, status = ""complete"" ) if task. stream [ ""args"" ]. get ( ""entry_dump"" ) : entries = [ entry. store for entry in task. entries ] task. stream [ ""queue"" ]. put ( EntryDecoder ( ). encode ( { ""entry_dump"" : entries } ) ) if : task. stream [ ""queue"" ]. put ( json. dumps ( { ""summary"" : { ""accepted"" : len ( task. accepted ), ""rejected"" : len ( task. rejected ), ""failed"" : len ( task. failed ), ""undecided"" : len ( task. undecided ), ""aborted"" : task. aborted, ""abort_reason"" : task. abort_reason, } } ) )",False,task.stream['args'].get('summary'),task.has_tab,0.6579177975654602 4084,"def __str__ ( self ) -> str : text = ""\n"" for k, r in self. result. items ( ) : text += ""{}\n"". format ( ""#"" * 40 ) if : text += ""# {} (failed)\n"". format ( k ) else : text += ""# {} (succeeded)\n"". format ( k ) text += ""{}\n"". format ( ""#"" * 40 ) for sub_r in r : text += ""**** {}\n"". format ( sub_r. name ) text += ""{}\n"". format ( sub_r ) return text",False,r.failed,k is None,0.6732447147369385 4085,"def get_updated_action_list ( cls, base_action_list : list, other_action_list : list, ) -> List [ dict ] : base_action_list_dict = { action [ ""name"" ] : action for action in base_action_list } for other_action in other_action_list : other_action_name = other_action [ ""name"" ] if other_action_name in base_action_list_dict : if : base_action_list_dict. pop ( other_action_name ) else : nested_update ( base_action_list_dict [ other_action_name ], other_action ) else : base_action_list_dict [ other_action_name ] = other_action return list ( base_action_list_dict. values ( ) )",False,other_action['action'] is None,other_action_name in other_action_list_dict,0.6547008752822876 4086,"def post_process_extensions ( self, extensions, resp_obj, request, action_args ) : for ext in extensions : response = None if inspect. isgenerator ( ext ) : try : with ResourceExceptionHandler ( ) : response = ext. send ( resp_obj ) except StopIteration : continue except Fault as ex : response = ex else : try : with ResourceExceptionHandler ( ) : response = ext ( req = request, resp_obj = resp_obj, ** action_args ) except exception. VersionNotFoundForAPIMethod : continue except Fault as ex : response = ex if : return response return None",False,response,response is not None,0.7019158601760864 4087,"def generate ( g = generator, m = module ) : try : for test in g ( ) : test_func, arg = self. parseGeneratedTest ( test ) if : test_func = getattr ( m, test_func ) yield FunctionTestCase ( test_func, arg = arg, descriptor = g ) except KeyboardInterrupt : raise except : exc = sys. exc_info ( ) yield Failure ( exc [ 0 ], exc [ 1 ], exc [ 2 ], address = test_address ( generator ) )",False,not callable(test_func),"hasattr(m, test_func)",0.6521293520927429 4088,"def crawl ( self, url, path = None ) : self. results = { } self. url = urlparse ( url ) if path : self. url = self. url. _replace ( path = path ) self. url = self. url. _replace ( fragment = """" ) self. _add_target ( url, 1 ) self. _spawn_new_worker ( ) while self. threads : try : for t in self. threads : t. join ( 1 ) if : self. threads. remove ( t ) except KeyboardInterrupt : sys. exit ( 1 ) return self. results",False,not t.is_alive(),t in self.threads,0.6542649865150452 4089,"def draw ( self, dc, f, ** key ) : dc. SetPen ( wx. 
Pen ( Setting [ ""color"" ], width = 1, style = wx. SOLID ) ) dc. SetTextForeground ( Setting [ ""tcolor"" ] ) font = wx. Font ( 10, wx. FONTFAMILY_DEFAULT, wx. FONTSTYLE_NORMAL, wx. FONTWEIGHT_NORMAL, False ) dc. SetFont ( font ) dc. DrawLines ( [ f ( * i ) for i in self. buf ] ) for i in self. buf : dc. DrawCircle ( f ( * i ), 2 ) for pg in self. body : plg = Polygon ( pg ) dc. DrawLines ( [ f ( * i ) for i in pg ] ) for i in pg : dc. DrawCircle ( f ( * i ), 2 ) area, xy = plg. area, plg. centroid if : area *= self. unit [ 0 ] ** 2 dc. DrawText ( ""%.1f"" % area, f ( xy. x, xy. y ) )",False,self.unit != None,self.unit[0] > 0,0.656484842300415 4090,"def read_embeddings ( file_enc, skip_lines = 0, filter_set = None ) : embs = dict ( ) total_vectors_in_file = 0 with open ( file_enc, ""rb"" ) as f : for i, line in enumerate ( f ) : if i < skip_lines : continue if not line : break if : continue l_split = line. decode ( ""utf8"" ). strip ( ). split ( "" "" ) if len ( l_split ) == 2 : continue total_vectors_in_file += 1 if filter_set is not None and l_split [ 0 ] not in filter_set : continue embs [ l_split [ 0 ] ] = [ float ( em ) for em in l_split [ 1 : ] ] return embs, total_vectors_in_file",False,len(line) == 0,line and len(line) > 0,0.6593886613845825 4091,"def run ( self ) : check_paths = [ ] for root, dirs, files in os. walk ( ""."" ) : for file in files : if : path = os. path. join ( root, file ) check_paths. append ( path ) for pattern in self. exclude_patterns : exclude = lambda path : not fnmatch. fnmatch ( path, pattern ) check_paths = list ( filter ( exclude, check_paths ) ) bad_paths = list ( filter ( self. _header_bad, check_paths ) ) if bad_paths : raise MissingHeaderError ( bad_paths )",False,file.endswith('.py') or file.endswith('.c'),file,0.6443114280700684 4092,"def __call__ ( self, epoch_nr, update_nr, net, stepper, logs ) : for log_name in self. logs_to_check : log = get_by_path ( logs, log_name ) if : self. message ( ""NaN or inf detected in {}!"". format ( log_name ) ) raise StopIteration ( ) if self. check_parameters : if not net. handler. is_fully_finite ( net. buffer. parameters ) : self. message ( ""NaN or inf detected in parameters!"" ) raise StopIteration ( ) if self. check_training_loss and ""rolling_training"" in logs : rtrain = logs [ ""rolling_training"" ] if ""total_loss"" in rtrain : loss = rtrain [ ""total_loss"" ] else : loss = rtrain [ ""Loss"" ] if not np. all ( np. isfinite ( loss ) ) : self. message ( ""NaN or inf detected in rolling training loss!"" ) raise StopIteration ( )",False,not np.all(np.isfinite(log)),log,0.6494882106781006 4093,"def get_next_requests ( self, max_n_requests, ** kwargs ) : next_pages = [ ] partitions = set ( kwargs. pop ( ""partitions"", [ ] ) ) for partition_id in range ( 0, self. queue_partitions ) : if : continue results = self. queue. get_next_requests ( max_n_requests, partition_id ) next_pages. extend ( results ) self. logger. debug ( ""Got %d requests for partition id %d"", len ( results ), partition_id ) return next_pages",False,partition_id not in partitions,len(partitions) == 0,0.6595758199691772 4094,"def __exit__ ( self, exc_type, exc_val, exc_tb ) : saved_values = self. saved_values del self. saved_values for name, get, restore in self. resource_info ( ) : current = get ( ) original = saved_values. pop ( name ) if : self. changed = True restore ( original ) if not self. quiet : print >> sys. stderr, ( ""Warning -- {} was modified by {}"". format ( name, self. testname ) ) if self. verbose > 1 : print >> sys. 
stderr, ( "" Before: {}\n After: {} "". format ( original, current ) ) return False",False,current != original,current != restore,0.6939069628715515 4095,"def compute ( self, x, y = None, targets = None ) : if targets is None : targets = self. out_params in_params = list ( self. in_x ) if len ( in_params ) == 1 : args = [ x ] else : args = list ( zip ( * x ) ) if y is None : pipe = self. pipe else : pipe = self. train_pipe if : args. append ( y ) else : args += list ( zip ( * y ) ) in_params += self. in_y return self. _compute ( * args, pipe = pipe, param_names = in_params, targets = targets )",False,len(self.in_y) == 1,y is None,0.6501501798629761 4096,"def ziplist_entry_overhead ( self, value ) : if self. is_integer_type ( value ) : header = 1 if value < 12 : size = 0 elif value < 2 ** 8 : size = 1 elif value < 2 ** 16 : size = 2 elif : size = 3 elif value < 2 ** 32 : size = 4 else : size = 8 else : size = len ( value ) if size <= 63 : header = 1 elif size <= 16383 : header = 2 else : header = 5 prev_len = 1 if size < 254 else 5 return prev_len + header + size",False,value < 2 ** 24,value < 2 ** 8,0.670365571975708 4097,"def _check_start_pipeline_execution_errors ( graphene_info, execution_params, execution_plan ) : if execution_params. step_keys : for step_key in execution_params. step_keys : if : raise UserFacingGraphQLError ( graphene_info. schema. type_named ( ""InvalidStepError"" ) ( invalid_step_key = step_key ) )",False,not execution_plan.has_step(step_key),step_key not in graphene_info.schema.keys(),0.6502330303192139 4098,"def _status_changed ( self, device, alert = _status. ALERT. NONE, reason = None ) : assert device is not None if _log. isEnabledFor ( _INFO ) : if : _log. info ( ""status_changed %s: %s, %s (%X) %s"", device, ""present"" if bool ( device ) else ""removed"", device. status, alert, reason or """", ) else : _log. info ( ""status_changed %s: %s %s, %s (%X) %s"", device, ""paired"" if bool ( device ) else ""unpaired"", ""online"" if device. online else ""offline"", device. status, alert, reason or """", ) if : assert device == self. receiver self. status_changed_callback ( device, alert, reason ) return assert device. receiver == self. receiver if not device : _log. warn ( ""device %s was unpaired, ghosting"", device ) device = _ghost ( device ) self. status_changed_callback ( device,",False,device.kind is None,self._log,0.6557866334915161 4099,"def __getitem__ ( self, block_id ) : if not block_id : raise ValueError ( ""None or empty block_id is an invalid identifier"" ) with self. _lock : try : value = self. _cache [ block_id ] value. touch ( ) return value. value except KeyError : if : block = self. _block_store [ block_id ] self. __setitem__ ( block_id, block ) return block raise",True,block_id in self._block_store,block_id in self._block_store,0.6613422632217407 4100,"def xform_name ( name, sep = ""_"", _xform_cache = _xform_cache ) : if sep in name : return name key = ( name, sep ) if key not in _xform_cache : if : is_special = _special_case_transform. search ( name ) matched = is_special. group ( ) name = name [ : - len ( matched ) ] + sep + matched. lower ( ) s1 = _first_cap_regex. sub ( r""\1"" + sep + r""\2"", name ) s2 = _number_cap_regex. sub ( r""\1"" + sep + r""\2"", s1 ) transformed = _end_cap_regex. sub ( r""\1"" + sep + r""\2"", s2 ). 
lower ( ) _xform_cache [ key ] = transformed return _xform_cache [ key ]",False,_special_case_transform.search(name) is not None,_special_case_transform is not None,0.6481164693832397 4101,"def _encode_body ( self, data, files, json ) : body = None if isinstance ( data, ( str, bytes ) ) : if : raise ValueError ( ""data cannot be a string or bytes when "" ""files are present"" ) body = to_bytes ( data, self. charset ) elif data and is_streamed ( data ) : if : raise ValueError ( ""data cannot be an iterator when "" ""files are present"" ) if ""content-length"" not in self. headers : self. headers [ ""transfer-encoding"" ] = ""chunked"" return data elif data or files : if : body, content_type = self. _encode_files ( data, files ) else : body, content_type = self. _encode_params ( data ) self. headers [ ""Content-Type"" ] = content_type elif json : body = _json. dumps ( json ). encode ( self. charset ) self. headers [ ""Content-Type"" ] = ""application/json"" if body : self. headers [ ""content-length"" ] = str ( len ( body ) ) return body",True,files,files,0.6860677003860474 4102,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRUCT : self. o1 = AlreadyExistsException ( ) self. o1. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRUCT : self. o2 = InvalidObjectException ( ) self. o2. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. STRUCT : self. o3 = MetaException ( ) self. o3. read ( iprot ) else : iprot. skip ( ftype ) elif : <",True,fid == 4,fid == 4,0.6739135980606079 4103,"def webfinger ( environ, start_response, _ ) : query = parse_qs ( environ [ ""QUERY_STRING"" ] ) try : rel = query [ ""rel"" ] resource = query [ ""resource"" ] [ 0 ] except KeyError : resp = BadRequest ( ""Missing parameter in request"" ) else : if : resp = BadRequest ( ""Bad issuer in request"" ) else : wf = WebFinger ( ) resp = Response ( wf. response ( subject = resource, base = OAS. baseurl ) ) return resp ( environ, start_response )",False,rel != [OIC_ISSUER],resource is None,0.6621189117431641 4104,"def number_loop ( queue, mappings, opc ) : while len ( queue ) > 0 : code1 = queue. popleft ( ) code2 = queue. popleft ( ) assert code1. co_name == code2. co_name linestarts_orig = findlinestarts ( code1 ) linestarts_uncompiled = list ( findlinestarts ( code2 ) ) mappings += [ [ line, offset2line ( offset, linestarts_uncompiled ) ] for offset, line in linestarts_orig ] bytecode1 = Bytecode ( code1, opc ) bytecode2 = Bytecode ( code2, opc ) instr2s = bytecode2. get_instructions ( code2 ) seen = set ( [ code1. co_name ] ) for instr in bytecode1. get_instructions ( code1 ) : next_code1 = None if iscode ( instr. argval ) : next_code1 = instr. argval if : next_code2 = None while not next_code2 : try : instr2 = next ( instr2s ) if iscode ( instr2. argval ) : next_code2 = instr2. argval pass <",False,next_code1,next_code1 is None,0.6636793613433838 4105,"def default ( self, line ) : if len ( line ) == 2 and line [ 1 ] == "":"" : self. execute_remote ( line ) if len ( self. __outputBuffer. strip ( ""\r\n"" ) ) > 0 : print ( self. 
__outputBuffer ) self. __outputBuffer = """" else : self. __pwd = line self. execute_remote ( ""cd "" ) self. __pwd = self. __outputBuffer. strip ( ""\r\n"" ) self. prompt = self. __pwd + "">"" self. __outputBuffer = """" else : if : x = inspect. currentframe ( ) y = inspect. getouterframes ( x, 2 ) return self. send_data ( line )",False,line != '',self.prompt,0.6819586753845215 4106,"def __extract_member_from_pointer ( self, cexpr, obj ) : parents_type = map ( lambda x : idaapi. get_ctype_name ( x. cexpr. op ), list ( self. parents ) [ : 0 : - 1 ] ) parents = map ( lambda x : x. cexpr, list ( self. parents ) [ : 0 : - 1 ] ) logger. debug ( ""Parsing expression {}. Parents - {}"". format ( obj. name, parents_type ) ) if parents_type [ 0 ] in ( ""idx"", ""add"" ) : if parents [ 0 ]. y. op!= idaapi. cot_num : return offset = parents [ 0 ]. y. numval ( ) * cexpr. type. get_ptrarr_objsize ( ) cexpr = self. parent_expr ( ) if parents_type [ 0 ] == ""add"" : del parents_type [ 0 ] del parents [ 0 ] elif parents_type [ 0 : 2 ] == [ ""cast"", ""add"" ] : if parents [ 1 ]. y. op!= idaapi. cot_num : return if : size = parents [ 0 ]. type. get_ptrarr_objsize ( ) else : size = 1 offset = parents [ 1 ]. theother ( parents [ 0 ] ). numval ( ) * size cexpr = parents [ 1 ] del parents_type [ 0 : 2 ] del parents [ 0 : 2 ] else : offset = 0 return self. __extract",False,parents[0].type.is_ptr(),len(parents) == 1,0.6515637636184692 4107,"def url ( self, name ) : provider_type = self. provider [ ""type"" ]. lower ( ) obj = self. _get_object ( name ) if not obj : return None try : url = self. driver. get_object_cdn_url ( obj ) except NotImplementedError as e : object_path = ""{}/{}"". format ( self. bucket, obj. name ) if ""s3"" in provider_type : base_url = ""https://%s"" % self. driver. connection. host url = urljoin ( base_url, object_path ) elif : url = urljoin ( ""https://storage.googleapis.com"", object_path ) elif ""azure"" in provider_type : base_url = ""https://%s.blob.core.windows.net"" % self. provider [ ""user"" ] url = urljoin ( base_url, object_path ) elif ""backblaze"" in provider_type : url = urljoin ( ""api.backblaze.com/b2api/v1/"", object_path ) else : raise e return url",False,'google' in provider_type,'s3_1' in provider_type,0.6531490087509155 4108,"def validate_subevent ( self, subevent ) : if self. context [ ""event"" ]. has_subevents : if not subevent : raise ValidationError ( ""You need to set a subevent."" ) if : raise ValidationError ( ""The specified subevent does not belong to this event."" ) elif subevent : raise ValidationError ( ""You cannot set a subevent for this event."" ) return subevent",False,subevent.event != self.context['event'],subevent and self.context['event'].subevents not in subevent,0.6512810587882996 4109,"def qa ( ctx ) : """"""Run a quality report"""""" header ( qa. __doc__ ) with ctx. cd ( ROOT ) : info ( ""Ensure PyPI can render README and CHANGELOG"" ) info ( ""Building dist package"" ) dist = ctx. run ( ""python setup.py sdist"", pty = True, warn = False, hide = True ) if : error ( ""Unable to build sdist package"" ) exit ( ""Quality check failed"", dist. return_code ) readme_results = ctx. run ( ""twine check dist/*"", pty = True, warn = True, hide = True ) if readme_results. failed : print ( readme_results. stdout ) error ( ""README and/or CHANGELOG is not renderable by PyPI"" ) else : success ( ""README and CHANGELOG are renderable by PyPI"" ) if readme_results. failed : exit ( ""Quality check failed"", readme_results. 
return_code ) success ( ""Quality check OK"" )",True,dist.failed,dist.failed,0.6672093272209167 4110,"def formfield ( self, form_class = forms. CharField, ** kwargs ) : ""Returns a django.forms.Field instance for this database Field."" defaults = { ""required"" : not self. blank, ""label"" : capfirst ( self. verbose_name ), ""help_text"" : self. help_text, } if self. has_default ( ) : if callable ( self. default ) : defaults [ ""initial"" ] = self. default defaults [ ""show_hidden_initial"" ] = True else : defaults [ ""initial"" ] = self. get_default ( ) if self. choices : include_blank = self. blank or not ( self. has_default ( ) or ""initial"" in kwargs ) defaults [ ""choices"" ] = self. get_choices ( include_blank = include_blank ) defaults [ ""coerce"" ] = self. to_python if : defaults [ ""empty_value"" ] = None form_class = forms. TypedChoiceField for k in kwargs. keys ( ) : if k not in ( ""coerce"", ""empty_value"", ""choices"", ""required"", ""widget"", ""label"", ""initial"", ""help_text"", <",False,self.null,self.empty_value is None,0.6616407036781311 4111,def heal ( self ) : if not self. doctors : return proc_ids = self. _get_process_ids ( ) for proc_id in proc_ids : proc = PipelineProcess. objects. get ( id = proc_id ) if not proc. is_alive or proc. is_frozen : continue for dr in self. doctors : if : dr. cure ( proc ) break,False,dr.confirm(proc),dr.has_proc(),0.6547521352767944 4112,"def generator ( self, data ) : for ( proc_as, key_buf_ptr ) in data : key_buf = proc_as. read ( key_buf_ptr, 24 ) if : continue key = """". join ( ""%02X"" % ord ( k ) for k in key_buf ) yield ( 0, [ str ( key ), ], )",False,not key_buf,len(key_buf) == 0,0.6629738807678223 4113,"def test_sas_add_inside_range ( self, tables_cosmos_account_name, tables_primary_cosmos_account_key ) : url = self. account_url ( tables_cosmos_account_name, ""cosmos"" ) await self. _set_up ( tables_cosmos_account_name, tables_primary_cosmos_account_key ) try : token = generate_table_sas ( tables_cosmos_account_name, tables_primary_cosmos_account_key, self. table_name, permission = TableSasPermissions ( add = True ), expiry = datetime. utcnow ( ) + timedelta ( hours = 1 ), start_pk = ""test"", start_rk = ""test1"", end_pk = ""test"", end_rk = ""test1"", ) service = TableServiceClient ( self. account_url ( tables_cosmos_account_name, ""cosmos"" ), credential = token, ) table = service. get_table_client ( self. table_name ) entity = self. _create_random_entity_dict ( ""test"", ""test1"" ) await table. create_entity ( entity = entity ) resp = await self. table. get_entity ( ""test"", ""test1"" ) self. _assert_default_entity ( resp ) finally : await self. _tear_down ( ) if",False,self.is_live,tables_cosmos_account_name,0.6563279628753662 4114,"def log_url ( self, url_data ) : """"""Write one node."""""" node = self. get_node ( url_data ) if node is not None : self. writeln ( u' ""%s"" [' % dotquote ( node [ ""label"" ] ) ) if self. has_part ( ""realurl"" ) : self. writeln ( u' href=""%s"",' % dotquote ( node [ ""url"" ] ) ) if node [ ""dltime"" ] >= 0 and self. has_part ( ""dltime"" ) : self. writeln ( u"" dltime=%d,"" % node [ ""dltime"" ] ) if : self. writeln ( u"" size=%d,"" % node [ ""size"" ] ) if node [ ""checktime"" ] and self. has_part ( ""checktime"" ) : self. writeln ( u"" checktime=%d,"" % node [ ""checktime"" ] ) if self. has_part ( ""extern"" ) : self. writeln ( u"" extern=%d,"" % node [ ""extern"" ] ) self. 
writeln ( u"" ];"" )",False,node['size'] >= 0 and self.has_part('dlsize'),self.has_part('dltime'),0.648543655872345 4115,"def suggest ( self, trial_id : str ) -> Optional [ Dict ] : if not self. _dim_dict or not self. optimizer : raise RuntimeError ( UNDEFINED_SEARCH_SPACE. format ( cls = self. __class__. __name__, space = ""dim_dict"" ) ) if not self. _metric or not self. _mode : raise RuntimeError ( UNDEFINED_METRIC_MODE. format ( cls = self. __class__. __name__, metric = self. _metric, mode = self. _mode ) ) _solution = self. optimizer. suggest ( ) if _solution == ""FINISHED"" : if : return Searcher. FINISHED else : return None if _solution : self. solution_dict [ str ( trial_id ) ] = _solution _x = _solution. get_x ( ) new_trial = dict ( zip ( self. _dim_keys, _x ) ) self. _live_trial_mapping [ trial_id ] = new_trial return unflatten_dict ( new_trial )",False,ray.__version__ >= '0.8.7',trial_id is None,0.6513925194740295 4116,"def update ( self, values : List ) -> None : assert isinstance ( values, list ) self. num_iters += 1 current_stats = [ ] for i in range ( len ( values ) ) : if : continue if isinstance ( values [ i ], list ) is False : values [ i ] = [ values [ i ] ] if self. metrics [ i ] [ 0 ] is None : self. metrics [ i ] [ 0 ] = np. mean ( values [ i ] ) self. metrics [ i ] [ 1 ] = np. mean ( values [ i ] ) self. metrics [ i ] [ 2 ] = np. mean ( values [ i ] ) else : self. metrics [ i ] [ 0 ] = ( self. metrics [ i ] [ 0 ] * ( self. num_iters - 1 ) + np. mean ( values [ i ] ) ) / self. num_iters self. metrics [ i ] [ 1 ] = 0.95 * self. metrics [ i ] [ 1 ] + 0.05 * np. mean ( values [ i ] ) self. metrics [ i ] [ 2 ] = np. mean ( values [ i ] ) self. metrics [ i ] [ 0 ] = float ( self. metrics [ i ] [ 0 ] ) self. metrics [ i ] [ 1 ] = float ( self. metrics [ i ] [ 1 ] ) self. metrics [ i ] [ 2 ] = float ( self. metrics [ i ] [ 2 ] ) current_stats. append ( self. metrics [ i ] ) self. stats. append ( copy. deepcopy ( current_stats ) )",False,values[i] is None,i == 0,0.660628080368042 4117,def __create_table ( self ) : for i in range ( 256 ) : crcreg = i for j in range ( 8 ) : if : crcreg = self. __CRCPOLYNOMIAL ^ ( crcreg >> 1 ) else : crcreg >>= 1 self. __crctable [ i ] = crcreg,False,crcreg & 1 != 0,j == 6,0.6702196002006531 4118,"def __call__ ( self, text, ** kargs ) : words = jieba. tokenize ( text, mode = ""search"" ) token = Token ( ) for ( w, start_pos, stop_pos ) in words : if : continue token. original = token. text = w token. pos = start_pos token. startchar = start_pos token. endchar = stop_pos yield token",False,not accepted_chars.match(w) and len(w) <= 1,w == token.text,0.6505142450332642 4119,"def _update_state ( self ) : if ( self. is_atari_env and hasattr ( self, ""current_ale_lives"" ) and self. current_ale_lives!= self. env. unwrapped. ale. lives ( ) ) : if : self. done = True elif self. phase == RunPhase. TEST and not self. done : self. _press_fire ( ) self. _update_ale_lives ( ) if self. state and ""desired_goal"" in self. state. keys ( ) : self. goal = self. state [ ""desired_goal"" ]",False,self.phase == RunPhase.TRAIN or self.phase == RunPhase.HEATUP,self.phase == RunPhase.RUNNING,0.6573666930198669 4120,"def action_delete ( self, request, attachments ) : deleted_attachments = [ ] desynced_posts = [ ] for attachment in attachments : if : deleted_attachments. append ( attachment. pk ) desynced_posts. append ( attachment. post_id ) if desynced_posts : with transaction. atomic ( ) : for post in Post. objects. filter ( id__in = desynced_posts ) : self. 
delete_from_cache ( post, deleted_attachments ) for attachment in attachments : attachment. delete ( ) message = _ ( ""Selected attachments have been deleted."" ) messages. success ( request, message )",False,attachment.post,attachment.post_id is not None,0.6725665330886841 4121,def daemonize_if_required ( self ) : if self. options. daemon : if : log. shutdown_multiprocessing_logging_listener ( daemonizing = True ) salt. utils. process. daemonize ( ) self. _setup_mp_logging_listener ( ),False,self._setup_mp_logging_listener_ is True,not self.mp_logging_listener,0.6535711288452148 4122,"def bundle_directory ( self, dirpath ) : """"""Bundle all modules/packages in the given directory."""""" dirpath = os. path. abspath ( dirpath ) for nm in os. listdir ( dirpath ) : nm = _u ( nm ) if nm. startswith ( ""."" ) : continue itempath = os. path. join ( dirpath, nm ) if os. path. isdir ( itempath ) : if : self. bundle_package ( itempath ) elif nm. endswith ( "".py"" ) : self. bundle_module ( itempath )",False,"os.path.exists(os.path.join(itempath, '__init__.py'))","nm.endswith("".py')",0.6438416838645935 4123,def all_left_col_indexes ( self ) : result = [ ] for idx in self. last_left_col_indexes : if idx not in self. select_col_indexes : result. append ( idx ) elif : result. append ( idx ) return result,False,idx in self.left_col_indexes,idx in self.select_col_indexes,0.6604180932044983 4124,"def getfileinfo ( name ) : finfo = FInfo ( ) with io. open ( name, ""rb"" ) as fp : data = fp. read ( 512 ) if : finfo. Type = ""TEXT"" fp. seek ( 0, 2 ) dsize = fp. tell ( ) dir, file = os. path. split ( name ) file = file. replace ( "":"", ""-"", 1 ) return file, finfo, dsize, 0",False,0 not in data,data,0.665570855140686 4125,"def update ( self ) : if self. xPos > self. chart. width or self. xPos < 0 : self. xVel = - self. xVel self. xPos += self. xVel self. xDampening = max ( self. xDampening - 0.1, 0 ) self. xVel *= self. xDampening if self. yPos > self. chart. height or self. yPos < 0 : self. yVel = - self. yVel self. yPos += self. yVel self. yDampening = max ( self. yDampening - 0.1, 0 ) self. yVel *= self. yDampening if : self. yPos = self. chart. height self. xVel = 0 self. gravity = 0 self. kill = True self. xPos += self. xVel self. yPos += self. yVel self. yVel += self. gravity",False,self.yPos > self.chart.height - 4 and abs(self.yVel) < 0.1,self.yPos > self.chart.width,0.6562715768814087 4126,"def Run ( self, cmd_val ) : attrs, arg_r = flag_spec. ParseCmdVal ( ""type"", cmd_val ) arg = arg_types. type ( attrs. attrs ) if arg. f : funcs = { } else : funcs = self. funcs status = 0 r = _ResolveNames ( arg_r. Rest ( ), funcs, self. aliases, self. search_path ) for kind, name in r : if kind is None : self. errfmt. StderrLine ( ""type: %r not found"" % name ) status = 1 else : if arg. t : print ( kind ) elif arg. p : if kind == ""file"" : print ( name ) elif arg. P : if kind == ""file"" : print ( name ) else : resolved = self. search_path. Lookup ( name ) if : status = 1 else : print ( resolved ) else : <",False,resolved is None,resolved is not None,0.6768121123313904 4127,"def scale_axes ( self ) : """"""Set the axes limits appropriate to the images we have"""""" max_x = max_y = 0 for image_row in self. image_rows : if : shape = image_row. data. pixel_data. shape max_x = max ( shape [ 1 ], max_x ) max_y = max ( shape [ 0 ], max_y ) if self. __axes_scale is not None : init_x, init_y = self. __axes_scale if float ( max_x )!= init_x [ 1 ] or float ( max_y )!= init_y [ 0 ] : self. __axes_scale = None self. 
frame. navtoolbar. _nav_stack. clear ( ) elif init_x!= self. axes. get_xlim ( ) or init_y!= self. axes. get_ylim ( ) : return if max_x > 0 and max_y > 0 : self. axes. set_xlim ( 0, max_x ) self. axes. set_ylim ( 0, max_y ) self. axes. invert_yaxis ( ) self. __axes_scale = ( ( 0.0, float ( max_x ) ), ( float ( max_y ), 0.0 ) ) self. frame. navtoolbar. reset ( )",False,image_row.data.mode != MODE_HIDE,"hasattr(image_row.data, 'pixel_data')",0.6491804122924805 4128,"def boot_time ( ) : """"""Return the system boot time expressed in seconds since the epoch."""""" global BOOT_TIME f = open ( ""/proc/stat"", ""rb"" ) try : BTIME = b ( ""btime"" ) for line in f : if : ret = float ( line. strip ( ). split ( ) [ 1 ] ) BOOT_TIME = ret return ret raise RuntimeError ( ""line 'btime' not found"" ) finally : f. close ( )",False,line.startswith(BTIME),line.startswith(b'btime'),0.653969943523407 4129,"def Run ( self ) : """"""The main run method of the client."""""" for thread in self. _threads. values ( ) : thread. start ( ) logging. info ( START_STRING ) while True : dead_threads = [ tn for ( tn, t ) in self. _threads. items ( ) if not t. isAlive ( ) ] if : raise FatalError ( ""These threads are dead: %r. Shutting down..."" % dead_threads ) time. sleep ( 10 )",False,dead_threads,len(dead_threads) == 0,0.6729820966720581 4130,"def iter_renderables ( column_count : int, ) -> Iterable [ Tuple [ int, Optional [ RenderableType ] ] ] : item_count = len ( renderables ) if self. column_first : width_renderables = list ( zip ( renderable_widths, renderables ) ) column_lengths : List [ int ] = [ item_count // column_count ] * column_count for col_no in range ( item_count % column_count ) : column_lengths [ col_no ] += 1 row_count = ( item_count + column_count - 1 ) // column_count cells = [ [ - 1 ] * column_count for _ in range ( row_count ) ] row = col = 0 for index in range ( item_count ) : cells [ row ] [ col ] = index column_lengths [ col ] -= 1 if column_lengths [ col ] : row += 1 else : col += 1 row = 0 for index in chain. from_iterable ( cells ) : if : break yield width_renderables [ index ] else : yield from zip ( renderable_widths, renderables ) if item_count % column_count : for _ in range ( column_count - ( item_count % column_count ) ) : yield 0, None",False,index == -1,width_renderables is not None and index % width_renderables[row],0.6668978333473206 4131,"def get_in_inputs ( key, data ) : if isinstance ( data, dict ) : for k, v in data. items ( ) : if k == key : return v elif : out = get_in_inputs ( key, v ) if out : return out elif isinstance ( data, ( list, tuple ) ) : out = [ get_in_inputs ( key, x ) for x in data ] out = [ x for x in out if x ] if out : return out [ 0 ]",False,"isinstance(v, (list, tuple, dict))","isinstance(v, dict)",0.6514726281166077 4132,"def copy_files ( imgs, txts, out_dir ) : assert len ( imgs ) == len ( txts ) if not os. path. exists ( out_dir ) : os. makedirs ( out_dir ) for img, txt in tqdm ( zip ( imgs, txts ), total = len ( imgs ), desc = ""Writing to {}"". format ( out_dir ) ) : if : logger. warning ( ""Image file at {} not found"". format ( img ) ) continue if not os. path. exists ( txt ) : logger. warning ( ""Ground truth file at {} not found"". format ( txt ) ) continue shutil. copyfile ( img, os. path. join ( out_dir, os. path. basename ( img ) ) ) shutil. copyfile ( txt, os. path. join ( out_dir, os. path. 
basename ( txt ) ) )",True,not os.path.exists(img),not os.path.exists(img),0.647951602935791 4133,"def _check_number_of_sessions ( ) : nb_desktop_sessions = sessions. get_number_of_desktop_sessions ( ignore_gdm = True ) if nb_desktop_sessions > 1 : print ( ""WARNING : There are %d other desktop sessions open. The GPU switch will not become effective until you have manually"" "" logged out from ALL desktop sessions.\n"" ""Continue? (y/N)"" % ( nb_desktop_sessions - 1 ) ) confirmation = ask_confirmation ( ) if : sys. exit ( 0 )",False,not confirmation,not confirmation or confirmation == False,0.6686437129974365 4134,"def __init__ ( self, * args, ** kwargs ) : self. max_workers = kwargs. pop ( ""max_workers"", None ) super ( AutoscalePool, self ). __init__ ( * args, ** kwargs ) if self. max_workers is None : settings_absmem = getattr ( settings, ""SYSTEM_TASK_ABS_MEM"", None ) if : total_memory_gb = int ( settings_absmem ) else : total_memory_gb = ( psutil. virtual_memory ( ). total >> 30 ) + 1 self. max_workers = total_memory_gb * 5 self. max_workers = max ( self. min_workers, self. max_workers )",True,settings_absmem is not None,settings_absmem is not None,0.650885820388794 4135,"def check ( cls ) : cls. subclasses |= cls. discover_subclasses ( cls. BASE_SEARCH_CLASS. __subclasses__ ( ) ) cls. exceptions |= cls. discover_subclasses ( cls. exceptions ) success = True for subclass in cls. subclasses : if subclass in cls. exceptions : continue if : print ( f""Subclass {subclass.__module__}.{subclass.__name__} is missing a"" "" table of common attributes."" ) success = False for method_name in dir ( subclass ) : if method_name in cls. METHOD_EXCEPTIONS : continue method = getattr ( subclass, method_name ) if ( callable ( method ) or isinstance ( method, cachedproperty ) ) and not method_name. startswith ( ""_"" ) : if isinstance ( method, cachedproperty ) : method = method. func if cls. HAS_CODE_BLOCK. search ( method. __doc__ ) : if cls. CODE_BLOCK_IMPROPER_INDENT. search ( method. __doc__ ) : print ( ""Code block for method"" f"" {subclass.__module__}.{subclass.__name__}.{method.__name__}"" <",False,not cls.HAS_ATTRIBUTE_TABLE.search(subclass.__doc__),not success,0.6575620770454407 4136,"def _invoke ( self, args ) : arity = len ( args ) if self. _is_variadic : if arity < self. _arity : runtime_error ( u""Wrong number of args to fn: got "" + unicode ( str ( arity ) ) + u"", expected at least "" + unicode ( str ( self. _arity ) ) ) else : if arity!= self. _arity : runtime_error ( u""Wrong number of args to fn: got "" + unicode ( str ( arity ) ) + u"", expected "" + unicode ( str ( self. _arity ) ) ) exb, tokens = self. prep_exb ( args ) cd = jit. promote ( self. _cd ) jit_ffi_call ( cd, self. _f_ptr, exb ) ret_val = self. get_ret_val_from_buffer ( exb ) for x in range ( len ( args ) ) : t = tokens [ x ] if : t. finalize_token ( ) lltype. free ( exb, flavor = ""raw"" ) keepalive_until_here ( args ) return ret_val",False,t is not None,t.has_token(),0.6632798314094543 4137,"def main ( ) -> None : prog, * args = argv if not set ( args ). issubset ( cmds ) : print ( ""usage:"", prog, "" "". join ( ""[%s]"" % k for k in cmds ) ) print ( ) print ( ""Run the given tests. 
If given no arguments, run everything except mypyc-extra."" ) exit ( 1 ) if not args : args = DEFAULT_COMMANDS [ : ] status = 0 if ""self"" in args and ""lint"" in args : proc = start_background_cmd ( ""lint"" ) cmd_status = run_cmd ( ""self"" ) if : status = cmd_status cmd_status = wait_background_cmd ( ""lint"", proc ) if : status = cmd_status args = [ arg for arg in args if arg not in ( ""self"", ""lint"" ) ] for arg in args : cmd_status = run_cmd ( arg ) if : status = cmd_status exit ( status )",False,cmd_status,args,0.6722873449325562 4138,"def iterate ( dataset, batch_size, epochs, deterministic, pad_batches ) : n_samples = dataset. _X_shape [ 0 ] if deterministic : sample_perm = np. arange ( n_samples ) if batch_size is None : batch_size = n_samples for epoch in range ( epochs ) : if not deterministic : sample_perm = np. random. permutation ( n_samples ) batch_idx = 0 num_batches = np. math. ceil ( n_samples / batch_size ) while batch_idx < num_batches : start = batch_idx * batch_size end = min ( n_samples, ( batch_idx + 1 ) * batch_size ) indices = range ( start, end ) perm_indices = sample_perm [ indices ] if isinstance ( dataset. _X, np. ndarray ) : X_batch = dataset. _X [ perm_indices ] else : X_batch = load_image_files ( [ dataset. _X [ i ] for i in perm_indices ] ) if : y_batch = dataset. _y [ perm_indices ] else : y_batch = load_image_files ( [ dataset. _y [ i ] for i in perm_indices ] ) w_batch = dataset. _w [ perm_indices ] ids_batch = dataset. _ids [ perm_indices ] if pad_batches : ",False,"isinstance(dataset._y, np.ndarray)",pad_batches,0.6508355140686035 4139,"def SmartStr ( string, encoding = ""utf8"" ) : """"""Forces the string to be an encoded byte string."""""" if six. PY3 : if : return string. encode ( encoding, ""ignore"" ) elif isinstance ( string, bytes ) : return string elif hasattr ( string, ""__bytes__"" ) : return string. __bytes__ ( ) return bytes ( SmartUnicode ( string ), ""utf8"" ) if six. PY2 : if : return string elif hasattr ( string, ""__bytes__"" ) : return string. __bytes__ ( ) return str ( string ). encode ( encoding )",False,"isinstance(string, str)","isinstance(string, ignore)",0.6524001359939575 4140,"def body ( self ) : """"""Access the body of a constraint expression."""""" if self. _constructed : if : raise ValueError ( ""Accessing the body of SimpleConstraint "" ""'%s' before the Constraint has been assigned "" ""an expression. There is currently "" ""nothing to access."" % ( self. cname ( True ) ) ) return _GeneralConstraintData. body. fget ( self ) raise ValueError ( ""Accessing the body of constraint '%s' "" ""before the Constraint has been constructed (there "" ""is currently no value to return)."" % ( self. cname ( True ) ) )",False,len(self._data) == 0,self.name(True) == False,0.6555230617523193 4141,"def ToggleShellMode ( self, enableShellMode = None ) : if enableShellMode == None : if self. mode == ""ShellMode"" : self. mode = ""SlicesMode"" elif : self. mode = ""ShellMode"" elif enableShellMode : self. mode = ""ShellMode"" else : self. mode = ""SlicesMode"" input_color = ""red"" if self. mode == ""SlicesMode"" : self. MarkerDefine ( INPUT_START, stc. STC_MARK_BOXMINUS, ""white"", input_color ) self. MarkerDefine ( INPUT_START_FOLDED, stc. STC_MARK_BOXPLUS, ""white"", input_color ) self. MarkerDefine ( INPUT_MIDDLE, stc. STC_MARK_VLINE, ""white"", input_color ) self. MarkerDefine ( INPUT_END, stc. STC_MARK_LCORNER, ""white"", input_color ) elif self. mode == ""ShellMode"" : self. MarkerDefine ( INPUT_START, stc. 
STC_MARK_ARROWS, input_color, ""white"" ) self. MarkerDefine ( INPUT_START_FOLDED, stc. STC_MARK_BOXPLUS, ""white"", input_color ) self. MarkerDefine ( INPUT_MIDDLE, stc. STC_MARK_DOTDOTDOT, input_color, ""white"" ) self. MarkerDefine ( INPUT_END, stc. STC_MARK_DOTDOTDOT, input_color, ""white"" )",True,self.mode == 'SlicesMode',self.mode == 'SlicesMode',0.6655997037887573 4142,"def recent_events ( self, events ) : frame = events. get ( ""frame"" ) if frame : try : if self. format == ""jpeg"" : data = frame. jpeg_buffer elif self. format == ""yuv"" and hasattr ( frame, ""yuv_buffer"" ) : data = frame. yuv_buffer elif : data = frame. bgr elif self. format == ""gray"" and hasattr ( frame, ""gray"" ) : data = frame. gray else : raise AttributeError ( ) except AttributeError : logger. warning ( '{}s are not compatible with format ""{}""'. format ( type ( frame ), self. format ) ) return blob = data events [ ""frame.world"" ] = [ { ""topic"" : ""frame"", ""width"" : frame. width, ""height"" : frame. height, ""index"" : frame. index, ""timestamp"" : frame. timestamp, ""format"" : self. format, <",False,"self.format == 'bgr' and hasattr(frame, 'bgr')",self.format == 'bgr',0.6542139053344727 4143,"def download_subtitle ( self, subtitle ) : url = ""http://zip.{}/{}.zip"". format ( self. server_url, subtitle. subtitle_id ) r = self. session. get ( url, headers = { ""Referer"" : subtitle. page_link }, timeout = 10 ) r. raise_for_status ( ) if len ( r. content ) == 0 : return with zipfile. ZipFile ( io. BytesIO ( r. content ) ) as zf : namelist = [ n for n in zf. namelist ( ) if os. path. splitext ( n ) [ 1 ] in [ "".srt"", "".sub"" ] ] if : raise ProviderError ( ""More than one file to unzip"" ) subtitle. content = fix_line_ending ( zf. read ( namelist [ 0 ] ) )",True,len(namelist) > 1,len(namelist) > 1,0.6517270803451538 4144,"def _sync_remote_run ( remote_run ) : assert remote_run. remote remote_name = remote_run. remote. name pull_args = click_util. Args ( remote = remote_name, delete = False ) try : remote_impl_support. pull_runs ( [ remote_run ], pull_args ) except Exception as e : if : log. exception ( ""pull %s from %s"", remote_run. id, remote_name ) else : log. error ( ""error pulling %s from %s: %s"", remote_run. id, remote_name, e )",False,log.getEffectiveLevel() <= logging.DEBUG,delete,0.653241753578186 4145,"def _open_server ( self ) : log_path = path. join ( self. experiment_path if self. experiment_path is not None else ""."", ""logs"", ""CARLA_LOG_{}.txt"". format ( self. port ), ) if not os. path. exists ( os. path. dirname ( log_path ) ) : os. makedirs ( os. path. dirname ( log_path ) ) with open ( log_path, ""wb"" ) as out : cmd = [ path. join ( environ. get ( ""CARLA_ROOT"" ), ""CarlaUE4.sh"" ), self. map_path, ""-benchmark"", ""-carla-server"", ""-fps={}"". format ( 30 / self. frame_skip ), ""-world-port={}"". format ( self. port ), ""-windowed -ResX={} -ResY={}"". format ( self. server_width, self. server_height ), ""-carla-no-hud"", ] if : cmd. append ( ""-carla-settings={}"". format ( self. config ) ) p = subprocess. 
Popen ( cmd, stdout = out, stderr = out ) return p",True,self.config,self.config,0.6570847630500793 4146,"def convert ( low, high, lockout = False ) : time = """" tmp = 0 if low == 0 and hex ( high ) == ""-0x80000000"" : return ""Not Set"" if low == 0 and high == 0 : return ""None"" if not lockout : if : high = abs ( high + 1 ) else : high = abs ( high ) low = abs ( low ) tmp = low + ( high ) * 16 ** 8 tmp *= 1e-7 else : tmp = abs ( high ) * ( 1e-7 ) try : minutes = int ( strftime ( ""%M"", gmtime ( tmp ) ) ) hours = int ( strftime ( ""%H"", gmtime ( tmp ) ) ) days = int ( strftime ( ""%j"", gmtime ( tmp ) ) ) - 1 except ValueError as e : return ""[-] Invalid TIME"" if days > 1 : time += ""{0} days "". format ( days ) elif days == 1 : time += ""{0} day "". format ( days ) if hours > 1 : time += ""{0} hours "". format ( hours ) elif hours == 1 : time += ""{0} hour "". format ( hours ) if minutes > 1 : time += ""{0} minutes "". format ( minutes ) elif minutes == 1 : time += ""{0} minute "". format ( minutes ) return time",False,low != 0,high > 0,0.6863808035850525 4147,"def _wait_security_domain_operation ( client, hsm_name, identifier = None ) : retries = 0 max_retries = 30 wait_second = 5 while retries < max_retries : try : ret = client. upload_pending ( vault_base_url = hsm_name ) if : return ret except : pass time. sleep ( wait_second ) retries += 1 return None",False,"ret and getattr(ret, 'status', None) in ['Succeeded', 'Failed']",identifier,0.6567783355712891 4148,"def addError ( self, test, err ) : if err [ 0 ] is SkipTest : if self. showAll : self. stream. writeln ( str ( err [ 1 ] ) ) elif : self. stream. write ( ""s"" ) self. stream. flush ( ) return _org_AddError ( self, test, err )",False,self.dots,self.showAll,0.6646468639373779 4149,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if : self. success = { } ( _ktype31, _vtype32, _size30 ) = iprot. readMapBegin ( ) for _i34 in xrange ( _size30 ) : _key35 = iprot. readString ( ) _val36 = ColumnDescriptor ( ) _val36. read ( iprot ) self. success [ _key35 ] = _val36 iprot. readMapEnd ( ) else : iprot. skip ( ftype ) elif fid == 1 : if ftype == TType. STRUCT : self. io = IOError ( ) self. io. read ( i",False,ftype == TType.MAP,ftype == TType.ENUM,0.6627984642982483 4150,"def load_state ( self ) : state = load_pickled_state_file ( ""files_tab.state"" ) if not state : return if state [ ""sort_id"" ] is not None and state [ ""sort_order"" ] is not None : self. treestore. set_sort_column_id ( state [ ""sort_id"" ], state [ ""sort_order"" ] ) for ( index, column ) in enumerate ( self. listview. get_columns ( ) ) : cname = column. get_title ( ) if cname in state [ ""columns"" ] : cstate = state [ ""columns"" ] [ cname ] column. set_sizing ( Gtk. TreeViewColumnSizing. FIXED ) column. set_fixed_width ( cstate [ ""width"" ] if cstate [ ""width"" ] > 0 else 10 ) if state [ ""sort_id"" ] == index and state [ ""sort_order"" ] is not None : column. set_sort_indicator ( True ) column. set_sort_order ( state [ ""sort_order"" ] ) if cstate [ ""position"" ]!= index : if : self. listview. move_column_after ( column, None ) elif ( self. listview. 
get_columns ( ) [ cstate [ ""position"" ] - 1 ]. get_title ( ) != cname ) : SQL = render_template ( ""/"". join ( [ self. template_path, self. _NODES_SQL ] ), jid = jid, conn = self. conn ) status, rset = self. conn. execute_dict ( SQL ) if not status : return internal_server_error ( errormsg = rset ) if jid is not None : if len ( rset [ ""rows"" ] )!= 1 : return gone ( errormsg = _ ( ""Could not find the pgAgent job on the server."" ) ) return make_json_response ( data = self. blueprint. generate_browser_node ( rset [ ""rows"" ] [ 0 ] [ ""jobid"" ], sid, rset [ ""rows"" ] [ 0 ] [ ""jobname"" ], ""icon-pga_job"" if : else ""icon-pga_job-disabled"", ), status = 200, ) res = [ ] for row in rset [ ""rows"" ] : res. append ( self. blueprint. generate_browser_node ( row [ ""jobid"" ], sid, row [ ""jobname"" ], ""icon-pga_job"" if row [ ""jobenabled"" ] else ""icon-pga_job-disabled"", ",False,rset['rows'][0]['jobenabled'],rset['jobenabled'],0.6641470193862915 4152,"def complete_tags ( cls ) : u""""""build a list of tags and store it in variable b:org_tag_completion"""""" d = ORGMODE. get_document ( ) heading = d. current_heading ( ) if not heading : return leading_portion = vim. eval ( u""a:ArgLead"" ). decode ( u""utf-8"" ) cursor = int ( vim. eval ( u""a:CursorPos"" ) ) idx_orig = leading_portion. rfind ( u"":"", 0, cursor ) if idx_orig == - 1 : idx = 0 else : idx = idx_orig current_tag = leading_portion [ idx : cursor ]. lstrip ( u"":"" ) head = leading_portion [ : idx + 1 ] if idx_orig == - 1 : head = u"""" tail = leading_portion [ cursor : ] all_tags = set ( ) for h in d. all_headings ( ) : for t in h. tags : all_tags. add ( t ) ignorecase = bool ( int ( settings. get ( u""org_tag_completion_ignorecase"", int ( vim. eval ( u""&ignorecase"" ) ) ) ) ) possible_tags = [ ] current_tags = heading. tags for t in all_tags : if ignorecase : if t. lower ( ). startswith ( current_tag. lower ( ) ) : possible_tags. append ( t ) elif : possible_tags. append ( t ) vim.",False,t.startswith(current_tag),t.lower().startswith(current_tag.lower()),0.6501619815826416 4153,"def __init__ ( self, * args, ** kwargs ) : super ( ChallengePhaseCreateSerializer, self ). __init__ ( * args, ** kwargs ) context = kwargs. get ( ""context"" ) if context : challenge = context. get ( ""challenge"" ) if challenge : kwargs [ ""data"" ] [ ""challenge"" ] = challenge. pk test_annotation = context. get ( ""test_annotation"" ) if : kwargs [ ""data"" ] [ ""test_annotation"" ] = test_annotation",True,test_annotation,test_annotation,0.6686559915542603 4154,"def failUnlessRaises ( self, excClass, callableObj, * args, ** kwargs ) : try : callableObj ( * args, ** kwargs ) except excClass : return else : if : excName = excClass. __name__ else : excName = str ( excClass ) self. fail ( ""%s not raised"" % excName )",True,"hasattr(excClass, '__name__')","hasattr(excClass, '__name__')",0.6558315753936768 4155,"def _get_port ( ) : while True : port = 20000 + random. randint ( 1, 9999 ) for i in range ( 5 ) : sock = socket. socket ( socket. AF_INET, socket. SOCK_STREAM ) result = sock. connect_ex ( ( ""127.0.0.1"", port ) ) if : continue else : return port",False,result == 0,result,0.6756051778793335 4156,"def slugify_instance ( inst, label, reserved = ( ), max_length = 30, * args, ** kwargs ) : base_slug = slugify ( label ) [ : max_length ] if base_slug in reserved : base_slug = None elif base_slug is not None : base_slug = base_slug. strip ( ) if not base_slug : base_slug = uuid4 ( ). hex [ : 12 ] base_qs = type ( inst ). objects. 
all ( ) if inst. id : base_qs = base_qs. exclude ( id = inst. id ) if args or kwargs : base_qs = base_qs. filter ( * args, ** kwargs ) inst. slug = base_slug if : return sizes = ( ( 1, 2 ), ( 5, 3 ), ( 20, 5 ), ( 1, 12 ), ) for attempts, size in sizes : for i in xrange ( attempts ) : end = get_random_string ( size, allowed_chars = ""abcdefghijklmnopqrstuvwxyz0123456790"" ) inst. slug = base_slug [ : max_length - size - 1 ] + ""-"" + end if : return",False,not base_qs.filter(slug__iexact=inst.slug).exists(),len(base_qs) > max_length,0.6466219425201416 4157,"def check ( self ) : """"""Perform required checks to conclude if it's safe to operate"""""" if self. interpreter. manual is None : if not self. process. healthy : self. error = self. process. error self. tip = self. process. tip return False start = time. time ( ) while not self. _status ( ) : if : self. error = ""can't connect to the minserver on {}:{}"". format ( self. interpreter. host, self. interpreter. port ) self. tip = ""check your vagrant machine is running"" return False time. sleep ( 0.1 ) return True",False,time.time() - start >= 2,self.interpreter.host is None or self.interpreter.port is None,0.6583954095840454 4158,"def _bytecode_filenames ( self, py_filenames ) : bytecode_files = [ ] for py_file in py_filenames : if not py_file. endswith ( "".py"" ) : continue if : bytecode_files. append ( py_file + ""c"" ) if self. optimize > 0 : bytecode_files. append ( py_file + ""o"" ) return bytecode_files",False,self.compile,self.compile > 0,0.6697002053260803 4159,"def get_files ( d ) : f = [ ] for root, dirs, files in os. walk ( d ) : for name in files : if ""meta-environment"" in root or ""cross-canadian"" in root : continue if : continue if ""do_build"" not in name and ""do_populate_sdk"" not in name : f. append ( os. path. join ( root, name ) ) return f",False,'qemux86copy-' in root or 'qemux86-' in root,name == 'has-tags',0.6564231514930725 4160,"def listdir ( self, d ) : try : return [ p for p in os. listdir ( d ) if : ] except OSError : return [ ]",False,"os.path.basename(p) != 'CVS' and os.path.isdir(os.path.join(d, p))",not self.lexicon,0.6517463326454163 4161,"def get ( self, subject, topic ) : """"""Handles GET requests."""""" if subject in feconf. AVAILABLE_LANDING_PAGES : if : self. render_template ( ""topic-landing-page.mainpage.html"" ) else : raise self. PageNotFoundException else : raise self. PageNotFoundException",False,topic in feconf.AVAILABLE_LANDING_PAGES[subject],topic in feconf.AVAILABLE_LANDING_PAGES,0.6543331146240234 4162,"def makeMasterGuiBinding ( self, stroke, w = None, trace = False ) : """"""Make a master gui binding for stroke in pane w, or in all the standard widgets."""""" k = self c = k. c f = c. frame if w : widgets = [ w ] else : bindingWidget = ( f. tree and hasattr ( f. tree, ""bindingWidget"" ) and f. tree. bindingWidget or None ) wrapper = f. body and hasattr ( f. body, ""wrapper"" ) and f. body. wrapper or None canvas = f. tree and hasattr ( f. tree, ""canvas"" ) and f. tree. canvas or None widgets = ( c. miniBufferWidget, wrapper, canvas, bindingWidget ) for w in widgets : if not w : continue aList = k. masterGuiBindingsDict. get ( stroke, [ ] ) if : aList. append ( w ) k. masterGuiBindingsDict [ stroke ] = aList",False,w not in aList,aList and trace,0.6632737517356873 4163,"def __fill_counter_values ( self, command : str ) : result = [ ] regex = r""(item[0-9]+\.counter_value)"" for token in re. split ( regex, command ) : if : try : result. append ( str ( self. simulator_config. 
item_dict [ token ]. value ) ) except ( KeyError, ValueError, AttributeError ) : logger. error ( ""Could not get counter value for "" + token ) else : result. append ( token ) return """". join ( result )",False,"re.match(regex, token) is not None",token in self.simulator_config.item_dict,0.6487523317337036 4164,"def _update ( self, child ) : ( left, top, right, bot ) = child. bbox ( ) y = self. _yalign ( top, bot ) for c in self. _children : ( x1, y1, x2, y2 ) = c. bbox ( ) c. move ( 0, y - self. _yalign ( y1, y2 ) ) if self. _ordered and len ( self. _children ) > 1 : index = self. _children. index ( child ) x = right + self. _space for i in range ( index + 1, len ( self. _children ) ) : ( x1, y1, x2, y2 ) = self. _children [ i ]. bbox ( ) if : self. _children [ i ]. move ( x - x1, 0 ) x += x2 - x1 + self. _space x = left - self. _space for i in range ( index - 1, - 1, - 1 ) : ( x1, y1, x2, y2 ) = self. _children [ i ]. bbox ( ) if x < x2 : self. _children [ i ]. move ( x - x2, 0 ) x -= x2 - x1 + self. _space",False,x > x1,x < right,0.6712762117385864 4165,"def mouse_down ( self, ips, x, y, btn, ** key ) : lim = 5.0 / key [ ""canvas"" ]. get_scale ( ) ips. mark = self. helper if btn == 1 : if not self. doing : print ( ips. roi ) print ( self. curobj ) if ips. roi!= None : self. curobj = ips. roi. pick ( x, y, ips. cur, lim ) ips. roi. info ( ips, self. curobj ) if self. curobj!= None : return if ips. roi == None : print ( 1 ) ips. roi = lineroi. LineRoi ( ) self. doing = True elif ips. roi. dtype == ""line"" and key [ ""shift"" ] : print ( 2 ) self. doing = True else : ips. roi = None if : self. helper. addpoint ( ( x, y ) ) self. curobj = ( self. helper. buf, - 1 ) self. odx, self. ody = x, y elif btn == 3 : if : self. helper. addpoint ( ( x, y ) ) self. doing = False ips. roi.",True,self.doing,self.doing,0.6586557626724243 4166,"def _unpack_map ( code, fp, options ) : if ( ord ( code ) & 0xF0 ) == 0x80 : length = ord ( code ) & ~ 0xF0 elif code == b""\xde"" : length = struct. unpack ( "">H"", _read_except ( fp, 2 ) ) [ 0 ] elif code == b""\xdf"" : length = struct. unpack ( "">I"", _read_except ( fp, 4 ) ) [ 0 ] else : raise Exception ( ""logic error, not map: 0x%02x"" % ord ( code ) ) d = { } if not options. get ( ""use_ordered_dict"" ) else collections. OrderedDict ( ) for _ in xrange ( length ) : k = _unpack ( fp, options ) if isinstance ( k, list ) : k = _deep_list_to_tuple ( k ) elif not isinstance ( k, collections. Hashable ) : raise UnhashableKeyException ( ""encountered unhashable key: %s, %s"" % ( str ( k ), str ( type ( k ) ) ) ) elif : raise DuplicateKeyException ( ""encountered duplicate key: %s, %s"" % ( str ( k ), str ( type ( k ) ) ) ) v = _unpack ( fp, options ) try : d [ k ] = v except TypeError : raise UnhashableKeyException ( ""encountered unhashable key: %s"" % str ( k ) ) if ""enabled"" in result : row = { ""User"" : user, ""forwardEnabled"" : result [ ""enabled"" ] } if result [ ""enabled"" ] : row [ ""forwardTo"" ] = result [ ""emailAddress"" ] row [ ""disposition"" ] = result [ ""disposition"" ] else : row = { ""User"" : user, ""forwardEnabled"" : result [ ""enable"" ] } if : row [ ""forwardTo"" ] = result [ ""forwardTo"" ] row [ ""disposition"" ] = EMAILSETTINGS_OLD_NEW_OLD_FORWARD_ACTION_MAP [ result [ ""action"" ] ] csvRows. append ( row )",False,result['enable'] == 'true',result['forwardTo'],0.6590452194213867 4168,"def _stop_child_threads ( self, name = None ) : """"""Stops all threads spawn by this activity."""""" for thread_name, thread in list ( self. 
_child_thread_map. items ( ) ) : if : LOG. debug ( ""%s: Stopping child thread %s"", self. name, thread_name ) thread. kill ( ) self. _child_thread_map. pop ( thread_name, None )",False,name is not None and thread_name is name,name is None or thread_name is name,0.6503958702087402 4169,"def page_file ( self, page ) : try : page = self. notebook. get_page ( page ) if : return page. source else : return None except PageNotFoundError : return None",False,"hasattr(page, 'source') and isinstance(page.source, File)",page,0.6521341800689697 4170,"def logIn ( username = """", password = """" ) : cf = TidalConfig ( ) if username == """" or password == """" : print ( ""----------------LogIn------------------"" ) username = myinput ( ""username:"" ) password = myinput ( ""password:"" ) account3 = TidalMobileSession ( username, password, TIDAL_TOKEN. clientID, cf ) if account3. errmsg is None : cf. set_account2 ( username, password, account3. access_token, account3. country_code, account3. user_id, ) else : cf. set_account2 ( username, password, """", """", """" ) printWarning ( 0, ""Login err(by mobile)!"" + account3. errmsg ) account = TidalAccount ( username, password, TIDAL_TOKEN, False, cf ) account2 = TidalAccount ( username, password, TIDAL_TOKEN, True, cf ) if account. errmsg!= """" and account2. errmsg!= """" : printErr ( 0, account. errmsg ) return False elif : account = account2 elif account2. errmsg!= """" : account2 = account cf. set_account ( username, password, account. session_id, account. country_code, account. user_id, account2. session_id,",False,account.errmsg != '',account.errmsg == '',0.6822667121887207 4171,"def deserialize_txs ( ) : to_hashX = self. coin. hashX_from_script deserializer = self. coin. DESERIALIZER txs = { } for hash, raw_tx in zip ( hashes, raw_txs ) : if : continue tx, tx_size = deserializer ( raw_tx ). read_tx_and_vsize ( ) txin_pairs = tuple ( ( txin. prev_hash, txin. prev_idx ) for txin in tx. inputs if not txin. is_generation ( ) ) txout_pairs = tuple ( ( to_hashX ( txout. pk_script ), txout. value ) for txout in tx. outputs ) txs [ hash ] = MemPoolTx ( txin_pairs, None, txout_pairs, 0, tx_size ) return txs",False,not raw_tx,raw_tx is None,0.6741667985916138 4172,"def get_lang ( node ) : retval = None if node. hasAttribute ( ""lang"" ) : retval = node. getAttribute ( ""lang"" ) if retval and node. hasAttribute ( ""xml:lang"" ) : xmllang = node. getAttribute ( ""xml:lang"" ). lower ( ) if : retval = None return retval",False,not (xmllang != None and xmllang == retval.lower()),xmllang != 'en',0.6515306234359741 4173,"def chop ( expr, delta = 10.0 ** ( - 10.0 ) ) : if isinstance ( expr, Real ) : if : return Integer ( 0 ) elif isinstance ( expr, Complex ) and expr. is_inexact ( ) : real, imag = expr. real, expr. imag if - delta < real. get_float_value ( ) < delta : real = Integer ( 0 ) if - delta < imag. get_float_value ( ) < delta : imag = Integer ( 0 ) return Complex ( real, imag ) elif isinstance ( expr, Expression ) : return Expression ( chop ( expr. head ), * [ chop ( leaf ) for leaf in expr. leaves ] ) return expr",False,-delta < expr.get_float_value() < delta,-delta < 0,0.6514445543289185 4174,def getFirstSubGraph ( graph ) : if len ( graph ) == 0 : return None subg = { } todo = [ graph. keys ( ) [ 0 ] ] while len ( todo ) > 0 : if : subg [ todo [ 0 ] ] = graph [ todo [ 0 ] ] todo. 
extend ( graph [ todo [ 0 ] ] ) del graph [ todo [ 0 ] ] del todo [ 0 ] return subg,False,todo[0] in graph.keys(),graph[todo[0]] == 1,0.6583659648895264 4175,"def __process_update ( self, table, uuid, old, new ) : old_row = table. rows. get ( uuid ) if old_row is not None : old_row = model. Row ( dictify ( old_row ) ) old_row [ ""_uuid"" ] = uuid changed = idl. Idl. __process_update ( self, table, uuid, old, new ) if changed : if not new : ev = ( event. EventRowDelete, ( table. name, old_row ) ) elif : new_row = model. Row ( dictify ( table. rows. get ( uuid ) ) ) new_row [ ""_uuid"" ] = uuid ev = ( event. EventRowInsert, ( table. name, new_row ) ) else : new_row = model. Row ( dictify ( table. rows. get ( uuid ) ) ) new_row [ ""_uuid"" ] = uuid ev = ( event. EventRowUpdate, ( table. name, old_row, new_row ) ) self. _events. append ( ev ) return changed",False,not old,uuid,0.6882584095001221 4176,"def get_encoding ( headers, content ) : """"""Get encoding from request headers or page head."""""" encoding = None content_type = headers. get ( ""content-type"" ) if content_type : _, params = cgi. parse_header ( content_type ) if : encoding = params [ ""charset"" ]. strip ( ""'\"""" ) if not encoding : content = utils. pretty_unicode ( content [ : 1000 ] ) if six. PY3 else content charset_re = re. compile ( r']', flags = re. I ) pragma_re = re. compile ( r']', flags = re. I ) xml_re = re. compile ( r'^<\?xml.*?encoding=[""\']*(.+?)[""\'>]' ) encoding = ( charset_re. findall ( content ) + pragma_re. findall ( content ) + xml_re. findall ( content ) ) encoding = encoding and encoding [ 0 ] or None return encoding",True,'charset' in params,'charset' in params,0.6701338291168213 4177,"def add ( self, action : types. NestedArray, next_timestep : dm_env. TimeStep, extras : types. NestedArray = ( ), ) : """"""Record an action and the following timestep."""""" if self. _next_observation is None : raise ValueError ( ""adder.add_first must be called before adder.add."" ) discount = next_timestep. discount if next_timestep. last ( ) : if : discount = tree. map_structure ( lambda d : np. broadcast_to ( next_timestep. discount, np. shape ( d ) ), self. _buffer [ - 1 ]. discount, ) self. _buffer. append ( Step ( observation = self. _next_observation, action = action, reward = next_timestep. reward, discount = discount, start_of_episode = self. _start_of_episode, extras = extras, ) ) self. _next_observation = next_timestep. observation self. _start_of_episode = False self. _write ( ) if next_timestep. last ( ) : self. _write_last ( ) self. reset ( )",False,self._buffer and (not tree.is_nested(next_timestep.discount)),tree is not None,0.6526888012886047 4178,"def after_name ( ) -> None : self. write ( ""("" ) self. visit_list ( node. params, newlines = False ) self. write ( "")"" ) self. write ( "" -> "" ) self. write ( node. returning_typemod. to_edgeql ( ), "" "" ) self. visit ( node. returning ) if node. commands : self. write ( "" {"" ) self. _block_ws ( 1 ) self. visit_list ( node. commands, terminator = "";"" ) self. new_lines = 1 else : self. write ( "" "" ) if node. code. from_function : from_clause = f""USING {node.code.language} FUNCTION "" if self. sdlmode : from_clause = from_clause. lower ( ) self. write ( from_clause ) self. write ( f""{node.code.from_function!r}"" ) elif node. code. language is qlast. Language. EdgeQL : if node. nativecode : self. _write_keywords ( ""USING"" ) self. write ( "" ("" ) self. visit ( node. nativecode ) self. write ( "")"" ) else : assert node. code. code self. 
_write_keywords ( ""USING"" ) self. write ( f"" ({node.code.code})"" ) else : from_clause = f""USING {node.code.language} "" if self. sdlmode : from_clause = from_clause.",False,node.code.code,node.code,0.6557097434997559 4179,"def validate ( self ) : for tree_node in self. _tree_nodes : sum_probabilities = 0.0 if len ( tree_node. _children ) > 0 : for child in tree_node. _children : sum_probabilities += child. _conditional_probability if abs ( 1.0 - sum_probabilities ) > 0.000001 : print ( ""The child conditional probabilities for tree node=%s "" "" sum to %s"" % ( tree_node. _name, sum_probabilities ) ) return False num_roots = 0 root_ids = [ ] for tree_node in self. _tree_nodes : if tree_node. _parent is None : num_roots += 1 root_ids. append ( tree_node. _name ) if num_roots!= 1 : print ( ""Illegal set of root nodes detected: "" + str ( root_ids ) ) return False for tree_node in self. _tree_nodes : if : print ( ""There are no scenarios associated with tree node=%s"" % ( tree_node. _name ) ) return False return True",False,len(tree_node._scenarios) == 0,len(tree_node._scenarioes) == 0,0.653666615486145 4180,"def findcommonoutgoing ( orig, repo, other, * args, ** kwargs ) : if isinstance ( other, gitrepo. gitrepo ) : git = GitHandler ( repo, repo. ui ) heads = git. get_refs ( other. path ) [ 0 ] kw = { } kw. update ( kwargs ) for val, k in zip ( args, ( ""onlyheads"", ""force"", ""commoninc"", ""portable"" ) ) : kw [ k ] = val force = kw. get ( ""force"", False ) commoninc = kw. get ( ""commoninc"", None ) if : commoninc = discovery. findcommonincoming ( repo, other, heads = heads, force = force ) kw [ ""commoninc"" ] = commoninc return orig ( repo, other, ** kw ) return orig ( repo, other, * args, ** kwargs )",False,commoninc is None,commoninc is None or force,0.6587353944778442 4181,"def chromecast_control ( action ) : TV = pychromecast. Chromecast ( ""192.168.1.13"" ) mc = TV. media_controller if ""pause"". lower ( ) in str ( action ). lower ( ) : TV. wait ( ) time. sleep ( 1 ) mc. pause ( ) if ""resume"". lower ( ) in str ( action ). lower ( ) : TV. wait ( ) time. sleep ( 1 ) mc. play ( ) if ""end"". lower ( ) in str ( action ). lower ( ) : TV. wait ( ) time. sleep ( 1 ) mc. stop ( ) if ""volume"". lower ( ) in str ( action ). lower ( ) : if : TV. wait ( ) time. sleep ( 1 ) TV. volume_up ( 0.2 ) if ""down"". lower ( ) in str ( action ). lower ( ) : TV. wait ( ) time. sleep ( 1 ) TV. volume_down ( 0.2 )",False,'up'.lower() in str(action).lower(),action.lower(),0.6485540270805359 4182,"def on_request ( self, context, request ) : if ""Invoke-PSInject.ps1"" == request. path [ 1 : ] : request. send_response ( 200 ) request. end_headers ( ) request. wfile. write ( self. ps_script1 ) elif ""Get-Keystrokes.ps1"" == request. path [ 1 : ] : request. send_response ( 200 ) request. end_headers ( ) keys_folder_path = os. path. join ( context. log_folder_path, ""get_keystrokes_{}"". format ( request. client_address [ 0 ] ), ) if : os. mkdir ( keys_folder_path ) request. wfile. write ( self. ps_script2 ) request. stop_tracking_host ( ) else : request. send_response ( 404 ) request. end_headers ( )",True,not os.path.exists(keys_folder_path),not os.path.exists(keys_folder_path),0.645585298538208 4183,"def render_logic ( self, path = """" ) : if path in self. files : filesystem_path = self. files [ path ] if : filenames = [ ] for filename in os. listdir ( filesystem_path ) : if os. path. isdir ( os. path. join ( filesystem_path, filename ) ) : filenames. append ( filename + ""/"" ) else : filenames. 
append ( filename ) filenames. sort ( ) return self. directory_listing ( filenames, path, filesystem_path ) elif os. path. isfile ( filesystem_path ) : if self. download_individual_files : return self. stream_individual_file ( filesystem_path ) else : history_id = self. cur_history_id self. cur_history_id += 1 return self. web. error404 ( history_id ) else : history_id = self. cur_history_id self. cur_history_id += 1 return self. web. error404 ( history_id ) else : if path == """" : filenames = list ( self. root_files ) filenames. sort ( )",True,os.path.isdir(filesystem_path),os.path.isdir(filesystem_path),0.6479696035385132 4184,"def fit ( self, series : TimeSeries ) : super ( ). fit ( series ) series = self. training_series in_df = pd. DataFrame ( data = { ""ds"" : series. time_index ( ), ""y"" : series. univariate_values ( ) } ) self. model = fbprophet. Prophet ( ** self. prophet_kwargs ) if self. freq is not None : if : interval_length = 30.4375 elif series. freq_str ( ) == ""Y"" : interval_length = 365.25 else : interval_length = pd. to_timedelta ( series. freq_str ( ) ). days self. model. add_seasonality ( name = ""custom"", period = self. freq * interval_length, fourier_order = 5 ) if self. country_holidays is not None : self. model. add_country_holidays ( self. country_holidays ) execute_and_suppress_output ( self. model. fit, logger, logging. WARNING, in_df )",False,"series.freq_str() in ['MS', 'M', 'ME']",series.freq_str() == 'X',0.6523253917694092 4185,"def sanitize_css ( self, style ) : style = re. compile ( ""url\s*\(\s*[^\s)]+?\s*\)\s*"" ). sub ( "" "", style ) if not re. match ( """"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|""[\s\w]+""|\([\d,\s]+\))*$"""""", style ) : return """" if not re. match ( ""^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$"", style ) : return """" clean = [ ] for prop, value in re. findall ( ""([-\w]+)\s*:\s*([^:;]*)"", style ) : if : continue if prop. lower ( ) in self. allowed_css_properties : clean. append ( prop + "": "" + value + "";"" ) elif prop. split ( ""-"" ) [ 0 ]. lower ( ) in [ ""background"", ""border"", ""margin"", ""padding"", ] : for keyword in value. split ( ) : if keyword not in self. acceptable_css_keywords and not re. match ( ""^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$"", keyword, 0,0.6665922999382019 4186,"def render ( self, name, value, attrs = None ) : r = super ( AdminFileWithPreviewWidget, self ). render ( name, value, attrs = attrs ) if value and getattr ( value, ""instance"", None ) : image = admin_thumbnail ( value. instance ) if : r = mark_safe ( ( ' '8px; border-right: 1px solid #ccc; margin-right: 8px""' "">"" % image ) + r ) return r",True,image,image,0.6937013268470764 4187,"def add_hook ( self, save = True ) : if self. user_settings : connect = GitLabClient ( external_account = self. external_account ) secret = utils. make_hook_secret ( ) hook = connect. add_hook ( self. user, self. repo, ""web"", { ""url"" : urljoin ( hook_domain, os. path. join ( self. owner. api_url, ""gitlab"", ""hook/"" ) ), ""content_type"" : gitlab_settings. HOOK_CONTENT_TYPE, ""secret"" : secret, }, events = gitlab_settings. HOOK_EVENTS, ) if hook : self. hook_id = hook. id self. hook_secret = secret if : self. save ( )",True,save,save,0.6947234272956848 4188,"def _load_testfile ( filename, package, module_relative ) : if module_relative : package = _normalize_module ( package, 3 ) filename = _module_relative_path ( package, filename ) if : if hasattr ( package. 
__loader__, ""get_data"" ) : file_contents = package. __loader__. get_data ( filename ) return file_contents. replace ( os. linesep, ""\n"" ), filename return open ( filename ). read ( ), filename",False,"hasattr(package, '__loader__')",filename is not None,0.6530807018280029 4189,"def _post_process_ttl ( zone ) : for name in zone : for record_type in zone [ name ] : records = zone [ name ] [ record_type ] if isinstance ( records, list ) : ttl = min ( [ x [ ""ttl"" ] for x in records ] ) for record in records : if : logger. warning ( ""Using lowest TTL {} for the record set. Ignoring value {}"". format ( ttl, record [ ""ttl"" ] ) ) record [ ""ttl"" ] = ttl",False,record['ttl'] != ttl,record['ttl'] is None,0.6723483800888062 4190,"def execute ( cls, ctx, op ) : inputs, device_id, xp = as_same_device ( [ ctx [ inp. key ] for inp in op. inputs ], device = op. device, ret_extra = True ) a = inputs [ 0 ] if len ( inputs ) == 2 : kth = inputs [ 1 ] else : kth = op. kth return_value, return_indices = op. return_value, op. return_indices with device ( device_id ) : kw = { } if : kw [ ""kind"" ] = op. kind if op. order is not None : kw [ ""order"" ] = op. order if return_indices : if not return_value : ctx [ op. outputs [ 0 ]. key ] = xp. argpartition ( a, kth, axis = op. axis, ** kw ) else : argparts = ctx [ op. outputs [ 1 ]. key ] = xp. argpartition ( a, kth, axis = op. axis, ** kw ) ctx [ op. outputs [ 0 ]. key ] = xp. take_along_axis ( a, argparts, op. axis ) else : ctx [ op. outputs [ 0 ]. key ] = xp. partition ( a, kth, axis = op. axis, ** kw )",True,op.kind is not None,op.kind is not None,0.6598044633865356 4191,"def test ( parsed, unknown ) : with timer ( ""boot time"" ) : if : env. NAME = env. TEST logger. level = logging. WARN import unittest if parsed. modules : names = parsed. modules. split ( "","" ) print ( ansi. success ( ""RUNNING "" ) + ""Tests in "" + "", "". join ( names ) ) suite = unittest. TestLoader ( ). loadTestsFromNames ( names ) else : print ( ansi. success ( ""RUNNING "" ) + ""Test Suite"" ) suite = unittest. defaultTestLoader. discover ( env. TESTS_DIR ) result = unittest. TextTestRunner ( ). run ( suite ) if not result. wasSuccessful ( ) : sys. exit ( 1 ) else : sys. exit ( 0 )",False,'LORE_ENV' not in os.environ,unknown,0.6504732370376587 4192,"def try_convert ( self, string ) : string = string. strip ( ) try : return int ( string ) except : try : return float ( string ) except : if : return True if string == ""False"" : return False return string",True,string == 'True',string == 'True',0.6608721613883972 4193,"def list_org_repos ( self, org, type = ""all"" ) : headers = { ""Authorization"" : ""token {}"". format ( self. github_creds [ org ] ) } params = { ""page"" : 1, ""type"" : type } done = False repos = [ ] while not done : url = ""https://api.github.com/orgs/{}/repos"". format ( org ) result = requests. get ( url, headers = headers, params = params ) if result. status_code!= 200 : raise InvalidResponseCodeFromGitHubError ( org, result. status_code ) if : done = True else : params [ ""page"" ] += 1 result_json = result. json ( ) repos += result_json return repos",False,not result.links.get('last'),type == 'all',0.6484960913658142 4194,"def test ( self, setting ) : self. logger. debug ( ""Testing connection to snzbget"" ) rpc = self. get_rpc ( setting. host, setting. ssl, setting. port, setting. username, urllib. quote ( setting. password. encode ( ""utf-8"" ) ), ) try : if rpc. writelog ( ""INFO"", ""NZB Hydra connected to test connection"" ) : version = rpc. 
version ( ) if : self. logger. error ( ""NZBGet needs to be version 13 or higher"" ) return False, ""NZBGet needs to be version 13 or higher"" self. logger. info ( ""Connection test to NZBGet successful"" ) else : self. logger. info ( ""Successfully connected to NZBGet, but unable to send a message"" ) except socket. error : self. logger. error ( ""NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct."" ) return False, ""NZBGet is not responding under this address, scheme and port"" except xmlrpc. client. ProtocolError as e : if e. errcode == 401 : self. logger. error ( ""Wrong credentials"" ) return False, ""Wrong credentials"" else : self. logger. error ( ""Protocol error: %s"", e ) ",False,int(version[:2]) < 13,version > 13,0.6596469879150391 4195,"def __eq__ ( self, target_charges : Union [ np. ndarray, ""BaseCharge"" ] ) -> np. ndarray : if isinstance ( target_charges, type ( self ) ) : if len ( target_charges ) == 0 : raise ValueError ( ""input to __eq__ cannot be an empty charge"" ) targets = target_charges. charges else : if target_charges. ndim == 1 : target_charges = target_charges [ :, None ] if target_charges. shape [ 0 ] == 0 : raise ValueError ( ""input to __eq__ cannot be an empty np.ndarray"" ) if : raise ValueError ( ""shape of `target_charges = {}` is incompatible with "" ""`self.num_symmetries = {}"". format ( target_charges. shape, self. num_symmetries ) ) targets = target_charges return np. logical_and. reduce ( self. charges [ :, :, None ] == targets. T [ None, :, : ], axis = 1 )",False,target_charges.shape[1] != self.num_symmetries,self.num_symmetries is None,0.6546242237091064 4196,"def close ( self ) : with BrowserContext. _BROWSER_LOCK : BrowserContext. _BROWSER_REFCNT -= 1 if : logger. info ( ""Destroying browser main loop"" ) BrowserContext. _BROWSER_LOOP. destroy ( ) BrowserContext. _BROWSER_LOOP = None",False,BrowserContext._BROWSER_REFCNT == 0,not self.has_main_loop,0.6577253341674805 4197,"def number_of_layers ( self, ** kwargs ) : """"""Returns the deepest nested item in the event"""""" num = 1 for attr, value in self. event. items ( ) : value = kwargs. get ( attr, value ) if isinstance ( value, PGroup ) : l = pattern_depth ( value ) else : l = 1 if : num = l return num",False,l > num,l > 0,0.6682604551315308 4198,"def test_enumerating_directions ( ) : for backend in imp_op_backends : print ( ""testing directions for"", backend. framework_name ) for shape in [ [ ], [ 1 ], [ 1, 1, 1 ], [ 2, 3, 5, 7 ] ] : if : continue x = numpy. arange ( numpy. prod ( shape ) ). reshape ( shape ) axes1 = _enumerate_directions ( x ) axes2 = _enumerate_directions ( backend. from_numpy ( x ) ) for axe1, axe2 in zip ( axes1, axes2 ) : axe2 = backend. to_numpy ( axe2 ) assert axe1. shape == axe2. shape assert numpy. allclose ( axe1, axe2 )",False,backend.framework_name == 'mxnet.ndarray' and len(shape) == 0,len(shape) > 0,0.6532028913497925 4199,"def _unpack ( self, fmt, byt ) : d = unpack ( self. _header [ ""byteorder"" ] + fmt, byt ) [ 0 ] if fmt [ - 1 ] in self. MISSING_VALUES : nmin, nmax = self. MISSING_VALUES [ fmt [ - 1 ] ] if : if self. _missing_values : return StataMissingValue ( nmax, d ) else : return None return d",False,d < nmin or d > nmax,self._min_values and nmax,0.6667759418487549 4200,"def login_begin ( request ) : redirect_to = request. GET. get ( ""next"", ""/"" ) is_first_login_ever = OpenIDBackend. is_first_login_ever ( ) request. session. 
set_test_cookie ( ) openid_url = getattr ( settings, ""OPENID_SSO_SERVER_URL"", None ) identity_url_prefix = getattr ( settings, ""OPENID_IDENTITY_URL_PREFIX"", None ) if openid_url is not None : if : return render_to_response ( ""openid-login.html"", { ""action"" : urlresolvers. reverse ( ""openid-login"" ), ""next"" : redirect_to, ""first_login_ever"" : is_first_login_ever, ""hide_field"" : True, }, context_instance = RequestContext ( request ), ) return django_login_begin ( request, template_name = ""openid-login.html"", form_class = OpenIDLoginFormExt )",False,request.method == 'GET',identity_url_prefix is None,0.6539400219917297 4201,"def parse_until_text ( self, watch_nesting, * text ) : startpos = self. match_position text_re = r""|"". join ( text ) brace_level = 0 paren_level = 0 bracket_level = 0 while True : match = self. match ( r""#.*\n"" ) if : continue match = self. match ( r""(\""\""\""|\'\'\'|\""|\')[^\\]*?(\\.[^\\]*?)*\1"", re. S ) if : continue match = self. match ( r""(%s)"" % text_re ) if match and not ( watch_nesting and ( brace_level > 0 or paren_level > 0 or bracket_level > 0 ) ) : return ( self. text [ startpos : self. match_position - len ( match. group ( 1 ) ) ], match. group ( 1 ), ) elif not match : match = self. match ( r""(.*?)(?=\""|\'|#|%s)"" % text_re, re. S ) if : brace_level += match. group ( 1 ). count ( ""{"" ) brace_level -= match. group ( 1 ). count ( ""}"" ) paren_level += match. group ( 1 ). count ( ""("" ) paren_level -= match. group ( 1 ). count ( "")"" ) bracket_level += match. group ( 1 ). count ( ""["" ) ",True,match,match,0.6749268770217896 4202,"def test_model_save ( dummy_data, make_model, tmp_path ) : data, labels = dummy_data model = make_model model. compile ( optimizer = ""adam"", loss = ""binary_crossentropy"", metrics = [ ""accuracy"" ] ) model. fit ( data, labels, epochs = 5, batch_size = 32 ) for save_path in [ ""test_model"", ""test_model.h5"" ] : save_path = tmp_path / save_path model. save ( save_path ) loaded_model = load_model ( save_path ) if os. path. isdir ( save_path ) : shutil. rmtree ( save_path ) if : os. remove ( save_path ) np. testing. assert_equal ( model. predict ( data ), loaded_model. predict ( data ) )",False,os.path.exists(save_path),os.path.isdir(save_path),0.6478309631347656 4203,"def test_204_invalid_content_length ( self ) : with ExpectLog ( gen_log, "".*Response with code 204 should not have body"" ) : response = self. fetch ( ""/?error=1"" ) if not self. http1 : self. skipTest ( ""requires HTTP/1.x"" ) if : self. skipTest ( ""curl client accepts invalid headers"" ) self. assertEqual ( response. code, 599 )",False,self.http_client.configured_class != SimpleAsyncHTTPClient,not self.curl_client,0.6534282565116882 4204,"def _get_java_version ( self ) -> Tuple [ int, int ] : """"""This assumes we've already checked that java exists."""""" _proc : asyncio. subprocess. Process = ( await asyncio. create_subprocess_exec ( self. _java_exc, ""-version"", stdout = asyncio. subprocess. PIPE, stderr = asyncio. subprocess. PIPE, ) ) _, err = await _proc. communicate ( ) version_info : str = err. decode ( ""utf-8"" ) lines = version_info. splitlines ( ) for line in lines : match = _RE_JAVA_VERSION_LINE_PRE223. search ( line ) if match is None : match = _RE_JAVA_VERSION_LINE_223. 
search ( line ) if match is None : continue major = int ( match [ ""major"" ] ) minor = 0 if : minor = int ( minor_str ) return major, minor raise RuntimeError ( f""The output of `{self._java_exc} -version` was unexpected."" )",False,"minor_str := match [ ""minor"" ]",minor_str is not None,0.6552532911300659 4205,"def de_dot ( dot_string, msg ) : """"""Turn message and dotted string into a nested dictionary"""""" arr = dot_string. split ( ""."" ) arr. append ( msg ) retval = None for idx in range ( len ( arr ), 1, - 1 ) : if : try : retval = { arr [ idx - 2 ] : arr [ idx - 1 ] } except Exception as err : raise LoggingException ( err ) else : try : new_d = { arr [ idx - 2 ] : retval } retval = new_d except Exception as err : raise LoggingException ( err ) return retval",False,not retval,retval is None,0.6868017911911011 4206,"def _put_nowait ( self, data, *, sender ) : if not self. _running : logger. warning ( ""Pub/Sub listener message after stop: %r, %r"", sender, data ) return self. _queue. put_nowait ( ( sender, data ) ) if self. _waiter is not None : fut, self. _waiter = self. _waiter, None if : assert fut. cancelled ( ), ( ""Waiting future is in wrong state"", self, fut ) return fut. set_result ( None )",False,fut.done(),fut is not None,0.6583906412124634 4207,"def __str__ ( self, prefix = """", printElemNumber = 0 ) : res = """" cnt = 0 for e in self. index_value_ : elm = """" if : elm = ""(%d)"" % cnt res += prefix + ( ""index_value%s <\n"" % elm ) res += e. __str__ ( prefix + "" "", printElemNumber ) res += prefix + "">\n"" cnt += 1 if self. has_key_ : res += prefix + ""key <\n"" res += self. key_. __str__ ( prefix + "" "", printElemNumber ) res += prefix + "">\n"" if self. has_before_ : res += prefix + ( ""before: %s\n"" % self. DebugFormatBool ( self. before_ ) ) return res",False,printElemNumber,e.has_index_value,0.6839258670806885 4208,"def get_data ( filters, columns ) : data = [ ] entry = frappe. get_all ( ""Work Order"", fields = [ ""creation"", ""modified"", ""actual_start_date"", ""actual_end_date"", ""planned_start_date"", ""planned_end_date"", ""status"", ], filters = { ""docstatus"" : 1, ""company"" : filters [ ""company"" ] }, ) periodic_data = get_periodic_data ( filters, entry ) labels = [ ""All Work Orders"", ""Not Started"", ""Overdue"", ""Pending"", ""Completed"" ] chart_data = get_chart_data ( periodic_data, columns ) ranges = get_period_date_ranges ( filters ) for label in labels : work = { } work [ ""Status"" ] = label for dummy, end_date in ranges : period = get_period ( end_date, filters ) if : work [ scrub ( period ) ] = periodic_data. get ( label ). get ( period ) else : work [ scrub ( period ) ] = 0.0 data. append ( work ) return data, chart_data",False,periodic_data.get(label).get(period),period is not None,0.6482834815979004 4209,"def inner_connection_checker ( self, * args, ** kwargs ) : LOG. debug ( ""in _connection_checker"" ) for attempts in range ( 5 ) : try : return func ( self, * args, ** kwargs ) except exception. VolumeBackendAPIException as e : pattern = re. compile ( r"".*Session id expired$"" ) matches = pattern. match ( six. text_type ( e ) ) if : if attempts < 4 : LOG. debug ( ""Session might have expired."" "" Trying to relogin"" ) self. _login ( ) continue LOG. 
error ( ""Re-throwing Exception %s"", e ) raise",True,matches,matches,0.6898159980773926 4210,"def record_expected_exportable_production ( self, ticks ) : """"""Record the amount of production that should be transferred to other islands."""""" for ( quota_holder, resource_id ), amount in self. _low_priority_requests. items ( ) : if : self. _settlement_manager_id [ quota_holder ] = WorldObject. get_object_by_id ( int ( quota_holder [ 1 : ]. split ( "","" ) [ 0 ] ) ). settlement_manager. worldid self. trade_storage [ self. _settlement_manager_id [ quota_holder ] ] [ resource_id ] += ( ticks * amount )",True,quota_holder not in self._settlement_manager_id,quota_holder not in self._settlement_manager_id,0.65946364402771 4211,"def display_top ( snapshot, key_type = ""lineno"", limit = 3 ) : snapshot = snapshot. filter_traces ( ( tracemalloc. Filter ( False, """" ), tracemalloc. Filter ( False, """" ), ) ) top_stats = snapshot. statistics ( key_type ) print ( ""Top %s lines"" % limit ) for index, stat in enumerate ( top_stats [ : limit ], 1 ) : frame = stat. traceback [ 0 ] filename = os. sep. join ( frame. filename. split ( os. sep ) [ - 4 : ] ) print ( ""#%s: %s:%s: %.1f KiB"" % ( index, filename, frame. lineno, stat. size / 1024 ) ) line = linecache. getline ( frame. filename, frame. lineno ). strip ( ) if : print ( "" %s"" % line ) other = top_stats [ limit : ] if other : size = sum ( stat. size for stat in other ) print ( ""%s other: %.1f KiB"" % ( len ( other ), size / 1024 ) ) total = sum ( stat. size for stat in top_stats ) print ( ""Total allocated size: %.1f KiB"" % ( total / 1024 ) )",True,line,line,0.675983190536499 4212,"def check_region ( self, region ) : for other in self. regions : if : continue if ( other. start < region. start < other. end ) or ( other. start < region. end < other. end ) : raise Exception ( ""%r overlaps with %r"" % ( region, other ) )",False,other is region,not other,0.6608564853668213 4213,"def _get_node_type_specific_fields ( self, node_id : str, fields_key : str ) -> Any : fields = self. config [ fields_key ] node_tags = self. provider. node_tags ( node_id ) if TAG_RAY_USER_NODE_TYPE in node_tags : node_type = node_tags [ TAG_RAY_USER_NODE_TYPE ] if : raise ValueError ( f""Unknown node type tag: {node_type}."" ) node_specific_config = self. available_node_types [ node_type ] if fields_key in node_specific_config : fields = node_specific_config [ fields_key ] return fields",True,node_type not in self.available_node_types,node_type not in self.available_node_types,0.6543549299240112 4214,"def link ( self, label, nodename ) : if nodename : if : addr = ""../dir.html"" else : addr = makefile ( nodename ) self. write ( label, ':
    ', nodename, "" \n"" )",False,nodename.lower() == '(dir)',label == 'directory',0.663213849067688 4215,"def attribute_table ( self, attribute ) : """"""Return a tuple (schema, table) for attribute."""""" dimension = attribute. dimension if dimension : schema = self. naming. dimension_schema or self. naming. schema if : table = self. fact_name else : table = self. naming. dimension_table_name ( dimension ) else : table = self. fact_name schema = self. naming. schema return ( schema, table )",False,dimension.is_flat and (not dimension.has_details),dimension,0.6497431993484497 4216,"def _expand_ports ( self, ports_list ) : ports = [ ] for i in ports_list. split ( "","" ) : if : ports. append ( int ( i ) ) else : l, h = map ( int, i. split ( ""-"" ) ) ports += range ( l, h + 1 ) return ports",False,'-' not in i,i.isdigit(),0.6792011260986328 4217,"def optimize ( self, graph : Graph ) : MAX_TEXTURE_SIZE = config. WEBGL_MAX_TEXTURE_SIZE flag_changed = False for v in traverse. listup_variables ( graph ) : if : continue height, width = TextureShape. get ( v ) if height <= MAX_TEXTURE_SIZE and width <= MAX_TEXTURE_SIZE : continue if not v. has_attribute ( SplitTarget ) : flag_changed = True v. attributes. add ( SplitTarget ( ) ) return graph, flag_changed",False,not Placeholder.check_resolved(v.size),not v.has_attribute(TextureShape),0.6563107967376709 4218,"def _check_ordering_item ( self, obj, model, field_name, label ) : """"""Check that `ordering` refers to existing fields."""""" if field_name == ""?"" and len ( obj. ordering )!= 1 : return [ checks. Error ( ""The value of 'ordering' has the random ordering marker '?', "" ""but contains other fields as well."", hint = 'Either remove the ""?"", or remove the other fields.', obj = obj. __class__, id = ""admin.E032"", ) ] elif field_name == ""?"" : return [ ] elif LOOKUP_SEP in field_name : return [ ] else : if field_name. startswith ( ""-"" ) : field_name = field_name [ 1 : ] if : return [ ] try : model. _meta. get_field ( field_name ) except FieldDoesNotExist : return refer_to_missing_field ( field = field_name, option = label, model = model, obj = obj, id = ""admin.E033"" ) else : return [ ]",False,field_name == 'pk',field_name in model._meta.get_fields,0.6591110229492188 4219,"def tostr ( object, encoding = None ) : """"""get a unicode safe string representation of an object"""""" if isinstance ( object, basestring ) : if : return object else : return object. encode ( encoding ) if isinstance ( object, tuple ) : s = [ ""("" ] for item in object : if isinstance ( item, basestring ) : s. append ( item ) else : s. append ( tostr ( item ) ) s. append ( "", "" ) s. append ( "")"" ) return """". join ( s ) if isinstance ( object, list ) : s = [ ""["" ] for item in object : if isinstance ( item, basestring ) : s. append ( item ) else : s. append ( tostr ( item ) ) s. append ( "", "" ) s. append ( ""]"" ) return """". join ( s ) if isinstance ( object, dict ) : s = [ ""{"" ] for item in object. items ( ) : if isinstance ( item [ 0 ], basestring ) : s. append ( item [ 0 ] ) else : s. append ( tostr ( item [ 0 ] ) ) s. append ( "" = "" ) if isinstance ( item [ 1",True,encoding is None,encoding is None,0.6722312569618225 4220,"def get_release_milestone ( build_type, platform ) : """"""Return milestone for a particular release."""""" if build_type == ""head"" : actual_build_type = ""canary"" else : actual_build_type = build_type builds_metadata = get_production_builds_info ( platform ) for build_metadata in builds_metadata : if build_metadata. 
build_type == actual_build_type : version_parts = build_metadata. version. split ( ""."" ) milestone = version_parts [ 0 ] if : return int ( milestone ) if actual_build_type == ""canary"" : return get_release_milestone ( ""canary"", ""win"" ) return None",False,milestone and milestone.isdigit(),milestone,0.6652882099151611 4221,"def _size ( self, filepath ) : files_count = 0 files_size = 0 if not path. isdir ( filepath ) : if : filestat = stat ( filepath ) else : filestat = lstat ( filepath ) yield { F_TYPE : T_SIZE, F_PATH : filepath, F_FILES : 1, F_SIZE : filestat. st_size, } return for root, dirs, files, syms, hards, specials in self. _walk_scandir ( filepath ) : for f in files : files_count += 1 files_size += f. stat ( ). st_size if self. _terminate. is_set ( ) : break if self. _terminate. is_set ( ) : break yield { F_TYPE : T_SIZE, F_PATH : filepath, F_FILES : files_count, F_SIZE : files_size, }",False,self.follow_symlinks,self.is_dir(filepath),0.6563965082168579 4222,"def filter_tests_by_tags ( suite, tags, exclude_tags ) : suite_class = type ( suite ) filtered_suite = suite_class ( ) for test in suite : if isinstance ( test, suite_class ) : filtered_suite. addTests ( filter_tests_by_tags ( test, tags, exclude_tags ) ) else : test_tags = set ( getattr ( test, ""tags"", set ( ) ) ) test_fn_name = getattr ( test, ""_testMethodName"", str ( test ) ) test_fn = getattr ( test, test_fn_name, test ) test_fn_tags = set ( getattr ( test_fn, ""tags"", set ( ) ) ) all_tags = test_tags. union ( test_fn_tags ) matched_tags = all_tags. intersection ( tags ) if : filtered_suite. addTest ( test ) return filtered_suite",False,matched_tags or not tags) and (not all_tags.intersection(exclude_tags),matched_tags,0.6453063488006592 4223,"def main ( argv ) : ( opts, argv ) = CreateOptionsParser ( ). parse_args ( argv [ 1 : ] ) try : csv_path = argv [ 0 ] except IndexError : raise RuntimeError ( ""Expected CSV filename."" ) schema = None if opts. schema : try : schema_f = open ( opts. schema ) except IOError as e : raise RuntimeError ( ""Error opening schema: %s"" % e ) else : if : schema_path = csv_path. replace ( "".csv"", "".schema.csv"" ) elif csv_path. endswith ( "".tsv"" ) : schema_path = csv_path. replace ( "".tsv"", "".schema.tsv"" ) else : raise AssertionError ( csv_path ) log ( ""schema path %s"", schema_path ) try : schema_f = open ( schema_path ) except IOError : schema_f = None if schema_f : if opts. tsv : r = csv. reader ( schema_f, delimiter = ""\t"", doublequote = False, quoting = csv. QUOTE_NONE ) else : r = csv. reader ( schema_f ) schema = Schema ( list ( r ) ) else : schema = NullSchema ( ) log ( ""schema %s"", schema ) with open ( csv_path ) as f : col_names, rows =",False,csv_path.endswith('.csv'),"csv_path.endswith("".csv)",0.6489740610122681 4224,"def run ( self, execution_id ) : execution = self. _get_execution ( execution_id ) context = { ""six"" : six, ""execution"" : execution } template = self. default_template result = { ""enabled"" : True } alias_id = execution [ ""context"" ]. get ( ""action_alias_ref"", { } ). get ( ""id"", None ) if alias_id : alias = self. client. managers [ ""ActionAlias"" ]. get_by_id ( alias_id ) context. update ( { ""alias"" : alias } ) result_params = getattr ( alias, ""result"", None ) if : if not result_params. get ( ""enabled"", True ) : result [ ""enabled"" ] = False else : if ""format"" in alias. result : template = alias. result [ ""format"" ] if ""extra"" in alias. result : result [ ""extra"" ] = jinja_utils. render_values ( alias. 
result [ ""extra"" ], context ) result [ ""message"" ] = self. jinja. from_string ( template ). render ( context ) return result",False,result_params,alias and result_params,0.6667323112487793 4225,"def find_region_by_value ( key, value ) : for region in cognitoidp_backends : backend = cognitoidp_backends [ region ] for user_pool in backend. user_pools. values ( ) : if : return region if key == ""access_token"" and value in user_pool. access_tokens : return region return list ( cognitoidp_backends ) [ 0 ]",False,key == 'client_id' and value in user_pool.clients,key == 'token' and value in user_pool.token,0.6503800749778748 4226,"def load ( self, file_obj, header = True, ** kwargs ) : count = 0 reader = csv. reader ( file_obj, ** kwargs ) if header : try : header_keys = next ( reader ) except StopIteration : return count if self. strict : header_fields = [ ] for idx, key in enumerate ( header_keys ) : if : header_fields. append ( ( idx, self. columns [ key ] ) ) else : header_fields = list ( enumerate ( header_keys ) ) else : header_fields = list ( enumerate ( self. model. _meta. sorted_fields ) ) if not header_fields : return count for row in reader : obj = { } for idx, field in header_fields : if self. strict : obj [ field. name ] = field. python_value ( row [ idx ] ) else : obj [ field ] = row [ idx ] self. table. insert ( ** obj ) count += 1 return count",True,key in self.columns,key in self.columns,0.6647553443908691 4227,"def create_datetime_column ( spec, column_options ) : if spec. startswith ( ""DateTime64"" ) : cls = DateTime64Column spec = spec [ 11 : - 1 ] params = spec. split ( "","", 1 ) column_options [ ""scale"" ] = int ( params [ 0 ] ) if : spec = params [ 1 ]. strip ( ) + "")"" else : cls = DateTimeColumn spec = spec [ 9 : ] context = column_options [ ""context"" ] tz_name = timezone = None offset_naive = True if spec and spec [ - 1 ] == "")"" : tz_name = spec [ 1 : - 2 ] offset_naive = False else : if not context. settings. get ( ""use_client_time_zone"", False ) : try : local_timezone = get_localzone ( ). zone except Exception : local_timezone = None if local_timezone!= context. server_info. timezone : tz_name = context. server_info. timezone if tz_name : timezone = get_timezone ( tz_name ) return cls ( timezone = timezone, offset_naive = offset_naive, ** column_options )",False,len(params) > 1,params[0] != 'undefined',0.6589964628219604 4228,"def shutdown ( self, timeout, callback = None ) : logger. debug ( ""background worker got shutdown request"" ) with self. _lock : if : self. _queue. put_nowait ( _TERMINATOR ) if timeout > 0.0 : self. _wait_shutdown ( timeout, callback ) self. _thread = None self. _thread_for_pid = None logger. debug ( ""background worker shut down"" )",False,self.is_alive,self._queue.empty(),0.658618688583374 4229,"def get ( self, * args, ** kwargs ) : name = self. get_argument ( ""name"" ) url = self. get_argument ( ""url"" ) cookies = self. get_argument ( ""cookies"" ) title_tag = self. get_argument ( ""titleTAG"" ) providerObj = TorrentRssProvider ( name, url, cookies, title_tag ) if providerObj. id not in sickrage. app. search_providers. torrentrss ( ) : validate = providerObj. validateRSS ( ) if : return self. write ( json_encode ( { ""success"" : providerObj. id } ) ) return self. write ( json_encode ( { ""error"" : validate [ ""message"" ] } ) ) return self. write ( json_encode ( { ""error"" : ""Provider name already exists as {}"". 
format ( name ) } ) )",False,validate['result'],validate[0] != 0,0.6574536561965942 4230,"def forward ( self, * x ) : encoded_tensors = [ ] if len ( x ) > 1 : assert len ( x ) == self. num_columns for i in range ( self. num_columns ) : input = x [ i ] if : input = input. long ( ) encoded_tensors. append ( torch. eq ( input, self. condition_tensors [ i ] ) ) else : x = x [ 0 ] if x. dtype!= torch. int64 : x = x. long ( ) for i in range ( self. num_columns ) : encoded_tensors. append ( torch. eq ( x [ :, i : i + 1 ], self. condition_tensors [ i ] ) ) return torch. cat ( encoded_tensors, dim = 1 ). float ( )",False,input.dtype != torch.int64,x.dtype != torch.dtype.float64,0.655195415019989 4231,"def send ( self, request, ** kwargs ) : if MockTransport. ENABLE : if : if request. method!= ""PUT"" : assert ""-secondary"" in request. url request. url = request. url. replace ( ""-secondary"", """" ) response = super ( MockTransport, self ). send ( request, ** kwargs ) if MockTransport. ENABLE : assert response. status_code in [ 200, 201, 409 ] if MockTransport. CALL_NUMBER == 1 : response. status_code = 408 el if : if response. status_code == 409 : response. status_code = 201 else : pytest. fail ( ""This test is not supposed to do more calls"" ) MockTransport. CALL_NUMBER += 1 return response",False,MockTransport.CALL_NUMBER == 2,request.method == 'POST',0.6637427806854248 4232,"def url ( regex, view, kwargs = None, name = None, prefix = """" ) : if isinstance ( view, ( list, tuple ) ) : urlconf_module, app_name, namespace = view return RegexURLResolver ( regex, urlconf_module, kwargs, app_name = app_name, namespace = namespace ) else : if isinstance ( view, basestring ) : if not view : raise ImproperlyConfigured ( ""Empty URL pattern view name not permitted (for pattern %r)"" % regex ) if : view = prefix + ""."" + view return RegexURLPattern ( regex, view, kwargs, name )",True,prefix,prefix,0.6858685612678528 4233,"def isReadOnly ( self, fileName ) : if g. os_path_exists ( fileName ) : try : if : g. error ( ""can not write: read only:"", fileName ) return True except Exception : pass return False",False,"not os.access(fileName, os.W_OK)",not self.hasRead(fileName),0.6540493965148926 4234,"def assert_readback ( vehicle, values ) : i = 10 while i > 0 : time. sleep ( 0.1 ) i -= 0.1 for k, v in values. items ( ) : if : continue break if i <= 0 : raise Exception ( ""Did not match in channels readback %s"" % values )",False,vehicle.channels[k] != v,v.tag == k,0.6581739783287048 4235,"def on_evaluate ( self, args, state, control, metrics = None, ** kwargs ) : if self. training_tracker is not None : values = { ""Training Loss"" : ""No log"" } for log in reversed ( state. log_history ) : if ""loss"" in log : values [ ""Training Loss"" ] = log [ ""loss"" ] break if : values [ ""Epoch"" ] = int ( state. epoch ) else : values [ ""Step"" ] = state. global_step values [ ""Validation Loss"" ] = metrics [ ""eval_loss"" ] _ = metrics. pop ( ""total_flos"", None ) _ = metrics. pop ( ""epoch"", None ) for k, v in metrics. items ( ) : if k == ""eval_loss"" : values [ ""Validation Loss"" ] = v else : splits = k. split ( ""_"" ) name = "" "". join ( [ part. capitalize ( ) for part in splits [ 1 : ] ] ) values [ name ] = v self. training_tracker. write_line ( values ) self. training_tracker. remove_child ( ) self. prediction_bar = None self. 
_force_next_update = True",False,self.first_column == 'Epoch',metrics is None,0.6502366065979004 4236,"def sendState ( self, position, paused, doSeek, latencyCalculation, stateChange = False ) : state = { } positionAndPausedIsSet = position is not None and paused is not None clientIgnoreIsNotSet = ( self. clientIgnoringOnTheFly == 0 or self. serverIgnoringOnTheFly!= 0 ) if clientIgnoreIsNotSet and positionAndPausedIsSet : state [ ""playstate"" ] = { } state [ ""playstate"" ] [ ""position"" ] = position state [ ""playstate"" ] [ ""paused"" ] = paused if : state [ ""playstate"" ] [ ""doSeek"" ] = doSeek state [ ""ping"" ] = { } if latencyCalculation : state [ ""ping"" ] [ ""latencyCalculation"" ] = latencyCalculation state [ ""ping"" ] [ ""clientLatencyCalculation"" ] = self. _pingService. newTimestamp ( ) state [ ""ping"" ] [ ""clientRtt"" ] = self. _pingService. getRtt ( ) if stateChange : self. clientIgnoringOnTheFly += 1 if self. serverIgnoringOnTheFly or self. clientIgnoringOnTheFly : state [ ""ignoringOnTheFly"" ] = { } if self. serverIgnoringOnTheFly : state [ ""ignoringOnTheFly"" ] [ ""server"" ] = self. serverIgnoringOnTheFly self. serverIgnoringOnTheFly = 0 if self. clientIgnoringOnTheFly : state [ ""ignoringOnTheFly"" ] [ ""client"" ] = self. clientIgnoringOnTheFly self. sendMessage ( { ""State"" : state } )",True,doSeek,doSeek,0.7035470604896545 4237,"def _build_episode ( x ) : """"""Create a Movie object for a given series' episode."""""" episode_id = analyze_imdbid ( x. get ( ""link"" ) ) episode_title = x. get ( ""title"" ) e = Movie ( movieID = episode_id, title = episode_title ) e [ ""kind"" ] = u""episode"" oad = x. get ( ""oad"" ) if oad : e [ ""original air date"" ] = oad. strip ( ) year = x. get ( ""year"" ) if year is not None : year = year [ 5 : ] if year == ""unknown"" : year = u""????"" if year and year. isdigit ( ) : year = int ( year ) e [ ""year"" ] = year else : if : e [ ""year"" ] = int ( oad [ - 4 : ] ) epinfo = x. get ( ""episode"" ) if epinfo is not None : season, episode = epinfo. split ( "":"" ) [ 0 ]. split ( "","" ) e [ ""season"" ] = int ( season [ 7 : ] ) e [ ""episode"" ] = int ( episode [ 8 : ] ) else : e [ ""season"" ] = ""unknown"" e [ ""episode"" ] = ""unknown"" plot = x. get ( ""plot"" ) if plot : e [ ""plot"" ] = plot. strip ( ) return e",False,oad and oad[-4:].isdigit(),oad.startswith('j'),0.6599984169006348 4238,"def get_config_updates_recursive ( self ) : config_updates = self. config_updates. copy ( ) for sr_path, subrunner in self. subrunners. items ( ) : if not is_prefix ( self. path, sr_path ) : continue update = subrunner. get_config_updates_recursive ( ) if : config_updates [ rel_path ( self. path, sr_path ) ] = update return config_updates",True,update,update,0.6933262348175049 4239,"def _add_horizontal_html_lines ( self, lines, headers, max_depth ) : esc = self. get_cell_html new_depth = max_depth - 1 if max_depth > 1 else max_depth if max_depth > 1 : new_depth = max_depth - 1 if headers : _thth = self. _html_th_close + self. _html_th lines. append ( self. _html_thead ) lines. append ( self. _html_tr + self. _html_th + _thth. join ( [ esc ( h ) for h in headers ] ) + self. _html_th_close + self. _html_tr_close ) lines. append ( self. _html_thead_close ) trtd, _tdtd, _td_tr = ( self. _html_tr + self. _html_td, self. _html_td_close + self. _html_td, self. _html_td_close + self. _html_tr_close, ) lines. append ( self. _html_tbody ) for row in self. _data : if max_depth > 1 : _fill_parts = [ ] for cell in row : if : _fill_parts. 
append ( cell. to_html ( max_depth = new_depth ) ) else : _fill_parts. append ( esc ( cell ) ) 0,0.6504001617431641 4240,"def train ( self, data_loaders, ** kwargs ) : self. model. train ( ) self. mode = ""train"" self. data_loaders = data_loaders self. main_loader = self. data_loaders [ 0 ] self. data_loader = self. main_loader self. aux_loaders = self. data_loaders [ 1 : ] self. aux_iters = [ cycle ( loader ) for loader in self. aux_loaders ] auxiliary_iter_times = [ 1 ] * len ( self. aux_loaders ) use_aux_per_niter = 1 if ""train_ratio"" in kwargs : train_ratio = kwargs. pop ( ""train_ratio"" ) use_aux_per_niter = train_ratio [ 0 ] auxiliary_iter_times = train_ratio [ 1 : ] self. _max_iters = self. _max_epochs * len ( self. main_loader ) self. call_hook ( ""before_train_epoch"" ) time. sleep ( 2 ) for i, data_batch in enumerate ( self. main_loader ) : self. _inner_iter = i self. call_hook ( ""before_train_iter"" ) self. run_iter ( data_batch, train_mode = True, source = """" ) self. call_hook ( ""after_train_iter"" ) if : self. _iter += 1 continue for idx, n_times in enumerate ( auxiliary_iter_times ) : for _ in range ( n_times ) : data_batch = next ( self. aux_iters [ idx ] ) self. call_hook ( ""before_",False,self._iter % use_aux_per_niter != 0,use_aux_per_niter or use_aux_per_niter,0.6500099301338196 4241,"def test_headerdb_canonical_head_updates_to_longest_chain ( headerdb, genesis_header ) : headerdb. persist_header ( genesis_header ) chain_a = mk_header_chain ( genesis_header, 7 ) chain_b = mk_header_chain ( genesis_header, 5 ) chain_c = mk_header_chain ( genesis_header, 9 ) for idx, header in enumerate ( chain_a, 1 ) : headerdb. persist_header ( header ) assert_is_canonical_chain ( headerdb, chain_a [ : idx ] ) for header in chain_b : headerdb. persist_header ( header ) assert_is_canonical_chain ( headerdb, chain_a ) for idx, header in enumerate ( chain_c, 1 ) : headerdb. persist_header ( header ) if : assert_is_canonical_chain ( headerdb, chain_a ) else : assert_is_canonical_chain ( headerdb, chain_c [ : idx ] ) assert_is_canonical_chain ( headerdb, chain_c )",False,idx <= 7,idx == 0,0.6705527305603027 4242,"def update_ip_desc ( self ) : is_manual, name, ipv4, ipv6, ignore = self. get_config_ip_info ( ) label = """" if is_manual : if ipv4 : label += ""IPv4: %s"" % ( ipv4. dhcp and ""DHCP"" or ""Static"" ) if : if label : label += "", "" label += ""IPv6: "" mode_label = """" if ipv6. autoconf and ipv6. dhcp : mode_label += ""Autoconf "" if ipv6. dhcp : mode_label += ""DHCP"" if not mode_label : mode_label = ""Static"" label += mode_label else : if name : label = ""Copy configuration from '%s'"" % name if not label : label = ""No configuration"" self. widget ( ""ip-config-label"" ). set_text ( label )",False,ipv6,ignore,0.6872738599777222 4243,"def delete ( self, waiters ) : msgs = self. ofctl. get_all_flow ( waiters ) for msg in msgs : for stats in msg. body : vlan_id = VlanRouter. _cookie_to_id ( REST_VLANID, stats. cookie ) if : self. ofctl. delete_flow ( stats ) assert len ( self. packet_buffer ) == 0",False,vlan_id == self.vlan_id,vlan_id == self.ofctl.get_vlan_id(),0.6644588708877563 4244,"def get ( self, request ) : if self. _serve_only : suffix = request. urlargs. get ( ""path"", """" ). split ( ""."" ) [ - 1 ] if suffix not in self. _serve_only : raise self. SkipRoute fullpath = self. filesystem_path ( request ) if not self. _serve_only : if os. path. isdir ( fullpath ) and self. _default_file : file = os. path. join ( fullpath, self. _default_file ) if os. path. 
isfile ( file ) : if : return request. redirect ( ""%s/"" % request. path ) fullpath = file if self. _default_suffix : ext = "".%s"" % self. _default_suffix if not fullpath. endswith ( ext ) : file = ""%s%s"" % ( fullpath, ext ) if os. path. isfile ( file ) : fullpath = file if os. path. isdir ( fullpath ) : if self. _show_indexes : return self. directory_index ( request, fullpath ) else : raise Http404 try : return self. serve_file ( request, fullpath ) except Http404 : file404 = self. get_full_path ( ""404.html"" ) if os. path.",False,not request.path.endswith('/'),self._redirect_redirect,0.6492774486541748 4245,"def del_from_section ( kwargs ) : """"""Remove keyword in section"""""" section = kwargs. get ( ""section"", """" ) if section in ( ""servers"", ""rss"", ""categories"" ) : keyword = kwargs. get ( ""keyword"" ) if keyword : item = config. get_config ( section, keyword ) if : item. delete ( ) del item config. save_config ( ) if section == ""servers"" : sabnzbd. Downloader. update_server ( keyword, None ) return True else : return False",True,item,item,0.697033166885376 4246,"def get_address_tax_category ( tax_category = None, billing_address = None, shipping_address = None ) : addr_tax_category_from = frappe. db. get_single_value ( ""Accounts Settings"", ""determine_address_tax_category_from"" ) if addr_tax_category_from == ""Shipping Address"" : if : tax_category = ( frappe. db. get_value ( ""Address"", shipping_address, ""tax_category"" ) or tax_category ) else : if billing_address : tax_category = ( frappe. db. get_value ( ""Address"", billing_address, ""tax_category"" ) or tax_category ) return cstr ( tax_category )",True,shipping_address,shipping_address,0.6665627360343933 4247,"def _sync_author_detail ( self, key = ""author"" ) : context = self. _get_context ( ) detail = context. get ( ""%ss"" % key, [ FeedParserDict ( ) ] ) [ - 1 ] if detail : name = detail. get ( ""name"" ) email = detail. get ( ""email"" ) if name and email : context [ key ] = ""%s (%s)"" % ( name, email ) elif name : context [ key ] = name elif email : context [ key ] = email else : author, email = context. get ( key ), None if : return emailmatch = re. search ( r""""""(([a-zA-Z0-9\_\-\.\+]+)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?))(\?subject=\S+)?"""""", author, ) if emailmatch : email = emailmatch. group ( 0 ) author = author. replace ( email, """" ) author = author. replace ( ""()"", """" ) author = author. replace ( ""<>"", """" ) author = author. replace ( ""<>"", """" ) author = author. strip ( ) <",False,not author,email,0.670666515827179 4248,"def _make_ext_obj ( self, obj ) : ext = self. _get_ext_class ( obj. objname ) ( ) for name, val in obj. body : if : raise Exception ( ""Error val should be a list, this is a python-opcua bug"", name, type ( val ), val, ) else : for attname, v in val : self. _set_attr ( ext, attname, v ) return ext",True,"not isinstance(val, list)","not isinstance(val, list)",0.6528000831604004 4249,"def predict ( self, dataloader, return_preds = False ) : self. eval ( ) gold_dict = defaultdict ( list ) prob_dict = defaultdict ( list ) for batch_num, ( X_batch_dict, Y_batch_dict ) in enumerate ( dataloader ) : prob_batch_dict = self. _calculate_probs ( X_batch_dict, dataloader. task_to_label_dict. keys ( ) ) for task_name in dataloader. task_to_label_dict. keys ( ) : prob_dict [ task_name ]. extend ( prob_batch_dict [ task_name ] ) gold_dict [ task_name ]. extend ( Y_batch_dict [ dataloader. 
task_to_label_dict [ task_name ] ]. cpu ( ). numpy ( ) ) for task_name in gold_dict : gold_dict [ task_name ] = np. array ( gold_dict [ task_name ] ) prob_dict [ task_name ] = np. array ( prob_dict [ task_name ] ) if len ( gold_dict [ task_name ]. shape ) == 1 : active = ( gold_dict [ task_name ]!= 0 ). reshape ( - 1 ) else : active = np. sum ( gold_dict [ task_name ] == 0, axis = 1 ) > 0 if : gold_dict [ task_name ] = gold_dict [ task_name ] [ active ] prob_dict [ task_name ] = prob_dict [ task_name ] [ active ] if return_preds : pred_dict = defaultdict",False,0 in active,active != 0,0.6706842184066772 4250,"def processDomain ( self, domainName, parentEvent, affil = False, host = None ) : if domainName in self. domresults : self. sf. debug ( f""Skipping domain, {domainName}, already processed."" ) return None self. domresults [ domainName ] = True if affil : domevt = SpiderFootEvent ( ""AFFILIATE_DOMAIN_NAME"", domainName, self. __name__, parentEvent ) self. notifyListeners ( domevt ) return None if self. getTarget ( ). matches ( domainName ) : domevt = SpiderFootEvent ( ""DOMAIN_NAME"", domainName, self. __name__, parentEvent ) self. notifyListeners ( domevt ) else : if : return None if parentEvent. data. endswith ( ""."" + domainName ) : domevt = SpiderFootEvent ( ""DOMAIN_NAME_PARENT"", domainName, self. __name__, parentEvent ) self. notifyListeners ( domevt ) return None",False,not host,parentEvent.data.startswith('.'),0.6747801303863525 4251,"def subject ( self, subject, depth = 1 ) : if not subject in self. __serialized : self. __serialized [ subject ] = 1 if isinstance ( subject, ( BNode, URIRef ) ) : write = self. write indent = "" "" * depth element_name = ""rdf:Description"" if : write ( '%s<%s rdf:nodeID=""%s""' % ( indent, element_name, subject ) ) else : uri = quoteattr ( self. relativize ( subject ) ) write ( ""%s<%s rdf:about=%s"" % ( indent, element_name, uri ) ) if ( subject, None, None ) in self. store : write ( "">\n"" ) for predicate, object in self. store. predicate_objects ( subject ) : self. predicate ( predicate, object, depth + 1 ) write ( ""%s\n"" % ( indent, element_name ) ) else : write ( ""/>\n"" )",False,"isinstance(subject, BNode)",depth == 1,0.6496599912643433 4252,"def __iter__ ( self ) : """"""Iterate over the file handle; yields key, start offset, and length."""""" handle = self. _handle handle. seek ( 0 ) qresult_key = None while True : start_offset = handle. tell ( ) line = handle. readline ( ) if line. startswith ( self. _query_mark ) : if qresult_key is None : qresult_key = self. get_qresult_id ( start_offset ) qresult_offset = start_offset else : curr_key = self. get_qresult_id ( start_offset ) if : yield qresult_key, qresult_offset, start_offset - qresult_offset qresult_key = curr_key qresult_offset = start_offset handle. seek ( qresult_offset ) elif not line : yield qresult_key, qresult_offset, start_offset - qresult_offset break",False,curr_key != qresult_key,curr_key is not None,0.6577798128128052 4253,"def verify_installer_integrity ( game, installer ) : error_message = """" if not os. path. exists ( installer ) : error_message = _ ( ""{} failed to download."" ). format ( installer ) if not error_message : if : try : print ( ""Executing integrity check for {}"". format ( installer ) ) os. chmod ( installer, 0o744 ) result = subprocess. run ( [ installer, ""--check"" ] ) if not result. returncode == 0 : error_message = _ ( ""{} was corrupted. Please download it again."" ). 
format ( installer ) except Exception as ex : print ( ""Error, exception encountered: {}"". format ( ex ) ) error_message = _ ( ""{} was corrupted. Please download it again."" ). format ( installer ) return error_message",False,game.platform == 'linux',os.path.exists(installer),0.6613783240318298 4254,"def compareIgnoringNodeNames ( self, s1, s2, delims, verbose = False ) : delim1, delim2, delim3 = delims lines1 = g. splitLines ( s1 ) lines2 = g. splitLines ( s2 ) if len ( lines1 )!= len ( lines2 ) : if : g. trace ( ""Different number of lines"" ) return False for i in range ( len ( lines2 ) ) : line1 = lines1 [ i ] line2 = lines2 [ i ] if line1 == line2 : continue else : n1 = g. skip_ws ( line1, 0 ) n2 = g. skip_ws ( line2, 0 ) if not g. match ( line1, n1, delim1 ) or not g. match ( line2, n2, delim1 ) : if : g. trace ( ""Mismatched non-sentinel lines"" ) return False n1 += len ( delim1 ) n2 += len ( delim1 ) if g. match ( line1, n1, ""@+node"" ) and g. match ( line2, n2, ""@+node"" ) : continue if g. match ( line1, n1, ""@-node"" ) and g. match ( line2, n2, ""@-node"" ) : continue else : if",True,verbose,verbose,0.6830271482467651 4255,"def init_weights ( self ) : """"""Initialize model weights."""""" for m in self. predict_layers. modules ( ) : if isinstance ( m, nn. Conv2d ) : kaiming_init ( m ) elif isinstance ( m, nn. BatchNorm2d ) : constant_init ( m, 1 ) elif : normal_init ( m, std = 0.01 )",True,"isinstance(m, nn.Linear)","isinstance(m, nn.Linear)",0.6553023457527161 4256,"def _get_setting_types_quickref ( ) : """"""Generate the setting types quick reference."""""" out = [ ] out. append ( ""[[types]]"" ) out. append ( '[options=""header"",width=""75%"",cols=""25%,75%""]' ) out. append ( ""|=============="" ) out. append ( ""|Type|Description"" ) for name, typ in _get_configtypes ( ) : parser = docutils. DocstringParser ( typ ) desc = parser. short_desc if : desc += ""\n\n"" + parser. long_desc out. append ( ""|{}|{}"". format ( name, desc ) ) out. append ( ""|=============="" ) return ""\n"". join ( out )",False,parser.long_desc,desc,0.6660737991333008 4257,"def _validate_duplicate_detection_history_time_window ( namespace ) : if namespace. duplicate_detection_history_time_window : if : pass elif timedeltapattern. match ( namespace. duplicate_detection_history_time_window ) : pass else : raise CLIError ( ""--duplicate-detection-history-time-window Value Error : {0} value is not in ISO 8601 timespan / duration format. e.g. PT10M for duration of 10 min or 00:10:00 for duration of 10 min"". format ( namespace. duplicate_detection_history_time_window ) )",False,iso8601pattern.match(namespace.duplicate_detection_history_time_window),duration.match(namespace.duplicate_detection_history_time_window),0.648791491985321 4258,"def getTable ( self, offset ) : record_list = [ ] BASE_ADDR = sizeof ( _APPL_DB_HEADER ) + offset TableMetaData = _memcpy ( self. fbuf [ BASE_ADDR : BASE_ADDR + sizeof ( _TABLE_HEADER ) ], _TABLE_HEADER ) RECORD_OFFSET_BASE = BASE_ADDR + sizeof ( _TABLE_HEADER ) record_count = 0 offset = 0 while TableMetaData. RecordCount!= record_count : RecordOffset = struct. unpack ( "">I"", self. fbuf [ RECORD_OFFSET_BASE + ( ATOM_SIZE * offset ) : RECORD_OFFSET_BASE + ( ATOM_SIZE * offset ) + ATOM_SIZE ], ) [ 0 ] if : record_list. 
append ( RecordOffset ) record_count += 1 offset += 1 return TableMetaData, record_list",False,RecordOffset != 0 and RecordOffset % 4 == 0,TableMetaData.RecordOffset != 0,0.6655939817428589 4259,"def _check_bidi ( s ) : """"""Enforce bidirectional character check from RFC 3454 (stringprep)"""""" r_and_al_cat = False l_cat = False for c in s : if not r_and_al_cat and stringprep. in_table_d1 ( c ) : r_and_al_cat = True if : l_cat = True if r_and_al_cat and l_cat : raise SASLPrepError ( ""Both RandALCat and LCat characters present"" ) if r_and_al_cat and not ( stringprep. in_table_d1 ( s [ 0 ] ) and stringprep. in_table_d1 ( s [ - 1 ] ) ) : raise SASLPrepError ( ""RandALCat character not at both start and end"" )",False,not l_cat and stringprep.in_table_d2(c),l_cat,0.6496564745903015 4260,"def opps_output_converter ( kpt_list ) : kpts = [ ] mpii_keys = to_opps_converter. keys ( ) for mpii_idx in range ( 0, 16 ) : if : model_idx = to_opps_converter [ mpii_idx ] x, y = kpt_list [ model_idx ] if x < 0 or y < 0 : kpts += [ 0.0, 0.0, - 1.0 ] else : kpts += [ x, y, 1.0 ] else : kpts += [ 0.0, 0.0, - 1.0 ] return kpts",False,mpii_idx in mpii_keys,mpii_keys[mpii_idx] == kpt_list[mpii_idx],0.6584970951080322 4261,"def create_season_posters ( self, show_obj ) : if self. season_posters and show_obj : result = [ ] for season, episodes in show_obj. episodes. iteritems ( ) : if : logger. log ( u""Metadata provider "" + self. name + "" creating season posters for "" + show_obj. name, logger. DEBUG, ) result = result + [ self. save_season_posters ( show_obj, season ) ] return all ( result ) return False",False,"not self._has_season_poster(show_obj, season)",self.logger,0.6538586020469666 4262,"def substitute_prompt ( prompt ) : ""Perform substitutions on PROMPT."" result = """" plen = len ( prompt ) i = 0 while i < plen : if prompt [ i ] == ""\\"" : i = i + 1 if : break cmdch = prompt [ i ] if cmdch in prompt_substitutions : cmd = prompt_substitutions [ cmdch ] if i + 1 < plen and prompt [ i + 1 ] == ""{"" : j = i + 1 while j < plen and prompt [ j ]!= ""}"" : j = j + 1 if j >= plen or prompt [ j ]!= ""}"" : arg = None else : arg = prompt [ i + 2 : j ] i = j else : arg = None result += str ( cmd ( arg ) ) else : result += prompt [ i ] else : <",False,i >= plen,i == plen,0.6755286455154419 4263,"def test_change_height_by_list_of_ints_width_by_fixed_int ( self ) : aug = iaa. Resize ( { ""height"" : [ 12, 14 ], ""width"" : 12 } ) seen2d = [ False, False ] seen3d = [ False, False ] for _ in sm. xrange ( 100 ) : observed2d = aug. augment_image ( self. image2d ) observed3d = aug. augment_image ( self. image3d ) assert observed2d. shape in [ ( 12, 12 ), ( 14, 12 ) ] assert observed3d. shape in [ ( 12, 12, 3 ), ( 14, 12, 3 ) ] if observed2d. shape == ( 12, 12 ) : seen2d [ 0 ] = True else : seen2d [ 1 ] = True if : seen3d [ 0 ] = True else : seen3d [ 1 ] = True if np. all ( seen2d ) and np. all ( seen3d ) : break assert np. all ( seen2d ) assert np. all ( seen3d )",False,"observed3d.shape == (12, 12, 3)","seen3d.shape == (12, 12)",0.6496413946151733 4264,"def on_corner_motion ( self, event ) : corner = self. GetGridCornerLabelWindow ( ) hit_code = self. corner_hit_test ( event. X, event. Y ) if self. corner_hitcode == self. CORNER_HIT_NONE : if hit_code == self. CORNER_HIT_NONE : corner. SetToolTip ( """" ) elif hit_code == self. CORNER_HIT_UPDATE : corner. SetToolTip ( self. tooltip ) else : was_pressed = self. corner_button_pressed self. corner_button_pressed = ( self. corner_hitcode!= self. 
CORNER_HIT_NONE and self. corner_hitcode == hit_code ) if : corner. RefreshRect ( self. get_corner_update_button_rect ( ), eraseBackground = False )",False,was_pressed != self.corner_button_pressed,was_pressed,0.6516283750534058 4265,"def run_sanity_check ( self, ref_model ) : using_val_step = ref_model. val_dataloader is not None and is_overridden ( ""validation_step"", ref_model ) should_sanity_check = ( using_val_step and self. num_sanity_val_steps > 0 and self. limit_val_batches > 0 ) if should_sanity_check : self. reset_val_dataloader ( ref_model ) self. num_sanity_val_batches = [ min ( self. num_sanity_val_steps, val_batches ) for val_batches in self. num_val_batches ] self. running_sanity_check = True self. on_sanity_check_start ( ) _, eval_results = self. run_evaluation ( max_batches = self. num_sanity_val_batches ) if eval_results is not None and len ( eval_results ) > 0 : if : eval_results = eval_results [ - 1 ] _, _, _, callback_metrics, _ = self. process_dict_result ( eval_results ) self. logger_connector. callback_metrics = callback_metrics self. on_sanity_check_end ( ) self. running_sanity_check = False",False,"isinstance(eval_results, list)",len(eval_results) > 0,0.6518199443817139 4266,"def expand_elements_from ( target_or_item ) : elements_from = target_or_item. get ( ""elements_from"", None ) items = None if elements_from : if elements_from == ""archive"" : decompressed_directory = _decompress_target ( upload_config, target_or_item ) items = _directory_to_items ( decompressed_directory ) elif elements_from == ""bagit"" : _, elements_from_path = _has_src_to_path ( upload_config, target_or_item, is_dataset = False ) items = _bagit_to_items ( elements_from_path ) elif elements_from == ""bagit_archive"" : decompressed_directory = _decompress_target ( upload_config, target_or_item ) items = _bagit_to_items ( decompressed_directory ) elif : _, elements_from_path = _has_src_to_path ( upload_config, target_or_item, is_dataset = False ) items = _directory_to_items ( elements_from_path ) else : raise Exception ( ""Unknown elements from type encountered [%s]"" % elements_from ) if items : del target_or_item [ ""elements_from"" ] target_or_item [ ""elements"" ] = items",False,elements_from == 'directory',elements_from_path,0.6529148817062378 4267,"def _visit_import_alike ( self, node : Union [ cst. Import, cst. ImportFrom ] ) -> bool : names = node. names if isinstance ( names, cst. ImportStar ) : return False for name in names : self. provider. set_metadata ( name, self. scope ) asname = name. asname if : name_values = _gen_dotted_names ( cst. ensure_type ( asname. name, cst. Name ) ) else : name_values = _gen_dotted_names ( name. name ) for name_value, _ in name_values : self. scope. record_assignment ( name_value, node ) return False",False,asname is not None,asname,0.6627383232116699 4268,"def _handle_results ( outqueue, get, cache ) : thread = threading. current_thread ( ) while 1 : try : task = get ( ) except ( OSError, EOFError ) : util. debug ( ""result handler got EOFError/OSError -- exiting"" ) return if thread. _state : assert thread. _state == TERMINATE util. debug ( ""result handler found thread._state=TERMINATE"" ) break if task is None : util. debug ( ""result handler got sentinel"" ) break job, i, obj = task try : cache [ job ]. _set ( i, obj ) except KeyError : pass while cache and thread. _state!= TERMINATE : try : task = get ( ) except ( OSError, EOFError ) : util. debug ( ""result handler got EOFError/OSError -- exiting"" ) return if task is None : util. 
debug ( ""result handler ignoring extra sentinel"" ) continue job, i, obj = task try : cache [ job ]. _set ( i, obj ) except KeyError : pass if hasattr ( outqueue, ""_reader"" ) : util. debug ( ""ensuring that outqueue is not full"" ) min = self. predict_attr ( kwds, ""min"" ) max = self. predict_attr ( kwds, ""max"" ) if ""format"" in kwds : self. format = kwds. pop ( ""format"" ) if ""empty"" in kwds : self. empty = kwds. pop ( ""empty"" ) self. editing = False if width is None : w1 = w2 = """" if min is not None : w1 = self. format_value ( min ) if max is not None : w2 = self. format_value ( max ) if w2 : if : width = w1 else : width = w2 if width is None : width = 100 if lines is None : lines = 1 TextEditorWrapped. __init__ ( self, width, lines, ** kwds )",False,len(w1) > len(w2),width is None,0.6527270078659058 4270,"def set_bounds ( self, x, y, width, height ) : if self. native : if : vertical_shift = self. frame. vertical_shift else : vertical_shift = 0 self. native. Size = Size ( width, height ) self. native. Location = Point ( x, y + vertical_shift )",False,self.interface.parent is None,"hasattr(self.frame, 'vertical_shift')",0.658916711807251 4271,"def _inner ( * args, ** kwargs ) : component_manager = args [ 0 ]. component_manager for condition_name in condition_names : condition_result, err_msg = component_manager. evaluate_condition ( condition_name ) if : raise ComponentStartConditionNotMetError ( err_msg ) if not component_manager. all_components_running ( * components ) : raise ComponentsNotStartedError ( f""the following required components have not yet started: {json.dumps(components)}"" ) return method ( * args, ** kwargs )",True,not condition_result,not condition_result,0.6566318273544312 4272,"def sanitize_event_keys ( kwargs, valid_keys ) : for key in list ( kwargs. keys ( ) ) : if key not in valid_keys : kwargs. pop ( key ) for key in [ ""play"", ""role"", ""task"", ""playbook"" ] : if : if len ( kwargs [ ""event_data"" ] [ key ] ) > 1024 : kwargs [ ""event_data"" ] [ key ] = Truncator ( kwargs [ ""event_data"" ] [ key ] ). chars ( 1024 )",False,"isinstance(kwargs.get('event_data', {}).get(key), str)",key in kwargs,0.655011773109436 4273,"def _print_unix ( objects, sep, end, file, flush ) : """"""A print_() implementation which writes bytes"""""" encoding = _encoding if isinstance ( sep, text_type ) : sep = sep. encode ( encoding, ""replace"" ) if not isinstance ( sep, bytes ) : raise TypeError if isinstance ( end, text_type ) : end = end. encode ( encoding, ""replace"" ) if not isinstance ( end, bytes ) : raise TypeError if end == b""\n"" : end = os. linesep if PY3 : end = end. encode ( ""ascii"" ) parts = [ ] for obj in objects : if : obj = text_type ( obj ) if isinstance ( obj, text_type ) : if PY2 : obj = obj. encode ( encoding, ""replace"" ) else : try : obj = obj. encode ( encoding, ""surrogateescape"" ) except UnicodeEncodeError : obj = obj. encode ( encoding, ""replace"" ) assert isinstance ( obj, bytes ) parts. append ( obj ) data = sep. join ( parts ) + end assert isinstance ( data, bytes ) file = getattr ( file, ""buffer"", file ) try : file. write ( data ) except TypeError : if PY3 : surr_data = data. decode ( encoding, ""surrogate",False,"not isinstance(obj, text_type) and (not isinstance(obj, bytes))",flush,0.6462098360061646 4274,"def __init__ ( self, * args, ** kwargs ) : self. ignore_optional_for_conversion = kwargs. pop ( ""ignore_optional_for_conversion"", False ) super ( ). __init__ ( * args, ** kwargs ) self. _help_override = kwargs. 
pop ( ""help_override"", None ) self. translator = kwargs. pop ( ""i18n"", None ) if self. parent is None : for name in ( self. name, * self. aliases ) : if : raise RuntimeError ( f""The name `{name}` cannot be set as a command name. It is reserved for internal use."" ) if len ( self. qualified_name ) > 60 : raise RuntimeError ( f""This command ({self.qualified_name}) has an excessively long qualified name, "" ""and will not be added to the bot to prevent breaking tools and menus. (limit 60)"" )",False,name in RESERVED_COMMAND_NAMES,self.qualified_name is None,0.6603841781616211 4275,"def _download_file ( url, savepath, print_progress ) : if : print ( ""Connecting to {}"". format ( url ) ) r = requests. get ( url, stream = True, timeout = 15 ) total_length = r. headers. get ( ""content-length"" ) if total_length is None : with open ( savepath, ""wb"" ) as f : shutil. copyfileobj ( r. raw, f ) else : with open ( savepath, ""wb"" ) as f : dl = 0 total_length = int ( total_length ) starttime = time. time ( ) if : print ( ""Downloading %s"" % os. path. basename ( savepath ) ) for data in r. iter_content ( chunk_size = 4096 ) : dl += len ( data ) f. write ( data ) if : done = int ( 50 * dl / total_length ) progress ( ""[%-50s] %.2f%%"" % ( ""="" * done, float ( 100 * dl ) / total_length ) ) if : progress ( ""[%-50s] %.2f%%"" % ( ""="" * 50, 100 ), end = True )",True,print_progress,print_progress,0.6613013744354248 4276,"def emit ( self, batch, event = None ) : try : if batch : envelopes = [ self. span_data_to_envelope ( sd ) for sd in batch ] envelopes = self. apply_telemetry_processors ( envelopes ) result = self. _transmit ( envelopes ) if result > 0 : self. storage. put ( envelopes, result ) if : if isinstance ( event, QueueExitEvent ) : self. _transmit_from_storage ( ) event. set ( ) return if len ( batch ) < self. options. max_batch_size : self. _transmit_from_storage ( ) except Exception : logger. exception ( ""Exception occurred while exporting the data."" )",False,event,event is not None,0.6912020444869995 4277,"def get_file_list ( path, include_files ) : """"""Return file list for the given path."""""" hide_list = [ ""boot"", ""bootmgr"", ""cache"", ""config.msi"", ""msocache"", ""recovery"", ""$recycle.bin"", ""recycler"", ""system volume information"", ""temporary internet files"", ] hide_list += [ "".fseventd"", "".spotlight"", "".trashes"", "".vol"", ""cachedmessages"", ""caches"", ""trash"", ] hide_list += [ "".git"" ] file_list = [ ] for filename in os. listdir ( path ) : if filename. lower ( ) in hide_list : continue full_filename = os. path. join ( path, filename ) is_dir = os. path. isdir ( full_filename ) if : continue entry = { ""name"" : filename, ""path"" : full_filename } if not is_dir : entry [ ""isFile"" ] = True file_list. append ( entry ) return file_list",False,not include_files and (not is_dir),include_files and is_dir and (not is_dir),0.6483344435691833 4278,"def _init_export_dir ( dir ) : util. ensure_dir ( dir ) try : util. touch ( os. path. join ( dir, "".guild-nocopy"" ) ) except IOError as e : if : raise RunsExportError ( ""'%s' is not a directory"" % dir ) else : raise RunsExportError ( ""error initializing export directory '%s': %s"" % ( dir, e ) )",False,e.errno == errno.ENOTDIR,os.path.isdir(dir),0.6575901508331299 4279,"def test_decimal_from_float ( f ) : d = Decimal ( f ) if isfinite ( f ) and d. is_finite ( ) : try : decstr = str ( d. quantize ( Decimal ( ""1.000000"" ) ) ) except InvalidOperation : return try : py_d = Object. 
parse ( decstr ) except RuntimeError as e : if : py_d = Object. parse ( str ( f ) ) assert isclose ( py_d, d, abs_tol = 1e-5 ), ( d, f. hex ( ) ) else : with pytest. raises ( PdfError ) : Object. parse ( str ( d ) )",False,'overflow' in str(e) or 'underflow' in str(e),e.args[0] in [TAB > 0,0.6533418297767639 4280,"def Execute ( self, text ) : """"""Replace selection with text and run commands."""""" ps1 = str ( sys. ps1 ) ps2 = str ( sys. ps2 ) endpos = self. GetTextLength ( ) self. SetCurrentPos ( endpos ) startpos = self. promptPosEnd self. SetSelection ( startpos, endpos ) self. ReplaceSelection ( """" ) text = text. lstrip ( ) text = self. fixLineEndings ( text ) text = self. lstripPrompt ( text ) text = text. replace ( os. linesep + ps1, ""\n"" ) text = text. replace ( os. linesep + ps2, ""\n"" ) text = text. replace ( os. linesep, ""\n"" ) lines = text. split ( ""\n"" ) commands = [ ] command = """" for line in lines : if : line = """" lstrip = line. lstrip ( ) if ( line. strip ( )!= """" and lstrip == line and lstrip [ : 4 ] not in [ ""else"", ""elif"" ] and lstrip [ : 6 ]!= ""except"" ) : if command : commands. append ( command ) command = line else : command += ""\n"" command += line commands. append ( command ) <",False,line.strip() == ps2.strip(),self.HasTab(),0.6509056091308594 4281,"def describe_dict ( o ) : sl = [ "" l = len ( o ) sl. append ( str ( l ) ) if l : sl. append ( ""-"" ) iterator = o. iteritems ( ) firstitem = True try : while True : if : firstitem = False else : sl. append ( "", "" ) k, v = iterator. next ( ) sl. append ( describe_object ( k ) ) sl. append ( "": "" ) sl. append ( describe_object ( v ) ) except StopIteration : pass sl. append ( "">"" ) return """". join ( sl )",True,firstitem,firstitem,0.6981431245803833 4282,"def calculateEnableMargins ( self ) : self. cnc. resetEnableMargins ( ) for block in self. blocks : if : CNC. vars [ ""xmin"" ] = min ( CNC. vars [ ""xmin"" ], block. xmin ) CNC. vars [ ""ymin"" ] = min ( CNC. vars [ ""ymin"" ], block. ymin ) CNC. vars [ ""zmin"" ] = min ( CNC. vars [ ""zmin"" ], block. zmin ) CNC. vars [ ""xmax"" ] = max ( CNC. vars [ ""xmax"" ], block. xmax ) CNC. vars [ ""ymax"" ] = max ( CNC. vars [ ""ymax"" ], block. ymax ) CNC. vars [ ""zmax"" ] = max ( CNC. vars [ ""zmax"" ], block. zmax )",False,block.enable,block.depth > 0,0.6705944538116455 4283,"def test_attrs_arrays ( Layer, data, ndim ) : """"""Test layer attributes and arrays."""""" np. random. seed ( 0 ) layer = Layer ( data ) assert layer. ndim == ndim properties = layer. _get_state ( ) signature = callsignature ( Layer ) for prop in properties. keys ( ) : assert prop in signature. parameters assert len ( properties ) == len ( signature. parameters ) - 1 new_layer = Layer ( ** properties ) for prop in properties. keys ( ) : if isinstance ( getattr ( layer, prop ), list ) : assert np. all ( [ np. all ( ol == nl ) for ol, nl in zip ( getattr ( layer, prop ), getattr ( new_layer, prop ) ) ] ) elif : assert np. all ( [ np. all ( value == getattr ( new_layer, prop ) [ key ] ) for key, value in getattr ( layer, prop ). items ( ) ] ) else : assert np. all ( getattr ( layer, prop ) == getattr ( new_layer, prop ) )",False,"isinstance(getattr(layer, prop), dict)","isinstance(layer, layer, dict)",0.6544009447097778 4284,"def __call__ ( self ) : dmin, dmax = self. viewlim_to_dt ( ) ymin = self. base. le ( dmin. year ) ymax = self. base. ge ( dmax. year ) ticks = [ dmin. replace ( year = ymin, ** self. 
replaced ) ] while 1 : dt = ticks [ - 1 ] if : return date2num ( ticks ) year = dt. year + self. base. get_base ( ) ticks. append ( dt. replace ( year = year, ** self. replaced ) )",False,dt.year >= ymax,dt.year + self.base.get_base() < dmin,0.6773662567138672 4285,"def create_admin ( username, password ) : if len ( password ) < 8 : click. echo ( ""Password length too short"" ) return False else : user = User ( username = username, password = password, is_admin = True ) if : click. echo ( f""User {username} successfully created."" ) return True else : click. echo ( ""User with given username already exists."" ) return False",False,user.insert(),user.is_admin(),0.6606122255325317 4286,"def what ( self, page, args ) : only_existing = args. get ( ""existing"", False ) exclude_current = args. get ( ""excludecurrent"", False ) trailing_path = """" request = args. get ( ""request"", None ) if request : trailing_path = request. _feincms_extra_context. get ( ""extra_path"", """" ) [ 1 : ] translations = dict ( ( t. language, t ) for t in page. available_translations ( ) ) translations [ page. language ] = page links = [ ] for key, name in settings. LANGUAGES : if : continue if key in translations : links. append ( ( key, name, translations [ key ]. get_absolute_url ( ) + trailing_path ) ) elif not only_existing : links. append ( ( key, name, None ) ) return links",False,exclude_current and key == page.language,exclude_current and key not in translations,0.6525720357894897 4287,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if ftype == TType. STRUCT : self. success = evernote. edam. type. ttypes. User ( ) self. success. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 1 : if ftype == TType. STRUCT : self. userException = evernote. edam. error. ttypes. EDAMUserException ( ) self. userException. read ( iprot ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRUCT : self. systemException = evernote. edam. error. ttypes. EDAMSystemException ( ) self. systemException. read ( iprot ) ",True,fid == 2,fid == 2,0.6798498630523682 4288,"def update ( self, frame_no ) : self. _draw_label ( ) self. _start_column = min ( self. _start_column, self. _column ) self. _start_column += _find_min_start ( self. _value [ self. _start_column : self. _column + 1 ], self. width, self. _frame. canvas. unicode_aware, self. _column >= self. string_len ( self. _value ), ) ( colour, attr, bg ) = self. _pick_colours ( ""readonly"" if self. _readonly else ""edit_text"" ) text = self. _value [ self. _start_column : ] text = _enforce_width ( text, self. width, self. _frame. canvas. unicode_aware ) if self. _hide_char : text = self. _hide_char [ 0 ] * len ( text ) text += "" "" * ( self. width - self. string_len ( text ) ) self. _frame. canvas. print_at ( text, self. _x + self. _offset, self. _y, colour, attr, bg ) if self. _has_focus : text_width = self. string_len ( text [ : self. _column - self. _start_column ] ) self. _draw_cursor ( "" "" if : else self. _hide_char [ 0 ] if self. _hide_char else self. _value [ self. _column ], frame_no, self. _x + self. 
_offset + text_width, <",False,self._column >= len(self._value),self._hide_char,0.6521285772323608 4289,"def ShowTimelineMenu ( self, position, layer_id ) : log. info ( ""ShowTimelineMenu: position: %s, layer: %s"" % ( position, layer_id ) ) _ = get_app ( ). _tr clipboard_clip_ids = [ k for k, v in self. copy_clipboard. items ( ) if v. get ( ""id"" ) ] clipboard_tran_ids = [ k for k, v in self. copy_transition_clipboard. items ( ) if v. get ( ""id"" ) ] if self. copy_clipboard or self. copy_transition_clipboard : if : menu = QMenu ( self ) Paste_Clip = menu. addAction ( _ ( ""Paste"" ) ) Paste_Clip. setShortcut ( QKeySequence ( self. window. getShortcutByName ( ""pasteAll"" ) ) ) Paste_Clip. triggered. connect ( partial ( self. Paste_Triggered, MENU_PASTE, float ( position ), int ( layer_id ), [ ], [ ], ) ) return menu. popup ( QCursor. pos ( ) )",False,len(clipboard_clip_ids) + len(clipboard_tran_ids) > 0,len(clipboard_clip_ids) > 0,0.6501489877700806 4290,"def _validate_pandas ( self, actual_column_type, expected_type, ) : if expected_type is None : success = True else : comp_types = [ ] try : comp_types. append ( np. dtype ( expected_type ). type ) except TypeError : try : pd_type = getattr ( pd, expected_type ) if isinstance ( pd_type, type ) : comp_types. append ( pd_type ) except AttributeError : pass try : pd_type = getattr ( pd. core. dtypes. dtypes, expected_type ) if isinstance ( pd_type, type ) : comp_types. append ( pd_type ) except AttributeError : pass native_type = _native_type_type_map ( expected_type ) if : comp_types. extend ( native_type ) success = actual_column_type. type in comp_types return { ""success"" : success, ""result"" : { ""observed_value"" : actual_column_type. type. __name__ }, }",True,native_type is not None,native_type is not None,0.6611142754554749 4291,"def test_pretrained_distilbert_models ( wo_valid_len ) : models = [ ""distilbert_6_768_12"" ] pretrained_datasets = [ ""distilbert_book_corpus_wiki_en_uncased"" ] vocab_size = { ""distilbert_book_corpus_wiki_en_uncased"" : 30522 } special_tokens = [ ""[UNK]"", ""[PAD]"", ""[SEP]"", ""[CLS]"", ""[MASK]"" ] ones = mx. nd. ones ( ( 2, 10 ) ) valid_length = mx. nd. ones ( ( 2, ) ) for model_name in models : for dataset in pretrained_datasets : eprint ( ""testing forward for %s on %s"" % ( model_name, dataset ) ) model, vocab = nlp. model. get_model ( model_name, dataset_name = dataset, pretrained = True, root = ""tests/data/model/"", ) assert len ( vocab ) == vocab_size [ dataset ] for token in special_tokens : assert token in vocab, ""Token %s not found in the vocab"" % token assert vocab [ ""RandomWordByHaibin"" ] == vocab [ vocab. unknown_token ] assert vocab. padding_token == ""[PAD]"" assert vocab. unknown_token == ""[UNK]"" model. hybridize ( ) if : output = model ( ones ) ",False,wo_valid_len,pretrained,0.6612242460250854 4292,"def create_child ( self, value = None, _id = None ) : with atomic ( savepoint = False ) : child_key = self. get_next_child_key ( ) if : value = child_key child = self. __class__. objects. create ( id = _id, key = child_key, value = value ) return child",False,value is None,value is None and child_key and (child_key not in value),0.6629682779312134 4293,"def on_bt_search_clicked ( self, widget ) : if self. current_provider is None : return query = self. en_query. get_text ( ) @ self. obtain_podcasts_with def load_data ( ) : if self. current_provider. kind == directory. Provider. PROVIDER_SEARCH : return self. current_provider. on_search ( query ) elif : return self. 
current_provider. on_url ( query ) elif self. current_provider. kind == directory. Provider. PROVIDER_FILE : return self. current_provider. on_file ( query )",False,self.current_provider.kind == directory.Provider.PROVIDER_URL,self.current_provider.kind == directory.Provider.SCHEME_URL,0.6515692472457886 4294,"def testPartExecutor ( self, test_case, is_setup_or_teardown = False ) : old_success = self. success self. success = True try : yield except KeyboardInterrupt : raise except unittest. SkipTest as e : self. success = False self. skipped. append ( ( test_case, str ( e ) ) ) except _ShouldStop : pass except unittest. case. _ExpectedFailure as e : self. success = False self. expecting_failure = True self. expectedFailure = e. exc_info except unittest. case. _UnexpectedSuccess : self. expecting_failure = True except : self. success = False if : self. errors_setup_and_teardown. append ( ( test_case, sys. exc_info ( ) ) ) else : self. errors. append ( ( test_case, sys. exc_info ( ) ) ) else : if self. result_supports_subtests and self. success : self. errors. append ( ( test_case, None ) ) finally : self. success = self. success and old_success",True,is_setup_or_teardown,is_setup_or_teardown,0.6469103097915649 4295,"def init_weights ( self ) : for m in self. modules ( ) : if isinstance ( m, nn. Linear ) : normal_init ( m, std = 0.01 ) if : xavier_init ( m, distribution = ""uniform"" ) if isinstance ( m, nn. BatchNorm3d ) : constant_init ( m, 1 )",True,"isinstance(m, nn.Conv3d)","isinstance(m, nn.Conv3d)",0.6551833152770996 4296,"def _set_ref ( self, value ) : if value : if not self. _flags & 4 : return if self. _flags & 2 : self. loop. ref ( ) self. _flags &= ~ 6 else : if : return self. _flags |= 4 if not self. _flags & 2 and libev. ev_is_active ( self. _watcher ) : self. loop. unref ( ) self. _flags |= 2",False,self._flags & 4,value,0.6662853956222534 4297,"def _Dynamic_Put ( self, put_request, put_response ) : """"""Send a put request to the datastore server."""""" put_request. set_trusted ( self. __trusted ) ent_kinds = [ ] for ent in put_request. entity_list ( ) : last_path = ent. key ( ). path ( ). element_list ( ) [ - 1 ] if last_path. type ( ) not in ent_kinds : ent_kinds. append ( last_path. type ( ) ) for kind in ent_kinds : indexes = self. __index_cache. get ( kind ) if : for index in indexes : new_composite = put_request. add_composite_index ( ) new_composite. CopyFrom ( index ) self. _RemoteSend ( put_request, put_response, ""Put"" ) return put_response",True,indexes,indexes,0.7012069225311279 4298,"def observe ( self, message, * args ) : if len ( args ) > 0 : return if self. _handleException ( message ) : return if self. _handleScriptError ( message ) : return try : messagetext = message. message if : self. log. debug ( ""FILTERED: %s"", messagetext ) else : self. log. info ( ""%s"", messagetext ) except Exception : self. log. error ( ""Exception occurred while handling logging message. Failing gracefully."" ) return",False,any((x in messagetext for x in self.ignored_error_strings)),self.log.getEffectiveLevel() == logging.DEBUG,0.6573004722595215 4299,"def GenerateMethods ( ag ) : global counter tg = ag. DefineType ( ""Abomination"" ) system_assembly = clr. LoadAssemblyByName ( ""System, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089"" ) mscorlib_assembly = clr. LoadAssemblyByName ( ""mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089"" ) for ref in ( system_assembly, mscorlib_assembly ) : for t in ref. GetExportedTypes ( ) : if t. 
ContainsGenericParameters or t. FullName in skipTypes : continue if : continue if trace : print ( counter, t. FullName ) EmitTestMethod ( tg, ""Test_%d"" % counter, t ) EmitTestMethod ( tg, ""TestRef_%d"" % counter, t. MakeByRefType ( ) ) counter += 1 return tg. CreateType ( )",False,supportsIsByRefLike and t.IsByRefLike,t.IsGenericParameters(),0.6508224606513977 4300,"def get_snpeff_files ( data ) : try : snpeff_db, datadir = get_db ( data ) except ValueError : snpeff_db = None if snpeff_db : clean_snpeff_db = snpeff_db. replace ( ""."", ""_"" ) snpeff_files = glob. glob ( os. path. join ( datadir, snpeff_db, ""*"" ) ) if : base_files = [ x for x in snpeff_files if x. endswith ( ""/snpEffectPredictor.bin"" ) ] assert len ( base_files ) == 1, base_files del snpeff_files [ snpeff_files. index ( base_files [ 0 ] ) ] return { ""base"" : base_files [ 0 ], ""indexes"" : snpeff_files } else : return { }",True,len(snpeff_files) > 0,len(snpeff_files) > 0,0.6556860208511353 4301,"def _generate_samples ( self ) : if not self. _samples : self. _inject_default_sample_section ( ) output = """" eval_scope = self. _generate_eval_scope ( ) for sample in self. _samples : command = _command_template. format ( method = self. _method, kwargs = sample. kwargs ) validator = SampleCodeValidator ( command ) if : msg = ( ""Invalid code elements detected. Sample generation will be "" ""skipped for method `{method}` with arguments `{kwargs}`."" ). format ( method = self. _method, kwargs = sample. kwargs, ) self. _log_warning ( msg ) continue try : Faker. seed ( sample. seed ) results = ""\n"". join ( [ self. _stringify_result ( eval ( command, eval_scope ) ) for _ in range ( sample. size ) ] ) except Exception : msg = ""Sample generation failed for method `{method}` with arguments `{kwargs}`."". format ( method = self. _method, kwargs = sample. kwargs, ) self. _log_warning ( msg ) ",False,validator.errors,validator.raise_error,0.6785060167312622 4302,"def test_open_read_bytes ( self, sftp ) : """"""Test reading bytes from a file"""""" f = None try : self. _create_file ( ""file"", ""xxx"" ) f = yield from sftp. open ( ""file"", ""rb"" ) self. assertEqual ( ( yield from f. read ( ) ), b""xxx"" ) finally : if : yield from f. close ( ) remove ( ""file"" )",True,f,f,0.6892582774162292 4303,"def delete_all ( path ) : ppath = os. getcwd ( ) os. chdir ( path ) for fn in glob. glob ( ""*"" ) : fn_full = os. path. join ( path, fn ) if os. path. isdir ( fn ) : delete_all ( fn_full ) elif fn. endswith ( "".png"" ) : os. remove ( fn_full ) elif : os. remove ( fn_full ) elif DELETE_ALL_OLD : os. remove ( fn_full ) os. chdir ( ppath ) os. rmdir ( path )",False,fn.endswith('.md'),delete_ALL_OLD,0.6463217735290527 4304,"def main ( ) : argv = sys. argv if len ( argv ) == 1 : sys. argv. append ( ""-h"" ) read_config ( ) sys. exit ( ) try : config, args = read_config ( ) if config [ ""pelinker_fname"" ] : load_linker_sequences ( config [ ""pelinker_fname"" ] ) if : raise RuntimeError ( ""No SFF file given?"" ) extract_reads_from_sff ( config, args ) except ( OSError, IOError, RuntimeError ) as errval : print ( errval ) return 1 if stern_warning : return 1 return 0",False,len(args) == 0,not args[0],0.6616754531860352 4305,"def CollectImage ( self, responses ) : """"""Collect the image and store it into the database."""""" for response in responses : if : for log in response. logs : self. Log ( log ) if not responses. success : raise flow_base. FlowError ( ""Failed to dump the flash image: {0}"". format ( responses. 
status ) ) elif not responses. First ( ). path : self. Log ( ""No path returned. Skipping host."" ) return else : image_path = responses. First ( ). path self. CallFlow ( transfer. MultiGetFile. __name__, pathspecs = [ image_path ], request_data = { ""image_path"" : image_path }, next_state = compatibility. GetName ( self. DeleteTemporaryImage ), )",False,"hasattr(response, 'logs')",response.status == 200,0.6537551879882812 4306,"def _resize_img ( self, results ) : """"""Resize images with ``results['scale']``."""""" for key in results. get ( ""img_fields"", [ ""img"" ] ) : if : img, scale_factor = mmcv. imrescale ( results [ key ], results [ ""scale"" ], return_scale = True, backend = self. backend ) new_h, new_w = img. shape [ : 2 ] h, w = results [ key ]. shape [ : 2 ] w_scale = new_w / w h_scale = new_h / h else : img, w_scale, h_scale = mmcv. imresize ( results [ key ], results [ ""scale"" ], return_scale = True, backend = self. backend ) results [ key ] = img scale_factor = np. array ( [ w_scale, h_scale, w_scale, h_scale ], dtype = np. float32 ) results [ ""img_shape"" ] = img. shape results [ ""pad_shape"" ] = img. shape results [ ""scale_factor"" ] = scale_factor results [ ""keep_ratio"" ] = self. keep_ratio",False,self.keep_ratio,key in results,0.6535025835037231 4307,"def format_amounts ( amounts, link ) : ret = [ ] for chain_id in chain_ids : chain = chains [ chain_id ] if : ret += [ "", "" ] ret += [ format_satoshis ( amounts [ chain_id ], chain ), "" "", escape ( chain [ ""code3"" ] ) ] if link : other = hash_to_address ( chain [ ""address_version"" ], binaddr ) if other!= address : ret [ - 1 ] = [ ' page [ ""dotdot"" ], ""address/"", other, '"">', ret [ - 1 ], """", ] return ret",False,chain_id != chain_ids[0],chain,0.6586318612098694 4308,"def delete_revision ( request, document_path, revision_id ) : """"""Delete a revision."""""" document_locale, document_slug, needs_redirect = locale_and_slug_from_path ( document_path, request ) revision = get_object_or_404 ( Revision, pk = revision_id, document__slug = document_slug ) document = revision. document if request. method == ""GET"" : return render ( request, ""wiki/confirm_revision_delete.html"", { ""revision"" : revision, ""document"" : document }, ) log. warning ( ""User %s is deleting revision with id=%s"" % ( request. user, revision. id ) ) Revision. objects. filter ( based_on = revision ). update ( based_on = None ) if document. current_revision == revision : revs = document. revisions. filter ( is_approved = True ). order_by ( ""-created"" ) if : rev = revs [ 1 ] rev. make_current ( ) else : document. delete ( ) return redirect ( ""wiki.all_documents"" ) revision. delete ( ) return HttpResponseRedirect ( reverse ( ""wiki.document_revisions"", args = [ document. full_path ] ) )",False,len(revs) > 1,revs,0.6525915265083313 4309,"def saveFile ( self, event ) : """"""Prompt for the name of a file and put the body text of the selected node into it.."""""" c = self. c w = self. editWidget ( event ) if not w : return fileName = g. app. gui. runSaveFileDialog ( c, initialfile = None, title = ""save-file"", filetypes = [ ( ""Text"", ""*.txt"" ), ( ""All files"", ""*"" ) ], defaultextension = "".txt"", ) if fileName : try : f = open ( fileName, ""w"" ) s = w. getAllText ( ) if : s = g. toEncodedString ( s, encoding = ""utf-8"", reportErrors = True ) f. write ( s ) f. close ( ) except IOError : g. es ( ""can not create"", fileName )",False,not g.isPython3,s,0.6498782634735107 4310,"def _serialize ( self, stream ) : write = stream. 
write write ( ( ""tree %s\n"" % self. tree ). encode ( ""ascii"" ) ) for p in self. parents : write ( ( ""parent %s\n"" % p ). encode ( ""ascii"" ) ) a = self. author aname = a. name c = self. committer fmt = ""%s %s <%s> %s %s\n"" write ( ( fmt % ( ""author"", aname, a. email, self. authored_date, altz_to_utctz_str ( self. author_tz_offset ), ) ). encode ( self. encoding ) ) aname = c. name write ( ( fmt % ( ""committer"", aname, c. email, self. committed_date, altz_to_utctz_str ( self. committer_tz_offset ), ) ). encode ( self. encoding ) ) if self. encoding!= self. default_encoding : write ( ( ""encoding %s\n"" % self. encoding ). encode ( ""ascii"" ) ) try : ",False,self.__getattribute__('gpgsig') is not None,self.encoding == 'utf-8',0.6501185894012451 4311,"def disconnect ( exiting_interpreter = False ) : """"""Disconnect this worker from the raylet and object store."""""" worker = global_worker if worker. connected : worker. threads_stopped. set ( ) if hasattr ( worker, ""import_thread"" ) : worker. import_thread. join_import_thread ( ) if hasattr ( worker, ""listener_thread"" ) : worker. listener_thread. join ( ) if hasattr ( worker, ""printer_thread"" ) : worker. printer_thread. join ( ) if : worker. logger_thread. join ( ) worker. threads_stopped. clear ( ) worker. _session_index += 1 worker. node = None worker. cached_functions_to_run = [ ] worker. serialization_context_map. clear ( ) try : ray_actor = ray. actor except AttributeError : ray_actor = None if ray_actor is not None : ray_actor. ActorClassMethodMetadata. reset_cache ( )",True,"hasattr(worker, 'logger_thread')","hasattr(worker, 'logger_thread')",0.6539206504821777 4312,"def main ( _ ) : problem_name = FLAGS. problem if ""video"" not in problem_name and ""gym"" not in problem_name : print ( ""This tool only works for video problems."" ) return mode = tf. estimator. ModeKeys. TRAIN hparams = trainer_lib. create_hparams ( FLAGS. hparams_set, FLAGS. hparams, data_dir = os. path. expanduser ( FLAGS. data_dir ), problem_name = problem_name, ) dataset = hparams. problem. input_fn ( mode, hparams ) features = dataset. make_one_shot_iterator ( ). get_next ( ) tf. gfile. MakeDirs ( FLAGS. output_dir ) base_template = os. path. join ( FLAGS. output_dir, FLAGS. problem ) count = 0 with tf. train. MonitoredTrainingSession ( ) as sess : while not sess. should_stop ( ) : data, _ = sess. run ( features ) video_batch = np. concatenate ( ( data [ ""inputs"" ], data [ ""targets"" ] ), axis = 1 ) for video in video_batch : print ( ""Saving {}/{}"". format ( count, FLAGS. num_samples ) ) name = ""%s_%05d"" % ( base_template, count ) decoding. save_video ( video, name + ""_{:05d}.png"" ) create_gif ( name ) count += 1 if : sys. exit ( 0 )",False,count == FLAGS.num_samples,count > 3,0.6560604572296143 4313,"def dump ( self ) : """"""Write the cached history to external storage."""""" opts = builtins. __xonsh__. env. get ( ""HISTCONTROL"" ) last_inp = None cmds = [ ] for cmd in self. buffer : if ""ignoredups"" in opts and cmd [ ""inp"" ] == last_inp : if : self. skip ( 1 ) continue if ""ignoreerr"" in opts and cmd [ ""rtn"" ]!= 0 : if : self. skip ( 1 ) continue cmds. append ( cmd ) last_inp = cmd [ ""inp"" ] with open ( self. filename, ""r"", newline = ""\n"" ) as f : hist = xlj. LazyJSON ( f ). load ( ) load_hist_len = len ( hist [ ""cmds"" ] ) hist [ ""cmds"" ]. extend ( cmds ) if self. at_exit : hist [ ""ts"" ] [ 1 ] = time. time ( ) hist [ ""locked"" ] = False if not builtins. __xonsh__. env. 
get ( ""XONSH_STORE_STDOUT"", False ) : [ cmd. pop ( ""out"" ) for cmd in hist [ ""cmds"" ] [ load_hist_len : ] if ""out"" in cmd ] with open ( self. filename, ""w"", newline = ""\n"" ) as f : xlj. ljdump ( hist, f, sort_keys = True )",False,self.skip is not None,"""skip"" in opts and cmd[ 'rtn""] == 0",0.6496889591217041 4314,"def target_function ( self, running, data ) : while running. is_set ( ) : try : username, password = data. next ( ). split ( "":"" ) except StopIteration : break else : ftp_client = self. ftp_create ( ) if ftp_client. connect ( retries = 3 ) is None : print_error ( ""Too many connections problems. Quiting..."", verbose = self. verbosity ) return if ftp_client. login ( username, password ) : if : running. clear ( ) self. credentials. append ( ( self. target, self. port, self. target_protocol, username, password ) ) ftp_client. close ( )",False,self.stop_on_success,running.is_set(),0.65102219581604 4315,"def _group_by_commit_and_time ( self, hits ) : result = { } for hit in hits : source_hit = hit [ ""_source"" ] key = ""%s_%s"" % ( source_hit [ ""commit_info"" ] [ ""id"" ], source_hit [ ""datetime"" ] ) benchmark = self. _benchmark_from_es_record ( source_hit ) if : result [ key ] [ ""benchmarks"" ]. append ( benchmark ) else : run_info = self. _run_info_from_es_record ( source_hit ) run_info [ ""benchmarks"" ] = [ benchmark ] result [ key ] = run_info return result",True,key in result,key in result,0.6747679710388184 4316,"def get_num ( line, char_ptr, num_chars ) : char_ptr = char_ptr + 1 numstr = """" good = ""-.0123456789"" while char_ptr < num_chars : digit = line [ char_ptr ] if : numstr = numstr + digit char_ptr = char_ptr + 1 else : break return numstr",False,good.find(digit) != -1,digit.lower() == good,0.6536663770675659 4317,"def save_graph_base ( net, file_name, graph_name = ""net"", op_only = True, blob_rename_func = None ) : graph = None ops = net. op if blob_rename_func is not None : ops = _modify_blob_names ( ops, blob_rename_func ) if not op_only : graph = net_drawer. GetPydotGraph ( ops, graph_name, rankdir = ""TB"" ) else : graph = net_drawer. GetPydotGraphMinimal ( ops, graph_name, rankdir = ""TB"", minimal_dependency = True ) try : par_dir = os. path. dirname ( file_name ) if not os. path. exists ( par_dir ) : os. makedirs ( par_dir ) format = os. path. splitext ( os. path. basename ( file_name ) ) [ - 1 ] if format == "".png"" : graph. write_png ( file_name ) elif format == "".pdf"" : graph. write_pdf ( file_name ) elif : graph. write_svg ( file_name ) else : print ( ""Incorrect format {}"". format ( format ) ) except Exception as e : print ( ""Error when writing graph to image {}"". format ( e ) ) return graph",True,format == '.svg',format == '.svg',0.6618694067001343 4318,"def _execute_for_all_tables ( self, app, bind, operation, skip_tables = False ) : app = self. get_app ( app ) if bind == ""__all__"" : binds = [ None ] + list ( app. config. get ( ""SQLALCHEMY_BINDS"" ) or ( ) ) elif isinstance ( bind, string_types ) or bind is None : binds = [ bind ] else : binds = bind for bind in binds : extra = { } if : tables = self. get_tables_for_bind ( bind ) extra [ ""tables"" ] = tables op = getattr ( self. Model. metadata, operation ) op ( bind = self. get_engine ( app, bind ), ** extra )",False,not skip_tables,skip_tables,0.6631405353546143 4319,"def _check_presale_dates ( self ) : if self. event. presale_start and self. now_dt < self. event. presale_start : raise CartError ( error_messages [ ""not_started"" ] ) if self. event. 
presale_has_ended : raise CartError ( error_messages [ ""ended"" ] ) if not self. event. has_subevents : tlv = self. event. settings. get ( ""payment_term_last"", as_type = RelativeDateWrapper ) if tlv : term_last = make_aware ( datetime. combine ( tlv. datetime ( self. event ). date ( ), time ( hour = 23, minute = 59, second = 59 ) ), self. event. timezone, ) if : raise CartError ( error_messages [ ""payment_ended"" ] )",False,term_last < self.now_dt,self.event.settings.get('payment_ended_timestamp') and self.event.settings.get('payment_term_last') and self.event.settings.get('payment_term_last'),0.6537894010543823 4320,"def action_delete ( self, ids ) : try : query = tools. get_query_for_ids ( self. get_query ( ), self. model, ids ) if : count = query. delete ( synchronize_session = False ) else : count = 0 for m in query. all ( ) : if self. delete_model ( m ) : count += 1 self. session. commit ( ) flash ( ngettext ( ""Record was successfully deleted."", ""%(count)s records were successfully deleted."", count, count = count, ), ""success"", ) except Exception as ex : if not self. handle_view_exception ( ex ) : raise flash ( gettext ( ""Failed to delete records. %(error)s"", error = str ( ex ) ), ""error"" )",False,self.fast_mass_delete,self.synchronize,0.6512268781661987 4321,"def __init__ ( self, raw ) : ticker_ticks = { } for tick in raw [ ""results"" ] : if : ticker_ticks [ tick [ ""T"" ] ]. append ( tick ) else : ticker_ticks [ tick [ ""T"" ] ] = [ tick ] super ( ). __init__ ( { ticker : Aggsv2 ( { ""results"" : ticks } ) for ticker, ticks in ticker_ticks. items ( ) } )",False,ticker_ticks.get(tick['T']),tick['T'] in ticker_ticks,0.6608593463897705 4322,"def onMessage ( self, message, metadata ) : if ""tags"" not in message : return ( message, metadata ) if ""githubeventsqs"" not in message [ ""tags"" ] : return ( message, metadata ) newmessage = { } newmessage [ ""details"" ] = { } newmessage [ ""category"" ] = ""github"" newmessage [ ""tags"" ] = [ ""github"", ""webhook"" ] newmessage [ ""eventsource"" ] = ""githubeventsqs"" if ""event"" in message [ ""details"" ] : newmessage [ ""source"" ] = message [ ""details"" ] [ ""event"" ] else : newmessage [ ""source"" ] = ""UNKNOWN"" if ""request_id"" in message [ ""details"" ] : newmessage [ ""details"" ] [ ""request_id"" ] = message [ ""details"" ] [ ""request_id"" ] else : newmessage [ ""details"" ] [ ""request_id"" ] = ""UNKNOWN"" if newmessage [ ""source"" ] in self. eventtypes : for key in self. yap [ newmessage [ ""source"" ] ] : mappedvalue = jmespath. search ( self. yap [ newmessage [ ""source"" ] ] [ key ], message ) if mappedvalue is not None : newmessage [ ""details"" ] [ key ] = mappedvalue if : newmessage [ ""timestamp"" ] = newmessage [ ""details"" ] [ ""commit_ts"" ] newmessage [ ""utctimestamp"" ] = toUTC ( newmessage [ ""details"" ] [ ""commit_ts"" ] ). isoformat ( ) <",False,'commit_ts' in newmessage['details'],self.timestamp is not None,0.6526157259941101 4323,"def git_get_keywords ( versionfile_abs ) : """"""Extract version information from the given file."""""" keywords = { } try : f = open ( versionfile_abs, ""r"" ) for line in f. readlines ( ) : if line. strip ( ). startswith ( ""git_refnames ="" ) : mo = re. search ( r'=\s*""(.*)""', line ) if mo : keywords [ ""refnames"" ] = mo. group ( 1 ) if : mo = re. search ( r'=\s*""(.*)""', line ) if mo : keywords [ ""full"" ] = mo. group ( 1 ) f. 
close ( ) except EnvironmentError : pass return keywords",False,line.strip().startswith('git_full ='),not keywords,0.6470040082931519 4324,"def _optimize_unicode ( charset, fixup ) : try : import array except ImportError : return charset charmap = [ 0 ] * 65536 negate = 0 try : for op, av in charset : if op is NEGATE : negate = 1 elif op is LITERAL : charmap [ fixup ( av ) ] = 1 elif op is RANGE : for i in xrange ( fixup ( av [ 0 ] ), fixup ( av [ 1 ] ) + 1 ) : charmap [ i ] = 1 elif : return charset except IndexError : return charset if negate : if sys. maxunicode!= 65535 : return charset for i in xrange ( 65536 ) : charmap [ i ] = not charmap [ i ] comps = { } mapping = [ 0 ] * 256 block = 0 data = [ ] for i in xrange ( 256 ) : chunk = tuple ( charmap [ i * 256 : ( i + 1 ) * 256 ] ) new = comps. setdefault ( chunk, block ) mapping [ i ] = new if new == block : block = block + 1 data = data + _mk_bitmap ( chunk ) header = [ block ] if _sre. CODESIZE ==",False,op is CATEGORY,len(charset) == 0,0.6826099157333374 4325,"def _verify_layer_file ( self, structure, layer_id ) : """"""Verify layer file in repository"""""" ( layer_algorithm, layer_hash ) = self. _split_layer_id ( layer_id ) layer_f = structure [ ""repolayers"" ] [ layer_id ] [ ""layer_f"" ] if not ( os. path. exists ( layer_f ) and os. path. islink ( layer_f ) ) : Msg ( ). err ( ""Error: layer data file symbolic link not found"", layer_id ) return False if not os. path. exists ( self. cur_tagdir + ""/"" + os. readlink ( layer_f ) ) : Msg ( ). err ( ""Error: layer data file not found"" ) return False if ""gzip"" in GuestInfo ( ""/"" ). get_filetype ( layer_f ) : if : Msg ( ). err ( ""Error: layer tar verify failed:"", layer_f ) return False if layer_algorithm : layer_f_chksum = ChkSUM ( ). hash ( layer_f, layer_algorithm ) if layer_f_chksum and layer_f_chksum!= layer_hash : Msg ( ). err ( ""Error: layer file chksum failed:"", layer_f ) return False return True",False,not FileUtil(layer_f).verify_tar(),not os.path.exists(layer_f),0.6477632522583008 4326,"def TryMerge ( self, d ) : while d. avail ( ) > 0 : tt = d. getVarInt32 ( ) if tt == 8 : self. set_hits ( d. getVarUint64 ( ) ) continue if tt == 16 : self. set_misses ( d. getVarUint64 ( ) ) continue if tt == 24 : self. set_byte_hits ( d. getVarUint64 ( ) ) continue if : self. set_items ( d. getVarUint64 ( ) ) continue if tt == 40 : self. set_bytes ( d. getVarUint64 ( ) ) continue if tt == 53 : self. set_oldest_item_age ( d. get32 ( ) ) continue if tt == 0 : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 32,tt == 50,0.6858094930648804 4327,"def prepare_request ( next_link = None ) : header_parameters = { } header_parameters [ ""Accept"" ] = self. _serialize. header ( ""accept"", accept, ""str"" ) if not next_link : url = self. list_at_scope. metadata [ ""url"" ] path_format_arguments = { ""scope"" : self. _serialize. url ( ""scope"", scope, ""str"", skip_quote = True ), } url = self. _client. format_url ( url, ** path_format_arguments ) query_parameters = { } if filter is not None : query_parameters [ ""$filter"" ] = self. _serialize. query ( ""filter"", filter, ""str"" ) if : query_parameters [ ""$top"" ] = self. _serialize. query ( ""top"", top, ""int"" ) query_parameters [ ""api-version"" ] = self. _serialize. query ( ""api_version"", api_version, ""str"" ) request = self. _client. get ( url, query_parameters, header_parameters ) else : url = next_link query_parameters = { } request = self. _client. 
get ( url, query_parameters, header_parameters ) return request",True,top is not None,top is not None,0.6696810722351074 4328,"def put ( self, var, val, throw = False ) : if self. prototype is None : desc = self. own. get ( var ) if : self. par. put ( var, val, False ) else : if desc [ ""writable"" ] : desc [ ""value"" ] = val else : if self. is_with_scope : if self. own. has_property ( var ) : return self. own. put ( var, val, throw = throw ) else : return self. prototype. put ( var, val ) elif var in self. own : self. own [ var ] = val return val else : return self. prototype. put ( var, val )",False,desc is None,desc,0.6736911535263062 4329,"def _yield_minibatches_idx ( self, rgen, n_batches, data_ary, shuffle = True ) : indices = np. arange ( data_ary. shape [ 0 ] ) if shuffle : indices = rgen. permutation ( indices ) if n_batches > 1 : remainder = data_ary. shape [ 0 ] % n_batches if : minis = np. array_split ( indices [ : - remainder ], n_batches ) minis [ - 1 ] = np. concatenate ( ( minis [ - 1 ], indices [ - remainder : ] ), axis = 0 ) else : minis = np. array_split ( indices, n_batches ) else : minis = ( indices, ) for idx_batch in minis : yield idx_batch",False,remainder,remainder > 0,0.6923081874847412 4330,"def create_uas ( self, at_uas, root ) : """"""Recreate uA's from the @ua nodes in the @uas tree."""""" d = { } for p in root. self_and_subtree ( copy = False ) : d [ p. v. gnx ] = p. copy ( ) for at_ua in at_uas. children ( ) : h, b = at_ua. h, at_ua. b gnx = h [ 4 : ]. strip ( ) if b and gnx and g. match_word ( h, 0, ""@ua"" ) : p = d. get ( gnx ) if p : lines = g. splitLines ( b ) if : unl, ua = lines else : unl, ua = None, b if ua. startswith ( ""ua:"" ) : ua = ua [ 3 : ] if ua : ua = self. unpickle ( ua ) p. v. u = ua else : g. trace ( ""Can not unpickle uA in"", p. h, repr ( unl ), type ( ua ), ua [ : 40 ] )",False,b.startswith('unl:') and len(lines) == 2,lines,0.6494481563568115 4331,"def get ( self, request, organization, team, project, rule_id = None ) : if rule_id : try : rule = Rule. objects. get ( project = project, id = rule_id ) except Rule. DoesNotExist : path = reverse ( ""sentry-project-rules"", args = [ organization. slug, project. slug ] ) return self. redirect ( path ) else : rule = Rule ( project = project ) action_list = [ ] condition_list = [ ] for rule_type, rule_cls in rules : node = rule_cls ( project ) context = { ""id"" : node. id, ""label"" : node. label, ""html"" : node. render_form ( ), } if : condition_list. append ( context ) elif rule_type. startswith ( ""action/"" ) : action_list. append ( context ) context = { ""rule"" : rule, ""page"" : ""rules"", ""action_list"" : json. dumps ( action_list ), ""condition_list"" : json. dumps ( condition_list ), } return self. respond ( ""sentry/projects/rules/new.html"", context )",False,rule_type.startswith('condition/'),rule_type.startswith('action/'),0.6489495038986206 4332,"def __init__ ( self, factor, type, state, num_columns = None, categories = None ) : self. factor = factor self. type = type if self. type not in [ ""numerical"", ""categorical"" ] : raise ValueError ( ""FactorInfo.type must be "" ""'numerical' or 'categorical', not %r"" % ( self. type, ) ) self. state = state if self. type == ""numerical"" : if : raise ValueError ( ""For numerical factors, num_columns "" ""must be an integer"" ) if categories is not None : raise ValueError ( ""For numerical factors, categories "" ""must be None"" ) else : assert self. 
type == ""categorical"" if num_columns is not None : raise ValueError ( ""For categorical factors, num_columns "" ""must be None"" ) categories = tuple ( categories ) self. num_columns = num_columns self. categories = categories",False,"not isinstance(num_columns, six.integer_types)",num_columns is not None and num_columns is not None,0.650429368019104 4333,"def _bytecode_filenames ( self, py_filenames ) : bytecode_files = [ ] for py_file in py_filenames : ext = os. path. splitext ( os. path. normcase ( py_file ) ) [ 1 ] if : continue if self. compile : bytecode_files. append ( py_file + ""c"" ) if self. optimize > 0 : bytecode_files. append ( py_file + ""o"" ) return bytecode_files",False,ext != PYTHON_SOURCE_EXTENSION,"ext.lower() in ['.c', '.c']",0.6576303243637085 4334,"def parse_enums ( html_dir, html_filename, enum_dict ) : PARSE_ENUM_NAME = re. compile ( r""\s*enum\s+(\w+)\s*{"", re. I ). match PARSE_ENUM_VALUE = re. compile ( r""\s*=\s+([0-9]+)\s*(?::\s*(.*))?"" ). match tree = etree. parse ( os. path. join ( html_dir, html_filename ) ) enums = find_enums ( tree ) for enum in enums : enum_name = PARSE_ENUM_NAME ( collect_text ( enum ) ) if not enum_name : continue enum_name = enum_name. group ( 1 ) if : continue print ( ""Found enum"", enum_name ) entries = [ ] for child in enum : name = child. text match = PARSE_ENUM_VALUE ( child. tail ) if not match : print ( ""Ignoring enum %s (failed to parse field '%s')"" % ( enum_name, name ) ) break value, descr = match. groups ( ) entries. append ( ( name, int ( value ), descr ) ) else : enum_dict [ enum_name ] = entries return enum_dict",False,enum_name not in ENUM_MAP,enum_name in enum_dict,0.6601797342300415 4335,"def newickize ( clade ) : """"""Convert a node tree to a Newick tree string, recursively."""""" label = clade. name or """" if label : unquoted_label = re. match ( token_dict [ ""unquoted node label"" ], label ) if : label = ""'%s'"" % label. replace ( ""\\"", ""\\\\"" ). replace ( ""'"", ""\\'"" ) if clade. is_terminal ( ) : return label + make_info_string ( clade, terminal = True ) else : subtrees = ( newickize ( sub ) for sub in clade ) return ""(%s)%s"" % ( "","". join ( subtrees ), label + make_info_string ( clade ) )",False,not unquoted_label or unquoted_label.end() < len(label),unquoted_label,0.6508289575576782 4336,"def fullmodname ( path ) : """"""Return a plausible module name for the path."""""" comparepath = os. path. normcase ( path ) longest = """" for dir in sys. path : dir = os. path. normcase ( dir ) if comparepath. startswith ( dir ) and comparepath [ len ( dir ) ] == os. sep : if : longest = dir if longest : base = path [ len ( longest ) + 1 : ] else : base = path drive, base = os. path. splitdrive ( base ) base = base. replace ( os. sep, ""."" ) if os. altsep : base = base. replace ( os. altsep, ""."" ) filename, ext = os. path. splitext ( base ) return filename. lstrip ( ""."" )",False,len(dir) > len(longest),path.startswith(dir),0.6505472660064697 4337,"def do ( self ) : self. emit ( ""resolution-finding-start"" ) try : logger. info ( ""Looking for resolution of device [%s]"" % ( self. __devid ) ) device = pyinsane. Scanner ( name = self. __devid ) sys. stdout. flush ( ) resolutions = device. options [ ""resolution"" ]. constraint logger. info ( ""Resolutions found: %s"" % str ( resolutions ) ) sys. stdout. flush ( ) if : interval = resolutions [ 2 ] if interval < 50 : interval = 50 res_array = [ ] for res in range ( resolutions [ 0 ], resolutions [ 1 ] + 1, interval ) : res_array. 
append ( res ) resolutions = res_array for resolution in resolutions : name = self. __get_resolution_name ( resolution ) self. emit ( ""resolution-found"", name, resolution, ( resolution == self. __selected_resolution ), ) finally : self. emit ( ""resolution-finding-end"" )",False,"isinstance(resolutions, tuple)",resolutions[0] == self.__selected_resolution,0.656102180480957 4338,"def sendall ( self, data, flags = 0 ) : if self. _sslobj : if : raise ValueError ( ""non-zero flags not allowed in calls to sendall() on %s"" % self. __class__ ) amount = len ( data ) count = 0 while count < amount : v = self. send ( data [ count : ] ) count += v return amount else : return socket. sendall ( self, data, flags )",True,flags != 0,flags != 0,0.6722278594970703 4339,"def get_local_wheel_metadata ( wheel_file ) : parsed_metadata = None with io. open ( wheel_file, ""rb"" ) as fh : with zipfile. ZipFile ( fh, mode = ""r"", compression = zipfile. ZIP_DEFLATED ) as zf : metadata = None for fn in zf. namelist ( ) : if os. path. basename ( fn ) == ""METADATA"" : metadata = fn break if : raise RuntimeError ( ""No metadata found in wheel: {0}"". format ( wheel_file ) ) with zf. open ( metadata, ""r"" ) as metadata_fh : parsed_metadata = distlib. metadata. Metadata ( fileobj = metadata_fh ) return parsed_metadata",True,metadata is None,metadata is None,0.6644433736801147 4340,"def _apply_abs_paths ( data, script_dir ) : for flag_data in data. values ( ) : if : continue default = flag_data. get ( ""default"" ) if ( not default or not isinstance ( default, six. string_types ) or os. path. sep not in default ) : continue abs_path = os. path. join ( script_dir, default ) if os. path. exists ( abs_path ) : flag_data [ ""default"" ] = abs_path",False,"not isinstance(flag_data, dict)",'default' not in flag_data,0.647739052772522 4341,"def get_object ( self ) : settings_qs = self. get_queryset ( ) registered_settings = settings_registry. get_registered_settings ( category_slug = self. category_slug, ) all_settings = { } for setting in settings_qs : all_settings [ setting. key ] = setting. value for key in registered_settings : if : continue try : field = settings_registry. get_setting_field ( key, for_user = bool ( self. category_slug == ""user"" ) ) all_settings [ key ] = field. get_default ( ) except serializers. SkipField : all_settings [ key ] = None all_settings [ ""user"" ] = self. request. user if self. category_slug == ""user"" else None obj = type ( ""Settings"", ( object, ), all_settings ) ( ) self. check_object_permissions ( self. request, obj ) return obj",False,key in all_settings or self.category_slug == 'changed',key in settings_registry.get_setting_keys(),0.6515622735023499 4342,"def exists ( self, hash, want_source = False ) : """"""Return nonempty if the object exists in the index files."""""" global _total_searches, _total_steps _total_searches += 1 want = str ( hash ) el = extract_bits ( want, self. bits ) if el : start = self. _fanget ( el - 1 ) startv = el << ( 32 - self. bits ) else : start = 0 startv = 0 end = self. _fanget ( el ) endv = ( el + 1 ) << ( 32 - self. bits ) _total_steps += 1 hashv = _helpers. firstword ( hash ) while start < end : _total_steps += 1 mid = start + ( hashv - startv ) * ( end - start - 1 ) / ( endv - startv ) v = self. _get ( mid ) if : start = mid + 1 startv = _helpers. firstword ( v ) elif v > want : end = mid endv = _helpers. firstword ( v ) else : return want_source and self. 
_get_idxname ( mid ) or True return None",False,v < want,v > want,0.6707136631011963 4343,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if : self. request = TDropPrivilegesRequest ( ) self. request. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRUCT,self.request is not None,0.6621614694595337 4344,"def update_decoder_attention_history ( cache ) : """"""Save attention weights in cache, e.g., for vizualization."""""" for k in [ x for x in self. attention_weights if ""decoder"" in x and ""self"" not in x and ""logits"" not in x ] : idx = k. find ( ""layer_"" ) if idx < 0 : continue layer_nbr = k [ idx + 6 : ] idx = 0 while idx + 1 < len ( layer_nbr ) and layer_nbr [ : idx + 1 ]. isdigit ( ) : idx += 1 layer_nbr = ""layer_%d"" % int ( layer_nbr [ : idx ] ) if : cache [ ""attention_history"" ] [ layer_nbr ] = tf. concat ( [ cache [ ""attention_history"" ] [ layer_nbr ], self. attention_weights [ k ] ], axis = 2, )",False,layer_nbr in cache['attention_history'],layer_nbr in cache,0.6555109024047852 4345,"def set_with_index_array ( self, item, value, check_units ) : variable = self. variable if check_units : fail_for_dimension_mismatch ( variable. dim, value, ""Incorrect unit for setting variable %s"" % self. name ) if variable. scalar : if : raise IndexError ( ( ""Illegal index for variable %s, it is a "" ""scalar variable."" ) % self. name ) indices = 0 elif isinstance ( item, slice ) and item == slice ( None ) and self. index_var == ""_idx"" : indices = slice ( None ) else : indices = self. indexing ( item, self. index_var ) q = Quantity ( value, copy = False ) if len ( q. shape ) : if not len ( q. shape ) == 1 or len ( q )!= 1 and len ( q )!= len ( indices ) : raise ValueError ( ( ""Provided values do not match the size "" ""of the indices, "" ""%d!= %d."" ) % ( len ( q ), len ( indices ) ) ) variable. get_value ( ) [ indices ] = value",False,"not (isinstance(item, slice) and item == slice(None))",variable.get_value() is None,0.6586532592773438 4346,"def add_callback ( self, callback, * args, ** kwargs ) : if thread. get_ident ( )!= self. _thread_ident : with self. _callback_lock : if : return list_empty = not self. _callbacks self. _callbacks. append ( functools. partial ( stack_context. wrap ( callback ), * args, ** kwargs ) ) if list_empty : self. _waker. wake ( ) else : if : return self. _callbacks. append ( functools. partial ( stack_context. wrap ( callback ), * args, ** kwargs ) )",False,self._closing,self._callbacks is None,0.676455557346344 4347,"def getoutput ( self ) : """"""convert the dtd entity back to string form"""""" lines = [ ] lines. extend ( [ comment for commenttype, comment in self. comments ] ) lines. extend ( self. unparsedlines ) if self. isnull ( ) : result = """". join ( lines ) return result. rstrip ( ) + ""\n"" if len ( self. entity ) > 0 : if getattr ( self, ""entitytype"", None ) == ""external"" : entityline = ( "" + self. entity + "" "" + self. entityparameter + "" "" + self. definition + self. closing ) else : entityline = ( "" + self. 
space_pre_entity + self. entity + self. space_pre_definition + self. definition + self. closing ) if getattr ( self, ""hashprefix"", None ) : entityline = self. hashprefix + "" "" + entityline if : entityline = entityline. encode ( ""UTF-8"" ) lines. append ( entityline + ""\n"" ) <",True,"isinstance(entityline, unicode)","isinstance(entityline, unicode)",0.649613618850708 4348,"def get_all_hashes ( self ) : event_hashes = [ ] sample_hashes = [ ] for a in self. event. attributes : h = None if a. type in ( ""md5"", ""sha1"", ""sha256"" ) : h = a. value event_hashes. append ( h ) elif a. type in ( ""filename|md5"", ""filename|sha1"", ""filename|sha256"" ) : h = a. value. split ( ""|"" ) [ 1 ] event_hashes. append ( h ) elif : h = a. value. split ( ""|"" ) [ 1 ] sample_hashes. append ( h ) return event_hashes, sample_hashes",False,a.type == 'malware-sample',"a.type in ( ""sample_hash""",0.6484647989273071 4349,"def sortModules ( self ) : super ( NeuronDecomposableNetwork, self ). sortModules ( ) self. _constructParameterInfo ( ) self. decompositionIndices = { } for neuron in self. _neuronIterator ( ) : self. decompositionIndices [ neuron ] = [ ] for w in range ( self. paramdim ) : inneuron, outneuron = self. paramInfo [ w ] if : self. decompositionIndices [ inneuron ]. append ( w ) else : self. decompositionIndices [ outneuron ]. append ( w )",False,self.espStyleDecomposition and outneuron[0] in self.outmodules,inneuron in self.decompositionIndices,0.6543204188346863 4350,"def _updateCache ( request_headers, response_headers, content, cache, cachekey ) : if cachekey : cc = _parse_cache_control ( request_headers ) cc_response = _parse_cache_control ( response_headers ) if : cache. delete ( cachekey ) else : info = email. Message. Message ( ) for key, value in response_headers. iteritems ( ) : if key not in [ ""status"", ""content-encoding"", ""transfer-encoding"" ] : info [ key ] = value vary = response_headers. get ( ""vary"", None ) if vary : vary_headers = vary. lower ( ). replace ( "" "", """" ). split ( "","" ) for header in vary_headers : key = ""-varied-%s"" % header try : info [ key ] = request_headers [ header ] except KeyError : pass status = response_headers. status if status == 304 : status = 200 status_header = ""status: %d\r\n"" % status header_str = info. as_string ( ) ",False,cc.has_key('no-store') or cc_response.has_key('no-store'),cachekey,0.6491734981536865 4351,"def _conv_layer ( self, sess, bottom, name, trainable = True, padding = ""SAME"", relu = True ) : with tf. variable_scope ( name ) as scope : filt = self. _get_conv_filter ( sess, name, trainable = trainable ) conv_biases = self. _get_bias ( sess, name, trainable = trainable ) conv = tf. nn. conv2d ( bottom, filt, [ 1, 1, 1, 1 ], padding = padding ) bias = tf. nn. bias_add ( conv, conv_biases ) if : bias = tf. nn. relu ( bias ) return bias",True,relu,relu,0.680166482925415 4352,"def _set_transform ( self ) : ax = self. ax if self. units in ( ""x"", ""y"" ) : if self. units == ""x"" : dx0 = ax. viewLim. width dx1 = ax. bbox. width else : dx0 = ax. viewLim. height dx1 = ax. bbox. height dx = dx1 / dx0 else : if : dx = ax. bbox. width elif self. units == ""height"" : dx = ax. bbox. height elif self. units == ""dots"" : dx = 1.0 elif self. units == ""inches"" : dx = ax. figure. dpi else : raise ValueError ( ""unrecognized units"" ) trans = transforms. Affine2D ( ). scale ( dx ) self. 
set_transform ( trans ) return trans",True,self.units == 'width',self.units == 'width',0.6595195531845093 4353,"def disconnect_application ( self ) : if not self. is_app_running ( self. APP_BACKDROP ) : self. socket. send ( commands. CloseCommand ( destination_id = False ) ) start_time = time. time ( ) while not self. is_app_running ( None ) : try : self. socket. send_and_wait ( commands. StatusCommand ( ) ) except cast_socket. ConnectionTerminatedException : break current_time = time. time ( ) if : raise TimeoutException ( ) time. sleep ( self. WAIT_INTERVAL ) else : logger. debug ( ""Closing not necessary. Backdrop is running..."" )",False,current_time - start_time > self.timeout,current_time - start_time > self.WAIT_INTERVAL,0.6519465446472168 4354,"def cleanDataCmd ( cmd ) : newcmd = ""AbracadabrA ** if cmd [ : 6 ]!= ""php://"" : if : cmds = cmd. split ( ""&"" ) for c in cmds : if len ( c ) > 0 : newcmd += ""system('%s');"" % c else : b64cmd = base64. b64encode ( cmd ) newcmd += ""system(base64_decode('%s'));"" % b64cmd else : newcmd += cmd [ 6 : ] newcmd += ""?> **"" return newcmd",False,reverseConn not in cmd,len(cmd) > 0,0.6773138046264648 4355,"def main ( args ) : cfg = setup ( args ) PathManager. set_strict_kwargs_checking ( False ) if args. eval_only : model = Trainer. build_model ( cfg ) DensePoseCheckpointer ( model, save_dir = cfg. OUTPUT_DIR ). resume_or_load ( cfg. MODEL. WEIGHTS, resume = args. resume ) res = Trainer. test ( cfg, model ) if cfg. TEST. AUG. ENABLED : res. update ( Trainer. test_with_TTA ( cfg, model ) ) if : verify_results ( cfg, res ) return res trainer = Trainer ( cfg ) trainer. resume_or_load ( resume = args. resume ) if cfg. TEST. AUG. ENABLED : trainer. register_hooks ( [ hooks. EvalHook ( 0, lambda : trainer. test_with_TTA ( cfg, trainer. model ) ) ] ) return trainer. train ( )",False,comm.is_main_process(),res is not None,0.6495720744132996 4356,"def handle ( self, context, sign, * args ) : if context. rounding in ( ROUND_HALF_UP, ROUND_HALF_EVEN, ROUND_HALF_DOWN, ROUND_UP ) : return Infsign [ sign ] if sign == 0 : if context. rounding == ROUND_CEILING : return Infsign [ sign ] return Decimal ( ( sign, ( 9, ) * context. prec, context. Emax - context. prec + 1 ) ) if sign == 1 : if : return Infsign [ sign ] return Decimal ( ( sign, ( 9, ) * context. prec, context. Emax - context. prec + 1 ) )",False,context.rounding == ROUND_FLOOR,context.rounding == ROUND_CEILING_TAB,0.6555569171905518 4357,"def findChapterNameForPosition ( self, p ) : """"""Return the name of a chapter containing p or None if p does not exist."""""" cc, c = self, self. c if not p or not c. positionExists ( p ) : return None for name in cc. chaptersDict : if name!= ""main"" : theChapter = cc. chaptersDict. get ( name ) if : return name return ""main""",False,theChapter.positionIsInChapter(p),theChapter and theChapter.nameExists(p),0.6569643020629883 4358,"def validate_matrix ( matrix ) : if not matrix : return None for key, value in matrix. items ( ) : if : raise ValidationError ( ""`{}` defines a non uniform distribution, "" ""and it cannot be used with bayesian optimization."". format ( key ) ) return matrix",False,value.is_distribution and (not value.is_uniform),"not (isinstance(value, torch.Tensor) and value.is_uniform)",0.6517713069915771 4359,"def master_param_to_train_param ( master_params_grads, params_grads, main_prog ) : for idx, m_p_g in enumerate ( master_params_grads ) : train_p, _ = params_grads [ idx ] if : continue with main_prog. 
_optimized_guard ( [ m_p_g [ 0 ], m_p_g [ 1 ] ] ) : append_cast_op ( m_p_g [ 0 ], train_p, main_prog )",False,train_p.name.find('layer_norm') > -1,train_p.name.startswith('feature_preprocessor:tnn.nn.Conv1d),0.6489761471748352 4360,"def get_recursively_referenced_fragments ( self, operation ) : assert isinstance ( operation, OperationDefinition ) fragments = self. _recursively_referenced_fragments. get ( operation ) if not fragments : fragments = [ ] collected_names = set ( ) nodes_to_visit = [ operation. selection_set ] while nodes_to_visit : node = nodes_to_visit. pop ( ) spreads = self. get_fragment_spreads ( node ) for spread in spreads : frag_name = spread. name. value if : collected_names. add ( frag_name ) fragment = self. get_fragment ( frag_name ) if fragment : fragments. append ( fragment ) nodes_to_visit. append ( fragment. selection_set ) self. _recursively_referenced_fragments [ operation ] = fragments return fragments",True,frag_name not in collected_names,frag_name not in collected_names,0.657961368560791 4361,"def hold ( self ) : dire = 1 while True : try : if time == None : break time. sleep ( 0.05 ) tasks = TaskManager. get ( ) if len ( tasks ) == 0 : if self. pro_bar. IsShown ( ) : wx. CallAfter ( self. set_progress, - 1 ) continue arr = [ i. prgs for i in tasks ] if ( None, 1 ) in arr : if : dire = 1 if self. pro_bar. GetValue ( ) >= 100 : dire = - 1 v = self. pro_bar. GetValue ( ) + dire * 5 wx. CallAfter ( self. set_progress, v ) else : v = max ( [ ( i [ 0 ] + 1 ) * 100.0 / i [ 1 ] for i in arr ] ) wx. CallAfter ( self. set_progress, v ) except : pass",False,self.pro_bar.GetValue() <= 0,dire == None,0.6528546810150146 4362,"def test_wrong_bind ( ) : N = 1024 A = te. placeholder ( ( N, N - 1 ), name = ""A"" ) B = te. compute ( ( N, N - 1 ), lambda i, j : A [ i, j ] ) s = te. create_schedule ( [ B. op ] ) s [ B ]. bind ( s [ B ]. op. axis [ 0 ], te. thread_axis ( ""threadIdx.x"" ) ) s [ B ]. bind ( s [ B ]. op. axis [ 1 ], te. thread_axis ( ""threadIdx.x"" ) ) for target in [ ""opencl"", ""cuda"" ] : if : continue valid = [ None ] with tvm. transform. PassContext ( config = { ""tir.add_lower_pass"" : [ ( 2, get_verify_pass ( valid, max_threads_per_block = N * N ) ) ] } ) : tvm. build ( s, [ A, B ], target ) assert not valid [ 0 ]",False,not tvm.testing.device_enabled(target),"target in ['cl', 'tir.connect', 'tir.connect', 'connect']",0.6482490301132202 4363,"def librato_record ( name, value ) : global _librato, _librato_lock, _librato_aggregator, _librato_timer, _librato_start if _librato is None : return try : name = ""."". join ( [ env. APP, env. NAME, name ] ) with _librato_lock : _librato_cancel_timer ( ) if _librato_aggregator is None : _librato_aggregator = Aggregator ( _librato, source = env. HOST ) _librato_start = time. time ( ) _librato_aggregator. add ( name, value ) if : librato_submit ( ) else : _librato_timer = threading. Timer ( LIBRATO_MIN_AGGREGATION_PERIOD, librato_submit ) _librato_timer. start ( ) except : report_exception ( )",False,time.time() - _librato_start > LIBRATO_MAX_AGGREGATION_PERIOD,_librato_start - _librato_start > 0,0.6539208889007568 4364,"def scan_resource_conf ( self, conf ) : if ""policy"" in conf. keys ( ) : try : policy_block = json. loads ( conf [ ""policy"" ] [ 0 ] ) if ""Statement"" in policy_block. keys ( ) : for statement in force_list ( policy_block [ ""Statement"" ] ) : if ""Action"" in statement : effect = statement. get ( ""Effect"", ""Allow"" ) action = force_list ( statement. 
get ( ""Action"", [ """" ] ) ) resource = force_list ( statement. get ( ""Resource"", [ """" ] ) ) if : return CheckResult. FAILED except : pass return CheckResult. PASSED",False,effect == 'Allow' and '*' in action and ('*' in resource),resource is not None and self.has_resource(resource),0.6546757817268372 4365,"def pickPath ( self, color ) : self. path [ color ] = ( ) currentPos = self. starts [ color ] while True : minDist = None minGuide = None for guide in self. guides [ color ] : guideDist = dist ( currentPos, guide ) if : minDist = guideDist minGuide = guide if dist ( currentPos, self. ends [ color ] ) == 1 : return if minGuide == None : return self. path [ color ] = self. path [ color ] + ( minGuide, ) currentPos = minGuide self. guides [ color ]. remove ( minGuide )",False,minDist == None or guideDist < minDist,minDist == None,0.6549274325370789 4366,"def __new__ ( metacls, typename, bases, namespace ) : annotations = namespace. get ( ""__annotations__"", { } ) for t in annotations. values ( ) : if : for ut in t. __args__ : _assert_tensorizer_type ( ut ) else : _assert_tensorizer_type ( t ) return super ( ). __new__ ( metacls, typename, bases, namespace )",False,"getattr(t, '__origin__', '') is Union","hasattr(t, '__args__')",0.6505976915359497 4367,"def stage_node_label ( stage ) : """"""Return a html format label for the given stage."""""" rows = max ( 1, max ( len ( stage [ ""output_tensors"" ] ), len ( stage [ ""input_tensors"" ] ) ) ) label = '<' for i in range ( rows ) : label += """" if : port_id = get_port_id ( True, i ) label += ( '"" ) else : label += '' if i == 0 : label += ( '"" ) if i < len ( stage [ ""output_tensors"" ] ) : port_id = get_port_id ( False, i ) label += ( '
    + port_id + '"">' + str ( i ) + "" + str ( rows ) + '"">' + stage_label ( stage ) + "" if self. version in ( 8, 13 ) and not self. websocket_closed : if close_data is not None : status, msg = close_data if isinstance ( msg, six. text_type ) : msg = msg. encode ( ""utf-8"" ) data = struct. pack ( ""!H"", status ) + msg else : data = """" try : self. send ( data, control_code = 8 ) except SocketError : if : raise self. websocket_closed = True",False,not ignore_send_errors,ignore_send_errors,0.652285099029541 4369,"def calcPolygonRect ( pointArray ) : """"""receives a point list and returns the rect that contains them as a tupple -> tuple left, top, right, bottom"""""" l, t, r, b = 10000000, 10000000, - 10000000, - 10000000 for n in pointArray : if n [ 0 ] < l : l = n [ 0 ] if : r = n [ 0 ] if n [ 1 ] < t : t = n [ 1 ] if n [ 1 ] > b : b = n [ 1 ] return l, t, r, b",False,n[0] > r,n[0] < r,0.6681976914405823 4370,"def decode ( self, ids ) : ids = pad_decr ( ids ) tokens = [ ] for int_id in ids : if : tokens. append ( self. _vocab_list [ int_id ] ) else : tokens. append ( self. _oov_token ) return self. _decode_token_separator. join ( tokens )",False,int_id < len(self._vocab_list),int_id in self._vocab_list,0.6535089015960693 4371,"def edit_file ( self, filename ) : import subprocess editor = self. get_editor ( ) if self. env : environ = os. environ. copy ( ) environ. update ( self. env ) else : environ = None try : c = subprocess. Popen ( '%s ""%s""' % ( editor, filename ), env = environ, shell = True ) exit_code = c. wait ( ) if : raise ClickException ( ""%s: Editing failed!"" % editor ) except OSError as e : raise ClickException ( ""%s: Editing failed: %s"" % ( editor, e ) )",True,exit_code != 0,exit_code != 0,0.6593523025512695 4372,"def is_valid ( self ) : import lfs. shipping. utils if isinstance ( self. content, ShippingMethod ) : is_shipping_method = True else : is_shipping_method = False if ( not is_shipping_method ) and ( self. operator == self. IS_SELECTED ) : shipping_method = lfs. shipping. utils. get_selected_shipping_method ( self. request ) return shipping_method in self. value. all ( ) elif ( not is_shipping_method ) and ( self. operator == self. IS_NOT_SELECTED ) : shipping_method = lfs. shipping. utils. get_selected_shipping_method ( self. request ) return shipping_method not in self. value. all ( ) elif self. operator == self. IS_VALID : for sm in self. value. all ( ) : if not lfs. criteria. utils. is_valid ( self. request, sm, self. product ) : return False return True elif self. operator == self. IS_NOT_VALID : for sm in self. value. all ( ) : if : return False return True else : return False",False,"lfs.criteria.utils.is_valid(self.request, sm, self.product)",not lfs.criteria.is_valid(sm),0.6466974020004272 4373,"def GenerateVector ( self, hits, vector, level ) : """"""Generate possible hit vectors which match the rules."""""" for item in hits. get ( level, [ ] ) : if vector : if item < vector [ - 1 ] : continue if item > self. max_separation + vector [ - 1 ] : break new_vector = vector + [ item ] if level + 1 == len ( hits ) : yield new_vector elif : for result in self. GenerateVector ( hits, new_vector, level + 1 ) : yield result",False,level + 1 < len(hits),level + 1 == len(new_vector),0.6656036376953125 4374,def run ( self ) : for _ in range ( 10 ) : if : break self. spawn_i3status ( ) if not self. ready : break self. lock. 
wait ( 5 ),False,not self.py3_wrapper.running,not self.ready,0.6537695527076721 4375,"def add_history ( self, source, description = None, first_seen = None, last_seen = None, active = False ) : last_seen = last_seen or datetime. utcnow ( ) first_seen = first_seen or datetime. utcnow ( ) if active : active_history = self. get_active ( description ) if active_history and last_seen > active_history. last_seen : active_history. last_seen = last_seen self. save ( validate = False ) return self else : _, overlapping_history = self. _get_overlapping ( description, first_seen, last_seen ) if : if source not in overlapping_history. sources : overlapping_history. sources. append ( source ) overlapping_history. first_seen = min ( overlapping_history. first_seen, first_seen ) overlapping_history. last_seen = max ( overlapping_history. last_seen, last_seen ) self. save ( validate = False ) return self return self. modify ( push__history = LinkHistory ( description = description, first_seen = first_seen or datetime. utcnow ( ), last_seen = last_seen or datetime. utcnow ( ), active = active, sources = [ source ], <",True,overlapping_history,overlapping_history,0.6643914580345154 4376,"def get_pointers_to_add_remove ( self, pointers, new_pointers ) : diff = relationship_diff ( current_items = { pointer. _id : pointer for pointer in pointers }, new_items = { val [ ""_id"" ] : val for val in new_pointers }, ) nodes_to_add = [ ] for node_id in diff [ ""add"" ] : node = AbstractNode. load ( node_id ) or Preprint. load ( node_id ) if : raise exceptions. NotFound ( detail = 'Node with id ""{}"" was not found'. format ( node_id ) ) nodes_to_add. append ( node ) return nodes_to_add, diff [ ""remove"" ]. values ( )",False,not node,node.has_id(),0.6688798666000366 4377,"def __process_eval_epoch_end_results_and_log_legacy ( self, eval_results ) : if self. trainer. running_sanity_check : return if eval_results is not None and len ( eval_results ) > 0 : if : eval_results = [ eval_results ] num_loaders : int = self. trainer. evaluation_loop. num_dataloaders prog_bar_metrics, log_metrics, callback_metrics = { }, { }, { } for result_idx, result in enumerate ( eval_results ) : ( _, prog_bar_metrics, log_metrics, callback_metrics, _, ) = self. trainer. process_dict_result ( result ) if num_loaders > 1 : self. __process_eval_epoch_end_results_and_log_legacy_update ( prog_bar_metrics, log_metrics, callback_metrics ) if num_loaders == 1 : self. __process_eval_epoch_end_results_and_log_legacy_update ( prog_bar_metrics, log_metrics, callback_metrics )",False,"not isinstance(eval_results, list)",len(eval_results) == 1,0.6571638584136963 4378,"def text_value_changed ( self, widget, param, value = None ) : try : if : value = widget. toPlainText ( ) except : pass if param. get ( ""category"" ) == ""Keyboard"" : previous_value = value value = QKeySequence ( value ). toString ( ) log. info ( ""Parsing keyboard mapping via QKeySequence from %s to %s"" % ( previous_value, value ) ) self. s. set ( param [ ""setting"" ], value ) log. info ( value ) self. check_for_restart ( param )",False,not value,value is None,0.6716938018798828 4379,"def _get_table_info ( self, table_name ) : table_addr = self. addr_space. profile. get_symbol ( table_name ) table_size = self. _get_table_info_distorm ( ) if : table_size = self. _get_table_info_other ( table_addr, table_name ) if : debug. 
error ( ""Unable to get system call table size"" ) return [ table_addr, table_size ]",False,table_size == 0,table_size is None,0.6630326509475708 4380,"def renderSubplot ( self, subplot ) : self. figure. subplots_adjust ( left = 0.15 ) xValues = [ ] yValues = [ ] i = 0 for row in self. data : if len ( row ) < 2 : raise GraphException ( ""Need at least two points for a graph data object!"" ) x = row [ 0 ] y = row [ 1 ] xValues. append ( x ) yValues. append ( y ) xValues. sort ( ) yValues. sort ( ) for row in self. data : x = row [ 0 ] y = row [ 1 ] marker = self. marker color = self. nextColor ( ) alpha = self. alpha markersize = self. markersize if len ( row ) >= 3 : displayData = row [ 2 ] if ""color"" in displayData : color = displayData [ ""color"" ] if : marker = displayData [ ""marker"" ] if ""alpha"" in displayData : alpha = displayData [ ""alpha"" ] if ""markersize"" in displayData : markersize = displayData [ ""markersize"" ] subplot. plot ( x, y, marker = marker, color = color, alpha = alpha, markersize = markersize ) i += 1 if not self. axisRangeHasBeenSet [ ""y"" ] : ",False,'marker' in displayData,'mmarker' in displayData,0.6606918573379517 4381,"def loadHandler ( self, human, values, strict ) : if values [ 0 ] == ""background"" : if : side = values [ 1 ] img_filename = values [ 2 ] i = 0 while ( img_filename and not any ( [ img_filename. lower ( ). endswith ( ex ) for ex in self. extensions ] ) and ( len ( values ) - ( i + 2 ) ) >= 6 ) : i += 1 img_filename = img_filename + "" "" + values [ 2 + i ] img_filename = getpath. thoroughFindFile ( img_filename, self. backgroundsFolders ) if not os. path. isfile ( img_filename ) : log. warning ( ""Background file %s not found"", img_filename ) return aspect = float ( values [ 3 + i ] ) trans = ( float ( values [ 4 + i ] ), float ( values [ 5 + i ] ) ) scale = float ( values [ 6 + i ] ) self. filenames [ side ] = ( img_filename, aspect ) self. transformations [ side ] = [ trans, scale ] elif len ( values ) >= 3 and values [ 1 ] == ""enabled"" : ",False,len(values) >= 7,len(values) > 2,0.6532894968986511 4382,"def _pprint_key_entries ( user, key_fn, key_entries, hash_meth = ""sha256"", prefix = ""ci-info: "" ) : if not key_entries : message = ""%sno authorized SSH keys fingerprints found for user %s.\n"" % ( prefix, user, ) util. multi_log ( message ) return tbl_fields = [ ""Keytype"", ""Fingerprint (%s)"" % ( hash_meth ), ""Options"", ""Comment"" ] tbl = SimpleTable ( tbl_fields ) for entry in key_entries : if : row = [ entry. keytype or ""-"", _gen_fingerprint ( entry. base64, hash_meth ) or ""-"", entry. options or ""-"", entry. comment or ""-"", ] tbl. add_row ( row ) authtbl_s = tbl. get_string ( ) authtbl_lines = authtbl_s. splitlines ( ) max_len = len ( max ( authtbl_lines, key = len ) ) lines = [ util. center ( ""Authorized keys from %s for user %s"" % ( key_fn, user ), ""+"", max_len ), ] lines. extend ( authtbl_lines ) for line in lines : util. multi_log ( text = ""%s%s\n"" % ( prefix, line ), stderr = False, console = True )",False,_is_printable_key(entry),entry.has_key,0.6478855609893799 4383,"def overrideCommand ( self, commandName, func ) : k = self d = k. masterBindingsDict for key in d : d2 = d. get ( key ) for key2 in d2 : bi = d2. get ( key2 ) if : bi. func = func d2 [ key2 ] = bi",False,bi.commandName == commandName,not bi,0.6691148281097412 4384,"def results_iter ( self ) : if self. connection. ops. oracle : from django. db. models. 
fields import DateTimeField fields = [ DateTimeField ( ) ] else : needs_string_cast = self. connection. features. needs_datetime_string_cast offset = len ( self. query. extra_select ) for rows in self. execute_sql ( MULTI ) : for row in rows : date = row [ offset ] if self. connection. ops. oracle : date = self. resolve_columns ( row, fields ) [ offset ] elif : date = typecast_timestamp ( str ( date ) ) yield date",False,needs_string_cast,self.connection.ops.needs_datetime_string_cast,0.6630522012710571 4385,"def main ( ) : pygame. init ( ) if hasattr ( eventmodule, ""init"" ) : eventmodule. init ( ) screen = pygame. display. set_mode ( ( 300, 300 ) ) reactor. interleave ( postTwistedEvent ) shouldQuit = [ ] reactor. addSystemEventTrigger ( ""after"", ""shutdown"", shouldQuit. append, True ) for event in eventIterator ( ) : if event. type == TWISTEDEVENT : event. iterateTwisted ( ) if shouldQuit : break elif event. type == QUIT : reactor. stop ( ) elif : reactor. stop ( ) pygame. quit ( )",False,event.type == KEYDOWN and event.key == K_ESCAPE,event.type == KILL,0.6541868448257446 4386,"def load_profiles ( profile_path ) : ""Load the stored profiles"" profiles = { } for profile in os. listdir ( profile_path ) : config_name = os. path. join ( profile_path, profile, ""config"" ) setup_name = os. path. join ( profile_path, profile, ""setup"" ) if not os. path. isfile ( config_name ) or not os. path. isfile ( setup_name ) : continue edids = dict ( [ x. split ( ) for x in ( y. strip ( ) for y in open ( setup_name ). readlines ( ) ) if x and x [ 0 ]!= ""#"" ] ) config = { } buffer = [ ] for line in chain ( open ( config_name ). readlines ( ), [ ""output"" ] ) : if : config [ buffer [ 0 ]. strip ( ). split ( ) [ - 1 ] ] = XrandrOutput. from_config_file ( edids, """". join ( buffer ) ) buffer = [ line ] else : buffer. append ( line ) for output_name in list ( config. keys ( ) ) : if config [ output_name ]. edid is None : del config [ output_name ] profiles [ profile ] = { if not ip_addresses : return delta = timezone. now ( ) - datetime. timedelta ( days = 1 ) for ip_address in ip_addresses : if : try : job = rq. job. Job. fetch ( ip_address. scan_summary. job_id, django_rq. get_connection ( ), ) except rq. exceptions. NoSuchJobError : continue else : ip_address. scan_summary. changed = job. meta. get ( ""changed"", False, )",False,ip_address.scan_summary and ip_address.scan_summary.modified > delta,delta - ip_address.ip_address < self.ip_address + self.scan_summary.time,0.649268627166748 4388,"def countbox ( self ) : self. box = [ 1000, 1000, - 1000, - 1000 ] for x, y in self. body : if x < self. box [ 0 ] : self. box [ 0 ] = x if : self. box [ 2 ] = x if y < self. box [ 1 ] : self. box [ 1 ] = y if y > self. box [ 3 ] : self. box [ 3 ] = y",False,x > self.box[2],y < self.box[2],0.661747932434082 4389,"def run_cron ( self ) -> None : n = datetime. now ( ) job_futures = set ( ) for cron_job in self. cron_jobs : if : if cron_job. run_at_startup : cron_job. next_run = n else : cron_job. set_next ( n ) next_run = cast ( datetime, cron_job. next_run ) if n >= next_run : job_id = ( f""{cron_job.name}:{to_unix_ms(next_run)}"" if cron_job. unique else None ) job_futures. add ( self. pool. enqueue_job ( cron_job. name, _job_id = job_id, _queue_name = self. queue_name ) ) cron_job. set_next ( n ) job_futures and await asyncio. 
gather ( * job_futures )",False,cron_job.next_run is None,n > 0,0.6521590948104858 4390,"def set_indentation_params ( self, ispythonsource, guess = 1 ) : if guess and ispythonsource : i = self. guess_indent ( ) if 2 <= i <= 8 : self. indentwidth = i if : self. usetabs = 0 self. editwin. set_tabwidth ( self. tabwidth )",False,self.indentwidth != self.tabwidth,self.usetabs and guess,0.6541339755058289 4391,"def batch_act_and_train ( self, batch_obs ) : xp = self. xp b_state = self. batch_states ( batch_obs, xp, self. phi ) if self. obs_normalizer : b_state = self. obs_normalizer ( b_state, update = False ) num_envs = len ( batch_obs ) if self. batch_last_episode is None : self. _initialize_batch_variables ( num_envs ) assert len ( self. batch_last_episode ) == num_envs assert len ( self. batch_last_state ) == num_envs assert len ( self. batch_last_action ) == num_envs with chainer. using_config ( ""train"", False ), chainer. no_backprop_mode ( ) : if : assert self. train_prev_recurrent_states is None self. train_prev_recurrent_states = self. train_recurrent_states ( action_distrib, batch_value ), self. train_recurrent_states = self. model ( b_state, self. train_prev_recurrent_states ) else : action_distrib, batch_value = self. model ( b_state ) batch_action = chainer. cuda. to_cpu ( action_distrib. sample ( ). array ) self. entropy_record. extend ( chainer. cuda. to_cpu ( action_distrib. entropy. array ) ) self. value_record. extend ( chainer. cuda. to_cpu ( ( batch_value. array ) ) ) self. batch_last_state = list ( batch_obs ) self. batch_last_action = list ( batch_action ) ",False,self.recurrent,self.model is not None,0.653032660484314 4392,"def __init__ ( self, meters_count ) : w = SLOT_W * meters_count h = METER_SLOT_H self. widget = cairoarea. CairoDrawableArea2 ( w, h, self. _draw ) self. audio_meters = [ ] for i in range ( 0, meters_count ) : meter = AudioMeter ( METER_HEIGHT ) if : meter. right_channel. draw_dB = True self. audio_meters. append ( meter )",False,i != meters_count - 1,meters_count == 1,0.668961763381958 4393,"def rx ( ) : while True : rx_i = rep. recv ( ) if : rep. send ( b""done"" ) break rep. send ( b""i"" )",False,rx_i == b'1000',rx_i == 0,0.6606658697128296 4394,"def _expand_requires_extra ( re ) : for extra, reqs in sorted ( re. items ( ) ) : for req in reqs : if : name, envmark = req. split ( "";"", 1 ) yield '{} ; extra == ""{}"" and ({})'. format ( name, extra, envmark ) else : yield '{} ; extra == ""{}""'. format ( req, extra )",False,';' in req,len(req) > 0,0.678476095199585 4395,"def recvfds ( sock, size ) : """"""Receive an array of fds over an AF_UNIX socket."""""" a = array. array ( ""i"" ) bytes_size = a. itemsize * size msg, ancdata, flags, addr = sock. recvmsg ( 1, socket. CMSG_LEN ( bytes_size ), ) if not msg and not ancdata : raise EOFError try : if ACKNOWLEDGE : sock. send ( b""A"" ) if len ( ancdata )!= 1 : raise RuntimeError ( ""received %d items of ancdata"" % len ( ancdata ), ) cmsg_level, cmsg_type, cmsg_data = ancdata [ 0 ] if : if len ( cmsg_data ) % a. itemsize!= 0 : raise ValueError a. frombytes ( cmsg_data ) assert len ( a ) % 256 == msg [ 0 ] return list ( a ) except ( ValueError, IndexError ) : pass raise RuntimeError ( ""Invalid data received"" )",False,cmsg_level == socket.SOL_SOCKET and cmsg_type == socket.SCM_RIGHTS,cmsg_level and cmsg_data,0.6512272357940674 4396,"def get_filtered_observation_space ( self, input_observation_space : ObservationSpace ) -> ObservationSpace : axis_size = input_observation_space. shape [ self. 
axis_origin ] input_observation_space. shape = np. delete ( input_observation_space. shape, self. axis_origin ) if self. axis_target == - 1 : input_observation_space. shape = np. append ( input_observation_space. shape, axis_size ) elif self. axis_target < - 1 : input_observation_space. shape = np. insert ( input_observation_space. shape, self. axis_target + 1, axis_size ) else : input_observation_space. shape = np. insert ( input_observation_space. shape, self. axis_target, axis_size ) if isinstance ( input_observation_space, PlanarMapsObservationSpace ) : if : input_observation_space. channels_axis = self. axis_target elif input_observation_space. channels_axis == self. axis_target : input_observation_space. channels_axis = self. axis_origin elif ( self. axis_origin < input_observation_space. channels_axis < self. axis_target ) : input_observation_space. channels_axis -= 1 elif ( self. axis_target < input_observation_space. channels_axis < self. axis_origin ) : input_observation_space. channels_axis += 1 return input_observation_",True,input_observation_space.channels_axis == self.axis_origin,input_observation_space.channels_axis == self.axis_origin,0.6516085863113403 4397,"def __iadd__ ( self, other ) : if safe_mode and self. _parent_expr : raise EntangledExpressionError ( self, other ) _type = other. __class__ if _type in native_numeric_types : if other : self. _args. append ( other ) return self try : _other_expr = other. is_expression ( ) except AttributeError : other = as_numeric ( other ) _other_expr = other. is_expression ( ) if _other_expr : if : raise EntangledExpressionError ( self, other ) if _type is _SumExpression : self. _args. extend ( other. _args ) other. _args = [ ] return self if safe_mode : other. _parent_expr = bypass_backreference or ref ( self ) elif other. is_indexed ( ) : raise TypeError ( ""Argument for expression '%s' is an indexed numeric "" ""value\nspecified without an index:\n\t%s\nIs this "" ""value defined over an index that you did not specify?"" % ( etype, other. cname ( True ), ) ) elif other. is_constant ( ) : other = other ( ) self. _args. append",False,safe_mode and other._parent_expr,not other.is_numeric(),0.6553704738616943 4398,"def _importZippedFiles ( self, srcPath, parentPath, parent_id ) : import zipfile, tempfile zf = zipfile. ZipFile ( srcPath, ""r"" ) try : zipped_files = zf. namelist ( ) for fname in zipped_files : if fname. startswith ( ""/"" ) : raise Exception ( ""filename in zip file is an absolute path: %s"", fname ) elif self. _up_dir_re. search ( fname ) : raise Exception ( ""filename in zip file contains a '..': %s"", fname ) koDirSvc = components. classes [ ""@activestate.com/koDirs;1"" ]. getService ( ) userDataDir = koDirSvc. userDataDir extractDir = join ( userDataDir, ""extracted-kpz"" ) zipExtractDir = tempfile. mkdtemp ( suffix = ""_zip"", prefix = ""tools_"", dir = extractDir ) zf. extractall ( zipExtractDir ) finally : zf. close ( ) try : newFiles = [ ] for fname in os. listdir ( zipExtractDir ) : path = join ( zipExtractDir, fname ) if : self. importDirectory ( parentPath, path ) else : newFiles. append ( path ) if newFiles : self. importFiles ( parentPath, newFiles ) finally : shutil. rmtree ( zipExtractDir )",False,os.path.isdir(path),path,0.6441490650177002 4399,"def test_prod_without_zeros_default_acc_dtype ( self ) : axes = [ None, 0, 1, [ ], [ 0 ], [ 1 ], [ 0, 1 ] ] for idx, dtype in enumerate ( imap ( str, theano. scalar. all_types ) ) : axis = axes [ idx % len ( axes ) ] x = tensor. 
matrix ( dtype = dtype ) p = ProdWithoutZeros ( axis = axis ) ( x ) assert p. owner. op. acc_dtype == dict ( bool = ""int64"", int8 = ""int64"", int16 = ""int64"", int32 = ""int64"", uint8 = ""uint64"", uint16 = ""uint64"", uint32 = ""uint64"", float16 = ""float32"", float32 = ""float64"", complex64 = ""complex128"", ). get ( dtype, dtype ) if : continue f = theano. function ( [ x ], p ) data = np. random. rand ( 2, 3 ) * 3 data = data. astype ( dtype ) f ( data )",False,'complex' in dtype,x is None,0.6661911010742188 4400,"def forward ( self, g, feats ) : with g. local_scope ( ) : init_feats = feats degs = g. in_degrees ( ). float ( ). clamp ( min = 1 ) norm = torch. pow ( degs, - 0.5 ). to ( feats. device ). unsqueeze ( 1 ) output = None for k in range ( self. K ) : feats = init_feats for t in range ( self. T ) : feats = feats * norm g. ndata [ ""h"" ] = feats g. update_all ( fn. copy_u ( ""h"", ""m"" ), fn. sum ( ""m"", ""h"" ) ) feats = g. ndata. pop ( ""h"" ) feats = feats * norm if t == 0 : feats = self. w_0 [ str ( k ) ] ( feats ) else : feats = self. w [ str ( k ) ] ( feats ) feats += self. dropout ( self. v [ str ( k ) ] ( init_feats ) ) feats += self. v [ str ( k ) ] ( self. dropout ( init_feats ) ) if self. bias is not None : feats += self. bias [ k ] [ t ] """"""Produce SchemaEntries from db.Model class."""""" from google. appengine. ext import db for name, model_property in model. properties ( ). iteritems ( ) : model_class = model_property. __class__ if : model_class = _GetModelTypeForListPropertyType ( model_property. item_type ) _add_schema_entry ( model_class, name, add_entry )",False,"issubclass(model_class, db.ListProperty)","hasattr(model_class, 'item_type')",0.6546720862388611 4402,"def workflows_using_aw ( dirpath ) : """"""Yield bundle IDs of workflows using AW."""""" for root, _, filenames in os. walk ( dirpath ) : for filename in filenames : if not filename. endswith ( "".alfredworkflow"" ) : continue path = os. path. join ( root, filename ) with ZipFile ( path ) as z : uses_alfred_workflow = False for name in z. namelist ( ) : if name in ( b""workflow/workflow.py"", b""workflow.zip"" ) : uses_alfred_workflow = True elif match_zip ( name ) : uses_alfred_workflow = True if : bundle = os. path. basename ( os. path. dirname ( path ) ) yield bundle",False,uses_alfred_workflow,uses_alfred_workflow and (not uses_alfred_workflow),0.6552683115005493 4403,"def read_headers ( cls, fp ) : headers = httputil. HeaderMap ( ) while True : line = fp. readline ( ) if not line : raise EOFError ( u""Illegal end of headers."" ) if line == ""\r\n"" : break if not line. endswith ( ""\r\n"" ) : raise ValueError ( u""MIME requires CRLF terminators: %r"" % line ) if : v = line. strip ( ). decode ( u""ISO-8859-1"" ) else : k, v = line. split ( "":"", 1 ) k = k. strip ( ). decode ( u""ISO-8859-1"" ) v = v. strip ( ). decode ( u""ISO-8859-1"" ) existing = headers. get ( k ) if existing : v = u"", "". join ( ( existing, v ) ) headers [ k ] = v return headers",False,line[0] in '\t',len(line) == 2,0.6547127962112427 4404,"def _yield_unescaped ( self, string ) : while ""\\"" in string : finder = EscapeFinder ( string ) yield finder. before + finder. backslashes if : yield self. _unescape ( finder. text ) else : yield finder. text string = finder. after yield string",False,finder.escaped and finder.text,finder.after,0.6596453189849854 4405,"def test_files ( self ) : dist_dir = os. path. join ( os. path. dirname ( __file__ ), os. pardir, os. pardir ) names = [ ] for d in self. 
test_directories : test_dir = os. path. join ( dist_dir, d ) for n in os. listdir ( test_dir ) : if n. endswith ( "".py"" ) and not n. startswith ( ""bad"" ) : names. append ( os. path. join ( test_dir, n ) ) for filename in names : if : print ( ""Testing %s"" % filename ) source = read_pyfile ( filename ) self. check_roundtrip ( source )",False,test_support.verbose,filename.endswith('.py'),0.6524229049682617 4406,"def _adjust_self_columns_for_partial_reordering ( self ) : pairs = set ( ) col_by_idx = list ( self. columns ) if self. partial_reordering : for tuple_ in self. partial_reordering : for index, elem in enumerate ( tuple_ ) : if : pairs. add ( ( tuple_ [ index - 1 ], elem ) ) else : for index, elem in enumerate ( self. existing_ordering ) : if : pairs. add ( ( col_by_idx [ index - 1 ], elem ) ) pairs. update ( self. add_col_ordering ) pairs = [ p for p in pairs if p [ 0 ]!= p [ 1 ] ] sorted_ = list ( topological. sort ( pairs, col_by_idx, deterministic_order = True ) ) self. columns = OrderedDict ( ( k, self. columns [ k ] ) for k in sorted_ ) self. column_transfers = OrderedDict ( ( k, self. column_transfers [ k ] ) for k in sorted_ )",False,index > 0,col_by_idx[index - 1] != elem[0],0.6684260368347168 4407,"def print_map ( node, l ) : if node. title not in l : l [ node. title ] = [ ] for n in node. children : if : w = { n. title : [ ] } l [ node. title ]. append ( w ) print_map ( n, w ) else : l [ node. title ]. append ( n. title )",False,len(n.children) > 0,n.title,0.6533650159835815 4408,"def check_options ( url, cmd, cve, check_header, filename, os_shell_option, http_request_method, go_back, go_back_again, ) : if os_shell_option == False : if : return False else : return True elif os_shell_option == ""back"" : go_back = True return go_back, go_back_again elif os_shell_option == ""os_shell"" : warn_msg = ""You are already into the '"" + os_shell_option + ""' mode."" print ( settings. print_warning_msg ( warn_msg ) ) + ""\n"" elif os_shell_option == ""bind_tcp"" : go_back, go_back_again = bind_tcp_config ( url, cmd, cve, check_header, filename, os_shell_option, http_request_method, go_back, go_back_again, ) return go_back, go_back_again elif os_shell_option == ""reverse_tcp"" : go_back, go_back_again = reverse_tcp_config ( url, cmd, cve, check_header, <",False,no_result == True,os_shell_option == 'no_tabs',0.6603779792785645 4409,"def _parse_inputs ( self, skip = ( ) ) : """"""validate spm normalize options if set to None ignore"""""" einputs = super ( Normalize12, self ). _parse_inputs ( skip = ( ""jobtype"", ""apply_to_files"" ) ) if isdefined ( self. inputs. apply_to_files ) : inputfiles = deepcopy ( self. inputs. apply_to_files ) if isdefined ( self. inputs. image_to_align ) : inputfiles. extend ( [ self. inputs. image_to_align ] ) einputs [ 0 ] [ ""subj"" ] [ ""resample"" ] = scans_for_fnames ( inputfiles ) jobtype = self. inputs. jobtype if jobtype in [ ""estwrite"", ""write"" ] : if : if isdefined ( self. inputs. image_to_align ) : einputs [ 0 ] [ ""subj"" ] [ ""resample"" ] = scans_for_fname ( self. inputs. image_to_align ) return [ { ""%s"" % ( jobtype ) : einputs [ 0 ] } ]",False,not isdefined(self.inputs.apply_to_files),"jobtype in [None, 'skip']",0.6445922255516052 4410,"def _cbCommand ( self, result ) : if result is not None : if : result = result. encode ( ""utf-8"" ) self. _writeToTransport ( result ) if not result. endswith ( b""\n"" ) : self. _writeToTransport ( b""\n"" ) self. 
_newLine ( )",False,"isinstance(result, str)","isinstance(result, textBase)",0.6542553901672363 4411,"def _feedErrorsToResult ( self, result, errors, setup_or_teardown = False ) : if setup_or_teardown : for test, exc_info in errors : result. addError ( test, exc_info ) else : for test, exc_info in errors : if isinstance ( test, _SubTest ) : result. addSubTest ( test. test_case, test, exc_info ) elif : if issubclass ( exc_info [ 0 ], self. failureException ) : result. addFailure ( test, exc_info ) else : result. addError ( test, exc_info )",False,exc_info is not None,"isinstance(exc_info, _Error)",0.661678671836853 4412,def function_arg ( self ) : if self. ql. ostype in ( QL_OS_POSIX ) : if : return ARMFunctionArg ( self. ql ) elif self. ql. archtype == QL_ARCH. MIPS : return MIPS32FunctionArg ( self. ql ) elif self. ql. archtype == QL_ARCH. ARM64 : return ARM64FunctionArg ( self. ql ) elif self. ql. archtype == QL_ARCH. X86 : return X86FunctionArg ( self. ql ) elif self. ql. archtype == QL_ARCH. X8664 : return X64FunctionArg ( self. ql ) else : raise,True,self.ql.archtype == QL_ARCH.ARM,self.ql.archtype == QL_ARCH.ARM,0.6662028431892395 4413,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. STRUCT : self. req = TGetTableTypesReq ( ) self. req. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,fid == 1,fid == TType.START,0.6721340417861938 4414,"def sub_paragraph ( self, li ) : """"""Search for checkbox in sub-paragraph."""""" found = False if len ( li ) : first = list ( li ) [ 0 ] if : m = RE_CHECKBOX. match ( first. text ) if m is not None : first. text = self. markdown. htmlStash. store ( get_checkbox ( m. group ( ""state"" ) ), safe = True ) + m. group ( ""line"" ) found = True return found",False,first.tag == 'p' and first.text is not None,first.text,0.6502739787101746 4415,"def wrapper ( ) : for epoch_index in range ( epoch ) : if shuffle : random. shuffle ( examples ) if phase == ""train"" : self. current_train_epoch = epoch_index features = self. get_features ( examples, is_training = True ) else : features = self. get_features ( examples, is_training = False ) all_dev_batches = [ ] for batch_insts in batch_reader ( features, batch_size ) : batch_data = prepare_batch_data ( batch_insts ) if len ( all_dev_batches ) < dev_count : all_dev_batches. append ( batch_data ) if : for batch in all_dev_batches : yield batch all_dev_batches = [ ]",True,len(all_dev_batches) == dev_count,len(all_dev_batches) == dev_count,0.648054838180542 4416,"def banbks ( a, m1, m2, al, indx, b ) : n, m = a. shape mm = m1 + m2 + 1 l = m1 for k in range ( n ) : i = indx [ k ] if i!= k : tmp = b [ k ] b [ k ] = b [ i ] b [ i ] = tmp if : l += 1 for i in range ( k + 1, l ) : b [ i ] -= al [ k, i - k - 1 ] * b [ k ] l = 1 for i in range ( n - 1, - 1, - 1 ) : dum = b [ i ] for k in range ( 1, l ) : dum -= a [ i, k ] * b [ k + i ] b [ i ] = dum / a [ i, 0 ] if l < mm : l += 1",False,l < n,l < mm,0.6870582103729248 4417,"def _add_stack_elements_to_sequence ( stack, sequence_e, timeline_range, br_map ) : _append_new_sub_element ( sequence_e, ""name"", text = stack. 
name ) _append_new_sub_element ( sequence_e, ""duration"", text = ""{:.0f}"". format ( timeline_range. duration. value ) ) sequence_e. append ( _build_rate ( timeline_range. start_time. rate ) ) track_rate = timeline_range. start_time. rate media_e = _get_or_create_subelement ( sequence_e, ""media"" ) video_e = _get_or_create_subelement ( media_e, ""video"" ) audio_e = _get_or_create_subelement ( media_e, ""audio"" ) media_e. clear ( ) media_e. extend ( [ video_e, audio_e ] ) for track in stack : track_elements = _build_top_level_track ( track, track_rate, br_map ) if track. kind == schema. TrackKind. Video : video_e. append ( track_elements ) elif : audio_e. append ( track_elements ) for marker in stack. markers : sequence_e. append ( _build_marker ( marker ) )",False,track.kind == schema.TrackKind.Audio,track.kind == schema.TrackKind.audio,0.6495248079299927 4418,def __next__ ( self ) : self. _offset += 1 if self. _offset >= len ( self. _results ) : if self. _results_left is False : raise StopIteration ( ) self. fetch_more ( ) while not len ( self. _results ) and self. _results_left : self. fetch_more ( ) if self. _offset < len ( self. _results ) : if : self. _limit -= 1 if self. _limit < 0 : raise StopIteration ( ) return self. _results [ self. _offset ] else : raise StopIteration ( ),False,self._limit is not None,self._limit > 0,0.6658344268798828 4419,"def unsubscribe ( self ) : for signum, handler in self. _previous_handlers. items ( ) : signame = self. signals [ signum ] if handler is None : self. bus. log ( ""Restoring %s handler to SIG_DFL."" % signame ) handler = _signal. SIG_DFL else : self. bus. log ( ""Restoring %s handler %r."" % ( signame, handler ) ) try : our_handler = _signal. signal ( signum, handler ) if : self. bus. log ( ""Restored old %s handler %r, but our "" ""handler was not registered."" % ( signame, handler ), level = 30, ) except ValueError : self. bus. log ( ""Unable to restore %s handler %r."" % ( signame, handler ), level = 40, traceback = True, )",False,our_handler is None,our_handler,0.6618780493736267 4420,"def get_party_total ( self, args ) : self. party_total = frappe. _dict ( ) for d in self. receivables : self. init_party_total ( d ) for k in list ( self. party_total [ d. party ] ) : if : self. party_total [ d. party ] [ k ] += d. get ( k, 0.0 ) self. set_party_details ( d )",False,"k not in ['currency', 'sales_person']",k in args,0.6559187173843384 4421,"def get_editops ( self ) : if not self. _editops : if : self. _editops = editops ( self. _opcodes, self. _str1, self. _str2 ) else : self. _editops = editops ( self. _str1, self. _str2 ) return self. _editops",False,self._opcodes,self.has_opcodes,0.6684092879295349 4422,"def get_allele_frequency ( self, pop_pos, locus_name ) : """"""Calculate the allele frequency for a certain locus on a population."""""" if len ( self. __allele_frequency ) == 0 : geno_freqs = self. _controller. calc_allele_genotype_freqs ( self. _fname ) pop_iter, loc_iter = geno_freqs for locus_info in loc_iter : if : self. __allele_frequency [ locus_info [ 0 ] ] = None, None else : self. __allele_frequency [ locus_info [ 0 ] ] = locus_info [ 1 : ] info = self. 
__allele_frequency [ locus_name ] pop_name, freqs, total = info [ 1 ] [ pop_pos ] allele_freq = { } alleles = info [ 0 ] for i, allele in enumerate ( alleles ) : allele_freq [ allele ] = freqs [ i ] return total, allele_freq",False,locus_info[0] is None,locus_name == 'None',0.6565197110176086 4423,"def validate_on_or_after ( not_on_or_after, slack ) : if not_on_or_after : now = time_util. utc_now ( ) nooa = calendar. timegm ( time_util. str_to_time ( not_on_or_after ) ) if : raise ResponseLifetimeExceed ( ""Can't use it, it's too old %d > %d"" % ( now - slack, nooa ) ) return nooa else : return False",False,now > nooa + slack,now - slack > nooa,0.6659272909164429 4424,"def print_path ( path ) : for i, step in enumerate ( path ) : next = path [ ( i + 1 ) % len ( path ) ] outstream. write ( "" %s -- "" % str ( type ( step ) ) ) if isinstance ( step, dict ) : for key, val in step. items ( ) : if val is next : outstream. write ( ""[%s]"" % repr ( key ) ) break if : outstream. write ( ""[key] = %s"" % repr ( val ) ) break elif isinstance ( step, list ) : outstream. write ( ""[%d]"" % step. index ( next ) ) elif isinstance ( step, tuple ) : outstream. write ( ""( tuple )"" ) else : outstream. write ( repr ( step ) ) outstream. write ( "" ->\n"" ) outstream. write ( ""\n"" )",False,key is next,"isinstance(val, dict)",0.6659658551216125 4425,"def read_triple ( self, path, mode, skip_first_line = False, format = [ 0, 1, 2 ] ) : if path is None : return None print ( ""Reading {} triples...."". format ( mode ) ) heads = [ ] tails = [ ] rels = [ ] with open ( path ) as f : if : _ = f. readline ( ) for line in f : triple = line. strip ( ). split ( ""\t"" ) h, r, t = triple [ format [ 0 ] ], triple [ format [ 1 ] ], triple [ format [ 2 ] ] heads. append ( self. entity2id [ h ] ) rels. append ( self. relation2id [ r ] ) tails. append ( self. entity2id [ t ] ) heads = np. array ( heads, dtype = np. int64 ) tails = np. array ( tails, dtype = np. int64 ) rels = np. array ( rels, dtype = np. int64 ) print ( ""Finished. Read {} {} triples."". format ( len ( heads ), mode ) ) return ( heads, rels, tails )",True,skip_first_line,skip_first_line,0.6486442685127258 4426,"def write_results_json ( self ) : """"""Write test results into a json-formatted file"""""" results = OrderedDict ( [ ( ""tester_host"", self. hostname ), ( ""start_time"", self. start_time ), ( ""elapsed_time"", self. elapsed_time ), ( ""tests"", OrderedDict ( ) ), ] ) for status, test, reason in self. all_results ( ) : test_result = OrderedDict ( [ ( ""name"", test. name ), ( ""description"", test. shortDescription ( ) ), ( ""status"", status ), ( ""start_time"", test. start_time ), ( ""elapsed_time"", test. elapsed_time ), ( ""measurements"", test. measurements ), ] ) if status in ( ""ERROR"", ""FAILURE"", ""EXPECTED_FAILURE"" ) : test_result [ ""message"" ] = str ( test. err [ 1 ] ) test_result [ ""err_type"" ] = test. err [ 0 ]. __name__ test_result [ ""err_output"" ] = reason elif : test_result [ ""message"" ] = reason results [ ""tests"" ] [ test. name ] = test_result with open ( os. path. join ( self. 
out_dir, ""results.json"" )",False,reason,"status in ('SUCCESS', 'FAIL', 'ERROR')",0.6961096525192261 4427,"def filter ( self, lexer, stream ) : current_type = None current_value = None for ttype, value in stream : if ttype is current_type : current_value += value else : if : yield current_type, current_value current_type = ttype current_value = value if : yield current_type, current_value",False,current_type is not None,current_type is None,0.6527062058448792 4428,"def _tearDownPreviousClass ( self, test, result ) : super ( TestSuite, self ). _tearDownPreviousClass ( test, result ) previousClass = getattr ( result, ""_previousTestClass"", None ) currentClass = test. __class__ if currentClass == previousClass : return if previousClass and getattr ( previousClass, ""tearDownClass"", None ) : prerun_class_attributes = getattr ( previousClass, ""_prerun_class_attributes"", None ) if prerun_class_attributes is not None : previousClass. _prerun_class_attributes = None del previousClass. _prerun_class_attributes for attr in prerun_class_attributes : if : attr_value = getattr ( previousClass, attr, None ) if attr_value is None : continue if isinstance ( attr_value, ( bool, ) + six. string_types + six. integer_types ) : setattr ( previousClass, attr, None ) continue log. warning ( ""Deleting extra class attribute after test run: %s.%s(%s). "" ""Please consider using '",True,"hasattr(previousClass, attr)","hasattr(previousClass, attr)",0.6533479690551758 4429,"def listClasses ( module = None ) : if module : __import__ ( module ) pkg = sys. modules [ module ] print ( ""Available Interfaces:"" ) for k, v in sorted ( list ( pkg. __dict__. items ( ) ) ) : if : print ( ""\t%s"" % k )",False,"inspect.isclass(v) and issubclass(v, Interface)",v,0.649504542350769 4430,"def get_cur_window ( self ) : i = 0 while True : try : cur_window = self. sniffer. the_display. get_input_focus ( ). focus cur_class = None cur_name = None while cur_class is None and cur_class is None : if type ( cur_window ) is int : return None, None, None cur_name = cur_window. get_wm_name ( ) cur_class = cur_window. get_wm_class ( ) if cur_class is None : cur_window = cur_window. query_tree ( ). parent except Xlib. error. XError : i += 1 if : return None, None, None continue break return cur_class [ 1 ], cur_window, cur_name",False,i >= 10,i > 2,0.6876682639122009 4431,"def _compare_dirs ( self, dir1, dir2 ) : diff = [ ] for root, dirs, files in os. walk ( dir1 ) : for file_ in files : path = os. path. join ( root, file_ ) target_path = os. path. join ( dir2, os. path. split ( path ) [ - 1 ] ) if : diff. append ( file_ ) return diff",False,not os.path.exists(target_path),os.path.exists(target_path),0.6455015540122986 4432,"def _ ( column, pivotValue ) : if column == colList [ 0 ] : query = dumpNode. query. replace ( ""'%s'"" if unescaper. escape ( pivotValue, False )!= pivotValue else ""%s"", ""%s"" ) % ( agent. preprocessField ( table, column ), table, agent. preprocessField ( table, column ), unescaper. escape ( pivotValue, False ), ) else : query = dumpNode. query2. replace ( ""'%s'"" if unescaper. escape ( pivotValue, False )!= pivotValue else ""%s"", ""%s"" ) % ( agent. preprocessField ( table, column ), table, agent. preprocessField ( table, colList [ 0 ] ), unescaper. escape ( pivotValue, False ) if : else pivotValue, ) query = agent. whereQuery ( query ) return unArrayizeValue ( inject. 
getValue ( query, blind = blind, time = blind, union = not blind, error = not blind ) )",False,SINGLE_QUOTE_MARKER not in dumpNode.query2,pivotValue == False,0.6576843857765198 4433,"def save ( self, * args, ** kwargs ) : ""Process form"" if self. instance : if : if self. cleaned_data [ ""active"" ] == ""active"" : self. instance. active = True if self. cleaned_data [ ""active"" ] == ""inactive"" : self. instance. active = False self. instance. save ( )",False,self.is_valid(),self.cleaned_data.get('active'),0.6517860889434814 4434,"def import_til ( self ) : log ( ""Importing type libraries..."" ) cur = self. db_cursor ( ) sql = ""select name from diff.program_data where type = 'til'"" cur. execute ( sql ) for row in cur. fetchall ( ) : til = row [ ""name"" ] if : til = til. decode ( ""utf-8"" ) try : add_default_til ( til ) except : log ( ""Error loading til %s: %s"" % ( row [ ""name"" ], str ( sys. exc_info ( ) [ 1 ] ) ) ) cur. close ( ) auto_wait ( )",False,type(til) is bytes,til,0.6544854640960693 4435,"def get_identity ( self ) : """"""The assertion can contain zero or one attributeStatements"""""" ava = { } for _assertion in self. assertions : if : if _assertion. advice. assertion : for tmp_assertion in _assertion. advice. assertion : if tmp_assertion. attribute_statement : assert len ( tmp_assertion. attribute_statement ) == 1 ava. update ( self. read_attribute_statement ( tmp_assertion. attribute_statement [ 0 ] ) ) if _assertion. attribute_statement : assert len ( _assertion. attribute_statement ) == 1 _attr_statem = _assertion. attribute_statement [ 0 ] ava. update ( self. read_attribute_statement ( _attr_statem ) ) if not ava : logger. error ( ""Missing Attribute Statement"" ) return ava",True,_assertion.advice,_assertion.advice,0.6740684509277344 4436,"def maybe_move ( self, spec, dist_filename, setup_base ) : dst = os. path. join ( self. build_directory, spec. key ) if os. path. exists ( dst ) : msg = ""%r already exists in %s; build directory %s will not be kept"" log. warn ( msg, spec. key, self. build_directory, setup_base ) return setup_base if os. path. isdir ( dist_filename ) : setup_base = dist_filename else : if os. path. dirname ( dist_filename ) == setup_base : os. unlink ( dist_filename ) contents = os. listdir ( setup_base ) if : dist_filename = os. path. join ( setup_base, contents [ 0 ] ) if os. path. isdir ( dist_filename ) : setup_base = dist_filename ensure_directory ( dst ) shutil. move ( setup_base, dst ) return dst",False,len(contents) == 1,len(contents) > 0,0.6538456678390503 4437,"def __str__ ( self ) : reps = [ ] for index, layer in enumerate ( self ) : if : rep = inspect. getsource ( layer ). strip ( ). rstrip ( "","" ) else : rep = str ( layer ) rep = ""({index}): {rep},"". format ( index = index, rep = rep ) for line in rep. splitlines ( ) : reps. append ( "" {line}\n"". format ( line = line ) ) reps = """". join ( reps ) if reps : reps = ""\n"" + reps return ""{cls}({layers})"". format ( cls = self. __class__. __name__, layers = reps, )",False,"getattr(layer, '__name__', None) == ''","isinstance(layer, str)",0.6475077867507935 4438,"def debug_tree ( tree ) : l = [ ] for elt in tree : if : l. append ( _names. get ( elt, elt ) ) elif isinstance ( elt, str ) : l. append ( elt ) else : l. 
append ( debug_tree ( elt ) ) return l",False,"isinstance(elt, (int, long))","isinstance(elt, int)",0.6580402851104736 4439,"def export_as_search_index ( self ) -> Dict [ str, Any ] : """"""Get the search index DTO from this transaction"""""" search_indexes = { ""timestamp"" : int ( self. timestamp ), ""tag"" : self. tag, ""block_id"" : int ( self. block_id ), } if self. invoker : search_indexes [ ""invoker"" ] = self. invoker reserved_keywords = search_indexes. keys ( ) for key, value in self. custom_indexed_data. items ( ) : if : search_indexes [ key ] = value else : _log. error ( f""Requested field name: {key} is a reserved keyword. Will not index"" ) return search_indexes",False,key not in reserved_keywords,key in reserved_keywords,0.6650009155273438 4440,"def load ( self, session, * args, ** kwargs ) : from mailpile. plugins. core import Rescan random. seed ( os. urandom ( 8 ) ) keep_lockdown = self. sys. lockdown with self. _lock : rv = self. _unlocked_load ( session, * args, ** kwargs ) if not kwargs. get ( ""public_only"" ) : if : from mailpile. plugins. setup_magic import Setup Setup ( session, ""setup"" ). run ( ) Rescan ( session, ""rescan"" ). _idx ( wait = False ) self. gnupghome = GnuPG ( self ). gnupghome ( ) if keep_lockdown : self. sys. lockdown = keep_lockdown return rv",False,self.version != APPVER,rv,0.6569525599479675 4441,"def prune_constraints ( self, constraints, a ) : if self. inactive_window == 0 : return k = 0 for i, sample in enumerate ( constraints ) : if : continue n_old_constraints_sample = len ( self. last_active [ i ] ) if n_old_constraints_sample < len ( sample ) : self. last_active [ i ] = np. hstack ( [ self. last_active [ i ], [ 0 ] ] ) inactive_this = a [ k : k + len ( sample ) ] < self. inactive_threshold * self. C self. last_active [ i ] [ inactive_this ] += 1 k += len ( sample ) assert len ( sample ) == len ( self. last_active [ i ] ) to_remove = self. last_active [ i ] > self. inactive_window self. last_active [ i ] = self. last_active [ i ] [ ~ to_remove ] for j in np. where ( to_remove ) [ 0 ] [ : : - 1 ] : del sample [ j ] assert len ( sample ) == len ( self. last_active [ i ] )",False,not len(sample),self.last_active[i] == 0,0.6522111296653748 4442,"def _process_template_configs ( name, implementation, configs, rules ) : from jinja2. exceptions import TemplateNotFound counters = defaultdict ( lambda : 1 ) includes = defaultdict ( list ) for config in configs : if not isinstance ( config, dict ) : continue if ""type"" not in config : continue template_type = config [ ""type"" ] del config [ ""type"" ] if template_type not in rules : continue rule = rules [ template_type ] data = _process_template_config ( name, implementation, rule, config = config, counter = counters [ template_type ] ) if data is None : continue includes [ template_type ]. append ( rule [ ""to_entry"" ] ( data ) ) counters [ template_type ] += 1 for template_type in rules : if : rule = rules [ template_type ] data = _process_template_config ( name, implementation, rule ) if data is not None : try : app. jinja_env. get_or_select_template ( data [ ""template"" ] ) except TemplateNotFound : pass except Exception : _logger. exception ( ""Error in template {}, not going to",False,len(includes[template_type]) == 0,template_type in rules,0.6547961831092834 4443,"def set_meta ( self, dataset, overwrite = True, ** kwd ) : super ( ). set_meta ( dataset, overwrite = overwrite, ** kwd ) try : if dataset and zipfile. is_zipfile ( dataset. file_name ) : with zipfile. ZipFile ( dataset. 
file_name ) as tempzip : if ""searchgui.properties"" in tempzip. namelist ( ) : with tempzip. open ( ""searchgui.properties"" ) as fh : for line in io. TextIOWrapper ( fh ) : if : version = line. split ( ""="" ) [ 1 ]. strip ( ) dataset. metadata. searchgui_version = version dataset. metadata. searchgui_major_version = ( version. split ( ""."" ) [ 0 ] ) except Exception as e : log. warning ( ""%s, set_meta Exception: %s"", self, e )",False,line.startswith('searchgui.version'),line.startswith(b'<'),0.648409903049469 4444,"def _key_name ( self, key_code ) : """"""Return a normalized key name for key_code."""""" if isinstance ( key_code, int ) : if key_code in key_map. keys ( ) : return key_map [ key_code ] curs_key_name = self. _curses_key_name ( key_code ) if curs_key_name : if : return key_map [ curs_key_name ] self. is_typeable = ( True ) return curs_key_name else : char = None if key_code in key_map. keys ( ) : return key_map [ key_code ] if sys. version_info [ 0 ] >= 3 : if isinstance ( key_code, str ) : self. is_typeable = True return key_code try : char = chr ( key_code ) except : pass if char is not None : self. is_typeable = True return char return False",False,curs_key_name in key_map.keys(),key_code in key_map.keys(),0.6498466730117798 4445,"def _document_params ( self, section, value, comments, path, shape ) : param_section = section. add_new_section ( ""param-values"" ) self. _start_nested_value ( param_section, ""("" ) for key, val in value. items ( ) : path. append ( "".%s"" % key ) item_section = param_section. add_new_section ( key ) item_section. style. new_line ( ) item_comment = self. _get_comment ( path, comments ) if item_comment : item_section. write ( item_comment ) item_section. style. new_line ( ) item_section. write ( key + ""="" ) item_shape = None if : item_shape = shape. members. get ( key ) self. _document ( item_section, val, comments, path, item_shape ) path. pop ( ) param_section_end = param_section. add_new_section ( ""ending-parenthesis"" ) self. _end_nested_value ( param_section_end, "")"" )",False,shape,shape is not None,0.6855771541595459 4446,"def __init__ ( self, * args, ** kw ) : for i, ( name, typ ) in enumerate ( self. _fields_ ) : def_arg = None if i < len ( args ) : def_arg = args [ i ] if name in kw : def_arg = kw [ name ] if : if not isinstance ( def_arg, tuple ) : def_arg = ( def_arg, ) else : def_arg = ( ) if len ( def_arg ) == 1 and isinstance ( def_arg [ 0 ], typ ) : def_val = def_arg [ 0 ] else : def_val = typ ( * def_arg ) setattr ( self, name, def_val )",False,def_arg is not None,def_arg is None,0.6618744134902954 4447,"def _is_legacy_mode ( self, node ) : """"""Checks if the ``ast.Call`` node's keywords signal using legacy mode."""""" script_mode = False py_version = ""py2"" for kw in node. keywords : if kw. arg == ""script_mode"" : script_mode = ( bool ( kw. value. value ) if isinstance ( kw. value, ast. NameConstant ) else True ) if : py_version = kw. value. s if isinstance ( kw. value, ast. Str ) else ""py3"" return not ( py_version. startswith ( ""py3"" ) or script_mode )",False,kw.arg == 'py_version',kw.arg == 'py2',0.6535788774490356 4448,"def create ( ) : image_shape = height, width, colors kw = dict ( parse_fn = parse_fn ) datasets = dict ( train = core. DataSet. from_files ( train_files, image_shape, ** kw ). skip ( valid ), valid = core. DataSet. from_files ( train_files, image_shape, ** kw ). take ( valid ), test = { key : core. DataSet. from_files ( value, image_shape, ** kw ) for key, value in test_files. 
items ( ) }, ) if cache : cached_datasets = { } for key, value in datasets. items ( ) : if : cached_datasets [ key ] = { k : v. cache ( ) for k, v in value. items ( ) } else : cached_datasets [ key ] = value. cache ( ) datasets = cached_datasets return cls ( name + ""-"" + str ( valid ), nclass = nclass, ** datasets )",False,"isinstance(value, dict)",value is not None,0.6478612422943115 4449,"def chat ( self, gpg_args, callback, * args, ** kwargs ) : """"""This lets a callback have a chat with the GPG process..."""""" gpg_args = ( self. common_args ( interactive = True, will_send_passphrase = True ) + [ ""--no-tty"", ""--command-fd=0"", ""--status-fd=1"", ] + ( gpg_args or [ ] ) ) proc = None try : self. debug ( ""Running %s"" % "" "". join ( gpg_args ) ) self. event. update_args ( gpg_args ) proc = Popen ( gpg_args, stdin = PIPE, stdout = PIPE, stderr = PIPE, bufsize = 0, long_running = True ) return callback ( proc, * args, ** kwargs ) finally : if : proc. stdin. close ( ) if proc : self. event. update_return_code ( proc. wait ( ) ) else : self. event. update_return_code ( - 1 )",False,proc and proc.stdin,proc,0.659489095211029 4450,"def alter_list_data_to_serialize ( self, request, data ) : data = super ( TaskTemplateResource, self ). alter_list_data_to_serialize ( request, data ) user_model = get_user_model ( ) user = request. user collected_templates = ( user_model. objects. get ( username = user. username ) . tasktemplate_set. all ( ) . values_list ( ""id"", flat = True ) ) template_ids = [ bundle. obj. id for bundle in data [ ""objects"" ] ] templates_labels = TemplateLabelRelation. objects. fetch_templates_labels ( template_ids ) for bundle in data [ ""objects"" ] : if : bundle. data [ ""is_add"" ] = 1 else : bundle. data [ ""is_add"" ] = 0 bundle. data [ ""template_labels"" ] = templates_labels. get ( bundle. obj. id, [ ] ) return data",False,bundle.obj.id in collected_templates,'is_add' not in bundle.data,0.6549702882766724 4451,"def _continue ( self ) : while True : try : value = unwrap ( self. _last_value ) error = None except BaseException as e : value = None error = e try : self. _accept_yield_result ( self. _continue_on_generator ( value, error ) ) except StopIteration as error : try : return_value = error. value except AttributeError : return_value = None self. _queue_exit ( return_value ) except GeneratorExit as error : error_type = type ( error ) if error_type is AsyncTaskResult : self. _queue_exit ( error. result ) elif error_type is AsyncTaskCancelledError : self. _accept_error ( error ) else : self. _queue_exit ( None ) except BaseException as error : self. _accept_error ( error ) if self. is_computed ( ) : return if : return",False,len(self._dependencies) > 0,self.is_generator(),0.6588523387908936 4452,"def _Determine_Do ( self ) : self. applicable = 1 self. value = os. environ. get ( self. name, None ) if self. value is None and black. configure. items. has_key ( ""buildType"" ) : buildType = black. configure. items [ ""buildType"" ]. Get ( ) if : self. value = ""warn"" else : self. value = None self. determined = 1",False,buildType == 'debug',buildType and buildType.Get() == 'warn',0.6556507349014282 4453,"def validate_precedence ( self ) : preclist = [ ] if self. prec : if : self. log. error ( ""precedence must be a list or tuple"" ) self. error = 1 return for level, p in enumerate ( self. prec ) : if not isinstance ( p, ( list, tuple ) ) : self. log. error ( ""Bad precedence table"" ) self. error = 1 return if len ( p ) < 2 : self. log. 
error ( ""Malformed precedence entry %s. Must be (assoc, term,..., term)"", p ) self. error = 1 return assoc = p [ 0 ] if not isinstance ( assoc, str ) : self. log. error ( ""precedence associativity must be a string"" ) self. error = 1 return for term in p [ 1 : ] : if not isinstance ( term, str ) : self. log. error ( ""precedence items must be strings"" ) self. error = 1 return preclist. append ( ( term",False,"not isinstance(self.prec, (list, tuple))","not isinstance(self.prec, list)",0.6559296250343323 4454,"def from_introspection ( cls, table_name, trigger_data ) : ( id, name, proc, constraint, granularity, timing, events, definition, metadata, ) = trigger_data if metadata : metadata = json. loads ( metadata ) else : metadata = { } condition = None if definition : when_off = definition. find ( ""WHEN ("" ) if : pos = when_off + 6 brackets = 1 while brackets : if definition [ pos ] == "")"" : brackets -= 1 elif definition [ pos ] == ""("" : brackets += 1 pos += 1 condition = definition [ when_off + 6 : pos - 1 ] trg = cls ( name = name, table_name = table_name, events = events, timing = timing, granularity = granularity, procedure = proc, condition = condition, is_constraint = bool ( constraint ), metadata = metadata, ) return trg",False,when_off != -1,when_off > -1,0.6642345786094666 4455,"def run ( self, edit ) : if not self. has_selection ( ) : region = sublime. Region ( 0, self. view. size ( ) ) originalBuffer = self. view. substr ( region ) prefixed = self. prefix ( originalBuffer ) if prefixed : self. view. replace ( edit, region, prefixed ) return for region in self. view. sel ( ) : if : continue originalBuffer = self. view. substr ( region ) prefixed = self. prefix ( originalBuffer ) if prefixed : self. view. replace ( edit, region, prefixed )",False,region.empty(),originalBuffer == '',0.6561012268066406 4456,"def format_errors ( messages ) : errors = { } for k, v in messages. items ( ) : key = camelize ( k, uppercase_first_letter = False ) if isinstance ( v, dict ) : errors [ key ] = format_errors ( v ) elif : errors [ key ] = v [ 0 ] return errors",True,"isinstance(v, list)","isinstance(v, list)",0.6535281538963318 4457,"def poll_authorizations ( self, orderr, deadline ) : """"""Poll Order Resource for status."""""" responses = [ ] for url in orderr. body. authorizations : while datetime. datetime. now ( ) < deadline : authzr = self. _authzr_from_response ( self. _post_as_get ( url ), uri = url ) if : responses. append ( authzr ) break time. sleep ( 1 ) if len ( responses ) < len ( orderr. body. authorizations ) : raise errors. TimeoutError ( ) failed = [ ] for authzr in responses : if authzr. body. status!= messages. STATUS_VALID : for chall in authzr. body. challenges : if chall. error is not None : failed. append ( authzr ) if failed : raise errors. ValidationError ( failed ) return orderr. update ( authorizations = responses )",False,authzr.body.status != messages.STATUS_PENDING,response,0.6537234783172607 4458,"def load_params ( self ) : path = str ( self. load_path. with_suffix ( "".json"" ). resolve ( ) ) log. info ( ""[loading parameters from {}]"". format ( path ) ) with open ( path, ""r"", encoding = ""utf8"" ) as fp : params = json. load ( fp ) for p in self. GRAPH_PARAMS : if : if p in ( ""kb_embedding_control_sum"" ) and ( math. abs ( self. opt. get ( p, 0.0 ) - params. get ( p, 0.0 ) ) < 1e-3 ) : continue raise ConfigError ( ""`{}` parameter must be equal to saved model"" "" parameter value `{}`, but is equal to `{}`"". format ( p, params. get ( p ), self. opt. 
get ( p ) ) )",False,self.opt.get(p) != params.get(p),p in params,0.6498596668243408 4459,"def _prepare_artifact ( self, artifact_or_path : Union [ wandb_artifacts. Artifact, str ], name : Optional [ str ] = None, type : Optional [ str ] = None, aliases : Optional [ List [ str ] ] = None, ) -> Tuple [ wandb_artifacts. Artifact, List [ str ] ] : aliases = aliases or [ ""latest"" ] if isinstance ( artifact_or_path, str ) : if name is None : name = ""run-%s-%s"" % ( self. id, os. path. basename ( artifact_or_path ) ) artifact = wandb. Artifact ( name, type ) if : artifact. add_file ( artifact_or_path ) elif os. path. isdir ( artifact_or_path ) : artifact. add_dir ( artifact_or_path ) elif ""://"" in artifact_or_path : artifact. add_reference ( artifact_or_path ) else : raise ValueError ( ""path must be a file, directory or external"" ""reference like s3://bucket/path"" ) else : artifact = artifact_or_path if not isinstance ( artifact, wandb. Artifact ) : raise ValueError ( ""You must pass an instance of wandb.Artifact or a "" ""valid file path to log_artifact"" ) if isinstance ( aliases, str ) : aliases = [ aliases ] artifact. finalize ( ) return artifact, aliases",True,os.path.isfile(artifact_or_path),os.path.isfile(artifact_or_path),0.6475059986114502 4460,"def __call__ ( self, data ) : degree = math. pi * random. uniform ( * self. degrees ) / 180.0 sin, cos = math. sin ( degree ), math. cos ( degree ) if data. pos. size ( - 1 ) == 2 : matrix = [ [ cos, sin ], [ - sin, cos ] ] else : if : matrix = [ [ 1, 0, 0 ], [ 0, cos, sin ], [ 0, - sin, cos ] ] elif self. axis == 1 : matrix = [ [ cos, 0, - sin ], [ 0, 1, 0 ], [ sin, 0, cos ] ] else : matrix = [ [ cos, sin, 0 ], [ - sin, cos, 0 ], [ 0, 0, 1 ] ] return LinearTransformation ( torch. tensor ( matrix ) ) ( data )",True,self.axis == 0,self.axis == 0,0.6647530794143677 4461,def logic ( ) : if goRight == ACTIVE : dir. next = DirType. RIGHT run. next = True elif goLeft == ACTIVE : dir. next = DirType. LEFT run. next = True if stop == ACTIVE : run. next = False if run : if : q. next [ 4 : 1 ] = q [ 3 : ] q. next [ 0 ] = not q [ 3 ] else : q. next [ 3 : ] = q [ 4 : 1 ] q. next [ 3 ] = not q [ 0 ],False,dir == DirType.LEFT,next,0.6672411561012268 4462,"def reassembleTree ( self, tree ) : root_id = None root_elt = None self. ids_from_idref = { } self. elts_from_id = { } for elt in tree. getiterator ( ) : attribs = elt. attrib id = attribs. get ( ""id"", None ) if id is None : log. error ( ""No id attribute for element %r"", elt ) continue idref = attribs. get ( ""idref"", None ) if idref is None : if : log. error ( ""Root should be %r, but found another:%r"", root_elt, elt ) continue root_id = id root_elt = elt else : self. ids_from_idref. setdefault ( idref, [ ] ). append ( id ) self. elts_from_id [ id ] = elt newTree = TreeNode ( root_elt ) self. treeBuilder ( newTree, root_id ) return newTree",False,root_id is not None,root_elt is None,0.6568630933761597 4463,"def set ( self, interface, listen_port = None, fwmark = None, private_key = None, peer = None ) : msg = wgmsg ( ) msg [ ""attrs"" ]. append ( [ ""WGDEVICE_A_IFNAME"", interface ] ) if private_key is not None : self. _wg_test_key ( private_key ) msg [ ""attrs"" ]. append ( [ ""WGDEVICE_A_PRIVATE_KEY"", private_key ] ) if listen_port is not None : msg [ ""attrs"" ]. append ( [ ""WGDEVICE_A_LISTEN_PORT"", listen_port ] ) if fwmark is not None : msg [ ""attrs"" ]. append ( [ ""WGDEVICE_A_FWMARK"", fwmark ] ) if peer is not None : self. 
_wg_set_peer ( msg, peer ) msg [ ""cmd"" ] = WG_CMD_SET_DEVICE msg [ ""version"" ] = WG_GENL_VERSION msg [ ""header"" ] [ ""type"" ] = self. prid msg [ ""header"" ] [ ""flags"" ] = NLM_F_REQUEST | NLM_F_ACK msg [ ""header"" ] [ ""pid"" ] = self. pid msg. encode ( ) self. sendto ( msg. data, ( 0, 0 ) ) msg = self. get ( ) [ 0 ] err = msg [ ""header"" ]. get ( ""error"", None ) if err is not None : if : logging. error ( ""Generic netlink protocol %s not found"" % self. prid ) logging. error ( ""Please check if the protocol module is loaded"" ) raise err return msg",False,"hasattr(err, 'code') and err.code == errno.ENOENT",self.prid is None,0.6485620141029358 4464,"def test_policy_gradient_cartpole ( self ) : """"""Trains a policy on cartpole."""""" task = rl_task. RLTask ( ""CartPole-v0"", max_steps = 200 ) lr = lambda : lr_schedules. multifactor ( constant = 1e-2, factors = ""constant"" ) max_avg_returns = - math. inf for _ in range ( 2 ) : agent = training. PolicyGradient ( task, model_fn = self. _model_fn, optimizer = opt. Adam, lr_schedule = lr, batch_size = 128, eval_temperatures = [ 0.0, 0.5 ], n_eval_episodes = 1, n_trajectories_per_epoch = 2, ) for ep in range ( 200 ) : agent. run ( 1 ) self. assertEqual ( agent. current_epoch, ep + 1 ) if : for eval_t in agent. _eval_temperatures : self. assertEqual ( len ( agent. _avg_returns_temperatures [ eval_t ] [ 200 ] ), len ( agent. avg_returns ), ) return max_avg_returns = max ( max_avg_returns, agent.",False,agent.avg_returns[-1] == 200.0,"hasattr(agent, '_eval_temperatures')",0.6594438552856445 4465,"def scan ( scope ) : for s in scope. children : if s. start_pos <= position <= s. end_pos : if : return scan ( s ) or s elif s. type in ( ""suite"", ""decorated"" ) : return scan ( s ) return None",False,"isinstance(s, (tree.Scope, tree.Flow))","s.type in ('test', 'notest')",0.6469538807868958 4466,"def mail_migrator ( app, schema_editor ) : Event_SettingsStore = app. get_model ( ""pretixbase"", ""Event_SettingsStore"" ) for ss in Event_SettingsStore. objects. filter ( key__in = [ ""mail_text_order_approved"", ""mail_text_order_placed"", ""mail_text_order_placed_require_approval"", ] ) : chgd = ss. value. replace ( ""{date}"", ""{expire_date}"" ) if : ss. value = chgd ss. save ( ) cache. delete ( ""hierarkey_{}_{}"". format ( ""event"", ss. object_id ) )",False,chgd != ss.value,chgd,0.6664128303527832 4467,"def read_until_regexp ( self, regexp_list ) : current_out = self. current_output for rgx in regexp_list : match = rgx. search ( current_out ) if : self. _update_buffer ( current_out [ match. end ( ) : ] ) return current_out [ : match. end ( ) ] return None",True,match,match,0.6698912382125854 4468,"def PackLVCOLUMN ( fmt = None, cx = None, text = None, subItem = None, image = None, order = None ) : extra = [ ] mask = 0 mask, fmt = _GetMaskAndVal ( fmt, 0, mask, commctrl. LVCF_FMT ) mask, cx = _GetMaskAndVal ( cx, 0, mask, commctrl. LVCF_WIDTH ) mask, text = _GetMaskAndVal ( text, None, mask, commctrl. LVCF_TEXT ) mask, subItem = _GetMaskAndVal ( subItem, 0, mask, commctrl. LVCF_SUBITEM ) mask, image = _GetMaskAndVal ( image, 0, mask, commctrl. LVCF_IMAGE ) mask, order = _GetMaskAndVal ( order, 0, mask, commctrl. LVCF_ORDER ) if text is None : text_addr = text_len = 0 else : if : text = text. encode ( ""mbcs"" ) text_buffer = array. array ( ""c"", text + ""\0"" ) extra. append ( text_buffer ) text_addr, text_len = text_buffer. buffer_info ( ) format = ""iiiiiiii"" buf = struct. 
pack ( format, mask, fmt, cx, text_addr, text_len, subItem, image, order ) return array. array ( ""c"", buf ), extra",False,"isinstance(text, unicode)","isinstance(text, str)",0.6465867757797241
4469,"def mouseDragEvent ( self, ev ) : if self. movable and ev. button ( ) == QtCore. Qt. LeftButton : if ev. isStart ( ) : self. moving = True self. cursorOffset = self. pos ( ) - self. mapToParent ( ev. buttonDownPos ( ) ) self. startPosition = self. pos ( ) ev. accept ( ) if not self. moving : return self. setPos ( self. cursorOffset + self. mapToParent ( ev. pos ( ) ) ) self. sigDragged. emit ( self ) if : self. moving = False self. sigPositionChangeFinished. emit ( self )",False,ev.isFinish(),self.moving,0.6657177209854126
4470,"def moveto ( self, other ) : if isinstance ( self, File ) and isinstance ( other, Folder ) : other = other. file ( self. basename ) if not isinstance ( other, MockFSObjectBase ) : raise NotImplementedError ( ""TODO: support cross object type move"" ) if other. isequal ( self ) : if other. path == self. path : raise ValueError ( ""Cannot move file or folder to self"" ) parentnode = self. _fs. stat ( self. pathnames [ : - 1 ] ) parentnode. data [ other. basename ] = parentnode. data. pop ( self. basename ) parentnode. on_changed ( ) if : self. watcher. emit ( ""moved"", self, other ) else : self. _mock_copyto ( other ) self. _remove ( removechildren = True ) if : self. watcher. emit ( ""moved"", self, other ) self. _cleanup ( ) return other",False,self.watcher,removechildren,0.6671056151390076
4471,"def kron_mmprod ( self, A, B ) : count = 0 D = len ( A ) for b in B. T : x = b N = 1 G = np. zeros ( D, dtype = np. int_ ) for d in range ( D ) : G [ d ] = len ( A [ d ] ) N = np. prod ( G ) for d in range ( D - 1, - 1, - 1 ) : X = np. reshape ( x, ( G [ d ], int ( np. round ( N / G [ d ] ) ) ), order = ""F"" ) Z = np. dot ( A [ d ], X ) Z = Z. T x = np. reshape ( Z, ( - 1, 1 ), order = ""F"" ) if : result = x else : result = np. column_stack ( ( result, x ) ) count += 1 return result",False,count == 0,count > 1,0.6703968048095703
4472,"def generate_digits_file ( source_path : str, target_path : str, line_count : int = 100, line_length : int = 9, sort_target : bool = False, line_count_empty : int = 0, seed = 13, ) : assert line_count_empty <= line_count random_gen = random. Random ( seed ) with open ( source_path, ""w"" ) as source_out, open ( target_path, ""w"" ) as target_out : all_digits = [ ] for _ in range ( line_count - line_count_empty ) : digits = [ random_gen. choice ( _DIGITS ) for _ in range ( random_gen. randint ( 1, line_length ) ) ] all_digits. append ( digits ) for _ in range ( line_count_empty ) : all_digits. append ( [ ] ) random_gen. shuffle ( all_digits ) for digits in all_digits : print ( C. TOKEN_SEPARATOR. join ( digits ), file = source_out ) if : digits. sort ( ) print ( C. TOKEN_SEPARATOR. join ( digits ), file = target_out )",True,sort_target,sort_target,0.6609206199645996
4473,"def list_plugins ( ) : result = ""\n\tSupported Plugin Commands:\n\n"" cmds = registry. get_plugin_classes ( commands. Command, lower = True ) profs = registry. get_plugin_classes ( obj. Profile ) if config. PROFILE == None : config. update ( ""PROFILE"", ""WinXPSP2x86"" ) if config. PROFILE not in profs : raise BaseException ( ""Invalid profile "" + config. PROFILE + "" selected"" ) profile = profs [ config. PROFILE ] ( ) wrongprofile = """" for cmdname in sorted ( cmds ) : command = cmds [ cmdname ] helpline = command. help ( ) or """" for line in helpline. splitlines ( ) : if : helpline = line break if command. is_valid_profile ( profile ) : result += ""\t\t{0:15}\t{1}\n"". format ( cmdname, helpline ) else : wrongprofile += ""\t\t{0:15}\t{1}\n"". format ( cmdname, helpline ) if wrongprofile and config. VERBOSE : result += ""\n\tPlugins requiring a different profile:\n\n"" result += wrongprofile return result",False,line,helpline and line[0] == 'h',0.6807525157928467
4474,"def _delete_chunks_after_reshape_single_sample ( self, sample, sample_shape, new_shape ) : if ( sample_shape <= new_shape ). all ( ) : return shapes = sample_shape assert len ( shapes. shape ) + 1 == len ( self. shape ) chunks = self. _storage_tensor. chunks [ 1 : ] div = np. ceil ( shapes / chunks ). astype ( ""int32"" ) for index in np. ndindex ( * div. tolist ( ) ) : if : try : del self [ ""."". join ( ( sample, ) + index ) ] except KeyError : pass",False,np.array(index) * chunks >= new_shape).any(,index.isValid(),0.6501710414886475
4475,"def _maybe_signal_recovery_end ( ) -> None : if self. in_recovery and not self. active_remaining_total ( ) : self. flush_buffers ( ) self. _set_recovery_ended ( ) if : self. _actives_span. set_tag ( ""Actives-Ready"", True ) self. signal_recovery_end. set ( )",False,self._actives_span is not None,self.actives_span is not None,0.6567281484603882
4476,"def get_auto_complete_terms ( cls, keyword, max_terms, limit = 10 ) : if not keyword : return [ ] with db_session : result = cls. search_keyword ( '""' + keyword + '""*', lim = limit ) [ : ] titles = [ g. title. lower ( ) for g in result ] all_terms = set ( ) for line in titles : if : break i1 = line. find ( keyword ) i2 = line. find ( "" "", i1 + len ( keyword ) ) term = line [ i1 : i2 ] if i2 >= 0 else line [ i1 : ] if term!= keyword : all_terms. add ( term ) return list ( all_terms )",False,len(all_terms) >= max_terms,len(line) < lim,0.6520348191261292
4477,"def process_file ( file_name ) : content = read_file ( file_name ) urls = URL_PATTERN. findall ( content ) content = re. sub ( HEADER_PATTERN, r""\g \g "", content ) directory = ""/"". join ( normalize_path ( file_name ). split ( ""/"" ) [ : - 1 ] ) paths = set ( ) md_paths = MD_PATH_PATTERN. findall ( content ) for md_path in md_paths : path = md_path. lstrip ( ""/"" ) if : path = ROOT_DIR / directory / path else : path = ROOT_DIR / path path = path. resolve ( ). relative_to ( ROOT_DIR ) paths. add ( normalize_path ( path ) ) content = content. replace ( ""("" + md_path + "")"", normalize_path ( path ) ) for url in urls : path = url [ len ( GITHUB_URL ) : ] paths. add ( path ) content = content. replace ( url, normalize_path ( os. path. relpath ( path, directory ) ) ) output_path = ROOT_DIR / ""docs"" / file_name if not output_path. parent. is_dir ( ) : os. makedirs ( output_path. parent ) with output_path. open ( ""w+"" ) as output_file : output_file. write ( content ) PROCESSED_PATHS. add ( normalize_path ( file_name ) ) for path in paths : if path not in PROCESSED_PATHS : process_file ( normalize_path ( path ) )",False,ROOT_DIR / directory / path).exists(,path.is_dir(),0.6505041122436523
4478,"def update ( self, update_tracks = True ) : self. enable_update_metadata_images ( False ) old_album_title = self. metadata [ ""album"" ] self. metadata [ ""album"" ] = config. setting [ ""nat_name"" ] for track in self. tracks : if : track. metadata [ ""album"" ] = self. metadata [ ""album"" ] for file in track. linked_files : track. update_file_metadata ( file ) self. enable_update_metadata_images ( True ) super ( ). update ( update_tracks )",False,old_album_title == track.metadata['album'],track.old_album_title == old_album_title,0.6523239612579346
4479,"def test_set_pycache_prefix ( self ) : NO_VALUE = object ( ) cases = [ ( None, None, None ), ( ""foo"", None, ""foo"" ), ( None, ""bar"", ""bar"" ), ( ""foo"", ""bar"", ""bar"" ), ( ""foo"", """", None ), ( ""foo"", NO_VALUE, None ), ] for envval, opt, expected in cases : exp_clause = ""is None"" if expected is None else f'== ""{expected}""' code = f""import sys; sys.exit(not sys.pycache_prefix {exp_clause})"" args = [ ""-c"", code ] env = { } if envval is None else { ""PYTHONPYCACHEPREFIX"" : envval } if opt is NO_VALUE : args [ : 0 ] = [ ""-X"", ""pycache_prefix"" ] elif : args [ : 0 ] = [ ""-X"", f""pycache_prefix={opt}"" ] with self. subTest ( envval = envval, opt = opt ) : with support. temp_cwd ( ) : assert_python_ok ( * args, ** env )",True,opt is not None,opt is not None,0.6585860848426819
4480,"def checked_reader ( fd, n ) : while n > 0 : rl, _, _ = select. select ( [ fd ], [ ], [ ] ) assert rl [ 0 ] == fd buf = os. read ( fd, n ) if : raise Exception ( ""Unexpected EOF reading %d more bytes"" % n ) yield buf n -= len ( buf )",False,not buf,len(buf) > n,0.6856896281242371
4481,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if : break if fid == 1 : if ftype == TType. STRUCT : self. query = Query ( ) self. query. read ( iprot ) else : iprot. skip ( ftype ) elif fid == 2 : if ftype == TType. STRING : self. clientCtx = iprot. readString ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STOP,fid == 0,0.6604976058006287
4482,"def pickline ( file, key, casefold = 1 ) : try : f = open ( file, ""r"" ) except IOError : return None pat = re. escape ( key ) + "":"" prog = re. compile ( pat, casefold and re. IGNORECASE ) while 1 : line = f. readline ( ) if not line : break if prog. match ( line ) : text = line [ len ( key ) + 1 : ] while 1 : line = f. readline ( ) if : break text = text + line return text. strip ( ) return None",False,not line or not line[0].isspace(),not text,0.6500121355056763
4483,"def is_upgradeable_proxy ( self ) -> bool : from slither. core. cfg. node import NodeType from slither. slithir. operations import LowLevelCall if self. _is_upgradeable_proxy is None : self. _is_upgradeable_proxy = False if : self. _is_upgradeable_proxy = True return True for f in self. functions : if f. is_fallback : for node in f. all_nodes ( ) : for ir in node. irs : if ( isinstance ( ir, LowLevelCall ) and ir. function_name == ""delegatecall"" ) : self. _is_upgradeable_proxy = True return self. _is_upgradeable_proxy if node. type == NodeType. ASSEMBLY : inline_asm = node. inline_asm if inline_asm : if ""delegatecall"" in inline_asm : self. _is_upgradeable_proxy = True ",False,'Proxy' in self.name,self.has_fallback,0.6601525545120239
4484,"def current_dict ( cursor_offset, line ) : """"""If in dictionary completion, return the dict that should be used"""""" for m in current_dict_re. finditer ( line ) : if : return LinePart ( m. start ( 1 ), m. end ( 1 ), m. group ( 1 ) ) return None",False,m.start(2) <= cursor_offset and m.end(2) >= cursor_offset,m.group(1) == cursor_offset,0.6473796963691711
4485,"def _get_required_scripts ( self, script, force = False ) : required_dls = [ ( url, self. _required_url_to_file_path ( url ) ) for url in script. requires ] if not force : required_dls = [ ( url, path ) for ( url, path ) in required_dls if not os. path. exists ( path ) ] if not required_dls : self. _add_script_with_requires ( script, quiet = True ) return download_manager = objreg. get ( ""qtnetwork-download-manager"" ) for url, target_path in required_dls : target = downloads. FileDownloadTarget ( target_path, force_overwrite = True ) download = download_manager. get ( QUrl ( url ), target = target, auto_remove = True ) download. requested_url = url self. _in_progress_dls. append ( download ) if : self. _on_required_download_finished ( script, download ) else : download. finished. connect ( functools. partial ( self. _on_required_download_finished, script, download ) )",False,download.successful,download.finished,0.6651439666748047
4486,"def dumps ( self ) : sections = [ ] for name, env_info in self. _dependencies_. items ( ) : sections. append ( ""[ENV_%s]"" % name ) for var, values in sorted ( env_info. vars. items ( ) ) : tmp = ""%s="" % var if : tmp += ""[%s]"" % "","". join ( [ '""%s""' % val for val in values ] ) else : tmp += ""%s"" % values sections. append ( tmp ) return ""\n"". join ( sections )",True,"isinstance(values, list)","isinstance(values, list)",0.6556833386421204
4487,"def func_named ( self, arg ) : result = None target = ""do_"" + arg if target in dir ( self ) : result = target else : if : funcs = [ fname for fname in self. keywords if fname. startswith ( arg ) ] if len ( funcs ) == 1 : result = ""do_"" + funcs [ 0 ] return result",False,self.abbrev,arg in self.keywords,0.6775655746459961
4488,"def _list_cases ( suite ) : for test in suite : if : _list_cases ( test ) elif isinstance ( test, unittest. TestCase ) : if support. match_test ( test ) : print ( test. id ( ) )",False,"isinstance(test, unittest.TestSuite)","isinstance(test, list)",0.6493837833404541
4489,"def generate_auto_complete ( self, base, iterable_var ) : sugg = [ ] for entry in iterable_var : compare_entry = entry compare_base = base if : compare_entry = compare_entry. lower ( ) compare_base = compare_base. lower ( ) if self. compare_entries ( compare_entry, compare_base ) : if entry not in sugg : sugg. append ( entry ) return sugg",False,self.settings.get(IGNORE_CASE_SETTING),compare_entry and compare_base,0.6510764360427856
4490,"def checkStates ( self, fit, base ) : pyfalog. debug ( ""Check states for fit ID: {0}"", fit ) changedMods = { } changedProjMods = { } changedProjDrones = { } for pos, mod in enumerate ( fit. modules ) : if mod is not base : canHaveState = mod. canHaveState ( mod. state ) if canHaveState is not True : changedMods [ pos ] = mod. state mod. state = canHaveState elif : changedMods [ pos ] = mod. state mod. state = FittingModuleState. ONLINE for pos, mod in enumerate ( fit. projectedModules ) : canHaveState = mod. canHaveState ( mod. state, fit ) if canHaveState is not True : changedProjMods [ pos ] = mod. state mod. state = canHaveState elif : changedProjMods [ pos ] = mod. state mod. state = FittingModuleState. OFFLINE for pos, drone in enumerate ( fit. projectedDrones ) : if drone. amountActive > 0 and not drone. canBeApplied ( fit ) : changedProjDrones [ pos ] = drone. amountActive drone. amountActive = 0 return changedMods, changedProjMods, changedProjDrones",False,not mod.isValidState(mod.state),len(fit.projectedModules) > 0,0.6494564414024353
4491,"def reset_parameters ( self ) : if self. opt [ ""tune_partial"" ] > 0 : offset = self. opt [ ""tune_partial"" ] + 2 if : self. network. embedding. weight. data [ offset : ] = self. network. fixed_embedding",False,offset < self.network.embedding.weight.data.size(0),self.opt[offset] > 0,0.6542505621910095
4492,"def test_mask ( ) : numpy. random. seed ( 42 ) image = numpy. random. uniform ( size = ( 10, 30 ) ) mask = numpy. zeros ( image. shape, bool ) mask [ :, : 20 ] = True labels = numpy. ones ( ( 10, 30 ), int ) workspace, module = make_workspace ( image, labels, mask = mask ) module. run ( workspace ) m = workspace. measurements workspace, module = make_workspace ( image [ :, : 20 ], labels [ :, : 20 ] ) module. run ( workspace ) me = workspace. measurements for f in m. get_feature_names ( INPUT_OBJECTS_NAME ) : if : values = m. get_current_measurement ( INPUT_OBJECTS_NAME, f ) expected = me. get_current_measurement ( INPUT_OBJECTS_NAME, f ) assert values == expected",False,f.startswith(cellprofiler.modules.measuretexture.TEXTURE),f in me.get_feature_names(INPUT_OBJECTS_NAME),0.6475114822387695
4493,"def catching_iter ( self, app_iter, environ ) : if not app_iter : raise StopIteration error_on_close = False try : for v in app_iter : yield v if hasattr ( app_iter, ""close"" ) : error_on_close = True app_iter. close ( ) except : response = self. exception_handler ( sys. exc_info ( ), environ ) if not error_on_close and hasattr ( app_iter, ""close"" ) : try : app_iter. close ( ) except : close_response = self. exception_handler ( sys. exc_info ( ), environ ) response += "" Error in .close(): %s"" % close_response if : response = response. encode ( ""utf8"" ) yield response",False,six.PY3,"isinstance(response, unicode_type)",0.6592411994934082
4494,"def should_include ( service ) : for f in filt : if : state = filt [ f ] containers = project. containers ( [ service. name ], stopped = True ) if not has_container_with_state ( containers, state ) : return False elif f == ""source"" : source = filt [ f ] if source == ""image"" or source == ""build"" : if source not in service. options : return False else : raise UserError ( ""Invalid value for source filter: %s"" % source ) else : raise UserError ( ""Invalid filter: %s"" % f ) return True",False,f == 'status',f == 'project',0.664634108543396
4495,"def _append ( self, other ) : assert ( self. _remote_init_builder is None ), ""We don't support append if data in the frame is mapped from a remote server."" if self. num_rows == 0 : self. _columns = { key : Column. create ( data ) for key, data in other. items ( ) } else : for key, col in self. items ( ) : if key in other : continue scheme = col. scheme ctx = F. context ( col. data ) if : self. _set_zero_default_initializer ( ) initializer = self. get_initializer ( key ) new_data = initializer ( ( other. num_rows, ) + scheme. shape, scheme. dtype, ctx, slice ( self. _num_rows, self. _num_rows + other. num_rows ), ) other [ key ] = new_data for key, col in other. items ( ) : if key not in self. _columns : self. add_column ( key, col. scheme, F. context ( col. data ) ) self. _columns [ key ]. extend ( col. data, col. scheme )",False,self.get_initializer(key) is None,self._remote_init_builder is None,0.6512762904167175
4496,"def get_recommendation_from_result ( result, keys_to_apply ) : rec = { } for key in keys_to_apply : val = result. get ( key ) if not val or val == NULL : continue if key == ""cpus"" : rec [ ""cpus"" ] = float ( val ) elif : rec [ ""cpu_burst_add"" ] = min ( 1, float ( val ) ) elif key == ""mem"" : rec [ ""mem"" ] = max ( 128, round ( float ( val ) ) ) elif key == ""disk"" : rec [ ""disk"" ] = max ( 128, round ( float ( val ) ) ) elif key == ""hacheck_cpus"" : hacheck_cpus_value = max ( 0.1, min ( float ( val ), 1 ) ) rec [ ""sidecar_resource_requirements"" ] = { ""hacheck"" : { ""requests"" : { ""cpu"" : hacheck_cpus_value, }, ""limits"" : { ""cpu"" : hacheck_cpus_value, }, }, } return rec",True,key == 'cpu_burst_add',key == 'cpu_burst_add',0.6572980880737305
4497,"def theme_path ( self ) : """"""Read 'theme' setting and return path to gutter icons."""""" theme = self. get ( ""theme"" ) if not theme : theme = ""Default.gitgutter-theme"" if theme!= os. path. basename ( self. _theme_path ) : if ST3 : themes = sublime. find_resources ( theme ) self. _theme_path = ( os. path. dirname ( themes [ - 1 ] ) if : else self. _PACKAGE_THEMES + ""/Default"" ) else : theme, _ = os. path. splitext ( theme ) self. _theme_path = ""/"". join ( ( self. _PACKAGE_THEMES, theme ) ) return self. _theme_path",False,themes,not self._theme_path,0.6987696886062622
4498,"def MissingExtraStringTest ( windows ) : ""Return the errors from running the test"" bugs = [ ] for win in windows : if not win. ref : continue for char in CharsToCheck : missing_extra = """" if win. WindowText ( ). count ( char ) > win. ref. WindowText ( ). count ( char ) : missing_extra = ""ExtraCharacters"" elif : missing_extra = ""MissingCharacters"" if missing_extra : bugs. append ( ( [ win, ], { ""MissingOrExtra"" : missing_extra, ""MissingOrExtraText"" : char }, testname, 0, ) ) return bugs",False,win.WindowText().count(char) < win.ref.WindowText().count(char),not missing_extra,0.6506792306900024
4499,"def get_display_price ( base : Union [ TaxedMoney, TaxedMoneyRange ], display_gross : bool = False ) -> Money : """"""Return the price amount that should be displayed based on settings."""""" if not display_gross : display_gross = display_gross_prices ( ) if isinstance ( base, TaxedMoneyRange ) : if : base = MoneyRange ( start = base. start. gross, stop = base. stop. gross ) else : base = MoneyRange ( start = base. start. net, stop = base. stop. net ) if isinstance ( base, TaxedMoney ) : base = base. gross if display_gross else base. net return base",True,display_gross,display_gross,0.6664857268333435
4500,"def decode_image ( im_file, im_info, label_info ) : if im_info is None : im_info = dict ( ) if isinstance ( im_file, np. ndarray ) : if : raise Exception ( ""im should be 3-dimensions, but now is {}-dimensions"". format ( len ( im_file. shape ) ) ) im = im_file else : try : im = cv2. imread ( im_file ). astype ( ""float32"" ) except : raise TypeError ( ""Can't read The image file {}!"". format ( im_file ) ) im = cv2. cvtColor ( im, cv2. COLOR_BGR2RGB ) im_info [ ""im_resize_info"" ] = np. array ( [ im. shape [ 0 ], im. shape [ 1 ], 1.0 ], dtype = np. float32 ) im_info [ ""image_shape"" ] = np. array ( [ im. shape [ 0 ], im. shape [ 1 ] ] ). astype ( ""int32"" ) if not self. use_mixup : if ""mixup"" in im_info : del im_info [ ""mixup"" ] if ""mixup"" in im_info : im_info [ ""mixup"" ] = decode_image ( im_info [ ""mixup"" ] [ 0 ], im_info [ ""mixup"" ] [ 1 ], im_info [ ""mixup"" ] [ 2 ] ) if label",True,len(im_file.shape) != 3,len(im_file.shape) != 3,0.6551808714866638
4501,"def pair_up ( self, rel_list, step ) : result = [ ] item = """" for word in rel_list [ : ] : if : continue if word. replace ( "" [styv]"", """" ) in _cousin_level : if item : result. append ( item ) item = """" result. append ( word ) continue if item : if word == ""syster"" : item = item [ 0 : - 1 ] word = ""ster"" elif word == ""dotter"" and item == ""bror"" : item = ""brors"" result. append ( item + word ) item = """" else : item = word if item : result. append ( item ) gen_result = [ item + ""s"" for item in result [ 0 : - 1 ] ] gen_result = "" "". join ( gen_result + result [ - 1 : ] ) if len ( rel_list ) > 1 and step!= """" and not gen_result. rfind ( "" [styv]"" ) : gen_result = gen_result + "" [styv]"" return gen_result",False,not word,"step != '' and word.find('[styv]', '')",0.6785291433334351
4502,"def get_next_video_frame ( self, skip_empty_frame = True ) : if not self. video_format : return while True : video_packet = self. _get_video_packet ( ) if video_packet. image == 0 : self. _decode_video_packet ( video_packet ) if : break if _debug : print ( ""Returning"", video_packet ) return video_packet. image",False,video_packet.image is not None or not skip_empty_frame,video_packet.image == skip_empty_frame,0.6496666669845581
4503,"def _run_interface ( self, runtime ) : mpars = np. loadtxt ( self. inputs. in_file ) mpars = np. apply_along_axis ( func1d = normalize_mc_params, axis = 1, arr = mpars, source = self. inputs. parameter_source, ) diff = mpars [ : - 1, : 6 ] - mpars [ 1 :, : 6 ] diff [ :, 3 : 6 ] *= self. inputs. radius fd_res = np. abs ( diff ). sum ( axis = 1 ) self. _results = { ""out_file"" : op. abspath ( self. inputs. out_file ), ""fd_average"" : float ( fd_res. mean ( ) ), } np. savetxt ( self. inputs. out_file, fd_res, header = ""FramewiseDisplacement"", comments = """" ) if self. inputs. save_plot : tr = None if : tr = self. inputs. series_tr if self. inputs. normalize and tr is None : IFLOGGER. warn ( ""FD plot cannot be normalized if TR is not set"" ) self. _results [ ""out_figure"" ] = op. abspath ( self. inputs. out_figure ) fig = plot_confound ( fd_res, self. inputs. figsize, ""FD"", units = ""mm"", series_tr = tr, normalize = self. inputs. normalize, ) fig. savefig ( None : async with aiohttp_session ( auth = self. auth ) as session : async with session. get ( url ) as response : response. raise_for_status ( ) if aiofiles is not None : async with aiofiles. open ( str ( path ), mode = ""wb"" ) as stream : while True : chunk = await response. content. read ( 1024 ) if : break await stream. write ( chunk ) else : with path. open ( mode = ""wb"" ) as stream : while True : chunk = await response. content. read ( 1024 ) if : break stream. write ( chunk )",False,not chunk,len(chunk) > 0,0.6757445931434631
4505,"def finalize_options ( self ) : install. finalize_options ( self ) if self. init_system and isinstance ( self. init_system, str ) : self. init_system = self. init_system. split ( "","" ) if len ( self. init_system ) == 0 and not platform. system ( ). endswith ( ""BSD"" ) : self. init_system = [ ""systemd"" ] bad = [ f for f in self. init_system if f not in INITSYS_TYPES ] if len ( bad )!= 0 : raise DistutilsArgError ( ""Invalid --init-system: %s"" % ( "","". join ( bad ) ) ) for system in self. init_system : datakeys = [ k for k in INITSYS_ROOTS if k. partition ( ""."" ) [ 0 ] == system ] for k in datakeys : if : continue self. distribution. data_files. append ( ( INITSYS_ROOTS [ k ], INITSYS_FILES [ k ] ) ) self. distribution. reinitialize_command ( ""install_data"", True )",False,not INITSYS_FILES[k],k not in INITSYS_ROOTS,0.6623057723045349
4506,"def _collect_manual_intervention_nodes ( pipeline_tree ) : for act in pipeline_tree [ ""activities"" ]. values ( ) : if : _collect_manual_intervention_nodes ( act [ ""pipeline"" ] ) elif act [ ""component"" ] [ ""code"" ] in MANUAL_INTERVENTION_COMP_CODES : manual_intervention_nodes. add ( act [ ""id"" ] )",False,act['type'] == 'SubProcess',act['pipeline'] in MANUAL_INTERVENTION_pipeline_CODES,0.6533069610595703
4507,"def menu_export_online_user_bookmark ( opisvalid, args, options ) : __log__. info ( ""Export Bookmark mode (m)."" ) member_id = """" filename = ""export-user.txt"" if opisvalid and len ( args ) > 0 : arg = args [ 0 ] if : filename = args [ 1 ] else : filename = f""export-user-{arg}.txt"" else : filename = input ( ""Filename: "" ). rstrip ( ""\r"" ) or filename arg = input ( ""Member Id: "" ). rstrip ( ""\r"" ) or """" arg = arg. lower ( ) if arg. isdigit ( ) : member_id = arg else : print ( ""Invalid args: "", arg ) PixivBookmarkHandler. export_bookmark ( sys. modules [ __name__ ], __config__, filename, ""n"", 1, 0, member_id )",False,len(args) > 1,opisvalid and args[1] > 0,0.6524175405502319
4508,"def on_enter_frame ( self, scene, context ) : if not self. height : return colors = { ""normal"" : self. style. get_color ( gtk. StateFlags. NORMAL ), ""normal_bg"" : self. style. get_background_color ( gtk. StateFlags. NORMAL ), ""selected"" : self. style. get_color ( gtk. StateFlags. SELECTED ), ""selected_bg"" : self. style. get_background_color ( gtk. StateFlags. SELECTED ), } g = graphics. Graphics ( context ) g. set_line_style ( 1 ) g. translate ( 0.5, 0.5 ) for row, y in zip ( self. rows, self. row_positions ) : g. save_context ( ) g. translate ( 0, y ) color, bg = colors [ ""normal"" ], colors [ ""normal_bg"" ] if : color, bg = colors [ ""selected"" ], colors [ ""selected_bg"" ] g. fill_area ( 0, 0, self. width, self. row_height, bg ) label = row. label if row. description : description_color = graphics. Colors. mix ( bg, color, 0.75 ) description_color_str = graphics. Colors. hex ( description_color ) label = '{} [{}]'. format ( label, description_color_str, row. description ) self. label. show ( g, label, color = color ) g. restore_context ( )",False,row == self.current_row,row.selected,0.6570982933044434
4509,def gettext ( rv ) : for child in rv. childNodes : if : yield child. nodeValue if child. nodeType == child. ELEMENT_NODE : for item in gettext ( child ) : yield item,False,child.nodeType == child.TEXT_NODE,child.nodeType == child.ELEMENT_NODE,0.6621392965316772
4510,"def _get_field_mapping ( pb, dict_value, strict ) : field_mapping = [ ] for key, value in dict_value. items ( ) : if key == EXTENSION_CONTAINER : continue if : if strict : raise KeyError ( ""%s does not have a field called %s"" % ( pb, key ) ) continue field_mapping. append ( ( pb. DESCRIPTOR. fields_by_name [ key ], value, getattr ( pb, key, None ) ) ) for ext_num, ext_val in dict_value. get ( EXTENSION_CONTAINER, { } ). items ( ) : try : ext_num = int ( ext_num ) except ValueError : raise ValueError ( ""Extension keys must be integers."" ) if ext_num not in pb. _extensions_by_number : if strict : raise KeyError ( ""%s does not have a extension with number %s. Perhaps you forgot to import it?"" % ( pb, key ) ) continue ext_field = pb. _extensions_by_number [ ext_num ] pb_val = None pb_val = pb. Extensions [ ext_field ] field_mapping. append ( ( ext_field, ext_val, pb_val ) ) return field_mapping",False,key not in pb.DESCRIPTOR.fields_by_name,pb.has_field(key),0.6539187431335449
4511,"def remove_selected ( self ) : """"""Removes selected items from list."""""" to_delete = [ ] for i in range ( len ( self ) ) : if : to_delete. append ( i ) to_delete. reverse ( ) for i in to_delete : self. pop ( i ) if len ( to_delete ) > 0 : first_to_delete = to_delete [ - 1 ] if first_to_delete == 0 and len ( self ) > 0 : self [ 0 ]. selected = True elif first_to_delete > 0 : self [ first_to_delete - 1 ]. selected = True",False,self[i].selected,self.get(i),0.6593027114868164
4512,"def write ( self, s ) : if self. interactive : if : self. active_mode. write ( s ) else : component. get ( ""CmdLine"" ). add_line ( s, False ) self. events. append ( s ) else : print ( colors. strip_colors ( s ) )",False,"isinstance(self.active_mode, deluge.ui.console.modes.cmdline.CmdLine)","hasattr(self, 'active_mode')",0.6495927572250366
4513,"def access_api_server ( self ) : config = get_config ( ) logger. debug ( f""Passive Hunter is attempting to access the API at {self.path}"" ) try : r = requests. get ( f""{self.path}/api"", headers = self. headers, verify = False, timeout = config. network_timeout, ) if : return r. content except requests. exceptions. ConnectionError : pass return False",False,r.status_code == 200 and r.content,timeout and r.status_code == 200,0.654805064201355
4514,"def name_match ( self, name, name1 ) : if not name1 or not name : return 0 srn1 = get_surnames ( name ) sfx1 = name. get_suffix ( ) srn2 = get_surnames ( name1 ) sfx2 = name1. get_suffix ( ) if not self. name_compare ( srn1, srn2 ) : return - 1 if sfx1!= sfx2 : if sfx1!= """" and sfx2!= """" : return - 1 if name. get_first_name ( ) == name1. get_first_name ( ) : return 1 else : list1 = name. get_first_name ( ). split ( ) list2 = name1. get_first_name ( ). split ( ) if : return self. list_reduce ( list1, list2 ) else : return self. list_reduce ( list2, list1 )",False,len(list1) < len(list2),len(list1) > len(list2),0.648535966873169
4515,"def _authenticate_plain ( self, domain, username, password ) : """"""PLAIN ZAP authentication"""""" allowed = False reason = b"""" if self. passwords : if not domain : domain = ""*"" if : if username in self. passwords [ domain ] : if password == self. passwords [ domain ] [ username ] : allowed = True else : reason = b""Invalid password"" else : reason = b""Invalid username"" else : reason = b""Invalid domain"" if allowed : self. log. debug ( ""ALLOWED (PLAIN) domain=%s username=%s password=%s"", domain, username, password, ) else : self. log. debug ( ""DENIED %s"", reason ) else : reason = b""No passwords defined"" self. log. debug ( ""DENIED (PLAIN) %s"", reason ) return allowed, reason",False,domain in self.passwords,domain in self.password,0.662284255027771
4516,"def print_implementation_coverage ( coverage ) : for service_name in sorted ( coverage ) : implemented = coverage. get ( service_name ) [ ""implemented"" ] not_implemented = coverage. get ( service_name ) [ ""not_implemented"" ] operations = sorted ( implemented + not_implemented ) if implemented and not_implemented : percentage_implemented = int ( 100.0 * len ( implemented ) / ( len ( implemented ) + len ( not_implemented ) ) ) elif implemented : percentage_implemented = 100 else : percentage_implemented = 0 print ( """" ) print ( ""## {}\n"". format ( service_name ) ) print ( ""{}% implemented\n"". format ( percentage_implemented ) ) for op in operations : if : print ( ""- [X] {}"". format ( op ) ) else : print ( ""- [ ] {}"". format ( op ) )",False,op in implemented,not_implemented,0.6711909770965576
4517,"def append_row ( self, row ) : self. allocate_future_payments ( row ) self. set_invoice_details ( row ) self. set_party_details ( row ) self. set_ageing ( row ) if self. filters. get ( ""group_by_party"" ) : self. update_sub_total_row ( row, row. party ) if : self. append_subtotal_row ( self. previous_party ) self. previous_party = row. party self. data. append ( row )",False,self.previous_party and self.previous_party != row.party,self.previous_party is not None,0.6525278091430664
4518,"def __all_links_rec ( self ) : pieces = [ ] temp = [ ( idx, tag ) for tag, idx in self. _supbook_xref. items ( ) ] temp. sort ( ) for idx, tag in temp : stype, snum = tag if : rec = BIFFRecords. InternalReferenceSupBookRecord ( len ( self. __worksheets ) ). get ( ) pieces. append ( rec ) elif stype == ""xcall"" : rec = BIFFRecords. XcallSupBookRecord ( ). get ( ) pieces. append ( rec ) temp = [ ( idx, name ) for name, idx in self. _xcall_xref. items ( ) ] temp. sort ( ) for idx, name in temp : rec = BIFFRecords. ExternnameRecord ( options = 0, index = 0, name = name, fmla = ""\x02\x00\x1c\x17"" ). get ( ) pieces. append ( rec ) else : raise Exception ( ""unknown supbook stype %r"" % stype ) if len ( self. __sheet_refs ) > 0 : temp = [ ( idx, ref ) for ref, idx in self. __sheet_refs. items ( ) ] temp. sort ( ) temp = [ ref for idx, ref in temp ] externsheet_record = BIFFRecords. ExternSheetRecord ( temp )",False,stype == 'ownbook',stype == 'supbook',0.6609514355659485
4519,"def _call ( self, pyfunction, args ) : if isinstance ( pyfunction, pyobjects. PyFunction ) : if : before = self. _parameter_objects ( pyfunction ) self. pycore. object_info. function_called ( pyfunction, args. get_arguments ( pyfunction. get_param_names ( ) ) ) pyfunction. _set_parameter_pyobjects ( None ) if : after = self. _parameter_objects ( pyfunction ) if after!= before : self. follow ( pyfunction ) if isinstance ( pyfunction, rope. base. builtins. BuiltinFunction ) : pyfunction. get_returned_object ( args )",False,self.follow is not None,self.has_last_parameter,0.6551228761672974
4520,"def post ( self, request, * args, ** kwargs ) : if ( ""id"" not in request. data and ""name"" in request. data and ""organization"" in request. data ) : existing = models. Label. objects. filter ( name = request. data [ ""name"" ], organization_id = request. data [ ""organization"" ] ) if : existing = existing [ 0 ] request. data [ ""id"" ] = existing. id del request. data [ ""name"" ] del request. data [ ""organization"" ] if ( models. Label. objects. filter ( unifiedjobtemplate_labels = self. kwargs [ ""pk"" ] ). count ( ) > 100 ) : return Response ( dict ( msg = _ ( ""Maximum number of labels for {} reached."". format ( self. parent_model. _meta. verbose_name_raw ) ) ), status = status. HTTP_400_BAD_REQUEST, ) return super ( JobTemplateLabelList, self ). post ( request, * args, ** kwargs )",False,existing.exists(),existing,0.6642444729804993
4521,"def load_annotations ( self ) : """"""Load annotation file to get video information."""""" if self. ann_file. endswith ( "".json"" ) : return self. load_json_annotations ( ) video_infos = [ ] with open ( self. ann_file, ""r"" ) as fin : for line in fin : line_split = line. strip ( ). split ( ) video_info = { } idx = 0 filename = line_split [ idx ] if : if not filename. endswith ( self. suffix ) : filename = osp. join ( self. data_prefix, filename + self. suffix ) else : filename = osp. join ( self. data_prefix, filename ) video_info [ ""audio_path"" ] = filename idx += 1 video_info [ ""total_frames"" ] = int ( line_split [ idx ] ) idx += 1 label = [ int ( x ) for x in line_split [ idx : ] ] assert label, f""missing label in line: {line}"" if self. multi_class : assert self. num_classes is not None onehot = torch. zeros ( self. num_classes ) onehot [ label ] = 1.0 video_info [ """,False,self.data_prefix is not None,self.data_prefix,0.6556156277656555
4522,"def PyJs_clear_1474_ ( this, arguments, var = var ) : var = Scope ( { u""this"" : this, u""clear"" : PyJs_clear_1474_, u""arguments"" : arguments }, var ) var. registers ( [ u""entry"", u""data"", u""that"" ] ) var. put ( u""that"", var. get ( u""this"" ) ) var. put ( u""data"", var. get ( u""that"" ). get ( u""_i"" ) ) var. put ( u""entry"", var. get ( u""that"" ). get ( u""_f"" ) ) while var. get ( u""entry"" ) : try : var. get ( u""entry"" ). put ( u""r"", var. get ( u""true"" ) ) if : var. get ( u""entry"" ). put ( u""p"", var. get ( u""entry"" ). get ( u""p"" ). put ( u""n"", var. get ( u""undefined"" ) ) ) var. get ( u""data"" ). delete ( var. get ( u""entry"" ). get ( u""i"" ) ) finally : var. put ( u""entry"", var. get ( u""entry"" ). get ( u""n"" ) ) var. get ( u""that"" ). put ( u""_f"", var. get ( u""that"" ). put ( u""_l"", var. get ( u""undefined"" ) ) ) var. get ( u""that"" ). put ( var.
get ( u""SIZE"" ), Js ( 0.0 ) )",False,var.get(u'entry').get(u'p'),"var.get(u""undefined')",0.6490271687507629 4523,"def _get_user_from_file ( self, wanted_user ) : """"""Get user from a passwd file"""""" wanted_uid = """" if isinstance ( wanted_user, ( int, long ) ) or re. match ( ""^\\d+$"", wanted_user ) : wanted_uid = str ( wanted_user ) wanted_user = """" try : inpasswd = open ( self. passwd_file ) except ( IOError, OSError ) : return ( """", """", """", """", """", """" ) else : for line in inpasswd : ( user, dummy, uid, gid, gecos, home, shell ) = line. strip ( ). split ( "":"" ) if wanted_user and user == wanted_user : return ( user, uid, gid, gecos, home, shell ) if : return ( user, uid, gid, gecos, home, shell ) inpasswd. close ( ) return ( """", """", """", """", """", """" )",False,wanted_uid and uid == wanted_uid,not inpasswd,0.6569399237632751 4524,"def _execute_fetch ( self, sql : str, parameters : Iterable = None, read_only = False, fetch_all : bool = False, ) -> List [ dict ] : read_only_fn = run_read_only_fetchall if fetch_all else run_read_only_fetchone parameters = parameters if parameters is not None else [ ] still_waiting = False urgent_read = False if read_only : self. waiting_reads_metric. inc ( ) self. read_count_metric. inc ( ) try : while ( self. writers and not self. _closing ) : if : self. urgent_read_done. clear ( ) urgent_read = True await self. read_ready. wait ( ) still_waiting = True if self. _closing : raise asyncio. CancelledError return await asyncio. get_event_loop ( ). run_in_executor ( self. reader_executor, read_only_fn, sql, parameters ) finally : if urgent_read : self. urgent_read_done. set ( ) self. waiting",False,not urgent_read and still_waiting and self.urgent_read_done.is_set(),still_waiting,0.6498017311096191 4525,"def add_cascade ( self, parent, label, menu, underline ) : """"""Create a menu with the given parent menu."""""" if parent : keys = { ""label"" : label, ""underline"" : underline } ch, label = self. createAccelLabel ( keys ) id = wx. NewId ( ) parent. AppendMenu ( id, label, menu, label ) accel = None if : self. createAccelData ( menu, ch, accel, id, label ) else : self. menuBar. Append ( menu, label )",False,ch,accel,0.6832994222640991 4526,"def target_function ( self, running, creds ) : while running. is_set ( ) : try : username, password = creds. next ( ). split ( "":"" ) tcp_client = self. tcp_create ( ) tcp_sock = tcp_client. connect ( ) apiros = ApiRosClient ( tcp_sock ) output = apiros. login ( username, password ) if : if self. stop_on_success : running. clear ( ) print_success ( ""Authentication Succeed - Username: '{}' Password: '{}'"". format ( username, password ), verbose = self. verbosity, ) self. credentials. append ( ( self. target, self. port, self. target_protocol, username, password ) ) else : print_error ( ""Authentication Failed - Username: '{}' Password: '{}'"". format ( username, password ), verbose = self. verbosity, ) tcp_",False,output[0][0] == '!done',self.username and password,0.6539021730422974 4527,"def saveDirectories ( self, ** kwargs ) : for kw in LIST_DIRPAGE + LIST_BOOL_DIRPAGE : value = kwargs. get ( kw ) if : if kw in ( ""complete_dir"", ""dirscan_dir"" ) : msg = config. get_config ( ""misc"", kw ). set ( value, create = True ) else : msg = config. get_config ( ""misc"", kw ). set ( value ) if msg : return badParameterResponse ( msg, kwargs. get ( ""ajax"" ) ) if not sabnzbd. 
check_incomplete_vs_complete ( ) : return badParameterResponse ( T ( ""The Completed Download Folder cannot be the same or a subfolder of the Temporary Download Folder"" ), kwargs. get ( ""ajax"" ), ) config. save_config ( ) if kwargs. get ( ""ajax"" ) : return sabnzbd. api. report ( ""json"" ) else : raise Raiser ( self. __root )",False,value is not None or kw in LIST_BOOL_DIRPAGE,value,0.6585636734962463 4528,"def wait ( self, seconds = None ) : readers = self. listeners [ READ ] writers = self. listeners [ WRITE ] if not readers and not writers : if seconds : time. sleep ( seconds ) return all_fds = list ( readers ) + list ( writers ) try : r, w, er = select. select ( readers. keys ( ), writers. keys ( ), all_fds, seconds ) except select. error as e : if get_errno ( e ) == errno. EINTR : return elif : self. _remove_bad_fds ( ) return else : raise for fileno in er : readers. get ( fileno, noop ). cb ( fileno ) writers. get ( fileno, noop ). cb ( fileno ) for listeners, events in ( ( readers, r ), ( writers, w ) ) : for fileno in events : try : listeners. get ( fileno, noop ). cb ( fileno ) except self. SYSTEM_EXCEPTIONS : raise except : self. squelch_exception ( fileno, sys. exc_info ( ) ) clear_sys_exc_info ( )",False,get_errno(e) in BAD_SOCK,get_errno(e) == errno.EBADF,0.6540961265563965 4529,"def do ( server, handler, config, args ) : if args. command == ""list"" : result = [ ] for section in config. sections ( ) : if args. section and args. section!= section : continue result. append ( ""[{}]"". format ( section ) ) if args. sections : continue for variable in config. options ( section ) : result. append ( ""{} = {}"". format ( variable, config. get ( section, variable ) ) ) result. append ( """" ) handler. display ( Pygment ( IniLexer ( ), ""\n"". join ( result ) ) ) elif args. command == ""set"" : try : value = args. value if : value += "" "" value += "" "". join ( args. args ) config. set ( args. section, args. key, value ) config. save ( project = args. write_project, user = args. write_user ) except config. NoSectionError : handler. display ( Error ( args. section, ""No section"" ) ) elif args. command == ""unset"" : try : if args. keys : for key in args. keys : config. remove_option ( args. section, key ) else : to_remove",True,args.args,args.args,0.6710091233253479 4530,"def run ( ) : global ar2 get_args ( ) if not ar2 : usage ( ) dump_opts ( ) elif ar2 in data : do_step ( ar2 ) save_data ( ) if ar2 == ""next"" : for item in order : if : do_step ( item ) save_data ( ) break if ar2 == ""steps"" : print ( """" ) dump_opts ( ) if ar2 == ""show"" : for item in order : if : print ( ""\n{}: {}\n"". format ( item, data [ item ] [ 1 ] ) ) break if ar2 == ""restart"" : if exists ( DATA_FILE ) : unlink ( DATA_FILE ) if ar2 == ""commands"" : print ( cmds )",False,data[item][2] == False,item in data,0.6567115783691406 4531,"def run ( self, ips, imgs, para = None ) : k, unit = ips. unit strc = generate_binary_structure ( 3, 1 if para [ ""con"" ] == ""4-connect"" else 2 ) lab, n = label ( imgs == 0 if para [ ""inv"" ] else imgs, strc, output = np. uint32 ) idx = ( np. ones ( n + 1 ) * ( 0 if para [ ""inv"" ] else para [ ""front"" ] ) ). astype ( np. uint8 ) ls = regionprops ( lab ) for i in ls : if para [ ""vol"" ] == 0 : break if para [ ""vol"" ] > 0 : if i. area * k ** 3 < para [ ""vol"" ] : idx [ i. label ] = para [ ""back"" ] if para [ ""vol"" ] < 0 : if i. area * k ** 3 >= - para [ ""vol"" ] : idx [ i. 
label ] = para [ ""back"" ] for i in ls : if para [ ""dia"" ] == 0 : break d = norm ( np. array ( i. bbox [ : 3 ] ) - np. array ( i. bbox [ 3 : ] ) ) if para [ ""dia"" ] > 0 : if d * k < para [ ""dia"" ] : idx [ i. label ] = para [ ""back"" ] if para [ ""dia"" ] < 0 : if : idx [ i. label ] = para [ ""back"" ] idx [ 0 ] = para [ ""front"" ] if para",False,d * k >= -para['dia'],i.area * k * k < para['front'],0.663841962814331 4532,"def populate_stats_and_pop ( self, unused_slice_key : slicer. SliceKeyType, combine_metrics : Dict [ Text, Any ], output_metrics : Dict [ Text, metrics_pb2. MetricValue ], ) -> None : matrices = combine_metrics. pop ( self. _metric_key ( metric_keys. CONFUSION_MATRIX_AT_THRESHOLDS_MATRICES ) ) thresholds = combine_metrics. pop ( self. _metric_key ( metric_keys. CONFUSION_MATRIX_AT_THRESHOLDS_THRESHOLDS ) ) if len ( matrices )!= len ( thresholds ) : raise ValueError ( ""matrices should have the same length as thresholds, but lengths "" ""were: matrices: %d, thresholds: %d"" % ( len ( matrices ), len ( thresholds ) ) ) for threshold, matrix in zip ( thresholds, matrices ) : if : threshold = threshold. unsampled_value ( output_metrics [ self. _metric_key ( metric_keys. CONFUSION_MATRIX_AT_THRESHOLDS ) ] . confusion_matrix_at_thresholds. matrices. add ( ) . CopyFrom ( _create_confusion_matrix_proto ( matrix, threshold ) ) )",False,"isinstance(threshold, types.ValueWithTDistribution)",threshold is not None,0.6496580839157104 4533,"def topsorted_shapes_first ( outputs, node2shape ) : marks = { } out = [ ] stack = [ ] for x in outputs : stack. append ( ( x, 0 ) ) while stack : ( i, jidx ) = stack. pop ( ) if jidx == 0 : m = marks. get ( i, 0 ) if m == 0 : marks [ i ] = 1 elif : raise ValueError ( ""not a dag"" ) else : continue ps = i. parents if i. ndim > 0 and not i. is_input ( ) and i. op. return_type == ""byref"" : if i in node2shape : shpels = node2shape [ i ] else : raise core. Unreachable ps = ps + shpels elif is_tuple ( i ) : for arrshp in node2shape [ i ] : ps = ps + arrshp if jidx == len ( ps ) : len(out),0.6847740411758423 4534,"def match_uri_to_workspace ( uri, workspaces ) : if uri is None : return None max_len, chosen_workspace = - 1, None path = pathlib. Path ( uri ). parts for workspace in workspaces : try : workspace_parts = pathlib. Path ( workspace ). parts except TypeError : workspace_parts = pathlib. Path ( unicode ( workspace ) ). parts if : continue match_len = 0 for workspace_part, path_part in zip ( workspace_parts, path ) : if workspace_part == path_part : match_len += 1 if match_len > 0 : if match_len > max_len : max_len = match_len chosen_workspace = workspace return chosen_workspace",False,len(workspace_parts) > len(path),len(workspace_parts) > max_len,0.6503225564956665 4535,"def _read ( self, last_pass = False ) : lines = self. file. get_lines ( size = 1024 * 1024, last_pass = last_pass ) for line in lines : if : self. skipped_header = True continue log_vals = [ val. strip ( ) for val in line. split ( ""\t"" ) ] _error = None _rstatus = None _url = self. url_label _concur = self. 
concurrency _tstamp = int ( log_vals [ 1 ] ) _con_time = float ( log_vals [ 2 ] ) / 1000.0 _etime = float ( log_vals [ 4 ] ) / 1000.0 _latency = float ( log_vals [ 5 ] ) / 1000.0 _bytes = None yield _tstamp, _url, _concur, _etime, _con_time, _latency, _rstatus, _error, """", _bytes",False,not self.skipped_header,line == last_pass,0.6564524173736572 4536,"def testExactlyOneInvalid ( self ) : with SetBotoConfigForTest ( [ ( ""Credentials"", ""gs_oauth2_refresh_token"", ""foo"" ), ( ""Credentials"", ""gs_service_client_id"", None ), ( ""Credentials"", ""gs_service_key_file"", None ), ] ) : succeeded = False try : GcsJsonApi ( None, self. logger ) succeeded = True except : warning_messages = self. log_handler. messages [ ""warning"" ] self. assertEquals ( 1, len ( warning_messages ) ) self. assertIn ( ""credentials are invalid"", warning_messages [ 0 ] ) self. assertIn ( CredTypes. OAUTH2_USER_ACCOUNT, warning_messages [ 0 ] ) if : self. fail ( ""Succeeded with invalid credentials, one configured."" )",False,succeeded,not succeeded,0.7072144746780396 4537,"def run ( self, edit, target_level = 0 ) : view = self. view view. run_command ( ""unfold_all"" ) section_start = - 1 section_end = view. size ( ) n_sections = 0 for ( title_begin, title_end, level ) in all_headings ( view ) : if : if section_start > 0 : section_end = title_begin - 1 reg = sublime. Region ( section_start, section_end ) view. fold ( reg ) n_sections += 1 section_start = - 1 if target_level == 0 or level == target_level : section_start = title_end if section_start >= 0 : reg = sublime. Region ( section_start, view. size ( ) ) view. fold ( reg ) n_sections += 1 if len ( view. sel ( ) ) > 0 : for sel in view. sel ( ) : if getFoldedRegion ( view, sel ) == None : view. show ( sel ) else : view. show ( sublime. Region ( 0, 0 ) ) sublime. status_message ( ""%d region%s folded"" % ( n_sections, ""s"" if n_sections > 1 else """" ) )",False,target_level == 0 or level <= target_level,level == 0,0.6517579555511475 4538,"def scaffold_directories ( cls, base_dir ) : """"""Safely create GE directories for a new project."""""" os. makedirs ( base_dir, exist_ok = True ) open ( os. path. join ( base_dir, "".gitignore"" ), ""w"" ). write ( ""uncommitted/"" ) for directory in cls. BASE_DIRECTORIES : if : plugins_dir = os. path. join ( base_dir, directory ) os. makedirs ( plugins_dir, exist_ok = True ) os. makedirs ( os. path. join ( plugins_dir, ""custom_data_docs"" ), exist_ok = True ) os. makedirs ( os. path. join ( plugins_dir, ""custom_data_docs"", ""views"" ), exist_ok = True, ) os. makedirs ( os. path. join ( plugins_dir, ""custom_data_docs"", ""renderers"" ), exist_ok = True, ) os. makedirs ( os. path. join ( plugins_dir, ""custom_data_docs"", ""styles"" ), exist_ok = True, ) cls. scaffold_custom_data_docs ( plugins_dir ) else : os. makedirs ( os. path. join ( base_dir, directory ), exist_ok = True ) uncommitted_dir = os. path. join ( base_dir, cls. GE_UNCOMMITTED_DIR ) for",False,directory == 'plugins',exist_ok,0.656913161277771 4539,"def cli_print_upgradeable ( ) -> None : args = parse_args ( ) updates : List [ InstallInfo ] = [ ] if not args. repo : aur_updates, _not_found_aur_pkgs = find_aur_updates ( ) updates += aur_updates if not args. aur : updates += find_repo_upgradeable ( ) if not updates : return for pkg in updates [ : ] : if : updates. remove ( pkg ) print_ignored_package ( package_name = pkg. name ) if args. quiet : print_stdout ( ""\n"". join ( [ pkg_update. 
name for pkg_update in updates ] ) ) else : print_stdout ( pretty_format_upgradeable ( updates, print_repo = PikaurConfig ( ). sync. AlwaysShowPkgOrigin. get_bool ( ) ) )",False,"pkg.name in args.ignore + PacmanConfig().options.get('IgnorePkg', [])",pkg.name in _not_found_aur_pkgs,0.658341646194458 4540,"def write ( self, text ) : try : if self. _hConsole is None : if isinstance ( text, unicode ) : text = text. encode ( ""utf-8"" ) self. _stream. write ( text ) else : if : text = bytes ( text ). decode ( ""utf-8"" ) remaining = len ( text ) while remaining > 0 : n = DWORD ( 0 ) retval = WriteConsoleW ( self. _hConsole, text, min ( remaining, 10000 ), byref ( n ), None ) if retval == 0 or n. value == 0 : raise IOError ( ""WriteConsoleW returned %r, n.value = %r"" % ( retval, n. value ) ) remaining -= n. value if remaining == 0 : break text = text [ n. value : ] except Exception as e : _complain ( ""%s.write: %r"" % ( self. name, e ) ) raise",False,"not isinstance(text, unicode)","isinstance(text, bytes)",0.6489019393920898 4541,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRING : self. key = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 2 : if : self. count = iprot. readI32 ( ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.I32,fid == 3,0.6586172580718994 4542,"def __new__ ( cls, typename, bases, ns ) : assert bases [ 0 ] is _NamedTuple types = ns. get ( ""__annotations__"", { } ) default_names = [ ] for field_name in types : if : default_names. append ( field_name ) elif default_names : raise TypeError ( f""Non-default namedtuple field {field_name} "" f""cannot follow default field"" f""{'s' if len(default_names) > 1 else ''} "" f""{', '.join(default_names)}"" ) nm_tpl = _make_nmtuple ( typename, types. items ( ), defaults = [ ns [ n ] for n in default_names ], module = ns [ ""__module__"" ], ) for key in ns : if key in _prohibited : raise AttributeError ( ""Cannot overwrite NamedTuple attribute "" + key ) elif key not in _special and key not in nm_tpl. _fields : setattr ( nm_tpl, key, ns [ key ] ) return nm_tpl",False,field_name in ns,field_name in types,0.6673007607460022 4543,"def wait_for_completion ( self, job_id, offset, max_results, start_time, timeout ) : """"""Wait for job completion and return the first page."""""" while True : result = self. get_query_results ( job_id = job_id, page_token = None, start_index = offset, max_results = max_results ) if result [ ""jobComplete"" ] : return result if : raise Exception ( ""Timeout: the query doesn't finish within %d seconds."" % timeout ) time. sleep ( 1 )",False,time.time() - start_time > timeout,timeout is None or time.time() - start_time > timeout,0.6540282964706421 4544,"def _resolve_oauth_config ( name, local_namespace, config, * variables ) : presets = PRESETS. get ( name, { } ) output = [ ] for variable in variables : value = local_namespace. get ( variable, None ) if : value = config. get ( ""OAUTH_{}_{}"". format ( name, variable ). upper ( ), None ) if : value = presets. get ( variable, None ) output. 
append ( value ) return output",True,value is None,value is None,0.6602245569229126 4545,"def __init__ ( self, pyversions, coverage_service ) : build_matrix = """" for version in pyversions : build_matrix += ""\n {},"". format ( version if : else ""py{}"". format ( """". join ( version. split ( ""."" ) ) ) ) coverage_package = """" if coverage_service : coverage_package += ""\n {}"". format ( coverage_service. package ) coverage_package += ""\n"" super ( Tox, self ). __init__ ( ""tox.ini"", TEMPLATE. format ( build_matrix = build_matrix, coverage_package = coverage_package ), )",False,version.startswith('pypy'),version,0.6474045515060425 4546,"def check_settings ( self ) : if self. settings_dict [ ""TIME_ZONE"" ] is not None : if not settings. USE_TZ : raise ImproperlyConfigured ( ""Connection '%s' cannot set TIME_ZONE because USE_TZ is "" ""False."" % self. alias ) elif : raise ImproperlyConfigured ( ""Connection '%s' cannot set TIME_ZONE because its engine "" ""handles time zones conversions natively."" % self. alias )",False,self.features.supports_timezones,settings.TIME_ZONE is None,0.6623598337173462 4547,"def canonicalize ( path ) : """"""Makes all paths start at top left, and go clockwise first."""""" path = [ [ x [ 0 ] ] + list ( map ( float, x [ 1 : ] ) ) for x in path ] new_substructures = [ ] for subpath in _separate_substructures ( path ) : leftmost_point, leftmost_idx = _get_leftmost_point ( subpath ) reordered = ( [ [ ""M"", leftmost_point [ 0 ], leftmost_point [ 1 ] ] ] + subpath [ leftmost_idx + 1 : ] + subpath [ 1 : leftmost_idx + 1 ] ) new_substructures. append ( ( reordered, leftmost_point ) ) new_path = [ ] first_substructure_done = False should_flip_cardinality = False for sp, _ in sorted ( new_substructures, key = lambda x : ( x [ 1 ] [ 1 ], x [ 1 ] [ 0 ] ) ) : if : should_flip_cardinality = not _is_clockwise ( sp ) first_substructure_done = True if should_flip_cardinality : sp = _make_clockwise ( sp ) new_path. extend ( sp ) path = [ [ x [ 0 ] ] + list ( map ( str, x [ 1 : ] ) ) for x in new_path ] return path",False,not first_substructure_done,first_substructure_done,0.6481825709342957 4548,"def __init__ ( self, info ) : if isinstance ( info, http. client. HTTPResponse ) : for key, value in info. getheaders ( ) : key = key. lower ( ) prev = self. get ( key ) if : value = "", "". join ( ( prev, value ) ) self [ key ] = value self. status = info. status self [ ""status"" ] = str ( self. status ) self. reason = info. reason self. version = info. version elif isinstance ( info, email. message. Message ) : for key, value in list ( info. items ( ) ) : self [ key. lower ( ) ] = value self. status = int ( self [ ""status"" ] ) else : for key, value in info. items ( ) : self [ key. lower ( ) ] = value self. status = int ( self. get ( ""status"", self. status ) )",False,prev is not None,prev,0.6594246625900269 4549,"def connect_to_printer ( self, port, baud, dtr ) : try : self. p. connect ( port, baud, dtr ) except SerialException as e : if e. errno == 2 : self. logError ( _ ( ""Error: You are trying to connect to a non-existing port."" ) ) elif : self. logError ( _ ( ""Error: You don't have permission to open %s."" ) % port ) self. logError ( _ ( ""You might need to add yourself to the dialout group."" ) ) else : self. logError ( traceback. format_exc ( ) ) return False except OSError as e : if e. errno == 2 : self. logError ( _ ( ""Error: You are trying to connect to a non-existing port."" ) ) else : self. logError ( traceback. format_exc ( ) ) return False self. 
statuscheck = True self. status_thread = threading. Thread ( target = self. statuschecker, name = ""status thread"" ) self. status_thread. start ( ) return True",False,e.errno == 8,e.errno == 1,0.6720011234283447
4550,"def _test_set_metadata ( self, metadata, mask = None ) : header = ofproto. OXM_OF_METADATA match = OFPMatch ( ) if mask is None : match. set_metadata ( metadata ) else : if : header = ofproto. OXM_OF_METADATA_W match. set_metadata_masked ( metadata, mask ) metadata &= mask self. _test_serialize_and_parser ( match, header, metadata, mask )",False,mask + 1 >> 64 != 1,header is None,0.6779102683067322
4551,"def detect ( cls, standalone : bool, namespace : Optional [ str ], name : Optional [ str ], ** kwargs : Any, ) -> Optional [ ""Peer"" ] : if standalone : return None if name : if await Peer. _is_peering_exist ( name, namespace = namespace ) : return cls ( name = name, namespace = namespace, ** kwargs ) elif : return cls ( name = name, namespace = namespace, legacy = True, ** kwargs ) else : raise Exception ( f""The peering {name!r} was not found"" ) if await Peer. _is_peering_exist ( name = PEERING_DEFAULT_NAME, namespace = namespace ) : return cls ( name = PEERING_DEFAULT_NAME, namespace = namespace, ** kwargs ) elif await Peer. _is_peering_legacy ( name = PEERING_DEFAULT_NAME, namespace = namespace ) : return cls ( name = PEERING_DEFAULT_NAME, namespace = namespace, legacy = True, ** kwargs ) logger. warning ( f""Default peering object not found, falling back to the standalone mode."" ) return None",False,"await Peer._is_peering_legacy(name, namespace=namespace)",await peer.has_default_name(name),0.651797354221344
4552,"def extract ( self, real ) : version = real [ ""version"" ]. value if ""metadata"" in real : self. useMetadata ( real [ ""metadata"" ] ) self. useRoot ( real ) self. format_version = ""Real audio version %s"" % version if version == 3 : size = getValue ( real, ""data_size"" ) elif ""filesize"" in real and ""headersize"" in real : size = ( real [ ""filesize"" ]. value + 40 ) - ( real [ ""headersize"" ]. value + 16 ) else : size = None if size : size *= 8 if : sec = float ( size ) / self. get ( ""bit_rate"" ) self. duration = timedelta ( seconds = sec ) computeComprRate ( self, size )",False,self.has('bit_rate'),self.get('bit_rate'),0.648869514465332
4553,"def _remove_visual_c_ref ( self, manifest_file ) : try : manifest_f = open ( manifest_file ) try : manifest_buf = manifest_f. read ( ) finally : manifest_f. close ( ) pattern = re. compile ( r"""""" r""""""VC\d{2}\.CRT(""|').*?(/>|)"""""", re. DOTALL, ) manifest_buf = re. sub ( pattern, """", manifest_buf ) pattern = ""\s*"" manifest_buf = re. sub ( pattern, """", manifest_buf ) pattern = re. compile ( r"""""" r"""""".*?(?:/>|)"""""", re. DOTALL, ) if : return None manifest_f = open ( manifest_file, ""w"" ) try : manifest_f. write ( manifest_buf ) return manifest_file finally : manifest_f. close ( ) except IOError : pass",False,"re.search(pattern, manifest_buf) is None",not manifest_file,0.6473808288574219
4554,"def _grid_configure ( self, command, index, cnf, kw ) : """"""Internal function."""""" if type ( cnf ) is StringType and not kw : if cnf [ - 1 : ] == ""_"" : cnf = cnf [ : - 1 ] if : cnf = ""-"" + cnf options = ( cnf, ) else : options = self. _options ( cnf, kw ) if not options : return _splitdict ( self. tk, self. tk. call ( ""grid"", command, self. _w, index ), conv = self. _gridconvvalue, ) res = self. tk. call ( ( ""grid"", command, self. _w, index ) + options ) if len ( options ) == 1 : return self. _gridconvvalue ( res )",False,cnf[:1] != '-',command == 'grid',0.6899287104606628
4555,"def run ( self, paths = [ ] ) : try : origin = view_locations_stack [ - 2 ] items = [ ] for item in SideBarSelection ( paths ). getSelectedItems ( ) : items. append ( item. path ( ) ) temp = [ ] for index in range ( len ( items ) ) : if not os. path. samefile ( items [ index ], origin ) : temp. append ( os. path. join ( ""."", os. path. relpath ( items [ index ], os. path. dirname ( origin ) ) ) ) items = temp if : sublime. set_clipboard ( ""\n"". join ( items ) ) if len ( items ) > 1 : sublime. status_message ( ""Items copied"" ) else : sublime. status_message ( ""Item copied"" ) except : pass",True,len(items) > 0,len(items) > 0,0.659581184387207
4556,"def add_all_onion_services ( self ) : if self. tor_conn is None : return hostname_key_list = yield list_onion_service_info ( ) for tid, hostname, key, old_hostname, old_key in hostname_key_list : if hostname and hostname not in self. hs_map : yield self. add_onion_service ( tid, hostname, key ) if : yield self. add_onion_service ( tid, old_hostname, old_key )",False,old_hostname and old_hostname not in self.hs_map,old_hostname and old_key not in self.hs_map,0.6539573669433594
4557,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 0 : if : self. success = CacheFileMetadataResult ( ) self. success. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,ftype == TType.STRUCT,self.success is not None,0.6610006093978882
4558,"def func_wrapper ( * args, ** kwargs ) : warnings. simplefilter ( ""always"", DeprecationWarning ) for old, new in arg_mapping. items ( ) : if : warnings. warn ( f""Keyword argument '{old}' has been "" f""deprecated in favour of '{new}'. "" f""'{old}' will be removed in a future version."", category = DeprecationWarning, stacklevel = 2, ) val = kwargs. pop ( old ) kwargs [ new ] = val warnings. simplefilter ( ""default"", DeprecationWarning ) return func ( * args, ** kwargs )",True,old in kwargs,old in kwargs,0.6772074699401855
4559,"def send ( self, subject, body ) : for field in filter ( lambda x : self. notification_class. init_parameters [ x ] [ ""type"" ] == ""password"", self. notification_class. init_parameters, ) : if field in self. notification_configuration : self. notification_configuration [ field ] = decrypt_field ( self, ""notification_configuration"", subfield = field ) recipients = self. notification_configuration. pop ( self. notification_class. recipient_parameter ) if not isinstance ( recipients, list ) : recipients = [ recipients ] sender = self. notification_configuration. pop ( self. notification_class. sender_parameter, None ) notification_configuration = deepcopy ( self. notification_configuration ) for field, params in self. notification_class. init_parameters. items ( ) : if : if ""default"" in params : notification_configuration [ field ] = params [ ""default"" ] backend_obj = self. notification_class ( ** notification_configuration ) notification_obj = EmailMessage ( subject, backend_obj. format_body ( body ), sender, recipients ) with set_environ ( ** settings. AWX_TASK_ENV ) : return backend_obj. send_messages ( [ notification_obj ] )",False,field not in notification_configuration,field in self.notification_class,0.6579693555831909
4560,"def _get ( self ) : fut = item = None with self. _mutex : if : fut = Future ( ) fut. add_done_callback ( lambda f : self. _get_complete ( ) if not f. cancelled ( ) else None ) self. _getters. append ( fut ) else : item = self. _get_item ( ) self. _get_complete ( ) return item, fut",False,not self._queue or self._getters,self._getters is not None,0.6624019145965576
4561,"def run_server ( self, sock ) : with sock : [ client, _ ] = sock. accept ( ) with contextlib. ExitStack ( ) as cleanup : cleanup. enter_context ( client ) reader = cleanup. enter_context ( client. makefile ( ""rb"" ) ) client. sendall ( b""200 Server ready\r\n"" ) while True : cmd = reader. readline ( ) if : client. sendall ( b""101 Capability list:\r\n"" b""VERSION 2\r\n"" b""STARTTLS\r\n"" b"".\r\n"" ) elif cmd == b""STARTTLS\r\n"" : reader. close ( ) client. sendall ( b""382 Begin TLS negotiation now\r\n"" ) context = ssl. SSLContext ( ) context. load_cert_chain ( certfile ) client = context. wrap_socket ( client, server_side = True ) cleanup. enter_context ( client ) reader = cleanup. enter_context ( client. makefile ( ""rb"" ) ) elif cmd == b""QUIT\r\n"" : client. sendall ( b""205 Bye!",False,cmd == b'CAPABILITIES\r\n',cmd == b'QUIT\r\n',0.6551171541213989
4562,"def sanityCheck ( self ) : if not self. col. basicCheck ( ) : return ""failed basic check"" for t in ""cards"", ""notes"", ""revlog"", ""graves"" : if : return ""%s had usn = -1"" % t for g in self. col. decks. all ( ) : if g [ ""usn"" ] == - 1 : return ""deck had usn = -1"" for t, usn in self. col. tags. allItems ( ) : if usn == - 1 : return ""tag had usn = -1"" found = False for m in self. col. models. all ( ) : if self. col. server : if m [ ""usn"" ] < 0 : m [ ""usn"" ] = 0 found = True else : if m [ ""usn"" ] == - 1 : return ""model had usn = -1"" if found : self. col. models. save ( ) self. col. sched. reset ( ) self. col. sched. deckDueList ( ) return [ list ( self. col. sched. counts ( ) ), self. col. db. scalar ( ""select count() from cards"" ), self. col. db. scalar ( ""select count() from notes"" ), self. col. db. scalar ( ""select count() from revlog"" ), self. col. db. scalar ( ""select count",False,self.col.db.scalar('select count() from %s where usn = -1' % t),t in self.col.tags,0.6543831825256348
4563,"def test_delete_model ( self ) : session_factory = self. replay_flight_data ( ""test_sagemaker_delete_model"" ) p = self. load_policy ( { ""name"" : ""delete-invalid-sagemaker-model"", ""resource"" : ""sagemaker-model"", ""filters"" : [ { ""tag:DeleteMe"" : ""present"" } ], ""actions"" : [ { ""type"" : ""delete"" } ], }, session_factory = session_factory, ) resources = p. run ( ) self. assertEqual ( len ( resources ), 1 ) client = session_factory ( ). client ( ""sagemaker"" ) try : client. describe_model ( ModelName = resources [ 0 ] [ ""ModelName"" ] ) except b_exc. ClientError as e : if : self. fail ( ""Bad Error:"" + e. response [ ""Error"" ] [ ""Code"" ] ) else : self. assertEqual ( e. response [ ""Error"" ] [ ""Code"" ], ""ValidationException"" ) else : self. fail ( ""Resource still exists"" )",False,e.response['Error']['Code'] != 'ValidationException',e.response[0]['Code'] in _ERROR_MAP,0.6592837572097778
4564,"def update ( self, preds, maxvals, score, imgid, * args, ** kwargs ) : num_joints = preds. shape [ 1 ] in_vis_thresh = self. _in_vis_thresh for idx, kpt in enumerate ( preds ) : kpt = [ ] kpt_score = 0 count = 0 for i in range ( num_joints ) : kpt += preds [ idx ] [ i ]. asnumpy ( ). tolist ( ) mval = float ( maxvals [ idx ] [ i ]. asscalar ( ) ) kpt. append ( mval ) if : kpt_score += mval count += 1 if count > 0 : kpt_score /= count rescore = kpt_score * score [ idx ]. asscalar ( ) self. _results. append ( { ""image_id"" : int ( imgid [ idx ]. asscalar ( ) ), ""category_id"" : 1, ""keypoints"" : kpt, ""score"" : rescore, } ) self. _recorded_ids [ int ( imgid [ idx ]. asscalar ( ) ) ] = True",False,mval > in_vis_thresh,count > 0,0.6619218587875366
4565,"def stats ( self ) : logger. info ( ""total conversations: {}"". format ( len ( self. catalog ) ) ) if self. bot. memory. exists ( [ ""user_data"" ] ) : count_user = 0 count_user_cached = 0 count_user_cached_definitive = 0 for chat_id in self. bot. memory [ ""user_data"" ] : count_user = count_user + 1 if : count_user_cached = count_user_cached + 1 if self. bot. memory [ ""user_data"" ] [ chat_id ] [ ""_hangups"" ] [ ""is_definitive"" ] : count_user_cached_definitive = count_user_cached_definitive + 1 logger. info ( ""total users: {} cached: {} definitive (at start): {}"". format ( count_user, count_user_cached, count_user_cached_definitive ) )",False,'_hangups' in self.bot.memory['user_data'][chat_id],self.bot.memory['user_data'],0.6499892473220825
4566,"def word_range ( word ) : for ind in range ( len ( word ) ) : temp = word [ ind ] for c in [ chr ( x ) for x in range ( ord ( ""a"" ), ord ( ""z"" ) + 1 ) ] : if : yield word [ : ind ] + c + word [ ind + 1 : ]",False,c != temp,temp,0.677412748336792
4567,"def check ( conf, token, prev, next, nextnext, context ) : if prev and isinstance ( prev, yaml. tokens. TagToken ) : return if conf [ ""forbid-implicit-octal"" ] : if : if not token. style : val = token. value if ( val. isdigit ( ) and len ( val ) > 1 and val [ 0 ] == ""0"" and _is_octal_number ( val [ 1 : ] ) ) : yield LintProblem ( token. start_mark. line + 1, token. end_mark. column + 1, 'forbidden implicit octal value ""%s""' % token. value, ) if conf [ ""forbid-explicit-octal"" ] : if : if not token. style : val = token. value if len ( val ) > 2 and val [ : 2 ] == ""0o"" and _is_octal_number ( val [ 2 : ] ) : yield LintProblem ( token. start_mark. line + 1,",False,"isinstance(token, yaml.tokens.ScalarToken)",token.start_mark,0.6518650650978088
4568,"def __init__ ( self, * args, ** kwargs ) : super ( ProjectForm, self ). __init__ ( * args, ** kwargs ) if self. instance. id : if : self. fields [ ""localfiletype"" ]. widget. attrs [ ""disabled"" ] = True self. fields [ ""localfiletype"" ]. required = False if ( self. instance. treestyle!= ""auto"" and self. instance. translationproject_set. count ( ) and self. instance. treestyle == self. instance. _detect_treestyle ( ) ) : self. fields [ ""treestyle"" ]. widget. attrs [ ""disabled"" ] = True self. fields [ ""treestyle"" ].
required = False",False,Store.objects.filter(translation_project__project=self.instance).count(),self.instance.treestyle == 'auto',0.6506452560424805 4569,"def _get_s3_files ( local_dir, file_info, params ) : """"""Retrieve s3 files to local directory, handling STORMSeq inputs."""""" assert len ( file_info ) == 1 files = file_info. values ( ) [ 0 ] fnames = [ ] for k in [ ""1"", ""2"" ] : if : fnames. append ( files [ k ] ) out = [ ] for fname in fnames : bucket, key = fname. replace ( ""s3://"", """" ). split ( ""/"", 1 ) if params [ ""access_key_id"" ] == ""TEST"" : out. append ( os. path. join ( local_dir, os. path. basename ( key ) ) ) else : out. append ( s3. get_file ( local_dir, bucket, key, params ) ) return out",False,files[k] not in fnames,files[k],0.658602774143219 4570,"def __exit__ ( self, exc_type, exc_val, exc_tb ) : saved_values = self. saved_values del self. saved_values support. gc_collect ( ) self. changed |= support. environment_altered for name, get, restore in self. resource_info ( ) : current = get ( ) original = saved_values. pop ( name ) if current!= original : self. changed = True restore ( original ) if : print_warning ( f""{name} was modified by {self.testname}"" ) print ( f"" Before: {original}\n After: {current} "", file = sys. stderr, flush = True, ) return False",False,not self.quiet and (not self.pgo),self.changed,0.6564649343490601 4571,"def recvExact ( self, size ) : buf = b"""" s = self. socket while len ( buf )!= size : x = s. recv ( size - len ( buf ) ) if : raise CobraClosedException ( ""Socket closed in recvExact..."" ) buf += x return buf",False,len(x) == 0,not x,0.6578380465507507 4572,"def perform ( self, node, inp, out_ ) : x, i = inp ( out, ) = out_ if out [ 0 ] is not None and out [ 0 ]. shape == ( len ( i ), ) + x. shape [ 1 : ] : o = out [ 0 ] else : o = None if i. dtype!= np. intp : i_ = theano. _asarray ( i, dtype = np. intp ) if not np. can_cast ( i. dtype, np. intp ) : if : raise IndexError ( ""index contains values that are bigger "" ""than the maximum array size on this system."", i, ) i = i_ out [ 0 ] = x. take ( i, axis = 0, out = o )",False,np.any(i != i_),i_ < TAB > -1,0.6542114019393921 4573,"def fake_db_group ( ** updates ) : db_group = { ""id"" : fake. GROUP_ID, ""name"" : ""group-1"", ""status"" : ""available"", ""user_id"" : fake. USER_ID, ""project_id"" : fake. PROJECT_ID, ""group_type_id"" : fake. GROUP_TYPE_ID, ""group_snapshot_id"" : None, ""source_group_id"" : None, } for name, field in objects. Group. fields. items ( ) : if name in db_group : continue if : db_group [ name ] = None elif field. default!= fields. UnspecifiedDefault : db_group [ name ] = field. default else : raise Exception ( ""fake_db_group needs help with %s."" % name ) if updates : db_group. update ( updates ) return db_group",False,field.nullable,field.default == fields.UnspecifiedDefault,0.65875244140625 4574,"def extract_conditions ( self, quals ) : """"""Build an imap search criteria string from a list of quals"""""" conditions = [ ] for qual in quals : if qual. list_any_or_all == ANY : values = [ ""(%s)"" % self. _make_condition ( qual. field_name, qual. operator [ 0 ], value ) for value in qual. value ] conditions. append ( make_or ( values ) ) elif : conditions. extend ( [ self. _make_condition ( qual. field_name, qual. operator [ 0 ], value ) for value in qual. value ] ) else : conditions. append ( self. _make_condition ( qual. field_name, qual. operator, qual. 
value ) ) conditions = [ x for x in conditions if x not in ( None, ""()"" ) ] return conditions",False,qual.list_any_or_all == ALL,qual.list_any_or_all == EMPTY,0.6547175645828247 4575,"def intersect_face ( pt ) : nonlocal vis_faces2D for f, vs in vis_faces2D : v0 = vs [ 0 ] for v1, v2 in iter_pairs ( vs [ 1 : ], False ) : if : return f return None",False,"intersect_point_tri_2d(pt, v0, v1, v2)",v0 == v2 and pt.contains(v1),0.6490888595581055 4576,"def merge_from_file ( metrics_file ) : """"""Merge metrics recorded in another file into the current metrics."""""" for metric in load_all ( metrics_file ) : existing = _registered_metrics. get ( metric. name ) if existing is None : _validate_metric_name ( metric. name ) _registered_metrics [ metric. name ] = metric else : if : raise TypeError ( ""Cannot merge metrics of different types."" ) existing. _merge ( metric )",False,type(metric) != type(existing),"hasattr(existing, 'type') and isinstance(existing.type, types.MultiMetrics)",0.6543618440628052 4577,"def init_weights ( self ) : for module in self. modules ( ) : if isinstance ( module, nn. Conv2d ) : kernel_height, kernel_width = module. kernel_size out_channels = module. out_channels fan_out = kernel_height * kernel_width * out_channels nn. init. normal_ ( module. weight, mean = 0.0, std = math. sqrt ( 2.0 / fan_out ) ) if : nn. init. constant_ ( module. bias, 0 ) elif isinstance ( module, nn. Linear ) : init_range = 1.0 / math. sqrt ( module. out_features ) nn. init. uniform_ ( module. weight, - init_range, init_range ) elif isinstance ( module, nn. BatchNorm2d ) : nn. init. constant_ ( module. weight, 1 ) nn. init. constant_ ( module. bias, 0 )",False,module.bias is not None,"isinstance(module, nn.Conv2d)",0.6594460010528564 4578,"def _on_queue_feeder_error ( self, e, obj ) : if isinstance ( obj, _CallItem ) : if : raised_error = RuntimeError ( ""The task could not be sent to the workers as it is too "" ""large for `send_bytes`."" ) else : raised_error = PicklingError ( ""Could not pickle the task to send it to the workers."" ) tb = traceback. format_exception ( type ( e ), e, getattr ( e, ""__traceback__"", None ) ) raised_error = set_cause ( raised_error, _RemoteTraceback ( """". join ( tb ) ) ) work_item = self. pending_work_items. pop ( obj. work_id, None ) self. running_work_items. remove ( obj. work_id ) if work_item is not None : work_item. future. set_exception ( raised_error ) del work_item self. thread_wakeup. wakeup ( ) else : super ( _SafeQueue, self ). _on_queue_feeder_error ( e, obj )",False,"isinstance(e, struct.error)",len(self.pending_work_items) > 10,0.6490083932876587 4579,"def fetch_last_known_offsets ( self, partitions = None ) : if self. group is None : raise ValueError ( ""SimpleClient.group must not be None"" ) if partitions is None : partitions = self. client. get_partition_ids_for_topic ( self. topic ) responses = self. client. send_offset_fetch_request ( self. group, [ OffsetFetchRequestPayload ( self. topic, p ) for p in partitions ], fail_on_error = False, ) for resp in responses : try : check_error ( resp ) except UnknownTopicOrPartitionError : pass if : self. offsets [ resp. partition ] = 0 else : self. offsets [ resp. partition ] = resp. 
offset",False,resp.offset == -1,fail_on_error,0.6565616130828857 4580,"def update ( self, E = None, ** F ) : if E : if : for k in E : self [ k ] = E [ k ] else : for ( k, v ) in E : self [ k ] = v for k in F : self [ k ] = F [ k ]",False,"hasattr(E, 'keys')",len(F) == 0,0.6543835401535034 4581,"def mirror ( url, response ) : if response!= ""dummy"" : clean_url = url. replace ( ""http://"", """" ). replace ( ""https://"", """" ). rstrip ( ""/"" ) parts = clean_url. split ( ""?"" ) [ 0 ]. split ( ""/"" ) root = parts [ 0 ] webpage = parts [ - 1 ] parts. remove ( root ) try : parts. remove ( webpage ) except ValueError : pass prefix = root + ""_mirror"" try : os. mkdir ( prefix ) except OSError : pass suffix = """" if parts : for directory in parts : suffix += directory + ""/"" try : os. mkdir ( prefix + ""/"" + suffix ) except OSError : pass path = prefix + ""/"" + suffix trail = """" if ""."" not in webpage : trail += "".html"" if webpage == root : name = ""index.html"" else : name = webpage if : trail += ""?"" + url. split ( ""?"" ) [ 1 ] with open ( path + name + trail, ""w+"" ) as out_file : out_file. write ( response. encode ( ""utf-8"" ) )",False,len(url.split('?')) > 1,url[-1],0.6556983590126038 4582,"def add_constraint ( self, cn, strength = None, weight = None ) : if strength or weight : cn = cn. clone ( ) if : cn. strength = strength if weight : cn. weight = weight expr, eplus, eminus, prev_edit_constant = self. new_expression ( cn ) if not self. try_adding_directly ( expr ) : self. add_with_artificial_variable ( expr ) self. needs_solving = True if cn. is_edit_constraint : i = len ( self. edit_var_map ) self. edit_var_map [ cn. variable ] = EditInfo ( cn, eplus, eminus, prev_edit_constant, i ) if self. auto_solve : self. optimize ( self. objective ) self. set_external_variables ( ) return cn",True,strength,strength,0.7020074129104614 4583,"def wrapped ( self, request ) : try : return self. _finished except AttributeError : if : if not request. session. shouldfail and not request. session. shouldstop : log. debug ( ""%s is still going to be used, not terminating it. "" ""Still in use on:\n%s"", self, pprint. pformat ( list ( self. node_ids ) ), ) return log. debug ( ""Finish called on %s"", self ) try : return func ( request ) finally : self. _finished = True",False,self.node_ids,self._finished,0.6619959473609924 4584,"def update ( self, pbar ) : context = { } for name, ( key, transform ) in self. mapping. items ( ) : try : value = getattr ( pbar, key ) if : context [ name ] = value else : context [ name ] = transform ( value ) except : pass return self. format % context",True,transform is None,transform is None,0.6706781387329102 4585,"def pytest_collection_modifyitems ( items ) : for item in items : if item. nodeid. startswith ( ""tests/params"" ) : if ""stage"" not in item. keywords : item. add_marker ( pytest. mark. stage ( ""unit"" ) ) if : item. add_marker ( pytest. mark. init ( rng_seed = 123 ) )",False,'init' not in item.keywords,'rng' not in item.keywords,0.6545066237449646 4586,"def _compile_dialect ( self, execute_observed ) : if self. dialect == ""default"" : return DefaultDialect ( ) else : if : params = { ""implicit_returning"" : True } else : params = { } return url. URL ( self. dialect ). get_dialect ( ) ( ** params )",False,self.dialect == 'postgresql',execute_observed,0.6592318415641785 4587,"def backward_impl ( self, inputs, outputs, prop_down, accum ) : axis = self. forward_func. info. args [ ""axis"" ] y0 = inputs [ 0 ]. data dz = inputs [ 2 ]. 
data dy0 = outputs [ 0 ]. data g_y0 = inputs [ 0 ]. grad g_dz = inputs [ 2 ]. grad g_dy0 = outputs [ 0 ]. grad if prop_down [ 0 ] : if : g_y0 += g_dy0 * dz * F. pow_scalar ( y0, - 2.0 ) else : g_y0. copy_from ( g_dy0 * dz * F. pow_scalar ( y0, - 2.0 ) ) if prop_down [ 2 ] : if accum [ 2 ] : g_dz -= F. sum ( g_dy0 * F. pow_scalar ( y0, - 1.0 ), axis, True ) else : g_dz. copy_from ( - F. sum ( g_dy0 * F. pow_scalar ( y0, - 1.0 ), axis, True ) )",True,accum[0],accum[0],0.6588469743728638 4588,"def _on_set_new_release_check ( self, key, value ) : if value : log. debug ( ""Checking for new release.."" ) threading. Thread ( target = self. core. get_new_release ). start ( ) if : self. new_release_timer. stop ( ) self. new_release_timer = LoopingCall ( self. _on_set_new_release_check, ""new_release_check"", True ) self. new_release_timer. start ( 72 * 60 * 60, False ) else : if : self. new_release_timer. stop ( )",False,self.new_release_timer and self.new_release_timer.running,key in ['TAB > or key in ['TAB > or value,0.6491657495498657 4589,"def add ( self, path ) : with self. get_lock ( path ) : if : self. entries [ path ] = { } self. entries [ path ] [ ""lock"" ] = self. new_locks [ path ] del self. new_locks [ path ] self. lru. append ( path )",False,not path in self.entries,path not in self.entries,0.6666389107704163 4590,"def _read_ready ( self ) : try : data = self. _sock. recv ( self. max_size ) except ( BlockingIOError, InterruptedError ) : pass except Exception as exc : self. _fatal_error ( exc, ""Fatal read error on socket transport"" ) else : if data : self. _protocol. data_received ( data ) else : if self. _loop. get_debug ( ) : logger. debug ( ""%r received EOF"", self ) keep_open = self. _protocol. eof_received ( ) if : self. _loop. remove_reader ( self. _sock_fd ) else : self. close ( )",True,keep_open,keep_open,0.6637880802154541 4591,"def send_samples ( self ) : if self. sampler : samples = self. sampler. samples if : self. send ( self. master, UpdateSamples ( self. client_id, samples ) ) return samples return None",False,len(samples) > 0,self.master,0.6631576418876648 4592,"def encode_string ( self, encoding ) : """"""Encode a buffered response body."""""" if encoding in self. attempted_charsets : return False self. attempted_charsets. add ( encoding ) body = [ ] for chunk in self. body : if : try : chunk = chunk. encode ( encoding, self. errors ) except ( LookupError, UnicodeError ) : return False body. append ( chunk ) self. body = body return True",False,"isinstance(chunk, six.text_type)","hasattr(chunk, 'encode')",0.6466336846351624 4593,"def repair_item ( href, item, seen_uids, repair_unsafe_uid ) : if item. parsed is None : raise IrreparableItem ( ) new_item = item if not item. uid : logger. warning ( ""No UID, assigning random UID."" ) new_item = item. with_uid ( generate_href ( ) ) elif item. uid in seen_uids : logger. warning ( ""Duplicate UID, assigning random UID."" ) new_item = item. with_uid ( generate_href ( ) ) elif not href_safe ( item. uid ) or not href_safe ( basename ( href ) ) : if : logger. warning ( ""UID may cause problems, add "" ""--repair-unsafe-uid to repair."" ) else : logger. warning ( ""UID or href is unsafe, assigning random UID."" ) new_item = item. with_uid ( generate_href ( ) ) if not new_item. uid : raise IrreparableItem ( ) return new_item",False,not repair_unsafe_uid,repair_unsafe_uid,0.6574409604072571 4594,"def local_mul_s_d ( node ) : if node. op == sparse. mul_s_d : x, y = node. 
inputs x_is_sparse_variable = _is_sparse_variable ( x ) if : svar = x dvar = y else : svar = y dvar = x if dvar. type. ndim!= 2 : return False if svar. type. format == ""csc"" : CSx = sparse. CSC mul_s_d_csx = mul_s_d_csc elif svar. type. format == ""csr"" : CSx = sparse. CSR mul_s_d_csx = mul_s_d_csr else : raise NotImplementedError if x. dtype!= y. dtype : return c_data = mul_s_d_csx ( sparse. csm_data ( svar ), sparse. csm_indices ( svar ), sparse. csm_indptr ( svar ), dvar, ) return [ CSx ( c_data, sparse. csm_indices ( svar ), sparse. csm_indptr (",True,x_is_sparse_variable,x_is_sparse_variable,0.6508723497390747 4595,"def initialize ( self ) : self. session = Session ( ) self. session. headers [ ""User-Agent"" ] = ""Subliminal/{}"". format ( __version__ ) if self. username is not None and self. password is not None : logger. info ( ""Logging in"" ) params = { ""username"" : self. username, ""password"" : self. password, ""apikey"" : self. apikey, } r = self. session. get ( self. server_url + ""users/login"", params = params, timeout = 10 ) root = etree. fromstring ( r. content ) if : raise AuthenticationError ( root. find ( ""error/message"" ). text ) self. auth_code = root. find ( ""data/user/authcode"" ). text data = { ""username"" : self. username, ""passwd"" : self. password, ""remember"" : ""yes"", ""option"" : ""com_user"", ""task"" : ""login"", ""silent"" : ""true"", } r = self. session. post ( ""http://www.italiansubs.net/index.php"", data = data, timeout = 30 ) r. raise_for_status ( ) self. logged_in = True",False,root.find('status').text == 'fail',root.find('data/user/login') is None,0.6484965085983276 4596,"def OnEndDrag ( self, event ) : self. StopDragging ( ) dropTarget = event. GetItem ( ) if not dropTarget : dropTarget = self. GetRootItem ( ) if self. IsValidDropTarget ( dropTarget ) : self. UnselectAll ( ) if : self. SelectItem ( dropTarget ) self. OnDrop ( dropTarget, self. _dragItem )",False,dropTarget != self.GetRootItem(),self.IsValidDropTarget(dropTarget),0.6531848907470703 4597,"def get_min_vertical_scroll ( ) -> int : used_height = 0 prev_lineno = ui_content. cursor_position. y for lineno in range ( ui_content. cursor_position. y, - 1, - 1 ) : used_height += get_line_height ( lineno ) if : return prev_lineno else : prev_lineno = lineno return 0",False,used_height > height - scroll_offsets_bottom,used_height == 0,0.6475968360900879 4598,"def handle_replace ( self, request, fileobj ) : """"""Replace file content with uploaded one."""""" filecopy = fileobj. read ( ) fileobj. close ( ) fileobj = BytesIOMode ( fileobj. name, filecopy ) with self. component. repository. lock : if : self. component. commit_pending ( ""replace file"", request. user ) else : self. commit_pending ( ""replace file"", request. user ) store2 = self. load_store ( fileobj ) store2. check_valid ( ) self. store. save_atomic ( self. store. storefile, lambda handle : handle. write ( filecopy ) ) previous_revision = ( self. component. repository. last_revision, ) if self. git_commit ( request. user, request. user. get_author_name ( ), store_hash = False ) : self. drop_store_cache ( ) self. handle_store_change ( request, request. user, previous_revision, change = Change. ACTION_REPLACE_UPLOAD, ) return ( 0, 0, self. unit_set. count ( ), len ( list ( store2. content_units ) ) )",False,self.is_source,self.component.repository.last_revision > 0,0.6528895497322083 4599,"def decode ( self, text : str ) -> typing. List [ str ] : lines = [ ] if text and self. buffer and self. 
buffer [ - 1 ] == ""\r"" : if text. startswith ( ""\n"" ) : lines. append ( self. buffer [ : - 1 ] + ""\n"" ) self. buffer = """" text = text [ 1 : ] else : lines. append ( self. buffer [ : - 1 ] + ""\n"" ) self. buffer = """" while text : num_chars = len ( text ) for idx in range ( num_chars ) : char = text [ idx ] next_char = None if idx + 1 == num_chars else text [ idx + 1 ] if char == ""\n"" : lines. append ( self. buffer + text [ : idx + 1 ] ) self. buffer = """" text = text [ idx + 1 : ] break elif : lines. append ( self. buffer + text [ : idx ] + ""\n"" ) self. buffer = """" text = text [ idx + 2 : ] break elif char == ""\r"" and next_char is not None : lines. append ( self",False,char == '\r' and next_char == '\n',char == '\r',0.6518829464912415 4600,"def delete ( self ) : from weblate. trans. models import Change, Suggestion, Vote fast_deletes = [ ] for item in self. fast_deletes : if : fast_deletes. append ( Vote. objects. filter ( suggestion__in = item ) ) fast_deletes. append ( Change. objects. filter ( suggestion__in = item ) ) fast_deletes. append ( item ) self. fast_deletes = fast_deletes return super ( ). delete ( )",False,item.model is Suggestion,item,0.6620023250579834 4601,"def post ( self ) : """"""Handle modifying actions and redirect to a GET page."""""" if self. request. get ( ""action:flush_memcache"" ) : if : message = ""Cache flushed, all keys dropped."" else : message = ""Flushing the cache failed. Please try again."" self. redirect ( self. _construct_url ( remove = [ ""action:flush_memcache"" ], add = { ""message"" : message } ) ) elif self. request. get ( ""action:delete_entities"" ) : entity_keys = self. request. params. getall ( ""entity_key"" ) db. delete ( entity_keys ) self. redirect ( self. _construct_url ( remove = [ ""action:delete_entities"" ], add = { ""message"" : ""%d entities deleted"" % len ( entity_keys ) }, ) ) else : self. error ( 404 )",False,memcache.flush_all(),self.cache_failed,0.6533583402633667 4602,"def main ( ) : env_path = os. path. join ( os. path. dirname ( os. path. realpath ( __file__ ) ), "".."", "".."", ""wr.env"" ) with open ( env_path, ""r+"" ) as fh : buff = fh. read ( ) if : print ( ""No Env"" ) return buff = buff. replace ( ""WEBAGG_HOST"", ""WARCSERVER_HOST"" ) buff = buff. replace ( ""http://webagg"", ""http://warcserver"" ) fh. seek ( 0 ) fh. write ( buff ) print ( ""Updated wr.env"" )",True,not buff,not buff,0.68404620885849 4603,"def handle ( self, * args : Any, ** options : Any ) -> None : num_processes = int ( options [ ""processes"" ] ) if num_processes < 1 : raise CommandError ( ""You must have at least one process."" ) subdomain = options [ ""subdomain"" ] if options [ ""destroy_rebuild_database"" ] : print ( ""Rebuilding the database!"" ) db_name = settings. DATABASES [ ""default"" ] [ ""NAME"" ] self. do_destroy_and_rebuild_database ( db_name ) elif options [ ""import_into_nonempty"" ] : print ( ""NOTE: The argument 'import_into_nonempty' is now the default behavior."" ) check_subdomain_available ( subdomain, from_management_command = True ) paths = [ ] for path in options [ ""export_paths"" ] : path = os. path. realpath ( os. path. expanduser ( path ) ) if : raise CommandError ( f""Directory not found: '{path}'"" ) if not os. path. isdir ( path ) : raise CommandError ( ""Export file should be folder; if it's a "" ""tarball, please unpack it first."" ) paths. 
append ( path ) for path in paths : print ( f""Processing dump: {path}..."" ) realm = do_import_realm ( path, subdomain, num_processes ) print ( ""Checking the system bots."" ) do_import_system_bots ( realm )",False,not os.path.exists(path),not os.path.isdir(path),0.6469482183456421 4604,"def _read_bytes ( self, num_bytes ) : self. _sock. settimeout ( self. _read_timeout ) while True : try : data = self. _rfile. read ( num_bytes ) break except ( IOError, OSError ) as e : if : continue self. _force_close ( ) raise err. OperationalError ( CR. CR_SERVER_LOST, ""Lost connection to MySQL server during query (%s)"" % ( e, ), ) except BaseException : self. _force_close ( ) raise if len ( data ) < num_bytes : self. _force_close ( ) raise err. OperationalError ( CR. CR_SERVER_LOST, ""Lost connection to MySQL server during query"" ) return data",False,e.errno == errno.EINTR,len(data) < num_bytes,0.6541719436645508 4605,"def set ( self, item, data ) : if not type ( item ) is slice : item = slice ( item, item + len ( data ), None ) virt_item = self. item2virtitem ( item ) if not virt_item : return off = 0 for s, n_item in virt_item : if : i = slice ( off, n_item. stop + off - n_item. start, n_item. step ) data_slice = data. __getitem__ ( i ) s. content. __setitem__ ( n_item, data_slice ) off = i. stop else : raise ValueError ( ""TODO XXX"" ) return",False,"isinstance(s, ProgBits)",n_item.start <= 0 < s.stop,0.6470135450363159 4606,"def _write_source ( self, file = None ) : if file is not None : self. _write_source_to ( file ) else : f = NativeIO ( ) self. _write_source_to ( f ) source_data = f. getvalue ( ) if : with open ( self. sourcefilename, ""r"" ) as fp : needs_written = not ( fp. read ( ) == source_data ) else : needs_written = True if needs_written : _ensure_dir ( self. sourcefilename ) with open ( self. sourcefilename, ""w"" ) as fp : fp. write ( source_data ) self. _has_source = True",False,os.path.exists(self.sourcefilename),self.sourcefilename,0.6495925188064575 4607,"def ip_label ( tokeniser, afi, safi ) : ipmask = prefix ( tokeniser ) nlri = Label ( afi, safi, OUT. ANNOUNCE ) nlri. cidr = CIDR ( ipmask. pack ( ), ipmask. mask ) change = Change ( nlri, Attributes ( ) ) while True : command = tokeniser ( ) if : break action = AnnounceLabel. action. get ( command, """" ) if action == ""attribute-add"" : change. attributes. add ( AnnounceLabel. known [ command ] ( tokeniser ) ) elif action == ""nlri-set"" : change. nlri. assign ( AnnounceLabel. assign [ command ], AnnounceLabel. known [ command ] ( tokeniser ) ) elif action == ""nexthop-and-attribute"" : nexthop, attribute = AnnounceLabel. known [ command ] ( tokeniser ) change. nlri. nexthop = nexthop change. attributes. add ( attribute ) else : raise ValueError ( 'route: unknown command ""%s""' % command ) return [ change ]",False,not command,command == 'delete',0.6864778995513916 4608,"def python_matches ( self, text ) : """"""Match attributes or global python names"""""" if ""."" in text : try : matches = self. attr_matches ( text ) if : if self. omit__names == 1 : no__name = lambda txt : re. match ( r"".*\.__.*?__"", txt ) is None else : no__name = ( lambda txt : re. match ( r""\._.*?"", txt [ txt. rindex ( ""."" ) : ] ) is None ) matches = filter ( no__name, matches ) except NameError : matches = [ ] else : matches = self. global_matches ( text ) return matches",False,text.endswith('.') and self.omit__names,len(matches) == 0,0.653357744216919 4609,"def _shadow_mapping_pass ( self, scene, light_node, flags ) : light = light_node. 
light self. _configure_shadow_mapping_viewport ( light, flags ) V, P = self. _get_light_cam_matrices ( scene, light_node, flags ) for node in self. _sorted_mesh_nodes ( scene ) : mesh = node. mesh if : continue for primitive in mesh. primitives : program = self. _get_primitive_program ( primitive, flags, ProgramFlags. NONE ) program. _bind ( ) program. set_uniform ( ""V"", V ) program. set_uniform ( ""P"", P ) program. set_uniform ( ""cam_pos"", scene. get_pose ( scene. main_camera_node ) [ : 3, 3 ] ) self. _bind_and_draw_primitive ( primitive = primitive, pose = scene. get_pose ( node ), program = program, flags = RenderFlags. DEPTH_ONLY, ) self. _reset_active_textures ( ) if program is not None : program. _unbind ( ) glFlush ( )",False,not mesh.is_visible,mesh.is_empty,0.6562389135360718 4610,"def run ( self ) : args = self. _parse_args ( ) self. _parse_compose_file ( ) podman_path = args. podman_path if podman_path!= ""podman"" : if os. path. isfile ( podman_path ) and os. access ( podman_path, os. X_OK ) : podman_path = os. path. realpath ( podman_path ) else : if : raise IOError ( ""Binary {} has not been found."". format ( podman_path ) ) self. podman = Podman ( self, podman_path, args. dry_run ) cmd_name = args. command cmd = self. commands [ cmd_name ] cmd ( self, args )",False,dry_run == False,not os.path.exists(podman_path),0.6627120971679688 4611,"def read ( self, iprot ) : if ( iprot. _fast_decode is not None and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None ) : iprot. _fast_decode ( self, iprot, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if : if ftype == TType. STRUCT : self. req = TFetchResultsReq ( ) self. req. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd ( )",False,fid == 1,fid == TType.START,0.6721687316894531 4612,"def test_decorator_order ( self ) : for action, func in api_call_map. items ( ) : func = getattr ( self. service_connection, func ) decs = [ func. __name__ ] while func : i = 0 if not hasattr ( func, ""__closure__"" ) : func = getattr ( func, ""__wrapped__"", None ) continue while i < len ( func. __closure__ ) : value = func. __closure__ [ i ]. cell_contents if hasattr ( value, ""__call__"" ) : if : self. assertTrue ( not decs or decs [ - 1 ] == ""requires"" ) decs. append ( value. __name__ ) i += 1 func = getattr ( func, ""__wrapped__"", None )",False,'requires' == value.__name__,decs,0.6711407899856567 4613,"def _get_name ( key_tuple, annotations ) : annotation = None if annotations is None else annotations. get ( key_tuple ) CELL_PREFIX = """" % ( """" if annotation is None else'class=""%s""' % annotation ) seq_name = unicode_data. get_emoji_sequence_name ( key_tuple ) if seq_name == None : if key_tuple == ( 0x20E3, ) : seq_name = ""(combining enclosing keycap)"" elif : seq_name = ""(unknown flag PUA codepoint)"" else : print ( ""no name for %s"" % unicode_data. seq_to_string ( key_tuple ) ) seq_name = ""(oops)"" return CELL_PREFIX + seq_name",False,"key_tuple == (1042475,)","key_tuple == (16579,)",0.6547436118125916 4614,"def scrub_data ( data ) : if isinstance ( data, dict ) : for k, v in data. items ( ) : if : key = k. decode ( ""utf-8"", ""replace"" ) else : key = k key = key.
lower ( ) data [ k ] = scrub_data ( v ) for blk in KEYS : if blk in key : data [ k ] = MASK elif isinstance ( data, list ) : for i, l in enumerate ( list ( data ) ) : data [ i ] = scrub_data ( l ) elif isinstance ( data, str ) : if ""="" in data : if ""&"" in data : delimiter = ""&"" else : delimiter = "";"" qd = scrub_data ( OrderedDict ( e. split ( ""="", 1 ) if ""="" in e else ( e, None ) for e in data. split ( delimiter ) ) ) return delimiter. join ( ( k + ""="" + v if v is not None else k ) for k, v in qd. items ( ) )",True,"isinstance(k, bytes)","isinstance(k, bytes)",0.6504049301147461 4615,"def scan_block_scalar_indentation ( self ) : chunks = [ ] max_indent = 0 end_mark = self. get_mark ( ) while self. peek ( ) in "" \r\n\x85\u2028\u2029"" : if self. peek ( )!= "" "" : chunks. append ( self. scan_line_break ( ) ) end_mark = self. get_mark ( ) else : self. forward ( ) if : max_indent = self. column return chunks, max_indent, end_mark",True,self.column > max_indent,self.column > max_indent,0.653872549533844 4616,"def _produce ( self ) : headers = self. msg. getHeaders ( True ) boundary = None if self. msg. isMultipart ( ) : content = headers. get ( ""content-type"" ) parts = [ x. split ( ""="", 1 ) for x in content. split ( "";"" ) [ 1 : ] ] parts = { k. lower ( ). strip ( ) : v for ( k, v ) in parts } boundary = parts. get ( ""boundary"" ) if boundary is None : boundary = ""----={}"". format ( self. _uuid4 ( ). hex ) headers [ ""content-type"" ] += '; boundary=""{}""'. format ( boundary ) else : if : boundary = boundary [ 1 : - 1 ] boundary = networkString ( boundary ) self. write ( _formatHeaders ( headers ) ) self. write ( b""\r\n"" ) if self. msg. isMultipart ( ) : for p in subparts ( self. msg ) : self. write ( b""\r\n--"" + boundary + b""\r\n"" ) yield MessageProducer ( p, self. buffer, self. scheduler ). beginProducing ( None ) self. write ( b""\r\n--"" + boundary + b""--\r\n"" ) else : f = self. msg. getBodyFile ( ) while True : b = f. read ( self. CHUNK_SIZE ) if b : self. buffer. write ( b ) yield None ",False,"boundary.startswith('""') and boundary.endswith('""')",len(boundary) > 0,0.646588921546936 4617,def _get_directory_size_in_bytes ( directory ) : total = 0 try : for entry in os. scandir ( directory ) : if : total += entry. stat ( ). st_size elif entry. is_dir ( ) : total += _get_directory_size_in_bytes ( entry. path ) except NotADirectoryError : return os. path. getsize ( directory ) except PermissionError : return 0 return total,True,entry.is_file(),entry.is_file(),0.6555038094520569 4618,"def on_event ( self, c, button, data ) : if self. rvGestureGrab. get_reveal_child ( ) : if button == ""A"" and data [ 0 ] == 0 : self. use ( ) elif : self. start_over ( )",False,button == 'Y' and data[0] == 0,button == 'C' and data[0] == 1,0.6543787717819214 4619,"def _fc_layer ( self, sess, bottom, name, trainable = True, relu = True ) : with tf. variable_scope ( name ) as scope : shape = bottom. get_shape ( ). as_list ( ) dim = 1 for d in shape [ 1 : ] : dim *= d x = tf. reshape ( bottom, [ - 1, dim ] ) weight = self. _get_fc_weight ( sess, name, trainable = trainable ) bias = self. _get_bias ( sess, name, trainable = trainable ) fc = tf. nn. bias_add ( tf. matmul ( x, weight ), bias ) if : fc = tf. nn. relu ( fc ) return fc",True,relu,relu,0.6798250675201416 4620,"def terminate_subprocess ( proc, timeout = 0.1, log = None ) : if proc. poll ( ) is None : if : log. info ( ""Sending SIGTERM to %r"", proc ) proc. terminate ( ) timeout_time = time. time ( ) + timeout while proc. 
poll ( ) is None and time. time ( ) < timeout_time : time. sleep ( 0.02 ) if proc. poll ( ) is None : if : log. info ( ""Sending SIGKILL to %r"", proc ) proc. kill ( ) return proc. returncode",True,log,log,0.6794155240058899 4621,"def bounds_check ( cls, el1, el2, msg = None ) : lbrt1 = el1. bounds. lbrt ( ) lbrt2 = el2. bounds. lbrt ( ) try : for v1, v2 in zip ( lbrt1, lbrt2 ) : if : v1 = dt_to_int ( v1 ) if isinstance ( v2, datetime_types ) : v2 = dt_to_int ( v2 ) cls. assert_array_almost_equal_fn ( v1, v2 ) except AssertionError : raise cls. failureException ( ""BoundingBoxes are mismatched: %s!= %s."" % ( el1. bounds. lbrt ( ), el2. bounds. lbrt ( ) ) )",True,"isinstance(v1, datetime_types)","isinstance(v1, datetime_types)",0.6481517553329468 4622,"def _init_components ( self, mode = ""train"", version = 1, ** kwargs ) : if not self. dsl : raise DSLNotExistError ( """" ) components = self. dsl. get ( ""components"" ) if components is None : raise ComponentFieldNotExistError ( ) for name in components : if : raise ModuleFieldNotExistError ( component = name ) module = components [ name ] [ ""module"" ] new_component = Component ( ) new_component. set_name ( name ) new_component. set_module ( module ) self. component_name_index [ name ] = len ( self. component_name_index ) self. components. append ( new_component ) if version == 2 or mode == ""train"" : self. _check_component_valid_names ( )",False,'module' not in components[name],name not in self.components,0.6564379334449768 4623,"def ant_map ( m ) : tmp = ""rows %s\ncols %s\n"" % ( len ( m ), len ( m [ 0 ] ) ) players = { } for row in m : tmp += ""m "" for col in row : if col == LAND : tmp += ""."" elif col == BARRIER : tmp += ""%"" elif col == FOOD : tmp += ""*"" elif : tmp += ""?"" else : players [ col ] = True tmp += chr ( col + 97 ) tmp += ""\n"" tmp = ( ""players %s\n"" % len ( players ) ) + tmp return tmp",False,col == UNSEEN,col == PLAYERS,0.6728885769844055 4624,"def apply_offers ( self, basket, offers ) : applications = OfferApplications ( ) for offer in offers : num_applications = 0 while num_applications < offer. get_max_applications ( basket. owner ) : result = offer. apply_benefit ( basket ) num_applications += 1 if : break applications. add ( offer, result ) if result. is_final : break basket. offer_applications = applications",False,not result.is_successful,result.has_application,0.6553521156311035 4625,"def testIndexListToLabelsMissedPoint ( self ) : clusters = [ [ 0, 1, 2, 3 ], [ 4, 5, 6 ] ] data = [ [ 5.1, 5.2 ], [ 5.2, 5.1 ], [ 5.4, 5.2 ], [ 5.1, 5.0 ], [ 8.1, 8.0 ], [ 8.4, 8.2 ], [ 8.3, 8.4 ], [ 8.5, 8.5 ], ] encoder = cluster_encoder ( type_encoding. CLUSTER_INDEX_LIST_SEPARATION, clusters, data ) encoder. set_encoding ( type_encoding. CLUSTER_INDEX_LABELING ) expected = [ 0, 0, 0, 0, 1, 1, 1, float ( ""NaN"" ) ] actual = encoder. get_clusters ( ) self. assertEqual ( len ( expected ), len ( actual ) ) for i in range ( len ( expected ) ) : if : self. assertTrue ( math. isnan ( actual [ i ] ) ) else : self. assertEqual ( expected [ i ], actual [ i ] )",False,math.isnan(expected[i]) is True,"isinstance(actual[i], float)",0.6509243249893188 4626,"def _update_handler ( self, event : Event ) : if self. payload_off and event. data [ ""name"" ] == self. payload_off : if self. _unsub_turn_off : self. _unsub_turn_off ( ) if self. _is_on : self. _is_on = False self. schedule_update_ha_state ( ) elif event. data [ ""name"" ] == self. trigger : if self. _unsub_turn_off : self. _unsub_turn_off ( ) if self. timeout : self. 
_unsub_turn_off = async_call_later ( self. hass, self. timeout, self. _turn_off ) if : self. _is_on = True self. schedule_update_ha_state ( )",False,not self._is_on,self._is_on,0.6595999002456665 4627,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. STRUCT : self. sessionHandle = TSessionHandle ( ) self. sessionHandle. read ( iprot ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRING : self. catalogName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 3 : if ftype == TType. STRING : self. schemaName = iprot. readString ( ) else : iprot. skip ( ftype ) elif fid == 4 : if ftype == TType. STRING : self. functionName = iprot. readString ( ) lexers = [ ] states = [ ] for name in f. order : v = f. features [ name ] if v [ ""Category"" ]!= ""Deprecated"" : if v [ ""FeatureType"" ] == ""val"" : if : states. append ( ( name, v [ ""Value"" ] ) ) elif name. startswith ( ""SCLEX_"" ) : lexers. append ( ( name, v [ ""Value"" ] ) ) return ( lexers, states )",False,name.startswith('SCE_'),name.startswith(b'SCLEX_'),0.6534950733184814 4629,"def addInPlace ( self, value1, value2 ) : for group in value2 : for key in value2 [ group ] : if : value1 [ group ] [ key ] = value2 [ group ] [ key ] else : value1 [ group ] [ key ] += value2 [ group ] [ key ] return value1",False,key not in value1[group],key not in self,0.6536213159561157 4630,"def add_value ( self, stats_key, stats_value, timestamp, reset_values = False ) : utc_timestamp = convert_timestamp_to_utc ( timestamp ) try : ms = MonitoringStats ( utc_timestamp, stats_key, stats_value ) self. session. add ( ms ) self. session. commit ( ) if : self. session. query ( MonitoringStats ). filter ( and_ ( MonitoringStats. stats_key == stats_key, MonitoringStats. timestamp < utc_timestamp, ) ). delete ( ) self. session. commit ( ) except Exception as exx : log. error ( u""exception {0!r}"". format ( exx ) ) log. error ( u""DATA: {0!s} -> {1!s}"". format ( stats_key, stats_value ) ) log. debug ( u""{0!s}"". format ( traceback. format_exc ( ) ) ) self. session. rollback ( ) finally : self. session. close ( )",True,reset_values,reset_values,0.6642947196960449 4631,"def process_goodreads_csv ( i ) : import csv csv_payload = i. csv if isinstance ( i. csv, str ) else i. csv. decode ( ) csv_file = csv. reader ( csv_payload. splitlines ( ), delimiter = "","", quotechar = '""' ) header = next ( csv_file ) books = { } books_wo_isbns = { } for book in list ( csv_file ) : _book = dict ( zip ( header, book ) ) isbn = _book [ ""ISBN"" ] = _book [ ""ISBN"" ]. replace ( '""', """" ). replace ( ""="", """" ) isbn_13 = _book [ ""ISBN13"" ] = _book [ ""ISBN13"" ]. replace ( '""', """" ). replace ( ""="", """" ) if : books [ isbn ] = _book elif isbn_13!= """" : books [ isbn_13 ] = _book books [ isbn_13 ] [ ""ISBN"" ] = isbn_13 else : books_wo_isbns [ _book [ ""Book Id"" ] ] = _book return books, books_wo_isbns",True,isbn != '',isbn != '',0.6774961352348328 4632,"def wrapper ( api, * args, ** kwargs ) : try : old_curdir = os. getcwd ( ) except EnvironmentError : old_curdir = None try : cache = ClientCache ( api. cache_folder, api. 
out ) with environment_append ( cache. config. env_vars ) : return f ( api, * args, ** kwargs ) except Exception as exc : msg = exception_message_safe ( exc ) try : api. out. error ( ""{} ({})"". format ( str ( exc. __class__. __name__ ), msg ) ) except BaseException : pass raise finally : if : os. chdir ( old_curdir )",False,old_curdir,old_curdir is not None,0.6626431941986084 4633,"def _load_custom_sections ( self ) -> None : if self. _config. napoleon_custom_sections is not None : for entry in self. _config. napoleon_custom_sections : if isinstance ( entry, str ) : self. _sections [ entry. lower ( ) ] = self. _parse_custom_generic_section else : if : self. _sections [ entry [ 0 ]. lower ( ) ] = self. _parse_custom_params_style_section elif entry [ 1 ] == ""returns_style"" : self. _sections [ entry [ 0 ]. lower ( ) ] = self. _parse_custom_returns_style_section else : self. _sections [ entry [ 0 ]. lower ( ) ] = self. _sections. get ( entry [ 1 ]. lower ( ), self. _parse_custom_generic_section )",False,entry[1] == 'params_style',entry[0] == 'params_style',0.6554086208343506 4634,"def load_directory ( self, rulepath ) : for f in os. listdir ( rulepath ) : full_path = f""{rulepath}/{f}"" if : try : if ( full_path. endswith ( "".yar"" ) or full_path. endswith ( "".yara"" ) or full_path. endswith ( "".rule"" ) ) : self. ruleset. append ( ( full_path, yara. compile ( full_path, externals = { ""filename"" : self. filename }, ), ) ) elif full_path. endswith ( "".yas"" ) : self. ruleset. append ( ( full_path, yara. load ( full_path ) ) ) except yara. SyntaxError as e : logger. warning ( f""Rule {full_path} "" f""has a syntax error {e}"" ) ",False,os.path.isfile(full_path),os.path.exists(full_path),0.6456151604652405 4635,"def test_expanduser ( self ) : self. assertEqual ( posixpath. expanduser ( ""foo"" ), ""foo"" ) self. assertEqual ( posixpath. expanduser ( b""foo"" ), b""foo"" ) try : import pwd except ImportError : pass else : self. assertIsInstance ( posixpath. expanduser ( ""~/"" ), str ) self. assertIsInstance ( posixpath. expanduser ( b""~/"" ), bytes ) if : self. assertEqual ( posixpath. expanduser ( ""~"" ) + ""/"", posixpath. expanduser ( ""~/"" ) ) self. assertEqual ( posixpath. expanduser ( b""~"" ) + b""/"", posixpath. expanduser ( b""~/"" ) ) self. assertIsInstance ( posixpath. expanduser ( ""~root/"" ), str ) self. assertIsInstance ( posixpath. expanduser ( ""~foo/"" ), str ) self. assertIsInstance ( posixpath. expanduser ( b""~root/"" ), bytes ) self. assertIsInstance ( posixpath. expanduser ( b""~foo/"" ), bytes ) with support. EnvironmentVarGuard ( ) as env : env [ ""HOME"" ] = ""/"" self. assertEqual ( posixpath. expanduser ( ""~"" ), ""/"" ) self. assertEqual ( posixpath. expanduser ( ""~/foo"" ), ""/foo"" ) del env [ ""HOME"" ] home = pwd. getpwuid ( os. getuid ( ) ). pw_dir ",False,posixpath.expanduser('~') != '/',os.getuid() == 0,0.6564404368400574 4636,"def prefetch ( sq, * subqueries ) : if not subqueries : return sq fixed_queries = prefetch_add_subquery ( sq, subqueries ) deps = { } rel_map = { } for prefetch_result in reversed ( fixed_queries ) : query_model = prefetch_result. model if prefetch_result. field : rel_map. setdefault ( prefetch_result. rel_model, [ ] ) rel_map [ prefetch_result. rel_model ]. append ( prefetch_result ) deps [ query_model ] = { } id_map = deps [ query_model ] has_relations = bool ( rel_map. get ( query_model ) ) for instance in prefetch_result. query : if prefetch_result. field : prefetch_result. 
store_instance ( instance, id_map ) if : for rel in rel_map [ query_model ] : rel. populate_instance ( instance, deps [ rel. model ] ) return prefetch_result. query",True,has_relations,has_relations,0.6722928285598755 4637,"def extract_node_solution ( tree_node ) : solution = { } for variable_id in tree_node. _standard_variable_ids : varname, index = tree_node. _variable_ids [ variable_id ] if varname not in solution : solution [ varname ] = [ ] if : solution [ varname ]. append ( ( index, tree_node. _solution [ variable_id ] ) ) else : name, index = tree_node. _variable_ids [ variable_id ] full_name = name + indexToString ( index ) print ( ""%s: node solution missing for variable with scenario tree "" ""id %s (%s)"" % ( tree_node. name, variable_id, full_name ) ) return None for varname in list ( solution. keys ( ) ) : solution [ varname ] = sorted ( solution [ varname ], key = lambda x : x [ 0 ] ) return solution",False,variable_id in tree_node._solution,variable_node.has_solution[variable_id],0.655394434928894 4638,"def fix_path_string ( all_info, current_path, path_to_value ) : while True : dynamic_path = re_get_value_at. findall ( path_to_value ) if len ( dynamic_path ) == 0 : break for dp in dynamic_path : tmp = dp while True : nested = re_nested_get_value_at. findall ( tmp ) if : break tmp = nested [ 0 ]. replace ( ""_GET_VALUE_AT_("", """", 1 ) dv = get_value_at ( all_info, current_path, tmp ) path_to_value = path_to_value. replace ( ""_GET_VALUE_AT_(%s)"" % tmp, dv ) return path_to_value",False,len(nested) == 0,nested == 0,0.6581918001174927 4639,"def genstats_cov_thresholds ( self, dist_data, threshs, hidden_threshs ) : data = defaultdict ( OrderedDict ) for s_name, d in dist_data. items ( ) : dist_subset = { t : data for t, data in d. items ( ) if t in threshs } for t in threshs : if : data [ s_name ] [ ""{}_x_pc"". format ( t ) ] = dist_subset [ t ] else : data [ s_name ] [ ""{}_x_pc"". format ( t ) ] = 0 headers = OrderedDict ( ) for t in threshs : headers [ ""{}_x_pc"". format ( t ) ] = { ""title"" : ""≥ {}X"". format ( t ), ""description"" : ""Fraction of genome with at least {}X coverage"". format ( t ), ""max"" : 100, ""min"" : 0, ""suffix"" : ""%"", ""scale"" : ""RdYlGn"", ""hidden"" : t in hidden_threshs, } self. general_stats_addcols ( data, headers )",False,int(t) in dist_subset,t in dist_subset,0.6564961075782776 4640,"def _flush_renames ( self, old_hash = None, limit = 0 ) : if limit and len ( self. _pending_renames ) < 2 * limit : return fi_input, fi_output = self. _import_pipes while self. _pending_renames : orig_id, ignore = self. _pending_renames. popitem ( last = False ) new_id = fi_output. readline ( ). rstrip ( ) self. _commit_renames [ orig_id ] = new_id if : return if limit and len ( self. _pending_renames ) < limit : return",False,old_hash == orig_id,not ignore,0.6536868810653687 4641,"def read ( self, iprot ) : if ( iprot. __class__ == TBinaryProtocol. TBinaryProtocolAccelerated and isinstance ( iprot. trans, TTransport. CReadableTransport ) and self. thrift_spec is not None and fastbinary is not None ) : fastbinary. decode_binary ( self, iprot. trans, ( self. __class__, self. thrift_spec ) ) return iprot. readStructBegin ( ) while True : ( fname, ftype, fid ) = iprot. readFieldBegin ( ) if ftype == TType. STOP : break if fid == 1 : if ftype == TType. I32 : self. protocol_version = iprot. readI32 ( ) else : iprot. skip ( ftype ) elif : if ftype == TType. STRING : self. requestorUserName = iprot. readString ( ) else : iprot. 
skip ( ftype ) elif fid == 3 : if ftype == TType. STRUCT : self. authorizable = TSentryAuthorizable ( ) self. authorizable. read ( iprot ) else : iprot. skip ( ftype ) else : iprot. skip ( ftype ) iprot. readFieldEnd ( ) iprot. readStructEnd (",True,fid == 2,fid == 2,0.677743136882782 4642,"def test_to_instance_dicts ( self, feature_spec, instances, feed_dict, feed_eager_tensors ) : if feed_eager_tensors : test_case. skip_if_not_tf2 ( ""Tensorflow 2.x required"" ) schema = schema_utils. schema_from_feature_spec ( feature_spec ) feed_dict_local = copy. copy ( feed_dict ) if feed_eager_tensors : for key, value in six. iteritems ( feed_dict_local ) : if : feed_dict_local [ key ] = tf. sparse. SparseTensor. from_value ( value ) else : feed_dict_local [ key ] = tf. constant ( value ) result = impl_helper. to_instance_dicts ( schema, feed_dict_local ) np. testing. assert_equal ( instances, result )",False,"isinstance(value, tf.compat.v1.SparseTensorValue)",instances[key],0.6484865546226501 4643,"def add_hook ( self, hook, name, timing ) : """"""Adds a hook function."""""" if not callable ( hook ) : raise TypeError ( ""hook function must be callable"" ) if timing not in ( ""pre"", ""post"", ""auto"" ) : raise ValueError ( ""timing must be one of ('pre', 'post', 'auto')"" ) if timing == ""auto"" : timing = getattr ( hook, ""timing"", ""pre"" ) if timing not in ( ""pre"", ""post"" ) and self. _invalid_timing_fallback : warnings. warn ( ""Hook timing attribute not in ('pre', 'post'), "" ""defaulting timing to 'pre'."" ) timing = ""pre"" if : name = getattr ( hook, ""name"", getattr ( hook, ""__name__"", None ) ) if : raise ValueError ( ""the name of the hook function is not specified"" ) if name in self. _pre_update_hooks or name in self. _post_update_hooks : raise KeyError ( 'hook ""{}"" already exists'. format ( name ) ) if timing == ""pre"" : self. _pre_update_hooks [ name ] = hook else : self. _post_update_hooks [ name ] = hook",True,name is None,name is None,0.6623499393463135 4644,"def update_service_key ( kid, name = None, metadata = None ) : try : with db_transaction ( ) : key = db_for_update ( ServiceKey. select ( ). where ( ServiceKey. kid == kid ) ). get ( ) if name is not None : key. name = name if : key. metadata. update ( metadata ) key. save ( ) except ServiceKey. DoesNotExist : raise ServiceKeyDoesNotExist",True,metadata is not None,metadata is not None,0.6653944849967957 4645,"def set_study_directions ( self, study_id : int, directions : Sequence [ StudyDirection ] ) -> None : with self. _lock : if : current_directions = self. _studies [ study_id ]. directions if directions == current_directions : return elif ( len ( current_directions ) == 1 and current_directions [ 0 ] == StudyDirection. NOT_SET ) : self. _studies [ study_id ]. directions = list ( directions ) self. _backend. set_study_directions ( study_id, directions ) return self. _backend. set_study_directions ( study_id, directions )",True,study_id in self._studies,study_id in self._studies,0.6583038568496704 4646,"def param ( self ) : if not self. _param : multi_text = """" if self. multiple : multi_text ='multiple=""True""' optional_text = """" if : optional_text = 'optional=""True""' options_text = self. options_xml data_ref_text = """" if self. set_data_ref : data_ref_text = 'data_ref=""input_bam""' template_xml = """"""%s"""""" param_str = template_xml % ( self. type, data_ref_text, multi_text, optional_text, options_text, ) self. _param = self. _parameter_for ( xml = param_str ) return self. 
_param",True,self.optional,self.optional,0.6671499013900757 4647,"def test_multipolygons ( self ) : ""Testing MultiPolygon objects."" prev = fromstr ( ""POINT (0 0)"" ) for mp in self. geometries. multipolygons : mpoly = fromstr ( mp. wkt ) self. assertEqual ( mpoly. geom_type, ""MultiPolygon"" ) self. assertEqual ( mpoly. geom_typeid, 6 ) self. assertEqual ( mp. valid, mpoly. valid ) if : self. assertEqual ( mp. num_geom, mpoly. num_geom ) self. assertEqual ( mp. n_p, mpoly. num_coords ) self. assertEqual ( mp. num_geom, len ( mpoly ) ) self. assertRaises ( GEOSIndexError, mpoly. __getitem__, len ( mpoly ) ) for p in mpoly : self. assertEqual ( p. geom_type, ""Polygon"" ) self. assertEqual ( p. geom_typeid, 3 ) self. assertEqual ( p. valid, True ) self. assertEqual ( mpoly. wkt, MultiPolygon ( * tuple ( poly. clone ( ) for poly in mpoly ) ). wkt )",False,mp.valid,mp.num_coords != mpoly.num_coords,0.6669692993164062 4648,"def update_api_apivs ( instance, versioning_scheme = None, description = None, display_name = None, version_header_name = None, version_query_name = None, ) : """"""Updates the details of the Api VersionSet specified by its identifier."""""" if display_name is not None : instance. display_name = display_name if versioning_scheme is not None : instance. versioning_scheme = versioning_scheme if versioning_scheme == VersioningScheme. header : if : raise CLIError ( ""Please specify version header name while using 'header' as version scheme."" ) instance. version_header_name = version_header_name instance. version_query_name = None if versioning_scheme == VersioningScheme. query : if version_query_name is None : raise CLIError ( ""Please specify version query name while using 'query' as version scheme."" ) instance. version_query_name = version_query_name instance. version_header_name = None if description is None : instance. description = description return instance",True,version_header_name is None,version_header_name is None,0.6575341820716858 4649,"def createDirectories ( self, path, permissions ) : if not self. _lock. acquire ( blocking = False ) : self. _raiseServerException ( ""Could not acquire remote connection lock. Multi-threaded access detected!"" ) try : self. do_verifyConnected ( ) self. log. debug ( ""createDirectories %s, %r"", path, permissions ) orig_path = path make_dir_paths = [ ] last_path = None rfinfo = self. list ( path ) while rfinfo is None : if : self. _raiseServerException ( ""Could not find any parent directory for %r"", orig_path ) make_dir_paths. append ( path ) path = self. do_getParentPath ( path ) rfinfo = self. list ( path ) last_path = path for path in make_dir_paths : self. do_createDirectory ( name, permissions ) finally : self. _lock. release ( )",False,not path or path == last_path,not self.hasParentPath(path),0.6520031690597534 4650,"def expand_extensions ( existing ) : for name in extension_names : ext = ( im ( ""lizard_ext.lizard"" + name. lower ( ) ). LizardExtension ( ) if : else name ) existing. insert ( len ( existing ) if not hasattr ( ext, ""ordering_index"" ) else ext. ordering_index, ext, ) return existing",False,"isinstance(name, str)","not hasattr(existing, name)",0.6540394425392151 4651,"def things ( self, query ) : limit = query. pop ( ""limit"", 100 ) offset = query. pop ( ""offset"", 0 ) keys = set ( self. docs ) for k, v in query. items ( ) : if : flat = common. flatten_dict ( v ) [ 0 ] k += ""."" + web. rstrips ( flat [ 0 ], "".key"" ) v = flat [ 1 ] keys = set ( k for k in self. filter_index ( self. 
index, k, v ) if k in keys ) keys = sorted ( keys ) return keys [ offset : offset + limit ]",True,"isinstance(v, dict)","isinstance(v, dict)",0.6516493558883667 4652,"def send_animation ( token, chat_id, data, duration = None, caption = None, reply_to_message_id = None, reply_markup = None, parse_mode = None, disable_notification = None, timeout = None, thumb = None, ) : method_url = r""sendAnimation"" payload = { ""chat_id"" : chat_id } files = None if not util. is_string ( data ) : files = { ""animation"" : data } else : payload [ ""animation"" ] = data if duration : payload [ ""duration"" ] = duration if caption : payload [ ""caption"" ] = caption if reply_to_message_id : payload [ ""reply_to_message_id"" ] = reply_to_message_id if reply_markup : payload [ ""reply_markup"" ] = _convert_markup ( reply_markup ) if parse_mode : payload [ ""parse_mode"" ] = parse_mode if disable_notification is not None : payload [ ""disable_notification"" ] = disable_notification if timeout : payload [ ""connect-timeout"" ] = timeout if thumb : if not util. is_string ( thumb ) : if : files [ ""thumb"" ] = thumb else : files = { ""thumb"" : thumb } else : payload [ ""thumb"" ] = thumb return _make_request ( token, method_url, params = payload, files = files, method = ""post"" )",False,files,thumb or _is_valid_thumb(thumb),0.6814186573028564 4653,"def test_slice_variants ( self ) : """"""Simple slices using different start/end values."""""" for start in list ( range ( - 30, 30 ) ) + [ None ] : for end in list ( range ( - 30, 30 ) ) + [ None ] : if : continue rec = self. record [ start : end ] seq = self. record. seq [ start : end ] seq_str = str ( self. record. seq ) [ start : end ] self. assertEqual ( seq_str, str ( seq ) ) self. assertEqual ( seq_str, str ( rec. seq ) ) self. assertEqual ( ""X"" * len ( seq_str ), rec. letter_annotations [ ""fake"" ] )",False,start is None and end is None,start == end,0.657306432723999 4654,"def _route_db ( self, model, ** hints ) : chosen_db = None for router in self. routers : try : method = getattr ( router, action ) except AttributeError : pass else : chosen_db = method ( model, ** hints ) if : return chosen_db try : return hints [ ""instance"" ]. _state. db or DEFAULT_DB_ALIAS except KeyError : return DEFAULT_DB_ALIAS",True,chosen_db,chosen_db,0.6685689687728882 4655,"def _resolve_relative_config ( dir, config ) : icon = config. get ( ""icon"" ) if icon : if : icon = File ( icon ) else : icon = dir. resolve_file ( icon ) document_root = config. get ( ""document_root"" ) if document_root : if zim. fs. isabs ( document_root ) or not dir : document_root = Dir ( document_root ) else : document_root = dir. resolve_dir ( document_root ) return icon, document_root",False,zim.fs.isabs(icon) or not dir,zim.fs.isfile(icon),0.6516158580780029 4656,"def process_tag ( self ) : tag = self. event. tag if isinstance ( self. event, ScalarEvent ) : if : self. style = self. choose_scalar_style ( ) if ( not self. canonical or tag is None ) and ( ( self. style == """" and self. event. implicit [ 0 ] ) or ( self. style!= """" and self. event. implicit [ 1 ] ) ) : self. prepared_tag = None return if self. event. implicit [ 0 ] and tag is None : tag = u""!"" self. prepared_tag = None else : if ( not self. canonical or tag is None ) and self. event. implicit : self. prepared_tag = None return if tag is None : raise EmitterError ( ""tag is not specified"" ) if self. prepared_tag is None : self. prepared_tag = self. prepare_tag ( tag ) if self. prepared_tag : self. write_indicator ( self. 
prepared_tag, True ) self. prepared_tag = None",False,self.style is None,tag is None,0.6569838523864746 4657,"def updateGroupStats ( light ) : for group in bridge_config [ ""groups"" ] : if light in bridge_config [ ""groups"" ] [ group ] [ ""lights"" ] : for key, value in bridge_config [ ""lights"" ] [ light ] [ ""state"" ]. iteritems ( ) : if : bridge_config [ ""groups"" ] [ group ] [ ""action"" ] [ key ] = value any_on = False all_on = True bri = 0 for group_light in bridge_config [ ""groups"" ] [ group ] [ ""lights"" ] : if bridge_config [ ""lights"" ] [ light ] [ ""state"" ] [ ""on"" ] == True : any_on = True else : all_on = False bri += bridge_config [ ""lights"" ] [ light ] [ ""state"" ] [ ""bri"" ] avg_bri = bri / len ( bridge_config [ ""groups"" ] [ group ] [ ""lights"" ] ) bridge_config [ ""groups"" ] [ group ] [ ""state"" ] = { ""any_on"" : any_on, ""all_on"" : all_on, ""bri"" : avg_bri, ""lastupdated"" : datetime. utcnow ( ). strftime ( ""%Y-%m-%dT%H:%M:%S"" ),",False,"key not in ['on', 'reachable']",key in bridge_config[group],0.6551306247711182 4658,"def get_read_write_funcs ( parsed_code ) : allids = set ( [ ] ) read = set ( [ ] ) write = set ( [ ] ) funcs = set ( [ ] ) for node in ast. walk ( parsed_code ) : if node. __class__ is ast. Name : allids. add ( node. id ) if : write. add ( node. id ) elif node. ctx. __class__ is ast. Load : read. add ( node. id ) else : raise SyntaxError elif node. __class__ is ast. Call : funcs. add ( node. func. id ) read = read - funcs if funcs. intersection ( write ) : raise SyntaxError ( ""Cannot assign to functions in abstract code"" ) return allids, read, write, funcs",False,node.ctx.__class__ is ast.Store,node.ctx.__class__ is ast.Write,0.6554673910140991 4659,"def setup ( self, ctxt ) : LOG. info ( ""Initiating connection to IBM DS8K storage system."" ) connection_type = self. configuration. safe_get ( ""connection_type"" ) replication_devices = self. configuration. safe_get ( ""replication_device"" ) if connection_type == storage. XIV_CONNECTION_TYPE_FC : if : self. _helper = helper. DS8KCommonHelper ( self. configuration, self. _connector_obj ) else : self. _helper = helper. DS8KReplicationSourceHelper ( self. configuration, self. _connector_obj ) elif connection_type == storage. XIV_CONNECTION_TYPE_FC_ECKD : self. _helper = helper. DS8KECKDHelper ( self. configuration, self. _connector_obj ) else : raise exception. InvalidParameterValue ( err = ( _ ( ""Param [connection_type] %s is invalid."" ) % connection_type ) ) if replication_devices : self. _do_replication_setup ( replication_devices, self. _helper ) self. _check_async_cloned_volumes ( )",False,not replication_devices,replication_devices,0.6599441766738892 4660,"def assert_traceback ( self, tb, files ) : deduped_files = [ ] while tb : code = tb. tb_frame. f_code fn = code. co_filename if : deduped_files. append ( fn ) tb = tb. tb_next self. assertEqual ( len ( deduped_files ), len ( files ), deduped_files ) for fn, pat in zip ( deduped_files, files ) : self. assertIn ( pat, fn )",False,not deduped_files or fn != deduped_files[-1],fn not in deduped_files,0.6565097570419312 4661,"def url_rewrite ( self, task, entry ) : try : txheaders = { ""User-agent"" : ""Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)"" } page = task. requests. get ( entry [ ""url"" ], headers = txheaders ) soup = get_soup ( page. text ) results = soup. find_all ( ""a"", attrs = { ""class"" : ""l"" } ) if not results : raise UrlRewritingError ( ""No results"" ) for res in results : url = res. 
get ( ""href"" ) url = url. replace ( ""/interstitial?url="", """" ) regexp = "".*"". join ( [ x. contents [ 0 ] for x in res. find_all ( ""em"" ) ] ) if : log. debug ( ""resolved, found with %s"" % regexp ) entry [ ""url"" ] = url return raise UrlRewritingError ( ""Unable to resolve"" ) except Exception as e : raise UrlRewritingError ( e )",False,"re.match(regexp, entry['title'])",self.has_regex(regexp),0.6453467607498169 4662,"def trakt_episode_data_generate ( self, data ) : uniqueSeasons = [ ] for season, episode in data : if : uniqueSeasons. append ( season ) seasonsList = [ ] for searchedSeason in uniqueSeasons : episodesList = [ ] for season, episode in data : if season == searchedSeason : episodesList. append ( { ""number"" : episode } ) seasonsList. append ( { ""number"" : searchedSeason, ""episodes"" : episodesList } ) post_data = { ""seasons"" : seasonsList } return post_data",False,season not in uniqueSeasons,season == season,0.6704022884368896 4663,"def process_response ( self, request, response ) : language = get_language ( ) if hasattr ( request, ""session"" ) : session_language = request. session. get ( LANGUAGE_SESSION_KEY, None ) if : request. session [ LANGUAGE_SESSION_KEY ] = language request. session. save ( ) if ( settings. LANGUAGE_COOKIE_NAME in request. COOKIES and request. COOKIES [ settings. LANGUAGE_COOKIE_NAME ] == language ) : return response max_age = 365 * 24 * 60 * 60 expires = datetime. datetime. utcnow ( ) + datetime. timedelta ( seconds = max_age ) response. set_cookie ( settings. LANGUAGE_COOKIE_NAME, language, expires = expires ) return response",False,session_language and (not session_language == language),session_language is not None,0.6548347473144531 4664,"def rename_references ( app, what, name, obj, options, lines, reference_offset = [ 0 ] ) : references = [ ] for line in lines : line = line. strip ( ) m = re. match ( sixu ( ""^.. \\[(%s)\\]"" ) % app. config. numpydoc_citation_re, line, re. I ) if m : references. append ( m. group ( 1 ) ) if references : for i, line in enumerate ( lines ) : for r in references : if : new_r = sixu ( ""R%d"" ) % ( reference_offset [ 0 ] + int ( r ) ) else : new_r = sixu ( ""%s%d"" ) % ( r, reference_offset [ 0 ] ) lines [ i ] = lines [ i ]. replace ( sixu ( ""[%s]_"" ) % r, sixu ( ""[%s]_"" ) % new_r ) lines [ i ] = lines [ i ]. replace ( sixu ( "".. [%s]"" ) % r, sixu ( "".. [%s]"" ) % new_r ) reference_offset [ 0 ] += len ( references )",False,"re.match(sixu('^\\d+$'), r)",r in line,0.6543307304382324 4665,"def getsource ( obj ) : """"""Wrapper around inspect.getsource"""""" try : try : src = encoding. to_unicode ( inspect. getsource ( obj ) ) except TypeError : if : src = encoding. to_unicode ( inspect. getsource ( obj. __class__ ) ) else : src = getdoc ( obj ) return src except ( TypeError, IOError ) : return",True,"hasattr(obj, '__class__')","hasattr(obj, '__class__')",0.6569093465805054 4666,"def onmove ( self, event ) : ""on motion notify event if box/line is wanted"" if self. eventpress is None or self. ignore ( event ) : return x, y = event. xdata, event. ydata if self. drawtype == ""box"" : minx, maxx = self. eventpress. xdata, x miny, maxy = self. eventpress. ydata, y if : minx, maxx = maxx, minx if miny > maxy : miny, maxy = maxy, miny self. to_draw. set_x ( minx ) self. to_draw. set_y ( miny ) self. to_draw. set_width ( maxx - minx ) self. to_draw. set_height ( maxy - miny ) self. update ( ) return False if self. drawtype == ""line"" : self. to_draw. set_data ( [ self. eventpress. xdata, x ], [ self. 
eventpress. ydata, y ] ) self. update ( ) return False",True,minx > maxx,minx > maxx,0.6709022521972656 4667,"def POP ( cpu, * regs ) : for reg in regs : val = cpu. stack_pop ( cpu. address_bit_size // 8 ) if : cpu. _set_mode_by_val ( val ) val = val & ~ 0x1 reg. write ( val )",False,"reg.reg in ('PC', 'R15')",val & 1 < TAB > 1,0.6497471332550049 4668,"def ad_readout ( self, data ) : cnt_idx = self. cnt - len ( data ) + 1 idx = 0 while idx < 14 : if idx == 2 : idx += 2 value = ( data [ idx ] << 8 ) | data [ idx + 1 ] name = AD_READOUTS. get ( idx, ""..."" ) if value!= 0 : if idx == 0 : self. annotate ( name, self. to_temp ( value ), cnt_idx + idx, cnt_idx + idx + 1 ) elif idx == 4 : self. annotate ( name, self. to_current ( value ), cnt_idx + idx, cnt_idx + idx + 1 ) elif : self. annotate ( name, self. to_power ( value ), cnt_idx + idx, cnt_idx + idx + 1 ) else : self. annotate ( name, str ( value ), cnt_idx + idx, cnt_idx + idx + 1 ) idx += 2",False,"idx in (6, 8)",idx == 8,0.6576352119445801 4669,"def get_bb_vars ( variables = None, target = None, postconfig = None ) : """"""Get values of multiple bitbake variables"""""" bbenv = get_bb_env ( target, postconfig = postconfig ) if variables is not None : variables = list ( variables ) var_re = re. compile ( r'^(export )?(?P\w+(_.*)?)=""(?P.*)""$' ) unset_re = re. compile ( r""^unset (?P\w+)$"" ) lastline = None values = { } for line in bbenv. splitlines ( ) : match = var_re. match ( line ) val = None if match : val = match. group ( ""value"" ) else : match = unset_re. match ( line ) if match : if : val = lastline. split ( '""' ) [ 1 ] if val : var = match. group ( ""var"" ) if variables is None : values [ var ] = val else : if var in variables : values [ var ] = val variables. remove ( var ) if not variables : break lastline = line if variables : if ""column_definition"" in field : column_def = self. get_custom_type_declaration_sql ( field ) else : default = self. get_default_value_declaration_sql ( field ) charset = field. get ( ""charset"", """" ) if charset : charset = "" "" + self. get_column_charset_declaration_sql ( charset ) collation = field. get ( ""collation"", """" ) if charset : charset = "" "" + self. get_column_collation_declaration_sql ( charset ) notnull = field. get ( ""notnull"", """" ) if : notnull = "" NOT NULL"" else : notnull = """" unique = field. get ( ""unique"", """" ) if unique : unique = "" "" + self. get_unique_field_declaration_sql ( ) else : unique = """" check = field. get ( ""check"", """" ) type_decl = self. get_sql_type_declaration ( field ) column_def = ( type_decl + charset + default + notnull + unique + check + collation ) return name + "" "" + column_def",True,notnull,notnull,0.6692383885383606 4671,"def test_basic ( self ) : for a in range ( 3 ) : for b in range ( 3 ) : for typea in ( int, Number ) : for typeb in ( int, Number ) : if : continue ta = typea ( a ) tb = typeb ( b ) for ops in opmap. values ( ) : for op in ops : realoutcome = op ( a, b ) testoutcome = op ( ta, tb ) self. assertEqual ( realoutcome, testoutcome )",False,typea == typeb == int,typea is None and b is None,0.6660654544830322 4672,"def iterate_list ( self, form_data ) : provider_names = dict ( get_all_payment_providers ( ) ) payments = OrderPayment. objects. filter ( order__event__in = self. events, state__in = form_data. get ( ""payment_states"", [ ] ) ). order_by ( ""created"" ) refunds = OrderRefund. objects. filter ( order__event__in = self. events, state__in = form_data. get ( ""refund_states"", [ ] ) ). 
order_by ( ""created"" ) objs = sorted ( list ( payments ) + list ( refunds ), key = lambda o : o. created ) headers = [ _ ( ""Event slug"" ), _ ( ""Order"" ), _ ( ""Payment ID"" ), _ ( ""Creation date"" ), _ ( ""Completion date"" ), _ ( ""Status"" ), _ ( ""Status code"" ), _ ( ""Amount"" ), _ ( ""Payment method"" ), ] yield headers yield self. ProgressSetTotal ( total = len ( objs ) ) for obj in objs : tz = pytz. timezone ( obj. order. event. settings. timezone ) if : d2 = obj. payment_date. astimezone ( tz ). date ( ). strftime ( ""%Y-%m-%d"" ) elif isinstance ( obj, OrderRefund ) and obj. execution_date : d2 = obj. execution_date. astimezone ( tz ). date ( ). strftime ( ""%Y-%m-%d"" ) else : d2 = """" row = [ <",False,"isinstance(obj, OrderPayment) and obj.payment_date","isinstance(obj, OrderPayment)",0.6484655141830444 4673,"def process ( self, resources ) : wafs = self. manager. get_resource_manager ( ""waf"" ). resources ( ) waf_name_id_map = { w [ ""Name"" ] : w [ ""WebACLId"" ] for w in wafs } target_acl = self. data. get ( ""web-acl"" ) target_acl_id = waf_name_id_map. get ( target_acl, target_acl ) if target_acl_id not in waf_name_id_map. values ( ) : raise ValueError ( ""invalid web acl: %s"" % ( target_acl_id ) ) client = local_session ( self. manager. session_factory ). client ( ""cloudfront"" ) force = self. data. get ( ""force"", False ) for r in resources : if r. get ( ""WebACLId"" ) and not force : continue if : continue result = client. get_distribution_config ( Id = r [ ""Id"" ] ) config = result [ ""DistributionConfig"" ] config [ ""WebACLId"" ] = target_acl_id self. retry ( client. update_distribution, Id = r [ ""Id"" ], DistributionConfig = config, IfMatch = result [ ""ETag"" ], )",False,r.get('WebACLId') == target_acl_id,r.get('NoSuchDistributionId') and force,0.6697400808334351 4674,"def test_launches_cluster_with_telemetry_client_timeout_enabled ( self ) : cfg = config. Config ( ) cfg. add ( config. Scope. application, ""client"", ""hosts"", self. test_host ) cfg. add ( config. Scope. application, ""client"", ""options"", self. client_options ) cfg. add ( config. Scope. application, ""mechanic"", ""telemetry.devices"", [ ] ) cfg. add ( config. Scope. application, ""mechanic"", ""telemetry.params"", { } ) cfg. add ( config. Scope. application, ""mechanic"", ""preserve.install"", False ) cluster_launcher = launcher. ClusterLauncher ( cfg, MockMetricsStore ( ), client_factory_class = MockClientFactory ) cluster = cluster_launcher. start ( ) for telemetry_device in cluster. telemetry. devices : if : for _, client in telemetry_device. clients. items ( ) : self. assertDictEqual ( { ""retry-on-timeout"" : True, ""timeout"" : 60 }, client. client_options ) else : self. assertDictEqual ( { ""retry-on-timeout"" : True, ""timeout"" : 60 }, telemetry_device. client. client_options, )",False,"hasattr(telemetry_device, 'clients')",telemetry_device.clients,0.6493241190910339 4675,"def setup_key_pair ( self, context ) : key_name = ""%s%s"" % ( context. project_id, FLAGS. vpn_key_suffix ) try : result = cloud. _gen_key ( context, context. user_id, key_name ) private_key = result [ ""private_key"" ] key_dir = os. path. join ( FLAGS. keys_path, context. user_id ) if : os. makedirs ( key_dir ) key_path = os. path. join ( key_dir, ""%s.pem"" % key_name ) with open ( key_path, ""w"" ) as f : f. write ( private_key ) except ( exception. Duplicate, os. 
error, IOError ) : pass return key_name",True,not os.path.exists(key_dir),not os.path.exists(key_dir),0.6468414068222046 4676,"def mouse_move ( self, ips, x, y, btn, ** key ) : if ips. roi == None : return lim = 5.0 / key [ ""canvas"" ]. get_scale ( ) if btn == None : self. cursor = wx. CURSOR_CROSS if : self. cursor = wx. CURSOR_HAND elif btn == 1 : if self. doing : self. helper. addpoint ( ( x, y ) ) elif self. curobj : ips. roi. draged ( self. odx, self. ody, x, y, ips. cur, self. curobj ) ips. update ( ) self. odx, self. ody = x, y",False,"ips.roi.snap(x, y, ips.cur, lim) != None",self.handshaker,0.660293698310852 4677,"def get_files ( self, dirname ) : if not self. _data. has_key ( dirname ) : self. _create ( dirname ) else : new_time = self. _changed ( dirname ) if : self. _update ( dirname, new_time ) dcLog. debug ( ""==> "" + ""\t\n"". join ( self. _data [ dirname ] [ ""flist"" ] ) ) return self. _data [ dirname ] [ ""flist"" ]",True,new_time,new_time,0.6736978888511658 4678,"def findControlPointsInMesh ( glyph, va, subsegments ) : controlPointIndices = np. zeros ( ( len ( va ), 1 ) ) index = 0 for i, c in enumerate ( subsegments ) : segmentCount = len ( glyph. contours [ i ]. segments ) - 1 for j, s in enumerate ( c ) : if j < segmentCount : if : controlPointIndices [ index ] = 1 index += s [ 1 ] return controlPointIndices",False,glyph.contours[i].segments[j].type == 'line',index >= len(s),0.6512120962142944 4679,"def deleteItem ( self, path_id, cu = None ) : with self. connect ( commit = True, cu = cu ) as cu : stmt = ""select path_id from hierarchy where parent_path_id =?"" cu. execute ( stmt, ( path_id, ) ) children = cu. fetchall ( ) for child_id in children : self. deleteItem ( child_id [ 0 ], cu ) path = self. getPath ( path_id, cu ) tableNames = [ ""common_details"", ""common_tool_details"", ""misc_properties"", ""hierarchy"", ""favorites"", ] res = self. getValuesFromTableByKey ( ""common_details"", [ ""type"" ], ""path_id"", path_id, cu ) if : tool_type = res [ 0 ] if tool_type in [ ""snippet"", ""macro"", ""command"", ""menu"", ""toolbar"", ""tutorial"", ] : tableNames. append ( tool_type ) for t in tableNames : try : cu. execute (",True,res,res,0.6974622011184692 4680,"def apply_upstream_proxy_settings ( ) : if config. SOCKS5_HOST and config. SOCKS5_PORT : import socks print_err ( ""Socket-proxy mode activated, it is incompatible with advertising and uvloop"" ) socks. set_default_proxy ( socks. PROXY_TYPE_SOCKS5, config. SOCKS5_HOST, config. SOCKS5_PORT, username = config. SOCKS5_USER, password = config. SOCKS5_PASS, ) if : socket. origsocket = socket. socket socket. socket = socks. socksocket elif hasattr ( socket, ""origsocket"" ) : socket. socket = socket. origsocket del socket. origsocket",False,"not hasattr(socket, 'origsocket')","hasattr(socket, 'sock')",0.6515005826950073 4681,"def populate_pillars_to_tests ( ) : for pillar in PILLARS : for test, test_info in list ( TESTS_MAP. items ( ) ) : if : PILLARS_TO_TESTS [ pillar ]. append ( test )",False,pillar in test_info[PILLARS_KEY],test_info and test_info['test'],0.656973659992218 4682,"def _SkipGroup ( buffer, pos, end ) : """"""Skip sub-group. Returns the new position."""""" while 1 : ( tag_bytes, pos ) = ReadTag ( buffer, pos ) new_pos = SkipField ( buffer, pos, end, tag_bytes ) if : return pos pos = new_pos",False,new_pos == -1,new_pos > pos,0.6604567170143127 4683,"def filter_tasks ( self, task_types = None, task_states = None, task_text = None ) : tasks = self. api. tasks ( self. id ). 
get ( ""tasks"", { } ) if tasks and tasks. get ( ""task"" ) : return [ Task ( self, task ) for task in tasks. get ( ""task"", [ ] ) if : and ( not task_states or task [ ""state"" ]. lower ( ) in task_states ) and ( not task_text or task_text. lower ( ) in str ( task ). lower ( ) ) ] else : return [ ]",False,not task_types or task['type'].lower() in task_types,task_types and task_states,0.6549599170684814 4684,"def better_default_encoder ( o ) : if isinstance ( o, uuid. UUID ) : return o. hex if isinstance ( o, datetime. datetime ) : return o. strftime ( ""%Y-%m-%dT%H:%M:%S.%fZ"" ) if isinstance ( o, datetime. date ) : return o. isoformat ( ) if isinstance ( o, datetime. time ) : if : raise ValueError ( ""JSON can't represent timezone-aware times."" ) r = o. isoformat ( ) if o. microsecond : r = r [ : 12 ] return r if isinstance ( o, ( set, frozenset ) ) : return list ( o ) if isinstance ( o, decimal. Decimal ) : return str ( o ) if isinstance ( o, Enum ) : return o. value if callable ( o ) : return """" raise TypeError ( repr ( o ) + "" is not JSON serializable"" )",False,is_aware(o),o.tzinfo,0.654313325881958 4685,"def _api_change_cat ( name, output, kwargs ) : """"""API: accepts output, value(=nzo_id), value2(=category)"""""" value = kwargs. get ( ""value"" ) value2 = kwargs. get ( ""value2"" ) if value and value2 : nzo_id = value cat = value2 if : cat = None result = sabnzbd. NzbQueue. change_cat ( nzo_id, cat ) return report ( output, keyword = ""status"", data = bool ( result > 0 ) ) else : return report ( output, _MSG_NO_VALUE )",False,cat == 'None',cat,0.6571664810180664 4686,"def build ( cls, path = None ) : """"""Build config instance."""""" loader = get_yaml_loader ( ) with resource_stream ( ""knowit"", ""defaults.yml"" ) as stream : cfgs = [ yaml. load ( stream, Loader = loader ) ] if path : with open ( path, ""r"" ) as stream : cfgs. append ( yaml. load ( stream, Loader = loader ) ) profiles_data = { } for cfg in cfgs : if ""profiles"" in cfg : profiles_data. update ( cfg [ ""profiles"" ] ) knowledge_data = { } for cfg in cfgs : if : knowledge_data. update ( cfg [ ""knowledge"" ] ) data = { ""general"" : { } } for class_name, data_map in knowledge_data. items ( ) : data. setdefault ( class_name, { } ) for code, detection_values in data_map. items ( ) : alias_map = ( profiles_data. get ( class_name ) or { } ). get ( code ) or { } alias_map. setdefault ( ""code"", code ) alias_map. setdefault ( ""default"", alias_map [ ""code"" ] ) alias_map. setdefault ( ""human"", alias_map [ ""default"" ] ) alias_map. setdefault ( ""technical"", alias_map [ ""human"" ] ) value = _Value ( ** { k : v for k, v in alias_map. items ( ) if k in _valid_aliases } ) ",True,'knowledge' in cfg,'knowledge' in cfg,0.66872638463974 4687,"def extend_nodelist ( self, nodelist, node, token ) : if node. must_be_first and nodelist : try : if : raise AttributeError except AttributeError : raise TemplateSyntaxError ( ""%r must be the first tag in the template."" % node ) if isinstance ( nodelist, NodeList ) and not isinstance ( node, TextNode ) : nodelist. contains_nontext = True nodelist. append ( node )",False,nodelist.contains_nontext,token.tag != token.tag,0.6596221923828125 4688,"def make_append ( bases, cols, calls, glyph ) : names = ( ""_{0}"". format ( i ) for i in count ( ) ) inputs = list ( bases ) + list ( cols ) signature = [ next ( names ) for i in inputs ] arg_lk = dict ( zip ( inputs, signature ) ) local_lk = { } namespace = { } body = [ ] ndims = glyph. 
ndims if ndims is not None : subscript = "", "". join ( [ ""i"" + str ( n ) for n in range ( ndims ) ] ) else : subscript = None for func, bases, cols, temps in calls : local_lk. update ( zip ( temps, ( next ( names ) for i in temps ) ) ) func_name = next ( names ) namespace [ func_name ] = func args = [ arg_lk [ i ] for i in bases ] if : args. extend ( ""{0}"". format ( arg_lk [ i ] ) for i in cols ) else : args. extend ( ""{0}[{1}]"". format ( arg_lk [ i ], subscript ) for i in cols ) args. extend ( [ local_lk [ i ] for i in temps ] ) body. append ( ""{0}(x, y, {1})"". format ( func_name, "", "". join ( args ) ) ) body = [ ""{0} = {1}[y, x]"". format ( name, arg_lk [ agg ] ) for agg, name in local_lk. items ( ) ] + body if : code = ( ""def append(x, y, {0}):\n"" "" {1}"" ). format ( 0,0.6740870475769043 4689,"def translate ( ) : assert Lex. next ( ) is AttributeList reader. read ( ) attrs = { } d = AttributeList. match. groupdict ( ) for k, v in d. items ( ) : if : if k == ""attrlist"" : v = subs_attrs ( v ) if v : parse_attributes ( v, attrs ) else : AttributeList. attrs [ k ] = v AttributeList. subs ( attrs ) AttributeList. attrs. update ( attrs )",False,v is not None,k in [TAB>,0.6594157218933105 4690,"def parse_changelog ( ) : with open ( ""CHANGES"" ) as f : lineiter = iter ( f ) for line in lineiter : match = re. search ( ""^Version\s+(.*)"", line. strip ( ) ) if : continue version = match. group ( 1 ). strip ( ) if lineiter. next ( ). count ( ""-"" )!= len ( match. group ( 0 ) ) : continue while 1 : change_info = lineiter. next ( ). strip ( ) if change_info : break match = re. search ( r""Released on (\w+\s+\d+,\s+\d+)"", change_info ) if : continue datestr = match. groups ( ) [ 0 ] return version, parse_date ( datestr )",False,match is None,not match,0.6621959209442139 4691,"def setinfo ( self, path, info ) : self. check ( ) _path = self. validatepath ( path ) sys_path = self. _to_sys_path ( _path ) if not os. path. exists ( sys_path ) : raise errors. ResourceNotFound ( path ) if ""details"" in info : details = info [ ""details"" ] if : _accessed = typing. cast ( int, details. get ( ""accessed"" ) ) _modified = typing. cast ( int, details. get ( ""modified"", _accessed ) ) accessed = int ( _modified if _accessed is None else _accessed ) modified = int ( _modified ) if accessed is not None or modified is not None : with convert_os_errors ( ""setinfo"", path ) : os. utime ( sys_path, ( accessed, modified ) )",False,'accessed' in details or 'modified' in details,details,0.6547573804855347 4692,"def main ( ) : filenames = ParseArguments ( sys. argv [ 1 : ] ) backup_err = sys. stderr try : sys. stderr = codecs. StreamReader ( sys. stderr, ""replace"" ) _cpplint_state. ResetErrorCounts ( ) for filename in filenames : ProcessFile ( filename, _cpplint_state. verbose_level ) _cpplint_state. PrintErrorCounts ( ) if : sys. stderr. write ( _cpplint_state. FormatJUnitXML ( ) ) finally : sys. stderr = backup_err sys. exit ( _cpplint_state. error_count > 0 )",False,_cpplint_state.output_format == 'junit',backup_err,0.6491585969924927 4693,"def write ( self, text ) : if not isinstance ( text, bytes ) : if not isinstance ( text, text_type ) : msg = ""You can only write str to a Response.body_file, not %s"" raise TypeError ( msg % type ( text ) ) if : msg = ""You can only write text to Response if charset has "" ""been set"" raise TypeError ( msg ) text = text. encode ( self. charset ) app_iter = self. _app_iter if not isinstance ( app_iter, list ) : try : new_app_iter = self. 
_app_iter = list ( app_iter ) finally : iter_close ( app_iter ) app_iter = new_app_iter self. content_length = sum ( len ( chunk ) for chunk in app_iter ) app_iter. append ( text ) if self. content_length is not None : self. content_length += len ( text )",False,not self.charset,self.charset is None,0.6643362045288086 4694,"def leaveISP ( self ) : if self. serial is not None : if : raise ispBase. IspError ( ""Failed to leave programming mode"" ) ret = self. serial self. serial = None return ret return None",False,"self.sendMessage([17]) != [17, 0]",self.serial != None and self.serial != None,0.6541696786880493 4695,"def clean_items ( event, items, variations ) : for item in items : if : raise ValidationError ( _ ( ""One or more items do not belong to this event."" ) ) if item. has_variations : if not any ( var. item == item for var in variations ) : raise ValidationError ( _ ( ""One or more items has variations but none of these are in the variations list."" ) )",False,event != item.event,event.type != item.type,0.6605589389801025 4696,"def handle ( self, ** options ) : from django. conf import settings, Settings, global_settings settings. _setup ( ) user_settings = module_to_dict ( settings. _wrapped ) default = options [ ""default"" ] default_settings = module_to_dict ( Settings ( default ) if default else global_settings ) output = [ ] for key in sorted ( user_settings ) : if key not in default_settings : output. append ( ""%s = %s ###"" % ( key, user_settings [ key ] ) ) elif user_settings [ key ]!= default_settings [ key ] : output. append ( ""%s = %s"" % ( key, user_settings [ key ] ) ) elif : output. append ( ""### %s = %s"" % ( key, user_settings [ key ] ) ) return ""\n"". join ( output )",False,options['all'],user_settings[key] != default_settings[key],0.6573597192764282 4697,"def bprop_slice ( self, x, S, Q, padding, stride, dilation ) : qs = x - ( dilation * ( S - 1 ) - padding ) f1 = None for s in range ( S ) : q = qs + s * dilation if q % stride == 0 : q //= stride if : if f1 is None : f1 = s x1 = q f2 = s x2 = q if f1 is None : return ( slice ( 0, 0, 1 ), slice ( 0, 0, 1 ), 0 ) f_step = 1 while ( ( f_step * dilation ) % stride )!= 0 : f_step += 1 x_step = ( f_step * dilation ) // stride return ( slice ( f1, f2 + 1, f_step ), slice ( x1, x2 + 1, x_step ), 0 )",False,q >= 0 and q < Q,q % stride == 0,0.6630949378013611 4698,"def find_symbol ( self, r, globally = False ) : query = self. view. substr ( self. view. word ( r ) ) fname = self. view. file_name ( ). replace ( ""\\"", ""/"" ) locations = self. view. window ( ). lookup_symbol_in_index ( query ) if not locations : return try : if : location = [ hit [ 2 ] for hit in locations if fname. endswith ( hit [ 1 ] ) ] [ 0 ] return location [ 0 ] - 1, location [ 1 ] - 1 else : return locations [ 0 ] except IndexError : return",False,not globally,globally,0.6732712984085083 4699,"def put ( self, session ) : with sess_lock : self. parent. put ( session ) for sp in self. skip_paths : if : return if session. sid in self. _cache : try : del self. _cache [ session. sid ] except Exception : pass self. _cache [ session. sid ] = session self. _normalize ( )",False,request.path.startswith(sp),"sp in [sp, sp.TAB]",0.647872805595398 4700,"def prepare_wrap ( self, msg ) : obscured = self. obscured_headers wrapped = self. wrapped_headers obscured_set = set ( [ ] ) to_delete = { } for ( h, header_value ) in msg. items ( ) : if not header_value : continue hl = h. lower ( ) if hl == ""mime-version"" : to_delete [ h ] = True elif not hl. 
startswith ( ""content-"" ) : if hl in obscured : obscured_set. add ( h ) oh = obscured [ hl ] ( header_value ) if : self. container. add_header ( h, oh ) else : self. container. add_header ( h, header_value ) if hl not in wrapped and hl not in obscured : to_delete [ h ] = True for h in to_delete : while h in msg : del msg [ h ] if hasattr ( msg, ""signature_info"" ) : self. container. signature_info = msg. signature_info self. container. encryption_info = msg. encryption_info return self. force_display_headers ( msg, obscured_set )",False,oh,h in wrapped,0.7205325365066528 4701,"def _resolve_register_argument ( self, call_stmt, arg_loc ) : size = arg_loc. size offset = arg_loc. _fix_offset ( None, size, arch = self. project. arch ) if self. _reaching_definitions is not None : ins_addr = call_stmt. tags [ ""ins_addr"" ] try : rd = self. _reaching_definitions. get_reaching_definitions_by_insn ( ins_addr, OP_BEFORE ) except KeyError : rd = None if rd is not None : defs = rd. register_definitions. get_variables_by_offset ( offset ) if not defs : l. warning ( ""Did not find any reaching definition for register %s at instruction %x."", arg_loc, ins_addr, ) elif len ( defs ) > 1 : l. warning ( ""TODO: More than one reaching definition are found at instruction %x."", ins_addr, ) else : def_ = next ( iter ( defs ) ) var_or_value = self. _find_variable_from_definition (",False,var_or_value is not None,len(defs) > 0,0.6555719971656799 4702,"def test_async_iterator ( app ) : async with new_stream ( app ) as stream : for i in range ( 100 ) : await stream. channel. deliver ( message ( key = i, value = i ) ) received = 0 async for value in stream : assert value == received received += 1 if : break assert await channel_empty ( stream. channel )",False,received >= 100,received >= 6,0.6845225095748901 4703,"def emit_default ( self ) : """"""emit the action taken when we did not hit a valid hash table entry"""""" actions = [ ] for action in self. default_actions : if : s = ""return %s"" % action. get_str_value ( ) actions. append ( s ) if action. is_field_binding ( ) : val = action. get_str_value ( ) fb = action. field_name. lower ( ) s = ""%s_set_%s(%s,%s)"" % ( self. strings_dict [ ""op_accessor"" ], fb, self. strings_dict [ ""obj_str"" ], val, ) actions. append ( s ) return actions",False,action.is_return(),action.is_hash_table(),0.6543012261390686 4704,"def check_messages ( messages ) : for message_type in messages : if : error_list. append ( _ ( ""Message type '{}' invalid, must be either'message' or 'body'"" ). format ( message_type ) ) continue message = messages [ message_type ] if message is None : continue if not isinstance ( message, str ) : error_list. append ( _ ( ""Expected string for '{}', found {}, "" ). format ( message_type, type ( message ) ) ) continue if message_type == ""message"" : if ""\n"" in message : error_list. append ( _ ( ""Messages cannot contain newlines (found newline in {} event)"". format ( event ) ) ) continue collected_messages. append ( message )",False,"message_type not in ('message', 'body')",message_type == 'body',0.6541242599487305 4705,"def get_project_dir ( env ) : project_file = workon_home / env / "".project"" if project_file. exists ( ) : with project_file. open ( ) as f : project_dir = f. readline ( ). 
strip ( ) if : return project_dir else : err ( ""Corrupted or outdated:"", project_file, ""\nDirectory"", project_dir, ""doesn't exist."", )",False,os.path.exists(project_dir),project_dir.exists(),0.6498926877975464 4706,"def build_packages ( targeted_packages, distribution_directory, is_dev_build = False ) : for package_root in targeted_packages : service_hierarchy = os. path. join ( os. path. basename ( package_root ) ) if : verify_update_package_requirement ( package_root ) print ( ""Generating Package Using Python {}"". format ( sys. version ) ) run_check_call ( [ sys. executable, build_packing_script_location, ""--dest"", os. path. join ( distribution_directory, service_hierarchy ), package_root, ], root_dir, )",True,is_dev_build,is_dev_build,0.6523603796958923 4707,"def _setResultsName ( self, name, listAllMatches = False ) : if __diag__. warn_name_set_on_empty_Forward : if : warnings. warn ( ""{0}: setting results name {0!r} on {1} expression "" ""that has no contained expression"". format ( ""warn_name_set_on_empty_Forward"", name, type ( self ). __name__ ), stacklevel = 3, ) return super ( Forward, self ). _setResultsName ( name, listAllMatches )",True,self.expr is None,self.expr is None,0.6563916206359863 4708,"def kill ( self, message = """" ) : """"""Kills the frame"""""" log. info ( ""Request recieved: kill"" ) if self. frameAttendantThread is None : log. warning ( ""Kill requested before frameAttendantThread is created "" ""for: %s"" % self. frameId ) elif self. frameAttendantThread. isAlive ( ) and self. pid is None : log. warning ( ""Kill requested before pid is available for: %s"" % self. frameId ) elif self. frameAttendantThread. isAlive ( ) : try : if not self. killMessage and message : self. killMessage = message rqd. rqutil. permissionsHigh ( ) try : if : subprocess. Popen ( ""taskkill /F /T /PID %i"" % self. pid, shell = True ) else : os. killpg ( self. pid, rqd. rqconstants. KILL_SIGNAL ) finally : rqd. rqutil. permissionsLow ( ) except OSError as e : log. warning ( ""kill() tried to kill a non-existant pid for: %s "" ""Error: %s"" % ( self. frameId, e ) ) except Exception as e : log. warning ( ""kill() encountered an unknown error: %",False,platform.system() == 'Windows',self.pid is not None,0.6574522256851196 4709,"def __init__ ( self, entity, mapper, selectable, name, with_polymorphic_mappers, polymorphic_on, _base_alias, _use_mapper_path, adapt_on_names, represents_outer_join, ) : self. entity = entity self. mapper = mapper self. selectable = selectable self. name = name self. with_polymorphic_mappers = with_polymorphic_mappers self. polymorphic_on = polymorphic_on self. _base_alias = _base_alias or self self. _use_mapper_path = _use_mapper_path self. represents_outer_join = represents_outer_join self. _adapter = sql_util. ColumnAdapter ( selectable, equivalents = mapper. _equivalent_columns, adapt_on_names = adapt_on_names, anonymize_labels = True, ) self. _adapt_on_names = adapt_on_names self. _target = mapper. class_ for poly in self. with_polymorphic_mappers : if : setattr ( self. entity, poly. class_. __name__, AliasedClass ( poly. class_, selectable, base_alias = self, adapt_on_names = adapt_on_names, use_mapper_path = _use_mapper_path",False,poly is not mapper,"hasattr(self, 'entity')",0.6842032670974731 4710,"def process_routine ( rootElt, obj, name, callables ) : if inspect. isfunction ( obj ) : if _gIsPy3 : argspec = inspect. getfullargspec ( obj ) else : argspec = inspect. getargspec ( obj ) sig = name + inspect. 
formatargspec ( * argspec ) else : sig = """" doc = getdoc ( obj ) or None call_sig_lines, description_lines = parsePyFuncDoc ( doc, [ sig ] ) if description_lines : doc = ""\n"". join ( parseDocSummary ( description_lines ) ) if call_sig_lines : signature = ""\n"". join ( call_sig_lines ) else : signature = sig if name == ""__init__"" : if doc == obj. __init__. __doc__ : doc = None if : signature = None funcElt = SubElement ( rootElt, ""scope"", ilk = ""function"", name = name ) if doc : funcElt. set ( ""doc"", doc ) if signature : funcElt. set ( ""signature"", signature ) callables [ name ] = funcElt",False,signature == obj.__init__.__doc__,rootElt,0.6579796075820923 4711,"def _pre_get_table ( self, _ctx, table_name ) : vsctl_table = self. _get_table ( table_name ) schema_helper = self. schema_helper schema_helper. register_table ( vsctl_table. table_name ) for row_id in vsctl_table. row_ids : if row_id. table : schema_helper. register_table ( row_id. table ) if : schema_helper. register_columns ( row_id. table, [ row_id. name_column ] ) if row_id. uuid_column : schema_helper. register_columns ( row_id. table, [ row_id. uuid_column ] ) return vsctl_table",True,row_id.name_column,row_id.name_column,0.6544754505157471 4712,"def polygons_to_edges_np ( obj, unique_edges = False, output_numpy = False ) : result = [ ] for pols in obj : if len ( pols ) == 0 : result. append ( [ ] ) continue regular_mesh = True try : np_pols = array ( pols, dtype = int32 ) except ValueError : regular_mesh = False if not regular_mesh : if output_numpy : result. append ( pols_to_edges_irregular_mesh ( pols, unique_edges ) ) else : result. append ( pols_to_edges_irregular_mesh ( pols, unique_edges ). tolist ( ) ) else : edges = empty ( list ( np_pols. shape ) + [ 2 ], ""i"" ) edges [ :, :, 0 ] = np_pols edges [ :, 1 :, 1 ] = np_pols [ :, : - 1 ] edges [ :, 0, 1 ] = np_pols [ :, - 1 ] edges = edges. reshape ( - 1, 2 ) if output_numpy : if : result. append ( unique ( sort ( edges ), axis = 0 ) ) else : result. append ( edges ) else : __settings__ = super ( EnhanceOrSuppressFeatures, self ). visible_settings ( ) __settings__ += [ self. method ] if self. method == ENHANCE : __settings__ += [ self. enhance_method ] self. object_size. min_value = 2 if self. enhance_method == E_DARK_HOLES : __settings__ += [ self. hole_size ] elif : __settings__ += [ self. smoothing ] elif self. enhance_method == E_DIC : __settings__ += [ self. smoothing, self. angle, self. decay ] elif self. enhance_method == E_NEURITES : __settings__ += [ self. neurite_choice ] if self. neurite_choice == N_GRADIENT : __settings__ += [ self. object_size ] else : __settings__ += [ self. smoothing ] __settings__ += [ self. wants_rescale ] elif self. enhance_method == E_SPECKLES : __settings__ += [ self. object_size, self. speckle_accuracy ] self. object_size. min_value = 3 else : __settings__ += [ self. object_size ] else : __settings__ += [ self. object_size ] return __settings__",False,self.enhance_method == E_TEXTURE,self.enhance_method == E_TARK,0.6664944887161255 4714,"def parse_constraints_from_args ( args ) : if not args. constraints : return [ ] _constraints = [ ] for constraint in args. constraints : if ARGS_SPLIT_TOKEN in constraint : constraint_name, params = constraint. split ( ARGS_SPLIT_TOKEN ) if constraint_name not in CONSTRAINT_CLASS_NAMES : raise ValueError ( f""Error: unsupported constraint {constraint_name}"" ) _constraints. append ( eval ( f""{CONSTRAINT_CLASS_NAMES[constraint_name]}({params})"" ) ) elif : _constraints. 
append ( eval ( f""{CONSTRAINT_CLASS_NAMES[constraint]}()"" ) ) else : raise ValueError ( f""Error: unsupported constraint {constraint}"" ) return _constraints",False,constraint in CONSTRAINT_CLASS_NAMES,constraint not inCONSTRAINT_CLASS_NAMES,0.6736607551574707 4715,"def on_pre_output_coercion ( directive_args : Dict [ str, Any ], next_directive : Callable, value : Any, ctx : Optional [ Any ], info : ""ResolveInfo"", ) : value = await next_directive ( value, ctx, info ) if value is None : return value try : py_enum = _ENUM_MAP [ directive_args [ ""name"" ] ] if : return [ None if item is None else py_enum ( item ). name for item in value ] return py_enum ( value ). name except Exception : pass return value",True,"isinstance(value, list)","isinstance(value, list)",0.6546279191970825 4716,"def cardsWithTags ( self, tagStr, search = ""and"" ) : tagIds = [ ] for tag in tagStr. split ( "" "" ) : tag = tag. replace ( ""*"", ""%"" ) if ""%"" in tag : ids = self. s. column0 ( ""select id from tags where tag like :tag"", tag = tag ) if search == ""and"" and not ids : return [ ] tagIds. append ( ids ) else : id = self. s. scalar ( ""select id from tags where tag = :tag"", tag = tag ) if search == ""and"" and not id : return [ ] tagIds. append ( id ) if search == ""or"" : return self. s. column0 ( ""select cardId from cardTags where tagId in %s"" % ids2str ( tagIds ) ) else : l = [ ] for ids in tagIds : if : l. append ( ""select cardId from cardTags where tagId in %s"" % ids2str ( ids ) ) else : l. append ( ""select cardId from cardTags where tagId = %d"" % ids ) q = "" intersect "". join ( l ) return self. s. column0 ( q )",False,"isinstance(ids, types.ListType)",ids,0.6511952877044678 4717,def remove_last_statement ( node ) : stmt = None if type ( node ) is CodeNode : stmt = remove_last_statement ( node. node ) elif type ( node ) is ailment. Block : stmt = node. statements [ - 1 ] node. statements = node. statements [ : - 1 ] elif type ( node ) is MultiNode : if node. nodes : stmt = remove_last_statement ( node. nodes [ - 1 ] ) if : node. nodes = node. nodes [ : - 1 ] elif type ( node ) is SequenceNode : if node. nodes : stmt = remove_last_statement ( node. nodes [ - 1 ] ) if : node. nodes = node. nodes [ : - 1 ] else : raise NotImplementedError ( ) return stmt,False,BaseNode.test_empty_node(node.nodes[-1]),node.nodes,0.6514387726783752 4718,"def update_streaming_endpoint_setter ( client, resource_group_name, account_name, streaming_endpoint_name, parameters, no_wait, ) : if ( parameters. access_control is not None and parameters. access_control. ip is not None and parameters. access_control. ip. allow ) : ips = list ( map ( lambda x : create_ip_range ( streaming_endpoint_name, x ) if : else x, parameters. access_control. ip. allow, ) ) parameters. access_control. ip. allow = ips return sdk_no_wait ( no_wait, client. update, resource_group_name = resource_group_name, account_name = account_name, streaming_endpoint_name = streaming_endpoint_name, parameters = parameters, )",False,"isinstance(x, str)",parameters.access_control.ip is None or x is None,0.6509113907814026 4719,"def safe_parse_date ( date_hdr ) : """"""Parse a Date: or Received: header into a unix timestamp."""""" try : if : date_hdr = date_hdr. split ( "";"" ) [ - 1 ]. strip ( ) msg_ts = long ( rfc822. mktime_tz ( rfc822. parsedate_tz ( date_hdr ) ) ) if ( msg_ts > ( time. 
time ( ) + 24 * 3600 ) ) or ( msg_ts < 1 ) : return None else : return msg_ts except ( ValueError, TypeError, OverflowError ) : return None",False,';' in date_hdr,"isinstance(date_hdr, basestring)",0.6635328531265259 4720,"def setup ( self, modes, current_mode ) : self. setMinimumSize ( 600, 400 ) self. setWindowTitle ( _ ( ""Select Mode"" ) ) widget_layout = QVBoxLayout ( ) label = QLabel ( _ ( 'Please select the desired mode then click ""OK"".' 'Otherwise, click ""Cancel"".' ) ) label. setWordWrap ( True ) widget_layout. addWidget ( label ) self. setLayout ( widget_layout ) self. mode_list = QListWidget ( ) self. mode_list. itemDoubleClicked. connect ( self. select_and_accept ) widget_layout. addWidget ( self. mode_list ) self. mode_list. setIconSize ( QSize ( 48, 48 ) ) for name, item in modes. items ( ) : if : litem = ModeItem ( item. name, item. description, item. icon, self. mode_list ) if item. icon == current_mode : self. mode_list. setCurrentItem ( litem ) self. mode_list. sortItems ( ) instructions = QLabel ( _ ( ""Change mode at any time by clicking "" 'the ""Mode"" button containing Mu\'s logo.' ) ) instructions. setWordWrap ( True ) widget_layout. addWidget ( instructions ) button_box = QDialogButtonBox ( QDialogButtonBox. Ok | QDialogButtonBox. Cancel ) button_box. accepted. connect ( self. accept ) button_box. rejected. connect ( self. reject ) widget_layout. addWidget ( button_box )",False,not item.is_debugger,item.name,0.6513662338256836 4721,"def python_value ( self, value ) : if value : if : pp = lambda x : x. time ( ) return format_date_time ( value, self. formats, pp ) elif isinstance ( value, datetime. datetime ) : return value. time ( ) if value is not None and isinstance ( value, datetime. timedelta ) : return ( datetime. datetime. min + value ). time ( ) return value",False,"isinstance(value, basestring)","isinstance(value, string_types)",0.6505071520805359 4722,"def process ( self ) : """"""Do processing necessary, storing result in feature."""""" summation = 0 histo = self. data [ ""flat.notes.quarterLengthHistogram"" ] if not histo : raise NativeFeatureException ( ""input lacks notes"" ) maxKey = 0 for key in histo : if histo [ key ] > 0 : summation += histo [ key ] if : maxKey = histo [ key ] self. feature. vector [ 0 ] = maxKey / summation",False,histo[key] >= maxKey,summation > maxKey,0.6675178408622742 4723,"def download_config ( client, bucket, prefix, account_id, region, day, store, rtypes = ( ) ) : config_prefix = ""%sAWSLogs/%s/Config/%s/%s/ConfigHistory/"" % ( prefix, account_id, region, day. strftime ( ""%Y/%-m/%-d"" ), ) results = client. list_objects_v2 ( Bucket = bucket, Prefix = config_prefix ) if not os. path. exists ( store ) : os. makedirs ( store ) files = [ ] downloads = Counter ( ) for k in results. get ( ""Contents"", ( ) ) : found = False for rt in rtypes : if : found = True if not found : continue fname = k [ ""Key"" ]. rsplit ( ""/"", 1 ) [ - 1 ] fpath = os. path. join ( store, fname ) files. append ( fpath ) if os. path. exists ( fpath ) : downloads [ ""Cached"" ] += 1 downloads [ ""CacheSize"" ] += k [ ""Size"" ] continue downloads [ ""Downloads"" ] += 1 downloads [ ""DownloadSize"" ] += k [ ""Size"" ] client. download_file ( bucket, k [ ""Key"" ], fpath ) log. 
debug ( ""Downloaded:%d Size:%d Cached:%d Size:%s Prefix:%s"", downloads [ ""Downloads"" ], downloads [ ""DownloadSize"" ], downloads [ ""Cached"" ], ",False,rt in k['Key'],rt in files,0.6663093566894531 4724,"def get_latest_from_github ( package_path = ""azure-cli"" ) : try : import requests git_url = ""https://raw.githubusercontent.com/Azure/azure-cli/master/src/{}/setup.py"". format ( package_path ) response = requests. get ( git_url, timeout = 10 ) if response. status_code!= 200 : logger. info ( ""Failed to fetch the latest version from '%s' with status code '%s' and reason '%s'"", git_url, response. status_code, response. reason, ) return None for line in response. iter_lines ( ) : txt = line. decode ( ""utf-8"", errors = ""ignore"" ) if txt. startswith ( ""VERSION"" ) : match = re. search ( r""VERSION = \""(.*)\""$"", txt ) if : return match. group ( 1 ) except Exception as ex : logger. info ( ""Failed to get the latest version from '%s'. %s"", git_url, str ( ex ) ) return None",True,match,match,0.6739219427108765 4725,"def removeItem ( self, event ) : """"""Double Left Click - remove module"""""" if event. GetModifiers ( ) == wx. MOD_CONTROL : return row, _ = self. HitTest ( event. Position ) if row!= - 1 and row not in self. blanks and isinstance ( self. mods [ row ], Module ) : col = self. getColumn ( event. Position ) if : try : mod = self. mods [ row ] except IndexError : return if not isinstance ( mod, Module ) or mod. isEmpty : return if event. GetModifiers ( ) == wx. MOD_ALT : fit = Fit. getInstance ( ). getFit ( self. activeFitID ) positions = getSimilarModPositions ( fit. modules, mod ) self. mainFrame. command. Submit ( cmd. GuiRemoveLocalModuleCommand ( fitID = self. activeFitID, positions = positions ) ) else : self. removeModule ( mod ) else : if ""wxMSW"" in wx. PlatformInfo : self. click ( event )",False,col != self.getColIndex(State),col != -1,0.6589416265487671 4726,"def times ( self, value : int ) : if value is None : self. _times = None else : try : candidate = int ( value ) except ValueError : raise BarException ( f""cannot set repeat times to: {value!r}"" ) if candidate < 0 : raise BarException ( f""cannot set repeat times to a value less than zero: {value}"" ) if : raise BarException ( ""cannot set repeat times on a start Repeat"" ) self. _times = candidate",False,self.direction == 'start',self._times is None,0.6541805267333984 4727,"def out ( parent, attr, indent = 0 ) : val = getattr ( parent, attr ) prefix = ""%s%s:"" % ( "" "" * indent, attr. replace ( ""_"", ""-"" ) ) if val is None : cli. out ( prefix ) else : if : val = [ flag_util. encode_flag_val ( c. value ) for c in val ] cli. out ( ""%s %s"" % ( prefix, flag_util. encode_flag_val ( val ) ) )",False,attr == 'choices',"isinstance(val, bool)",0.6655404567718506 4728,"def execute ( self, context ) : try : bpy. context. window. cursor_set ( ""WAIT"" ) ng = context. space_data. node_tree if : build_update_list ( ng ) process_tree ( ng ) except : pass finally : bpy. context. window. cursor_set ( ""DEFAULT"" ) return { ""FINISHED"" }",True,ng,ng,0.6923580169677734 4729,"def updateAutoSIPrefix ( self ) : if self. label. isVisible ( ) : if : _range = 10 ** np. array ( self. range ) else : _range = self. range ( scale, prefix ) = fn. siScale ( max ( abs ( _range [ 0 ] * self. scale ), abs ( _range [ 1 ] * self. scale ) ) ) if self. labelUnits == """" and prefix in [ ""k"", ""m"", ] : scale = 1.0 prefix = """" self. autoSIPrefixScale = scale self. labelUnitPrefix = prefix else : self. 
autoSIPrefixScale = 1.0 self. _updateLabel ( )",False,self.logMode,self.autoSIPrefixScale == 0.0,0.6592258214950562 4730,"def python_unique ( data ) : unique = [ data [ 0 ] ] unique_indices = [ 0 ] unique_inverse = [ 0 ] unique_count = [ 1 ] for index, d in enumerate ( data [ 1 : ] ) : if : idx = unique. index ( d ) unique_inverse. append ( idx ) unique_count [ idx ] += 1 else : unique. append ( d ) unique_indices. append ( index ) unique_inverse. append ( len ( unique ) - 1 ) unique_count. append ( 1 ) return unique, unique_indices, unique_inverse, unique_count",True,d in unique,d in unique,0.6762837171554565 4731,"def validate ( self ) : if not super ( GroupForm, self ). validate ( ) : return False result = True permission_fields = ( self. editpost, self. deletepost, self. deletetopic, self. posttopic, self. postreply, self. mod_edituser, self. mod_banuser, self. viewhidden, self. makehidden, ) group_fields = [ self. admin, self. super_mod, self. mod, self. banned, self. guest ] if self. guest. data : for field in permission_fields : if : field. errors. append ( _ ( ""Can't assign any permissions to this group."" ) ) result = False checked = [ ] for field in group_fields : if field. data and field. data in checked : if len ( checked ) > 1 : field. errors. append ( ""A group can't have multiple group types."" ) result = False else : checked. append ( field. data ) return result",True,field.data,field.data,0.6623830795288086 4732,"def anonymize_batch_kwargs ( self, batch_kwargs ) : anonymized_batch_kwarg_keys = [ ] for batch_kwarg_key in batch_kwargs. keys ( ) : if : anonymized_batch_kwarg_keys. append ( batch_kwarg_key ) else : anonymized_batch_kwarg_keys. append ( self. anonymize ( batch_kwarg_key ) ) return anonymized_batch_kwarg_keys",False,batch_kwarg_key in self._ge_batch_kwarg_keys,batch_kwarg_key in ['TAB >,0.6509423851966858 4733,"def format_help ( self ) : extension_version = None extension_name = None try : if : extension_name = self. command_source. extension_name extension_version = get_extension ( self. command_source. extension_name ). version except Exception : pass telemetry. set_command_details ( command = self. prog [ 3 : ], extension_name = extension_name, extension_version = extension_version, ) telemetry. set_success ( summary = ""show help"" ) super ( AzCliCommandParser, self ). format_help ( )",False,"isinstance(self.command_source, ExtensionCommandSource)","hasattr(self.command_source, 'extension_name')",0.6508350372314453 4734,"def on_press_release ( x ) : """"""Keyboard callback function."""""" global is_recording, enable_trigger_record press = keyboard. KeyboardEvent ( ""down"", 28, ""space"" ) release = keyboard. KeyboardEvent ( ""up"", 28, ""space"" ) if x. event_type == ""down"" and x. name == press. name : if ( not is_recording ) and enable_trigger_record : sys. stdout. write ( ""Start Recording... "" ) sys. stdout. flush ( ) is_recording = True if x. event_type == ""up"" and x. name == release. name : if : is_recording = False",False,is_recording == True,enable_trigger_record,0.6579296588897705 4735,"def process_webhook_prop ( namespace ) : if not isinstance ( namespace. webhook_properties, list ) : return result = { } for each in namespace. webhook_properties : if : if ""="" in each : key, value = each. split ( ""="", 1 ) else : key, value = each, """" result [ key ] = value namespace. 
webhook_properties = result",True,each,each,0.6774806976318359 4736,"def get ( self, name, default = None ) : """"""returns the value of an attribute or some default value if the attribute was not set"""""" if name in self. required_properties : return self. __dict__ [ name ] model_instances = self. _var_name_to_model_instances. get ( name, self. _additional_properties_model_instances ) values = [ ] if model_instances : for model_instance in model_instances : if : v = model_instance. _data_store [ name ] if v not in values : values. append ( v ) len_values = len ( values ) if len_values == 0 : return default elif len_values == 1 : return values [ 0 ] elif len_values > 1 : raise ApiValueError ( ""Values stored for property {0} in {1} differ when looking "" ""at self and self's composed instances. All values must be "" ""the same"". format ( name, type ( self ). __name__ ), [ e for e in [ self. _path_to_item, name ] if e ], )",True,name in model_instance._data_store,name in model_instance._data_store,0.6559637784957886 4737,"def _wrapped ( * args, ** kwargs ) : try : return func ( * args, ** kwargs ) except Exception : now = time. time ( ) if : logger. exception ( ""failed to emit log to external aggregator"" ) self. last_log_emit = now raise",False,now - self.last_log_emit > 10,self.last_log_emit > now,0.6558755040168762 4738,"def aiter_cogs ( cls ) -> AsyncIterator [ Tuple [ str, str ] ] : yield ""Core"", ""0"" for _dir in data_manager. cog_data_path ( ). iterdir ( ) : fpath = _dir / ""settings.json"" if not fpath. exists ( ) : continue with fpath. open ( ) as f : try : data = json. load ( f ) except json. JSONDecodeError : continue if : continue cog_name = _dir. stem for cog_id, inner in data. items ( ) : if not isinstance ( inner, dict ) : continue yield cog_name, cog_id",True,"not isinstance(data, dict)","not isinstance(data, dict)",0.654529333114624 4739,"def start ( self, foreground = False ) : log. info ( ""Starting"" ) if not Platform. is_windows ( ) : pid = self. pid ( ) if : if is_my_process ( pid ) : log. error ( ""Not starting, another instance is already running"" "" (using pidfile {0})"". format ( self. pidfile ) ) sys. exit ( 1 ) else : log. warn ( ""pidfile doesn't contain the pid of an agent process."" "" Starting normally"" ) if not foreground : self. daemonize ( ) self. write_pidfile ( ) else : log. debug ( ""Skipping pidfile check for Windows"" ) self. run ( )",True,pid,pid,0.7102892398834229 4740,"def _save_loop ( self ) : """"""Helper method for the saving thread to wait and execute save requests."""""" while True : with self. _save_condition_variable : while not self. _export_dir : self. _save_condition_variable. wait ( ) if : return if self. _saving_checkpoint : logging. info ( ""Saving checkpoint to %s"", self. _export_dir ) self. _policy_saver. save_checkpoint ( self. _export_dir ) else : logging. info ( ""Saving policy to %s"", self. _export_dir ) self. _policy_saver. save ( self. _export_dir ) self. _export_dir = None self. _save_condition_variable. notify ( )",False,self._join_save_thread,self._export_dir is None,0.6570258140563965 4741,"def _extract_tagstyle_docs_raises ( self ) : """""" """""" data = ""\n"". join ( [ d. rstrip ( ). replace ( self. docs [ ""out"" ] [ ""spaces"" ], """", 1 ) for d in self. docs [ ""in"" ] [ ""raw"" ]. splitlines ( ) ] ) listed = 0 loop = True maxi = 10000 i = 0 while loop : i += 1 if i > maxi : loop = False start, end = self. dst. 
get_raise_indexes ( data ) if start >= 0 : param = data [ start : end ] desc = """" start, end = self. dst. get_raise_description_indexes ( data, prev = end ) if : desc = data [ start : end ]. strip ( ) self. docs [ ""in"" ] [ ""raises"" ]. append ( ( param, desc ) ) data = data [ end : ] listed += 1 else : loop = False if i > maxi : print ( ""WARNING: an infinite loop was reached while extracting docstring parameters (>10000). This should never happen!!!"" )",False,start > 0,start >= 0,0.6678239703178406 4742,"def items ( self ) : dict = { } for userdir in self. XDG_DIRS. keys ( ) : prefix = self. get ( userdir ). strip ( '""' ). split ( ""/"" ) [ 0 ] if : path = ( os. getenv ( ""HOME"" ) + ""/"" + ""/"". join ( self. get ( userdir ). strip ( '""' ). split ( ""/"" ) [ 1 : ] ) ) else : path = self. get ( userdir ). strip ( '""' ) dict [ userdir ] = path return dict. items ( )",False,prefix,prefix.startswith(prefix),0.6821005940437317 4743,"def create_new_runtime_config ( ) : conf_dict = { } with open ( conf_path, ""r"" ) as fin : conf_dict = json. loads ( fin. read ( ) ) if not conf_dict : if : raise ValueError ( ""config file {} dose not exist, please check!"". format ( conf_path ) ) raise ValueError ( ""{} "" ) conf_dict [ ""initiator"" ] [ ""party_id"" ] = guest_party_id conf_dict [ ""job_parameters"" ] [ ""work_mode"" ] = work_mode conf_dict [ ""job_parameters"" ] [ ""backend"" ] = backend conf_dict [ ""role"" ] [ ""guest"" ] = [ guest_party_id ] conf_dict [ ""role"" ] [ ""host"" ] = [ host_party_id ] new_config_path = gen_unique_path ( ) with open ( new_config_path, ""w"" ) as fout : json_str = json. dumps ( conf_dict, indent = 1 ) fout. write ( json_str + ""\n"" ) return new_config_path",False,not os.path.isfile(conf_dict),not os.path.exists(conf_path),0.6466238498687744 4744,"def add ( request ) : Document = get_document_model ( ) DocumentForm = get_document_form ( Document ) if request. method == ""POST"" : doc = Document ( uploaded_by_user = request. user ) form = DocumentForm ( request. POST, request. FILES, instance = doc, user = request. user ) if : doc. file_size = doc. file. size doc. file. seek ( 0 ) doc. _set_file_hash ( doc. file. read ( ) ) doc. file. seek ( 0 ) form. save ( ) search_index. insert_or_update_object ( doc ) messages. success ( request, _ ( ""Document '{0}' added."" ). format ( doc. title ), buttons = [ messages. button ( reverse ( ""wagtaildocs:edit"", args = ( doc. id, ) ), _ ( ""Edit"" ) ) ], ) return redirect ( ""wagtaildocs:index"" ) else : messages. error ( request, _ ( ""The document could not be saved due to errors."" ) ) else : form = DocumentForm ( user = request.",False,form.is_valid(),doc.id >= 0,0.650469958782196 4745,"def test_suite ( ) : suite = unittest. TestSuite ( ) for fn in os. listdir ( here ) : if : modname = ""distutils.tests."" + fn [ : - 3 ] __import__ ( modname ) module = sys. modules [ modname ] suite. addTest ( module. test_suite ( ) ) return suite",False,fn.startswith('test') and fn.endswith('.py'),fn.endswith('.tests'),0.6471507549285889 4746,"def check_status ( self ) : try : du = psutil. disk_usage ( ""/"" ) if : raise ServiceWarning ( ""{host} {percent}% disk usage exceeds {disk_usage}%"". format ( host = host, percent = du. percent, disk_usage = DISK_USAGE_MAX ) ) except ValueError as e : self. add_error ( ServiceReturnedUnexpectedResult ( ""ValueError"" ), e )",False,DISK_USAGE_MAX and du.percent >= DISK_USAGE_MAX,not du,0.6570388674736023 4747,"def parse_hpk_response ( self, lines ) : results = { } lines = [ x. strip ( ). 
split ( "":"" ) for x in lines ] curpub = None for line in lines : if line [ 0 ] == ""info"" : pass elif line [ 0 ] == ""pub"" : curpub = line [ 1 ] validity = line [ 6 ] if line [ 5 ] : if int ( line [ 5 ] ) < time. time ( ) : validity += ""e"" results [ curpub ] = { ""created"" : datetime. fromtimestamp ( int ( line [ 4 ] ) ), ""keytype_name"" : _ ( openpgp_algorithms. get ( int ( line [ 2 ] ), ""Unknown"" ) ), ""keysize"" : line [ 3 ], ""validity"" : validity, ""uids"" : [ ], ""fingerprint"" : curpub, } elif : email, name, comment = parse_uid ( urllib. unquote ( line [ 1 ] ) ) results [ curpub ] [ ""uids"" ]. append ( { ""name"" : name, ""email"" : email, ""comment"" : comment } ) return results",False,line[0] == 'uid',line[1],0.6540929079055786 4748,"def on_strokes_edited ( self ) : strokes = self. _strokes ( ) if strokes : translation = self. _engine. raw_lookup ( strokes ) if : fmt = _ ( ""{strokes} maps to {translation}"" ) else : fmt = _ ( ""{strokes} is not in the dictionary"" ) info = self. _format_label ( fmt, ( strokes, ), translation ) else : info = """" self. strokes_info. setText ( info )",False,translation is not None,translation,0.6673803329467773 4749,"def _set_xflux_setting ( self, ** kwargs ) : for key, value in kwargs. items ( ) : if : if key == ""color"" : self. _set_xflux_screen_color ( value ) self. _current_color = str ( value ) if self. state == self. states [ ""PAUSED"" ] : self. state = self. states [ ""RUNNING"" ] else : self. _xflux. sendline ( self. _settings_map [ key ] + str ( value ) ) self. _c ( )",True,key in self._settings_map,key in self._settings_map,0.6530376076698303 4750,"def autoformat_filter_conv2d ( fsize, in_depth, out_depth ) : if isinstance ( fsize, int ) : return [ fsize, fsize, in_depth, out_depth ] elif isinstance ( fsize, ( tuple, list, tf. TensorShape ) ) : if : return [ fsize [ 0 ], fsize [ 1 ], in_depth, out_depth ] else : raise Exception ( ""filter length error: "" + str ( len ( fsize ) ) + "", only a length of 2 is supported."" ) else : raise Exception ( ""filter format error: "" + str ( type ( fsize ) ) )",True,len(fsize) == 2,len(fsize) == 2,0.6572926044464111 4751,"def get_jmx_checks ( confd_path = None, auto_conf = False ) : jmx_checks = [ ] if not confd_path : confd_path = get_confd_path ( ) if auto_conf : path = confd_path + ""/auto_conf"" else : path = confd_path for conf in glob. glob ( os. path. join ( path, ""*.yaml"" ) ) : filename = os. path. basename ( conf ) check_name = filename. split ( ""."" ) [ 0 ] if os. path. exists ( conf ) : with open ( conf, ""r"" ) as f : try : check_config = yaml. load ( f. read ( ), Loader = yLoader ) assert check_config is not None except Exception : log. error ( ""Unable to parse yaml config in %s"" % conf ) continue init_config = check_config. get ( ""init_config"", { } ) or { } if init_config. get ( ""is_jmx"" ) or check_name in JMX_CHECKS : if auto_conf : jmx_checks. append ( check_name ) else : jmx_checks. append ( { ""check_config"" : check_",False,check not in jmx_checks,jmx_checks,0.6584567427635193 4752,"def get_netrc_auth ( url, raise_errors = False ) : """"""Returns the Requests tuple auth for a given url from netrc."""""" try : from netrc import netrc, NetrcParseError netrc_path = None for f in NETRC_FILES : try : loc = os. path. expanduser ( ""~/{0}"". format ( f ) ) except KeyError : return if os. path. exists ( loc ) : netrc_path = loc break if netrc_path is None : return ri = urlparse ( url ) splitstr = b"":"" if isinstance ( url, str ) : splitstr = splitstr. 
decode ( ""ascii"" ) host = ri. netloc. split ( splitstr ) [ 0 ] try : _netrc = netrc ( netrc_path ). authenticators ( host ) if : login_i = 0 if _netrc [ 0 ] else 1 return ( _netrc [ login_i ], _netrc [ 2 ] ) except ( NetrcParseError, IOError ) : ",False,_netrc,raise_errors,0.6724086999893188 4753,"def execute_calls ( self, calls ) : results = [ ] start_time = time. time ( ) max_processes = scheduler_config. config. max_transfer_processes for method_call in calls : results. append ( method_call. execute_on ( self ) ) if : self. _wait_for_some_async_commands ( ) self. wait_for_all_async_commands ( ) duration = time. time ( ) - start_time if duration > self. _WARNING_DURATION : self. _report_long_execution ( calls, duration ) warnings = self. warnings self. warnings = [ ] return dict ( results = results, warnings = warnings )",False,len(self._subcommands) >= max_processes,max_processes > 0,0.6498922109603882 4754,"def compare_version ( self, other : Union [ None, ""Software"", str ] ) -> int : if other is None : return 1 if isinstance ( other, Software ) : other = ""{}{}"". format ( other. version, other. patch or """" ) else : other = str ( other ) mx = re. match ( r""^([\d\.]+\d+)(.*)$"", other ) if mx is not None : oversion, opatch = mx. group ( 1 ), mx. group ( 2 ). strip ( ) else : oversion, opatch = other, """" if self. version < oversion : return - 1 elif self. version > oversion : return 1 spatch = self. patch or """" if self. product == Product. DropbearSSH : if not re. match ( r""^test\d.*$"", opatch ) : opatch = ""z{}"". format ( opatch ) if not re. match ( r""^test\d.*$"", spatch ) : spatch = ""z{}"". format ( spatch ) elif self. product == Product. OpenSSH : mx1 = re. match ( r""^p(\d).*"", opatch ) mx2 = re. match ( r""^p(\d).*"", spatch ) if not ( bool ( mx1 ) and bool ( mx2 ) ) : if : opatch = mx1. group ( 1 ) if mx2 is not None : spatch = mx2. group ( 1 ) if ( ( spatch == """" ) and ( op",True,mx1 is not None,mx1 is not None,0.6599274277687073 4755,"def accept ( self, node : Union [ Statement, Expression ] ) -> Optional [ Value ] : """"""Transform an expression or a statement."""""" with self. catch_errors ( node. line ) : if : try : res = node. accept ( self. visitor ) res = self. coerce ( res, self. node_type ( node ), node. line ) except UnsupportedException : res = Register ( self. node_type ( node ) ) return res else : try : node. accept ( self. visitor ) except UnsupportedException : pass return None",False,"isinstance(node, Expression)",self.node_type(node),0.6584411263465881 4756,"def get_summary_writer ( self, name = ""DeepSpeedJobName"", base = os. path. join ( os. environ [ ""HOME"" ], ""tensorboard"" ) ) : if self. tensorboard_output_path ( ) : base_dir = self. tensorboard_output_path ( ) job_name = self. tensorboard_job_name ( ) log_dir = os. path. join ( base_dir, job_name ) else : if self. tensorboard_job_name ( ) : name = self. tensorboard_job_name ( ) if : infra_job_id = os. environ [ ""DLWS_JOB_ID"" ] elif ""DLTS_JOB_ID"" in os. environ : infra_job_id = os. environ [ ""DLTS_JOB_ID"" ] else : infra_job_id = ""unknown-job-id"" summary_writer_dir_name = os. path. join ( infra_job_id, ""logs"" ) log_dir = os. path. join ( base, summary_writer_dir_name, name ) os. makedirs ( log_dir, exist_ok = True ) return SummaryWriter ( log_dir = log_dir )",True,'DLWS_JOB_ID' in os.environ,'DLWS_JOB_ID' in os.environ,0.6545872688293457 4757,"def get_map ( self, map_filename ) : map_file = os. path. join ( server_info [ ""maps_path"" ], map_filename ) if not os. path. 
exists ( map_file ) : data = self. cloud. get_map ( map_filename ) if : raise Exception ( ""map"", ""Could not download map from main server."" ) map_dir = os. path. split ( map_file ) [ 0 ] if not os. path. exists ( map_dir ) : os. makedirs ( map_dir ) f = open ( map_file, ""w"" ) f. write ( data ) f. close ( ) else : f = open ( map_file, ""r"" ) data = f. read ( ) f. close ( ) return data",False,data == None,not data,0.6683326959609985 4758,"def read_blob ( self ) : r = """" while 1 : s = self. peekn ( 2 ) if : r += self. getn ( 2 ) elif viml_eqregh ( s, ""^\\.[0-9A-Fa-f]$"" ) : r += self. getn ( 1 ) elif viml_eqregh ( s, ""^[0-9A-Fa-f][^0-9A-Fa-f]$"" ) : raise VimLParserException ( Err ( ""E973: Blob literal should have an even number of hex characters:"" + s, self. getpos ( ), ) ) else : break return r",False,"viml_eqregh(s, '^[0-9A-Fa-f][0-9A-Fa-f]$')",s == '',0.6515358686447144 4759,"def fave_slices ( self, user_id : Optional [ int ] = None ) -> FlaskResponse : """"""Favorite slices for a user"""""" if not user_id : user_id = g. user. id qry = ( db. session. query ( Slice, models. FavStar. dttm ) . join ( models. FavStar, and_ ( models. FavStar. user_id == user_id, models. FavStar. class_name == ""slice"", Slice. id == models. FavStar. obj_id, ), ) . order_by ( models. FavStar. dttm. desc ( ) ) ) payload = [ ] for o in qry. all ( ) : dash = { ""id"" : o. Slice. id, ""title"" : o. Slice. slice_name, ""url"" : o. Slice. slice_url, ""dttm"" : o. dttm, ""viz_type"" : o. Slice. viz_type, } if : user = o. Slice. created_by dash [ ""creator"" ] = str ( user ) dash [ ""creator_url"" ] = ""/superset/profile/{}/"". format ( user. username ) payload. append ( dash ) return json_success ( json. dumps ( payload, default = utils. json_int_dttm_ser",False,o.Slice.created_by,dash,0.6600077152252197 4760,"def test_error_operator ( self ) : FO = FermionOperator terms = [ ] for i in range ( 4 ) : terms. append ( FO ( ( ( i, 1 ), ( i, 0 ) ), 0.018505508252042547 ) ) terms. append ( FO ( ( ( i, 1 ), ( ( i + 1 ) % 4, 0 ) ), - 0.012337005501361697 ) ) terms. append ( FO ( ( ( i, 1 ), ( ( i + 2 ) % 4, 0 ) ), 0.0061685027506808475 ) ) terms. append ( FO ( ( ( i, 1 ), ( ( i + 3 ) % 4, 0 ) ), - 0.012337005501361697 ) ) terms. append ( normal_ordered ( FO ( ( ( i, 1 ), ( ( i + 1 ) % 4, 1 ), ( i, 0 ), ( ( i + 1 ) % 4, 0 ) ), 3.1830988618379052, ) ) ) if : terms. append ( normal_ordered ( FO ( ( ( i, 1 ), ( ( i + 2 ) % 4, 1 ), ( i, 0 ), ( ( i + 2 ) % 4, 0 ) ), 22.281692032865351, <",False,i // 2,len(self.tabs) > 0,0.6683347225189209 4761,"def _get_data ( self, root, mode, ** kwargs ) : default_root = DATA_HOME filename, data_hash, field_indices, num_discard_samples = self. SPLITS [ mode ] fullname = ( os. path. join ( default_root, filename ) if root is None else os. path. join ( os. path. expanduser ( root ), filename ) ) if not os. path. exists ( fullname ) or ( data_hash and not md5file ( fullname ) == data_hash ) : if : warnings. warn ( ""md5 check failed for {}, download {} data to {}"". format ( filename, self. __class__. __name__, default_root ) ) path = get_path_from_url ( self. URL, default_root, self. MD5 ) fullname = os. path. join ( default_root, filename ) super ( LCQMC, self ). __init__ ( fullname, field_indices = field_indices, num_discard_samples = num_discard_samples, ** kwargs )",False,root is not None,md5file(fullname)!= 'TESTS_TAB > [DEFAULT_DATA],0.6573939919471741 4762,"def post_config_hook ( self ) : if not self. py3. 
check_commands ( ""thunderbird"" ) : raise Exception ( STRING_NOT_INSTALLED ) if not self. profile : directory = Path ( ""~/.thunderbird"" ). expanduser ( ) profile_ini = directory / ""profiles.ini"" profile = [ ] with profile_ini. open ( ) as f : for line in f : if line. startswith ( ""Path="" ) : profile. append ( ""{}/{}"". format ( directory, line. split ( ""Path="" ) [ - 1 ]. strip ( ) ) ) if : raise Exception ( STRING_NO_PROFILE ) self. profile = profile [ 0 ] self. profile = Path ( self. profile ). expanduser ( ) self. path = self. profile / ""calendar-data/local.sqlite"" self. init_datetimes = [ ] for word in self. format_datetime : if ( self. py3. format_contains ( self. format_todo, word ) ) and ( word in self. format_datetime ) : self. init_datetimes. append ( word ) self. thresholds_init = { } for name in [ ""format"", ""format_todo"" ] : self. thresholds_init [ name ] = self. py3. get_color_names_list ( getattr ( self, name ) )",False,not len(profile),self.profile is None,0.6559419631958008 4763,"def get_dataset ( dataset, args ) : if dataset. lower ( ) == ""coco"" : train_dataset = gdata. COCOInstance ( splits = ""instances_train2017"" ) val_dataset = gdata. COCOInstance ( splits = ""instances_val2017"", skip_empty = False ) starting_id = 0 if : length = len ( val_dataset ) shard_len = length // hvd. size ( ) rest = length % hvd. size ( ) starting_id = shard_len * hvd. rank ( ) + min ( hvd. rank ( ), rest ) val_metric = COCOInstanceMetric ( val_dataset, args. save_prefix + ""_eval"", use_ext = args. use_ext, starting_id = starting_id, ) else : raise NotImplementedError ( ""Dataset: {} not implemented."". format ( dataset ) ) if : val_dataset = val_dataset. shard ( hvd. size ( ), hvd. rank ( ) ) return train_dataset, val_dataset, val_metric",False,args.horovod and MPI,use_ext,0.6542540192604065 4764,"def __init__ ( self, client, job_id, callback = None ) : self. client = client self. job_id = job_id with client. _jobs_lock : job = client. _jobs. get ( job_id ) self. event = None if : self. event = job. get ( ""__ready"" ) if self. event is None : self. event = job [ ""__ready"" ] = Event ( ) job [ ""__callback"" ] = callback",False,job,self.event is None,0.7116715908050537 4765,"def lutime ( f, times ) : if os. utime in os. supports_follow_symlinks : os. utime ( f, times, follow_symlinks = False ) elif not os. path. islink ( f ) : os. utime ( f, times ) else : try : if : fmt_time = lambda sec : datetime. fromtimestamp ( sec ). strftime ( ""%Y%m%d%H%M.%S"" ) atime, mtime = times sp. check_call ( [ ""touch"", ""-h"", f, ""-a"", ""-t"", fmt_time ( atime ) ] ) sp. check_call ( [ ""touch"", ""-h"", f, ""-m"", ""-t"", fmt_time ( mtime ) ] ) else : sp. check_call ( [ ""touch"", ""-h"", f ] ) except sp. CalledProcessError : pass logger. warning ( ""Unable to set utime on symlink {}. Your Python build does not support it."". format ( f ) ) return None",False,times,os.path.isfile(f),0.7118969559669495 4766,"def getDefaultCompletion ( tree, node, lang ) : if lang == ""XSLT"" : if node is not None and not tree. namespace ( node ) : output = tree. tags. get ( ""http://www.w3.org/1999/XSL/Transform"", [ ] ). get ( ""output"" ) if : lang = output. attrib. get ( ""method"" ). upper ( ) publicId = output. attrib. get ( ""doctype-public"" ) systemId = output. attrib. get ( ""doctype-system"" ) default_dataset_info = default_completion. 
get ( lang ) if publicId or systemId : default_dataset_info = ( publicId, systemId, default_dataset_info [ 2 ] ) return default_dataset_info return None return default_completion. get ( lang )",False,output is not None,output,0.6627018451690674 4767,"def _fractional_part ( self, n, expr, evaluation ) : n_sympy = n. to_sympy ( ) if n_sympy. is_constant ( ) : if : positive_integer_part = ( Expression ( ""Floor"", n ). evaluate ( evaluation ). to_python ( ) ) result = n - positive_integer_part else : negative_integer_part = ( Expression ( ""Ceiling"", n ). evaluate ( evaluation ). to_python ( ) ) result = n - negative_integer_part else : return expr return from_python ( result )",False,n_sympy >= 0,expr.is_python(),0.6639846563339233 4768,"def todict ( obj ) : if isinstance ( obj, dict ) : return { k : todict ( v ) for k, v in obj. items ( ) } elif hasattr ( obj, ""__slots__"" ) : return { k : todict ( getattr ( obj, k ) ) for k in obj. __slots__ } elif hasattr ( obj, ""__iter__"" ) and not isinstance ( obj, str ) : return type ( obj ) ( todict ( v ) for v in obj ) elif hasattr ( obj, ""__dict__"" ) : return { k : todict ( v ) for k, v in obj. __dict__. items ( ) if : } else : return obj",False,not callable(v) and (not k.startswith('_')),"hasattr(obj, '__iter__')",0.6522979140281677 4769,"def get_current_events_users ( calendar ) : now = timezone. make_aware ( datetime. now ( ), timezone. get_current_timezone ( ) ) result = [ ] day = Day ( calendar. events. all ( ), now ) for o in day. get_occurrences ( ) : if : usernames = o. event. title. split ( "","" ) for username in usernames : result. append ( User. objects. get ( username = username. strip ( ) ) ) return result",False,o.start <= now <= o.end,o.event,0.6572772264480591 4770,"def import_suffix_generator ( a_block, datatype = False ) : if datatype is False : for name, suffix in iteritems ( a_block. component_map ( Suffix ) ) : if : yield name, suffix else : for name, suffix in iteritems ( a_block. component_map ( Suffix ) ) : if ( suffix. import_enabled ( ) is True ) and ( suffix. get_datatype ( ) is datatype ) : yield name, suffix",False,suffix.import_enabled() is True,datatype is False,0.6510511636734009 4771,"def action_delete ( self, ids ) : try : count = 0 for pk in ids : if : count += 1 flash ( ngettext ( ""Record was successfully deleted."", ""%(count)s records were successfully deleted."", count, count = count, ), ""success"", ) except Exception as ex : flash ( gettext ( ""Failed to delete records. %(error)s"", error = str ( ex ) ), ""error"" )",False,self.delete_model(self.get_one(pk)),"self.delete_record(pk, count)",0.6515241861343384 4772,"def from_csv ( fp, field_names = None, ** kwargs ) : fmtparams = { } for param in [ ""delimiter"", ""doublequote"", ""escapechar"", ""lineterminator"", ""quotechar"", ""quoting"", ""skipinitialspace"", ""strict"", ] : if : fmtparams [ param ] = kwargs. pop ( param ) if fmtparams : reader = csv. reader ( fp, ** fmtparams ) else : dialect = csv. Sniffer ( ). sniff ( fp. read ( 1024 ) ) fp. seek ( 0 ) reader = csv. reader ( fp, dialect ) table = PrettyTable ( ** kwargs ) if field_names : table. field_names = field_names else : if py3k : table. field_names = [ x. strip ( ) for x in next ( reader ) ] else : table. field_names = [ x. strip ( ) for x in reader. next ( ) ] for row in reader : table. add_row ( [ x. strip ( ) for x in row ] ) return table",True,param in kwargs,param in kwargs,0.6698812246322632 4773,"def _bootstrap ( self ) : marker = self. _get_export_marker_from_db ( ) LOG. 
info ( ""Using marker %s..."" % marker ) missed_executions = self. _get_missed_executions_from_db ( export_marker = marker ) LOG. info ( ""Found %d executions not exported yet..."", len ( missed_executions ) ) for missed_execution in missed_executions : if : continue execution_api = ActionExecutionAPI. from_model ( missed_execution, mask_secrets = True ) try : LOG. debug ( ""Missed execution %s"", execution_api ) self. pending_executions. put_nowait ( execution_api ) except : LOG. exception ( ""Failed adding execution to in-memory queue."" ) continue LOG. info ( ""Bootstrapped executions..."" )",False,missed_execution.status not in COMPLETION_STATUSES,missed_execution == export_marker,0.6549334526062012 4774,"def print_resolutions ( self, *, part_name : str, stage_packages_exist : bool, echoer ) -> None : if not self. _stage_packages_dependencies and not self. _unhandled_dependencies : return if self. _stage_packages_dependencies : stage_packages_list = _get_formatted_list ( self. _stage_packages_dependencies ) if : echoer. warning ( _MSG_EXTEND_STAGE_PACKAGES. format ( part_name = part_name, stage_packages = stage_packages_list ) ) else : echoer. warning ( _MSG_ADD_STAGE_PACKAGES. format ( part_name = part_name, stage_packages = stage_packages_list ) ) if self. _unhandled_dependencies : unhandled_list = _get_formatted_list ( self. _unhandled_dependencies ) echoer. warning ( _MSG_UNHANDLED_DEPENDENCIES. format ( part_name = part_name, unhandled_list = unhandled_list ) )",True,stage_packages_exist,stage_packages_exist,0.6587600111961365 4775,"def __init__ ( self, * args, ** kwargs ) : super ( ). __init__ ( * args, ** kwargs ) self. custom_fields = [ ] self. obj_type = ContentType. objects. get_for_model ( self. model ) custom_fields = CustomField. objects. filter ( content_types = self. obj_type ) for cf in custom_fields : if : self. nullable_fields. append ( cf. name ) self. fields [ cf. name ] = cf. to_form_field ( set_initial = False, enforce_required = False ) self. custom_fields. append ( cf. name )",False,not cf.required,cf.nullable,0.6583252549171448 4776,"def on_key_press ( self, k, m ) : if self. paused : return False if self. used_key : return False if k in ( key. LEFT, key. RIGHT, key. DOWN, key. UP, key. SPACE ) : if : self. model. block_left ( ) elif k == key. RIGHT : self. model. block_right ( ) elif k == key. DOWN : self. model. block_down ( ) elif k == key. UP : self. model. block_rotate ( ) elif k == key. SPACE : self. elapsed = 0 self. model. block_drop ( ) self. used_key = True return True return False",True,k == key.LEFT,k == key.LEFT,0.6671327352523804 4777,"def save ( self, session = None, to = None, pickler = None ) : if to and pickler : self. _save_to = ( pickler, to ) if self. _save_to and len ( self ) > 0 : with self. _lock : pickler, fn = self. _save_to if : session. ui. mark ( _ ( ""Saving %s state to %s"" ) % ( self, fn ) ) pickler ( self, fn )",True,session,session,0.6814517974853516 4778,"def visit_FunctionCall ( self, node : qlast. FunctionCall ) -> None : if isinstance ( node. func, tuple ) : self. write ( f""{ident_to_str(node.func[0])}::{ident_to_str(node.func[1])}"" ) else : self. write ( ident_to_str ( node. func ) ) self. write ( ""("" ) for i, arg in enumerate ( node. args ) : if i > 0 : self. write ( "", "" ) self. visit ( arg ) if node. kwargs : if : self. write ( "", "" ) for i, ( name, arg ) in enumerate ( node. kwargs. items ( ) ) : if i > 0 : self. write ( "", "" ) self. write ( f""{edgeql_quote.quote_ident(name)} := "" ) self. 
visit ( arg ) self. write ( "")"" ) if node. window : self. write ( "" OVER ("" ) self. _block_ws ( 1 ) if node. window. partition : self. write ( ""PARTITION BY "" ) self. visit_list ( node. window. partition, newlines = False ) self. new_lines = 1 if node. window. orderby : self. write ( ""ORDER BY "" ) self. visit_list ( node. window. orderby, separator = "" THEN"" ) self. _block_ws ( - 1 ) self. write ( "")"" )",False,node.args,len(node.kwargs.items()) > 0,0.6687539219856262 4779,"def list_entries ( self, order = False, only_servers = False ) : config_data = self. ssh_config. config_data if only_servers : new_config_data = [ ] for index, value in enumerate ( config_data ) : if : new_config_data. append ( value ) config_data = new_config_data if order : config_data = sorted ( config_data, key = itemgetter ( ""host"" ) ) return config_data",False,value.get('type') == 'entry' and value.get('host') != '*',index in only_servers,0.6496244668960571 4780,"def process ( self, resources, event = None ) : client = local_session ( self. manager. session_factory ). client ( ""shield"", region_name = ""us-east-1"" ) protections = get_type_protections ( client, self. manager. get_model ( ) ) protected_resources = { p [ ""ResourceArn"" ] for p in protections } state = self. data. get ( ""state"", False ) results = [ ] for arn, r in zip ( self. manager. get_arns ( resources ), resources ) : r [ ""c7n:ShieldProtected"" ] = shielded = arn in protected_resources if shielded and state : results. append ( r ) elif : results. append ( r ) return results",False,not shielded and (not state),not shielded and state,0.654679536819458 4781,"def handle_exception_and_die ( e ) : if hasattr ( e, ""kind"" ) : if e. kind == ""die"" : sys. stderr. write ( ""ABORT: "" + e. msg + ""\n"" ) sys. exit ( e. value ) elif : sys. stderr. write ( ""EXITING\n"" ) sys. exit ( e. value ) else : print ( str ( e ) ) sys. exit ( 1 )",False,e.kind == 'exit',e.kind == 'error',0.6578645706176758 4782,"def checkPlugs ( graphComponent ) : if isinstance ( graphComponent, Gaffer. Plug ) and not graphComponent. getName ( ). startswith ( ""__"" ) : description = Gaffer. Metadata. value ( graphComponent, ""description"" ) if : undocumentedPlugs. append ( graphComponent. fullName ( ) ) if not isinstance ( graphComponent, terminalPlugTypes ) : for plug in graphComponent. children ( Gaffer. Plug ) : checkPlugs ( plug )",False,not description or description.isspace(),description,0.6551336646080017 4783,"def inc_grad ( self, model_id : int, name : str, value : FloatsXd ) -> None : key = ( model_id, name ) if self. proxy is not None : self. proxy. inc_grad ( model_id, name, value ) elif not self. has_grad ( model_id, name ) : if hasattr ( value, ""copy"" ) : self. _grads [ key ] = value. copy ( ) elif : xp = get_array_module ( value ) self. _grads [ ( model_id, name ) ] = xp. ascontiguousarray ( value ) else : self. _grads [ ( model_id, name ) ] = value else : self. _grads [ ( model_id, name ) ] += value",False,not value.flags['C_CONTIGUOUS'],"hasattr(value, 'ascontiguousarray')",0.6503852605819702 4784,"def __call__ ( self, trainer ) : if self. available ( ) : import matplotlib. pyplot as plt else : return xp = backend. get_array_module ( self. _vars [ 0 ]. data ) stats = xp. zeros ( self. _data_shape, dtype = xp. float32 ) for i, k in enumerate ( self. _keys ) : xs = [ ] for var in self. _vars : x = getattr ( var, k, None ) if x is not None : xs. append ( x. ravel ( ) ) if xs : stat_dict = self. _statistician ( xp. 
concatenate ( xs, axis = 0 ), axis = 0, xp = xp ) stat_list = [ ] if self. _plot_mean : stat_list. append ( xp. atleast_1d ( stat_dict [ ""mean"" ] ) ) if self. _plot_std : stat_list. append ( xp. atleast_1d ( stat_dict [ ""std"" ] ) ) if : stat_list. append ( xp. atleast_1d ( stat_dict [ ""percentile"" ] ) ) stats [ i ] = xp. concatenate ( stat_list, axis = 0 ) if xp == cuda. cupy : stats = cuda. to_cpu ( stats ) self. _samples. add ( stats, idx = trainer. updater. iteration ) if self. _trigger ( trainer ) : file_path = os. path",True,self._plot_percentile,self._plot_percentile,0.6723756194114685 4785,"def _windows_extra_app_paths ( app_paths : List [ Path ] ) -> List [ Path ] : app_paths_output = app_paths. copy ( ) for app_path in app_paths : win_app_path = app_path. parent / ( app_path. stem + ""-script.py"" ) if : app_paths_output. append ( win_app_path ) win_app_path = app_path. parent / ( app_path. stem + "".exe.manifest"" ) if : app_paths_output. append ( win_app_path ) return app_paths_output",False,win_app_path.exists(),os.path.exists(win_app_path),0.6511099338531494 4786,"def evolve_sequence ( sequence, substitution_probability, indel_probability ) : result = [ ] sequence_length = len ( sequence ) insertion_choices = list ( sequence. nondegenerate_chars ) result = [ ] i = 0 while i < sequence_length : current_char = sequence [ i ] if random. random ( ) < substitution_probability : substituted_base = random. choice ( [ r for r in sequence. nondegenerate_chars if r!= current_char ] ) result. append ( substituted_base ) i += 1 elif : length = int ( np. random. triangular ( 1, 1, 10 ) ) if np. random. binomial ( 1, 0.5 ) == 0 : result. extend ( np. random. choice ( insertion_choices, size = length ) ) i += 1 else : i += length else : result. append ( str ( current_char ) ) i += 1 return sequence. __class__ ( """". join ( result ) )",False,random.random() < indel_probability,np.random.random() < indel_probability,0.6520761251449585 4787,"def _parse_update ( client, update : ""raw.types.UpdateMessagePoll"" ) : if update. poll is not None : return Poll. _parse ( client, update ) results = update. results. results chosen_option = None options = [ ] for i, result in enumerate ( results ) : if : chosen_option = i options. append ( types. PollOption ( text = """", voter_count = result. voters, data = result. option, client = client ) ) return Poll ( id = str ( update. poll_id ), question = """", options = options, total_voter_count = update. results. total_voters, is_closed = False, chosen_option = chosen_option, client = client, )",False,result.chosen,result.is_closed,0.6621267795562744 4788,"def get_nvd_identifiers ( self, _nvd_cls, _cpe_cls ) : cves = [ ] try : if : cves = [ self. id ] if self. metadata_json and self. metadata_json. get ( ""CVE"", [ ] ) : for cve_el in self. metadata_json. get ( ""CVE"", [ ] ) : if type ( cve_el ) == dict : cve_id = cve_el. get ( ""Name"", None ) elif type ( cve_el ) == str : cve_id = cve_el else : cve_id = None if cve_id and cve_id not in cves : cves. append ( cve_id ) except Exception as err : log. warn ( ""failed to gather NVD information for vulnerability due to exception: {}"". format ( str ( err ) ) ) return cves",False,self.id.startswith('CVE-'),self.id in self.ids,0.6551922559738159 4789,"def handle ( self, * args, ** options ) : from django. conf import settings style = color_style ( ) template_dirs = set ( get_template_setting ( ""DIRS"" ) ) template_dirs |= set ( options. 
get ( ""includes"", [ ] ) ) template_dirs |= set ( getattr ( settings, ""VALIDATE_TEMPLATES_EXTRA_TEMPLATE_DIRS"", [ ] ) ) if hasattr ( settings, ""TEMPLATES"" ) : settings. TEMPLATES [ 0 ] [ ""DIRS"" ] = list ( template_dirs ) else : settings. TEMPLATE_DIRS = list ( template_dirs ) settings. TEMPLATE_DEBUG = True verbosity = int ( options. get ( ""verbosity"", 1 ) ) errors = 0 for template_dir in template_dirs : for root, dirs, filenames in os. walk ( template_dir ) : for filename in filenames : if : continue if filename. endswith ( ""~"" ) : continue filepath = os. path. join ( root, filename ) if verbosity > 1 : print ( filepath ) try : get_template ( filepath ) except Exception as e : errors += 1 print ( ""%s: %s"" ",False,filename.endswith('.swp'),filename.endswith('.yaml'),0.6570688486099243 4790,"def get_all_function_symbols ( self, module = ""kernel"" ) : """"""Gets all the function tuples for the given module"""""" ret = [ ] symtable = self. type_map if module in symtable : mod = symtable [ module ] for ( addr, ( name, _sym_types ) ) in mod. items ( ) : if : addr = addr + self. shift_address ret. append ( [ name, addr ] ) else : debug. info ( ""All symbols requested for non-existent module %s"" % module ) return ret",False,self.shift_address and addr,addr in self.shift_address,0.6546943783760071 4791,"def parse_until_text ( self, * text ) : startpos = self. match_position text_re = r""|"". join ( text ) brace_level = 0 while True : match = self. match ( r""#.*\n"" ) if : continue match = self. match ( r""(\""\""\""|\'\'\'|\""|\')((? if : continue match = self. match ( r""(%s)"" % text_re ) if : if match. group ( 1 ) == ""}"" and brace_level > 0 : brace_level -= 1 continue return self. text [ startpos : self. match_position - len ( match. group ( 1 ) ) ], match. group ( 1 ) match = self. match ( r""(.*?)(?=\""|\'|#|%s)"" % text_re, re. S ) if : brace_level += match. group ( 1 ). count ( ""{"" ) brace_level -= match. group ( 1 ). count ( ""}"" ) continue raise exceptions. SyntaxException ( ""Expected: %s"" % "","". join ( text ), ** self. exception_kwargs )",False,match,not match,0.6712772250175476 4792,"def get_amount ( ref_doc, payment_account = None ) : """"""get amount based on doctype"""""" dt = ref_doc. doctype if dt in [ ""Sales Order"", ""Purchase Order"" ] : grand_total = flt ( ref_doc. grand_total ) - flt ( ref_doc. advance_paid ) elif dt in [ ""Sales Invoice"", ""Purchase Invoice"" ] : if : grand_total = flt ( ref_doc. outstanding_amount ) else : grand_total = flt ( ref_doc. outstanding_amount ) / ref_doc. conversion_rate elif dt == ""POS Invoice"" : for pay in ref_doc. payments : if pay. type == ""Phone"" and pay. account == payment_account : grand_total = pay. amount break elif dt == ""Fees"" : grand_total = ref_doc. outstanding_amount if grand_total > 0 : return grand_total else : frappe. throw ( _ ( ""Payment Entry is already created"" ) )",False,ref_doc.party_account_currency == ref_doc.currency,dt == 'Tank',0.6511203050613403 4793,"def main ( client, user_id ) : user_team_association_service = client. GetService ( ""UserTeamAssociationService"", version = ""v202005"" ) statement = ( ad_manager. StatementBuilder ( version = ""v202005"" ) . Where ( ""userId = :userId"" ) . WithBindVariable ( ""userId"", int ( user_id ) ) ) while True : response = user_team_association_service. getUserTeamAssociationsByStatement ( statement. 
ToStatement ( ) ) if : for user_team_association in response [ ""results"" ] : print ( 'User team association with user ID ""%d"" and team ID ""%d"" was' ""found.\n"" % ( user_team_association [ ""userId"" ], user_team_association [ ""teamId"" ] ) ) statement. offset += statement. limit else : break print ( ""\nNumber of results found: %s"" % response [ ""totalResultSetSize"" ] )",False,'results' in response and len(response['results']),response[0] == 'results',0.6563087105751038 4794,"def _format_entry ( entry, src ) : if entry : result = [ ] for x in entry. split ( "","" ) : x = x. strip ( ) if : result. append ( relpath ( os. path. join ( src, x ), src ) ) elif os. path. exists ( x ) : result. append ( relpath ( os. path. abspath ( x ), src ) ) else : raise RuntimeError ( ""No entry script %s found"" % x ) return "","". join ( result )",False,"os.path.exists(os.path.join(src, x))",os.path.exists(x),0.6534078121185303 4795,"def register ( self, stats : Dict [ str, Optional [ Union [ Num, Dict [ str, Num ] ] ] ], weight : Num = None, ) -> None : assert check_argument_types ( ) if self. _finished : raise RuntimeError ( ""Already finished"" ) if len ( self. _seen_keys_in_the_step ) == 0 : self. total_count += 1 self. count += 1 for key2, v in stats. items ( ) : if : raise RuntimeError ( f""{key2} is reserved."" ) if key2 in self. _seen_keys_in_the_step : raise RuntimeError ( f""{key2} is registered twice."" ) if v is None : v = np. nan r = to_reported_value ( v, weight ) if key2 not in self. stats : nan = to_reported_value ( np. nan, None if weight is None else 0 ) self. stats [ key2 ]. extend ( r if i == self. count - 1 else nan for i in range ( self. count ) ) else : self. stats [ key2 ]. append ( r ) self. _seen_keys_in_the_step. add ( key2 )",False,key2 in _reserved,key2 in self.reserved,0.6625944375991821 4796,"def __init__ ( self, proxy ) : KeepAliveHandler. __init__ ( self ) urllib2. HTTPSHandler. __init__ ( self, debuglevel = 0 ) self. _proxy = proxy try : host, port = self. _proxy. split ( "":"" ) except : msg = ( ""The proxy you are specifying (%s) is invalid! The expected"" "" format is : is expected."" ) raise BaseFrameworkException ( msg % proxy ) else : if : self. _proxy = None",False,not host or not port,proxy is None,0.6612662076950073 4797,"def cmd_query ( self, host, name ) : """"""implementation of the ``query`` command"""""" name = name. upper ( ) self. logger. debug ( ""querying for %r"", name ) if name not in self. services : self. logger. debug ( ""no such service"" ) return ( ) oldest = time. time ( ) - self. pruning_timeout all_servers = sorted ( self. services [ name ]. items ( ), key = lambda x : x [ 1 ] ) servers = [ ] for addrinfo, t in all_servers : if : self. logger. debug ( ""discarding stale %s:%s"", * addrinfo ) self. _remove_service ( name, addrinfo ) else : servers. append ( addrinfo ) self. logger. debug ( ""replying with %r"", servers ) return tuple ( servers )",False,t < oldest,oldest,0.6760469675064087 4798,"def _attach_field_to_tree ( field, subtree ) : splitted_field = field. split ( ""."", 1 ) if len ( splitted_field ) == 1 : new_parts = list ( subtree [ LEAF_MARKER ] ) new_parts. 
extend ( splitted_field ) subtree [ LEAF_MARKER ] = list ( set ( new_parts ) ) else : node, remainder = splitted_field if : subtree [ node ] = defaultdict ( dict, LEAF_CONSTRAINT ) _attach_field_to_tree ( remainder, subtree [ node ] )",True,node not in subtree,node not in subtree,0.6730892658233643 4799,"def _files_to_data ( req_args : dict ) -> List [ BinaryIO ] : open_files = [ ] files = req_args. pop ( ""files"", None ) if files is not None : for k, v in files. items ( ) : if : f = open ( v. encode ( ""utf-8"", ""ignore"" ), ""rb"" ) open_files. append ( f ) req_args [ ""data"" ]. update ( { k : f } ) else : req_args [ ""data"" ]. update ( { k : v } ) return open_files",False,"isinstance(v, str)","isinstance(v, unicode)",0.6523703336715698 4800,"def get_prompt_tokens ( ) -> List [ Tuple [ str, str ] ] : tokens = [ ] tokens. append ( ( ""class:qmark"", qmark ) ) tokens. append ( ( ""class:question"", "" {} "". format ( message ) ) ) if ic. is_answered : nbr_selected = len ( ic. selected_options ) if nbr_selected == 0 : tokens. append ( ( ""class:answer"", ""done"" ) ) elif : if isinstance ( ic. get_selected_values ( ) [ 0 ]. title, list ) : ts = ic. get_selected_values ( ) [ 0 ]. title tokens. append ( ( ""class:answer"", """". join ( [ token [ 1 ] for token in ts ] ), ) ) else : tokens. append ( ( ""class:answer"", ""[{}]"". format ( ic. get_selected_values ( ) [ 0 ]. title ), ) ) else : tokens. append ( ( ""class:answer"", ""done ({} selections)"". format ( nbr_selected ) ) ) else : for url in self. urls : if url. endswith ( ""/%exchange_id"" ) or ""/receipts/"" in url : continue url = url. replace ( ""/team/invoices/%invoice_id"", ""/org/invoices/%s"" % self. invoice_id ) url = url. replace ( ""/%invoice_id"", ""/%s"" % self. invoice_id ) assert ""/%"" not in url try : r = self. client. GET ( url, ** kw ) except Response as e : if : raise r = e assert r. code!= 404 assert r. code < 500 assert not overescaping_re. search ( r. text )",False,e.code == 404 or e.code >= 500,e.code != 200,0.666318416595459 4802,"def onClicked ( event ) : if not self. path : if not os. path. exists ( mh. getPath ( ""render"" ) ) : os. makedirs ( mh. getPath ( ""render"" ) ) self. path = mh. getPath ( ""render"" ) filename, ftype = mh. getSaveFileName ( os. path. splitext ( self. path ) [ 0 ], ""PNG Image (*.png);;JPEG Image (*.jpg);;Thumbnail (*.thumb);;All files (*.*)"", ) if filename : if : self. image. save ( filename, iformat = ""PNG"" ) else : self. image. save ( filename ) self. path = os. path. dirname ( filename )",False,'Thumbnail' in ftype,self.image.isOpen(filename),0.6626404523849487 4803,"def get_or_create_intersection_pointer ( schema : s_schema. Schema, ptrname : str, source : s_objtypes. ObjectType, components : Iterable [ Pointer ], *, modname : Optional [ str ] = None, ) -> Tuple [ s_schema. Schema, Pointer ] : components = list ( components ) if len ( components ) == 1 : return schema, components [ 0 ] targets = list ( filter ( None, [ p. get_target ( schema ) for p in components ] ) ) schema, target = utils. get_intersection_type ( schema, targets, module = modname ) cardinality = qltypes. SchemaCardinality. ONE for component in components : if : cardinality = qltypes. SchemaCardinality. MANY break metacls = type ( components [ 0 ] ) default_base_name = metacls. get_default_base_name ( ) assert default_base_name is not None genptr = schema. get ( default_base_name, type = Pointer ) schema, result = genptr. get_derived ( schema, source, target, derived_name_base = sn. 
Name ( module = ""__"", name = ptrname ), attrs = { ""intersection_of"" : so. ObjectSet. create ( schema, components ), ""cardinality"" : cardinality, }, ) return schema, result",False,component.get_cardinality(schema) is qltypes.SchemaCardinality.MANY,len(components) == 1,0.653479814529419 4804,"def ensure_tool_run_response_okay ( submit_response_object, request_desc, inputs = None ) : if submit_response_object. status_code!= 200 : message = None dynamic_param_error = False try : err_response = submit_response_object. json ( ) if : param_errors = err_response [ ""param_errors"" ] if ""dbkey"" in param_errors : dbkey_err_obj = param_errors [ ""dbkey"" ] dbkey_val = dbkey_err_obj. get ( ""parameter_value"" ) message = ""Invalid dbkey specified [%s]"" % dbkey_val for key, val in param_errors. items ( ) : if isinstance ( val, dict ) and val. get ( ""is_dynamic"" ) : dynamic_param_error = True if message is None : message = err_response. get ( ""err_msg"" ) or None except Exception : pass if message is None : template = ""Request to %s failed - invalid JSON content returned from Galaxy server [%s]"" message = template % ( request_desc, submit_response_object. text ) raise RunToolException ( message, inputs, dynamic_param_error = dynamic_param_error ) submit_response = submit_response_",True,'param_errors' in err_response,'param_errors' in err_response,0.6534022688865662 4805,def _push_w_to_instances ( self ) : for scenario in self. _scenario_tree. _scenarios : scenario. push_w_to_instance ( ) if : self. _problem_states. objective_updated [ scenario. _name ] = True,False,self._problem_states.has_ph_objective_weight_terms[scenario._name],scenario.isValid(),0.6521754264831543 4806,"def build ( self, input_shape ) : if isinstance ( input_shape, list ) and len ( input_shape ) == 2 : self. data_mode = ""disjoint"" self. F = input_shape [ 0 ] [ - 1 ] else : if : self. data_mode = ""single"" else : self. data_mode = ""batch"" self. F = input_shape [ - 1 ]",False,len(input_shape) == 2,"isinstance(input_shape, list)",0.6551178693771362 4807,"def untokenize ( self, iterable ) : for t in iterable : if : self. compat ( t, iterable ) break tok_type, token, start, end, line = t self. add_whitespace ( start ) self. tokens. append ( token ) self. prev_row, self. prev_col = end if tok_type in ( NEWLINE, NL ) : self. prev_row += 1 self. prev_col = 0 return """". join ( self. tokens )",False,len(t) == 2,t in EMPTY_tokens,0.6574805974960327 4808,"def destroy ( self, wipe = False ) : if wipe : self. depl. logger. warn ( ""wipe is not supported"" ) try : node = self. node ( ) question = ""are you sure you want to destroy {0}?"" if : return False known_hosts. remove ( self. public_ipv4, self. public_host_key ) self. log ( ""destroying the GCE machine..."" ) node. destroy ( ) except libcloud. common. google. ResourceNotFoundError : self. warn ( ""seems to have been destroyed already"" ) self. _node_deleted ( ) for k, v in self. block_device_mapping. items ( ) : if v. get ( ""deleteOnTermination"", False ) : self. _delete_volume ( v [ ""disk_name"" ], v [ ""region"" ] ) self. update_block_device_mapping ( k, None ) return True",False,not self.depl.logger.confirm(question.format(self.full_name)),not node.exists(),0.6497131586074829 4809,"def access_entry_point_target ( self, trans, entry_point_id ) : entry_point = trans. sa_session. query ( model. InteractiveToolEntryPoint ). get ( entry_point_id ) if self. app. interactivetool_manager. can_access_entry_point ( trans, entry_point ) : if : return self. 
target_if_active ( trans, entry_point ) elif entry_point. deleted : raise exceptions. MessageException ( ""InteractiveTool has ended. You will have to start a new one."" ) else : raise exceptions. MessageException ( ""InteractiveTool is not active. If you recently launched this tool it may not be ready yet, please wait a moment and refresh this page."" ) raise exceptions. ItemAccessibilityException ( ""You do not have access to this InteractiveTool entry point."" )",False,entry_point.active,entry_point.id,0.6568092703819275 4810,"def run ( self ) : """"""Process queries from task queue, stop if processor is None."""""" while True : try : processor, iprot, oprot, otrans, callback = self. queue. get ( ) if : break processor. process ( iprot, oprot ) callback ( True, otrans. getvalue ( ) ) except Exception : logging. exception ( ""Exception while processing request"" ) callback ( False, """" )",True,processor is None,processor is None,0.6669086217880249 4811,"def create_resource ( self, name, ** kwargs ) : temp_dir = os. path. join ( tempfile. gettempdir ( ), self. random_name ) if not os. path. exists ( temp_dir ) : os. mkdir ( temp_dir ) with open ( os. path. join ( temp_dir, ""readme"" ), ""w"" ) as f : f. write ( ""This directory contains test files generated by Azure CLI storage command "" ""module tests."" ) for folder_name in [ ""apple"", ""butter"", ""butter/charlie"", ""duff/edward"" ] : for file_index in range ( 10 ) : file_path = os. path. join ( temp_dir, folder_name, ""file_%s"" % file_index ) if : os. makedirs ( os. path. dirname ( file_path ) ) with open ( file_path, ""w"" ) as f : f. write ( ""Azure CLI storage command module test sample file. origin:"" "" %s"" % file_path ) setattr ( self, ""_temp_dir"", temp_dir ) return { self. parameter_name : temp_dir }",False,not os.path.exists(os.path.dirname(file_path)),not os.path.exists(file_path),0.6462835073471069 4812,"def main ( local_port = None, remote_port = None, * args ) : server_list = [ ] + teredo_server_list for arg in args : if : server_list. append ( arg ) elif isinstance ( arg, list ) : server_list += arg elif isinstance ( arg, tuple ) : server_list += list ( arg ) prober = teredo_prober ( server_list, local_port = local_port, remote_port = remote_port ) need_probe = recommend = None if not prober. qualified : print ( ( warn_3 % prober. nat_type ) ) if prober. nat_type == ""symmetric"" and input ( confirm_force ). lower ( ) == ""y"" : need_probe = True prober. qualified = True elif prober. nat_type == ""unknown"" : print ( warn_4 ) recommend = prober. ip2server [ prober. last_server_ip ] else : print ( ( nat_type_result % prober. nat_type ) ) need_probe = True if need_probe : qualified_list = prober. eval_servers ( ) for qualified, server, server_ip, cost in qualified_list : print ( ( ""%s %s %s"" % ( server_ip, server, ""%sms"" % cost if qualified else ""timedout"" ) ) ) expr, data, comfortable_memory = None, chunksize = None, blocksize = None, ** kwargs ) : comfortable_memory = comfortable_memory or min ( 1e9, available_memory ( ) / 4 ) kwargs = dict ( ) if os. path. getsize ( data. path ) > comfortable_memory : do_chunk = True if chunksize is not None : warn ( ""Deprecation warning: chunksize keyword renamed to blocksize"" ) blocksize = chunksize if : kwargs [ ""blocksize"" ] = blocksize else : do_chunk = False oexpr = optimize ( expr, data ) leaf = oexpr. 
_leaves ( ) [ 0 ] pth = list ( path ( oexpr, leaf ) ) if len ( pth ) >= 2 and isinstance ( pth [ - 2 ], ( Projection, Field ) ) : kwargs [ ""usecols"" ] = list ( map ( str, pth [ - 2 ]. fields ) ) if do_chunk : return dd. read_csv ( data. path, ** kwargs ) else : return into ( pd. DataFrame, data, dshape = leaf. dshape, ** kwargs )",True,blocksize is not None,blocksize is not None,0.6589226126670837 4814,"def _send_until_done ( self, data ) : while True : try : return self. connection. send ( data ) except OpenSSL. SSL. WantWriteError : wr = util. wait_for_write ( self. socket, self. socket. gettimeout ( ) ) if : raise timeout ( ) continue except OpenSSL. SSL. SysCallError as e : raise SocketError ( str ( e ) )",False,not wr,wr.returncode != 0,0.6841272115707397 4815,"def encode ( self, encodeFun, value, defMode, maxChunkSize ) : substrate, isConstructed = self. encodeValue ( encodeFun, value, defMode, maxChunkSize ) tagSet = value. getTagSet ( ) if tagSet : if : defMode = 1 return ( self. encodeTag ( tagSet [ - 1 ], isConstructed ) + self. encodeLength ( len ( substrate ), defMode ) + substrate + self. _encodeEndOfOctets ( encodeFun, defMode ) ) else : return substrate",False,not isConstructed,len(tagSet[0]) == 0,0.6596665978431702 4816,"def update_cluster ( cmd, client, resource_group_name, cluster_name, client_connection_port = None, gateway_connection_port = None, dns_name = None, tags = None, ) : try : cluster = client. managed_clusters. get ( resource_group_name, cluster_name ) if client_connection_port is not None : cluster. client_connection_port = client_connection_port if gateway_connection_port is not None : cluster. http_gateway_connection_port = gateway_connection_port if : cluster. dns_name = dns_name if tags is not None : cluster. tags = tags poller = client. managed_clusters. create_or_update ( resource_group_name, cluster_name, cluster ) return LongRunningOperation ( cmd. cli_ctx ) ( poller ) except ErrorModelException as ex : _log_error_exception ( ex ) raise",True,dns_name is not None,dns_name is not None,0.6584422588348389 4817,"def parseCmdLine ( self ) : self. debug = ""--debug"" in sys. argv or ""-g"" in sys. argv if ""--gtk2"" in sys. argv : self. WXPORT = ""gtk2"" if ""--gtk3"" in sys. argv : self. WXPORT = ""gtk3"" for key, default in Configuration. __dict__. items ( ) : if : continue for idx, arg in enumerate ( sys. argv ) : if arg and arg. startswith ( key + ""="" ) : value = arg. split ( ""="", 1 ) [ 1 ] if isinstance ( default, int ) : value = int ( value ) setattr ( self, key, value ) sys. argv [ idx ] = None sys. argv = [ arg for arg in sys. argv if arg is not None ]",False,key[0] < 'A' or key[0] > 'Z',key in self.__dict__,0.6565441489219666 4818,"def _check_previous_process ( self ) : """"""Check if there's a process leftover and shut it down if so"""""" if not self. pid_file or not os. path. exists ( self. pid_file ) : return pid_file = os. path. abspath ( self. pid_file ) self. log. warning ( ""Found proxy pid file: %s"", pid_file ) try : with open ( pid_file, ""r"" ) as f : pid = int ( f. read ( ). strip ( ) ) except ValueError : self. log. warning ( ""%s did not appear to contain a pid"", pid_file ) self. _remove_pid_file ( ) return try : self. _check_pid ( pid ) except ProcessLookupError : self. log. warning ( ""Proxy no longer running at pid=%s"", pid ) self. _remove_pid_file ( ) return self. log. warning ( ""Proxy still running at pid=%s"", pid ) if os. name!= ""nt"" : sig_list = [ signal. SIGTERM ] * 2 + [ signal. 
SIGKILL ] for i in range ( 3 ) : try : if : self. _terminate_win ( pid ) else : os. kill ( pid, sig_list [ i ] ) except ProcessLookupError : break time. sleep ( 1 ) try : self. _check_pid ( pid ) except ProcessLookupError : """"""Return the errors from running the test"""""" bugs = [ ] for win in windows : if not win. ref : continue for char in CharsToCheck : missing_extra = """" if win. window_text ( ). count ( char ) > win. ref. window_text ( ). count ( char ) : missing_extra = u""ExtraCharacters"" elif win. window_text ( ). count ( char ) < win. ref. window_text ( ). count ( char ) : missing_extra = u""MissingCharacters"" if : bugs. append ( ( [ win, ], { ""MissingOrExtra"" : missing_extra, ""MissingOrExtraText"" : char }, testname, 0, ) ) return bugs",True,missing_extra,missing_extra,0.6654960513114929 4820,"def get_module_comment ( self, attrname : str ) -> Optional [ List [ str ] ] : try : analyzer = ModuleAnalyzer. for_module ( self. modname ) analyzer. analyze ( ) key = ( """", attrname ) if : return list ( analyzer. attr_docs [ key ] ) except PycodeError : pass return None",True,key in analyzer.attr_docs,key in analyzer.attr_docs,0.6665188670158386 4821,"def create_resource ( self, name, ** kwargs ) : if not self. dev_setting_value : self. resource_group = self. _get_resource_group ( ** kwargs ) self. location = self. _get_resource_group_location ( ** kwargs ) cmd = ""az backup vault create -n {} -g {} --location {}"". format ( name, self. resource_group, self. location ) execute ( self. cli_ctx, cmd ) if : cmd = ""az backup vault backup-properties set -n {} -g {} --soft-delete-feature-state Disable"". format ( name, self. resource_group ) execute ( self. cli_ctx, cmd ) return { self. parameter_name : name } return { self. parameter_name : self. dev_setting_value }",False,not self.soft_delete,self.soft_delete_feature_state,0.6615732908248901 4822,"def move_api_keys ( self ) -> None : """"""Move the API keys from cog stored config to core bot config if they exist."""""" tokens = await self. config. tokens ( ) youtube = await self. bot. get_shared_api_tokens ( ""youtube"" ) twitch = await self. bot. get_shared_api_tokens ( ""twitch"" ) for token_type, token in tokens. items ( ) : if : await self. bot. set_shared_api_tokens ( ""youtube"", api_key = token ) if token_type == ""TwitchStream"" and ""client_id"" not in twitch : await self. bot. set_shared_api_tokens ( ""twitch"", client_id = token ) await self. config. tokens. clear ( )",False,token_type == 'YoutubeStream' and 'api_key' not in youtube,token_type == 'youtubeStream' and 'youtube_id' not in token,0.6559834480285645 4823,"def post_dis ( self ) : if self. g2. value : for a in self. args : if : continue m = a. expr a. expr = ExprMem ( ExprOp ( ""segm"", enc2segm [ self. g2. value ], m. ptr ), m. size ) return self",False,"not isinstance(a.expr, ExprMem)",not a.expr,0.6519356966018677 4824,"def process_formdata ( self, valuelist ) : if valuelist : if valuelist [ 0 ] == ""__None"" : self. data = None else : if : self. data = None return try : obj = self. queryset. get ( pk = valuelist [ 0 ] ) self. data = obj except DoesNotExist : self. data = None",False,self.queryset is None,valuelist[0] == '__TAB > or self.queryset.exists(),0.663162350654602 4825,"def handle_starttag ( self, tag, attrs ) : if tag == ""base"" : self. base_url = dict ( attrs ). get ( ""href"" ) if self. scan_tag ( tag ) : for attr, value in attrs : if self. scan_attr ( attr ) : if : value = strip_html5_whitespace ( value ) url = self. 
process_attr ( value ) link = Link ( url = url ) self. links. append ( link ) self. current_link = link",False,self.strip,self.html5_whitespace and value,0.6687150001525879 4826,"def check ( n, filename ) : n = get_nodes_by_name ( n, name ) if len ( n ) > 1 and not merge_multiple : raise ManifestException ( ""Invalid manifest file: must have a single '%s' element"" % name ) if n : values = [ ] for child in n : if : values. append ( """". join ( [ x. toxml ( ) for x in child. childNodes ] ) ) else : values. append ( _get_text ( child. childNodes ). strip ( ) ) return "", "". join ( values )",False,allowXHTML,child.filename == filename,0.6734499931335449 4827,"def display_installed_jobs_list_page ( self, installed_repository, data_manager_names = None, strings_displayed = None, strings_not_displayed = None, ) : data_managers = installed_repository. metadata. get ( ""data_manager"", { } ). get ( ""data_managers"", { } ) if data_manager_names : if : data_manager_names = [ data_manager_names ] for data_manager_name in data_manager_names : assert data_manager_name in data_managers, ( ""The requested Data Manager '%s' was not found in repository metadata."" % data_manager_name ) else : data_manager_name = list ( data_managers. keys ( ) ) for data_manager_name in data_manager_names : params = { ""id"" : data_managers [ data_manager_name ] [ ""guid"" ] } self. visit_galaxy_url ( ""/data_manager/jobs_list"", params = params ) self. check_for_strings ( strings_displayed, strings_not_displayed )",False,"not isinstance(data_manager_names, list)","hasattr(data_managers, 'keys')",0.6504775285720825 4828,"def gvariant_args ( args : List [ Any ] ) -> str : """"""Convert args into gvariant."""""" gvariant = """" for arg in args : if isinstance ( arg, bool ) : gvariant += "" {}"". format ( str ( arg ). lower ( ) ) elif isinstance ( arg, ( int, float ) ) : gvariant += f"" {arg}"" elif : gvariant += f' ""{arg}""' else : gvariant += f"" {arg!s}"" return gvariant. lstrip ( )",True,"isinstance(arg, str)","isinstance(arg, str)",0.6517902612686157 4829,"def wrapped ( request, * args, ** kwargs ) : if not request. user. is_authenticated ( ) : request. session [ ""_next"" ] = request. get_full_path ( ) if : redirect_uri = reverse ( ""sentry-auth-organization"", args = [ kwargs [ ""organization_slug"" ] ] ) else : redirect_uri = get_login_url ( ) return HttpResponseRedirect ( redirect_uri ) return func ( request, * args, ** kwargs )",True,'organization_slug' in kwargs,'organization_slug' in kwargs,0.6593064069747925 4830,"def test_get_message ( self ) : async with self. chat_client : await self. _create_thread ( ) async with self. chat_thread_client : message_id = await self. _send_message ( ) message = await self. chat_thread_client. get_message ( message_id ) assert message. id == message_id assert message. type == ChatMessageType. TEXT assert message. content. message == ""hello world"" if : await self. chat_client. delete_chat_thread ( self. thread_id )",False,not self.is_playback(),self.thread_id,0.64799964427948 4831,"def update_from_cloudformation_json ( cls, original_resource, new_resource_name, cloudformation_json, region_name, ) : properties = cloudformation_json [ ""Properties"" ] if cls. is_replacement_update ( properties ) : resource_name_property = cls. cloudformation_name_type ( ) if : properties [ resource_name_property ] = new_resource_name new_resource = cls. 
create_from_cloudformation_json ( properties [ resource_name_property ], cloudformation_json, region_name ) properties [ resource_name_property ] = original_resource. name cls. delete_from_cloudformation_json ( original_resource. name, cloudformation_json, region_name ) return new_resource else : if ""Path"" in properties : original_resource. path = properties [ ""Path"" ] return original_resource",False,resource_name_property not in properties,resource_name_property in properties,0.657188892364502 4832,"def _validate_and_set_default_hyperparameters ( self ) : """"""Placeholder docstring"""""" for name, definition in self. hyperparameter_definitions. items ( ) : if : spec = definition [ ""spec"" ] if ""DefaultValue"" in spec : self. hyperparam_dict [ name ] = spec [ ""DefaultValue"" ] elif ""IsRequired"" in spec and spec [ ""IsRequired"" ] : raise ValueError ( ""Required hyperparameter: %s is not set"" % name )",False,name not in self.hyperparam_dict,'spec' in definition,0.6662430763244629 4833,"def get_feature_meta ( column, preprocessing_parameters ) : if preprocessing_parameters [ ""normalization"" ] is not None : if preprocessing_parameters [ ""normalization"" ] == ""zscore"" : return { ""mean"" : column. astype ( np. float32 ). mean ( ), ""std"" : column. astype ( np. float32 ). std ( ), } elif : return { ""min"" : column. astype ( np. float32 ). min ( ), ""max"" : column. astype ( np. float32 ). max ( ), } else : logger. info ( ""Currently zscore and minmax are the only "" ""normalization strategies available. No {}"". format ( preprocessing_parameters [ ""normalization"" ] ) ) return { } else : return { }",False,preprocessing_parameters['normalization'] == 'minmax',preprocessing_parameters['normalization'] == 'max',0.6574020981788635 4834,"def setitem ( self, address, value ) : if 0x0000 <= address < 0x2000 : self. rambank_enabled = ( value & 0b00001111 ) == 0b1010 elif 0x2000 <= address < 0x4000 : value &= 0b00011111 if : value = 1 self. bank_select_register1 = value elif 0x4000 <= address < 0x6000 : self. bank_select_register2 = value & 0b11 elif 0x6000 <= address < 0x8000 : self. memorymodel = value & 0b1 elif 0xA000 <= address < 0xC000 : if self. rambanks is None : logger. warning ( ""Game tries to set value 0x%0.2x at RAM address 0x%0.4x, but RAM "" ""banks are not initialized. Initializing %d RAM banks as "" ""precaution"" % ( value, address, self. external_ram_count ) ) self. init_rambanks ( self. external_ram_count ) if self. rambank_enabled : self. rambank_selected = ( self. bank_select_register2 if self. memorymodel == 1 else 0 ) self. rambanks [ self. rambank_selected % self. external_ram_count ] [ address - 0xA000 ] = value else : addr = pwndbg. memory. page_align ( int ( addr ) ) try : while True : if : break addr += pwndbg. memory. PAGE_SIZE except gdb. MemoryError : pass return addr,False,"b'\x7fELF' == pwndbg.memory.read(addr, 4)",pwndbg.memory.page_size(addr) == -1,0.654915452003479 4836,"def _test_randomness ( self, fn, trans, configs ) : random_state = random. getstate ( ) random. seed ( 42 ) img = transforms. ToPILImage ( ) ( torch. rand ( 3, 16, 18 ) ) for p in [ 0.5, 0.7 ] : for config in configs : inv_img = fn ( img, ** config ) num_samples = 250 counts = 0 for _ in range ( num_samples ) : tranformation = trans ( p = p, ** config ) tranformation. __repr__ ( ) out = tranformation ( img ) if : counts += 1 p_value = stats. binom_test ( counts, num_samples, p = p ) random. setstate ( random_state ) self. 
assertGreater ( p_value, 0.0001 )",False,out == inv_img,out,0.662338137626648 4837,"def get_ext_outputs ( self ) : """"""Get a list of relative paths to C extensions in the output distro"""""" all_outputs = [ ] ext_outputs = [ ] paths = { self. bdist_dir : """" } for base, dirs, files in sorted_walk ( self. bdist_dir ) : for filename in files : if : all_outputs. append ( paths [ base ] + filename ) for filename in dirs : paths [ os. path. join ( base, filename ) ] = paths [ base ] + filename + ""/"" if self. distribution. has_ext_modules ( ) : build_cmd = self. get_finalized_command ( ""build_ext"" ) for ext in build_cmd. extensions : if isinstance ( ext, Library ) : continue fullname = build_cmd. get_ext_fullname ( ext. name ) filename = build_cmd. get_ext_filename ( fullname ) if not os. path. basename ( filename ). startswith ( ""dl-"" ) : if os. path. exists ( os. path. join ( self. bdist_dir, filename ) ) : ext_outputs. append ( filename ) return all_outputs, ext_outputs",False,os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS,filename in paths,0.6500164270401001 4838,"def get_list_installed_vs_path ( ) -> [ ] : err = """" items = [ ] path_file = os. path. join ( get_sdk_path ( ), ""Kha"", ""Kinc"", ""Tools"", ""kincmake"", ""Data"", ""windows"", ""vswhere.exe"", ) if os. path. isfile ( path_file ) : cmd = path_file + "" -nologo -property installationPath"" process = subprocess. Popen ( cmd, stdout = subprocess. PIPE ) while True : output = process. stdout. readline ( ). decode ( ""utf-8"" ) if : break if output : path = output. strip ( ) items. append ( path ) else : err = 'File ""' + path_file + '"" not found.' return items, err",False,len(output.strip()) == 0 and process.poll() is not None,not output,0.6503398418426514 4839,"def process ( self ) : inputs, outputs = self. inputs, self. outputs if not all ( s. is_linked for s in [ inputs [ ""vertices"" ], inputs [ ""polygons"" ] ] ) : return poly_or_edge_linked = outputs [ ""edges"" ]. is_linked or outputs [ ""polygons"" ]. is_linked if not ( outputs [ ""vertices"" ]. is_linked and poly_or_edge_linked ) : return verts = Vector_generate ( inputs [ ""vertices"" ]. sv_get ( ) ) polys = inputs [ ""polygons"" ]. sv_get ( ) thickness = inputs [ ""thickness"" ]. sv_get ( ) [ 0 ] verts_out = [ ] edges_out = [ ] polys_out = [ ] for v, p, t in zip ( verts, polys, repeat_last ( thickness ) ) : res = wireframe ( v, p, t, self ) if : return verts_out. append ( res [ 0 ] ) edges_out. append ( res [ 1 ] ) polys_out. append ( res [ 2 ] ) outputs [ ""vertices"" ]. sv_set ( verts_out ) outputs [ ""edges"" ]. sv_set ( edges_out ) outputs [ ""polygons"" ]. sv_set ( polys_out )",False,not res,res is not None,0.6901425123214722 4840,"def _init_weights ( self, module ) : """"""Initialize the weights."""""" if isinstance ( module, nn. Embedding ) : if : nn. init. normal_ ( module. weight, mean = 0, std = self. config. embed_init_std ) if isinstance ( module, nn. Linear ) : if self. config is not None and self. config. init_std is not None : nn. init. normal_ ( module. weight, mean = 0, std = self. config. init_std ) if hasattr ( module, ""bias"" ) and module. bias is not None : nn. init. constant_ ( module. bias, 0.0 ) if isinstance ( module, nn. LayerNorm ) : module. bias. data. zero_ ( ) module. weight. data. 
fill_ ( 1.0 )",False,self.config is not None and self.config.embed_init_std is not None,self.config is not None and module.embed_init_std is not None,0.6510850787162781 4841,"def do_macho ( file, bits, endian ) : cpu_type, cpu_sub_type, file_type, n_commands, size_of_commands, flags = read_data ( file, endian, 6 ) if bits == 64 : read_data ( file, endian ) for _ in range ( n_commands ) : where = file. tell ( ) cmd, cmd_size = read_data ( file, endian, 2 ) if : name_offset = read_data ( file, endian ) file. seek ( where + name_offset, os. SEEK_SET ) load = file. read ( cmd_size - name_offset ). decode ( ) load = load [ : load. index ( ""\0"" ) ] if load == what : file. seek ( where + name_offset, os. SEEK_SET ) file. write ( value. encode ( ) + b""\0"" ) file. seek ( where + cmd_size, os. SEEK_SET )",False,cmd == LC_LOAD_DYLIB,flags & 1,0.6619106531143188 4842,"def notify_webhooks ( logentry_ids : list ) : if not isinstance ( logentry_ids, list ) : logentry_ids = [ logentry_ids ] qs = LogEntry. all. select_related ( ""event"", ""event__organizer"" ). filter ( id__in = logentry_ids ) _org, _at, webhooks = None, None, None for logentry in qs : if : break notification_type = logentry. webhook_type if not notification_type : break if ( _org!= logentry. organizer or _at!= logentry. action_type or webhooks is None ) : _org = logentry. organizer _at = logentry. action_type event_listener = WebHookEventListener. objects. filter ( webhook = OuterRef ( ""pk"" ), action_type = notification_type. action_type ) webhooks = WebHook. objects. annotate ( has_el = Exists ( event_listener ) ). filter ( organizer = logentry. organizer, has_el = True, enabled = True ) if logentry. event_id : webhooks = webhooks. filter ( Q ( all_events = True ) | Q ( limit_events__pk = logentry. event_id ) ) <",False,not logentry.organizer,logentry.id,0.6660035848617554 4843,"def __init__ ( self, type_input ) : ""Figures out the correct OGR Type based upon the input."" if isinstance ( type_input, OGRGeomType ) : num = type_input. num elif isinstance ( type_input, six. string_types ) : type_input = type_input. lower ( ) if : type_input = ""unknown"" num = self. _str_types. get ( type_input ) if num is None : raise GDALException ( 'Invalid OGR String Type ""%s""' % type_input ) elif isinstance ( type_input, int ) : if type_input not in self. _types : raise GDALException ( ""Invalid OGR Integer Type: %d"" % type_input ) num = type_input else : raise TypeError ( ""Invalid OGR input type given."" ) self. num = num",False,type_input == 'geometry',type_input not in self._str_types,0.6648294925689697 4844,"def pre_save_task ( self, task, credentials, verrors ) : if task [ ""attributes"" ] [ ""encryption"" ] not in ( None, """", ""AES256"" ) : verrors. add ( ""encryption"", 'Encryption should be null or ""AES256""' ) if not credentials [ ""attributes"" ]. get ( ""skip_region"", False ) : if : response = await self. middleware. run_in_thread ( self. _get_client ( credentials ). get_bucket_location, Bucket = task [ ""attributes"" ] [ ""bucket"" ], ) task [ ""attributes"" ] [ ""region"" ] = response [ ""LocationConstraint"" ] or ""us-east-1""",False,"not credentials['attributes'].get('region', '').strip()",self.middleware,0.6531568765640259 4845,"def should_exclude_with ( self, tags ) : exclude_decision_map = { } for category_tag in self. select_category_tags ( tags ) : category, value = self. parse_category_tag ( category_tag ) active_value = self. value_provider. 
get ( category, None ) if : continue elif active_value == value : exclude_decision_map [ category ] = False else : if category not in exclude_decision_map : exclude_decision_map [ category ] = True return any ( exclude_decision_map. values ( ) )",True,active_value is None,active_value is None,0.6540433168411255 4846,"def get ( self, episode_idx, entry_idx = None ) : action = { } episode = self. episodes [ episode_idx ] [ entry_idx ] context = "" "". join ( episode [ ""text"" ]. split ( ""\n"" ) [ : - 1 ] ). replace ( ""\xa0"", "" "" ) question = episode [ ""text"" ]. split ( ""\n"" ) [ - 1 ] label_field = ""labels"" if ""labels"" in episode else ""eval_labels"" answers = [ ] for answer in episode [ label_field ] : new_answer = answer. replace ( ""."", """" ). replace ( ""?"", """" ). replace ( ""!"", """" ) context = context. replace ( answer, new_answer ) answers. append ( new_answer ) sentences = self. sent_tok. tokenize ( context ) labels = [ ] label_starts = [ ] for sentence in sentences : for answer in answers : if : labels. append ( sentence ) label_starts. append ( context. index ( sentence ) ) action = { ""context"" : context, ""text"" : question, label_field : labels, ""answer_starts"" : label_starts, ""label_candidates"" : sentences, ""episode_done"" : episode [ ""episode_done"" ], } if self. include_context : action [ ""text"" ] = action [ ""context"" ] + ""\n"" + action [ ""text"" ] del action [ ""context"" ] return action",False,answer in sentence and sentence not in labels,answer,0.6633495092391968 4847,"def process_dir ( self, dir_path, is_train = True ) : img_paths = glob. glob ( osp. join ( dir_path, ""*.jpg"" ) ) pattern = re. compile ( r""([\d]+)_c(\d\d\d)"" ) data = [ ] for img_path in img_paths : pid, camid = map ( int, pattern. search ( img_path ). groups ( ) ) if : continue assert 0 <= pid <= 776 assert 1 <= camid <= 20 camid -= 1 if is_train : pid = self. dataset_name + ""_"" + str ( pid ) camid = self. dataset_name + ""_"" + str ( camid ) data. append ( ( img_path, pid, camid ) ) return data",False,pid == -1,pid == 0,0.6724298000335693 4848,"def f ( x ) : if np. isnan ( x ). any ( ) : raise RuntimeError ( f""{np.isnan(x).sum()} elements of the {x.size} element array "" f""`x` are NaN."" ) X = ( torch. from_numpy ( x ) . to ( initial_conditions ) . view ( shapeX ) . contiguous ( ) . requires_grad_ ( True ) ) X_fix = fix_features ( X = X, fixed_features = fixed_features ) loss = - acquisition_function ( X_fix ). sum ( ) gradf = _arrayify ( torch. autograd. grad ( loss, X ) [ 0 ]. contiguous ( ). view ( - 1 ) ) if np. isnan ( gradf ). any ( ) : msg = ( f""{np.isnan(gradf).sum()} elements of the {x.size} element "" ""gradient array `gradf` are NaN. This often indicates numerical issues."" ) if : msg += "" Consider using `dtype=torch.double`."" raise RuntimeError ( msg ) fval = loss. item ( ) return fval, gradf",False,initial_conditions.dtype != torch.double,np.isnan(loss),0.6532289385795593 4849,"def dispatch ( ) : console_stream = sys. stderr console_handler = logging. StreamHandler ( console_stream ) setup_logging ( console_handler ) dispatcher = DocoptDispatcher ( TopLevelCommand, { ""options_first"" : True, ""version"" : get_version_info ( ""compose"" ) } ) options, handler, command_options = dispatcher. parse ( sys. argv [ 1 : ] ) ansi_mode = AnsiMode. AUTO try : if : ansi_mode = AnsiMode ( options. get ( ""--ansi"" ) ) except ValueError : raise UserError ( ""Invalid value for --ansi: {}. Expected one of {}."". format ( options. get ( ""--ansi"" ), "", "". 
join ( m. value for m in AnsiMode ) ) ) if options. get ( ""--no-ansi"" ) : if : raise UserError ( ""--no-ansi and --ansi cannot be combined."" ) log. warning ( ""--no-ansi option is deprecated and will be removed in future versions."" ) ansi_mode = AnsiMode. NEVER setup_console_handler ( console_handler, options. get ( ""--verbose"" ), ansi_mode. use_ansi_codes ( console_handler. stream ), options. get ( ""--log-level"" ), ) setup_parallel_logger ( ansi_mode ) if ansi_mode is AnsiMode. NEVER : command_options [ ""--no-color"" ] = True return functools. partial ( perform_command, options, handler,",False,options.get('--ansi'),options.get(--ansi'),0.6509849429130554 4850,"def _resolveSpecialSegment ( self, segmentB, specialResolutionMethods ) : resolutionMethodExecutor = _compileRules ( specialResolutionMethods, 3 ) for ( resolutionMethod, args ) in resolutionMethodExecutor [ True ] : iterables = [ ] for arg in args : iterables. append ( itertools. repeat ( arg ) ) resolutions = map ( resolutionMethod, self. allCorrectSinglePossibilities ( ), * iterables ) correctAB = zip ( self. allCorrectSinglePossibilities ( ), resolutions ) correctAB = filter ( lambda possibAB : possibility. pitchesWithinLimit ( possibA = possibAB [ 1 ], maxPitch = segmentB. _maxPitch ), correctAB, ) if : correctAB = filter ( lambda possibAB : self. _isCorrectConsecutivePossibility ( possibA = possibAB [ 0 ], possibB = possibAB [ 1 ] ), correctAB, ) if self. fbRules. applySinglePossibRulesToResolution : segmentB. _singlePossibilityRuleChecking = _compileRules ( segmentB. singlePossibilityRules ( segmentB. fbRules ) ) correctAB = filter ( lambda possibAB : segmentB. _isCorrectSinglePossibility ( errors = backend. _consume_errors ( ) assert errors assert errors [ 0 ]. lib == backend. _lib. ERR_LIB_RSA if isinstance ( key, _RSAPublicKey ) : assert errors [ 0 ]. reason == backend. _lib. RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE raise ValueError ( ""Data too long for key size. Encrypt less data or use a "" ""larger key size."" ) else : decoding_errors = [ backend. _lib. RSA_R_BLOCK_TYPE_IS_NOT_01, backend. _lib. RSA_R_BLOCK_TYPE_IS_NOT_02, backend. _lib. RSA_R_OAEP_DECODING_ERROR, backend. _lib. RSA_R_DATA_TOO_LARGE_FOR_MODULUS, ] if : decoding_errors. append ( backend. _lib. RSA_R_PKCS_DECODING_ERROR ) assert errors [ 0 ]. reason in decoding_errors raise ValueError ( ""Decryption failed."" )",False,backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR,errors[0].reason == backend._lib.RSA_R_PKCS_DECODE_ERROR,0.6530645489692688 4852,"def _check_choice ( self ) : if self. type == ""choice"" : if : raise OptionError ( ""must supply a list of choices for type 'choice'"", self ) elif type ( self. choices ) not in ( types. TupleType, types. ListType ) : raise OptionError ( ""choices must be a list of strings ('%s' supplied)"" % str ( type ( self. choices ) ). split ( ""'"" ) [ 1 ], self, ) elif self. choices is not None : raise OptionError ( ""must not supply choices for type %r"" % self. type, self )",False,self.choices is None,not self.choices,0.6720008254051208 4853,"def get_duplicate_box_mask ( box_list ) : max_iou = 0.35 box_mask = np. ones ( len ( box_list ) ) for i in range ( len ( box_list ) ) : if box_mask [ i ] == 0 : continue for j in range ( i + 1, len ( box_list ) ) : if : box_mask [ j ] = 0.0 filter_iou_mask = np. 
array ( box_mask > 0.0, dtype = ""bool"" ) return filter_iou_mask",False,"get_intersection_over_union(box_list[i], box_list[j]) > max_iou",box_mask[j] > max_iou,0.651002824306488 4854,"def update_sockets_range_mode ( self, loop_in_node ) : while len ( loop_in_node. inputs ) > len ( self. inputs ) : name = ""Data "" + str ( len ( self. inputs ) - 2 ) self. inputs. new ( ""SvStringsSocket"", name ) self. outputs. new ( ""SvStringsSocket"", name ) while len ( loop_in_node. inputs ) < len ( self. inputs ) : self. inputs. remove ( self. inputs [ - 1 ] ) self. outputs. remove ( self. outputs [ - 1 ] ) if loop_in_node. inputs [ - 1 ]. links : name = ""Data "" + str ( len ( self. inputs ) - 2 ) self. inputs. new ( ""SvStringsSocket"", name ) self. outputs. new ( ""SvStringsSocket"", name ) for idx, socket in enumerate ( loop_in_node. inputs ) : if : continue if socket. links : if type ( socket. links [ 0 ]. from_socket )!= type ( self. outputs [ socket. name ] ) : self. inputs. remove ( self. inputs [ socket. name ] ) self. inputs. new ( socket. links [ 0 ]. from_socket. bl_idname, socket. name ) self. inputs. move ( len ( self. inputs ) - 1, idx + 1 ) self. outputs. remove ( self. outputs [ socket. name ] ) self. outputs. new ( socket. links [ 0 ]. from_socket. bl_idname, socket. name ) <",False,idx == 0,self.has_error,0.6764886379241943 4855,"def prepend ( self, value ) : """"""prepend value to nodes"""""" root, root_text = self. _get_root ( value ) for i, tag in enumerate ( self ) : if not tag. text : tag. text = """" if : root [ - 1 ]. tail = tag. text tag. text = root_text else : tag. text = root_text + tag. text if i > 0 : root = deepcopy ( list ( root ) ) tag [ : 0 ] = root root = tag [ : len ( root ) ] return self",False,len(root) > 0,i > 0,0.6537187099456787 4856,"def test_call_extern_c_fn ( self ) : global memcmp memcmp = cffi_support. ExternCFunction ( ""memcmp"", ( ""int memcmp ( const uint8_t * ptr1, "" ""const uint8_t * ptr2, size_t num )"" ), ) @ udf ( BooleanVal ( FunctionContext, StringVal, StringVal ) ) def fn ( context, a, b ) : if a. is_null!= b. is_null : return False if a is None : return True if len ( a )!= b. len : return False if : return True return memcmp ( a. ptr, b. ptr, a. len ) == 0",False,a.ptr == b.ptr,b is None,0.6552106738090515 4857,"def validate_grammar ( ) -> None : for fn in _NONTERMINAL_CONVERSIONS_SEQUENCE : fn_productions = get_productions ( fn ) if : production_name = fn_productions [ 0 ]. name expected_name = f""convert_{production_name}"" if fn. __name__!= expected_name : raise Exception ( f""The conversion function for '{production_name}' "" + f""must be called '{expected_name}', not '{fn.__name__}'."" )",False,all((p.name == fn_productions[0].name for p in fn_productions)),len(fn_productions) > 0,0.659351646900177 4858,"def _make_bom ( bom_parts_dict : Dict [ str, float ], csv : bool = False, ) -> str : field_names = [ ""Description"", ""Count"", ""Unit Price"", ""Total Price"" ] field_names += g_bom_headers rows = [ ] all_costs : Dict [ str, float ] = { } for desc, elements in bom_parts_dict. 
items ( ) : row = [ ] count = elements [ ""Count"" ] currency = elements [ ""currency"" ] price = elements [ ""Unit Price"" ] if count > 0 : if price : total = price * count if : all_costs [ currency ] = 0 all_costs [ currency ] += total unit_price = _currency_str ( price, currency ) total_price = _currency_str ( total, currency ) else : unit_price = total_price = """" row = [ desc, count, unit_price, total_price ] for key in g_bom_headers : value = elements [ key ] row. append ( value ) rows. append ( row ) if len ( all_costs ) > 0 : empty_row = [ """" ] * len ( field_names ) rows. append ( empty_row ) for currency, cost in all_costs. items ( ) : self. init_if_missing ( ) nodes = [ self. repo. lookup ( n ) for n in self. repo ] export = [ node for node in nodes if not hex ( node ) in self. _map_hg ] total = len ( export ) if total : self. ui. note ( _ ( ""exporting hg objects to git\n"" ) ) exporter = hg2git. IncrementalChangesetExporter ( self. repo ) for i, rev in enumerate ( export ) : util. progress ( self. ui, ""exporting"", i, total = total ) ctx = self. repo. changectx ( rev ) state = ctx. extra ( ). get ( ""hg-git"", None ) if : self. ui. debug ( ""revision %d is a part "" ""of octopus explosion\n"" % ctx. rev ( ) ) continue self. export_hg_commit ( rev, exporter ) util. progress ( self. ui, ""importing"", None, total = total )",False,state == 'octopus',state,0.6532425284385681 4860,"def check ( self, name, data, hexdigest, shake = False, ** kwargs ) : length = len ( hexdigest ) // 2 hexdigest = hexdigest. lower ( ) constructors = self. constructors_to_test [ name ] self. assertGreaterEqual ( len ( constructors ), 2 ) for hash_object_constructor in constructors : m = hash_object_constructor ( data, ** kwargs ) computed = m. hexdigest ( ) if not shake else m. hexdigest ( length ) self. assertEqual ( computed, hexdigest, ""Hash algorithm %s constructed using %s returned hexdigest"" "" %r for %d byte input data that should have hashed to %r."" % ( name, hash_object_constructor, computed, len ( data ), hexdigest ), ) computed = m. digest ( ) if not shake else m. digest ( length ) digest = bytes. fromhex ( hexdigest ) self. assertEqual ( computed, digest ) if : self. assertEqual ( len ( digest ), m. digest_size )",False,not shake,digest,0.6744714975357056 4861,"def fix_repeating_arguments ( self ) : """"""Fix elements that should accumulate/increment values."""""" either = [ list ( child. children ) for child in transform ( self ). children ] for case in either : for e in [ child for child in case if case. count ( child ) > 1 ] : if : if e. value is None : e. value = [ ] elif type ( e. value ) is not list : e. value = e. value. split ( ) if type ( e ) is Command or type ( e ) is Option and e. argcount == 0 : e. value = 0 return self",False,type(e) is Argument or (type(e) is Option and e.argcount),e,0.6532750725746155 4862,"def _calcXMinMax ( self ) : xmin = xmax = None for line in self. line_list : xline_min = weeutil. weeutil. min_with_none ( line. x ) xline_max = weeutil. weeutil. max_with_none ( line. x ) if : xline_min = xline_min - line. bar_width [ 0 ] xmin = weeutil. weeutil. min_with_none ( [ xmin, xline_min ] ) xmax = weeutil. weeutil. max_with_none ( [ xmax, xline_max ] ) return ( xmin, xmax )",False,line.plot_type == 'bar',line.bar_width is not None,0.6531867980957031 4863,"def pairs ( self ) : for path in os. 
listdir ( ""src"" ) : if : continue dep = join ( ""src"", path ) if isdir ( dep ) : continue yield dep, join ( build_dir, path )",False,path == '.svn',isdir(path),0.6677864789962769 4864,"def text2text_generate_encoded ( sample_generator, vocab, targets_vocab = None, has_inputs = True, inputs_prefix = """", targets_prefix = """", ) : """"""Encode Text2Text samples from the generator with the vocab."""""" targets_vocab = targets_vocab or vocab for sample in sample_generator : if : sample [ ""inputs"" ] = vocab. encode ( inputs_prefix + sample [ ""inputs"" ] ) sample [ ""inputs"" ]. append ( text_encoder. EOS_ID ) sample [ ""targets"" ] = targets_vocab. encode ( targets_prefix + sample [ ""targets"" ] ) sample [ ""targets"" ]. append ( text_encoder. EOS_ID ) yield sample",True,has_inputs,has_inputs,0.6585865616798401 4865,"def wrapped ( self, * args, ** kwargs ) : """"""Calls the original method with a group name set before and after."""""" if not base. frame_stack : raise ValueError ( ""All `hk.Module`s must be initialized inside an `hk.transform`."" ) frame = base. current_frame ( ) state = base. ModuleState ( module = self, method_name = method_name ) with frame. module ( state ), _module_method_call ( self, method_name ) : module_name = getattr ( self, ""module_name"", None ) f = functools. partial ( unbound_method, self ) f = functools. partial ( run_interceptors, f, method_name, self ) if : local_name = module_name. split ( ""/"" ) [ - 1 ] f = stateful. named_call ( f, name = local_name ) out = f ( * args, ** kwargs ) if module_name is not None : for module_state in frame. module_stack : module_state. module. _submodules. add ( module_name ) return out",False,modules_with_named_call and module_name and (method_name == '__call__'),module_name is not None,0.6460506916046143 4866,"def pre_save ( self, model_instance, add ) : value = now ( ) if not model_instance. pk : for field in model_instance. _meta. get_fields ( ) : if : value = getattr ( model_instance, field. name ) break setattr ( model_instance, self. attname, value ) return value",False,"isinstance(field, AutoCreatedField)","hasattr(model_instance, field.name)",0.6514626741409302 4867,"def mock_whale_dir ( monkeypatch, tmp_path, request ) : is_path_creation_enabed = request. param for attr_name, path in { ""BASE_DIR"" : paths. BASE_DIR, ""CONFIG_DIR"" : paths. CONFIG_DIR, ""CONNECTION_PATH"" : paths. CONNECTION_PATH, ""LOGS_DIR"" : paths. LOGS_DIR, ""MANIFEST_DIR"" : paths. MANIFEST_DIR, ""MANIFEST_PATH"" : paths. MANIFEST_PATH, ""METRICS_PATH"" : paths. METRICS_PATH, ""METADATA_PATH"" : paths. METADATA_PATH, ""TEMPLATE_DIR"" : paths. TEMPLATE_DIR, }. items ( ) : d = get_mocked_path ( tmp_path, path ) monkeypatch. setattr ( paths, attr_name, d ) if is_path_creation_enabed : if : d. parent. mkdir ( parents = True, exist_ok = True ) elif d. is_file ( ) : d. touch ( exist_ok = True ) if attr_name in [ ""TEMPLATE_DIR"" ] : monkeypatch. setattr ( sql, attr_name, d ) return tmp_path",False,d.is_dir(),d.is_parent(),0.6537692546844482 4868,"def find_field_notnull_differ ( self, meta, table_description, table_name ) : if not self. can_detect_notnull_differ : return for field in all_local_fields ( meta ) : attname = field. db_column or field. attname if : continue null = self. get_field_db_nullable ( field, table_name ) if field. null!= null : action = field. null and ""DROP"" or ""SET"" self. 
add_difference ( ""notnull-differ"", table_name, attname, action )",False,"(table_name, attname) in self.new_db_fields",field.nullable and field.field_description != table_description,0.6473622918128967 4869,"def get_doc_links ( ) : """"""Returns a dictionary of function names -> upstream documentation link"""""" tadoc_homepage = ""http://www.tadoc.org/"" html_file_path = os. path. join ( INPUT_DIR, "".tadoc.org.html"" ) if os. path. exists ( html_file_path ) : with open ( html_file_path, ""r"" ) as f : html = f. read ( ) else : if : from urllib2 import urlopen else : from urllib. request import urlopen html = urlopen ( tadoc_homepage ). read ( ) with open ( html_file_path, ""wb"" ) as f : f. write ( html ) soup = BeautifulSoup ( html, ""html.parser"" ) links = [ a for a in soup. findAll ( ""a"" ) if ""indicator"" in a [ ""href"" ] ] ret = { } for a in links : url = """". join ( [ tadoc_homepage, a [ ""href"" ] ] ) func = url [ url. rfind ( ""/"" ) + 1 : url. rfind ( ""."" ) ] ret [ func ] = url return ret",False,"sys.version_info < (2, 8)",os.path.exists(tadoc_homepage),0.6524540185928345 4870,"def split_on_length ( example ) : """"""Split a batch of ditcs on length."""""" x = example [ ""targets"" ] length_diff = chunk_length * max_chunks - tf. shape ( x ) [ 1 ] padded_x = tf. pad ( x, [ ( 0, 0 ), ( 0, length_diff ), ( 0, 0 ), ( 0, 0 ) ] ) chunks = [ padded_x [ :, i * chunk_length : ( i + 1 ) * chunk_length, :, : ] for i in range ( max_chunks - 1 ) ] chunks. append ( padded_x [ :, ( max_chunks - 1 ) * chunk_length :, :, : ] ) new_example = { } new_example [ ""chunk_number"" ] = tf. concat ( [ tf. expand_dims ( tf. ones_like ( c ) * n, axis = 0 ) for n, c in enumerate ( chunks ) ], axis = 0, ) new_example [ ""targets"" ] = tf. concat ( [ tf. expand_dims ( c, axis = 0 ) for c in chunks ], axis = 0 ) for k in example : if : assert k!= ""chunk_number"", ( ""Chunking code expects the chunk_number feature name to be "" ""available"" ) new_example [ k ] = tf. concat ( [ tf. expand_dims ( example [ k ], axis = 0 ) for _ in range ( max_chunks ) ], axis = 0 ) return tf. data. Dataset. from_tensor_slices ( new_example )",False,k != 'targets',k != 'chunk_number',0.6577335596084595 4871,"def analyze ( self, item, pub_date ) : observable_sample = item [ ""title"" ] context_sample = { } context_sample [ ""description"" ] = ""ATM sample"" context_sample [ ""date_added"" ] = pub_date context_sample [ ""source"" ] = self. name family = False if "" - "" in observable_sample : family, observable_sample = observable_sample. split ( "" - "" ) try : sample = Hash. get_or_create ( value = observable_sample ) sample. add_context ( context_sample ) sample. add_source ( self. name ) sample_tags = [ ""atm"" ] if : sample_tags. append ( family ) sample. tag ( sample_tags ) except ObservableValidationError as e : logging. error ( e ) return",True,family,family,0.6895439028739929 4872,"def on_treeview_button_pressed ( self, treeview, event ) : if event. window!= treeview. get_bin_window ( ) : return False role = getattr ( treeview, TreeViewHelper. ROLE ) if role == TreeViewHelper. ROLE_EPISODES and event. button == 1 : result = treeview. get_path_at_pos ( int ( event. x ), int ( event. y ) ) if result is not None : path, column, x, y = result if : model = treeview. get_model ( ) cursor_episode = model. get_value ( model. get_iter ( path ), EpisodeListModel. C_EPISODE ) new_value = cursor_episode. is_new selected_episodes = self. 
get_selected_episodes ( ) if cursor_episode in selected_episodes : for episode in selected_episodes : episode. mark ( is_played = new_value ) self. update_episode_list_icons ( selected = True ) self. update_podcast_list_model ( selected = True ) return True return event. button",False,x < self.EPISODE_LIST_ICON_WIDTH and column == treeview.get_columns()[0],path is not None,0.6538043022155762 4873,"def test_avg_non_zero_reducer ( self ) : reducer = AvgNonZeroReducer ( ) for dtype in TEST_DTYPES : batch_size = 100 embedding_size = 64 embeddings = torch. randn ( batch_size, embedding_size ). type ( dtype ). to ( TEST_DEVICE ) labels = torch. randint ( 0, 10, ( batch_size, ) ) pair_indices = ( torch. randint ( 0, batch_size, ( batch_size, ) ), torch. randint ( 0, batch_size, ( batch_size, ) ), ) triplet_indices = pair_indices + ( torch. randint ( 0, batch_size, ( batch_size, ) ), ) losses = torch. randn ( batch_size ). type ( dtype ). to ( TEST_DEVICE ) zero_losses = torch. zeros ( batch_size ). type ( dtype ). to ( TEST_DEVICE ) for indices, reduction_type in [ ( torch. arange ( batch_size ), ""element"" ), ( pair_indices, ""pos_pair"" ), ( pair_indices, ""neg_pair"" ), ( triplet_indices, ""triplet"" ), ] : for L in [ losses, zero_losses ] : loss_dict = { ""loss"" : { ""losses"" : L, ""indices"" : indices, ",False,len(filtered_L) > 0,reduction_type == 'avg_non_zero_reducer',0.650538444519043 4874,"def _get_volume_options_from_type ( self, type_id, default_options ) : options = dict ( default_options. items ( ) ) if type_id : admin_context = cinder_context. get_admin_context ( ) volume_type = volume_types. get_volume_type ( admin_context, type_id ) specs = dict ( volume_type ). get ( ""extra_specs"" ) for key, value in six. iteritems ( specs ) : if key in self. VALID_VOLUME_TYPE_KEYS : if : options [ key ] = [ v. strip ( ). lower ( ) for v in value. split ( "","" ) ] else : options [ key ] = value. lower ( ) return options",False,key == self.DATACORE_DISK_POOLS_KEY,"isinstance(value, str)",0.6569615602493286 4875,"def __exit__ ( self, * args, ** kwargs ) : self. _samples_cache = { } if is_validation_enabled ( ) and isinstance ( self. prior, dict ) : extra = set ( self. prior ) - self. _param_hits if : warnings. warn ( ""pyro.module prior did not find params ['{}']. "" ""Did you instead mean one of ['{}']?"". format ( ""', '"". join ( extra ), ""', '"". join ( self. _param_misses ) ) ) return super ( ). __exit__ ( * args, ** kwargs )",False,extra,len(extra) > 0,0.6900540590286255 4876,"def __init__ ( self, sources : List [ BuildSource ] ) -> None : self. source_text_present = False self. source_modules = set ( ) self. source_paths = set ( ) for source in sources : if source. text is not None : self. source_text_present = True elif : self. source_paths. add ( source. path ) else : self. source_modules. add ( source. module )",False,source.path,source.path is not None,0.6648392677307129 4877,"def is_open ( self ) : if self. signup_code : return True else : if self. signup_code_present : if : messages. add_message ( self. request, self. messages [ ""invalid_signup_code"" ] [ ""level"" ], self. messages [ ""invalid_signup_code"" ] [ ""text"" ]. format ( ** { ""code"" : self. get_code ( ), } ), ) return settings. ACCOUNT_OPEN_SIGNUP",False,self.messages.get('invalid_signup_code'),"has_message(self, 'invalid_signup_code')",0.65576171875 4878,"def serve_until_stopped ( self ) -> None : while True : rd, wr, ex = select. select ( [ self. socket. fileno ( ) ], [ ], [ ], self. timeout ) if : self. 
handle_request ( ) if self. event is not None and self. event. is_set ( ) : break",False,rd,not ex,0.6763215065002441 4879,"def __on_new_workspace ( self, event ) : """"""Handle the New Workspace menu command"""""" if self. __dirty_workspace : result = wx. MessageBox ( ""Do you want to save your existing project?"", caption = ""Save project"", style = wx. YES_NO | wx. CANCEL | wx. ICON_QUESTION, parent = self. __frame, ) if result == wx. CANCEL : return elif result == wx. YES : path = get_current_workspace_path ( ) if : if not self. do_save_as_workspace ( ) : return else : self. do_save_workspace ( path ) self. do_create_workspace ( )",True,path is None,path is None,0.6642489433288574 4880,"def load_cache ( filename, get_key = mangle_key ) : cache = { } if not os. path. exists ( filename ) : return cache f = open ( filename, ""rb"" ) l = 0 for line in f. readlines ( ) : l += 1 fields = line. split ( b"" "" ) if : sys. stderr. write ( ""Invalid file format in [%s], line %d\n"" % ( filename, l ) ) continue cache [ get_key ( fields [ 0 ] [ 1 : ] ) ] = fields [ 1 ]. split ( b""\n"" ) [ 0 ] f. close ( ) return cache",False,fields == None or not len(fields) == 2 or fields[0][0:1] != b':',l > 0,0.6524518132209778 4881,"def _parse ( self, contents ) : entries = [ ] hostnames_found = set ( ) for line in contents. splitlines ( ) : if not len ( line. strip ( ) ) : entries. append ( ( ""blank"", [ line ] ) ) continue ( head, tail ) = chop_comment ( line. strip ( ), ""#"" ) if : entries. append ( ( ""all_comment"", [ line ] ) ) continue entries. append ( ( ""hostname"", [ head, tail ] ) ) hostnames_found. add ( head ) if len ( hostnames_found ) > 1 : raise IOError ( ""Multiple hostnames (%s) found!"" % ( hostnames_found ) ) return entries",False,not len(head),head == tail,0.6556034088134766 4882,"def sequence_list ( self ) : ""Returns a list of information about all DB sequences for all models in all apps."" from django. db import models, router apps = models. get_apps ( ) sequence_list = [ ] for app in apps : for model in models. get_models ( app ) : if not model. _meta. managed : continue if model. _meta. swapped : continue if not router. allow_syncdb ( self. connection. alias, model ) : continue for f in model. _meta. local_fields : if isinstance ( f, models. AutoField ) : sequence_list. append ( { ""table"" : model. _meta. db_table, ""column"" : f. column } ) break for f in model. _meta. local_many_to_many : if : sequence_list. append ( { ""table"" : f. m2m_db_table ( ), ""column"" : None } ) return sequence_list",False,f.rel.through is None,f.m2m_db_table(),0.6513980627059937 4883,"def load_file ( self ) : if self. ledger. fava_options [ ""import-config"" ] : full_path = os. path. normpath ( os. path. join ( os. path. dirname ( self. ledger. beancount_file_path ), self. ledger. fava_options [ ""import-config"" ], ) ) if : error = IngestError ( None, ""File does not exist: '{}'"". format ( full_path ), None ) self. ledger. errors. append ( error ) else : mod = runpy. run_path ( full_path ) self. config = mod [ ""CONFIG"" ]",False,not os.path.exists(full_path) or os.path.isdir(full_path),not os.path.exists(full_path),0.6471632719039917 4884,"def execute ( self ) : if self. _dirty or not self. _qr : model_class = self. model_class query_meta = self. get_query_meta ( ) if self. _tuples : ResultWrapper = TuplesQueryResultWrapper elif self. _dicts : ResultWrapper = DictQueryResultWrapper elif self. _naive or not self. _joins or self. 
verify_naive ( ) : ResultWrapper = NaiveQueryResultWrapper elif : ResultWrapper = AggregateQueryResultWrapper else : ResultWrapper = ModelQueryResultWrapper self. _qr = ResultWrapper ( model_class, self. _execute ( ), query_meta ) self. _dirty = False return self. _qr else : return self. _qr",False,self._aggregate_rows,self.execute(),0.6812577247619629 4885,"def _Atom ( self, children ) : """"""Handles alternatives of 'atom' where there is more than one child."""""" tok = children [ 0 ]. tok id_ = tok. id n = len ( children ) if id_ == Id. Op_LParen : if n == 2 : assert children [ 1 ]. tok. id == Id. Op_RParen, children [ 1 ] return expr. Tuple ( [ ], expr_context_e. Store ) return self. _TestlistComp ( children [ 1 ], id_ ) if id_ == Id. Op_LBracket : if n == 2 : assert children [ 1 ]. tok. id == Id. Op_RBracket, children [ 1 ] return expr. List ( [ ], expr_context_e. Store ) return self. _TestlistComp ( children [ 1 ], id_ ) if id_ == Id. Op_LBrace : i = 1 if : i += 1 return self. _Dict ( children [ i ] ) if id_ == Id. Arith_Slash : r = self. _Regex ( children [ 1 ] ) flags = [ ] trans_pref = None return expr. RegexLiteral ( children [ 0 ]. tok, r, flags, trans_pref ) if id_ == Id. Expr_Func : return expr. Lambda ( [ ], expr. Implicit ( ) ) raise NotImplementedError ( Id_str ( id_ ) )",False,children[i].tok.id == Id.Op_Newline,"isinstance(children[i], dict)",0.654444694519043 4886,"def __setitem__ ( self, key, value ) : if key in self : existing = self [ key ] if : return if not existing. shares_lineage ( value ) : util. warn ( ""Column %r on table %r being replaced by "" ""%r, which has the same key. Consider "" ""use_labels for select() statements."" % ( key, getattr ( existing, ""table"", None ), value ) ) util. memoized_property. reset ( value, ""proxy_set"" ) self. _all_columns. append ( value ) self. _data [ key ] = value",False,existing is value,existing is None,0.6726027727127075 4887,"def create ( path, binary = False ) : for i in range ( 10 ) : try : os. makedirs ( os. path. dirname ( path ), exist_ok = True ) if : return open ( path, ""wb"" ) else : return open ( path, ""w"", encoding = ""utf-8"" ) if i > 0 : log ( True, f""Created {path} at attempt {i + 1}"" ) except : time. sleep ( 0.5 ) else : raise Error ( f""Failed to create {path}"" )",True,binary,binary,0.6794127225875854 4888,"def gen_widths ( options, agi ) : """"""Generate the oc2 operand width enumeration & width lookup function"""""" lines = base_open_file ( options. input_widths, ""r"", ""widths input"" ). readlines ( ) widths_list = refine_widths_input ( lines ) ( cfn, hfn ) = emit_widths_enum ( options, widths_list ) agi. add_file_name ( cfn ) agi. add_file_name ( hfn, header = True ) cfn_map = emit_width_lookup ( options, widths_list ) agi. add_file_name ( cfn_map ) agi. widths_list = widths_list agi. widths_dict = { } for w in widths_list : agi. widths_dict [ w. name ] = w. dtype agi. scalable_widths = set ( ) for w in widths_list : ( w8, w16, w32, w64 ) = w. widths if : msge ( ""Adding scalable width: "" + w. name ) agi. scalable_widths. add ( w. name )",False,w16 != w32 or w16 != w64 or w32 != w64,w.name not in scalable_widths,0.657873272895813 4889,"def shortcut ( self, input, ch_out, stride, name, if_first = False ) : ch_in = input. shape [ 1 ] if ch_in!= ch_out or stride!= 1 : if : return self. conv_bn_layer ( input, ch_out, 1, stride, name = name ) else : return self. 
conv_bn_layer_new ( input, ch_out, 1, stride, name = name ) else : return input",True,if_first,if_first,0.6553119421005249 4890,"def test_list_users ( self, test_label, username, configure_first = True, raises = None, exception_message = None, ) : if configure_first : self. client. auth. ldap. create_or_update_user ( username = username, mount_point = self. TEST_LDAP_PATH, ) if raises : with self. assertRaises ( raises ) as cm : self. client. auth. ldap. list_users ( mount_point = self. TEST_LDAP_PATH, ) if : self. assertIn ( member = exception_message, container = str ( cm. exception ), ) else : list_users_response = self. client. auth. ldap. list_users ( mount_point = self. TEST_LDAP_PATH, ) self. assertDictEqual ( d1 = dict ( keys = [ username ] ), d2 = list_users_response [ ""data"" ], )",False,exception_message is not None,exception_message,0.6601938009262085 4891,"def __getitem__ ( self, key ) : if isinstance ( key, slice ) : entries = [ ] start = 0 if key. start is None else key. start stop = len ( self ) if key. stop is None else key. stop step = 1 if key. step is None else key. step for i in range ( start, stop, step ) : try : entry = self [ i ] except IndexError : break else : self. _cache [ entry. id ] entries. append ( entry ) return entries if key < 0 < abs ( key ) <= len ( self ) : key %= len ( self ) for i, entry in enumerate ( self ) : if : self. _cache [ entry. id ] return entry raise IndexError",False,i == key,key < 0 < entry.id,0.6865389347076416 4892,"def Get_Gene ( self, id ) : """"""Retreive the gene name (GN)."""""" entry = self. Get ( id ) if not entry : return None GN = """" for line in string. split ( entry, ""\n"" ) : if line [ 0 : 5 ] == ""GN "" : GN = string. strip ( line [ 5 : ] ) if GN [ - 1 ] == ""."" : GN = GN [ 0 : - 1 ] return GN if : break return GN",False,line[0:2] == '//',GN == None,0.6516856551170349 4893,"def _get_running_vms ( ssh, uuid, sudo_mode = False ) : stdin, stdout, stderr = ssh. exec_command ( ""{}xe vm-list resident-on={} "" ""params=uuid,name-label,power-state,VCPUs-number,memory-actual"". format ( ""sudo "" if sudo_mode else """", uuid, ) ) data = stdout. read ( ) vms = set ( ) for vm_data in data. split ( ""\n\n"" ) : info = parse. pairs ( lines = [ line. replace ( ""( RO)"", """" ) . replace ( ""( RW)"", """" ) . replace ( ""(MRO)"", """" ) . strip ( ) for line in vm_data. splitlines ( ) ] ) if : continue label = info [ ""name-label"" ] if label. lower ( ). startswith ( ""Transfer VM for"" ) or label. lower ( ). startswith ( ""Control domain on host:"" ) : continue power = info [ ""power-state"" ] if power not in { ""running"" } : continue cores = int ( info [ ""VCPUs-number"" ] ) memory = int ( int ( info [ ""memory-actual"" ] ) / 1024 / 1024 )",False,not info,info[0] == 'T',0.6688408851623535 4894,"def test_sanity_no_misaligned_entities ( CorpusType : Type [ HunerDataset ] ) : dataset_name = CorpusType. __class__. __name__. lower ( ) base_path = Path ( flair. cache_root ) / ""datasets"" data_folder = base_path / dataset_name from flair. tokenization import SciSpacyTokenizer tokenizer = SciSpacyTokenizer ( ) corpus = CorpusType ( ) internal = corpus. to_internal ( data_folder ) for doc_id, doc_text in internal. documents. items ( ) : misaligned_starts = [ ] misaligned_ends = [ ] token_starts = set ( ) token_ends = set ( ) for token, token_start in zip ( * tokenizer. tokenize ( doc_text ) ) : token_starts. add ( token_start ) token_ends. add ( token_start + len ( token ) ) entities = internal. 
entities_per_document [ doc_id ] entity_starts = [ i. char_span. start for i in entities ] entity_ends = [ i. char_span. stop for i in entities ] for start in entity_starts : if start not in entity_starts : misaligned_starts. append ( start ) for end in entity_ends : if : misaligned_starts. append ( end ) assert len ( misaligned_starts ) <= len ( entities ) // 10 assert len ( misaligned_ends ) <= len ( entities ) // 10",True,end not in entity_ends,end not in entity_ends,0.6612154245376587 4895,"def skip_this_post ( self, time_ts ) : """"""Check whether the post is current"""""" if self. stale is not None : _how_old = time. time ( ) - time_ts if : log. debug ( ""%s: record %s is stale (%d > %d)."", self. protocol_name, timestamp_to_string ( time_ts ), _how_old, self. stale, ) return True if self. post_interval is not None : _how_long = time_ts - self. lastpost if _how_long < self. post_interval : log. debug ( ""%s: wait interval (%d < %d) has not passed for record %s"", self. protocol_name, _how_long, self. post_interval, timestamp_to_string ( time_ts ), ) return True self. lastpost = time_ts return False",False,_how_old > self.stale,_how_old < self.lastpost,0.6620189547538757 4896,"def flingToEnd ( self, maxSwipes = 10 ) : if self. vertical : for _ in range ( maxSwipes ) : if : print ( ""flinging to end"", file = sys. stderr ) self. flingForward ( )",False,DEBUG,self.vertical,0.6839340925216675 4897,"def populate ( self, zone, target = False, lenient = False ) : self. log. debug ( ""populate: name=%s, target=%s, lenient=%s"", zone. name, target, lenient ) resp = None try : resp = self. _get ( ""zones/{}"". format ( zone. name ) ) self. log. debug ( ""populate: loaded"" ) except HTTPError as e : if e. response. status_code == 401 : raise Exception ( ""PowerDNS unauthorized host={}"". format ( self. host ) ) elif e. response. status_code == 422 : pass else : raise before = len ( zone. records ) exists = False if resp : exists = True for rrset in resp. json ( ) [ ""rrsets"" ] : _type = rrset [ ""type"" ] if : continue data_for = getattr ( self, ""_data_for_{}"". format ( _type ) ) record_name = zone. hostname_from_fqdn ( rrset [ ""name"" ] ) record = Record. new ( zone, record_name, data_for ( rrset ), source = self, lenient = lenient ) zone. add_record ( record, lenient = lenient ) self. log. info ( ""populate: ",False,_type == 'SOA',_type in ['TAB> or _type!= 'NONE',0.6545922756195068 4898,"def __eq__ ( self, other ) : if not isinstance ( other, relativedelta ) : return False if self. weekday or other. weekday : if not self. weekday or not other. weekday : return False if : return False n1, n2 = self. weekday. n, other. weekday. n if n1!= n2 and not ( ( not n1 or n1 == 1 ) and ( not n2 or n2 == 1 ) ) : return False return ( self. years == other. years and self. months == other. months and self. days == other. days and self. hours == other. hours and self. minutes == other. minutes and self. seconds == other. seconds and self. leapdays == other. leapdays and self. year == other. year and self. month == other. month and self. day == other. day and self. hour == other. hour and self. minute == other. minute and self. second == other. second and self. microsecond == other. microsecond )",False,self.weekday.weekday != other.weekday.weekday,"not isinstance(other, relativedelta)",0.6621918082237244 4899,"def _sort_clause ( self, query, joins, sort_field, sort_desc ) : if isinstance ( sort_field, string_types ) : field = getattr ( self. 
model, sort_field ) elif isinstance ( sort_field, Field ) : model_class = None try : model_class = sort_field. model_class except AttributeError : model_class = sort_field. model if : query = self. _handle_join ( query, sort_field, joins ) field = sort_field clause = field. desc ( ) if sort_desc else field. asc ( ) return query, joins, clause",False,model_class != self.model,model_class and sort_desc and (not clause.startswith('.')),0.6589987277984619 4900,"def set_required_env_var ( dcos_env_vars : dict, env_var_name : str ) -> None : env_var = os. getenv ( env_var_name ) if env_var is None : print_red ( ""ERROR: required environment variable '{}' is not set!"". format ( env_var_name ) ) if : print_red ( ""No dcos-test-utils variables were detected in your environment."" ) else : print ( ""Current dcos-test-utils variables detected in your environment:"" ) for k, v in dcos_env_vars. items ( ) : print ( ""{}={}"". format ( k, v ) ) print_red ( ""Run 'pytest --env-help' to see all environment variables to set."" ) sys. exit ( 1 ) dcos_env_vars [ env_var_name ] = env_var",False,not dcos_env_vars,dcos_env_vars is None,0.6534987688064575 4901,"def run ( self ) : while True : task = self. requestQueue. get ( ) if : break try : if self. interrupted ( ) : raise SCons. Errors. BuildError ( task. targets [ 0 ], errstr = interrupt_msg ) task. execute ( ) except : task. exception_set ( ) ok = False else : ok = True self. resultsQueue. put ( ( task, ok ) )",True,task is None,task is None,0.6771646738052368 4902,"def prepare_request ( next_link = None ) : header_parameters = { } header_parameters [ ""Accept"" ] = self. _serialize. header ( ""accept"", accept, ""str"" ) if not next_link : url = self. list. metadata [ ""url"" ] path_format_arguments = { ""subscriptionId"" : self. _serialize. url ( ""self._config.subscription_id"", self. _config. subscription_id, ""str"" ), } url = self. _client. format_url ( url, ** path_format_arguments ) query_parameters = { } if filter is not None : query_parameters [ ""$filter"" ] = self. _serialize. query ( ""filter"", filter, ""str"" ) if : query_parameters [ ""$top"" ] = self. _serialize. query ( ""top"", top, ""int"" ) query_parameters [ ""api-version"" ] = self. _serialize. query ( ""api_version"", api_version, ""str"" ) request = self. _client. get ( url, query_parameters, header_parameters ) else : url = next_link query_parameters = { } request = self. _client. get ( url, query_parameters, header_parameters ) return request",True,top is not None,top is not None,0.6702080965042114 4903,"def _format_descr ( descr, indent, box ) : wrapper = textwrap. TextWrapper ( ) if : wrapper. initial_indent = indent + "" "" wrapper. subsequent_indent = indent + "" "" wrapper. width = self. details. get ( ""width"" ) - 2 else : wrapper. initial_indent = indent wrapper. subsequent_indent = indent wrapper. width = self. details. get ( ""width"" ) new_descr = """" for line in descr. split ( ""\n"" ) : if : tmp_line = wrapper. fill ( line ) for single_line in tmp_line. split ( ""\n"" ) : single_line = single_line. ljust ( self. details. get ( ""width"" ), "" "" ) new_descr += ( single_line [ : len ( indent ) ] + self. printer. art [ ""vrt"" ] + single_line [ ( len ( indent ) + 1 ) : ( self. details. get ( ""width"" ) - 1 ) ] + self. printer. art [ ""vrt"" ] + ""\n"" ) else : new_descr += wrapper. fill ( line ) + ""\n"" return new_descr. rstrip ( )",True,box,box,0.6794736385345459 4904,"def run ( self ) : eid = self. start_episode ( ) obs = self. env. 
reset ( ) while True : if : action = self. env. action_space. sample ( ) self. log_action ( eid, obs, action ) else : action = self. get_action ( eid, obs ) obs, reward, done, info = self. env. step ( action ) self. log_returns ( eid, reward, info = info ) if done : self. end_episode ( eid, obs ) obs = self. env. reset ( ) eid = self. start_episode ( )",False,random.random() < self.off_pol_frac,self.has_action,0.6470698714256287
4905,"def __init__ ( self, hwnd ) : """"""Initialise the instance"""""" super ( ListViewWrapper, self ). __init__ ( hwnd ) if self. is_unicode ( ) : self. create_buffer = ctypes. create_unicode_buffer if : self. LVCOLUMN = win32structures. LVCOLUMNW self. LVITEM = win32structures. LVITEMW else : self. LVCOLUMN = win32structures. LVCOLUMNW32 self. LVITEM = win32structures. LVITEMW32 self. LVM_GETITEM = win32defines. LVM_GETITEMW self. LVM_GETCOLUMN = win32defines. LVM_GETCOLUMNW self. text_decode = lambda v : v else : self. create_buffer = ctypes. create_string_buffer if : self. LVCOLUMN = win32structures. LVCOLUMNW self. LVITEM = win32structures. LVITEMW else : self. LVCOLUMN = win32structures. LVCOLUMNW32 self. LVITEM = win32structures. LVITEMW32 self. LVM_GETCOLUMN = win32defines. LVM_GETCOLUMNA self. LVM_GETITEM = win32defines. LVM_GETITEMA self. text_decode = lambda v : v. decode ( locale. getpreferredencoding ( ) )",False,is64bitprocess(self.process_id()) or not is_x64_Python(),self.is_ LV(locale),0.6481010317802429
4906,"def _convert_bbs_to_polygons_ ( cls, batch ) : batch_contained_polygons = batch. polygons is not None if batch. bounding_boxes is None : return batch, ( False, batch_contained_polygons ) psois = [ bbsoi. to_polygons_on_image ( ) for bbsoi in batch. bounding_boxes ] psois = [ psoi. subdivide_ ( 2 ) for psoi in psois ] for psoi in psois : for polygon in psoi. polygons : if : polygon. label = ""$$IMGAUG_BB_AS_POLYGON"" else : polygon. label = polygon. label + "";$$IMGAUG_BB_AS_POLYGON"" if batch. polygons is None : batch. polygons = psois else : for psoi, bbs_as_psoi in zip ( batch. polygons, psois ) : assert psoi. shape == bbs_as_psoi. shape, ( ""Expected polygons and bounding boxes to have the same "" "".shape value, got %s and %s."" % ( psoi. shape, bbs_as_psoi. shape ) ) psoi. polygons. extend ( bbs_as_psoi. polygons ) batch. bounding_boxes = None return batch, ( True, batch_contained_polygons )",True,polygon.label is None,polygon.label is None,0.6603285074234009
4907,"def _expand_env ( self, snapcraft_yaml ) : environment_keys = [ ""name"", ""version"" ] for key in snapcraft_yaml : if : continue replacements = environment_to_replacements ( get_snapcraft_global_environment ( self. project ) ) snapcraft_yaml [ key ] = replace_attr ( snapcraft_yaml [ key ], replacements ) return snapcraft_yaml",False,any((key == env_key for env_key in environment_keys)),key not in environment_keys,0.652614951133728
4908,"def queue_autoscan_network ( network, queue_name = None ) : """"""Queues a pre-scan of a whole network on the right worker."""""" if not queue_name : if : raise NoQueueError ( ""No discovery queue defined for network "" ""environment {0}."". format ( network ), ) queue_name = network. environment. queue. name queue = django_rq. get_queue ( queue_name ) for group in _split_into_groups ( network. network. iterhosts ( ), ADDRESS_GROUP_SIZE, ) : queue. enqueue_call ( func = _autoscan_group, args = ( group, ), timeout = 90, result_ttl = 0, ) network. last_scan = datetime. datetime. now ( ) network. save ( )",False,not network.environment or not network.environment.queue,not network.environment.queue,0.6490414142608643
4909,"def MergeFrom ( self, other ) : if self. message_class is not None : if other. Parse ( self. message_class ) : self. message. MergeFrom ( other. message ) elif other. message_class is not None : if : self. message = other. message_class ( ) self. message_class = other. message_class self. message. MergeFrom ( other. message ) else : self. message += other. message",False,not self.Parse(other.message_class),"hasattr(other, 'message_class')",0.6524025797843933
4910,"def parse_headers ( obj, f ) : """"""Return dict of HTTP headers parsed from a file object."""""" d = { } while 1 : line = f. readline ( ) line = line. decode ( ""utf-8"" ) line = line. strip ( ) if not line : break l = line. split ( None, 1 ) if not l [ 0 ]. endswith ( "":"" ) : obj. errors. append ( dshell. core. DataError ( ""Invalid header {!r}"". format ( line ) ) ) k = l [ 0 ] [ : - 1 ]. lower ( ) v = len ( l )!= 1 and l [ 1 ] or """" if : if not type ( d [ k ] ) is list : d [ k ] = [ d [ k ] ] d [ k ]. append ( v ) else : d [ k ] = v return d",True,k in d,k in d,0.6770328879356384
4911,"def sanitize_function_name ( s ) : func = None if s is not None : try : valid = re. match ( r""^[\w]+\(\)"", s ) if : func = re. sub ( ""[()]"", """", s ) except : pass return func",False,valid is not None,valid,0.6631897687911987
4912,"def main ( server = SERVER ) : c = MQTTClient ( CLIENT_ID, server ) c. connect ( ) print ( ""Connected to %s, waiting for button presses"" % server ) while True : while True : if : break time. sleep_ms ( 20 ) print ( ""Button pressed"" ) c. publish ( TOPIC, b""toggle"" ) time. sleep_ms ( 200 ) c. disconnect ( )",False,button.value() == 0,not c.isOpen(),0.6571131944656372
4913,"def OnRefresh ( self ) : try : self. Clear ( ) self. nodes = { } for key in self. graph : self. nodes [ key ] = self. AddNode ( [ key, self. graph [ key ] ] ) for key in self. relations : if not key in self. nodes : self. nodes [ key ] = self. AddNode ( [ key, [ [ 0, 0, """" ] ] ] ) parent_node = self. nodes [ key ] for child in self. relations [ key ] : if : self. nodes [ child ] = self. AddNode ( [ child, [ [ 0, 0, """" ] ] ] ) child_node = self. nodes [ child ] self. AddEdge ( parent_node, child_node ) return True except : print ( ""GraphViewer Error:"", sys. exc_info ( ) [ 1 ] ) return True",False,not child in self.nodes,child in self.nodes,0.6619868278503418
4914,"def find_package_names ( ) : site_packages = sysconfig. get_python_lib ( ) res = { ""yaml"" : ""PyYAML"", ""Crypto"" : ""pycrypto"", } for pth in os. listdir ( site_packages ) : if not pth. endswith ( "".dist-info"" ) : continue pkgname = pth. split ( ""-"", 1 ) [ 0 ]. replace ( ""_"", ""-"" ) top_level_fname = os. path. join ( site_packages, pth, ""top_level.txt"" ) if not os. path. exists ( top_level_fname ) : if : print ( ""ERR:"", pth, ""has not top_level.txt"" ) continue for modname in open ( top_level_fname ). read ( ). split ( ) : modname = modname. replace ( ""/"", ""."" ) if modname. startswith ( r""win32\lib"" ) : modname = modname. rsplit ( ""\\"" ) [ 1 ] res [ modname ] = pkgname return res",False,pkgname not in res.values(),not os.path.exists(top_level_fname),0.6556094884872437
4915,"def signature ( self ) : try : from hashlib import md5 except ImportError : from md5 import md5 try : sig = md5 ( ) if self. start : sig. update ( self. start. encode ( ""latin-1"" ) ) if self. prec : sig. update ( """". join ( [ """". join ( p ) for p in self. prec ] ). encode ( ""latin-1"" ) ) if self. tokens : sig. update ( "" "". join ( self. tokens ). encode ( ""latin-1"" ) ) for f in self. pfuncs : if : sig. update ( f [ 3 ]. encode ( ""latin-1"" ) ) except ( TypeError, ValueError ) : pass digest = base64. b16encode ( sig. digest ( ) ) if sys. version_info [ 0 ] >= 3 : digest = digest. decode ( ""latin-1"" ) return digest",True,f[3],f[3],0.6632217764854431
4916,"def __init__ ( self, parent, name, columns = None, create = True, typeless = False ) : self. parent = parent self. name = unsafeSQLIdentificatorNaming ( name ) self. columns = columns if create : self. execute ( 'DROP TABLE IF EXISTS ""%s""' % self. name ) if : self. execute ( 'CREATE TABLE ""%s"" (%s)' % ( self. name, "","". join ( '""%s"" %s' % ( unsafeSQLIdentificatorNaming ( colname ), coltype ) for colname, coltype in self. columns ), ) ) else : self. execute ( 'CREATE TABLE ""%s"" (%s)' % ( self. name, "","". join ( '""%s""' % unsafeSQLIdentificatorNaming ( colname ) for colname in self. columns ), ) )",False,not typeless,typeless,0.6845206022262573
4917,"def evaluate_test_corpus ( self, corpus ) : logger. info ( ""TEST: evaluating test corpus"" ) if self. lda_alpha is None or self. lda_beta is None : self. lda_alpha, self. lda_beta = self. hdp_to_lda ( ) score = 0.0 total_words = 0 for i, doc in enumerate ( corpus ) : if : doc_word_ids, doc_word_counts = zip ( * doc ) likelihood, gamma = lda_e_step ( doc_word_ids, doc_word_counts, self. lda_alpha, self. lda_beta ) theta = gamma / np. sum ( gamma ) lda_betad = self. lda_beta [ :, doc_word_ids ] log_predicts = np. log ( np. dot ( theta, lda_betad ) ) doc_score = sum ( log_predicts ) / len ( doc ) logger. info ( ""TEST: %6d %.5f"" % ( i, doc_score ) ) score += likelihood total_words += sum ( doc_word_counts ) logger. info ( ""TEST: average score: %.5f, total score: %.5f, test docs: %d"" % ( score / total_words, score, len ( corpus ) ) ) return score",True,len(doc) > 0,len(doc) > 0,0.6546361446380615
4918,"def test_controlcharacters ( self ) : for i in range ( 128 ) : c = chr ( i ) testString = ""string containing %s"" % c if : plistlib. dumps ( testString, fmt = plistlib. FMT_XML ) else : self. assertRaises ( ValueError, plistlib. dumps, testString )",False,i >= 32 or c in '\r\n\t',c == '\n',0.656378448009491
4919,"def __getitem__ ( self, key ) : if key == 1 : return self. get_value ( ) elif key == 0 : return self. cell [ 0 ] elif isinstance ( key, slice ) : s = list ( self. cell. __getitem__ ( key ) ) if : s [ s. index ( self. cell [ 1 ] ) ] = self. get_value ( ) return s else : raise IndexError ( key )",False,self.cell[1] in s,key == self.cell[1],0.6674330830574036
4920,"def __init__ ( self, _inf = None, _tzinfos = None ) : if _inf : self. _tzinfos = _tzinfos self. _utcoffset, self. _dst, self. _tzname = _inf else : _tzinfos = { } self. _tzinfos = _tzinfos self. _utcoffset, self. _dst, self. _tzname = self. _transition_info [ 0 ] _tzinfos [ self. _transition_info [ 0 ] ] = self for inf in self. _transition_info [ 1 : ] : if : _tzinfos [ inf ] = self. __class__ ( inf, _tzinfos )",False,not _tzinfos.has_key(inf),inf not in _tzinfos,0.6518496870994568
4921,"def transform_privatedns_record_set_table_output ( result ) : table_output = [ ] for item in result : table_row = OrderedDict ( ) table_row [ ""Name"" ] = item [ ""name"" ] table_row [ ""ResourceGroup"" ] = item [ ""resourceGroup"" ] table_row [ ""Ttl"" ] = item [ ""ttl"" ] table_row [ ""Type"" ] = item [ ""type"" ]. rsplit ( ""/"", 1 ) [ 1 ] table_row [ ""AutoRegistered"" ] = item [ ""isAutoRegistered"" ] metadata = item [ ""metadata"" ] if : table_row [ ""Metadata"" ] = "" "". join ( [ '{}=""{}""'. format ( x, metadata [ x ] ) for x in sorted ( metadata ) ] ) else : table_row [ ""Metadata"" ] = "" "" table_output. append ( table_row ) return table_output",True,metadata,metadata,0.6857409477233887
4922,"def generator_mode ( ) : if description is not None : yield ""description"", ""meta"", description for data in fn ( * args, ** kw ) : if len ( data )!= 2 : yield data continue ( key, value ) = data if value is None : continue if : yield key, ""data"", encode ( value ) yield key, ""ssz"", serialize ( value ) elif isinstance ( value, bytes ) : yield key, ""data"", encode ( value ) yield key, ""ssz"", value elif isinstance ( value, list ) and all ( [ isinstance ( el, ( View, bytes ) ) for el in value ] ) : for i, el in enumerate ( value ) : if isinstance ( el, View ) : yield f""{key}_{i}"", ""data"", encode ( el ) yield f""{key}_{i}"", ""ssz"", serialize ( el ) elif isinstance ( el, bytes ) : yield f""{key}_{i}"", ""data"", encode ( el ) yield f""{key}_{i}"", ""ssz"", el yield f""{key}_count"", ""meta"", len ( value ) else : <",False,"isinstance(value, View)","isinstance(value, list)",0.6508713960647583
4923,"def read ( self ) : """"""Reads data from stream and switch state."""""" assert self. status in ( WAIT_LEN, WAIT_MESSAGE ) if self. status == WAIT_LEN : self. _read_len ( ) elif self. status == WAIT_MESSAGE : read = self. socket. recv ( self. len - len ( self. message ) ) if : logging. error ( ""can't read frame from socket (get %d of %d bytes)"" % ( len ( self. message ), self. len ) ) self. close ( ) return self. message += read if len ( self. message ) == self. len : self. status = WAIT_PROCESS",False,len(read) == 0,not read,0.6630752086639404
4924,"def posts_split_endpoint ( request, thread ) : if not thread. acl [ ""can_move_posts"" ] : raise PermissionDenied ( _ ( ""You can't split posts from this thread."" ) ) serializer = SplitPostsSerializer ( data = request. data, context = { ""settings"" : request. settings, ""thread"" : thread, ""user_acl"" : request. user_acl, }, ) if not serializer. is_valid ( ) : if : errors = serializer. errors [ ""posts"" ] try : errors = { ""detail"" : errors [ 0 ] } except KeyError : if isinstance ( errors, dict ) : errors = { ""detail"" : list ( errors. values ( ) ) [ 0 ] [ 0 ] } else : errors = serializer. errors return Response ( errors, status = 400 ) split_posts_to_new_thread ( request, thread, serializer. validated_data ) return Response ( { } )",False,'posts' in serializer.errors,"hasattr(serializer.errors, 'post_post')",0.672057032585144
4925,"def undecorated ( o ) : """"""Remove all decorators from a function, method or class"""""" if isinstance ( o, type ) : return o try : closure = o. func_closure except AttributeError : pass try : closure = o. __closure__ except AttributeError : return if closure : for cell in closure : if : continue if looks_like_a_decorator ( cell. cell_contents ) : undecd = undecorated ( cell. cell_contents ) if undecd : return undecd return o",False,cell.cell_contents is o,cell.cell_contents is None,0.6579523086547852
4926,"def update ( self ) : if not self. is_available : return current_measurement_value = self. reader. read_power ( ) current_measurement_time = time. time ( ) for m_idx, _ in enumerate ( self. last_probe ) : joule_used = ( current_measurement_value [ m_idx ]. current - self. last_probe [ m_idx ]. current ) / float ( self. MICRO_JOULE_IN_JOULE ) self. last_probe [ m_idx ] = joule_used seconds_passed = current_measurement_time - self. last_probe_time logging. debug ( ""seconds passed %s"", seconds_passed ) watts_used = float ( joule_used ) / float ( seconds_passed ) logging. debug ( ""watts used %s"", watts_used ) logging. info ( ""Joule_Used %d, seconds passed, %d"", joule_used, seconds_passed ) if : self. last_measurement [ m_idx ] = watts_used logging. info ( ""Power reading elapsed"" ) self. last_probe = current_measurement_value self. last_probe_time = current_measurement_time",False,watts_used > 0,self.last_measurement is not None,0.6577527523040771
4927,"def resend_activation_email ( request ) : if request. user. is_authenticated : return redirect ( request. GET. get ( ""next"", reverse ( ""spirit:user:update"" ) ) ) form = ResendActivationForm ( data = post_data ( request ) ) if is_post ( request ) : if : user = form. get_user ( ) send_activation_email ( request, user ) messages. info ( request, _ ( ""If you don't receive an email, please make sure you've entered "" ""the address you registered with, and check your spam folder."" ), ) return redirect ( reverse ( settings. LOGIN_URL ) ) return render ( request = request, template_name = ""spirit/user/auth/activation_resend.html"", context = { ""form"" : form }, )",False,not request.is_limited() and form.is_valid(),not form.is_valid(),0.6472569704055786
4928,"def testChildNamesHash ( self ) : p = GafferScene. Plane ( ) g1 = GafferScene. Group ( ) g1 [ ""in"" ] [ 0 ]. setInput ( p [ ""out"" ] ) g2 = GafferScene. Group ( ) g2 [ ""in"" ] [ 0 ]. setInput ( p [ ""out"" ] ) self. assertSceneHashesEqual ( g1 [ ""out"" ], g2 [ ""out"" ] ) g2 [ ""name"" ]. setValue ( ""stuff"" ) equivalentPaths = [ ( ""/"", ""/"" ), ( ""/group"", ""/stuff"" ), ( ""/group/plane"", ""/stuff/plane"" ), ] for path1, path2 in equivalentPaths : self. assertEqual ( g1 [ ""out"" ]. boundHash ( path1 ), g2 [ ""out"" ]. boundHash ( path2 ) ) self. assertEqual ( g1 [ ""out"" ]. transformHash ( path1 ), g2 [ ""out"" ]. transformHash ( path2 ) ) self. assertEqual ( g1 [ ""out"" ]. objectHash ( path1 ), g2 [ ""out"" ]. objectHash ( path2 ) ) self. assertEqual ( g1 [ ""out"" ]. attributesHash ( path1 ), g2 [ ""out"" ]. attributesHash ( path2 ) ) if : self. assertEqual ( g1 [ ""out"" ]. childNamesHash ( path1 ), g2 [ ""out"" ]. childNamesHash ( path2 ) ) else : self. assertNotEqual ( g1 [ ""out",False,path1 is not '/',"hasattr(g1, 'out')",0.6571784019470215
4929,"def __try_auth_token ( self, auth_string ) : if not self. __database_connection : return None user_name, token = auth_string. split ( "":"", 1 ) transaction = None try : transaction = self. __database_connection ( ) auth_session = ( transaction. query ( SessionRecord. token ) . filter ( SessionRecord. user_name == user_name ) . filter ( SessionRecord. token == token ) . filter ( SessionRecord. can_expire. is_ ( False ) ) . limit ( 1 ) . one_or_none ( ) ) if not auth_session : return False return auth_session except Exception as e : LOG. error ( ""Couldn't check login in the database: "" ) LOG. error ( str ( e ) ) finally : if : transaction. close ( )",True,transaction,transaction,0.697189211845398
4930,"def _collect_logs ( model ) : page_token = None all_logs = [ ] while True : paginated_logs = model. lookup_logs ( now, later, page_token = page_token ) page_token = paginated_logs. next_page_token all_logs. extend ( paginated_logs. logs ) if : break return all_logs",False,page_token is None,page_token and page_token == 'TAB > 'next_page_token',0.6544469594955444
4931,"def runTest ( self ) : """"""This function will add domain under schema node."""""" db_con = database_utils. connect_database ( self, utils. SERVER_GROUP, self. server_id, self. db_id ) if not db_con [ ""data"" ] [ ""connected"" ] : raise Exception ( ""Could not connect to database to get the domain."" ) db_name = self. database_info [ ""db_name"" ] schema_response = schema_utils. verify_schemas ( self. server, db_name, self. schema_name ) if not schema_response : raise Exception ( ""Could not find the schema to get the domain."" ) self. domain_id = self. domain_info [ 0 ] if self. is_positive_test : if : response = self. get_domain_list ( ) else : response = self. get_domain ( ) else : if hasattr ( self, ""error_fetching_domain"" ) : with patch ( self. mock_data [ ""function_name"" ], return_value = eval ( self. mock_data [ ""return_value"" ] ), ) : if : response = self. get_domain_list ( ) else : response = self. get_domain ( ) if hasattr ( self, ""wrong_domain_id"" ) : self. domain_id = 99999 response =",False,"hasattr(self, 'domain_list')",self.is_negative_test,0.6509230136871338
4932,"def emit ( self, record ) : try : app = get_app ( ) if : msg = self. format ( record ) debug_buffer = app. layout. get_buffer_by_name ( ""debug_buffer"" ) current_document = debug_buffer. document. text if current_document : msg = ""\n"". join ( [ current_document, msg ] ) debug_buffer. set_document ( Document ( text = msg ), bypass_readonly = True ) else : super ( ). emit ( record ) except : self. handleError ( record )",False,"app.is_running and getattr(app, 'debug', False)",app,0.6469560861587524
4933,"def test_getitem ( self ) : n = 200 d = deque ( xrange ( n ) ) l = range ( n ) for i in xrange ( n ) : d. popleft ( ) l. pop ( 0 ) if : d. append ( i ) l. append ( i ) for j in xrange ( 1 - len ( l ), len ( l ) ) : assert d [ j ] == l [ j ] d = deque ( ""superman"" ) self. assertEqual ( d [ 0 ], ""s"" ) self. assertEqual ( d [ - 1 ], ""n"" ) d = deque ( ) self. assertRaises ( IndexError, d. __getitem__, 0 ) self. assertRaises ( IndexError, d. __getitem__, - 1 )",False,random.random() < 0.5,len(l) > 0,0.6526885032653809
4934,"def set_print_format_fields ( self ) : bank_amount = party_amount = total_amount = 0.0 currency = bank_account_currency = party_account_currency = pay_to_recd_from = None party_type = None for d in self. get ( ""accounts"" ) : if d. party_type in [ ""Customer"", ""Supplier"" ] and d. party : party_type = d. party_type if not pay_to_recd_from : pay_to_recd_from = d. party if : party_amount += ( d. debit_in_account_currency or d. credit_in_account_currency ) party_account_currency = d. account_currency elif frappe. db. get_value ( ""Account"", d. account, ""account_type"" ) in [ ""Bank"", ""Cash"", ] : bank_amount += d. debit_in_account_currency or d. credit_in_account_currency bank_account_currency = d. account_currency if party_type and pay_to_recd_from : self. pay_to_recd_from = frappe. db. get_value ( party_type, pay_to_recd_from, ""customer_name"" if party_type == ""Customer"" else ""supplier_name"", ) if bank",False,pay_to_recd_from and pay_to_recd_from == d.party,d.account_currency,0.6474637389183044
4935,def rmdir ( dirname ) : if dirname [ - 1 ] == os. sep : dirname = dirname [ : - 1 ] if os. path. islink ( dirname ) : return for f in os. listdir ( dirname ) : if : continue path = dirname + os. sep + f if os. path. isdir ( path ) : rmdir ( path ) else : os. unlink ( path ) os. rmdir ( dirname ),False,"f in ('.', '..')",f.endswith('.yaml'),0.6566120386123657
4936,"def func ( args, env = None, include_stderr = False, expect_exit = False ) : if not env : env = { } system_exit = False with monkeypatch. context ( ) as monkeypatch_ctx : for name, val in env. items ( ) : monkeypatch_ctx. setenv ( name, val ) try : main ( args ) except SystemExit : if : system_exit = True else : raise if : assert system_exit, ""Expected command to exit, but command succeeded instead"" stdout, stderr = capsys. readouterr ( ) if include_stderr : return stdout, stderr else : return stdout",True,expect_exit,expect_exit,0.6620639562606812
4937,"def _prepare_travel_graph ( self ) : for op in self. op_dict. values ( ) : op. const = False if op. node. op in [ ""Const"", ""Placeholder"" ] : op. resolved = True if : op. const = True else : op. resolved = False",False,op.node.op == 'Const',"op.node.op in [_Const, _Placeholder]",0.6533916592597961
4938,"def _force_local ( cls, pex_file, pex_info ) : if pex_info. code_hash is None : return pex_file explode_dir = os. path. join ( pex_info. zip_unsafe_cache, pex_info. code_hash ) TRACER. log ( ""PEX is not zip safe, exploding to %s"" % explode_dir ) with atomic_directory ( explode_dir ) as explode_tmp : if explode_tmp : with TRACER. timed ( ""Unzipping %s"" % pex_file ) : with open_zip ( pex_file ) as pex_zip : pex_files = ( x for x in pex_zip. namelist ( ) if : and not x. startswith ( pex_info. internal_cache ) ) pex_zip. extractall ( explode_tmp, pex_files ) return explode_dir",False,not x.startswith(pex_builder.BOOTSTRAP_DIR),pex_info.internal_cache and pex_files,0.6468230485916138
4939,"def _tableGetCount ( self, db, table ) : if not db or not table : return None if Backend. getIdentifiedDbms ( ) in UPPER_CASE_DBMSES : db = db. upper ( ) table = table. upper ( ) if Backend. getIdentifiedDbms ( ) in ( DBMS. SQLITE, DBMS. ACCESS, DBMS. FIREBIRD, DBMS. MCKOI, DBMS. EXTREMEDB, ) : query = ""SELECT %s FROM %s"" % ( queries [ Backend. getIdentifiedDbms ( ) ]. count. query % ""*"", safeSQLIdentificatorNaming ( table, True ), ) else : query = ""SELECT %s FROM %s.%s"" % ( queries [ Backend. getIdentifiedDbms ( ) ]. count. query % ""*"", safeSQLIdentificatorNaming ( db ), safeSQLIdentificatorNaming ( table, True ), ) query = agent. whereQuery ( query ) count = inject. getValue ( query, expected = EXPECTED. INT, charsetType = CHARSET_TYPE. DIGITS ) if isNumPosStrValue ( count ) : if safeSQLIdentificatorNaming ( db ) not in kb. data. cachedCounts : kb. data. cachedCounts [ safeSQLIdentificatorNaming ( db ) ] = { } if : kb. data. cachedCounts [ safeSQLIdentificatorNaming ( db ) ] [ int ( count ) ]. append ( <",False,int(count) in kb.data.cachedCounts[safeSQLIdentificatorNaming(db)],safeSQLIdentificatorNaming(db) in self.data.cachedCounts,0.6604896187782288
4940,"def run ( self ) : pwd_found = [ ] if constant. user_dpapi and constant. user_dpapi. unlocked : main_vault_directory = os. path. join ( constant. profile [ ""APPDATA"" ], u"".."", u""Local"", u""Microsoft"", u""Vault"" ) if : for vault_directory in os. listdir ( main_vault_directory ) : cred = constant. user_dpapi. decrypt_vault ( os. path. join ( main_vault_directory, vault_directory ) ) if cred : pwd_found. append ( cred ) return pwd_found",False,os.path.exists(main_vault_directory),os.path.isdir(main_vault_directory),0.6522239446640015
4941,"def __init__ ( self, * args, ** kwargs ) : """"""Initialize the structured grid."""""" super ( ). __init__ ( ) if len ( args ) == 1 : if isinstance ( args [ 0 ], vtk. vtkStructuredGrid ) : self. deep_copy ( args [ 0 ] ) elif isinstance ( args [ 0 ], str ) : self. _from_file ( args [ 0 ] ) elif len ( args ) == 3 : arg0_is_arr = isinstance ( args [ 0 ], np. ndarray ) arg1_is_arr = isinstance ( args [ 1 ], np. ndarray ) arg2_is_arr = isinstance ( args [ 2 ], np. ndarray ) if : self. _from_arrays ( args [ 0 ], args [ 1 ], args [ 2 ] )",False,"all([arg0_is_arr, arg1_is_arr, arg2_is_arr])",arg0_is_arr and arg1_is_arr and (args[2] is None),0.6527261734008789
4942,"def udfInjectCore ( self, udfDict ) : written = False for udf in udfDict. keys ( ) : if udf in self. createdUdf : continue self. udfCheckAndOverwrite ( udf ) if len ( self. udfToCreate ) > 0 : self. udfSetRemotePath ( ) checkFile ( self. udfLocalFile ) written = self. writeFile ( self. udfLocalFile, self. udfRemoteFile, ""binary"", forceCheck = True ) if : errMsg = ""there has been a problem uploading the shared library, "" errMsg += ""it looks like the binary file has not been written "" errMsg += ""on the database underlying file system"" logger. error ( errMsg ) message = ""do you want to proceed anyway? Beware that the "" message += ""operating system takeover will fail [y/N] "" if readInput ( message, default = ""N"", boolean = True ) : written = True else : return False else : return True for udf, inpRet in udfDict. items ( ) : if udf in self. udfToCreate and udf not in self. createdUdf : self. udfCreateFromSharedLib ( udf, inpRet ) if Backend. isDbms ( DBMS. MYSQL ) : supportTblType = ""longtext"" elif Backend. isDbms ( DBMS. PGSQL ) : supportTblType = ""text"" self. udfCreateSupport",False,written is not True,written,0.6706836223602295
4943,"def TryMerge ( self, d ) : while 1 : tt = d. getVarInt32 ( ) if tt == 132 : break if tt == 136 : self. set_dispatched_usec ( d. getVarInt64 ( ) ) continue if tt == 144 : self. set_lag_usec ( d. getVarInt64 ( ) ) continue if tt == 152 : self. set_elapsed_usec ( d. getVarInt64 ( ) ) continue if tt == 160 : self. set_response_code ( d. getVarInt64 ( ) ) continue if tt == 218 : self. set_retry_reason ( d. getPrefixedString ( ) ) continue if : raise ProtocolBuffer. ProtocolBufferDecodeError d. skipData ( tt )",False,tt == 0,tt == 119,0.6815414428710938
4944,"def pytest_generate_tests ( metafunc ) : bsz_rng = [ 1 ] if ""refgruargs"" in metafunc. fixturenames : fargs = [ ] if : seq_rng = [ 2, 3, 4 ] inp_rng = [ 3, 5, 10 ] out_rng = [ 3, 5, 10 ] else : seq_rng = [ 3 ] inp_rng = [ 5 ] out_rng = [ 10 ] fargs = itt. product ( seq_rng, inp_rng, out_rng, bsz_rng ) metafunc. parametrize ( ""refgruargs"", fargs ) if ""gradgruargs"" in metafunc. fixturenames : fargs = [ ] if : seq_rng = [ 2, 3 ] inp_rng = [ 5, 10 ] out_rng = [ 3, 5, 10 ] else : seq_rng = [ 3 ] inp_rng = [ 5 ] out_rng = [ 10 ] fargs = itt. product ( seq_rng, inp_rng, out_rng, bsz_rng ) metafunc. parametrize ( ""gradgruargs"", fargs )",False,metafunc.config.option.all,'avgpool' in fargs,0.6495445966720581
4945,"def _queue_redraw_curve ( self ) : """"""Redraws the entire curve on all known view TDWs"""""" self. _stop_task_queue_runner ( complete = False ) for tdw in self. _overlays : model = tdw. doc if : continue self. _queue_task ( self. brushwork_rollback, model ) self. _queue_task ( self. brushwork_begin, model, description = _ ( ""Inking"" ), abrupt = True, ) interp_state = { ""t_abs"" : self. nodes [ 0 ]. time } for p_1, p0, p1, p2 in gui. drawutils. spline_iter ( self. nodes ) : self. _queue_task ( self. _draw_curve_segment, model, p_1, p0, p1, p2, state = interp_state ) self. _start_task_queue_runner ( )",False,len(self.nodes) < 2,complete,0.6535524129867554
4946,"def get_maps ( test ) : pages = set ( ) for addr in test [ ""pre"" ] [ ""memory"" ]. keys ( ) : pages. add ( addr >> 12 ) for addr in test [ ""pos"" ] [ ""memory"" ]. keys ( ) : pages. add ( addr >> 12 ) maps = [ ] for p in sorted ( pages ) : if : maps [ - 1 ] = ( maps [ - 1 ] [ 0 ], maps [ - 1 ] [ 1 ] + 0x1000 ) else : maps. append ( ( p << 12, 0x1000 ) ) return maps",False,len(maps) > 0 and maps[-1][0] + maps[-1][1] == p << 12,p & 128,0.6528527736663818
4947,"def _create_group_snapshot_generic ( self, context, group_snapshot, snapshots ) : """"""Creates a group_snapshot."""""" model_update = { ""status"" : ""available"" } snapshot_model_updates = [ ] for snapshot in snapshots : snapshot_model_update = { ""id"" : snapshot. id } try : driver_update = self. driver. create_snapshot ( snapshot ) if driver_update : driver_update. pop ( ""id"", None ) snapshot_model_update. update ( driver_update ) if : snapshot_model_update [ ""status"" ] = fields. SnapshotStatus. AVAILABLE except Exception : snapshot_model_update [ ""status"" ] = fields. SnapshotStatus. ERROR model_update [ ""status"" ] = ""error"" snapshot_model_updates. append ( snapshot_model_update ) return model_update, snapshot_model_updates",False,'status' not in snapshot_model_update,snapshot_model_update['status'] == fields.SnapshotStatus.AVAILABLE,0.6547070741653442
4948,"def _apply_value ( self, model, iter_, cell, stamp ) : if not stamp : cell. set_property ( ""text"", _ ( ""Never"" ) ) else : try : date = datetime. datetime. fromtimestamp ( stamp ). date ( ) except ( OverflowError, ValueError, OSError ) : text = u"""" else : format_setting = config. gettext ( ""settings"", ""datecolumn_timestamp_format"" ) if format_setting : format_ = format_setting else : today = datetime. datetime. now ( ). date ( ) days = ( today - date ). days if : format_ = ""%X"" elif days < 7 : format_ = ""%A"" else : format_ = ""%x"" stamp = time. localtime ( stamp ) text = time. strftime ( format_, stamp ) cell. set_property ( ""text"", text )",False,days == 0,days > 6,0.672976016998291
4949,"def _prepare ( ) : try : if : client = local_client ( ) client. pull ( prerequisites. image, tag = prerequisites. tag ) else : logger. info ( ""task-api-dev mode enabled, use local images"" ) except Exception as e : self. _error_occurred ( e, ""Preparing prerequisites failed."", set_status = False ) raise self. _prerequisites_installed ( prerequisites ) return True",False,not self._dev_mode,is_local_prerequisites(prerequisites.image),0.6606485843658447
4950,"def compare_model_state ( test_fixture, state, state2, check_heads = True ) : for k in state [ ""model"" ] [ ""trunk"" ]. keys ( ) : if : print ( k, state [ ""model"" ] [ ""trunk"" ] [ k ], state2 [ ""model"" ] [ ""trunk"" ] [ k ] ) test_fixture. assertTrue ( torch. allclose ( state [ ""model"" ] [ ""trunk"" ] [ k ], state2 [ ""model"" ] [ ""trunk"" ] [ k ] ) ) if check_heads : for block, head_states in state [ ""model"" ] [ ""heads"" ]. items ( ) : for head_id, states in head_states. items ( ) : for k in states. keys ( ) : test_fixture. assertTrue ( torch. allclose ( state [ ""model"" ] [ ""heads"" ] [ block ] [ head_id ] [ k ], state2 [ ""model"" ] [ ""heads"" ] [ block ] [ head_id ] [ k ], ) )",False,"not torch.allclose(state['model']['trunk'][k], state2['model']['trunk'][k])",k in state2['model'],0.6602776050567627
4951,"def _get_codon_list ( codonseq ) : """"""List of codons according to full_rf_table for counting (PRIVATE)."""""" full_rf_table = codonseq. get_full_rf_table ( ) codon_lst = [ ] for i, k in enumerate ( full_rf_table ) : if isinstance ( k, int ) : start = k try : end = int ( full_rf_table [ i + 1 ] ) except IndexError : end = start + 3 this_codon = str ( codonseq [ start : end ] ) if len ( this_codon ) == 3 : codon_lst. append ( this_codon ) else : codon_lst. append ( str ( this_codon. ungap ( ) ) ) elif : codon_lst. append ( ""---"" ) else : codon_lst. append ( codonseq [ int ( k ) : int ( k ) + 3 ] ) return codon_lst",False,str(codonseq[int(k):int(k) + 3]) == '---',len(arg_seq) == 2,0.6571063995361328
4952,"def iterations ( ) : check_garbage ( ) max_mem = 0 max_mem_stable = 0 max_mem_increasing = 0 leak = True m1 = 0 for i in range ( options. max_iterations ) : yield i if : if i == 3 : check_garbage ( ) helpers. record_memory_leak_status ( ) if i == 4 or i == 5 : helpers. record_memory_leak_status ( print_diff = True ) m2 = mem ( ) print ( ""iteration %02d/%02d: %d pages used (%+d)"" % ( i + 1, options. max_iterations, m2, m2 - m1 ) ) m1 = m2 if m2 > max_mem : max_mem = m2 max_mem_stable = 0 max_mem_increasing += 1 if max_mem_increasing == options. required : print ( ""maximum was always increasing for"", max_mem_increasing, ""iterations"", ) break else : max_mem_stable += 1 max",False,options.debug,i == 0,0.6570525169372559
4953,"def tokenize ( self, text ) : """"""Tokenize the input string `text`, and return the tokenize result."""""" text_len = len ( text ) result = [ ] i = 0 while i < text_len : word = found_word = """" if self. __is_eng_char ( text [ i ] ) : for j in range ( i, text_len + 1 ) : if j < text_len and self. __is_eng_char ( text [ j ] ) : word += self. __tolower ( text [ j ] ) else : if : result. append ( word ) i = j - 1 break else : for j in range ( i, min ( i + self. __max_word_len, text_len ) ) : word += text [ j ] if : found_word = word if len ( found_word ) > 0 : result. append ( found_word ) i += len ( found_word ) - 1 i += 1 return result",False,word in self.__vocab,len(word) > 0,0.6705615520477295
4954,"def get_domains_up_by_filers ( domain_type, date_from = None, date_to = None, tags = [ ], nb_obj = 28, page = 1 ) : if not tags : if : return domains_up_by_page ( domain_type, nb_obj = nb_obj, page = page ) else : domains = sorted ( get_domains_up_by_daterange ( date_from, date_to, domain_type ) ) domains = paginate_iterator ( domains, nb_obj = nb_obj, page = page ) domains [ ""list_elem"" ] = create_domains_metadata_list ( domains [ ""list_elem"" ], domain_type ) domains [ ""domain_type"" ] = domain_type domains [ ""date_from"" ] = date_from domains [ ""date_to"" ] = date_to return domains else : return None",False,not date_from and (not date_to),nb_obj > 0,0.6491000652313232
4955,"def GetNumberOfLines ( self ) : text = self. GetValue ( ) width = self. GetClientSize ( ). width dc = wx. ClientDC ( self ) dc. SetFont ( self. GetFont ( ) ) count = 0 for line in text. split ( ""\n"" ) : count += 1 w, h = dc. GetTextExtent ( line ) if : count += self. _wrapLine ( line, dc, width ) if not count : count = 1 return count",False,w > width - self._getExtra(),w == -1 or h == -1,0.6556733846664429
4956,"def __call__ ( self, model, output_path : str = None, epoch : int = - 1, steps : int = - 1 ) -> float : model. eval ( ) total = 0 correct = 0 if epoch!= - 1 : if steps == - 1 : out_txt = "" after epoch {}:"". format ( epoch ) else : out_txt = "" in epoch {} after {} steps:"". format ( epoch, steps ) else : out_txt = "":"" logger. info ( ""Evaluation on the "" + self. name + "" dataset"" + out_txt ) self. dataloader. collate_fn = model. smart_batching_collate for step, batch in enumerate ( tqdm ( self. dataloader, desc = ""Evaluating"" ) ) : features, label_ids = batch_to_device ( batch, model. device ) with torch. no_grad ( ) : _, prediction = self. softmax_model ( features, labels = None ) total += prediction. size ( 0 ) correct += torch. argmax ( prediction, dim = 1 ). eq ( label_ids ). sum ( ). item ( ) accuracy = correct / total logger. info ( ""Accuracy: {:.4f} ({}/{})\n"". format ( accuracy, correct, total ) ) if output_path is not None : csv_path = os. path. join ( output_path, self. csv_file ) if : with open ( csv_path, mode = ""w"", encoding = ""utf-8"" ) as f : writer = csv. writer ( f ) writer. writerow ( self. csv_headers ) ",False,not os.path.isfile(csv_path),os.path.exists(csv_path),0.6454676389694214
4957,"def build_canned_image_list ( path ) : layers_path = get_bitbake_var ( ""BBLAYERS"" ) canned_wks_layer_dirs = [ ] if layers_path is not None : for layer_path in layers_path. split ( ) : for wks_path in ( WIC_DIR, SCRIPTS_CANNED_IMAGE_DIR ) : cpath = os. path. join ( layer_path, wks_path ) if : canned_wks_layer_dirs. append ( cpath ) cpath = os. path. join ( path, CANNED_IMAGE_DIR ) canned_wks_layer_dirs. append ( cpath ) return canned_wks_layer_dirs",True,os.path.isdir(cpath),os.path.isdir(cpath),0.6465350985527039
4958,"def update_dict ( a, b ) : for key, value in b. items ( ) : if value is None : continue if : a [ key ] = value elif isinstance ( a [ key ], dict ) and isinstance ( value, dict ) : update_dict ( a [ key ], value ) elif isinstance ( a [ key ], list ) : a [ key ]. append ( value ) else : a [ key ] = [ a [ key ], value ]",True,key not in a,key not in a,0.668784499168396
4959,"def getSubsegments ( self ) : for num, localdata in self. lfh. LocalData : for bucket, seginfo in localdata. SegmentInfo : if : continue yield Win32Subsegment ( self. trace, self. heap, seginfo. ActiveSubsegment )",False,seginfo.ActiveSubsegment == 0,seginfo.ActiveSubsegment is None,0.6543750762939453
4960,"def map_external_uid_with_hangups_user ( self, source_uid, external_context ) : telegram_uid = str ( source_uid ) profilesync_keys = [ ""profilesync"", ""tg2ho"", telegram_uid ] hangups_user = False try : hangouts_map = self. bot. memory. get_by_path ( profilesync_keys ) if isinstance ( hangouts_map, str ) : return False if ""chat_id"" in hangouts_map : hangouts_uid = hangouts_map [ ""chat_id"" ] elif : gplus = hangouts_map [ ""user_gplus"" ] has_chat_id = re. search ( r""/(\d+)/about"", gplus ) if has_chat_id : hangouts_uid = has_chat_id. group ( 1 ) else : hangouts_uid = False if hangouts_uid : _hangups_user = self. bot. get_hangups_user ( hangouts_uid ) if _hangups_user. definitionsource : hangups_user = _hangups_user except KeyError : logger. info ( ""no hangups user for {}"". format ( source_uid ) ) return hangups_user",True,'user_gplus' in hangouts_map,'user_gplus' in hangouts_map,0.6548639535903931
4961,"def __saveWork ( self, work, results ) : """"""Stores the resulting last log line to the cache with the proxy key"""""" del work try : if : __cached = self. __cache [ results [ 0 ] ] __cached [ self. __TIME ] = time. time ( ) __cached [ self. __LINE ] = results [ 1 ] __cached [ self. __LLU ] = results [ 2 ] except KeyError as e : pass except Exception as e : list ( map ( logger. warning, cuegui. Utils. exceptionOutput ( e ) ) )",False,results,len(results) >= 2,0.6823166012763977
4962,"def _get_items ( self, name, target = 1 ) : all_items = self. get_items ( name ) items = [ o for o in all_items if not o. disabled ] if len ( items ) < target : if len ( all_items ) < target : raise ItemNotFoundError ( ""insufficient items with name %r"" % name ) else : raise AttributeError ( ""insufficient non-disabled items with name %s"" % name ) on = [ ] off = [ ] for o in items : if : on. append ( o ) else : off. append ( o ) return on, off",False,o.selected,o.disabled,0.6702395677566528
4963,"def start ( url, command, username, password, shell = False ) : if need_auth ( url ) : print ( ""[+] Node-RED requires authentication."" ) if : print ( ""[+] Trying default credentials."" ) access_token = login ( url ) else : print ( ""[+] Trying provided credentials."" ) access_token = login ( url, username = username, password = password ) if access_token is None : print ( ""[!] An error occured during login procedure. Wrong creds?"" ) return else : print ( ""[+] Successfully authenticated over HTTP."" ) return asyncio. get_event_loop ( ). run_until_complete ( exploit ( url, command, shell, access_token ) ) else : print ( ""[+] Node-RED does not require authentication."" ) return asyncio. get_event_loop ( ). run_until_complete ( exploit ( url, command, shell ) )",False,username is None and password is None,username is None or password is None,0.6552834510803223
4964,"def run_with_catch ( args = None, env = None ) : from virtualenv. config. cli. parser import VirtualEnvOptions env = os. environ if env is None else env options = VirtualEnvOptions ( ) try : run ( args, options, env ) except ( KeyboardInterrupt, SystemExit, Exception ) as exception : try : if : raise else : if not ( isinstance ( exception, SystemExit ) and exception. code == 0 ) : logging. error ( ""%s: %s"", type ( exception ). __name__, exception ) code = exception. code if isinstance ( exception, SystemExit ) else 1 sys. exit ( code ) finally : logging. shutdown ( )",False,"getattr(options, 'with_traceback', False)",exception is None,0.647743284702301
4965,"def downsample_seg_for_ds_transform3 ( seg, ds_scales = ( ( 1, 1, 1 ), ( 0.5, 0.5, 0.5 ), ( 0.25, 0.25, 0.25 ) ), classes = None ) : output = [ ] one_hot = torch. from_numpy ( convert_seg_image_to_one_hot_encoding_batched ( seg, classes ) ) for s in ds_scales : if all ( [ i == 1 for i in s ] ) : output. append ( torch. from_numpy ( seg ) ) else : kernel_size = tuple ( int ( 1 / i ) for i in s ) stride = kernel_size pad = tuple ( ( i - 1 ) // 2 for i in kernel_size ) if len ( s ) == 2 : pool_op = avg_pool2d elif : pool_op = avg_pool3d else : raise RuntimeError ( ) pooled = pool_op ( one_hot, kernel_size, stride, pad, count_include_pad = False, ceil_mode = False, ) output. append ( pooled ) return output",True,len(s) == 3,len(s) == 3,0.6577027440071106
4966,"def get_args ( limit = 20 ) : args = [ ] any_tags_combinations = [ DEFAULT_ANY_TAGS, COMMON_AND_RARE, RARE_ANY_TAGS, COMMON_AND_RARE2, CITY_FIX, [ ], ] not_tags_combinations = [ MATURE_TAGS, [ ] ] for no_fee in [ False, True ] : for claim_type in [ None, ""stream"", ""channel"" ] : for no_totals in [ True ] : for offset in [ 0, 100 ] : for any_tags in any_tags_combinations : for not_tags in not_tags_combinations : for order_by in ORDER_BY : kw = { ""order_by"" : order_by, ""offset"" : offset, ""limit"" : limit, ""no_totals"" : no_totals, } create_email = kwargs. pop ( ""create_email"", True ) confirm_email = kwargs. pop ( ""confirm_email"", None ) account = cls ( ** kwargs ) if ""language"" not in kwargs : if request is None : account. language = DEFAULT_LANGUAGE else : account. language = translation. get_language_from_request ( request, check_path = True ) account. save ( ) if create_email and account. user. email : kwargs = { ""primary"" : True } if : kwargs [ ""confirm"" ] = confirm_email EmailAddress. objects. add_email ( account. user, account. user. email, ** kwargs ) return account",False,confirm_email is not None,confirm_email,0.6583845615386963
4968,"def smartsplit ( code ) : """"""Split `code` at "" symbol, only if it is not escaped."""""" strings = [ ] pos = 0 while pos < len ( code ) : if : word = """" pos += 1 while pos < len ( code ) : if : break if code [ pos ] == ""\\"" : word += ""\\"" pos += 1 word += code [ pos ] pos += 1 strings. append ( '""%s""' % word ) pos += 1 return strings",False,"code[pos] == '""'",code[pos] == '\\',0.661501407623291
4969,"def __init__ ( self, response, message ) : self. response = response self. status = response. status if isinstance ( message, dict ) : self. code = message. get ( ""code"", 0 ) base = message. get ( ""message"", """" ) errors = message. get ( ""errors"" ) if : errors = flatten_error_dict ( errors ) helpful = ""\n"". join ( ""In %s: %s"" % t for t in errors. items ( ) ) self. text = base + ""\n"" + helpful else : self. text = base else : self. text = message self. code = 0 fmt = ""{0.status} {0.reason} (error code: {1})"" if len ( self. text ) : fmt = fmt + "": {2}"" super ( ). __init__ ( fmt. format ( self. response, self. code, self. text ) )",True,errors,errors,0.6926330327987671
4970,"def reverse_code ( apps : StateApps, schema_editor : DatabaseSchemaEditor ) -> None : PreregistrationUser = apps. get_model ( ""zerver"", ""PreregistrationUser"" ) for user in PreregistrationUser. objects. all ( ) : if : user. invited_as_admin = True else : user. invited_as_admin = False user. save ( update_fields = [ ""invited_as_admin"" ] )",False,user.invited_as == 2,not user.invited,0.6565252542495728
4971,"def userless_histories ( self, trans, ** kwd ) : """"""The number of userless histories and associated datasets that have not been updated for the specified number of days."""""" params = util. Params ( kwd ) message = """" if params. userless_histories_days : userless_histories_days = int ( params. userless_histories_days ) cutoff_time = datetime. utcnow ( ) - timedelta ( days = userless_histories_days ) history_count = 0 dataset_count = 0 for history in trans. sa_session. query ( model. History ). filter ( and_ ( model. History. table. c. user_id == null ( ), model. History. table. c. deleted == true ( ), model. History. table. c. update_time < cutoff_time, ) ) : for dataset in history. datasets : if : dataset_count += 1 history_count += 1 message = ( ""%d userless histories ( including a total of %d datasets ) have not been updated for at least %d days."" % ( history_count, dataset_count, userless_histories_days ) ) else : message = ""Enter the number of days."" return str ( userless_histories_days ), message",False,not dataset.deleted,"hasattr(dataset, 'update_time')",0.6615043878555298
4972,"def on_task_abort ( self, task, config ) : if ""abort"" in config : if : return log. debug ( ""sending abort notification"" ) self. send_notification ( config [ ""abort"" ] [ ""title"" ], config [ ""abort"" ] [ ""message"" ], config [ ""abort"" ] [ ""via"" ], template_renderer = task. render, )",False,task.silent_abort,task.cancel_on_task_abort,0.664771556854248
4973,"def outlineView_heightOfRowByItem_ ( self, tree, item ) -> float : default_row_height = self. rowHeight if item is self : return default_row_height heights = [ default_row_height ] for column in self. tableColumns : value = getattr ( item. attrs [ ""node"" ], str ( column. identifier ) ) if : heights. append ( value. _impl. native. intrinsicContentSize ( ). height ) return max ( heights )",False,"isinstance(value, toga.Widget)",value is not None,0.6557950377464294
4974,def close ( self ) : if self. _sock : if : self. _loop. remove_periodic ( self. handle_periodic ) self. _loop. remove ( self. _sock ) self. _sock. close ( ) self. _sock = None,False,self._loop,self.handle_periodic,0.6718765497207642
4975,"def update_handler ( self, fd : Union [ int, _Selectable ], events : int ) -> None : fd, fileobj = self. split_fd ( fd ) if events & IOLoop. READ : if : self. selector_loop. add_reader ( fd, self. _handle_events, fd, IOLoop. READ ) self. readers. add ( fd ) else : if fd in self. readers : self. selector_loop. remove_reader ( fd ) self. readers. remove ( fd ) if events & IOLoop. WRITE : if fd not in self. writers : self. selector_loop. add_writer ( fd, self. _handle_events, fd, IOLoop. WRITE ) self. writers. add ( fd ) else : if fd in self. writers : self. selector_loop. remove_writer ( fd ) self. writers. remove ( fd )",True,fd not in self.readers,fd not in self.readers,0.6583425998687744
4976,"def __init__ ( self, data, depvar, subject, within = None, between = None, aggregate_func = None ) : self. data = data self. depvar = depvar self. within = within if ""C"" in within : raise ValueError ( ""Factor name cannot be 'C'! This is in conflict "" ""with patsy's contrast function name."" ) self. between = between if between is not None : raise NotImplementedError ( ""Between subject effect not "" ""yet supported!"" ) self. subject = subject if aggregate_func == ""mean"" : self. aggregate_func = np. mean else : self. aggregate_func = aggregate_func if not data. equals ( data. drop_duplicates ( subset = [ subject ] + within ) ) : if : self. _aggregate ( ) else : msg = ( ""The data set contains more than one observation per "" ""subject and cell. Either aggregate the data manually, "" ""or pass the `aggregate_func` parameter."" ) raise ValueError ( msg ) self. _check_data_balanced ( )",False,self.aggregate_func is not None,aggregate_func is None,0.6618744730949402
4977,"def import_data ( self, fname ) : """"""Import data in current namespace"""""" if self. count ( ) : nsb = self. currentWidget ( ) nsb. refresh_table ( ) nsb. import_data ( fname ) if : self. dockwidget. setVisible ( True ) self. dockwidget. raise_ ( )",False,self.dockwidget and (not self.ismaximized),self.dockwidget.visible(),0.6572795510292053
4978,"def test_mlflow_is_not_installed_unless_specified ( ) : if no_conda : pytest. skip ( ""This test requires conda."" ) with TempDir ( chdr = True ) as tmp : fake_model_path = tmp. path ( ""fake_model"" ) fake_env_path = tmp. path ( ""fake_env.yaml"" ) _mlflow_conda_env ( path = fake_env_path, install_mlflow = False ) mlflow. pyfunc. save_model ( fake_model_path, loader_module = __name__, conda_env = fake_env_path ) p = subprocess. Popen ( [ ""mlflow"", ""models"", ""predict"", ""-m"", fake_model_path ], stderr = subprocess. PIPE, cwd = tmp. path ( """" ), ) _, stderr = p. communicate ( ) stderr = stderr. decode ( ""utf-8"" ) print ( stderr ) assert p. wait ( )!= 0 if : assert ""ModuleNotFoundError: No module named'mlflow'"" in stderr else : assert ""ImportError: No module named mlflow.pyfunc.scoring_server"" in stderr",False,PYTHON_VERSION.startswith('3'),not install_mlflow,0.651702344417572
4979,"def revert ( self, context, result, flow_failures, volume, ** kwargs ) : if not self. do_reschedule : common. error_out ( volume ) LOG. error ( ""Volume %s: create failed"", volume. id ) return False for failure in flow_failures. values ( ) : if : common. error_out ( volume ) LOG. error ( ""Volume %s: create failed"", volume. id ) return False if self. reschedule_context : cause = list ( flow_failures. values ( ) ) [ 0 ] context = self. reschedule_context try : self. _pre_reschedule ( volume ) self. _reschedule ( context, cause, volume = volume, ** kwargs ) self. _post_reschedule ( volume ) return True except exception. CinderException : LOG. exception ( ""Volume %s: rescheduling failed"", volume. id ) return False",False,failure.check(*self.no_reschedule_types),failure,0.6498314142227173
4980,"def __init__ ( self, in_channels, out_channels, ksize, stride = 1, pad = 0, dilate = 1, nobias = False, dw_initialW = None, pw_initialW = None, dw_initial_bias = None, pw_initial_bias = None, dw_activ = identity, pw_activ = relu, bn_kwargs = { }, ) : self. dw_activ = identity if dw_activ is None else dw_activ self. pw_activ = identity if pw_activ is None else pw_activ super ( SeparableConv2DBNActiv, self ). __init__ ( ) with self. init_scope ( ) : self. depthwise = Convolution2D ( in_channels, in_channels, ksize = ksize, stride = stride, pad = pad, dilate = dilate, groups = in_channels, nobias = nobias, initialW = dw_initialW, ) self. pointwise = Convolution2D ( in_channels, out_channels, 1, nobias = nobias, initialW = pw_initialW ) if : self. dw_bn = MultiNodeBatchNormalization ( out_channels, ** bn_kwargs ) self. pw_bn = MultiNodeBatchNormalization ( out_channels, ** bn_kwargs ) else : self. dw_",False,'comm' in bn_kwargs,nobias,0.6524698734283447
4981,"def __init__ ( self, dir : str = None, validate_dir : bool = True, ** kwargs : Any ) -> None : full_prefect_path = os. path. abspath ( config. home_dir ) common_path = """" try : if dir is not None : common_path = os. path. commonpath ( [ full_prefect_path, os. path. abspath ( dir ) ] ) except ValueError : pass if dir is None or common_path == full_prefect_path : directory = os. path. join ( config. home_dir, ""results"" ) else : directory = dir if validate_dir : abs_directory = os. path. abspath ( os. path. expanduser ( directory ) ) if : os. makedirs ( abs_directory ) else : abs_directory = directory self. dir = abs_directory super ( ). __init__ ( ** kwargs )",False,not os.path.exists(abs_directory),not os.path.isdir(abs_directory),0.6466200351715088
4982,"def test_open_overwrite_offset_size ( self, sftp ) : """"""Test writing data at a specific offset"""""" f = None try : self. _create_file ( ""file"", ""xxxxyyyy"" ) f = yield from sftp. open ( ""file"", ""r+"" ) yield from f. write ( ""zz"", 3 ) yield from f. close ( ) with open ( ""file"" ) as localf : self. assertEqual ( localf. read ( ), ""xxxzzyyy"" ) finally : if : yield from f. close ( ) remove ( ""file"" )",True,f,f,0.6864446997642517
4983,"def __call__ ( self, * args, ** kwargs ) -> ""CompiledExecutor"" : is_cached : bool = kwargs. pop ( ""cached"", False ) if is_cached : kwargs [ ""dest_dir"" ] = env. compiled_binary_cache_dir obj : ""CompiledExecutor"" = super ( ). __call__ ( * args, ** kwargs ) obj. is_cached = is_cached if is_cached : cache_key_material = ( utf8bytes ( obj. __class__. __name__ + obj. __module__ ) + obj. get_binary_cache_key ( ) ) cache_key = hashlib. sha384 ( cache_key_material ). hexdigest ( ) if : executor = self. compiled_binary_cache [ cache_key ] assert executor. _executable is not None if os. path. isfile ( executor. _executable ) : obj. _executable = executor. _executable obj. _dir = executor. _dir return obj obj. create_files ( * args, ** kwargs ) obj. compile ( ) if is_cached : self. compiled_binary_cache [ cache_key ] = obj return obj",True,cache_key in self.compiled_binary_cache,cache_key in self.compiled_binary_cache,0.6497700810432434
4984,def finalize ( self ) : if self. _started : if : self. _queue. put ( None ) self. _queue. join ( ) self. _consumer. join ( ) self. _started = False self. _finalized = True,False,not self._finalized,not self.consumer,0.6746705770492554
4985,"def __init__ ( self, parametrization : IntOrParameter, budget : tp. Optional [ int ] = None, num_workers : int = 1, ) -> None : super ( ). __init__ ( parametrization, budget = budget, num_workers = num_workers ) assert budget is not None nw = num_workers // 2 self. which_optim = [ 0 ] * nw for i in range ( num_workers - nw ) : self. which_optim += [ i + 1 ] assert len ( self. which_optim ) == num_workers self. optims = [ CMA ( self. parametrization, num_workers = nw ) ] for i in range ( num_workers - nw ) : self. optims += [ SQP ( self. parametrization, num_workers = 1 ) ] if : self. optims [ - 1 ]. initial_guess = self. _rng. normal ( 0, 1, self. dimension )",False,i > 0,self.optims,0.6768208742141724
4986,"def expand_first ( self, seq ) : head = """" keys, mapped_to = self. _find_full_match ( self. state. mode, seq ) if : self. state. logger. info ( ""[Mappings] found full command: {0} -> {1}"". format ( keys, mapped_to ) ) return Mapping ( seq, mapped_to [ ""name"" ], seq [ len ( keys ) : ], mapping_status. COMPLETE ) for key in KeySequenceTokenizer ( seq ). iter_tokenize ( ) : head += key keys, mapped_to = self. _find_full_match ( self. state. mode, head ) if : self. state. logger. info ( ""[Mappings] found full command: {0} -> {1}"". format ( keys, mapped_to ) ) return Mapping ( head, mapped_to [ ""name"" ], seq [ len ( head ) : ], mapping_status. COMPLETE ) else : break if self. _find_partial_match ( self. state. mode, seq ) : self. state. logger. info ( ""[Mappings] found partial command: {0}"". format ( seq ) ) return Mapping ( seq, """", """", mapping_status. INCOMPLETE ) return None",False,keys,mapped_to,0.6694816946983337
4987,"def _unquote ( str ) : if str is None or len ( str ) < 2 : return str if str [ 0 ]!= '""' or str [ - 1 ]!= '""' : return str str = str [ 1 : - 1 ] i = 0 n = len ( str ) res = [ ] while 0 <= i < n : o_match = _OctalPatt. search ( str, i ) q_match = _QuotePatt. search ( str, i ) if not o_match and not q_match : res. append ( str [ i : ] ) break j = k = - 1 if o_match : j = o_match. start ( 0 ) if : k = q_match. start ( 0 ) if q_match and ( not o_match or k < j ) : res. append ( str [ i : k ] ) res. append ( str [ k + 1 ] ) i = k + 2 else : res. append ( str [ i : j ] ) res. append ( chr ( int ( str [ j + 1 : j + 4 ], 8 ) ) ) i = j + 4 return _nulljoin ( res )",True,q_match,q_match,0.672800600528717
4988,"def git_log ( args ) : parser = argparse. ArgumentParser ( description = ""git log arg parser"" ) parser. add_argument ( ""-f"", ""--format"", action = ""store"", dest = ""format"", default = False ) parser. add_argument ( ""-o"", ""--output"", action = ""store"", dest = ""output"", type = argparse. FileType ( ""w"" ), default = sys. stdout, ) parser. add_argument ( ""-l"", ""--length"", action = ""store"", type = int, dest = ""max_entries"", default = None ) parser. add_argument ( ""--oneline"", action = ""store_true"", dest = ""oneline"", default = False ) results = parser. parse_args ( args ) try : repo = _get_repo ( ) outstream = StringIO ( ) porcelain. log ( repo. repo. path, max_entries = results. max_entries, outstream = outstream ) if not results. oneline : print ( outstream. getvalue ( ) ) else : last_commit = """" last_printed = """" start_message = False for line in outstream. getvalue ( ). split ( ""\n"" ) : if line. startswith ( ""commit:"" ) : tokens = line. split ( "" "" ) last_commit = tokens [ - 1 ] [ : 7 ] ",False,last_commit == last_printed and start_message is True,default,0.6496603488922119
4989,"def update ( self, targets ) : Section. update ( self, targets ) outputNames = set ( ) for target in targets : g = target. globals ( ) outputNames. update ( [ k for k in g. keys ( ) if k. startswith ( ""output:"" ) ] ) rows = [ ] outputNames = sorted ( outputNames ) for outputName in outputNames : row = self. __rows. get ( outputName ) if : row = _OutputRow ( outputName ) self. __rows [ outputName ] = row row. update ( targets ) row. setAlternate ( len ( rows ) % 2 ) rows. append ( row ) self. _mainColumn ( ) [ : ] = rows",True,row is None,row is None,0.6661540269851685
4990,"def seqSetup ( self, imin, imax, jmin = 0, jmax = None ) : seqi = [ imin, imin, 12, 34 ] seqj = [ jmin, 12, jmin, 34 ] if not imax and not jmax : l = 2222222222222222222222222222 seqi. append ( l ) seqj. append ( l ) for n in range ( 100 ) : ifirstmax = jfirstmax = 100000 if imax : ifirstmax = min ( imax, ifirstmax ) if jmax : jfirstmax = min ( jmax, jfirstmax ) i = randrange ( imin, ifirstmax ) j = randrange ( jmin, jfirstmax ) seqi. append ( i ) seqj. append ( j ) for n in range ( 100 ) : if : i = randrange ( maxint ) + randrange ( maxint ) else : i = randrange ( imin, imax ) if not jmax : j = randrange ( maxint ) + randrange ( maxint ) else : j = randrange ( jmin, jmax ) seqi. append ( i ) seqj. append ( j ) self. seqi = seqi self. seqj = seqj",False,not imax,not imin,0.7007740139961243
4991,"def _savePictures ( self, object, folder ) : hasPictures = False for arcname, picturerec in object. Pictures. items ( ) : what_it_is, fileobj, mediatype = picturerec self. manifest. addElement ( manifest. FileEntry ( fullpath = ""%s%s"" % ( folder, arcname ), mediatype = mediatype ) ) hasPictures = True if : self. _z. write ( fileobj, arcname, zipfile. ZIP_STORED ) else : zi = zipfile. ZipInfo ( str ( arcname ), self. _now ) zi. compress_type = zipfile. ZIP_STORED zi. external_attr = UNIXPERMS self. _z. writestr ( zi, fileobj ) subobjectnum = 1 for subobject in object. childobjects : self. _savePictures ( subobject, ""%sObject %d/"" % ( folder, subobjectnum ) ) subobjectnum += 1",False,what_it_is == IS_FILENAME,hasPictures,0.6517028212547302
4992,"def test_signal_handling_multiprocessing_process_warning ( self ) : warnings. filterwarnings ( ""always"", """", DeprecationWarning, __name__ ) fake_utcnow = datetime. date ( 2021, 1, 1 ) proc = None try : with patch ( ""salt.utils.versions.warn_until_date"", self. patched_warn_until_date ( fake_utcnow ), ) : with warnings. catch_warnings ( record = True ) as recorded_warnings : proc = salt. utils. process. SignalHandlingMultiprocessingProcess ( target = self. process_target ) self. assertEqual ( ""Please stop using'salt.utils.process.SignalHandlingMultiprocessingProcess' "" ""and instead use'salt.utils.process.SignalHandlingProcess'. "" ""'salt.utils.process.SignalHandlingMultiprocessingProcess' will go away "" ""after 2022-01-01."", six. text_type ( recorded_warnings [ 0 ]. message ), ) finally : if : del proc",True,proc is not None,proc is not None,0.6624610424041748
4993,"def get_rules ( self, map ) : for rulefactory in self. rules : for rule in rulefactory. get_rules ( map ) : new_defaults = subdomain = None if : new_defaults = { } for key, value in rule. defaults. iteritems ( ) : if isinstance ( value, basestring ) : value = format_string ( value, self. context ) new_defaults [ key ] = value if rule. subdomain is not None : subdomain = format_string ( rule. subdomain, self. context ) new_endpoint = rule. endpoint if isinstance ( new_endpoint, basestring ) : new_endpoint = format_string ( new_endpoint, self. context ) yield Rule ( format_string ( rule. rule, self. context ), new_defaults, subdomain, rule. methods, rule. build_only, new_endpoint, rule. strict_slashes, )",False,rule.defaults,rule.defaults is not None,0.6709610223770142
4994,"def __view_beside ( self, onsideof, ** kwargs ) : bounds = self. info [ ""bounds"" ] min_dist, found = - 1, None for ui in UiObject ( self. session, Selector ( ** kwargs ) ) : dist = onsideof ( bounds, ui. info [ ""bounds"" ] ) if : min_dist, found = dist, ui return found",False,dist >= 0 and (min_dist < 0 or dist < min_dist),dist > min_dist,0.6525450348854065
4995,"def openEmptyWorkBook ( self ) : """"""Open an empty frame and paste the contents of CheatSheet.leo into it."""""" lm = self fn = lm. computeWorkbookFileName ( ) c = lm. loadLocalFile ( fn, gui = g. app. gui, old_c = None ) if not g. app. batchMode and not g. os_path_exists ( fn ) : old_clipboard = g. app. gui. getTextFromClipboard ( ) c2 = c. openCheatSheet ( redraw = False ) if c2 : for p2 in c2. rootPosition ( ). self_and_siblings ( ) : c2. selectPosition ( p2 ) c2. copyOutline ( ) p = c. pasteOutline ( ) c. selectPosition ( p ) p. contract ( ) p. clearDirty ( ) c2. close ( new_c = c ) root = c. rootPosition ( ) if root. h == g. shortFileName ( fn ) : root. doDelete ( newNode = root. next ( ) ) p = g. findNodeAnywhere ( c, ""Leo's cheat sheet"" ) if : c. selectPosition ( p ) p. expand ( ) c. target_language = ""rest"" ",True,p,p,0.6932766437530518
4996,"def one ( checks, state ) : """"""Execute one loop iteration."""""" disabled = options. disable is not None and os. path. exists ( options. disable ) successful = disabled or check ( options. command, options. timeout ) if state!= states. DISABLED and disabled : state = trigger ( states. DISABLED ) elif state == states. INIT : if successful and options. rise <= 1 : state = trigger ( states. UP ) elif successful : state = trigger ( states. RISING ) checks = 1 else : state = trigger ( states. FALLING ) checks = 1 elif state == states. DISABLED : if not disabled : state = trigger ( states. INIT ) elif state == states. RISING : if successful : checks += 1 if checks >= options. rise : state = trigger ( states. UP ) else : state = trigger ( states. FALLING ) checks = 1 elif state == states. FALLING : if not successful : checks += 1 if : state = trigger ( states. DOWN ) else : state = trigger ( states. RISING ) checks = 1 elif state == states. UP : if not successful : state = trigger ( states. FALLING ) checks",False,checks >= options.fall,checks >= options.rise,0.6624369621276855
4997,"def extract_subdomains ( file_name ) : global domain_match subs = { } sub_file = open ( file_name ). read ( ) f_all = re. findall ( domain_match, sub_file ) del sub_file for i in f_all : if i. find ( ""."" ) >= 0 : p = i. split ( ""."" ) [ 0 : - 1 ] while p and len ( p [ - 1 ] ) <= 3 : p = p [ 0 : - 1 ] p = p [ 0 : - 1 ] if len ( p ) >= 1 : trace ( str ( p ), "" : "", i ) for q in p : if q : q = q. lower ( ) if : subs [ q ] += 1 else : subs [ q ] = 1 del f_all subs_sorted = sorted ( subs. keys ( ), key = lambda x : subs [ x ], reverse = True ) return subs_sorted",True,q in subs,q in subs,0.6791987419128418
4998,"def _get_children ( self, event = None ) : with self. _run_lock : if : return try : children = self. _client. retry ( self. _client. get_children, self. _path, self. _watcher ) except NoNodeError : self. _stopped = True return if not self. _watch_established : self. _watch_established = True if self. _prior_children is not None and self. _prior_children == children : return self. _prior_children = children try : if self. _send_event : result = self. _func ( children, event ) else : result = self. _func ( children ) if result is False : self. _stopped = True self. _func = None if self. _allow_session_lost : self. _client. remove_listener ( self. _session_watcher ) except Exception as exc : log. exception ( exc ) raise",False,self._stopped,self._client is None,0.669532060623169
4999,"def _calculate ( self ) : before = self. before. data after = self. after. data self. deleted = { } self. updated = { } self. created = after. copy ( ) for path, f in before. items ( ) : if : self. deleted [ path ] = f continue del self. created [ path ] if f. mtime < after [ path ]. mtime : self. updated [ path ] = after [ path ]",False,path not in after,path not in self.deleted,0.6693145036697388