,input,is_correct,expected_cond,predicted_cond,score 0,"def get_source(self, environment, template): if: return (self.overriden_templates[template], template, True) if template.startswith('admin/'): template = template[6:] template = '/'.join(['admin', 'templates', template]) return super(ThemeLoader, self).get_source(environment, template) template = '/'.join(['user', 'templates', template]) return super(ThemeLoader, self).get_source(environment, template)",True,template in self.overriden_templates,template in self.overriden_templates,0.6552744507789612 1,"def get_source(self, environment, template): if template in self.overriden_templates: return (self.overriden_templates[template], template, True) if: template = template[6:] template = '/'.join(['admin', 'templates', template]) return super(ThemeLoader, self).get_source(environment, template) template = '/'.join(['user', 'templates', template]) return super(ThemeLoader, self).get_source(environment, template)",True,template.startswith('admin/'),template.startswith('admin/'),0.6439536213874817 2,"def __init__(self, ref_list: Iterable[_Reference]) -> None: self.matching_refs: list[list[_Reference]] = [] for ref in ref_list: add = True for other_refs in self.matching_refs: if other_refs[0].matches(ref): add = False other_refs.append(ref) break if: self.matching_refs.append([ref])",True,add,add,0.6711677312850952 3,"def __init__(self, ref_list: Iterable[_Reference]) -> None: self.matching_refs: list[list[_Reference]] = [] for ref in ref_list: add = True for other_refs in self.matching_refs: if: add = False other_refs.append(ref) break if add: self.matching_refs.append([ref])",False,other_refs[0].matches(ref),ref in other_refs,0.6418271064758301 4,"def generate_sample_problems(self, solver): linear, quadratic = self.sapi.problem problems = [('sample_ising', (linear, quadratic)), ('sample_qubo', (quadratic,))] if: bqm = dimod.BQM.from_ising(linear, quadratic) problems.append(('sample_bqm', (bqm,))) return problems",False,dimod,self.model_type == 'quadratic',0.6707044839859009 5,"def get_temperature_c(self, botengine=None): """""" Get the latest temperature in Celsius :param botengine: :return: temperature in Celsius """""" if: return self.measurements[TemperatureDevice.MEASUREMENT_DEG_C][0][0] return None",True,TemperatureDevice.MEASUREMENT_DEG_C in self.measurements,TemperatureDevice.MEASUREMENT_DEG_C in self.measurements,0.649125874042511 6,"def __init__(self, cfg, input_size): super().__init__(cfg) self.cfg = cfg self.is_gru = False if: self.core = nn.GRU(input_size, cfg.rnn_size, cfg.rnn_num_layers) self.is_gru = True elif cfg.rnn_type == 'lstm': self.core = nn.LSTM(input_size, cfg.rnn_size, cfg.rnn_num_layers) else: raise RuntimeError(f'Unknown RNN type {cfg.rnn_type}') self.core_output_size = cfg.rnn_size self.rnn_num_layers = cfg.rnn_num_layers",True,cfg.rnn_type == 'gru',cfg.rnn_type == 'gru',0.6513108015060425 7,"def __init__(self, cfg, input_size): super().__init__(cfg) self.cfg = cfg self.is_gru = False if cfg.rnn_type == 'gru': self.core = nn.GRU(input_size, cfg.rnn_size, cfg.rnn_num_layers) self.is_gru = True elif: self.core = nn.LSTM(input_size, cfg.rnn_size, cfg.rnn_num_layers) else: raise RuntimeError(f'Unknown RNN type {cfg.rnn_type}') self.core_output_size = cfg.rnn_size self.rnn_num_layers = cfg.rnn_num_layers",True,cfg.rnn_type == 'lstm',cfg.rnn_type == 'lstm',0.6525049209594727 8,"def add_gain(self, val): if: self.importance += val else: self.importance_2 += val",False,self.main_type == 'gain',self.importance_2 is None,0.6461501121520996 9,"def delete_all_connections(self, location): new_data = self.adj_list.copy() loc_name = UpdateAdjList.get_location_name(location) if: new_data.pop(loc_name) for key in new_data: if loc_name in new_data[key]: new_data[key].pop(loc_name) with open(self.adj_list_path, 'w') as f: f.write(str(new_data))",False,loc_name in new_data,loc_name is not None,0.6534073352813721 10,"def delete_all_connections(self, location): new_data = self.adj_list.copy() loc_name = UpdateAdjList.get_location_name(location) if loc_name in new_data: new_data.pop(loc_name) for key in new_data: if: new_data[key].pop(loc_name) with open(self.adj_list_path, 'w') as f: f.write(str(new_data))",False,loc_name in new_data[key],key in new_data,0.6481366157531738 11,"def start_img(self, attributes): A = self.getAttributes(attributes, _imgAttrMap) if: self._syntax_error(' needs src attribute') A['_selfClosingTag'] = 'img' self._push('img', **A)",False,not A.get('src'),A['src'] is None,0.6531503200531006 12,"def get_credential(self, username, password): q = select(self.CredentialsTable).filter(self.CredentialsTable.c.username == username, self.CredentialsTable.c.password == password) results = self.sess.execute(q).first() if: return None else: return results.id",True,results is None,results is None,0.6523830890655518 13,"def flipy(self): if: print('%s.%s()' % (self.__class__.__name__, _fn_name())) return True",False,_debug,self.verbose,0.6635514497756958 14,"def _find_terminator(self, iterator): """"""The terminator might have some additional newlines before it. There is at least one application that sends additional newlines before headers (the python setuptools package). """""" for line in iterator: if: break line = line.strip() if line: return line return ''",True,not line,not line,0.6531288623809814 15,"def _find_terminator(self, iterator): """"""The terminator might have some additional newlines before it. There is at least one application that sends additional newlines before headers (the python setuptools package). """""" for line in iterator: if not line: break line = line.strip() if: return line return ''",True,line,line,0.6587799787521362 16,"def canRunGUI() -> bool: if: return bool(os.getenv('DISPLAY')) if core.sysName == 'darwin': try: import tkinter except ModuleNotFoundError: return False return True",False,core.sysName == 'linux','DISPLAY' in os.environ,0.6488453149795532 17,"def canRunGUI() -> bool: if core.sysName == 'linux': return bool(os.getenv('DISPLAY')) if: try: import tkinter except ModuleNotFoundError: return False return True",True,core.sysName == 'darwin',core.sysName == 'darwin',0.6462803483009338 18,"def unassign_role_from_group(self, session, group, role): """"""Unassigns a role from a group on a domain"""""" url = utils.urljoin(self.base_path, self.id, 'groups', group.id, 'roles', role.id) resp = session.delete(url, endpoint_filter=self.service) if: return True return False",False,resp.status_code == 204,resp.status_code == 200,0.6449288129806519 19,"def __exit__(self, type, value, traceback): if: self.close() else: if not self._extfileobj: self.fileobj.close() self.closed = True",True,type is None,type is None,0.6553890109062195 20,"def __exit__(self, type, value, traceback): if type is None: self.close() else: if: self.fileobj.close() self.closed = True",True,not self._extfileobj,not self._extfileobj,0.6471370458602905 21,"def _unassign_params(self, tensor_id): if: del self.id_to_params[tensor_id]",False,tensor_id in self.id_to_params.keys(),tensor_id in self.id_to_params,0.6464389562606812 22,"def tenant_access_ls(args): """""" Handle tenant access ls command """""" name = args.name error_info, privileges = auth_api._tenant_access_ls(name) if: return err_out(error_info.msg) header = tenant_access_ls_headers() error_info, rows = generate_tenant_access_ls_rows(privileges, name) if error_info: return err_out(error_info.msg) else: printList(args.output_format, header, rows)",True,error_info,error_info,0.6591681838035583 23,"def tenant_access_ls(args): """""" Handle tenant access ls command """""" name = args.name error_info, privileges = auth_api._tenant_access_ls(name) if error_info: return err_out(error_info.msg) header = tenant_access_ls_headers() error_info, rows = generate_tenant_access_ls_rows(privileges, name) if: return err_out(error_info.msg) else: printList(args.output_format, header, rows)",True,error_info,error_info,0.6594505906105042 24,"def batch_counter_hook(module, input, output): batch_size = 1 if: input = input[0] batch_size = len(input) else: print('Warning! No positional inputs found for a module, assuming batch size is 1.') module.__batch_counter__ += batch_size",True,len(input) > 0,len(input) > 0,0.6514883041381836 25,"def __init__(self, default_color=None, *args, **kwargs): super(QColorButton, self).__init__(*args, **kwargs) self._color = None if: self.gl_color = default_color self.setMaximumWidth(32) self.setColor(self._color) self.pressed.connect(self.onColorPicker)",False,default_color is not None,default_color,0.6507023572921753 26,"def decode_rollback(self, element): frame = self._get_frame_from_depth() if: raise GrammarError('Recognition decoding stack broken') if frame is self._stack[-1]: self._index = frame.begin else: raise GrammarError('Recognition decoding stack broken') self._log_step(element, 'rollback')",False,not frame or frame.actor != element,not frame,0.6501692533493042 27,"def decode_rollback(self, element): frame = self._get_frame_from_depth() if not frame or frame.actor!= element: raise GrammarError('Recognition decoding stack broken') if: self._index = frame.begin else: raise GrammarError('Recognition decoding stack broken') self._log_step(element, 'rollback')",False,frame is self._stack[-1],frame.begin,0.6485610008239746 28,"def normalize(self): length2 = self.x * self.x + self.y * self.y length = math.sqrt(length2) if: self.x /= length self.y /= length return length",False,length != 0,self.z,0.6626908779144287 29,"def get_person(self, p): g = ICSCalendar.SUM_PAT.match(p) if: p = g.group(1) p = p.strip() return p",True,g,g,0.6680970191955566 30,"def file_name(prefix, lang): fname = prefix if: fname += f'.{lang}' return fname",False,lang is not None,lang,0.6595156192779541 31,"def get_val(name, section='DEFAULT', default=None, encoding=None): """"""Get a value from the per-user config file Parameters ---------- name : str The name of the value to set. section : str The section to store the name/value in. default : The value to return if `name` is not set. encoding : str The config file's encoding, defaults to :py:data:`default_encoding`. Examples -------- >>> get_val(""junk"") is None True >>> set_val(""junk"", ""random"") >>> get_val(""junk"") u'random' >>> set_val(""junk"", None) >>> get_val(""junk"") is None True """""" if: if encoding == None: encoding = default_encoding config = ConfigParser.ConfigParser() f = codecs.open(path(), 'r', encoding) config.readfp(f, path()) f.close() try: return config.get(section, name) except ConfigParser.NoOptionError: return default else: return default",False,os.path.exists(path()),name,0.6423336863517761 32,"def get_val(name, section='DEFAULT', default=None, encoding=None): """"""Get a value from the per-user config file Parameters ---------- name : str The name of the value to set. section : str The section to store the name/value in. default : The value to return if `name` is not set. encoding : str The config file's encoding, defaults to :py:data:`default_encoding`. Examples -------- >>> get_val(""junk"") is None True >>> set_val(""junk"", ""random"") >>> get_val(""junk"") u'random' >>> set_val(""junk"", None) >>> get_val(""junk"") is None True """""" if os.path.exists(path()): if: encoding = default_encoding config = ConfigParser.ConfigParser() f = codecs.open(path(), 'r', encoding) config.readfp(f, path()) f.close() try: return config.get(section, name) except ConfigParser.NoOptionError: return default else: return default",False,encoding == None,encoding is None,0.6599355936050415 33,"def clean_edges(arg): if: return replace_colon(arg) try: return tuple((clean_edges(x) for x in arg)) except TypeError: return replace_colon(arg)",False,"isinstance(arg, str)","isinstance(arg, tuple)",0.6459245085716248 34,"def Equals(self, other): """""" Test for equality. Args: other (obj): Returns: bool: True `other` equals self. """""" if: return False if other is self: return True return self.Hash == other.Hash",False,other is None,type(other) is not type(self),0.6544768810272217 35,"def Equals(self, other): """""" Test for equality. Args: other (obj): Returns: bool: True `other` equals self. """""" if other is None: return False if: return True return self.Hash == other.Hash",False,other is self,self.Hash == other.Hash,0.6560910940170288 36,"def mouseMoveEvent(self, event): if: self.setPos(self.mapToParent(event.pos() - event.buttonDownPos(Qt.LeftButton))) event.setAccepted(True) else: event.setAccepted(False)",False,event.buttons() & Qt.LeftButton,event.buttonDownPos(Qt.LeftButton),0.652923047542572 37,"def __init__(self, iprot, oprot=None): self._iprot = self._oprot = iprot if: self._oprot = oprot self._seqid = 0",True,oprot is not None,oprot is not None,0.6590708494186401 38,"def new_epoch(self): if: self.history.append(self.avg) self.reset() self.has_new_data = True else: self.has_new_data = False",False,self.count > 0,self.has_new_data or self.has_new_data is False or self.avg != self.avg,0.6490698456764221 39,"def all_reduce_operation_in_group_for_variables(variables, operator, group): for i in range(len(variables)): if: variables[i] = torch.tensor(variables[i]).cuda() torch.distributed.all_reduce(variables[i], op=operator, group=group) variables[i] = variables[i].item() return variables",True,not torch.is_tensor(variables[i]),not torch.is_tensor(variables[i]),0.6446045637130737 40,"def set_accept(self, media_type: Optional[str]=None, ask_version: Optional[str]=None, accept_version: Optional[str]=None, media_type_params: Optional[dict]=None, strict_mode: Optional[bool]=None) -> None: """"""Set the request and expected response media type, going forward."""""" self._media_type = media_type self._ask_version = ask_version self._accept_version = accept_version self._media_type_params = media_type_params if: self._strict_mode = strict_mode",True,strict_mode is not None,strict_mode is not None,0.6540040373802185 41,"def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if: if new is None: new = name self.mod = new else: self.mod = old",True,PY3,PY3,0.6688830256462097 42,"def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if: new = name self.mod = new else: self.mod = old",True,new is None,new is None,0.6595436334609985 43,"def clean(self, value): value = super(ITSocialSecurityNumberField, self).clean(value) if: return value value = re.sub('\\s', u'', value).upper() try: check_digit = ssn_check_digit(value) except ValueError: raise ValidationError(self.error_messages['invalid']) if not value[15] == check_digit: raise ValidationError(self.error_messages['invalid']) return value",False,value == u'',value is None,0.6641252040863037 44,"def clean(self, value): value = super(ITSocialSecurityNumberField, self).clean(value) if value == u'': return value value = re.sub('\\s', u'', value).upper() try: check_digit = ssn_check_digit(value) except ValueError: raise ValidationError(self.error_messages['invalid']) if: raise ValidationError(self.error_messages['invalid']) return value",False,not value[15] == check_digit,check_digit < 0 or value > self.settings['DECIMAL_DIGITS'],0.6454792618751526 45,"def _convert_token_to_id_with_added_voc(self, token): id = self._tokenizer.token_to_id(token) if: return self.unk_token_id return id",True,id is None,id is None,0.6589533090591431 46,"def tablet(self, x, rot=0): """"""Tablet test objective function"""""" if: x = rotate(x) x = [x] if isscalar(x[0]) else x f = [1000000.0 * x[0] ** 2 + sum(x[1:] ** 2) for x in x] return f if len(f) > 1 else f[0]",False,rot and rot is not fcts.tablet,rot > 0,0.6522568464279175 47,"def exit_on_disconnect(state): """"""Watch for connection events and exit if disconnected."""""" _LOGGER.debug('ZK connection state: %s', state) if: _LOGGER.info('Exiting on ZK connection lost.') utils.sys_exit(-1)",False,state != states.KazooState.CONNECTED,state == b'connected',0.6500900983810425 48,"def all_formats(self): formats = self.conn.get('SELECT DISTINCT format from data') if: return set() return {f[0] for f in formats}",True,not formats,not formats,0.6549617052078247 49,"def setInfo(self, info): self._checkLock() if: self._markAsChanged('info') self.info = info return True return False",False,self.info != info,info != self.info,0.6478448510169983 50,"def check_for_progressive_training_update(self, is_resume_from_ckpt=False): for i in range(len(self.opts.progressive_steps)): if: self.net.encoder.set_progressive_stage(ProgressiveStage(i)) if self.global_step == self.opts.progressive_steps[i]: self.net.encoder.set_progressive_stage(ProgressiveStage(i))",False,is_resume_from_ckpt and self.global_step >= self.opts.progressive_steps[i],is_resume_from_ckpt and self.opts.progressive_steps[i],0.6433718204498291 51,"def check_for_progressive_training_update(self, is_resume_from_ckpt=False): for i in range(len(self.opts.progressive_steps)): if is_resume_from_ckpt and self.global_step >= self.opts.progressive_steps[i]: self.net.encoder.set_progressive_stage(ProgressiveStage(i)) if: self.net.encoder.set_progressive_stage(ProgressiveStage(i))",False,self.global_step == self.opts.progressive_steps[i],self.global_step - self.opts.progressive_steps >= 1,0.6428817510604858 52,"def __getattr__(self, element): if: return self.__class__(self, self.__selected) else: return self.__class__(self, self.__selected + (element,))",False,element in self.__selected,element is None,0.6532191038131714 53,"def output_keyword(data_object): if: return output_status_message('* * * Begin output_keyword * * *') output_status_message('Id: {0}'.format(data_object.Id)) output_status_message('MatchType: {0}'.format(data_object.MatchType)) output_status_message('Text: {0}'.format(data_object.Text)) output_status_message('* * * End output_keyword * * *')",True,data_object is None,data_object is None,0.650916576385498 54,"def _dump_wwnames_start(self): infoname = self._get_dump_infoname() if: return base_path, base_name = infoname dump_name = base_name dump_type = wdumper.TYPE_EMPTY dumper = wdumper.DumpPrinter(self.parser.get_banks(), dump_type, dump_name) dumper.dump() self.names.save_lst(basename=dump_name, path=base_path)",True,not infoname,not infoname,0.6580004692077637 55,"def encode(self, media, filename, file=None, **kwargs): """"""Attempt to encode a pyglet object to a specified format. All registered encoders that advertise support for the specific file extension will be tried. If no encoders are available, an EncodeException will be raised. """""" first_exception = None for encoder in self.get_encoders(filename): try: return encoder.encode(media, filename, file, **kwargs) except EncodeException as e: first_exception = first_exception or e if: raise EncodeException(f""No Encoders are available for this extension: '{filename}'"") raise first_exception",False,not first_exception,not self.encoders,0.647996723651886 56,"def load_all(stream, Loader=None): """""" Parse all YAML documents in a stream and produce corresponding Python objects. """""" if: load_warning('load_all') Loader = FullLoader loader = Loader(stream) try: while loader.check_data(): yield loader.get_data() finally: loader.dispose()",True,Loader is None,Loader is None,0.6556233167648315 57,"def decorate(self, pos, data, is_first=True): self._table.register(pos) row = self._table.get_row(pos) if: update_method = row.name.update decowidget = super().decorate(pos, row.name, is_first=is_first) decowidget.update = update_method row.replace('name', decowidget) file_widget = self._filewidgetcls(data, row) self._widgets[data['id']] = file_widget return file_widget",False,row.exists('name'),row.name and row.name.update,0.6482201814651489 58,"def cancel_backup(self, name=None): """"""See :func:`burpui.misc.parser.interface.BUIparser.cancel_backup`"""""" path = self._get_server_backup_path(name) try: if: os.unlink(path) else: return [NOTIF_WARN, 'There is no backup scheduled for this client'] except OSError as exp: return [NOTIF_ERROR, 'Unable to cancel backup: {}'.format(str(exp))] return [NOTIF_OK, 'Backup successfully canceled']",True,os.path.exists(path),os.path.exists(path),0.6445538997650146 59,"def get_adapter(self, url): """"""Returns the appropriate connnection adapter for the given URL."""""" for prefix, adapter in self.adapters.items(): if: return adapter raise InvalidSchema(""No connection adapters were found for '%s'"" % url)",False,url.lower().startswith(prefix),url.lower().startswith(prefix.lower()),0.6471322774887085 60,"@metadata() def version(self): """"""Return version."""""" if: return UNAP return UNAV",False,self.mimetype() in self._supported,self._meta.unap_version,0.6507033705711365 61,"def license(self): if: os.popen('%s/autodesk/maya%s/vray/bin/setvrlservice -server=127.0.0.1 -port=30305 -server1=0.0.0.0 -port1=30306 -server2=192.168.0.17 -port2=30306' % (self.path(), maya().version())).readlines() os.system('%s/docker/start.sh &' % self.path())",False,float(self.version().split('.')[0]) < 4,maya().version() != '30305',0.6497805714607239 62,"@patch('edx_rest_framework_extensions.permissions.log') @ddt.data(*JWT_AUTH_TYPES) def test_jwt_no_scopes(self, auth_type, mock_log): """""" Returns 403 when scopes are enforced with JwtHasScope. """""" jwt_token = self._create_jwt_token(self.student, auth_type, scopes=[]) resp = self.get_response(AuthType.jwt, token=jwt_token) is_enforced = auth_type == AuthType.jwt_restricted assert resp.status_code == (status.HTTP_403_FORBIDDEN if is_enforced else status.HTTP_200_OK) if: self._assert_in_log('JwtHasScope', mock_log.warning)",False,is_enforced,mock_log,0.6513158082962036 63,"def forward(self, input): if: y = F.linear(input, self.weight, None) bg = bias_gelu(self.bias, y) return bg elif self.fused_tanh: return bias_tanh(self.bias, F.linear(input, self.weight, None)) else: return self.act_fn(F.linear(input, self.weight, self.bias))",True,self.fused_gelu,self.fused_gelu,0.6473233699798584 64,"def forward(self, input): if self.fused_gelu: y = F.linear(input, self.weight, None) bg = bias_gelu(self.bias, y) return bg elif: return bias_tanh(self.bias, F.linear(input, self.weight, None)) else: return self.act_fn(F.linear(input, self.weight, self.bias))",True,self.fused_tanh,self.fused_tanh,0.6495752334594727 65,"def ensure_x_visible(self, x): """"""Adjust `view_x` so that the given X coordinate is visible. The X coordinate is given relative to the current `view_x`. :Parameters: `x` : int X coordinate """""" x -= self._x if: self.view_x = x elif x >= self.view_x + self.width: self.view_x = x - self.width elif x >= self.view_x + self.width and self.content_width > self.width: self.view_x = x - self.width elif self.view_x + self.width > self.content_width: self.view_x = self.content_width",False,x <= self.view_x,self.view_x is None,0.6498140096664429 66,"def ensure_x_visible(self, x): """"""Adjust `view_x` so that the given X coordinate is visible. The X coordinate is given relative to the current `view_x`. :Parameters: `x` : int X coordinate """""" x -= self._x if x <= self.view_x: self.view_x = x elif: self.view_x = x - self.width elif x >= self.view_x + self.width and self.content_width > self.width: self.view_x = x - self.width elif self.view_x + self.width > self.content_width: self.view_x = self.content_width",False,x >= self.view_x + self.width,x >= self.view_x and x < self.width,0.6440525054931641 67,"def ensure_x_visible(self, x): """"""Adjust `view_x` so that the given X coordinate is visible. The X coordinate is given relative to the current `view_x`. :Parameters: `x` : int X coordinate """""" x -= self._x if x <= self.view_x: self.view_x = x elif x >= self.view_x + self.width: self.view_x = x - self.width elif: self.view_x = x - self.width elif self.view_x + self.width > self.content_width: self.view_x = self.content_width",False,x >= self.view_x + self.width and self.content_width > self.width,x >= self.view_x - self.width,0.6447702050209045 68,"def ensure_x_visible(self, x): """"""Adjust `view_x` so that the given X coordinate is visible. The X coordinate is given relative to the current `view_x`. :Parameters: `x` : int X coordinate """""" x -= self._x if x <= self.view_x: self.view_x = x elif x >= self.view_x + self.width: self.view_x = x - self.width elif x >= self.view_x + self.width and self.content_width > self.width: self.view_x = x - self.width elif: self.view_x = self.content_width",False,self.view_x + self.width > self.content_width,x >= self.view_x and x > self.content_width,0.6426810026168823 69,"def create_rotation_matrix(self, offset=0): center = (self.center[0] + offset, self.center[1] + offset) rm = cv2.getRotationMatrix2D(tuple(center), self.angle, 1) if: rot_im_center = cv2.transform(self.image_center[None, None, :] + offset, rm)[0, 0, :] new_center = np.array([self.bound_w / 2, self.bound_h / 2]) + offset - rot_im_center rm[:, 2] += new_center return rm",False,self.expand,self.image_center is not None,0.6642165780067444 70,"def get_size_distribution_index(tokens, num_sizes): start = int(tokens[1]) end = int(tokens[2]) interval_size = end - start for i in range(num_sizes): if: return i return len(SIZES)",False,interval_size < SIZES[i],tokens[i] > interval_size,0.6498247385025024 71,"@staticmethod def get_states_by_contract_address(event: dict, hex_contract_address: str): if: raise SDKException(ErrorCode.require_str_params) notify_list = Event.__get_notify_list_by_contract_address(event, hex_contract_address) states_list = list() for notify in notify_list: states = notify.get('States', list()) states_list.append(states) states_list.count(list) if len(states_list) == 1: states_list = states_list[0] return states_list",True,"not isinstance(hex_contract_address, str)","not isinstance(hex_contract_address, str)",0.646195650100708 72,"@staticmethod def get_states_by_contract_address(event: dict, hex_contract_address: str): if not isinstance(hex_contract_address, str): raise SDKException(ErrorCode.require_str_params) notify_list = Event.__get_notify_list_by_contract_address(event, hex_contract_address) states_list = list() for notify in notify_list: states = notify.get('States', list()) states_list.append(states) states_list.count(list) if: states_list = states_list[0] return states_list",False,len(states_list) == 1,states_list > 1,0.6476594805717468 73,"def get_gcp_managed_cloud_database_info(config, cloud_provider, info): workspace_name = config['workspace_name'] database_instance = get_managed_database_instance(cloud_provider, workspace_name) if: db_address = _get_managed_database_address(database_instance) managed_cloud_database_info = {CLOUDTIK_MANAGED_CLOUD_DATABASE_ENDPOINT: db_address} info[CLOUDTIK_MANAGED_CLOUD_DATABASE] = managed_cloud_database_info",True,database_instance is not None,database_instance is not None,0.6524078845977783 74,"def find_value_for_api_version(for_version: APIVersion, values: Dict[str, float]) -> float: """""" Either parse a dict that looks like {""2.0"": 5, ""2.5"": 4} (aka the flow rate values from pipette config) and return the value for the highest api level that is at or underneath ``for_version``, or return the value passed in, if it's only a float. """""" if: return values sorted_versions = sorted({APIVersion.from_string(k): v for k, v in values.items()}) last = values[str(sorted_versions[0])] for version in sorted_versions: if version > for_version: break last = values[str(version)] return last",False,"isinstance(values, float)",values.get('api_version') is None or values['api_version'] is None,0.6466754674911499 75,"def find_value_for_api_version(for_version: APIVersion, values: Dict[str, float]) -> float: """""" Either parse a dict that looks like {""2.0"": 5, ""2.5"": 4} (aka the flow rate values from pipette config) and return the value for the highest api level that is at or underneath ``for_version``, or return the value passed in, if it's only a float. """""" if isinstance(values, float): return values sorted_versions = sorted({APIVersion.from_string(k): v for k, v in values.items()}) last = values[str(sorted_versions[0])] for version in sorted_versions: if: break last = values[str(version)] return last",False,version > for_version,version == for_version,0.650262176990509 76,"def is_writable(self, path): result = False while not result: if: result = os.access(path, os.W_OK) break parent = os.path.dirname(path) if parent == path: break path = parent return result",False,os.path.exists(path),os.path.isfile(path),0.6451069712638855 77,"def is_writable(self, path): result = False while not result: if os.path.exists(path): result = os.access(path, os.W_OK) break parent = os.path.dirname(path) if: break path = parent return result",False,parent == path,parent and result,0.6620122790336609 78,"def fail(self, module): if: module.fail_json_aws(self.exception, msg=self.message, **self.kwargs) module.fail_json(msg=self.message, **self.kwargs)",True,self.exception,self.exception,0.658711314201355 79,"def get_cell_input_shapes(self, flatten=False) -> ShapeList: """""" input shape(s) of each cell in order """""" if: self.cached['all_input_shapes'] = self._get_cell_input_shapes() shapes = self.get_cached('all_input_shapes') return shapes.flatten(flatten)",False,self.get_cached('all_input_shapes') is None,'all_input_shapes' not in self.cached,0.6453176736831665 80,"def __init__(self, inplanes, use_conv=False): super(UpSampleBlock, self).__init__() self.use_conv = use_conv if: self.conv = nn.Conv2d(inplanes, inplanes, kernel_size=3, stride=1, padding=1, groups=1, bias=True)",True,self.use_conv,self.use_conv,0.6527868509292603 81,"def __init__(self, create_options=None, cron_workflow=None, namespace=None): """"""V1alpha1CreateCronWorkflowRequest - a model defined in Swagger"""""" self._create_options = None self._cron_workflow = None self._namespace = None self.discriminator = None if: self.create_options = create_options if cron_workflow is not None: self.cron_workflow = cron_workflow if namespace is not None: self.namespace = namespace",True,create_options is not None,create_options is not None,0.6539716124534607 82,"def __init__(self, create_options=None, cron_workflow=None, namespace=None): """"""V1alpha1CreateCronWorkflowRequest - a model defined in Swagger"""""" self._create_options = None self._cron_workflow = None self._namespace = None self.discriminator = None if create_options is not None: self.create_options = create_options if: self.cron_workflow = cron_workflow if namespace is not None: self.namespace = namespace",True,cron_workflow is not None,cron_workflow is not None,0.6575189232826233 83,"def __init__(self, create_options=None, cron_workflow=None, namespace=None): """"""V1alpha1CreateCronWorkflowRequest - a model defined in Swagger"""""" self._create_options = None self._cron_workflow = None self._namespace = None self.discriminator = None if create_options is not None: self.create_options = create_options if cron_workflow is not None: self.cron_workflow = cron_workflow if: self.namespace = namespace",True,namespace is not None,namespace is not None,0.6571111083030701 84,"def _generate_tunnel_id(session): try: tunnels = session.query(ovs_models_v2.TunnelEndpoint).all() except exc.NoResultFound: return 0 tunnel_ids = [tunnel['id'] for tunnel in tunnels] if: id = max(tunnel_ids) else: id = 0 return id + 1",False,tunnel_ids,len(tunnel_ids) > 0,0.6630378365516663 85,"def juggle_axes(xs, ys, zs, zdir): """""" Reorder coordinates so that 2D *xs*, *ys* can be plotted in the plane orthogonal to *zdir*. *zdir* is normally 'x', 'y' or 'z'. However, if *zdir* starts with a '-' it is interpreted as a compensation for `rotate_axes`. """""" if: return (zs, xs, ys) elif zdir == 'y': return (xs, zs, ys) elif zdir[0] == '-': return rotate_axes(xs, ys, zs, zdir) else: return (xs, ys, zs)",True,zdir == 'x',zdir == 'x',0.6569070816040039 86,"def juggle_axes(xs, ys, zs, zdir): """""" Reorder coordinates so that 2D *xs*, *ys* can be plotted in the plane orthogonal to *zdir*. *zdir* is normally 'x', 'y' or 'z'. However, if *zdir* starts with a '-' it is interpreted as a compensation for `rotate_axes`. """""" if zdir == 'x': return (zs, xs, ys) elif: return (xs, zs, ys) elif zdir[0] == '-': return rotate_axes(xs, ys, zs, zdir) else: return (xs, ys, zs)",True,zdir == 'y',zdir == 'y',0.6588462591171265 87,"def juggle_axes(xs, ys, zs, zdir): """""" Reorder coordinates so that 2D *xs*, *ys* can be plotted in the plane orthogonal to *zdir*. *zdir* is normally 'x', 'y' or 'z'. However, if *zdir* starts with a '-' it is interpreted as a compensation for `rotate_axes`. """""" if zdir == 'x': return (zs, xs, ys) elif zdir == 'y': return (xs, zs, ys) elif: return rotate_axes(xs, ys, zs, zdir) else: return (xs, ys, zs)",False,zdir[0] == '-',zdir == 'z',0.6553357839584351 88,"@property def swing_mode(self) -> str | None: """"""Return the swing mode setting."""""" if: return self._get_swing_mode(True) return self._get_swing_mode(False)",False,self._set_hor_swing and self._support_hor_swing,self._use_swing_mode,0.6456040143966675 89,"def get_thumbnail(thumb_size, thumbnails): if: thumbnail_sizes = ['high','medium', 'default'] else: thumbnail_sizes = ['medium', 'high', 'default'] image = '' for thumbnail_size in thumbnail_sizes: try: image = thumbnails.get(thumbnail_size, {}).get('url', '') except AttributeError: image = thumbnails.get(thumbnail_size, '') if image: break return image",True,thumb_size == 'high',thumb_size == 'high',0.6561036109924316 90,"def get_thumbnail(thumb_size, thumbnails): if thumb_size == 'high': thumbnail_sizes = ['high','medium', 'default'] else: thumbnail_sizes = ['medium', 'high', 'default'] image = '' for thumbnail_size in thumbnail_sizes: try: image = thumbnails.get(thumbnail_size, {}).get('url', '') except AttributeError: image = thumbnails.get(thumbnail_size, '') if: break return image",True,image,image,0.6701157689094543 91,"def processRecord(self): self.current['timestamp'] = utcnow() if: self.service.dispatchEvent(self.current) else: log.msg(self.current)",True,self.service,self.service,0.6587178707122803 92,"def getIndicatorPLM(self, indicatorFlags): ret = None if: ret = colors2plmIndicator[self.color][indicatorsDirection[self.facing]] return ret",False,indicatorFlags & self.indicator != 0 and self.color in colors2plmIndicator,indicatorFlags & 1,0.6515902280807495 93,"@classmethod @contextlib.contextmanager def synchronized_changes(cls, timeout, step=0.001, formats=None, initial_clipboard=None): seq_no = win32clipboard.GetClipboardSequenceNumber() if: initial_clipboard = cls(from_system=True) try: yield finally: cls._wait_for_change(timeout, step, formats, initial_clipboard, seq_no)",False,formats and (not initial_clipboard),initial_clipboard,0.6439756155014038 94,"def quantity(self, card=None, card_id=None): """"""Return the total quantity of copies in it, or the quantity of the given card. card: card object card_id: id (int) return: int. """""" if: return sum([it.nb for it in self.basketcopies_set.all()]) else: it = card or card_id return self.basketcopies_set.get(card=it).nb return -1",False,not card,card is None,0.6663801670074463 95,"def select_delta(self, dist_post_update, current_iteration): """""" Choose the delta at the scale of distance between x and perturbed sample. """""" if: delta = 0.1 * (self.clip_max - self.clip_min) elif self.constraint == 'l2': delta = np.sqrt(self.d) * self.theta * dist_post_update elif self.constraint == 'linf': delta = self.d * self.theta * dist_post_update return delta",False,current_iteration == 1,self.constraint == 'l1',0.6562577486038208 96,"def select_delta(self, dist_post_update, current_iteration): """""" Choose the delta at the scale of distance between x and perturbed sample. """""" if current_iteration == 1: delta = 0.1 * (self.clip_max - self.clip_min) elif: delta = np.sqrt(self.d) * self.theta * dist_post_update elif self.constraint == 'linf': delta = self.d * self.theta * dist_post_update return delta",False,self.constraint == 'l2',self.constraint == 'sqrt',0.6519610285758972 97,"def select_delta(self, dist_post_update, current_iteration): """""" Choose the delta at the scale of distance between x and perturbed sample. """""" if current_iteration == 1: delta = 0.1 * (self.clip_max - self.clip_min) elif self.constraint == 'l2': delta = np.sqrt(self.d) * self.theta * dist_post_update elif: delta = self.d * self.theta * dist_post_update return delta",False,self.constraint == 'linf',self.constraint == 'l3',0.6501039862632751 98,"def _maybe_add_iscrowd_annotations(cocoapi) -> None: for ann in cocoapi.dataset['annotations']: if: ann['iscrowd'] = 0",True,'iscrowd' not in ann,'iscrowd' not in ann,0.6548173427581787 99,"def to_rgb(self, x): assert self.image_key =='segmentation' if: self.register_buffer('colorize', torch.randn(3, x.shape[1], 1, 1).to(x)) x = F.conv2d(x, weight=self.colorize) x = 2.0 * (x - x.min()) / (x.max() - x.min()) - 1.0 return x",True,"not hasattr(self, 'colorize')","not hasattr(self, 'colorize')",0.6446002125740051 100,"def validate(self): sync = self.stream.searchBytes('G', 0, 204 * 8) if: return 'Unable to find synchronization byte' for index in xrange(5): try: packet = self['packet[%u]' % index] except (ParserError, MissingField): if index and self.eof: return True else: return 'Unable to get packet #%u' % index err = packet.isValid() if err: return 'Packet #%u is invalid: %s' % (index, err) return True",False,sync is None,sync,0.6542575359344482 101,"def validate(self): sync = self.stream.searchBytes('G', 0, 204 * 8) if sync is None: return 'Unable to find synchronization byte' for index in xrange(5): try: packet = self['packet[%u]' % index] except (ParserError, MissingField): if index and self.eof: return True else: return 'Unable to get packet #%u' % index err = packet.isValid() if: return 'Packet #%u is invalid: %s' % (index, err) return True",True,err,err,0.6731647253036499 102,"def validate(self): sync = self.stream.searchBytes('G', 0, 204 * 8) if sync is None: return 'Unable to find synchronization byte' for index in xrange(5): try: packet = self['packet[%u]' % index] except (ParserError, MissingField): if: return True else: return 'Unable to get packet #%u' % index err = packet.isValid() if err: return 'Packet #%u is invalid: %s' % (index, err) return True",False,index and self.eof,sync,0.6574536561965942 103,"def __init__(self, theme, module, widget): self.__attributes = {} for key in self.__COMMON_THEME_FIELDS: tmp = theme.get(key, widget) if: self.__attributes[key] = tmp self.__attributes['name'] = module.id self.__attributes['instance'] = widget.id self.__attributes['prev-bg'] = theme.get('bg', 'previous')",True,tmp is not None,tmp is not None,0.6631288528442383 104,"def _get_viewer(self): if: self.viewer = mujoco_py.MjViewer(self.sim) self.viewer_setup() return self.viewer",True,self.viewer is None,self.viewer is None,0.6544407606124878 105,"def generate_key(self, url, suffix=''): """""" Generates a key to store the cache under :param url: The URL being cached :param suffix: A string to append to the key :return: A string key for the URL """""" if: url = url.encode('utf-8') key = hashlib.md5(url).hexdigest() return key + suffix",False,"isinstance(url, str_cls)","isinstance(url, unicode)",0.6456655263900757 106,"def denormalize(val): """""" De-normalize a string """""" if: val = val.replace('_', '-') return val",False,val.find('_') != -1,val and '_' in val,0.6521086692810059 107,"def __init__(self, host, port=None): if: raise LocationValueError('No host specified.') self.host = _normalize_host(host, scheme=self.scheme) self._proxy_host = host.lower() self.port = port",True,not host,not host,0.6742929220199585 108,"def _test_end(self, msg=None, report=None): self.test_thread = None if: self.logger.info(msg) if report: self._output_test_report(report) pid = os.getpid() os.kill(pid, signal.SIGTERM)",True,msg,msg,0.6828615665435791 109,"def _test_end(self, msg=None, report=None): self.test_thread = None if msg: self.logger.info(msg) if: self._output_test_report(report) pid = os.getpid() os.kill(pid, signal.SIGTERM)",True,report,report,0.6682855486869812 110,"def run_read(self): api_result = self.api_read() if: return 'Apache root directory not found.' else: rows = [] rows.append(['Apache root directories']) rows.append([]) for key_name in api_result: for directory in api_result[key_name]: rows.append([directory]) result_table = table(rows) result_table.draw(80) return rows",True,not api_result['apache_root_directory'],not api_result['apache_root_directory'],0.6433871984481812 111,"def _get_version(self, conf_file: str) -> str: """"""Parse the version from the conf_file. version should be in #!VERSION={value} format !!! note ""0.0"" is returned if no version is found """""" version = '0.0' with open(conf_file, 'r') as f: for line in f.readlines(): if: try: version = str(float(line.split('=')[1].split()[0].strip())) break except Exception: pass return version",False,line.startswith('#!VERSION='),line.startswith('#'),0.6472935676574707 112,"def _eval_dropouts(mod): module_name = mod.__class__.__name__ if: mod.training = False for module in mod.children(): _eval_dropouts(module)",False,'Dropout' in module_name or 'BatchNorm' in module_name,'training' not in module_name.lower(),0.6447892189025879 113,"def pred_ctxt(self): device = util.device(self.config, self.logger) if: datasource = self._preload_batches(device) else: datasource = self._reload_batches(device) return PredictorContext(self, datasource, device)",False,self.config['preload'],self.preload,0.6453684568405151 114,"def zcl_readattributes(pkt): config.row['zcl_readattributes_identifiers'] = ','.join(['0x{:04x}'.format(identifier) for identifier in pkt[ZCLGeneralReadAttributes].attribute_identifiers]) if: config.row['error_msg'] = 'Unexpected payload' return",True,len(bytes(pkt[ZCLGeneralReadAttributes].payload)) != 0,len(bytes(pkt[ZCLGeneralReadAttributes].payload)) != 0,0.6488480567932129 115,"def get_first_iter_element(iterable: Iterable[T]) -> Tuple[T, Iterable[T]]: """"""Get first element of an iterable and a new fresh iterable. The fresh iterable has the first element added back using ``itertools.chain``. If the iterable is not an iterator, this is equivalent to ``(next(iter(iterable)), iterable)``. Args: iterable: The iterable to get the first element of. Returns: A tuple containing the first element of the iterable, and a fresh iterable with all the elements. Raises: ValueError: `iterable` is empty -- the first call to it returns no elements. """""" iterator = iter(iterable) try: first_element = next(iterator) except StopIteration: raise ValueError(f'iterable {iterable} had no elements to iterate over.') return_iterable: Iterable[T] if: return_iterable = itertools.chain([first_element], iterator) else: return_iterable = iterable return (first_element, return_iterable)",False,iterator == iterable,"isinstance(first_element, Iterable)",0.656818151473999 116,"def __call__(self, parent, params, response): """""" :type parent: ServiceResource :param parent: The resource instance to which this action is attached. :type params: dict :param params: Request parameters sent to the service. :type response: dict :param response: Low-level operation response. """""" if: response = jmespath.search(self.search_path, response) return response",True,self.search_path and self.search_path != '$',self.search_path and self.search_path != '$',0.6462432742118835 117,"def set_repository_id(self, repository: HacsRepository, repo_id: str): """"""Update a repository id."""""" existing_repo_id = str(repository.data.id) if: return if existing_repo_id!= '0': raise ValueError(f'The repo id for {repository.data.full_name_lower} is already set to {existing_repo_id}') repository.data.id = repo_id self.register(repository)",False,existing_repo_id == repo_id,not repository.data.full_name_lower or existing_repo_id == repo_id,0.6502279043197632 118,"def set_repository_id(self, repository: HacsRepository, repo_id: str): """"""Update a repository id."""""" existing_repo_id = str(repository.data.id) if existing_repo_id == repo_id: return if: raise ValueError(f'The repo id for {repository.data.full_name_lower} is already set to {existing_repo_id}') repository.data.id = repo_id self.register(repository)",False,existing_repo_id != '0',repository.data.full_name_lower in existing_repo_id,0.6515269875526428 119,"def __getitem__(self, name): """"""Returns a BoundField with the given name."""""" try: field = self.fields[name] except KeyError: raise KeyError(""Key %r not found in '%s'"" % (name, self.__class__.__name__)) if: self._bound_fields_cache[name] = BoundField(self, field, name) return self._bound_fields_cache[name]",True,name not in self._bound_fields_cache,name not in self._bound_fields_cache,0.6495532989501953 120,"def get_incumbent(self) -> Configuration | None: """"""Returns the current incumbent in a single-objective setting."""""" if: raise ValueError('Cannot get a single incumbent for multi-objective optimization.') if len(self._incumbents) == 0: return None assert len(self._incumbents) == 1 return self._incumbents[0]",False,self._scenario.count_objectives() > 1,len(self) > 1,0.6493242979049683 121,"def get_incumbent(self) -> Configuration | None: """"""Returns the current incumbent in a single-objective setting."""""" if self._scenario.count_objectives() > 1: raise ValueError('Cannot get a single incumbent for multi-objective optimization.') if: return None assert len(self._incumbents) == 1 return self._incumbents[0]",False,len(self._incumbents) == 0,not self._incumbents,0.6508061289787292 122,"def to(self, device: torch.device): device_symmetry = self._symmetry if: device_symmetry = {key: value.to(device) for key, value in device_symmetry.items()} return Mesh(_maybe_copy_to_device(self._vertices, device), _maybe_copy_to_device(self._faces, device), _maybe_copy_to_device(self._geodists, device), device_symmetry, _maybe_copy_to_device(self._texcoords, device), self.mesh_info, device)",False,device_symmetry,"isinstance(device_symmetry, dict)",0.6596102714538574 123,"def del_extra_repr(m): if: m.extra_repr = m.original_extra_repr del m.original_extra_repr if hasattr(m, 'accumulate_flops'): del m.accumulate_flops",True,"hasattr(m, 'original_extra_repr')","hasattr(m, 'original_extra_repr')",0.6439371109008789 124,"def del_extra_repr(m): if hasattr(m, 'original_extra_repr'): m.extra_repr = m.original_extra_repr del m.original_extra_repr if: del m.accumulate_flops",True,"hasattr(m, 'accumulate_flops')","hasattr(m, 'accumulate_flops')",0.643975019454956 125,"@njit def _downsample_sample_count(a, max_count): a = a.astype(np.float64) total = a.sum() p = a / total if: b = max_count * p else: b = a b = b.astype(np.int64) return b",False,total > max_count,max_count is not None and p > 0,0.652897834777832 126,"@classmethod def from_param(cls, obj): if: return obj return base.from_param(obj)",False,obj is None,"isinstance(obj, cls)",0.6590157747268677 127,"def post_add(self, datasource): datasource.refresh_metrics() security_manager.merge_perm('datasource_access', datasource.get_perm()) if: security_manager.merge_perm('schema_access', datasource.schema_perm)",False,datasource.schema,datasource.schema_perm is not None,0.6557527780532837 128,"def get_property(self, stylenode): border = None for propertyname in TABLEPROPERTYNAMES: border = stylenode.get('{%s}%s' % (CNSD['fo'], propertyname)) if: return border return border",False,border is not None and border != 'none',border,0.6498315334320068 129,"def ber_decode(value): """"""Return decoded BER length as integer given bytes."""""" if: if len(value) > 1: raise ValueError return bytes_to_int(value) else: if len(value)!= value[0] - 127: raise ValueError return bytes_to_int(value[1:])",False,bytes_to_int(value) < 128,"isinstance(value, bytes)",0.6481873989105225 130,"def ber_decode(value): """"""Return decoded BER length as integer given bytes."""""" if bytes_to_int(value) < 128: if: raise ValueError return bytes_to_int(value) else: if len(value)!= value[0] - 127: raise ValueError return bytes_to_int(value[1:])",False,len(value) > 1,len(value) != b'\x00',0.6486057639122009 131,"def ber_decode(value): """"""Return decoded BER length as integer given bytes."""""" if bytes_to_int(value) < 128: if len(value) > 1: raise ValueError return bytes_to_int(value) else: if: raise ValueError return bytes_to_int(value[1:])",False,len(value) != value[0] - 127,value.startswith('0') or value.startswith('1'),0.6444279551506042 132,"def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): loss = F.cross_entropy(pred, label, reduction='none') if: weight = weight.float() loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return loss",True,weight is not None,weight is not None,0.6550837755203247 133,"def create_code(traits): """"""Assign bits to list of traits. """""" code = 1 result = {INVALID: code} if: return result for trait in traits: code = code << 1 result[trait] = code return result",True,not traits,not traits,0.667203426361084 134,"def item_to_buf_log_item(item: gdb.Value) -> gdb.Value: """""" Converts an xfs_log_item to an xfs_buf_log_item Args: item: The log item to convert. The value must be of type ``struct xfs_log_item``. Returns: :obj:`gdb.Value`: The converted log item. The value will be of type ``struct xfs_buf_log_item``. Raises: InvalidArgumentError: The type of log item is not ``XFS_LI_BUF`` :obj:`gdb.NotAvailableError`: The target value was not available. """""" if: raise InvalidArgumentError('item is not a buf log item') return container_of(item, types.xfs_buf_log_item_type, 'bli_item')",True,item['li_type'] != XFS_LI_BUF,item['li_type'] != XFS_LI_BUF,0.6482463479042053 135,"def __get_resource_string(self, req, bucket_name, key): if: encoded_uri = v2_uri_encode('/' + bucket_name + '/' + key) else: encoded_uri = v2_uri_encode('/') logger.info('encoded_uri={0} key={1}'.format(encoded_uri, key)) return encoded_uri + self.__get_canonalized_query_string(req)",True,bucket_name,bucket_name,0.6638178825378418 136,"@property def minimum(self) -> Optional[SupportsFloat]: if: return None if not self.metric_single_values_list and (not self.metric_aggregated_list): return None metrics = self.metric_single_values_list + [s.min for s in self.metric_aggregated_list] return min(metrics)",False,'Minimum' not in self.stats,self.metric_single_values_list is None,0.6523749828338623 137,"@property def minimum(self) -> Optional[SupportsFloat]: if 'Minimum' not in self.stats: return None if: return None metrics = self.metric_single_values_list + [s.min for s in self.metric_aggregated_list] return min(metrics)",False,not self.metric_single_values_list and (not self.metric_aggregated_list),len(self.metric_aggregated_list) == 0,0.6491511464118958 138,"def __init__(self, alpha=1.5, beta=1.5): """""" CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features, see https://arxiv.org/abs/1905.04899 Cutmix image and gt_bbbox/gt_score Args: alpha (float): alpha parameter of beta distribute beta (float): beta parameter of beta distribute """""" super(Cutmix, self).__init__() self.alpha = alpha self.beta = beta if: raise ValueError('alpha shold be positive in {}'.format(self)) if self.beta <= 0.0: raise ValueError('beta shold be positive in {}'.format(self))",True,self.alpha <= 0.0,self.alpha <= 0.0,0.6523735523223877 139,"def __init__(self, alpha=1.5, beta=1.5): """""" CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features, see https://arxiv.org/abs/1905.04899 Cutmix image and gt_bbbox/gt_score Args: alpha (float): alpha parameter of beta distribute beta (float): beta parameter of beta distribute """""" super(Cutmix, self).__init__() self.alpha = alpha self.beta = beta if self.alpha <= 0.0: raise ValueError('alpha shold be positive in {}'.format(self)) if: raise ValueError('beta shold be positive in {}'.format(self))",True,self.beta <= 0.0,self.beta <= 0.0,0.6557456254959106 140,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,PY3,PY3,0.6618987321853638 141,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,new_mod is None,new_mod is None,0.6534161567687988 142,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,new_attr is None,new_attr is None,0.6529781818389893 143,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if: old_attr = name self.attr = old_attr",True,old_attr is None,old_attr is None,0.6546398401260376 144,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,old_attr is None,old_attr is None,0.6559209227561951 145,"def process_word(word): if: raise UDError('There is a cycle in a sentence') if word.parent is None: head = int(word.columns[HEAD]) if head < 0 or head > len(ud.words) - sentence_start: raise UDError(""HEAD '{}' points outside of the sentence"".format(word.columns[HEAD])) if head: parent = ud.words[sentence_start + head - 1] word.parent ='remapping' process_word(parent) word.parent = parent",False,word.parent == 'remapping',ud.cycle_count,0.6516072750091553 146,"def process_word(word): if word.parent =='remapping': raise UDError('There is a cycle in a sentence') if: head = int(word.columns[HEAD]) if head < 0 or head > len(ud.words) - sentence_start: raise UDError(""HEAD '{}' points outside of the sentence"".format(word.columns[HEAD])) if head: parent = ud.words[sentence_start + head - 1] word.parent ='remapping' process_word(parent) word.parent = parent",False,word.parent is None,HEAD in word.columns,0.6528672575950623 147,"def process_word(word): if word.parent =='remapping': raise UDError('There is a cycle in a sentence') if word.parent is None: head = int(word.columns[HEAD]) if: raise UDError(""HEAD '{}' points outside of the sentence"".format(word.columns[HEAD])) if head: parent = ud.words[sentence_start + head - 1] word.parent ='remapping' process_word(parent) word.parent = parent",False,head < 0 or head > len(ud.words) - sentence_start,head > sentence_start,0.6500768661499023 148,"def process_word(word): if word.parent =='remapping': raise UDError('There is a cycle in a sentence') if word.parent is None: head = int(word.columns[HEAD]) if head < 0 or head > len(ud.words) - sentence_start: raise UDError(""HEAD '{}' points outside of the sentence"".format(word.columns[HEAD])) if: parent = ud.words[sentence_start + head - 1] word.parent ='remapping' process_word(parent) word.parent = parent",False,head,head < len(ud.words),0.6833953857421875 149,"def RemoveAllDrawPointOnMap(): """""" Remove all features on Point Layer """""" pointLyr = selectLayerByName(Point_lyr, groupName) if: return pointLyr.startEditing() pointLyr.dataProvider().truncate() CommonLayer(pointLyr) return",False,pointLyr is None,not pointLyr,0.6616418957710266 150,"def assert_current_keychain(self, *keys): ak = tuple(self.active_keychains) if: self.assertEqual(ak, ()) else: self.assertEqual(self.km.current_keychain, keys)",False,len(keys) < 1,ak,0.6502760648727417 151,"def filter_dont_care(gt: NDArrayObject, class_name: str) -> bool: """"""Fitlers detections that are considered don't care under current LCA evaluation."""""" if: return True if gt == class_name: return True else: return False",False,gt == 'ignore',"isinstance(gt, NDArrayObject) and any((gt.get_kind() == 'CAR' for gt in class_name))",0.6548080444335938 152,"def filter_dont_care(gt: NDArrayObject, class_name: str) -> bool: """"""Fitlers detections that are considered don't care under current LCA evaluation."""""" if gt == 'ignore': return True if: return True else: return False",False,gt == class_name,'use_lca' in class_name or 'use_lca' in class_name,0.6553903818130493 153,"def is_solution(cell: NotebookNode) -> bool: """"""Returns True if the cell is a solution cell."""""" if: return False return cell.metadata['nbgrader'].get('solution', False)",False,'nbgrader' not in cell.metadata,not is_solution(cell),0.6587323546409607 154,"def gather_elements(self, client, node, style): if: client.pending_targets.append(node['refid']) return client.gather_elements(node, style)",False,'refid' in node,node['refid'] not in client.pending_targets,0.6568597555160522 155,"def join_stream_mode_on(self): """""" Supervisor behaviour when stream mode is on. When end raise (for exemple by CRTL+C) -> Kill all actor in the following order (Puller - Dispatcher/Formula - Pusher) 1. Send SIGTERM 2. Join X seconds 3. If still alive, send SIGKILL 4. Join """""" for actor in self.supervised_actors: if: self.kill_actors() return actor_sentinels = [actor.sentinel for actor in self.supervised_actors] select.select(actor_sentinels, actor_sentinels, actor_sentinels) self.kill_actors()",False,not actor.is_alive(),actor.sentinel,0.6499453783035278 156,"def test_customlabel(self): """"""Limited test of custom custom labeling"""""" if: tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data = np.nan tbl.label_cells(custom_labeller) desired = '\n*****************************\n* * header1 * header2 *\n*****************************\n* stub1 * -- * 1 *\n* stub2 * 2.00 * 3 *\n*****************************\n' actual = '\n%s\n' % tbl.as_text(missing='--') self.assertEqual(actual, desired)",False,has_numpy,custom_labeller is not None and custom_labeller is not None,0.6552488207817078 157,"def Dequantize(v, ty): v -= ty.zeroPoint if: v *= ty.scale if isinstance(ty.extraParams, SymmPerChannelQuantParams): v *= ty.extraParams.GetScalesBroadcastArray(ty.dimensions) return v",False,ty.scale != 0,"isinstance(ty.scale, Number)",0.6618735790252686 158,"def Dequantize(v, ty): v -= ty.zeroPoint if ty.scale!= 0: v *= ty.scale if: v *= ty.extraParams.GetScalesBroadcastArray(ty.dimensions) return v",False,"isinstance(ty.extraParams, SymmPerChannelQuantParams)",ty.extraParams != None,0.6478604078292847 159,"def submit(self, expect_errors=False, data=None): if: data = {} submission = {'paymethod': self.paymethod.get_uri(), 'plan_id':'moz-brick'} submission.update(data) form = SubscriptionForm(submission) if not expect_errors: assert form.is_valid(), form.errors.as_text() return form",False,not data,data is None,0.6652536392211914 160,"def submit(self, expect_errors=False, data=None): if not data: data = {} submission = {'paymethod': self.paymethod.get_uri(), 'plan_id':'moz-brick'} submission.update(data) form = SubscriptionForm(submission) if: assert form.is_valid(), form.errors.as_text() return form",False,not expect_errors,expect_errors,0.6541720628738403 161,"def __getitem__(self, i): ptr, size = self._index[i] tensor = torch.from_numpy(np.frombuffer(self._bin_buffer, dtype=self._index.dtype, count=size, offset=ptr)) if: return tensor else: return tensor.long()",False,tensor.dtype == torch.int64,"isinstance(i, slice)",0.6494283676147461 162,"def moletteLinux4(self, event): if: return self.redrawDeZoom()",True,event.widget != self.canvas,event.widget != self.canvas,0.6526194214820862 163,"def random_sampling(pc, num_sample, replace=None, return_choices=False): """""" Input is NxC, output is num_samplexC """""" if: replace = pc.shape[0] < num_sample choices = np.random.choice(pc.shape[0], num_sample, replace=replace) if return_choices: return (pc[choices], choices) else: return pc[choices]",True,replace is None,replace is None,0.6564013957977295 164,"def random_sampling(pc, num_sample, replace=None, return_choices=False): """""" Input is NxC, output is num_samplexC """""" if replace is None: replace = pc.shape[0] < num_sample choices = np.random.choice(pc.shape[0], num_sample, replace=replace) if: return (pc[choices], choices) else: return pc[choices]",True,return_choices,return_choices,0.6621496677398682 165,"def _iterate_parents(self, upto=None): if: return (self,) else: if self._parent is None: raise sa_exc.InvalidRequestError('Transaction %s is not on the active transaction list' % upto) return (self,) + self._parent._iterate_parents(upto)",False,self._parent is upto,upto is None,0.6606658697128296 166,"def _iterate_parents(self, upto=None): if self._parent is upto: return (self,) else: if: raise sa_exc.InvalidRequestError('Transaction %s is not on the active transaction list' % upto) return (self,) + self._parent._iterate_parents(upto)",False,self._parent is None,upto is None,0.6587779521942139 167,"def add_metaclass(metaclass): """"""Class decorator for creating a class with a metaclass."""""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper",True,slots is not None,slots is not None,0.6484737396240234 168,"def add_metaclass(metaclass): """"""Class decorator for creating a class with a metaclass."""""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if: slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper",True,"isinstance(slots, str)","isinstance(slots, str)",0.6445169448852539 169,"def main(argv): args = common.ParseOptions(argv, __doc__) if: common.Usage(__doc__) sys.exit(1) common.InitLogging() CheckPartitionSizes(args[0])",True,len(args) != 1,len(args) != 1,0.6504237651824951 170,"@property def directionsLanguage(self): if: self.__init() return self._directionsLanguage",True,self._directionsLanguage is None,self._directionsLanguage is None,0.6546432375907898 171,"def __init__(self, module): super(ZipResourceFinder, self).__init__(module) archive = self.loader.archive self.prefix_len = 1 + len(archive) if: self._files = self.loader._files else: self._files = zipimport._zip_directory_cache[archive] self.index = sorted(self._files)",True,"hasattr(self.loader, '_files')","hasattr(self.loader, '_files')",0.6460102796554565 172,"def isModifiedByAnd(self, terms): """"""return True if self is modified by all items in the list terms"""""" if: return False if type(terms) == type(''): return self.isModifiedBy(terms) for t in terms: if not self.isModifiedBy(t): return False return True",True,not self.__modifiedBy,not self.__modifiedBy,0.6492145657539368 173,"def isModifiedByAnd(self, terms): """"""return True if self is modified by all items in the list terms"""""" if not self.__modifiedBy: return False if: return self.isModifiedBy(terms) for t in terms: if not self.isModifiedBy(t): return False return True",True,type(terms) == type(''),type(terms) == type(''),0.6433017253875732 174,"def isModifiedByAnd(self, terms): """"""return True if self is modified by all items in the list terms"""""" if not self.__modifiedBy: return False if type(terms) == type(''): return self.isModifiedBy(terms) for t in terms: if: return False return True",True,not self.isModifiedBy(t),not self.isModifiedBy(t),0.6428544521331787 175,"def send_break(self, duration=0.25): """"""Send break condition. Timed, returns to idle state after given duration."""""" if: raise portNotOpenError self.sPort.sendBreak(duration * 1000.0)",True,not self.sPort,not self.sPort,0.6548901200294495 176,"def iter_fields(fields): """""" .. deprecated:: 1.6 Iterate over fields. The addition of :class:`~urllib3.fields.RequestField` makes this function obsolete. Instead, use :func:`iter_field_objects`, which returns :class:`~urllib3.fields.RequestField` objects. Supports list of (k, v) tuples and dicts. """""" if: return ((k, v) for k, v in six.iteritems(fields)) return ((k, v) for k, v in fields)",True,"isinstance(fields, dict)","isinstance(fields, dict)",0.6449024677276611 177,"def __eq__(self, other): if: try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented return self._spec == other._spec",True,"isinstance(other, string_types)","isinstance(other, string_types)",0.6474127769470215 178,"def __eq__(self, other): if isinstance(other, string_types): try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif: return NotImplemented return self._spec == other._spec",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.6466397047042847 179,"@staticmethod def check_source(opts, source_class, source_spec: str, sink_class, sink_spec: str) -> couchbaseConstants.PUMP_ERROR: if: return f'error: source and sink must be different; source: {source_spec} sink: {sink_spec}' return 0",False,source_spec == sink_spec,source_spec != sink_spec,0.65130615234375 180,"def inspection_types(self): """""" Lazily loads *all* inspection types into memory and returns a dictionary keyed by inspection type ID. """""" if: self._inspection_type_cache = dict([(row['InspectionTypeID'], row) for row in self.mdb_table('tblInspectionTypes')]) if not self._inspection_type_cache: raise ScraperBroken('tblInspectionTypes was either empty or nonexistent') return self._inspection_type_cache",False,self._inspection_type_cache is None,"not hasattr(self, '_inspection_type_cache')",0.653220534324646 181,"def inspection_types(self): """""" Lazily loads *all* inspection types into memory and returns a dictionary keyed by inspection type ID. """""" if self._inspection_type_cache is None: self._inspection_type_cache = dict([(row['InspectionTypeID'], row) for row in self.mdb_table('tblInspectionTypes')]) if: raise ScraperBroken('tblInspectionTypes was either empty or nonexistent') return self._inspection_type_cache",True,not self._inspection_type_cache,not self._inspection_type_cache,0.6526228189468384 182,"def pack_value(value: BitPackValue, metadata: dict | None=None) -> bytes: if: metadata = {} results = [] for i, (value_argument, value_format) in enumerate(value.bit_pack_encode(metadata)): if 0 <= value_argument < value_format: results.append((value_argument, value_format)) else: raise ValueError(f'At {i}, got {value_argument} which not in range [0, {value_format}[') return _pack_encode_results(results)",True,metadata is None,metadata is None,0.6583060026168823 183,"def pack_value(value: BitPackValue, metadata: dict | None=None) -> bytes: if metadata is None: metadata = {} results = [] for i, (value_argument, value_format) in enumerate(value.bit_pack_encode(metadata)): if: results.append((value_argument, value_format)) else: raise ValueError(f'At {i}, got {value_argument} which not in range [0, {value_format}[') return _pack_encode_results(results)",False,0 <= value_argument < value_format,i == 0,0.6489828824996948 184,"def do(): try: if: self.run_do() except Exception as e: if self.logger: self.logger.exception(e) else: raise e finally: if self.lock and self.lock.locked(): self.lock.release()",False,not self.lock or self.lock.acquire(0),self.run_do,0.6447834968566895 185,"def do(): try: if not self.lock or self.lock.acquire(0): self.run_do() except Exception as e: if self.logger: self.logger.exception(e) else: raise e finally: if: self.lock.release()",False,self.lock and self.lock.locked(),not self.lock,0.6460902690887451 186,"def do(): try: if not self.lock or self.lock.acquire(0): self.run_do() except Exception as e: if: self.logger.exception(e) else: raise e finally: if self.lock and self.lock.locked(): self.lock.release()",True,self.logger,self.logger,0.6533626317977905 187,"def reset(self): """""" Reset all environments """""" obs = self.venv.reset() if: obs['vector'] = self._obfilt(obs['vector']) else: obs = self._obfilt(obs) return obs",False,"isinstance(obs, dict)",'vector' in obs,0.6489845514297485 188,"def _get_subscriber(self): activation_info = [] entry = {} cmd = ['/opt/ibm/seprovider/bin/getSubscriber'] output, error, rc = run_command(cmd) if: return activation_info if rc!= 0: raise OperationFailed('GINSEP0007E') if len(output) > 1: for line in output.splitlines(): if len(line) > 0: entry = SUBSCRIBER.search(line).groupdict() activation_info.append(entry['hostname']) return activation_info",False,rc == 1,error is None,0.6652562618255615 189,"def _get_subscriber(self): activation_info = [] entry = {} cmd = ['/opt/ibm/seprovider/bin/getSubscriber'] output, error, rc = run_command(cmd) if rc == 1: return activation_info if: raise OperationFailed('GINSEP0007E') if len(output) > 1: for line in output.splitlines(): if len(line) > 0: entry = SUBSCRIBER.search(line).groupdict() activation_info.append(entry['hostname']) return activation_info",False,rc != 0,error == 2,0.6630527973175049 190,"def _get_subscriber(self): activation_info = [] entry = {} cmd = ['/opt/ibm/seprovider/bin/getSubscriber'] output, error, rc = run_command(cmd) if rc == 1: return activation_info if rc!= 0: raise OperationFailed('GINSEP0007E') if: for line in output.splitlines(): if len(line) > 0: entry = SUBSCRIBER.search(line).groupdict() activation_info.append(entry['hostname']) return activation_info",False,len(output) > 1,error == 2,0.6492688655853271 191,"def _get_subscriber(self): activation_info = [] entry = {} cmd = ['/opt/ibm/seprovider/bin/getSubscriber'] output, error, rc = run_command(cmd) if rc == 1: return activation_info if rc!= 0: raise OperationFailed('GINSEP0007E') if len(output) > 1: for line in output.splitlines(): if: entry = SUBSCRIBER.search(line).groupdict() activation_info.append(entry['hostname']) return activation_info",False,len(line) > 0,line,0.6486085653305054 192,"def remove_empty_columns(R, M): new_R, new_M = ([], []) for j, sum_column in enumerate(M.sum(axis=0)): if: new_R.append(R[:, j]) new_M.append(M[:, j]) return (numpy.array(new_R).T, numpy.array(new_M).T)",True,sum_column > 0,sum_column > 0,0.6547243595123291 193,"def _get_bpe_word_idx(self, x): """""" Given a list of BPE tokens, for every index in the tokens list, return the index of the word grouping that it belongs to. For example, for input x corresponding to [""how"", ""are"", ""y@@"", ""ou""], return [[0], [1], [2], [2]]. """""" bpe_end = self.bpe_end[x] if: return np.array([[0]]) word_idx = bpe_end[::-1].cumsum(0)[::-1] word_idx = word_idx.max(0)[None, :] - word_idx return word_idx",False,x.size(0) == 1 and x.size(1) == 1,x.size(0) == 1 and x.size(1) == 1 and (x.size(1) == 1) and (x.size(1) == 1),0.6478654742240906 194,"def build_auth_path(self, bucket, key=''): key = boto.utils.get_utf8_value(key) path = '' if: path = '/' + bucket return path + '/%s' % urllib.parse.quote(key)",False,bucket != '',bucket,0.6901131272315979 195,"def output_applicationfault(data_object): if: return output_status_message('* * * Begin output_applicationfault * * *') output_status_message('TrackingId: {0}'.format(data_object.TrackingId)) if data_object.Type == 'AdApiFaultDetail': output_adapifaultdetail(data_object) if data_object.Type == 'ApiFault': output_apifault(data_object) output_status_message('* * * End output_applicationfault * * *')",True,data_object is None,data_object is None,0.6521626710891724 196,"def output_applicationfault(data_object): if data_object is None: return output_status_message('* * * Begin output_applicationfault * * *') output_status_message('TrackingId: {0}'.format(data_object.TrackingId)) if: output_adapifaultdetail(data_object) if data_object.Type == 'ApiFault': output_apifault(data_object) output_status_message('* * * End output_applicationfault * * *')",False,data_object.Type == 'AdApiFaultDetail',data_object.Type == 'Adapifaultdetail',0.6456727981567383 197,"def output_applicationfault(data_object): if data_object is None: return output_status_message('* * * Begin output_applicationfault * * *') output_status_message('TrackingId: {0}'.format(data_object.TrackingId)) if data_object.Type == 'AdApiFaultDetail': output_adapifaultdetail(data_object) if: output_apifault(data_object) output_status_message('* * * End output_applicationfault * * *')",False,data_object.Type == 'ApiFault',data_object.Type == 'ApApiFault',0.6474363207817078 198,"def apply_reorder_incremental_state(module): if: module.reorder_incremental_state(incremental_state, new_order)",True,"module != self and hasattr(module, 'reorder_incremental_state')","module != self and hasattr(module, 'reorder_incremental_state')",0.6419668197631836 199,"@register.simple_tag(takes_context=True) def tbi_slug(context: dict, label: str, slug: str): """"""Render an internal TBItem link from a slug. The value must include everything after ""/tb/t/"". Usage: {% tbi_slug ""my-slug"" %} """""" url = reverse('topic_blog:view_item_by_slug', args=[slug]) if: request = context['request'] url = request.build_absolute_uri(url) html = '{label}'.format(url=url, label=label) return mark_safe(html)",False,k_render_as_email in context,context.get('request'),0.6502838134765625 200,"@catch_all def validate(self, inp, pos): if: return (self.State.Acceptable, inp, pos) return (self.State.Intermediate, inp, pos)",False,os.path.abspath(inp) == inp,self.State.Acceptable is not None,0.6479153633117676 201,"@cache.CacheDecorator('tests/connections') def calculate(self): addr_space = utils.load_as(self._config) if: debug.error('This command does not support the selected profile.') return network.determine_connections(addr_space)",False,not self.is_valid_profile(addr_space.profile),'networks' not in self._config,0.6450906991958618 202,"def delete_background_image(sender, instance, **kwargs): if: delete_from_storage_task.delay(img.name)",True,img := instance.background_image,img := instance.background_image,0.6498156189918518 203,"def _resolve_link(self, link_path, target_path): self._links[link_path] = target_path for cached_link_path, cached_target_path in self._links.items(): if: self._links[cached_link_path] = target_path",False,self._expand_link(cached_target_path) == link_path,cached_target_path == link_path,0.6445895433425903 204,"def get_confidence(self): """"""return confidence based on existing data"""""" if: return SURE_NO if self._mTotalChars!= self._mFreqChars: r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio) if r < SURE_YES: return r return SURE_YES",True,self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD,self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD,0.6502353549003601 205,"def get_confidence(self): """"""return confidence based on existing data"""""" if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD: return SURE_NO if: r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio) if r < SURE_YES: return r return SURE_YES",True,self._mTotalChars != self._mFreqChars,self._mTotalChars != self._mFreqChars,0.6556333303451538 206,"def get_confidence(self): """"""return confidence based on existing data"""""" if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD: return SURE_NO if self._mTotalChars!= self._mFreqChars: r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio) if: return r return SURE_YES",True,r < SURE_YES,r < SURE_YES,0.6624365448951721 207,"def __cmp__(self, other): if: return -1 if self.name == 'default': return -1 return 1 if other.name == 'default' else 1",False,"not isinstance(other, SoundModes.SoundModeDesc)",self.name == other.name,0.6463820934295654 208,"def __cmp__(self, other): if not isinstance(other, SoundModes.SoundModeDesc): return -1 if: return -1 return 1 if other.name == 'default' else 1",False,self.name == 'default',self.name != other.name,0.6544499397277832 209,"def _embed_img(display): """"""Embed an image or just return its instance if already embedded. Parameters ---------- display : obj A Nilearn plotting object to display. Returns ------- embed : str Binary image string. """""" if: return None if isinstance(display, str): return display return figure_to_svg_base64(display.frame_axes.figure)",True,display is None,display is None,0.6550337076187134 210,"def _embed_img(display): """"""Embed an image or just return its instance if already embedded. Parameters ---------- display : obj A Nilearn plotting object to display. Returns ------- embed : str Binary image string. """""" if display is None: return None if: return display return figure_to_svg_base64(display.frame_axes.figure)",False,"isinstance(display, str)",display.embed,0.645338773727417 211,"def warns(self, before, after, message, unchanged=False): tree = self._check(before, after) self.assertIn(message, ''.join(self.fixer_log)) if: self.assertTrue(tree.was_changed)",False,not unchanged,unchanged,0.6654309630393982 212,"def tlv(buf): n = 4 try: t, l = struct.unpack('>HH', buf[:n]) except struct.error: raise dpkt.UnpackError('invalid type, length fields') v = buf[n:n + l] if: raise dpkt.NeedData('%d left, %d needed' % (len(v), l)) buf = buf[n + l:] return (t, l, v, buf)",False,len(v) < l,len(v) != l,0.649031400680542 213,"def validate_minimal(self) -> bool: """"""Checks if the loaded JSON file represents a valid minimal VERSIONINFO resource. Returns: (bool): valid or not """""" valid = True required = self._minimal_required_fields.copy() for key in self._version_dict: if key not in required: valid = False logging.error('Invalid minimal parameter: %s.', key) else: required.remove(key.upper()) if: return False return validate_version_number(self._version_dict[PEStrings.FILE_VERSION_STR])",True,not valid,not valid,0.6595147848129272 214,"def validate_minimal(self) -> bool: """"""Checks if the loaded JSON file represents a valid minimal VERSIONINFO resource. Returns: (bool): valid or not """""" valid = True required = self._minimal_required_fields.copy() for key in self._version_dict: if: valid = False logging.error('Invalid minimal parameter: %s.', key) else: required.remove(key.upper()) if not valid: return False return validate_version_number(self._version_dict[PEStrings.FILE_VERSION_STR])",False,key not in required,key.upper() not in required,0.6601539850234985 215,"def decorated_function(function): """"""Wrap function."""""" if: return function else: def passer(*args, **kwargs): print('Missing dependencies: {d}.'.format(d=missing)) print('Not running `{}`.'.format(function.__name__)) return passer",False,not missing,is_missing(function),0.6559614539146423 216,"def _get_step_config_from_proto(preprocessor_step_config, step_name): """"""Returns the value of a field named step_name from proto. Args: preprocessor_step_config: A preprocessor_pb2.PreprocessingStep object. step_name: Name of the field to get value from. Returns: result_dict: a sub proto message from preprocessor_step_config which will be later converted to a dictionary. Raises: ValueError: If field does not exist in proto. """""" for field, value in preprocessor_step_config.ListFields(): if: return value raise ValueError('Could not get field %s from proto!', step_name)",False,field.name == step_name,field == step_name,0.6503969430923462 217,"def postprocess_obs_dict(obs_dict): """""" Undo internal replay buffer representation changes: save images as bytes """""" for obs_key, obs in obs_dict.items(): if: obs_dict[obs_key] = normalize_image(obs) return obs_dict",True,'image' in obs_key and obs is not None,'image' in obs_key and obs is not None,0.6442729830741882 218,"def check_orphan(self): res = self.is_orphan() if: self._callback() return res",False,res and self._callback,self._callback,0.6531074643135071 219,"def validate_logserver(self): """"""Validates logserver and exits if invalid"""""" if: end(HELP, 'You must enter a logserver hostname or ip address') if not self.re_validation.match(self.logserver): end(UNKNOWN, 'logserver name/ip address supplied contains'+ 'unusable characters')",False,self.logserver is None,self.logserver is None or self.logserver.hostname is None or self.ip_address is None,0.6505424380302429 220,"def validate_logserver(self): """"""Validates logserver and exits if invalid"""""" if self.logserver is None: end(HELP, 'You must enter a logserver hostname or ip address') if: end(UNKNOWN, 'logserver name/ip address supplied contains'+ 'unusable characters')",False,not self.re_validation.match(self.logserver),not self.logserver.lower().startswith('http:') or not self.logserver.lower().startswith('http:'),0.642812192440033 221,"def to_python_variable_name(name: str): result = name.lower().replace(' ', '_').replace('-', '_') if: result = 'digit_' + result result = re.sub('\\W+', '_', result) result = result.lower() return result",False,result[0].isdigit(),type(result) == type(''),0.6494843363761902 222,"def close(self): if: self._debug_conn.safe_shutdown() self._debug_conn = None",False,self._debug_conn and self._debug_conn is not self._conn,self._debug_conn is not None,0.6469855308532715 223,"def __fullread(self, size): """""" Read a certain number of bytes from the source file. """""" try: if size < 0: raise ValueError('Requested bytes (%s) less than zero' % size) if size > self.__filesize: raise EOFError('Requested %#x of %#x (%s)' % (int(size), int(self.__filesize), self.filename)) except AttributeError: pass data = self._fileobj.read(size) if: raise EOFError self.__readbytes += size return data",False,len(data) != size,not data,0.6459337472915649 224,"def __fullread(self, size): """""" Read a certain number of bytes from the source file. """""" try: if: raise ValueError('Requested bytes (%s) less than zero' % size) if size > self.__filesize: raise EOFError('Requested %#x of %#x (%s)' % (int(size), int(self.__filesize), self.filename)) except AttributeError: pass data = self._fileobj.read(size) if len(data)!= size: raise EOFError self.__readbytes += size return data",True,size < 0,size < 0,0.6645493507385254 225,"def __fullread(self, size): """""" Read a certain number of bytes from the source file. """""" try: if size < 0: raise ValueError('Requested bytes (%s) less than zero' % size) if: raise EOFError('Requested %#x of %#x (%s)' % (int(size), int(self.__filesize), self.filename)) except AttributeError: pass data = self._fileobj.read(size) if len(data)!= size: raise EOFError self.__readbytes += size return data",False,size > self.__filesize,"size > self.__fileobj.read(self.__fileobj, size)",0.6537340879440308 226,"def freeze(self, freeze_at=0): """""" Freeze the first several stages of the model. Commonly used in fine-tuning. Layers that produce the same feature map spatial size are defined as one ""stage"" by :paper:`FPN`. Args: freeze_at (int): number of stages to freeze. `1` means freezing the stem. `2` means freezing the stem and one residual stage, etc. Returns: nn.Module: this model itself """""" if: self.stem.freeze() for idx, (stage, _) in enumerate(self.stages_and_names, start=2): if freeze_at >= idx: for block in stage.children(): block.freeze() return self",False,freeze_at >= 1,freeze_at >= 0,0.6577537059783936 227,"def freeze(self, freeze_at=0): """""" Freeze the first several stages of the model. Commonly used in fine-tuning. Layers that produce the same feature map spatial size are defined as one ""stage"" by :paper:`FPN`. Args: freeze_at (int): number of stages to freeze. `1` means freezing the stem. `2` means freezing the stem and one residual stage, etc. Returns: nn.Module: this model itself """""" if freeze_at >= 1: self.stem.freeze() for idx, (stage, _) in enumerate(self.stages_and_names, start=2): if: for block in stage.children(): block.freeze() return self",False,freeze_at >= idx,freeze_at >= 0,0.6535623073577881 228,"def get_parser(): parser = argparse.ArgumentParser(description='PyTorch Semantic Segmentation') parser.add_argument('--config', type=str, default='config/ade20k/ade20k_pspnet50.yaml', help='config file') parser.add_argument('opts', help='see config/ade20k/ade20k_pspnet50.yaml for all options', default=None, nargs=argparse.REMAINDER) args = parser.parse_args() assert args.config is not None cfg = config.load_cfg_from_cfg_file(args.config) if: cfg = config.merge_cfg_from_list(cfg, args.opts) return cfg",True,args.opts is not None,args.opts is not None,0.6515011787414551 229,"def ParseNoFromSeason(season, episode): if: return str(episode) elif season == 0: return 'S' + str(episode)",False,season >= 1,season == 1,0.6585592031478882 230,"def ParseNoFromSeason(season, episode): if season >= 1: return str(episode) elif: return 'S' + str(episode)",False,season == 0,season == 12,0.6639558672904968 231,"def get_fed_id(chat_id): get = FEDERATION_CHATS.get(str(chat_id)) if: return False else: return get['fid']",True,get is None,get is None,0.6571329832077026 232,"def gds_validate_builtin_ST_(self, validator, value, input_name, min_occurs=None, max_occurs=None, required=None): if: try: validator(value, input_name=input_name) except GDSParseError as parse_error: self.gds_collector_.add_message(str(parse_error))",True,value is not None,value is not None,0.6505969166755676 233,"def close(self): """""" Shuts down the TLS session and socket and forcibly closes it """""" try: self.shutdown() finally: if: try: self._socket.close() except socket_.error: pass self._socket = None if self._connection_id in _socket_refs: del _socket_refs[self._connection_id]",True,self._socket,self._socket,0.656726598739624 234,"def close(self): """""" Shuts down the TLS session and socket and forcibly closes it """""" try: self.shutdown() finally: if self._socket: try: self._socket.close() except socket_.error: pass self._socket = None if: del _socket_refs[self._connection_id]",True,self._connection_id in _socket_refs,self._connection_id in _socket_refs,0.6468115448951721 235,"def append_data_list(file_dir, data_list, only_true_data=False): if: for root, dirs, files in os.walk(file_dir): for dir in dirs: data_list.append(os.path.join(file_dir, dir)) break return data_list",False,file_dir != 'xxx',only_true_data,0.6505033373832703 236,"def fix_atomic_specifiers(decl): """"""Atomic specifiers like _Atomic(type) are unusually structured, conferring a qualifier upon the contained type. This function fixes a decl with atomic specifiers to have a sane AST structure, by removing spurious Typename->TypeDecl pairs and attaching the _Atomic qualifier in the right place. """""" while True: decl, found = _fix_atomic_specifiers_once(decl) if not found: break typ = decl while not isinstance(typ, c_ast.TypeDecl): try: typ = typ.type except AttributeError: return decl if: decl.quals.append('_Atomic') if typ.declname is None: typ.declname = decl.name return decl",False,'_Atomic' in typ.quals and '_Atomic' not in decl.quals,"isinstance(decl, c_ast.TypeDecl)",0.647423505783081 237,"def fix_atomic_specifiers(decl): """"""Atomic specifiers like _Atomic(type) are unusually structured, conferring a qualifier upon the contained type. This function fixes a decl with atomic specifiers to have a sane AST structure, by removing spurious Typename->TypeDecl pairs and attaching the _Atomic qualifier in the right place. """""" while True: decl, found = _fix_atomic_specifiers_once(decl) if not found: break typ = decl while not isinstance(typ, c_ast.TypeDecl): try: typ = typ.type except AttributeError: return decl if '_Atomic' in typ.quals and '_Atomic' not in decl.quals: decl.quals.append('_Atomic') if: typ.declname = decl.name return decl",False,typ.declname is None,"hasattr(typ, 'declname')",0.6491817235946655 238,"def fix_atomic_specifiers(decl): """"""Atomic specifiers like _Atomic(type) are unusually structured, conferring a qualifier upon the contained type. This function fixes a decl with atomic specifiers to have a sane AST structure, by removing spurious Typename->TypeDecl pairs and attaching the _Atomic qualifier in the right place. """""" while True: decl, found = _fix_atomic_specifiers_once(decl) if: break typ = decl while not isinstance(typ, c_ast.TypeDecl): try: typ = typ.type except AttributeError: return decl if '_Atomic' in typ.quals and '_Atomic' not in decl.quals: decl.quals.append('_Atomic') if typ.declname is None: typ.declname = decl.name return decl",True,not found,not found,0.6604551076889038 239,"def __init__(self, logger=None, handler=None, formatter=None): if: logger = logging.getLogger('pyasn1') logger.setLevel(logging.DEBUG) if handler is None: handler = logging.StreamHandler() if formatter is None: formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') handler.setFormatter(formatter) handler.setLevel(logging.DEBUG) logger.addHandler(handler) self.__logger = logger",True,logger is None,logger is None,0.6581186652183533 240,"def __init__(self, logger=None, handler=None, formatter=None): if logger is None: logger = logging.getLogger('pyasn1') logger.setLevel(logging.DEBUG) if: handler = logging.StreamHandler() if formatter is None: formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') handler.setFormatter(formatter) handler.setLevel(logging.DEBUG) logger.addHandler(handler) self.__logger = logger",True,handler is None,handler is None,0.6543397903442383 241,"def __init__(self, logger=None, handler=None, formatter=None): if logger is None: logger = logging.getLogger('pyasn1') logger.setLevel(logging.DEBUG) if handler is None: handler = logging.StreamHandler() if: formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') handler.setFormatter(formatter) handler.setLevel(logging.DEBUG) logger.addHandler(handler) self.__logger = logger",True,formatter is None,formatter is None,0.6560178995132446 242,"def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate, ibn): super(_DenseBlock, self).__init__() for i in range(num_layers): if: layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate, True) else: layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate, False) self.add_module('denselayer%d' % (i + 1), layer)",False,ibn and i % 3 == 0,ibn,0.6528332829475403 243,"def output_array_of_bidlandscapepoint(data_objects): if: return for data_object in data_objects['BidLandscapePoint']: output_bidlandscapepoint(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.6454564332962036 244,"def wait(self): self._status_update_count += 1 if: raise TimeoutException('Timeout at polling.') if self._status_update_count >= _PollingBlocker.NUMBER_OF_INITIAL_STATUS_CHECKS: time.sleep(self._poll_interval_in_milliseconds / 1000.0) else: time.sleep(_PollingBlocker.INITIAL_STATUS_CHECK_INTERVAL_IN_MS / 1000.0)",False,self._timeout_stamp is not None and int(round(time.time()) * 1000) > self._timeout_stamp,self._status_update_count >= _polling_interval,0.6489636898040771 245,"def wait(self): self._status_update_count += 1 if self._timeout_stamp is not None and int(round(time.time()) * 1000) > self._timeout_stamp: raise TimeoutException('Timeout at polling.') if: time.sleep(self._poll_interval_in_milliseconds / 1000.0) else: time.sleep(_PollingBlocker.INITIAL_STATUS_CHECK_INTERVAL_IN_MS / 1000.0)",False,self._status_update_count >= _PollingBlocker.NUMBER_OF_INITIAL_STATUS_CHECKS,self._status_update_count == 0,0.6448144912719727 246,"def OnPaint(self, event): if: dc = wx.BufferedPaintDC(self, self._Buffer) else: dc = wx.PaintDC(self) self.PrepareDC(dc) dc.DrawBitmap(self._Buffer, 0, 0)",True,USE_BUFFERED_DC,USE_BUFFERED_DC,0.6554020643234253 247,"@staticmethod def _get_previous_pitch(music_specifier, previous_pitch_by_music_specifier, voice): key = (voice, music_specifier) if: previous_pitch_by_music_specifier[key] = None previous_pitch = previous_pitch_by_music_specifier[key] return previous_pitch",True,key not in previous_pitch_by_music_specifier,key not in previous_pitch_by_music_specifier,0.6451401710510254 248,"@property def _n_hanging_faces_x(self): """"""Number of hanging Fx."""""" if: return int(np.prod(self.shape_cells[1:])) else: return 0",False,self.includes_zero,self.dim >= 2,0.6462757587432861 249,"def decode_sequence(vocab, seq): N, T = seq.size() sents = [] for n in range(N): words = [] for t in range(T): ix = seq[n, t] if: break words.append(vocab[ix]) sent =''.join(words) sents.append(sent) return sents",True,ix == 0,ix == 0,0.6593527793884277 250,"def __init__(self, config, model, dataloader, evaluator): """""" Args: config (config): An instance object of Config, used to record parameter information. model (Model): An object of deep-learning model. dataloader (Dataloader): dataloader object. evaluator (Evaluator): evaluator object. expected that config includes these parameters below: learning_rate (float): learning rate of model train_batch_size (int): the training batch size. epoch_nums (int): number of epochs. step_size (int): step_size of scheduler. trained_model_path (str): a path of file which is used to save parameters of best model. checkpoint_path (str): a path of file which is used save checkpoint of training progress. output_path (str|None): a path of a json file which is used to save test output infomation fo model. resume (bool): start training from last checkpoint. validset_divide (bool): whether to split validset. if True, the dataset is split to trainset-validset-testset. if False, the dataset is split to trainset-testset. test_step (int): the epoch number of training after which conducts the evaluation on test. best_folds_accuracy (list|None): when running k-fold cross validation, this keeps the accuracy of folds that already run. """""" super().__init__(config, model, dataloader, evaluator) self.t_start_epoch = 0 self.s_start_epoch = 0 self.t_epoch_i = 0 self.s_epoch_i = 0 self._build_optimizer() if: self._load_checkpoint()",False,config['resume'] or config['training_resume'],config['resume'],0.638759970664978 251,"def __post_init__(self): super().__post_init__() if: self.cluster_name = ClusterName() if self.kubeconfig_path is None: self.kubeconfig_path = utils.get_kubeconfig_path(self.cluster_name.get())",False,"self.cluster_name is None or isinstance(self.cluster_name, str)",self.cluster_name is None,0.648577094078064 252,"def __post_init__(self): super().__post_init__() if self.cluster_name is None or isinstance(self.cluster_name, str): self.cluster_name = ClusterName() if: self.kubeconfig_path = utils.get_kubeconfig_path(self.cluster_name.get())",True,self.kubeconfig_path is None,self.kubeconfig_path is None,0.6508557200431824 253,"def __init__(self, config, *inputs, **kwargs): super(PreTrainedModel, self).__init__() if: raise ValueError('Parameter config in `{}(config)` should be an instance of class `PretrainedConfig`. To create a model from a pretrained model use `model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`'.format(self.__class__.__name__, self.__class__.__name__)) self.config = config",False,"not isinstance(config, PretrainedConfig)","not isinstance(config, pretrainedConfig)",0.6529104709625244 254,"def start(self): ignored = [] selection = self.ctx.result.get('identify', {}).get('selection', []) for f in selection[:]: ignore = self._safelist_file(f) if: ignored.append(ignore) selection.remove(f) return ignored",True,ignore,ignore,0.668021559715271 255,"def new_selection(self, selection): if: for key in self.widgets: self.widgets[key].set_value(None) self.set_enabled(False) return for key in self.widgets: self._update_widget(key, selection) self.set_enabled(True)",False,not selection,selection is None,0.6494703888893127 256,"def __getitem__(self, idx): image = self.load_image(idx) annots = self.load_annots(idx) scale = np.array(1.0).astype(np.float32) size = np.array([image.shape[0], image.shape[1]]).astype(np.float32) sample = {'image': image, 'annots': annots,'scale': scale,'size': size} if: sample = self.transform(sample) return sample",True,self.transform,self.transform,0.6671528816223145 257,"@pytest.mark.parametrize('framework_status', Framework.STATUSES) def test_services_can_not_be_copied_to_a_framework_that_is_not_open(self, framework_status): if: return self.set_framework_status('g-cloud-7', framework_status) res = self.client.put('/draft-services/copy-from/{}'.format(self.service_id), data=json.dumps({**self.updater_json, **self.basic_questions_json, 'targetFramework': 'g-cloud-7'}), content_type='application/json') assert res.status_code == 400 assert 'Target framework is not open' in res.get_data(as_text=True)",False,framework_status == 'open',framework_status == Framework.STATUSES,0.6536476612091064 258,"@property def braintree_data(self): data = {'amount': self.cleaned_data['amount'], 'options': {'submit_for_settlement': True}} if: data['payment_method_token'] = self.cleaned_data['paymethod'].provider_id elif self.cleaned_data.get('nonce'): data['payment_method_nonce'] = self.cleaned_data['nonce'] return data",True,self.cleaned_data.get('paymethod'),self.cleaned_data.get('paymethod'),0.6514558792114258 259,"@property def braintree_data(self): data = {'amount': self.cleaned_data['amount'], 'options': {'submit_for_settlement': True}} if self.cleaned_data.get('paymethod'): data['payment_method_token'] = self.cleaned_data['paymethod'].provider_id elif: data['payment_method_nonce'] = self.cleaned_data['nonce'] return data",True,self.cleaned_data.get('nonce'),self.cleaned_data.get('nonce'),0.6491628885269165 260,"def _getParent(self, ncbid): """""" Gets direct parent ncbi taxon id. """""" parent = self._ncbidToNcbidParent.get(ncbid, None) if: parent = self._taxonomy.getParentNcbid(ncbid) self._ncbidToNcbidParent[ncbid] = parent return parent",True,parent is None,parent is None,0.6548084020614624 261,"def _check_is_max_context(doc_spans, cur_span_index, position): """"""Check if this is the'max context' doc span for the token."""""" best_score = None best_span_index = None for span_index, doc_span in enumerate(doc_spans): end = doc_span.start + doc_span.length - 1 if: continue if position > end: continue num_left_context = position - doc_span.start num_right_context = end - position score = min(num_left_context, num_right_context) + 0.01 * doc_span.length if best_score is None or score > best_score: best_score = score best_span_index = span_index return cur_span_index == best_span_index",False,position < doc_span.start,span_index == 0,0.6510602831840515 262,"def _check_is_max_context(doc_spans, cur_span_index, position): """"""Check if this is the'max context' doc span for the token."""""" best_score = None best_span_index = None for span_index, doc_span in enumerate(doc_spans): end = doc_span.start + doc_span.length - 1 if position < doc_span.start: continue if: continue num_left_context = position - doc_span.start num_right_context = end - position score = min(num_left_context, num_right_context) + 0.01 * doc_span.length if best_score is None or score > best_score: best_score = score best_span_index = span_index return cur_span_index == best_span_index",False,position > end,end < doc_span.start,0.6604651212692261 263,"def _check_is_max_context(doc_spans, cur_span_index, position): """"""Check if this is the'max context' doc span for the token."""""" best_score = None best_span_index = None for span_index, doc_span in enumerate(doc_spans): end = doc_span.start + doc_span.length - 1 if position < doc_span.start: continue if position > end: continue num_left_context = position - doc_span.start num_right_context = end - position score = min(num_left_context, num_right_context) + 0.01 * doc_span.length if: best_score = score best_span_index = span_index return cur_span_index == best_span_index",False,best_score is None or score > best_score,score > best_score,0.6450030207633972 264,"def __init__(self, optimizer, big_gamma=0.999, epsilon=1e-08, from_grad=True): """""" Apply Gap Aware on computed gradients """""" super().__init__(optimizer) self.big_gamma = big_gamma self.running_avg_step = init_running_avg_step(optimizer) self.epsilon = epsilon for pg in self.optimizer.param_groups: for p in pg['params']: if: self.optimizer.state[p]['momentum_buffer'] = torch.zeros_like(p)",False,'momentum_buffer' not in self.optimizer.state[p],p.requires_grad,0.6456429958343506 265,"def _names_by_code(states): d = {} for name in states.__dict__: if: code = getattr(states, name) d[code] = name return d",False,not name.startswith('__'),"hasattr(states, name)",0.6441135406494141 266,"@restart_executor def map(self, function, *iterables, **kwargs): """""" Calls *function* for every item in *iterables* then calls *callback* ( if provided as a keyword argument via *kwargs*) with a list containing the results when complete. The results list will be in the order in which *iterables* was passed to *function* (not random or based on how long they took to complete). Any additional *kwargs* will be passed to the *function* with each iteration of *iterables*. """""" callback = kwargs.pop('callback', None) futures = [] for i in iterables: futures.append(self.executor.submit(safe_call, function, i, **kwargs)) if: callback_when_complete(futures, callback) return futures",False,callback,callback is not None,0.6648369431495667 267,"def close(self): try: if self.game is not None: self.game.close() except RuntimeError as exc: log.warning('Runtime error in VizDoom game close(): %r', exc) if: import pygame pygame.display.quit() pygame.quit()",True,self.screen is not None,self.screen is not None,0.6483498811721802 268,"def close(self): try: if: self.game.close() except RuntimeError as exc: log.warning('Runtime error in VizDoom game close(): %r', exc) if self.screen is not None: import pygame pygame.display.quit() pygame.quit()",False,self.game is not None,"hasattr(self, 'game')",0.6549391746520996 269,"@confluence_measure('locust_search_cql:search_results') def search_cql(): r = locust.get(f""/rest/api/search?cql=siteSearch~'{generate_random_string(3, only_letters=True)}'&start=0&limit=20"", catch_response=True) if: logger.locust_info(r.content.decode('utf-8')) content = r.content.decode('utf-8') if'results' not in content: logger.error(f'Search cql failed: {content}') assert'results' in content, 'Search cql failed.' locust.get('/rest/mywork/latest/status/notification/count', catch_response=True)",False,"'{""results"":[' not in r.content.decode('utf-8')",r.status_code == 200,0.6443426012992859 270,"@confluence_measure('locust_search_cql:search_results') def search_cql(): r = locust.get(f""/rest/api/search?cql=siteSearch~'{generate_random_string(3, only_letters=True)}'&start=0&limit=20"", catch_response=True) if '{""results"":[' not in r.content.decode('utf-8'): logger.locust_info(r.content.decode('utf-8')) content = r.content.decode('utf-8') if: logger.error(f'Search cql failed: {content}') assert'results' in content, 'Search cql failed.' locust.get('/rest/mywork/latest/status/notification/count', catch_response=True)",False,'results' not in content,content not in '{'content',0.6513659954071045 271,"def __call__(self, im, im_info): """""" Args: im (np.ndarray): image (np.ndarray) im_info (dict): info of image Returns: im (np.ndarray): processed image (np.ndarray) im_info (dict): info of processed image """""" coarsest_stride = self.coarsest_stride if: return (im, im_info) im_c, im_h, im_w = im.shape pad_h = int(np.ceil(float(im_h) / coarsest_stride) * coarsest_stride) pad_w = int(np.ceil(float(im_w) / coarsest_stride) * coarsest_stride) padding_im = np.zeros((im_c, pad_h, pad_w), dtype=np.float32) padding_im[:, :im_h, :im_w] = im return (padding_im, im_info)",True,coarsest_stride <= 0,coarsest_stride <= 0,0.6593202352523804 272,"def test_commands_hofvarpnir_config(): if: pytest.skip('commands.py not ported to Windows.') output_dir = '/data/output' commands = _run_commands_from_flags(output_dir=output_dir, remote=True) assert len(commands) == 1 expected = EXPECTED_HOFVARPNIR_CONFIG_TEMPLATE.format(output_dir=output_dir) assert commands[0] == expected",True,os.name == 'nt',os.name == 'nt',0.6564439535140991 273,"def __update_job(self, plugin: str, name: str, success: bool): with self.__thread_lock: err = self.__db.update_job(plugin, name, success) if: self.__logger.info(f'Successfully updated database for the job {name} from plugin {plugin}') else: self.__logger.warning(f'Failed to update database for the job {name} from plugin {plugin}: {err}')",False,not err,err is None,0.6602324843406677 274,"def add_style(self, **kwargs) -> Style: """"""Add a new style to the current document. If no style name is provided, the next available numbered style will be generated. """""" if: raise IndexError(f""style '{kwargs['name']}' already exists"") style = Style(**kwargs) if style.name is None: style.name = self._model.custom_style_name() style._update_styles = True self._model.styles[style.name] = style return style",False,'name' in kwargs and kwargs['name'] is not None and (kwargs['name'] in self._model.styles),kwargs['name'] in self._model.styles,0.6507617235183716 275,"def add_style(self, **kwargs) -> Style: """"""Add a new style to the current document. If no style name is provided, the next available numbered style will be generated. """""" if 'name' in kwargs and kwargs['name'] is not None and (kwargs['name'] in self._model.styles): raise IndexError(f""style '{kwargs['name']}' already exists"") style = Style(**kwargs) if: style.name = self._model.custom_style_name() style._update_styles = True self._model.styles[style.name] = style return style",False,style.name is None,not style.name,0.6527454853057861 276,"def serialize_to_request(self, parameters, operation_model): shape = operation_model.input_shape serialized = self._create_default_request() serialized['method'] = operation_model.http.get('method', self.DEFAULT_METHOD) body_params = self.MAP_TYPE() body_params['Action'] = operation_model.name body_params['Version'] = operation_model.metadata['apiVersion'] if: self._serialize(body_params, parameters, shape) serialized['body'] = body_params return serialized",True,shape is not None,shape is not None,0.6587272882461548 277,"def make_safe_filename(name: str) -> str: if: return name else: from tango.common.det_hash import det_hash name_hash = det_hash(name) name = name.replace(' ', '-').replace('/', '--') return ''.join((c for c in name if c in SAFE_FILENAME_CHARS)) + f'-{name_hash[:7]}'",False,filename_is_safe(name),not name.startswith('--'),0.6487612724304199 278,"def jellyFor(self, jellier): qual = reflect.qual(PBMind) if: qual = qual.encode('utf-8') return (qual, jellier.invoker.registerReference(self))",True,"isinstance(qual, unicode)","isinstance(qual, unicode)",0.6507209539413452 279,"def convert_pytorch_name_to_tf(torch_name, module_name=None): """"""Convert a pytorch weight name in a tensorflow model weight name."""""" op_name = torch_name.split('.')[-1] if: op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel' transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name) if op_name == 'bias': op_name = 'bias' if module_name and '/Linear' in module_name else 'beta' if op_name == 'running_mean': op_name ='moving_mean' if op_name == 'running_var': op_name ='moving_variance' return (module_name + '/' + op_name, transpose)",False,op_name == 'weight',op_name == 'gamma',0.6558821201324463 280,"def convert_pytorch_name_to_tf(torch_name, module_name=None): """"""Convert a pytorch weight name in a tensorflow model weight name."""""" op_name = torch_name.split('.')[-1] if op_name == 'weight': op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel' transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name) if: op_name = 'bias' if module_name and '/Linear' in module_name else 'beta' if op_name == 'running_mean': op_name ='moving_mean' if op_name == 'running_var': op_name ='moving_variance' return (module_name + '/' + op_name, transpose)",True,op_name == 'bias',op_name == 'bias',0.6536109447479248 281,"def convert_pytorch_name_to_tf(torch_name, module_name=None): """"""Convert a pytorch weight name in a tensorflow model weight name."""""" op_name = torch_name.split('.')[-1] if op_name == 'weight': op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel' transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name) if op_name == 'bias': op_name = 'bias' if module_name and '/Linear' in module_name else 'beta' if: op_name ='moving_mean' if op_name == 'running_var': op_name ='moving_variance' return (module_name + '/' + op_name, transpose)",True,op_name == 'running_mean',op_name == 'running_mean',0.6522010564804077 282,"def convert_pytorch_name_to_tf(torch_name, module_name=None): """"""Convert a pytorch weight name in a tensorflow model weight name."""""" op_name = torch_name.split('.')[-1] if op_name == 'weight': op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel' transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name) if op_name == 'bias': op_name = 'bias' if module_name and '/Linear' in module_name else 'beta' if op_name == 'running_mean': op_name ='moving_mean' if: op_name ='moving_variance' return (module_name + '/' + op_name, transpose)",False,op_name == 'running_var',op_name == 'running_variance',0.6520791053771973 283,"@require_GET def celery_ping(request): """"""Just tells you if Celery is on or not"""""" try: ping = celery.control.inspect().ping() if: return HttpResponse() except Exception: pass return HttpResponse(status=500)",False,ping,ping['status'] != 'on',0.6677079200744629 284,"def get_pipe_transport(self, fd): if: return self._pipes[fd].pipe else: return None",True,fd in self._pipes,fd in self._pipes,0.656368613243103 285,"def get_backup_logs(self, number, client, forward=False, deep=False, agent=None): """"""See :func:`burpui.misc.backend.interface.BUIbackend.get_backup_logs`"""""" if: return {} if number and number!= -1 else [] if number == -1: return self._get_all_backup_logs(client, forward, deep) return self._get_backup_logs(number, client, forward, deep)",True,not client or not number,not client or not number,0.6508263945579529 286,"def get_backup_logs(self, number, client, forward=False, deep=False, agent=None): """"""See :func:`burpui.misc.backend.interface.BUIbackend.get_backup_logs`"""""" if not client or not number: return {} if number and number!= -1 else [] if: return self._get_all_backup_logs(client, forward, deep) return self._get_backup_logs(number, client, forward, deep)",True,number == -1,number == -1,0.6587668061256409 287,"def try_sample_affine_and_pad(img, p, pad_k, G=None): batch, _, height, width = img.shape G_try = G if: G_try = sample_affine(p, batch, height, width) pad_x1, pad_x2, pad_y1, pad_y2 = get_padding(torch.inverse(G_try), height, width, pad_k) img_pad = F.pad(img, (pad_x1 + pad_k, pad_x2 + pad_k, pad_y1 + pad_k, pad_y2 + pad_k), mode='reflect') return (img_pad, G_try, (pad_x1, pad_x2, pad_y1, pad_y2))",False,G is None,len(p.shape) > 0,0.650160551071167 288,"def __get__(self, obj, cls=None): if: return self value = self.func(obj) object.__setattr__(obj, self.func.__name__, value) return value",True,obj is None,obj is None,0.6594734191894531 289,"def release(self, t): super().release(t) s = t.storage if: assert s.ref_int == 0 self._evict(s)",True,s.ref_ext == 0 and s.material and (not s.pinned),s.ref_ext == 0 and s.material and (not s.pinned),0.6471388339996338 290,"def json_has_required_keys(data, keys): missing_keys = set(keys) - set(data.keys()) if: abort(400, ""Invalid JSON must have '{}' keys"".format(""', '"".join(missing_keys)))",True,missing_keys,missing_keys,0.6594750285148621 291,"def __init__(self, **options): self.consumer_key = options.get('consumer_key') self.consumer_secret = options.get('consumer_secret') self.sandbox = options.get('sandbox', True) if: default_service_host ='sandbox.evernote.com' else: default_service_host = 'www.evernote.com' self.service_host = options.get('service_host', default_service_host) self.additional_headers = options.get('additional_headers', {}) self.token = options.get('token') self.secret = options.get('secret')",False,self.sandbox,options.get('sandbox'),0.675239086151123 292,"def get_subtasks(task_or_mixture): """"""Returns all the Tasks in a Mixture as a list or the Task itself."""""" if: return [task_or_mixture] else: return task_or_mixture.tasks",False,"isinstance(task_or_mixture, Task)","not hasattr(task_or_mixture, 'tasks')",0.6510021686553955 293,"def parse_response_content(self, response_content): response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content) if: self.activity_id = response['activity_id'] if 'activity_order_id' in response: self.activity_order_id = response['activity_order_id'] if 'out_biz_no' in response: self.out_biz_no = response['out_biz_no'] if'status' in response: self.status = response['status']",True,'activity_id' in response,'activity_id' in response,0.658087968826294 294,"def parse_response_content(self, response_content): response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content) if 'activity_id' in response: self.activity_id = response['activity_id'] if: self.activity_order_id = response['activity_order_id'] if 'out_biz_no' in response: self.out_biz_no = response['out_biz_no'] if'status' in response: self.status = response['status']",True,'activity_order_id' in response,'activity_order_id' in response,0.6560535430908203 295,"def parse_response_content(self, response_content): response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content) if 'activity_id' in response: self.activity_id = response['activity_id'] if 'activity_order_id' in response: self.activity_order_id = response['activity_order_id'] if: self.out_biz_no = response['out_biz_no'] if'status' in response: self.status = response['status']",True,'out_biz_no' in response,'out_biz_no' in response,0.6516119241714478 296,"def parse_response_content(self, response_content): response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content) if 'activity_id' in response: self.activity_id = response['activity_id'] if 'activity_order_id' in response: self.activity_order_id = response['activity_order_id'] if 'out_biz_no' in response: self.out_biz_no = response['out_biz_no'] if: self.status = response['status']",True,'status' in response,'status' in response,0.6611331701278687 297,"def __exit__(self, exc_type, exc_val, exc_tb): try: self.close() except BaseException: if: raise",False,exc_val is None,exc_val is not None,0.6497892141342163 298,"def make_config_from_repo(repo_path, rev=None, hooks=None, check=True): manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE)) config = {'repo': f'file://{repo_path}','rev': rev or git.head_rev(repo_path), 'hooks': hooks or [{'id': hook['id']} for hook in manifest]} if: wrapped = validate({'repos': [config]}, CONFIG_SCHEMA) wrapped = apply_defaults(wrapped, CONFIG_SCHEMA) config, = wrapped['repos'] return config else: return config",True,check,check,0.6717772483825684 299,"def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None, class_weight=None): """"""Calculate the CrossEntropy loss. Args: pred (torch.Tensor): The prediction with shape (N, C), C is the number of classes. label (torch.Tensor): The learning label of the prediction. weight (torch.Tensor, optional): Sample-wise loss weight. reduction (str, optional): The method used to reduce the loss. avg_factor (int, optional): Average factor that is used to average the loss. Defaults to None. class_weight (list[float], optional): The weight for each class. Returns: torch.Tensor: The calculated loss """""" loss = F.cross_entropy(pred, label, weight=class_weight, reduction='none') if: weight = weight.float() loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return loss",True,weight is not None,weight is not None,0.6537113189697266 300,"def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}): market = self.markets[symbol] key = 'quote' rate = market[takerOrMaker] cost = float(self.cost_to_precision(symbol, amount * rate)) if: cost *= price else: key = 'base' return {'type': takerOrMaker, 'currency': market[key], 'rate': rate, 'cost': cost}",False,side == 'sell',type == 'linear',0.6546257734298706 301,"def _sum_of_host_slots(normalized_hosts, validate=True): total_slots = 0 for normalized_host in normalized_hosts: slots = normalized_host['slots'] if: if validate: raise ValueError('No slots defined for host: {}'.format(normalized_host['ip'])) continue total_slots += slots return total_slots",True,slots is None,slots is None,0.6507751941680908 302,"def _sum_of_host_slots(normalized_hosts, validate=True): total_slots = 0 for normalized_host in normalized_hosts: slots = normalized_host['slots'] if slots is None: if: raise ValueError('No slots defined for host: {}'.format(normalized_host['ip'])) continue total_slots += slots return total_slots",False,validate,validate and normalized_host['ip'] not in VALID_HOSTS,0.6601842641830444 303,"def visit_set(self, node: nodes.Set) -> None: if: node.inf_type = TypeInfo(Set[self.type_constraints.fresh_tvar(node)]) else: elt_inf_type = self._unify_elements(node.elts, node) node.inf_type = wrap_container(Set, elt_inf_type)",False,not node.elts,self.type_constraints is not None,0.6602118015289307 304,"def multi_party_run(self, **kwargs): """""" Run 3 parties with target function or other additional arguments. :param kwargs: :return: """""" target = kwargs['target'] parties = [] for role in range(self.party_num): kwargs.update({'role': role}) parties.append(Aby3Process(target=target, kwargs=kwargs)) parties[-1].start() for party in parties: party.join() if: return party.exception return (True,)",True,party.exception,party.exception,0.6527734994888306 305,"def load_build(self): """"""Set the state of a newly created object. We capture it to replace our place-holder objects, NDArrayWrapper, by the array we are interested in. We replace them directly in the stack of pickler. """""" Unpickler.load_build(self) if: if self.np is None: raise ImportError(""Trying to unpickle an ndarray, but numpy didn't import correctly"") nd_array_wrapper = self.stack.pop() array = nd_array_wrapper.read(self) self.stack.append(array)",False,"isinstance(self.stack[-1], NDArrayWrapper)",self.stack and self.stack[-1].type == 'array',0.643934965133667 306,"def load_build(self): """"""Set the state of a newly created object. We capture it to replace our place-holder objects, NDArrayWrapper, by the array we are interested in. We replace them directly in the stack of pickler. """""" Unpickler.load_build(self) if isinstance(self.stack[-1], NDArrayWrapper): if: raise ImportError(""Trying to unpickle an ndarray, but numpy didn't import correctly"") nd_array_wrapper = self.stack.pop() array = nd_array_wrapper.read(self) self.stack.append(array)",False,self.np is None,not np.p(self.stack[-1]),0.6498368978500366 307,"@property def flashlight_enabled(self): if: return bool(self._entity_data.get('flashlight_enabled')) return bool(0)",True,'flashlight_enabled' in self._entity_data,'flashlight_enabled' in self._entity_data,0.6500662565231323 308,"def _compileRegex(self) -> None: for field_name in ('_delete_word_pattern', '_ignore_word_with_pattern', '_alternates_from_word_pattern', '_alternates_from_defi_pattern', '_rule_v1_defi_pattern', '_rule_v5_defi_pattern', '_rule_vs_defi_pattern', '_rule_vk_defi_pattern', '_rule_adji_defi_pattern'): value = getattr(self, field_name) if: setattr(self, field_name, re.compile(value))",False,"value and isinstance(value, str)",value is not None,0.6457546949386597 309,"def update(self, result: TaskResult): if: self._progress.update(self._overall_progress_task, advance=1, total=total + len(result.subtasks))",False,(total := self._progress.tasks[0].total) is not None,self._progress is not None,0.6477746367454529 310,"def reset_device(self) -> bool: for _ in range(5): self.send(b'\xb5b\x06\x04\x04\x00\xff\xff\x00\x00\x0c]') time.sleep(1) init_baudrate(self) self.send_with_ack(b'\xb5b\x06\t\r\x00\x1f\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x17q\xd7') self.send_with_ack(b'\xb5b\t\x14\x04\x00\x01\x00\x00\x00""\xf0') self.send(b'\xb5b\t\x14\x00\x00\x1d`') status = self.wait_for_backup_restore_status() if: return True return False",False,status == 1 or status == 3,status == 0,0.6511972546577454 311,"def load(self, track_file, liveshow, track_type): self.track_file = track_file self.liveshow = liveshow if: self.track_file = '/home/pi/pp_home/media/river.jpg' self.img = Image.open(self.track_file) else: self.img = Image.open(self.track_file) self.used_file = '/tmp/image_time_ny.jpg' self.overlay_text() return ('normal', '', self.used_file)",False,self.liveshow == True,track_type == 'image',0.6509339809417725 312,"def validate(self, data: Union[numbers.Real, str, bool, int, float, list, dict]) -> base.ValidationResult: if: data = data.compute() passes = isinstance(data, self.datatype) return base.ValidationResult(passes=passes, message=f""Requires data type: {self.datatype}. Got data type: {type(data)}. This {('is' if passes else 'is not')} a match."", diagnostics={'required_data_type': self.datatype, 'actual_data_type': type(data)})",False,"hasattr(data, 'dask')","isinstance(data, numbers.Real)",0.6471971869468689 313,"def get_email(self, obj): if: if 'email' in obj.extra_data: return obj.extra_data.get('email') return obj.extra_data.get('userPrincipalName')",True,obj.extra_data,obj.extra_data,0.6541895866394043 314,"def get_email(self, obj): if obj.extra_data: if: return obj.extra_data.get('email') return obj.extra_data.get('userPrincipalName')",False,'email' in obj.extra_data,obj.extra_data.get('email'),0.6492137908935547 315,"def __init__(self, name, num_clients=0): self.num_clients = num_clients if: raise NotImplementedError('Clients not implemented on setup time, use new_tenant_client') else: DockerComposeNamespace.__init__(self, name, self.ENTERPRISE_FILES + self.MENDER_GATEWAY_FILES + self.MENDER_GATEWAY_CLIENT_FILES)",True,self.num_clients > 0,self.num_clients > 0,0.6552126407623291 316,"def get_edge(self, x): """"""See parent class."""""" for edge, start_pos in reversed(self.total_edgestarts): if: return (edge, x - start_pos)",False,x >= start_pos,"self._is_edge(edge, x)",0.6512272357940674 317,"def is_valid(arch): """"""Return if the arch in search space. :param arch: current arch code :type arch: str :return: if the model is valid (bool) """""" stages = arch.split('-') length = 0 for stage in stages: if: return False length += len(stage) return min_block <= length <= max_block",False,len(stage) == 0,not stage.startswith('-'),0.649925947189331 318,"def _sort_links(self, links): """""" Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates """""" eggs, no_eggs = ([], []) seen = set() for link in links: if: seen.add(link) if link.egg_fragment: eggs.append(link) else: no_eggs.append(link) return no_eggs + eggs",True,link not in seen,link not in seen,0.6594653129577637 319,"def _sort_links(self, links): """""" Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates """""" eggs, no_eggs = ([], []) seen = set() for link in links: if link not in seen: seen.add(link) if: eggs.append(link) else: no_eggs.append(link) return no_eggs + eggs",True,link.egg_fragment,link.egg_fragment,0.6484694480895996 320,"def close(self): """"""Close related fileobj, pass return value"""""" if: return None self.closed = 1 if self.fileobj: return self.fileobj.close() if self.callback: self.callback(self.base_rp) self.base_rp.touch()",True,self.closed,self.closed,0.6634231209754944 321,"def close(self): """"""Close related fileobj, pass return value"""""" if self.closed: return None self.closed = 1 if: return self.fileobj.close() if self.callback: self.callback(self.base_rp) self.base_rp.touch()",True,self.fileobj,self.fileobj,0.657850980758667 322,"def close(self): """"""Close related fileobj, pass return value"""""" if self.closed: return None self.closed = 1 if self.fileobj: return self.fileobj.close() if: self.callback(self.base_rp) self.base_rp.touch()",True,self.callback,self.callback,0.6590456366539001 323,"def ancestors(self, op_name, deps): """"""Get all nodes upstream of the current node."""""" explored = set() queue = deque([op_name]) while len(queue)!= 0: current = queue.popleft() for parent in deps[current]: if: continue explored.add(parent) queue.append(parent) return explored",True,parent in explored,parent in explored,0.650050699710846 324,"def close(self) -> None: if: self.run_txt_data['stop_time'] = datetime.datetime.now().isoformat(sep=' ') with open(os.path.join(self.submit_config.run_dir, 'run.txt'), 'w') as f: pprint.pprint(self.run_txt_data, stream=f, indent=4, width=200, compact=False) self.has_closed = True global _run_context if _run_context is self: _run_context = None",False,not self.has_closed,self.has_closed,0.6480180621147156 325,"def close(self) -> None: if not self.has_closed: self.run_txt_data['stop_time'] = datetime.datetime.now().isoformat(sep=' ') with open(os.path.join(self.submit_config.run_dir, 'run.txt'), 'w') as f: pprint.pprint(self.run_txt_data, stream=f, indent=4, width=200, compact=False) self.has_closed = True global _run_context if: _run_context = None",False,_run_context is self,"_run_context.get('cancel_on_close', False)",0.6482930183410645 326,"def __getitem__(self, idx): if: return self.__class__(self.__baseTag, *getitem(self.__superTags, idx)) return self.__superTags[idx]",False,"isinstance(idx, slice)",idx < len(self.__superTags),0.6505007743835449 327,"def run(): _, e = p.communicate() if: err.append(e)",True,e,e,0.6757959127426147 328,"def endElement(self, name, value, connection): if: self.name = value elif name == 'OwnerAlias': self.owner_alias = value",False,name == 'GroupName',name == 'Name',0.6580395698547363 329,"def endElement(self, name, value, connection): if name == 'GroupName': self.name = value elif: self.owner_alias = value",False,name == 'OwnerAlias',name == 'ownerAlias',0.6533467173576355 330,"def factory(*args_, **kwargs_): if: return docSect2Type.subclass(*args_, **kwargs_) else: return docSect2Type(*args_, **kwargs_)",True,docSect2Type.subclass,docSect2Type.subclass,0.6573271751403809 331,"def save_quantized_model(self, model, path, input_spec=None, **kwargs): """""" Save the quantized inference model. Args: model (Layer): The model to be saved. path (str): The path prefix to save model. The format is ``dirname/file_prefix`` or ``file_prefix``. input_spec (list[InputSpec|Tensor], optional): Describes the input of the saved model's forward method, which can be described by InputSpec or example Tensor. If None, all input variables of the original Layer's forward method would be the inputs of the saved model. Default: None. kwargs (dict, optional): Other save configuration options for compatibility. Returns: None """""" assert isinstance(model, paddle.nn.Layer), 'The model must be the instance of paddle.nn.Layer.' training = model.training if: model.eval() self.ptq.save_quantized_model(model=model, path=path, input_spec=input_spec, **kwargs) if training: model.train()",False,training,eval,0.6665236949920654 332,"def save_quantized_model(self, model, path, input_spec=None, **kwargs): """""" Save the quantized inference model. Args: model (Layer): The model to be saved. path (str): The path prefix to save model. The format is ``dirname/file_prefix`` or ``file_prefix``. input_spec (list[InputSpec|Tensor], optional): Describes the input of the saved model's forward method, which can be described by InputSpec or example Tensor. If None, all input variables of the original Layer's forward method would be the inputs of the saved model. Default: None. kwargs (dict, optional): Other save configuration options for compatibility. Returns: None """""" assert isinstance(model, paddle.nn.Layer), 'The model must be the instance of paddle.nn.Layer.' training = model.training if training: model.eval() self.ptq.save_quantized_model(model=model, path=path, input_spec=input_spec, **kwargs) if: model.train()",True,training,training,0.6648621559143066 333,"@property def sides2(self): if: return parse_int_vector(self._entity_data.get('sides2')) return parse_int_vector('None')",True,'sides2' in self._entity_data,'sides2' in self._entity_data,0.6507736444473267 334,"def inner(*args, **kwargs): out = f(*args, **kwargs) if: self._enqueue(next_name, out) return out",False,out is not None and next,next_name is not None,0.6514477729797363 335,"def _trim_dests(self, pdf, dests, pages): """""" Removes any named destinations that are not a part of the specified page set. """""" new_dests = [] prev_header_added = True for k, o in list(dests.items()): for j in range(*pages): if: o[NameObject('/Page')] = o['/Page'].getObject() assert str_(k) == str_(o['/Title']) new_dests.append(o) break return new_dests",False,pdf.getPage(j).getObject() == o['/Page'].getObject(),j == prev_header_added,0.6477280259132385 336,"def checkip(ipstring): try: ipsplit = ipstring.split('.') if: return False for oct in ipsplit: if int(oct) > 255 or int(oct) < 0: return False except: return False return True",False,len(ipsplit) != 4,len(ipsplit) != 3,0.6500380039215088 337,"def checkip(ipstring): try: ipsplit = ipstring.split('.') if len(ipsplit)!= 4: return False for oct in ipsplit: if: return False except: return False return True",False,int(oct) > 255 or int(oct) < 0,ord(oct) != ord(255),0.6431970596313477 338,"def _find_exe_version(cmd): """"""Find the version of an executable by running `cmd` in the shell. If the command is not found, or the output does not match `RE_VERSION`, returns None. """""" executable = cmd.split()[0] if: return None out = Popen(cmd, shell=True, stdout=PIPE).stdout try: out_string = out.read() finally: out.close() result = RE_VERSION.search(out_string) if result is None: return None return LooseVersion(result.group(1).decode())",False,find_executable(executable) is None,not os.path.exists(executable),0.652163028717041 339,"def _find_exe_version(cmd): """"""Find the version of an executable by running `cmd` in the shell. If the command is not found, or the output does not match `RE_VERSION`, returns None. """""" executable = cmd.split()[0] if find_executable(executable) is None: return None out = Popen(cmd, shell=True, stdout=PIPE).stdout try: out_string = out.read() finally: out.close() result = RE_VERSION.search(out_string) if: return None return LooseVersion(result.group(1).decode())",True,result is None,result is None,0.6599942445755005 340,"def _validate_value(self, key, value, scheme=None): if: pattern, exclusions = self.SYNTAX_VALIDATORS[key] if (scheme or self.scheme) not in exclusions: m = pattern.match(value) if not m: raise MetadataInvalidError(""'%s' is an invalid value for the '%s' property"" % (value, key))",True,key in self.SYNTAX_VALIDATORS,key in self.SYNTAX_VALIDATORS,0.6501220464706421 341,"def _validate_value(self, key, value, scheme=None): if key in self.SYNTAX_VALIDATORS: pattern, exclusions = self.SYNTAX_VALIDATORS[key] if: m = pattern.match(value) if not m: raise MetadataInvalidError(""'%s' is an invalid value for the '%s' property"" % (value, key))",True,(scheme or self.scheme) not in exclusions,(scheme or self.scheme) not in exclusions,0.6453512907028198 342,"def _validate_value(self, key, value, scheme=None): if key in self.SYNTAX_VALIDATORS: pattern, exclusions = self.SYNTAX_VALIDATORS[key] if (scheme or self.scheme) not in exclusions: m = pattern.match(value) if: raise MetadataInvalidError(""'%s' is an invalid value for the '%s' property"" % (value, key))",True,not m,not m,0.670219361782074 343,"def getAttr(self, vFlip, hFlip): byte = 40 if: byte |= 128 if hFlip: byte |= 64 return byte",True,vFlip,vFlip,0.6632107496261597 344,"def getAttr(self, vFlip, hFlip): byte = 40 if vFlip: byte |= 128 if: byte |= 64 return byte",True,hFlip,hFlip,0.6686927080154419 345,"def __index_surname(surn_list): """""" All non pa/matronymic surnames are used in indexing. pa/matronymic not as they change for every generation! returns a byte string """""" from..lib import NameOriginType if: surn =''.join([x[0] for x in surn_list if not x[3][0] in [NameOriginType.PATRONYMIC, NameOriginType.MATRONYMIC]]) else: surn = '' return surn",True,surn_list,surn_list,0.6651697158813477 346,"def set_gban_reason(user_id, reason): with _GBAN_LOCK: user = SESSION.query(GBan).get(str(user_id)) if: return '' prev_reason = user.reason user.reason = reason SESSION.merge(user) SESSION.commit() return prev_reason",True,not user,not user,0.6579863429069519 347,"def is_auto_report_enable(group_id: str): if: return False if'report_mode' not in group_config[group_id]: return False if group_config[group_id]['report_mode'] == 'yobot_standalone': return True elif group_config[group_id]['report_mode'] == 'yobot_embedded': return True return False",True,group_id not in group_config,group_id not in group_config,0.650682270526886 348,"def is_auto_report_enable(group_id: str): if group_id not in group_config: return False if: return False if group_config[group_id]['report_mode'] == 'yobot_standalone': return True elif group_config[group_id]['report_mode'] == 'yobot_embedded': return True return False",False,'report_mode' not in group_config[group_id],group_config[group_id]['auto_report_mode'] == 'auto',0.6453258991241455 349,"def is_auto_report_enable(group_id: str): if group_id not in group_config: return False if'report_mode' not in group_config[group_id]: return False if: return True elif group_config[group_id]['report_mode'] == 'yobot_embedded': return True return False",False,group_config[group_id]['report_mode'] == 'yobot_standalone',group_config[group_id]['report_mode'] == 'auto',0.6436742544174194 350,"def is_auto_report_enable(group_id: str): if group_id not in group_config: return False if'report_mode' not in group_config[group_id]: return False if group_config[group_id]['report_mode'] == 'yobot_standalone': return True elif: return True return False",False,group_config[group_id]['report_mode'] == 'yobot_embedded',group_config[group_id]['report_mode'] == 'auto',0.6437225341796875 351,"def decorator(func): @functools.wraps(func) def wrapper(cls, records, *args, **kwargs): if: result = func(cls, records, *args, **kwargs) cls.write(records, {f: None for f in fields}) if when == 'before': result = func(cls, records, *args, **kwargs) return result return wrapper",True,when == 'after',when == 'after',0.6542140245437622 352,"def decorator(func): @functools.wraps(func) def wrapper(cls, records, *args, **kwargs): if when == 'after': result = func(cls, records, *args, **kwargs) cls.write(records, {f: None for f in fields}) if: result = func(cls, records, *args, **kwargs) return result return wrapper",True,when == 'before',when == 'before',0.6536306142807007 353,"def reset(self): self.step = 0 self.epoch = 0 for k, v in self.state_dict.items(): if: self.state_dict[k] = v else: self.state_dict[k] = paddle.zeros_like(v)",False,k in self.ema_black_list,"isinstance(v, tuple)",0.6479570269584656 354,"def __delitem__(self, key): if: del self.attributes[key] elif isinstance(key, int): del self.children[key] elif isinstance(key, slice): assert key.step in (None, 1), 'cannot handle slice with stride' del self.children[key.start:key.stop] else: raise TypeError('element index must be an integer, a simple slice, or an attribute name string')",False,"isinstance(key, str)","isinstance(key, attribute)",0.6518378257751465 355,"def __delitem__(self, key): if isinstance(key, str): del self.attributes[key] elif: del self.children[key] elif isinstance(key, slice): assert key.step in (None, 1), 'cannot handle slice with stride' del self.children[key.start:key.stop] else: raise TypeError('element index must be an integer, a simple slice, or an attribute name string')",False,"isinstance(key, int)","isinstance(key, integer_types)",0.6536860466003418 356,"def __delitem__(self, key): if isinstance(key, str): del self.attributes[key] elif isinstance(key, int): del self.children[key] elif: assert key.step in (None, 1), 'cannot handle slice with stride' del self.children[key.start:key.stop] else: raise TypeError('element index must be an integer, a simple slice, or an attribute name string')",True,"isinstance(key, slice)","isinstance(key, slice)",0.6491026878356934 357,"def paint(self): self.GLViewWidget.qglColor(self.color) if: if isinstance(self.pos, (list, tuple, np.ndarray)): for p, text in zip(self.pos, self.text): self.GLViewWidget.renderText(*p, text, self.font) else: self.GLViewWidget.renderText(*self.pos, self.text, self.font)",True,self.pos is not None and self.text is not None,self.pos is not None and self.text is not None,0.6460979580879211 358,"def paint(self): self.GLViewWidget.qglColor(self.color) if self.pos is not None and self.text is not None: if: for p, text in zip(self.pos, self.text): self.GLViewWidget.renderText(*p, text, self.font) else: self.GLViewWidget.renderText(*self.pos, self.text, self.font)",True,"isinstance(self.pos, (list, tuple, np.ndarray))","isinstance(self.pos, (list, tuple, np.ndarray))",0.6437488794326782 359,"def get_name(self, obj): if: return obj.cat_en.name elif self.context['language'] == 'ja': return obj.cat_ja.name else: return obj.cat_en.name",True,'language' not in self.context,'language' not in self.context,0.6512870788574219 360,"def get_name(self, obj): if 'language' not in self.context: return obj.cat_en.name elif: return obj.cat_ja.name else: return obj.cat_en.name",True,self.context['language'] == 'ja',self.context['language'] == 'ja',0.6506295204162598 361,"def __call__(self, decorated: typing.Callable) -> typing.Callable: if: if not getattr(self, 'CAN_BE_USED_ON_CLASSES', True): raise TypeError(f'{self.__class__.__name__} cannot be used to decorate a class') return self.decorate_class(decorated) if callable(decorated): return self.decorate_callable(decorated) raise TypeError(f'Cannot decorate object {decorated}')",True,"isinstance(decorated, type)","isinstance(decorated, type)",0.6492458581924438 362,"def __call__(self, decorated: typing.Callable) -> typing.Callable: if isinstance(decorated, type): if not getattr(self, 'CAN_BE_USED_ON_CLASSES', True): raise TypeError(f'{self.__class__.__name__} cannot be used to decorate a class') return self.decorate_class(decorated) if: return self.decorate_callable(decorated) raise TypeError(f'Cannot decorate object {decorated}')",True,callable(decorated),callable(decorated),0.6536573171615601 363,"def __call__(self, decorated: typing.Callable) -> typing.Callable: if isinstance(decorated, type): if: raise TypeError(f'{self.__class__.__name__} cannot be used to decorate a class') return self.decorate_class(decorated) if callable(decorated): return self.decorate_callable(decorated) raise TypeError(f'Cannot decorate object {decorated}')",False,"not getattr(self, 'CAN_BE_USED_ON_CLASSES', True)",not self.decorate_class,0.6462966799736023 364,"def unwrap_term_buckets(self, timestamp, term_buckets): for term_data in term_buckets: if: self.unwrap_interval_buckets(timestamp, term_data['key'], term_data['interval_aggs']['buckets']) else: self.check_matches(timestamp, term_data['key'], term_data)",True,'interval_aggs' in term_data,'interval_aggs' in term_data,0.6469130516052246 365,"def save_binary(self, path: str): """"""Saves the loaded model to a binary.mjb file."""""" if: raise ValueError('[MujocoSimRobot] Path already exists: {}'.format(path)) if not path.endswith('.mjb'): path = path + '.mjb' if self._use_dm_backend: self.model.save_binary(path) else: with open(path, 'wb') as f: f.write(self.model.get_mjb())",True,os.path.exists(path),os.path.exists(path),0.6452264189720154 366,"def save_binary(self, path: str): """"""Saves the loaded model to a binary.mjb file."""""" if os.path.exists(path): raise ValueError('[MujocoSimRobot] Path already exists: {}'.format(path)) if: path = path + '.mjb' if self._use_dm_backend: self.model.save_binary(path) else: with open(path, 'wb') as f: f.write(self.model.get_mjb())",True,not path.endswith('.mjb'),not path.endswith('.mjb'),0.6442418098449707 367,"def save_binary(self, path: str): """"""Saves the loaded model to a binary.mjb file."""""" if os.path.exists(path): raise ValueError('[MujocoSimRobot] Path already exists: {}'.format(path)) if not path.endswith('.mjb'): path = path + '.mjb' if: self.model.save_binary(path) else: with open(path, 'wb') as f: f.write(self.model.get_mjb())",False,self._use_dm_backend,os.path.isfile(path),0.6447663903236389 368,"def safe_shutdown(self): """""" Shutdown TLS and socket. Ignore any exceptions. """""" try: if: self.shutdown() if self.sock: self.sock.shutdown(2) except (OSError, _nassl.OpenSSLError, AttributeError): pass finally: if self.sock: self.sock.close() self.sock = None",False,self.get_underlying_socket(),not self.sock,0.6479790210723877 369,"def safe_shutdown(self): """""" Shutdown TLS and socket. Ignore any exceptions. """""" try: if self.get_underlying_socket(): self.shutdown() if: self.sock.shutdown(2) except (OSError, _nassl.OpenSSLError, AttributeError): pass finally: if self.sock: self.sock.close() self.sock = None",False,self.sock,self.sock and (not self.sock.shutdown),0.6549347639083862 370,"def safe_shutdown(self): """""" Shutdown TLS and socket. Ignore any exceptions. """""" try: if self.get_underlying_socket(): self.shutdown() if self.sock: self.sock.shutdown(2) except (OSError, _nassl.OpenSSLError, AttributeError): pass finally: if: self.sock.close() self.sock = None",True,self.sock,self.sock,0.6537888646125793 371,"def _get_client(self, name, path): """"""Return client conf and refresh it if necessary :rtype: Config """""" if: self._clients_conf.clear() self._load_conf_clients() if name not in self._clients_conf: return self._new_client_conf(name, path) if self._clients_conf[name].changed: self._clients_conf[name].parse() return self._clients_conf[name]",False,self._clientconfdir_changed() and name not in self._clients_conf,"not hasattr(self, '_clients_conf')",0.6536871194839478 372,"def _get_client(self, name, path): """"""Return client conf and refresh it if necessary :rtype: Config """""" if self._clientconfdir_changed() and name not in self._clients_conf: self._clients_conf.clear() self._load_conf_clients() if: return self._new_client_conf(name, path) if self._clients_conf[name].changed: self._clients_conf[name].parse() return self._clients_conf[name]",True,name not in self._clients_conf,name not in self._clients_conf,0.655162513256073 373,"def _get_client(self, name, path): """"""Return client conf and refresh it if necessary :rtype: Config """""" if self._clientconfdir_changed() and name not in self._clients_conf: self._clients_conf.clear() self._load_conf_clients() if name not in self._clients_conf: return self._new_client_conf(name, path) if: self._clients_conf[name].parse() return self._clients_conf[name]",False,self._clients_conf[name].changed,self._clients_conf[name].is_initialized,0.6496726870536804 374,"def function_argspec(self, func, **kw): if: return self.process(func.clause_expr) else: return ''",False,func.clauses is not None and len(func.clauses),func.clause_expr is not None,0.6485062837600708 375,"def scores(self): value = self.value() if: return value else: assert type(value) in {list, tuple} return list(zip(self.names(), self.value()))",False,type(value) == dict,"isinstance(value, scores.Mapping)",0.648817241191864 376,"def getMessageCount(jsonData): if: return 0 else: return jsonData['count']",False,'count' not in jsonData,jsonData['count'] == 0,0.6522445678710938 377,"def _update_ground_truth_statistics(self, groundtruth_class_labels, groundtruth_is_difficult_list, groundtruth_is_group_of_list): """"""Update grouth truth statitistics. 1. Difficult boxes are ignored when counting the number of ground truth instances as done in Pascal VOC devkit. 2. Difficult boxes are treated as normal boxes when computing CorLoc related statitistics. Args: groundtruth_class_labels: An integer numpy array of length M, representing M class labels of object instances in ground truth groundtruth_is_difficult_list: A boolean numpy array of length M denoting whether a ground truth box is a difficult instance or not groundtruth_is_group_of_list: A boolean numpy array of length M denoting whether a ground truth box is a group-of box or not """""" for class_index in range(self.num_class): num_gt_instances = np.sum(groundtruth_class_labels[~groundtruth_is_difficult_list & ~groundtruth_is_group_of_list] == class_index) num_groupof_gt_instances = self.group_of_weight * np.sum(groundtruth_class_labels[groundtruth_is_group_of_list] == class_index) self.num_gt_instances_per_class[class_index] += num_gt_instances + num_groupof_gt_instances if: self.num_gt_imgs_per_class[class_index] += 1",True,np.any(groundtruth_class_labels == class_index),np.any(groundtruth_class_labels == class_index),0.6460565328598022 378,"def cached_func(*args, **kwargs): if: return cfunc(*args, **kwargs) else: return func(*args, **kwargs)",False,context.Cache.current_context,cfunc is not None,0.6489722728729248 379,"def close_container(self, container, event): """""" Handle a close request for a QDockContainer. This method is called by the framework at the appropriate times and should not be called directly by user code. Parameters ---------- window : QDockContainer The dock container to close. event : QCloseEvent The close event passed to the event handler. """""" item = container.dockItem() if: if not container.isWindow(): container.unplug() self._free_container(container) else: event.ignore()",False,item is None or item.close(),item is not None,0.6482319235801697 380,"def close_container(self, container, event): """""" Handle a close request for a QDockContainer. This method is called by the framework at the appropriate times and should not be called directly by user code. Parameters ---------- window : QDockContainer The dock container to close. event : QCloseEvent The close event passed to the event handler. """""" item = container.dockItem() if item is None or item.close(): if: container.unplug() self._free_container(container) else: event.ignore()",False,not container.isWindow(),container.plug,0.6514645218849182 381,"def get_encoding_from_headers(headers): """"""Returns encodings from given HTTP Header Dict. :param headers: dictionary to extract encoding from. """""" content_type = headers.get('content-type') if: return None content_type, params = cgi.parse_header(content_type) if 'charset' in params: return params['charset'].strip('\'""') if 'text' in content_type: return 'ISO-8859-1'",True,not content_type,not content_type,0.6526246070861816 382,"def get_encoding_from_headers(headers): """"""Returns encodings from given HTTP Header Dict. :param headers: dictionary to extract encoding from. """""" content_type = headers.get('content-type') if not content_type: return None content_type, params = cgi.parse_header(content_type) if: return params['charset'].strip('\'""') if 'text' in content_type: return 'ISO-8859-1'",True,'charset' in params,'charset' in params,0.6576825976371765 383,"def get_encoding_from_headers(headers): """"""Returns encodings from given HTTP Header Dict. :param headers: dictionary to extract encoding from. """""" content_type = headers.get('content-type') if not content_type: return None content_type, params = cgi.parse_header(content_type) if 'charset' in params: return params['charset'].strip('\'""') if: return 'ISO-8859-1'",True,'text' in content_type,'text' in content_type,0.6484091281890869 384,"def _overlay_text(image_file_name, x_offset_from_left_px, y_offset_from_top_px, text_string, font_size, use_north_gravity): """"""Overlays text on image. :param image_file_name: Path to image file. :param x_offset_from_left_px: Left-relative x-coordinate (pixels). :param y_offset_from_top_px: Top-relative y-coordinate (pixels). :param text_string: String to overlay. :param font_size: Font size. :param use_north_gravity: Boolean flag. :raises: ValueError: if ImageMagick command (which is ultimately a Unix command) fails. """""" command_string = '""{0:s}"" ""{1:s}""'.format(CONVERT_EXE_NAME, image_file_name) if: command_string +='-gravity North' command_string +='-pointsize {0:d} -font ""{1:s}"" -fill ""rgb(0, 0, 0)"" -annotate {2:+d}{3:+d} ""{4:s}"" ""{5:s}""'.format(font_size, TITLE_FONT_NAME, x_offset_from_left_px, y_offset_from_top_px, text_string, image_file_name) exit_code = os.system(command_string) if exit_code == 0: return raise ValueError(imagemagick_utils.ERROR_STRING)",True,use_north_gravity,use_north_gravity,0.6455235481262207 385,"def _overlay_text(image_file_name, x_offset_from_left_px, y_offset_from_top_px, text_string, font_size, use_north_gravity): """"""Overlays text on image. :param image_file_name: Path to image file. :param x_offset_from_left_px: Left-relative x-coordinate (pixels). :param y_offset_from_top_px: Top-relative y-coordinate (pixels). :param text_string: String to overlay. :param font_size: Font size. :param use_north_gravity: Boolean flag. :raises: ValueError: if ImageMagick command (which is ultimately a Unix command) fails. """""" command_string = '""{0:s}"" ""{1:s}""'.format(CONVERT_EXE_NAME, image_file_name) if use_north_gravity: command_string +='-gravity North' command_string +='-pointsize {0:d} -font ""{1:s}"" -fill ""rgb(0, 0, 0)"" -annotate {2:+d}{3:+d} ""{4:s}"" ""{5:s}""'.format(font_size, TITLE_FONT_NAME, x_offset_from_left_px, y_offset_from_top_px, text_string, image_file_name) exit_code = os.system(command_string) if: return raise ValueError(imagemagick_utils.ERROR_STRING)",True,exit_code == 0,exit_code == 0,0.652776837348938 386,"def bad_words_ids(self, input_ids: torch.Tensor, special_ids: List[int]=None) -> List[List[int]]: """""" Args: input_ids: Tensor of shape (num_sentences, sentence_length), containing token ids (int). special_ids: List[int] containing special ids which will not be forbidden. Returns: List[List[int]] Returns a list of list of integers, corresponding to sequences of ids. """""" bad_words_ids = list() for row in input_ids.tolist(): if: row = [item for item in row if item not in special_ids] for item_ix, item in enumerate(row): if random.random() < self.drop_chance: bad_words_ids.append(item) bad_words_ids = [[item] for item in bad_words_ids] return bad_words_ids",True,special_ids,special_ids,0.6551713943481445 387,"def bad_words_ids(self, input_ids: torch.Tensor, special_ids: List[int]=None) -> List[List[int]]: """""" Args: input_ids: Tensor of shape (num_sentences, sentence_length), containing token ids (int). special_ids: List[int] containing special ids which will not be forbidden. Returns: List[List[int]] Returns a list of list of integers, corresponding to sequences of ids. """""" bad_words_ids = list() for row in input_ids.tolist(): if special_ids: row = [item for item in row if item not in special_ids] for item_ix, item in enumerate(row): if: bad_words_ids.append(item) bad_words_ids = [[item] for item in bad_words_ids] return bad_words_ids",False,random.random() < self.drop_chance,item_ix % 2 != 0,0.6444206237792969 388,"def load_image(test_data, shape): """"""Load calibration images."""""" test_np = np.fromfile(test_data, dtype=np.float32) test_shape = (-1,) + tuple(shape[1:]) test_np = np.reshape(test_np, test_shape) calib_num = 32 if: return test_np[:calib_num] else: return test_np",False,test_np.shape[0] > calib_num,calib_num < 32,0.6428342461585999 389,"def deserialize(self, value): """"""The method to deserialize the DynamoDB data types. :param value: A DynamoDB value to be deserialized to a pythonic value. Here are the various conversions: DynamoDB Python -------- ------ {'NULL': True} None {'BOOL': True/False} True/False {'N': str(value)} Decimal(str(value)) {'S': string} string {'B': bytes} Binary(bytes) {'NS': [str(value)]} set([Decimal(str(value))]) {'SS': [string]} set([string]) {'BS': [bytes]} set([bytes]) {'L': list} list {'M': dict} dict :returns: The pythonic value of the DynamoDB type. """""" if: raise TypeError('Value must be a nonempty dictionary whose key is a valid dynamodb type.') dynamodb_type = list(value.keys())[0] try: deserializer = getattr(self, '_deserialize_%s' % dynamodb_type.lower()) except AttributeError: raise TypeError('Dynamodb type %s is not supported' % dynamodb_type) return deserializer(value[dynamodb_type])",True,not value,not value,0.6604611277580261 390,"def cat_core(list_of_columns: List, sep: str): """""" Auxiliary function for :meth:`str.cat` Parameters ---------- list_of_columns : list of numpy arrays List of arrays to be concatenated with sep; these arrays may not contain NaNs! sep : string The separator string for concatenating the columns. Returns ------- nd.array The concatenation of list_of_columns with sep. """""" if: arr_of_cols = np.asarray(list_of_columns, dtype=object) return np.sum(arr_of_cols, axis=0) list_with_sep = [sep] * (2 * len(list_of_columns) - 1) list_with_sep[::2] = list_of_columns arr_with_sep = np.asarray(list_with_sep, dtype=object) return np.sum(arr_with_sep, axis=0)",True,sep == '',sep == '',0.6636082530021667 391,"def _match_ne(self, version, constraint, prefix): version, constraint = self._adjust_local(version, constraint, prefix) if: result = version!= constraint else: result = not _match_prefix(version, constraint) return result",True,not prefix,not prefix,0.6541920900344849 392,"def _snap_exec(commands): """""" Execute snap commands. :param commands: List commands :return: Integer exit code """""" assert isinstance(commands, list) retry_count = 0 return_code = None while return_code is None or return_code == SNAP_NO_LOCK: try: return_code = subprocess.check_call(['snap'] + commands, env=os.environ) except subprocess.CalledProcessError as e: retry_count += +1 if: raise CouldNotAcquireLockException('Could not acquire lock after {} attempts'.format(SNAP_NO_LOCK_RETRY_COUNT)) return_code = e.returncode log('Snap failed to acquire lock, trying again in {} seconds.'.format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN') sleep(SNAP_NO_LOCK_RETRY_DELAY) return return_code",False,retry_count > SNAP_NO_LOCK_RETRY_COUNT,retry_count > 3,0.6437193155288696 393,"def selected_alpn_protocol(self): proto = self._conn.get_alpn_proto_negotiated() if: proto = proto.decode('ascii') return proto if proto else None",True,"isinstance(proto, bytes)","isinstance(proto, bytes)",0.6463732719421387 394,"def _truncate_text(self, text, max_length): if: if text.strip('$').isdigit(): text = int(text.strip('$')) return '${:.2E}'.format(text) return text[:max_length - 3] + '...' return text",True,len(text) > max_length,len(text) > max_length,0.6456489562988281 395,"def _truncate_text(self, text, max_length): if len(text) > max_length: if: text = int(text.strip('$')) return '${:.2E}'.format(text) return text[:max_length - 3] + '...' return text",True,text.strip('$').isdigit(),text.strip('$').isdigit(),0.6489506959915161 396,"def is_header(self, line: IndentedLine, previous_line: IndentedLine) -> bool: if: return False if not self.strict: return is_uppercase_hun(line.content[0]) or self.extract_identifier(line) is not None if previous_line == EMPTY_LINE and is_uppercase_hun(line.content[0]): return True return super().is_header(line, previous_line)",False,not line.bold,not line or not previous_line,0.6590715646743774 397,"def is_header(self, line: IndentedLine, previous_line: IndentedLine) -> bool: if not line.bold: return False if: return is_uppercase_hun(line.content[0]) or self.extract_identifier(line) is not None if previous_line == EMPTY_LINE and is_uppercase_hun(line.content[0]): return True return super().is_header(line, previous_line)",False,not self.strict,not previous_line,0.6520365476608276 398,"def is_header(self, line: IndentedLine, previous_line: IndentedLine) -> bool: if not line.bold: return False if not self.strict: return is_uppercase_hun(line.content[0]) or self.extract_identifier(line) is not None if: return True return super().is_header(line, previous_line)",False,previous_line == EMPTY_LINE and is_uppercase_hun(line.content[0]),previous_line.content and self.is_identifier(line),0.6457589864730835 399,"def align_code(groups, buff=ALIGN_SPACING): dir = groups[0][0] align = groups[0][1] g = VGroup(*[g if type(g)!= list else align_code(g) for g in groups[1:]]) if: g.arrange_in_grid(rows=1, row_alignments=align, buff=buff) else: g.arrange_in_grid(cols=1, col_alignments=align, buff=buff) return g",True,dir == '-',dir == '-',0.6616206169128418 400,"def get_warns(user_id, chat_id): try: user = SESSION.query(Warns).get((user_id, str(chat_id))) if: return None reasons = user.reasons num = user.num_warns return (num, reasons) finally: SESSION.close()",True,not user,not user,0.6579923629760742 401,"def _get_weight_regex(self, text_in): if: replaces = {'(': '\\(', ')': '\\)', '[': '\\[', ']': '\\]', '.': '\\.', '*': '.*?'} regex_in = text_in for key, val in replaces.items(): regex_in = regex_in.replace(key, val) regex = re.compile(regex_in, re.IGNORECASE) else: regex = re.compile(re.escape(text_in), re.IGNORECASE) return regex",False,'*' in text_in,"isinstance(text_in, tuple)",0.6528275012969971 402,"def on_touch_down(self, touch): if: self.h_picker_touch = False else: self.h_picker_touch = True super().on_touch_down(touch)",True,not self._h_picker.collide_point(*touch.pos),not self._h_picker.collide_point(*touch.pos),0.6440072059631348 403,"def pathmagic(string): parts = string.split('\\') if: return string elif len(parts) == 3: return os.path.join(*parts) else: return string",True,len(parts) == 1,len(parts) == 1,0.6467896699905396 404,"def pathmagic(string): parts = string.split('\\') if len(parts) == 1: return string elif: return os.path.join(*parts) else: return string",True,len(parts) == 3,len(parts) == 3,0.6465815901756287 405,"def checkUnindent(s, l, t): if: return curCol = col(l, s) if not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])): raise ParseException(s, l, 'not an unindent') indentStack.pop()",True,l >= len(s),l >= len(s),0.6486468315124512 406,"def checkUnindent(s, l, t): if l >= len(s): return curCol = col(l, s) if: raise ParseException(s, l, 'not an unindent') indentStack.pop()",True,not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])),not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])),0.6463941931724548 407,"def execute(self, notification: SponsorEmailNotificationTemplate, sponsorships, contact_types, **kwargs): msg_kwargs = {'to_primary': SponsorContact.PRIMARY_CONTACT in contact_types, 'to_administrative': SponsorContact.ADMINISTRATIVE_CONTACT in contact_types, 'to_accounting': SponsorContact.ACCOUTING_CONTACT in contact_types, 'to_manager': SponsorContact.MANAGER_CONTACT in contact_types} for sponsorship in sponsorships: email = notification.get_email_message(sponsorship, **msg_kwargs) if: continue email.send() self.notify(notification=notification, sponsorship=sponsorship, contact_types=contact_types, request=kwargs.get('request'))",False,not email,email is None,0.6633118987083435 408,"def close_w(self) -> None: if: os.close(self.w) self.w = None",True,self.w is not None,self.w is not None,0.6481721997261047 409,"def __ixor__(self, other): if: other = ParserElement._literalStringClass(other) return self.append(other)",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6507259011268616 410,"def _truncate_seq_pair(self, tokens_a, tokens_b, max_length): """"""Truncates a sequence pair in place to the maximum length."""""" while True: total_length = len(tokens_a) + len(tokens_b) if: break if len(tokens_a) > len(tokens_b): tokens_a.pop() else: tokens_b.pop()",True,total_length <= max_length,total_length <= max_length,0.6482000350952148 411,"def _truncate_seq_pair(self, tokens_a, tokens_b, max_length): """"""Truncates a sequence pair in place to the maximum length."""""" while True: total_length = len(tokens_a) + len(tokens_b) if total_length <= max_length: break if: tokens_a.pop() else: tokens_b.pop()",True,len(tokens_a) > len(tokens_b),len(tokens_a) > len(tokens_b),0.64232337474823 412,"def on_actionLoad_Style_triggered(self, b=None): if: return fname = QtGui.QFileDialog.getOpenFileName(self, 'Open File', os.getcwd(),'style files (*.json *.style)') self.style_fname = fname self.disableHL() self.ui.style.setPlainText(codecs.open(self.style_fname, 'rb', 'utf-8').read()) self.enableHL()",False,b is None,b is not None,0.6597837209701538 413,"def __getitem__(self, name): if: raise NotImplementedError else: return self.element.attributes[name].value",False,"isinstance(name, tuple)",name not in self.element.attributes.keys(),0.65006422996521 414,"def get_RESTART_warning(self): """"""Print a warning if the RESTART keyword is detected"""""" if: print('WARNING: DUMPFLUX file contains a RESTART.\n') print('This may cause problems with execution of DUMPFLUX run.\n') print('Please check the RESTART file path before you proceed!')",False,self.has_KW('RESTART'),self.restart_file is None,0.646465003490448 415,"def _black_or_white_by_color_brightness(color): if: return 'black' else: return 'white'",True,_color_brightness(color) >= 500,_color_brightness(color) >= 500,0.6438709497451782 416,"def classify_cert(cert_meta, now, time_remaining, expire_window, cert_list): """"""Given metadata about a certificate under examination, classify it into one of three categories, 'ok', 'warning', and 'expired'. Params: - `cert_meta` dict - A dict with certificate metadata. Required fields include: 'cert_cn', 'path', 'expiry', 'days_remaining', 'health'. - `now` (datetime) - a datetime object of the time to calculate the certificate 'time_remaining' against - `time_remaining` (datetime.timedelta) - a timedelta for how long until the cert expires - `expire_window` (datetime.timedelta) - a timedelta for how long the warning window is - `cert_list` list - A list to shove the classified cert into Return: - `cert_list` - The updated list of classified certificates """""" expiry_str = str(cert_meta['expiry']) if: cert_meta['health'] = 'expired' elif time_remaining < expire_window: cert_meta['health'] = 'warning' else: cert_meta['health'] = 'ok' cert_meta['expiry'] = expiry_str cert_meta['serial_hex'] = hex(int(cert_meta['serial'])) cert_list.append(cert_meta) return cert_list",False,cert_meta['expiry'] < now,expire_window is None,0.6477249264717102 417,"def classify_cert(cert_meta, now, time_remaining, expire_window, cert_list): """"""Given metadata about a certificate under examination, classify it into one of three categories, 'ok', 'warning', and 'expired'. Params: - `cert_meta` dict - A dict with certificate metadata. Required fields include: 'cert_cn', 'path', 'expiry', 'days_remaining', 'health'. - `now` (datetime) - a datetime object of the time to calculate the certificate 'time_remaining' against - `time_remaining` (datetime.timedelta) - a timedelta for how long until the cert expires - `expire_window` (datetime.timedelta) - a timedelta for how long the warning window is - `cert_list` list - A list to shove the classified cert into Return: - `cert_list` - The updated list of classified certificates """""" expiry_str = str(cert_meta['expiry']) if cert_meta['expiry'] < now: cert_meta['health'] = 'expired' elif: cert_meta['health'] = 'warning' else: cert_meta['health'] = 'ok' cert_meta['expiry'] = expiry_str cert_meta['serial_hex'] = hex(int(cert_meta['serial'])) cert_list.append(cert_meta) return cert_list",False,time_remaining < expire_window,cert_meta['expiry'] > now,0.6497220993041992 418,"def bytes_to_human_readable(memory_amount: int) -> str: """""" Utility to convert a number of bytes (int) in a human readable string (with units) """""" for unit in ['B', 'KB', 'MB', 'GB']: if: return '{:.3f}{}'.format(memory_amount, unit) memory_amount /= 1024.0 return '{:.3f}TB'.format(memory_amount)",False,memory_amount > -1024.0 and memory_amount < 1024.0,memory_amount >= 1024.0,0.6476356983184814 419,"def multiply_grads(self, c): """"""Multiplies grads by a constant ``c``."""""" if: self._sync_fp16_grads_to_fp32(c) elif self.has_flat_params: self.fp32_params.grad.data.mul_(c) else: for p32 in self.fp32_params: p32.grad.data.mul_(c)",False,self._needs_sync,self.use_fp16,0.6519771814346313 420,"def multiply_grads(self, c): """"""Multiplies grads by a constant ``c``."""""" if self._needs_sync: self._sync_fp16_grads_to_fp32(c) elif: self.fp32_params.grad.data.mul_(c) else: for p32 in self.fp32_params: p32.grad.data.mul_(c)",False,self.has_flat_params,self.use_fp16_grads,0.6455222964286804 421,"def get_priority(priority: Union[int, str, Priority]) -> int: """"""Get priority value. Args: priority (int or str or :obj:`Priority`): Priority. Returns: int: The priority value. """""" if: if priority < 0 or priority > 100: raise ValueError('priority must be between 0 and 100') return priority elif isinstance(priority, Priority): return priority.value elif isinstance(priority, str): return Priority[priority.upper()].value else: raise TypeError('priority must be an integer or Priority enum value')",True,"isinstance(priority, int)","isinstance(priority, int)",0.6532579064369202 422,"def get_priority(priority: Union[int, str, Priority]) -> int: """"""Get priority value. Args: priority (int or str or :obj:`Priority`): Priority. Returns: int: The priority value. """""" if isinstance(priority, int): if: raise ValueError('priority must be between 0 and 100') return priority elif isinstance(priority, Priority): return priority.value elif isinstance(priority, str): return Priority[priority.upper()].value else: raise TypeError('priority must be an integer or Priority enum value')",False,priority < 0 or priority > 100,not 0 <= priority <= 100,0.6681342124938965 423,"def get_priority(priority: Union[int, str, Priority]) -> int: """"""Get priority value. Args: priority (int or str or :obj:`Priority`): Priority. Returns: int: The priority value. """""" if isinstance(priority, int): if priority < 0 or priority > 100: raise ValueError('priority must be between 0 and 100') return priority elif: return priority.value elif isinstance(priority, str): return Priority[priority.upper()].value else: raise TypeError('priority must be an integer or Priority enum value')",True,"isinstance(priority, Priority)","isinstance(priority, Priority)",0.657296895980835 424,"def get_priority(priority: Union[int, str, Priority]) -> int: """"""Get priority value. Args: priority (int or str or :obj:`Priority`): Priority. Returns: int: The priority value. """""" if isinstance(priority, int): if priority < 0 or priority > 100: raise ValueError('priority must be between 0 and 100') return priority elif isinstance(priority, Priority): return priority.value elif: return Priority[priority.upper()].value else: raise TypeError('priority must be an integer or Priority enum value')",True,"isinstance(priority, str)","isinstance(priority, str)",0.6517472267150879 425,"def main(): args = parser.parse_args() if: random.seed(args.seed) np.random.seed(args.seed) torch.manual_seed(args.seed) cudnn.deterministic = True main_worker(args)",True,args.seed is not None,args.seed is not None,0.6485235691070557 426,"def _eq_verbose(self, acl): """"""Returns same as __eq__ but print explanation if not equal. TEST: This function is used solely as part of the test suite."""""" if: print('ACL entries for {rp} compare differently'.format(rp=self)) return 0 if not self.cmp_entry_list(self.default_entry_list, acl.default_entry_list): print('Default ACL entries for {rp} do not compare'.format(rp=self)) return 0 return 1",False,"not self.cmp_entry_list(self.entry_list, acl.entry_list)",not self.cmp_entry_list(acl.entry_list),0.6439564228057861 427,"def _eq_verbose(self, acl): """"""Returns same as __eq__ but print explanation if not equal. TEST: This function is used solely as part of the test suite."""""" if not self.cmp_entry_list(self.entry_list, acl.entry_list): print('ACL entries for {rp} compare differently'.format(rp=self)) return 0 if: print('Default ACL entries for {rp} do not compare'.format(rp=self)) return 0 return 1",False,"not self.cmp_entry_list(self.default_entry_list, acl.default_entry_list)",not self.cmp_default_list(acl.default_list),0.6422747373580933 428,"def attr_value(self, target, index=0): """""" The attribute value for the given target node (e.g. 'PROJCS'). The index keyword specifies an index of the child node to return. """""" if: raise TypeError return get_attr_value(self._ptr, target, index)",False,"not isinstance(target, str) or not isinstance(index, int)","not isinstance(target, type)",0.6442840099334717 429,"def compute_drmsd_np(structure_1, structure_2, mask=None): structure_1 = torch.tensor(structure_1) structure_2 = torch.tensor(structure_2) if: mask = torch.tensor(mask) return compute_drmsd(structure_1, structure_2, mask)",True,mask is not None,mask is not None,0.6515742540359497 430,"def plug_float_update_callback(self, context): if: active_module = self.get_active_module() if active_module!= None: if 'parameter_dictionary' in dir(active_module): active_module.parameter_dictionary[self.key_name]['val'] = self.float_val_shadow",False,len(self.key_name) > 0,self.float_val_shadow != None,0.6486524343490601 431,"def plug_float_update_callback(self, context): if len(self.key_name) > 0: active_module = self.get_active_module() if: if 'parameter_dictionary' in dir(active_module): active_module.parameter_dictionary[self.key_name]['val'] = self.float_val_shadow",False,active_module != None,active_module is not None,0.6539262533187866 432,"def plug_float_update_callback(self, context): if len(self.key_name) > 0: active_module = self.get_active_module() if active_module!= None: if: active_module.parameter_dictionary[self.key_name]['val'] = self.float_val_shadow",False,'parameter_dictionary' in dir(active_module),active_module.parameter_dictionary.get(self.key_name) is not None,0.6464701294898987 433,"def _md5check(fullname, md5sum=None): if: return True md5 = hashlib.md5() with open(fullname, 'rb') as f: for chunk in iter(lambda: f.read(4096), b''): md5.update(chunk) calc_md5sum = md5.hexdigest() if calc_md5sum!= md5sum: return False return True",True,md5sum is None,md5sum is None,0.6490707993507385 434,"def _md5check(fullname, md5sum=None): if md5sum is None: return True md5 = hashlib.md5() with open(fullname, 'rb') as f: for chunk in iter(lambda: f.read(4096), b''): md5.update(chunk) calc_md5sum = md5.hexdigest() if: return False return True",True,calc_md5sum != md5sum,calc_md5sum != md5sum,0.6423662900924683 435,"def _cache_group_by_group_id(self, group_id): if: return None group = storage.groups[group_id] if group.type!= storage.Group.TYPE_CACHE: return None return group",True,group_id not in storage.groups,group_id not in storage.groups,0.649545431137085 436,"def _cache_group_by_group_id(self, group_id): if group_id not in storage.groups: return None group = storage.groups[group_id] if: return None return group",False,group.type != storage.Group.TYPE_CACHE,group is None,0.6449147462844849 437,"def _index(self): self._property_lookup = dict(((prop.name, prop) for prop in self._properties)) if: raise ValueError('two properties with same name')",True,len(self._property_lookup) != len(self._properties),len(self._property_lookup) != len(self._properties),0.6455563306808472 438,"def wait_for_glusterd_to_start(servers, glusterd_start_wait_timeout=80): """"""Checks glusterd is running on nodes with timeout. Args: servers (str|list): A server|List of server hosts on which glusterd status has to be checked. glusterd_start_wait_timeout: timeout to retry glusterd running check in node. Returns: bool : True if glusterd is running on servers. False otherwise. """""" if: servers = [servers] count = 0 while count <= glusterd_start_wait_timeout: ret = is_glusterd_running(servers) if not ret: g.log.info('glusterd is running on %s', servers) return True sleep(1) count += 1 g.log.error('glusterd is not running on %s', servers) return False",True,"not isinstance(servers, list)","not isinstance(servers, list)",0.6427684426307678 439,"def wait_for_glusterd_to_start(servers, glusterd_start_wait_timeout=80): """"""Checks glusterd is running on nodes with timeout. Args: servers (str|list): A server|List of server hosts on which glusterd status has to be checked. glusterd_start_wait_timeout: timeout to retry glusterd running check in node. Returns: bool : True if glusterd is running on servers. False otherwise. """""" if not isinstance(servers, list): servers = [servers] count = 0 while count <= glusterd_start_wait_timeout: ret = is_glusterd_running(servers) if: g.log.info('glusterd is running on %s', servers) return True sleep(1) count += 1 g.log.error('glusterd is not running on %s', servers) return False",False,not ret,ret,0.654487669467926 440,"def onAccountBecomePlayer(self): if: self.__webController.invalidate() self.__onServerSettingsChange(self.__lobbyCtx.getServerSettings().getSettings())",False,self.__webController.getStateID() == WebControllerStates.STATE_NOT_DEFINED,not self.__lobbyCtx.getServerSettings().getSettings()['enabled'],0.6495983004570007 441,"def __setstate__(self, state): self.__dict__.update(state) for ffname in self.ffdata: if: temp = etree.ElementTree(etree.fromstring(self.ffdata[ffname])) self.ffdata[ffname] = temp",True,self.ffdata_isxml[ffname],self.ffdata_isxml[ffname],0.6492067575454712 442,"def __init__(self, x, y): if: raise ValueError('Invalid shape') check_space_types(x, y) super().__init__(x, [x, y], nl_deps=[], ic=False, adj_ic=False)",False,var_local_size(x) != var_local_size(y),x.dim() != 2 or y.dim() != 3,0.6450034379959106 443,"def decode(self, data, items): """"""Decodes the data to return the tensors specified by the list of items. Args: data: The scalar data to decode. items: A list of strings, each of which is the name of the resulting tensors to retrieve. Returns: A list of tensors, each of which corresponds to each item. """""" data = tf.reshape(data, shape=[]) if: decoded_data = tf.string_to_number(data, out_type=self._dtype) else: decoded_data = (tf.cast(data, self._dtype),) outputs = {self._data_name: decoded_data} return [outputs[item] for item in items]",False,data.dtype is tf.string,"isinstance(data, tf.string_types)",0.6459290981292725 444,"def __eq__(self, other): if: return False return self.hashcmp == other.hashcmp",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.6471760272979736 445,"def load_mask(self, idx): if: idx = np.random.randint(0, len(self.mask_path_list)) elif self.mask_choice == 'inorder': idx = idx % len(self.mask_path_list) mask = cv2.imdecode(np.fromfile(self.mask_path_list[idx], dtype=np.uint8), cv2.IMREAD_GRAYSCALE) return mask.astype(np.float32)",True,self.mask_choice == 'random',self.mask_choice == 'random',0.650625467300415 446,"def load_mask(self, idx): if self.mask_choice == 'random': idx = np.random.randint(0, len(self.mask_path_list)) elif: idx = idx % len(self.mask_path_list) mask = cv2.imdecode(np.fromfile(self.mask_path_list[idx], dtype=np.uint8), cv2.IMREAD_GRAYSCALE) return mask.astype(np.float32)",True,self.mask_choice == 'inorder',self.mask_choice == 'inorder',0.6518155932426453 447,"def parse_body(body: bytes) -> None: res_json = parse_json(body) if: raise TiebaServerError(code, res_json['error_msg'])",True,code := int(res_json['error_code']),code := int(res_json['error_code']),0.6448076963424683 448,"def load(self, require=True, *args, **kwargs): """""" Require packages for this EntryPoint, then resolve it. """""" if: warnings.warn('Parameters to load are deprecated. Call.resolve and.require separately.', PkgResourcesDeprecationWarning, stacklevel=2) if require: self.require(*args, **kwargs) return self.resolve()",True,not require or args or kwargs,not require or args or kwargs,0.6531832218170166 449,"def load(self, require=True, *args, **kwargs): """""" Require packages for this EntryPoint, then resolve it. """""" if not require or args or kwargs: warnings.warn('Parameters to load are deprecated. Call.resolve and.require separately.', PkgResourcesDeprecationWarning, stacklevel=2) if: self.require(*args, **kwargs) return self.resolve()",True,require,require,0.679688572883606 450,"@certfile.setter def certfile(self, certfile): if: raise ValueError('certfile is needed for server-side') if certfile and (not os.access(certfile, os.R_OK)): raise IOError('No such certfile found: %s' % certfile) self._certfile = certfile",False,self._server_side and (not certfile),not certfile,0.6473574638366699 451,"@certfile.setter def certfile(self, certfile): if self._server_side and (not certfile): raise ValueError('certfile is needed for server-side') if: raise IOError('No such certfile found: %s' % certfile) self._certfile = certfile",False,"certfile and (not os.access(certfile, os.R_OK))",certfile is None,0.6480023860931396 452,"def score(self, rigid_0: ru.Rigid, rigid_t: ru.Rigid, t: float): tran_0, rot_0 = _extract_trans_rots(rigid_0) tran_t, rot_t = _extract_trans_rots(rigid_t) if: rot_score = np.zeros_like(rot_0) else: rot_score = self._so3_diffuser.score(rot_t, t) if not self._diffuse_trans: trans_score = np.zeros_like(tran_0) else: trans_score = self._r3_diffuser.score(tran_t, tran_0, t) return (trans_score, rot_score)",False,not self._diffuse_rot,not self._diffuse_trans,0.6493487358093262 453,"def score(self, rigid_0: ru.Rigid, rigid_t: ru.Rigid, t: float): tran_0, rot_0 = _extract_trans_rots(rigid_0) tran_t, rot_t = _extract_trans_rots(rigid_t) if not self._diffuse_rot: rot_score = np.zeros_like(rot_0) else: rot_score = self._so3_diffuser.score(rot_t, t) if: trans_score = np.zeros_like(tran_0) else: trans_score = self._r3_diffuser.score(tran_t, tran_0, t) return (trans_score, rot_score)",False,not self._diffuse_trans,not self._diffuse_rot,0.6473543643951416 454,"def __init__(self, file_pattern: Union[Text, List[Text]], raw_record_column_name: Text, telemetry_descriptors: List[Text]): """"""Initializer. Args: file_pattern: One or a list of glob patterns. If a list, must not be empty. raw_record_column_name: Name of the raw record column. telemetry_descriptors: A set of descriptors that identify the component that is instantiating this TFXIO. These will be used to construct the namespace to contain metrics for profiling and are therefore expected to be identifiers of the component itself and not individual instances of source use. """""" super().__init__(telemetry_descriptors=telemetry_descriptors, physical_format='tfrecords_gzip', raw_record_column_name=raw_record_column_name) if: file_pattern = [file_pattern] assert file_pattern, 'Must provide at least one file pattern.' self._file_pattern = file_pattern",True,"not isinstance(file_pattern, list)","not isinstance(file_pattern, list)",0.6433738470077515 455,"@property def paths(self) -> list[str]: """"""Resolve SFTP file paths with prefix"""""" url = urlparse(self.path) uri = self.get_uri() full_paths = [] prefixes = self.hook.get_tree_map(url.netloc, prefix=url.netloc + url.path) for keys in prefixes: if: full_paths.extend(keys) paths = [uri + '/' + path for path in full_paths] return paths",True,len(keys) > 0,len(keys) > 0,0.6496731638908386 456,"def is_valid_size(self, target_size_product): if: return False if target_size_product < self.min_input_sizesquare: return False return True",True,target_size_product > self.max_input_sizesquare,target_size_product > self.max_input_sizesquare,0.6462470293045044 457,"def is_valid_size(self, target_size_product): if target_size_product > self.max_input_sizesquare: return False if: return False return True",True,target_size_product < self.min_input_sizesquare,target_size_product < self.min_input_sizesquare,0.6456623077392578 458,"def __call__(self): obj = self.ref() if: raise exceptions.InvalidRequestError('stale association proxy, parent object has gone out of scope') return getattr(obj, self.target)",False,obj is None,obj.scope != self.scope,0.6637649536132812 459,"def iterate_models(self, **kwargs): """""" Iterate Scraper models. :kwargs: FFProbe results and index :returns: Metadata model """""" for md_class in self._supported_metadata: if: md_object = md_class(**kwargs) if md_object.av_format_supported() is not None: yield md_object",False,"md_class.is_supported(self._predefined_mimetype, self._predefined_version, self._params)",md_class.allow_convert_all_versions,0.6452722549438477 460,"def iterate_models(self, **kwargs): """""" Iterate Scraper models. :kwargs: FFProbe results and index :returns: Metadata model """""" for md_class in self._supported_metadata: if md_class.is_supported(self._predefined_mimetype, self._predefined_version, self._params): md_object = md_class(**kwargs) if: yield md_object",False,md_object.av_format_supported() is not None,md_object.has_model(self._predefined_mimetype),0.6457136869430542 461,"def read_line(self): """""" Read a line from a nmea port return Line from the nmea port """""" line = '' while True: c = self.read() if: break while c == '\r': c = self.read() if c == '\n': break line += c return line",False,not c,c == '\r',0.6661070585250854 462,"def read_line(self): """""" Read a line from a nmea port return Line from the nmea port """""" line = '' while True: c = self.read() if not c: break while c == '\r': c = self.read() if: break line += c return line",False,c == '\n',not c,0.6589483618736267 463,"def warn(self, *message, tag: Optional[str]=None, end: str='\n', split: str=' ', flush: bool=True, stack_trace: Optional[FrameType]=None) -> None: if: return self.make_log(messages=list(message), tag=tag, end=end, split=split, flush=flush, level=LogLevel.warn, stack_trace=stack_trace)",False,not self.log_for(LogLevel.warn),self.is_null(),0.6436254978179932 464,"def _break_cont_exps(self, g): if: return flatten_list([self._break_cont_exps(g.inputs[i]) for i in g.inputs if is_pos(i)]) else: return [g]",False,g.typename() == 'cont_turn',"isinstance(g, Union)",0.6467052698135376 465,"def forward(self, x): x = self.relu(x) if: x = nn.ZeroPad2d((1, 0, 1, 0))(x) x = self.separable_1(x) if self.name =='specific': x = x[:, :, 1:, 1:].contiguous() x = self.bn_sep_1(x) x = self.relu1(x) x = self.separable_2(x) x = self.bn_sep_2(x) return x",True,self.name == 'specific',self.name == 'specific',0.6466684341430664 466,"def forward(self, x): x = self.relu(x) if self.name =='specific': x = nn.ZeroPad2d((1, 0, 1, 0))(x) x = self.separable_1(x) if: x = x[:, :, 1:, 1:].contiguous() x = self.bn_sep_1(x) x = self.relu1(x) x = self.separable_2(x) x = self.bn_sep_2(x) return x",True,self.name == 'specific',self.name == 'specific',0.6458703279495239 467,"def asformat(self, format, copy=False): """"""Return this matrix in the passed sparse format. Parameters ---------- format : {str, None} The desired sparse matrix format (""csr"", ""csc"", ""lil"", ""dok"",...) or None for no conversion. copy : bool, optional If True, the result is guaranteed to not share data with self. Returns ------- A : This matrix in the passed sparse format. """""" if: if copy: return self.copy() else: return self else: try: convert_method = getattr(self, 'to' + format) except AttributeError: raise ValueError('Format {} is unknown.'.format(format)) else: return convert_method(copy=copy)",False,format is None or format == self.format,format is None,0.6479859352111816 468,"def asformat(self, format, copy=False): """"""Return this matrix in the passed sparse format. Parameters ---------- format : {str, None} The desired sparse matrix format (""csr"", ""csc"", ""lil"", ""dok"",...) or None for no conversion. copy : bool, optional If True, the result is guaranteed to not share data with self. Returns ------- A : This matrix in the passed sparse format. """""" if format is None or format == self.format: if: return self.copy() else: return self else: try: convert_method = getattr(self, 'to' + format) except AttributeError: raise ValueError('Format {} is unknown.'.format(format)) else: return convert_method(copy=copy)",True,copy,copy,0.672120213508606 469,"def cancels_job_with_name(self, job_name: Text, sender_id: Text) -> bool: """"""Determines if this `ReminderCancelled` event should cancel the job with the given name. Args: job_name: Name of the job to be tested. sender_id: The `sender_id` of the tracker. Returns: `True`, if this `ReminderCancelled` event should cancel the job with the given name, and `False` otherwise. """""" match = re.match(f'^\\[([\\d\\-]*),([\\d\\-]*),([\\d\\-]*)\\]({re.escape(ACTION_NAME_SENDER_ID_CONNECTOR_STR)}{re.escape(sender_id)})', job_name) if: return False name_hash, intent_hash, entities_hash = match.group(1, 2, 3) return (not self.name or self._matches_name_hash(name_hash)) and (not self.intent or self._matches_intent_hash(intent_hash)) and (not self.entities or self._matches_entities_hash(entities_hash))",True,not match,not match,0.6534326076507568 470,"def set_value(self, tag, value): if: return family = tag.split('.')[0] if family == 'Exif': self.set_exif_value(tag, value) elif family == 'Iptc': self.set_iptc_value(tag, value) else: self.set_xmp_value(tag, value)",True,not tag,not tag,0.6641639471054077 471,"def set_value(self, tag, value): if not tag: return family = tag.split('.')[0] if: self.set_exif_value(tag, value) elif family == 'Iptc': self.set_iptc_value(tag, value) else: self.set_xmp_value(tag, value)",True,family == 'Exif',family == 'Exif',0.6504823565483093 472,"def set_value(self, tag, value): if not tag: return family = tag.split('.')[0] if family == 'Exif': self.set_exif_value(tag, value) elif: self.set_iptc_value(tag, value) else: self.set_xmp_value(tag, value)",True,family == 'Iptc',family == 'Iptc',0.6538915634155273 473,"def pytest_generate_tests(metafunc): """""" Function called by pytest when collecting a test_XXX function define the dispatch_rules fixtures in test environement with collected the value _dispatch_rules if it exist or with an empty dispatch_rules :param metafunc: the test context given by pytest """""" if: dispatch_rules = getattr(metafunc.function, '_dispatch_rules', None) if isinstance(dispatch_rules, list): metafunc.parametrize('dispatch_rules', [dispatch_rules]) else: metafunc.parametrize('dispatch_rules', [[(Report1, DispatchRule1AB(primary=True))]]) if 'formula_class' in metafunc.fixturenames: formula_class = getattr(metafunc.function, '_formula_class', DummyFormulaActor) metafunc.parametrize('formula_class', [formula_class])",True,'dispatch_rules' in metafunc.fixturenames,'dispatch_rules' in metafunc.fixturenames,0.6457846760749817 474,"def pytest_generate_tests(metafunc): """""" Function called by pytest when collecting a test_XXX function define the dispatch_rules fixtures in test environement with collected the value _dispatch_rules if it exist or with an empty dispatch_rules :param metafunc: the test context given by pytest """""" if 'dispatch_rules' in metafunc.fixturenames: dispatch_rules = getattr(metafunc.function, '_dispatch_rules', None) if isinstance(dispatch_rules, list): metafunc.parametrize('dispatch_rules', [dispatch_rules]) else: metafunc.parametrize('dispatch_rules', [[(Report1, DispatchRule1AB(primary=True))]]) if: formula_class = getattr(metafunc.function, '_formula_class', DummyFormulaActor) metafunc.parametrize('formula_class', [formula_class])",True,'formula_class' in metafunc.fixturenames,'formula_class' in metafunc.fixturenames,0.6451785564422607 475,"def pytest_generate_tests(metafunc): """""" Function called by pytest when collecting a test_XXX function define the dispatch_rules fixtures in test environement with collected the value _dispatch_rules if it exist or with an empty dispatch_rules :param metafunc: the test context given by pytest """""" if 'dispatch_rules' in metafunc.fixturenames: dispatch_rules = getattr(metafunc.function, '_dispatch_rules', None) if: metafunc.parametrize('dispatch_rules', [dispatch_rules]) else: metafunc.parametrize('dispatch_rules', [[(Report1, DispatchRule1AB(primary=True))]]) if 'formula_class' in metafunc.fixturenames: formula_class = getattr(metafunc.function, '_formula_class', DummyFormulaActor) metafunc.parametrize('formula_class', [formula_class])",False,"isinstance(dispatch_rules, list)","isinstance(dispatch_rules, Mapping)",0.6479055881500244 476,"def test_find_first_zero_bit(self): sym = gdb.lookup_symbol('cpu_online_mask', None)[0] if: sym = gdb.lookup_symbol('__cpu_online_mask', None)[0] self.assertTrue(sym is not None) bitmap = sym.value()['bits'] count = 0 bit = bitmaps.find_first_zero_bit(bitmap) self.assertTrue(type(bit) is int)",True,sym is None,sym is None,0.6599867343902588 477,"def _do_evaluate(self, runner): """"""perform evaluation and save ckpt."""""" if: return from mmdet.apis import single_gpu_test results = single_gpu_test(runner.model, self.dataloader, show=False) runner.log_buffer.output['eval_iter_num'] = len(self.dataloader) key_score = self.evaluate(runner, results) if self.save_best: self._save_ckpt(runner, key_score)",False,not self._should_evaluate(runner),self.save_best is False and runner.model is None,0.644819974899292 478,"def _do_evaluate(self, runner): """"""perform evaluation and save ckpt."""""" if not self._should_evaluate(runner): return from mmdet.apis import single_gpu_test results = single_gpu_test(runner.model, self.dataloader, show=False) runner.log_buffer.output['eval_iter_num'] = len(self.dataloader) key_score = self.evaluate(runner, results) if: self._save_ckpt(runner, key_score)",True,self.save_best,self.save_best,0.6514607071876526 479,"def try_load(name: Text) -> Optional[Image]: if: return None try: return load(name) except Exception as ex: app.log.text('can not load: %s: %s' % (name, ex), level=app.DEBUG) _NOT_EXISTED_NAMES.add(name) return None",True,name in _NOT_EXISTED_NAMES,name in _NOT_EXISTED_NAMES,0.651135265827179 480,"@staticmethod def set_incr_scan(proj_conf, total_scan): job_context = proj_conf['job_context'] task_list = proj_conf['tasks'] if: job_context['incr_scan'] = False for task_request in task_list: task_params = task_request['task_params'] if total_scan: task_params['incr_scan'] = False task_params['scm_last_revision'] = '' elif 'incr_scan' in task_params: if not task_params['incr_scan']: total_scan = True else: task_params['incr_scan'] = True",False,total_scan,'incr_scan' not in job_context,0.660485565662384 481,"@staticmethod def set_incr_scan(proj_conf, total_scan): job_context = proj_conf['job_context'] task_list = proj_conf['tasks'] if total_scan: job_context['incr_scan'] = False for task_request in task_list: task_params = task_request['task_params'] if: task_params['incr_scan'] = False task_params['scm_last_revision'] = '' elif 'incr_scan' in task_params: if not task_params['incr_scan']: total_scan = True else: task_params['incr_scan'] = True",False,total_scan,task_params['scm_last_revision'],0.6604939699172974 482,"@staticmethod def set_incr_scan(proj_conf, total_scan): job_context = proj_conf['job_context'] task_list = proj_conf['tasks'] if total_scan: job_context['incr_scan'] = False for task_request in task_list: task_params = task_request['task_params'] if total_scan: task_params['incr_scan'] = False task_params['scm_last_revision'] = '' elif: if not task_params['incr_scan']: total_scan = True else: task_params['incr_scan'] = True",False,'incr_scan' in task_params,job_context['incr_scan'],0.651297926902771 483,"@staticmethod def set_incr_scan(proj_conf, total_scan): job_context = proj_conf['job_context'] task_list = proj_conf['tasks'] if total_scan: job_context['incr_scan'] = False for task_request in task_list: task_params = task_request['task_params'] if total_scan: task_params['incr_scan'] = False task_params['scm_last_revision'] = '' elif 'incr_scan' in task_params: if: total_scan = True else: task_params['incr_scan'] = True",False,not task_params['incr_scan'],task_params['incr_scan'],0.6464079022407532 484,"def safe_location(self, location_name, geom, max_distance=200): """""" Returns a location (geometry) to use, given a location_name and geometry. This is used for data sources that publish both a geometry and a location_name -- we double-check that the geometry is within a certain `max_distance` from the geocoded location_name. If there's a discrepancy or if the location_name can't be geocoded, this returns None. """""" location = self.geocode(location_name) if: return None location_point = location['point'] if not location_point: return None location_point.srid = 4326 is_close, distance = locations_are_close(location_point, geom, max_distance) if not is_close: return None return geom",False,location is None,'point' not in location,0.6538652777671814 485,"def safe_location(self, location_name, geom, max_distance=200): """""" Returns a location (geometry) to use, given a location_name and geometry. This is used for data sources that publish both a geometry and a location_name -- we double-check that the geometry is within a certain `max_distance` from the geocoded location_name. If there's a discrepancy or if the location_name can't be geocoded, this returns None. """""" location = self.geocode(location_name) if location is None: return None location_point = location['point'] if: return None location_point.srid = 4326 is_close, distance = locations_are_close(location_point, geom, max_distance) if not is_close: return None return geom",False,not location_point,location_point.srid in self.discrepancy,0.6511427164077759 486,"def safe_location(self, location_name, geom, max_distance=200): """""" Returns a location (geometry) to use, given a location_name and geometry. This is used for data sources that publish both a geometry and a location_name -- we double-check that the geometry is within a certain `max_distance` from the geocoded location_name. If there's a discrepancy or if the location_name can't be geocoded, this returns None. """""" location = self.geocode(location_name) if location is None: return None location_point = location['point'] if not location_point: return None location_point.srid = 4326 is_close, distance = locations_are_close(location_point, geom, max_distance) if: return None return geom",True,not is_close,not is_close,0.6524591445922852 487,"def __str__(self): if: return self.name if self.strRepr is None: self.strRepr = '[' + _ustr(self.expr) + ']...' return self.strRepr",True,"hasattr(self, 'name')","hasattr(self, 'name')",0.6489574313163757 488,"def __str__(self): if hasattr(self, 'name'): return self.name if: self.strRepr = '[' + _ustr(self.expr) + ']...' return self.strRepr",True,self.strRepr is None,self.strRepr is None,0.648362398147583 489,"def get_user(self, email): user = [x for x in MOCK_USERS if x.get('email') == email] if: return user[0] return None",True,user,user,0.6739429831504822 490,"def __ge__(self, other): if: return NotImplemented return other <= self",True,"not isinstance(other, Set)","not isinstance(other, Set)",0.6492550373077393 491,"def CreateCMakeTargetName(self, qualified_target): base_name = CreateCMakeTargetBaseName(qualified_target) if: return CreateCMakeTargetFullName(qualified_target) return base_name",False,base_name in self.cmake_target_base_names_conficting,not base_name,0.6448224782943726 492,"def __call__(self, id, name=None): """""" Return mapped id from id and, if available, name """""" if: return id newid = self.name2id(name) if newid is None: return id else: return newid",True,not name,not name,0.6647347211837769 493,"def __call__(self, id, name=None): """""" Return mapped id from id and, if available, name """""" if not name: return id newid = self.name2id(name) if: return id else: return newid",True,newid is None,newid is None,0.6542881727218628 494,"@property def Type(self): if: return self._entity_data.get('Type') return '0'",True,'Type' in self._entity_data,'Type' in self._entity_data,0.6537728309631348 495,"def handle(self, handler_context): url = '' if: url = handler_context.flow['request']['url'] headers = {'Content-Type': 'text/html; charset=utf-8'} code = lb_http_status.STATUS_CODE_CAN_NOT_HANDLE_REQUEST resp_data = f'Lyrebird cannot handle this request: {url}\n' handler_context.flow['response']['headers'] = headers handler_context.flow['response']['code'] = code handler_context.flow['response']['data'] = resp_data handler_context.response = Response(resp_data, status=code, headers=headers) logger.info(f' ERROR::CAN_NOT_HANDLE_REQUEST {url}')",False,'url' in handler_context.flow.get('request'),'request' in handler_context.flow,0.6497178077697754 496,"def myprint(self, message): assert self.mylogfile!= None, 'The LogFile is not initialized yet!' print(message) sys.stdout.flush() if: print(message, file=self.mylogfile) self.mylogfile.flush()",True,self.mylogfile != None,self.mylogfile != None,0.6499958038330078 497,"def add_pattern(text, pattern_str, replace_str, before, after): if: text = re.sub('%s' % pattern_str, '%s\\1' % replace_str, text) elif before == 0 and after == 1: text = re.sub('%s' % pattern_str, '\\1%s' % replace_str, text) elif before == 1 and after == 1: text = re.sub('%s' % pattern_str, '%s\\1%s' % (replace_str, replace_str), text) return text",True,before == 1 and after == 0,before == 1 and after == 0,0.6535935401916504 498,"def add_pattern(text, pattern_str, replace_str, before, after): if before == 1 and after == 0: text = re.sub('%s' % pattern_str, '%s\\1' % replace_str, text) elif: text = re.sub('%s' % pattern_str, '\\1%s' % replace_str, text) elif before == 1 and after == 1: text = re.sub('%s' % pattern_str, '%s\\1%s' % (replace_str, replace_str), text) return text",True,before == 0 and after == 1,before == 0 and after == 1,0.6534395813941956 499,"def add_pattern(text, pattern_str, replace_str, before, after): if before == 1 and after == 0: text = re.sub('%s' % pattern_str, '%s\\1' % replace_str, text) elif before == 0 and after == 1: text = re.sub('%s' % pattern_str, '\\1%s' % replace_str, text) elif: text = re.sub('%s' % pattern_str, '%s\\1%s' % (replace_str, replace_str), text) return text",True,before == 1 and after == 1,before == 1 and after == 1,0.653862476348877 500,"def _scroll_shift_y(self, event: MouseEvent): old_ylim = self._ax.get_ylim() old_height = old_ylim[1] - old_ylim[0] shift_y = old_height / self._MOUSE_WHEEL_TRANSLATE_SCALE if: shift_y *= -1 self._ax.set_ylim(old_ylim[0] + shift_y, old_ylim[1] + shift_y) self._fig.canvas.draw()",False,event.button == 'up',self._swipe_scale > 1,0.6539157629013062 501,"def propagate_faults(self, icomb: Tuple[int], error: Tuple[str]): """"""Insert a set of faults and propagate through a circuit. icomb = integer tuple of failed operations' indices error = tuple of pauli strings Return: measurement outcome discrepancies. """""" if: raise Exception('no circuit loaded') self.qubit_array = [0] * (2 * self.qreg_size) self.clbit_array = [0] * self.creg_size for j, enc_circ in enumerate(self.encoded_circ): opcode, q_idx, c_idx, _ = enc_circ self.gate_dispatch[opcode](j, q_idx, c_idx, icomb, error) return self.clbit_array",False,self.encoded_circ is None,self.gate_dispatch is None,0.6508350372314453 502,"def get_resource(self, request, filename): """"""Return a static resource from the shared folder."""""" filename = join(dirname(__file__),'shared', basename(filename)) if: mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream' f = file(filename, 'rb') try: return Response(f.read(), mimetype=mimetype) finally: f.close() return Response('Not Found', status=404)",False,isfile(filename),os.path.exists(filename),0.6466870903968811 503,"@classmethod def register_runner(cls, name): """"""Register a model to registry with key 'name' Args: name: Key with which the task will be registered. Usage: from minigpt4_utils.common.registry import registry """""" def wrap(runner_cls): if: raise KeyError(""Name '{}' already registered for {}."".format(name, cls.mapping['runner_name_mapping'][name])) cls.mapping['runner_name_mapping'][name] = runner_cls return runner_cls return wrap",True,name in cls.mapping['runner_name_mapping'],name in cls.mapping['runner_name_mapping'],0.6477246284484863 504,"def check_spooler_service(self): ok = False service_config, service_status = self.get_service('Spooler', self.connection) if: ok = True reasons = ['Spooler service disabled'] else: reasons = ['Spooler service enabled'] if service_status == scmr.SERVICE_RUNNING: reasons.append('Spooler service running') elif service_status == scmr.SERVICE_STOPPED: ok = True reasons.append('Spooler service not running') return (ok, reasons)",False,service_config['dwStartType'] == scmr.SERVICE_DISABLED,service_config == scmr.SERVICE_DISABLED,0.6512548923492432 505,"def check_spooler_service(self): ok = False service_config, service_status = self.get_service('Spooler', self.connection) if service_config['dwStartType'] == scmr.SERVICE_DISABLED: ok = True reasons = ['Spooler service disabled'] else: reasons = ['Spooler service enabled'] if: reasons.append('Spooler service running') elif service_status == scmr.SERVICE_STOPPED: ok = True reasons.append('Spooler service not running') return (ok, reasons)",True,service_status == scmr.SERVICE_RUNNING,service_status == scmr.SERVICE_RUNNING,0.6494125127792358 506,"def check_spooler_service(self): ok = False service_config, service_status = self.get_service('Spooler', self.connection) if service_config['dwStartType'] == scmr.SERVICE_DISABLED: ok = True reasons = ['Spooler service disabled'] else: reasons = ['Spooler service enabled'] if service_status == scmr.SERVICE_RUNNING: reasons.append('Spooler service running') elif: ok = True reasons.append('Spooler service not running') return (ok, reasons)",False,service_status == scmr.SERVICE_STOPPED,service_status == scmr.SERVICE_NOT_RUNNING,0.6483436226844788 507,"def set_seed(args): random.seed(args.seed) np.random.seed(args.seed) torch.manual_seed(args.seed) if: torch.cuda.manual_seed_all(args.seed)",True,args.n_gpu > 0,args.n_gpu > 0,0.6485757827758789 508,"def can_handle(self, devinfo, debug=False): if: dev = USBDevice(devinfo) main, carda, cardb = self.find_device_nodes(detected_device=dev) if main is None and carda is None and (cardb is None): if debug: print('\tPRS-T1: Appears to be in non data mode or was ejected, ignoring') return False return True",False,islinux,self.is_data_mode(),0.6676485538482666 509,"def can_handle(self, devinfo, debug=False): if islinux: dev = USBDevice(devinfo) main, carda, cardb = self.find_device_nodes(detected_device=dev) if: if debug: print('\tPRS-T1: Appears to be in non data mode or was ejected, ignoring') return False return True",False,main is None and carda is None and (cardb is None),carda.status != 'ejected',0.6439000368118286 510,"def can_handle(self, devinfo, debug=False): if islinux: dev = USBDevice(devinfo) main, carda, cardb = self.find_device_nodes(detected_device=dev) if main is None and carda is None and (cardb is None): if: print('\tPRS-T1: Appears to be in non data mode or was ejected, ignoring') return False return True",True,debug,debug,0.6651346683502197 511,"def fix_model_name(model: Type[BaseModel], name: str) -> None: if: setattr(model.__config__, 'title', name) else: setattr(model, '__name__', name)",False,"isinstance(model, type(BaseModel))","issubclass(model, BaseModel)",0.6515050530433655 512,"def _is_punctuation(char): """"""Checks whether `chars` is a punctuation character."""""" cp = ord(char) if: return True cat = unicodedata.category(char) if cat.startswith('P'): return True return False",True,cp >= 33 and cp <= 47 or (cp >= 58 and cp <= 64) or (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126),cp >= 33 and cp <= 47 or (cp >= 58 and cp <= 64) or (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126),0.6487233638763428 513,"def _is_punctuation(char): """"""Checks whether `chars` is a punctuation character."""""" cp = ord(char) if cp >= 33 and cp <= 47 or (cp >= 58 and cp <= 64) or (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126): return True cat = unicodedata.category(char) if: return True return False",True,cat.startswith('P'),cat.startswith('P'),0.6423234939575195 514,"@dimension_labels.setter def dimension_labels(self, val): if: self.geometry.dimension_labels = val self._dimension_labels = val",False,"hasattr(self, 'geometry')","isinstance(val, (int, float, complex, np.number))",0.6522078514099121 515,"def _concrete_constraint(self, e): c = super()._concrete_value(e) if: return c if self._replace_constraints: er = self._replacement(e) return super()._concrete_constraint(er) else: return super()._concrete_constraint(e)",False,c is not None,c is not e,0.653294563293457 516,"def _concrete_constraint(self, e): c = super()._concrete_value(e) if c is not None: return c if: er = self._replacement(e) return super()._concrete_constraint(er) else: return super()._concrete_constraint(e)",False,self._replace_constraints,self._is_replacement(e),0.6504372358322144 517,"def get_module_name(group_name, model, key_word, exist_module_name, mpu=None, verbose=True): """""" get the associated module name from the model based on the key_word provided by users """""" return_module_name = [] for name, module in model.named_modules(): module_check = is_module_compressible(module, mpu) if: if name in exist_module_name and verbose: raise ValueError(f'{name} is already added to compression, please check your config file for {group_name}.') if name not in exist_module_name: exist_module_name.add(name) return_module_name.append(name) return (return_module_name, exist_module_name)",False,"re.search(key_word, name) is not None and module_check",module_check,0.6428780555725098 518,"def get_module_name(group_name, model, key_word, exist_module_name, mpu=None, verbose=True): """""" get the associated module name from the model based on the key_word provided by users """""" return_module_name = [] for name, module in model.named_modules(): module_check = is_module_compressible(module, mpu) if re.search(key_word, name) is not None and module_check: if: raise ValueError(f'{name} is already added to compression, please check your config file for {group_name}.') if name not in exist_module_name: exist_module_name.add(name) return_module_name.append(name) return (return_module_name, exist_module_name)",False,name in exist_module_name and verbose,verbose,0.6487796306610107 519,"def get_module_name(group_name, model, key_word, exist_module_name, mpu=None, verbose=True): """""" get the associated module name from the model based on the key_word provided by users """""" return_module_name = [] for name, module in model.named_modules(): module_check = is_module_compressible(module, mpu) if re.search(key_word, name) is not None and module_check: if name in exist_module_name and verbose: raise ValueError(f'{name} is already added to compression, please check your config file for {group_name}.') if: exist_module_name.add(name) return_module_name.append(name) return (return_module_name, exist_module_name)",False,name not in exist_module_name,not exist_module_name,0.6524373292922974 520,"def __rsub__(self, other): """""" Implementation of - operator when left operand is not a C{L{ParserElement}} """""" if: other = ParserElement._literalStringClass(other) if not isinstance(other, ParserElement): warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return other - self",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6507130265235901 521,"def __rsub__(self, other): """""" Implementation of - operator when left operand is not a C{L{ParserElement}} """""" if isinstance(other, basestring): other = ParserElement._literalStringClass(other) if: warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return other - self",True,"not isinstance(other, ParserElement)","not isinstance(other, ParserElement)",0.6486527919769287 522,"def cmd(cmd, name=None): """"""Run any command to client ""name"". """""" if: print('Please give a client name.') else: client = fabutils.select_client_cfg(name) with cd(fabutils.wd(client, CFG)): with prefix(VENV_ACTIVATE.format(client.name)): run(cmd)",True,not name,not name,0.6591615676879883 523,"def first_factorization(self, threshold: Optional[float]=None): """"""Factorize :math:`V = 1/2 \\sum_{ijkl, st}V_{ijkl} is^ jt^ kt ls` by transforming to chemist notation. Args: threshold: threshold for factorization. Returns: Tuple of (eigenvalues of factors, one-body ops in factors, one body correction). """""" if: threshold = self.icut if self.spin_basis: eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis) else: eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(0.5 * self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis) return (eigenvalues, one_body_squares, one_body_correction)",True,threshold is None,threshold is None,0.6589322090148926 524,"def first_factorization(self, threshold: Optional[float]=None): """"""Factorize :math:`V = 1/2 \\sum_{ijkl, st}V_{ijkl} is^ jt^ kt ls` by transforming to chemist notation. Args: threshold: threshold for factorization. Returns: Tuple of (eigenvalues of factors, one-body ops in factors, one body correction). """""" if threshold is None: threshold = self.icut if: eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis) else: eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(0.5 * self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis) return (eigenvalues, one_body_squares, one_body_correction)",False,self.spin_basis,"isinstance(threshold, float)",0.6463888883590698 525,"def _filter_imgs(self, min_size=32): """"""Filter images too small."""""" valid_inds = [] for i, img_info in enumerate(self.data_infos): if: valid_inds.append(i) return valid_inds",True,"min(img_info['width'], img_info['height']) >= min_size","min(img_info['width'], img_info['height']) >= min_size",0.6423009037971497 526,"def _maybe_add_bbox(obj: Dict[str, Any], ann_dict: Dict[str, Any]) -> None: if: return obj['bbox'] = ann_dict['bbox'] obj['bbox_mode'] = BoxMode.XYWH_ABS",True,'bbox' not in ann_dict,'bbox' not in ann_dict,0.657332181930542 527,"def SetupScript(self, target_arch): script_data = self._SetupScriptInternal(target_arch) script_path = script_data[0] if: raise Exception('%s is missing - make sure VC++ tools are installed.' % script_path) return script_data",True,not os.path.exists(script_path),not os.path.exists(script_path),0.6460453271865845 528,"def sendNegotiate(self, negotiateMessage): negotiate = NTLMAuthNegotiate() negotiate.fromString(negotiateMessage) negotiate['flags'] ^= NTLMSSP_NEGOTIATE_ALWAYS_SIGN challenge = NTLMAuthChallenge() if: challenge.fromString(self.sendNegotiatev1(negotiateMessage)) else: challenge.fromString(self.sendNegotiatev2(negotiateMessage)) self.sessionData['CHALLENGE_MESSAGE'] = challenge return challenge",False,self.session.getDialect() == SMB_DIALECT,self.version <= 2,0.6515882015228271 529,"def _apply_diagonal_coulomb(self, hamil: 'diagonal_coulomb.DiagonalCoulomb') -> 'Wavefunction': """"""Applies the diagonal coulomb operator to the wavefunction Args: hamil (DiagonalCoulomb): diagonal coulomb Hamiltonian to be applied Returns: (Wavefunction): resulting wave function """""" out = copy.deepcopy(self) for _, sector in out._civec.items(): diag, array = (hamil._tensor[1], hamil._tensor[2]) sector.apply_diagonal_coulomb(diag, array, inplace=True) if: out.ax_plus_y(hamil.e_0(), self) return out",False,numpy.abs(hamil.e_0()) > 1e-15,hamil.e_0 is not None,0.6430132985115051 530,"def plot_candidates(candidates, config, ts_min=50, outdir='./'): for candidate in candidates: if: continue logger.info('Plotting %s (%.2f,%.2f)...' % (candidate['name'], candidate['glon'], candidate['glat'])) plotter = ugali.utils.plotting.ObjectPlotter(candidate, config) fig, ax = plotter.plot4() basename = '%s_plot.png' % candidate['name'] outfile = os.path.join(outdir, basename) plt.savefig(outfile)",False,candidate['TS'] < ts_min,candidate['glon'] == '2f' or candidate['glat'] < ts_min,0.6493653059005737 531,"def evaluate(self, runner, new_labels): hist = np.bincount(new_labels, minlength=runner.model.module.memory_bank.num_classes) empty_cls = (hist == 0).sum() minimal_cls_size, maximal_cls_size = (hist.min(), hist.max()) if: print_log('empty_num: {}\tmin_cluster: {}\tmax_cluster:{}'.format(empty_cls.item(), minimal_cls_size.item(), maximal_cls_size.item()), logger='root')",False,runner.rank == 0,self.verbose,0.6596526503562927 532,"def __eq__(self, other): if: return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if my_val!= other_val: return False return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.64579176902771 533,"def __eq__(self, other): if not isinstance(other, self.__class__): return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if: return False return True",True,my_val != other_val,my_val != other_val,0.6494433879852295 534,"def call_init(obj, *args, **kwargs): init(obj, *args, **kwargs) if: obj.set('interval', 'never')",True,obj.parameter('interval') is None,obj.parameter('interval') is None,0.6427839994430542 535,"def custom_placeholder_format(value_dict, placeholder_match): key = placeholder_match.group(1).lower() value = value_dict.get(key, key) or '_' if: first_key = list(value.keys())[0] value = str(value[first_key][0]) if isinstance(value[first_key], list) and value[first_key] else str(value[first_key]) return str(value)[:50]",False,"isinstance(value, dict) and value","isinstance(value, dict)",0.6469836235046387 536,"def OnDeserialized(self): """""" Test deserialization success. Raises: Exception: if there are no inputs for the transaction. """""" if: raise Exception('No inputs for miner transaction')",False,len(self.inputs) is not 0,self.TransactionType == TransactionType.TFTP,0.6440246105194092 537,"def get_page_number(self) -> typing.Optional[Decimal]: """""" This function returns the page number """""" kids = self._page.get_parent().get_parent().get('Kids') l = int(self._page.get_parent().get_parent().get('Count')) for i in range(0, l): if: return Decimal(i) return None",False,kids[i] == self._page,kids[i] in self._pages[i],0.6522336006164551 538,"def preprocess(i): os_info = i['os_info'] if: return {'return': 1, 'error': 'Windows is not supported in this script yet'} env = i['env'] automation = i['automation'] recursion_spaces = i['recursion_spaces'] need_version = env.get('CM_VERSION', '') if need_version == '': return {'return': 1, 'error': 'internal problem - CM_VERSION is not defined in env'} print(recursion_spaces +' # Requested version: {}'.format(need_version)) return {'return': 0}",True,os_info['platform'] == 'windows',os_info['platform'] == 'windows',0.6515749096870422 539,"def preprocess(i): os_info = i['os_info'] if os_info['platform'] == 'windows': return {'return': 1, 'error': 'Windows is not supported in this script yet'} env = i['env'] automation = i['automation'] recursion_spaces = i['recursion_spaces'] need_version = env.get('CM_VERSION', '') if: return {'return': 1, 'error': 'internal problem - CM_VERSION is not defined in env'} print(recursion_spaces +' # Requested version: {}'.format(need_version)) return {'return': 0}",False,need_version == '',need_version is None,0.6547970771789551 540,"def format_results(self, results, jsonfile_prefix=None, **kwargs): """"""Format the results to json (standard format for COCO evaluation). Args: results (list): Testing results of the dataset. jsonfile_prefix (str | None): The prefix of json files. It includes the file path and the prefix of filename, e.g., ""a/b/prefix"". If not specified, a temp file will be created. Default: None. Returns: tuple: (result_files, tmp_dir), result_files is a dict containing the json filepaths, tmp_dir is the temporal directory created for saving json files when jsonfile_prefix is not specified. """""" assert isinstance(results, list),'results must be a list' assert len(results) == len(self), 'The length of results is not equal to the dataset len: {}!= {}'.format(len(results), len(self)) if: tmp_dir = tempfile.TemporaryDirectory() jsonfile_prefix = osp.join(tmp_dir.name,'results') else: tmp_dir = None result_files = self.results2json(results, jsonfile_prefix) return (result_files, tmp_dir)",True,jsonfile_prefix is None,jsonfile_prefix is None,0.6473140716552734 541,"def set_welc_preference(chat_id, should_welcome): with INSERTION_LOCK: curr = SESSION.query(Welcome).get(str(chat_id)) if: curr = Welcome(str(chat_id), should_welcome=should_welcome) else: curr.should_welcome = should_welcome SESSION.add(curr) SESSION.commit()",True,not curr,not curr,0.6591516733169556 542,"def _load_adapters(self, model, resume_from_checkpoint): adapter_loaded = False for file_name in os.listdir(resume_from_checkpoint): if: if ',' not in file_name and 'adapter_config.json' in os.listdir(os.path.join(resume_from_checkpoint, file_name)): model.load_adapter(os.path.join(os.path.join(resume_from_checkpoint, file_name))) adapter_loaded = True return adapter_loaded",False,"os.path.isdir(os.path.join(resume_from_checkpoint, file_name))",file_name.endswith('.json'),0.6481344699859619 543,"def _load_adapters(self, model, resume_from_checkpoint): adapter_loaded = False for file_name in os.listdir(resume_from_checkpoint): if os.path.isdir(os.path.join(resume_from_checkpoint, file_name)): if: model.load_adapter(os.path.join(os.path.join(resume_from_checkpoint, file_name))) adapter_loaded = True return adapter_loaded",False,"',' not in file_name and 'adapter_config.json' in os.listdir(os.path.join(resume_from_checkpoint, file_name))","model.__class__.__name__ in ['OriA', 'OriA']",0.6490288376808167 544,"def tearDown(self): ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts) if: raise ExecutionError('Failed to umount the vol & cleanup Volume') g.log.info('Successful in umounting the volume and Cleanup') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.6598210334777832 545,"def _prune(self): if: now = time() for idx, (key, (expires, _)) in enumerate(self._cache.items()): if expires <= now or idx % 3 == 0: self._cache.pop(key, None)",False,len(self._cache) > self._threshold,self._cache,0.6458609104156494 546,"def _prune(self): if len(self._cache) > self._threshold: now = time() for idx, (key, (expires, _)) in enumerate(self._cache.items()): if: self._cache.pop(key, None)",False,expires <= now or idx % 3 == 0,expires > self._threshold or idx > now,0.6490975022315979 547,"@property def full_address(self): addr = self.mailing_address_line_1 if: addr += f' {self.mailing_address_line_2}' return f'{addr}, {self.city}, {self.state}, {self.country}'",True,self.mailing_address_line_2,self.mailing_address_line_2,0.6500547528266907 548,"def _compare(self, other, method): if: return NotImplemented return method(self._key, other._key)",True,"not isinstance(other, _BaseVersion)","not isinstance(other, _BaseVersion)",0.6475049257278442 549,"def upButtonPressed(self): str_list = self.slm.stringList() if: str_list[self.hero_index], str_list[self.hero_index - 1] = (str_list[self.hero_index - 1], str_list[self.hero_index]) self.slm.setStringList(str_list)",False,1 <= self.hero_index < len(str_list),0 <= self.hero_index < len(str_list),0.6467466950416565 550,"def _get_digest(self, info): """""" Get a digest from a dictionary by looking at keys of the form 'algo_digest'. Returns a 2-tuple (algo, digest) if found, else None. Currently looks only for SHA256, then MD5. """""" result = None for algo in ('sha256','md5'): key = '%s_digest' % algo if: result = (algo, info[key]) break return result",True,key in info,key in info,0.6571716070175171 551,"@staticmethod def serialize_agreed_variation(agreed_variation, with_users=False): if: return agreed_variation user = User.query.filter(User.id == agreed_variation['agreedUserId']).first() if not user: return agreed_variation return dict(agreed_variation, **{'agreedUserName': user.name, 'agreedUserEmail': user.email_address})",False,not (with_users and agreed_variation.get('agreedUserId')),with_users,0.6502817273139954 552,"@staticmethod def serialize_agreed_variation(agreed_variation, with_users=False): if not (with_users and agreed_variation.get('agreedUserId')): return agreed_variation user = User.query.filter(User.id == agreed_variation['agreedUserId']).first() if: return agreed_variation return dict(agreed_variation, **{'agreedUserName': user.name, 'agreedUserEmail': user.email_address})",True,not user,not user,0.663364589214325 553,"def _set_momenta(module, momenta): if: module.momentum = momenta[module]",False,"issubclass(module.__class__, torch.nn.modules.batchnorm._BatchNorm)",module in momenta,0.6470762491226196 554,"def __init__(self, columns=None, label=None, alias=None, group=None, **kwargs): """""" """""" if: columns = [] super().__init__() self.columns = columns self.alias = alias self.group = group self._label = label self.kwargs = kwargs",True,columns is None,columns is None,0.6668776273727417 555,"def remove(self, filepath: Union[str, Path]) -> None: """"""Remove a file. Args: filepath (str or Path): Path to be removed. Raises: FileNotFoundError: If filepath does not exist, an FileNotFoundError will be raised. IsADirectoryError: If filepath is a directory, an IsADirectoryError will be raised. Examples: >>> backend = PetrelBackend() >>> filepath = 'petrel://path/of/file' >>> backend.remove(filepath) """""" if: raise NotImplementedError('Current version of Petrel Python SDK has not supported the `delete` method, please use a higher version or dev branch instead.') if not self.exists(filepath): raise FileNotFoundError(f'filepath {filepath} does not exist') if self.isdir(filepath): raise IsADirectoryError('filepath should be a file') filepath = self._map_path(filepath) filepath = self._format_path(filepath) filepath = self._replace_prefix(filepath) self._client.delete(filepath)",False,"not has_method(self._client, 'delete')",self._client is None,0.6441586017608643 556,"def remove(self, filepath: Union[str, Path]) -> None: """"""Remove a file. Args: filepath (str or Path): Path to be removed. Raises: FileNotFoundError: If filepath does not exist, an FileNotFoundError will be raised. IsADirectoryError: If filepath is a directory, an IsADirectoryError will be raised. Examples: >>> backend = PetrelBackend() >>> filepath = 'petrel://path/of/file' >>> backend.remove(filepath) """""" if not has_method(self._client, 'delete'): raise NotImplementedError('Current version of Petrel Python SDK has not supported the `delete` method, please use a higher version or dev branch instead.') if: raise FileNotFoundError(f'filepath {filepath} does not exist') if self.isdir(filepath): raise IsADirectoryError('filepath should be a file') filepath = self._map_path(filepath) filepath = self._format_path(filepath) filepath = self._replace_prefix(filepath) self._client.delete(filepath)",False,not self.exists(filepath),not os.path.exists(filepath),0.6472853422164917 557,"def remove(self, filepath: Union[str, Path]) -> None: """"""Remove a file. Args: filepath (str or Path): Path to be removed. Raises: FileNotFoundError: If filepath does not exist, an FileNotFoundError will be raised. IsADirectoryError: If filepath is a directory, an IsADirectoryError will be raised. Examples: >>> backend = PetrelBackend() >>> filepath = 'petrel://path/of/file' >>> backend.remove(filepath) """""" if not has_method(self._client, 'delete'): raise NotImplementedError('Current version of Petrel Python SDK has not supported the `delete` method, please use a higher version or dev branch instead.') if not self.exists(filepath): raise FileNotFoundError(f'filepath {filepath} does not exist') if: raise IsADirectoryError('filepath should be a file') filepath = self._map_path(filepath) filepath = self._format_path(filepath) filepath = self._replace_prefix(filepath) self._client.delete(filepath)",False,self.isdir(filepath),"not isinstance(filepath, Path)",0.6468396186828613 558,"def check_plate(self, text_list): plate_all = {'plate': []} for text_pcar in text_list: platelicense = '' for text_info in text_pcar: text = text_info[0][0][0] if: platelicense = self.replace_cn_code(text) plate_all['plate'].append(platelicense) return plate_all",False,len(text) > 2 and len(text) < 10,text,0.6466926336288452 559,"def just_fix_windows_console(): global fixed_windows_console if: return if fixed_windows_console: return if wrapped_stdout is not None or wrapped_stderr is not None: return new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) if new_stdout.convert: sys.stdout = new_stdout new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) if new_stderr.convert: sys.stderr = new_stderr fixed_windows_console = True",False,sys.platform != 'win32',"hasattr(sys, 'ps1')",0.6487252116203308 560,"def just_fix_windows_console(): global fixed_windows_console if sys.platform!= 'win32': return if: return if wrapped_stdout is not None or wrapped_stderr is not None: return new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) if new_stdout.convert: sys.stdout = new_stdout new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) if new_stderr.convert: sys.stderr = new_stderr fixed_windows_console = True",True,fixed_windows_console,fixed_windows_console,0.6512162685394287 561,"def just_fix_windows_console(): global fixed_windows_console if sys.platform!= 'win32': return if fixed_windows_console: return if: return new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) if new_stdout.convert: sys.stdout = new_stdout new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) if new_stderr.convert: sys.stderr = new_stderr fixed_windows_console = True",False,wrapped_stdout is not None or wrapped_stderr is not None,"hasattr(sys, 'stdout') and sys.stdout == 'get_stderr'",0.6467798948287964 562,"def just_fix_windows_console(): global fixed_windows_console if sys.platform!= 'win32': return if fixed_windows_console: return if wrapped_stdout is not None or wrapped_stderr is not None: return new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) if: sys.stdout = new_stdout new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) if new_stderr.convert: sys.stderr = new_stderr fixed_windows_console = True",True,new_stdout.convert,new_stdout.convert,0.6601179838180542 563,"def just_fix_windows_console(): global fixed_windows_console if sys.platform!= 'win32': return if fixed_windows_console: return if wrapped_stdout is not None or wrapped_stderr is not None: return new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) if new_stdout.convert: sys.stdout = new_stdout new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) if: sys.stderr = new_stderr fixed_windows_console = True",True,new_stderr.convert,new_stderr.convert,0.6581211090087891 564,"def _open_in_browser(self): url = get_templated_url(unicode(self.url_combo.currentText()).strip()) if: open_url(QUrl(url))",True,url,url,0.6722559332847595 565,"def sanitize_token(self, token): token_type = token['type'] if: name = token['name'] namespace = token['namespace'] if (namespace, name) in self.allowed_elements or (namespace is None and (namespaces['html'], name) in self.allowed_elements): return self.allowed_token(token) else: return self.disallowed_token(token) elif token_type == 'Comment': pass else: return token",False,"token_type in ('StartTag', 'EndTag', 'EmptyTag')",token_type == 'Text',0.6465229392051697 566,"def sanitize_token(self, token): token_type = token['type'] if token_type in ('StartTag', 'EndTag', 'EmptyTag'): name = token['name'] namespace = token['namespace'] if: return self.allowed_token(token) else: return self.disallowed_token(token) elif token_type == 'Comment': pass else: return token",False,"namespace, name) in self.allowed_elements or (namespace is None and (namespaces['html'], name) in self.allowed_elements",name.lower() in namespace.lower() and namespace.lower() in self.allowed_tokens,0.6470671892166138 567,"def sanitize_token(self, token): token_type = token['type'] if token_type in ('StartTag', 'EndTag', 'EmptyTag'): name = token['name'] namespace = token['namespace'] if (namespace, name) in self.allowed_elements or (namespace is None and (namespaces['html'], name) in self.allowed_elements): return self.allowed_token(token) else: return self.disallowed_token(token) elif: pass else: return token",False,token_type == 'Comment',token_type == 'EndTag',0.6508560180664062 568,"def forward(self, inputs): if: return (self.layers(inputs[0]), *inputs[1:]) else: return self.layers(inputs)",False,"isinstance(inputs, (list, tuple))",self.training,0.6414899230003357 569,"def applyZPNorm(metricValue, plotDict): if: if plotDict['zp'] is not None: metricValue = metricValue - plotDict['zp'] if 'normVal' in plotDict: if plotDict['normVal'] is not None: metricValue = metricValue / plotDict['normVal'] return metricValue",True,'zp' in plotDict,'zp' in plotDict,0.6547361016273499 570,"def applyZPNorm(metricValue, plotDict): if 'zp' in plotDict: if plotDict['zp'] is not None: metricValue = metricValue - plotDict['zp'] if: if plotDict['normVal'] is not None: metricValue = metricValue / plotDict['normVal'] return metricValue",True,'normVal' in plotDict,'normVal' in plotDict,0.6528257131576538 571,"def applyZPNorm(metricValue, plotDict): if 'zp' in plotDict: if: metricValue = metricValue - plotDict['zp'] if 'normVal' in plotDict: if plotDict['normVal'] is not None: metricValue = metricValue / plotDict['normVal'] return metricValue",True,plotDict['zp'] is not None,plotDict['zp'] is not None,0.6527303457260132 572,"def applyZPNorm(metricValue, plotDict): if 'zp' in plotDict: if plotDict['zp'] is not None: metricValue = metricValue - plotDict['zp'] if 'normVal' in plotDict: if: metricValue = metricValue / plotDict['normVal'] return metricValue",True,plotDict['normVal'] is not None,plotDict['normVal'] is not None,0.6515785455703735 573,"def _eval(self, part_keys: List[Dict[str, str]], part_input: Dict[str, Any]) -> Any: for key, value in zip(part_keys, part_input['Values']): if: return _cast(key['Type'], value) raise InvalidInputException('GetPartitions', f""Unknown column '{self.ident}'"")",False,self.ident == key['Name'],key['Name'] == self.ident,0.6554266214370728 574,"def get_target_label(self, input, target_is_real): """"""Get target label. Args: input (Tensor): Input tensor. target_is_real (bool): Whether the target is real or fake. Returns: (bool | Tensor): Target tensor. Return bool for wgan, otherwise, return Tensor. """""" if: return target_is_real target_val = self.real_label_val if target_is_real else self.fake_label_val return input.new_ones(input.size()) * target_val",False,self.gan_type == 'wgan',"self.gan_type in ['wgan', 'wgan_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus]",0.6473735570907593 575,"def wait_for(lock_name, value, wait=10, timeout=LOCK_EXPIRE): """"""Utility function to wait until the given lock has been released"""""" old_lock = None if: logger.warn('A task is already running. Wait for it: {}/{}'.format(lock_name, acquire_lock.lock)) old_lock = acquire_lock.lock while not acquire_lock(lock_name, value, timeout): sleep(wait) logger.debug('lock released by: {}'.format(old_lock)) return old_lock",False,"not acquire_lock(lock_name, value, timeout)",acquire_lock is not None,0.645682692527771 576,"@break_exec_regex.setter def break_exec_regex(self, break_exec_regex): """""" Setterfor break_exec_regex :param break_exec_regex: String with regex, compiled regex object or None :return: None """""" if: break_exec_regex = re.compile(break_exec_regex) self._break_exec_regex = break_exec_regex",False,"isinstance(break_exec_regex, six.string_types)",break_exec_regex is not None,0.6424634456634521 577,"def sys_x86_64_readlink(jitter, linux_env): path = jitter.cpu.RDI buf = jitter.cpu.RSI bufsize = jitter.cpu.RDX rpath = jitter.get_c_str(path) log.debug('sys_readlink(%r, %x, %x)', rpath, buf, bufsize) link = linux_env.filesystem.readlink(rpath) if: jitter.cpu.RAX = -1 else: data = link[:bufsize - 1] + b'\x00' jitter.vm.set_mem(buf, data) jitter.cpu.RAX = len(data) - 1",False,link is None,link.startswith(link),0.6551709175109863 578,"def get_test_cluster_template(context, **kw): """"""Return a ClusterTemplate object with appropriate attributes. NOTE: The object leaves the attributes marked as changed, such that a create() could be used to commit it to the DB. """""" db_cluster_template = db_utils.get_test_cluster_template(**kw) cluster_template = objects.ClusterTemplate(context) if: del db_cluster_template['id'] for key in db_cluster_template: setattr(cluster_template, key, db_cluster_template[key]) return cluster_template",True,'id' not in kw,'id' not in kw,0.6585248708724976 579,"def __missing__(self, key): if: raise KeyError(key) self[key] = value = self.default_factory() return value",True,self.default_factory is None,self.default_factory is None,0.6501814126968384 580,"def main(config): set_dirs(config) with tf.device(config.device): if: _train(config) elif config.mode == 'test' or config.mode == 'dev': _test(config) elif config.mode == 'forward': _forward(config) else: raise ValueError(""invalid value for'mode': {}"".format(config.mode))",True,config.mode == 'train',config.mode == 'train',0.657152533531189 581,"def main(config): set_dirs(config) with tf.device(config.device): if config.mode == 'train': _train(config) elif: _test(config) elif config.mode == 'forward': _forward(config) else: raise ValueError(""invalid value for'mode': {}"".format(config.mode))",False,config.mode == 'test' or config.mode == 'dev',config.mode == 'test',0.6475286483764648 582,"def main(config): set_dirs(config) with tf.device(config.device): if config.mode == 'train': _train(config) elif config.mode == 'test' or config.mode == 'dev': _test(config) elif: _forward(config) else: raise ValueError(""invalid value for'mode': {}"".format(config.mode))",True,config.mode == 'forward',config.mode == 'forward',0.6519702076911926 583,"def __getattr__(self, name): if: return self.fileobj.fileno else: raise AttributeError(name)",True,name == 'fileno',name == 'fileno',0.6563180088996887 584,"def _test_set_vlan_vid(self, vid, mask=None): header = ofproto.OXM_OF_VLAN_VID match = OFPMatch() if: match.set_vlan_vid(vid) else: header = ofproto.OXM_OF_VLAN_VID_W match.set_vlan_vid_masked(vid, mask) self._test_serialize_and_parser(match, header, vid, mask)",True,mask is None,mask is None,0.6560935974121094 585,"def get_file_name_time(self): a = str(datetime.now()) hour = a.split(' ')[-1].split(':')[0] num = int(hour) / 3 num = int(num) * 3 if: num = 24 a = str(datetime.now() - timedelta(days=1)) num = a.split(' ')[0] +'' + str(num) return num",True,num == 0,num == 0,0.6623152494430542 586,"def get_rank(): if: return 0 if not dist.is_initialized(): return 0 return dist.get_rank()",True,not dist.is_available(),not dist.is_available(),0.651805579662323 587,"def get_rank(): if not dist.is_available(): return 0 if: return 0 return dist.get_rank()",True,not dist.is_initialized(),not dist.is_initialized(),0.650885820388794 588,"def get_random_user_agent(agent_list=UA_CACHE): if: ua_file = file(UA_FILE) for line in ua_file: line = line.strip() if line: agent_list.append(line) ua = random.choice(UA_CACHE) return ua",False,not len(agent_list),os.path.exists(UA_FILE),0.6439211368560791 589,"def get_random_user_agent(agent_list=UA_CACHE): if not len(agent_list): ua_file = file(UA_FILE) for line in ua_file: line = line.strip() if: agent_list.append(line) ua = random.choice(UA_CACHE) return ua",True,line,line,0.6648772954940796 590,"def aliases_to_ordered_dict(_d): """""" Unpacks a dict-with-lists to an ordered dict with keys sorted by length """""" arr = [] for original, aliases in _d.items(): arr.append((original, original)) if: aliases = [original] elif isinstance(aliases, str): aliases = [aliases] for alias in aliases: arr.append((alias, original)) return OrderedDict(sorted(arr, key=lambda _kv: 0 - len(_kv[0])))",False,"isinstance(aliases, bool)",not aliases,0.6411546468734741 591,"def aliases_to_ordered_dict(_d): """""" Unpacks a dict-with-lists to an ordered dict with keys sorted by length """""" arr = [] for original, aliases in _d.items(): arr.append((original, original)) if isinstance(aliases, bool): aliases = [original] elif: aliases = [aliases] for alias in aliases: arr.append((alias, original)) return OrderedDict(sorted(arr, key=lambda _kv: 0 - len(_kv[0])))",False,"isinstance(aliases, str)","isinstance(aliases, list)",0.6433718800544739 592,"def stop(self) -> bool: if: return run([join(sep, 'usr','sbin', 'nginx'), '-s','stop'], stdin=DEVNULL, stderr=STDOUT, check=False).returncode == 0 return self.apiCaller.send_to_apis('POST', '/stop')",True,self._type == 'local',self._type == 'local',0.6516494750976562 593,"def decode(self, label: bytes) -> str: """"""Decode *label*."""""" if: return super().decode(label) if label == b'': return '' try: return _escapify(encodings.idna.ToUnicode(label)) except Exception as e: raise IDNAException(idna_exception=e)",False,not self.strict_decode,"isinstance(label, bytes)",0.6460101008415222 594,"def decode(self, label: bytes) -> str: """"""Decode *label*."""""" if not self.strict_decode: return super().decode(label) if: return '' try: return _escapify(encodings.idna.ToUnicode(label)) except Exception as e: raise IDNAException(idna_exception=e)",False,label == b'',len(label) == 0,0.6557474732398987 595,"@cached_property def _lsb_release_info(self): """""" Get the information items from the lsb_release command output. Returns: A dictionary containing all information items. """""" if: return {} with open(os.devnull, 'w') as devnull: try: cmd = ('lsb_release', '-a') stdout = subprocess.check_output(cmd, stderr=devnull) except OSError: return {} content = stdout.decode(sys.getfilesystemencoding()).splitlines() return self._parse_lsb_release_content(content)",True,not self.include_lsb,not self.include_lsb,0.6448009014129639 596,"def extract_img_feat(self, img): """"""Directly extract features from the img backbone+neck."""""" x = self.img_backbone(img) if: x = self.img_neck(x) return x",True,self.with_img_neck,self.with_img_neck,0.6567720770835876 597,"def find_by_tag(tag_str, to_output_list=True, only_explicit_tag=False) -> List[SiacNote]: if: return [] pinned = [] if not get_index() else get_index().pinned tags = tag_str.split(' ') query = _tag_query(tags) conn = _get_connection() res = conn.execute('select * from notes %s order by id desc' % query).fetchall() conn.close() if not to_output_list: return res return _to_notes(res, pinned)",False,len(tag_str.strip()) == 0,only_explicit_tag,0.648457407951355 598,"def find_by_tag(tag_str, to_output_list=True, only_explicit_tag=False) -> List[SiacNote]: if len(tag_str.strip()) == 0: return [] pinned = [] if not get_index() else get_index().pinned tags = tag_str.split(' ') query = _tag_query(tags) conn = _get_connection() res = conn.execute('select * from notes %s order by id desc' % query).fetchall() conn.close() if: return res return _to_notes(res, pinned)",False,not to_output_list,only_explicit_tag,0.6490492820739746 599,"def _advance(self): self._top = (self._top + 1) % self._max_buffer_size if: self._size += 1",True,self._size < self._max_buffer_size,self._size < self._max_buffer_size,0.6476047039031982 600,"@GlancesPluginModel._log_result_decorator def update(self): """"""Update core stats. Stats is a dict (with both physical and log cpu number) instead of a integer. """""" stats = self.get_init_value() if: try: stats['phys'] = psutil.cpu_count(logical=False) stats['log'] = psutil.cpu_count() except NameError: self.reset() elif self.input_method =='snmp': pass self.stats = stats return self.stats",False,self.input_method == 'local',self.input_method == 'physical',0.6471355557441711 601,"@GlancesPluginModel._log_result_decorator def update(self): """"""Update core stats. Stats is a dict (with both physical and log cpu number) instead of a integer. """""" stats = self.get_init_value() if self.input_method == 'local': try: stats['phys'] = psutil.cpu_count(logical=False) stats['log'] = psutil.cpu_count() except NameError: self.reset() elif: pass self.stats = stats return self.stats",True,self.input_method == 'snmp',self.input_method == 'snmp',0.6467959880828857 602,"def arg_col_level(v_self): columns = v_self.columns if: yield from Select(range(0, columns.nlevels))",False,columns.nlevels > 1,columns,0.6525924205780029 603,"def pairwise(sents): _ref, _hypo = ([], []) for s in sents: for i in range(len(s)): for j in range(len(s)): if: _ref.append(s[i]) _hypo.append(s[j]) return corpus_bleu(_hypo, [_ref])",True,i != j,i != j,0.6714960336685181 604,"def _read(self, addr, length, pad=False): offset = self.translate(addr) if: if pad: return '\x00' * length else: return None win32file.SetFilePointer(self.fhandle, offset, 0) data = win32file.ReadFile(self.fhandle, length)[1] return data",False,offset == None,not self.fhandle,0.657290518283844 605,"def _read(self, addr, length, pad=False): offset = self.translate(addr) if offset == None: if: return '\x00' * length else: return None win32file.SetFilePointer(self.fhandle, offset, 0) data = win32file.ReadFile(self.fhandle, length)[1] return data",True,pad,pad,0.6718904376029968 606,"def bind_processor(self, dialect): def process(value): if: return None elif isinstance(value, basestring): return value elif dialect.datetimeformat == 'internal': return value.strftime('%Y%m%d') elif dialect.datetimeformat == 'iso': return value.strftime('%Y-%m-%d') else: raise exc.InvalidRequestError(""datetimeformat '%s' is not supported."" % (dialect.datetimeformat,)) return process",True,value is None,value is None,0.6555759906768799 607,"def bind_processor(self, dialect): def process(value): if value is None: return None elif: return value elif dialect.datetimeformat == 'internal': return value.strftime('%Y%m%d') elif dialect.datetimeformat == 'iso': return value.strftime('%Y-%m-%d') else: raise exc.InvalidRequestError(""datetimeformat '%s' is not supported."" % (dialect.datetimeformat,)) return process",False,"isinstance(value, basestring)",dialect.datetimeformat == 'internal',0.6456430554389954 608,"def bind_processor(self, dialect): def process(value): if value is None: return None elif isinstance(value, basestring): return value elif: return value.strftime('%Y%m%d') elif dialect.datetimeformat == 'iso': return value.strftime('%Y-%m-%d') else: raise exc.InvalidRequestError(""datetimeformat '%s' is not supported."" % (dialect.datetimeformat,)) return process",True,dialect.datetimeformat == 'internal',dialect.datetimeformat == 'internal',0.6472441554069519 609,"def bind_processor(self, dialect): def process(value): if value is None: return None elif isinstance(value, basestring): return value elif dialect.datetimeformat == 'internal': return value.strftime('%Y%m%d') elif: return value.strftime('%Y-%m-%d') else: raise exc.InvalidRequestError(""datetimeformat '%s' is not supported."" % (dialect.datetimeformat,)) return process",True,dialect.datetimeformat == 'iso',dialect.datetimeformat == 'iso',0.647171676158905 610,"def srr_to_srx(self, srr, **kwargs): """"""Get SRX for a SRR"""""" if: srr = [srr] srr_df = self.sra_metadata(srr) srr_df = srr_df.loc[srr_df['run_accession'].isin(srr)] return _order_first(srr_df, ['run_accession', 'experiment_accession'])",False,"isinstance(srr, str)","not isinstance(srr, str)",0.6532561779022217 611,"def bar_update(count, block_size, total_size): if: pbar.total = total_size progress_bytes = count * block_size pbar.update(progress_bytes - pbar.n)",True,pbar.total is None and total_size,pbar.total is None and total_size,0.6473742723464966 612,"def __init__(self, url, **kwargs): mockread.reset_mock() if: self.a = url else: self.code = 200 self.msg = 'Ok' self.a = BytesIO(url)",False,"isinstance(url, Exception)",url.startswith('file://'),0.6497751474380493 613,"def on_dir_btn(self): fname = str(QFileDialog.getExistingDirectory(self, 'Select your markdown folder')) if: fname = fname.replace('\\', '/') self.md_source_input.setText(fname)",False,fname is not None and len(fname) > 0,fname is not None,0.6459832787513733 614,"def get_breathing_rate(self, botengine): """""" Retrieve the most recent breathing_rate value :param botengine: :return: """""" if: return self.measurements[HealthDevice.MEASUREMENT_NAME_BREATHING_RATE][0][0] return None",True,HealthDevice.MEASUREMENT_NAME_BREATHING_RATE in self.measurements,HealthDevice.MEASUREMENT_NAME_BREATHING_RATE in self.measurements,0.6496018171310425 615,"def _check_and_update(key, value): assert value is not None if: if not _known_status[key] == value: raise RuntimeError('Confilict status for {}, existing status {}, new status {}'.format(key, _known_status[key], value)) _known_status[key] = value",True,key in _known_status,key in _known_status,0.6564457416534424 616,"def _check_and_update(key, value): assert value is not None if key in _known_status: if: raise RuntimeError('Confilict status for {}, existing status {}, new status {}'.format(key, _known_status[key], value)) _known_status[key] = value",False,not _known_status[key] == value,value != _known_status[key],0.6491991281509399 617,"def GetChangeAddress(self, from_addr=None): """""" Get the address where change is send to. Args: from_address (UInt160): (optional) from address script hash. Raises: Exception: if change address could not be found. Returns: UInt160: script hash. """""" if: for contract in self._contracts.values(): if contract.ScriptHash == from_addr: return contract.ScriptHash for contract in self._contracts.values(): if contract.IsStandard: return contract.ScriptHash if len(self._contracts.values()): for k, v in self._contracts.items(): return v raise Exception('Could not find change address')",False,from_addr is not None,from_addr,0.6465224027633667 618,"def GetChangeAddress(self, from_addr=None): """""" Get the address where change is send to. Args: from_address (UInt160): (optional) from address script hash. Raises: Exception: if change address could not be found. Returns: UInt160: script hash. """""" if from_addr is not None: for contract in self._contracts.values(): if contract.ScriptHash == from_addr: return contract.ScriptHash for contract in self._contracts.values(): if contract.IsStandard: return contract.ScriptHash if: for k, v in self._contracts.items(): return v raise Exception('Could not find change address')",False,len(self._contracts.values()),self.GetChangeAddress(),0.6425373554229736 619,"def GetChangeAddress(self, from_addr=None): """""" Get the address where change is send to. Args: from_address (UInt160): (optional) from address script hash. Raises: Exception: if change address could not be found. Returns: UInt160: script hash. """""" if from_addr is not None: for contract in self._contracts.values(): if contract.ScriptHash == from_addr: return contract.ScriptHash for contract in self._contracts.values(): if: return contract.ScriptHash if len(self._contracts.values()): for k, v in self._contracts.items(): return v raise Exception('Could not find change address')",False,contract.IsStandard,contract.ScriptHash == from_addr,0.6464129090309143 620,"def GetChangeAddress(self, from_addr=None): """""" Get the address where change is send to. Args: from_address (UInt160): (optional) from address script hash. Raises: Exception: if change address could not be found. Returns: UInt160: script hash. """""" if from_addr is not None: for contract in self._contracts.values(): if: return contract.ScriptHash for contract in self._contracts.values(): if contract.IsStandard: return contract.ScriptHash if len(self._contracts.values()): for k, v in self._contracts.items(): return v raise Exception('Could not find change address')",False,contract.ScriptHash == from_addr,contract.IsStandard,0.6496742367744446 621,"def get_wave_name(waves, body): wave_name = '' for wave in waves: if: wave_name += extract_alnum(wave['wave_name']) return wave_name",False,str(wave['wave_id']) == body['waveid'],'wave_name' in wave,0.6482675075531006 622,"def power_connectable(other): if: return False if cur_pole.global_position.x!= other.global_position.x and cur_pole.global_position.y!= other.global_position.y and only_axis: return False dist = distance(cur_pole.global_position.data, other.global_position.data) min_dist = min(cur_pole.maximum_wire_distance, other.maximum_wire_distance) return dist <= min_dist",False,other is cur_pole,"not isinstance(other, Pole)",0.6570698022842407 623,"def power_connectable(other): if other is cur_pole: return False if: return False dist = distance(cur_pole.global_position.data, other.global_position.data) min_dist = min(cur_pole.maximum_wire_distance, other.maximum_wire_distance) return dist <= min_dist",False,cur_pole.global_position.x != other.global_position.x and cur_pole.global_position.y != other.global_position.y and only_axis,cur_pole.is_in_wire(),0.6506286859512329 624,"def is_next_letter_hun(a: str, b: str) -> bool: if: return True if len(a) == 1 and len(b) == 1 and (ord(a) + 1 == ord(b)): return True return False",False,"(a, b) in SPECIAL_NEXT_LETTER_PAIRS",a == b,0.6456416845321655 625,"def is_next_letter_hun(a: str, b: str) -> bool: if (a, b) in SPECIAL_NEXT_LETTER_PAIRS: return True if: return True return False",False,len(a) == 1 and len(b) == 1 and (ord(a) + 1 == ord(b)),"(a, b) in SPECIAL_NEXT_LETTER_PAIRS",0.6467128396034241 626,"def set_reward_shaping(self, reward_shaping: dict, agent_indices: Union[int, slice]): if: agent_indices = slice(agent_indices, agent_indices + 1) for agent_idx in range(agent_indices.start, agent_indices.stop): self.current_reward_shaping[agent_idx] = reward_shaping self.set_env_attr(agent_idx, 'unwrapped.reward_shaping_interface.reward_shaping_scheme', reward_shaping)",True,"isinstance(agent_indices, int)","isinstance(agent_indices, int)",0.6505836248397827 627,"def GetValue(self, actor, timeStamp=None): """""" Returns the value associated with a specified ""actor"" at a specified ""timeStamp"". If no timeStamp is specified, then the function will return all values associated with the specified actor at all time stamps. Parameters ---------- actor: str timeStamp: float Returns ------- self.stream.loc[self.timeStamp, actor] or self.stream.loc[timeStamp, actor]: list or float, respectively. """""" assert actor in self.stream.columns if: return self.stream.loc[self.timeStamp, actor] else: assert timeStamp in self.stream.index return self.stream.loc[timeStamp, actor]",True,timeStamp is None,timeStamp is None,0.6613017320632935 628,"def bootstrap_on_err_cb(exc: Exception) -> None: nonlocal retries if: retries += 1 self._logger.warning('Failed bootstrap phase; try=%s max_retries=%s', retries, max_retries) else: self._logger.error('Failed bootstrap phase after %s retries (%s)', retries, exc) raise exc",False,"not isinstance(exc, InvalidToken) and (max_retries < 0 or retries < max_retries)","isinstance(exc, Exception) and retries <= max_retries",0.6490060091018677 629,"@cached def relation_to_role_and_interface(relation_name): """""" Given the name of a relation, return the role and the name of the interface that relation uses (where role is one of ``provides``, ``requires``, or ``peers``). :returns: A tuple containing ``(role, interface)``, or ``(None, None)``. """""" _metadata = metadata() for role in ('provides','requires', 'peers'): interface = _metadata.get(role, {}).get(relation_name, {}).get('interface') if: return (role, interface) return (None, None)",True,interface,interface,0.6747817993164062 630,"def run_safety_checker(self, image, dtype): if: has_nsfw_concept = None else: if paddle.is_tensor(x=image): feature_extractor_input = self.image_processor.postprocess(image, output_type='pil') else: feature_extractor_input = self.image_processor.numpy_to_pil(image) safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors='pd') image, has_nsfw_concept = self.safety_checker(images=image, clip_input=safety_checker_input.pixel_values.cast(dtype)) return (image, has_nsfw_concept)",True,self.safety_checker is None,self.safety_checker is None,0.6455807685852051 631,"def run_safety_checker(self, image, dtype): if self.safety_checker is None: has_nsfw_concept = None else: if: feature_extractor_input = self.image_processor.postprocess(image, output_type='pil') else: feature_extractor_input = self.image_processor.numpy_to_pil(image) safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors='pd') image, has_nsfw_concept = self.safety_checker(images=image, clip_input=safety_checker_input.pixel_values.cast(dtype)) return (image, has_nsfw_concept)",False,paddle.is_tensor(x=image),paddle.is_tensor(image),0.6434617042541504 632,"def construct_search(field_name): if: return '%s__istartswith' % field_name[1:] elif field_name.startswith('='): return '%s__iexact' % field_name[1:] elif field_name.startswith('@'): return '%s__search' % field_name[1:] else: return '%s__icontains' % field_name",False,field_name.startswith('^'),field_name.startswith('istartswith='),0.6434298157691956 633,"def construct_search(field_name): if field_name.startswith('^'): return '%s__istartswith' % field_name[1:] elif: return '%s__iexact' % field_name[1:] elif field_name.startswith('@'): return '%s__search' % field_name[1:] else: return '%s__icontains' % field_name",False,field_name.startswith('='),field_name.startswith('iexact'),0.648019015789032 634,"def construct_search(field_name): if field_name.startswith('^'): return '%s__istartswith' % field_name[1:] elif field_name.startswith('='): return '%s__iexact' % field_name[1:] elif: return '%s__search' % field_name[1:] else: return '%s__icontains' % field_name",False,field_name.startswith('@'),field_name.startswith('.'),0.6440175771713257 635,"def encrypt(self, value, precision=None, random_value=None): """"""Encode and Paillier encrypt a real number value. """""" if: value = value.decode() encoding = FixedPointNumber.encode(value, self.n, self.max_int, precision) obfuscator = random_value or 1 ciphertext = self.raw_encrypt(encoding.encoding, random_value=obfuscator) encryptednumber = PaillierEncryptedNumber(self, ciphertext, encoding.exponent) if random_value is None: encryptednumber.apply_obfuscator() return encryptednumber",False,"isinstance(value, FixedPointNumber)","isinstance(value, bytes)",0.6498298645019531 636,"def encrypt(self, value, precision=None, random_value=None): """"""Encode and Paillier encrypt a real number value. """""" if isinstance(value, FixedPointNumber): value = value.decode() encoding = FixedPointNumber.encode(value, self.n, self.max_int, precision) obfuscator = random_value or 1 ciphertext = self.raw_encrypt(encoding.encoding, random_value=obfuscator) encryptednumber = PaillierEncryptedNumber(self, ciphertext, encoding.exponent) if: encryptednumber.apply_obfuscator() return encryptednumber",False,random_value is None,encoding.encoding == 'gzip',0.6471483707427979 637,"def init_data(netsettings): init_file() if: netrender.init_data = False netsettings.active_slave_index = 0 while len(netsettings.slaves) > 0: netsettings.slaves.remove(0) netsettings.active_blacklisted_slave_index = 0 while len(netsettings.slaves_blacklist) > 0: netsettings.slaves_blacklist.remove(0) netsettings.active_job_index = 0 while len(netsettings.jobs) > 0: netsettings.jobs.remove(0)",True,netrender.init_data,netrender.init_data,0.6481001377105713 638,"def add_module(self, module_name): node = self.__graph.find_node(module_name) if: return node self.__updated = True return self.__graph.add_module(module_name)",True,node is not None,node is not None,0.6537237167358398 639,"def call_get_disk_usage(args): """"""Main method for getting disk usage."""""" disk_usage = get_disk_usage(args.dir) if: return 1 print(disk_usage) return 0",True,disk_usage is None,disk_usage is None,0.6504016518592834 640,"@_app.callback(Output('vp-upper-left', 'value'), [Input('vp-graph', 'figure')], [State('vp-bound-val', 'value')]) def update_upper_left_number(fig, bounds): """"""Update the number of data points in the upper left corner."""""" l_lim = bounds[0] number = 0 if: x = np.array(fig['data'][0]['x']) idx = x < float(l_lim) number = len(x[idx]) return number",False,len(fig['data']) > 1,l_lim is not None,0.6525425910949707 641,"def process_batch(batch): for message in batch.messages: if: return _process(message) reader.commit(batch)",False,not batch.alive,not message.alive,0.6508684158325195 642,"def build_detector(cfg, train_cfg=None, test_cfg=None): """"""Build detector."""""" if: warnings.warn('train_cfg and test_cfg is deprecated, please specify them in model', UserWarning) assert cfg.get('train_cfg') is None or train_cfg is None, 'train_cfg specified in both outer field and model field ' assert cfg.get('test_cfg') is None or test_cfg is None, 'test_cfg specified in both outer field and model field ' return build(cfg, DETECTORS, dict(train_cfg=train_cfg, test_cfg=test_cfg))",True,train_cfg is not None or test_cfg is not None,train_cfg is not None or test_cfg is not None,0.6514482498168945 643,"@classmethod def to_stream(cls, instance, stream, context=None, arg=0, template=None): if: cls._storage.to_stream(instance.value, stream, context) else: logging.debug(f'instance {instance} is not a member of the {cls} class, writing int') cls._storage.to_stream(int(instance), stream, context) return instance",False,"isinstance(instance, cls)","issubclass(instance, cls)",0.649711549282074 644,"@property def public_key(self): """""" :return: The PublicKey object for the public key this certificate contains """""" if: self._public_key = load_public_key(self.asn1['tbs_certificate']['subject_public_key_info']) return self._public_key",True,self._public_key is None,self._public_key is None,0.6504393815994263 645,"def __init__(self, data_schema: Dict[str, SizeData], fpath: str, mode: str): self.fpath = fpath assert 'b' in mode, f""Tensor storage should be opened in binary mode, got '{mode}'"" if: file_h = PathManager.open(fpath, mode) elif 'r' in mode: local_fpath = PathManager.get_local_path(fpath) file_h = open(local_fpath, mode) else: raise ValueError(f'Unsupported file mode {mode}, supported modes: rb, wb') super().__init__(data_schema, file_h)",False,'w' in mode,'b' in mode,0.6633249521255493 646,"def __init__(self, data_schema: Dict[str, SizeData], fpath: str, mode: str): self.fpath = fpath assert 'b' in mode, f""Tensor storage should be opened in binary mode, got '{mode}'"" if 'w' in mode: file_h = PathManager.open(fpath, mode) elif: local_fpath = PathManager.get_local_path(fpath) file_h = open(local_fpath, mode) else: raise ValueError(f'Unsupported file mode {mode}, supported modes: rb, wb') super().__init__(data_schema, file_h)",False,'r' in mode,'rb' in mode,0.6604881882667542 647,"def add_env_value(self, key, value): """""" add key, value pair to env array """""" rval = False env = self.get_env_vars() if: env.append({'name': key, 'value': value}) rval = True else: result = self.put(DeploymentConfig.env_path, {'name': key, 'value': value}) rval = result[0] return rval",True,env,env,0.6736887693405151 648,"def __setitem__(self, key, value): """"""Store item in sorted dict with `key` and corresponding `value`. ``sd.__setitem__(key, value)`` <==> ``sd[key] = value`` Runtime complexity: `O(log(n))` -- approximate. >>> sd = SortedDict() >>> sd['c'] = 3 >>> sd['a'] = 1 >>> sd['b'] = 2 >>> sd SortedDict({'a': 1, 'b': 2, 'c': 3}) :param key: key for item :param value: value for item """""" if: self._list_add(key) dict.__setitem__(self, key, value)",True,key not in self,key not in self,0.6586028337478638 649,"def encode(self): if: return False v = int(self.expr) v = self.encodeval(v & self.lmask) self.value = v & self.lmask return True",True,"not isinstance(self.expr, ExprInt)","not isinstance(self.expr, ExprInt)",0.6470715999603271 650,"def insertComment(self, token, parent=None): if: parent = self.openElements[-1] parent.appendChild(self.commentClass(token['data']))",True,parent is None,parent is None,0.6628707647323608 651,"def _stripMimeTypeExtension(self, mime_type: MimeType, file_name: str) -> str: suffixes = mime_type.suffixes[:] if: suffixes.append(mime_type.preferredSuffix) for suffix in suffixes: if file_name.endswith(suffix): return file_name[:-len(suffix) - 1] return file_name",True,mime_type.preferredSuffix,mime_type.preferredSuffix,0.6443555355072021 652,"def _stripMimeTypeExtension(self, mime_type: MimeType, file_name: str) -> str: suffixes = mime_type.suffixes[:] if mime_type.preferredSuffix: suffixes.append(mime_type.preferredSuffix) for suffix in suffixes: if: return file_name[:-len(suffix) - 1] return file_name",True,file_name.endswith(suffix),file_name.endswith(suffix),0.6435981392860413 653,"def get_vtable_element_size(self): code = self.flags & (SWI_V32 | SWI_VSIZE) if: return 2 elif code == SWI_V32: return 4 elif code == SWI_VSIZE: return 1 return 8",True,code == 0,code == 0,0.6649768352508545 654,"def get_vtable_element_size(self): code = self.flags & (SWI_V32 | SWI_VSIZE) if code == 0: return 2 elif: return 4 elif code == SWI_VSIZE: return 1 return 8",True,code == SWI_V32,code == SWI_V32,0.6565409302711487 655,"def get_vtable_element_size(self): code = self.flags & (SWI_V32 | SWI_VSIZE) if code == 0: return 2 elif code == SWI_V32: return 4 elif: return 1 return 8",True,code == SWI_VSIZE,code == SWI_VSIZE,0.6576449871063232 656,"def str_extract(arr, pat, flags=0, expand=True): """""" Extract capture groups in the regex `pat` as columns in a DataFrame. For each subject string in the Series, extract groups from the first match of regular expression `pat`. Parameters ---------- pat : str Regular expression pattern with capturing groups. flags : int, default 0 (no flags) Flags from the ``re`` module, e.g. ``re.IGNORECASE``, that modify regular expression matching for things like case, spaces, etc. For more details, see :mod:`re`. expand : bool, default True If True, return DataFrame with one column per capture group. If False, return a Series/Index if there is one capture group or DataFrame if there are multiple capture groups. Returns ------- DataFrame or Series or Index A DataFrame with one row for each subject string, and one column for each group. Any capture group names in regular expression pat will be used for column names; otherwise capture group numbers will be used. The dtype of each result column is always object, even when no match is found. If ``expand=False`` and pat has only one capture group, then return a Series (if subject is a Series) or Index (if subject is an Index). See Also -------- extractall : Returns all matches (not just the first match). Examples -------- A pattern with two groups will return a DataFrame with two columns. Non-matches will be NaN. >>> s = pd.Series(['a1', 'b2', 'c3']) >>> s.str.extract(r'([ab])(\\d)') 0 1 0 a 1 1 b 2 2 NaN NaN A pattern may contain optional groups. >>> s.str.extract(r'([ab])?(\\d)') 0 1 0 a 1 1 b 2 2 NaN 3 Named groups will become column names in the result. >>> s.str",False,"not isinstance(expand, bool)",expand,0.6443957090377808 657,"def str_extract(arr, pat, flags=0, expand=True): """""" Extract capture groups in the regex `pat` as columns in a DataFrame. For each subject string in the Series, extract groups from the first match of regular expression `pat`. Parameters ---------- pat : str Regular expression pattern with capturing groups. flags : int, default 0 (no flags) Flags from the ``re`` module, e.g. ``re.IGNORECASE``, that modify regular expression matching for things like case, spaces, etc. For more details, see :mod:`re`. expand : bool, default True If True, return DataFrame with one column per capture group. If False, return a Series/Index if there is one capture group or DataFrame if there are multiple capture groups. Returns ------- DataFrame or Series or Index A DataFrame with one row for each subject string, and one column for each group. Any capture group names in regular expression pat will be used for column names; otherwise capture group numbers will be used. The dtype of each result column is always object, even when no match is found. If ``expand=False`` and pat has only one capture group, then return a Series (if subject is a Series) or Index (if subject is an Index). See Also -------- extractall : Returns all matches (not just the first match). Examples -------- A pattern with two groups will return a DataFrame with two columns. Non-matches will be NaN. >>> s = pd.Series(['a1', 'b2', 'c3']) >>> s.str.extract(r'([ab])(\\d)') 0 1 0 a 1 1 b 2 2 NaN NaN A pattern may contain optional groups. >>> s.str.extract(r'([ab])?(\\d)') 0 1 0 a 1 1 b 2 2 NaN 3 Named groups will become column names in the result. >>> s.str",True,expand,expand,0.6749656200408936 658,"def eval(self, body, domain): jsPayload = template(body, domain) if: logging.warning('WARNING - Please upgrade your js2py https://github.com/PiotrDabkowski/Js2Py, applying work around for the meantime.') jsPayload = jsunfuck(jsPayload) def atob(s): return base64.b64decode('{}'.format(s)).decode('utf-8') js2py.disable_pyimport() context = js2py.EvalJs({'atob': atob}) result = context.eval(jsPayload) return result",False,js2py.eval_js('(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]') == '1','Ubuntu' in jsPayload,0.6551860570907593 659,"def index_pix_in_pixels(pix, pixels, sort=False, outside=-1): """""" Find the indices of a set of pixels into another set of pixels. !!! ASSUMES SORTED PIXELS!!! Parameters: ----------- pix : set of search pixels pixels : set of reference pixels Returns: -------- index : index into the reference pixels """""" if: pixels = np.sort(pixels) index = np.searchsorted(pixels, pix) if np.isscalar(index): if not np.in1d(pix, pixels).any(): index = outside else: index[~np.in1d(pix, pixels)] = outside return index",True,sort,sort,0.6685218811035156 660,"def index_pix_in_pixels(pix, pixels, sort=False, outside=-1): """""" Find the indices of a set of pixels into another set of pixels. !!! ASSUMES SORTED PIXELS!!! Parameters: ----------- pix : set of search pixels pixels : set of reference pixels Returns: -------- index : index into the reference pixels """""" if sort: pixels = np.sort(pixels) index = np.searchsorted(pixels, pix) if: if not np.in1d(pix, pixels).any(): index = outside else: index[~np.in1d(pix, pixels)] = outside return index",False,np.isscalar(index),index is None,0.6454963684082031 661,"def index_pix_in_pixels(pix, pixels, sort=False, outside=-1): """""" Find the indices of a set of pixels into another set of pixels. !!! ASSUMES SORTED PIXELS!!! Parameters: ----------- pix : set of search pixels pixels : set of reference pixels Returns: -------- index : index into the reference pixels """""" if sort: pixels = np.sort(pixels) index = np.searchsorted(pixels, pix) if np.isscalar(index): if: index = outside else: index[~np.in1d(pix, pixels)] = outside return index",False,"not np.in1d(pix, pixels).any()",invert,0.6456348896026611 662,"@type.setter def type(self, type): """"""Sets the type of this Assignment. :param type: The type of this Assignment. :type type: str """""" allowed_values = ['user', 'group'] if: raise ValueError('Invalid value for `type` ({0}), must be one of {1}'.format(type, allowed_values)) self._type = type",True,type not in allowed_values,type not in allowed_values,0.6552023887634277 663,"def _save_response_content(response:'requests.models.Response', destination: str, chunk_size: int=32768) -> None: with open(destination, 'wb') as f: pbar = tqdm(total=None) progress = 0 for chunk in response.iter_content(chunk_size): if: f.write(chunk) progress += len(chunk) pbar.update(progress - pbar.n) pbar.close()",True,chunk,chunk,0.6597679853439331 664,"def is_nfs_ganesha_cluster_in_bad_state(mnode): """""" Checks whether nfs ganesha cluster is in bad state. Args: mnode (str): Node in which cmd command will be executed. Returns: bool : True if nfs ganesha cluster is in bad state. False otherwise Example: is_nfs_ganesha_cluster_in_bad_state(mnode) """""" cmd = '/usr/libexec/ganesha/ganesha-ha.sh --status'+ '/run/gluster/shared_storage/nfs-ganesha/ | grep'+ "" 'Cluster HA Status' | cut -d'' -f 4 "" retcode, stdout, _ = g.run(mnode, cmd) if: g.log.error('Failed to execute nfs-ganesha status command to check if cluster is in bad state') return False if stdout.strip('\n')!= 'BAD': g.log.error('nfs-ganesha cluster is not in bad state. Current cluster state: %s'% stdout) return False",True,retcode != 0,retcode != 0,0.657815158367157 665,"def is_nfs_ganesha_cluster_in_bad_state(mnode): """""" Checks whether nfs ganesha cluster is in bad state. Args: mnode (str): Node in which cmd command will be executed. Returns: bool : True if nfs ganesha cluster is in bad state. False otherwise Example: is_nfs_ganesha_cluster_in_bad_state(mnode) """""" cmd = '/usr/libexec/ganesha/ganesha-ha.sh --status'+ '/run/gluster/shared_storage/nfs-ganesha/ | grep'+ "" 'Cluster HA Status' | cut -d'' -f 4 "" retcode, stdout, _ = g.run(mnode, cmd) if retcode!= 0: g.log.error('Failed to execute nfs-ganesha status command to check if cluster is in bad state') return False if: g.log.error('nfs-ganesha cluster is not in bad state. Current cluster state: %s'% stdout) return False",False,stdout.strip('\n') != 'BAD',retcode == 0 and stdout,0.6454600095748901 666,"def set_timeout(self): """"""Sets an alarm to time out the test"""""" if: self.vprint(2,'setting plugin timeout to 1 second') else: self.vprint(2,'setting plugin timeout to %s seconds' % self.timeout) signal.signal(signal.SIGALRM, self.sighandler) signal.alarm(self.timeout)",True,self.timeout == 1,self.timeout == 1,0.6588247418403625 667,"def __init__(self, value, type_id, info_offset, data_offset=None, bit_offset=None, enum_string=None, hash_string=None): self.value = value self.type_id = type_id self.info_offset = info_offset if: self.data_offset = info_offset else: self.data_offset = data_offset self.bit_offset = bit_offset self.enum_string = enum_string self.hash_string = hash_string",True,data_offset is None,data_offset is None,0.6573934555053711 668,"def __header_lang(self, line, state): """""" @param line: The current line in GedLine format @type line: GedLine @param state: The current state @type state: CurrentState """""" if: sattr = SrcAttribute() sattr.set_type(_('Language of GEDCOM text')) sattr.set_value(line.data) self.def_src.add_attribute(sattr)",True,self.use_def_src,self.use_def_src,0.651429295539856 669,"def __getattr__(self, name): if: return self else: raise AttributeError(name)",True,name == 'dict',name == 'dict',0.6522465944290161 670,"def withGSParams(self, gsparams=None, **kwargs): """"""Create a version of the current interpolant with the given gsparams """""" if: return self from copy import copy ret = copy(self) ret._gsparams = GSParams.check(gsparams, self.gsparams, **kwargs) return ret",True,gsparams == self.gsparams,gsparams == self.gsparams,0.6543525457382202 671,"def epoch_start(self, epoch): if: self.dae_scheduler.step() self.vae_scheduler.step()",False,epoch > self.cfg.sde.warmup_epochs,self.scheduler != None,0.6449305415153503 672,"def release_conn(self): if: return self._pool._put_conn(self._connection) self._connection = None",True,not self._pool or not self._connection,not self._pool or not self._connection,0.6462751626968384 673,"def __getattr__(self, attr): if: return self.rrset.name elif attr == 'ttl': return self.rrset.ttl elif attr == 'covers': return self.rrset.covers elif attr == 'rdclass': return self.rrset.rdclass elif attr == 'rdtype': return self.rrset.rdtype else: raise AttributeError(attr)",True,attr == 'name',attr == 'name',0.6590777635574341 674,"def __getattr__(self, attr): if attr == 'name': return self.rrset.name elif: return self.rrset.ttl elif attr == 'covers': return self.rrset.covers elif attr == 'rdclass': return self.rrset.rdclass elif attr == 'rdtype': return self.rrset.rdtype else: raise AttributeError(attr)",True,attr == 'ttl',attr == 'ttl',0.6633827686309814 675,"def __getattr__(self, attr): if attr == 'name': return self.rrset.name elif attr == 'ttl': return self.rrset.ttl elif: return self.rrset.covers elif attr == 'rdclass': return self.rrset.rdclass elif attr == 'rdtype': return self.rrset.rdtype else: raise AttributeError(attr)",True,attr == 'covers',attr == 'covers',0.6541184782981873 676,"def __getattr__(self, attr): if attr == 'name': return self.rrset.name elif attr == 'ttl': return self.rrset.ttl elif attr == 'covers': return self.rrset.covers elif: return self.rrset.rdclass elif attr == 'rdtype': return self.rrset.rdtype else: raise AttributeError(attr)",True,attr == 'rdclass',attr == 'rdclass',0.6521478891372681 677,"def __getattr__(self, attr): if attr == 'name': return self.rrset.name elif attr == 'ttl': return self.rrset.ttl elif attr == 'covers': return self.rrset.covers elif attr == 'rdclass': return self.rrset.rdclass elif: return self.rrset.rdtype else: raise AttributeError(attr)",True,attr == 'rdtype',attr == 'rdtype',0.6532492637634277 678,"def _get_new_profile_position(self) -> Optional[DeviceFacade.View]: buttons = self.device.find(className=ResourceID.BUTTON) for button in buttons: if: return button return None",False,button.content_desc() == 'Profile',button.content_desc == 'NEW',0.6502137184143066 679,"def get_out_channel(layer): if: return getattr(layer, 'out_channels') return layer.weight.size(0)",True,"hasattr(layer, 'out_channels')","hasattr(layer, 'out_channels')",0.644025981426239 680,"@staticmethod def normalize_path(path): if: return '/' path = re.sub('/+', '/', path) if not path.startswith('/'): path = '/' + path return path",False,not path,path is None,0.6645264029502869 681,"@staticmethod def normalize_path(path): if not path: return '/' path = re.sub('/+', '/', path) if: path = '/' + path return path",False,not path.startswith('/'),path[:1] != '/',0.6438450217247009 682,"def tearDown(self): """""" Cleanup and umount volume """""" for mount_object in self.mounts: self.delete_user(mount_object.client_system, 'qa') for server in self.servers: self.delete_user(server, 'qa') g.log.info('Starting to Unmount Volume and Cleanup Volume') ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts) if: raise ExecutionError('Failed to umount the vol & cleanup Volume') g.log.info('Successful in umounting the volume and Cleanup') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.6566653847694397 683,"def demap_params(self, params): if: print(f'param out of bounds: {params}') params = np.clip(params, -1.0, 1.0) if self.param_mapping =='sin': params = np.arcsin(params) / (0.5 * np.pi) return params",False,not np.all((params <= 1.0) & (params >= -1.0)),self.param_mapping is None,0.6538748741149902 684,"def demap_params(self, params): if not np.all((params <= 1.0) & (params >= -1.0)): print(f'param out of bounds: {params}') params = np.clip(params, -1.0, 1.0) if: params = np.arcsin(params) / (0.5 * np.pi) return params",False,self.param_mapping == 'sin',self.normalize_params,0.6482543349266052 685,"def format(self, val, *args, **kwargs): if: val_ = ', '.join(val) else: val_ = '' return {self.name: val_}",False,"val and isinstance(val, list)",len(val) > 0,0.647120475769043 686,"def evaluate_book(book_id, db): authors = db.authors(book_id, index_is_id=True) if: authors = [a.strip().replace('|', ',') for a in authors.split(',')] for author in authors: if ',' not in author: return True return False",True,authors,authors,0.659276008605957 687,"def evaluate_book(book_id, db): authors = db.authors(book_id, index_is_id=True) if authors: authors = [a.strip().replace('|', ',') for a in authors.split(',')] for author in authors: if: return True return False",False,"',' not in author",author == author,0.6475291848182678 688,"def handleFreeLookPointing(self): if: msg = self.vehicle.message_factory.mount_control_encode(0, 1, self.camPitch * 100, 0.0, self.camYaw * 100, 0) self.vehicle.send_mavlink(msg) else: msg = self.vehicle.message_factory.command_long_encode(0, 1, mavutil.mavlink.MAV_CMD_CONDITION_YAW, 0, self.camYaw, YAW_SPEED, self.camDir, 0.0, 0, 0, 0) self.vehicle.send_mavlink(msg)",False,self.vehicle.mount_status[0] is not None,self.mount_status[0],0.6466354131698608 689,"def __iter__(self, slice_key=None): if: for i in range(len(self)): yield self[i] else: for i in range(len(self))[slice_key]: yield self[i]",True,slice_key is None,slice_key is None,0.6507833003997803 690,"def __call__(self, expr, state, recurrences): results = self._cache.get((expr, state, recurrences)) if: return results results = super().__call__(expr, state, recurrences) self._cache[expr, state, recurrences] = results return results",True,results is not None,results is not None,0.6499881744384766 691,"def get_user_admin_fed_full(user_id): user_feds = [] for f in FEDERATION_BYFEDID: if: user_feds.append({'fed_id': f, 'fed': FEDERATION_BYFEDID[f]}) return user_feds",False,int(user_id) in eval(eval(FEDERATION_BYFEDID[f]['fusers'])['members']),user_id == FEDERATION_BYFEDID[f].get('id'),0.651580810546875 692,"def _retries_gen(self, args, kwargs): for i, is_last_try in zip(count(), _is_last_gen(self.max_retries)): try: yield i except self._retry_exceptions: if: yield True else: logger.warning(type(self).__name__ +'caught an error, retrying (%s/%s tries). Called with (*%r, **%r).', i, self.max_retries, args, kwargs, exc_info=True) yield False continue else: return",True,is_last_try,is_last_try,0.6488479375839233 693,"def get_field(self, field): """"""Accesses a box collection and associated fields. This function returns specified field with object; if no field is specified, it returns the box coordinates. Args: field: this optional string parameter can be used to specify a related field to be accessed. Returns: a tensor representing the box collection or an associated field. Raises: ValueError: if invalid field """""" if: raise ValueError('field'+ str(field) +'does not exist') return self.data[field]",True,not self.has_field(field),not self.has_field(field),0.6458514928817749 694,"def resize_num_qa_labels(self, num_labels): """""" Build a resized question answering linear layer Module from a provided new linear layer. Increasing the size will add newly initialized weights. Reducing the size will remove weights from the end Args: cur_qa_logit_layer (:obj:`torch.nn.Linear`): Old linear layer to be resized. num_labels (:obj:`int`, `optional`): New number of labels in the linear layer weight matrix. Increasing the size will add newly initialized weights at the end. Reducing the size will remove weights from the end. If not provided or :obj:`None`, just returns a pointer to the qa labels :obj:`torch.nn.Linear`` module of the model wihtout doing anything. Return: :obj:`torch.nn.Linear`: Pointer to the resized Linear layer or the old Linear layer """""" cur_qa_logit_layer = self.get_qa_logit_layer() if: return new_qa_logit_layer = self._resize_qa_labels(num_labels) self.config.num_qa_labels = num_labels self.num_qa_labels = num_labels return new_qa_logit_layer",False,num_labels is None or cur_qa_logit_layer is None,cur_qa_logit_layer is None,0.6435016393661499 695,"def append(self, expectation): if: self.expectations.append(expectation) if self._scheduler is not None: self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation) else: self.expectations.extend(expectation.expectations) if self._scheduler is not None: self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation.expectations) return self",False,"not isinstance(expectation, self.__class__)","isinstance(expectation, Expectation)",0.6442573070526123 696,"def append(self, expectation): if not isinstance(expectation, self.__class__): self.expectations.append(expectation) if: self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation) else: self.expectations.extend(expectation.expectations) if self._scheduler is not None: self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation.expectations) return self",True,self._scheduler is not None,self._scheduler is not None,0.652035117149353 697,"def append(self, expectation): if not isinstance(expectation, self.__class__): self.expectations.append(expectation) if self._scheduler is not None: self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation) else: self.expectations.extend(expectation.expectations) if: self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation.expectations) return self",True,self._scheduler is not None,self._scheduler is not None,0.6539571285247803 698,"def __eq__(self, other): if: return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if my_val!= other_val: return False return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.64579176902771 699,"def __eq__(self, other): if not isinstance(other, self.__class__): return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if: return False return True",True,my_val != other_val,my_val != other_val,0.6494433879852295 700,"def sigencode_der_canonize(r, s, order): """""" Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. Makes sure that the signature is encoded in the canonical format, where the ``s`` parameter is always smaller than ``order / 2``. Most commonly used in bitcoin. Encodes the signature to the following :term:`ASN.1` structure:: Ecdsa-Sig-Value ::= SEQUENCE { r INTEGER, s INTEGER } It's expected that this function will be used as a ``sigencode=`` parameter in :func:`ecdsa.keys.SigningKey.sign` method. :param int r: first parameter of the signature :param int s: second parameter of the signature :param int order: the order of the curve over which the signature was computed :return: DER encoding of ECDSA signature :rtype: bytes """""" if: s = order - s return sigencode_der(r, s, order)",False,s > order / 2,order != 0,0.6569547653198242 701,"def __eq__(self, other): if: return False return self.source == other.source and self.identifier == other.identifier and (self.ntype == other.ntype)",False,"not isinstance(other, GraphNode)","not isinstance(other, Token)",0.6500142216682434 702,"def _translateType(self, t): it = TypeToIntegerType(t) if: it: IntegerType return Bits(it.getBitWidth()) else: raise NotImplementedError(t)",False,it is not None,t == BALLEL_INT_TYPE,0.6533830165863037 703,"def options(self, context, module_options): """""" URL URL for the download cradle PAYLOAD Payload architecture (choices: 64 or 32) Default: 64 """""" if: context.log.fail('URL option is required!') exit(1) self.url = module_options['URL'] self.payload = '64' if 'PAYLOAD' in module_options: if module_options['PAYLOAD'] not in ['64', '32']: context.log.fail('Invalid value for PAYLOAD option!') exit(1) self.payload = module_options['PAYLOAD']",False,not 'URL' in module_options,'URL' not in module_options,0.6517795324325562 704,"def options(self, context, module_options): """""" URL URL for the download cradle PAYLOAD Payload architecture (choices: 64 or 32) Default: 64 """""" if not 'URL' in module_options: context.log.fail('URL option is required!') exit(1) self.url = module_options['URL'] self.payload = '64' if: if module_options['PAYLOAD'] not in ['64', '32']: context.log.fail('Invalid value for PAYLOAD option!') exit(1) self.payload = module_options['PAYLOAD']",True,'PAYLOAD' in module_options,'PAYLOAD' in module_options,0.6498085856437683 705,"def options(self, context, module_options): """""" URL URL for the download cradle PAYLOAD Payload architecture (choices: 64 or 32) Default: 64 """""" if not 'URL' in module_options: context.log.fail('URL option is required!') exit(1) self.url = module_options['URL'] self.payload = '64' if 'PAYLOAD' in module_options: if: context.log.fail('Invalid value for PAYLOAD option!') exit(1) self.payload = module_options['PAYLOAD']",False,"module_options['PAYLOAD'] not in ['64', '32']",module_options['PAYLOAD'] not in _SUPPORTED_PAYLOAD,0.645696222782135 706,"def get_adapter(self, url): """""" Returns the appropriate connection adapter for the given URL. :rtype: requests.adapters.BaseAdapter """""" for prefix, adapter in self.adapters.items(): if: return adapter raise InvalidSchema(""No connection adapters were found for '%s'"" % url)",True,url.lower().startswith(prefix.lower()),url.lower().startswith(prefix.lower()),0.6447865962982178 707,"def proxy_headers(self, proxy): """"""Returns a dictionary of the headers to add to any request sent through a proxy. This works with urllib3 magic to ensure that they are correctly sent to the proxy, rather than in a tunnelled request if CONNECT is being used. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param proxies: The url of the proxy being used for this request. """""" headers = {} username, password = get_auth_from_url(proxy) if: headers['Proxy-Authorization'] = _basic_auth_str(username, password) return headers",True,username and password,username and password,0.6595168709754944 708,"def known_extension(self, extension): for pattern in self.extension_patterns: if: if extension.lower() == pattern.lower(): return 1 elif pattern.match(extension): return 1 return 0",False,"isinstance(pattern, types.StringTypes)","isinstance(extension, str)",0.6412390470504761 709,"def known_extension(self, extension): for pattern in self.extension_patterns: if isinstance(pattern, types.StringTypes): if: return 1 elif pattern.match(extension): return 1 return 0",False,extension.lower() == pattern.lower(),pattern.match(extension),0.6480761766433716 710,"def known_extension(self, extension): for pattern in self.extension_patterns: if isinstance(pattern, types.StringTypes): if extension.lower() == pattern.lower(): return 1 elif: return 1 return 0",False,pattern.match(extension),extension.lower() == pattern.lower(),0.6445437073707581 711,"def set_params(self, **kwargs): self.source.set_params(**kwargs) if: raise ValueError('Coordinate outside interior ROI.')",False,self.pixel not in self.roi.pixels_interior,self.source.intersects(self.source),0.6472979784011841 712,"def scan_line_break(self): ch = self.peek() if: if self.prefix(2) == '\r\n': self.forward(2) else: self.forward() return '\n' elif ch in '\u2028\u2029': self.forward() return ch return ''",True,ch in '\r\n\x85',ch in '\r\n\x85',0.6483626365661621 713,"def scan_line_break(self): ch = self.peek() if ch in '\r\n\x85': if: self.forward(2) else: self.forward() return '\n' elif ch in '\u2028\u2029': self.forward() return ch return ''",True,self.prefix(2) == '\r\n',self.prefix(2) == '\r\n',0.6466876268386841 714,"def scan_line_break(self): ch = self.peek() if ch in '\r\n\x85': if self.prefix(2) == '\r\n': self.forward(2) else: self.forward() return '\n' elif: self.forward() return ch return ''",False,ch in '\u2028\u2029',ch in '\u2028\u2029\u2029',0.649858832359314 715,"def startTagTableOther(self, token): if: self.clearStackToTableBodyContext() self.endTagTableRowGroup(impliedTagToken(self.tree.openElements[-1].name)) return token else: assert self.parser.innerHTML self.parser.parseError()",False,"self.tree.elementInScope('tbody', variant='table') or self.tree.elementInScope('thead', variant='table') or self.tree.elementInScope('tfoot', variant='table')","self.tree.elementInScope('tbody', variant='table') or self.tree.elementInScope('thead', variant='table') or self.tree.elementInScope('tfoot', variant='table') or self.tree.elementInScope('tfoot', variant='table')",0.6490222215652466 716,"def validate_port(self): """"""Exits with an error if the port is not valid"""""" if: self.port = '' else: try: self.port = int(self.port) if not 1 <= self.port <= 65535: raise ValueError except ValueError: end(UNKNOWN, 'port number must be a whole number between'+ '1 and 65535')",True,self.port is None,self.port is None,0.6516157984733582 717,"def validate_port(self): """"""Exits with an error if the port is not valid"""""" if self.port is None: self.port = '' else: try: self.port = int(self.port) if: raise ValueError except ValueError: end(UNKNOWN, 'port number must be a whole number between'+ '1 and 65535')",False,not 1 <= self.port <= 65535,self.port < 1 or self.port > 65535,0.6601684093475342 718,"def ajoutLigne(self, ligne): if: return self.ajoutTraceComplete(ligne) self.ajoutTraceSynthese(ligne)",False,not ligne,ligne.getlevel() == logging.INFO,0.6535208225250244 719,"def cfg_dict(cfg: Config) -> AttrDict: if: return AttrDict(cfg) else: return AttrDict(vars(cfg))",False,"isinstance(cfg, dict)","isinstance(cfg, AttrDict)",0.6484097242355347 720,"def __getitem__(self, package): """"""Get information about a package from apt and dpkg databases. :param package: Name of package :type package: str :returns: Package object :rtype: object :raises: KeyError, subprocess.CalledProcessError """""" apt_result = self._apt_cache_show([package])[package] apt_result['name'] = apt_result.pop('package') pkg = Package(apt_result) dpkg_result = self.dpkg_list([package]).get(package, {}) current_ver = None installed_version = dpkg_result.get('version') if: current_ver = Version({'ver_str': installed_version}) pkg.current_ver = current_ver pkg.architecture = dpkg_result.get('architecture') return pkg",False,installed_version,installed_version is not None,0.6565266847610474 721,"def can_offerer_create_educational_offer(offerer_id: int) -> None: import pcapi.core.educational.adage_backends as adage_client if: return if offerers_repository.offerer_has_venue_with_adage_id(offerer_id): return siren = offerers_repository.find_siren_by_offerer_id(offerer_id) try: response = adage_client.get_adage_offerer(siren) if len(response) == 0: raise educational_exceptions.CulturalPartnerNotFoundException('No venue has been found for the selected siren') except (educational_exceptions.CulturalPartnerNotFoundException, educational_exceptions.AdageException) as exception: raise exception",False,settings.CAN_COLLECTIVE_OFFERER_IGNORE_ADAGE,not offerer_id,0.648491382598877 722,"def can_offerer_create_educational_offer(offerer_id: int) -> None: import pcapi.core.educational.adage_backends as adage_client if settings.CAN_COLLECTIVE_OFFERER_IGNORE_ADAGE: return if: return siren = offerers_repository.find_siren_by_offerer_id(offerer_id) try: response = adage_client.get_adage_offerer(siren) if len(response) == 0: raise educational_exceptions.CulturalPartnerNotFoundException('No venue has been found for the selected siren') except (educational_exceptions.CulturalPartnerNotFoundException, educational_exceptions.AdageException) as exception: raise exception",False,offerers_repository.offerer_has_venue_with_adage_id(offerer_id),offerer_id is None,0.6461092233657837 723,"def can_offerer_create_educational_offer(offerer_id: int) -> None: import pcapi.core.educational.adage_backends as adage_client if settings.CAN_COLLECTIVE_OFFERER_IGNORE_ADAGE: return if offerers_repository.offerer_has_venue_with_adage_id(offerer_id): return siren = offerers_repository.find_siren_by_offerer_id(offerer_id) try: response = adage_client.get_adage_offerer(siren) if: raise educational_exceptions.CulturalPartnerNotFoundException('No venue has been found for the selected siren') except (educational_exceptions.CulturalPartnerNotFoundException, educational_exceptions.AdageException) as exception: raise exception",False,len(response) == 0,not response or response.status_code != 0,0.6514031887054443 724,"def get_decoder(self, i=None): if: n = len(self.plotter.plot_data) decoders = self.plotter.plot_data_decoder or [None] * n return decoders[i] or self.plotter.plot_data[i].psy.decoder else: return self.decoder",False,"i is not None and isinstance(self.plotter.plot_data, InteractiveList)",i is not None,0.6468645334243774 725,"def colorize_codeblock_body(s: str) -> Iterator[Union[Tag, str]]: idx = 0 for match in DOCTEST_RE.finditer(s): start = match.start() if: yield s[idx:start] yield from subfunc(match) idx = match.end() assert idx == len(s)",False,idx < start,start and idx < len(s),0.6634973287582397 726,"def activate(visualizer: Visualizer) -> None: if: raise core.UserError('Running a task', 'Please wait until the current task has been finished before switching to another window.') global __active_visualizer if __active_visualizer is not None: __active_visualizer.detach() __active_visualizer = visualizer __active_visualizer.attach() __active_visualizer.refresh_all() __active_visualizer.update_status(__active_visualizer.get_default_status())",False,visualizer.get_window().get_scheduler().has_active_tasks(),"hasattr(visualizer, 'get_default_status')",0.6484569311141968 727,"def activate(visualizer: Visualizer) -> None: if visualizer.get_window().get_scheduler().has_active_tasks(): raise core.UserError('Running a task', 'Please wait until the current task has been finished before switching to another window.') global __active_visualizer if: __active_visualizer.detach() __active_visualizer = visualizer __active_visualizer.attach() __active_visualizer.refresh_all() __active_visualizer.update_status(__active_visualizer.get_default_status())",False,__active_visualizer is not None,__active_visualizer.is_visible() and __active_visualizer.get_active_window() is visualizer.get_window(),0.6550665497779846 728,"def _get_label(self, graph_number, node): assert graph_number in [1, 2] if: return self.graph1.nodes[node]['label'] else: return self.graph2.nodes[node]['label']",True,graph_number == 1,graph_number == 1,0.6545867919921875 729,"def variance(self): if: return self.S / (self.n - 1.0) else: return 0",False,self.n >= 2,self.n > 1.0,0.653853178024292 730,"def mixing_rate_num_characters(task, temperature: float=1.0, char_count_name: str='text_chars') -> float: """"""Mixing rate based on the number of characters for the task's 'train' split. Args: task: the seqio.Task to compute a rate for. temperature: a temperature (T) to scale rate (r) by as r^(1/T). char_count_name: feature name of the character counts in the cached stats file. Returns: The mixing rate for this task. """""" if: raise ValueError('`mixing_rate_num_characters` requires that each task has is cached with the character count stats.') ret = task.get_cached_stats('train')[char_count_name] if temperature!= 1.0: ret = ret ** (1.0 / temperature) return ret",False,task.cache_dir is None,char_count_name not in task.get_cached_stats('train'),0.6491622924804688 731,"def mixing_rate_num_characters(task, temperature: float=1.0, char_count_name: str='text_chars') -> float: """"""Mixing rate based on the number of characters for the task's 'train' split. Args: task: the seqio.Task to compute a rate for. temperature: a temperature (T) to scale rate (r) by as r^(1/T). char_count_name: feature name of the character counts in the cached stats file. Returns: The mixing rate for this task. """""" if task.cache_dir is None: raise ValueError('`mixing_rate_num_characters` requires that each task has is cached with the character count stats.') ret = task.get_cached_stats('train')[char_count_name] if: ret = ret ** (1.0 / temperature) return ret",True,temperature != 1.0,temperature != 1.0,0.6570450067520142 732,"def log_error(experiment_name, model_name, specific_params, inp, err_msg, path_prefix): err_info = {'input': inp,'msg': err_msg} logged_errors = {} if: logged_errors = read_json(path_prefix, 'errors.json') if experiment_name not in logged_errors: logged_errors[experiment_name] = {} if model_name not in logged_errors[experiment_name]: logged_errors[experiment_name][model_name] = [] logged_errors[experiment_name][model_name].append({'err_info': err_info, **specific_params}) write_json(path_prefix, 'errors.json', logged_errors)",False,"check_file_exists(path_prefix, 'errors.json')",specific_params == 'no_specific_params',0.6440228223800659 733,"def log_error(experiment_name, model_name, specific_params, inp, err_msg, path_prefix): err_info = {'input': inp,'msg': err_msg} logged_errors = {} if check_file_exists(path_prefix, 'errors.json'): logged_errors = read_json(path_prefix, 'errors.json') if: logged_errors[experiment_name] = {} if model_name not in logged_errors[experiment_name]: logged_errors[experiment_name][model_name] = [] logged_errors[experiment_name][model_name].append({'err_info': err_info, **specific_params}) write_json(path_prefix, 'errors.json', logged_errors)",True,experiment_name not in logged_errors,experiment_name not in logged_errors,0.6507952213287354 734,"def log_error(experiment_name, model_name, specific_params, inp, err_msg, path_prefix): err_info = {'input': inp,'msg': err_msg} logged_errors = {} if check_file_exists(path_prefix, 'errors.json'): logged_errors = read_json(path_prefix, 'errors.json') if experiment_name not in logged_errors: logged_errors[experiment_name] = {} if: logged_errors[experiment_name][model_name] = [] logged_errors[experiment_name][model_name].append({'err_info': err_info, **specific_params}) write_json(path_prefix, 'errors.json', logged_errors)",True,model_name not in logged_errors[experiment_name],model_name not in logged_errors[experiment_name],0.6472816467285156 735,"def encode(self, x): c = torch.ones(x.shape[0], 1) * self.sos_token c = c.long().to(x.device) if: return (c, None, [None, None, c]) return c",False,self.quantize_interface,"not isinstance(c, torch.Tensor)",0.6497696042060852 736,"def _check_letter_case_collisions(eopatch_features: Features, filesystem_features: FilesystemDataInfo) -> None: """"""Check that features have no name clashes (ignoring case) with other EOPatch features and saved features."""""" lowercase_features = {_to_lowercase(*feature) for feature in eopatch_features} if: raise OSError('Some features differ only in casing and cannot be saved in separate files.') for feature, _ in filesystem_features.iterate_features(): if feature not in eopatch_features and _to_lowercase(*feature) in lowercase_features: raise OSError(f'There already exists a feature {feature} in the filesystem that only differs in casing from a feature that should be saved.')",False,len(lowercase_features) != len(eopatch_features),filesystem_features.is_multi_columns,0.6443958282470703 737,"def _check_letter_case_collisions(eopatch_features: Features, filesystem_features: FilesystemDataInfo) -> None: """"""Check that features have no name clashes (ignoring case) with other EOPatch features and saved features."""""" lowercase_features = {_to_lowercase(*feature) for feature in eopatch_features} if len(lowercase_features)!= len(eopatch_features): raise OSError('Some features differ only in casing and cannot be saved in separate files.') for feature, _ in filesystem_features.iterate_features(): if: raise OSError(f'There already exists a feature {feature} in the filesystem that only differs in casing from a feature that should be saved.')",False,feature not in eopatch_features and _to_lowercase(*feature) in lowercase_features,feature in lowercase_features,0.6462252140045166 738,"def recalculate_descendants_perms(self): if: return children = list(self.children.only('pk', 'owner', 'parent')) if not children: return effective_perms = self._get_effective_perms(include_calculated=False) for child in children: child.permissions.filter(inherited=True).delete() child._recalculate_inherited_perms(parent_effective_perms=effective_perms, stale_already_deleted=True) child.recalculate_descendants_perms()",False,self.asset_type not in ASSET_TYPES_WITH_CHILDREN,not self.children,0.6441363096237183 739,"def recalculate_descendants_perms(self): if self.asset_type not in ASSET_TYPES_WITH_CHILDREN: return children = list(self.children.only('pk', 'owner', 'parent')) if: return effective_perms = self._get_effective_perms(include_calculated=False) for child in children: child.permissions.filter(inherited=True).delete() child._recalculate_inherited_perms(parent_effective_perms=effective_perms, stale_already_deleted=True) child.recalculate_descendants_perms()",True,not children,not children,0.6563915014266968 740,"def _make_dir(filename): folder = os.path.dirname(filename) if: os.makedirs(folder)",True,not os.path.exists(folder),not os.path.exists(folder),0.6444495916366577 741,"def checkcol(amob): if: circle = Vector3(amob.pos.x, amob.pos.y, amob.scale) if segvcircle(p1, p2, circle): self.explosions.append(explosion(amob)) amob.kill() return True return False",False,amob.on,amob,0.6553150415420532 742,"def checkcol(amob): if amob.on: circle = Vector3(amob.pos.x, amob.pos.y, amob.scale) if: self.explosions.append(explosion(amob)) amob.kill() return True return False",False,"segvcircle(p1, p2, circle)",circle.x + circle.y >= amob.width and circle.y + circle.width,0.6447510123252869 743,"def serialize(self): if: self.length = len(self.value) + self._OUI_AND_SUBTYPE_LEN buf = struct.pack(self._PACK_STR, self._type, self.length, self.oui, self.subtype) buf = bytearray(buf) form = '%ds' % (self.length - self._OUI_AND_SUBTYPE_LEN) buf.extend(struct.pack(form, self.value)) return buf",False,self.length == 0,self.length == -1,0.6622902750968933 744,"def prepare(self, db, user): self.place_type = self.list[1] if: self.place_type = PlaceType() self.place_type.set_from_xml_str(self.list[1])",False,self.place_type,self.list[1] == 'place',0.6560956239700317 745,"def _update_matched_xlinks(self, b_matched, prot, res, matched, non_matched): if: if prot in matched: matched[prot].add(res) else: matched[prot] = set([res]) elif prot in non_matched: non_matched[prot].add(res) else: non_matched[prot] = set([res]) return (matched, non_matched)",True,b_matched,b_matched,0.6556805372238159 746,"def _update_matched_xlinks(self, b_matched, prot, res, matched, non_matched): if b_matched: if: matched[prot].add(res) else: matched[prot] = set([res]) elif prot in non_matched: non_matched[prot].add(res) else: non_matched[prot] = set([res]) return (matched, non_matched)",True,prot in matched,prot in matched,0.6652586460113525 747,"def _update_matched_xlinks(self, b_matched, prot, res, matched, non_matched): if b_matched: if prot in matched: matched[prot].add(res) else: matched[prot] = set([res]) elif: non_matched[prot].add(res) else: non_matched[prot] = set([res]) return (matched, non_matched)",False,prot in non_matched,not prot in non_matched,0.6521333456039429 748,"def __eq__(self, other): if: return self is other or self.__dict__ == other.__dict__ elif isinstance(other, basestring): try: self.parseString(_ustr(other), parseAll=True) return True except ParseBaseException: return False else: return super(ParserElement, self) == other",True,"isinstance(other, ParserElement)","isinstance(other, ParserElement)",0.6479086875915527 749,"def __eq__(self, other): if isinstance(other, ParserElement): return self is other or self.__dict__ == other.__dict__ elif: try: self.parseString(_ustr(other), parseAll=True) return True except ParseBaseException: return False else: return super(ParserElement, self) == other",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6456524133682251 750,"def __init__(self, backbone, neck=None, bbox_head=None, train_cfg=None, test_cfg=None, pretrained=None): super(SingleStageDetector, self).__init__() self.backbone = builder.build_backbone(backbone) if: self.neck = builder.build_neck(neck) self.bbox_head = builder.build_head(bbox_head) self.train_cfg = train_cfg self.test_cfg = test_cfg self.init_weights(pretrained=pretrained)",True,neck is not None,neck is not None,0.6676803231239319 751,"def getfirst(self, key, default=None): """""" Return the first value received."""""" if: value = self[key] if isinstance(value, list): return value[0].value else: return value.value else: return default",True,key in self,key in self,0.6615408658981323 752,"def getfirst(self, key, default=None): """""" Return the first value received."""""" if key in self: value = self[key] if: return value[0].value else: return value.value else: return default",False,"isinstance(value, list)","isinstance(value, tuple)",0.6437720656394958 753,"def getRegisteredMachines(): result = helper.execCommand('VBoxManage list vms') lines = result.stdout.decode('utf-8').strip().split('\n') machines = {} for line in lines: if: name, vid = line.split(' ') machines[vid] = name.strip('""') return machines",False,len(line) > 0,line.startswith('VM name:'),0.6479718685150146 754,"def describe_model_package_group(self, model_package_group_name: str) -> ModelPackageGroup: model_package_group = self.model_package_groups.get(model_package_group_name) if: raise ValidationError(f'Model package group {model_package_group_name} not found') return model_package_group",True,model_package_group is None,model_package_group is None,0.6486688852310181 755,"def build_tokenizer(vocab_file, merges_file, tokenizer_type='GPT2BPETokenizer'): """"""Select and instantiate the tokenizer."""""" if: tokenizer = _GPT2BPETokenizer(vocab_file, merges_file) else: raise NotImplementedError('{} tokenizer is not implemented.'.format(tokenizer_type)) return tokenizer",True,tokenizer_type == 'GPT2BPETokenizer',tokenizer_type == 'GPT2BPETokenizer',0.647770881652832 756,"def write(self, oprot): if: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('get_open_txns_result') if self.success is not None: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot._fast_encode is not None and self.thrift_spec is not None,oprot._fast_encode is not None and self.thrift_spec is not None,0.6459708213806152 757,"def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('get_open_txns_result') if: oprot.writeFieldBegin('success', TType.STRUCT, 0) self.success.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,self.success is not None,self.success is not None,0.6479748487472534 758,"@property def image_title(self) -> str: if: raise LabelRowError('Image title can only be retrieved for DataType.IMAGE or DataType.IMG_GROUP') return self._frame_level_data().image_title",False,"self._label_row.data_type not in [DataType.IMAGE, DataType.IMG_GROUP]","self._label_row_type not in [DataType.IMAGE, DataType.IMG_GROUP]",0.6482896208763123 759,"def image_combo_index_changed(self, combo, row): if: self.display_add_new_image_dialog(select_in_combo=True, combo=combo) title_item = self.item(row, 1) title_item.setIcon(combo.itemIcon(combo.currentIndex())) combo.setItemData(0, combo.currentIndex())",False,combo.currentText() == self.COMBO_IMAGE_ADD,self.get_selected_image_widget(),0.6496030688285828 760,"def mod_crop(img, scale): """"""Mod crop images, used during testing. Args: img (ndarray): Input image. scale (int): Scale factor. Returns: ndarray: Result image. """""" img = img.copy() if: h, w = (img.shape[0], img.shape[1]) h_remainder, w_remainder = (h % scale, w % scale) img = img[:h - h_remainder, :w - w_remainder,...] else: raise ValueError(f'Wrong img ndim: {img.ndim}.') return img",False,"img.ndim in (2, 3)",img.ndim == 3,0.6473227739334106 761,"def merge_counts(x, y): for k, v in y.items(): if: x[k] = 0 x[k] += v return x",True,k not in x,k not in x,0.6621514558792114 762,"def Activated(self): import FreeCADGui sel = FreeCADGui.Selection.getSelection() if: nudge = self.getNudgeValue('right') if nudge: FreeCADGui.addModule('Draft') FreeCADGui.doCommand('Draft.move(' + self.toStr(sel) + ',FreeCAD.' + str(nudge) + ')') FreeCADGui.doCommand('FreeCAD.ActiveDocument.recompute()')",True,sel,sel,0.6726675033569336 763,"def Activated(self): import FreeCADGui sel = FreeCADGui.Selection.getSelection() if sel: nudge = self.getNudgeValue('right') if: FreeCADGui.addModule('Draft') FreeCADGui.doCommand('Draft.move(' + self.toStr(sel) + ',FreeCAD.' + str(nudge) + ')') FreeCADGui.doCommand('FreeCAD.ActiveDocument.recompute()')",True,nudge,nudge,0.6760420799255371 764,"def get_prod_folder(): folder = '' system = platform.system() if: folder = '/home/xxx/' elif system == 'Windows': folder = 'C:\\xxx\\' return folder",True,system == 'Linux',system == 'Linux',0.6563327312469482 765,"def get_prod_folder(): folder = '' system = platform.system() if system == 'Linux': folder = '/home/xxx/' elif: folder = 'C:\\xxx\\' return folder",True,system == 'Windows',system == 'Windows',0.6597287654876709 766,"def iter_messages(self): for msg_data in self.messages: data = msg_data.copy() if: guild = self._bot.get_guild(guild_id) channel = guild and guild.get_channel(msg_data['channel']) else: channel = self._bot.get_channel(msg_data['channel']) data['partial_message'] = channel.get_partial_message(data['message']) if channel is not None else None yield data",False,guild_id := msg_data.get('guild'),self.guild_id,0.646680474281311 767,"def _solve(linear_op, rhs): from..operators import CholLinearOperator, TriangularLinearOperator if: return linear_op.solve(rhs) if settings.fast_computations.solves.off() or linear_op.size(-1) <= settings.max_cholesky_size.value(): return linear_op.cholesky()._cholesky_solve(rhs) else: with torch.no_grad(): preconditioner = linear_op.detach()._solve_preconditioner() return linear_op._solve(rhs, preconditioner)",False,"isinstance(linear_op, (CholLinearOperator, TriangularLinearOperator))",not settings.enable_cholesky_size.value() or linear_op.size(-1) <= settings.max_cholesky_size.value(),0.6458022594451904 768,"def _solve(linear_op, rhs): from..operators import CholLinearOperator, TriangularLinearOperator if isinstance(linear_op, (CholLinearOperator, TriangularLinearOperator)): return linear_op.solve(rhs) if: return linear_op.cholesky()._cholesky_solve(rhs) else: with torch.no_grad(): preconditioner = linear_op.detach()._solve_preconditioner() return linear_op._solve(rhs, preconditioner)",False,settings.fast_computations.solves.off() or linear_op.size(-1) <= settings.max_cholesky_size.value(),"isinstance(linear_op, CholLinearOperator)",0.6458474397659302 769,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if: order = self.get_order(aBuf) else: order = -1 if order >= 0: self._mTotalChars += 1 if order < self._mTableSize: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1",True,aCharLen == 2,aCharLen == 2,0.6565761566162109 770,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if aCharLen == 2: order = self.get_order(aBuf) else: order = -1 if: self._mTotalChars += 1 if order < self._mTableSize: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1",True,order >= 0,order >= 0,0.6584807634353638 771,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if aCharLen == 2: order = self.get_order(aBuf) else: order = -1 if order >= 0: self._mTotalChars += 1 if: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1",True,order < self._mTableSize,order < self._mTableSize,0.6491011381149292 772,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if aCharLen == 2: order = self.get_order(aBuf) else: order = -1 if order >= 0: self._mTotalChars += 1 if order < self._mTableSize: if: self._mFreqChars += 1",True,512 > self._mCharToFreqOrder[order],512 > self._mCharToFreqOrder[order],0.6435670852661133 773,"def GetActivePage(self): """""" Returns the current selected tab or ``wx.NOT_FOUND`` if none is selected. """""" for indx, page in enumerate(self._pages): if: return indx return wx.NOT_FOUND",False,page.active,page['name'] == 'hx.meituan.com',0.6590600609779358 774,"@pytest.mark.script_launch_mode('subprocess') def test_duration_formatting(script_runner): ret = script_runner.run(['cat-numbers', '-b', '--formatting', 'tests/data/duration_112.numbers'], print_result=False) assert ret.success rows = ret.stdout.strip().splitlines() csv_reader = csv.reader(rows) for row in csv_reader: if: check.equal(row[6], row[13])",False,row[13] != 'Check' and row[13] is not None,row[0] == 'duration',0.6467792987823486 775,"@hpipm_mode.setter def hpipm_mode(self, hpipm_mode): hpipm_modes = ('BALANCE', 'SPEED_ABS', 'SPEED', 'ROBUST') if: self.__hpipm_mode = hpipm_mode else: raise Exception('Invalid hpipm_mode value. Possible values are:\n\n' + ',\n'.join(hpipm_modes) + '.\n\nYou have:'+ hpipm_mode + '.\n\nExiting.')",True,hpipm_mode in hpipm_modes,hpipm_mode in hpipm_modes,0.6490256786346436 776,"def atari_env_by_name(name): for cfg in ENVPOOL_ATARI_ENVS: if: return cfg raise Exception('Unknown Atari env')",False,cfg.name == name,name == cfg.get('atari'),0.6565744876861572 777,"def __eq__(self, other): """""" Specie is equal to other only if element and oxidation states are exactly the same. """""" if: return False return self.symbol == other.symbol and self._oxi_state == other._oxi_state",False,"not isinstance(other, DummySpecie)","not isinstance(other, Element)",0.6492488980293274 778,"def makedir(data_url): if: shutil.rmtree(data_url) os.makedirs(data_url)",True,os.path.exists(data_url),os.path.exists(data_url),0.6426948308944702 779,"def set_dropout_rate(self, p=None) -> int: if: self.head_module[-2].p = p return 1 return 0",True,p is not None,p is not None,0.6538280248641968 780,"@staticmethod def identify_baud_rate(dxl_id, usb): """"""Identify the baud rate a Dynamixel servo is communicating at. Parameters ---------- dxl_id : int Dynamixel ID on chain. Must be [0, 25] usb : str the USB port, typically ""/dev/something"" Returns ------- int the baud rate the Dynamixel is communicating at """""" try: for b in BAUD_MAP.keys(): port_h = prh.PortHandler(usb) port_h.openPort() port_h.setBaudRate(b) packet_h = pch.PacketHandler(2.0) _, dxl_comm_result, _ = packet_h.ping(port_h, dxl_id) port_h.closePort() if: return b except: pass return -1",False,dxl_comm_result == COMM_SUCCESS,b == dxl_comm_result,0.6449222564697266 781,"def remove_plugin(self, label: str) -> None: """"""Remove a plugin from the encoder. Parameters ---------- label : str The label of the plugin to remove. """""" if: del self._available[label] elif label in self._unavailable: del self._unavailable[label] else: raise ValueError(f""Unable to remove '{label}', no such plugin'"")",True,label in self._available,label in self._available,0.6543689966201782 782,"def remove_plugin(self, label: str) -> None: """"""Remove a plugin from the encoder. Parameters ---------- label : str The label of the plugin to remove. """""" if label in self._available: del self._available[label] elif: del self._unavailable[label] else: raise ValueError(f""Unable to remove '{label}', no such plugin'"")",True,label in self._unavailable,label in self._unavailable,0.6527466773986816 783,"def select(self, panel: Any): for index, item in enumerate(self.items): if: if self.selected_index!= index: self.selected_index = index self.dirty() return",False,item == panel,item is panel,0.6561578512191772 784,"def select(self, panel: Any): for index, item in enumerate(self.items): if item == panel: if: self.selected_index = index self.dirty() return",False,self.selected_index != index,index + 1 == len(self.items),0.6484863758087158 785,"def __call__(self, results): """"""Call function to convert image in results to :obj:`torch.Tensor` and transpose the channel order. Args: results (dict): Result dict contains the image data to convert. Returns: dict: The result dict contains the image converted to :obj:`torch.Tensor` and transposed to (C, H, W) order. """""" for key in self.keys: img = results[key] if: img = np.expand_dims(img, -1) results[key] = to_tensor(img.transpose(2, 0, 1)) return results",True,len(img.shape) < 3,len(img.shape) < 3,0.6471738815307617 786,"def multi_gpu_extract(model, teacher_model, data_loader, logger, args): func = lambda **x: model(mode='extract', **x) func_teacher = lambda **x: model(mode='extract', **x) rank, world_size = get_dist_info() results = dist_forward_collect_with_teacher(func, func_teacher, data_loader, rank, len(data_loader.dataset)) if: value = results['ressl_loss'].mean() logger.info(f'mean ressl loss: {value}') torch.distributed.barrier()",False,rank == 0,'ressl_loss' in results,0.6710919141769409 787,"def user_role_updated(self, botengine, location_id, user_id, role, category, location_access, previous_category, previous_location_access): """""" A user changed roles :param botengine: BotEngine environment :param location_id: Location ID :param user_id: User ID that changed roles :param role: Application-layer agreed upon role integer which may auto-configure location_access and alert category :param category: User's current alert/communications category (1=resident; 2=supporter) :param location_access: User's current access to the location :param previous_category: User's previous category, if any :param previous_location_access: User's previous access to the location, if any :return: """""" if: botengine.get_logger().info('\t=> Now tracking location'+ str(location_id)) self.locations[location_id] = Location(botengine, location_id) self.locations[location_id].user_role_updated(botengine, user_id, role, category, location_access, previous_category, previous_location_access)",True,location_id not in self.locations,location_id not in self.locations,0.6452451944351196 788,"def forward(self, input): x_gather_list = all_gather_differentiable(input) input_size_list = all_gather_obj(input.size(0)) cur_gpu = get_rank() if: self.start_list = [sum(input_size_list[:t]) for t in range(len(input_size_list) + 1)] dist.barrier() return torch.cat(x_gather_list, 0)",False,cur_gpu == 0,cur_gpu == 1,0.6569294929504395 789,"def param_to_unit(self, name): """"""Return the unit associated with a parameter This is a wrapper function over the property ``_param_unit_map``. It also handles aliases and indexed parameters (e.g., `pint.models.parameter.prefixParameter` and `pint.models.parameter.maskParameter`) with an index beyond those currently initialized. This can be used without an existing :class:`~pint.models.TimingModel`. Parameters ---------- name : str Name of PINT parameter or alias Returns ------- astropy.u.Unit """""" pintname, firstname = self.alias_to_pint_param(name) if: return self._param_unit_map[pintname] prefix, idx_str, idx = split_prefixed_name(pintname) component = self.param_component_map[firstname][0] if getattr(self.components[component], firstname).unit_template is None: return self._param_unit_map[firstname] return u.Unit(getattr(self.components[component], firstname).unit_template(idx))",False,pintname == firstname,pintname in self._param_unit_map,0.6536151170730591 790,"def param_to_unit(self, name): """"""Return the unit associated with a parameter This is a wrapper function over the property ``_param_unit_map``. It also handles aliases and indexed parameters (e.g., `pint.models.parameter.prefixParameter` and `pint.models.parameter.maskParameter`) with an index beyond those currently initialized. This can be used without an existing :class:`~pint.models.TimingModel`. Parameters ---------- name : str Name of PINT parameter or alias Returns ------- astropy.u.Unit """""" pintname, firstname = self.alias_to_pint_param(name) if pintname == firstname: return self._param_unit_map[pintname] prefix, idx_str, idx = split_prefixed_name(pintname) component = self.param_component_map[firstname][0] if: return self._param_unit_map[firstname] return u.Unit(getattr(self.components[component], firstname).unit_template(idx))",False,"getattr(self.components[component], firstname).unit_template is None",component == 'root',0.6446189880371094 791,"def __bool__(self): for n, r in zip(self._outputFileNames, self._renameFileNames): if: return True return False",False,n != r,n == r,0.6677680611610413 792,"def wait_for_mitm_start(self, config, logger): timeout = 30 wait_time_count = 0 ip = config.get('ip') mock_port = config.get('mock.port') proxy_port = config.get('proxy.port') while True: if: return False time.sleep(1) wait_time_count += 1 try: resp = requests.get(f'http://{ip}:{mock_port}/api/status', proxies={'http': f'http://{ip}:{proxy_port}'}) if resp.status_code!= 200: continue else: return True except Exception: continue",False,wait_time_count >= timeout,wait_time_count > timeout,0.6486297845840454 793,"def wait_for_mitm_start(self, config, logger): timeout = 30 wait_time_count = 0 ip = config.get('ip') mock_port = config.get('mock.port') proxy_port = config.get('proxy.port') while True: if wait_time_count >= timeout: return False time.sleep(1) wait_time_count += 1 try: resp = requests.get(f'http://{ip}:{mock_port}/api/status', proxies={'http': f'http://{ip}:{proxy_port}'}) if: continue else: return True except Exception: continue",False,resp.status_code != 200,resp.status_code == 200 or resp.status_code == 404,0.6518865823745728 794,"def runProjectManager(self): if: return self.defineCurrentProject() dlg = QGISRedProjectManagerDialog() dlg.config(self.iface, self.ProjectDirectory, self.NetworkName, self) self.opendedLayers = False self.especificComplementaryLayers = [] self.selectedFids = {} dlg.exec_() result = dlg.ProcessDone if result: self.NetworkName = dlg.NetworkName self.ProjectDirectory = dlg.ProjectDirectory",True,not self.checkDependencies(),not self.checkDependencies(),0.6507564783096313 795,"def runProjectManager(self): if not self.checkDependencies(): return self.defineCurrentProject() dlg = QGISRedProjectManagerDialog() dlg.config(self.iface, self.ProjectDirectory, self.NetworkName, self) self.opendedLayers = False self.especificComplementaryLayers = [] self.selectedFids = {} dlg.exec_() result = dlg.ProcessDone if: self.NetworkName = dlg.NetworkName self.ProjectDirectory = dlg.ProjectDirectory",True,result,result,0.6687904596328735 796,"def strip_module(filename): if: filename = os.path.splitext(filename)[0] if filename.endswith('module'): filename = filename[:-6] return filename",False,'.' in filename,"isinstance(filename, str)",0.6636002659797668 797,"def strip_module(filename): if '.' in filename: filename = os.path.splitext(filename)[0] if: filename = filename[:-6] return filename",False,filename.endswith('module'),filename.endswith('__'),0.6420181393623352 798,"def is_active_from(self, conf): """""" used in try-restart/other commands to check if needed. """""" if: return False return self.get_active_from(conf) == 'active'",False,not conf,"conf.getlist(Service, 'ExecReload', [])",0.6576880216598511 799,"def get_unicode_from_response(r): """"""Returns the requested content back in unicode. :param r: Response object to get unicode content from. Tried: 1. charset from content-type 2. fall back and replace all unicode characters :rtype: str """""" warnings.warn('In requests 3.0, get_unicode_from_response will be removed. For more information, please see the discussion on issue #2266. (This warning should only appear once.)', DeprecationWarning) tried_encodings = [] encoding = get_encoding_from_headers(r.headers) if: try: return str(r.content, encoding) except UnicodeError: tried_encodings.append(encoding) try: return str(r.content, encoding, errors='replace') except TypeError: return r.content",True,encoding,encoding,0.6653707027435303 800,"def _bbox_forward(self, x, rois): """"""Box head forward function used in both training and testing time."""""" bbox_cls_feats = self.bbox_roi_extractor(x[:self.bbox_roi_extractor.num_inputs], rois) bbox_reg_feats = self.bbox_roi_extractor(x[:self.bbox_roi_extractor.num_inputs], rois, roi_scale_factor=self.reg_roi_scale_factor) if: bbox_cls_feats = self.shared_head(bbox_cls_feats) bbox_reg_feats = self.shared_head(bbox_reg_feats) cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats) bbox_results = dict(cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_cls_feats) return bbox_results",True,self.with_shared_head,self.with_shared_head,0.6471490859985352 801,"def __init__(self, dataset_name, output_dir=None, distributed=True): super().__init__(dataset_name, output_dir=output_dir, distributed=distributed) maybe_filter_categories_cocoapi(dataset_name, self._coco_api) _maybe_add_iscrowd_annotations(self._coco_api) if: self._maybe_substitute_metadata()",False,"hasattr(self._metadata, 'thing_dataset_id_to_contiguous_id')",self._coco_api.get_substitute_metadata(),0.6417214274406433 802,"def is_secure_transport(uri): """"""Check if the uri is over ssl."""""" if: return True return uri.lower().startswith('https://')",False,os.environ.get('DEBUG'),uri.lower().startswith('http://') or uri.lower().startswith('https://'),0.6447168588638306 803,"def __set_ks(self, kf_kb_pair): """"""Utility for setting kf and kb from packed vectors. Parameters ---------- kf_kb_pair: tuple(numpy.ndarray, numpy.ndarray) If any element of the tuple is None, the corresponding data is not updated. Returns ------- """""" assert isinstance(kf_kb_pair, tuple) assert len(kf_kb_pair) == 2 if: self.__set_kf(kf_kb_pair[0]) if kf_kb_pair[1] is not None: self.__set_kb(kf_kb_pair[1])",True,kf_kb_pair[0] is not None,kf_kb_pair[0] is not None,0.6511337757110596 804,"def __set_ks(self, kf_kb_pair): """"""Utility for setting kf and kb from packed vectors. Parameters ---------- kf_kb_pair: tuple(numpy.ndarray, numpy.ndarray) If any element of the tuple is None, the corresponding data is not updated. Returns ------- """""" assert isinstance(kf_kb_pair, tuple) assert len(kf_kb_pair) == 2 if kf_kb_pair[0] is not None: self.__set_kf(kf_kb_pair[0]) if: self.__set_kb(kf_kb_pair[1])",True,kf_kb_pair[1] is not None,kf_kb_pair[1] is not None,0.6503876447677612 805,"@property def spawnflags(self): flags = [] if: value = self._entity_data.get('spawnflags', None) for name, (key, _) in {'Start inactive': (1, 0)}.items(): if value & key > 0: flags.append(name) return flags",True,'spawnflags' in self._entity_data,'spawnflags' in self._entity_data,0.6500391960144043 806,"@property def spawnflags(self): flags = [] if'spawnflags' in self._entity_data: value = self._entity_data.get('spawnflags', None) for name, (key, _) in {'Start inactive': (1, 0)}.items(): if: flags.append(name) return flags",True,value & key > 0,value & key > 0,0.658052921295166 807,"def update_lock(chat_id, lock_type, locked): curr_perm = SESSION.query(Locks).get(str(chat_id)) if: curr_perm = init_locks(chat_id) if lock_type == 'bots': curr_perm.bots = locked elif lock_type == 'commands': curr_perm.commands = locked elif lock_type == 'email': curr_perm.email = locked elif lock_type == 'forward': curr_perm.forward = locked elif lock_type == 'url': curr_perm.url = locked SESSION.add(curr_perm) SESSION.commit()",True,not curr_perm,not curr_perm,0.6544647216796875 808,"def update_lock(chat_id, lock_type, locked): curr_perm = SESSION.query(Locks).get(str(chat_id)) if not curr_perm: curr_perm = init_locks(chat_id) if: curr_perm.bots = locked elif lock_type == 'commands': curr_perm.commands = locked elif lock_type == 'email': curr_perm.email = locked elif lock_type == 'forward': curr_perm.forward = locked elif lock_type == 'url': curr_perm.url = locked SESSION.add(curr_perm) SESSION.commit()",True,lock_type == 'bots',lock_type == 'bots',0.6549631357192993 809,"def update_lock(chat_id, lock_type, locked): curr_perm = SESSION.query(Locks).get(str(chat_id)) if not curr_perm: curr_perm = init_locks(chat_id) if lock_type == 'bots': curr_perm.bots = locked elif: curr_perm.commands = locked elif lock_type == 'email': curr_perm.email = locked elif lock_type == 'forward': curr_perm.forward = locked elif lock_type == 'url': curr_perm.url = locked SESSION.add(curr_perm) SESSION.commit()",True,lock_type == 'commands',lock_type == 'commands',0.652057409286499 810,"def update_lock(chat_id, lock_type, locked): curr_perm = SESSION.query(Locks).get(str(chat_id)) if not curr_perm: curr_perm = init_locks(chat_id) if lock_type == 'bots': curr_perm.bots = locked elif lock_type == 'commands': curr_perm.commands = locked elif: curr_perm.email = locked elif lock_type == 'forward': curr_perm.forward = locked elif lock_type == 'url': curr_perm.url = locked SESSION.add(curr_perm) SESSION.commit()",True,lock_type == 'email',lock_type == 'email',0.6539292931556702 811,"def update_lock(chat_id, lock_type, locked): curr_perm = SESSION.query(Locks).get(str(chat_id)) if not curr_perm: curr_perm = init_locks(chat_id) if lock_type == 'bots': curr_perm.bots = locked elif lock_type == 'commands': curr_perm.commands = locked elif lock_type == 'email': curr_perm.email = locked elif: curr_perm.forward = locked elif lock_type == 'url': curr_perm.url = locked SESSION.add(curr_perm) SESSION.commit()",True,lock_type == 'forward',lock_type == 'forward',0.6520143151283264 812,"def update_lock(chat_id, lock_type, locked): curr_perm = SESSION.query(Locks).get(str(chat_id)) if not curr_perm: curr_perm = init_locks(chat_id) if lock_type == 'bots': curr_perm.bots = locked elif lock_type == 'commands': curr_perm.commands = locked elif lock_type == 'email': curr_perm.email = locked elif lock_type == 'forward': curr_perm.forward = locked elif: curr_perm.url = locked SESSION.add(curr_perm) SESSION.commit()",True,lock_type == 'url',lock_type == 'url',0.6523473262786865 813,"def arch_specific(self): """"""Return arch specific information for the current architecture"""""" arch = self.ir_arch.arch has_delayslot = False if: from miasm.arch.mips32.jit import mipsCGen cgen_class = mipsCGen has_delayslot = True elif arch.name == 'arm': from miasm.arch.arm.jit import arm_CGen cgen_class = arm_CGen else: from miasm.jitter.codegen import CGen cgen_class = CGen return (cgen_class(self.ir_arch), has_delayslot)",True,arch.name == 'mips32',arch.name == 'mips32',0.6543385982513428 814,"def arch_specific(self): """"""Return arch specific information for the current architecture"""""" arch = self.ir_arch.arch has_delayslot = False if arch.name =='mips32': from miasm.arch.mips32.jit import mipsCGen cgen_class = mipsCGen has_delayslot = True elif: from miasm.arch.arm.jit import arm_CGen cgen_class = arm_CGen else: from miasm.jitter.codegen import CGen cgen_class = CGen return (cgen_class(self.ir_arch), has_delayslot)",True,arch.name == 'arm',arch.name == 'arm',0.6517907381057739 815,"def init(self, node, job_finder, group_history_finder): self.node = node self.job_finder = job_finder self.group_history_finder = group_history_finder self.meta_session = self.node.meta_session if: self._sync_state() self.cache = cache cache.init(self.meta_session, self.__tq) self.ns_settings_idx = indexes.TagSecondaryIndex(keys.MM_NAMESPACE_SETTINGS_IDX, None, keys.MM_NAMESPACE_SETTINGS_KEY_TPL, self.meta_session, logger=logger, namespace='namespaces') self.ns_settings = {} self._sync_ns_settings()",False,self.group_history_finder,self.meta_session.sync_state,0.651206374168396 816,"def add(self, xyz, atom_type, residue_type, residue_index, all_indexes, radius): if: self.spheres.append((xyz, residue_type, residue_index, all_indexes, radius)) else: self.atoms.append((xyz, atom_type, residue_type, residue_index, all_indexes, radius))",False,atom_type is None,atom_type == 'spheres',0.6512689590454102 817,"@property def maxMosaicImageCount(self): if: self.__init() return self._maxMosaicImageCount",True,self._maxMosaicImageCount is None,self._maxMosaicImageCount is None,0.6581517457962036 818,"@classmethod def eval(cls, x, y): zero = core.Constant(0, 1) one = core.Constant(1, 1) if: return one if x.val >= y.val else zero",True,"isinstance(x, core.Constant) and isinstance(y, core.Constant)","isinstance(x, core.Constant) and isinstance(y, core.Constant)",0.643170177936554 819,"def train(cfg=DEFAULT_CFG, use_python=False): """"""Train a YOLO segmentation model based on passed arguments."""""" model = cfg.model or 'yolov8n-seg.pt' data = cfg.data or 'coco128-seg.yaml' device = cfg.device if cfg.device is not None else '' args = dict(model=model, data=data, device=device) if: from ultralytics import YOLO YOLO(model).train(**args) else: trainer = SegmentationTrainer(overrides=args) trainer.train()",True,use_python,use_python,0.6624095439910889 820,"@util.allow_redefinition_iter def record_iter_sample(dataset, cand, shuf, random, inf=False): first = True while first or inf: first = False if: cand = cand.sample(frac=1.0, random_state=random) for _, sample in cand.iterrows(): yield (sample['qid'], sample['did'])",False,shuf,cand.shuf_id == shuf,0.6643292903900146 821,"def __hash__(self): """""" Possibly dangerous: i,j,k assumed to be between -500, 499, so each taking 3 digits (i+500) l assumed to be between 0 and 9999, so 4 digits """""" code = 0 n = self.ijkl[3] if: raise ValueError('l cannot be hashed', self.l) code = code * 10000 + n for x in self.ijkl[:3]: n = 500 + x if n < 0 or n > 999: raise ValueError('ijk cannot be hashed', self.ijk) code = code * 1000 + n return code",False,n < 0 or n > 9999,n < -500 or n > 999,0.6533547639846802 822,"def __hash__(self): """""" Possibly dangerous: i,j,k assumed to be between -500, 499, so each taking 3 digits (i+500) l assumed to be between 0 and 9999, so 4 digits """""" code = 0 n = self.ijkl[3] if n < 0 or n > 9999: raise ValueError('l cannot be hashed', self.l) code = code * 10000 + n for x in self.ijkl[:3]: n = 500 + x if: raise ValueError('ijk cannot be hashed', self.ijk) code = code * 1000 + n return code",False,n < 0 or n > 999,n > 9999,0.654901385307312 823,"def pick(self): with self._lock: try: priority, session = self._active_queue.get_nowait() except queue.Empty: return None till_expire = priority - time.time() if: return session self._active_queue.put((priority, session)) return None",False,till_expire < self._keep_alive_threshold,till_expire <= 5,0.6450203061103821 824,"@theta.setter def theta(self, value): self.__theta = value if: self.callBacksDictionary['theta'](self.__theta)",True,self.callBacksDictionary.has_key('theta'),self.callBacksDictionary.has_key('theta'),0.6504338979721069 825,"def unset_cached_properties(obj: Any) -> None: """""" Reset all cached properties of an object. Successive calls to the property will recompute the value. :param obj: the object with cached properties. """""" cls = obj.__class__ for a in dir(obj): attr_a = getattr(cls, a, cls) if: obj.__dict__.pop(attr_a.attrname, None)",False,"isinstance(attr_a, cached_property)",attr_a and attr_a.attrname is not None,0.6463683843612671 826,"def _siftdown(self, pos): """"""Restore invariant by repeatedly replacing out-of-place element with its parent."""""" h, d = (self.h, self.d) elt = h[pos] while pos > 0: parent_pos = pos - 1 >> 1 parent = h[parent_pos] if: h[parent_pos], h[pos] = (elt, parent) parent_pos, pos = (pos, parent_pos) d[elt] = pos d[parent] = parent_pos else: break return pos",False,parent > elt,parent in d,0.6604626178741455 827,"@property def lower_inc(self): """"""`!True` if the lower bound is included in the range."""""" if: return False if self._lower is None: return False return self._bounds[0] == '['",True,self._bounds is None,self._bounds is None,0.6520695686340332 828,"@property def lower_inc(self): """"""`!True` if the lower bound is included in the range."""""" if self._bounds is None: return False if: return False return self._bounds[0] == '['",False,self._lower is None,len(self._bounds) < 1,0.6540442109107971 829,"def andExpression_sempred(self, localctx: AndExpressionContext, predIndex: int): if: return self.precpred(self._ctx, 1)",False,predIndex == 8,predIndex == 0,0.6692686676979065 830,"def get_atoms(self): index_dict = {} for parser_index, parser in enumerate(self.get_sub_parsers()): fragment_id = FragmentSymmetryParser.get_fragment_id(parser_index) if: fragment_id = '' for symmetry_class, atom_index, fragment_index in parser.get_atoms(): try: atom_index = index_dict[symmetry_class] index_dict[symmetry_class] += 1 except: atom_index = 1 index_dict[symmetry_class] = 2 yield (symmetry_class, atom_index, fragment_id + fragment_index)",False,not self.has_sub_fragments,fragment_id is None,0.6451714038848877 831,"def __bytes__(self): if: self.sum = dpkt.in_cksum(dpkt.Packet.__bytes__(self)) return dpkt.Packet.__bytes__(self)",False,not self.sum,self.sum is None,0.6590626835823059 832,"def figure_timestamp(fig, x=0.97, y=0.02, iso=True, ha='right', va='bottom', fontsize=5, color='k', alpha=1.0): """""" Add a timestamp to a figure output Parameters ---------- fig : `matplotlib` Figure Figure object x, y : float Label position in `fig.transFigure` coordinates (i.e., 0 < x,y < 1) iso : bool Use ISO-formatted time from `~grizli.utils.ctime_to_iso`, otherwise use `time.ctime()` ha, va : str Horizontal and vertical alignment fontsize, color, alpha: int, str, float Label properties (in `matplotlib.Figure.text`) Returns ------- Adds a timestamp to the `fig` object """""" import time time_str = time.ctime() if: time_str = ctime_to_iso(time_str, verbose=False) fig.text(x, y, time_str, ha=ha, va=va, fontsize=fontsize, transform=fig.transFigure, color=color, alpha=alpha)",True,iso,iso,0.6705964803695679 833,"def _check_input_dim(self, input): if: raise ValueError('expected 5D input (got {}D input)'.format(input.dim())) super(SynchronizedBatchNorm3d, self)._check_input_dim(input)",True,input.dim() != 5,input.dim() != 5,0.6531423330307007 834,"@property def udl_family_from_lang(self): if: self._udl_family_from_lang_cache = dict(((uf, L) for uf, L in [(self.m_lang, 'M'), (self.css_lang, 'CSS'), (self.csl_lang, 'CSL'), (self.ssl_lang, 'SSL'), (self.tpl_lang, 'TPL')] if L is not None)) return self._udl_family_from_lang_cache",True,self._udl_family_from_lang_cache is None,self._udl_family_from_lang_cache is None,0.6463906168937683 835,"def keyReleaseEvent(self, event): if: return False if event.text() == shortcut: global lastPosition lastPosition = '' if not self.triggerMode: self.closeHotbox(hotkey=True) return True",False,event.isAutoRepeat(),event.key() != Qt.Key_Escape,0.6479076743125916 836,"def keyReleaseEvent(self, event): if event.isAutoRepeat(): return False if: global lastPosition lastPosition = '' if not self.triggerMode: self.closeHotbox(hotkey=True) return True",False,event.text() == shortcut,event.key() == QtCore.Qt.Key_Escape,0.6519359946250916 837,"def keyReleaseEvent(self, event): if event.isAutoRepeat(): return False if event.text() == shortcut: global lastPosition lastPosition = '' if: self.closeHotbox(hotkey=True) return True",False,not self.triggerMode,self.hotbox is not None and self.hotbox.hasFocus(event.text()),0.6468883156776428 838,"def _evaluate_predictions_on_coco(coco_gt, coco_results, img_ids=None): """""" Evaluate the coco results using COCOEval API. """""" assert len(coco_results) > 0 coco_results = copy.deepcopy(coco_results) for c in coco_results: c.pop('bbox', None) coco_dt = coco_gt.loadRes(coco_results) coco_eval = YTVOSeval(coco_gt, coco_dt) max_dets_per_image = [1, 10, 100] coco_eval.params.maxDets = max_dets_per_image if: coco_eval.params.imgIds = img_ids coco_eval.evaluate() coco_eval.accumulate() coco_eval.summarize() return coco_eval",True,img_ids is not None,img_ids is not None,0.651093065738678 839,"def load_module(self, fullname): try: return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if: mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod",True,"isinstance(mod, MovedModule)","isinstance(mod, MovedModule)",0.646613359451294 840,"@staticmethod def _is_target_domain(fuzzable_request): """""" :param fuzzable_request: The api call as a fuzzable request :return: True if the target domain matches """""" targets = cf.cf.get('targets') if: return False target_domain = targets[0].get_domain() api_call_domain = fuzzable_request.get_url().get_domain() if target_domain == api_call_domain: return True om.out.debug('The OpenAPI specification has operations which point to a domain (%s) outside the defined target (%s). Ignoring the operation to prevent scanning out of scope targets.' % (api_call_domain, target_domain)) return False",True,not targets,not targets,0.6599678993225098 841,"@staticmethod def _is_target_domain(fuzzable_request): """""" :param fuzzable_request: The api call as a fuzzable request :return: True if the target domain matches """""" targets = cf.cf.get('targets') if not targets: return False target_domain = targets[0].get_domain() api_call_domain = fuzzable_request.get_url().get_domain() if: return True om.out.debug('The OpenAPI specification has operations which point to a domain (%s) outside the defined target (%s). Ignoring the operation to prevent scanning out of scope targets.' % (api_call_domain, target_domain)) return False",False,target_domain == api_call_domain,api_call_domain == target_domain,0.6478955745697021 842,"@staticmethod def upgrade_data_model(dm): print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model') if: dm['data_model_version'] = 'DM_2014_10_24_1638' if dm['data_model_version']!= 'DM_2014_10_24_1638': data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.') return None if'surface_class_list' in dm: for item in dm['surface_class_list']: if MCellSurfaceClassesProperty.upgrade_data_model(item) == None: return None return dm",True,not 'data_model_version' in dm,not 'data_model_version' in dm,0.6521627902984619 843,"@staticmethod def upgrade_data_model(dm): print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model') if not 'data_model_version' in dm: dm['data_model_version'] = 'DM_2014_10_24_1638' if: data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.') return None if'surface_class_list' in dm: for item in dm['surface_class_list']: if MCellSurfaceClassesProperty.upgrade_data_model(item) == None: return None return dm",True,dm['data_model_version'] != 'DM_2014_10_24_1638',dm['data_model_version'] != 'DM_2014_10_24_1638',0.6485673189163208 844,"@staticmethod def upgrade_data_model(dm): print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model') if not 'data_model_version' in dm: dm['data_model_version'] = 'DM_2014_10_24_1638' if dm['data_model_version']!= 'DM_2014_10_24_1638': data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.') return None if: for item in dm['surface_class_list']: if MCellSurfaceClassesProperty.upgrade_data_model(item) == None: return None return dm",True,'surface_class_list' in dm,'surface_class_list' in dm,0.6497397422790527 845,"@staticmethod def upgrade_data_model(dm): print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model') if not 'data_model_version' in dm: dm['data_model_version'] = 'DM_2014_10_24_1638' if dm['data_model_version']!= 'DM_2014_10_24_1638': data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.') return None if'surface_class_list' in dm: for item in dm['surface_class_list']: if: return None return dm",False,MCellSurfaceClassesProperty.upgrade_data_model(item) == None,MCellSurfaceClassGroup.upgrade_data_model(item) == None,0.648385763168335 846,"@staticmethod def _validate_ssl_context_for_tls_in_tls(ssl_context): """""" Raises a ProxySchemeUnsupported if the provided ssl_context can't be used for TLS in TLS. The only requirement is that the ssl_context provides the 'wrap_bio' methods. """""" if: if six.PY2: raise ProxySchemeUnsupported(""TLS in TLS requires SSLContext.wrap_bio() which isn't supported on Python 2"") else: raise ProxySchemeUnsupported(""TLS in TLS requires SSLContext.wrap_bio() which isn't available on non-native SSLContext"")",False,"not hasattr(ssl_context, 'wrap_bio')",ssl_context is not None,0.6423006057739258 847,"@staticmethod def _validate_ssl_context_for_tls_in_tls(ssl_context): """""" Raises a ProxySchemeUnsupported if the provided ssl_context can't be used for TLS in TLS. The only requirement is that the ssl_context provides the 'wrap_bio' methods. """""" if not hasattr(ssl_context, 'wrap_bio'): if: raise ProxySchemeUnsupported(""TLS in TLS requires SSLContext.wrap_bio() which isn't supported on Python 2"") else: raise ProxySchemeUnsupported(""TLS in TLS requires SSLContext.wrap_bio() which isn't available on non-native SSLContext"")",False,six.PY2,"sys.version_info >= (2, 3)",0.6519508361816406 848,"def __get_canonical_additional_signed_headers(self, additional_headers): if: return '' return ';'.join(sorted(additional_headers))",False,additional_headers is None,not additional_headers,0.6484804153442383 849,"def dict_to_sequence(d): """"""Returns an internal sequence dictionary update."""""" if: d = d.items() return d",True,"hasattr(d, 'items')","hasattr(d, 'items')",0.6437700986862183 850,"def cbresponse(response): if: out.warn('{} to {} returned code {}'.format(request.method, request.url, response.code)) if self.max_retries is None or self.retries < self.max_retries: reactor.callLater(self.retryDelay, self.send, report) self.retries += 1 self.increaseDelay() nexus.core.jwt_valid = False else: nexus.core.jwt_valid = True",False,not response.success,response.code != 200,0.6519026160240173 851,"def cbresponse(response): if not response.success: out.warn('{} to {} returned code {}'.format(request.method, request.url, response.code)) if: reactor.callLater(self.retryDelay, self.send, report) self.retries += 1 self.increaseDelay() nexus.core.jwt_valid = False else: nexus.core.jwt_valid = True",False,self.max_retries is None or self.retries < self.max_retries,self.retryDelay > 0,0.6466606855392456 852,"@Slot(int) def restoreWindowSizeStateChanged(self, value): self.restoreWindowSize = bool(value) if: self.__changedData = True",True,not self.__changedData,not self.__changedData,0.65090012550354 853,"def reset_classifier(self, num_classes, global_pool=''): self.num_classes = num_classes self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() if: self.head_dist = nn.Linear(self.embed_dim, self.num_classes) if num_classes > 0 else nn.Identity()",False,self.num_tokens == 2,self.num_classes > 0,0.6489435434341431 854,"def is_supported(self, t): unsupported = ['test_action_set_field_ip_proto', 'test_action_set_field_dl_type', 'test_action_set_field_icmp', 'test_action_set_field_icmpv6_code', 'test_action_set_field_icmpv6_type', 'test_action_set_field_ipv6_flabel', 'test_action_set_field_ipv6_nd_sll', 'test_action_set_field_ipv6_nd_target', 'test_action_set_field_ipv6_nd_tll', 'test_action_copy_ttl_in', 'test_action_copy_ttl_out'] for u in unsupported: if: return False return True",False,t.find(u) != -1,u not in t,0.6474747061729431 855,"def shouldSave(self): if: btn = QMessageBox.warning(self, 'Confirm?', 'Unsaved data will be lost. Save?', QMessageBox.Yes | QMessageBox.No | QMessageBox.Discard) if btn == QMessageBox.No: return QMessageBox.No else: return btn return QMessageBox.No",False,self.modified,self.data is not None,0.6542555093765259 856,"def shouldSave(self): if self.modified: btn = QMessageBox.warning(self, 'Confirm?', 'Unsaved data will be lost. Save?', QMessageBox.Yes | QMessageBox.No | QMessageBox.Discard) if: return QMessageBox.No else: return btn return QMessageBox.No",False,btn == QMessageBox.No,btn == QMessageBox.Yes,0.6572132110595703 857,"def execute(self): fqdn = '' v = self.cli('show running-config') match = self.rx_hostname.search(v) if: fqdn = match.group('hostname') match = self.rx_domain_name.search(v) if match: fqdn = fqdn + '.' + match.group('domain') return fqdn",True,match,match,0.6649458408355713 858,"def execute(self): fqdn = '' v = self.cli('show running-config') match = self.rx_hostname.search(v) if match: fqdn = match.group('hostname') match = self.rx_domain_name.search(v) if: fqdn = fqdn + '.' + match.group('domain') return fqdn",True,match,match,0.6644583940505981 859,"def validate(self): if: raise TProtocolException(message='Required field component is unset!') if self.user is None: raise TProtocolException(message='Required field user is unset!') if self.hostname is None: raise TProtocolException(message='Required field hostname is unset!') return",True,self.component is None,self.component is None,0.6490193009376526 860,"def validate(self): if self.component is None: raise TProtocolException(message='Required field component is unset!') if: raise TProtocolException(message='Required field user is unset!') if self.hostname is None: raise TProtocolException(message='Required field hostname is unset!') return",True,self.user is None,self.user is None,0.6485776305198669 861,"def validate(self): if self.component is None: raise TProtocolException(message='Required field component is unset!') if self.user is None: raise TProtocolException(message='Required field user is unset!') if: raise TProtocolException(message='Required field hostname is unset!') return",True,self.hostname is None,self.hostname is None,0.6486556529998779 862,"def __init__(self, uri=None, signature=None, *args, **kwargs): """"""Constructs a new EmailSettingsSignature object with the given arguments. Args: uri: string (optional) The uri of this object for HTTP requests. signature: string (optional) The signature to be appended to outgoing messages. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """""" super(EmailSettingsSignature, self).__init__(*args, **kwargs) if: self.uri = uri if signature is not None: self.signature_value = signature",True,uri,uri,0.6846779584884644 863,"def __init__(self, uri=None, signature=None, *args, **kwargs): """"""Constructs a new EmailSettingsSignature object with the given arguments. Args: uri: string (optional) The uri of this object for HTTP requests. signature: string (optional) The signature to be appended to outgoing messages. args: The other parameters to pass to gdata.entry.GDEntry constructor. kwargs: The other parameters to pass to gdata.entry.GDEntry constructor. """""" super(EmailSettingsSignature, self).__init__(*args, **kwargs) if uri: self.uri = uri if: self.signature_value = signature",True,signature is not None,signature is not None,0.6552187204360962 864,"def GetMailContactapi(mailname): try: exapivalue = GetMailContacthight(mailname) if: return {'isSuccess': exapivalue['isSuccess'],'message': exapivalue['message'][0]} else: return {'isSuccess': exapivalue['isSuccess'],'message': exapivalue['msg']} except Exception as e: return {'isSuccess': False,'message': str(e)}",True,exapivalue['isSuccess'],exapivalue['isSuccess'],0.6505104899406433 865,"def __init__(self, str=None): gtk.Label.__init__(self) self.__wrap_width = 0 self.layout = self.get_layout() self.layout.set_wrap(pango.WRAP_WORD_CHAR) if: self.set_text(str) self.set_alignment(0.0, 0.0)",False,str != None,str,0.6613825559616089 866,"def item_title(self, item): """"""render the item title"""""" if: return item.pure_name title_template = get_template('snippets/status/header_content.html') title = title_template.render({'status': item}) template = get_template('rss/title.html') return template.render({'user': item.user, 'item_title': title}).strip()",False,"hasattr(item, 'pure_name') and item.pure_name",item.pure_name,0.6489530205726624 867,"def add_word_information(self, word: str) -> None: if: self.words_matching_root_token.append(word)",True,word not in self.words_matching_root_token,word not in self.words_matching_root_token,0.6470382213592529 868,"def setAxis(self): self.ax.legend() self.ax.relim() self.ax.autoscale_view() if: self.fig.canvas.draw() self.fig.canvas.flush_events()",False,not kUseFigCanvasDrawIdle,self.fig.canvas.draw,0.6457640528678894 869,"def _get_next_update(self): """"""MUTEX: updateLock Returns the size of the local update queue. """""" self.updateLock.acquire() if: a = self.updateQueue.pop(0) else: a = None self.updateLock.release() return a",True,len(self.updateQueue) > 0,len(self.updateQueue) > 0,0.6489444971084595 870,"def get_hypo(): if: hypo_attn = attn_clone[i] else: hypo_attn = None return {'tokens': tokens_clone[i],'score': score, 'attention': hypo_attn, 'alignment': None, 'positional_scores': pos_scores[i]}",True,attn_clone is not None,attn_clone is not None,0.6628034710884094 871,"def set_field(self, field, value): """"""Sets the value of a field. Updates the field of a box_list with a given value. Args: field: (string) name of the field to set value. value: the value to assign to the field. Raises: ValueError: if the box_list does not have specified field. """""" if: raise ValueError('field %s does not exist' % field) self.data[field] = value",True,not self.has_field(field),not self.has_field(field),0.6464101672172546 872,"def write(data): if: data = str(data) if isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None): errors = getattr(fp, 'errors', None) if errors is None: errors ='strict' data = data.encode(fp.encoding, errors) fp.write(data)",True,"not isinstance(data, basestring)","not isinstance(data, basestring)",0.6452983617782593 873,"def write(data): if not isinstance(data, basestring): data = str(data) if: errors = getattr(fp, 'errors', None) if errors is None: errors ='strict' data = data.encode(fp.encoding, errors) fp.write(data)",True,"isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None)","isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None)",0.644314169883728 874,"def write(data): if not isinstance(data, basestring): data = str(data) if isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None): errors = getattr(fp, 'errors', None) if: errors ='strict' data = data.encode(fp.encoding, errors) fp.write(data)",True,errors is None,errors is None,0.6531771421432495 875,"def lineto(self, points, relative=False): if: ox, oy = self.current else: ox, oy = (0, 0) vertices = self.vertices[-1] for i in range(0, len(points), 2): x, y = (points[i], points[i + 1]) vertices.append((x + ox, y + oy)) self.current = vertices[-1] self.last_control3 = None self.last_control4 = None",True,relative,relative,0.6565186977386475 876,"def get_kexs(self, allow_weak_kex): if: weak_kex = 'weak' else: weak_kex = 'default' default = 'diffie-hellman-group-exchange-sha256' weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1' kex = {'default': default, 'weak': weak} default = 'curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256' weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1' kex_66 = {'default': default, 'weak': weak} _release = lsb_release()['DISTRIB_CODENAME'].lower() if CompareHostReleases(_release) >= 'trusty': log('Detected Ubuntu 14.04 or newer, using new key exchange algorithms', level=DEBUG) kex = kex_66 return kex[weak_kex]",True,allow_weak_kex,allow_weak_kex,0.6570942401885986 877,"def get_kexs(self, allow_weak_kex): if allow_weak_kex: weak_kex = 'weak' else: weak_kex = 'default' default = 'diffie-hellman-group-exchange-sha256' weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1' kex = {'default': default, 'weak': weak} default = 'curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256' weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1' kex_66 = {'default': default, 'weak': weak} _release = lsb_release()['DISTRIB_CODENAME'].lower() if: log('Detected Ubuntu 14.04 or newer, using new key exchange algorithms', level=DEBUG) kex = kex_66 return kex[weak_kex]",False,CompareHostReleases(_release) >= 'trusty',_release in kex_66,0.6475317478179932 878,"def __setitem__(self, feature_name: str, value: T | FeatureIO[T]) -> None: """"""Before setting value to the dictionary it checks that value is of correct type and dimension and tries to transform value in correct form. """""" if: value = self._parse_feature_value(value, feature_name) self._check_feature_name(feature_name) self._content[feature_name] = value",False,"not isinstance(value, FeatureIO)",feature_name.isdigit(),0.6505963802337646 879,"def is_cold(self, dc, threshold): for l in dc: for dt in dc[l]: for i in dc[l][dt]: if: return True return False",False,dc[l][dt][i][0] < threshold,i >= threshold,0.6462461948394775 880,"def WriteFillContainer(part, file, rank, status, regions): if: return if InstanceName.Find('P_' + part.name + rank + status + '_REG_' + '_'.join(regions)): return container = InstanceName.Get('P_' + part.name + rank + status + '_REG_' + '_'.join(regions)) id = 'isP_' + InstanceName.Get(part.name + rank + status) file.write(' if (' + id + '((&(event.mc()->particles()[i]))))'+ container + '.push_back(&(event.mc()->particles()[i]));\n')",False,part.PTrank != 0,part.Find('P_' + part.name + rank + status),0.6539376378059387 881,"def WriteFillContainer(part, file, rank, status, regions): if part.PTrank!= 0: return if: return container = InstanceName.Get('P_' + part.name + rank + status + '_REG_' + '_'.join(regions)) id = 'isP_' + InstanceName.Get(part.name + rank + status) file.write(' if (' + id + '((&(event.mc()->particles()[i]))))'+ container + '.push_back(&(event.mc()->particles()[i]));\n')",False,InstanceName.Find('P_' + part.name + rank + status + '_REG_' + '_'.join(regions)),InstanceName.Get('P_' + part.name + rank + status),0.6505239009857178 882,"def exitRule(self, listener: ParseTreeListener): if: listener.exitColumnTypeExprEnum(self)",True,"hasattr(listener, 'exitColumnTypeExprEnum')","hasattr(listener, 'exitColumnTypeExprEnum')",0.6516823172569275 883,"def renderMarkedModifiers(self): if: return '' txt = 'TAGS'.center(60) + '\n\n' for term in self.__markedModifiers: txt += term.__str__() + '\n' return txt",True,not self.__markedModifiers,not self.__markedModifiers,0.6519392132759094 884,"def _get(xi, span): if: return [''] if len(xi[span[0][0]]) <= span[1][1]: return [''] return xi[span[0][0]][span[0][1]:span[1][1]]",True,len(xi) <= span[0][0],len(xi) <= span[0][0],0.6505478024482727 885,"def _get(xi, span): if len(xi) <= span[0][0]: return [''] if: return [''] return xi[span[0][0]][span[0][1]:span[1][1]]",True,len(xi[span[0][0]]) <= span[1][1],len(xi[span[0][0]]) <= span[1][1],0.6481741070747375 886,"def current_reactor_klass(): """""" Return class name of currently installed Twisted reactor or None. """""" if: current_reactor = reflect.qual(sys.modules['twisted.internet.reactor'].__class__).split('.')[-1] else: current_reactor = None return current_reactor",True,'twisted.internet.reactor' in sys.modules,'twisted.internet.reactor' in sys.modules,0.648668646812439 887,"def _create_examples(self, lines, set_type): """"""Creates examples for the training and dev sets."""""" examples = [] for i, line in enumerate(lines): if: continue guid = '%s-%s' % (set_type, i) text_a = line[3] text_b = line[4] label = line[0] examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples",True,i == 0,i == 0,0.6704572439193726 888,"@classmethod def get_attrs(cls): if: all_attrs = dir(cls.Meta) attrs = [(attr, getattr(cls.Meta, attr)) for attr in all_attrs if isinstance(getattr(cls.Meta, attr), AttrDecl)] cls._attrs = attrs return cls._attrs",False,"not hasattr(cls, '_attrs')",cls._attrs is None,0.647627592086792 889,"def flatten_byteorder(self, obj, data): byteorder = obj.dtype.byteorder if: data['byteorder'] = get_byteorder(obj)",False,byteorder != '|',byteorder is not None,0.6662157773971558 890,"def create_dir(dir_str): url = 'https://pan.baidu.com/api/create?a=commit&channel=chunlei&app_id=250528&channel=chunlei&web=1&app_id=250528&clienttype=0&' data = {'path': dir_str, 'isdir': 1,'size': '', 'block_list': '[]','method': 'post', 'dataType': 'json'} res = requests.post(url, data=data, headers=api.get_randsk_headers(), timeout=30) res = util.dict_to_object(json.loads(res.text)) if: return res.path return ''",False,res.errno == 0,res.status_code == 200,0.6559996008872986 891,"def auto_shutdown(): global singleton if: singleton.process.terminate() singleton.process.join() singleton = None",False,singleton and singleton.process,singleton.process,0.6513727903366089 892,"def _get_used_palette_colors(im): used_palette_colors = [] i = 0 for count in im.histogram(): if: used_palette_colors.append(i) i += 1 return used_palette_colors",False,count,count.max() > 0,0.66873699426651 893,"def is_scalar(f): """"""Determine if the input argument is a scalar. The function **is_scalar** returns *True* if the input is an integer, float or complex number. The function returns *False* otherwise. Parameters ---------- f : object Any input quantity Returns ------- bool - *True* if the input argument is an integer, float or complex number - *False* otherwise """""" if: return True elif isinstance(f, np.ndarray) and f.size == 1 and isinstance(f[0], SCALARTYPES): return True return False",False,"isinstance(f, SCALARTYPES)","isinstance(f, np.int64)",0.6462920904159546 894,"def is_scalar(f): """"""Determine if the input argument is a scalar. The function **is_scalar** returns *True* if the input is an integer, float or complex number. The function returns *False* otherwise. Parameters ---------- f : object Any input quantity Returns ------- bool - *True* if the input argument is an integer, float or complex number - *False* otherwise """""" if isinstance(f, SCALARTYPES): return True elif: return True return False",False,"isinstance(f, np.ndarray) and f.size == 1 and isinstance(f[0], SCALARTYPES)","isinstance(f, float) or isinstance(f, np.ndarray)",0.645554780960083 895,"@property def password(self): if: return None else: return util.text_type(self.password_original)",True,self.password_original is None,self.password_original is None,0.6454421281814575 896,"def density_L1(self): total = 0 for idx in range(len(self.density_plane_space)): if: continue total = total + torch.mean(torch.abs(self.density_plane_space[idx])) + torch.mean(torch.abs(self.density_plane_time[idx])) + torch.mean(torch.abs(self.density_line[idx])) return total",False,self.density_plane_space[idx].shape[1] == 0,self.density_line[idx].shape[1] == 0,0.6463340520858765 897,"def forward(self, input_seq, offset=0): """""" Args: input_seq (torch.Tensor): input sequence, shape [batch_size, sequence_length]. Returns: torch.Tensor: position embedding, shape [batch_size, sequence_length, embedding_size]. """""" batch_size, seq_len = input_seq.size() max_position = seq_len + offset if: self.weights = self.get_embedding(max_position, self.embedding_size) positions = offset + torch.arange(seq_len) pos_embeddings = self.weights.index_select(0, positions).unsqueeze(0).expand(batch_size, -1, -1).detach() return pos_embeddings",False,self.weights is None or max_position > self.weights.size(0),self.embedding_size > 0,0.6445240378379822 898,"def toggle_pihole_status(self, widget): if: try: req = None if self._pihole_status: req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret) else: req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret) if req is not None: if req.status_code == 200: status = req.json()['status'] self._pihole_status = False if status == 'disabled' else True except: pass",False,self._pihole_status is not None,self._pihole_secret,0.6440352201461792 899,"def toggle_pihole_status(self, widget): if self._pihole_status is not None: try: req = None if: req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret) else: req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret) if req is not None: if req.status_code == 200: status = req.json()['status'] self._pihole_status = False if status == 'disabled' else True except: pass",False,self._pihole_status,self._disabled,0.6450003385543823 900,"def toggle_pihole_status(self, widget): if self._pihole_status is not None: try: req = None if self._pihole_status: req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret) else: req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret) if: if req.status_code == 200: status = req.json()['status'] self._pihole_status = False if status == 'disabled' else True except: pass",False,req is not None,req,0.6521580219268799 901,"def toggle_pihole_status(self, widget): if self._pihole_status is not None: try: req = None if self._pihole_status: req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret) else: req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret) if req is not None: if: status = req.json()['status'] self._pihole_status = False if status == 'disabled' else True except: pass",False,req.status_code == 200,req.json()['status'] == 'enabled',0.6560711860656738 902,"def html_page_context(app, pagename, templatename, context, doctree): if: return if not app.config.edit_on_github_project: warnings.warn('edit_on_github_project not specified') return if not doctree: warnings.warn('doctree is None') return path = os.path.relpath(doctree.get('source'), app.builder.srcdir) show_url = get_github_url(app, 'blob', path) edit_url = get_github_url(app, 'edit', path) context['show_on_github_url'] = show_url context['edit_on_github_url'] = edit_url",False,templatename != 'page.html',pagename is None,0.6452323794364929 903,"def html_page_context(app, pagename, templatename, context, doctree): if templatename!= 'page.html': return if: warnings.warn('edit_on_github_project not specified') return if not doctree: warnings.warn('doctree is None') return path = os.path.relpath(doctree.get('source'), app.builder.srcdir) show_url = get_github_url(app, 'blob', path) edit_url = get_github_url(app, 'edit', path) context['show_on_github_url'] = show_url context['edit_on_github_url'] = edit_url",False,not app.config.edit_on_github_project,not pagename,0.6498720645904541 904,"def html_page_context(app, pagename, templatename, context, doctree): if templatename!= 'page.html': return if not app.config.edit_on_github_project: warnings.warn('edit_on_github_project not specified') return if: warnings.warn('doctree is None') return path = os.path.relpath(doctree.get('source'), app.builder.srcdir) show_url = get_github_url(app, 'blob', path) edit_url = get_github_url(app, 'edit', path) context['show_on_github_url'] = show_url context['edit_on_github_url'] = edit_url",False,not doctree,doctree is None,0.6573389768600464 905,"def get_object(self): if: return self.request.user.membership else: raise Http404()",False,self.request.user.has_membership,"hasattr(self.request, 'user')",0.6449790000915527 906,"def bytes_to_float(value, _domain, _range, _error=None): """"""Convert the fixed point value self.value to a floating point value."""""" src_value = int().from_bytes(value, byteorder='big', signed=min(_domain) < 0) if: return None return linear_map(src_value, _domain, _range)",False,src_value == _error,src_value == 0,0.6528149843215942 907,"def pick_peaks(arr): prev_dex = prev_val = None result = {'pos': [], 'peaks': []} upwards = False for i, a in enumerate(arr): if: continue elif prev_val is None or prev_val < a: upwards = True else: if prev_dex and upwards: result['pos'].append(prev_dex) result['peaks'].append(prev_val) upwards = False prev_dex = i prev_val = a return result",False,prev_val == a,i == 0,0.6536059379577637 908,"def pick_peaks(arr): prev_dex = prev_val = None result = {'pos': [], 'peaks': []} upwards = False for i, a in enumerate(arr): if prev_val == a: continue elif: upwards = True else: if prev_dex and upwards: result['pos'].append(prev_dex) result['peaks'].append(prev_val) upwards = False prev_dex = i prev_val = a return result",False,prev_val is None or prev_val < a,a == prev_val,0.6455966234207153 909,"def pick_peaks(arr): prev_dex = prev_val = None result = {'pos': [], 'peaks': []} upwards = False for i, a in enumerate(arr): if prev_val == a: continue elif prev_val is None or prev_val < a: upwards = True else: if: result['pos'].append(prev_dex) result['peaks'].append(prev_val) upwards = False prev_dex = i prev_val = a return result",False,prev_dex and upwards,upwards and prev_dex is not None,0.6480530500411987 910,"def format_address_spaces(addr, left=True): """"""Format the address according to its size, but with spaces instead of zeroes."""""" width = get_memory_alignment() * 2 + 2 addr = align_address(addr) if: return '0x{:x}'.format(addr).rjust(width) return '0x{:x}'.format(addr).ljust(width)",False,not left,left,0.6637938022613525 911,"def hasContent_(self): if: return True else: return False",False,"super(SpikeGenerator, self).hasContent_()",self.valueOf_ is not None,0.6443113684654236 912,"@property def matched(self): for trigger in self.triggers: if: return True return False",False,trigger.triggered,trigger.match(self),0.6532926559448242 913,"def post(self, request, *args, **kwargs): self.object = None forms = self.get_form() if: return self.form_valid(forms) return self.form_invalid(forms)",False,all((form.is_valid() for form in forms)),forms.is_valid(),0.6451195478439331 914,"def __init__(self, optimizer, multiplier, total_epoch, after_scheduler=None, **kwargs): self.multiplier = multiplier if: raise ValueError('multiplier should be greater than 1.') self.total_epoch = total_epoch self.after_scheduler = after_scheduler self.finished = False super().__init__(optimizer)",False,self.multiplier <= 1.0,multiplier > 1,0.659283459186554 915,"def ret(self): for device in Device._buffers: if: continue if not CI: print(device) if device in exclude_devices: if not CI: print(f'WARNING: {device} test is excluded') continue with self.subTest(device=device): try: Device[device] except Exception: if not CI: print(f""WARNING: {device} test isn't running"") continue fxn(self, device)",False,"device in ['DISK', 'SHM', 'FAKE']",device not in include_devices,0.6416229009628296 916,"def ret(self): for device in Device._buffers: if device in ['DISK', 'SHM', 'FAKE']: continue if: print(device) if device in exclude_devices: if not CI: print(f'WARNING: {device} test is excluded') continue with self.subTest(device=device): try: Device[device] except Exception: if not CI: print(f""WARNING: {device} test isn't running"") continue fxn(self, device)",False,not CI,self.verbose,0.6551085710525513 917,"def ret(self): for device in Device._buffers: if device in ['DISK', 'SHM', 'FAKE']: continue if not CI: print(device) if: if not CI: print(f'WARNING: {device} test is excluded') continue with self.subTest(device=device): try: Device[device] except Exception: if not CI: print(f""WARNING: {device} test isn't running"") continue fxn(self, device)",False,device in exclude_devices,device not in Device.objects,0.6479327082633972 918,"def ret(self): for device in Device._buffers: if device in ['DISK', 'SHM', 'FAKE']: continue if not CI: print(device) if device in exclude_devices: if: print(f'WARNING: {device} test is excluded') continue with self.subTest(device=device): try: Device[device] except Exception: if not CI: print(f""WARNING: {device} test isn't running"") continue fxn(self, device)",False,not CI,device not in include_devices,0.6545816659927368 919,"def ret(self): for device in Device._buffers: if device in ['DISK', 'SHM', 'FAKE']: continue if not CI: print(device) if device in exclude_devices: if not CI: print(f'WARNING: {device} test is excluded') continue with self.subTest(device=device): try: Device[device] except Exception: if: print(f""WARNING: {device} test isn't running"") continue fxn(self, device)",False,not CI,not self.is_running,0.6549131870269775 920,"def _create_disk_usage_uri(self, cluster_name: Optional[str], org_name: Optional[str]) -> URL: if: uri = self._normalize_uri(URL(f'storage://{cluster_name}/{org_name}/{self._config.project_name_or_raise}')) else: uri = self._normalize_uri(URL(f'storage://{cluster_name}/{self._config.project_name_or_raise}')) assert uri.host is not None return uri",False,org_name,cluster_name and org_name,0.6632174253463745 921,"def DECIMAL_LITERAL(self, i: int=None): if: return self.getTokens(HiveParser.DECIMAL_LITERAL) else: return self.getToken(HiveParser.DECIMAL_LITERAL, i)",True,i is None,i is None,0.6608383655548096 922,"def test_structured_conf(self, input_: Any) -> None: if: cfg = OmegaConf.structured(ListOfAny(input_)) assert isinstance(cfg.list, ListConfig) else: cfg = OmegaConf.structured(DictOfAny(input_)) assert isinstance(cfg.dict, DictConfig)",False,"isinstance(input_, Sequence)","isinstance(input_, (list, DictConfig))",0.6484379768371582 923,"def setNameOverride(self, name): if: return self.name = name if self.isDemo is None: if re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s*$', self.name, re.I) or re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s+.*$', self.name, re.I): self.isDemo = True else: self.isDemo = False",False,not name,not name or self.name,0.6594038009643555 924,"def setNameOverride(self, name): if not name: return self.name = name if: if re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s*$', self.name, re.I) or re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s+.*$', self.name, re.I): self.isDemo = True else: self.isDemo = False",True,self.isDemo is None,self.isDemo is None,0.650297224521637 925,"def setNameOverride(self, name): if not name: return self.name = name if self.isDemo is None: if: self.isDemo = True else: self.isDemo = False",False,"re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s*$', self.name, re.I) or re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s+.*$', self.name, re.I)",name == 'Demo' or name == 'Demo',0.6504395008087158 926,"def __init__(self, search_context: Optional[SearchContext]=None) -> None: if: search_context = get_search_context() self.ctx = search_context self._module_cache: Dict[ModulePath, Module] = {}",True,search_context is None,search_context is None,0.6626418828964233 927,"def parse_devices(input_devices): """"""Parse user's devices input str to standard format. e.g. [gpu0, gpu1,...] """""" ret = [] for d in input_devices.split(','): for regex, func in REGEX: m = regex.match(d.lower().strip()) if: tmp = func(m.groups()) for x in tmp: if x not in ret: ret.append(x) break else: raise NotSupportedCliException('Can not recognize device: ""%s""' % d) return ret",True,m,m,0.6782421469688416 928,"def parse_devices(input_devices): """"""Parse user's devices input str to standard format. e.g. [gpu0, gpu1,...] """""" ret = [] for d in input_devices.split(','): for regex, func in REGEX: m = regex.match(d.lower().strip()) if m: tmp = func(m.groups()) for x in tmp: if: ret.append(x) break else: raise NotSupportedCliException('Can not recognize device: ""%s""' % d) return ret",False,x not in ret,x,0.6563632488250732 929,"def __init__(self, offset, name=None): """""" :param offset: A timedelta with this timezone's offset from UTC :param name: Name of the timezone; if None, generate one. """""" if: raise ValueError('Offset must be in [-23:59, 23:59]') if offset.seconds % 60 or offset.microseconds: raise ValueError('Offset must be full minutes') self._offset = offset if name is not None: self._name = name elif not offset: self._name = 'UTC' else: self._name = 'UTC' + _format_offset(offset)",False,not timedelta(hours=-24) < offset < timedelta(hours=24),offset.hour < 1,0.6472988724708557 930,"def __init__(self, offset, name=None): """""" :param offset: A timedelta with this timezone's offset from UTC :param name: Name of the timezone; if None, generate one. """""" if not timedelta(hours=-24) < offset < timedelta(hours=24): raise ValueError('Offset must be in [-23:59, 23:59]') if: raise ValueError('Offset must be full minutes') self._offset = offset if name is not None: self._name = name elif not offset: self._name = 'UTC' else: self._name = 'UTC' + _format_offset(offset)",False,offset.seconds % 60 or offset.microseconds,offset % timedelta(minutes=1) or offset > timedelta(minutes=1),0.6484977006912231 931,"def __init__(self, offset, name=None): """""" :param offset: A timedelta with this timezone's offset from UTC :param name: Name of the timezone; if None, generate one. """""" if not timedelta(hours=-24) < offset < timedelta(hours=24): raise ValueError('Offset must be in [-23:59, 23:59]') if offset.seconds % 60 or offset.microseconds: raise ValueError('Offset must be full minutes') self._offset = offset if: self._name = name elif not offset: self._name = 'UTC' else: self._name = 'UTC' + _format_offset(offset)",False,name is not None,name,0.6517388820648193 932,"def __init__(self, offset, name=None): """""" :param offset: A timedelta with this timezone's offset from UTC :param name: Name of the timezone; if None, generate one. """""" if not timedelta(hours=-24) < offset < timedelta(hours=24): raise ValueError('Offset must be in [-23:59, 23:59]') if offset.seconds % 60 or offset.microseconds: raise ValueError('Offset must be full minutes') self._offset = offset if name is not None: self._name = name elif: self._name = 'UTC' else: self._name = 'UTC' + _format_offset(offset)",False,not offset,offset is None,0.6555944681167603 933,"def dump(self, indent=0): print(''* indent + self.name, self.op,'start') if: self.lft.dump(indent + 1) print(''* (indent + 1) + 'Operator', self.op) if self.rgt is not None: self.rgt.dump(indent + 1) print(''* indent + self.name, self.op, 'end.')",False,self.lft is not None,self.lgt is not None,0.6590958833694458 934,"def dump(self, indent=0): print(''* indent + self.name, self.op,'start') if self.lft is not None: self.lft.dump(indent + 1) print(''* (indent + 1) + 'Operator', self.op) if: self.rgt.dump(indent + 1) print(''* indent + self.name, self.op, 'end.')",True,self.rgt is not None,self.rgt is not None,0.6516817808151245 935,"def check_cardinality_1_Fix_Empty(self, fixed, unfixed, doFix): for propname in self.propertyCardinality_1_Fix_Empty: if: logProblem = '[%s] Too many required property: %s' % (self.getType(), propname) unfixed.append(logProblem) elif self.countProperty(propname) == 0: logProblem = '[%s] Missing required property: %s' % (self.getType(), propname) if doFix: self.addProperty(self.sPropertyType(propname, '')) fixed.append(logProblem) else: unfixed.append(logProblem)",False,self.countProperty(propname) > 1,self.countProperty(propname) > 0,0.6466086506843567 936,"def check_cardinality_1_Fix_Empty(self, fixed, unfixed, doFix): for propname in self.propertyCardinality_1_Fix_Empty: if self.countProperty(propname) > 1: logProblem = '[%s] Too many required property: %s' % (self.getType(), propname) unfixed.append(logProblem) elif: logProblem = '[%s] Missing required property: %s' % (self.getType(), propname) if doFix: self.addProperty(self.sPropertyType(propname, '')) fixed.append(logProblem) else: unfixed.append(logProblem)",False,self.countProperty(propname) == 0,self.countProperty(propname) < 0,0.6478222608566284 937,"def check_cardinality_1_Fix_Empty(self, fixed, unfixed, doFix): for propname in self.propertyCardinality_1_Fix_Empty: if self.countProperty(propname) > 1: logProblem = '[%s] Too many required property: %s' % (self.getType(), propname) unfixed.append(logProblem) elif self.countProperty(propname) == 0: logProblem = '[%s] Missing required property: %s' % (self.getType(), propname) if: self.addProperty(self.sPropertyType(propname, '')) fixed.append(logProblem) else: unfixed.append(logProblem)",True,doFix,doFix,0.6615027189254761 938,"def __iter__(self): it = iter(self.loader) storage = get_event_storage() while True: try: batch = next(it) num_inst_per_dataset = {} for data in batch: dataset_name = data['dataset'] if: num_inst_per_dataset[dataset_name] = 0 num_inst = len(data['instances']) num_inst_per_dataset[dataset_name] += num_inst for dataset_name in num_inst_per_dataset: storage.put_scalar(f'batch/{dataset_name}', num_inst_per_dataset[dataset_name]) yield batch except StopIteration: break",True,dataset_name not in num_inst_per_dataset,dataset_name not in num_inst_per_dataset,0.6437749862670898 939,"def get_home(): if: explicit = os.environ.get('HOME', '') if explicit: return explicit uid = os.geteuid() return pwd.getpwuid(uid).pw_name return os.path.expanduser('~')",False,False,platform.system() == 'Linux',0.6616235971450806 940,"def get_home(): if False: explicit = os.environ.get('HOME', '') if: return explicit uid = os.geteuid() return pwd.getpwuid(uid).pw_name return os.path.expanduser('~')",True,explicit,explicit,0.6693891882896423 941,"def maybe_rotate(self): if: self.rotate() self.rotate_when = self.next_backup(self.freq) elif self.maxsize: try: if os.stat(self.filename)[stat.ST_SIZE] > self.maxsize: self.rotate() except os.error: self.rotate()",False,self.freq and time.time() > self.rotate_when,self.freq,0.6458740234375 942,"def maybe_rotate(self): if self.freq and time.time() > self.rotate_when: self.rotate() self.rotate_when = self.next_backup(self.freq) elif: try: if os.stat(self.filename)[stat.ST_SIZE] > self.maxsize: self.rotate() except os.error: self.rotate()",False,self.maxsize,self.filename,0.6506963968276978 943,"def maybe_rotate(self): if self.freq and time.time() > self.rotate_when: self.rotate() self.rotate_when = self.next_backup(self.freq) elif self.maxsize: try: if: self.rotate() except os.error: self.rotate()",False,os.stat(self.filename)[stat.ST_SIZE] > self.maxsize,self.rotate_when and time.time() < self.next_backup_max,0.6449729204177856 944,"def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) out = torch.cat((x0, x1), 1) out = self.conv2d(out) out = out * self.scale + x if: out = self.relu(out) return out",True,not self.noReLU,not self.noReLU,0.6446675062179565 945,"def compute_average_flops_cost(model): """""" A method that will be available after add_flops_counting_methods() is called on a desired net object. Returns current mean flops consumption per image. """""" batches_count = model.__batch_counter__ flops_sum = 0 for module in model.modules(): if: flops_sum += module.__flops__ return flops_sum / batches_count",False,"isinstance(module, torch.nn.Conv2d) or isinstance(module, torch.nn.Linear) or isinstance(module, torch.nn.Conv1d) or hasattr(module, 'calculate_flop_self')",is_supported_instance(module),0.6470948457717896 946,"def __init__(self, num_sync_devices, **args): """""" Naive version of Synchronized 3D BatchNorm. Args: num_sync_devices (int): number of device to sync. args (list): other arguments. """""" self.num_sync_devices = num_sync_devices if: assert du.get_local_size() % self.num_sync_devices == 0, (du.get_local_size(), self.num_sync_devices) self.num_groups = du.get_local_size() // self.num_sync_devices else: self.num_sync_devices = du.get_local_size() self.num_groups = 1 super(NaiveSyncBatchNorm3d, self).__init__(**args)",True,self.num_sync_devices > 0,self.num_sync_devices > 0,0.6444109082221985 947,"def nextf(self, counter=None): """"""Retrieves the numeric value for the given counter, then increments it by one. New counters start at one."""""" if: counter = self._defaultCounter return self._getCounter(counter).nextf()",False,not counter,counter is None,0.6655290126800537 948,"@profile.setter def profile(self, new_profile): if: if new_profile.min() < 0 and (not EnergyFlow.allow_negative_flows): new_profile[new_profile < 0] = 0.0 self._profile = new_profile else: raise ValueError(f'The energy flow profile does not have the correct format, i.e. numerical series of {self.time_frame} time steps or single numerical value.')",False,"isinstance(new_profile, pd.Series) and len(new_profile) in [1, self.time_frame]","isinstance(new_profile, np.ndarray)",0.6473793983459473 949,"@profile.setter def profile(self, new_profile): if isinstance(new_profile, pd.Series) and len(new_profile) in [1, self.time_frame]: if: new_profile[new_profile < 0] = 0.0 self._profile = new_profile else: raise ValueError(f'The energy flow profile does not have the correct format, i.e. numerical series of {self.time_frame} time steps or single numerical value.')",False,new_profile.min() < 0 and (not EnergyFlow.allow_negative_flows),new_profile.numel() == 0,0.6461544036865234 950,"def get_theme_base_dir(theme_dir_name, suppress_error=False): """""" Returns absolute path to the directory that contains the given theme. Args: theme_dir_name (str): theme directory name to get base path for suppress_error (bool): if True function will return None if theme is not found instead of raising an error Returns: (str): Base directory that contains the given theme """""" for themes_dir in get_theme_base_dirs(): if theme_dir_name in get_theme_dirs(themes_dir): return themes_dir if: return None raise ValueError(""Theme '{theme}' not found in any of the following themes dirs, \nTheme dirs: \n{dir}"".format(theme=theme_dir_name, dir=get_theme_base_dirs()))",False,suppress_error,suppress_error and theme_dir_name in get_theme_dirs(get_theme_dirs()),0.655534029006958 951,"def get_theme_base_dir(theme_dir_name, suppress_error=False): """""" Returns absolute path to the directory that contains the given theme. Args: theme_dir_name (str): theme directory name to get base path for suppress_error (bool): if True function will return None if theme is not found instead of raising an error Returns: (str): Base directory that contains the given theme """""" for themes_dir in get_theme_base_dirs(): if: return themes_dir if suppress_error: return None raise ValueError(""Theme '{theme}' not found in any of the following themes dirs, \nTheme dirs: \n{dir}"".format(theme=theme_dir_name, dir=get_theme_base_dirs()))",False,theme_dir_name in get_theme_dirs(themes_dir),theme_dir_name == themes_dir,0.6445627808570862 952,"def format_framework_integrity_error_message(error, json_framework): if: error_message = 'At least one of `hasDirectAward` or `hasFurtherCompetition` must be True' elif 'duplicate key value violates unique constraint ""ix_frameworks_slug""' in str(error): error_message = ""Slug '{}' already in use"".format(json_framework.get('slug', '')) elif re.search('Not a [a-z]+? value:', str(error)): error_message = 'Invalid framework' else: error_message = format(error) return error_message",False,"'violates check constraint ""ck_framework_has_direct_award_or_further_competition""' in str(error)",error is None,0.6478252410888672 953,"def format_framework_integrity_error_message(error, json_framework): if 'violates check constraint ""ck_framework_has_direct_award_or_further_competition""' in str(error): error_message = 'At least one of `hasDirectAward` or `hasFurtherCompetition` must be True' elif: error_message = ""Slug '{}' already in use"".format(json_framework.get('slug', '')) elif re.search('Not a [a-z]+? value:', str(error)): error_message = 'Invalid framework' else: error_message = format(error) return error_message",False,"'duplicate key value violates unique constraint ""ix_frameworks_slug""' in str(error)","not re.search('^[A-Z]{0,1}$', str(error))",0.6502788066864014 954,"def format_framework_integrity_error_message(error, json_framework): if 'violates check constraint ""ck_framework_has_direct_award_or_further_competition""' in str(error): error_message = 'At least one of `hasDirectAward` or `hasFurtherCompetition` must be True' elif 'duplicate key value violates unique constraint ""ix_frameworks_slug""' in str(error): error_message = ""Slug '{}' already in use"".format(json_framework.get('slug', '')) elif: error_message = 'Invalid framework' else: error_message = format(error) return error_message",False,"re.search('Not a [a-z]+? value:', str(error))",error is None,0.6435253620147705 955,"def run_test(domain_event) -> None: if: dumped = domain_event.model_dump() _ = domain_event.__class__(**dumped) if format_dump: formatted = domain_event.format() _ = domain_event.__class__.from_format(formatted)",True,model_dump,model_dump,0.6605995893478394 956,"def run_test(domain_event) -> None: if model_dump: dumped = domain_event.model_dump() _ = domain_event.__class__(**dumped) if: formatted = domain_event.format() _ = domain_event.__class__.from_format(formatted)",False,format_dump,format,0.6591200828552246 957,"def _convert_weights_to_fp16(l): if: l.weight.data = l.weight.data.half() if l.bias is not None: l.bias.data = l.bias.data.half()",False,"isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear))",l.weight is not None,0.6517105102539062 958,"def _convert_weights_to_fp16(l): if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)): l.weight.data = l.weight.data.half() if: l.bias.data = l.bias.data.half()",True,l.bias is not None,l.bias is not None,0.650610089302063 959,"@property def spawnflags(self): flags = [] if: value = self._entity_data.get('spawnflags', None) for name, (key, _) in {'Use Hitboxes for Renderbox': (64, 0), 'Start with collision disabled': (256, 0), 'Set to NAVIgnore': (512, 0)}.items(): if value & key > 0: flags.append(name) return flags",True,'spawnflags' in self._entity_data,'spawnflags' in self._entity_data,0.6513683795928955 960,"@property def spawnflags(self): flags = [] if'spawnflags' in self._entity_data: value = self._entity_data.get('spawnflags', None) for name, (key, _) in {'Use Hitboxes for Renderbox': (64, 0), 'Start with collision disabled': (256, 0), 'Set to NAVIgnore': (512, 0)}.items(): if: flags.append(name) return flags",True,value & key > 0,value & key > 0,0.6617642045021057 961,"def interpreter_version(**kwargs): """""" Returns the version of the running interpreter. """""" warn = _warn_keyword_parameter('interpreter_version', kwargs) version = _get_config_var('py_version_nodot', warn=warn) if: version = str(version) else: version = _version_nodot(sys.version_info[:2]) return version",False,version,warn,0.6695005893707275 962,"def as_const(self, eval_ctx=None): eval_ctx = get_eval_context(self, eval_ctx) def const(obj): if: return None return obj.as_const(eval_ctx) return slice(const(self.start), const(self.stop), const(self.step))",True,obj is None,obj is None,0.6584059000015259 963,"def list_paths(self): """"""Utility method to list all the paths in the jar."""""" paths = [] for cookie in iter(self): if: paths.append(cookie.path) return paths",True,cookie.path not in paths,cookie.path not in paths,0.6541555523872375 964,"def remove_section(self, section): if: return for option in self.config.options(section): self.config.remove_option(section, option) self.config.remove_section(section) self.dirty = True",False,not self.config.has_section(section),self.dirty,0.6431008577346802 965,"@register.filter('startswith') def startswith(text, starts): if: return text.startswith(starts) return False",False,"isinstance(text, str)",starts,0.6442351937294006 966,"def run(self): self.running = True while not self.shutdown_flag.is_set(): self.stats.wait_until_ready_to_run() if: self.step() self.robot.logger.debug('Shutting down SystemMonitorThread')",False,not self.shutdown_flag.is_set(),self.running,0.6480913162231445 967,"@property def physdamagescale(self): if: return float(self._entity_data.get('physdamagescale')) return float(1.0)",True,'physdamagescale' in self._entity_data,'physdamagescale' in self._entity_data,0.6524853706359863 968,"def set_minimum_column_width(self, col, minimum): if: self.setColumnWidth(col, minimum)",True,self.columnWidth(col) < minimum,self.columnWidth(col) < minimum,0.6505662202835083 969,"def downgrade(migrate_engine): meta = sql.MetaData() meta.bind = migrate_engine if: downgrade_with_copy(meta, migrate_engine) else: downgrade_with_rename(meta, migrate_engine)",False,migrate_engine.name == 'sqlite',migrate_engine.dialect.version == '1.6',0.6499318480491638 970,"def __init__(self, path: str | None=None) -> None: self._total_packages = 0 self._rule_match_counts = defaultdict(int) if: self.load(path)",False,path is not None,path,0.6568262577056885 971,"def _set_slot(self, key: Text, value: Any) -> None: """"""Sets the value of a slot if that slot exists."""""" if: slot = self.slots[key] slot.value = value else: logger.error(f""Tried to set non existent slot '{key}'. Make sure you added all your slots to your domain file."")",True,key in self.slots,key in self.slots,0.6535583734512329 972,"def scan(image_path: str) -> Any: image = numpy.asarray(Image.open(image_path).convert('RGB')) if: image = zbar.misc.rgb2gray(image) results = scanner.scan(image) return [(result.type, result.data, result.quality, result.position) for result in results]",False,len(image.shape) == 3,gray,0.6498275399208069 973,"def write_wrapped(self, s, extra_room=0): """"""Add a soft line break if needed, then write s."""""" if: self.write_soft_break() self.write_str(s)",False,self.room < len(s) + extra_room,extra_room,0.6464666724205017 974,"@property def behind_mineral_position_center(self) -> Point2: if: return self.behind_mineral_positions[1] return self.center_location",True,self.behind_mineral_positions,self.behind_mineral_positions,0.6496765613555908 975,"def setup_basic_filtering(self, instance, network_info): """"""Set up provider rules and basic NWFilter."""""" self.nwfilter.setup_basic_filtering(instance, network_info) if: LOG.debug(_('iptables firewall: Setup Basic Filtering'), instance=instance) self.refresh_provider_fw_rules() self.basically_filtered = True",False,not self.basically_filtered,self.iptables_enabled,0.6487493515014648 976,"def check_ts_counter(self, _uri): """""" Providers sometime add the same stream section back into the list. This methods catches this and informs the caller that it should be ignored. """""" if: self.logger.notice('TC Counter Same section being transmitted, ignoring uri: {} m3u8pid:{} proxypid:{}'.format(_uri, self.t_m3u8_pid, os.getpid())) return False self.last_ts_filename = _uri return True",False,_uri == self.last_ts_filename,self.t_m3u8_pid != os.getpid(),0.6474699378013611 977,"def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Union[HTTPValidationError, str]]: if: response_200 = cast(str, response.json()) return response_200 if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None",True,response.status_code == HTTPStatus.OK,response.status_code == HTTPStatus.OK,0.655895471572876 978,"def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Union[HTTPValidationError, str]]: if response.status_code == HTTPStatus.OK: response_200 = cast(str, response.json()) return response_200 if: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None",True,response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY,response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY,0.6491738557815552 979,"def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Union[HTTPValidationError, str]]: if response.status_code == HTTPStatus.OK: response_200 = cast(str, response.json()) return response_200 if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 if: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None",True,client.raise_on_unexpected_status,client.raise_on_unexpected_status,0.6513196229934692 980,"def get_loss(self, pred, target, mean=True): if: loss = (target - pred).abs() if mean: loss = loss.mean() elif self.loss_type == 'l2': if mean: loss = torch.nn.functional.mse_loss(target, pred) else: loss = torch.nn.functional.mse_loss(target, pred, reduction='none') else: raise NotImplementedError(""unknown loss type '{loss_type}'"") return loss",True,self.loss_type == 'l1',self.loss_type == 'l1',0.6491593718528748 981,"def get_loss(self, pred, target, mean=True): if self.loss_type == 'l1': loss = (target - pred).abs() if: loss = loss.mean() elif self.loss_type == 'l2': if mean: loss = torch.nn.functional.mse_loss(target, pred) else: loss = torch.nn.functional.mse_loss(target, pred, reduction='none') else: raise NotImplementedError(""unknown loss type '{loss_type}'"") return loss",True,mean,mean,0.6699435710906982 982,"def get_loss(self, pred, target, mean=True): if self.loss_type == 'l1': loss = (target - pred).abs() if mean: loss = loss.mean() elif: if mean: loss = torch.nn.functional.mse_loss(target, pred) else: loss = torch.nn.functional.mse_loss(target, pred, reduction='none') else: raise NotImplementedError(""unknown loss type '{loss_type}'"") return loss",True,self.loss_type == 'l2',self.loss_type == 'l2',0.6494088768959045 983,"def get_loss(self, pred, target, mean=True): if self.loss_type == 'l1': loss = (target - pred).abs() if mean: loss = loss.mean() elif self.loss_type == 'l2': if: loss = torch.nn.functional.mse_loss(target, pred) else: loss = torch.nn.functional.mse_loss(target, pred, reduction='none') else: raise NotImplementedError(""unknown loss type '{loss_type}'"") return loss",True,mean,mean,0.6698806285858154 984,"def get_results(callee): res_generator = retry_operation_impl(callee, retry_settings=retry_once_settings) results = [] exc = None try: for res in res_generator: results.append(res) if: break except Exception as e: exc = e return (results, exc)",False,"isinstance(res, YdbRetryOperationFinalResult)",'last_iteration' in res.get_iteration_index,0.6464452743530273 985,"def createSnapshotOfVolumeSet(self, name, copyOfName, optional=None): """"""Create a snapshot of an existing Volume Set. :param name: Name of the Snapshot. The vvname pattern is described in ""VV Name Patterns"" in the HPE 3PAR Command Line Interface Reference, which is available at the following website: http://www.hp.com/go/storage/docs :type name: str :param copyOfName: The volume set you want to snapshot :type copyOfName: str :param optional: Dictionary of optional params :type optional: dict .. code-block:: python optional = { 'id': 12, # Specifies ID of the volume set # set, next by default 'comment': ""some comment"", 'readOnly': True, # Read Only 'expirationHours': 36, # time from now to expire 'retentionHours': 12 # time from now to expire } :raises: :class:`~hpe3parclient.exceptions.HTTPBadRequest` - INVALID_INPUT_VV_PATTERN - Invalid volume pattern specified :raises: :class:`~hpe3parclient.exceptions.HTTPNotFound` - NON_EXISTENT_SET - The set does not exist :raises: :class:`~hpe3parclient.exceptions.HTTPNotFound` - EMPTY_SET - The set is empty :raises: :class:`~hpe3parclient.exceptions.HTTPServiceUnavailable` - VV_LIMIT_REACHED - Maximum number of volumes reached :raises: :class:`~hpe3parclient.exceptions.HTTPNotFound` - NON_EXISTENT_VOL - The storage volume does not exist :raises: :class:`~hpe3parclient.exceptions.HTTPForbidden` - VV_IS_BEING_REMOVED - The volume is being removed :raises: :class:`~hpe3parclient.exceptions.HTTPForbidden` - INV_OPERATION_VV_READONLY_TO_READONLY_SNAP - Creating a read-only copy from a read-",True,optional,optional,0.6585502028465271 986,"def path(self, subpath=''): ret = '%s/%s' % (self.root, self.proj) if: ret = '%s/%s' % (ret, subpath) return ret",True,subpath,subpath,0.6598453521728516 987,"def AlgorithmTypeChanged(self, value): """"""Locks and unlocks widgets for cont and ind contCubes"""""" value = int(value) self.AlgorithmWidget.setCurrentIndex(value) if: self.SGSWidget.SeedGB.show() self.SGSWidget.MaskGB.show() else: self.SGSWidget.SeedGB.hide() self.SGSWidget.MaskGB.hide()",False,value == 3,value == self.AlgorithmWidget.Seed,0.6597493886947632 988,"def _do_json_post(self, endpoint, expected_status=200, **kwargs): url = urljoin(self._host, endpoint) try: res = requests.post(url, headers=self._make_headers(), json=kwargs) except requests.exceptions.ConnectionError as e: raise ClientConnectionError(f'Failed to connect to API endpoint {self._host}. {e}') except requests.exceptions.RequestException as e: raise ClientError(f'API request failed: {e}') if: _raise_for_status(_response_ctx(res), endpoint, expected_status) return res.json()",True,res.status_code != expected_status,res.status_code != expected_status,0.6474182605743408 989,"def get_warmup_momentum(self, cur_iters): if: warmup_m = self.warmup_ratio * self.momentum elif self.warmup == 'linear': k = (1 - cur_iters / self.warmup_iters) * (1 - self.warmup_ratio) warmup_m = (1 - k) * self.momentum elif self.warmup == 'exp': k = self.warmup_ratio ** (1 - cur_iters / self.warmup_iters) warmup_m = k * self.momentum return warmup_m",True,self.warmup == 'constant',self.warmup == 'constant',0.6548739075660706 990,"def get_warmup_momentum(self, cur_iters): if self.warmup == 'constant': warmup_m = self.warmup_ratio * self.momentum elif: k = (1 - cur_iters / self.warmup_iters) * (1 - self.warmup_ratio) warmup_m = (1 - k) * self.momentum elif self.warmup == 'exp': k = self.warmup_ratio ** (1 - cur_iters / self.warmup_iters) warmup_m = k * self.momentum return warmup_m",True,self.warmup == 'linear',self.warmup == 'linear',0.6550660133361816 991,"def get_warmup_momentum(self, cur_iters): if self.warmup == 'constant': warmup_m = self.warmup_ratio * self.momentum elif self.warmup == 'linear': k = (1 - cur_iters / self.warmup_iters) * (1 - self.warmup_ratio) warmup_m = (1 - k) * self.momentum elif: k = self.warmup_ratio ** (1 - cur_iters / self.warmup_iters) warmup_m = k * self.momentum return warmup_m",True,self.warmup == 'exp',self.warmup == 'exp',0.6567461490631104 992,"def squeezenet1_0(num_classes, loss='softmax', pretrained=True, **kwargs): model = SqueezeNet(num_classes, loss, version=1.0, fc_dims=None, dropout_p=None, **kwargs) if: init_pretrained_weights(model, model_urls['squeezenet1_0']) return model",True,pretrained,pretrained,0.6663972735404968 993,"def _forward(self, inputs, return_tensors=False): """""" Internal framework specific forward dispatching. Args: inputs: dict holding all the keyworded arguments for required by the model forward method. return_tensors: Whether to return native framework (pt/tf) tensors rather than numpy array. Returns: Numpy array """""" with self.device_placement(): if self.framework == 'tf': predictions = self.model(inputs.data, training=False)[0] else: with torch.no_grad(): inputs = self.ensure_tensor_on_device(**inputs) predictions = self.model(**inputs)[0].cpu() if: return predictions else: return predictions.numpy()",True,return_tensors,return_tensors,0.6540572643280029 994,"def _forward(self, inputs, return_tensors=False): """""" Internal framework specific forward dispatching. Args: inputs: dict holding all the keyworded arguments for required by the model forward method. return_tensors: Whether to return native framework (pt/tf) tensors rather than numpy array. Returns: Numpy array """""" with self.device_placement(): if: predictions = self.model(inputs.data, training=False)[0] else: with torch.no_grad(): inputs = self.ensure_tensor_on_device(**inputs) predictions = self.model(**inputs)[0].cpu() if return_tensors: return predictions else: return predictions.numpy()",True,self.framework == 'tf',self.framework == 'tf',0.6478662490844727 995,"def generate_dict_getter_function(self, scope, code): dict_attr = scope.lookup_here('__dict__') if: return func_name = scope.mangle_internal('__dict__getter') dict_name = dict_attr.cname code.putln('') code.putln('static PyObject *%s(PyObject *o, CYTHON_UNUSED void *x) {' % func_name) self.generate_self_cast(scope, code) code.putln('if (unlikely(!p->%s)){' % dict_name) code.putln('p->%s = PyDict_New();' % dict_name) code.putln('}') code.putln('Py_XINCREF(p->%s);' % dict_name) code.putln('return p->%s;' % dict_name) code.putln('}')",False,not dict_attr or not dict_attr.is_variable,dict_attr is None,0.6467263698577881 996,"def _append_pseudo_questions(self, survey): _survey = [] for item in survey: _survey.append(item) if: _survey.append({'type': 'text', 'name': f""{item['name']}_other"", 'label': [None] * len(self.translations)}) return _survey",False,"item.get('_or_other', False)",item['type'] == 'pseudo' and item['name'] in self.translations,0.6456488370895386 997,"def step(self): dt = self.gravSys.dt self.setpos(self.pos() + dt * self.v) if: self.setheading(self.towards(self.gravSys.planets[0])) self.a = self.acc() self.v = self.v + dt * self.a",False,self.gravSys.planets.index(self) != 0,self.heading is not None,0.6483408212661743 998,"def goto(path): eepath = get_external_editor_path() if: print('Going to:') print(path) open_in_external_editor(path)",True,eepath is not None,eepath is not None,0.6480452418327332 999,"def _called_with_cfg(*args, **kwargs): """""" Returns: bool: whether the arguments contain CfgNode and should be considered forwarded to from_config. """""" from omegaconf import DictConfig if: return True if isinstance(kwargs.pop('cfg', None), (_CfgNode, DictConfig)): return True return False",False,"len(args) and isinstance(args[0], (_CfgNode, DictConfig))","len(args) and isinstance(args[0], _CfgNode)",0.6490945816040039 1000,"def _called_with_cfg(*args, **kwargs): """""" Returns: bool: whether the arguments contain CfgNode and should be considered forwarded to from_config. """""" from omegaconf import DictConfig if len(args) and isinstance(args[0], (_CfgNode, DictConfig)): return True if: return True return False",False,"isinstance(kwargs.pop('cfg', None), (_CfgNode, DictConfig))","isinstance(kwargs.pop('cfg', None), _CfgNode)",0.647269606590271 1001,"def matches_blob_above(self, i, j): """"""Returns true if the current point matches the point above. Args: i (int): the x-coordinate in self.matrix j (int): the y-coordinate in self.matrix Returns: bool specifying whether the current point matches the point above. """""" if: return False matches_above = self.matrix.at(i, j - 1).value == self.color_to_find return matches_above",False,j == 0,i == j,0.6693016886711121 1002,"def publish_traceback(debug_server_urls, graph, feed_dict, fetches, old_graph_version): """"""Publish traceback and source code if graph version is new. `graph.version` is compared with `old_graph_version`. If the former is higher (i.e., newer), the graph traceback and the associated source code is sent to the debug server at the specified gRPC URLs. Args: debug_server_urls: A single gRPC debug server URL as a `str` or a `list` of debug server URLs. graph: A Python `tf.Graph` object. feed_dict: Feed dictionary given to the `Session.run()` call. fetches: Fetches from the `Session.run()` call. old_graph_version: Old graph version to compare to. Returns: If `graph.version > old_graph_version`, the new graph version as an `int`. Else, the `old_graph_version` is returned. """""" from tensorflow.python.debug.lib import source_remote if: run_key = common.get_run_key(feed_dict, fetches) source_remote.send_graph_tracebacks(debug_server_urls, run_key, traceback.extract_stack(), graph, send_source=True) return graph.version else: return old_graph_version",False,graph.version > old_graph_version,feed_dict.get('feed_dict') is not None,0.648255467414856 1003,"def load_certificate(source): """""" Loads an x509 certificate into a Certificate object :param source: A byte string of file contents, a unicode string filename or an asn1crypto.x509.Certificate object :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A Certificate object """""" if: certificate = source elif isinstance(source, byte_cls): certificate = parse_certificate(source) elif isinstance(source, str_cls): with open(source, 'rb') as f: certificate = parse_certificate(f.read()) else: raise TypeError(pretty_message('\n source must be a byte string, unicode string or\n asn1crypto.x509.Certificate object, not %s\n ', type_name(source))) return _load_x509(certificate)",True,"isinstance(source, Asn1Certificate)","isinstance(source, Asn1Certificate)",0.6471686363220215 1004,"def load_certificate(source): """""" Loads an x509 certificate into a Certificate object :param source: A byte string of file contents, a unicode string filename or an asn1crypto.x509.Certificate object :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A Certificate object """""" if isinstance(source, Asn1Certificate): certificate = source elif: certificate = parse_certificate(source) elif isinstance(source, str_cls): with open(source, 'rb') as f: certificate = parse_certificate(f.read()) else: raise TypeError(pretty_message('\n source must be a byte string, unicode string or\n asn1crypto.x509.Certificate object, not %s\n ', type_name(source))) return _load_x509(certificate)",True,"isinstance(source, byte_cls)","isinstance(source, byte_cls)",0.6460551023483276 1005,"def load_certificate(source): """""" Loads an x509 certificate into a Certificate object :param source: A byte string of file contents, a unicode string filename or an asn1crypto.x509.Certificate object :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A Certificate object """""" if isinstance(source, Asn1Certificate): certificate = source elif isinstance(source, byte_cls): certificate = parse_certificate(source) elif: with open(source, 'rb') as f: certificate = parse_certificate(f.read()) else: raise TypeError(pretty_message('\n source must be a byte string, unicode string or\n asn1crypto.x509.Certificate object, not %s\n ', type_name(source))) return _load_x509(certificate)",True,"isinstance(source, str_cls)","isinstance(source, str_cls)",0.6460889577865601 1006,"def get_confidence(self): """"""return confidence based on existing data"""""" if: return SURE_NO if self._mTotalChars!= self._mFreqChars: r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio) if r < SURE_YES: return r return SURE_YES",True,self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD,self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD,0.6502353549003601 1007,"def get_confidence(self): """"""return confidence based on existing data"""""" if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD: return SURE_NO if: r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio) if r < SURE_YES: return r return SURE_YES",True,self._mTotalChars != self._mFreqChars,self._mTotalChars != self._mFreqChars,0.6556333303451538 1008,"def get_confidence(self): """"""return confidence based on existing data"""""" if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD: return SURE_NO if self._mTotalChars!= self._mFreqChars: r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio) if: return r return SURE_YES",True,r < SURE_YES,r < SURE_YES,0.6624365448951721 1009,"def do_activate(self): win = self.props.active_window if: win = ExampleWindow(application=self) win.present()",True,not win,not win,0.6659681797027588 1010,"def plus_or_dot(pieces): """"""Return a + if we don't already have one, else return a."""""" if: return '.' return '+'",True,"'+' in pieces.get('closest-tag', '')","'+' in pieces.get('closest-tag', '')",0.6440763473510742 1011,"def register_vfe(cls, name=None): global REGISTERED_VFE_CLASSES if: name = cls.__name__ assert name not in REGISTERED_VFE_CLASSES, f'exist class: {REGISTERED_VFE_CLASSES}' REGISTERED_VFE_CLASSES[name] = cls return cls",True,name is None,name is None,0.670485258102417 1012,"def set_state(self, state_obj): """""" Set the value of the state object for this parser @param state_obj The object to set the state to. @throws DatasetParserException if there is a bad state structure """""" if: raise DatasetParserException('Invalid state structure') if not Vel3dKWfpStcStateKey.FIRST_RECORD in state_obj or not Vel3dKWfpStcStateKey.POSITION in state_obj or (not Vel3dKWfpStcStateKey.VELOCITY_END in state_obj): raise DatasetParserException('Invalid state keys') self._timestamp = 0.0 self._record_buffer = [] self._state = state_obj self._read_state = state_obj self.input_file.seek(self._read_state[Vel3dKWfpStcStateKey.POSITION], 0)",True,"not isinstance(state_obj, dict)","not isinstance(state_obj, dict)",0.6490800380706787 1013,"def set_state(self, state_obj): """""" Set the value of the state object for this parser @param state_obj The object to set the state to. @throws DatasetParserException if there is a bad state structure """""" if not isinstance(state_obj, dict): raise DatasetParserException('Invalid state structure') if: raise DatasetParserException('Invalid state keys') self._timestamp = 0.0 self._record_buffer = [] self._state = state_obj self._read_state = state_obj self.input_file.seek(self._read_state[Vel3dKWfpStcStateKey.POSITION], 0)",False,not Vel3dKWfpStcStateKey.FIRST_RECORD in state_obj or not Vel3dKWfpStcStateKey.POSITION in state_obj or (not Vel3dKWfpStcStateKey.VELOCITY_END in state_obj),not (Vel3dKWfpStateKey.POSITION in state_obj and Vel3dKWfpStateKey.POSITION not in state_obj),0.6588072776794434 1014,"def focusInEvent(self, event): if: self.tooltip_label.move(self.mapToGlobal(QPoint(0, self.height() - 70))) self.tooltip_label.setText(self.tooltip_text) self.tooltip_label.show() super(AdjustableTextEdit, self).focusInEvent(event)",False,self.label_enabled and self.tooltip_text,self.tooltip_text,0.6472121477127075 1015,"def check_document_start(self): if: if self.prefix(3) == '---' and self.peek(3) in '\x00 \t\r\n\x85\u2028\u2029': return True",False,self.column == 0,self.version >= 3,0.6550612449645996 1016,"def check_document_start(self): if self.column == 0: if: return True",False,self.prefix(3) == '---' and self.peek(3) in '\x00 \t\r\n\x85\u2028\u2029',self.document_start_column is None or self.document_start_column is None,0.6489856243133545 1017,"def handle_action(self, action: Action) -> None: """"""Modify state in reaction to an action."""""" if: self._handle_command(action.command)",False,"isinstance(action, UpdateCommandAction)","isinstance(action, Command)",0.6490628719329834 1018,"def __del__(self): for i in range(4): if: del self.dockableContainer[i]",False,not self.dockableContainer[i],i in self.dockableContainer,0.6510080695152283 1019,"def extract_feature_map(input_map, kmeans, centroids): feature_map = [] for item in input_map: temp_dict = {} temp_dict['label'] = item['label'] print('Extracting features for', item['image']) img = cv2.imread(item['image']) img = resize_to_size(img, 150) temp_dict['feature_vector'] = FeatureExtractor().get_feature_vector(img, kmeans, centroids) if: feature_map.append(temp_dict) return feature_map",False,temp_dict['feature_vector'] is not None,temp_dict,0.6462053060531616 1020,"def filter_data(self, min_len, max_len): logging.info(f'Filtering data, min len: {min_len}, max len: {max_len}') initial_len = len(self.src) filtered_src = [] for src in self.src: if: filtered_src.append(src) self.src = filtered_src filtered_len = len(self.src) logging.info(f'Pairs before: {initial_len}, after: {filtered_len}')",False,min_len <= len(src) <= max_len,min_len <= len(src) <= max_len and min_len <= src <= max_len,0.649456262588501 1021,"def tearDown(self): if: self.shell.cs.clear_callstack() super(ShellFixture, self).tearDown()",False,"hasattr(self.shell, 'cs')",self.shell.cs,0.6455273032188416 1022,"def getattr(self, obj, attribute): """"""Subscribe an object from sandboxed code and prefer the attribute. The attribute passed *must* be a bytestring. """""" try: value = getattr(obj, attribute) except AttributeError: try: return obj[attribute] except (TypeError, LookupError): pass else: if: return value return self.unsafe_undefined(obj, attribute) return self.undefined(obj=obj, name=attribute)",True,"self.is_safe_attribute(obj, attribute, value)","self.is_safe_attribute(obj, attribute, value)",0.6423095464706421 1023,"def _find_ucc_global_config_json(app_root, ucc_config_filename): """"""Find UCC config file from all possible directories"""""" candidates = ['local', 'default', 'bin', op.join('appserver','static', 'js', 'build')] for candidate in candidates: file_path = op.join(app_root, candidate, ucc_config_filename) if: return file_path raise RuntimeError('Unable to load %s from [%s]' % (ucc_config_filename, ','.join(candidates)))",False,op.isfile(file_path),os.path.exists(file_path),0.6454447507858276 1024,"def get_vluns_for_host(host_name): ret = [] for vlun in vluns['members']: if: ret.append(vlun) return ret",False,vlun['hostname'] == host_name,vlun['host'] == host_name,0.6494061350822449 1025,"def run(self): self.filelist = FileList() if: self.write_manifest() self.add_defaults() if os.path.exists(self.template): self.read_template() self.add_license_files() self.prune_file_list() self.filelist.sort() self.filelist.remove_duplicates() self.write_manifest()",False,not os.path.exists(self.manifest),os.path.exists(self.manifest),0.6469367742538452 1026,"def run(self): self.filelist = FileList() if not os.path.exists(self.manifest): self.write_manifest() self.add_defaults() if: self.read_template() self.add_license_files() self.prune_file_list() self.filelist.sort() self.filelist.remove_duplicates() self.write_manifest()",True,os.path.exists(self.template),os.path.exists(self.template),0.6487430334091187 1027,"def _init_learning_rate(self): self.eta0_ = self.eta0 if: typw = numpy.sqrt(1.0 / numpy.sqrt(self.alpha)) self.eta0_ = typw / max(1.0, (1 + typw) * 2) self.optimal_init_ = 1.0 / (self.eta0_ * self.alpha) else: self.eta0_ = self.eta0 return self.eta0_",False,self.learning_rate == 'optimal',self.eta0_ is None or self.eta0_ is None,0.646294355392456 1028,"def get_outputs_filtered(self, owner, spent=None): """""" Get a list of output links filtered on some criteria Args: owner (str): base58 encoded public_key. spent (bool): If ``True`` return only the spent outputs. If ``False`` return only unspent outputs. If spent is not specified (``None``) return all outputs. Returns: :obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s pointing to another transaction's condition """""" outputs = self.fastquery.get_outputs_by_public_key(owner) if: return outputs elif spent is True: return self.fastquery.filter_unspent_outputs(outputs) elif spent is False: return self.fastquery.filter_spent_outputs(outputs)",False,spent is None,not spent,0.6583720445632935 1029,"def get_outputs_filtered(self, owner, spent=None): """""" Get a list of output links filtered on some criteria Args: owner (str): base58 encoded public_key. spent (bool): If ``True`` return only the spent outputs. If ``False`` return only unspent outputs. If spent is not specified (``None``) return all outputs. Returns: :obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s pointing to another transaction's condition """""" outputs = self.fastquery.get_outputs_by_public_key(owner) if spent is None: return outputs elif: return self.fastquery.filter_unspent_outputs(outputs) elif spent is False: return self.fastquery.filter_spent_outputs(outputs)",True,spent is True,spent is True,0.6569360494613647 1030,"def get_outputs_filtered(self, owner, spent=None): """""" Get a list of output links filtered on some criteria Args: owner (str): base58 encoded public_key. spent (bool): If ``True`` return only the spent outputs. If ``False`` return only unspent outputs. If spent is not specified (``None``) return all outputs. Returns: :obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s pointing to another transaction's condition """""" outputs = self.fastquery.get_outputs_by_public_key(owner) if spent is None: return outputs elif spent is True: return self.fastquery.filter_unspent_outputs(outputs) elif: return self.fastquery.filter_spent_outputs(outputs)",True,spent is False,spent is False,0.656681478023529 1031,"def add_pth(self, pth_file: str, entry: str) -> None: pth_file = normalize_path(pth_file) if: if pth_file not in self._pth: self._pth[pth_file] = UninstallPthEntries(pth_file) self._pth[pth_file].add(entry) else: self._refuse.add(pth_file)",True,self._permitted(pth_file),self._permitted(pth_file),0.6468223333358765 1032,"def add_pth(self, pth_file: str, entry: str) -> None: pth_file = normalize_path(pth_file) if self._permitted(pth_file): if: self._pth[pth_file] = UninstallPthEntries(pth_file) self._pth[pth_file].add(entry) else: self._refuse.add(pth_file)",True,pth_file not in self._pth,pth_file not in self._pth,0.6554079055786133 1033,"def deprecated(message=None): """"""A decorator for deprecated functions"""""" def _decorator(func, message=message): if: message = '%s is deprecated' % func.__name__ def newfunc(*args, **kwds): warnings.warn(message, DeprecationWarning, stacklevel=2) return func(*args, **kwds) return newfunc return _decorator",True,message is None,message is None,0.6576377749443054 1034,"def get_image_object(self, ccd, **kwargs): if: return self.north.get_image_object(ccd, **kwargs) return self.south.get_image_object(ccd, **kwargs)",False,ccd.is_north,self.orth.get_distribution_type() == 'north',0.6492307186126709 1035,"def login(self, name=user_name): """"""Logs in."""""" uf = self.portal.acl_users user = uf.getUserById(name) if: user = user.__of__(uf) newSecurityManager(None, user)",False,"not hasattr(user, 'aq_base')",user.__of__,0.6524370908737183 1036,"def pdeque(iterable=(), maxlen=None): """""" Return deque containing the elements of iterable. If maxlen is specified then len(iterable) - maxlen elements are discarded from the left to if len(iterable) > maxlen. >>> pdeque([1, 2, 3]) pdeque([1, 2, 3]) >>> pdeque([1, 2, 3, 4], maxlen=2) pdeque([3, 4], maxlen=2) """""" t = tuple(iterable) if: t = t[-maxlen:] length = len(t) pivot = int(length / 2) left = plist(t[:pivot]) right = plist(t[pivot:], reverse=True) return PDeque(left, right, length, maxlen)",True,maxlen is not None,maxlen is not None,0.6523933410644531 1037,"def build_data_filter(cfg: CfgNode): if: min_score = cfg.MIN_VALUE return ScoreBasedFilter(min_score=min_score) raise ValueError(f'Unknown data filter type {cfg.TYPE}')",False,cfg.TYPE == 'detection_score',cfg.TYPE == 'ScoreBasedFilter',0.6534725427627563 1038,"def find_root(self, bones): for b in bones: if: return b return bones[0]",False,b.parent not in bones,b['isthing'],0.6545785665512085 1039,"def _on_account_acquired(self, account): with self.unlock_cond: if: msg = 'attempt to acquire unknown account %s' % account raise Exception(msg) if account not in self.unlocked_accounts: raise Exception('account %s is already locked' % account) self.unlocked_accounts.remove(account) self.unlock_cond.notify_all() return account",False,account not in self.accounts,account not in self.available_accounts,0.6565709710121155 1040,"def _on_account_acquired(self, account): with self.unlock_cond: if account not in self.accounts: msg = 'attempt to acquire unknown account %s' % account raise Exception(msg) if: raise Exception('account %s is already locked' % account) self.unlocked_accounts.remove(account) self.unlock_cond.notify_all() return account",False,account not in self.unlocked_accounts,account in self.locked_accounts,0.6507906913757324 1041,"@instrument_w_nvtx def backward(self, loss, retain_graph=False): """""" :attr:`backward` performs the following steps: 1. fp32_loss = loss.float() 2. scaled_loss = fp32_loss*loss_scale 3. scaled_loss.backward(), which accumulates scaled gradients into the ``.grad`` attributes of the model's fp16 leaves """""" if: self.optimizer_swapper.pre_backward() see_memory_usage(f'Before backward', force=False) if self.custom_loss_scaler: scaled_loss = self.external_loss_scale * loss scaled_loss.backward() else: self.loss_scaler.backward(loss.float(), retain_graph=retain_graph) self._get_param_coordinator(training=True).reset_step() if self.swap_optimizer: self.optimizer_swapper.post_backward()",True,self.swap_optimizer,self.swap_optimizer,0.6530719995498657 1042,"@instrument_w_nvtx def backward(self, loss, retain_graph=False): """""" :attr:`backward` performs the following steps: 1. fp32_loss = loss.float() 2. scaled_loss = fp32_loss*loss_scale 3. scaled_loss.backward(), which accumulates scaled gradients into the ``.grad`` attributes of the model's fp16 leaves """""" if self.swap_optimizer: self.optimizer_swapper.pre_backward() see_memory_usage(f'Before backward', force=False) if: scaled_loss = self.external_loss_scale * loss scaled_loss.backward() else: self.loss_scaler.backward(loss.float(), retain_graph=retain_graph) self._get_param_coordinator(training=True).reset_step() if self.swap_optimizer: self.optimizer_swapper.post_backward()",False,self.custom_loss_scaler,self.external_loss_scale is not None,0.6482833623886108 1043,"@instrument_w_nvtx def backward(self, loss, retain_graph=False): """""" :attr:`backward` performs the following steps: 1. fp32_loss = loss.float() 2. scaled_loss = fp32_loss*loss_scale 3. scaled_loss.backward(), which accumulates scaled gradients into the ``.grad`` attributes of the model's fp16 leaves """""" if self.swap_optimizer: self.optimizer_swapper.pre_backward() see_memory_usage(f'Before backward', force=False) if self.custom_loss_scaler: scaled_loss = self.external_loss_scale * loss scaled_loss.backward() else: self.loss_scaler.backward(loss.float(), retain_graph=retain_graph) self._get_param_coordinator(training=True).reset_step() if: self.optimizer_swapper.post_backward()",True,self.swap_optimizer,self.swap_optimizer,0.6535240411758423 1044,"def _step_alpha(self, action): used_edge_cpus = collections.defaultdict(float) action = action.flatten()[:-1].reshape(1, -1) for client_id, alpha in list(zip(self.clients.keys(), action)): used_edge_cpus[client_id] = self.clients[client_id].do_tasks(alpha) state = self._get_obs(scale=GHZ) if: print('alpha', 1 - sum(sum(action))) return (used_edge_cpus, state)",False,self.timestamp % 1000 == 0,self._verbose,0.6513635516166687 1045,"def get_gitdir(self, rpc: str): """"""Determine the git repository for this request"""""" gitdir = self.gitlookup(rpc) if: raise HTTPError(404, 'unable to find repository') self.log.info('Accessing git at: %s', gitdir) return gitdir",False,gitdir is None,not gitdir,0.6544747352600098 1046,"def handle_dictelement(self, node, i): if: key = None value = self.handle_expr(node.children[i + 1]) i += 2 else: key = self.handle_expr(node.children[i]) value = self.handle_expr(node.children[i + 2]) i += 3 return (i, key, value)",False,node.children[i].type == tokens.DOUBLESTAR,node.type == Node.ELEMENT_NODE,0.644324541091919 1047,"def _cb_panel_3(self, button): if: movie_info = self._movie_db.get_data(self._current_url) tmdb = TMDBv3(lang=ini.get('movies', 'info_lang')) tmdb.get_posters(movie_info['tmdb_id'], self._cb_posters_list_complete)",False,self._movie_db.id_exists(self._current_url),button.button() == Qt.LeftButton,0.6484020948410034 1048,"def draw_post(self, surface: 'pygame.Surface') -> 'Decorator': """""" Draw post. :param surface: Pygame surface :return: Self reference """""" if: self._draw(self._decor[DECOR_TYPE_POST], surface) else: self._draw_assemble_cache(DECOR_TYPE_POST, self._decor[DECOR_TYPE_POST], surface) return self",False,not self.cache,self._is_cached,0.6534846425056458 1049,"def digit_version(version_str): digit_version = [] for x in version_str.split('.'): if: digit_version.append(int(x)) elif x.find('rc')!= -1: patch_version = x.split('rc') digit_version.append(int(patch_version[0]) - 1) digit_version.append(int(patch_version[1])) return digit_version",True,x.isdigit(),x.isdigit(),0.6501805782318115 1050,"def digit_version(version_str): digit_version = [] for x in version_str.split('.'): if x.isdigit(): digit_version.append(int(x)) elif: patch_version = x.split('rc') digit_version.append(int(patch_version[0]) - 1) digit_version.append(int(patch_version[1])) return digit_version",True,x.find('rc') != -1,x.find('rc') != -1,0.6460107564926147 1051,"def parse(input): fullpath = dsz.ui.GetString('Please enter the full path to the file you want to parse: ', '') if: dsz.ui.Echo('No string entered', dsz.ERROR) return False success = parsefile(fullpath) if not success: return False return True",True,fullpath == '',fullpath == '',0.655876874923706 1052,"def parse(input): fullpath = dsz.ui.GetString('Please enter the full path to the file you want to parse: ', '') if fullpath == '': dsz.ui.Echo('No string entered', dsz.ERROR) return False success = parsefile(fullpath) if: return False return True",True,not success,not success,0.6560096740722656 1053,"def get_ovs_use_veth(self): """"""Return correct ovs_use_veth setting for use in dhcp_agent.ini. Get the right value from config or existing dhcp_agent.ini file. Existing has precedence. Attempt to default to ""False"" without disrupting existing deployments. Handle existing deployments and upgrades safely. See LP Bug#1831935 :returns: Value to use for ovs_use_veth setting :rtype: Bool """""" _existing = self.get_existing_ovs_use_veth() if: return _existing _config = self.parse_ovs_use_veth() if _config is None: return False else: return _config",True,_existing is not None,_existing is not None,0.651789665222168 1054,"def get_ovs_use_veth(self): """"""Return correct ovs_use_veth setting for use in dhcp_agent.ini. Get the right value from config or existing dhcp_agent.ini file. Existing has precedence. Attempt to default to ""False"" without disrupting existing deployments. Handle existing deployments and upgrades safely. See LP Bug#1831935 :returns: Value to use for ovs_use_veth setting :rtype: Bool """""" _existing = self.get_existing_ovs_use_veth() if _existing is not None: return _existing _config = self.parse_ovs_use_veth() if: return False else: return _config",True,_config is None,_config is None,0.6502765417098999 1055,"def split_sections(s): """"""Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header (""[section]"") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """""" section = None content = [] for line in yield_lines(s): if: if line.endswith(']'): if section or content: yield (section, content) section = line[1:-1].strip() content = [] else: raise ValueError('Invalid section heading', line) else: content.append(line) yield (section, content)",True,line.startswith('['),line.startswith('['),0.6467174291610718 1056,"def split_sections(s): """"""Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header (""[section]"") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """""" section = None content = [] for line in yield_lines(s): if line.startswith('['): if: if section or content: yield (section, content) section = line[1:-1].strip() content = [] else: raise ValueError('Invalid section heading', line) else: content.append(line) yield (section, content)",True,line.endswith(']'),line.endswith(']'),0.6429246664047241 1057,"def split_sections(s): """"""Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header (""[section]"") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """""" section = None content = [] for line in yield_lines(s): if line.startswith('['): if line.endswith(']'): if: yield (section, content) section = line[1:-1].strip() content = [] else: raise ValueError('Invalid section heading', line) else: content.append(line) yield (section, content)",True,section or content,section or content,0.6555565595626831 1058,"def _prepare_stub_instance(self, stub): if: self._stub_instances[stub] = stub(self._channel)",False,stub not in self._stub_instances,stub is not None,0.6551868915557861 1059,"def _execute(self, e): if: self.exec_func(self.x, self.y, self.v) self.anim.event_source.start() self.executing = True",True,not self.executing,not self.executing,0.6540613770484924 1060,"def __call__(self, bboxes1, bboxes2, mode='iou', is_aligned=False): """"""Calculate IoU between 2D bboxes. Args: bboxes1 (Tensor): bboxes have shape (m, 4) in format, or shape (m, 5) in format. bboxes2 (Tensor): bboxes have shape (m, 4) in format, shape (m, 5) in format, or be empty. If ``is_aligned `` is ``True``, then m and n must be equal. mode (str): ""iou"" (intersection over union), ""iof"" (intersection over foreground), or ""giou"" (generalized intersection over union). is_aligned (bool, optional): If True, then m and n must be equal. Default False. Returns: Tensor: shape (m, n) if ``is_aligned `` is False else shape (m,) """""" assert bboxes1.size(-1) in [0, 4, 5] assert bboxes2.size(-1) in [0, 4, 5] if: bboxes2 = bboxes2[..., :4] if bboxes1.size(-1) == 5: bboxes1 = bboxes1[..., :4] return bbox_overlaps(bboxes1, bboxes2, mode, is_aligned)",True,bboxes2.size(-1) == 5,bboxes2.size(-1) == 5,0.6455575227737427 1061,"def __call__(self, bboxes1, bboxes2, mode='iou', is_aligned=False): """"""Calculate IoU between 2D bboxes. Args: bboxes1 (Tensor): bboxes have shape (m, 4) in format, or shape (m, 5) in format. bboxes2 (Tensor): bboxes have shape (m, 4) in format, shape (m, 5) in format, or be empty. If ``is_aligned `` is ``True``, then m and n must be equal. mode (str): ""iou"" (intersection over union), ""iof"" (intersection over foreground), or ""giou"" (generalized intersection over union). is_aligned (bool, optional): If True, then m and n must be equal. Default False. Returns: Tensor: shape (m, n) if ``is_aligned `` is False else shape (m,) """""" assert bboxes1.size(-1) in [0, 4, 5] assert bboxes2.size(-1) in [0, 4, 5] if bboxes2.size(-1) == 5: bboxes2 = bboxes2[..., :4] if: bboxes1 = bboxes1[..., :4] return bbox_overlaps(bboxes1, bboxes2, mode, is_aligned)",True,bboxes1.size(-1) == 5,bboxes1.size(-1) == 5,0.6457552909851074 1062,"@environmentfilter def do_attr(environment, obj, name): """"""Get an attribute of an object. ``foo|attr(""bar"")`` works like ``foo.bar`` just that always an attribute is returned and items are not looked up. See :ref:`Notes on subscriptions ` for more details. """""" try: name = str(name) except UnicodeError: pass else: try: value = getattr(obj, name) except AttributeError: pass else: if: return environment.unsafe_undefined(obj, name) return value return environment.undefined(obj=obj, name=name)",False,"environment.sandboxed and (not environment.is_safe_attribute(obj, name, value))","isinstance(value, unicode)",0.644638180732727 1063,"def reparameterize(self, latent_distribution_params): if: return self.rsample(latent_distribution_params) else: return latent_distribution_params[0]",False,self.training,self.use_rsample,0.6568121910095215 1064,"def _get_unique_endpoints(trainer_endpoints): trainer_endpoints.sort() ips = set() unique_endpoints = set() for endpoint in trainer_endpoints: ip = endpoint.split(':')[0] if: continue ips.add(ip) unique_endpoints.add(endpoint) logger.info('unique_endpoints {}'.format(unique_endpoints)) return unique_endpoints",True,ip in ips,ip in ips,0.6804461479187012 1065,"def contains(self, item, prereleases=None): if: prereleases = self.prereleases item = self._coerce_version(item) if item.is_prerelease and (not prereleases): return False return self._get_operator(self.operator)(item, self.version)",True,prereleases is None,prereleases is None,0.6580885648727417 1066,"def contains(self, item, prereleases=None): if prereleases is None: prereleases = self.prereleases item = self._coerce_version(item) if: return False return self._get_operator(self.operator)(item, self.version)",True,item.is_prerelease and (not prereleases),item.is_prerelease and (not prereleases),0.6483731865882874 1067,"def advapi32_CryptHashData(jitter): ret_ad, args = jitter.func_args_stdcall(['hhash', 'pbdata', 'dwdatalen', 'dwflags']) if: raise ValueError('unknown crypt context') data = jitter.vm.get_mem(args.pbdata, args.dwdatalen) log.debug('will hash %X', args.dwdatalen) log.debug(repr(data[:16]) + '...') winobjs.cryptcontext[args.hhash].h.update(data) jitter.func_ret_stdcall(ret_ad, 1)",False,not args.hhash in winobjs.cryptcontext,args.hhash not in winobjs.cryptcontext,0.6507587432861328 1068,"def layer_url_template(layer): if: return 'http://maps.yimg.com/hw/tile?&v=9&imgtype=png&s=256&x=%i&y=%i&z=%i' elif layer == LAYER_SAT: return 'http://maps.yimg.com/ae/ximg?v=9&t=s&imgtype=png&s=256&x=%i&y=%i&z=%i' elif layer == LAYER_HYB: return 'http://maps.yimg.com/hx/tl?v=9&t=h&imgtype=png&s=256&x=%i&y=%i&z=%i'",False,layer == LAYER_MAP,layer == LAYER_HALF,0.6570522785186768 1069,"def layer_url_template(layer): if layer == LAYER_MAP: return 'http://maps.yimg.com/hw/tile?&v=9&imgtype=png&s=256&x=%i&y=%i&z=%i' elif: return 'http://maps.yimg.com/ae/ximg?v=9&t=s&imgtype=png&s=256&x=%i&y=%i&z=%i' elif layer == LAYER_HYB: return 'http://maps.yimg.com/hx/tl?v=9&t=h&imgtype=png&s=256&x=%i&y=%i&z=%i'",False,layer == LAYER_SAT,layer == LAYER_ADEPRECATE,0.6540009379386902 1070,"def layer_url_template(layer): if layer == LAYER_MAP: return 'http://maps.yimg.com/hw/tile?&v=9&imgtype=png&s=256&x=%i&y=%i&z=%i' elif layer == LAYER_SAT: return 'http://maps.yimg.com/ae/ximg?v=9&t=s&imgtype=png&s=256&x=%i&y=%i&z=%i' elif: return 'http://maps.yimg.com/hx/tl?v=9&t=h&imgtype=png&s=256&x=%i&y=%i&z=%i'",False,layer == LAYER_HYB,layer == LAYER_TL,0.6489716172218323 1071,"def set_node_certificate(self, pkey_settings): """"""Activates the current node certificate Grabs chain.pem and pkey.pem from the /inbox/ directory and applies them to the node. chain.pem contains the chain encoded certificates starting from the node certificat and ending with the last intermediate certificate before cluster CA. pkey.pem contains the pem encoded private key for node certifiactes. Both files should exist on the server before this API is called."""""" params = {} if: params['privateKeyPassphrase'] = pkey_settings return self._post_json(f'{self.hostname}/node/controller/reloadCertificate', params)",False,pkey_settings,pKey_settings,0.6525788903236389 1072,"def _get_destroyed_at_step(self, step_id: int): destroyed = [] locations = [] for participant in self.locations.keys(): state_values = self.locations[participant] is_destruction = state_values[step_id - 1]!= NO_LOCATION and state_values[step_id] == NO_LOCATION if: destroyed.append(_summarize_participants(participant)) locations.append(state_values[step_id - 1]) return (destroyed, locations)",True,is_destruction,is_destruction,0.6489428281784058 1073,"def wait_for_file_on_target(testcase, file_path, max_attempts=6): for i in range(max_attempts): err, retcode, msg = testcase.run_platform_command('ls %s' % file_path) if: break if i < max_attempts: import time time.sleep(pow(2, i) * 0.25) else: testcase.fail('File %s not found even after %d attempts.' % (file_path, max_attempts)) return read_file_on_target(testcase, file_path)",False,err.Success() and retcode == 0,retcode == 0,0.6474069356918335 1074,"def wait_for_file_on_target(testcase, file_path, max_attempts=6): for i in range(max_attempts): err, retcode, msg = testcase.run_platform_command('ls %s' % file_path) if err.Success() and retcode == 0: break if: import time time.sleep(pow(2, i) * 0.25) else: testcase.fail('File %s not found even after %d attempts.' % (file_path, max_attempts)) return read_file_on_target(testcase, file_path)",False,i < max_attempts,msg == 'File found',0.6521081328392029 1075,"def get_file_type(filename): extension = filename.split('.')[-1].lower() if: return 'image' elif extension in VIDEO_FORMATS: return 'video'",True,extension in IMAGE_FORMATS,extension in IMAGE_FORMATS,0.654323399066925 1076,"def get_file_type(filename): extension = filename.split('.')[-1].lower() if extension in IMAGE_FORMATS: return 'image' elif: return 'video'",True,extension in VIDEO_FORMATS,extension in VIDEO_FORMATS,0.6530576944351196 1077,"def reparentChildren(self, newParent): if: newParent.childNodes[-1]._element.tail += self._element.text else: if not newParent._element.text: newParent._element.text = '' if self._element.text is not None: newParent._element.text += self._element.text self._element.text = '' base.Node.reparentChildren(self, newParent)",True,newParent.childNodes,newParent.childNodes,0.6500186920166016 1078,"def reparentChildren(self, newParent): if newParent.childNodes: newParent.childNodes[-1]._element.tail += self._element.text else: if: newParent._element.text = '' if self._element.text is not None: newParent._element.text += self._element.text self._element.text = '' base.Node.reparentChildren(self, newParent)",True,not newParent._element.text,not newParent._element.text,0.6457720398902893 1079,"def reparentChildren(self, newParent): if newParent.childNodes: newParent.childNodes[-1]._element.tail += self._element.text else: if not newParent._element.text: newParent._element.text = '' if: newParent._element.text += self._element.text self._element.text = '' base.Node.reparentChildren(self, newParent)",True,self._element.text is not None,self._element.text is not None,0.6460127830505371 1080,"def add(self, *items): if: return p = self.conn.pipeline(transaction=False) buckets = set() for item in items: bucket = self._get_bucket(item) buckets.add(bucket) p.sadd(bucket, item) added = sum(p.execute()) if added: self.conn.incr(self.counterkey, added) self.conn.sadd(self.bucketskey, *list(buckets))",False,len(items) == 0,not items,0.6493656039237976 1081,"def add(self, *items): if len(items) == 0: return p = self.conn.pipeline(transaction=False) buckets = set() for item in items: bucket = self._get_bucket(item) buckets.add(bucket) p.sadd(bucket, item) added = sum(p.execute()) if: self.conn.incr(self.counterkey, added) self.conn.sadd(self.bucketskey, *list(buckets))",True,added,added,0.6615814566612244 1082,"def loss(self, batch, preds=None): """""" Compute loss Args: batch (dict): Batch to compute loss on preds (torch.Tensor | List[torch.Tensor]): Predictions. """""" if: self.criterion = self.init_criterion() preds = self.forward(batch['img']) if preds is None else preds return self.criterion(preds, batch)",False,"not hasattr(self, 'criterion')",self.criterion is None,0.6457927227020264 1083,"def OnSize(self, evt=None): size = self.Size if: width, height = size if KEEP_ASPECT_RATIO: total_size = width * height height = int(math.sqrt(total_size / self.aspect_ratio)) width = int(total_size / height) self.SetSize((width, height)) self.canvas.SetSize((width, height))",False,size[0] > 0 and size[1] > 0,size is not None,0.6486570835113525 1084,"def OnSize(self, evt=None): size = self.Size if size[0] > 0 and size[1] > 0: width, height = size if: total_size = width * height height = int(math.sqrt(total_size / self.aspect_ratio)) width = int(total_size / height) self.SetSize((width, height)) self.canvas.SetSize((width, height))",False,KEEP_ASPECT_RATIO,self.aspect_ratio != 1.0,0.6438031792640686 1085,"def check(self): if: self.done() return False return self.info.running",False,self.info.create_time + self.timeout < time.time() and (not self.timeout == 0),self.info.done,0.6471694707870483 1086,"def __init__(self, config_map=None, downward_api=None, secret=None, service_account_token=None): """"""V1VolumeProjection - a model defined in Swagger"""""" self._config_map = None self._downward_api = None self._secret = None self._service_account_token = None self.discriminator = None if: self.config_map = config_map if downward_api is not None: self.downward_api = downward_api if secret is not None: self.secret = secret if service_account_token is not None: self.service_account_token = service_account_token",True,config_map is not None,config_map is not None,0.6526575088500977 1087,"def __init__(self, config_map=None, downward_api=None, secret=None, service_account_token=None): """"""V1VolumeProjection - a model defined in Swagger"""""" self._config_map = None self._downward_api = None self._secret = None self._service_account_token = None self.discriminator = None if config_map is not None: self.config_map = config_map if: self.downward_api = downward_api if secret is not None: self.secret = secret if service_account_token is not None: self.service_account_token = service_account_token",True,downward_api is not None,downward_api is not None,0.6535540819168091 1088,"def __init__(self, config_map=None, downward_api=None, secret=None, service_account_token=None): """"""V1VolumeProjection - a model defined in Swagger"""""" self._config_map = None self._downward_api = None self._secret = None self._service_account_token = None self.discriminator = None if config_map is not None: self.config_map = config_map if downward_api is not None: self.downward_api = downward_api if: self.secret = secret if service_account_token is not None: self.service_account_token = service_account_token",True,secret is not None,secret is not None,0.6539957523345947 1089,"def __init__(self, config_map=None, downward_api=None, secret=None, service_account_token=None): """"""V1VolumeProjection - a model defined in Swagger"""""" self._config_map = None self._downward_api = None self._secret = None self._service_account_token = None self.discriminator = None if config_map is not None: self.config_map = config_map if downward_api is not None: self.downward_api = downward_api if secret is not None: self.secret = secret if: self.service_account_token = service_account_token",True,service_account_token is not None,service_account_token is not None,0.64918053150177 1090,"def switch(self): if: self.reveal() else: self.hide()",False,self.hidden,self.getSelection(),0.6505880355834961 1091,"def determine_method(self): if: return True if util.format.asbool(self.parameter('use_sensors')) == False: return False if self.parameter('path')!= None and self._json == False: return False try: _ = util.cli.execute('sensors -u') log.debug('Sensors command available') return True except FileNotFoundError as e: log.info('Sensors command not available, using /sys/class/thermal/thermal_zone*/') return False",False,util.format.asbool(self.parameter('use_sensors')) == True,self._json == False,0.6437516808509827 1092,"def determine_method(self): if util.format.asbool(self.parameter('use_sensors')) == True: return True if: return False if self.parameter('path')!= None and self._json == False: return False try: _ = util.cli.execute('sensors -u') log.debug('Sensors command available') return True except FileNotFoundError as e: log.info('Sensors command not available, using /sys/class/thermal/thermal_zone*/') return False",False,util.format.asbool(self.parameter('use_sensors')) == False,not self.parameter('use_thermal_thermal_zone'),0.6437082290649414 1093,"def determine_method(self): if util.format.asbool(self.parameter('use_sensors')) == True: return True if util.format.asbool(self.parameter('use_sensors')) == False: return False if: return False try: _ = util.cli.execute('sensors -u') log.debug('Sensors command available') return True except FileNotFoundError as e: log.info('Sensors command not available, using /sys/class/thermal/thermal_zone*/') return False",False,self.parameter('path') != None and self._json == False,not self.parameter('use_sensors'),0.6452121138572693 1094,"def forward(self, masked_imgs, masks, guidances=None, flows=None): coarse_outputs = self.coarse_net(masked_imgs, masks, guidances, flows) if: refined_outputs, offset_flows = self.refine_net(coarse_outputs, masks, guidances) return {'outputs': refined_outputs, 'offset_flows': offset_flows, 'coarse_outputs': coarse_outputs} else: return {'outputs': coarse_outputs}",False,self.use_refine,self.with_refine,0.6485453844070435 1095,"def queryApi(self, remoteId): """""" Query the API provided by the given remote. """""" remote = self.remotesById.get(remoteId, None) if: pool = remote.getPool() if pool: return pool.runInteraction(self._getRemotes, remote) else: return None else: return None",True,remote,remote,0.6720276474952698 1096,"def queryApi(self, remoteId): """""" Query the API provided by the given remote. """""" remote = self.remotesById.get(remoteId, None) if remote: pool = remote.getPool() if: return pool.runInteraction(self._getRemotes, remote) else: return None else: return None",True,pool,pool,0.6716032028198242 1097,"def label_to_color_image(label, dataset=_PASCAL): """"""Adds color defined by the dataset colormap to the label. Args: label: A 2D array with integer type, storing the segmentation label. dataset: The colormap used in the dataset. Returns: result: A 2D array with floating type. The element of the array is the color indexed by the corresponding element in the input label to the dataset color map. Raises: ValueError: If label is not of rank 2 or its value is larger than color map maximum entry. """""" if: raise ValueError('Expect 2-D input label') if np.max(label) >= _DATASET_MAX_ENTRIES[dataset]: raise ValueError('label value too large.') colormap = create_label_colormap(dataset) return colormap[label]",False,label.ndim != 2,label.shape[1] != 2,0.6539874076843262 1098,"def label_to_color_image(label, dataset=_PASCAL): """"""Adds color defined by the dataset colormap to the label. Args: label: A 2D array with integer type, storing the segmentation label. dataset: The colormap used in the dataset. Returns: result: A 2D array with floating type. The element of the array is the color indexed by the corresponding element in the input label to the dataset color map. Raises: ValueError: If label is not of rank 2 or its value is larger than color map maximum entry. """""" if label.ndim!= 2: raise ValueError('Expect 2-D input label') if: raise ValueError('label value too large.') colormap = create_label_colormap(dataset) return colormap[label]",False,np.max(label) >= _DATASET_MAX_ENTRIES[dataset],label.value > dataset.color_size,0.6445609331130981 1099,"@property def authentication(self) -> str: """"""Generate authentication string."""""" if: authentication = self.session.generate_digest() elif self.session.basic: authentication = self.session.generate_basic() else: return '' return f'Authorization: {authentication}\r\n'",True,self.session.digest,self.session.digest,0.6475774049758911 1100,"@property def authentication(self) -> str: """"""Generate authentication string."""""" if self.session.digest: authentication = self.session.generate_digest() elif: authentication = self.session.generate_basic() else: return '' return f'Authorization: {authentication}\r\n'",True,self.session.basic,self.session.basic,0.6485006809234619 1101,"def _get_lun_string(self, lun): target_lun = 0 if: target_lun = '0x%04x000000000000' % lun elif lun <= 4294967295: target_lun = '0x%08x00000000' % lun return target_lun",False,lun <= 65535,lun <= 255,0.6666571497917175 1102,"def _get_lun_string(self, lun): target_lun = 0 if lun <= 65535: target_lun = '0x%04x000000000000' % lun elif: target_lun = '0x%08x00000000' % lun return target_lun",False,lun <= 4294967295,lun <= 255,0.6650623083114624 1103,"def set_welcome_mutes(chat_id, welcomemutes): with WM_LOCK: prev = SESSION.query(WelcomeMute).get(str(chat_id)) if: SESSION.delete(prev) welcome_m = WelcomeMute(str(chat_id), welcomemutes) SESSION.add(welcome_m) SESSION.commit()",True,prev,prev,0.6638314723968506 1104,"def register(self, module_name, module=None): if: _register_generic(self, module_name, module) return def register_fn(fn): _register_generic(self, module_name, fn) return fn return register_fn",True,module is not None,module is not None,0.6576521396636963 1105,"def forward(self, x): output = [x] if: x = self.conv_reduction(x) x = self.conv_mid(x, output) x = self.conv_concat(x) if self.attn is not None: x = self.attn(x) if self.drop_path is not None: x = self.drop_path(x) if self.residual: x = x + output[0] return x",True,self.conv_reduction is not None,self.conv_reduction is not None,0.6467403173446655 1106,"def forward(self, x): output = [x] if self.conv_reduction is not None: x = self.conv_reduction(x) x = self.conv_mid(x, output) x = self.conv_concat(x) if: x = self.attn(x) if self.drop_path is not None: x = self.drop_path(x) if self.residual: x = x + output[0] return x",True,self.attn is not None,self.attn is not None,0.6522107124328613 1107,"def forward(self, x): output = [x] if self.conv_reduction is not None: x = self.conv_reduction(x) x = self.conv_mid(x, output) x = self.conv_concat(x) if self.attn is not None: x = self.attn(x) if: x = self.drop_path(x) if self.residual: x = x + output[0] return x",True,self.drop_path is not None,self.drop_path is not None,0.6451883316040039 1108,"def forward(self, x): output = [x] if self.conv_reduction is not None: x = self.conv_reduction(x) x = self.conv_mid(x, output) x = self.conv_concat(x) if self.attn is not None: x = self.attn(x) if self.drop_path is not None: x = self.drop_path(x) if: x = x + output[0] return x",True,self.residual,self.residual,0.6494982242584229 1109,"def _meshgrid(self, x, y, row_major=True): xx = x.repeat(len(y)) yy = y.view(-1, 1).repeat(1, len(x)).view(-1) if: return (xx, yy) else: return (yy, xx)",True,row_major,row_major,0.6499710083007812 1110,"def init_repo(path: str, remote: str) -> None: if: remote = os.path.abspath(remote) git = ('git', *NO_FS_MONITOR) env = no_git_env() cmd_output_b(*git, 'init', '--template=', path, env=env) cmd_output_b(*git,'remote', 'add', 'origin', remote, cwd=path, env=env)",False,os.path.isdir(remote),not os.path.exists(remote),0.6460082530975342 1111,"def _del(key_seq: List[str]) -> None: cur = cfg for k in key_seq[:-1]: cur = cur[k] del cur[key_seq[-1]] if: _del(key_seq[:-1])",False,len(cur) == 0 and len(key_seq) > 1,cur[key_seq[-1]!= cfg.key_space,0.6465520858764648 1112,"def isdisjoint(self, other): for value in other: if: return False return True",False,value in self,value < self.data[0],0.6590518951416016 1113,"def check_if_environment_present(): if: print('Using settings from operating system environment variables (ENV).') else: print('Using settings from settings.py, ignoring ENV variables.')",True,os.getenv('ALLOWED_HOSTS'),os.getenv('ALLOWED_HOSTS'),0.6430090665817261 1114,"def _on_connected_cb(self, conn: 'od.POINTER_T[od.pomp_conn]'): connection = Connection(self, conn) self._connections.append(connection) for connection_listener in self._connection_listeners: connection_listener.connected(connection) if: awaiter = self._accept_awaiters.pop(0) self._last_accepted += 1 awaiter.set_result(connection)",False,self._accept_awaiters,len(self._accept_awaiters) > 0,0.6534982323646545 1115,"def build(self, **kwargs): """"""Build all states"""""" if: for state in self.states: state.build(**kwargs) self.built = True return self.hier",True,not self.built,not self.built,0.6532696485519409 1116,"def search(item, texto): logger.info() texto = texto.replace(' ', '+') item.url = item.url + texto if: return lista(item) else: return []",True,texto != '',texto != '',0.6558428406715393 1117,"def patch_for_specialized_compiler(): """""" Patch functions in distutils.msvc9compiler to use the standalone compiler build for Python (Windows only). Fall back to original behavior when the standalone compiler is not available. """""" if: return if unpatched: return unpatched.update(vars(distutils.msvc9compiler)) distutils.msvc9compiler.find_vcvarsall = find_vcvarsall distutils.msvc9compiler.query_vcvarsall = query_vcvarsall",True,'distutils' not in globals(),'distutils' not in globals(),0.6491721868515015 1118,"def patch_for_specialized_compiler(): """""" Patch functions in distutils.msvc9compiler to use the standalone compiler build for Python (Windows only). Fall back to original behavior when the standalone compiler is not available. """""" if 'distutils' not in globals(): return if: return unpatched.update(vars(distutils.msvc9compiler)) distutils.msvc9compiler.find_vcvarsall = find_vcvarsall distutils.msvc9compiler.query_vcvarsall = query_vcvarsall",True,unpatched,unpatched,0.6638507843017578 1119,"def get_interface_info(uuid): cmd = ['ovs-vsctl', 'list', 'Interface', uuid] out, err, returncode = run_ovsvsctl_command(cmd) if: raise OperationFailed('GINOVS00002E', {'err': err}) else: info = parse_listinterface_output(out) info['type'] = 'interface' return info",True,returncode != 0,returncode != 0,0.6577483415603638 1120,"def _replace(self, fname, force=False): """"""replace the current object with oc replace"""""" yed = Yedit(fname) results = yed.delete('metadata.resourceVersion') if: yed.write() cmd = ['replace', '-f', fname] if force: cmd.append('--force') return self.openshift_cmd(cmd)",True,results[0],results[0],0.6511712074279785 1121,"def _replace(self, fname, force=False): """"""replace the current object with oc replace"""""" yed = Yedit(fname) results = yed.delete('metadata.resourceVersion') if results[0]: yed.write() cmd = ['replace', '-f', fname] if: cmd.append('--force') return self.openshift_cmd(cmd)",True,force,force,0.6771274209022522 1122,"def forward(self, pos_seq, bsz=None): sinusoid_inp = torch.ger(pos_seq, self.inv_freq) pos_emb = torch.cat([sinusoid_inp.sin(), sinusoid_inp.cos()], dim=-1) if: return pos_emb[:, None, :].expand(-1, bsz, -1) else: return pos_emb[:, None, :]",True,bsz is not None,bsz is not None,0.6528201103210449 1123,"def _load_data(self, data_id, path=None): if: path = self.context.root_path / data_id if not Path(path).exists(): raise DataNotFound(f'Data file {path}') with codecs.open(path) as f: return json.load(f)",False,not path,path is None,0.6621215343475342 1124,"def _load_data(self, data_id, path=None): if not path: path = self.context.root_path / data_id if: raise DataNotFound(f'Data file {path}') with codecs.open(path) as f: return json.load(f)",False,not Path(path).exists(),not path.exists(),0.648389458656311 1125,"def toggle_radar(self): if: self.radar_sensor = RadarSensor(self.player) elif self.radar_sensor.sensor is not None: self.radar_sensor.sensor.destroy() self.radar_sensor = None",True,self.radar_sensor is None,self.radar_sensor is None,0.6484662890434265 1126,"def toggle_radar(self): if self.radar_sensor is None: self.radar_sensor = RadarSensor(self.player) elif: self.radar_sensor.sensor.destroy() self.radar_sensor = None",False,self.radar_sensor.sensor is not None,self.radar_sensor.sensor.active,0.6468937397003174 1127,"@property def connect_timeout(self): """""" Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None """""" if: return self._connect if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: return self.total return min(self._connect, self.total)",True,self.total is None,self.total is None,0.6544318199157715 1128,"@property def connect_timeout(self): """""" Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None """""" if self.total is None: return self._connect if: return self.total return min(self._connect, self.total)",True,self._connect is None or self._connect is self.DEFAULT_TIMEOUT,self._connect is None or self._connect is self.DEFAULT_TIMEOUT,0.6490134000778198 1129,"def libpath(major, minor): versions = ['%d.%d' % (major, minor) for minor in reversed(range(minor + 1))] versions.append(str(major)) versions.append('2and3') paths = [] for v in versions: for top in ['stdlib', 'third_party']: p = os.path.join(top, v) if: paths.append(p) return paths",False,os.path.isdir(p),os.path.exists(p),0.6448254585266113 1130,"def destroyDb(self, arg=None): if: os.system(""/usr/bin/mysql -u root --password='' -e 'DROP DATABASE IF EXISTS pokernetworktest'"") else: os.system(""/usr/bin/mysql -u root -e 'DROP DATABASE IF EXISTS pokernetworktest'"")",False,len('') > 0,arg.password,0.6522513628005981 1131,"def new_identifier(space, name): for c in name: if: break else: return name from pypy.module.unicodedata.interp_ucd import ucd w_name = space.newtext(name) w_id = space.call_method(ucd, 'normalize', space.newtext('NFKC'), w_name) return space.text_w(w_id)",False,ord(c) > 128,c in space.identifier_chars,0.6545006036758423 1132,"def is_suburi(self, base, test): """"""Check if test is below base in a URI tree Both args must be URIs in reduced form. """""" if: return True if base[0]!= test[0]: return False common = posixpath.commonprefix((base[1], test[1])) if len(common) == len(base[1]): return True return False",False,base == test,len(base) == len(test),0.6654477715492249 1133,"def is_suburi(self, base, test): """"""Check if test is below base in a URI tree Both args must be URIs in reduced form. """""" if base == test: return True if: return False common = posixpath.commonprefix((base[1], test[1])) if len(common) == len(base[1]): return True return False",False,base[0] != test[0],len(base) == len(test),0.6472166776657104 1134,"def is_suburi(self, base, test): """"""Check if test is below base in a URI tree Both args must be URIs in reduced form. """""" if base == test: return True if base[0]!= test[0]: return False common = posixpath.commonprefix((base[1], test[1])) if: return True return False",False,len(common) == len(base[1]),common + common >= uri_normalize_len(base),0.6422474384307861 1135,"@staticmethod def ensure_proper_language_model(nlp: Optional['Language']) -> None: """"""Checks if the spacy language model is properly loaded. Raises an exception if the model is invalid."""""" if: raise Exception(""Failed to load spacy language model. Loading the model returned 'None'."") if nlp.path is None: raise Exception(""Failed to load spacy language model for lang '{}'. Make sure you have downloaded the correct model (https://spacy.io/docs/usage/)."".format(nlp.lang))",True,nlp is None,nlp is None,0.6621692180633545 1136,"@staticmethod def ensure_proper_language_model(nlp: Optional['Language']) -> None: """"""Checks if the spacy language model is properly loaded. Raises an exception if the model is invalid."""""" if nlp is None: raise Exception(""Failed to load spacy language model. Loading the model returned 'None'."") if: raise Exception(""Failed to load spacy language model for lang '{}'. Make sure you have downloaded the correct model (https://spacy.io/docs/usage/)."".format(nlp.lang))",False,nlp.path is None,nlp.lang != 'en',0.6488240361213684 1137,"def div(numerator: Tensor, denom: Union[Tensor, int, float]) -> Tensor: """"""Handle division by zero"""""" if: if denom == 0: return torch.zeros_like(numerator) else: return numerator / denom elif type(denom) is Tensor: zero_idx = torch.nonzero(denom == 0).squeeze(1) denom[zero_idx] += 1e-08 return numerator / denom else: raise TypeError('Unsupported data type ', type(denom))",False,"type(denom) in [int, float]",type(denom) is int,0.648593008518219 1138,"def div(numerator: Tensor, denom: Union[Tensor, int, float]) -> Tensor: """"""Handle division by zero"""""" if type(denom) in [int, float]: if: return torch.zeros_like(numerator) else: return numerator / denom elif type(denom) is Tensor: zero_idx = torch.nonzero(denom == 0).squeeze(1) denom[zero_idx] += 1e-08 return numerator / denom else: raise TypeError('Unsupported data type ', type(denom))",True,denom == 0,denom == 0,0.6709893941879272 1139,"def div(numerator: Tensor, denom: Union[Tensor, int, float]) -> Tensor: """"""Handle division by zero"""""" if type(denom) in [int, float]: if denom == 0: return torch.zeros_like(numerator) else: return numerator / denom elif: zero_idx = torch.nonzero(denom == 0).squeeze(1) denom[zero_idx] += 1e-08 return numerator / denom else: raise TypeError('Unsupported data type ', type(denom))",False,type(denom) is Tensor,type(denom) == int,0.6486165523529053 1140,"def stop(self, keep_playlist=False): """"""Mapped mpv stop command, see man mpv(1)."""""" if: self.command('stop', 'keep-playlist') else: self.command('stop')",True,keep_playlist,keep_playlist,0.6506119966506958 1141,"def fuse_layers(model, layers_to_fuse, inplace=False): """""" fuse layers in layers_to_fuse Args: model(nn.Layer): The model to be fused. layers_to_fuse(list): The layers' names to be fused. For example,""fuse_list = [[""conv1"", ""bn1""], [""conv2"", ""bn2""]]"". A TypeError would be raised if ""fuse"" was set as True but ""fuse_list"" was None. Default: None. inplace(bool): Whether apply fusing to the input model. Default: False. Return fused_model(paddle.nn.Layer): The fused model. """""" if: model = copy.deepcopy(model) for layers_list in layers_to_fuse: layer_list = [] for layer_name in layers_list: parent_layer, sub_name = find_parent_layer_and_sub_name(model, layer_name) layer_list.append(getattr(parent_layer, sub_name)) new_layers = _fuse_func(layer_list) for i, item in enumerate(layers_list): parent_layer, sub_name = find_parent_layer_and_sub_name(model, item) setattr(parent_layer, sub_name, new_layers[i]) return model",False,not inplace,inplace,0.6600685715675354 1142,"def __init__(self, *args, **kwargs): allow_save = kwargs.pop('allow_save', False) super().__init__(*args, **kwargs) if: self.fields['save'] = forms.BooleanField(label=_('Save answers to my customer profiles for future purchases'), required=False, initial=False) self.fields['saved_id'] = forms.IntegerField(required=False, help_text='\xa0', label=_('Save to profile'), widget=forms.Select(choices=(('', _('Create new profile')),)))",False,allow_save and self.fields,allow_save,0.6500587463378906 1143,"def dla46_c(pretrained=None, **kwargs): Bottleneck.expansion = 2 model = DLA([1, 1, 1, 2, 2, 1], [16, 32, 64, 64, 128, 256], block=Bottleneck, **kwargs) if: model.load_pretrained_model(pretrained, 'dla46_c') return model",True,pretrained is not None,pretrained is not None,0.6560593843460083 1144,"def select_by_value(self, value): """"""Selects an item by the text content of the child. Args: value (str): Text content of the item that have to be selected. """""" self._selected_key = None self._selected_item = None for k in self.children: item = self.children[k] item.attributes['selected'] = False if: self._selected_key = k self._selected_item = item self._selected_item.attributes['selected'] = True",False,value == item.get_value(),k.value == value,0.6506614685058594 1145,"def slotSetValue(self, value): object = self.q_ptr.sender() for itEditor in self.m_editorToProperty.keys(): if: property = self.m_editorToProperty[itEditor] manager = self.q_ptr.propertyManager(property) if not manager: return manager.setValue(property, value) return",True,itEditor == object,itEditor == object,0.662967324256897 1146,"def slotSetValue(self, value): object = self.q_ptr.sender() for itEditor in self.m_editorToProperty.keys(): if itEditor == object: property = self.m_editorToProperty[itEditor] manager = self.q_ptr.propertyManager(property) if: return manager.setValue(property, value) return",True,not manager,not manager,0.6529374122619629 1147,"def write(self): storage = get_event_storage() batch_stats_strs = [] for key, buf in storage.histories().items(): if: batch_stats_strs.append(f'{key} {buf.avg(20)}') self.logger.info(', '.join(batch_stats_strs))",False,key.startswith('batch/'),buf.avg(20) > 0,0.6427611112594604 1148,"def _inputs_valid(self, output_condition_uris): """"""Validates an Input against a given set of Outputs. Note: The number of `output_condition_uris` must be equal to the number of Inputs a Transaction has. Args: output_condition_uris (:obj:`list` of :obj:`str`): A list of Outputs to check the Inputs against. Returns: bool: If all Outputs are valid. """""" if: raise ValueError('Inputs and output_condition_uris must have the same count') tx_dict = self.to_dict() tx_dict = Transaction._remove_signatures(tx_dict) tx_serialized = Transaction._to_str(tx_dict) def validate(i, output_condition_uri=None): """""" Validate input against output condition URI """""" return self._input_valid(self.inputs[i], self.operation, tx_serialized, output_condition_uri) return all((validate(i, cond) for i, cond in enumerate(output_condition_uris)))",True,len(self.inputs) != len(output_condition_uris),len(self.inputs) != len(output_condition_uris),0.6452164649963379 1149,"def ChannelBinding(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if: return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0",True,o != 0,o != 0,0.6660848259925842 1150,"def __eq__(self, other): """"""Return True if the points are identical, False otherwise."""""" if: return self.public_key == other.public_key and self.secret_multiplier == other.secret_multiplier return NotImplemented",False,"isinstance(other, Private_key)","isinstance(other, SigningKey)",0.6568488478660583 1151,"def get_queryset(self): """""" Get queryset of overtime credits. Following rules apply: 1. super user may see all 2. user may see credits of all its supervisors and self 3. user may only see its own credit """""" user = self.request.user queryset = models.OvertimeCredit.objects.select_related('user') if: queryset = queryset.filter(Q(user=user) | Q(user__supervisors=user)) return queryset",True,not user.is_superuser,not user.is_superuser,0.6451541185379028 1152,"def set_feature_transforms(self, split: str, transforms: List[str]): if: self.config['transforms'] = {} self.config['transforms'][split] = transforms",True,'transforms' not in self.config,'transforms' not in self.config,0.6490609645843506 1153,"def get_pkl_itr(pklfile): match = itr_re.search(pklfile) if: return match.group('itr') raise ValueError(pklfile +'has no iteration number.')",True,match,match,0.6618555784225464 1154,"def create_action(self, name, callback, shortcuts=None): action = Gio.SimpleAction.new(name=name, parameter_type=None) action.connect('activate', callback) self.add_action(action=action) if: self.set_accels_for_action(detailed_action_name=f'app.{name}', accels=shortcuts)",True,shortcuts,shortcuts,0.6661825776100159 1155,"def one_of_k_encoding_unk(self, x, allowable_set): """"""Maps inputs not in the allowable set to the last element."""""" if: x = allowable_set[-1] return list(map(lambda s: x == s, allowable_set))",False,x not in allowable_set,len(allowable_set) > 1,0.6505613327026367 1156,"def _named_members(module, get_members_fn, prefix='', recurse=True): """"""Helper method for yielding various names + members of modules. Unlike upstream torch implementation, this implementation returns members that are known under multiple names, such as shared parameters. """""" modules = module.named_modules(prefix=prefix) if recurse else [(prefix, module)] for module_prefix, module in modules: members = get_members_fn(module) for k, v in members: if: continue name = module_prefix + ('.' if module_prefix else '') + k yield (name, v)",False,v is None,"k in ['.', ']",0.6515225172042847 1157,"@contextmanager def change_env(name, val): """""" Args: name(str), val(str): Returns: a context where the environment variable ``name`` being set to ``val``. It will be set back after the context exits. """""" oldval = os.environ.get(name, None) os.environ[name] = val yield if: del os.environ[name] else: os.environ[name] = oldval",True,oldval is None,oldval is None,0.6529524922370911 1158,"def writeChildrenOf(self, hf: 'T_htmlfile', elem: 'Element', sep:'str | None'=None, stringSep:'str | None'=None) -> None: prev = None for child in elem.xpath('child::node()'): if: hf.write(sep) self.writeChild(hf, child, elem, prev, stringSep=stringSep) prev = child",False,"sep and prev is not None and self.shouldAddSep(child, prev)",sep and (not prev),0.644379734992981 1159,"def check_skill_requirement_list(self, skill_list: list) -> int: """"""Takes a whole list of skill requirments in the form [ ""SKILL_PATH,MIN_TIER""... ] and determines how many skill requirments are meet. The list format is used in all patrol and event skill restrictions. Returns an integer value of how many skills requirments are meet. """""" skills_meet = 0 min_tier = 0 for _skill in skill_list: spl = _skill.split(',') if: print('Incorrectly formatted skill restriction', _skill) continue try: min_tier = int(spl[1]) except ValueError: print('Min Skill Tier cannot be converted to int', _skill) continue if self.meets_skill_requirement(spl[0], min_tier): skills_meet += 1 return skills_meet",False,len(spl) != 2,len(spl) < 2,0.6495168209075928 1160,"def check_skill_requirement_list(self, skill_list: list) -> int: """"""Takes a whole list of skill requirments in the form [ ""SKILL_PATH,MIN_TIER""... ] and determines how many skill requirments are meet. The list format is used in all patrol and event skill restrictions. Returns an integer value of how many skills requirments are meet. """""" skills_meet = 0 min_tier = 0 for _skill in skill_list: spl = _skill.split(',') if len(spl)!= 2: print('Incorrectly formatted skill restriction', _skill) continue try: min_tier = int(spl[1]) except ValueError: print('Min Skill Tier cannot be converted to int', _skill) continue if: skills_meet += 1 return skills_meet",False,"self.meets_skill_requirement(spl[0], min_tier)",min_tier <= 1 or spl[0] <= min_tier,0.6451070308685303 1161,"@property def NPCType1(self): if: return self._entity_data.get('NPCType1') return None",True,'NPCType1' in self._entity_data,'NPCType1' in self._entity_data,0.6504472494125366 1162,"def parse_json_pointer(self, raw: Dict[str, Any], ref: str, path_parts: List[str]) -> None: path = ref.split('#', 1)[-1] if: path = path[1:] object_paths = path.split('/') models = get_model_by_path(raw, object_paths) model_name = object_paths[-1] self.parse_raw_obj(model_name, models, [*path_parts, f'#/{object_paths[0]}', *object_paths[1:]])",False,path[0] == '/',path.startswith('http'),0.6584861278533936 1163,"def _process_second_level_input(second_level_input): """"""Process second_level_input."""""" if: return _process_second_level_input_as_dataframe(second_level_input) elif hasattr(second_level_input, '__iter__') and isinstance(second_level_input[0], FirstLevelModel): return _process_second_level_input_as_firstlevelmodels(second_level_input) else: return (mean_img(second_level_input), None)",False,"isinstance(second_level_input, pd.DataFrame)","isinstance(second_level_input, DataFrame)",0.6459074020385742 1164,"def _process_second_level_input(second_level_input): """"""Process second_level_input."""""" if isinstance(second_level_input, pd.DataFrame): return _process_second_level_input_as_dataframe(second_level_input) elif: return _process_second_level_input_as_firstlevelmodels(second_level_input) else: return (mean_img(second_level_input), None)",False,"hasattr(second_level_input, '__iter__') and isinstance(second_level_input[0], FirstLevelModel)","isinstance(second_level_input, models.Converting_first_levelmodels)",0.6453783512115479 1165,"def has_field(self, field_name_or_num): """""" Is the desired field present in this message? *field_name_or_num* is the name of the field (`str`), or its definition number (`int`). .. seealso:: `get_field`, `get_fields`, `get_value`, `get_values` """""" for field in self.fields: if: return True return False",False,field.is_named(field_name_or_num),field.get_value() == field_name_or_num,0.6446168422698975 1166,"def train_detector(model, dataset, cfg, distributed=False, validate=False, logger=None): if: logger = get_root_logger(cfg.log_level) if distributed: _dist_train(model, dataset, cfg, validate=validate) else: _non_dist_train(model, dataset, cfg, validate=validate)",True,logger is None,logger is None,0.6540241241455078 1167,"def train_detector(model, dataset, cfg, distributed=False, validate=False, logger=None): if logger is None: logger = get_root_logger(cfg.log_level) if: _dist_train(model, dataset, cfg, validate=validate) else: _non_dist_train(model, dataset, cfg, validate=validate)",True,distributed,distributed,0.6543304920196533 1168,"def delete_tags(self, resource_ids: List[str], tags: Dict[str, str]) -> bool: for resource_id in resource_ids: for tag in tags: if: if tags[tag] is None: self.tags[resource_id].pop(tag) elif tags[tag] == self.tags[resource_id][tag]: self.tags[resource_id].pop(tag) return True",False,tag in self.tags[resource_id],tag in self.tags,0.6491237878799438 1169,"def delete_tags(self, resource_ids: List[str], tags: Dict[str, str]) -> bool: for resource_id in resource_ids: for tag in tags: if tag in self.tags[resource_id]: if: self.tags[resource_id].pop(tag) elif tags[tag] == self.tags[resource_id][tag]: self.tags[resource_id].pop(tag) return True",False,tags[tag] is None,tag not in tags,0.6492158770561218 1170,"def delete_tags(self, resource_ids: List[str], tags: Dict[str, str]) -> bool: for resource_id in resource_ids: for tag in tags: if tag in self.tags[resource_id]: if tags[tag] is None: self.tags[resource_id].pop(tag) elif: self.tags[resource_id].pop(tag) return True",False,tags[tag] == self.tags[resource_id][tag],tag in self.tags[resource_id],0.6461017727851868 1171,"@property def start_date(self) -> datetime.datetime: if: self._start_date = datetime.datetime.fromtimestamp(self.start_time) return self._start_date",True,self._start_date is None,self._start_date is None,0.6507073640823364 1172,"def normcase(s): """"""Normalize case of pathname. Makes all characters lowercase and all slashes into backslashes."""""" if: raise TypeError(""normcase() argument must be str or bytes, not '{}'"".format(s.__class__.__name__)) return s.replace(_get_altsep(s), _get_sep(s)).lower()",False,"not isinstance(s, (bytes, str))","not isinstance(s, bytes)",0.6489598751068115 1173,"def quantity_to_restock(self): """""" Return the quantity that we can move from the stock place to the selling place. """""" if: return 0 ideal_quantity = 1 - self.quantity_selling_places() res = min(max(0, ideal_quantity), self.quantity_reserve()) return res",False,self.quantity <= 0,self.quantity_selling_places() == 0,0.6532850861549377 1174,"def get_source(self, environment, template): if: source = self.mapping[template] return (source, None, lambda: source!= self.mapping.get(template)) raise TemplateNotFound(template)",True,template in self.mapping,template in self.mapping,0.6573271155357361 1175,"def rightmost_terminal(symbols, terminals): i = len(symbols) - 1 while i >= 0: if: return symbols[i] i -= 1 return None",False,symbols[i] in terminals,not is_terminal(symbols[i]),0.648662805557251 1176,"@pydantic.root_validator def require_tag_or_repo(cls, values): if: raise ValueError('section requires one of:\n tag\n repo') return values",False,not values['tag'] and (not values['repo']),values['tag'] != 'repo',0.6459760665893555 1177,"def __repr__(self): if: return ''.format(self.name, self.ref_id) else: return ''.format(self.name)",False,self.ref_id is not None,self.ref_id,0.6469918489456177 1178,"def write(self, messages: List[InternalMessage]): if: raise RuntimeError('Can not write on closed stream.') for request in messages_to_proto_requests(messages): self._stream.write(request)",False,self._closed,self._stream is None,0.6657482385635376 1179,"def imageChanged(self): self.corrections = [] self.selected_correction = -1 self.deselectAllObjects() self.clearPolygon() self.loadImage() self.loadLabels() self.loadCorrections() self.updateMouseObject() self.update() if: self.doScreenshot()",False,self.screenshotToggleState,self.screenshot,0.6565228700637817 1180,"def conv_nd(dims, *args, **kwargs): """""" Create a 1D, 2D, or 3D convolution module. """""" if: return nn.Conv1d(*args, **kwargs) elif dims == 2: return nn.Conv2d(*args, **kwargs) elif dims == 3: return nn.Conv3d(*args, **kwargs) raise ValueError(f'unsupported dimensions: {dims}')",True,dims == 1,dims == 1,0.6719011664390564 1181,"def conv_nd(dims, *args, **kwargs): """""" Create a 1D, 2D, or 3D convolution module. """""" if dims == 1: return nn.Conv1d(*args, **kwargs) elif: return nn.Conv2d(*args, **kwargs) elif dims == 3: return nn.Conv3d(*args, **kwargs) raise ValueError(f'unsupported dimensions: {dims}')",True,dims == 2,dims == 2,0.6694872379302979 1182,"def conv_nd(dims, *args, **kwargs): """""" Create a 1D, 2D, or 3D convolution module. """""" if dims == 1: return nn.Conv1d(*args, **kwargs) elif dims == 2: return nn.Conv2d(*args, **kwargs) elif: return nn.Conv3d(*args, **kwargs) raise ValueError(f'unsupported dimensions: {dims}')",True,dims == 3,dims == 3,0.667377233505249 1183,"def encode(self, gt_bboxes_3d, gt_labels_3d): """"""Encode ground truth to prediction targets. Args: gt_bboxes_3d (BaseInstance3DBoxes): Ground truth bboxes with shape (n, 7). gt_labels_3d (torch.Tensor): Ground truth classes. Returns: tuple: Targets of center, size and direction. """""" center_target = gt_bboxes_3d.gravity_center size_class_target = gt_labels_3d size_res_target = gt_bboxes_3d.dims - gt_bboxes_3d.tensor.new_tensor(self.mean_sizes)[size_class_target] box_num = gt_labels_3d.shape[0] if: dir_class_target, dir_res_target = self.angle2class(gt_bboxes_3d.yaw) else: dir_class_target = gt_labels_3d.new_zeros(box_num) dir_res_target = gt_bboxes_3d.tensor.new_zeros(box_num) return (center_target, size_class_target, size_res_target, dir_class_target, dir_res_target)",True,self.with_rot,self.with_rot,0.6513580083847046 1184,"def prefetch(self, indices): for ds in self.datasets: if: ds.prefetch(indices)",True,"getattr(ds, 'supports_prefetch', False)","getattr(ds, 'supports_prefetch', False)",0.6412525773048401 1185,"def _decide_stage(self, update_step): """""" return stage, and the corresponding steps within the current stage """""" if: return (0, update_step) offset = self.warmup_steps if update_step < offset + self.hold_steps: return (1, update_step - offset) offset += self.hold_steps if update_step <= offset + self.decay_steps: return (2, update_step - offset) offset += self.decay_steps return (3, update_step - offset)",False,update_step < self.warmup_steps,update_step < update_step + self.warmup_steps,0.6500245332717896 1186,"def _decide_stage(self, update_step): """""" return stage, and the corresponding steps within the current stage """""" if update_step < self.warmup_steps: return (0, update_step) offset = self.warmup_steps if: return (1, update_step - offset) offset += self.hold_steps if update_step <= offset + self.decay_steps: return (2, update_step - offset) offset += self.decay_steps return (3, update_step - offset)",True,update_step < offset + self.hold_steps,update_step < offset + self.hold_steps,0.6475816965103149 1187,"def _decide_stage(self, update_step): """""" return stage, and the corresponding steps within the current stage """""" if update_step < self.warmup_steps: return (0, update_step) offset = self.warmup_steps if update_step < offset + self.hold_steps: return (1, update_step - offset) offset += self.hold_steps if: return (2, update_step - offset) offset += self.decay_steps return (3, update_step - offset)",True,update_step <= offset + self.decay_steps,update_step <= offset + self.decay_steps,0.6497117280960083 1188,"def getTagMap(self, uniq=False): if: tagMap = tagmap.TagMap() for nt in self.__namedTypes: tagMap = tagMap.clone(nt.getType(), nt.getType().getTagMap(), uniq) self.__tagMap[uniq] = tagMap return self.__tagMap[uniq]",True,self.__tagMap[uniq] is None,self.__tagMap[uniq] is None,0.6464587450027466 1189,"def check_low_score_edges_removed(scores_edges_left, scores_edges_removed): """"""Return True if scores of removed edges are lower in comparison to remaining edges."""""" if: return True min_score_left = min(scores_edges_left) max_score_rem = max(scores_edges_removed) min_thr = 0.9 * min_score_left if max_score_rem < min_thr: return True elif all((x < 0.75 for x in scores_edges_removed)): return True return False",False,all((x == 0.0 for x in scores_edges_removed)),not scores_edges_left,0.6477675437927246 1190,"def check_low_score_edges_removed(scores_edges_left, scores_edges_removed): """"""Return True if scores of removed edges are lower in comparison to remaining edges."""""" if all((x == 0.0 for x in scores_edges_removed)): return True min_score_left = min(scores_edges_left) max_score_rem = max(scores_edges_removed) min_thr = 0.9 * min_score_left if: return True elif all((x < 0.75 for x in scores_edges_removed)): return True return False",False,max_score_rem < min_thr,max_score_rem >= min_thr,0.6463969349861145 1191,"def check_low_score_edges_removed(scores_edges_left, scores_edges_removed): """"""Return True if scores of removed edges are lower in comparison to remaining edges."""""" if all((x == 0.0 for x in scores_edges_removed)): return True min_score_left = min(scores_edges_left) max_score_rem = max(scores_edges_removed) min_thr = 0.9 * min_score_left if max_score_rem < min_thr: return True elif: return True return False",False,all((x < 0.75 for x in scores_edges_removed)),min_thr >= max_score_rem,0.6457391381263733 1192,"@signature({'types': ['array-number', 'array-string']}) def _func_max(self, arg): if: return max(arg) else: return None",True,arg,arg,0.6825371980667114 1193,"def _on_epoch_end(epoch_number: int): if: total_num_epochs_str = f'of {n_epochs}' if n_epochs is not None else '' tqdm_progress_bar.display(f'Epoch {epoch_number} {total_num_epochs_str}', pos=1) self._bc_logger.log_epoch(epoch_number + 1) if on_epoch_end is not None: on_epoch_end()",False,tqdm_progress_bar is not None,_bc_logger.get_layout() == 'NT',0.6478947997093201 1194,"def _on_epoch_end(epoch_number: int): if tqdm_progress_bar is not None: total_num_epochs_str = f'of {n_epochs}' if n_epochs is not None else '' tqdm_progress_bar.display(f'Epoch {epoch_number} {total_num_epochs_str}', pos=1) self._bc_logger.log_epoch(epoch_number + 1) if: on_epoch_end()",True,on_epoch_end is not None,on_epoch_end is not None,0.6475785970687866 1195,"def _process(vcf_in: pysam.VariantFile, vcf_out: pysam.VariantFile, arguments: Dict[Text, Any]) -> None: """""""" Master function for processing the given input vcf and writing output Parameters ---------- vcf_in: pysam.VariantFile input vcf vcf_out: pysam.VariantFile output vcf arguments: Dict[Text, Any] commandline arguments Returns ------- header: pysam.VariantRecord record with ECN fields added"""""" if: bnd_end_dict = _parse_bnd_ends(arguments.vcf) else: bnd_end_dict = None ploidy_dict = _parse_ploidy_table(arguments.ploidy_table) for record in vcf_in: out = convert(record=record, bnd_end_dict=bnd_end_dict, ploidy_dict=ploidy_dict, scale_down_gq=arguments.scale_down_gq) vcf_out.write(out)",False,arguments.fix_end,arguments.vcf is not None,0.6481999754905701 1196,"def __init__(self, *args, **kw): if: kw.pop('strict', None) self.source_address = kw.get('source_address') if sys.version_info < (2, 7): kw.pop('source_address', None) self.socket_options = kw.pop('socket_options', self.default_socket_options) _HTTPConnection.__init__(self, *args, **kw)",True,six.PY3,six.PY3,0.6550235152244568 1197,"def __init__(self, *args, **kw): if six.PY3: kw.pop('strict', None) self.source_address = kw.get('source_address') if: kw.pop('source_address', None) self.socket_options = kw.pop('socket_options', self.default_socket_options) _HTTPConnection.__init__(self, *args, **kw)",False,"sys.version_info < (2, 7)",self.source_address,0.6457844376564026 1198,"def set_device(cuda, local_rank): """""" Sets device based on local_rank and returns instance of torch.device. :param cuda: if True: use cuda :param local_rank: local rank of the worker """""" if: torch.cuda.set_device(local_rank) device = torch.device('cuda') else: device = torch.device('cpu') return device",True,cuda,cuda,0.6647164225578308 1199,"def cert_extend_report_error(title: str, cert: dict) -> None: item = (title, cert['name']) if: logger.debug('Failed to load %s of %s', title, cert['name'])",False,item not in CERT_REPORT_ERRORS,not logger.exists(item),0.6504618525505066 1200,"@QtCore.Slot(bool) def modification_handler(self, changed): """""" Slot for editor document modificationChanged """""" print(changed,'set tab italic!') size = (20, 20) if: size = (10, 10) self.update_icon(size=size)",True,changed,changed,0.669736921787262 1201,"def handle_secured_get_feature(self): try: if: return MrMapNotImplementedError(ogc_request=self.ogc_request, message='MrMap currently can only handle wfs 2.x.x GetFeature requests') self.ogc_request.xml_request.secure_spatial(feature_types=self.service.security_info_per_feature_type) except NotImplementedError: return ForbiddenException(ogc_request=self.ogc_request, message=""MrMap can't secure the given request. Maybe you request multiple typenames in a single query."") response = self.remote_service.send_request(self.remote_service.get_feature_request(get_feature_request=self.ogc_request.xml_request)) return self.return_http_response(response=response)",False,self.ogc_request.service_version.split('.')[0] != '2',"self.service.version_info < (2, 5)",0.6459487676620483 1202,"def remove_fit(self): if: for marker in self.ax.fit_markers: marker.remove() lines = self.ax.get_lines() for line in lines: if hasattr(line, 'InteractivePlotLib_Type') and (line.InteractivePlotLib_Type == 'Fit' or line.InteractivePlotLib_Type == 'Fit_markers'): line.remove()",False,"hasattr(self.ax, 'fit_markers')",self.ax.fit_markers,0.6473143100738525 1203,"def remove_fit(self): if hasattr(self.ax, 'fit_markers'): for marker in self.ax.fit_markers: marker.remove() lines = self.ax.get_lines() for line in lines: if: line.remove()",False,"hasattr(line, 'InteractivePlotLib_Type') and (line.InteractivePlotLib_Type == 'Fit' or line.InteractivePlotLib_Type == 'Fit_markers')",line.fit(),0.6485917568206787 1204,"def update(self, sess, s, a, y): """""" Updates the estimator towards the given targets. Args: sess: Tensorflow session object s: State input of shape [batch_size, 4, 160, 160, 3] a: Chosen actions of shape [batch_size] y: Targets of shape [batch_size] Returns: The calculated loss on the batch. """""" feed_dict = {self.X_pl: s, self.y_pl: y, self.actions_pl: a} summaries, global_step, _, loss = sess.run([self.summaries, tf.contrib.framework.get_global_step(), self.train_op, self.loss], feed_dict) if: self.summary_writer.add_summary(summaries, global_step) return loss",False,self.summary_writer,self.summary_writer is not None,0.6491234302520752 1205,"def to_csr(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: """""" Convert to CSR format. Returns: row ptr tensor, column indice tensor, edge id tensor, edge weight tensor """""" if: return (self._indptr, self._indices, self._edge_ids, self._edge_weights) elif self._layout == 'CSC': row, col, edge_ids, edge_weights = self.to_coo() return coo_to_csr(row, col, edge_id=edge_ids, edge_weight=edge_weights)",False,self._layout == 'CSR',self._layout == 'INDIRE',0.6500974297523499 1206,"def to_csr(self) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: """""" Convert to CSR format. Returns: row ptr tensor, column indice tensor, edge id tensor, edge weight tensor """""" if self._layout == 'CSR': return (self._indptr, self._indices, self._edge_ids, self._edge_weights) elif: row, col, edge_ids, edge_weights = self.to_coo() return coo_to_csr(row, col, edge_id=edge_ids, edge_weight=edge_weights)",False,self._layout == 'CSC',self._layout == 'COO',0.6509554386138916 1207,"def parent(self) -> 'Name': """"""Return the parent of the name. For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. Raises ``dns.name.NoParent`` if the name is either the root name or the empty name, and thus has no parent. Returns a ``dns.name.Name``. """""" if: raise NoParent return Name(self.labels[1:])",False,self == root or self == empty,self.labels is None,0.6506943702697754 1208,"def read_manifest(self): """"""Read the manifest file (named by'self.manifest') and use it to fill in'self.filelist', the list of files to include in the source distribution. """""" log.info(""reading manifest file '%s'"", self.manifest) manifest = open(self.manifest, 'rbU') for line in manifest: if: try: line = line.decode('UTF-8') except UnicodeDecodeError: log.warn('%r not UTF-8 decodable -- skipping' % line) continue line = line.strip() if line.startswith('#') or not line: continue self.filelist.append(line) manifest.close()",True,six.PY3,six.PY3,0.6491453051567078 1209,"def read_manifest(self): """"""Read the manifest file (named by'self.manifest') and use it to fill in'self.filelist', the list of files to include in the source distribution. """""" log.info(""reading manifest file '%s'"", self.manifest) manifest = open(self.manifest, 'rbU') for line in manifest: if six.PY3: try: line = line.decode('UTF-8') except UnicodeDecodeError: log.warn('%r not UTF-8 decodable -- skipping' % line) continue line = line.strip() if: continue self.filelist.append(line) manifest.close()",True,line.startswith('#') or not line,line.startswith('#') or not line,0.641024649143219 1210,"def process_message(process): if: print_result(process) return is_successful_message(process)",False,'printed' in process and (not process['printed']),process.get('status') == 'SUCCESS',0.6461031436920166 1211,"def list_cluster_nodegroups(self, context, cluster_id, filters=None, limit=None, marker=None, sort_key=None, sort_dir=None): query = model_query(models.NodeGroup) if: query = query.filter_by(project_id=context.project_id) query = query.filter_by(cluster_id=cluster_id) query = self._add_nodegoup_filters(query, filters) return _paginate_query(models.NodeGroup, limit, marker, sort_key, sort_dir, query)",False,not context.is_admin,context.project_id,0.649480402469635 1212,"def set_bar_number(self): first_bar_number = self._segment_metadata.get('first_bar_number') if: abjad.setting(self.score).current_bar_number = first_bar_number",True,first_bar_number is not None,first_bar_number is not None,0.6469200849533081 1213,"def proxy_manager_for(self, proxy, **proxy_kwargs): """"""Return urllib3 ProxyManager for the given proxy. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param proxy: The proxy to return a urllib3 ProxyManager for. :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. :returns: ProxyManager """""" if: proxy_headers = self.proxy_headers(proxy) self.proxy_manager[proxy] = proxy_from_url(proxy, proxy_headers=proxy_headers, num_pools=self._pool_connections, maxsize=self._pool_maxsize, block=self._pool_block, **proxy_kwargs) return self.proxy_manager[proxy]",True,not proxy in self.proxy_manager,not proxy in self.proxy_manager,0.6503562331199646 1214,"def _resize_final_logits_bias(self, new_num_tokens: int, old_num_tokens: int) -> None: if: new_bias = self.final_logits_bias[:, :new_num_tokens] else: extra_bias = torch.zeros((1, new_num_tokens - old_num_tokens), device=self.final_logits_bias.device) new_bias = torch.cat([self.final_logits_bias, extra_bias], dim=1) self.register_buffer('final_logits_bias', new_bias)",False,new_num_tokens <= old_num_tokens,old_num_tokens == 0,0.6511536836624146 1215,"def startTagHr(self, token): if: self.endTagP(impliedTagToken('p')) self.tree.insertElement(token) self.tree.openElements.pop() token['selfClosingAcknowledged'] = True self.parser.framesetOK = False",True,"self.tree.elementInScope('p', variant='button')","self.tree.elementInScope('p', variant='button')",0.6431137323379517 1216,"def close(self, force=False): """""" Closes the connection with the remote host. """""" if: try: self.log.close() except: pass",False,self.log,force or not self.log.isEnabledFor(self.connection),0.655048131942749 1217,"def get_arguments(self, parameters): result = [] for pyname in self.get_pynames(parameters): if: result.append(None) else: result.append(pyname.get_object()) return result",True,pyname is None,pyname is None,0.6524429321289062 1218,"def _init_weights(self, m): if: trunc_normal_(m.weight, std=0.02) if isinstance(m, nn.Linear) and m.bias is not None: nn.init.constant_(m.bias, 0) elif isinstance(m, nn.LayerNorm): nn.init.constant_(m.bias, 0) nn.init.constant_(m.weight, 1.0)",True,"isinstance(m, nn.Linear)","isinstance(m, nn.Linear)",0.6520232558250427 1219,"def _init_weights(self, m): if isinstance(m, nn.Linear): trunc_normal_(m.weight, std=0.02) if: nn.init.constant_(m.bias, 0) elif isinstance(m, nn.LayerNorm): nn.init.constant_(m.bias, 0) nn.init.constant_(m.weight, 1.0)",True,"isinstance(m, nn.Linear) and m.bias is not None","isinstance(m, nn.Linear) and m.bias is not None",0.647013247013092 1220,"def _init_weights(self, m): if isinstance(m, nn.Linear): trunc_normal_(m.weight, std=0.02) if isinstance(m, nn.Linear) and m.bias is not None: nn.init.constant_(m.bias, 0) elif: nn.init.constant_(m.bias, 0) nn.init.constant_(m.weight, 1.0)",True,"isinstance(m, nn.LayerNorm)","isinstance(m, nn.LayerNorm)",0.6462858319282532 1221,"def __getitem__(self, index): return_dict = {} return_list = self.dataset[index] if: if len(return_list) == 2: return_dict['img'] = return_list[0] if self.return_label: return_dict['class_id'] = np.asarray(return_list[1]) else: return_dict['img'] = return_list[0] else: return_dict['img'] = return_list return return_dict",False,"isinstance(return_list, (tuple, list))",type(return_list) == list,0.6437748074531555 1222,"def __getitem__(self, index): return_dict = {} return_list = self.dataset[index] if isinstance(return_list, (tuple, list)): if: return_dict['img'] = return_list[0] if self.return_label: return_dict['class_id'] = np.asarray(return_list[1]) else: return_dict['img'] = return_list[0] else: return_dict['img'] = return_list return return_dict",False,len(return_list) == 2,len(return_list) == 1,0.6475019454956055 1223,"def __getitem__(self, index): return_dict = {} return_list = self.dataset[index] if isinstance(return_list, (tuple, list)): if len(return_list) == 2: return_dict['img'] = return_list[0] if: return_dict['class_id'] = np.asarray(return_list[1]) else: return_dict['img'] = return_list[0] else: return_dict['img'] = return_list return return_dict",False,self.return_label,return_list[2],0.652177095413208 1224,"@_app.callback(Output('selected-rows', 'value'), [Input('data-meta-storage','modified_timestamp'), Input('selected-rows', 'options')], state=[State('clustergram-datasets', 'value'), State('file-upload', 'contents')]) def clear_rows(_, row_options, dataset_name, contents): if: return [] row_options = [r['value'] for r in row_options] return row_options[:datasets[dataset_name]['default_rows']]",False,dataset_name is None or row_options is None,row_options is None,0.6475705504417419 1225,"def get_mocked_inference(self, label: str, graph, **kwargs): choices_raw = graph['choices_raw'] ordering = self.behavior[label] result: List[Tuple[float, int]] = [] for prob, val in ordering: for idx, raw_val in enumerate(choices_raw): if: result.append((prob, idx)) break else: return [] return result",False,"Checker.check(val, raw_val)",raw_val is val,0.644477128982544 1226,"def init_image_dataset(name, **kwargs): """"""Initializes an image dataset."""""" avai_datasets = list(__image_datasets.keys()) if: raise ValueError('Invalid dataset name. Received ""{}"", but expected to be one of {}'.format(name, avai_datasets)) return __image_datasets[name](**kwargs)",True,name not in avai_datasets,name not in avai_datasets,0.6563838720321655 1227,"@admin_views_bp.route('/users_keysetup') @apply_remote_config @is_authenticated def admin_userskeysetup(): logger.debug('users_keysetup()') okta_admin = OktaAdmin(session[SESSION_INSTANCE_SETTINGS_KEY]) user_groups = okta_admin.get_groups_by_name('everyone') if: logger.debug(user_groups) user_group = user_groups[0] group_id = user_group['id'] group_user_list = okta_admin.get_user_list_by_group_id(group_id) return render_template('/admin/users_keysetup.html', templatename=get_app_vertical(), user_info=get_userinfo(), userlist=group_user_list, config=session[SESSION_INSTANCE_SETTINGS_KEY], user_group=user_group)",True,len(user_groups) > 0,len(user_groups) > 0,0.6460448503494263 1228,"def _schedule(): """"""Run the scheduler, output some stats."""""" new_placement = 0 evicted = 0 for event in sched.schedule(): if: new_placement = new_placement + 1 else: evicted = evicted + 1 print('scheduled: ', new_placement, ', evicted: ', evicted)",False,event.node,event.get_type() == 'call_function',0.6581727862358093 1229,"@staticmethod def _undo_till_previous_loop_execution(loop_action_name: Text, done_events: List[Event]) -> None: offset = 0 for e in reversed(done_events[:]): if: break if isinstance(e, (ActionExecuted, UserUttered)): del done_events[-1 - offset] else: offset += 1",False,"isinstance(e, ActionExecuted) and e.action_name == loop_action_name",e.name == loop_action_name,0.6492782831192017 1230,"@staticmethod def _undo_till_previous_loop_execution(loop_action_name: Text, done_events: List[Event]) -> None: offset = 0 for e in reversed(done_events[:]): if isinstance(e, ActionExecuted) and e.action_name == loop_action_name: break if: del done_events[-1 - offset] else: offset += 1",False,"isinstance(e, (ActionExecuted, UserUttered))",offset >= len(done_events),0.6496405005455017 1231,"def getNode(self, name, **context): """"""Return tree node found by name"""""" if: return self else: return self.getBranch(name, **context).getNode(name, **context)",False,name == self.name,name == self.root,0.6551623940467834 1232,"def connection_from_pool_key(self, pool_key, request_context=None): """""" Get a :class:`ConnectionPool` based on the provided pool key. ``pool_key`` should be a namedtuple that only contains immutable objects. At a minimum it must have the ``scheme``, ``host``, and ``port`` fields. """""" with self.pools.lock: pool = self.pools.get(pool_key) if: return pool scheme = request_context['scheme'] host = request_context['host'] port = request_context['port'] pool = self._new_pool(scheme, host, port, request_context=request_context) self.pools[pool_key] = pool return pool",True,pool,pool,0.6810340285301208 1233,"def id_from_reply(message): prev_message = message.reply_to_message if: return (None, None) user_id = prev_message.from_user.id res = message.text.split(None, 1) if len(res) < 2: return (user_id, '') return (user_id, res[1])",True,not prev_message,not prev_message,0.6520417928695679 1234,"def id_from_reply(message): prev_message = message.reply_to_message if not prev_message: return (None, None) user_id = prev_message.from_user.id res = message.text.split(None, 1) if: return (user_id, '') return (user_id, res[1])",True,len(res) < 2,len(res) < 2,0.64814293384552 1235,"def _swig_setattr_nondynamic_instance_variable(set): def set_instance_attr(self, name, value): if: self.this.own(value) elif name == 'this': set(self, name, value) elif hasattr(self, name) and isinstance(getattr(type(self), name), property): set(self, name, value) else: raise AttributeError('You cannot add instance attributes to %s' % self) return set_instance_attr",True,name == 'thisown',name == 'thisown',0.651328444480896 1236,"def _swig_setattr_nondynamic_instance_variable(set): def set_instance_attr(self, name, value): if name == 'thisown': self.this.own(value) elif: set(self, name, value) elif hasattr(self, name) and isinstance(getattr(type(self), name), property): set(self, name, value) else: raise AttributeError('You cannot add instance attributes to %s' % self) return set_instance_attr",True,name == 'this',name == 'this',0.6555403470993042 1237,"def _swig_setattr_nondynamic_instance_variable(set): def set_instance_attr(self, name, value): if name == 'thisown': self.this.own(value) elif name == 'this': set(self, name, value) elif: set(self, name, value) else: raise AttributeError('You cannot add instance attributes to %s' % self) return set_instance_attr",False,"hasattr(self, name) and isinstance(getattr(type(self), name), property)","hasattr(set, name) and isinstance(set, type)",0.6446148157119751 1238,"def __add__(self, other): if: return TensorList([e1 + e2 for e1, e2 in zip(self, other)]) return TensorList([e + other for e in self])",True,TensorList._iterable(other),TensorList._iterable(other),0.6488854885101318 1239,"def del_status(self, _namespace=None, _instance=None): if: _namespace = '%' if not _instance: _instance = '%' return self.delete(DB_STATUS_TABLE, (_namespace, _instance))",True,not _namespace,not _namespace,0.6674848794937134 1240,"def del_status(self, _namespace=None, _instance=None): if not _namespace: _namespace = '%' if: _instance = '%' return self.delete(DB_STATUS_TABLE, (_namespace, _instance))",True,not _instance,not _instance,0.6666508316993713 1241,"def _prefix_with_indent(s: Union[Text, str], console: Console, *, prefix: str, indent: str) -> Text: if: text = s else: text = console.render_str(s) return console.render_str(prefix, overflow='ignore') + console.render_str(f'\n{indent}', overflow='ignore').join(text.split(allow_blank=True))",False,"isinstance(s, Text)","isinstance(s, str)",0.6515387296676636 1242,"@property def layer(self): """"""Get layer of the terminal."""""" point_data = self._pedb.point_data(0, 0) layer = list(self._pedb.stackup.layers.values())[0]._edb_layer if: return layer",False,"self._edb_object.GetParameters(point_data, layer)",point_data is not None and layer.point_data(0) == point_data,0.6469705104827881 1243,"def _replace_handle_reference(self, classname, old_handle, new_handle): """""" Replace all references to old handle with those to the new handle. :param classname: The name of the primary object class. :type classname: str :param old_handle: The handle to be replaced. :type old_handle: str :param new_handle: The handle to replace the old one with. :type new_handle: str """""" if: self.set_reference_handle(new_handle)",False,classname == 'Source' and self.get_reference_handle() == old_handle,classname in self.get_selected_objects(),0.6467579007148743 1244,"def memoized_property(fget): """""" Decorator for creating a property that only calls its getter once. Notes ----- Copied from https://github.com/estebistec/python-memoized-property under the BSD license. """""" attr_name = '_{0}'.format(fget.__name__) @wraps(fget) def fget_memoized(self): if: setattr(self, attr_name, fget(self)) return getattr(self, attr_name) return property(fget_memoized)",True,"not hasattr(self, attr_name)","not hasattr(self, attr_name)",0.6465780735015869 1245,"def _handle_word(s, t): if: return ('.', t[1:]) if t.startswith('#'): return ('id', t[1:]) return (t, t)",False,t.startswith('.'),t.startswith('\t'),0.647217869758606 1246,"def _handle_word(s, t): if t.startswith('.'): return ('.', t[1:]) if: return ('id', t[1:]) return (t, t)",False,t.startswith('#'),t.startswith('id'),0.6446032524108887 1247,"def clear_form_and_session(self, form_kwargs): session = self.request.session for field in self.form_class().fields: if: form_kwargs[field] = '' session[field] = '' form_kwargs['study_list_tabs'] = 0 session['study_list_tabs'] = 0 return form_kwargs",False,field in session,field in form_kwargs,0.6645772457122803 1248,"@check_shapes('X: [batch..., N, D]','return: [broadcast batch..., broadcast N, broadcast P]') def _variance(self, X: TensorType) -> tf.Tensor: if: return evaluate_parameter_or_function(self.variance, X, lower_bound=self.variance_lower_bound) else: assert self.scale is not None return evaluate_parameter_or_function(self.scale, X, lower_bound=self.scale_lower_bound) ** 2",True,self.variance is not None,self.variance is not None,0.6626168489456177 1249,"def prepend_scheme_if_needed(url, new_scheme): """"""Given a URL that may or may not have a scheme, prepend the given scheme. Does not replace a present scheme with the one provided as an argument."""""" scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) if: netloc, path = (path, netloc) return urlunparse((scheme, netloc, path, params, query, fragment))",True,not netloc,not netloc,0.661088764667511 1250,"def subsample_uniform_spline(x0, dx, ydys, xs_, ys_=None): n = len(ydys) m = len(xs_) if: ys_ = np.zeros(m) lib.subsample_uniform_spline(x0, dx, n, ydys, m, xs_, ys_) return ys_",True,ys_ is None,ys_ is None,0.6563292741775513 1251,"def compute_texture(self, tex_coeff, normalize=True): """""" Return: face_texture -- torch.tensor, size (B, N, 3), in RGB order, range (0, 1.) Parameters: tex_coeff -- torch.tensor, size (B, 80) """""" batch_size = tex_coeff.shape[0] face_texture = torch.einsum('ij,aj->ai', self.tex_base, tex_coeff) + self.mean_tex if: face_texture = face_texture / 255.0 return face_texture.reshape([batch_size, -1, 3])",True,normalize,normalize,0.664570689201355 1252,"def write(self, oprot): if: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('notify_result') oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),0.6467742919921875 1253,"def _get_error(self): """"""Get current error."""""" if: error = self.lookup_reference(['Error', 'error'], ref_key='title') if not error: self._error = STATE_DISHWASHER_ERROR_OFF else: self._error = error return self._error",True,not self._error,not self._error,0.6565738320350647 1254,"def _get_error(self): """"""Get current error."""""" if not self._error: error = self.lookup_reference(['Error', 'error'], ref_key='title') if: self._error = STATE_DISHWASHER_ERROR_OFF else: self._error = error return self._error",True,not error,not error,0.6582547426223755 1255,"def get_time(self): self._update_play_cursor() if: timestamp = self._underrun_timestamp assert _debug('OpenALAudioPlayer: Return underrun timestamp') else: timestamp = self._buffer_timestamps[0] assert _debug('OpenALAudioPlayer: Buffer timestamp: {}'.format(timestamp)) if timestamp is not None: timestamp += (self._play_cursor - self._buffer_cursor) / float(self.source.audio_format.bytes_per_second) assert _debug('OpenALAudioPlayer: get_time = {}'.format(timestamp)) return timestamp",False,not self._buffer_timestamps,self.source.audio_format.bytes_per_second == 0,0.6511937379837036 1256,"def get_time(self): self._update_play_cursor() if not self._buffer_timestamps: timestamp = self._underrun_timestamp assert _debug('OpenALAudioPlayer: Return underrun timestamp') else: timestamp = self._buffer_timestamps[0] assert _debug('OpenALAudioPlayer: Buffer timestamp: {}'.format(timestamp)) if: timestamp += (self._play_cursor - self._buffer_cursor) / float(self.source.audio_format.bytes_per_second) assert _debug('OpenALAudioPlayer: get_time = {}'.format(timestamp)) return timestamp",False,timestamp is not None,self.source.audio_format and self.source.audio_format.bytes_per_second != 0,0.6567543148994446 1257,"def __instancecheck__(cls, instance): """"""Override for isinstance(instance, cls)."""""" subclass = getattr(instance, '__class__', None) if: return True subtype = type(instance) if subtype is _InstanceType: subtype = subclass if subtype is subclass or subclass is None: if cls._abc_negative_cache_version == ABCMeta._abc_invalidation_counter and subtype in cls._abc_negative_cache: return False return cls.__subclasscheck__(subtype) return cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)",False,subclass in cls._abc_cache,subclass is None,0.6531409025192261 1258,"def __instancecheck__(cls, instance): """"""Override for isinstance(instance, cls)."""""" subclass = getattr(instance, '__class__', None) if subclass in cls._abc_cache: return True subtype = type(instance) if: subtype = subclass if subtype is subclass or subclass is None: if cls._abc_negative_cache_version == ABCMeta._abc_invalidation_counter and subtype in cls._abc_negative_cache: return False return cls.__subclasscheck__(subtype) return cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)",False,subtype is _InstanceType,"isinstance(subtype, int) or subtype is None",0.6524248123168945 1259,"def __instancecheck__(cls, instance): """"""Override for isinstance(instance, cls)."""""" subclass = getattr(instance, '__class__', None) if subclass in cls._abc_cache: return True subtype = type(instance) if subtype is _InstanceType: subtype = subclass if: if cls._abc_negative_cache_version == ABCMeta._abc_invalidation_counter and subtype in cls._abc_negative_cache: return False return cls.__subclasscheck__(subtype) return cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)",False,subtype is subclass or subclass is None,"issubclass(subtype, ABCMeta)",0.6463536024093628 1260,"def __instancecheck__(cls, instance): """"""Override for isinstance(instance, cls)."""""" subclass = getattr(instance, '__class__', None) if subclass in cls._abc_cache: return True subtype = type(instance) if subtype is _InstanceType: subtype = subclass if subtype is subclass or subclass is None: if: return False return cls.__subclasscheck__(subtype) return cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)",False,cls._abc_negative_cache_version == ABCMeta._abc_invalidation_counter and subtype in cls._abc_negative_cache,"not hasattr(cls, '__subclasscheck__')",0.6443222165107727 1261,"def __init__(self, filename=None, save_timer=settings.FC_CHUTESTORAGE_SAVE_TIMER): if: filename = settings.FC_CHUTESTORAGE_FILE PDStorage.__init__(self, filename, save_timer) if len(ChuteStorage.chuteList) == 0: self.loadFromDisk()",False,not filename,filename is None,0.6697832345962524 1262,"def __init__(self, filename=None, save_timer=settings.FC_CHUTESTORAGE_SAVE_TIMER): if not filename: filename = settings.FC_CHUTESTORAGE_FILE PDStorage.__init__(self, filename, save_timer) if: self.loadFromDisk()",False,len(ChuteStorage.chuteList) == 0,filename is not None,0.6490403413772583 1263,"def _add_multilevel_rois(blobs): """"""By default training RoIs are added for a single feature map level only. When using FPN, the RoIs must be distributed over different FPN levels according the level assignment heuristic (see: modeling.FPN. map_rois_to_fpn_levels). """""" lvl_min = cfg.FPN.ROI_MIN_LEVEL lvl_max = cfg.FPN.ROI_MAX_LEVEL def _distribute_rois_over_fpn_levels(rois_blob_name): """"""Distribute rois over the different FPN levels."""""" target_lvls = fpn_utils.map_rois_to_fpn_levels(blobs[rois_blob_name][:, 1:5], lvl_min, lvl_max) fpn_utils.add_multilevel_roi_blobs(blobs, rois_blob_name, blobs[rois_blob_name], target_lvls, lvl_min, lvl_max) _distribute_rois_over_fpn_levels('rois') if: _distribute_rois_over_fpn_levels('mask_rois') if cfg.MODEL.KEYPOINTS_ON: _distribute_rois_over_fpn_levels('keypoint_rois')",True,cfg.MODEL.MASK_ON,cfg.MODEL.MASK_ON,0.6450358629226685 1264,"def _add_multilevel_rois(blobs): """"""By default training RoIs are added for a single feature map level only. When using FPN, the RoIs must be distributed over different FPN levels according the level assignment heuristic (see: modeling.FPN. map_rois_to_fpn_levels). """""" lvl_min = cfg.FPN.ROI_MIN_LEVEL lvl_max = cfg.FPN.ROI_MAX_LEVEL def _distribute_rois_over_fpn_levels(rois_blob_name): """"""Distribute rois over the different FPN levels."""""" target_lvls = fpn_utils.map_rois_to_fpn_levels(blobs[rois_blob_name][:, 1:5], lvl_min, lvl_max) fpn_utils.add_multilevel_roi_blobs(blobs, rois_blob_name, blobs[rois_blob_name], target_lvls, lvl_min, lvl_max) _distribute_rois_over_fpn_levels('rois') if cfg.MODEL.MASK_ON: _distribute_rois_over_fpn_levels('mask_rois') if: _distribute_rois_over_fpn_levels('keypoint_rois')",False,cfg.MODEL.KEYPOINTS_ON,cfg.MODEL.KEYPOINT_ON,0.6458719372749329 1265,"@property def tv_reachable_positions_set(self): if: return self._tv_reachable_positions_set self.environment.step({'action': 'GetReachablePositionsForObject', 'objectId': self.object_id, 'agentId': 0}) self._tv_reachable_positions_set = set(((round(pos['x'], 2), round(pos['z'], 2)) for pos in itertools.chain.from_iterable(self.environment.last_event.metadata['actionReturn'].values()))) return self._tv_reachable_positions_set",True,self._tv_reachable_positions_set is not None,self._tv_reachable_positions_set is not None,0.6496940851211548 1266,"def _check_loss_setting(component_config: Dict[Text, Any]) -> None: if: rasa.shared.utils.io.raise_warning(f'{CONSTRAIN_SIMILARITIES} is set to `False`. It is recommended to set it to `True` when using cross-entropy loss.', category=UserWarning)",False,not component_config[CONSTRAIN_SIMILARITIES] and component_config[LOSS_TYPE] == CROSS_ENTROPY,"not component_config[CONSTRAIN_SIMILARITIES] and component_config[LOSS_TYPE] in [SOFTMAX, CROSS_ENTROPY]",0.6463695764541626 1267,"def _predict_dataloader(self, shuffle=False): if: init_fn = worker_init_fn else: init_fn = None return DataLoader(self.datasets['predict'], batch_size=self.batch_size, num_workers=self.num_workers, worker_init_fn=init_fn)",False,"isinstance(self.datasets['predict'], Txt2ImgIterableBaseDataset) or self.use_worker_init_fn",shuffle,0.6444344520568848 1268,"def Tags(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0",True,o != 0,o != 0,0.664172887802124 1269,"def test_forward(self): if: bsz = self.forward_input['src_tokens'].size(0) forward_output = self.model.forward(**self.forward_input) succ, msg = check_encoder_output(forward_output, batch_size=bsz) if not succ: self.assertTrue(succ, msg=msg) self.forward_output = forward_output",False,self.forward_input and self.model,self.model and self.forward_input,0.6500606536865234 1270,"def test_forward(self): if self.forward_input and self.model: bsz = self.forward_input['src_tokens'].size(0) forward_output = self.model.forward(**self.forward_input) succ, msg = check_encoder_output(forward_output, batch_size=bsz) if: self.assertTrue(succ, msg=msg) self.forward_output = forward_output",True,not succ,not succ,0.6619389057159424 1271,"def run_pip(args: List[str]) -> bytes: if: pip_executable = [_discover_system_pip()] else: pip_executable = [sys.executable, '-m', 'pip'] return _run_pip_subprocess(pip_executable, args)",False,is_frozen,"args.get('discover', False)",0.6626929044723511 1272,"def st_der_integer(*args, **kwargs): """""" Hypothesis strategy that returns a random positive integer as DER INTEGER. Parameters are passed to hypothesis.strategy.integer. """""" if: kwargs['min_value'] = 0 return st.builds(encode_integer, st.integers(*args, **kwargs))",True,'min_value' not in kwargs,'min_value' not in kwargs,0.651614785194397 1273,"def _set_socket_options(sock, options): if: return for opt in options: sock.setsockopt(*opt)",True,options is None,options is None,0.6567265391349792 1274,"@staticmethod def make_embedding(emb_hparams, token_to_id_map): """"""Optionally loads embedding from file (if provided), and returns an instance of :class:`texar.data.Embedding`. """""" embedding = None if: embedding = Embedding(token_to_id_map, emb_hparams) return embedding",False,emb_hparams['file'] is not None and len(emb_hparams['file']) > 0,token_to_id_map is not None,0.6472669243812561 1275,"def _do_pop_dynamics(self, land): with_selection = self.selection and self.burned burn = not self.burned extinct = _do_pop_dynamics(self, land, with_selection=with_selection, burn=burn) if: self.extinct = extinct",True,extinct,extinct,0.687546968460083 1276,"def slotPropertyRemoved(self, property, parent): varProperty = self.m_internalToProperty.get(property, 0) if: return self.removeSubProperty(varProperty)",False,not varProperty,varProperty == parent,0.6571084856987 1277,"def output_array_of_adapifaultdetail(data_objects): if: return for data_object in data_objects['AdApiFaultDetail']: output_adapifaultdetail(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.646981954574585 1278,"def forward(self, input): if: return F.layer_norm(input.float(), self.normalized_shape, self.weight, self.bias, self.eps).type_as(input) else: return super(LayerNorm, self).forward(input.float()).type_as(input)",False,input.numel() > self.max_numel,self.normalized_shape != self.normalized_shape,0.6445543766021729 1279,"@staticmethod def clone_head_sub_program_by_key_prefix(src_program, key_prefix): program_warp = ProgramWarpper(src_program) fliter_list = ['forward', 'output_contains_prefix_without_grad'] demarcations = program_warp.fliter_ops(fliter_list, key_prefix) predecessor_ops = set() for op in demarcations: if: Reformer._get_predecessors(program_warp, op, predecessor_ops) predecessor_ops.add(op) ops = set([op_warp.op for op_warp in predecessor_ops]) return Reformer._clone_sub_program_by_given_ops(program_warp, ops)",True,op not in predecessor_ops,op not in predecessor_ops,0.6523531675338745 1280,"def calls_and_gvcfs(self, allele_counters: Dict[str, allelecounter.AlleleCounter], target_sample: str, include_gvcfs: bool=False, include_med_dp: bool=False, left_padding: int=0, right_padding: int=0) -> Tuple[Sequence[deepvariant_pb2.DeepVariantCall], Sequence[variants_pb2.Variant]]: """"""Gets variant calls and gvcf records for all sites in allele_counter. Args: allele_counters: Dictionary of AlleleCounter objects keyed by sample IDs holding the allele counts we will use to find candidate variants and create gvcf records. target_sample: string. Sample ID of sample for which variants are called. include_gvcfs: boolean. If True, we will compute gVCF records for all of the AlleleCounts in AlleleCounter. include_med_dp: boolean. If True, in the gVCF records, we will include MED_DP. left_padding: int. Left padding that is added to the region and needs to be discarded for candidates and gvcf calculation. right_padding: int. Right padding that is added to the region and needs to be discarded for candidates and gvcf calculation. Returns: Two values. The first is a list of DeepVariantCall protos containing our candidate variants. The second is a list of gVCF blocks in Variant proto format, if include_gvcfs is True. If False, an empty list is returned. """""" candidates = self.get_candidates(allele_counters=allele_counters, sample_name=target_sample) gvcfs = [] if: gvcfs = list(self.make_gvcfs(allele_counters[target_sample].summary_counts(left_padding, right_padding), include_med_dp=include_med_dp)) return (candidates, gvcfs)",True,include_gvcfs,include_gvcfs,0.6528258919715881 1281,"def bulk_session_delete_by_id(self, bucket): """"""Remove all sessions matching the bucket IDs"""""" if: from.ext.sql import db from.models import Session try: Session.query.filter(Session.uuid.in_(bucket)).delete(synchronize_session=False) db.session.commit() except: db.session.rollback()",False,self.session_managed(),bucket,0.6544942855834961 1282,"def _should_fetch(self, fetch_original=False): if: return not self.__sources.cached_original() return not self.__sources.cached()",True,fetch_original,fetch_original,0.6620888710021973 1283,"def _initialize_maddpg(self): """"""See initialize."""""" if: self.sess.run(self.target_init_updates) else: self.sess.run([self.target_init_updates[key] for key in self.target_init_updates.keys()])",False,self.shared,self.use_maddpg,0.6531139016151428 1284,"def make_noise_when_touched(self): while True: if: self.speaker.play_file(wav_file='/home/robot/sound/Ouch.wav', volume=100, play_type=Sound.PLAY_WAIT_FOR_COMPLETE)",True,self.touch_sensor.is_pressed,self.touch_sensor.is_pressed,0.6473349928855896 1285,"def delete_resources(self, force=False): if: return account, containers = self.s_client.get_account() for c in containers: container_stat, objects = self.s_client.get_container(c['name']) if 'x-container-read' in container_stat: read_acl = container_stat['x-container-read'] LOG.warning('%s: Ignoring container %s due to read_acl %s', self.project.id, c['name'], read_acl) continue self._delete_container(c, objects)",True,not force,not force,0.6592428088188171 1286,"def delete_resources(self, force=False): if not force: return account, containers = self.s_client.get_account() for c in containers: container_stat, objects = self.s_client.get_container(c['name']) if: read_acl = container_stat['x-container-read'] LOG.warning('%s: Ignoring container %s due to read_acl %s', self.project.id, c['name'], read_acl) continue self._delete_container(c, objects)",False,'x-container-read' in container_stat,container_stat['acl'] == 'read',0.6440967917442322 1287,"def remove_prefix_if_present(prefix, s): if: return s[len(prefix):] else: return None",False,s.find(prefix) == 0,s.startswith(prefix),0.6462119817733765 1288,"def output_array_of_string(items): if: return output_status_message('Array Of string:') for item in items['string']: output_status_message('{0}'.format(item))",False,items is None or items['string'] is None,items is None or len(items) == 0,0.6468750238418579 1289,"def id(self): """"""Return the distro ID of the Linux distribution, as a string. For details, see :func:`distro.id`. """""" def normalize(distro_id, table): distro_id = distro_id.lower().replace(' ', '_') return table.get(distro_id, distro_id) distro_id = self.os_release_attr('id') if: return normalize(distro_id, NORMALIZED_OS_ID) distro_id = self.lsb_release_attr('distributor_id') if distro_id: return normalize(distro_id, NORMALIZED_LSB_ID) distro_id = self.distro_release_attr('id') if distro_id: return normalize(distro_id, NORMALIZED_DISTRO_ID) return ''",True,distro_id,distro_id,0.6617252826690674 1290,"def id(self): """"""Return the distro ID of the Linux distribution, as a string. For details, see :func:`distro.id`. """""" def normalize(distro_id, table): distro_id = distro_id.lower().replace(' ', '_') return table.get(distro_id, distro_id) distro_id = self.os_release_attr('id') if distro_id: return normalize(distro_id, NORMALIZED_OS_ID) distro_id = self.lsb_release_attr('distributor_id') if: return normalize(distro_id, NORMALIZED_LSB_ID) distro_id = self.distro_release_attr('id') if distro_id: return normalize(distro_id, NORMALIZED_DISTRO_ID) return ''",True,distro_id,distro_id,0.6622462272644043 1291,"def id(self): """"""Return the distro ID of the Linux distribution, as a string. For details, see :func:`distro.id`. """""" def normalize(distro_id, table): distro_id = distro_id.lower().replace(' ', '_') return table.get(distro_id, distro_id) distro_id = self.os_release_attr('id') if distro_id: return normalize(distro_id, NORMALIZED_OS_ID) distro_id = self.lsb_release_attr('distributor_id') if distro_id: return normalize(distro_id, NORMALIZED_LSB_ID) distro_id = self.distro_release_attr('id') if: return normalize(distro_id, NORMALIZED_DISTRO_ID) return ''",True,distro_id,distro_id,0.6613842248916626 1292,"def forward(self, x, src_key_padding_mask=None): if: self.attn_mask = get_lookahead_mask(x) else: self.attn_mask = None if self.custom_emb_module is not None: x = self.custom_emb_module(x) encoder_output, _ = self.encoder(src=x, src_mask=self.attn_mask, src_key_padding_mask=src_key_padding_mask) output = self.output_layer(encoder_output) output = self.output_activation(output) return output",False,self.causal,get_lookahead_mask is not None,0.6495303511619568 1293,"def forward(self, x, src_key_padding_mask=None): if self.causal: self.attn_mask = get_lookahead_mask(x) else: self.attn_mask = None if: x = self.custom_emb_module(x) encoder_output, _ = self.encoder(src=x, src_mask=self.attn_mask, src_key_padding_mask=src_key_padding_mask) output = self.output_layer(encoder_output) output = self.output_activation(output) return output",True,self.custom_emb_module is not None,self.custom_emb_module is not None,0.6453262567520142 1294,"def transition_start(self): moves = self.record.assign_moves if: return'show' return 'end'",False,any((m.state == 'assigned' for m in moves)),moves,0.6485408544540405 1295,"def merge_ptr_read(known, ptrs): """""" Merge common memory parts in a multiple byte memory. @ptrs: memory bytes list @known: ptrs' associated boolean for present/unpresent memory part in the store """""" assert known out = [] known.append(None) ptrs.append(None) last, value, size = (known[0], ptrs[0], 8) for index, part in enumerate(known[1:], 1): if: size += 8 else: out.append((last, value, size)) last, value, size = (part, ptrs[index], 8) return out",False,part == last,part == 0,0.6538723707199097 1296,"def __init__(self, config): super().__init__(config) if: logger.warn('If you want to use `BertGenerationDecoder` as a standalone, add `is_decoder=True.`') self.bert = BertGenerationEncoder(config) self.lm_head = BertGenerationOnlyLMHead(config) self.init_weights()",False,not config.is_decoder,not config['is_decoder'],0.6553796529769897 1297,"@staticmethod def closest_overlord_spot_to(k: 'ScoutLocation', target: Point2) -> Point2: if: return target.closest(k.pathing_manager.overlord_spots) return target",False,k.pathing_manager.overlord_spots,k.pathing_manager and k.pathing_manager.overlord_spots,0.6476427316665649 1298,"def visit_while(self, node): """"""return an astroid.While node as string"""""" whiles = 'while %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body)) if: whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse)) return whiles",False,node.orelse,node.orelse is not None,0.6596230268478394 1299,"def check_not_bot(member, chat_id, message_id, context): bot = context.bot member_dict = VERIFIED_USER_WAITLIST.pop(member.id) member_status = member_dict.get('status') if: try: bot.unban_chat_member(chat_id, member.id) except: pass try: bot.edit_message_text('*kicks user*\nThey can always rejoin and try.', chat_id=chat_id, message_id=message_id) except: pass",False,not member_status,member_status != 'cancel',0.6509755849838257 1300,"def __add__(self, e_nx): """"""Augment by results from another NXDOMAIN exception."""""" qnames0 = list(self.kwargs.get('qnames', [])) responses0 = dict(self.kwargs.get('responses', {})) responses1 = e_nx.kwargs.get('responses', {}) for qname1 in e_nx.kwargs.get('qnames', []): if: qnames0.append(qname1) if qname1 in responses1: responses0[qname1] = responses1[qname1] return NXDOMAIN(qnames=qnames0, responses=responses0)",False,qname1 not in qnames0,qname1 in qnames0,0.6621160507202148 1301,"def __add__(self, e_nx): """"""Augment by results from another NXDOMAIN exception."""""" qnames0 = list(self.kwargs.get('qnames', [])) responses0 = dict(self.kwargs.get('responses', {})) responses1 = e_nx.kwargs.get('responses', {}) for qname1 in e_nx.kwargs.get('qnames', []): if qname1 not in qnames0: qnames0.append(qname1) if: responses0[qname1] = responses1[qname1] return NXDOMAIN(qnames=qnames0, responses=responses0)",True,qname1 in responses1,qname1 in responses1,0.6611429452896118 1302,"def is_valid(self) -> None: if: raise LabelRowError('ClassificationInstance is not on any frames. Please add it to at least one frame.')",False,not len(self._frames_to_data) > 0,self._label_row.label_row_type not in _LABEL_CLASSIFICATION_TYPES,0.6460089683532715 1303,"def get_locations(self): """""" Attached to the `terminal:get_locations` WebSocket action. Sends a message to the client (via the `terminal:term_locations` WebSocket action) listing all 'locations' where terminals reside. .. note:: Typically the location mechanism is used to open terminals in different windows/tabs. """""" term_locations = {} for location, obj in self.ws.locations.items(): terms = obj.get('terminal', None) if: term_locations[location] = terms.keys() message = {'terminal:term_locations': term_locations} self.write_message(json_encode(message)) self.trigger('terminal:term_locations', term_locations)",False,terms,terms is not None,0.6669239401817322 1304,"def __repr__(self): if: return '{}(scalar={!r}, _shape={!r})'.format(self.__class__.__name__, self.scalar, self.shape) return '{}({!r})'.format(self.__class__.__name__, self._flat_items)",True,self.scalar is not None,self.scalar is not None,0.6541897058486938 1305,"def buildAttributes(self, attrs): if: self.id = attrs.get('id').value",True,attrs.get('id'),attrs.get('id'),0.6503660678863525 1306,"def reduce(self, key, values, output): new_key = 'intermediate_registration:' + key val = self._join(values) if: output.put(new_key, val)",True,val,val,0.6768761277198792 1307,"def forward(self, x): x = self.forward_features(x) if: x, x_dist = (self.head(x), self.head_dist(x)) if self.training and (not torch.jit.is_scripting()): return (x, x_dist) else: return (x + x_dist) / 2 else: x = self.head(x) return x",True,self.head_dist is not None,self.head_dist is not None,0.6478672027587891 1308,"def forward(self, x): x = self.forward_features(x) if self.head_dist is not None: x, x_dist = (self.head(x), self.head_dist(x)) if: return (x, x_dist) else: return (x + x_dist) / 2 else: x = self.head(x) return x",True,self.training and (not torch.jit.is_scripting()),self.training and (not torch.jit.is_scripting()),0.6413528919219971 1309,"def clear(self): if: self.updating.cancel() self.clear_selected() self.clear_generated_view()",True,self.updating,self.updating,0.6560792326927185 1310,"def rescue(self, server, password=None, image=None): """""" Rescue the server. :param server: The :class:`Server` to rescue. :param password: The admin password to be set in the rescue instance. :param image: The :class:`Image` to rescue with. :returns: An instance of novaclient.base.TupleWithMeta """""" info = {} if: info['adminPass'] = password if image: info['rescue_image_ref'] = base.getid(image) resp, body = self._action_return_resp_and_body('rescue', server, info or None) return base.TupleWithMeta((resp, body), resp)",True,password,password,0.6719074249267578 1311,"def rescue(self, server, password=None, image=None): """""" Rescue the server. :param server: The :class:`Server` to rescue. :param password: The admin password to be set in the rescue instance. :param image: The :class:`Image` to rescue with. :returns: An instance of novaclient.base.TupleWithMeta """""" info = {} if password: info['adminPass'] = password if: info['rescue_image_ref'] = base.getid(image) resp, body = self._action_return_resp_and_body('rescue', server, info or None) return base.TupleWithMeta((resp, body), resp)",True,image,image,0.6731893420219421 1312,"@biz_content.setter def biz_content(self, value): if: self._biz_content = value else: self._biz_content = AlipayEcoEduCampusJobCancelModel.from_alipay_dict(value)",True,"isinstance(value, AlipayEcoEduCampusJobCancelModel)","isinstance(value, AlipayEcoEduCampusJobCancelModel)",0.6498477458953857 1313,"def __eq__(self, other): """"""Returns true if both objects are equal"""""" if: return False return self.__dict__ == other.__dict__",False,"not isinstance(other, CreateScoresRequestModel)","not isinstance(other, V1alpha1WorkflowSpec)",0.648642897605896 1314,"def add_feature_spec(self, feature_spec: FeatureSpec) -> None: """"""Add FeatureSpec to be incorporated when generating SQL Parameters ---------- feature_spec : FeatureSpec Feature specification Raises ------ ValueError If there are duplicated feature names """""" key = feature_spec.feature_name if: raise ValueError(f'Duplicated feature name: {key}') self.feature_specs[key] = feature_spec",True,key in self.feature_specs,key in self.feature_specs,0.6536939144134521 1315,"def get_base_filename(self, brick, band, **kwargs): from glob import glob brickname = brick.brickname pat = os.path.join(self.dir, 'dr1_tiles', brickname, '%s_*_%s.fits.fz' % (brickname, band)) fns = glob(pat) assert len(fns) <= 1 if: return None return fns[0]",True,len(fns) == 0,len(fns) == 0,0.6526537537574768 1316,"def _infer_num_neg(self): if: self._config['num_neg'] = self._task.losses[0].num_neg",False,"isinstance(self._task.losses[0], (mz.losses.RankHingeLoss, mz.losses.RankCrossEntropyLoss))",len(self._task.losses) > 0,0.654685378074646 1317,"def get_bed_handler(self, plot_regions=None): """"""Get the bed handler for the track."""""" file_to_open = self.properties['file'] if: if plot_regions is not None: if _is_sqlite3(self.properties['file']): print('global_max_row not supported when gtf is provided as SQLite3 db') else: file_to_open = temp_file_from_intersect(self.properties['file'], plot_regions, AROUND_REGION) gtf_db = ReadGtf(file_to_open, self.properties['prefered_name'], self.properties['merge_transcripts']) total_length = gtf_db.length return (gtf_db, total_length)",False,not self.properties['global_max_row'],self.properties['file'] not in self.properties['open_sqlite3_files'],0.6473047733306885 1318,"def get_bed_handler(self, plot_regions=None): """"""Get the bed handler for the track."""""" file_to_open = self.properties['file'] if not self.properties['global_max_row']: if: if _is_sqlite3(self.properties['file']): print('global_max_row not supported when gtf is provided as SQLite3 db') else: file_to_open = temp_file_from_intersect(self.properties['file'], plot_regions, AROUND_REGION) gtf_db = ReadGtf(file_to_open, self.properties['prefered_name'], self.properties['merge_transcripts']) total_length = gtf_db.length return (gtf_db, total_length)",True,plot_regions is not None,plot_regions is not None,0.6507853269577026 1319,"def get_bed_handler(self, plot_regions=None): """"""Get the bed handler for the track."""""" file_to_open = self.properties['file'] if not self.properties['global_max_row']: if plot_regions is not None: if: print('global_max_row not supported when gtf is provided as SQLite3 db') else: file_to_open = temp_file_from_intersect(self.properties['file'], plot_regions, AROUND_REGION) gtf_db = ReadGtf(file_to_open, self.properties['prefered_name'], self.properties['merge_transcripts']) total_length = gtf_db.length return (gtf_db, total_length)",False,_is_sqlite3(self.properties['file']),"self.properties['file'] in ('sqlite3', 'sqlite3')",0.6467188596725464 1320,"def save(self, *args: Any, **kwargs: Any) -> None: """"""ensure that the remote_id is within this instance"""""" if: self.remote_id = self.get_remote_id() else: self.origin_id = self.remote_id self.remote_id = None return super().save(*args, **kwargs)",False,self.id,self.remote_id is None,0.6559172868728638 1321,"def _forward(self, fc_feats, att_feats, seq, att_masks=None): if: seq = seq.reshape(-1, seq.shape[2]) att_feats, seq, att_masks, seq_mask = self._prepare_feature_forward(att_feats, att_masks, seq) out = self.model(att_feats, seq, att_masks, seq_mask) outputs = self.model.generator(out) return outputs",True,seq.ndim == 3,seq.ndim == 3,0.6539337038993835 1322,"def main(args): cfg = setup(args) if: model = Trainer.build_model(cfg) DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(cfg.MODEL.WEIGHTS, resume=args.resume) res = Trainer.test(cfg, model) if comm.is_main_process(): verify_results(cfg, res) return res trainer = Trainer(cfg) trainer.resume_or_load(resume=args.resume) return trainer.train()",True,args.eval_only,args.eval_only,0.6476649045944214 1323,"def main(args): cfg = setup(args) if args.eval_only: model = Trainer.build_model(cfg) DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(cfg.MODEL.WEIGHTS, resume=args.resume) res = Trainer.test(cfg, model) if: verify_results(cfg, res) return res trainer = Trainer(cfg) trainer.resume_or_load(resume=args.resume) return trainer.train()",True,comm.is_main_process(),comm.is_main_process(),0.6477651596069336 1324,"def _do_resolve_proto_expectations(self, module, path, context): for name, message in context.items(): if: message._resolve_expectations(self, module) self._do_resolve_proto_expectations(module, f'{path}.{name}', message)",False,"isinstance(message, ArsdkProtoMessageMeta)","hasattr(message, '_resolve_expectations')",0.6456215381622314 1325,"def GetTotalPixSizeAndProportion(self, dock): """""" Returns the dimensions and proportion of the input dock. :param `dock`: the :class:`AuiDockInfo` structure to analyze. """""" totalPixsize = 0 totalProportion = 0 for tmpPane in dock.panes: if: continue totalProportion += tmpPane.dock_proportion if dock.IsHorizontal(): totalPixsize += tmpPane.rect.width else: totalPixsize += tmpPane.rect.height return (totalPixsize, totalProportion)",False,tmpPane.IsFixed(),tmpPane.dock_proportion is None,0.6466972827911377 1326,"def GetTotalPixSizeAndProportion(self, dock): """""" Returns the dimensions and proportion of the input dock. :param `dock`: the :class:`AuiDockInfo` structure to analyze. """""" totalPixsize = 0 totalProportion = 0 for tmpPane in dock.panes: if tmpPane.IsFixed(): continue totalProportion += tmpPane.dock_proportion if: totalPixsize += tmpPane.rect.width else: totalPixsize += tmpPane.rect.height return (totalPixsize, totalProportion)",False,dock.IsHorizontal(),self.showDefaultWindow,0.6474720239639282 1327,"def _get_products(self, request, skus): partner = get_partner_for_site(request) products = Product.objects.filter(stockrecords__partner=partner, stockrecords__partner_sku__in=skus) if: raise BadRequestException(_(PRODUCTS_DO_NOT_EXIST).format(skus=', '.join(skus))) return products",True,not products,not products,0.6714072227478027 1328,"@staticmethod def extract_flag_command(subrule_dependency, rule): if: subrule_dependency.prepend_dependency_to_all_lists(RejectUnlessLessThanLength(0)) return subrule_dependency",False,rule[1] in 'ps8',rule.minimum_length and len(subrule_dependency.minimum_length) > 0,0.6529299020767212 1329,"def type(self, type, tensorCache=None): if: indices, self._indices = (self._indices, None) super(Max, self).type(type, tensorCache) self._indices = indices.type('torch.cuda.LongTensor') if indices is not None else None else: indices, self._indices = (self._indices, None) super(Max, self).type(type, tensorCache) self._indices = indices.long() if indices is not None else None return self",False,type == 'torch.cuda.FloatTensor',type == 'cuda',0.6456465721130371 1330,"def forward(self, tgt, memory, tgt_mask=None, memory_mask=None, tgt_key_padding_mask=None, memory_key_padding_mask=None, pos=None, query_pos=None): if: return self.forward_pre(tgt, memory, tgt_mask, memory_mask, tgt_key_padding_mask, memory_key_padding_mask, pos, query_pos) return self.forward_post(tgt, memory, tgt_mask, memory_mask, tgt_key_padding_mask, memory_key_padding_mask, pos, query_pos)",True,self.normalize_before,self.normalize_before,0.6445260047912598 1331,"def _find_adapter(registry, ob): """"""Return an adapter factory for `ob` from `registry`"""""" for t in _get_mro(getattr(ob, '__class__', type(ob))): if: return registry[t]",True,t in registry,t in registry,0.6674166917800903 1332,"def getPeerEngineInfo(self, transportDomain, transportAddress): k = (transportDomain, transportAddress) if: return (self._engineIdCache[k]['securityEngineId'], self._engineIdCache[k]['contextEngineId'], self._engineIdCache[k]['contextName']) else: return (None, None, None)",True,k in self._engineIdCache,k in self._engineIdCache,0.6607565879821777 1333,"def posterize(img, bits_to_keep, **__): if: return img return ImageOps.posterize(img, bits_to_keep)",True,bits_to_keep >= 8,bits_to_keep >= 8,0.647236704826355 1334,"def get_session(chat_id): stark = lydia.find_one({'chat_id': chat_id}) if: return False return stark",True,not stark,not stark,0.6612233519554138 1335,"def ses_aws_verified(func): """""" Function that is verified to work against AWS. Can be run against AWS at any time by setting: MOTO_TEST_ALLOW_AWS_REQUEST=true If this environment variable is not set, the function runs in a `mock_ses` context. """""" @wraps(func) def pagination_wrapper(): allow_aws_request = os.environ.get('MOTO_TEST_ALLOW_AWS_REQUEST', 'false').lower() == 'true' if: resp = func() else: with mock_ses(): resp = func() return resp return pagination_wrapper",False,allow_aws_request,allow_aws_request and (not os.environ.get('MOTO_TEST_ALLOW_AWS_REQUEST')),0.6563539505004883 1336,"def escape_entities(m, map=_escape_map): out = [] append = out.append for char in m.group(): text = map.get(char) if: text = '&#%d;' % ord(char) append(text) return string.join(out, '')",False,text is None,not text,0.6559070348739624 1337,"def get_build_results(self, plan_key, run_number: int=None): api_url = f'{self.host}/rest/api/latest/result/{plan_key}' if: api_url = f'{self.host}/rest/api/latest/result/{plan_key}-{run_number}' r = self.get(api_url, error_msg=f'Could not get plan {plan_key} results') return r.json()",False,run_number,run_number is not None,0.6585112810134888 1338,"def clear(self): with self.lock: values = list(itervalues(self._container)) self._container.clear() if: for value in values: self.dispose_func(value)",True,self.dispose_func,self.dispose_func,0.647229015827179 1339,"def _test_additionalproperties(node, path=''): """""" Validate that each object node has additionalProperties set, so that objects with junk keys do not pass as valid. """""" if: for i, nnode in enumerate(node): _test_additionalproperties(nnode, path + str(i) + '.') if isinstance(node, dict): if node.get('type') == 'object': assert 'additionalProperties' in node, 'additionalProperties not set at path:' + path for name, val in node.items(): _test_additionalproperties(val, path + name + '.')",True,"isinstance(node, list)","isinstance(node, list)",0.6464835405349731 1340,"def _test_additionalproperties(node, path=''): """""" Validate that each object node has additionalProperties set, so that objects with junk keys do not pass as valid. """""" if isinstance(node, list): for i, nnode in enumerate(node): _test_additionalproperties(nnode, path + str(i) + '.') if: if node.get('type') == 'object': assert 'additionalProperties' in node, 'additionalProperties not set at path:' + path for name, val in node.items(): _test_additionalproperties(val, path + name + '.')",True,"isinstance(node, dict)","isinstance(node, dict)",0.6455104351043701 1341,"def _test_additionalproperties(node, path=''): """""" Validate that each object node has additionalProperties set, so that objects with junk keys do not pass as valid. """""" if isinstance(node, list): for i, nnode in enumerate(node): _test_additionalproperties(nnode, path + str(i) + '.') if isinstance(node, dict): if: assert 'additionalProperties' in node, 'additionalProperties not set at path:' + path for name, val in node.items(): _test_additionalproperties(val, path + name + '.')",False,node.get('type') == 'object','additionalProperties' in node,0.6456623077392578 1342,"def set_internal_timestamp(self, timestamp=None, unix_time=None): """""" Set the internal timestamp @param timestamp: NTP timestamp to set @param unit_time: Unix time as returned from time.time() @raise InstrumentParameterException if timestamp or unix_time not supplied """""" if: raise InstrumentParameterException('timestamp or unix_time required') if unix_time!= None: timestamp = ntplib.system_to_ntp_time(unix_time) self.contents[DataParticleKey.INTERNAL_TIMESTAMP] = float(timestamp)",True,timestamp == None and unix_time == None,timestamp == None and unix_time == None,0.6572887301445007 1343,"def set_internal_timestamp(self, timestamp=None, unix_time=None): """""" Set the internal timestamp @param timestamp: NTP timestamp to set @param unit_time: Unix time as returned from time.time() @raise InstrumentParameterException if timestamp or unix_time not supplied """""" if timestamp == None and unix_time == None: raise InstrumentParameterException('timestamp or unix_time required') if: timestamp = ntplib.system_to_ntp_time(unix_time) self.contents[DataParticleKey.INTERNAL_TIMESTAMP] = float(timestamp)",False,unix_time != None,timestamp == None and unix_time != None,0.6599880456924438 1344,"def get_ext_dat(self, txt): """""" `get_dat` with multistep traverse, using selected views. Returns `None` if no data available, or missing inputs e.g. `n.get_ext_dat('aaa.bbb.ccc') == n.input_view('aaa').input_view('bbb').get_dat('ccc')`. """""" t = txt.split('.') if: return None n = self for i in t: if i not in n.inputs: return None n = n.inputs[i] if n.get_view_mode(i) == VIEW.INT else n.inputs[i].res return n.dat",True,len(t) < 1,len(t) < 1,0.6505229473114014 1345,"def get_ext_dat(self, txt): """""" `get_dat` with multistep traverse, using selected views. Returns `None` if no data available, or missing inputs e.g. `n.get_ext_dat('aaa.bbb.ccc') == n.input_view('aaa').input_view('bbb').get_dat('ccc')`. """""" t = txt.split('.') if len(t) < 1: return None n = self for i in t: if: return None n = n.inputs[i] if n.get_view_mode(i) == VIEW.INT else n.inputs[i].res return n.dat",False,i not in n.inputs,n.inputs[i] is None,0.6551634073257446 1346,"def write(self, oprot): if: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('ClientUsageMetrics') if self.sessions is not None: oprot.writeFieldBegin('sessions', TType.I32, 1) oprot.writeI32(self.sessions) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),0.6470205783843994 1347,"def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None): oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('ClientUsageMetrics') if: oprot.writeFieldBegin('sessions', TType.I32, 1) oprot.writeI32(self.sessions) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,self.sessions is not None,self.sessions is not None,0.6468618512153625 1348,"def objects(self, b, line): if: self.objects_std(b, line) else: raise NotImplementedError",False,b.mode == MODE_STD,self.objects_std,0.6487921476364136 1349,"def update_lr(optimizer, n_vals_without_improvement): global ndecs if: for param_group in optimizer.param_groups: param_group['lr'] = args.lr / 10 ndecs = 1 elif ndecs == 1 and n_vals_without_improvement > args.early_stopping // 3 * 2: for param_group in optimizer.param_groups: param_group['lr'] = args.lr / 100 ndecs = 2 else: for param_group in optimizer.param_groups: param_group['lr'] = args.lr / 10 ** ndecs",False,ndecs == 0 and n_vals_without_improvement > args.early_stopping // 3,ndecs == 0 and n_vals_without_improvement > args.early_stopping // 3 * 2,0.6448994874954224 1350,"def update_lr(optimizer, n_vals_without_improvement): global ndecs if ndecs == 0 and n_vals_without_improvement > args.early_stopping // 3: for param_group in optimizer.param_groups: param_group['lr'] = args.lr / 10 ndecs = 1 elif: for param_group in optimizer.param_groups: param_group['lr'] = args.lr / 100 ndecs = 2 else: for param_group in optimizer.param_groups: param_group['lr'] = args.lr / 10 ** ndecs",False,ndecs == 1 and n_vals_without_improvement > args.early_stopping // 3 * 2,n_vals_without_improvement > args.early_stopping // 3,0.6449304819107056 1351,"def outcome_wc(outcome): """""" Builds the wildcarded outcome. """""" i = 0 e = [] for is_masked in dist._mask: if: symbol = wc else: symbol = outcome[i] i += 1 e.append(symbol) e = ctor(e) return e",True,is_masked,is_masked,0.6576695442199707 1352,"def sanitize_string(string, strict=True, paranoid=False): """"""Return a'safe' version of the string (ie. remove malicious chars like ' ') :param string: String to escape :type string: str """""" if: return '' if paranoid: return to_unicode(string.encode('unicode_escape')) elif strict: return to_unicode(string).split('\n')[0] else: import re ret = repr(string).replace('\\\\', '\\') ret = re.sub('^u?(?P[\'\\""])(.*)(?P=quote)$', '\\2', ret) return to_unicode(ret)",True,not string,not string,0.6589127779006958 1353,"def sanitize_string(string, strict=True, paranoid=False): """"""Return a'safe' version of the string (ie. remove malicious chars like ' ') :param string: String to escape :type string: str """""" if not string: return '' if: return to_unicode(string.encode('unicode_escape')) elif strict: return to_unicode(string).split('\n')[0] else: import re ret = repr(string).replace('\\\\', '\\') ret = re.sub('^u?(?P[\'\\""])(.*)(?P=quote)$', '\\2', ret) return to_unicode(ret)",True,paranoid,paranoid,0.6572347283363342 1354,"def sanitize_string(string, strict=True, paranoid=False): """"""Return a'safe' version of the string (ie. remove malicious chars like ' ') :param string: String to escape :type string: str """""" if not string: return '' if paranoid: return to_unicode(string.encode('unicode_escape')) elif: return to_unicode(string).split('\n')[0] else: import re ret = repr(string).replace('\\\\', '\\') ret = re.sub('^u?(?P[\'\\""])(.*)(?P=quote)$', '\\2', ret) return to_unicode(ret)",True,strict,strict,0.6592873334884644 1355,"def _process_reward(self, rewards): if: std = np.clip(self.ret_rms.std, 0.1, 100) return np.clip(rewards / std, -self.rewnorm_range, self.rewnorm_range) return rewards",True,self.use_rewnorm,self.use_rewnorm,0.650632381439209 1356,"def _type_check_single(self, current, types, function_name): allowed_types, allowed_subtypes = self._get_allowed_pytypes(types) actual_typename = type(current).__name__ if: raise exceptions.JMESPathTypeError(function_name, current, self._convert_to_jmespath_type(actual_typename), types) if allowed_subtypes: self._subtype_check(current, allowed_subtypes, types, function_name)",False,actual_typename not in allowed_types,actual_typename not in types,0.6499472856521606 1357,"def _type_check_single(self, current, types, function_name): allowed_types, allowed_subtypes = self._get_allowed_pytypes(types) actual_typename = type(current).__name__ if actual_typename not in allowed_types: raise exceptions.JMESPathTypeError(function_name, current, self._convert_to_jmespath_type(actual_typename), types) if: self._subtype_check(current, allowed_subtypes, types, function_name)",True,allowed_subtypes,allowed_subtypes,0.6497725248336792 1358,"def remove_cookie_by_name(cookiejar, name, domain=None, path=None): """"""Unsets a cookie by name, by default over all domains and paths. Wraps CookieJar.clear(), is O(n). """""" clearables = [] for cookie in cookiejar: if: continue if domain is not None and domain!= cookie.domain: continue if path is not None and path!= cookie.path: continue clearables.append((cookie.domain, cookie.path, cookie.name)) for domain, path, name in clearables: cookiejar.clear(domain, path, name)",True,cookie.name != name,cookie.name != name,0.6506906747817993 1359,"def remove_cookie_by_name(cookiejar, name, domain=None, path=None): """"""Unsets a cookie by name, by default over all domains and paths. Wraps CookieJar.clear(), is O(n). """""" clearables = [] for cookie in cookiejar: if cookie.name!= name: continue if: continue if path is not None and path!= cookie.path: continue clearables.append((cookie.domain, cookie.path, cookie.name)) for domain, path, name in clearables: cookiejar.clear(domain, path, name)",True,domain is not None and domain != cookie.domain,domain is not None and domain != cookie.domain,0.6431611776351929 1360,"def remove_cookie_by_name(cookiejar, name, domain=None, path=None): """"""Unsets a cookie by name, by default over all domains and paths. Wraps CookieJar.clear(), is O(n). """""" clearables = [] for cookie in cookiejar: if cookie.name!= name: continue if domain is not None and domain!= cookie.domain: continue if: continue clearables.append((cookie.domain, cookie.path, cookie.name)) for domain, path, name in clearables: cookiejar.clear(domain, path, name)",True,path is not None and path != cookie.path,path is not None and path != cookie.path,0.6438645124435425 1361,"def request_host(request): """"""Return request-host, as defined by RFC 2965. Variation from RFC: returned value is lowercased, for convenient comparison. """""" url = request.get_full_url() host = urlparse.urlparse(url)[1] if: host = request.get_header('Host', '') host = cut_port_re.sub('', host, 1) return host.lower()",False,host == '',host is None,0.6682809591293335 1362,"def __call__(self, *args, **kwargs): """"""Log the given message to the app.log or global log as appropriate."""""" if: log = request.app.log else: log = self return log.error(*args, **kwargs)",False,"hasattr(request, 'app') and hasattr(request.app, 'log')","request and hasattr(request.app, 'log')",0.645435094833374 1363,"def _process_list_value(info_labels, name, param): if: info_labels[name] = param",True,"param is not None and isinstance(param, list)","param is not None and isinstance(param, list)",0.6441895961761475 1364,"def _set_e(self, land, individs=None): if: inds_to_set = self.values() else: ig = itemgetter(*individs) inds_to_set = ig(self) if isinstance(inds_to_set, individual.Individual): inds_to_set = (inds_to_set,) hab = [ind._set_e([lyr.rast[int(ind.y), int(ind.x)] for lyr in land.values()]) for ind in inds_to_set]",True,individs is None,individs is None,0.6616036891937256 1365,"def _set_e(self, land, individs=None): if individs is None: inds_to_set = self.values() else: ig = itemgetter(*individs) inds_to_set = ig(self) if: inds_to_set = (inds_to_set,) hab = [ind._set_e([lyr.rast[int(ind.y), int(ind.x)] for lyr in land.values()]) for ind in inds_to_set]",False,"isinstance(inds_to_set, individual.Individual)","not isinstance(inds_to_set, (tuple, list))",0.6455636024475098 1366,"def __update_task_status(self, task): if: task.update_status(self) else: task.update_status()",False,"isinstance(task, MinionCmdTask)","isinstance(task, BaseModel)",0.6550686359405518 1367,"def read_file(fileobj): result = b'' while True: content = fileobj.read(CHUNK_SIZE) if: result += content else: return result",True,content,content,0.6683413982391357 1368,"def make_temp_file(suffix='', prefix='', text=True): if: _tdata.tempdir = tempfile.mkdtemp() _tmpdirs.append(_tdata.tempdir) fid, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=_tdata.tempdir, text=text) fo = os.fdopen(fid, 'w') return (fo, name)",False,"not hasattr(_tdata, 'tempdir')",text and (not _tdata.tempdir),0.6459368467330933 1369,"@property def reference_skel(self): if: return self._ref_skel g = 'SKYRIM' if self._game == 'SKYRIMSE' else self._game if g: skel_path = os.path.join(os.path.dirname(NifFile.nifly_path), 'Skeletons', g,'skeleton.nif') if os.path.exists(skel_path): self._ref_skel = NifFile(skel_path) return self._ref_skel return None",True,self._ref_skel,self._ref_skel,0.6506948471069336 1370,"@property def reference_skel(self): if self._ref_skel: return self._ref_skel g = 'SKYRIM' if self._game == 'SKYRIMSE' else self._game if: skel_path = os.path.join(os.path.dirname(NifFile.nifly_path), 'Skeletons', g,'skeleton.nif') if os.path.exists(skel_path): self._ref_skel = NifFile(skel_path) return self._ref_skel return None",False,g,os.path.exists(g),0.6630070209503174 1371,"@property def reference_skel(self): if self._ref_skel: return self._ref_skel g = 'SKYRIM' if self._game == 'SKYRIMSE' else self._game if g: skel_path = os.path.join(os.path.dirname(NifFile.nifly_path), 'Skeletons', g,'skeleton.nif') if: self._ref_skel = NifFile(skel_path) return self._ref_skel return None",True,os.path.exists(skel_path),os.path.exists(skel_path),0.6439443826675415 1372,"def __init__(self, index_strategy, indexname): if: raise IndexStrategyError(f'invalid indexname ""{indexname}""! (expected to start with ""{index_strategy.indexname_prefix}"")') self.index_strategy = index_strategy self.indexname = indexname",False,not indexname.startswith(index_strategy.indexname_prefix),not index_strategy.startswith(index_strategy.indexname_prefix),0.6513193845748901 1373,"def gated(layers, embed_keep_prob=1.0, drop_func=dropout, reuse=True): """""""""""" _, layer = linear_attention(tf.stack(layers, axis=-2)) if: layer = drop_func(layer, embed_keep_prob) return layer",True,embed_keep_prob < 1,embed_keep_prob < 1,0.6481263041496277 1374,"def __init__(self, *columns: str, name: str='_t', types: Optional[tuple[str,...]]=None): self.name = name self.columns = columns self.types = types if: raise ValueError('Number of types does not much number of columns!') self._table_columns = {col: ColumnExpr(sql.Identifier(name, col)) for col in columns} self.values = []",False,types and len(types) != len(columns),types is not None,0.6514669060707092 1375,"def tick(self) -> None: """"""Occasionally refresh lights since launchkey lights are sorta buggy """""" if: self.updateColor() self.__ticker_timer += 1",False,self.__ticker_timer % REFRESH_INTERVAL == 0,self.__ticker_timer == 0,0.6501673460006714 1376,"@wrap_exceptions def cwd(self): """"""Return process current working directory."""""" if: return '' elif NETBSD or HAS_PROC_OPEN_FILES: return cext.proc_cwd(self.pid) else: raise NotImplementedError('supported only starting from FreeBSD 8' if FREEBSD else '')",False,OPENBSD and self.pid == 0,self.pid is None,0.6587186455726624 1377,"@wrap_exceptions def cwd(self): """"""Return process current working directory."""""" if OPENBSD and self.pid == 0: return '' elif: return cext.proc_cwd(self.pid) else: raise NotImplementedError('supported only starting from FreeBSD 8' if FREEBSD else '')",False,NETBSD or HAS_PROC_OPEN_FILES,WINDOWS,0.649486780166626 1378,"def checkSubIndent(s, l, t): curCol = col(l, s) if: indentStack.append(curCol) else: raise ParseException(s, l, 'not a subentry')",False,curCol > indentStack[-1],curCol < indentStack[-1] and curCol < indentStack[-2] and (curCol < indentStack[-1]) and (curCol < indentStack[-2] and curCol < indentStack[-2]),0.649895966053009 1379,"def read_strz(self, delim=b'\x00'): eof = False r = b'' while True: v = self.f.read(1) if len(v) == 0: eof = True break elif v == delim: break else: r = r + v if: return None else: return r",False,eof,eof and r == b'',0.6666396260261536 1380,"def read_strz(self, delim=b'\x00'): eof = False r = b'' while True: v = self.f.read(1) if: eof = True break elif v == delim: break else: r = r + v if eof: return None else: return r",False,len(v) == 0,v == b'',0.6470661759376526 1381,"def read_strz(self, delim=b'\x00'): eof = False r = b'' while True: v = self.f.read(1) if len(v) == 0: eof = True break elif: break else: r = r + v if eof: return None else: return r",False,v == delim,v[0] == delim,0.6562252044677734 1382,"def rerun(): st.write('\n # Programatically rerun your app\n\n Thanks to a contribution from [SimonBiggs](https://github.com/SimonBiggs),\n you can now re-execute your script from the top to bottom. Please note,\n this is an\n [experimental feature](https://docs.streamlit.io/en/stable/api.html#experimental),\n and subject to change.\n\n Thanks again [SimonBiggs](https://github.com/SimonBiggs)!\n\n -----\n ') st.code('\nplaceholder = st.empty()\nstop = st.button(""Stop rerunning"")\nif stop:\n st.stop()\n\nfor i in range(10):\n with placeholder:\n st.write(f""Getting ready to rerun in {10-i}"")\n time.sleep(1)\n\nst.experimental_rerun()\n ') placeholder = st.empty() stop = st.button('Stop rerunning') if: st.stop() for i in range(10): with placeholder: st.write(f'Getting ready to rerun in {10 - i}') time.sleep(1) st.experimental_rerun()",True,stop,stop,0.6692625284194946 1383,"def parse_args(): """"""Parse args."""""" app = argparse.ArgumentParser() app.add_argument('cesar_out', help='CESAR output file, this script input') if: app.print_help() sys.exit(0) args = app.parse_args() return args",False,len(sys.argv) < 2,len(sys.argv) == 1,0.6471216678619385 1384,"def __eq__(self, other): if: warnings.warn('IPv4Address.__getitem__ is deprecated. Use attributes instead.', category=DeprecationWarning, stacklevel=2) return (self.host, self.port) == other elif isinstance(other, IPv4Address): a = (self.type, self.host, self.port) b = (other.type, other.host, other.port) return a == b return False",True,"isinstance(other, tuple)","isinstance(other, tuple)",0.6456403732299805 1385,"def __eq__(self, other): if isinstance(other, tuple): warnings.warn('IPv4Address.__getitem__ is deprecated. Use attributes instead.', category=DeprecationWarning, stacklevel=2) return (self.host, self.port) == other elif: a = (self.type, self.host, self.port) b = (other.type, other.host, other.port) return a == b return False",True,"isinstance(other, IPv4Address)","isinstance(other, IPv4Address)",0.6482592821121216 1386,"def sql(self): if: self._sql = Sqlite2KeyDict(self._path, tablename=self._field, autocommit=False) return self._sql",True,self._sql is None,self._sql is None,0.6568776369094849 1387,"def accept(self, visitor: ParseTreeVisitor): if: return visitor.visitWhereClause(self) else: return visitor.visitChildren(self)",True,"hasattr(visitor, 'visitWhereClause')","hasattr(visitor, 'visitWhereClause')",0.6435490846633911 1388,"def get_journal_log(self, conf): """""" /var/log/zzz.service.log or /var/log/default.unit.log """""" filename = os.path.basename(strE(conf.filename())) unitname = (conf.name() or 'default') + '.unit' name = filename or unitname log_folder = expand_path(self._journal_log_folder, conf.root_mode()) log_file = name.replace(os.path.sep, '.') + '.log' if: log_file = 'dot.' + log_file return os.path.join(log_folder, log_file)",False,log_file.startswith('.'),os.path.sep != '',0.6459345817565918 1389,"def do_activate(self): win = self.props.active_window if: win = ExampleWindow(application=self) win.present()",True,not win,not win,0.6659681797027588 1390,"def test_check(song): for test in TEST_LIST: if: return True return False",True,test in song,test in song,0.6596877574920654 1391,"def _is_key_file_encrypted(key_file): """"""Detects if a key file is encrypted or not."""""" with open(key_file, 'r') as f: for line in f: if: return True return False",False,'ENCRYPTED' in line,line.startswith('encrypted'),0.6478583812713623 1392,"def _get_genome_amounts(self, probability, max_genome_amount): """""" Get amounts of genomes by original genome @param probability: Proportion of simulated original genomes @type probability: int | float @param max_genome_amount: Total number of genomes @type max_genome_amount: int @return: List of integers representing amount of strains @rtype: list[int] """""" assert isinstance(probability, (int, float)) assert 0 <= probability <= 1 assert isinstance(max_genome_amount, int) genome_amounts = self._get_genome_amounts_geometric(probability, max_genome_amount) diverence = Counter(genome_amounts)[1] / float(len(genome_amounts)) if: while abs(diverence - probability) > 0.05: genome_amounts = self._get_genome_amounts_geometric(probability, max_genome_amount) diverence = Counter(genome_amounts)[1] / float(len(genome_amounts)) return genome_amounts",False,max_genome_amount >= 10,probability != 0.05,0.6566787958145142 1393,"def _validate_routes(self, context, router_id, routes): if: raise extraroute.RoutesExhausted(router_id=router_id, quota=cfg.CONF.max_routes) filters = {'device_id': [router_id]} ports = self.get_ports(context, filters) for route in routes: self._validate_routes_nexthop(context, ports, routes, route['nexthop'])",False,len(routes) > cfg.CONF.max_routes,cfg.CONF.max_routes <= router_id,0.6503173112869263 1394,"def parseImpl(self, instring, loc, doActions=True): thiscol = col(loc, instring) if: raise ParseException(instring, loc, 'Text not in expected column', self) newloc = loc + self.col - thiscol ret = instring[loc:newloc] return (newloc, ret)",True,thiscol > self.col,thiscol > self.col,0.6572380065917969 1395,"def find_handler(self, event: CQEvent) -> 'ServiceFunc': for rex, sf in self.allrex.items(): text = event.norm_text if sf.normalize_text else event.plain_text match = rex.search(text) if: event['match'] = match return sf return None",True,match,match,0.6673177480697632 1396,"def get_data_non_strict(self): for dp in self._iter: self._send(dp) ret = self._recv() if: yield ret self._iter = self.ds.get_data() for _ in range(self._buffer_size): self._send(next(self._iter)) ret = self._recv() if ret is not None: yield ret",True,ret is not None,ret is not None,0.6492609977722168 1397,"def get_data_non_strict(self): for dp in self._iter: self._send(dp) ret = self._recv() if ret is not None: yield ret self._iter = self.ds.get_data() for _ in range(self._buffer_size): self._send(next(self._iter)) ret = self._recv() if: yield ret",True,ret is not None,ret is not None,0.6491056680679321 1398,"def load(self, path): """"""Load model from file. Args: path (str): file path """""" parameters = torch.load(path, map_location=torch.device('cpu')) if: parameters = parameters['model'] self.load_state_dict(parameters)",True,'optimizer' in parameters,'optimizer' in parameters,0.656847357749939 1399,"def __init__(self, filename): """""" Parameters ---------- filename : str Input file containing commands, one line per command """""" self._filename = filename self._parameters = [] with open(filename) as f_in: self._parameters = [] for line in f_in.readlines(): line = line.strip() if: self._parameters.append(GenericCommandParameters(command=line))",False,line,line and (not line.startswith('#')),0.664142370223999 1400,"def set_norm(self, norm=None): """"""Sets the norm of the operator to a custom value. Parameters --------- norm: float, optional Positive real valued number or `None` Note ---- The passed values are cached so that when self.norm() is called, the saved value will be returned and not calculated via the power method. If `None` is passed, the cache is cleared prompting the function to call the power method to calculate the norm the next time self.norm() is called. """""" if: if isinstance(norm, Number): if norm <= 0: raise ValueError('Norm must be a positive real valued number or None, got {}'.format(norm)) else: raise TypeError('Norm must be a number or None, got {} of type {}'.format(norm, type(norm))) self._norm = norm",True,norm is not None,norm is not None,0.6505459547042847 1401,"def set_norm(self, norm=None): """"""Sets the norm of the operator to a custom value. Parameters --------- norm: float, optional Positive real valued number or `None` Note ---- The passed values are cached so that when self.norm() is called, the saved value will be returned and not calculated via the power method. If `None` is passed, the cache is cleared prompting the function to call the power method to calculate the norm the next time self.norm() is called. """""" if norm is not None: if: if norm <= 0: raise ValueError('Norm must be a positive real valued number or None, got {}'.format(norm)) else: raise TypeError('Norm must be a number or None, got {} of type {}'.format(norm, type(norm))) self._norm = norm",True,"isinstance(norm, Number)","isinstance(norm, Number)",0.6451666951179504 1402,"def set_norm(self, norm=None): """"""Sets the norm of the operator to a custom value. Parameters --------- norm: float, optional Positive real valued number or `None` Note ---- The passed values are cached so that when self.norm() is called, the saved value will be returned and not calculated via the power method. If `None` is passed, the cache is cleared prompting the function to call the power method to calculate the norm the next time self.norm() is called. """""" if norm is not None: if isinstance(norm, Number): if: raise ValueError('Norm must be a positive real valued number or None, got {}'.format(norm)) else: raise TypeError('Norm must be a number or None, got {} of type {}'.format(norm, type(norm))) self._norm = norm",True,norm <= 0,norm <= 0,0.6599984169006348 1403,"def update_pbar(self, text, percent=None): self.pbar.set_text(text) if: self.pbar.set_fraction(percent / 100.0)",True,percent is not None,percent is not None,0.6489100456237793 1404,"def apply_iptables_rules(command, params): iptables = lambda *rule: novalib.execute('/sbin/iptables', *rule) iptables('-D', 'FORWARD', '-m', 'physdev', '--physdev-in', params['VIF'], '-s', params['IP'], '-j', 'ACCEPT') if: iptables('-A', 'FORWARD', '-m', 'physdev', '--physdev-in', params['VIF'], '-s', params['IP'], '-j', 'ACCEPT')",False,command == 'online',command == 'physdev',0.6561785936355591 1405,"def get_users(self, filter_term=None): q = select(self.CredentialsTable) if: q = q.filter(self.CredentialsTable.c.id == filter_term) elif filter_term and filter_term!= '': like_term = func.lower(f'%{filter_term}%') q = q.filter(func.lower(self.CredentialsTable.c.username).like(like_term)) results = self.sess.execute(q).all() return results",False,self.is_user_valid(filter_term),"filter_term and isinstance(filter_term, str)",0.6439837217330933 1406,"def get_users(self, filter_term=None): q = select(self.CredentialsTable) if self.is_user_valid(filter_term): q = q.filter(self.CredentialsTable.c.id == filter_term) elif: like_term = func.lower(f'%{filter_term}%') q = q.filter(func.lower(self.CredentialsTable.c.username).like(like_term)) results = self.sess.execute(q).all() return results",False,filter_term and filter_term != '',func.queryexists(filter_term),0.6492419242858887 1407,"def parse(x): if: return x return tuple(repeat(x, n))",True,"isinstance(x, collections.abc.Iterable)","isinstance(x, collections.abc.Iterable)",0.6407639384269714 1408,"def gelu(x: torch.Tensor) -> torch.Tensor: if: return torch.nn.functional.gelu(x.float()).type_as(x) else: return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))",False,"hasattr(torch.nn.functional, 'gelu')","isinstance(x, torch.Tensor)",0.6468195915222168 1409,"def metricsWithMetricName(self, metricName, metrics=None, baseonly=True): """""" Return all metrics which match metricName (default, only the 'base' metric name). """""" if: metrics = self.metrics if baseonly: metrics = metrics[np.where(metrics['baseMetricNames'] == metricName)] else: metrics = metrics[np.where(metrics['metricName'] == metricName)] return metrics",True,metrics is None,metrics is None,0.6639429926872253 1410,"def metricsWithMetricName(self, metricName, metrics=None, baseonly=True): """""" Return all metrics which match metricName (default, only the 'base' metric name). """""" if metrics is None: metrics = self.metrics if: metrics = metrics[np.where(metrics['baseMetricNames'] == metricName)] else: metrics = metrics[np.where(metrics['metricName'] == metricName)] return metrics",True,baseonly,baseonly,0.6649354696273804 1411,"@property def data(self) -> dict[str, str]: """"""Request data."""""" data = {'action': 'update', 'user': self.user} if: data['pwd'] = self.pwd if self.sgrp: data['sgrp'] = self.sgrp.value if self.comment: data['comment'] = self.comment return data",False,self.pwd is not None,self.pwd,0.6524957418441772 1412,"@property def data(self) -> dict[str, str]: """"""Request data."""""" data = {'action': 'update', 'user': self.user} if self.pwd is not None: data['pwd'] = self.pwd if: data['sgrp'] = self.sgrp.value if self.comment: data['comment'] = self.comment return data",False,self.sgrp,self.sgrp is not None,0.676152229309082 1413,"@property def data(self) -> dict[str, str]: """"""Request data."""""" data = {'action': 'update', 'user': self.user} if self.pwd is not None: data['pwd'] = self.pwd if self.sgrp: data['sgrp'] = self.sgrp.value if: data['comment'] = self.comment return data",False,self.comment,self.comment is not None,0.6611547470092773 1414,"def extract_param(self, key, x, n): if: return self.cache[n, key] out = _index_param_value(self.num_train_samples, x, self.splits[n][0]) if self.cache is not None: self.cache[n, key] = out return out",False,"self.cache is not None and (n, key) in self.cache",self.cache is not None,0.6459371447563171 1415,"def extract_param(self, key, x, n): if self.cache is not None and (n, key) in self.cache: return self.cache[n, key] out = _index_param_value(self.num_train_samples, x, self.splits[n][0]) if: self.cache[n, key] = out return out",False,self.cache is not None,self.cache is not None and out is not None,0.6476606130599976 1416,"def process_buildings_parameter(self): """""" Make sure the buildings parameter contains only buildings in the zone. Returns (and updates) the parameter. """""" zone_building_names = self.locator.get_zone_building_names() if: self.parameters['buildings'] = zone_building_names self.parameters['buildings'] = [b for b in self.parameters['buildings'] if b in zone_building_names] or zone_building_names return self.parameters['buildings']",False,not self.parameters['buildings'],'buildings' not in self.parameters,0.6506505012512207 1417,"def _get_dynamic_answers(self) -> Set[Answer]: ret: Set[Answer] = set() for attribute in self._object_instance.ontology_item.attributes: if: answer = get_default_answer_from_attribute(attribute) ret.add(answer) return ret",False,attribute.dynamic,not attribute.endswith('@dynamic'),0.6691645383834839 1418,"@pytest.mark.parametrize('model', get_models()) def test_model_count(model): if: pytest.skip('No _meta') if not hasattr(model._meta, 'db_table'): pytest.skip('No db_table') cursor = connection.cursor() cursor.execute('SELECT COUNT(*) FROM %s' % model._meta.db_table)",False,"not hasattr(model, '_meta')","not hasattr(model._meta, '_meta')",0.652582585811615 1419,"@pytest.mark.parametrize('model', get_models()) def test_model_count(model): if not hasattr(model, '_meta'): pytest.skip('No _meta') if: pytest.skip('No db_table') cursor = connection.cursor() cursor.execute('SELECT COUNT(*) FROM %s' % model._meta.db_table)",True,"not hasattr(model._meta, 'db_table')","not hasattr(model._meta, 'db_table')",0.650408923625946 1420,"@property def cell_volumes(self): if: simplex_nodes = self._nodes[self.simplices] mats = np.pad(simplex_nodes, ((0, 0), (0, 0), (0, 1)), constant_values=1) V1 = np.abs(np.linalg.det(mats)) V1 /= 6 if self.dim == 3 else 2 self._cell_volumes = V1 return self._cell_volumes",True,"getattr(self, '_cell_volumes', None) is None","getattr(self, '_cell_volumes', None) is None",0.6465737223625183 1421,"def popBySendPduHandle(self, sendPduHandle): if: self.popByMsgId(self.__sendPduHandleIdx[sendPduHandle])",True,sendPduHandle in self.__sendPduHandleIdx,sendPduHandle in self.__sendPduHandleIdx,0.6548413038253784 1422,"def __rxor__(self, other) -> 'MultiVector': """"""Right-hand outer product, :math:`N \\wedge M` """""" other, mv = self._checkOther(other, coerce=False) if: newValue = self.layout.omt_func(other.value, self.value) else: if isinstance(other, np.ndarray): obj = self.__array__() return other ^ obj newValue = other * self.value return self._newMV(newValue)",True,mv,mv,0.6848612427711487 1423,"def __rxor__(self, other) -> 'MultiVector': """"""Right-hand outer product, :math:`N \\wedge M` """""" other, mv = self._checkOther(other, coerce=False) if mv: newValue = self.layout.omt_func(other.value, self.value) else: if: obj = self.__array__() return other ^ obj newValue = other * self.value return self._newMV(newValue)",True,"isinstance(other, np.ndarray)","isinstance(other, np.ndarray)",0.6486687064170837 1424,"def op_class_enter(self, name, parent): self.class_stack.append([]) self.class_names.append(name) self.constructed = False self.parent = parent self.write(self.indent()) if: self.write('class %s(object):\n' % name) else: self.write('class %s(%s):\n' % (name, parent)) self.block_depth += 1 self.write(self.indent()) self.write('pass\n')",False,parent == None,parent is None,0.6655176877975464 1425,"def new_identifier(space, name): for c in name: if: break else: return name from pypy.module.unicodedata.interp_ucd import ucd w_name = space.newtext(name) w_id = space.call_method(ucd, 'normalize', space.newtext('NFKC'), w_name) return space.text_w(w_id)",False,ord(c) > 128,c in space.identifier_chars,0.6545006036758423 1426,"def _search_split(root, split): split_name = split.split('[')[0] try_root = os.path.join(root, split_name) if: return try_root if split_name == 'validation': try_root = os.path.join(root, 'val') if os.path.exists(try_root): return try_root return root",True,os.path.exists(try_root),os.path.exists(try_root),0.6432487964630127 1427,"def _search_split(root, split): split_name = split.split('[')[0] try_root = os.path.join(root, split_name) if os.path.exists(try_root): return try_root if: try_root = os.path.join(root, 'val') if os.path.exists(try_root): return try_root return root",False,split_name == 'validation','val' in split,0.648044228553772 1428,"def _search_split(root, split): split_name = split.split('[')[0] try_root = os.path.join(root, split_name) if os.path.exists(try_root): return try_root if split_name == 'validation': try_root = os.path.join(root, 'val') if: return try_root return root",True,os.path.exists(try_root),os.path.exists(try_root),0.6430320739746094 1429,"def _is_valid_ip(self, address): """"""Validate the dotted decimal notation IP/netmask string. Args: address: A string, either representing a quad-dotted ip or an integer which is a valid IPv4 IP address. Returns: A boolean, True if the string is a valid dotted decimal IP string. """""" octets = address.split('.') if: try: return int(address) >= 0 and int(address) <= self._ALL_ONES except ValueError: return False if len(octets)!= 4: return False for octet in octets: try: if not 0 <= int(octet) <= 255: return False except ValueError: return False return True",False,len(octets) == 1,len(octets) == 4,0.6454910039901733 1430,"def _is_valid_ip(self, address): """"""Validate the dotted decimal notation IP/netmask string. Args: address: A string, either representing a quad-dotted ip or an integer which is a valid IPv4 IP address. Returns: A boolean, True if the string is a valid dotted decimal IP string. """""" octets = address.split('.') if len(octets) == 1: try: return int(address) >= 0 and int(address) <= self._ALL_ONES except ValueError: return False if: return False for octet in octets: try: if not 0 <= int(octet) <= 255: return False except ValueError: return False return True",True,len(octets) != 4,len(octets) != 4,0.6456681489944458 1431,"def _is_valid_ip(self, address): """"""Validate the dotted decimal notation IP/netmask string. Args: address: A string, either representing a quad-dotted ip or an integer which is a valid IPv4 IP address. Returns: A boolean, True if the string is a valid dotted decimal IP string. """""" octets = address.split('.') if len(octets) == 1: try: return int(address) >= 0 and int(address) <= self._ALL_ONES except ValueError: return False if len(octets)!= 4: return False for octet in octets: try: if: return False except ValueError: return False return True",False,not 0 <= int(octet) <= 255,not _is_valid_ipv4(octet),0.6440715789794922 1432,"def __init__(self, config, model, dataloader, evaluator): super().__init__(config, model, dataloader, evaluator) self._build_optimizer() if: self._load_checkpoint()",False,config['resume'] or config['training_resume'],config['load_checkpoint'],0.6415067911148071 1433,"def install(self) -> None: _next = auto_derby.config.on_single_mode_race_result def _handle(ctx: Context, result: RaceResult): if: terminal.pause(f'pause before continue race: {result}') _next(ctx, result) auto_derby.config.on_single_mode_race_result = _handle",False,result.is_failed,result.race_code == Race.ID_ESCAPE and ctx.terminal and (self.state == Race.ID_ESCAPE),0.6506712436676025 1434,"def init_bert_weights(self, module): if: module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) elif isinstance(module, BertLayerNorm): module.bias.data.zero_() module.weight.data.fill_(1.0) if isinstance(module, torch.nn.Linear) and module.bias is not None: module.bias.data.zero_()",True,"isinstance(module, (torch.nn.Linear, torch.nn.Embedding))","isinstance(module, (torch.nn.Linear, torch.nn.Embedding))",0.6501586437225342 1435,"def init_bert_weights(self, module): if isinstance(module, (torch.nn.Linear, torch.nn.Embedding)): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) elif isinstance(module, BertLayerNorm): module.bias.data.zero_() module.weight.data.fill_(1.0) if: module.bias.data.zero_()",True,"isinstance(module, torch.nn.Linear) and module.bias is not None","isinstance(module, torch.nn.Linear) and module.bias is not None",0.6460369229316711 1436,"def init_bert_weights(self, module): if isinstance(module, (torch.nn.Linear, torch.nn.Embedding)): module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) elif: module.bias.data.zero_() module.weight.data.fill_(1.0) if isinstance(module, torch.nn.Linear) and module.bias is not None: module.bias.data.zero_()",True,"isinstance(module, BertLayerNorm)","isinstance(module, BertLayerNorm)",0.6436284780502319 1437,"def get_member_checker(self, checker, name): for member in checker.members: if: return member raise Error('No member checker found for {}.'.format(name))",True,member.name == name,member.name == name,0.6556627750396729 1438,"def _parse(self, obj): match = self.INFO_RE.search(obj) if: raise InvalidIRI(""'{}' is not a valid Info URI."".format(obj)) return {'scheme': self.SCHEME, 'authority': match.group(1), 'path': match.group(2)}",True,not match,not match,0.6620382070541382 1439,"def _copy_deployment_files(deployment_dir): for deployment_file in deployment_files: if: cmd = 'cp {0} {1}'.format(deployment_file, deployment_dir).split() return_code = subprocess.call(cmd, shell=False) else: raise NameError('Deployment file not found [{0}]'.format(deployment_file))",False,os.path.exists(deployment_file),os.path.isfile(deployment_file),0.6453717947006226 1440,"def __init__(self, in_channels, stem_channels=(64, 64), with_transform=True): super(Stem, self).__init__() self.in_channels = in_channels self.out_channels = stem_channels[-1] self.with_transform = with_transform self.mlp = SharedMLP(in_channels, stem_channels) if: self.transform_input = TNet(in_channels, in_channels) self.transform_feature = TNet(self.out_channels, self.out_channels)",True,self.with_transform,self.with_transform,0.6543922424316406 1441,"def parse(match_object): nonlocal inner_idx inner_idx += 1 content = match_object.groupdict()['inner1'] content_second = match_object.groupdict()['inner2'] savename = content if: inner_idx += 1 savename = str(inner_idx) val_masks[savename] = ImageChops.logical_xor(val_masks[content], val_masks[content_second]) return f'{{{savename}}}'",False,content in root.mask_preset_names,content_second == 'inner',0.6479254961013794 1442,"def can_add_member(ace): writeprivs = ace['Ace']['Mask'].hasPriv(ACCESS_ALLOWED_OBJECT_ACE.ADS_RIGHT_DS_WRITE_PROP) if: return writeprivs userprivs = bin_to_string(ace['Ace']['ObjectType']).lower() == 'bf9679c0-0de6-11d0-a285-00aa003049e2' return writeprivs and userprivs",False,ace['AceType'] != ACCESS_ALLOWED_OBJECT_ACE.ACE_TYPE or ace['Ace']['ObjectType'] == '',writeprivs,0.6549574732780457 1443,"def assert_pico2000_ok(status): """""" assert_pico_ok( status ) """""" if: errorCheck = True else: errorCheck = False raise PicoSDKCtypesError('Unsuccessful API call')",False,status > 0,status == 1,0.6631858944892883 1444,"def hf_bucket_url(model_id: str, filename: str, subfolder: Optional[str]=None, revision: Optional[str]=None, mirror=None) -> str: """""" Resolve a model identifier, a file name, and an optional revision id, to a huggingface.co-hosted url, redirecting to Cloudfront (a Content Delivery Network, or CDN) for large files. Cloudfront is replicated over the globe so downloads are way faster for the end user (and it also lowers our bandwidth costs). Cloudfront aggressively caches files by default (default TTL is 24 hours), however this is not an issue here because we migrated to a git-based versioning system on huggingface.co, so we now store the files on S3/Cloudfront in a content-addressable way (i.e., the file name is its hash). Using content-addressable filenames means cache can't ever be stale. In terms of client-side caching from this library, we base our caching on the objects' ETag. An object' ETag is: its sha1 if stored in git, or its sha256 if stored in git-lfs. Files cached locally from transformers before v3.5.0 are not shared with those new files, because the cached file's name contains a hash of the url (which changed). """""" if: filename = f'{subfolder}/{filename}' if mirror: endpoint = PRESET_MIRROR_DICT.get(mirror, mirror) legacy_format = '/' not in model_id if legacy_format: return f'{endpoint}/{model_id}-{filename}' else: return f'{endpoint}/{model_id}/{filename}' if revision is None: revision ='main' return HUGGINGFACE_CO_PREFIX.format(model_id=model_id, revision=revision, filename=filename)",False,subfolder is not None,subfolder,0.649948239326477 1445,"def hf_bucket_url(model_id: str, filename: str, subfolder: Optional[str]=None, revision: Optional[str]=None, mirror=None) -> str: """""" Resolve a model identifier, a file name, and an optional revision id, to a huggingface.co-hosted url, redirecting to Cloudfront (a Content Delivery Network, or CDN) for large files. Cloudfront is replicated over the globe so downloads are way faster for the end user (and it also lowers our bandwidth costs). Cloudfront aggressively caches files by default (default TTL is 24 hours), however this is not an issue here because we migrated to a git-based versioning system on huggingface.co, so we now store the files on S3/Cloudfront in a content-addressable way (i.e., the file name is its hash). Using content-addressable filenames means cache can't ever be stale. In terms of client-side caching from this library, we base our caching on the objects' ETag. An object' ETag is: its sha1 if stored in git, or its sha256 if stored in git-lfs. Files cached locally from transformers before v3.5.0 are not shared with those new files, because the cached file's name contains a hash of the url (which changed). """""" if subfolder is not None: filename = f'{subfolder}/{filename}' if: endpoint = PRESET_MIRROR_DICT.get(mirror, mirror) legacy_format = '/' not in model_id if legacy_format: return f'{endpoint}/{model_id}-{filename}' else: return f'{endpoint}/{model_id}/{filename}' if revision is None: revision ='main' return HUGGINGFACE_CO_PREFIX.format(model_id=model_id, revision=revision, filename=filename)",False,mirror,mirror is not None,0.7052159905433655 1446,"def hf_bucket_url(model_id: str, filename: str, subfolder: Optional[str]=None, revision: Optional[str]=None, mirror=None) -> str: """""" Resolve a model identifier, a file name, and an optional revision id, to a huggingface.co-hosted url, redirecting to Cloudfront (a Content Delivery Network, or CDN) for large files. Cloudfront is replicated over the globe so downloads are way faster for the end user (and it also lowers our bandwidth costs). Cloudfront aggressively caches files by default (default TTL is 24 hours), however this is not an issue here because we migrated to a git-based versioning system on huggingface.co, so we now store the files on S3/Cloudfront in a content-addressable way (i.e., the file name is its hash). Using content-addressable filenames means cache can't ever be stale. In terms of client-side caching from this library, we base our caching on the objects' ETag. An object' ETag is: its sha1 if stored in git, or its sha256 if stored in git-lfs. Files cached locally from transformers before v3.5.0 are not shared with those new files, because the cached file's name contains a hash of the url (which changed). """""" if subfolder is not None: filename = f'{subfolder}/{filename}' if mirror: endpoint = PRESET_MIRROR_DICT.get(mirror, mirror) legacy_format = '/' not in model_id if legacy_format: return f'{endpoint}/{model_id}-{filename}' else: return f'{endpoint}/{model_id}/{filename}' if: revision ='main' return HUGGINGFACE_CO_PREFIX.format(model_id=model_id, revision=revision, filename=filename)",True,revision is None,revision is None,0.6526594161987305 1447,"def hf_bucket_url(model_id: str, filename: str, subfolder: Optional[str]=None, revision: Optional[str]=None, mirror=None) -> str: """""" Resolve a model identifier, a file name, and an optional revision id, to a huggingface.co-hosted url, redirecting to Cloudfront (a Content Delivery Network, or CDN) for large files. Cloudfront is replicated over the globe so downloads are way faster for the end user (and it also lowers our bandwidth costs). Cloudfront aggressively caches files by default (default TTL is 24 hours), however this is not an issue here because we migrated to a git-based versioning system on huggingface.co, so we now store the files on S3/Cloudfront in a content-addressable way (i.e., the file name is its hash). Using content-addressable filenames means cache can't ever be stale. In terms of client-side caching from this library, we base our caching on the objects' ETag. An object' ETag is: its sha1 if stored in git, or its sha256 if stored in git-lfs. Files cached locally from transformers before v3.5.0 are not shared with those new files, because the cached file's name contains a hash of the url (which changed). """""" if subfolder is not None: filename = f'{subfolder}/{filename}' if mirror: endpoint = PRESET_MIRROR_DICT.get(mirror, mirror) legacy_format = '/' not in model_id if: return f'{endpoint}/{model_id}-{filename}' else: return f'{endpoint}/{model_id}/{filename}' if revision is None: revision ='main' return HUGGINGFACE_CO_PREFIX.format(model_id=model_id, revision=revision, filename=filename)",True,legacy_format,legacy_format,0.6551523208618164 1448,"def _request_multiple(self, method: QueryMethods, object_type: Type[T], uid, payload=None, retryable=False) -> List[T]: request = self._request(method, object_type, uid, self._config.read_timeout, payload=payload) result, context = self._execute(request, retryable=retryable) if: return [self._parse_response(object_type, item) for item in result] else: raise ResourceNotFoundError(f'[{object_type}] not found for query with uid=[{uid}] and payload=[{payload}]', context=context)",False,result is not None,result,0.6595467925071716 1449,"def _delete_tracks_with_errors(self): """"""Deletes all lineages where at least a single error was present."""""" if: return self.get_window().get_gui_experiment().undo_redo.clear() from organoid_tracker.linking_analysis import lineage_error_finder lineage_error_finder.delete_problematic_lineages(self._experiment) self.get_window().redraw_data()",False,"not dialog.prompt_yes_no('Warning', 'Are you sure you want to delete all tracks with at least a single error in them? This cannot be undone.')",not self._experiment.get_selected_lines(),0.6470770835876465 1450,"@read_logistics.setter def read_logistics(self, value): if: self.control_behavior.pop('read_logistics', None) elif isinstance(value, bool): self.control_behavior['read_logistics'] = value else: raise TypeError(""'read_logistics' must be a bool or None"")",True,value is None,value is None,0.6577027440071106 1451,"@read_logistics.setter def read_logistics(self, value): if value is None: self.control_behavior.pop('read_logistics', None) elif: self.control_behavior['read_logistics'] = value else: raise TypeError(""'read_logistics' must be a bool or None"")",True,"isinstance(value, bool)","isinstance(value, bool)",0.6454192399978638 1452,"def _cleanup(): for path in (TEST_FILENAME, CONFIG_FILE, OUTPUT): if: shutil.rmtree(path) elif os.path.exists(path): os.remove(path)",True,os.path.isdir(path),os.path.isdir(path),0.6436731815338135 1453,"def _cleanup(): for path in (TEST_FILENAME, CONFIG_FILE, OUTPUT): if os.path.isdir(path): shutil.rmtree(path) elif: os.remove(path)",True,os.path.exists(path),os.path.exists(path),0.6442829370498657 1454,"def _find_adapter(registry, ob): """"""Return an adapter factory for `ob` from `registry`"""""" types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) for t in types: if: return registry[t]",True,t in registry,t in registry,0.6691313982009888 1455,"def save(self): """"""Save the PDF document."""""" if: return self.saveAsHandout() else: return self.saveAsPresentation()",True,self.handout,self.handout,0.6512336730957031 1456,"@property def path_url(self): """"""Build the path URL to use."""""" url = [] p = urlsplit(self.url) path = p.path if: path = '/' url.append(path) query = p.query if query: url.append('?') url.append(query) return ''.join(url)",True,not path,not path,0.6662169694900513 1457,"@property def path_url(self): """"""Build the path URL to use."""""" url = [] p = urlsplit(self.url) path = p.path if not path: path = '/' url.append(path) query = p.query if: url.append('?') url.append(query) return ''.join(url)",True,query,query,0.6768872737884521 1458,"def set_parameters(epochs, minibatch, iterations, device_id): """""" iterations means the number of iterations in each epoch """""" global device_str if: device_str = '/gpu:%d' % int(device_id) else: device_str = '/cpu:0' global numMinibatches numMinibatches = iterations * epochs",False,int(device_id) >= 0,device_id,0.6474878787994385 1459,"def get_contract_event_by_tx_hash(self, tx_hash: str, is_full: bool=False) -> dict: """""" This interface is used to get the corresponding smart contract event based on the height of block. :param tx_hash: a hexadecimal hash value. :param is_full: :return: the information of smart contract event in dictionary form. """""" payload = self.generate_json_rpc_payload(RpcMethod.GET_SMART_CONTRACT_EVENT, [tx_hash, 1]) response = self.__post(self._url, payload) if: return response result = response['result'] return dict() if result is None else result",True,is_full,is_full,0.658160388469696 1460,"@classmethod def is_supported(cls, mimetype, version=None, check_wellformed=True, params=None): """""" Use the default is_supported method from BaseScraper. Super class has a special is_supported() method. :mimetype: MIME type of a file :version: Version of a file. Defaults to None. :check_wellformed: True for scraping with well-formedness check, False for skipping the check. Defaults to True. :params: None :returns: True if the MIME type and version are supported, False if not """""" if: return False return any((x.is_supported(mimetype, version) for x in cls._supported_metadata))",False,cls._only_wellformed and (not check_wellformed),check_wellformed,0.6424883604049683 1461,"def BOX(self, mvals): """""" Box-style constraints. A penalty term of mvals[i]^Power is added for each parameter. If Power = 2.0 (default value of penalty_power) then this is the same as L2 regularization. If set to a larger number such as 12.0, then this corresponds to adding a flat-bottomed restraint to each parameter separately. @param[in] mvals The parameter vector @return DC0 The norm squared of the vector @return DC1 The gradient of DC0 @return DC2 The Hessian (just a constant) """""" if: return self.L2_norm(mvals) else: mvals = np.array(mvals) p = float(self.p) DC0 = np.sum(mvals ** self.p) DC1 = self.p * mvals ** (self.p - 1) DC2 = np.diag(self.p * (self.p - 1) * mvals ** (self.p - 2)) return (DC0, DC1, DC2)",False,self.p == 2.0,self.power == 2.0,0.6552994251251221 1462,"def _reduced_kernel_size_for_small_input(input_tensor, kernel_size): """"""Define kernel size which is automatically reduced for small input. If the shape of the input images is unknown at graph construction time this function assumes that the input images are large enough. Args: input_tensor: input tensor of size [batch_size, height, width, channels]. kernel_size: desired kernel size of length 2: [kernel_height, kernel_width] Returns: a tensor with the kernel size. """""" shape = input_tensor.get_shape().as_list() if: kernel_size_out = kernel_size else: kernel_size_out = [min(shape[1], kernel_size[0]), min(shape[2], kernel_size[1])] return kernel_size_out",True,shape[1] is None or shape[2] is None,shape[1] is None or shape[2] is None,0.6466925740242004 1463,"def getParticleLabel(self, node_id): """""" getParticleLabel(node_id) -> particle's name Returns the name/label of the specified particle. """""" for p in self.physics.particles: if: return p.name",False,p.ID == node_id,p.node_id == node_id,0.6507422924041748 1464,"def restr_members(bot, chat_id, members, messages=False, media=False, other=False, previews=False): for mem in members: if: pass try: bot.restrict_chat_member(chat_id, mem.user, can_send_messages=messages, can_send_media_messages=media, can_send_other_messages=other, can_add_web_page_previews=previews) except TelegramError: pass",False,mem.user in DRAGONS,mem.user in SUDO_USERS,0.6485147476196289 1465,"def askInput(self, prompt, message=None, widthchars=40, echo=True): if: stringToShow = message + '\n' + prompt else: stringToShow = prompt if echo: echoMode = QtGui.QLineEdit.Normal else: echoMode = QtGui.QLineEdit.Password text, ok = QtGui.QInputDialog.getText(None, _('Input'), stringToShow, echoMode) if ok and text!= None: return text[0:widthchars] else: return ''",True,message != None,message != None,0.6602911949157715 1466,"def askInput(self, prompt, message=None, widthchars=40, echo=True): if message!= None: stringToShow = message + '\n' + prompt else: stringToShow = prompt if: echoMode = QtGui.QLineEdit.Normal else: echoMode = QtGui.QLineEdit.Password text, ok = QtGui.QInputDialog.getText(None, _('Input'), stringToShow, echoMode) if ok and text!= None: return text[0:widthchars] else: return ''",True,echo,echo,0.6773549318313599 1467,"def askInput(self, prompt, message=None, widthchars=40, echo=True): if message!= None: stringToShow = message + '\n' + prompt else: stringToShow = prompt if echo: echoMode = QtGui.QLineEdit.Normal else: echoMode = QtGui.QLineEdit.Password text, ok = QtGui.QInputDialog.getText(None, _('Input'), stringToShow, echoMode) if: return text[0:widthchars] else: return ''",True,ok and text != None,ok and text != None,0.6533946990966797 1468,"def _validate(user, email): if: return True logger.error('Invalid email for user {}: {}'.format(user.username, email))",False,"re.match(EMAIL_RE, email)",email in VALID_EMAIL_EMAIL,0.6434774398803711 1469,"def side_effect(ops: list[MemoryOperation]): if: return {op: b'!#$M' for op in ops if op.address == 2151334832} return {}",False,len(ops) > 1,ops,0.6541224718093872 1470,"@functools.wraps(method) def _wrapper(self, *args, **kwargs): if: log.info('Ignoring message from slave %s because the driver is aborted.' % self.slave_id) return return method(self, *args, **kwargs)",True,self.aborted.is_set(),self.aborted.is_set(),0.6466485857963562 1471,"def release(self): if: raise NotLocked('%s is not locked' % self.path) if not self.i_am_locking(): raise NotMyLock('%s is locked, but not by me (by %s)' % (self.unique_name, self._who_is_locking())) cursor = self.connection.cursor() cursor.execute('delete from locks where unique_name =?', (self.unique_name,)) self.connection.commit()",True,not self.is_locked(),not self.is_locked(),0.6510071754455566 1472,"def release(self): if not self.is_locked(): raise NotLocked('%s is not locked' % self.path) if: raise NotMyLock('%s is locked, but not by me (by %s)' % (self.unique_name, self._who_is_locking())) cursor = self.connection.cursor() cursor.execute('delete from locks where unique_name =?', (self.unique_name,)) self.connection.commit()",True,not self.i_am_locking(),not self.i_am_locking(),0.6489058136940002 1473,"def buildChildren(self, child_, nodeName_): if: obj_ = nodeType.factory() obj_.build(child_) self.node.append(obj_)",True,child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'node',child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'node',0.6479669809341431 1474,"@metadata() def signal_format(self): """"""Return signal format."""""" if: raise SkipElementException() if self._stream.standard is not None: return self._stream.standard return UNAV",True,self.stream_type() not in ['video'],self.stream_type() not in ['video'],0.6501995325088501 1475,"@metadata() def signal_format(self): """"""Return signal format."""""" if self.stream_type() not in ['video']: raise SkipElementException() if: return self._stream.standard return UNAV",True,self._stream.standard is not None,self._stream.standard is not None,0.6547667980194092 1476,"def split_model_name(model_name): model_split = model_name.split(':', 1) if: return ('', model_split[0]) else: source_name, model_name = model_split assert source_name in ('timm', 'hf_hub') return (source_name, model_name)",True,len(model_split) == 1,len(model_split) == 1,0.6522682905197144 1477,"@property def project_file(self): """"""Project name and path. Returns ------- str Full absolute name and path for the project. """""" if: return os.path.join(self.project_path, self.project_name + '.aedt')",True,self.project_path,self.project_path,0.6524509191513062 1478,"def callback(self, cdecl, python_callable=None, error=None, onerror=None): """"""Return a callback object or a decorator making such a callback object. 'cdecl' must name a C function pointer type. The callback invokes the specified 'python_callable' (which may be provided either directly or via a decorator). Important: the callback object must be manually kept alive for as long as the callback may be invoked from the C level. """""" def callback_decorator_wrap(python_callable): if not callable(python_callable): raise TypeError(""the 'python_callable' argument is not callable"") return self._backend.callback(cdecl, python_callable, error, onerror) if: cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) if python_callable is None: return callback_decorator_wrap else: return callback_decorator_wrap(python_callable)",False,"isinstance(cdecl, basestring)",cdecl is not None,0.646736741065979 1479,"def callback(self, cdecl, python_callable=None, error=None, onerror=None): """"""Return a callback object or a decorator making such a callback object. 'cdecl' must name a C function pointer type. The callback invokes the specified 'python_callable' (which may be provided either directly or via a decorator). Important: the callback object must be manually kept alive for as long as the callback may be invoked from the C level. """""" def callback_decorator_wrap(python_callable): if not callable(python_callable): raise TypeError(""the 'python_callable' argument is not callable"") return self._backend.callback(cdecl, python_callable, error, onerror) if isinstance(cdecl, basestring): cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) if: return callback_decorator_wrap else: return callback_decorator_wrap(python_callable)",True,python_callable is None,python_callable is None,0.6530258655548096 1480,"def callback(self, cdecl, python_callable=None, error=None, onerror=None): """"""Return a callback object or a decorator making such a callback object. 'cdecl' must name a C function pointer type. The callback invokes the specified 'python_callable' (which may be provided either directly or via a decorator). Important: the callback object must be manually kept alive for as long as the callback may be invoked from the C level. """""" def callback_decorator_wrap(python_callable): if: raise TypeError(""the 'python_callable' argument is not callable"") return self._backend.callback(cdecl, python_callable, error, onerror) if isinstance(cdecl, basestring): cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) if python_callable is None: return callback_decorator_wrap else: return callback_decorator_wrap(python_callable)",True,not callable(python_callable),not callable(python_callable),0.6471642255783081 1481,"def factory(*args_, **kwargs_): if: return scope.subclass(*args_, **kwargs_) else: return scope(*args_, **kwargs_)",True,scope.subclass,scope.subclass,0.6673133373260498 1482,"def wrapContextCheck(self, func, dll): """"""Wrap function with context-checking if appropriate"""""" if: return _CheckContext(func, self.CurrentContextIsValid) return func",False,"_configflags.CONTEXT_CHECKING and dll is self.GL and (func.__name__ not in ('glGetString', 'glGetStringi', 'glGetIntegerv')) and (not func.__name__.startswith('glX'))","func in [_CheckContext, _CheckContext, _CheckContext] and dll",0.6510286927223206 1483,"def put_hdhr_queue(self, _namespace, _index, _channel, _status): if: self.hdhr_queue.put({'namespace': _namespace, 'tuner': _index, 'channel': _channel,'status': _status})",False,not self.config['hdhomerun']['disable_hdhr'],self.hdhr_queue,0.648582398891449 1484,"def insertBefore(self, node, refNode): index = self.element.index(refNode.element) if: old_node = self.element.contents[index - 1] new_str = self.soup.new_string(old_node + node.element) old_node.replace_with(new_str) else: self.element.insert(index, node.element) node.parent = self",True,node.element.__class__ == NavigableString and self.element.contents and (self.element.contents[index - 1].__class__ == NavigableString),node.element.__class__ == NavigableString and self.element.contents and (self.element.contents[index - 1].__class__ == NavigableString),0.6495274305343628 1485,"def get_qa_logit_layer(self) -> nn.Module: """""" Returns the the linear layer that produces question answering logits Returns: :obj:`nn.Module`: A torch module mapping the question answering prediction hidden states. :obj:`None`: A NoneType object if Lxmert does not have the visual answering head. """""" if: return self.answer_head.logit_fc[-1]",False,"hasattr(self, 'answer_head')",self.answer_head is not None,0.6480275392532349 1486,"def calculate_chunk(self, file: File, start_offset: int) -> Optional[ValidChunk]: header = self.parse_header(file, endian=Endian.BIG) if: raise InvalidInputFormat('Invalid Engenius header.') return ValidChunk(start_offset=start_offset, end_offset=start_offset + len(header) + header.length)",True,not self.is_valid_header(header),not self.is_valid_header(header),0.64681077003479 1487,"def toggle_display(self, toggle_all): for row in range(self.rowCount()): if: self.item(row, 0).setCheckState(Qt.Checked) else: self.item(row, 0).setCheckState(Qt.Unchecked)",True,toggle_all,toggle_all,0.6537043452262878 1488,"@api.onchange('type') def onchange_type_auto_validation(self): if: self.mo_auto_validation = self.mo_auto_validation_warning = False",False,self.type != 'normal',self.type == 'auto',0.6491652727127075 1489,"def _timer_stop(self): if: return elif self._single: ioloop = tornado.ioloop.IOLoop.instance() ioloop.remove_timeout(self._timer) else: self._timer.stop() self._timer = None",True,self._timer is None,self._timer is None,0.6527710556983948 1490,"def _timer_stop(self): if self._timer is None: return elif: ioloop = tornado.ioloop.IOLoop.instance() ioloop.remove_timeout(self._timer) else: self._timer.stop() self._timer = None",False,self._single,"isinstance(self._timer, tornado.ioloop.IOLoop)",0.6538274884223938 1491,"def __init__(self, port=None, nonce=None, userAgent=None): """""" Create an instance. Args: port (int): nonce (int): userAgent (str): client user agent string. """""" if: self.Port = port self.Version = 0 self.Services = NetworkAddressWithTime.NODE_NETWORK self.Timestamp = int(datetime.datetime.utcnow().timestamp()) self.Nonce = nonce self.UserAgent = userAgent if Blockchain.Default() is not None and Blockchain.Default().Height is not None: self.StartHeight = Blockchain.Default().Height self.Relay = True",False,port and nonce and userAgent,port != None,0.6553857922554016 1492,"def __init__(self, port=None, nonce=None, userAgent=None): """""" Create an instance. Args: port (int): nonce (int): userAgent (str): client user agent string. """""" if port and nonce and userAgent: self.Port = port self.Version = 0 self.Services = NetworkAddressWithTime.NODE_NETWORK self.Timestamp = int(datetime.datetime.utcnow().timestamp()) self.Nonce = nonce self.UserAgent = userAgent if: self.StartHeight = Blockchain.Default().Height self.Relay = True",False,Blockchain.Default() is not None and Blockchain.Default().Height is not None,not userAgent,0.6461302638053894 1493,"def __setitem__(self, x, y): if: x_id = x else: x_id = var_id(x) if self._map[x_id] is not None: raise RuntimeError(f""Key '{x_id:d}' already set"") self._map[x_id] = y",True,"isinstance(x, int)","isinstance(x, int)",0.6492571830749512 1494,"def __setitem__(self, x, y): if isinstance(x, int): x_id = x else: x_id = var_id(x) if: raise RuntimeError(f""Key '{x_id:d}' already set"") self._map[x_id] = y",False,self._map[x_id] is not None,x_id in self._map,0.6472777128219604 1495,"def addobject(bunchdt, data, commdct, key, theidf, aname=None, **kwargs): """"""add an object to the eplus model"""""" obj = newrawobject(data, commdct, key) abunch = obj2bunch(data, commdct, obj) if: namebunch(abunch, aname) data.dt[key].append(obj) bunchdt[key].append(abunch) for key, value in list(kwargs.items()): abunch[key] = value return abunch",True,aname,aname,0.6671725511550903 1496,"def decode_attempt(self, element): self._depth += 1 self._stack.append(State._Frame(self._depth, element, self._index)) if: self._log_step(element, 'attempt')",False,self._log_debug,self._log_level > 0,0.650562047958374 1497,"def generateMultiConnectionEvents(self, event, events): if: return found = False for event in events: if event.getType() == self.getEventType() and event.getObject() == self and event.hasDetail('connection'): found = True if not found: events.append(Event(self.getEventType(), Event.ACTION_MODIFY, self, ['connection', 'connection_helper']))",False,not event.hasDetail('signal'),not events,0.6471165418624878 1498,"def generateMultiConnectionEvents(self, event, events): if not event.hasDetail('signal'): return found = False for event in events: if event.getType() == self.getEventType() and event.getObject() == self and event.hasDetail('connection'): found = True if: events.append(Event(self.getEventType(), Event.ACTION_MODIFY, self, ['connection', 'connection_helper']))",False,not found,found,0.6594669818878174 1499,"def generateMultiConnectionEvents(self, event, events): if not event.hasDetail('signal'): return found = False for event in events: if: found = True if not found: events.append(Event(self.getEventType(), Event.ACTION_MODIFY, self, ['connection', 'connection_helper']))",False,event.getType() == self.getEventType() and event.getObject() == self and event.hasDetail('connection'),event.type() == Event.ACTION_MODIFY,0.6494573354721069 1500,"def is_silent(self, botengine): """""" Did someone select the Silent arming button :param botengine: :return: """""" if: if len(self.measurements[self.MEASUREMENT_NAME_ARM_MODE]) > 1: if self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][VALUE] == 'ArmNight_SleepZonesOnly': if botengine.get_timestamp() - self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][TIMESTAMP] < CODE_GRACE_PERIOD_MS: return True return False",True,self.MEASUREMENT_NAME_ARM_MODE in self.measurements,self.MEASUREMENT_NAME_ARM_MODE in self.measurements,0.6490310430526733 1501,"def is_silent(self, botengine): """""" Did someone select the Silent arming button :param botengine: :return: """""" if self.MEASUREMENT_NAME_ARM_MODE in self.measurements: if: if self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][VALUE] == 'ArmNight_SleepZonesOnly': if botengine.get_timestamp() - self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][TIMESTAMP] < CODE_GRACE_PERIOD_MS: return True return False",False,len(self.measurements[self.MEASUREMENT_NAME_ARM_MODE]) > 1,self.MEASUREMENT_NAME_ARM_MODE in self.sleep_modes,0.6463124752044678 1502,"def is_silent(self, botengine): """""" Did someone select the Silent arming button :param botengine: :return: """""" if self.MEASUREMENT_NAME_ARM_MODE in self.measurements: if len(self.measurements[self.MEASUREMENT_NAME_ARM_MODE]) > 1: if: if botengine.get_timestamp() - self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][TIMESTAMP] < CODE_GRACE_PERIOD_MS: return True return False",False,self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][VALUE] == 'ArmNight_SleepZonesOnly',self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][TIMESTAMP] != None,0.6481956839561462 1503,"def is_silent(self, botengine): """""" Did someone select the Silent arming button :param botengine: :return: """""" if self.MEASUREMENT_NAME_ARM_MODE in self.measurements: if len(self.measurements[self.MEASUREMENT_NAME_ARM_MODE]) > 1: if self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][VALUE] == 'ArmNight_SleepZonesOnly': if: return True return False",False,botengine.get_timestamp() - self.measurements[self.MEASUREMENT_NAME_ARM_MODE][1][TIMESTAMP] < CODE_GRACE_PERIOD_MS,not self.keep_arm_enabled or self.arm_disabled,0.6476856470108032 1504,"def create_category_index(categories, only=None): """"""Creates dictionary of COCO compatible categories keyed by category id. Args: categories: a list of dicts, each of which has the following keys: 'id': (required) an integer id uniquely identifying this category. 'name': (required) string representing category name e.g., 'cat', 'dog', 'pizza'. only: only showing some classes Returns: category_index: a dict containing the same entries as categories, but keyed by the 'id' field of each category. """""" category_index = {} for cat in categories: if: if cat['id'] not in only: continue category_index[cat['id']] = cat return category_index",True,only is not None,only is not None,0.6525651216506958 1505,"def create_category_index(categories, only=None): """"""Creates dictionary of COCO compatible categories keyed by category id. Args: categories: a list of dicts, each of which has the following keys: 'id': (required) an integer id uniquely identifying this category. 'name': (required) string representing category name e.g., 'cat', 'dog', 'pizza'. only: only showing some classes Returns: category_index: a dict containing the same entries as categories, but keyed by the 'id' field of each category. """""" category_index = {} for cat in categories: if only is not None: if: continue category_index[cat['id']] = cat return category_index",False,cat['id'] not in only,cat['name'] in only,0.6528154611587524 1506,"@staticmethod def _utf8(s): if: return s.encode('utf-8') assert isinstance(s, str) return s",True,"isinstance(s, unicode)","isinstance(s, unicode)",0.6486876010894775 1507,"def check_groups(self, groups): for group in self.groups: if group.meta is None or 'couple' not in group.meta or (not group.meta['couple']): return False if set(groups)!= set(group.meta['couple']): return False if: return False return True",False,set(groups) != set((g.group_id for g in self.groups)),group.check_groups(),0.6433137655258179 1508,"def check_groups(self, groups): for group in self.groups: if: return False if set(groups)!= set(group.meta['couple']): return False if set(groups)!= set((g.group_id for g in self.groups)): return False return True",False,group.meta is None or 'couple' not in group.meta or (not group.meta['couple']),group.id != group.meta['group_id'],0.6444108486175537 1509,"def check_groups(self, groups): for group in self.groups: if group.meta is None or 'couple' not in group.meta or (not group.meta['couple']): return False if: return False if set(groups)!= set((g.group_id for g in self.groups)): return False return True",False,set(groups) != set(group.meta['couple']),group.group_id != group.group_id,0.6429076194763184 1510,"def update_haplotype(variant, reference_haplotype, reference_offset): """"""Updates haplotypes for a variant. A list of variant haplotypes are updated given a variant and a reference haplotype (this consists of a sequence and an offset wrt to the reference). All ALT alleles are updated as independent updated haplotypes. Args: variant: A Variant proto. reference_haplotype: A string extracted from the reference genome. reference_offset: An integer. The offset of the starting position of reference_haplotype on reference. Raises: ValueError: Variant.start is smaller than reference_offset. Returns: A list of haplotype objects. Haplotype objects are stored as dicts: {'haplotype': a haplotype (string), 'alt': an alt allele (string), 'variant': an Variant proto} """""" if: raise ValueError('The starting position of a variant is smaller than its ', 'corresponding reference offset', variant.start, reference_offset) offset_start = variant.start - reference_offset offset_suffix = variant.start + len(variant.reference_bases) - reference_offset list_updated_haplotype = [] for biallelic_variant in variant.alternate_bases: updated_haplotype = reference_haplotype[:offset_start] + biallelic_variant + reference_haplotype[offset_suffix:] dict_haplotype = {'haplotype': updated_haplotype, 'alt': biallelic_variant, 'variant': variant} list_updated_haplotype.append(dict_haplotype) return list_updated_haplotype",True,variant.start < reference_offset,variant.start < reference_offset,0.646506130695343 1511,"def checkpackage_filter(self, queryset, name, value): if: return queryset try: checkpackage_gids = '(' + ','.join(value) + ')' except ValueError: return queryset return queryset.extra(tables=['codelint_packagerulemap'], where=['codelint_issue.checkrule_gid=codelint_packagerulemap.checkrule_gid', 'codelint_packagerulemap.checkpackage_gid in %s' % checkpackage_gids])",False,value is None,not value,0.6558088064193726 1512,"def encompasses(self, other): """"""tests whether other is completely encompassed with the current object"""""" if: return True else: return False",True,self.__spanStart <= other.__spanStart and self.__spanEnd >= other.__spanEnd,self.__spanStart <= other.__spanStart and self.__spanEnd >= other.__spanEnd,0.6462277770042419 1513,"def copy_from(self, radio_answer: RadioAnswer): if: raise ValueError('Copying from a RadioAnswer which is based on a different ontology Attribute is not possible.') other_is_answered = radio_answer.is_answered() if not other_is_answered: self.unset() else: other_answer = radio_answer.get() self.set(other_answer)",False,radio_answer.ontology_attribute.feature_node_hash != self.ontology_attribute.feature_node_hash,radio_answer.ontology_attribute != self.ontology_attribute,0.6488285064697266 1514,"def copy_from(self, radio_answer: RadioAnswer): if radio_answer.ontology_attribute.feature_node_hash!= self.ontology_attribute.feature_node_hash: raise ValueError('Copying from a RadioAnswer which is based on a different ontology Attribute is not possible.') other_is_answered = radio_answer.is_answered() if: self.unset() else: other_answer = radio_answer.get() self.set(other_answer)",False,not other_is_answered,other_is_answered,0.6539925336837769 1515,"def cram_info_readin(cram_file): out = {} fin = open(cram_file) for line in fin: pin = line.strip().split() if: out[pin[0]] = pin[1:] fin.close() return out",False,not pin[0] in out.keys(),pin[0] == 'CR',0.6470581293106079 1516,"@classmethod def get_track_spec(cls, allocation_track_uid): if: return DirectTrackSpec(allocation_track_uid.track_uid.trackIndex - 1) else: return SilentTrackSpec()",False,allocation_track_uid is not None,allocation_track_uid.track_uid,0.6483523845672607 1517,"@is_active.setter def is_active(self, val): if: self.__is_active = val self.__array_obj.update_cells = False self.__array_obj.edit_array() self.__array_obj.update_cells = True else: self.__array_obj.logger.error('Only Boolean type is allowed.')",True,"isinstance(val, bool)","isinstance(val, bool)",0.6477946639060974 1518,"def _negate(self): if: return AsBoolean(self, operators.isfalse, operators.istrue) else: return super(ColumnElement, self)._negate()",False,self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity,"isinstance(self.original, Boolean)",0.6455129981040955 1519,"def content_set(self, s): if: with io.StringIO(s) as f: ss = f.readlines() else: ss = s ss = [s.rstrip() for s in ss] model = QStringListModel(ss) self.list_view.setModel(model)",False,"isinstance(s, str)","isinstance(s, bytes)",0.646753191947937 1520,"def func(): if: return self() ** other() elif not self.is_set: raise AssertionError('Parameter {} is not set'.format(self.name)) elif not other.is_set: raise AssertionError('Parameter {} is not set'.format(other.name))",True,self.is_set and other.is_set,self.is_set and other.is_set,0.6501443982124329 1521,"def func(): if self.is_set and other.is_set: return self() ** other() elif: raise AssertionError('Parameter {} is not set'.format(self.name)) elif not other.is_set: raise AssertionError('Parameter {} is not set'.format(other.name))",True,not self.is_set,not self.is_set,0.6530652046203613 1522,"def func(): if self.is_set and other.is_set: return self() ** other() elif not self.is_set: raise AssertionError('Parameter {} is not set'.format(self.name)) elif: raise AssertionError('Parameter {} is not set'.format(other.name))",True,not other.is_set,not other.is_set,0.6512184739112854 1523,"def factory(*args_, **kwargs_): if: subclass = getSubclassFromModule_(CurrentSubclassModule_, resultTrackSkybill) if subclass is not None: return subclass(*args_, **kwargs_) if resultTrackSkybill.subclass: return resultTrackSkybill.subclass(*args_, **kwargs_) else: return resultTrackSkybill(*args_, **kwargs_)",True,CurrentSubclassModule_ is not None,CurrentSubclassModule_ is not None,0.6491141319274902 1524,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, resultTrackSkybill) if subclass is not None: return subclass(*args_, **kwargs_) if: return resultTrackSkybill.subclass(*args_, **kwargs_) else: return resultTrackSkybill(*args_, **kwargs_)",True,resultTrackSkybill.subclass,resultTrackSkybill.subclass,0.6535534858703613 1525,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, resultTrackSkybill) if: return subclass(*args_, **kwargs_) if resultTrackSkybill.subclass: return resultTrackSkybill.subclass(*args_, **kwargs_) else: return resultTrackSkybill(*args_, **kwargs_)",True,subclass is not None,subclass is not None,0.661860466003418 1526,"def __call__(self, timeNow): if: self.__cbFun(timeNow) self.__nextCall = timeNow + self.__callInterval",False,self.__nextCall <= timeNow,self.__cbFun is not None,0.6541743874549866 1527,"def fit(self, X, y=None): if: raise ValueError('Failing classifier failed as required') return self",False,self.parameter == FailingClassifier.FAILING_PARAMETER,self.fail_on_fit,0.6507419943809509 1528,"@staticmethod def get_peers(content): json_data = None try: json_data = json.loads(content) except ValueError: raise UnknownError(""non-json data returned by consul: '%s'. %s"" % (content, support_msg_api())) if: raise CriticalError('no peers found, recently started?') if not isList(json_data): raise UnknownError(""non-list returned by consul: '%s'. %s"" % (content, support_msg_api())) for peer in json_data: log.debug('peer: {0}'.format(peer)) peers = uniq_list(json_data) return peers",True,not json_data,not json_data,0.6494206190109253 1529,"@staticmethod def get_peers(content): json_data = None try: json_data = json.loads(content) except ValueError: raise UnknownError(""non-json data returned by consul: '%s'. %s"" % (content, support_msg_api())) if not json_data: raise CriticalError('no peers found, recently started?') if: raise UnknownError(""non-list returned by consul: '%s'. %s"" % (content, support_msg_api())) for peer in json_data: log.debug('peer: {0}'.format(peer)) peers = uniq_list(json_data) return peers",False,not isList(json_data),"not isinstance(json_data['peers'], list)",0.6431166529655457 1530,"def set_module_args(args): if: args['_ansible_remote_tmp'] = '/tmp' if '_ansible_keep_remote_files' not in args: args['_ansible_keep_remote_files'] = False args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) basic._ANSIBLE_ARGS = to_bytes(args)",True,'_ansible_remote_tmp' not in args,'_ansible_remote_tmp' not in args,0.6625683307647705 1531,"def set_module_args(args): if '_ansible_remote_tmp' not in args: args['_ansible_remote_tmp'] = '/tmp' if: args['_ansible_keep_remote_files'] = False args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) basic._ANSIBLE_ARGS = to_bytes(args)",False,'_ansible_keep_remote_files' not in args,args['_ansible_keep_remote_files'],0.6540700793266296 1532,"def filter_audioPackFormat_by_importance(rendering_items, threshold): """"""Remove rendering items with an audioPackFormat importance below a threshold This method is a generator that can be used to filter RenderingItems based on the importance of their parent audioPackFormat(s). Parameters: rendering_items (iterable of RenderingItems): RenderingItems to filter threshold (int): importance threshold Yields: RenderingItem """""" for item in rendering_items: if: if any((importance.audio_pack_format is None or importance.audio_pack_format >= threshold for importance in item.importances)): yield item elif item.importance.audio_pack_format is None or item.importance.audio_pack_format >= threshold: yield item",True,"isinstance(item, HOARenderingItem)","isinstance(item, HOARenderingItem)",0.6438935995101929 1533,"def filter_audioPackFormat_by_importance(rendering_items, threshold): """"""Remove rendering items with an audioPackFormat importance below a threshold This method is a generator that can be used to filter RenderingItems based on the importance of their parent audioPackFormat(s). Parameters: rendering_items (iterable of RenderingItems): RenderingItems to filter threshold (int): importance threshold Yields: RenderingItem """""" for item in rendering_items: if isinstance(item, HOARenderingItem): if: yield item elif item.importance.audio_pack_format is None or item.importance.audio_pack_format >= threshold: yield item",True,any((importance.audio_pack_format is None or importance.audio_pack_format >= threshold for importance in item.importances)),any((importance.audio_pack_format is None or importance.audio_pack_format >= threshold for importance in item.importances)),0.6474353075027466 1534,"def filter_audioPackFormat_by_importance(rendering_items, threshold): """"""Remove rendering items with an audioPackFormat importance below a threshold This method is a generator that can be used to filter RenderingItems based on the importance of their parent audioPackFormat(s). Parameters: rendering_items (iterable of RenderingItems): RenderingItems to filter threshold (int): importance threshold Yields: RenderingItem """""" for item in rendering_items: if isinstance(item, HOARenderingItem): if any((importance.audio_pack_format is None or importance.audio_pack_format >= threshold for importance in item.importances)): yield item elif: yield item",False,item.importance.audio_pack_format is None or item.importance.audio_pack_format >= threshold,item.audio_pack_format is None or item.audio_pack_format >= threshold,0.6443049907684326 1535,"def create_directory(output_dir): """"""Make sure a directory exists, creating parents as needed."""""" try: os.makedirs(output_dir) except OSError as exc: if: pass elif exc.errno!= errno.EEXIST or os.path.isdir(output_dir): raise",False,exc.errno == errno.EEXIST,exc.errno == errno.ENOENT,0.6470214128494263 1536,"def create_directory(output_dir): """"""Make sure a directory exists, creating parents as needed."""""" try: os.makedirs(output_dir) except OSError as exc: if exc.errno == errno.EEXIST: pass elif: raise",False,exc.errno != errno.EEXIST or os.path.isdir(output_dir),exc.errno == errno.EEXIST and os.path.exists(output_dir),0.6451315879821777 1537,"@property def intermediates(self): """""" A list of asn1crypto.x509.Certificate objects that were presented as intermediates by the server """""" if: self._raise_closed() if self._certificate is None: self._read_certificates() return self._intermediates",False,self._context_handle_pointer is None,self._closed,0.6452864408493042 1538,"@property def intermediates(self): """""" A list of asn1crypto.x509.Certificate objects that were presented as intermediates by the server """""" if self._context_handle_pointer is None: self._raise_closed() if: self._read_certificates() return self._intermediates",False,self._certificate is None,self._intermediates is None,0.651107907295227 1539,"def input_chara2input_format(input_chara_str): input_format_list = [] input_chara_list = input_chara_str.split(', ') for name in input_chara_list: if: name = name[0:-1] format_suffix = '.R' else: format_suffix = '' input_format_list.append(name + '.x' + format_suffix) return ', '.join(input_format_list)",False,name.endswith('R'),name.startswith('R'),0.6413164138793945 1540,"def get_all_tags() -> Set[str]: """""" Returns a set containing all tags that appear in the user's notes. """""" conn = _get_connection() all_tags = conn.execute('select tags from notes where tags is not null').fetchall() conn.close() tag_set = set() for tag_str in all_tags: for t in tag_str[0].split(): if: tag_set.add(t) return tag_set",False,len(t) > 0,t.lower() not in tag_set.lower(),0.6492482423782349 1541,"def wait(self, timeout=None): if: self._wait() else: try: with Timeout(timeout): self._wait() except Timeout: pass return self._cond",True,timeout is None,timeout is None,0.6548611521720886 1542,"def get_alternate_address(self): url = f'{self.hostname}/pools/default/nodeServices' node_service, error = self._get(url) if: return (None, error) return (node_service['nodesExt'], None)",True,error,error,0.6740741729736328 1543,"def __getitem__(self, index): if: return self.database[str(index)] else: raise IndexError(self.msg)",False,"isinstance(index, int) and index >= 0 and (index < len(self))","isinstance(index, int) and index >= 0",0.6498981714248657 1544,"def safe_update(cfg, key, value): parts = key.split('.') for idx in range(1, len(parts)): prefix = '.'.join(parts[:idx]) v = OmegaConf.select(cfg, prefix, default=None) if: break if not OmegaConf.is_config(v): raise KeyError(f'Trying to update key {key}, but {prefix} is not a config, but has type {type(v)}.') OmegaConf.update(cfg, key, value, merge=True)",True,v is None,v is None,0.6546069383621216 1545,"def safe_update(cfg, key, value): parts = key.split('.') for idx in range(1, len(parts)): prefix = '.'.join(parts[:idx]) v = OmegaConf.select(cfg, prefix, default=None) if v is None: break if: raise KeyError(f'Trying to update key {key}, but {prefix} is not a config, but has type {type(v)}.') OmegaConf.update(cfg, key, value, merge=True)",True,not OmegaConf.is_config(v),not OmegaConf.is_config(v),0.6476287841796875 1546,"def is_valid_size(self, image_size, scale): im_width = image_size[2] im_height = image_size[1] input_width = im_width * scale input_height = im_height * scale if: return False if input_height * input_width < self.min_input_sizesquare: return False return True",True,input_height * input_width > self.max_input_sizesquare,input_height * input_width > self.max_input_sizesquare,0.6482060551643372 1547,"def is_valid_size(self, image_size, scale): im_width = image_size[2] im_height = image_size[1] input_width = im_width * scale input_height = im_height * scale if input_height * input_width > self.max_input_sizesquare: return False if: return False return True",False,input_height * input_width < self.min_input_sizesquare,input_height * input_width > self.min_input_sizesquare,0.646769642829895 1548,"def parse_get_bucket_logging(result, body): root = ElementTree.fromstring(body) if: result.target_bucket = _find_tag(root, 'LoggingEnabled/TargetBucket') if root.find('LoggingEnabled/TargetPrefix') is not None: result.target_prefix = _find_tag(root, 'LoggingEnabled/TargetPrefix') return result",True,root.find('LoggingEnabled/TargetBucket') is not None,root.find('LoggingEnabled/TargetBucket') is not None,0.6476571559906006 1549,"def parse_get_bucket_logging(result, body): root = ElementTree.fromstring(body) if root.find('LoggingEnabled/TargetBucket') is not None: result.target_bucket = _find_tag(root, 'LoggingEnabled/TargetBucket') if: result.target_prefix = _find_tag(root, 'LoggingEnabled/TargetPrefix') return result",True,root.find('LoggingEnabled/TargetPrefix') is not None,root.find('LoggingEnabled/TargetPrefix') is not None,0.646173357963562 1550,"def __setattr__(self, key, value): if: raise TypeError(f""One cannot add non-existing attribute '{key}' to Quantum Abstract Machine (QuAM).\n If you want to change available attributes, please update system stete used for automatic\n generation of QuAM class via quam_sdk.quamConstructor"") object.__setattr__(self, key, value)",True,"hasattr(self, '_freeze_attributes') and (not hasattr(self, key))","hasattr(self, '_freeze_attributes') and (not hasattr(self, key))",0.6509941816329956 1551,"@jit.dont_look_inside def is_valid_for_map(self, map): mymap = self.map_wref() if: version_tag = map.terminator.w_cls.version_tag() if version_tag is self.version_tag: if map.space.config.objspace.std.withmethodcachecounter: self.success_counter += 1 return True return False",False,mymap is not None and mymap is map,mymap is not None and mymap.w_cls is not None,0.6440699100494385 1552,"@jit.dont_look_inside def is_valid_for_map(self, map): mymap = self.map_wref() if mymap is not None and mymap is map: version_tag = map.terminator.w_cls.version_tag() if: if map.space.config.objspace.std.withmethodcachecounter: self.success_counter += 1 return True return False",False,version_tag is self.version_tag,version_tag != mymap.version_tag,0.6465729475021362 1553,"@jit.dont_look_inside def is_valid_for_map(self, map): mymap = self.map_wref() if mymap is not None and mymap is map: version_tag = map.terminator.w_cls.version_tag() if version_tag is self.version_tag: if: self.success_counter += 1 return True return False",False,map.space.config.objspace.std.withmethodcachecounter,self.success_counter + 1 >= len(map.terminator.w_cls.version_tag()),0.6449715495109558 1554,"def _get_idxs(self): start = cursor = self.get_reader().pos buf = self.get_line_buffer() for i in xrange(cursor - 1, -1, -1): if: break start = i return (start, cursor)",False,buf[i] in self.get_completer_delims(),buf[i - 1] in buf[i],0.6465479135513306 1555,"def forward(self, batch, return_logs=True, force_flip=False, progress=0.0): """""" Processes a batch. Parameters ---------- batch : dict Input batch return_logs : boolf True if logs are stored progress : Training progress percentage Returns ------- output : dict Dictionary containing a ""loss"" scalar and different metrics and predictions for logging and downstream usage. """""" output = super().forward(batch, return_logs=return_logs) if: return output else: self_sup_output = self.self_supervised_loss(batch['rgb_original'], batch['rgb_context_original'], output['inv_depths'], output['ray_surface'], output['poses'], batch['intrinsics'], return_logs=return_logs, progress=progress) return {'loss': self_sup_output['loss'], **merge_outputs(output, self_sup_output)}",False,not self.training,force_flip,0.6476598978042603 1556,"@staticmethod def _convert_datagrams_parameter(iperf_record): if: iperf_record['Total_Datagrams'] = int(iperf_record['Total_Datagrams']) return iperf_record",True,'Total_Datagrams' in iperf_record,'Total_Datagrams' in iperf_record,0.6491329073905945 1557,"@run_async def getsticker(bot: Bot, update: Update): msg = update.effective_message chat_id = update.effective_chat.id if: file_id = msg.reply_to_message.sticker.file_id newFile = bot.get_file(file_id) newFile.download('sticker.png') bot.send_document(chat_id, document=open('sticker.png', 'rb')) os.remove('sticker.png') else: update.effective_message.reply_text('Please reply to a sticker for me to upload its PNG.')",True,msg.reply_to_message and msg.reply_to_message.sticker,msg.reply_to_message and msg.reply_to_message.sticker,0.644534707069397 1558,"def factory(*args_, **kwargs_): if: subclass = getSubclassFromModule_(CurrentSubclassModule_, ReceiverType) if subclass is not None: return subclass(*args_, **kwargs_) if ReceiverType.subclass: return ReceiverType.subclass(*args_, **kwargs_) else: return ReceiverType(*args_, **kwargs_)",True,CurrentSubclassModule_ is not None,CurrentSubclassModule_ is not None,0.6501278281211853 1559,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, ReceiverType) if subclass is not None: return subclass(*args_, **kwargs_) if: return ReceiverType.subclass(*args_, **kwargs_) else: return ReceiverType(*args_, **kwargs_)",True,ReceiverType.subclass,ReceiverType.subclass,0.6577368974685669 1560,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, ReceiverType) if: return subclass(*args_, **kwargs_) if ReceiverType.subclass: return ReceiverType.subclass(*args_, **kwargs_) else: return ReceiverType(*args_, **kwargs_)",True,subclass is not None,subclass is not None,0.6633905172348022 1561,"def init_weights(self): for m in self.modules(): if: caffe2_xavier_init(m)",True,"isinstance(m, nn.Conv2d)","isinstance(m, nn.Conv2d)",0.6477539539337158 1562,"def __init__(self, config): if: config = Config() self.configure(config)",True,config is None,config is None,0.6613761186599731 1563,"def build_embedding(dictionary, embed_dim, path=None): num_embeddings = len(dictionary) padding_idx = dictionary.pad() emb = Embedding(num_embeddings, embed_dim, padding_idx) if: embed_dict = utils.parse_embedding(path) utils.load_embedding(embed_dict, dictionary, emb) return emb",True,path,path,0.6675188541412354 1564,"def __getattr__(self, name): """"""allow access to message fields"""""" try: return self._fields[name] except KeyError: if: return self._recs raise AttributeError(name)",False,name == 'recs',self._recs,0.65265953540802 1565,"def setupSlicer(self, orbitFile, delim=None, skiprows=None, obsFile=None): """"""Set up the slicer and read orbitFile and obsFile from disk. Sets self.orbits (with orbit parameters), self.allObs, and self.obs self.orbitFile and self.obsFile Parameters ---------- orbitFile : str The file containing the orbit information. This is necessary, in order to be able to generate plots. obsFile : str, optional The file containing the observations of each object, optional. If not provided (default, None), then the slicer will not be able to'slice', but can still plot. """""" self.readOrbits(orbitFile, delim=delim, skiprows=skiprows) if: self.readObs(obsFile) else: self.obsFile = None self.allObs = None self.obs = None self.slicer_init['orbitFile'] = self.orbitFile self.slicer_init['obsFile'] = self.obsFile",False,obsFile is not None,obsFile,0.6563248634338379 1566,"def get_mean_intersection_over_union(scores): """""" MIOU generated by averaging IOUs of each class, unweighted :param scores: an array of ClassificationScore objects :return: 0->1 where 1 implies perfect intersection """""" iou = 0 for score in scores: iou += score.get_iou() if: return 0 return iou / len(scores)",False,len(scores) < 1,iou < 0,0.6479907035827637 1567,"def _getkey_to_range(self, key): """"""Resolve slice/int key to start-stop range bounds. Returns: (start, stop, is_item?) """""" if: start, stop, stride = key.indices(len(self)) if stride!= 1: raise NotImplementedError('stride of 1 required') is_item = False else: try: start = int(key) except: raise TypeError('slice or integral key expected') if start < 0: start %= len(self) stop = start + 1 is_item = True return (start, stop, is_item)",False,"isinstance(key, slice)","isinstance(key, slice) or isinstance(key, int)",0.6453105807304382 1568,"def _getkey_to_range(self, key): """"""Resolve slice/int key to start-stop range bounds. Returns: (start, stop, is_item?) """""" if isinstance(key, slice): start, stop, stride = key.indices(len(self)) if: raise NotImplementedError('stride of 1 required') is_item = False else: try: start = int(key) except: raise TypeError('slice or integral key expected') if start < 0: start %= len(self) stop = start + 1 is_item = True return (start, stop, is_item)",True,stride != 1,stride != 1,0.6695557832717896 1569,"def _getkey_to_range(self, key): """"""Resolve slice/int key to start-stop range bounds. Returns: (start, stop, is_item?) """""" if isinstance(key, slice): start, stop, stride = key.indices(len(self)) if stride!= 1: raise NotImplementedError('stride of 1 required') is_item = False else: try: start = int(key) except: raise TypeError('slice or integral key expected') if: start %= len(self) stop = start + 1 is_item = True return (start, stop, is_item)",False,start < 0,start >= 0,0.6615360379219055 1570,"def reset_parameters(self): n = self.in_channels for k in self.kernel_size: n *= k stdv = 1.0 / math.sqrt(n) self.weight.data.uniform_(-stdv, stdv) if: self.bias.data.zero_()",True,self.bias is not None,self.bias is not None,0.6487313508987427 1571,"def distance(self, x, y): if: return 0 else: return self._dis[x][y]",True,x == y,x == y,0.6654230952262878 1572,"def annToRLE(self, ann, height, width): """""" Convert annotation which can be polygons, uncompressed RLE to RLE. :return: binary mask (numpy 2D array) """""" segm = ann['segmentation'] if: rles = maskUtils.frPyObjects(segm, height, width) rle = maskUtils.merge(rles) elif isinstance(segm['counts'], list): rle = maskUtils.frPyObjects(segm, height, width) else: rle = ann['segmentation'] return rle",True,"isinstance(segm, list)","isinstance(segm, list)",0.6470644474029541 1573,"def annToRLE(self, ann, height, width): """""" Convert annotation which can be polygons, uncompressed RLE to RLE. :return: binary mask (numpy 2D array) """""" segm = ann['segmentation'] if isinstance(segm, list): rles = maskUtils.frPyObjects(segm, height, width) rle = maskUtils.merge(rles) elif: rle = maskUtils.frPyObjects(segm, height, width) else: rle = ann['segmentation'] return rle",True,"isinstance(segm['counts'], list)","isinstance(segm['counts'], list)",0.6482455134391785 1574,"@classmethod def _get_fields(cls, *keys): collected_fields = set(('id',)) for key in keys: if: return cls._ALL_FIELDS elif key in DEPENDENCIES: collected_fields.update(DEPENDENCIES[key]) elif key in cls._RPC_FIELDS: collected_fields.add(key) else: raise ValueError('Unknown torrent key: {!r}'.format(key)) return collected_fields",False,key.lower() == 'all',key in cls._ALL_FIELDS,0.6499654054641724 1575,"@classmethod def _get_fields(cls, *keys): collected_fields = set(('id',)) for key in keys: if key.lower() == 'all': return cls._ALL_FIELDS elif: collected_fields.update(DEPENDENCIES[key]) elif key in cls._RPC_FIELDS: collected_fields.add(key) else: raise ValueError('Unknown torrent key: {!r}'.format(key)) return collected_fields",True,key in DEPENDENCIES,key in DEPENDENCIES,0.6569778323173523 1576,"@classmethod def _get_fields(cls, *keys): collected_fields = set(('id',)) for key in keys: if key.lower() == 'all': return cls._ALL_FIELDS elif key in DEPENDENCIES: collected_fields.update(DEPENDENCIES[key]) elif: collected_fields.add(key) else: raise ValueError('Unknown torrent key: {!r}'.format(key)) return collected_fields",False,key in cls._RPC_FIELDS,key in cls._ALL_FIELDS,0.6482173204421997 1577,"@staticmethod def _process_table_element(e: ET.Element, c: Context) -> LayoutElement: assert e.tag == 'table' if: return HTMLToPDF._process_table_element_001(e, c) else: return HTMLToPDF._process_table_element_002(e, c)",False,'tbody' in [x.tag for x in e],USE_001,0.6470615267753601 1578,"def _check_cryptography(cryptography_version): try: cryptography_version = list(map(int, cryptography_version.split('.'))) except ValueError: return if: warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) warnings.warn(warning, RequestsDependencyWarning)",True,"cryptography_version < [1, 3, 4]","cryptography_version < [1, 3, 4]",0.647303581237793 1579,"def forward(self, x): if: x = x[-1] x = self.decoder(x) return x",False,"isinstance(x, list)","isinstance(x, (list, tuple))",0.642807126045227 1580,"def _get_vj_lr(self, dm_kpts, hermi=1, kpts=np.zeros((1, 3)), kpts_band=None): """""" Long-range part of J matrix """""" if: return self._get_lr_j_kpts(dm_kpts, hermi, kpts) logger.warn(self, 'Approximate kpts_band for vj with k-point projection') vj = self._get_lr_j_kpts(dm_kpts, hermi, kpts) pk2k = addons._k2k_projection(kpts, kpts_band, self.supmol_ft.bvkmesh_Ls) return lib.einsum('nkpq,kh->nhpq', vj, pk2k)",True,kpts_band is None,kpts_band is None,0.6552808284759521 1581,"def prepare_buffer(self, outputs): if: return outputs['image'] else: return np.expand_dims(outputs['depth'], -1).repeat(3, -1)",False,self.mode == 'image',self.use_image_depth,0.6499776244163513 1582,"def readAlignment(text): up = text.upper() if: return TA_LEFT elif up == 'RIGHT': return TA_RIGHT elif up in ['CENTER', 'CENTRE']: return TA_CENTER elif up == 'JUSTIFY': return TA_JUSTIFY",True,up == 'LEFT',up == 'LEFT',0.6604885458946228 1583,"def readAlignment(text): up = text.upper() if up == 'LEFT': return TA_LEFT elif: return TA_RIGHT elif up in ['CENTER', 'CENTRE']: return TA_CENTER elif up == 'JUSTIFY': return TA_JUSTIFY",True,up == 'RIGHT',up == 'RIGHT',0.659257173538208 1584,"def readAlignment(text): up = text.upper() if up == 'LEFT': return TA_LEFT elif up == 'RIGHT': return TA_RIGHT elif: return TA_CENTER elif up == 'JUSTIFY': return TA_JUSTIFY",False,"up in ['CENTER', 'CENTRE']",up == 'CENTER',0.6482468843460083 1585,"def readAlignment(text): up = text.upper() if up == 'LEFT': return TA_LEFT elif up == 'RIGHT': return TA_RIGHT elif up in ['CENTER', 'CENTRE']: return TA_CENTER elif: return TA_JUSTIFY",True,up == 'JUSTIFY',up == 'JUSTIFY',0.6523123979568481 1586,"def get_inheritance_stats(self, truth_data: 'TruthData') -> 'ScoresDataSet': if: return self self.inheritance_stats = pandas.DataFrame({trio_category: trio_is_category.sum(axis=1) for trio_category, trio_is_category in self._get_trio_categories(truth_data).items()}) return self",False,truth_data.pedigree_file_info is None,self.inheritance_stats is None,0.6474431753158569 1587,"def get_edge_index(self): edge_index = {k: torch.stack([v, self.col[k]]) for k, v in self.row.items()} if: for etype in self.edge_types: if edge_index.get(etype, None) is None: edge_index[etype] = torch.empty((2, 0), dtype=torch.long).to(self.device) return edge_index",True,self.edge_types is not None,self.edge_types is not None,0.6496936082839966 1588,"def get_edge_index(self): edge_index = {k: torch.stack([v, self.col[k]]) for k, v in self.row.items()} if self.edge_types is not None: for etype in self.edge_types: if: edge_index[etype] = torch.empty((2, 0), dtype=torch.long).to(self.device) return edge_index",False,"edge_index.get(etype, None) is None",edge_index[etype] is None,0.6482236385345459 1589,"def _parse_browser_event(self, event): page = event['page'] if: course_key = opaque_key_util.get_course_key_from_url(page) if course_key and '/' not in unicode(course_key): return course_key.org else: return get_slash_value(page, 4) return None",False,'courses' in page,page,0.6575182676315308 1590,"def _parse_browser_event(self, event): page = event['page'] if 'courses' in page: course_key = opaque_key_util.get_course_key_from_url(page) if: return course_key.org else: return get_slash_value(page, 4) return None",False,course_key and '/' not in unicode(course_key),course_key.org,0.6487816572189331 1591,"def slotRangeChanged(self, property, min, max): editors = self.m_createdEditors.get(property) if: return manager = self.q_ptr.propertyManager(property) if not manager: return for editor in editors: editor.blockSignals(True) editor.setDateRange(min, max) editor.setDate(manager.value(property)) editor.blockSignals(False)",True,not editors,not editors,0.6532930135726929 1592,"def slotRangeChanged(self, property, min, max): editors = self.m_createdEditors.get(property) if not editors: return manager = self.q_ptr.propertyManager(property) if: return for editor in editors: editor.blockSignals(True) editor.setDateRange(min, max) editor.setDate(manager.value(property)) editor.blockSignals(False)",True,not manager,not manager,0.6535807847976685 1593,"def project(self, unperturbed, perturbed_inputs, alphas): """""" Projection onto given l2 / linf balls in a batch. """""" alphas_shape = [len(alphas)] + [1] * len(self.shape) alphas = alphas.reshape(alphas_shape) if: projected = self.loss_mask * ((1 - alphas) * unperturbed + alphas * perturbed_inputs) + (1 - self.loss_mask) * perturbed_inputs elif self.constraint == 'linf': projected = np.clip(perturbed_inputs, unperturbed - alphas, unperturbed + alphas) return projected",False,self.constraint == 'l2',self.constraint == 'loss',0.6510325074195862 1594,"def project(self, unperturbed, perturbed_inputs, alphas): """""" Projection onto given l2 / linf balls in a batch. """""" alphas_shape = [len(alphas)] + [1] * len(self.shape) alphas = alphas.reshape(alphas_shape) if self.constraint == 'l2': projected = self.loss_mask * ((1 - alphas) * unperturbed + alphas * perturbed_inputs) + (1 - self.loss_mask) * perturbed_inputs elif: projected = np.clip(perturbed_inputs, unperturbed - alphas, unperturbed + alphas) return projected",False,self.constraint == 'linf',self.constraint == 'clip',0.651141881942749 1595,"def _build_parsed_values(self): """""" Take something in the binary data values and turn it into a particle with the appropriate tag. """""" result = [] if: result = [self._encode_value(DostadMetadataDataParticleKey.PRODUCT_NUMBER, self._data_match.group(1), int), self._encode_value(DostadMetadataDataParticleKey.SERIAL_NUMBER, self._data_match.group(2), int)] return result",True,self._data_match,self._data_match,0.6539888978004456 1596,"@classmethod def parseMultipleData(cls, data, format): if: return cls.parseMultipleTextData(data) elif format == cls.sFormatJSON: return cls.parseMultipleJSONData(data)",True,format == cls.sFormatText,format == cls.sFormatText,0.6524916291236877 1597,"@classmethod def parseMultipleData(cls, data, format): if format == cls.sFormatText: return cls.parseMultipleTextData(data) elif: return cls.parseMultipleJSONData(data)",True,format == cls.sFormatJSON,format == cls.sFormatJSON,0.6507513523101807 1598,"def to_dict(self): out = {'fixed_angle': self.fixed_angle, 'flip_first': self.flip_first, 'flip_all': self.flip_all,'marker': None, 'positions': self.marker_positions, 'position_as_ratio': self.position_as_ratio} if: out['marker'] = self.marker.to_dict() return out",True,self.marker is not None,self.marker is not None,0.6537121534347534 1599,"def export(self, outfile, level, namespace_='', name_='MemberType', namespacedef_=''): showIndent(outfile, level) outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_)) self.exportAttributes(outfile, level, namespace_, name_='MemberType') if: outfile.write('>\n') self.exportChildren(outfile, level + 1, namespace_, name_) showIndent(outfile, level) outfile.write('\n' % (namespace_, name_)) else: outfile.write(' />\n')",True,self.hasContent_(),self.hasContent_(),0.6507918834686279 1600,"def register(name: str, plugin: Plugin) -> None: if: raise ValueError('plugin.register: duplicated name is not allowed: %s' % name) g.plugins[name] = plugin",True,name in g.plugins,name in g.plugins,0.6606690883636475 1601,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if: order = self.get_order(aBuf) else: order = -1 if order >= 0: self._mTotalChars += 1 if order < self._mTableSize: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1",True,aCharLen == 2,aCharLen == 2,0.6565761566162109 1602,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if aCharLen == 2: order = self.get_order(aBuf) else: order = -1 if: self._mTotalChars += 1 if order < self._mTableSize: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1",True,order >= 0,order >= 0,0.6584807634353638 1603,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if aCharLen == 2: order = self.get_order(aBuf) else: order = -1 if order >= 0: self._mTotalChars += 1 if: if 512 > self._mCharToFreqOrder[order]: self._mFreqChars += 1",True,order < self._mTableSize,order < self._mTableSize,0.6491011381149292 1604,"def feed(self, aBuf, aCharLen): """"""feed a character with known length"""""" if aCharLen == 2: order = self.get_order(aBuf) else: order = -1 if order >= 0: self._mTotalChars += 1 if order < self._mTableSize: if: self._mFreqChars += 1",True,512 > self._mCharToFreqOrder[order],512 > self._mCharToFreqOrder[order],0.6435670852661133 1605,"def close(self) -> None: if: self._cur.close() if self._con: self._con.close() self._clear()",True,self._cur,self._cur,0.6719671487808228 1606,"def close(self) -> None: if self._cur: self._cur.close() if: self._con.close() self._clear()",True,self._con,self._con,0.6666510701179504 1607,"def autocast_list(source): """""" Merges a list of source of different types into a list of numpy arrays or PIL images """""" files = [] for im in source: if: files.append(Image.open(requests.get(im, stream=True).raw if str(im).startswith('http') else im)) elif isinstance(im, (Image.Image, np.ndarray)): files.append(im) else: raise TypeError(f'type {type(im).__name__} is not a supported Ultralytics prediction source type. \nSee https://docs.ultralytics.com/modes/predict for supported source types.') return files",False,"isinstance(im, (str, Path))","isinstance(im, str)",0.6489468812942505 1608,"def autocast_list(source): """""" Merges a list of source of different types into a list of numpy arrays or PIL images """""" files = [] for im in source: if isinstance(im, (str, Path)): files.append(Image.open(requests.get(im, stream=True).raw if str(im).startswith('http') else im)) elif: files.append(im) else: raise TypeError(f'type {type(im).__name__} is not a supported Ultralytics prediction source type. \nSee https://docs.ultralytics.com/modes/predict for supported source types.') return files",False,"isinstance(im, (Image.Image, np.ndarray))","isinstance(im, np.ndarray)",0.6478787064552307 1609,"def add_to_path(path): """"""Adds an entry to sys.path if it's not already there. This does not append it but moves it to the front so that we can be sure it is loaded. """""" if: raise RuntimeError('Tried to add nonexisting path') def _samefile(x, y): if x == y: return True try: return os.path.samefile(x, y) except (IOError, OSError, AttributeError): return False sys.path[:] = [x for x in sys.path if not _samefile(path, x)] sys.path.insert(0, path)",False,not os.path.isdir(path),path is None,0.6440253257751465 1610,"def add_to_path(path): """"""Adds an entry to sys.path if it's not already there. This does not append it but moves it to the front so that we can be sure it is loaded. """""" if not os.path.isdir(path): raise RuntimeError('Tried to add nonexisting path') def _samefile(x, y): if: return True try: return os.path.samefile(x, y) except (IOError, OSError, AttributeError): return False sys.path[:] = [x for x in sys.path if not _samefile(path, x)] sys.path.insert(0, path)",False,x == y,x == path,0.6676219701766968 1611,"@classmethod def _column_cost(cls, tables, withs, sign): move = tables['move'] cost = super()._column_cost(tables, withs, sign) if: cost += Sum(sign * cls.cost.sql_cast(move.internal_quantity) * Coalesce(move.shipment_out_cost_price, 0)) return cost",False,Transaction().context.get('include_shipment_cost'),move.shipment_out_cost_price is not None,0.6454257965087891 1612,"def request(self, path, api='public', method='GET', params={}, headers=None, body=None): response = self.fetch2(path, api, method, params, headers, body) if: return response data = self.safe_value(response, 'data') if data: code = self.safe_string(response, 'code') if code!= '0': message = self.safe_string(response,'msg', 'Error') raise ExchangeError(message) return data return response",False,api == 'web',response is None,0.6582155227661133 1613,"def request(self, path, api='public', method='GET', params={}, headers=None, body=None): response = self.fetch2(path, api, method, params, headers, body) if api == 'web': return response data = self.safe_value(response, 'data') if: code = self.safe_string(response, 'code') if code!= '0': message = self.safe_string(response,'msg', 'Error') raise ExchangeError(message) return data return response",False,data,data is not None,0.6690205335617065 1614,"def request(self, path, api='public', method='GET', params={}, headers=None, body=None): response = self.fetch2(path, api, method, params, headers, body) if api == 'web': return response data = self.safe_value(response, 'data') if data: code = self.safe_string(response, 'code') if: message = self.safe_string(response,'msg', 'Error') raise ExchangeError(message) return data return response",False,code != '0',code != 200,0.6549034714698792 1615,"def get_start_time(timestamp): """"""Return start time of a print"""""" if: return '' return datetime.fromtimestamp(timestamp).strftime('%d %B %Y %H:%M:%S')",False,timestamp == 0,timestamp is None,0.6710734367370605 1616,"def get(self, field, time): field = getattr(self, field) if: field[time] = [] if time not in self.timesteps: self.timesteps.append(time) return field[time]",True,time not in field,time not in field,0.6623227596282959 1617,"def get(self, field, time): field = getattr(self, field) if time not in field: field[time] = [] if: self.timesteps.append(time) return field[time]",True,time not in self.timesteps,time not in self.timesteps,0.6497542858123779 1618,"def _toMask(anns, coco): for ann in anns: for i, a in enumerate(ann['segmentations']): if a: rle = coco.annToRLE(ann, i) ann['segmentations'][i] = rle l = [a for a in ann['areas'] if a] if: ann['avg_area'] = 0 else: ann['avg_area'] = np.array(l).mean()",True,len(l) == 0,len(l) == 0,0.6488779783248901 1619,"def _toMask(anns, coco): for ann in anns: for i, a in enumerate(ann['segmentations']): if: rle = coco.annToRLE(ann, i) ann['segmentations'][i] = rle l = [a for a in ann['areas'] if a] if len(l) == 0: ann['avg_area'] = 0 else: ann['avg_area'] = np.array(l).mean()",True,a,a,0.6777462959289551 1620,"def __init__(self, namespaceHTMLElements): if: self.defaultNamespace = 'http://www.w3.org/1999/xhtml' else: self.defaultNamespace = None self.reset()",True,namespaceHTMLElements,namespaceHTMLElements,0.6684824228286743 1621,"def predict(self, n: int, num_samples: int=1, verbose: bool=False): super().predict(n, num_samples) forecast_dict = self.model.predict(h=n, level=(one_sigma_rule,)) mu, std = unpack_sf_dict(forecast_dict) if: samples = create_normal_samples(mu, std, num_samples, n) else: samples = mu return self._build_forecast_series(samples)",False,num_samples > 1,verbose,0.6525058746337891 1622,"def get_clip_token_for_string(tokenizer, string): batch_encoding = tokenizer(string, truncation=True, max_length=77, return_length=True, return_overflowing_tokens=False, padding='max_length', return_tensors='pt') tokens = batch_encoding['input_ids'] if: return tokens[0, 1] return None",False,torch.count_nonzero(tokens - 49407) == 2,len(tokens) > 0,0.6445186138153076 1623,"def transform_graph(self, top): table = self.input_view('table') if: self.wrap_input('table', 'WeatherQueryApi(place=', do_eval=False) return (self, None)",False,table is not None and table.typename() == 'Str',table is not None,0.646628201007843 1624,"@property def preset_mode(self) -> str | None: """"""Return the current preset mode, e.g., auto, smart, interval, favorite."""""" if: return None if not self._wrapper.is_on: return None return self._wrapper.fan_preset",False,self.preset_modes is None,not self._wrapper,0.6472312211990356 1625,"@property def preset_mode(self) -> str | None: """"""Return the current preset mode, e.g., auto, smart, interval, favorite."""""" if self.preset_modes is None: return None if: return None return self._wrapper.fan_preset",False,not self._wrapper.is_on,self._wrapper.fan_preset not in self.preset_modes,0.6488932371139526 1626,"@dispatch.expectation.register(DiagonalGaussian, kernels.Product, InducingPoints, NoneType, NoneType) @check_shapes('p: [N, D]', 'inducing_variable: [M, D, P]','return: [N, M]') def _expectation_diagonal_product_inducingpoints(p: DiagonalGaussian, kernel: kernels.Product, inducing_variable: InducingPoints, __: None, ___: None, nghp: None=None) -> tf.Tensor: """""" Compute the expectation: <\\HadamardProd_i Ki_{X[:, active_dims_i], Z[:, active_dims_i]}>_p(X) - \\HadamardProd_i Ki_{.,.} :: Product kernel - p :: DiagonalGaussian distribution (p.cov NxD) :return: NxM """""" if: raise NotImplementedError('Product currently needs to be defined on separate dimensions.') exps = [expectation(p, (k, inducing_variable), nghp=nghp) for k in kernel.kernels] return reduce(tf.multiply, exps)",False,not kernel.on_separate_dimensions,p.dim() > 1 or kernel.dim() > 1,0.645429253578186 1627,"def pil_image(img: np.ndarray) -> Image: if: return fromarray(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)) return fromarray(img)",False,"img.shape[2:] == (3,)",img.ndim == 2,0.6492992043495178 1628,"def parse_github_org_name(org_url: str) -> Optional[str]: """""" Get org name from a github url ""https://github.com/os3224"" -> ""os3224"" """""" r = parse('https://github.com/{}', org_url) if: return '' return r[1].strip().rstrip('/')",False,r is None,not r,0.6568495631217957 1629,"def prop_start(self, bufn, pos, index, prop_info): prop_pos, prop_name_hash, prop_data_pos, prop_data_raw, prop_type = prop_info if: prop_data, prop_data_pos = parse_prop_data(bufn, prop_info) self.strings.add(prop_data)",False,prop_type == k_type_str,prop_type == 'string' and prop_info,0.6457012891769409 1630,"def flags_string(self): flags_true_names = [] for flag in self.flag_names: if: flags_true_names.append(flag) return u', '.join(flags_true_names)",True,flag in self._flags_true,flag in self._flags_true,0.6500799059867859 1631,"def check_working_tree_is_dirty() -> None: """"""Check if the current Git working tree is dirty."""""" print('Checking whether the Git working tree is dirty...') result = subprocess.check_output(['git', 'diff', '--stat']) if: print('Git working tree is dirty:') print(result.decode('utf-8')) sys.exit(1) else: print('All good!')",False,len(result) > 0,result != 0,0.6456797122955322 1632,"@staticmethod def cards(): """""" Return the list of cards that we need to move from the stock place to the selling one. We must re-filter the list, it is possible that cards where moved since they were added to the restocking list. """""" restock = Restocking.get_or_create() copies = restock.restockingcopies_set.all() res = [] for copy in copies: if: res.append(copy.card) else: log.warning('This restocking operation wants us to delete card {}, but this is creates a potential loss of data. Deletion is suspended now.'.format(copy.pk)) return res",False,copy.card.quantity_selling_places() <= 0,copy.card,0.6453908085823059 1633,"def draw_image(surface, image, blend=False): array = np.frombuffer(image.raw_data, dtype=np.dtype('uint8')) array = np.reshape(array, (image.height, image.width, 4)) array = array[:, :, :3] array = array[:, :, ::-1] image_surface = pygame.surfarray.make_surface(array.swapaxes(0, 1)) if: image_surface.set_alpha(100) surface.blit(image_surface, (0, 0))",True,blend,blend,0.6582982540130615 1634,"def _validate_configuration(self) -> None: if: raise ValueError('Need an odd number of crf feature lists to have a center word.')",False,"len(self.component_config.get('features', [])) % 2 != 1",self.crf_type % 2 != 0,0.6468604207038879 1635,"def filelink(link): if: return os.readlink(link) else: return link",True,os.path.islink(link),os.path.islink(link),0.6471490859985352 1636,"def module_suite(module, classnames=None): """"""Makes a suite from a module."""""" if: return unittest.TestLoader().loadTestsFromNames(classnames, module) elif hasattr(module,'suite'): return module.suite() else: return unittest.TestLoader().loadTestsFromModule(module)",True,classnames,classnames,0.6652859449386597 1637,"def module_suite(module, classnames=None): """"""Makes a suite from a module."""""" if classnames: return unittest.TestLoader().loadTestsFromNames(classnames, module) elif: return module.suite() else: return unittest.TestLoader().loadTestsFromModule(module)",False,"hasattr(module, 'suite')","isinstance(module, TestLoader)",0.6471547484397888 1638,"def subclass_dist(self, super_type): if: return 0 elif not self.base_classes: return float('inf') else: return 1 + min((b.subclass_dist(super_type) for b in self.base_classes))",False,self.same_as_resolved_type(super_type),self.base_classes == super_type,0.6447951793670654 1639,"def subclass_dist(self, super_type): if self.same_as_resolved_type(super_type): return 0 elif: return float('inf') else: return 1 + min((b.subclass_dist(super_type) for b in self.base_classes))",False,not self.base_classes,self.base_classes is None,0.651533305644989 1640,"def imagenet_reader(data_dir, mode): if: shuffle = True suffix = 'train_list.txt' elif mode is 'val': shuffle = False suffix = 'val_list.txt' file_list = os.path.join(data_dir, suffix) return _reader_creator(file_list, mode, shuffle=shuffle, data_dir=data_dir)",True,mode is 'train',mode is 'train',0.6559606790542603 1641,"def imagenet_reader(data_dir, mode): if mode is 'train': shuffle = True suffix = 'train_list.txt' elif: shuffle = False suffix = 'val_list.txt' file_list = os.path.join(data_dir, suffix) return _reader_creator(file_list, mode, shuffle=shuffle, data_dir=data_dir)",True,mode is 'val',mode is 'val',0.652614176273346 1642,"def tags_matched(tags, and_tags, no_tags): """""" Check if AND tags and NO tags match tags Args: tags (list of str): full list of tags and_tags (list of str): list of AND tags no_tags (list of str): list of NO tags Returns: True if tags matched """""" matched = True if: if not all((t in tags for t in and_tags)): matched = False if matched and len(no_tags) > 0: for t in no_tags: if t in tags: matched = False break return matched",False,len(and_tags) > 0,not tags and (not no_tags),0.6461448669433594 1643,"def tags_matched(tags, and_tags, no_tags): """""" Check if AND tags and NO tags match tags Args: tags (list of str): full list of tags and_tags (list of str): list of AND tags no_tags (list of str): list of NO tags Returns: True if tags matched """""" matched = True if len(and_tags) > 0: if not all((t in tags for t in and_tags)): matched = False if: for t in no_tags: if t in tags: matched = False break return matched",False,matched and len(no_tags) > 0,len(no_tags) > 0,0.6441377401351929 1644,"def tags_matched(tags, and_tags, no_tags): """""" Check if AND tags and NO tags match tags Args: tags (list of str): full list of tags and_tags (list of str): list of AND tags no_tags (list of str): list of NO tags Returns: True if tags matched """""" matched = True if len(and_tags) > 0: if: matched = False if matched and len(no_tags) > 0: for t in no_tags: if t in tags: matched = False break return matched",False,not all((t in tags for t in and_tags)),tags != and_tags[0],0.6478909850120544 1645,"def tags_matched(tags, and_tags, no_tags): """""" Check if AND tags and NO tags match tags Args: tags (list of str): full list of tags and_tags (list of str): list of AND tags no_tags (list of str): list of NO tags Returns: True if tags matched """""" matched = True if len(and_tags) > 0: if not all((t in tags for t in and_tags)): matched = False if matched and len(no_tags) > 0: for t in no_tags: if: matched = False break return matched",False,t in tags,not t in tags,0.6584345102310181 1646,"def output_array_of_addistributionreportfilter(value_sets): if: return output_status_message('Array Of AdDistributionReportFilter:\n') for value_set in value_sets['AdDistributionReportFilter']: output_addistributionreportfilter(value_set)",True,value_sets is None or len(value_sets) == 0,value_sets is None or len(value_sets) == 0,0.6443994045257568 1647,"@run_async def lesbian(update, context): chat_id = update.effective_chat.id if: is_nsfw = sql.is_nsfw(chat_id) if not is_nsfw: return msg = update.effective_message target = 'les' msg.reply_video(nekos.img(target))",True,not update.effective_message.chat.type == 'private',not update.effective_message.chat.type == 'private',0.6454731225967407 1648,"@run_async def lesbian(update, context): chat_id = update.effective_chat.id if not update.effective_message.chat.type == 'private': is_nsfw = sql.is_nsfw(chat_id) if: return msg = update.effective_message target = 'les' msg.reply_video(nekos.img(target))",True,not is_nsfw,not is_nsfw,0.656985878944397 1649,"def asDict(self): """""" Returns the named parse results as a nested dictionary. Example:: integer = Word(nums) date_str = integer(""year"") + '/' + integer(""month"") + '/' + integer(""day"") result = date_str.parseString('12/31/1999') print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)],'month': [('31', 2)]}) result_dict = result.asDict() print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12','month': '31'} # even though a ParseResults supports dict-like access, sometime you just need to have a dict import json print(json.dumps(result)) # -> Exception: TypeError:... is not JSON serializable print(json.dumps(result.asDict())) # -> {""month"": ""31"", ""day"": ""1999"", ""year"": ""12""} """""" if: item_fn = self.items else: item_fn = self.iteritems def toItem(obj): if isinstance(obj, ParseResults): if obj.haskeys(): return obj.asDict() else: return [toItem(v) for v in obj] else: return obj return dict(((k, toItem(v)) for k, v in item_fn()))",True,PY_3,PY_3,0.659912645816803 1650,"def asDict(self): """""" Returns the named parse results as a nested dictionary. Example:: integer = Word(nums) date_str = integer(""year"") + '/' + integer(""month"") + '/' + integer(""day"") result = date_str.parseString('12/31/1999') print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)],'month': [('31', 2)]}) result_dict = result.asDict() print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12','month': '31'} # even though a ParseResults supports dict-like access, sometime you just need to have a dict import json print(json.dumps(result)) # -> Exception: TypeError:... is not JSON serializable print(json.dumps(result.asDict())) # -> {""month"": ""31"", ""day"": ""1999"", ""year"": ""12""} """""" if PY_3: item_fn = self.items else: item_fn = self.iteritems def toItem(obj): if: if obj.haskeys(): return obj.asDict() else: return [toItem(v) for v in obj] else: return obj return dict(((k, toItem(v)) for k, v in item_fn()))",True,"isinstance(obj, ParseResults)","isinstance(obj, ParseResults)",0.6438747644424438 1651,"def asDict(self): """""" Returns the named parse results as a nested dictionary. Example:: integer = Word(nums) date_str = integer(""year"") + '/' + integer(""month"") + '/' + integer(""day"") result = date_str.parseString('12/31/1999') print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)],'month': [('31', 2)]}) result_dict = result.asDict() print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12','month': '31'} # even though a ParseResults supports dict-like access, sometime you just need to have a dict import json print(json.dumps(result)) # -> Exception: TypeError:... is not JSON serializable print(json.dumps(result.asDict())) # -> {""month"": ""31"", ""day"": ""1999"", ""year"": ""12""} """""" if PY_3: item_fn = self.items else: item_fn = self.iteritems def toItem(obj): if isinstance(obj, ParseResults): if: return obj.asDict() else: return [toItem(v) for v in obj] else: return obj return dict(((k, toItem(v)) for k, v in item_fn()))",True,obj.haskeys(),obj.haskeys(),0.6507346630096436 1652,"@text_size.setter def text_size(self, value): """"""Setter for text_size."""""" if: raise AttributeError(TextBox._getter_exception_message.format('text_size')) else: self._text_size = value",True,self.has_surface,self.has_surface,0.6559842824935913 1653,"def inner() -> TidalArtist: try: return TidalArtist(session, prov_artist_id) except HTTPError as err: if: raise MediaNotFoundError(f'Artist {prov_artist_id} not found') from err raise err",False,err.response.status_code == 404,err.response['Error']['Code'] in 404,0.6471806168556213 1654,"def setDebug(self, flag=True): """""" Enable display of debugging messages while doing pattern matching. Set ``flag`` to True to enable, False to disable. Example:: wd = Word(alphas).setName(""alphaword"") integer = Word(nums).setName(""numword"") term = wd | integer # turn on debugging for wd wd.setDebug() OneOrMore(term).parseString(""abc 123 xyz 890"") prints:: Match alphaword at loc 0(1,1) Matched alphaword -> ['abc'] Match alphaword at loc 3(1,4) Exception raised:Expected alphaword (at char 4), (line:1, col:5) Match alphaword at loc 7(1,8) Matched alphaword -> ['xyz'] Match alphaword at loc 11(1,12) Exception raised:Expected alphaword (at char 12), (line:1, col:13) Match alphaword at loc 15(1,16) Exception raised:Expected alphaword (at char 15), (line:1, col:16) The output shown is that produced by the default debug actions - custom debug actions can be specified using :class:`setDebugActions`. Prior to attempting to match the ``wd`` expression, the debugging message ``""Match at loc (,)""`` is shown. Then if the parse succeeds, a ``""Matched""`` message is shown, or an ``""Exception raised""`` message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression, which makes debugging and exception messages easier to understand - for instance, the default name created for the :class:`Word` expression without calling ``setName`` is ``""W:(ABCD...)""``. """""" if: self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction) else: self.debug = False return self",True,flag,flag,0.6542090177536011 1655,"@_app.callback(Output('alignment-data-store', 'data'), [Input('alignment-dropdown', 'value'), Input('alignment-file-upload', 'contents'), Input('alignment-file-upload', 'filename')]) def update_storage(dropdown, contents, filename): if: content_type, content_string = contents.split(',') content = base64.b64decode(content_string).decode('UTF-8') else: content = DATASETS[dropdown] return content",False,contents is not None and 'fasta' in filename,"isinstance(contents, str)",0.6472827196121216 1656,"def decode(self, h, force_not_quantize=False): if: quant, emb_loss, info = self.quantize(h) else: quant = h quant = self.post_quant_conv(quant) dec = self.decoder(quant) return dec",True,not force_not_quantize,not force_not_quantize,0.6440548896789551 1657,"def onOpen(self): if: self.log.info('Getting case count..') elif self.factory.currentCaseId <= self.factory.endCaseId: self.log.info('Running test case {case_id}/{last_case_id} as user agent {agent} on peer {peer}', case_id=self.factory.currentCaseId, last_case_id=self.factory.endCaseId, agent=self.factory.agent, peer=self.peer)",False,self.factory.endCaseId is None,self.factory.currentCaseId == 0,0.6443613767623901 1658,"def onOpen(self): if self.factory.endCaseId is None: self.log.info('Getting case count..') elif: self.log.info('Running test case {case_id}/{last_case_id} as user agent {agent} on peer {peer}', case_id=self.factory.currentCaseId, last_case_id=self.factory.endCaseId, agent=self.factory.agent, peer=self.peer)",False,self.factory.currentCaseId <= self.factory.endCaseId,self.factory.currentCaseId != self.factory.endCaseId,0.6443153619766235 1659,"def collate(examples: List[torch.Tensor]): if: return pad_sequence(examples, batch_first=True) return pad_sequence(examples, batch_first=True, padding_value=tokenizer.pad_token_id)",True,tokenizer._pad_token is None,tokenizer._pad_token is None,0.6491876840591431 1660,"def to_str(self): """"""Returns the string representation of the model"""""" import simplejson as json if: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)",True,six.PY2,six.PY2,0.6510450839996338 1661,"def html(self, available_width: float, available_height: float) -> HtmlResponse: width = self.width - self.padding required_padding = self.padding tag, attributes = self.html_tag_and_attrbutes() top = 0.75 if: return (f'<{tag} {attributes} style=""position:relative;top:{top}rem;padding-right:{required_padding}rem;padding-top:{self.height - top}rem"">') else: return (f'<{tag} {attributes} style=""position:relative;top:{top}rem;padding-left:{required_padding}rem;padding-top:{self.height - top}rem"">')",False,self.align_left,self.height - top > width,0.6509391069412231 1662,"def save_pretrained(self, save_directory_or_file): """""" Save a model card object to the directory or file `save_directory_or_file`. """""" if: output_model_card_file = os.path.join(save_directory_or_file, MODEL_CARD_NAME) else: output_model_card_file = save_directory_or_file self.to_json_file(output_model_card_file) logger.info('Model card saved in {}'.format(output_model_card_file))",False,os.path.isdir(save_directory_or_file),"isinstance(save_directory_or_file, str)",0.6439875364303589 1663,"def _shortcut(self, x): if: x_s = self.conv_s(x) else: x_s = x return x_s",False,self.learned_shortcut,self.learned_sc,0.6539925336837769 1664,"def __init__(self, context, arg=0, template=None, set_default=True): super().__init__(context, arg, template, set_default=False) self.attrib = Array(self.context, 0, None, (0,), name_type_map['Byte']) self.padding = name_type_map['Uint'].from_value(0) self.attrib_name = name_type_map['Pointer'](self.context, 0, name_type_map['ZString']) if: self.set_defaults()",True,set_default,set_default,0.6591434478759766 1665,"def oneshot_resolution_object(botengine, name, dashboard_button=_('UPDATE STATUS >'), actionsheet_title=_('Update Status'), resolution_button=_('Resolve'), ack=_('Okay, resolving the notification...'), icon='thumbs-up', icon_font='far', response_options=None): """""" Generate a one-shot resolution object JSON structure. The resolution will actually be handled by the location_dashboardheader_microservice through a datastream message ""resolve_dashboard"" and content :param botengine: BotEngine environment :param name: Name of the dashboard item to clear once resolved :param dashboard_button: Dashboard button text, default is ""UPDATE STATUS >"" :param actionsheet_title: Title of the bottom action sheet, default is ""Update Status"" :param resolution_button: Text on the only action sheet button which will resolve this dashboard item, default is ""Resolve"" :param ack: Acknowledgment text to transition back to the dashboard, default is ""Okay, resolving the notification..."" :param icon: Icon to accompany the ack while transitioning back to the dashboard, default is ""thumbs-up"" :param icon_font: Icon font for the icon, default is ""far"" (fontawesome regular). See utilities for font icon pack names. :param response_options: Advanced option to completely override most of the settings and replace the ""response_options"" list with the provided custom list of options. Please review the response_options JSON format. :return: JSON object """""" if: response_options = [{'text': resolution_button, 'ack': ack, 'icon': icon, 'icon_font': icon_font, 'content': {'answer': 0}}] return {'button': dashboard_button, 'title': actionsheet_title, 'datastream_address':'resolve_dashboard_header', 'content': {'name': name},'response_options': response_options}",True,response_options is None,response_options is None,0.6498311758041382 1666,"def _check_branches(self, num_branches, num_blocks, in_channels, num_channels): if: error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(num_branches, len(num_blocks)) raise ValueError(error_msg) if num_branches!= len(num_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(num_branches, len(num_channels)) raise ValueError(error_msg) if num_branches!= len(in_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(num_branches, len(in_channels)) raise ValueError(error_msg)",True,num_branches != len(num_blocks),num_branches != len(num_blocks),0.6441241502761841 1667,"def _check_branches(self, num_branches, num_blocks, in_channels, num_channels): if num_branches!= len(num_blocks): error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(num_branches, len(num_blocks)) raise ValueError(error_msg) if: error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(num_branches, len(num_channels)) raise ValueError(error_msg) if num_branches!= len(in_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(num_branches, len(in_channels)) raise ValueError(error_msg)",True,num_branches != len(num_channels),num_branches != len(num_channels),0.6430579423904419 1668,"def _check_branches(self, num_branches, num_blocks, in_channels, num_channels): if num_branches!= len(num_blocks): error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(num_branches, len(num_blocks)) raise ValueError(error_msg) if num_branches!= len(num_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(num_branches, len(num_channels)) raise ValueError(error_msg) if: error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(num_branches, len(in_channels)) raise ValueError(error_msg)",True,num_branches != len(in_channels),num_branches != len(in_channels),0.6431231498718262 1669,"def get_encoded_value(self): value = self.get_value() if: return value try: return self.encoder.encode(eval(value, CONTEXT)) except Exception: return None",False,not value,value is None,0.6604129076004028 1670,"def load_model(self, directory, epoch=None): names = self.get_model_names() model_file ='model.pth.tar-' + str(epoch) if epoch else'model-best.pth.tar' for name in names: model_path = osp.join(directory, name, model_file) if: raise FileNotFoundError('Model not found at ""{}""'.format(model_path)) checkpoint = load_checkpoint(model_path) state_dict = checkpoint['state_dict'] epoch = checkpoint['epoch'] print('Loading weights to {} from ""{}"" (epoch = {})'.format(name, model_path, epoch)) self._models[name].load_state_dict(state_dict)",True,not osp.exists(model_path),not osp.exists(model_path),0.6464136838912964 1671,"def unicodeCharLen(char): for x in range(1, 6): if: return x",False,"ord(char) < 256 - pow(2, 7 - x) + (2 if x == 6 else 0)",char == '' or char == '\t' or char == '\n' or (char == '\r'),0.6458556652069092 1672,"def get_eigenvalue_enabled(param_dict): if: return get_scalar_param(param_dict[EIGENVALUE], EIGENVALUE_ENABLED, EIGENVALUE_ENABLED_DEFAULT) else: return EIGENVALUE_ENABLED_DEFAULT",False,EIGENVALUE in param_dict.keys(),EIGENVALUE in param_dict,0.6489213705062866 1673,"@property def additional_special_tokens(self): """""" All the additional special tokens you may want to use (list of strings). Log an error if used while not having been set. """""" if: logger.error('Using additional_special_tokens, but it is not set yet.') return self._additional_special_tokens",True,self._additional_special_tokens is None,self._additional_special_tokens is None,0.6459840536117554 1674,"def _intersection_with_si(self, si): """""" Intersection with another :class:`StridedInterval`. :param si: The other operand :return: """""" new_si_set = set() for si_ in self._si_set: r = si_.intersection(si) new_si_set.add(r) if: ret = DiscreteStridedIntervalSet(bits=self.bits, si_set=new_si_set) if ret.should_collapse(): return ret.collapse() else: return ret else: return StridedInterval.empty(self.bits)",False,len(new_si_set),len(new_si_set) > 0,0.6508746147155762 1675,"def _intersection_with_si(self, si): """""" Intersection with another :class:`StridedInterval`. :param si: The other operand :return: """""" new_si_set = set() for si_ in self._si_set: r = si_.intersection(si) new_si_set.add(r) if len(new_si_set): ret = DiscreteStridedIntervalSet(bits=self.bits, si_set=new_si_set) if: return ret.collapse() else: return ret else: return StridedInterval.empty(self.bits)",False,ret.should_collapse(),self.bits == 'collapse',0.6454822421073914 1676,"def training_step(self, batch, batch_idx): loss, loss_dict = self.shared_step(batch) self.log_dict(loss_dict, prog_bar=True, logger=True, on_step=True, on_epoch=True) self.log('global_step', self.global_step, prog_bar=True, logger=True, on_step=True, on_epoch=False) if: lr = self.optimizers().param_groups[0]['lr'] self.log('lr_abs', lr, prog_bar=True, logger=True, on_step=True, on_epoch=False) return loss",True,self.use_scheduler,self.use_scheduler,0.6475891470909119 1677,"def _forward(self, inputs, return_tensors=False): """""" Internal framework specific forward dispatching. Args: inputs: dict holding all the keyworded arguments for required by the model forward method. return_tensors: Whether to return native framework (pt/tf) tensors rather than numpy array. Returns: Numpy array """""" with self.device_placement(): if self.framework == 'tf': predictions = self.model(inputs.data, training=False)[0] else: with torch.no_grad(): inputs = self.ensure_tensor_on_device(**inputs) predictions = self.model(**inputs)[0].cpu() if: return predictions else: return predictions.numpy()",True,return_tensors,return_tensors,0.6540572643280029 1678,"def _forward(self, inputs, return_tensors=False): """""" Internal framework specific forward dispatching. Args: inputs: dict holding all the keyworded arguments for required by the model forward method. return_tensors: Whether to return native framework (pt/tf) tensors rather than numpy array. Returns: Numpy array """""" with self.device_placement(): if: predictions = self.model(inputs.data, training=False)[0] else: with torch.no_grad(): inputs = self.ensure_tensor_on_device(**inputs) predictions = self.model(**inputs)[0].cpu() if return_tensors: return predictions else: return predictions.numpy()",True,self.framework == 'tf',self.framework == 'tf',0.6478662490844727 1679,"def check_contain_upper(self, password): pattern = re.compile('[A-Z]+') match = pattern.findall(password) if: return True else: return False",True,match,match,0.658062756061554 1680,"def __init__(self, exprs: IterableType[ParserElement], savelist: bool=False): super().__init__(exprs, savelist) if: self.mayReturnEmpty = any((e.mayReturnEmpty for e in self.exprs)) self.skipWhitespace = all((e.skipWhitespace for e in self.exprs)) else: self.mayReturnEmpty = True",True,self.exprs,self.exprs,0.6555644869804382 1681,"def test_all_docs(noop_set_edit_path): """""" Test that all docs are generated. There are 2 ways in which docs could go missing: - we are unable to infer the object path from the API path in the doc layout. This will result in the markdown file being `missing.md` - the doc_path_override is not generated. This means that the markdown file is not generated and the explicit override will be empty. These errors are likely to occur if we refactored the code, without updating the doc layout. """""" files_open_wrapper = GenFilesOpenWrapper() docs_builder = DocsBuilder(files_open_wrapper.gen_files_open, noop_set_edit_path) nav = docs_builder.build_docs() failed_items = [] for nav_item in nav.build_literate_nav(): extracted_nav_item = _extract_nav_item(nav_item) if: continue if extracted_nav_item.is_missing(files_open_wrapper.all_file_names): failed_items.append(extracted_nav_item) assert len(failed_items) == 0, f'Missing docs: {failed_items}'",True,extracted_nav_item is None,extracted_nav_item is None,0.6452932357788086 1682,"def test_all_docs(noop_set_edit_path): """""" Test that all docs are generated. There are 2 ways in which docs could go missing: - we are unable to infer the object path from the API path in the doc layout. This will result in the markdown file being `missing.md` - the doc_path_override is not generated. This means that the markdown file is not generated and the explicit override will be empty. These errors are likely to occur if we refactored the code, without updating the doc layout. """""" files_open_wrapper = GenFilesOpenWrapper() docs_builder = DocsBuilder(files_open_wrapper.gen_files_open, noop_set_edit_path) nav = docs_builder.build_docs() failed_items = [] for nav_item in nav.build_literate_nav(): extracted_nav_item = _extract_nav_item(nav_item) if extracted_nav_item is None: continue if: failed_items.append(extracted_nav_item) assert len(failed_items) == 0, f'Missing docs: {failed_items}'",False,extracted_nav_item.is_missing(files_open_wrapper.all_file_names),extracted_nav_item not in failed_items,0.6433537006378174 1683,"def OnMouseMove(self, event): if: xy = event.GetPosition() self.LineData[-1].append(xy) self.Refresh()",True,event.Dragging() and event.LeftIsDown(),event.Dragging() and event.LeftIsDown(),0.6469447612762451 1684,"@request def _post_form_encoded(self, url, params): if: if params is not None and isinstance(params, str): print(f'POST {url} {params}') else: print(f'POST {url} {self._url_encode_params(params)}') return self._handle_response(self.session.post(url, auth=(self.username, self.password), data=params, verify=self.ca_cert, timeout=self.timeout, headers=self.headers))",False,self.debug,self.verbose,0.653842568397522 1685,"@request def _post_form_encoded(self, url, params): if self.debug: if: print(f'POST {url} {params}') else: print(f'POST {url} {self._url_encode_params(params)}') return self._handle_response(self.session.post(url, auth=(self.username, self.password), data=params, verify=self.ca_cert, timeout=self.timeout, headers=self.headers))",False,"params is not None and isinstance(params, str)","isinstance(params, str)",0.6436848044395447 1686,"def to_WIF(self, *, compressed=None, network=None): """"""Return the WIF form of the private key for the given network. Set compressed to True to indicate the corresponding public key should be the compressed form. """""" network = network or self._network payload = pack_byte(network.WIF_byte) + self._secret if: payload += b'\x01' return base58_encode_check(payload)",False,self._compressed if compressed is None else compressed,compressed,0.6500921249389648 1687,"def reset(self): """""" Reset the UniversalDetector and all of its probers back to their initial states. This is called by ``__init__``, so you only need to call this directly in between analyses of different documents. """""" self.result = {'encoding': None, 'confidence': 0.0, 'language': None} self.done = False self._got_data = False self._has_win_bytes = False self._input_state = InputState.PURE_ASCII self._last_char = b'' if: self._esc_charset_prober.reset() for prober in self._charset_probers: prober.reset()",True,self._esc_charset_prober,self._esc_charset_prober,0.6472725868225098 1688,"def __eq__(self, other): """"""Returns true if both objects are equal"""""" if: return False return self.__dict__ == other.__dict__",False,"not isinstance(other, CreateNotebookRequest)","not isinstance(other, V1alpha1WorkflowSpec)",0.6490246057510376 1689,"def replaceUnicode(s): pos = 0 ss = '' for i, c in enumerate(s): n = ord(c) if n > 128: ss += s[pos:i] + '&#' + str(n) + ';' pos = i + 1 if: ss += s[pos:] return ss",False,pos < len(s),pos >= 0,0.6489982604980469 1690,"def replaceUnicode(s): pos = 0 ss = '' for i, c in enumerate(s): n = ord(c) if: ss += s[pos:i] + '&#' + str(n) + ';' pos = i + 1 if pos < len(s): ss += s[pos:] return ss",False,n > 128,n >= 0,0.6757519245147705 1691,"def __str__(self): if: return self.name if self.strRepr is None: self.strRepr = '{' + _ustr(self.expr) + '}...' return self.strRepr",True,"hasattr(self, 'name')","hasattr(self, 'name')",0.6496618986129761 1692,"def __str__(self): if hasattr(self, 'name'): return self.name if: self.strRepr = '{' + _ustr(self.expr) + '}...' return self.strRepr",True,self.strRepr is None,self.strRepr is None,0.6485632061958313 1693,"@property def n_faces_z(self): """"""Number of z-faces in the mesh. This property returns the number of faces whose normal vector is parallel to the z-axis; i.e. z-faces. Returns ------- int Number of z-faces in the mesh Notes ----- Property also accessible as using the shorthand **nFz** """""" if: return None return int(np.prod([x + y for x, y in zip(self.shape_cells, (0, 0, 1))]))",False,self.dim < 3,self.dim < 2,0.6483214497566223 1694,"def TotalSha1(self, include_clobbered_blocks=False): """"""Return the SHA-1 hash of all data in the 'care' regions. If include_clobbered_blocks is True, it returns the hash including the clobbered_blocks."""""" ranges = self.care_map if: ranges = ranges.subtract(self.clobbered_blocks) h = sha1() for d in self._GetRangeData(ranges): h.update(d) return h.hexdigest()",True,not include_clobbered_blocks,not include_clobbered_blocks,0.6472300887107849 1695,"def add_type(type, ext, strict=True): """"""Add a mapping between a type and an extension. When the extension is already known, the new type will replace the old one. When the type is already known the extension will be added to the list of known extensions. If strict is true, information will be added to list of standard types, else to the list of non-standard types. """""" if: init() return _db.add_type(type, ext, strict)",True,_db is None,_db is None,0.6556670665740967 1696,"@staticmethod def resize(im, target_size, interp): if: w = target_size[0] h = target_size[1] elif isinstance(target_size, int): w = target_size h = target_size else: raise ValueError('target_size should be int (wh, wh), list (w, h) or tuple (w, h)') im = cv2.resize(im, (w, h), interpolation=interp) return im",False,"isinstance(target_size, (list, tuple))","isinstance(target_size, tuple) or isinstance(target_size, list)",0.6466182470321655 1697,"@staticmethod def resize(im, target_size, interp): if isinstance(target_size, (list, tuple)): w = target_size[0] h = target_size[1] elif: w = target_size h = target_size else: raise ValueError('target_size should be int (wh, wh), list (w, h) or tuple (w, h)') im = cv2.resize(im, (w, h), interpolation=interp) return im",True,"isinstance(target_size, int)","isinstance(target_size, int)",0.6475367546081543 1698,"def _reset_library_root_logger() -> None: global _default_handler with _lock: if: return library_root_logger = _get_library_root_logger() library_root_logger.removeHandler(_default_handler) library_root_logger.setLevel(logging.NOTSET) _default_handler = None",False,not _default_handler,_default_handler is None,0.6518176794052124 1699,"@classmethod def get_sigs_select(cls, target): """""" Return selector previously set with set_select """""" if: return cls._select.get_select_iter() else: return selection.Select(target).get_select_iter()",False,cls._select,target is None,0.6602563858032227 1700,"@property def linear_name(self): """"""Returns the linear units name."""""" if: return self.srs.linear_name elif self.geographic: return None else: m = self.units_regex.match(self.wkt) return m.group('unit_name')",False,gdal.HAS_GDAL,self.srs.linear_name,0.6518120765686035 1701,"@property def linear_name(self): """"""Returns the linear units name."""""" if gdal.HAS_GDAL: return self.srs.linear_name elif: return None else: m = self.units_regex.match(self.wkt) return m.group('unit_name')",False,self.geographic,self.wkt is None,0.6500808000564575 1702,"def _print_num_passed(self, num_passed): if: self.stream.write('Success! The notebook does not pass any tests.\n') else: self.stream.write(fill(self.passed_warning.format(num_passed=num_passed), width=self.width))",True,num_passed == 0,num_passed == 0,0.6501659154891968 1703,"def mode_updated(self, botengine, current_mode): """""" Mode was updated :param botengine: BotEngine environment :param current_mode: Current mode :param current_timestamp: Current timestamp """""" if: for device_id in self.parent.devices: try: self.parent.devices[device_id].save(botengine) self.parent.devices[device_id].off(botengine) except: pass else: for device_id in self.parent.devices: try: self.parent.devices[device_id].restore(botengine) except: pass",False,not self.parent.is_present(botengine),current_mode == 'save',0.6433572769165039 1704,"@property def chat_data_json(self) -> str: """""":obj:`str`: The chat_data serialized as a JSON-string."""""" if: return self._chat_data_json return json.dumps(self.chat_data)",True,self._chat_data_json,self._chat_data_json,0.6506660580635071 1705,"def be_noisy_to_people(self): while True: if: for i in range(4): self.speaker.play_file(file=SoundFile.ERROR_ALARM)",False,self.color_sensor.reflection() > 30,self.color_sensor.reflected_light_intensity > 30,0.6472970247268677 1706,"def cast_column(dataframe, sql_column): """"""Cast the dataframe column mapped to `sql_column` to the Python type mapped to `sql_column`'s sql type. dataframe[sql_column.key] MUST be a valid column in dataframe, and must have values castable (e.g., non-NAN's in case of int's - the usual case as sql_column is often a primary key). :return: dataframe with the column casted """""" col_type = get_dtype(sql_column.type) pkeyname = sql_column.key if: dataframe[pkeyname] = dataframe[pkeyname].astype(col_type, copy=False) return dataframe",False,dataframe[pkeyname].dtype != col_type,pkeyname in dataframe,0.646764874458313 1707,"def is_failed_from(self, conf): if: return True return self.get_active_from(conf) == 'failed'",False,conf is None,not conf,0.6563161611557007 1708,"def crop_max_rectangle(img, ratio=1.0): if: return img s = min(img.size[0], img.size[1] / ratio) return crop_center(img, s, ratio * s)",True,ratio is None,ratio is None,0.6558477282524109 1709,"def output_array_of_address(data_objects): if: return for data_object in data_objects['Address']: output_address(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.6475517749786377 1710,"def __next__(self): data = self.file.read(self.buffer_size) if: return data raise StopIteration()",True,data,data,0.6738909482955933 1711,"def _random_sample_switch(self, log_alpha, u, layer, switch_layer, temperature=None): """"""Run partial discrete, then continuous path. Args: switch_layer: this layer and beyond will be continuous """""" if: return self._random_sample(log_alpha, u, layer) else: return self._random_sample_soft(log_alpha, u, layer, temperature)",False,layer < switch_layer,switch_layer == 'continuous',0.6497315168380737 1712,"def get_cluster_name_from_node(node_info) -> Optional[str]: for key, value in node_info.items(): if: return value return None",False,key == CLOUDTIK_TAG_CLUSTER_NAME,key.lower() == 'cluster-name',0.6546648740768433 1713,"def shorten(description, info='anilist.co'): msg = '' if: description = description[0:500] + '....' msg += f'\n*Description*: _{description}_[Read More]({info})' else: msg += f'\n*Description*:_{description}_' return msg",True,len(description) > 700,len(description) > 700,0.6523649096488953 1714,"def _set_req_to_reinstall(self, req: InstallRequirement) -> None: """""" Set a requirement to be installed. """""" if: req.should_reinstall = True req.satisfied_by = None",False,not self.use_user_site or req.satisfied_by.in_usersite,not self.should_reinstall,0.6464075446128845 1715,"@app.route('/api/v1/remotecopygroups/', methods=['GET']) def get_remote_copy_group(rcg_name): debugRequest(flask.request) for rcg in remote_copy_groups['members']: if: resp = flask.make_response(json.dumps(rcg), 200) return resp throw_error(404, NON_EXISTENT_RCOPY_GROUP, ""remote copy group doesn't exist"")",True,rcg['name'] == rcg_name,rcg['name'] == rcg_name,0.6488214731216431 1716,"def get_validation_func(column: str) -> List[FunctionType]: validation_funcs = VALIDATION_FUNCS_BY_COLUMN.get(column) if: raise Exception(f'There is no validation function for column: [{column}]') return validation_funcs",False,validation_funcs is None,not validation_funcs,0.6592252254486084 1717,"def drawGraph(self, key=None, filename='graph', format='pdf'): graph = self.graphs.get(key) if: return if graph.number_of_nodes() > 1: ag = nx.to_pydot(graph) ag.write('%s.%s' % (filename, format), format=format)",True,not graph,not graph,0.6627306938171387 1718,"def drawGraph(self, key=None, filename='graph', format='pdf'): graph = self.graphs.get(key) if not graph: return if: ag = nx.to_pydot(graph) ag.write('%s.%s' % (filename, format), format=format)",True,graph.number_of_nodes() > 1,graph.number_of_nodes() > 1,0.6469426155090332 1719,"def store_flos(self): if: if self.args.local_rank!= -1: total_flos = distributed_broadcast_scalars([self.total_flos]).sum().item() else: total_flos = self.total_flos if total_flos > 0: self.model.config.total_flos = total_flos",False,self.total_flos is not None,self.total_flos > 0,0.6504882574081421 1720,"def store_flos(self): if self.total_flos is not None: if: total_flos = distributed_broadcast_scalars([self.total_flos]).sum().item() else: total_flos = self.total_flos if total_flos > 0: self.model.config.total_flos = total_flos",False,self.args.local_rank != -1,self.model.config.distributed,0.6498078107833862 1721,"def store_flos(self): if self.total_flos is not None: if self.args.local_rank!= -1: total_flos = distributed_broadcast_scalars([self.total_flos]).sum().item() else: total_flos = self.total_flos if: self.model.config.total_flos = total_flos",False,total_flos > 0,self.args.local_rank == 0,0.6536277532577515 1722,"@cached_property def exports(self): """""" Return the information exported by this distribution. :return: A dictionary of exports, mapping an export category to a dict of :class:`ExportEntry` instances describing the individual export entries, and keyed by name. """""" result = {} r = self.get_distinfo_resource(EXPORTS_FILENAME) if: result = self.read_exports() return result",False,r,r.exists(),0.6787675619125366 1723,"@classmethod def search_rec_name(cls, name, clause): if: bool_op = 'AND' else: bool_op = 'OR' return [bool_op, ('number',) + tuple(clause[1:]), ('reference',) + tuple(clause[1:])]",False,clause[1].startswith('!') or clause[1].startswith('not '),clause[0] == 'AND',0.6444194912910461 1724,"def clearln(self): if: print('\r\x1b[K', end='', file=self.file)",False,self.file.isatty(),self.file.isatty() and self.config.colors,0.6442612409591675 1725,"def rm_welcome_setting(chat_id): try: rem = SESSION.query(Welcome).get(str(chat_id)) if: SESSION.delete(rem) SESSION.commit() return True except BaseException: return False",True,rem,rem,0.6703183650970459 1726,"def isimage(path): if: return True",False,'png' in path.lower() or 'jpg' in path.lower() or 'jpeg' in path.lower(),path and os.path.exists(path),0.6435443162918091 1727,"def filters(self): l = [] for i in range(self.ui.table_filters.rowCount() - 1): item = self.ui.table_filters.item(i, 0) filt = item.data(FilterObjectRole) ix = self.columns.tolist().index(filt.feature) if: try: filt.value = int(filt.value) except ValueError: pass l.append(filt) return l",False,self.dtypes[ix] == int,ix == 2,0.6466516256332397 1728,"@schedules.setter def schedules(self, value): if: self._root['schedules'] = [] return try: self._root['schedules'] = signatures.SCHEDULES.validate(value) except SchemaError as e: six.raise_from(DataFormatError(e), None)",True,value is None,value is None,0.6541365385055542 1729,"def done(self): if: return self._done = True self._reply_proxy.del_call_waiter(self._msg_id)",True,self._done,self._done,0.6615408658981323 1730,"@staticmethod def get_state(device: DmrDevice) -> PlayerState: """"""Return current PlayerState of the player."""""" if: return PlayerState.IDLE if device.transport_state in (TransportState.PLAYING, TransportState.TRANSITIONING): return PlayerState.PLAYING if device.transport_state in (TransportState.PAUSED_PLAYBACK, TransportState.PAUSED_RECORDING): return PlayerState.PAUSED if device.transport_state == TransportState.VENDOR_DEFINED: return PlayerState.IDLE return PlayerState.IDLE",False,device.transport_state is None,not device,0.649182915687561 1731,"@staticmethod def get_state(device: DmrDevice) -> PlayerState: """"""Return current PlayerState of the player."""""" if device.transport_state is None: return PlayerState.IDLE if: return PlayerState.PLAYING if device.transport_state in (TransportState.PAUSED_PLAYBACK, TransportState.PAUSED_RECORDING): return PlayerState.PAUSED if device.transport_state == TransportState.VENDOR_DEFINED: return PlayerState.IDLE return PlayerState.IDLE",False,"device.transport_state in (TransportState.PLAYING, TransportState.TRANSITIONING)",device.transport_state == TransportState.PLAYING,0.6480288505554199 1732,"@staticmethod def get_state(device: DmrDevice) -> PlayerState: """"""Return current PlayerState of the player."""""" if device.transport_state is None: return PlayerState.IDLE if device.transport_state in (TransportState.PLAYING, TransportState.TRANSITIONING): return PlayerState.PLAYING if: return PlayerState.PAUSED if device.transport_state == TransportState.VENDOR_DEFINED: return PlayerState.IDLE return PlayerState.IDLE",False,"device.transport_state in (TransportState.PAUSED_PLAYBACK, TransportState.PAUSED_RECORDING)",device.transport_state == TransportState.PAUSED,0.6486103534698486 1733,"@staticmethod def get_state(device: DmrDevice) -> PlayerState: """"""Return current PlayerState of the player."""""" if device.transport_state is None: return PlayerState.IDLE if device.transport_state in (TransportState.PLAYING, TransportState.TRANSITIONING): return PlayerState.PLAYING if device.transport_state in (TransportState.PAUSED_PLAYBACK, TransportState.PAUSED_RECORDING): return PlayerState.PAUSED if: return PlayerState.IDLE return PlayerState.IDLE",False,device.transport_state == TransportState.VENDOR_DEFINED,device.transport_state == PlayerState.NOT_RUNNING,0.6451587677001953 1734,"def flatten_content(survey_content, in_place=False, **opts): if: flatten_content_in_place(survey_content, **opts) return None else: survey_content_copy = deepcopy(survey_content) flatten_content_in_place(survey_content_copy, **opts) return survey_content_copy",True,in_place,in_place,0.6602417826652527 1735,"def _build_row(padded_cells, colwidths, colaligns, rowfmt): """"""Return a string which represents a row of data cells."""""" if: return None if hasattr(rowfmt, '__call__'): return rowfmt(padded_cells, colwidths, colaligns) else: return _build_simple_row(padded_cells, rowfmt)",False,not rowfmt,rowfmt is None,0.6609746813774109 1736,"def _build_row(padded_cells, colwidths, colaligns, rowfmt): """"""Return a string which represents a row of data cells."""""" if not rowfmt: return None if: return rowfmt(padded_cells, colwidths, colaligns) else: return _build_simple_row(padded_cells, rowfmt)",False,"hasattr(rowfmt, '__call__')","isinstance(rowfmt, builtin_str)",0.6428793668746948 1737,"def force_press_button_iframe(driver, f, select_by, select_query, force_submit=True): if: try: driver.switch_to.default_content() except Exception as exc: pass else: try: driver.switch_to.frame(f) except Exception as exc: pass is_clicked = force_press_button(driver, select_by, select_query, force_submit) if f: try: driver.switch_to.default_content() except Exception as exc: pass return is_clicked",False,not f,f is None,0.6589880585670471 1738,"def force_press_button_iframe(driver, f, select_by, select_query, force_submit=True): if not f: try: driver.switch_to.default_content() except Exception as exc: pass else: try: driver.switch_to.frame(f) except Exception as exc: pass is_clicked = force_press_button(driver, select_by, select_query, force_submit) if: try: driver.switch_to.default_content() except Exception as exc: pass return is_clicked",False,f,not is_clicked,0.6664504408836365 1739,"def get_tracker(self, tracking_config): tracker = tracking_config.tracker if: return RegionTracker(self.get_id(), tracking_config, self.crop_rectangle) else: raise Exception(f'Cant find for tracker {tracker}')",False,tracker == 'RegionTracker',tracker == 'region',0.6522599458694458 1740,"def _reject_connection_header(headers, hdr_validation_flags): """""" Raises a ProtocolError if the Connection header is present in a header block. """""" for header in headers: if: raise ProtocolError('Connection-specific header field present: %s.' % header[0]) yield header",False,header[0] in CONNECTION_HEADERS,header[0] not in hdr_validation_flags,0.6486382484436035 1741,"def read(self, *args): data = self._file.read(*args) if: self.process.wait() self._raise_if_error() return data",False,len(args) == 0 or args[0] <= 0,"len(args) == 0 or args[0] <= 0 or args[1] <= 0 or (args[2] <= 0) or (args[3] <= 0) or (args[4] in ['bin', 'close']))",0.6487314701080322 1742,"@staticmethod def _get_eopatch_time_series(eopatch: EOPatch, ref_date: dt.datetime | None=None, scale_time: int=1) -> np.ndarray: """"""Returns a numpy array with seconds passed between the reference date and the timestamp of each image. An array is constructed as time_series[i] = (timestamp[i] - ref_date).total_seconds(). If reference date is None the first date in the EOPatch's timestamp array is taken. If EOPatch `timestamps` attribute is empty the method returns None. :param eopatch: the EOPatch whose timestamps are used to construct the time series :param ref_date: reference date relative to which the time is measured :param scale_time: scale seconds by factor. If `60`, time will be in minutes, if `3600` hours """""" if: return np.zeros(0, dtype=np.int64) ref_date = ref_date or eopatch.timestamps[0] return np.asarray([round((timestamp - ref_date).total_seconds() / scale_time) for timestamp in eopatch.timestamps], dtype=np.int64)",False,not eopatch.timestamps,len(eopatch.timestamps) == 0,0.6590909957885742 1743,"def convert_file(row): source_path = row['chemin_source'].replace('\\', '/') if: source_path = '/'.join(source_path.split('/')[1:]).lower() if os.path.isfile(path_to_files + source_path): return path_to_files + source_path else: source_path = '/'.join(source_path.split('/')[3:]) if os.path.isfile(path_to_files + source_path): return path_to_files + source_path return False",False,'manuel' in source_path,source_path.startswith('s3'),0.6489115953445435 1744,"def convert_file(row): source_path = row['chemin_source'].replace('\\', '/') if'manuel' in source_path: source_path = '/'.join(source_path.split('/')[1:]).lower() if: return path_to_files + source_path else: source_path = '/'.join(source_path.split('/')[3:]) if os.path.isfile(path_to_files + source_path): return path_to_files + source_path return False",True,os.path.isfile(path_to_files + source_path),os.path.isfile(path_to_files + source_path),0.643067479133606 1745,"def convert_file(row): source_path = row['chemin_source'].replace('\\', '/') if'manuel' in source_path: source_path = '/'.join(source_path.split('/')[1:]).lower() if os.path.isfile(path_to_files + source_path): return path_to_files + source_path else: source_path = '/'.join(source_path.split('/')[3:]) if: return path_to_files + source_path return False",True,os.path.isfile(path_to_files + source_path),os.path.isfile(path_to_files + source_path),0.6431477665901184 1746,"def add_welcome_setting(chat_id, previous_welcome, reply, f_mesg_id): to_check = get_welcome(chat_id) if: adder = Welcome(chat_id, previous_welcome, reply, f_mesg_id) SESSION.add(adder) SESSION.commit() return True else: rem = SESSION.query(Welcome).get(str(chat_id)) SESSION.delete(rem) SESSION.commit() adder = Welcome(chat_id, previous_welcome, reply, f_mesg_id) SESSION.commit() return False",True,not to_check,not to_check,0.6521377563476562 1747,"def as_string(self, fmt): """"""Format the clipboard data as a string. @param fmt: the clipboard format. Note: we cannot simply override __str__ for this purpose, because the clipboard format is not a member of (or in a parent-child relationship with) the tagCLIPDATA structure, so we must pass it in as an argument. """""" if: encoding = 'utf16' else: encoding = 'utf8' return obj.Object('String', offset=self.abData.obj_offset, vm=self.obj_vm, encoding=encoding, length=self.cbData)",False,fmt == 'CF_UNICODETEXT',self.abData.encoding == 'utf16',0.6548631191253662 1748,"def select_setting(self, setting_name): if: return matches = self.settings_list.findItems(setting_name, Qt.MatchExactly) if matches: self.settings_list.setCurrentItem(matches[0])",False,setting_name is None,not setting_name,0.6502498388290405 1749,"def select_setting(self, setting_name): if setting_name is None: return matches = self.settings_list.findItems(setting_name, Qt.MatchExactly) if: self.settings_list.setCurrentItem(matches[0])",True,matches,matches,0.6594033241271973 1750,"def get_recipe_ingredients(recipe_name, expensive=False): """""" Returns a ``set`` of all item types that ``recipe_name`` requires. Discards quantities. First attempts to get the ``""ingredients""`` key from the recipe. If that fails, we then attempt to get the contents of the ``""normal""`` key from recipe (which is the list of ingredients under non-expensive map settings). .. NOTE:: Assumes that the items required for 'normal' mode are the same as 'expensive' mode. This is unlikely true under all circumstances, but how will we issue warnings for invalid item requests if we dont know what mode the world save is in? :param recipe_name: The name of the recipe to get the ingredients of. :param expensive: Whether or not to return the expensive recipe if available. If not, defaults to the normal recipe requirements. :returns: A ``set`` of names of each Factorio item that the recipe requires. :exception KeyError: If ``recipe_name`` is not a valid recipe. :example: .. code-block:: python print(recipes.get_recipe_ingredients(""electronic-circuit"")) # {'iron-plate', 'copper-cable'} """""" if: return {x[0] if isinstance(x, list) else x['name'] for x in raw[recipe_name]['ingredients']} else: cost_type = 'expensive' if expensive else 'normal' return {x[0] if isinstance(x, list) else x['name'] for x in raw[recipe_name][cost_type]['ingredients']}",False,'ingredients' in raw[recipe_name],"recipe_name in ['normal', 'emitted']",0.6441199779510498 1751,"def scan(self, terminal: str) -> Iterable['StringPosition']: if: return [StringPosition(self.string, self.i + len(terminal))] else: return []",False,"self.string.startswith(terminal, self.i)",terminal in self.string,0.6439003944396973 1752,"def get_activation(act_type=None): if: return nn.ReLU(inplace=True) elif act_type == 'lrelu': return nn.LeakyReLU(0.1, inplace=True) elif act_type =='mish': return nn.Mish(inplace=True) elif act_type =='silu': return nn.SiLU(inplace=True) elif act_type is None: return nn.Identity()",True,act_type == 'relu',act_type == 'relu',0.6507855653762817 1753,"def get_activation(act_type=None): if act_type =='relu': return nn.ReLU(inplace=True) elif: return nn.LeakyReLU(0.1, inplace=True) elif act_type =='mish': return nn.Mish(inplace=True) elif act_type =='silu': return nn.SiLU(inplace=True) elif act_type is None: return nn.Identity()",True,act_type == 'lrelu',act_type == 'lrelu',0.6490061283111572 1754,"def get_activation(act_type=None): if act_type =='relu': return nn.ReLU(inplace=True) elif act_type == 'lrelu': return nn.LeakyReLU(0.1, inplace=True) elif: return nn.Mish(inplace=True) elif act_type =='silu': return nn.SiLU(inplace=True) elif act_type is None: return nn.Identity()",True,act_type == 'mish',act_type == 'mish',0.6526709794998169 1755,"def get_activation(act_type=None): if act_type =='relu': return nn.ReLU(inplace=True) elif act_type == 'lrelu': return nn.LeakyReLU(0.1, inplace=True) elif act_type =='mish': return nn.Mish(inplace=True) elif: return nn.SiLU(inplace=True) elif act_type is None: return nn.Identity()",True,act_type == 'silu',act_type == 'silu',0.6524125337600708 1756,"def get_activation(act_type=None): if act_type =='relu': return nn.ReLU(inplace=True) elif act_type == 'lrelu': return nn.LeakyReLU(0.1, inplace=True) elif act_type =='mish': return nn.Mish(inplace=True) elif act_type =='silu': return nn.SiLU(inplace=True) elif: return nn.Identity()",True,act_type is None,act_type is None,0.649735689163208 1757,"def output_array_of_customerfinancialstatus(value_sets): if: return output_status_message('Array Of CustomerFinancialStatus:\n') for value_set in value_sets['CustomerFinancialStatus']: output_customerfinancialstatus(value_set)",True,value_sets is None or len(value_sets) == 0,value_sets is None or len(value_sets) == 0,0.6449264287948608 1758,"def remove_repository(self, repository_id: str, state: str, delete_repo: bool=False): """"""Removes the repository in state'state' and with id'repository_id' Args: repository_id (str): The repository id state (str): It must be either archived or imported otherwise it will return an error delete_repo (bool): Whether or not the backup repository should be deleted """""" if: _exit_if_errors(['--id is required']) if not state: _exit_if_errors(['--state is required']) if state not in ['archived', 'imported']: _exit_if_errors(['can only delete archived or imported repositories to delete an active repository it needs to be archived first']) if delete_repo and state == 'imported': _exit_if_errors(['cannot delete the repository for an imported repository']) _, errors = self.rest.delete_backup_repository(repository_id, state, delete_repo) _exit_if_errors(errors) _success('Repository was deleted')",True,not repository_id,not repository_id,0.6532858610153198 1759,"def remove_repository(self, repository_id: str, state: str, delete_repo: bool=False): """"""Removes the repository in state'state' and with id'repository_id' Args: repository_id (str): The repository id state (str): It must be either archived or imported otherwise it will return an error delete_repo (bool): Whether or not the backup repository should be deleted """""" if not repository_id: _exit_if_errors(['--id is required']) if: _exit_if_errors(['--state is required']) if state not in ['archived', 'imported']: _exit_if_errors(['can only delete archived or imported repositories to delete an active repository it needs to be archived first']) if delete_repo and state == 'imported': _exit_if_errors(['cannot delete the repository for an imported repository']) _, errors = self.rest.delete_backup_repository(repository_id, state, delete_repo) _exit_if_errors(errors) _success('Repository was deleted')",True,not state,not state,0.6596641540527344 1760,"def remove_repository(self, repository_id: str, state: str, delete_repo: bool=False): """"""Removes the repository in state'state' and with id'repository_id' Args: repository_id (str): The repository id state (str): It must be either archived or imported otherwise it will return an error delete_repo (bool): Whether or not the backup repository should be deleted """""" if not repository_id: _exit_if_errors(['--id is required']) if not state: _exit_if_errors(['--state is required']) if: _exit_if_errors(['can only delete archived or imported repositories to delete an active repository it needs to be archived first']) if delete_repo and state == 'imported': _exit_if_errors(['cannot delete the repository for an imported repository']) _, errors = self.rest.delete_backup_repository(repository_id, state, delete_repo) _exit_if_errors(errors) _success('Repository was deleted')",False,"state not in ['archived', 'imported']",not self.is_active_repository(repository_id),0.6427584290504456 1761,"def remove_repository(self, repository_id: str, state: str, delete_repo: bool=False): """"""Removes the repository in state'state' and with id'repository_id' Args: repository_id (str): The repository id state (str): It must be either archived or imported otherwise it will return an error delete_repo (bool): Whether or not the backup repository should be deleted """""" if not repository_id: _exit_if_errors(['--id is required']) if not state: _exit_if_errors(['--state is required']) if state not in ['archived', 'imported']: _exit_if_errors(['can only delete archived or imported repositories to delete an active repository it needs to be archived first']) if: _exit_if_errors(['cannot delete the repository for an imported repository']) _, errors = self.rest.delete_backup_repository(repository_id, state, delete_repo) _exit_if_errors(errors) _success('Repository was deleted')",False,delete_repo and state == 'imported',delete_repo,0.6470493078231812 1762,"@classmethod def setUpClass(cls): cls.get_super_method(cls,'setUpClass')() g.log.info('Starting %s ', cls.__name__) g.log.info('Upload io scripts to clients %s for running IO on mounts', cls.clients) cls.script_upload_path = '/usr/share/glustolibs/io/scripts/file_dir_ops.py' ret = upload_scripts(cls.clients, cls.script_upload_path) if: raise ExecutionError('Failed to upload IO scripts to clients %s' % cls.clients) g.log.info('Successfully uploaded IO scripts to clients %s', cls.clients)",True,not ret,not ret,0.6648889780044556 1763,"def register(self, fileobj, events, data=None): key = super(PollSelector, self).register(fileobj, events, data) event_mask = 0 if: event_mask |= select.POLLIN if events & EVENT_WRITE: event_mask |= select.POLLOUT self._poll.register(key.fd, event_mask) return key",True,events & EVENT_READ,events & EVENT_READ,0.6662373542785645 1764,"def register(self, fileobj, events, data=None): key = super(PollSelector, self).register(fileobj, events, data) event_mask = 0 if events & EVENT_READ: event_mask |= select.POLLIN if: event_mask |= select.POLLOUT self._poll.register(key.fd, event_mask) return key",True,events & EVENT_WRITE,events & EVENT_WRITE,0.670316219329834 1765,"def mean(data): """"""Return the sample arithmetic mean of data."""""" n = len(data) if: raise ValueError('mean requires at least one data point') return sum(data) / n",True,n < 1,n < 1,0.6666034460067749 1766,"def _flatten_newstyle_with_slots(self, obj, data): """"""Return a json-friendly dict for new-style objects with __slots__. """""" allslots = [_wrap_string_slot(getattr(cls, '__slots__', tuple())) for cls in obj.__class__.mro()] if: attrs = [x for x in dir(obj) if not x.startswith('__') and (not x.endswith('__'))] self._flatten_obj_attrs(obj, attrs, data) return data",False,"not self._flatten_obj_attrs(obj, chain(*allslots), data)",allslots,0.6433286666870117 1767,"def reset(self, stdv=None): if: stdv = stdv * math.sqrt(3) else: stdv = 1.0 / math.sqrt(self.weight.size(0)) self.weight.uniform_(-stdv, stdv)",True,stdv is not None,stdv is not None,0.6553895473480225 1768,"def init_weights(self): """"""Initialize the weights of module."""""" super(NASFCOS_FPN, self).init_weights() for module in self.fpn.values(): if: caffe2_xavier_init(module.out_conv.conv) for modules in [self.adapt_convs.modules(), self.extra_downsamples.modules()]: for module in modules: if isinstance(module, nn.Conv2d): caffe2_xavier_init(module)",True,"hasattr(module, 'conv_out')","hasattr(module, 'conv_out')",0.6438648104667664 1769,"def init_weights(self): """"""Initialize the weights of module."""""" super(NASFCOS_FPN, self).init_weights() for module in self.fpn.values(): if hasattr(module, 'conv_out'): caffe2_xavier_init(module.out_conv.conv) for modules in [self.adapt_convs.modules(), self.extra_downsamples.modules()]: for module in modules: if: caffe2_xavier_init(module)",True,"isinstance(module, nn.Conv2d)","isinstance(module, nn.Conv2d)",0.6461859345436096 1770,"def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) if: raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') if new_cls.lt is not None and new_cls.le is not None: raise errors.ConfigError('bounds lt and le cannot be specified at the same time') return _registered(new_cls)",True,new_cls.gt is not None and new_cls.ge is not None,new_cls.gt is not None and new_cls.ge is not None,0.6504590511322021 1771,"def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) if new_cls.gt is not None and new_cls.ge is not None: raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') if: raise errors.ConfigError('bounds lt and le cannot be specified at the same time') return _registered(new_cls)",True,new_cls.lt is not None and new_cls.le is not None,new_cls.lt is not None and new_cls.le is not None,0.6506914496421814 1772,"def _lookup(self, key): """"""Use TOC to return subpath for given key, or raise a KeyError."""""" try: if: return self._toc[key] except KeyError: pass self._refresh() try: return self._toc[key] except KeyError: raise KeyError('No message with key: %s' % key)",False,"os.path.exists(os.path.join(self._path, self._toc[key]))",key in self._toc,0.6466686725616455 1773,"def _prepare_conn(self, conn): self.sock = conn if: self._tunnel() self.auto_open = 0",False,self._tunnel_host,"hasattr(self, '_tunnel')",0.6524168252944946 1774,"def to_name_suffix(self, variant): suffix = [] for k, vs, cfg in self._variants: if: suffix.append(k + '_' + str(variant[k])) return '_'.join(suffix)",False,"not cfg.get('hide', False)","cfg.get('suffix', '')",0.6456670761108398 1775,"def exportLiteralAttributes(self, outfile, level, name_): if: showIndent(outfile, level) outfile.write('id = %s,\n' % (self.id,))",True,self.id is not None,self.id is not None,0.6504769325256348 1776,"@no_float_overloading def expr_dependencies(expr): deps = [] for dep in expr.free_symbols: if: deps.append(dep) elif is_var(dep): raise ValueError('Invalid dependency') return sorted(deps, key=lambda dep: var_id(dep))",False,"isinstance(dep, SymbolicFloat)",is_float(dep),0.6468938589096069 1777,"@no_float_overloading def expr_dependencies(expr): deps = [] for dep in expr.free_symbols: if isinstance(dep, SymbolicFloat): deps.append(dep) elif: raise ValueError('Invalid dependency') return sorted(deps, key=lambda dep: var_id(dep))",False,is_var(dep),dep not in dep,0.6493273973464966 1778,"def baked_waveform(waveform, pulse_duration): pulse_segments = [] for i in range(0, pulse_duration + 1): with baking(config, padding_method='right') as b: if: wf = [0.0] * 16 else: wf = waveform[:i].tolist() b.add_op('flux_pulse', 'flux_line', wf) b.play('flux_pulse', 'flux_line') pulse_segments.append(b) return pulse_segments",True,i == 0,i == 0,0.6680722236633301 1779,"def step(self, actions): rew = self._get_reward(actions) self._choose_next_state() done = False if: rew = 0 done = True return (0, rew, done, {})",True,self.episode_len and self.time >= self.episode_len,self.episode_len and self.time >= self.episode_len,0.6437596082687378 1780,"def write_eof(self): if: return assert self._pipe self._closing = True if not self._buffer: self._loop._remove_reader(self._fileno) self._loop.call_soon(self._call_connection_lost, None)",True,self._closing,self._closing,0.6630240082740784 1781,"def write_eof(self): if self._closing: return assert self._pipe self._closing = True if: self._loop._remove_reader(self._fileno) self._loop.call_soon(self._call_connection_lost, None)",True,not self._buffer,not self._buffer,0.657233715057373 1782,"def mwl(self, event): """"""Mouse Wheel - under tkinter we seem to need Tk v8.5+ for this """""" if: self.top.f.canvas.yview_scroll(-1 * self._tmwm, 'units') elif event.num == 5: self.top.f.canvas.yview_scroll(1 * self._tmwm, 'units') else: self.top.f.canvas.yview_scroll(-event.delta * self._tmwm, 'units')",False,event.num == 4,event.num == 1,0.658865213394165 1783,"def mwl(self, event): """"""Mouse Wheel - under tkinter we seem to need Tk v8.5+ for this """""" if event.num == 4: self.top.f.canvas.yview_scroll(-1 * self._tmwm, 'units') elif: self.top.f.canvas.yview_scroll(1 * self._tmwm, 'units') else: self.top.f.canvas.yview_scroll(-event.delta * self._tmwm, 'units')",False,event.num == 5,event.num == 8,0.6566430330276489 1784,"def cup_to_center(self): self.level_copy = self.level self.level = 1 if: self.move = 1 self.move_1() self.execute_move() self.update_ball_cup() elif self.choice == 3: self.move = 3 self.move_3() self.execute_move() self.update_ball_cup() self.reset_motor_positions() self.level = self.level_copy",False,self.choice == 1,self.choice == 2,0.6579434275627136 1785,"def cup_to_center(self): self.level_copy = self.level self.level = 1 if self.choice == 1: self.move = 1 self.move_1() self.execute_move() self.update_ball_cup() elif: self.move = 3 self.move_3() self.execute_move() self.update_ball_cup() self.reset_motor_positions() self.level = self.level_copy",False,self.choice == 3,self.choice == 2,0.6590838432312012 1786,"def find_low_inside_air(self, start: Point2, target: Point2, distance: float) -> Point2: result = self.map.find_low_inside_walk(MapType.Air, start, target, distance) result = result[0] end_point = Point2((result[0], result[1])) result_distance = target.distance_to_point2(end_point) if: vector = end_point - target normal_vector = point_normalize(vector) end_point = normal_vector * distance + target return end_point",False,result_distance > distance,result_distance > 0.5,0.6516368389129639 1787,"def upgrade_config(cfg: CN, to_version: Optional[int]=None) -> CN: """""" Upgrade a config from its current version to a newer version. Args: cfg (CfgNode): to_version (int): defaults to the latest version. """""" cfg = cfg.clone() if: to_version = _C.VERSION assert cfg.VERSION <= to_version, 'Cannot upgrade from v{} to v{}!'.format(cfg.VERSION, to_version) for k in range(cfg.VERSION, to_version): converter = globals()['ConverterV' + str(k + 1)] converter.upgrade(cfg) cfg.VERSION = k + 1 return cfg",True,to_version is None,to_version is None,0.6545443534851074 1788,"def _ustr(obj): """"""Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It then < returns the unicode object | encodes it with the default encoding |... >. """""" if: return obj try: return str(obj) except UnicodeEncodeError: ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') xmlcharref = Regex('&#\\d+;') xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) return xmlcharref.transformString(ret)",True,"isinstance(obj, unicode)","isinstance(obj, unicode)",0.6452963948249817 1789,"def addWord(self, word): if: self.word2index[word] = self.n_words self.word2count[word] = 1 self.index2word[self.n_words] = word self.n_words += 1 else: self.word2count[word] += 1",True,word not in self.word2index,word not in self.word2index,0.6499207019805908 1790,"def get_repository(self, repository_id, state, json_out=False): """"""Retrieves one repository from the backup service If the repository does not exist an error will be returned Args: repository_id (str): The repository id to be retrieved state (str): The state of the repository to retrieve json_out (bool): If True the output will be JSON otherwise it will be a human friendly format. """""" if: _exit_if_errors(['--id is required']) if not state: _exit_if_errors(['--state is required']) repository, errors = self.rest.get_backup_service_repository(repository_id, state) _exit_if_errors(errors) if json_out: print(json.dumps(repository, indent=2)) else: self.human_firendly_print_repository(repository)",True,not repository_id,not repository_id,0.651577353477478 1791,"def get_repository(self, repository_id, state, json_out=False): """"""Retrieves one repository from the backup service If the repository does not exist an error will be returned Args: repository_id (str): The repository id to be retrieved state (str): The state of the repository to retrieve json_out (bool): If True the output will be JSON otherwise it will be a human friendly format. """""" if not repository_id: _exit_if_errors(['--id is required']) if: _exit_if_errors(['--state is required']) repository, errors = self.rest.get_backup_service_repository(repository_id, state) _exit_if_errors(errors) if json_out: print(json.dumps(repository, indent=2)) else: self.human_firendly_print_repository(repository)",True,not state,not state,0.6577303409576416 1792,"def get_repository(self, repository_id, state, json_out=False): """"""Retrieves one repository from the backup service If the repository does not exist an error will be returned Args: repository_id (str): The repository id to be retrieved state (str): The state of the repository to retrieve json_out (bool): If True the output will be JSON otherwise it will be a human friendly format. """""" if not repository_id: _exit_if_errors(['--id is required']) if not state: _exit_if_errors(['--state is required']) repository, errors = self.rest.get_backup_service_repository(repository_id, state) _exit_if_errors(errors) if: print(json.dumps(repository, indent=2)) else: self.human_firendly_print_repository(repository)",True,json_out,json_out,0.653247594833374 1793,"def uname2uid(uname): """""" Given uname, return uid or None if cannot find """""" if: return _uname2uid[uname] else: try: uid = pwd.getpwnam(uname).pw_uid _uid2uname[uid] = uname except (KeyError, NameError): uid = None _uname2uid[uname] = uid return uid",True,uname in _uname2uid,uname in _uname2uid,0.654062032699585 1794,"def _click_button(self, obj, obj_name): if: logger.info(f'Pressing on {obj_name} button.') obj.click() return True logger.info(f""Object {obj_name} doesn't exists. Can't press on it!"") return False",False,obj.exists(Timeout.SHORT),obj.has_button(0),0.6469231247901917 1795,"def __init__(self, d_ffn, input_shape=None, input_size=None, dropout=0.0, activation=nn.ReLU): super().__init__() if: raise ValueError('Expected one of input_shape or input_size') if input_size is None: input_size = input_shape[-1] self.ffn = nn.Sequential(nn.Linear(input_size, d_ffn), activation(), nn.Dropout(dropout), nn.Linear(d_ffn, input_size))",True,input_shape is None and input_size is None,input_shape is None and input_size is None,0.6482417583465576 1796,"def __init__(self, d_ffn, input_shape=None, input_size=None, dropout=0.0, activation=nn.ReLU): super().__init__() if input_shape is None and input_size is None: raise ValueError('Expected one of input_shape or input_size') if: input_size = input_shape[-1] self.ffn = nn.Sequential(nn.Linear(input_size, d_ffn), activation(), nn.Dropout(dropout), nn.Linear(d_ffn, input_size))",True,input_size is None,input_size is None,0.6525101661682129 1797,"def get_padding_mask(tokens): padding_mask = tokens.eq(self.encoder.sentence_encoder.padding_idx) if: padding_mask = None return padding_mask",False,not padding_mask.any(),padding_mask == 0,0.6476333141326904 1798,"def _add_writer(self, name, writer): if: raise ValueError('Expected writer {} to have a None binding in writers.'.format(name)) if self._writers[name] is not None: raise ValueError('Expected writer {} to be bound to None in writers but saw {} instead'.format(name, self._writers[name])) self._writers[name] = writer",False,name not in self._writers,writer is None,0.6551755666732788 1799,"def _add_writer(self, name, writer): if name not in self._writers: raise ValueError('Expected writer {} to have a None binding in writers.'.format(name)) if: raise ValueError('Expected writer {} to be bound to None in writers but saw {} instead'.format(name, self._writers[name])) self._writers[name] = writer",False,self._writers[name] is not None,writer is None,0.6490299701690674 1800,"def parse_lst(self, filename=None): if: filename = self._make_filepath('wwnames.txt') self._parse_base(filename, self._parse_lst)",False,not filename,filename is None,0.6641242504119873 1801,"def _process_relative_position(self, spec, events): if: return False parts = spec[1:-1].split(',') if len(parts)!= 2: return False horizontal = int(parts[0]) vertical = int(parts[1]) event = MoveRelativeEvent(horizontal, vertical) events.append(event) return True",False,not spec.startswith('<') or not spec.endswith('>'),not spec.startswith('/'),0.6429886817932129 1802,"def _process_relative_position(self, spec, events): if not spec.startswith('<') or not spec.endswith('>'): return False parts = spec[1:-1].split(',') if: return False horizontal = int(parts[0]) vertical = int(parts[1]) event = MoveRelativeEvent(horizontal, vertical) events.append(event) return True",False,len(parts) != 2,len(parts) < 2,0.6475659012794495 1803,"def getLocaltime(self, offset, stdoffset): new_dt = self.dt.duplicate() if: new_dt.offsetSeconds(offset) elif self.mode =='s': new_dt.offsetSeconds(-stdoffset + offset) return new_dt",True,self.mode == 'u',self.mode == 'u',0.6528321504592896 1804,"def getLocaltime(self, offset, stdoffset): new_dt = self.dt.duplicate() if self.mode == 'u': new_dt.offsetSeconds(offset) elif: new_dt.offsetSeconds(-stdoffset + offset) return new_dt",True,self.mode == 's',self.mode == 's',0.6531457304954529 1805,"def clear_async_jobs(self, threshold: Optional[int]=None) -> Result[bool]: """"""Clear async job results from the server. Async jobs that are still queued or running are not stopped. :param threshold: If specified, only the job results created prior to the threshold (a unix timestamp) are deleted. Otherwise, all job results are deleted. :type threshold: int | None :return: True if job results were cleared successfully. :rtype: bool :raise arango.exceptions.AsyncJobClearError: If operation fails. """""" if: request = Request(method='delete', endpoint='/_api/job/all') else: request = Request(method='delete', endpoint='/_api/job/expired', params={'stamp': threshold}) def response_handler(resp: Response) -> bool: if resp.is_success: return True raise AsyncJobClearError(resp, request) return self._execute(request, response_handler)",True,threshold is None,threshold is None,0.6580204963684082 1806,"def clear_async_jobs(self, threshold: Optional[int]=None) -> Result[bool]: """"""Clear async job results from the server. Async jobs that are still queued or running are not stopped. :param threshold: If specified, only the job results created prior to the threshold (a unix timestamp) are deleted. Otherwise, all job results are deleted. :type threshold: int | None :return: True if job results were cleared successfully. :rtype: bool :raise arango.exceptions.AsyncJobClearError: If operation fails. """""" if threshold is None: request = Request(method='delete', endpoint='/_api/job/all') else: request = Request(method='delete', endpoint='/_api/job/expired', params={'stamp': threshold}) def response_handler(resp: Response) -> bool: if: return True raise AsyncJobClearError(resp, request) return self._execute(request, response_handler)",True,resp.is_success,resp.is_success,0.6482662558555603 1807,"def _search_for_change_contents(self, change_list, file): for change_ in reversed(change_list): if: result = self._search_for_change_contents(change_.changes, file) if result is not None: return result if isinstance(change_, change.ChangeContents) and change_.resource == file: return change_.old_contents",False,"isinstance(change_, change.ChangeSet)","isinstance(change_.changes, change.ChangeSet)",0.6481438875198364 1808,"def _search_for_change_contents(self, change_list, file): for change_ in reversed(change_list): if isinstance(change_, change.ChangeSet): result = self._search_for_change_contents(change_.changes, file) if result is not None: return result if: return change_.old_contents",False,"isinstance(change_, change.ChangeContents) and change_.resource == file",change_.old_contents is not None,0.6436895132064819 1809,"def _search_for_change_contents(self, change_list, file): for change_ in reversed(change_list): if isinstance(change_, change.ChangeSet): result = self._search_for_change_contents(change_.changes, file) if: return result if isinstance(change_, change.ChangeContents) and change_.resource == file: return change_.old_contents",False,result is not None,result,0.6529035568237305 1810,"def get_issue_generator(self, user_id, project_id, project_name): """""" Approach: 1. Get user ID from bugwarriorrc file 2. Get list of tickets from /user-tasks for a given project 3. For each ticket/task returned from #2, get ticket/task info and check if logged-in user is primary (look at `is_owner` and `user_id`) """""" user_tasks_data = self.call_api('/projects/' + str(project_id) + '/user-tasks') for key, task in enumerate(user_tasks_data): assigned_task = self.get_task_dict(project_id, key, task) if: log.debug("" Adding '"" + assigned_task['description'] + ""' to task list."") yield assigned_task",False,assigned_task,self.log_debug,0.6673259139060974 1811,"def unsubscript_topic(self, sub, ignore_missing=True): """"""Delete subscription :param sub: A string of subscription_urn or object of an instance of :class:`~openstack.smn.subscription.TopicSubscription or :class:`~openstack.smn.subscription.Subscription :param bool ignore_missing: When set to ``False`` :class:`~openstack.exceptions.ResourceNotFound` will be raised when the TopicSubscription does not exist :returns: None """""" sub_obj = sub if: sub_obj = sub.subscription_urn return self._delete(_subscription.Subscription, sub_obj, ignore_missing=ignore_missing)",False,"isinstance(sub, _subscription.TopicSubscription)","hasattr(sub, 'subscription_urn')",0.6458318829536438 1812,"def _packet_handler(self, param, header, pkt_pointer): pkt_data = string_at(pkt_pointer, header.contents.len) p = dpkt.ethernet.Ethernet(pkt_data) if: self.filterData(p.data)",False,p.data.__class__.__name__ == 'IP' and p.data.data.__class__.__name__ == 'TCP',p.data is not None,0.6459816098213196 1813,"def as_scanner(mcscf_grad): """"""Generating a nuclear gradients scanner/solver (for geometry optimizer). The returned solver is a function. This function requires one argument ""mol"" as input and returns energy and first order nuclear derivatives. The solver will automatically use the results of last calculation as the initial guess of the new calculation. All parameters assigned in the nuc-grad object and SCF object (DIIS, conv_tol, max_memory etc) are automatically applied in the solver. Note scanner has side effects. It may change many underlying objects (_scf, with_df, with_x2c,...) during calculation. Examples: >>> from pyscf import gto, scf, mcscf >>> mol = gto.M(atom='N 0 0 0; N 0 0 1.1', verbose=0) >>> mc_grad_scanner = mcscf.CASSCF(scf.RHF(mol), 4, 4).nuc_grad_method().as_scanner() >>> etot, grad = mc_grad_scanner(gto.M(atom='N 0 0 0; N 0 0 1.1')) >>> etot, grad = mc_grad_scanner(gto.M(atom='N 0 0 0; N 0 0 1.5')) """""" if: return mcscf_grad logger.info(mcscf_grad, 'Create scanner for %s', mcscf_grad.__class__) name = mcscf_grad.__class__.__name__ + CASSCF_GradScanner.__name_mixin__ return lib.set_class(CASSCF_GradScanner(mcscf_grad), (CASSCF_GradScanner, mcscf_grad.__class__), name)",False,"isinstance(mcscf_grad, lib.GradScanner)",mcscf_grad is None,0.6441487073898315 1814,"def do_PATCH(self): parsed = urlparse(self.path) self.server.rest_server.trace.append(f'PATCH:{parsed.path}') if: return for endpoint, fns in endpoints: if re.search(endpoint, parsed.path) is not None and 'PATCH' in fns: return self.handle_fn(fns['PATCH'], parsed.path) self.not_found()",False,not self.authenticated(),not parsed.path,0.6500159502029419 1815,"def do_PATCH(self): parsed = urlparse(self.path) self.server.rest_server.trace.append(f'PATCH:{parsed.path}') if not self.authenticated(): return for endpoint, fns in endpoints: if: return self.handle_fn(fns['PATCH'], parsed.path) self.not_found()",True,"re.search(endpoint, parsed.path) is not None and 'PATCH' in fns","re.search(endpoint, parsed.path) is not None and 'PATCH' in fns",0.6443148851394653 1816,"def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict): generator = None if: partition_size = info_dict['{}_size'.format(partition_name)] fec_supported = info_dict.get('verity_fec') == 'true' generator = VerifiedBootVersion1HashtreeInfoGenerator(partition_size, block_size, fec_supported) return generator",False,info_dict.get('verity') == 'true' and info_dict.get('{}_verity_block_device'.format(partition_name)),info_dict.get('verity_fec'),0.654235303401947 1817,"def getHwmonPath(self): if: self.hwmonPath = utils.locateHwmonFolder(self.getSysfsPath()) return self.hwmonPath",False,self.hwmonPath is None,not self.hwmonPath,0.6548479795455933 1818,"def limited_labelset_from_iterable(self, iterable, sequence_input=False, n_most_common=None, min_count=1): """"""Produce label mapping from iterable based on label counts Used to limit label set size. Arguments --------- iterable : iterable Input sequence on which to operate. sequence_input : bool Whether iterable yields sequences of labels or individual labels directly. False by default. n_most_common : int, None Take at most this many labels as the label set, keeping the most common ones. If None (as by default), take all. min_count : int Don't take labels if they appear less than this many times. Returns ------- collections.Counter The counts of the different labels (unfiltered). """""" if: clsname = self.__class__.__name__ logger.info(f""Limited_labelset_from_iterable called, but {clsname} is not empty. The new labels will be added, i.e. won't overwrite. This is normal if there is e.g. an unk label already."") if sequence_input: label_iterator = itertools.chain.from_iterable(iterable) else: label_iterator = iter(iterable) counts = collections.Counter(label_iterator) for label, count in counts.most_common(n_most_common): if count < min_count: break self.add_label(label) return counts",False,self.lab2ind,not iterable,0.6482251882553101 1819,"def limited_labelset_from_iterable(self, iterable, sequence_input=False, n_most_common=None, min_count=1): """"""Produce label mapping from iterable based on label counts Used to limit label set size. Arguments --------- iterable : iterable Input sequence on which to operate. sequence_input : bool Whether iterable yields sequences of labels or individual labels directly. False by default. n_most_common : int, None Take at most this many labels as the label set, keeping the most common ones. If None (as by default), take all. min_count : int Don't take labels if they appear less than this many times. Returns ------- collections.Counter The counts of the different labels (unfiltered). """""" if self.lab2ind: clsname = self.__class__.__name__ logger.info(f""Limited_labelset_from_iterable called, but {clsname} is not empty. The new labels will be added, i.e. won't overwrite. This is normal if there is e.g. an unk label already."") if: label_iterator = itertools.chain.from_iterable(iterable) else: label_iterator = iter(iterable) counts = collections.Counter(label_iterator) for label, count in counts.most_common(n_most_common): if count < min_count: break self.add_label(label) return counts",True,sequence_input,sequence_input,0.6539607048034668 1820,"def limited_labelset_from_iterable(self, iterable, sequence_input=False, n_most_common=None, min_count=1): """"""Produce label mapping from iterable based on label counts Used to limit label set size. Arguments --------- iterable : iterable Input sequence on which to operate. sequence_input : bool Whether iterable yields sequences of labels or individual labels directly. False by default. n_most_common : int, None Take at most this many labels as the label set, keeping the most common ones. If None (as by default), take all. min_count : int Don't take labels if they appear less than this many times. Returns ------- collections.Counter The counts of the different labels (unfiltered). """""" if self.lab2ind: clsname = self.__class__.__name__ logger.info(f""Limited_labelset_from_iterable called, but {clsname} is not empty. The new labels will be added, i.e. won't overwrite. This is normal if there is e.g. an unk label already."") if sequence_input: label_iterator = itertools.chain.from_iterable(iterable) else: label_iterator = iter(iterable) counts = collections.Counter(label_iterator) for label, count in counts.most_common(n_most_common): if: break self.add_label(label) return counts",False,count < min_count,count >= min_count,0.6478661298751831 1821,"def options(self, context, module_options): """""" FILTER Apply the FILTER (grep-like) (default: '') PASSWORDPOLICY Is the windows password policy enabled? (default: False) MINLENGTH Minimum password length to match, only used if PASSWORDPOLICY is True (default: 6) """""" self.FILTER = '' self.MINLENGTH = '6' self.PASSWORDPOLICY = False if: self.FILTER = module_options['FILTER'] if 'MINLENGTH' in module_options: self.MINLENGTH = module_options['MINLENGTH'] if 'PASSWORDPOLICY' in module_options: self.PASSWORDPOLICY = True self.regex = re.compile('((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\\d)|(?=[^ ]*[a-z])(?=[^ ]*\\d)(?=[^ ]*[^\\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*\\d)(?=[^ ]*[^\\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\\w \n]))[^ \n]{' + self.MINLENGTH + ',}')",True,'FILTER' in module_options,'FILTER' in module_options,0.6558572053909302 1822,"def options(self, context, module_options): """""" FILTER Apply the FILTER (grep-like) (default: '') PASSWORDPOLICY Is the windows password policy enabled? (default: False) MINLENGTH Minimum password length to match, only used if PASSWORDPOLICY is True (default: 6) """""" self.FILTER = '' self.MINLENGTH = '6' self.PASSWORDPOLICY = False if 'FILTER' in module_options: self.FILTER = module_options['FILTER'] if: self.MINLENGTH = module_options['MINLENGTH'] if 'PASSWORDPOLICY' in module_options: self.PASSWORDPOLICY = True self.regex = re.compile('((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\\d)|(?=[^ ]*[a-z])(?=[^ ]*\\d)(?=[^ ]*[^\\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*\\d)(?=[^ ]*[^\\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\\w \n]))[^ \n]{' + self.MINLENGTH + ',}')",True,'MINLENGTH' in module_options,'MINLENGTH' in module_options,0.6541621685028076 1823,"def options(self, context, module_options): """""" FILTER Apply the FILTER (grep-like) (default: '') PASSWORDPOLICY Is the windows password policy enabled? (default: False) MINLENGTH Minimum password length to match, only used if PASSWORDPOLICY is True (default: 6) """""" self.FILTER = '' self.MINLENGTH = '6' self.PASSWORDPOLICY = False if 'FILTER' in module_options: self.FILTER = module_options['FILTER'] if 'MINLENGTH' in module_options: self.MINLENGTH = module_options['MINLENGTH'] if: self.PASSWORDPOLICY = True self.regex = re.compile('((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\\d)|(?=[^ ]*[a-z])(?=[^ ]*\\d)(?=[^ ]*[^\\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*\\d)(?=[^ ]*[^\\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\\w \n]))[^ \n]{' + self.MINLENGTH + ',}')",False,'PASSWORDPOLICY' in module_options,self.FILTER or self.PASSWORDPOLICY,0.6518275737762451 1824,"def render_markdown(self, text, context=github.GithubObject.NotSet): """""" :calls: `POST /markdown `_ :param text: string :param context: :class:`github.Repository.Repository` :rtype: string """""" assert isinstance(text, (str, six.text_type)), text assert context is github.GithubObject.NotSet or isinstance(context, github.Repository.Repository), context post_parameters = {'text': text} if: post_parameters['mode'] = 'gfm' post_parameters['context'] = context._identity status, headers, data = self.__requester.requestJson('POST', '/markdown', input=post_parameters) return data",False,context is not github.GithubObject.NotSet,context is not None,0.6484505534172058 1825,"def delete(self, *args, **kwargs): if: return try: return super(SafeFileCache, self).delete(*args, **kwargs) except (LockError, OSError, IOError): pass",True,self.directory is None,self.directory is None,0.6494120359420776 1826,"def _generate_qid(self): if: self._qid = randint(0, maxsize) return self._qid",True,self._qid == 0,self._qid == 0,0.6601612567901611 1827,"def log_error(self, msg: str, level: LogLevel=LogLevel.ERROR) -> None: if: stderr = self.stderr else: stderr = self._orig_stderr rich_console = self._make_rich_console(file=stderr, force_terminal=stderr.isatty()) rich_console.print(f'\n{self.program_name}: {level.value}: {msg}\n\n', style=LOG_LEVEL_COLORS[level], markup=False, highlight=False, soft_wrap=True)",False,self.stdout_isatty and self.quiet >= LOG_LEVEL_DISPLAY_THRESHOLDS[level],self.stderr,0.6472964286804199 1828,"def offset_for_traceback(code_edit, text=None): """""" Offset text using newlines to get proper line ref in tracebacks. """""" textCursor = code_edit.textCursor() if: text = textCursor.selection().toPlainText() selection_offset = textCursor.selectionStart() doc = code_edit.document() block_num = doc.findBlock(selection_offset).blockNumber() text = '\n' * block_num + text return text",True,text is None,text is None,0.6583353877067566 1829,"def createFields(self): yield String(self, 'type', 4, 'Signature (FORM)', charset='ASCII') yield filesizeHandler(UInt32(self,'size')) size = self['size'].value if: if self._parser: for field in self._parser(self): yield field if size % 2: yield NullBytes(self, 'padding', 1) else: yield RawBytes(self, 'data', size)",True,size,size,0.6651471853256226 1830,"def createFields(self): yield String(self, 'type', 4, 'Signature (FORM)', charset='ASCII') yield filesizeHandler(UInt32(self,'size')) size = self['size'].value if size: if: for field in self._parser(self): yield field if size % 2: yield NullBytes(self, 'padding', 1) else: yield RawBytes(self, 'data', size)",False,self._parser,0 < size,0.650661826133728 1831,"def createFields(self): yield String(self, 'type', 4, 'Signature (FORM)', charset='ASCII') yield filesizeHandler(UInt32(self,'size')) size = self['size'].value if size: if self._parser: for field in self._parser(self): yield field if: yield NullBytes(self, 'padding', 1) else: yield RawBytes(self, 'data', size)",False,size % 2,field.value == 0,0.6614453196525574 1832,"def parse_texture(self, data): matNum = len(self.matList) mat = NoeMaterial('material%d' % matNum, '') texture = rapi.loadTexByHandler(data, '.dds') if: texture.name = 'texture%d' % len(self.texList) self.texList.append(texture) mat.setTexture(texture.name) self.matList.append(mat)",True,texture,texture,0.6717849373817444 1833,"def _need_convert_kernel(original_backend): """"""Checks if conversion on kernel matrices is required during weight loading. The convolution operation is implemented differently in different backends. While TH implements convolution, TF and CNTK implement the correlation operation. So the channel axis needs to be flipped when TF weights are loaded on a TH model, or vice versa. However, there's no conversion required between TF and CNTK. # Arguments original_backend: Keras backend the weights were trained with, as a string. # Returns `True` if conversion on kernel matrices is required, otherwise `False`. """""" if: return False uses_correlation = {'tensorflow': True, 'theano': False, 'cntk': True} if original_backend not in uses_correlation: return False if K.backend() in uses_correlation: current_uses_correlation = uses_correlation[K.backend()] else: current_uses_correlation = True return uses_correlation[original_backend]!= current_uses_correlation",False,original_backend is None,not original_backend,0.6554367542266846 1834,"def _need_convert_kernel(original_backend): """"""Checks if conversion on kernel matrices is required during weight loading. The convolution operation is implemented differently in different backends. While TH implements convolution, TF and CNTK implement the correlation operation. So the channel axis needs to be flipped when TF weights are loaded on a TH model, or vice versa. However, there's no conversion required between TF and CNTK. # Arguments original_backend: Keras backend the weights were trained with, as a string. # Returns `True` if conversion on kernel matrices is required, otherwise `False`. """""" if original_backend is None: return False uses_correlation = {'tensorflow': True, 'theano': False, 'cntk': True} if: return False if K.backend() in uses_correlation: current_uses_correlation = uses_correlation[K.backend()] else: current_uses_correlation = True return uses_correlation[original_backend]!= current_uses_correlation",False,original_backend not in uses_correlation,not K.backend() in uses_correlation,0.64923095703125 1835,"def _need_convert_kernel(original_backend): """"""Checks if conversion on kernel matrices is required during weight loading. The convolution operation is implemented differently in different backends. While TH implements convolution, TF and CNTK implement the correlation operation. So the channel axis needs to be flipped when TF weights are loaded on a TH model, or vice versa. However, there's no conversion required between TF and CNTK. # Arguments original_backend: Keras backend the weights were trained with, as a string. # Returns `True` if conversion on kernel matrices is required, otherwise `False`. """""" if original_backend is None: return False uses_correlation = {'tensorflow': True, 'theano': False, 'cntk': True} if original_backend not in uses_correlation: return False if: current_uses_correlation = uses_correlation[K.backend()] else: current_uses_correlation = True return uses_correlation[original_backend]!= current_uses_correlation",False,K.backend() in uses_correlation,"isinstance(original_backend, KerasBackend)",0.6490319967269897 1836,"def update_list(self, lst): grammar = self._jsgf_grammar name = self._get_reference_name(lst.name) old_rule = grammar.get_rule_from_name(name) new_rule, unknown_words = self.engine.compiler.recompile_list(lst, grammar) if: grammar.remove_rule(old_rule, ignore_dependent=True) grammar.add_rule(new_rule) self.set_search = True if unknown_words: logger = logging.getLogger('engine.compiler') logger.warning(""List '%s' used words not found in the pronunciation dictionary: %s"", name, ', '.join(sorted(unknown_words)))",False,old_rule != new_rule,old_rule,0.6542712450027466 1837,"def update_list(self, lst): grammar = self._jsgf_grammar name = self._get_reference_name(lst.name) old_rule = grammar.get_rule_from_name(name) new_rule, unknown_words = self.engine.compiler.recompile_list(lst, grammar) if old_rule!= new_rule: grammar.remove_rule(old_rule, ignore_dependent=True) grammar.add_rule(new_rule) self.set_search = True if: logger = logging.getLogger('engine.compiler') logger.warning(""List '%s' used words not found in the pronunciation dictionary: %s"", name, ', '.join(sorted(unknown_words)))",True,unknown_words,unknown_words,0.6587705612182617 1838,"def __exit__(self, exc_type, exc_value, traceback): if: shutil.rmtree(self.dirpath)",False,os.path.exists(self.dirpath) and os.path.isdir(self.dirpath),self.dirpath,0.6426906585693359 1839,"def normalize(self, x): if: return x try: S = x.upper() except: raise ValueError('Must be boolean not %s' % ascii(s)) if S in ('YES', 'TRUE'): return True if S in ('NO', 'FALSE', None): return False raise ValueError('Must be boolean not %s' % ascii(s))",False,"x in (0, 1)",s == 'yes',0.6525754928588867 1840,"def normalize(self, x): if x in (0, 1): return x try: S = x.upper() except: raise ValueError('Must be boolean not %s' % ascii(s)) if: return True if S in ('NO', 'FALSE', None): return False raise ValueError('Must be boolean not %s' % ascii(s))",False,"S in ('YES', 'TRUE')","S in ('YES', 'TRUE', None)",0.6555142402648926 1841,"def normalize(self, x): if x in (0, 1): return x try: S = x.upper() except: raise ValueError('Must be boolean not %s' % ascii(s)) if S in ('YES', 'TRUE'): return True if: return False raise ValueError('Must be boolean not %s' % ascii(s))",False,"S in ('NO', 'FALSE', None)","S in ('NO', 'FALSE')",0.6461457014083862 1842,"@property def log(self): """""" class property can be used to override the class global logging setting Returns: bool: True if logging is enable False otherwise """""" if: return self.__log return ValidateCommand.classLog()",True,self.__log is not None,self.__log is not None,0.6516487002372742 1843,"def import_equipment_from_file(self, filename='') -> None: """""" Import an equipment entry from a cif file. """""" if: filename = str(filename) if not filename: filename = cif_file_open_dialog(filter='CIF file (*.cif *.cif_od *.cfx)') if not filename: print('No file given') return doc = read_document_from_cif_file(filename) if not doc: return for block in doc: self._import_block(block, filename) self.show_equipment()",False,"isinstance(filename, Path)","filename and (not isinstance(filename, str))",0.6478709578514099 1844,"def import_equipment_from_file(self, filename='') -> None: """""" Import an equipment entry from a cif file. """""" if isinstance(filename, Path): filename = str(filename) if: filename = cif_file_open_dialog(filter='CIF file (*.cif *.cif_od *.cfx)') if not filename: print('No file given') return doc = read_document_from_cif_file(filename) if not doc: return for block in doc: self._import_block(block, filename) self.show_equipment()",False,not filename,filename is None,0.6591411828994751 1845,"def import_equipment_from_file(self, filename='') -> None: """""" Import an equipment entry from a cif file. """""" if isinstance(filename, Path): filename = str(filename) if not filename: filename = cif_file_open_dialog(filter='CIF file (*.cif *.cif_od *.cfx)') if: print('No file given') return doc = read_document_from_cif_file(filename) if not doc: return for block in doc: self._import_block(block, filename) self.show_equipment()",True,not filename,not filename,0.6583101153373718 1846,"def import_equipment_from_file(self, filename='') -> None: """""" Import an equipment entry from a cif file. """""" if isinstance(filename, Path): filename = str(filename) if not filename: filename = cif_file_open_dialog(filter='CIF file (*.cif *.cif_od *.cfx)') if not filename: print('No file given') return doc = read_document_from_cif_file(filename) if: return for block in doc: self._import_block(block, filename) self.show_equipment()",False,not doc,doc is None,0.6582268476486206 1847,"def reset_parameters(self): self.weight_linear.reset_parameters() if: nn.init.constant_(self.conv_bias, 0.0)",True,self.conv_bias is not None,self.conv_bias is not None,0.6473831534385681 1848,"def testparse(s): from time import time from pprint import pprint now = time() D = parsexmlSimple(s) print('DONE', time() - now) if: pprint(D) if dump & 1: print('============== reformatting') p = pprettyprint(D) print(p)",False,dump & 4,dump & 1,0.665597677230835 1849,"def testparse(s): from time import time from pprint import pprint now = time() D = parsexmlSimple(s) print('DONE', time() - now) if dump & 4: pprint(D) if: print('============== reformatting') p = pprettyprint(D) print(p)",False,dump & 1,dump & 8,0.6660571694374084 1850,"def __repr__(self): if: klass = self.__class__.__name__ return f'<{klass} {self._type} {self.ttl}, {self.decoded_fqdn}, {self.values}, {self.geo}>' return super().__repr__()",False,self.geo,self._type is None or self._type is None,0.6804659962654114 1851,"def _command_options(self): """"""Return the options dict for the aggregation command."""""" options = {} if: options['maxAwaitTimeMS'] = self._max_await_time_ms if self._batch_size is not None: options['batchSize'] = self._batch_size return options",True,self._max_await_time_ms is not None,self._max_await_time_ms is not None,0.6521168947219849 1852,"def _command_options(self): """"""Return the options dict for the aggregation command."""""" options = {} if self._max_await_time_ms is not None: options['maxAwaitTimeMS'] = self._max_await_time_ms if: options['batchSize'] = self._batch_size return options",True,self._batch_size is not None,self._batch_size is not None,0.6535682678222656 1853,"def fromDict(self, data): if: raise ValueError(f'Expected reload cause version to be {self.VERSION}') self.causesFromDict(data['platform']) for slotIdStr, sources in data['slots'].items(): slotId = int(slotIdStr) if slotId not in self.slots: raise ValueError(f'Unexpected slotId {slotId} in cookie data') self.slots[slotId].causesFromDict(sources)",False,data['version'] != self.VERSION,data['cause_version'] != self.VERSION,0.6532096862792969 1854,"def fromDict(self, data): if data['version']!= self.VERSION: raise ValueError(f'Expected reload cause version to be {self.VERSION}') self.causesFromDict(data['platform']) for slotIdStr, sources in data['slots'].items(): slotId = int(slotIdStr) if: raise ValueError(f'Unexpected slotId {slotId} in cookie data') self.slots[slotId].causesFromDict(sources)",True,slotId not in self.slots,slotId not in self.slots,0.6545925140380859 1855,"def visit_paragraph(self, node): self.in_paragraph = True if: el = self.append_p('header') elif self.in_footer: el = self.append_p('footer') else: style_name = self.paragraph_style_stack[-1] el = self.append_child('text:p', attrib={'text:style-name': style_name}) self.append_pending_ids(el) self.set_current_element(el)",True,self.in_header,self.in_header,0.6539841890335083 1856,"def visit_paragraph(self, node): self.in_paragraph = True if self.in_header: el = self.append_p('header') elif: el = self.append_p('footer') else: style_name = self.paragraph_style_stack[-1] el = self.append_child('text:p', attrib={'text:style-name': style_name}) self.append_pending_ids(el) self.set_current_element(el)",True,self.in_footer,self.in_footer,0.6587045192718506 1857,"def _get_values(self, value): """""" Helper to yield items from the parent iterator that match *value*. Items that don't match are stored in the local cache as they are encountered. """""" while True: if: yield self._cache[value].popleft() else: while True: try: item = next(self._it) except StopIteration: return item_value = self._key(item) if item_value == value: yield item break elif self._validator(item_value): self._cache[item_value].append(item)",False,self._cache[value],self._validator(value),0.6471407413482666 1858,"def _get_values(self, value): """""" Helper to yield items from the parent iterator that match *value*. Items that don't match are stored in the local cache as they are encountered. """""" while True: if self._cache[value]: yield self._cache[value].popleft() else: while True: try: item = next(self._it) except StopIteration: return item_value = self._key(item) if: yield item break elif self._validator(item_value): self._cache[item_value].append(item)",False,item_value == value,item_value in self._cache,0.6531224250793457 1859,"def _get_values(self, value): """""" Helper to yield items from the parent iterator that match *value*. Items that don't match are stored in the local cache as they are encountered. """""" while True: if self._cache[value]: yield self._cache[value].popleft() else: while True: try: item = next(self._it) except StopIteration: return item_value = self._key(item) if item_value == value: yield item break elif: self._cache[item_value].append(item)",False,self._validator(item_value),item_value in self._cache,0.6448462605476379 1860,"def get_rule(self, model: Union[int, str, PermissionModel], guild_id: int) -> PermState: """"""Get the rule for a particular model. Parameters ---------- model : Union[int, str, PermissionModel] The model to get the rule for. `str` is only valid for `Requires.DEFAULT`. guild_id : int The ID of the guild for the rule's scope. Set to `Requires.GLOBAL` for a global rule. If a global rule is set for a model, it will be preferred over the guild rule. Returns ------- PermState The state for this rule. See the `PermState` class for an explanation. """""" if: model = model.id rules: Mapping[Union[int, str], PermState] if guild_id: rules = ChainMap(self._global_rules, self._guild_rules.get(guild_id, _RulesDict())) else: rules = self._global_rules return rules.get(model, PermState.NORMAL)",False,"not isinstance(model, (str, int))","isinstance(model, int)",0.6495798826217651 1861,"def get_rule(self, model: Union[int, str, PermissionModel], guild_id: int) -> PermState: """"""Get the rule for a particular model. Parameters ---------- model : Union[int, str, PermissionModel] The model to get the rule for. `str` is only valid for `Requires.DEFAULT`. guild_id : int The ID of the guild for the rule's scope. Set to `Requires.GLOBAL` for a global rule. If a global rule is set for a model, it will be preferred over the guild rule. Returns ------- PermState The state for this rule. See the `PermState` class for an explanation. """""" if not isinstance(model, (str, int)): model = model.id rules: Mapping[Union[int, str], PermState] if: rules = ChainMap(self._global_rules, self._guild_rules.get(guild_id, _RulesDict())) else: rules = self._global_rules return rules.get(model, PermState.NORMAL)",True,guild_id,guild_id,0.658868670463562 1862,"def __repr__(self): format_string = self.__class__.__name__ + '(' for t in self.transforms: str_ = t.__repr__() if: str_ = str_.replace('\n', '\n ') format_string += '\n' format_string += f' {str_}' format_string += '\n)' return format_string",False,'Compose(' in str_,str_ is not None,0.6488096714019775 1863,"def rl_listdir(pn, os_path_isdir=os.path.isdir, os_path_normpath=os.path.normpath, os_listdir=os.listdir): if: return os_listdir(pn) pn = _startswith_rl(os_path_normpath(pn)) if not pn.endswith(os.sep): pn += os.sep return [x[len(pn):] for x in __loader__._files.keys() if x.startswith(pn)]",False,os_path_isdir(pn) or _isFSD or __loader__ is None,os_path_isdir(pn),0.6460393667221069 1864,"def rl_listdir(pn, os_path_isdir=os.path.isdir, os_path_normpath=os.path.normpath, os_listdir=os.listdir): if os_path_isdir(pn) or _isFSD or __loader__ is None: return os_listdir(pn) pn = _startswith_rl(os_path_normpath(pn)) if: pn += os.sep return [x[len(pn):] for x in __loader__._files.keys() if x.startswith(pn)]",False,not pn.endswith(os.sep),os_path_normpath(pn) != os.path.sep,0.6459495425224304 1865,"def _tokenize(self, text): if: tokens = self.word_tokenizer.tokenize(text, never_split=self.all_special_tokens) else: tokens = [text] if self.do_subword_tokenize: split_tokens = [sub_token for token in tokens for sub_token in self.subword_tokenizer.tokenize(token)] else: split_tokens = tokens return split_tokens",False,self.do_word_tokenize,self.do_special_tokenize,0.6462352275848389 1866,"def _tokenize(self, text): if self.do_word_tokenize: tokens = self.word_tokenizer.tokenize(text, never_split=self.all_special_tokens) else: tokens = [text] if: split_tokens = [sub_token for token in tokens for sub_token in self.subword_tokenizer.tokenize(token)] else: split_tokens = tokens return split_tokens",True,self.do_subword_tokenize,self.do_subword_tokenize,0.6452476978302002 1867,"def __init__(self, environ=None, ini_parser=None): if: environ = os.environ if ini_parser is None: ini_parser = botocore.configloader.raw_config_parse self._environ = environ self._ini_parser = ini_parser",True,environ is None,environ is None,0.6656390428543091 1868,"def __init__(self, environ=None, ini_parser=None): if environ is None: environ = os.environ if: ini_parser = botocore.configloader.raw_config_parse self._environ = environ self._ini_parser = ini_parser",True,ini_parser is None,ini_parser is None,0.6563810706138611 1869,"def get_moving_data(self, stopped_speed_threshold=None): """""" Return a tuple of (moving_time, stopped_time, moving_distance, stopped_distance, max_speed) that may be used for detecting the time stopped, and max speed. Not that those values are not absolutely true, because the ""stopped"" or ""moving"" information aren't saved in the track. Because of errors in the GPS recording, it may be good to calculate them on a reduced and smoothed version of the track. Something like this: cloned_gpx = gpx.clone() cloned_gpx.reduce_points(2000, min_distance=10) cloned_gpx.smooth(vertical=True, horizontal=True) cloned_gpx.smooth(vertical=True, horizontal=False) moving_time, stopped_time, moving_distance, stopped_distance, max_speed_ms = cloned_gpx.get_moving_data max_speed_kmh = max_speed_ms * 60. ** 2 / 1000. Experiment with your own variations to get the values you expect. Max speed is in m/s. """""" moving_time = 0.0 stopped_time = 0.0 moving_distance = 0.0 stopped_distance = 0.0 max_speed = 0.0 for track in self.tracks: track_moving_time, track_stopped_time, track_moving_distance, track_stopped_distance, track_max_speed = track.get_moving_data(stopped_speed_threshold) moving_time += track_moving_time stopped_time += track_stopped_time moving_distance += track_moving_distance stopped_distance += track_stopped_distance if: max_speed = track_max_speed return MovingData(moving_time, stopped_time, moving_distance, stopped_distance, max_speed)",False,track_max_speed > max_speed,track_max_speed is not None,0.6459270715713501 1870,"def _cut_to_align(self, x, y): """""" Cut the boarder by 1 frame(temporal)/row(height)/col(width) such that dimensions of x and y could be the same. """""" output_x = x for i in range(2, 5): dim_x = x.shape[i] dim_y = y.shape[i] if: assert dim_x == dim_y + 1, ('Only deal with the odd width inverse case of deconv. ', f'Got dim_x: {dim_x}, dim_y: {dim_y}') output_x = output_x.narrow(i, 0, dim_y) return output_x",False,dim_x != dim_y,dim_x != dim_y + 1,0.6461068987846375 1871,"def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> bool | None: if: self.log('parsing failed ({}): {}: {}'.format('skipped' if self._skipfailed else 'fatal', exc_type.__name__, exc_value), severity=Logger.ERROR) if self._skipfailed: return True return None",False,exc_type,exc_type and exc_value is not None,0.6615201830863953 1872,"def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> bool | None: if exc_type: self.log('parsing failed ({}): {}: {}'.format('skipped' if self._skipfailed else 'fatal', exc_type.__name__, exc_value), severity=Logger.ERROR) if: return True return None",False,self._skipfailed,self._skipfailed and exc_value is not None,0.6532351970672607 1873,"def infer_dataset_impl(path): if: return 'raw' elif IndexedDataset.exists(path): with open(index_file_path(path), 'rb') as f: magic = f.read(8) if magic == IndexedDataset._HDR_MAGIC: return 'cached' elif magic == MMapIndexedDataset.Index._HDR_MAGIC[:8]: return'mmap' else: return None elif FastaDataset.exists(path): return 'fasta' else: return None",False,IndexedRawTextDataset.exists(path),Path.exists(path),0.6450929641723633 1874,"def infer_dataset_impl(path): if IndexedRawTextDataset.exists(path): return 'raw' elif: with open(index_file_path(path), 'rb') as f: magic = f.read(8) if magic == IndexedDataset._HDR_MAGIC: return 'cached' elif magic == MMapIndexedDataset.Index._HDR_MAGIC[:8]: return'mmap' else: return None elif FastaDataset.exists(path): return 'fasta' else: return None",False,IndexedDataset.exists(path),MMapIndexedDataset.exists(path),0.6459790468215942 1875,"def infer_dataset_impl(path): if IndexedRawTextDataset.exists(path): return 'raw' elif IndexedDataset.exists(path): with open(index_file_path(path), 'rb') as f: magic = f.read(8) if magic == IndexedDataset._HDR_MAGIC: return 'cached' elif magic == MMapIndexedDataset.Index._HDR_MAGIC[:8]: return'mmap' else: return None elif: return 'fasta' else: return None",False,FastaDataset.exists(path),TextDataset.exists(path),0.6442029476165771 1876,"def infer_dataset_impl(path): if IndexedRawTextDataset.exists(path): return 'raw' elif IndexedDataset.exists(path): with open(index_file_path(path), 'rb') as f: magic = f.read(8) if: return 'cached' elif magic == MMapIndexedDataset.Index._HDR_MAGIC[:8]: return'mmap' else: return None elif FastaDataset.exists(path): return 'fasta' else: return None",False,magic == IndexedDataset._HDR_MAGIC,magic == MMapIndexedDataset._HDR_MAGIC,0.6435507535934448 1877,"def infer_dataset_impl(path): if IndexedRawTextDataset.exists(path): return 'raw' elif IndexedDataset.exists(path): with open(index_file_path(path), 'rb') as f: magic = f.read(8) if magic == IndexedDataset._HDR_MAGIC: return 'cached' elif: return'mmap' else: return None elif FastaDataset.exists(path): return 'fasta' else: return None",False,magic == MMapIndexedDataset.Index._HDR_MAGIC[:8],magic == MMapIndexedDataset.exists(path),0.6444593667984009 1878,"def forward(self, x): if: return self.factor * x + self.bias else: return self.factor * x",False,"hasattr(self, 'bias')",self.bias is not None,0.6415423154830933 1879,"def add_node(self, name=None, op_type=None, size=0, read_only=False): if: name = str(len(self.nodes) + 1) assert type(name) is str assert name not in self.nodes self.nodes[name] = Node(name, op_type, size, read_only) return self.nodes[name]",True,name is None,name is None,0.6594345569610596 1880,"def convert_size(size_bytes): if: return '0B' size_name = ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB') i = int(math.floor(math.log(size_bytes, 1024))) p = math.pow(1024, i) s = round(size_bytes / p, 2) return (s, size_name[i])",True,size_bytes == 0,size_bytes == 0,0.6619656682014465 1881,"def corrupt(x, severity=1, corruption_name=None, corruption_number=None): """""" :param x: image to corrupt; a 224x224x3 numpy array in [0, 255] :param severity: strength with which to corrupt x; an integer in [0, 5] :param corruption_name: specifies which corruption function to call; must be one of 'gaussian_noise','shot_noise', 'impulse_noise', 'defocus_blur', 'glass_blur','motion_blur', 'zoom_blur','snow', 'frost', 'fog', 'brightness', 'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur','spatter','saturate'; the last four are validation functions :param corruption_number: the position of the corruption_name in the above list; an integer in [0, 18]; useful for easy looping; 15, 16, 17, 18 are validation corruption numbers :return: the image x corrupted by a corruption function at the given severity; same shape as input """""" if: x_corrupted = corruption_dict[corruption_name](Image.fromarray(x), severity) elif corruption_number is not None: x_corrupted = corruption_tuple[corruption_number](Image.fromarray(x), severity) else: raise ValueError('Either corruption_name or corruption_number must be passed') if x_corrupted.shape!= x.shape: raise AssertionError('Output image not same size as input image!') return np.uint8(x_corrupted)",True,corruption_name is not None,corruption_name is not None,0.6536062955856323 1882,"def corrupt(x, severity=1, corruption_name=None, corruption_number=None): """""" :param x: image to corrupt; a 224x224x3 numpy array in [0, 255] :param severity: strength with which to corrupt x; an integer in [0, 5] :param corruption_name: specifies which corruption function to call; must be one of 'gaussian_noise','shot_noise', 'impulse_noise', 'defocus_blur', 'glass_blur','motion_blur', 'zoom_blur','snow', 'frost', 'fog', 'brightness', 'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur','spatter','saturate'; the last four are validation functions :param corruption_number: the position of the corruption_name in the above list; an integer in [0, 18]; useful for easy looping; 15, 16, 17, 18 are validation corruption numbers :return: the image x corrupted by a corruption function at the given severity; same shape as input """""" if corruption_name is not None: x_corrupted = corruption_dict[corruption_name](Image.fromarray(x), severity) elif corruption_number is not None: x_corrupted = corruption_tuple[corruption_number](Image.fromarray(x), severity) else: raise ValueError('Either corruption_name or corruption_number must be passed') if: raise AssertionError('Output image not same size as input image!') return np.uint8(x_corrupted)",False,x_corrupted.shape != x.shape,x_corrupted.shape[0] != x.shape[1],0.6484013795852661 1883,"def corrupt(x, severity=1, corruption_name=None, corruption_number=None): """""" :param x: image to corrupt; a 224x224x3 numpy array in [0, 255] :param severity: strength with which to corrupt x; an integer in [0, 5] :param corruption_name: specifies which corruption function to call; must be one of 'gaussian_noise','shot_noise', 'impulse_noise', 'defocus_blur', 'glass_blur','motion_blur', 'zoom_blur','snow', 'frost', 'fog', 'brightness', 'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur','spatter','saturate'; the last four are validation functions :param corruption_number: the position of the corruption_name in the above list; an integer in [0, 18]; useful for easy looping; 15, 16, 17, 18 are validation corruption numbers :return: the image x corrupted by a corruption function at the given severity; same shape as input """""" if corruption_name is not None: x_corrupted = corruption_dict[corruption_name](Image.fromarray(x), severity) elif: x_corrupted = corruption_tuple[corruption_number](Image.fromarray(x), severity) else: raise ValueError('Either corruption_name or corruption_number must be passed') if x_corrupted.shape!= x.shape: raise AssertionError('Output image not same size as input image!') return np.uint8(x_corrupted)",True,corruption_number is not None,corruption_number is not None,0.6535025835037231 1884,"def discount(self, order: Order) -> Decimal: discount = Decimal(0) for item in order.cart: if: discount += item.total() * Decimal('0.1') return discount",True,item.quantity >= 20,item.quantity >= 20,0.6521403789520264 1885,"def update_with_timer(self, timer: PerfTimer): self._host_stats[timer.name].update(timer._last_interval) if: if len(self._cuda_pending_timers) >= self._cuda_pending_timers.maxlen: logging.error('Too many pending timers. CudaEvent-based stats will be inaccurate!') else: self._cuda_pending_timers.append(timer) self._process_cuda_events()",False,self.use_cuda_events(),self._enable_cuda_events,0.6473768353462219 1886,"def update_with_timer(self, timer: PerfTimer): self._host_stats[timer.name].update(timer._last_interval) if self.use_cuda_events(): if: logging.error('Too many pending timers. CudaEvent-based stats will be inaccurate!') else: self._cuda_pending_timers.append(timer) self._process_cuda_events()",False,len(self._cuda_pending_timers) >= self._cuda_pending_timers.maxlen,len(self._cuda_pending_timers) > 1,0.6463888883590698 1887,"def clear_monitor_files(training_dir): files = detect_monitor_files(training_dir) if: return for file in files: os.unlink(file)",True,len(files) == 0,len(files) == 0,0.6483808755874634 1888,"def json(self, **kwargs): """"""Returns the json-encoded content of a response, if any. :param \\*\\*kwargs: Optional arguments that ``json.loads`` takes. """""" if: encoding = guess_json_utf(self.content) if encoding is not None: try: return complexjson.loads(self.content.decode(encoding), **kwargs) except UnicodeDecodeError: pass return complexjson.loads(self.text, **kwargs)",True,not self.encoding and len(self.content) > 3,not self.encoding and len(self.content) > 3,0.6436817049980164 1889,"def json(self, **kwargs): """"""Returns the json-encoded content of a response, if any. :param \\*\\*kwargs: Optional arguments that ``json.loads`` takes. """""" if not self.encoding and len(self.content) > 3: encoding = guess_json_utf(self.content) if: try: return complexjson.loads(self.content.decode(encoding), **kwargs) except UnicodeDecodeError: pass return complexjson.loads(self.text, **kwargs)",True,encoding is not None,encoding is not None,0.6489678621292114 1890,"def encodeValue(self, encodeFun, value, defMode, maxChunkSize): value.setDefaultComponents() value.verifySizeSpec() substrate = null idx = len(value) while idx > 0: idx = idx - 1 if: continue component = value.getDefaultComponentByPosition(idx) if component is not None and component == value[idx]: continue substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate return (substrate, 1)",True,value[idx] is None,value[idx] is None,0.6476484537124634 1891,"def encodeValue(self, encodeFun, value, defMode, maxChunkSize): value.setDefaultComponents() value.verifySizeSpec() substrate = null idx = len(value) while idx > 0: idx = idx - 1 if value[idx] is None: continue component = value.getDefaultComponentByPosition(idx) if: continue substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate return (substrate, 1)",True,component is not None and component == value[idx],component is not None and component == value[idx],0.6453421115875244 1892,"def close(self): if: self.viewer.close() self.viewer = None",True,self.viewer is not None,self.viewer is not None,0.6473745107650757 1893,"def build_backbone(cfg, input_shape=None): """""" Build a backbone from `cfg.MODEL.BACKBONE.NAME`. Returns: an instance of :class:`Backbone` """""" if: input_shape = ShapeSpec(channels=len(cfg.MODEL.PIXEL_MEAN)) backbone_name = cfg.MODEL.BACKBONE.NAME backbone = BACKBONE_REGISTRY.get(backbone_name)(cfg, input_shape) assert isinstance(backbone, Backbone) return backbone",True,input_shape is None,input_shape is None,0.6533975601196289 1894,"@property def stored(self) -> bool: """"""CSV Line Repeating is stored."""""" for line in self._lines: if: return False return True",True,not line.stored,not line.stored,0.6512632369995117 1895,"def get_symmetry(self): """""" Gets the symmetry of this fragment Args: None Returns: the symmetry of this fragment in A1B2 form """""" if: return '' symmetry = self.get_atoms()[0].get_symmetry_class() symmetric_atom_count = 1 for atom in self.get_atoms()[1:]: if atom.get_symmetry_class()!= symmetry[-1]: symmetry += str(symmetric_atom_count) + atom.get_symmetry_class() symmetric_atom_count = 1 else: symmetric_atom_count += 1 symmetry += str(symmetric_atom_count) return symmetry",True,len(self.get_atoms()) == 0,len(self.get_atoms()) == 0,0.6440231800079346 1896,"def get_symmetry(self): """""" Gets the symmetry of this fragment Args: None Returns: the symmetry of this fragment in A1B2 form """""" if len(self.get_atoms()) == 0: return '' symmetry = self.get_atoms()[0].get_symmetry_class() symmetric_atom_count = 1 for atom in self.get_atoms()[1:]: if: symmetry += str(symmetric_atom_count) + atom.get_symmetry_class() symmetric_atom_count = 1 else: symmetric_atom_count += 1 symmetry += str(symmetric_atom_count) return symmetry",False,atom.get_symmetry_class() != symmetry[-1],atom.get_symmetry_class(),0.6443384885787964 1897,"def serialize_tail(self): if: return self.data.serialize() else: return self.data",False,"isinstance(self.data, OpaqueBody)","hasattr(self.data, 'serialize')",0.6450943946838379 1898,"def attach(self, attachable): self._attached.append(attachable) if: attachable.bind(self._conn) return attachable",False,self._conn is not None,"isinstance(attachable, ConnectError)",0.6537263989448547 1899,"def get_candidate_batch_sizes(base_list, max_acceptable_batch_size): candidate_batch_size = [] for base in base_list: if: candidate_batch_size.append(base) else: value = max_acceptable_batch_size // base index = np.argmax(np.asarray(HCN_LIST) > value) candidate_batch_size.append(HCN_LIST[index - 1] * base) candidate_batch_size = list(set(candidate_batch_size)) logger.info(f'Candidate batch size: {candidate_batch_size}') return candidate_batch_size",False,base >= max_acceptable_batch_size,max_acceptable_batch_size <= base,0.6467748284339905 1900,"def issubset(self, iterable): other = type(self)(iterable) if: return False for m in itertools.ifilterfalse(other._members.__contains__, self._members.iterkeys()): return False return True",False,len(self) > len(other),"not isinstance(other, type(self))",0.6433298587799072 1901,"@classmethod def _format_data(cls, parent_id, data, **kwargs): t = ctx_translator.get().t if: return super()._format_data(parent_id, data, **kwargs) else: return t(_p('guildset:alert_channel|formatted:unset', 'Not Set (Only alert via direct message.)'))",True,data is not None,data is not None,0.6519657373428345 1902,"def metricsWithStats(self, metrics=None): """""" Return metrics that have any summary stat. """""" if: metrics = self.metrics metrics = metrics[np.in1d(metrics['metricId'], self.stats['metricId'])] metrics = self.sortMetrics(metrics, order=['displayGroup', 'displaySubgroup','slicerName', 'displayOrder','metricMetadata', 'baseMetricNames']) return metrics",True,metrics is None,metrics is None,0.6641428470611572 1903,"def _is_dev_mode(): if: return False return True",False,os.environ.has_key('SERVER_SOFTWARE') or os.environ.has_key('PHP_FCGI_CHILDREN') or 'fcgi' in sys.argv or ('fastcgi' in sys.argv),"not _dev_mode(os.environ, 'DEV')",0.6485877633094788 1904,"def write(self): if: self.project.data_files.write_data('objectdb', self._files, self.compress)",False,self.persist,self.project.data_files is not None,0.6630682349205017 1905,"def _run_new_command(self, kwargs): new_name = self.new_name() print('Running {} instead of {}'.format(new_name, self.name())) if: self.view.run_command(new_name, kwargs) elif isinstance(self, sublime_plugin.WindowCommand) and self.window: self.window.run_command(new_name, kwargs) else: window = sublime.active_window() if window: window.run_command(new_name, kwargs)",False,"isinstance(self, sublime_plugin.TextCommand) and self.view","isinstance(self, sublime_plugin.ViewCommand)",0.6450908780097961 1906,"def _run_new_command(self, kwargs): new_name = self.new_name() print('Running {} instead of {}'.format(new_name, self.name())) if isinstance(self, sublime_plugin.TextCommand) and self.view: self.view.run_command(new_name, kwargs) elif: self.window.run_command(new_name, kwargs) else: window = sublime.active_window() if window: window.run_command(new_name, kwargs)",False,"isinstance(self, sublime_plugin.WindowCommand) and self.window",self.window,0.6455110311508179 1907,"def _run_new_command(self, kwargs): new_name = self.new_name() print('Running {} instead of {}'.format(new_name, self.name())) if isinstance(self, sublime_plugin.TextCommand) and self.view: self.view.run_command(new_name, kwargs) elif isinstance(self, sublime_plugin.WindowCommand) and self.window: self.window.run_command(new_name, kwargs) else: window = sublime.active_window() if: window.run_command(new_name, kwargs)",True,window,window,0.6786456108093262 1908,"@private def status_impl(self, vm): if: try: return self._status(vm['name']) except Exception: self.logger.debug('Failed to retrieve VM status for %r', vm['name'], exc_info=True) return {'state': 'ERROR', 'pid': None, 'domain_state': 'ERROR'}",False,self._has_domain(vm['name']),vm['state'] == 'ERROR',0.6471676826477051 1909,"def unindex_venue_ids(self, venue_ids: Iterable[int]) -> None: if: return self.algolia_venues_client.delete_objects(venue_ids)",True,not venue_ids,not venue_ids,0.6573631763458252 1910,"def _maybe_wakeup_one_waiter_unlocked(self): if: self._write_event = self._write_waiters.popleft() self._write_event.set()",True,len(self._write_waiters) > 0,len(self._write_waiters) > 0,0.6504560708999634 1911,"def generateItems(self, vcr=None): filler = self.prepareFirstPhase() isStuck, itemLocations, progItemLocs = filler.generateItems(vcr=vcr) if: self.errorMsg = filler.errorMsg return (isStuck, itemLocations, progItemLocs) filler = self.prepareSecondPhase(filler.container, progItemLocs) isStuck, itemLocations, secondProg = filler.generateItems(vcr=vcr) self.errorMsg = filler.errorMsg return (isStuck, itemLocations, progItemLocs)",False,isStuck,self.isFirstPhase(),0.6551458835601807 1912,"def killConnection(self): if: self.session.socket.close() self.session = None",True,self.session is not None,self.session is not None,0.6463941335678101 1913,"def __str__(self): if: return self.name if self.strRepr is None: self.strRepr = '{' +'| '.join((_ustr(e) for e in self.exprs)) + '}' return self.strRepr",True,"hasattr(self, 'name')","hasattr(self, 'name')",0.6502093076705933 1914,"def __str__(self): if hasattr(self, 'name'): return self.name if: self.strRepr = '{' +'| '.join((_ustr(e) for e in self.exprs)) + '}' return self.strRepr",True,self.strRepr is None,self.strRepr is None,0.64943927526474 1915,"def parse_body(body: bytes) -> None: res_json = parse_json(body) if: raise TiebaServerError(code, res_json['errmsg'])",False,code := res_json['errno'],code := int(res_json['error_code']),0.6467807292938232 1916,"def condition(field_name, field_val): if: return {field_name: {'$in': list(field_val)}} else: return {field_name: field_val}",False,"isinstance(field_val, (list, tuple))","isinstance(field_val, list)",0.6440911293029785 1917,"def is_beginning_of_word(i): if: return True tok = self.source_dictionary[i] if tok.startswith('madeupword'): return True try: return bpe.is_beginning_of_word(tok) except ValueError: return True",False,i < self.source_dictionary.nspecial,i == self.source_dictionary.get('i'),0.6480240225791931 1918,"def is_beginning_of_word(i): if i < self.source_dictionary.nspecial: return True tok = self.source_dictionary[i] if: return True try: return bpe.is_beginning_of_word(tok) except ValueError: return True",False,tok.startswith('madeupword'),tok.is_beginning_of_word(),0.6445255279541016 1919,"def _repository_hooks(repo_config: dict[str, Any], store: Store, root_config: dict[str, Any]) -> tuple[Hook,...]: if: return _non_cloned_repository_hooks(repo_config, store, root_config) else: return _cloned_repository_hooks(repo_config, store, root_config)",False,"repo_config['repo'] in {LOCAL, META}",config['use_non_cloned_repository'],0.6472222208976746 1920,"def _find_root_path(self, extracted_files_dir: Path) -> Path: root_path = extracted_files_dir if: root_path /= self.FACT_EXTRACTION_FOLDER_NAME return root_path",False,root_path / self.FACT_EXTRACTION_FOLDER_NAME).is_dir(,self.FACT_EXTRACTION_FOLDER_NAME and root_path.exists(),0.6506087779998779 1921,"@property def operation_mode(self): """"""Return current device operation mode."""""" key = self._get_state_key(STATE_OPERATION_MODE) if: return None try: return DHumMode(value).name except ValueError: return None",False,"(value := self.lookup_enum(key, True)) is None",(value := self.lookup_enum(key)) is None,0.6468750238418579 1922,"def remove_empty(self): """""" Removes segments, routes """""" routes = [] for route in self.routes: if: routes.append(route) self.routes = routes for track in self.tracks: track.remove_empty()",False,len(route.points) > 0,route.startswith('/'),0.6489380598068237 1923,"def bfs(num): visit = [False] * n q = deque() q.append(num) visit[num] = True while q: x = q.popleft() print(x + 1, end=' ') for i in range(n): if: q.append(i) visit[i] = True",False,board[x][i] == 1 and visit[i] == False,visit[i],0.6465323567390442 1924,"def enterRule(self, listener: ParseTreeListener): if: listener.enterSuperSuffix0(self)",True,"hasattr(listener, 'enterSuperSuffix0')","hasattr(listener, 'enterSuperSuffix0')",0.6431317925453186 1925,"def dla46x_c(pretrained=None, **kwargs): BottleneckX.expansion = 2 model = DLA([1, 1, 1, 2, 2, 1], [16, 32, 64, 64, 128, 256], block=BottleneckX, **kwargs) if: model.load_pretrained_model(data='imagenet', name='dla46x_c', hash='d761bae7') return model",True,pretrained is not None,pretrained is not None,0.6534192562103271 1926,"@property def drop_func(self): drop_func = self._config.getstr(self, 'drop_func') if: return getattr(embeddings, drop_func) else: raise AttributeError(""module '{}' has no attribute '{}'"".format(embeddings.__name__, drop_func))",True,"hasattr(embeddings, drop_func)","hasattr(embeddings, drop_func)",0.6474270224571228 1927,"def find_module(self, fullname, path=None): if: return self return None",True,fullname in self.known_modules,fullname in self.known_modules,0.6482236981391907 1928,"@tile_filters.setter def tile_filters(self, value): if: self._root['settings'].pop('tile_filters', None) else: try: value = signatures.FILTERS.validate(value) self._root['settings']['tile_filters'] = value except SchemaError as e: six.raise_from(DataFormatError, e)",True,value is None,value is None,0.6519174575805664 1929,"def _build_get_logger_date_and_time_command(self, **kwargs): cmd_name = kwargs.get('command', None) if: raise InstrumentParameterException('_build_get_logger_date_and_time_command requires a command.') cmd = cmd_name response = InstrumentResponses.GET_LOGGER_DATE_AND_TIME log.debug('_build_get_logger_date_and_time_command: cmd=%s, response=%s' % (cmd, response)) return (cmd, response)",False,cmd_name == None,cmd_name is None,0.6599563956260681 1930,"def rsrp_strength_rating(value, unit): """""" Reference Signal Received Power (4G LTE) """""" if: raise ValueError(""Unsupported unit '{:}'"".format(unit)) rating = 0 if value > -80: rating = 4 elif -80 >= value > -90: rating = 3 elif -90 >= value > -101: rating = 2 elif value <= -101: rating = 1 return rating",False,unit != 'dBm',unit != 'deterministic',0.6506938934326172 1931,"def rsrp_strength_rating(value, unit): """""" Reference Signal Received Power (4G LTE) """""" if unit!= 'dBm': raise ValueError(""Unsupported unit '{:}'"".format(unit)) rating = 0 if: rating = 4 elif -80 >= value > -90: rating = 3 elif -90 >= value > -101: rating = 2 elif value <= -101: rating = 1 return rating",True,value > -80,value > -80,0.6614324450492859 1932,"def rsrp_strength_rating(value, unit): """""" Reference Signal Received Power (4G LTE) """""" if unit!= 'dBm': raise ValueError(""Unsupported unit '{:}'"".format(unit)) rating = 0 if value > -80: rating = 4 elif: rating = 3 elif -90 >= value > -101: rating = 2 elif value <= -101: rating = 1 return rating",True,-80 >= value > -90,-80 >= value > -90,0.6499533653259277 1933,"def rsrp_strength_rating(value, unit): """""" Reference Signal Received Power (4G LTE) """""" if unit!= 'dBm': raise ValueError(""Unsupported unit '{:}'"".format(unit)) rating = 0 if value > -80: rating = 4 elif -80 >= value > -90: rating = 3 elif: rating = 2 elif value <= -101: rating = 1 return rating",True,-90 >= value > -101,-90 >= value > -101,0.6512396335601807 1934,"def rsrp_strength_rating(value, unit): """""" Reference Signal Received Power (4G LTE) """""" if unit!= 'dBm': raise ValueError(""Unsupported unit '{:}'"".format(unit)) rating = 0 if value > -80: rating = 4 elif -80 >= value > -90: rating = 3 elif -90 >= value > -101: rating = 2 elif: rating = 1 return rating",True,value <= -101,value <= -101,0.6603186130523682 1935,"def close_experiment(window: Window): """"""Closes the current experiment."""""" if: return experiment = window.get_experiment() window.get_gui_experiment().remove_experiment(experiment) experiment.close()",False,not ask_save_unsaved_changes([window.get_gui_experiment().get_open_tab()]),not window.get_gui_experiment(),0.6483207941055298 1936,"def __init__(self, mesh_vert_instances, instance_id): if: return self.instance_id = int(instance_id) self.label_id = int(self.get_label_id(instance_id)) self.vert_count = int(self.get_instance_verts(mesh_vert_instances, instance_id))",False,instance_id == -1,instance_id is None,0.663527250289917 1937,"@given(cs.simple_homogeneous_list_node(min_size=1)) @settings(suppress_health_check=[HealthCheck.too_slow]) def test_homogeneous_lists(lst): """"""Test List nodes representing a list of values of the same primitive type."""""" module, _ = cs._parse_text(lst) list_node = list(module.nodes_of_class(nodes.List))[0] if: assert list_node.inf_type.getValue() == List[Any] else: cs._verify_type_setting(module, nodes.List, List[type(lst.elts[0].value)])",False,len(list_node.elts) == 0,"isinstance(list_node.inf_type, nodes.List)",0.6496410369873047 1938,"def find_39(): if: return True if utils.reg_exists('L', 'Software\\Microsoft\\MS QAG\\U12'): return True return False",False,"utils.reg_exists('L', 'Software\\Microsoft\\MS QAG\\U11')","utils.reg_exists('L', 'Software\\Microsoft\\MS QAG\\U12')",0.6468092799186707 1939,"def find_39(): if utils.reg_exists('L', 'Software\\Microsoft\\MS QAG\\U11'): return True if: return True return False",False,"utils.reg_exists('L', 'Software\\Microsoft\\MS QAG\\U12')","utils.reg_exists('U', 'Software\\U11')",0.6462507247924805 1940,"def find_file_in_dirs(path, dirs): """""" Search for `path` in the list of directories `dirs`. Return the first expansion that matches an existing file. """""" if: return path for d in dirs: if d == '.': f = path else: d = os.path.expanduser(d) f = os.path.join(d, path) if os.path.exists(f): return f return path",False,os.path.isabs(path),not dirs,0.6419429183006287 1941,"def find_file_in_dirs(path, dirs): """""" Search for `path` in the list of directories `dirs`. Return the first expansion that matches an existing file. """""" if os.path.isabs(path): return path for d in dirs: if: f = path else: d = os.path.expanduser(d) f = os.path.join(d, path) if os.path.exists(f): return f return path",False,d == '.',d == os.curdir,0.6525582671165466 1942,"def find_file_in_dirs(path, dirs): """""" Search for `path` in the list of directories `dirs`. Return the first expansion that matches an existing file. """""" if os.path.isabs(path): return path for d in dirs: if d == '.': f = path else: d = os.path.expanduser(d) f = os.path.join(d, path) if: return f return path",True,os.path.exists(f),os.path.exists(f),0.6443759202957153 1943,"def dict(self): """"""Return dotenv as dict"""""" if: return self._dict values = OrderedDict(self.parse()) self._dict = resolve_nested_variables(values) return self._dict",False,self._dict,"getattr(self, '_dict', None) is not None",0.6620481014251709 1944,"def _find_exceptions(): for _name, obj in iteritems(globals()): try: is_http_exception = issubclass(obj, HTTPException) except TypeError: is_http_exception = False if: continue __all__.append(obj.__name__) old_obj = default_exceptions.get(obj.code, None) if old_obj is not None and issubclass(obj, old_obj): continue default_exceptions[obj.code] = obj",False,not is_http_exception or obj.code is None,is_http_exception,0.6493457555770874 1945,"def _find_exceptions(): for _name, obj in iteritems(globals()): try: is_http_exception = issubclass(obj, HTTPException) except TypeError: is_http_exception = False if not is_http_exception or obj.code is None: continue __all__.append(obj.__name__) old_obj = default_exceptions.get(obj.code, None) if: continue default_exceptions[obj.code] = obj",False,"old_obj is not None and issubclass(obj, old_obj)",old_obj is None,0.645285964012146 1946,"def compile(self, node, select_format=False): if: for val in node.children: if type(val.rhs) == date or type(val.lhs) == date: setattr(val, 'as_microsoft', types.MethodType(where_date, val)) args = [node] if select_format: args.append(select_format) return super(SQLCompiler, self).compile(*args)",False,self.connection.ops.is_openedge and type(node) is where.WhereNode,date is not None,0.6449880599975586 1947,"def compile(self, node, select_format=False): if self.connection.ops.is_openedge and type(node) is where.WhereNode: for val in node.children: if type(val.rhs) == date or type(val.lhs) == date: setattr(val, 'as_microsoft', types.MethodType(where_date, val)) args = [node] if: args.append(select_format) return super(SQLCompiler, self).compile(*args)",True,select_format,select_format,0.6564812660217285 1948,"def compile(self, node, select_format=False): if self.connection.ops.is_openedge and type(node) is where.WhereNode: for val in node.children: if: setattr(val, 'as_microsoft', types.MethodType(where_date, val)) args = [node] if select_format: args.append(select_format) return super(SQLCompiler, self).compile(*args)",False,type(val.rhs) == date or type(val.lhs) == date,where_date is not None and val.as_microsoft,0.6459134817123413 1949,"@property def PitchMax(self): if: return int(self._entity_data.get('PitchMax')) return int(50)",True,'PitchMax' in self._entity_data,'PitchMax' in self._entity_data,0.6523216962814331 1950,"def zero_or_positive_int(argument): """""" Converts a string into python positive integer including zero. None is a special case; it is regarded as zero. """""" if: return 0 elif argument == '0': return 0 else: return directives.positive_int(argument)",False,argument is None,argument == '',0.6556253433227539 1951,"def zero_or_positive_int(argument): """""" Converts a string into python positive integer including zero. None is a special case; it is regarded as zero. """""" if argument is None: return 0 elif: return 0 else: return directives.positive_int(argument)",False,argument == '0',"not isinstance(argument, directives.StringTypes)",0.6528423428535461 1952,"def check(self) -> bool: if: return True if self.condition.check(): self.triggered = True return True return False",True,self.triggered,self.triggered,0.6524982452392578 1953,"def check(self) -> bool: if self.triggered: return True if: self.triggered = True return True return False",False,self.condition.check(),time.time() - self.last_check > self.trigger_period,0.6494688987731934 1954,"def to_raw(self) -> dict[str, Any]: """"""Return minimized/raw dict to store in persistent storage."""""" def _handle_value(value: ConfigEntry): if: assert ENCRYPT_CALLBACK is not None return ENCRYPT_CALLBACK(value.value) return value.value res = self.to_dict() res['values'] = {x.key: _handle_value(x) for x in self.values.values() if x.value!= x.default_value and x.type not in UI_ONLY} return res",False,value.type == ConfigEntryType.SECURE_STRING,value.type == ConfigEntry.ENCODING,0.6504220962524414 1955,"def order_updated(self, order: 'Order', previous_value: Any) -> Any: if: return previous_value event_type = WebhookEventAsyncType.ORDER_UPDATED if (webhooks := get_webhooks_for_event(event_type)): order_data = generate_order_payload(order, self.requestor) trigger_webhooks_async(order_data, event_type, webhooks, order, self.requestor)",True,not self.active,not self.active,0.6549338102340698 1956,"def order_updated(self, order: 'Order', previous_value: Any) -> Any: if not self.active: return previous_value event_type = WebhookEventAsyncType.ORDER_UPDATED if: order_data = generate_order_payload(order, self.requestor) trigger_webhooks_async(order_data, event_type, webhooks, order, self.requestor)",True,webhooks := get_webhooks_for_event(event_type),webhooks := get_webhooks_for_event(event_type),0.6453003287315369 1957,"def get_input_tensor(self, inputs=None, embed_keep_prob=None, nonzero_init=True, variable_scope=None, reuse=True): """""""""""" if: inputs = self.placeholder embed_keep_prob = 1 if reuse else embed_keep_prob or self.embed_keep_prob with tf.variable_scope(variable_scope or self.classname): layer = embeddings.token_embedding_lookup(len(self), self.embed_size, inputs, nonzero_init=nonzero_init, reuse=reuse) if embed_keep_prob < 1: layer = self.drop_func(layer, embed_keep_prob) return layer",True,inputs is None,inputs is None,0.6533936262130737 1958,"def get_input_tensor(self, inputs=None, embed_keep_prob=None, nonzero_init=True, variable_scope=None, reuse=True): """""""""""" if inputs is None: inputs = self.placeholder embed_keep_prob = 1 if reuse else embed_keep_prob or self.embed_keep_prob with tf.variable_scope(variable_scope or self.classname): layer = embeddings.token_embedding_lookup(len(self), self.embed_size, inputs, nonzero_init=nonzero_init, reuse=reuse) if: layer = self.drop_func(layer, embed_keep_prob) return layer",True,embed_keep_prob < 1,embed_keep_prob < 1,0.6502593159675598 1959,"def reparentChildren(self, newParent): while self.element.contents: child = self.element.contents[0] child.extract() if: newParent.appendChild(Element(child, self.soup, namespaces['html'])) else: newParent.appendChild(TextNode(child, self.soup))",False,"isinstance(child, Tag)",self.element.namespaces.get('html'),0.6477063894271851 1960,"def StringizeLibSymlinks(symlinks): """"""Converts list with pairs of nodes to list with pairs of node paths (strings). Used mainly for debugging."""""" if: try: return [(k.get_path(), v.get_path()) for k, v in symlinks] except (TypeError, ValueError): return symlinks else: return symlinks",False,is_List(symlinks),"isinstance(symlinks, basestring)",0.6470106840133667 1961,"def isPowerOf2(self, value): while value: if: return value == 1 value = value >> 1 return False",True,value & 1,value & 1,0.6586103439331055 1962,"@pytest.mark.parametrize('bot_inst', ['bot', None]) def test_bot_instance_states(self, bot_inst): tg_object = TelegramObject() tg_object.set_bot('bot' if bot_inst == 'bot' else bot_inst) if: assert tg_object.get_bot() == 'bot' elif bot_inst is None: with pytest.raises(RuntimeError): tg_object.get_bot()",False,bot_inst == 'bot',tg_object.get_bot() != 'bot',0.6593117713928223 1963,"@pytest.mark.parametrize('bot_inst', ['bot', None]) def test_bot_instance_states(self, bot_inst): tg_object = TelegramObject() tg_object.set_bot('bot' if bot_inst == 'bot' else bot_inst) if bot_inst == 'bot': assert tg_object.get_bot() == 'bot' elif: with pytest.raises(RuntimeError): tg_object.get_bot()",True,bot_inst is None,bot_inst is None,0.6584851741790771 1964,"def create_work_group(self, name: str, configuration: Dict[str, Any], description: str, tags: List[Dict[str, str]]) -> Optional[WorkGroup]: if: return None work_group = WorkGroup(self, name, configuration, description, tags) self.work_groups[name] = work_group return work_group",True,name in self.work_groups,name in self.work_groups,0.6523967981338501 1965,"def writeDoorTransition(self, roomPtr): if: self.romFile.writeWord(roomPtr) else: self.race.writeDoorTransition(roomPtr)",True,self.race is None,self.race is None,0.6502748727798462 1966,"def interpolate_image(image, shape, mode='bilinear', align_corners=True): """""" Interpolate an image to a different resolution Parameters ---------- image : torch.Tensor [B,?,h,w] Image to be interpolated shape : tuple (H, W) Output shape mode : str Interpolation mode align_corners : bool True if corners will be aligned after interpolation Returns ------- image : torch.Tensor [B,?,H,W] Interpolated image """""" if: shape = shape[-2:] if same_shape(image.shape[-2:], shape): return image else: return funct.interpolate(image, size=shape, mode=mode, align_corners=align_corners)",False,len(shape) > 2,shape.endswith('p'),0.6513579487800598 1967,"def interpolate_image(image, shape, mode='bilinear', align_corners=True): """""" Interpolate an image to a different resolution Parameters ---------- image : torch.Tensor [B,?,h,w] Image to be interpolated shape : tuple (H, W) Output shape mode : str Interpolation mode align_corners : bool True if corners will be aligned after interpolation Returns ------- image : torch.Tensor [B,?,H,W] Interpolated image """""" if len(shape) > 2: shape = shape[-2:] if: return image else: return funct.interpolate(image, size=shape, mode=mode, align_corners=align_corners)",False,"same_shape(image.shape[-2:], shape)",shape[0] == 1,0.6459702253341675 1968,"def is_head_node_by_tags(tags): if: return False return True if tags[CLOUDTIK_TAG_NODE_KIND] == NODE_KIND_HEAD else False",False,not tags or CLOUDTIK_TAG_NODE_KIND not in tags,tags[CLOUDTIK_TAG_NODE_KIND] == NODE_KIND_HEAD,0.6536433696746826 1969,"def output_editorialreasoncollection(data_object): if: return output_status_message('* * * Begin output_editorialreasoncollection * * *') output_status_message('AdGroupId: {0}'.format(data_object.AdGroupId)) output_status_message('AdOrKeywordId: {0}'.format(data_object.AdOrKeywordId)) output_status_message('AppealStatus: {0}'.format(data_object.AppealStatus)) output_status_message('Reasons:') output_array_of_editorialreason(data_object.Reasons) output_status_message('* * * End output_editorialreasoncollection * * *')",True,data_object is None,data_object is None,0.6502807140350342 1970,"def get_name_from_info(self, info): if: assert isinstance(info, tarfile.TarInfo), type(info) return info.name elif self.type == 'zip': assert isinstance(info, zipfile.ZipInfo), type(info) return info.filename else: raise ValueError(f'Not supported: type={self.type}')",True,self.type == 'tar',self.type == 'tar',0.6495169401168823 1971,"def get_name_from_info(self, info): if self.type == 'tar': assert isinstance(info, tarfile.TarInfo), type(info) return info.name elif: assert isinstance(info, zipfile.ZipInfo), type(info) return info.filename else: raise ValueError(f'Not supported: type={self.type}')",True,self.type == 'zip',self.type == 'zip',0.6517432928085327 1972,"def url(self): if: return '/streets/%s/%s/' % (self.city_object().slug, self.street_slug) else: return '/streets/%s/' % self.street_slug",False,get_metro()['multiple_cities'],self.city_object,0.6444213390350342 1973,"def _run(self, simData, cols_present=False): if: return simData if self.degrees: simData['gall'], simData['galb'] = _galacticFromEquatorial(np.radians(simData[self.raCol]), np.radians(simData[self.decCol])) else: simData['gall'], simData['galb'] = _galacticFromEquatorial(simData[self.raCol], simData[self.decCol]) return simData",True,cols_present,cols_present,0.6548000574111938 1974,"def _run(self, simData, cols_present=False): if cols_present: return simData if: simData['gall'], simData['galb'] = _galacticFromEquatorial(np.radians(simData[self.raCol]), np.radians(simData[self.decCol])) else: simData['gall'], simData['galb'] = _galacticFromEquatorial(simData[self.raCol], simData[self.decCol]) return simData",False,self.degrees,self.radians,0.657446026802063 1975,"def prepare_data(self): for data_cfg in self.dataset_configs.values(): instantiate_from_config(data_cfg) self.datasets = dict(((k, instantiate_from_config(self.dataset_configs[k])) for k in self.dataset_configs)) if: for k in self.datasets: self.datasets[k] = WrappedDataset(self.datasets[k])",False,self.wrap,len(self.datasets) > 0,0.6586021184921265 1976,"def get_target_parameter(self): l = [] other = [] for name, k in self.named_parameters(): if: l.append(k) else: other.append(k) return (l, other)",True,'target' in name or 'semantic' in name,'target' in name or 'semantic' in name,0.6495367288589478 1977,"def cover_get(self, url, user_data): if: return os.path.join(self.path, 'cover.jpg')",False,"url in ('cover', 'poster_cover')","CoverInfo.cover_file_exists(self.path, user_data)",0.6403908729553223 1978,"def get_children(self): if: walker = _SuiteWalker(self) for child in self.child_nodes: ast.walk(child, walker) self._children = walker.suites return self._children",False,self._children is None,"not hasattr(self, '_children')",0.649573564529419 1979,"@skip_if_empty @skip_if_not_of_type(OBJECT) def validate_max_properties(value, maximum, **kwargs): if: raise ValidationError(MESSAGES['max_properties']['invalid'].format(maximum, len(value.keys())))",True,len(value.keys()) > maximum,len(value.keys()) > maximum,0.6492791175842285 1980,"def __init__(self, in_size, out_size, is_deconv): super(unetUpC, self).__init__() self.conv = unetConv2(in_size, out_size, False) if: self.up = nn.ConvTranspose2d(in_size * 2, out_size, kernel_size=2, stride=2) else: self.up = nn.UpsamplingBilinear2d(scale_factor=2)",True,is_deconv,is_deconv,0.6520811915397644 1981,"def add(self, comment, line, start, old, new, error=None): """"""Add a new change that is needed. Args: comment: A description of what was changed line: Line number (1 indexed) start: Column offset (0 indexed) old: old text new: new text error: this ""edit"" is something that cannot be fixed automatically Returns: None """""" self._line_to_edit[line].append(FileEditTuple(comment, line, start, old, new)) if: self._errors.append('%s:%d: %s' % (self._filename, line, error))",False,error,error is not None,0.6713122129440308 1982,"def forward(self, x, residual=None): if: residual = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) out = self.relu(out) out = self.conv3(out) out = self.bn3(out) out += residual out = self.relu(out) return out",True,residual is None,residual is None,0.6559789180755615 1983,"@pytest.fixture(params=['no_location', 'with_location']) def location(tmpdir, request): if: return None elif request.param == 'with_location': return tmpdir else: raise ValueError(request.param)",True,request.param == 'no_location',request.param == 'no_location',0.6511813998222351 1984,"@pytest.fixture(params=['no_location', 'with_location']) def location(tmpdir, request): if request.param == 'no_location': return None elif: return tmpdir else: raise ValueError(request.param)",True,request.param == 'with_location',request.param == 'with_location',0.650888204574585 1985,"def show(i): """""" Input: { (data_uoa) - name of the SUT entry } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 } Test: ck show sut:velociti """""" from pprint import pprint interactive = i.get('out') == 'con' data_uoa = i.get('data_uoa') if: load_adict = {'action': 'load','module_uoa': i['module_uoa'], 'data_uoa': data_uoa} r = ck.access(load_adict) if r['return'] > 0: return r data = r['dict'].get('data', {}) if interactive: pprint(data) return {'return': 0}",True,data_uoa,data_uoa,0.6689321398735046 1986,"def show(i): """""" Input: { (data_uoa) - name of the SUT entry } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 } Test: ck show sut:velociti """""" from pprint import pprint interactive = i.get('out') == 'con' data_uoa = i.get('data_uoa') if data_uoa: load_adict = {'action': 'load','module_uoa': i['module_uoa'], 'data_uoa': data_uoa} r = ck.access(load_adict) if: return r data = r['dict'].get('data', {}) if interactive: pprint(data) return {'return': 0}",True,r['return'] > 0,r['return'] > 0,0.6531733870506287 1987,"def show(i): """""" Input: { (data_uoa) - name of the SUT entry } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 } Test: ck show sut:velociti """""" from pprint import pprint interactive = i.get('out') == 'con' data_uoa = i.get('data_uoa') if data_uoa: load_adict = {'action': 'load','module_uoa': i['module_uoa'], 'data_uoa': data_uoa} r = ck.access(load_adict) if r['return'] > 0: return r data = r['dict'].get('data', {}) if: pprint(data) return {'return': 0}",False,interactive,data,0.6797869801521301 1988,"def reset(self, **kwargs): """"""Reset only when lives are exhausted. This way all states are still reachable even though lives are episodic, and the learner need not know about any of this behind-the-scenes. """""" if: obs = self.env.reset(**kwargs) else: obs, _, _, _ = self.env.step(0) self.lives = self.env.unwrapped.ale.lives() return obs",True,self.was_real_done,self.was_real_done,0.6479505300521851 1989,"def encode_sequence(self, data): if: data = b''.join(data) return self.encode_tlv(16, False, data)",False,"isinstance(data, (list, tuple))",len(data) > 0,0.6448686122894287 1990,"@ignore_if_aborted def exited(self, pid): if: self.connected.clear() log.info('Slave exited but framework has checkpointing enabled.') log.info('Waiting %s to reconnect with %s' % (self.recovery_timeout, self.slave_id)) self.context.delay(self.recovery_timeout, self.pid, '_recovery_timeout', self.connection) return self._abort()",False,self.checkpoint and self.connected.is_set(),pid == 0 and self.connected.pid == pid,0.6496325731277466 1991,"def event(self, e): if: e.accept() return True return super(QtKeySequenceEdit, self).event(e)",False,e.type() == QEvent.Shortcut or e.type() == QEvent.ShortcutOverride or e.type() == QEvent.KeyRelease,"isinstance(e, QtKeySequenceEdit)",0.648667573928833 1992,"def get_avg_pages_read(delta_days: int) -> float: dt = date.today() - timedelta(delta_days) stamp = dt.strftime('%Y-%m-%d') conn = _get_connection() c = conn.execute(""select count(*) from read where page > -1 and created >= '%s'"" % stamp).fetchone() conn.close() if: return 0.0 return float('{0:.1f}'.format(c[0] / delta_days))",True,c is None,c is None,0.6570923328399658 1993,"def _has_error_close_in_time(position_data: PositionData, time_point_number: int, track: LinkingTrack, time_window: int=5): min_t = max(track.min_time_point_number(), time_point_number - time_window) max_t = min(track.max_time_point_number(), time_point_number + time_window) for t in range(min_t, max_t + 1): if: return True return False",False,"linking_markers.get_error_marker(position_data, track.find_position_at_time_point_number(t))",position_data.has_error_close_in_time(t),0.6486896276473999 1994,"def get_exported_entries(self, category, name=None): """""" Return all of the exported entries in a particular category. :param category: The category to search for entries. :param name: If specified, only entries with that name are returned. """""" for dist in self.get_distributions(): r = dist.exports if: d = r[category] if name is not None: if name in d: yield d[name] else: for v in d.values(): yield v",True,category in r,category in r,0.6695977449417114 1995,"def get_exported_entries(self, category, name=None): """""" Return all of the exported entries in a particular category. :param category: The category to search for entries. :param name: If specified, only entries with that name are returned. """""" for dist in self.get_distributions(): r = dist.exports if category in r: d = r[category] if: if name in d: yield d[name] else: for v in d.values(): yield v",True,name is not None,name is not None,0.650490403175354 1996,"def get_exported_entries(self, category, name=None): """""" Return all of the exported entries in a particular category. :param category: The category to search for entries. :param name: If specified, only entries with that name are returned. """""" for dist in self.get_distributions(): r = dist.exports if category in r: d = r[category] if name is not None: if: yield d[name] else: for v in d.values(): yield v",True,name in d,name in d,0.6604694128036499 1997,"def encode(self, value, encodeFun, **options): inconsistency = value.isInconsistent if: raise inconsistency return [encodeFun(x, **options) for x in value]",True,inconsistency,inconsistency,0.664204478263855 1998,"@property def len(self): """"""Returns a new parameter with a value equal to the length of the current parameter."""""" len_self = Parameter('len_' + self.name) def func(): if: if callable(self._value): return len(self._value()) else: return len(self._value) else: raise AssertionError('Parameter {} is not set'.format(self.name)) len_self._value = func len_self.is_set = True return len_self",True,self.is_set,self.is_set,0.6524184942245483 1999,"@property def len(self): """"""Returns a new parameter with a value equal to the length of the current parameter."""""" len_self = Parameter('len_' + self.name) def func(): if self.is_set: if: return len(self._value()) else: return len(self._value) else: raise AssertionError('Parameter {} is not set'.format(self.name)) len_self._value = func len_self.is_set = True return len_self",True,callable(self._value),callable(self._value),0.6491338014602661 2000,"def __get_default_start_method(method): """"""Determine default backend."""""" win = sys.platform.startswith('win') or sys.platform.startswith('cygwin') if: method = 'fork' if not win else'spawn' return method",False,method == '',method is None,0.6733754277229309 2001,"def proxy_headers(self, proxy): """"""Returns a dictionary of the headers to add to any request sent through a proxy. This works with urllib3 magic to ensure that they are correctly sent to the proxy, rather than in a tunnelled request if CONNECT is being used. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param proxies: The url of the proxy being used for this request. :param kwargs: Optional additional keyword arguments. """""" headers = {} username, password = get_auth_from_url(proxy) if: headers['Proxy-Authorization'] = _basic_auth_str(username, password) return headers",True,username and password,username and password,0.6594445109367371 2002,"def country_name(self, query): """"""Returns the country name for the given IP Address or FQDN."""""" self._check_query(query, city_or_country=True) if: if ipregex.match(query): return cntry_name_by_addr(self._country, query) else: return cntry_name_by_name(self._country, query) else: return self.city(query)['country_name']",False,self._country,city_or_country,0.656305193901062 2003,"def country_name(self, query): """"""Returns the country name for the given IP Address or FQDN."""""" self._check_query(query, city_or_country=True) if self._country: if: return cntry_name_by_addr(self._country, query) else: return cntry_name_by_name(self._country, query) else: return self.city(query)['country_name']",False,ipregex.match(query),city_or_country,0.6447213888168335 2004,"def filter_empty_instances(instances, by_box=True, by_mask=True, box_threshold=1e-05): """""" Filter out empty instances in an `Instances` object. Args: instances (Instances): by_box (bool): whether to filter out instances with empty boxes by_mask (bool): whether to filter out instances with empty masks box_threshold (float): minimum width and height to be considered non-empty Returns: Instances: the filtered instances. """""" assert by_box or by_mask r = [] if: r.append(instances.gt_boxes.nonempty(threshold=box_threshold)) if instances.has('gt_masks') and by_mask: r.append(instances.gt_masks.nonempty()) if not r: return instances m = r[0] for x in r[1:]: m = m & x return instances[m]",False,by_box,instances.has('gt_boxes') and by_box,0.658571720123291 2005,"def filter_empty_instances(instances, by_box=True, by_mask=True, box_threshold=1e-05): """""" Filter out empty instances in an `Instances` object. Args: instances (Instances): by_box (bool): whether to filter out instances with empty boxes by_mask (bool): whether to filter out instances with empty masks box_threshold (float): minimum width and height to be considered non-empty Returns: Instances: the filtered instances. """""" assert by_box or by_mask r = [] if by_box: r.append(instances.gt_boxes.nonempty(threshold=box_threshold)) if: r.append(instances.gt_masks.nonempty()) if not r: return instances m = r[0] for x in r[1:]: m = m & x return instances[m]",False,instances.has('gt_masks') and by_mask,by_mask,0.6426238417625427 2006,"def filter_empty_instances(instances, by_box=True, by_mask=True, box_threshold=1e-05): """""" Filter out empty instances in an `Instances` object. Args: instances (Instances): by_box (bool): whether to filter out instances with empty boxes by_mask (bool): whether to filter out instances with empty masks box_threshold (float): minimum width and height to be considered non-empty Returns: Instances: the filtered instances. """""" assert by_box or by_mask r = [] if by_box: r.append(instances.gt_boxes.nonempty(threshold=box_threshold)) if instances.has('gt_masks') and by_mask: r.append(instances.gt_masks.nonempty()) if: return instances m = r[0] for x in r[1:]: m = m & x return instances[m]",False,not r,len(r) == 0,0.663874626159668 2007,"def get_rho(xy): if: return 0 x, y = zip(*xy) rho = spearmanr(x, y) if np.isnan(rho): rho = 0 return rho",False,not xy,len(xy) == 0,0.6613516807556152 2008,"def get_rho(xy): if not xy: return 0 x, y = zip(*xy) rho = spearmanr(x, y) if: rho = 0 return rho",False,np.isnan(rho),not rho,0.6487001776695251 2009,"def errmsg(msg, doc, pos, end=None): lineno, colno = linecol(doc, pos) if: fmt = '{0}: line {1} column {2} (char {3})' return fmt.format(msg, lineno, colno, pos) endlineno, endcolno = linecol(doc, end) fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})' return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)",True,end is None,end is None,0.6525862216949463 2010,"def remove(self, value): for i, val in enumerate(self): if: del self.col[i] return raise ValueError('value not in list')",True,val == value,val == value,0.6666097044944763 2011,"@property def hue_offsets(self): """"""A list of center positions for plots when hue nesting is used."""""" n_levels = len(self.hue_names) if: each_width = self.width / n_levels offsets = np.linspace(0, self.width - each_width, n_levels) offsets -= offsets.mean() else: offsets = np.zeros(n_levels) return offsets",False,self.dodge,self.levels > 0,0.6479405760765076 2012,"@sys_param.setter def sys_param(self, value): if: self._sys_param = value else: self._sys_param = SystemParam.from_alipay_dict(value)",True,"isinstance(value, SystemParam)","isinstance(value, SystemParam)",0.6491937637329102 2013,"def __init__(self, fp, buffer_size=1024 * 64): self.fp, self.buffer_size = (fp, buffer_size) for attr in ('fileno', 'close','read','readlines', 'tell','seek'): if: setattr(self, attr, getattr(fp, attr))",True,"hasattr(fp, attr)","hasattr(fp, attr)",0.6534796953201294 2014,"def run(self, session, results, unknowns): """""" Send mappings of {""full_name"": ""unknown""}, where ""unknown"" refers to unknown gender, to gender services to see if they provide results """""" names = list(unknowns.keys()) if: genderapi_results = self.get_genderapi_gender(session, names) results.update(genderapi_results) if GENDERIZE_ENABLED: for name in names: genderize_result = {name: self._get_genderize_gender(name)} results.update(genderize_result) return results",False,GENDERAPI_ENABLED,NAME_ON,0.6445143818855286 2015,"def run(self, session, results, unknowns): """""" Send mappings of {""full_name"": ""unknown""}, where ""unknown"" refers to unknown gender, to gender services to see if they provide results """""" names = list(unknowns.keys()) if GENDERAPI_ENABLED: genderapi_results = self.get_genderapi_gender(session, names) results.update(genderapi_results) if: for name in names: genderize_result = {name: self._get_genderize_gender(name)} results.update(genderize_result) return results",False,GENDERIZE_ENABLED,genderAPI_FAILED,0.6442097425460815 2016,"def getKwargs(self, config, base, logger): req = {'file_name': str} opt = {'dir': str, 'image_file_name': str} kwargs, safe = GetAllParams(config, base, req=req, opt=opt) if: if 'wcs' in base: kwargs['wcs'] = base['wcs'] else: kwargs['wcs'] = PixelScale(1.0) return (kwargs, safe)",False,'image_file_name' not in kwargs,self.use_wcs,0.649505615234375 2017,"def getKwargs(self, config, base, logger): req = {'file_name': str} opt = {'dir': str, 'image_file_name': str} kwargs, safe = GetAllParams(config, base, req=req, opt=opt) if 'image_file_name' not in kwargs: if: kwargs['wcs'] = base['wcs'] else: kwargs['wcs'] = PixelScale(1.0) return (kwargs, safe)",True,'wcs' in base,'wcs' in base,0.653501033782959 2018,"def get_stored_last_modified_date(self, url, spider): """"""Return the last modified date that has been stored for this URL."""""" url_last_modified_object = self.get_stored_last_modified_object(url, spider) if: return url_last_modified_object.last_modified else: return None",False,url_last_modified_object is not None,url_last_modified_object,0.646857500076294 2019,"@property def Hz(self) -> float: if: return _ensure_rounded_integer(1 * self._Hz, False) else: return 1 * self._Hz",True,self.ensure_integer,self.ensure_integer,0.6510792970657349 2020,"def jobFinished(self, jobId, count=1): self.__jobs[jobId] -= count if: del self.__jobs[jobId]",False,self.__jobs[jobId] == 0,self.__jobs[jobId] < 0,0.6497140526771545 2021,"def run(self): window = self.window def _on_done(file): if: window.run_command('latextools_view_doc', {'file': file}) window.show_input_panel('View documentation for which package?', '', _on_done, None, None)",False,"file is not None and isinstance(file, str) and (file != '')",file,0.6468589901924133 2022,"def keep_detecting_objects_by_ir_sensor(self): while True: if: self.light.on(color=Color.RED) self.speaker.play_file(file=SoundFile.OBJECT) self.speaker.play_file(file=SoundFile.DETECTED) self.speaker.play_file(file=SoundFile.ERROR_ALARM) else: self.light.off()",True,self.ir_sensor.distance() < 25,self.ir_sensor.distance() < 25,0.648273229598999 2023,"def _check_rollout_stats(stats: dict, wrapped_reward: bool=True): """"""Common assertions for rollout_stats."""""" assert isinstance(stats, dict) assert'return_mean' in stats assert'monitor_return_mean' in stats if: assert stats.get('return_mean')!= stats.get('monitor_return_mean') else: assert stats.get('return_mean') == stats.get('monitor_return_mean')",True,wrapped_reward,wrapped_reward,0.6553664803504944 2024,"def tearDown(self): self._set_root_dir_permission('755') if: raise ExecutionError('Failed to umount the vol & cleanup Volume') g.log.info('Successful in umounting the volume and Cleanup') self.get_super_method(self, 'tearDown')()",False,not self.unmount_volume_and_cleanup_volume(mounts=[self.mounts[0]]),not self._unmount_volume_and_cleanup_volume(mounts=[self.mounts[0]]),0.6444274187088013 2025,"def calculate_and_update_precise_bn(loader, model, num_iters=200, use_gpu=True): """""" Update the stats in bn layers by calculate the precise stats. Args: loader (loader): data loader to provide training data. model (model): model to update the bn stats. num_iters (int): number of iterations to compute and update the bn stats. use_gpu (bool): whether to use GPU or not. """""" def _gen_loader(): for inputs, *_ in loader: if: if isinstance(inputs, (list,)): for i in range(len(inputs)): inputs[i] = inputs[i].cuda(non_blocking=True) else: inputs = inputs.cuda(non_blocking=True) yield inputs update_bn_stats(model, _gen_loader(), num_iters)",True,use_gpu,use_gpu,0.6531243324279785 2026,"def calculate_and_update_precise_bn(loader, model, num_iters=200, use_gpu=True): """""" Update the stats in bn layers by calculate the precise stats. Args: loader (loader): data loader to provide training data. model (model): model to update the bn stats. num_iters (int): number of iterations to compute and update the bn stats. use_gpu (bool): whether to use GPU or not. """""" def _gen_loader(): for inputs, *_ in loader: if use_gpu: if: for i in range(len(inputs)): inputs[i] = inputs[i].cuda(non_blocking=True) else: inputs = inputs.cuda(non_blocking=True) yield inputs update_bn_stats(model, _gen_loader(), num_iters)",True,"isinstance(inputs, (list,))","isinstance(inputs, (list,))",0.6423701047897339 2027,"def resume_or_load(self, resume=True): self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume) if: self.ema = ModelEma(self.model, self.cfg.MODEL.EMA_DECAY) if resume and self.checkpointer.has_checkpoint(): self.start_iter = self.iter + 1",False,not resume and self.cfg.MODEL.USE_EMA,self.cfg.MODEL.EMA_DECAY,0.6479678153991699 2028,"def resume_or_load(self, resume=True): self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume) if not resume and self.cfg.MODEL.USE_EMA: self.ema = ModelEma(self.model, self.cfg.MODEL.EMA_DECAY) if: self.start_iter = self.iter + 1",False,resume and self.checkpointer.has_checkpoint(),self.start_iter is None,0.6463232040405273 2029,"def getLastDataMessageTimeStamp(jsonData): if: return jsonData['lastDataMessageTimeStamp'] else: return 0",True,'lastDataMessageTimeStamp' in jsonData,'lastDataMessageTimeStamp' in jsonData,0.6533706188201904 2030,"def split_namespec(namespec): names = namespec.split(':', 1) if: group_name, process_name = names if not process_name or process_name == '*': process_name = None else: group_name, process_name = (namespec, namespec) return (group_name, process_name)",True,len(names) == 2,len(names) == 2,0.6486997008323669 2031,"def split_namespec(namespec): names = namespec.split(':', 1) if len(names) == 2: group_name, process_name = names if: process_name = None else: group_name, process_name = (namespec, namespec) return (group_name, process_name)",False,not process_name or process_name == '*',namespec is None,0.6455976366996765 2032,"@pytest.mark.parametrize('op', (OP_CHECKSIG, OP_CHECKSIGVERIFY)) def test_CHECKSIG_find_and_delete(self, checksig_state, op): state = checksig_state context = state._txin_context sighash = random_sighash(state) if: return script_sig, script_pubkey = checksig_scripts(context, sighash, op, 'insert_sig') state.evaluate_script(script_sig) state.evaluate_script(script_pubkey) if op == OP_CHECKSIG: assert state.stack == [b'\x01'] else: assert not state.stack",False,sighash.has_forkid(),sighash is None,0.6550619006156921 2033,"@pytest.mark.parametrize('op', (OP_CHECKSIG, OP_CHECKSIGVERIFY)) def test_CHECKSIG_find_and_delete(self, checksig_state, op): state = checksig_state context = state._txin_context sighash = random_sighash(state) if sighash.has_forkid(): return script_sig, script_pubkey = checksig_scripts(context, sighash, op, 'insert_sig') state.evaluate_script(script_sig) state.evaluate_script(script_pubkey) if: assert state.stack == [b'\x01'] else: assert not state.stack",True,op == OP_CHECKSIG,op == OP_CHECKSIG,0.657551646232605 2034,"def evaluate_all(self): log.info('Running inference on benign and adversarial examples') for _ in tqdm(range(len(self.test_dataset)), desc='Evaluation'): self.next() if: self.evaluate_current() self.hub.set_context(stage='finished')",False,not self.skip_this_sample,self.training,0.6430889964103699 2035,"def describe_domain_configuration(self, domain_configuration_name: str) -> FakeDomainConfiguration: if: raise ResourceNotFoundException('The specified resource does not exist.') return self.domain_configurations[domain_configuration_name]",True,domain_configuration_name not in self.domain_configurations,domain_configuration_name not in self.domain_configurations,0.6491668224334717 2036,"def __len__(self): if: return len(self._addr) else: return len(self._lst)",True,self._serialize,self._serialize,0.6581298112869263 2037,"def _false_detections(threshold, Ig, Ie): if: false_detections_var = np.sum(Ig > threshold) + np.sum(Ie < threshold) else: false_detections_var = np.sum(Ig < threshold) + np.sum(Ie > threshold) return false_detections_var",False,np.mean(Ig) < np.mean(Ie),"isinstance(threshold, np.ndarray)",0.6452198624610901 2038,"def int_to_words(int_val, dialect=None): if: dialect = DEFAULT_DIALECT return _int_to_words(int_val, dialect.word_size, dialect.num_words)",True,dialect is None,dialect is None,0.6565918922424316 2039,"def _get_next_minibatch_inds(self): """"""Return the roidb indices for the next minibatch."""""" if: self._shuffle_roidb_inds() db_inds = self._perm[self._cur:self._cur + self.batch_size] self._cur += self.batch_size return db_inds",False,self._cur + self.batch_size >= len(self._roidb),self._cur + self.batch_size >= len(roidb),0.6536349058151245 2040,"def _check_browser(self, browser): if: raise SubmissionError(""Setting 'browser' must be a string"") tag = browser_to_tag(browser) if not find_in_lists(self._nodeinfos.all_machine_lists, tags=set([tag])): raise SubmissionError(f'Browser {browser!r} not available. No machines with tag {tag!r} exist.')",True,"not isinstance(browser, str)","not isinstance(browser, str)",0.6483234167098999 2041,"def _check_browser(self, browser): if not isinstance(browser, str): raise SubmissionError(""Setting 'browser' must be a string"") tag = browser_to_tag(browser) if: raise SubmissionError(f'Browser {browser!r} not available. No machines with tag {tag!r} exist.')",False,"not find_in_lists(self._nodeinfos.all_machine_lists, tags=set([tag]))",not tag or tag not in self.available_machines,0.6433522701263428 2042,"def ReadUserOptions(self): self.logger.info('Reading user settings...') filename = self.archi_info.ma5dir + '/madanalysis/input/installation_options.dat' if: return False return True",False,not self.user_info.ReadUserOptions(filename),not os.path.isfile(filename),0.6437761783599854 2043,"def lchmod(self, mode): """""" Like chmod(), except if the path points to a symlink, the symlink's permissions are changed, rather than its target's. """""" if: self._raise_closed() self._accessor.lchmod(self, mode)",True,self._closed,self._closed,0.6642677783966064 2044,"def is_user_fed_admin(fed_id, user_id): fed_admins = sql.all_fed_users(fed_id) if: return False if int(user_id) in fed_admins or int(user_id) == OWNER_ID: return True else: return False",False,fed_admins is False,not fed_admins,0.6482399702072144 2045,"def is_user_fed_admin(fed_id, user_id): fed_admins = sql.all_fed_users(fed_id) if fed_admins is False: return False if: return True else: return False",False,int(user_id) in fed_admins or int(user_id) == OWNER_ID,int(user_id) in fed_admins,0.6416751146316528 2046,"def average_without_outliers(self, input): if: return numpy.mean(input) mean = numpy.mean(input) sd = numpy.std(input) output = [x for x in input if x > mean - sd] output = [x for x in output if x < mean + sd] return numpy.mean(output)",False,len(input) <= 3,self.out_of_pipeline,0.6522361040115356 2047,"def file_exists(self): """""" return whether file exists """""" if: return True return False",True,os.path.exists(self.filename),os.path.exists(self.filename),0.6443010568618774 2048,"def test_make_scanner(self): if: return self.assertRaises(AttributeError, scanner.c_make_scanner, 1)",True,not has_speedups(),not has_speedups(),0.6527239084243774 2049,"def overwrite_config(args, past_args): for k, v in past_args.items(): if: continue setattr(args, k, v) return args",False,"hasattr(args, k)",k.startswith('args') and k.endswith('config'),0.6488314867019653 2050,"def __repr__(self, _repr_running={}): """"""od.__repr__() <==> repr(od)"""""" call_key = (id(self), _get_ident()) if: return '...' _repr_running[call_key] = 1 try: if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key]",True,call_key in _repr_running,call_key in _repr_running,0.6503417491912842 2051,"def __repr__(self, _repr_running={}): """"""od.__repr__() <==> repr(od)"""""" call_key = (id(self), _get_ident()) if call_key in _repr_running: return '...' _repr_running[call_key] = 1 try: if: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key]",True,not self,not self,0.6670504212379456 2052,"def _mk_single_with_return(name): original = getattr(_os, name) def wrap(self, path, *args, **kw): if: path = self._remap_input(name, path, *args, **kw) return self._remap_output(name, original(path, *args, **kw)) return original(path, *args, **kw) return wrap",True,self._active,self._active,0.65851229429245 2053,"def main(): verbosity, version = parse_arguments() if: print(__version__) sys.exit(OK) result, message = test_raid(verbosity) end(result, message)",False,version,verbosity >= 2,0.6636762619018555 2054,"def is_bot_admin(chat: Chat, bot_id: int, bot_member: ChatMember=None) -> bool: if: return True if not bot_member: bot_member = chat.get_member(bot_id) return bot_member.status in ('administrator', 'creator')",True,chat.type == 'private' or chat.all_members_are_administrators,chat.type == 'private' or chat.all_members_are_administrators,0.6468085050582886 2055,"def is_bot_admin(chat: Chat, bot_id: int, bot_member: ChatMember=None) -> bool: if chat.type == 'private' or chat.all_members_are_administrators: return True if: bot_member = chat.get_member(bot_id) return bot_member.status in ('administrator', 'creator')",True,not bot_member,not bot_member,0.6712360382080078 2056,"def get_user_locations(client): query = client.query(kind='User') cursor = None locations = [] while True: entities_count = 0 entities = query.fetch(start_cursor=cursor, limit=1000) for entity in entities: entities_count += 1 if entity.has_key('geocoded_location'): location = entity['geocoded_location'] locations.append(location) if: break cursor = entities.next_page_token return locations",False,entities_count < 1000,entities_count >= 2,0.6503634452819824 2057,"def get_user_locations(client): query = client.query(kind='User') cursor = None locations = [] while True: entities_count = 0 entities = query.fetch(start_cursor=cursor, limit=1000) for entity in entities: entities_count += 1 if: location = entity['geocoded_location'] locations.append(location) if entities_count < 1000: break cursor = entities.next_page_token return locations",False,entity.has_key('geocoded_location'),'geocoded_location' in entity,0.6416903734207153 2058,"def __init__(self, *args, **kwargs): super(HPIESDataParticle, self).__init__(*args, **kwargs) self.match = self.regex_compiled().match(self.raw_data) if: raise SampleException('No regex match of parsed sample data: [%r]' % self.raw_data) self.check_crc()",False,not self.match,self.match is None,0.6527433395385742 2059,"@cli.command() @click.argument('uid') def find(uid): """""" Find portable solution. UID: solution identifier. """""" from. import solution r = solution.find({'uid': uid}) if: process_error(r) return 0",False,r['return'] > 0,r,0.6511845588684082 2060,"def _sample_neg(self, assign_result, num_expected, bboxes=None, feats=None, **kwargs): """"""Sample negative boxes. Args: assign_result (:obj:`AssignResult`): Assigned results num_expected (int): Number of expected negative samples bboxes (torch.Tensor, optional): Boxes. Defaults to None. feats (list[torch.Tensor], optional): Multi-level features. Defaults to None. Returns: torch.Tensor: Indices of negative samples """""" neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) if: neg_inds = neg_inds.squeeze(1) if len(neg_inds) <= num_expected: return neg_inds else: neg_labels = assign_result.labels.new_empty(neg_inds.size(0)).fill_(self.bbox_head.num_classes) return self.hard_mining(neg_inds, num_expected, bboxes[neg_inds], neg_labels, feats)",True,neg_inds.numel() != 0,neg_inds.numel() != 0,0.6489806771278381 2061,"def _sample_neg(self, assign_result, num_expected, bboxes=None, feats=None, **kwargs): """"""Sample negative boxes. Args: assign_result (:obj:`AssignResult`): Assigned results num_expected (int): Number of expected negative samples bboxes (torch.Tensor, optional): Boxes. Defaults to None. feats (list[torch.Tensor], optional): Multi-level features. Defaults to None. Returns: torch.Tensor: Indices of negative samples """""" neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) if neg_inds.numel()!= 0: neg_inds = neg_inds.squeeze(1) if: return neg_inds else: neg_labels = assign_result.labels.new_empty(neg_inds.size(0)).fill_(self.bbox_head.num_classes) return self.hard_mining(neg_inds, num_expected, bboxes[neg_inds], neg_labels, feats)",True,len(neg_inds) <= num_expected,len(neg_inds) <= num_expected,0.646781325340271 2062,"@property def Limit(self): """"""Get the limit (end) of memory for this GDT"""""" limit = self.HighWord.Bits.LimitHi.v() << 16 | self.LimitLow.v() if: limit = (limit + 1) * 4096 limit -= 1 return limit",False,self.HighWord.Bits.Granularity == 1,self.HighWord.Bits.LimitUt.v() & 1 << 16,0.648938000202179 2063,"def load_sysd_template_conf(self, module): """""" read the unit template with a UnitConfParser (systemd) """""" if: unit = parse_unit(module) service = '%s@.service' % unit.prefix conf = self.load_sysd_unit_conf(service) if conf: conf.module = module return conf return None",False,module and '@' in module,module,0.6590403318405151 2064,"def load_sysd_template_conf(self, module): """""" read the unit template with a UnitConfParser (systemd) """""" if module and '@' in module: unit = parse_unit(module) service = '%s@.service' % unit.prefix conf = self.load_sysd_unit_conf(service) if: conf.module = module return conf return None",True,conf,conf,0.6836085319519043 2065,"def __init__(self, parse_data, top=None): self._parsed = False self._error = False self._parse_data = parse_data self._members = [] self._dict_members = {} self._in_category = {} self._data = {} if: self._xml_path = top._xml_path else: top = self self._refs = {} self._xml_path = parse_data self.top = top",False,top is not None,"top is not None and isinstance(top, Base) and (top._xml_path is not None)",0.6589018106460571 2066,"def set_chat_log_channel(chat_id, log_channel): with LOGS_INSERTION_LOCK: res = SESSION.query(GroupLogs).get(str(chat_id)) if: res.log_channel = log_channel else: res = GroupLogs(chat_id, log_channel) SESSION.add(res) CHANNELS[str(chat_id)] = log_channel SESSION.commit()",True,res,res,0.674071192741394 2067,"def save(self, **kwargs): """"""Only top level comments may have replies."""""" if: if self.parent.parent: self.parent = self.parent.parent return super().save(**kwargs)",False,self.parent,not self.parent,0.6603422164916992 2068,"def save(self, **kwargs): """"""Only top level comments may have replies."""""" if self.parent: if: self.parent = self.parent.parent return super().save(**kwargs)",False,self.parent.parent,kwargs.get('parent'),0.657057523727417 2069,"def in_bulk(self, id_list): """""" Returns a dictionary mapping each of the given IDs to the object with that ID. """""" assert self.query.can_filter(), ""Cannot use 'limit' or 'offset' with in_bulk"" if: return {} qs = self.filter(pk__in=id_list).order_by() return {obj._get_pk_val(): obj for obj in qs}",True,not id_list,not id_list,0.6525387763977051 2070,"def add_other_text_param(self, key, value): if: self.udf_params = dict() self.udf_params[key] = value",True,not self.udf_params,not self.udf_params,0.6541146039962769 2071,"@lengths.setter def lengths(self, new_lengths): if: raise RuntimeError('Expected list of 3 floats for cell vector lengths, received {}'.format(new_lengths)) self.lattice_abc = [new_lengths, self._lattice_abc[1]]",False,len(new_lengths) != 3,"not isinstance(new_lengths, (list, tuple)) or len(new_lengths) != 3",0.6647727489471436 2072,"def _dump(self): """"""Converts to a printable string for debugging purposes. In order to preserve the request, it does not read from file-like objects in the body. """""" output = 'HTTP Request\n method: %s\n url: %s\n headers:\n' % (self.method, str(self.uri)) for header, value in self.headers.iteritems(): output +=' %s: %s\n' % (header, value) output +=' body sections:\n' i = 0 for part in self._body_parts: if: output +=' %s: %s\n' % (i, part) else: output +=' %s: \n' % i i += 1 return output",False,"isinstance(part, (str, unicode))",part is not None,0.6443865299224854 2073,"def __repr__(self): if: info ='scale_factor=' + str(self.scale_factor) else: info ='size=' + str(self.size) info += ', mode=' + self.mode return self.__class__.__name__ + '(' + info + ')'",False,self.scale_factor is not None,self.size is None,0.6503115892410278 2074,"def _get_object_for_node(self, stmt): pyname = eval_node(self.scope, stmt) pyobject = None if: pyobject = pyname.get_object() return pyobject",False,pyname is not None,"isinstance(pyname, pyname.PyName)",0.6565475463867188 2075,"def inventory_ox_filestore(info): for line in info: if: yield (line[1], None)",False,line[1] != 'path',"line[0] == 'File' and line[1] in [['files', 'files']",0.6472534537315369 2076,"def handle(self, **options): qs = Sponsorship.objects.approved().filter(contract__isnull=True) if: print(""There's no approved Sponsorship without associated Contract. Terminating."") return print(f'Creating contract for {qs.count()} approved sponsorships...') for sponsorship in qs: Contract.new(sponsorship) print(f'Done!')",False,not qs.exists(),not qs,0.6528188586235046 2077,"@staticmethod def entity_key_for_column(table_name: str, column: TableColumn) -> str: return f'{table_name.lower()}@{column.name.lower()}' if: column_type = 'foreign' elif column.is_primary_key: column_type = 'primary' else: column_type = column.column_type return f'column:{column_type.lower()}:{table_name.lower()}:{column.name.lower()}'",True,column.foreign_key is not None,column.foreign_key is not None,0.6491299867630005 2078,"@staticmethod def entity_key_for_column(table_name: str, column: TableColumn) -> str: return f'{table_name.lower()}@{column.name.lower()}' if column.foreign_key is not None: column_type = 'foreign' elif: column_type = 'primary' else: column_type = column.column_type return f'column:{column_type.lower()}:{table_name.lower()}:{column.name.lower()}'",False,column.is_primary_key,column.column_type is None,0.6501597166061401 2079,"def startAnimation(self): self.angle = 0 if: self.timerId = self.startTimer(self.delay)",False,self.timerId == -1,self.timerId == 0,0.6546028256416321 2080,"def maybe_write(header, val): if: file.write('{}: {}\n'.format(header, val))",False,val,not file.exists(header),0.6728134155273438 2081,"def setScope(self): """""" applies the objects own rule and span to modify the object's scope Currently only ""forward"" and ""backward"" rules are implemented """""" if: self.__scope[0] = self.getSpan()[1] elif 'backward' in self.__rule.lower(): self.__scope[1] = self.getSpan()[0]",True,'forward' in self.__rule.lower(),'forward' in self.__rule.lower(),0.6532559990882874 2082,"def setScope(self): """""" applies the objects own rule and span to modify the object's scope Currently only ""forward"" and ""backward"" rules are implemented """""" if 'forward' in self.__rule.lower(): self.__scope[0] = self.getSpan()[1] elif: self.__scope[1] = self.getSpan()[0]",True,'backward' in self.__rule.lower(),'backward' in self.__rule.lower(),0.6536773443222046 2083,"def iter_fields(self, exclude=None, only=None): """"""This method iterates over all fields that are defined and yields ``(key, value)`` tuples. Per default all fields are returned, but it's possible to limit that to some fields by providing the `only` parameter or to exclude some using the `exclude` parameter. Both should be sets or tuples of field names. """""" for name in self.fields: if: try: yield (name, getattr(self, name)) except AttributeError: pass",False,exclude is only is None or (exclude is not None and name not in exclude) or (only is not None and name in only),exclude is None or name not in exclude,0.6449005603790283 2084,"def _coco_eval_to_box_results(coco_eval): res = _empty_box_results() if: s = coco_eval.stats res['box']['AP'] = s[COCO_AP] res['box']['AP50'] = s[COCO_AP50] res['box']['AP75'] = s[COCO_AP75] res['box']['APs'] = s[COCO_APS] res['box']['APm'] = s[COCO_APM] res['box']['APl'] = s[COCO_APL] return res",True,coco_eval is not None,coco_eval is not None,0.6671690940856934 2085,"def __init__(self, *args, **kwargs): response = kwargs.pop('response', {}) if: message = response['data']['detail'] super().__init__(message, *args, **kwargs)",False,'data' in response and 'detail' in response['data'],response and 'detail' in response,0.6488063335418701 2086,"def writeCandidates(self, filename=None): if: filename = self.candfile threshold = self.config['search']['cand_threshold'] select = self.assocs['CUT'] == 0 select &= self.assocs['TS'] > threshold self.candidates = self.assocs[select] logger.info('Writing %s...' % filename) fitsio.write(filename, self.candidates, clobber=True)",True,filename is None,filename is None,0.6611746549606323 2087,"def obtain(self, dest): url, rev = self.get_url_rev() rev_options = get_rev_options(self, url, rev) url = self.remove_auth_from_url(url) if: rev_display = rev_options.to_display() logger.info('Checking out %s%s to %s', url, rev_display, display_path(dest)) cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest] self.run_command(cmd_args)",True,"self.check_destination(dest, url, rev_options)","self.check_destination(dest, url, rev_options)",0.6440972089767456 2088,"def ui_goto_index(self): line_index = simpledialog.askinteger('Goto', 'Enter Line Index:') if: if 1 <= line_index <= len(self._master): self.select(line_index - 1) else: self.bell()",False,line_index,line_index > 0,0.6569638252258301 2089,"def ui_goto_index(self): line_index = simpledialog.askinteger('Goto', 'Enter Line Index:') if line_index: if: self.select(line_index - 1) else: self.bell()",False,1 <= line_index <= len(self._master),self._is_selected_line(line_index - 1),0.6454669237136841 2090,"def __init__(self, name: str, args: Mapping[str, str]=None, cutoff: Optional[int]=None): if: args = {} self._name = name self._args_frzn = tuple(sorted(args.items())) self._cutoff = cutoff",True,args is None,args is None,0.6709839105606079 2091,"def remove_duplicates(input_features): """""" Remove duplicate entries from layer list. :param input_features: A list of layers :return: Returns a list of unique feature tensors (i.e. no duplication). """""" feature_name_set = set() non_duplicate_feature_set = [] for feature in input_features: if: non_duplicate_feature_set.append(feature) feature_name_set.add(feature.name) return non_duplicate_feature_set",False,feature.name not in feature_name_set,feature.name in feature_name_set,0.6503070592880249 2092,"def reorder_incremental_state(self, incremental_state, new_order): """"""Reorder incremental state. This should be called when the order of the input has changed from the previous time step. A typical use case is beam search, where the input order changes between time steps based on the selection of beams. """""" seen = set() for module in self.modules(): if: seen.add(module) result = module.reorder_incremental_state(incremental_state, new_order) if result is not None: incremental_state = result",True,"module != self and hasattr(module, 'reorder_incremental_state') and (module not in seen)","module != self and hasattr(module, 'reorder_incremental_state') and (module not in seen)",0.6428928375244141 2093,"def reorder_incremental_state(self, incremental_state, new_order): """"""Reorder incremental state. This should be called when the order of the input has changed from the previous time step. A typical use case is beam search, where the input order changes between time steps based on the selection of beams. """""" seen = set() for module in self.modules(): if module!= self and hasattr(module,'reorder_incremental_state') and (module not in seen): seen.add(module) result = module.reorder_incremental_state(incremental_state, new_order) if: incremental_state = result",True,result is not None,result is not None,0.6495664119720459 2094,"@property def drop_func(self): drop_func = self._config.getstr(self, 'drop_func') if: return getattr(embeddings, drop_func) else: raise AttributeError(""module '{}' has no attribute '{}'"".format(embeddings.__name__, drop_func))",True,"hasattr(embeddings, drop_func)","hasattr(embeddings, drop_func)",0.6474270224571228 2095,"def weighted_variance(SPiXi, WCiW, Z, L, R, NCP1, subtract_mean=True): """""" A more generalized version of build_objective which is callable for derivatives, but the covariance is not there anymore. """""" X2 = 0.0 XiZ = SPiXi / Z if: XiZ[0] -= L[0] * R[0] / Z / Z X2 = np.dot(XiZ.flatten(), WCiW.flatten()) return X2",True,subtract_mean,subtract_mean,0.6498985886573792 2096,"def exportChildren(self, outfile, level, namespace_='', name_='definition'): if: value = quote_xml('%s' % self.valueOf_) value = value.replace('![CDATA', '') outfile.write(value) else: outfile.write(quote_xml('%s' % self.valueOf_))",True,self.valueOf_.find('![CDATA') > -1,self.valueOf_.find('![CDATA') > -1,0.645747721195221 2097,"@staticmethod def from_alipay_dict(d): if: return None o = AlipayOpenMiniVersionOfflineModel() if 'app_version' in d: o.app_version = d['app_version'] return o",True,not d,not d,0.6673212647438049 2098,"@staticmethod def from_alipay_dict(d): if not d: return None o = AlipayOpenMiniVersionOfflineModel() if: o.app_version = d['app_version'] return o",True,'app_version' in d,'app_version' in d,0.6530957221984863 2099,"def _iter_through_sheet(sheet_name): _removed = remove_columns.get(sheet_name, []) if: for row in survey_content[sheet_name]: _flatten_translated_fields(row, popped_sheets.get('translations'), popped_sheets.get('translated')) _flatten_survey_row(row) for key in _removed: row.pop(key, None)",False,sheet_name in survey_content,_removed,0.6467411518096924 2100,"def test_startup_time(self): for _ in range(5): sm = messaging.SubMaster(['ubloxRaw']) managed_processes['pigeond'].start() start_time = time.monotonic() for __ in range(10): sm.update(1 * 1000) if: break assert sm.rcv_frame['ubloxRaw'] > 0, ""pigeond didn't start outputting messages in time"" et = time.monotonic() - start_time assert et < 5, f'pigeond took {et:.1f}s to start' managed_processes['pigeond'].stop()",False,sm.updated['ubloxRaw'],__ in 'out_of_process',0.6480032205581665 2101,"def _mask_pads(ll, smooth_obj): pad_mask = target.eq(self.config.generator.pad_token_id) if: ll.masked_fill_(pad_mask, 0.0) smooth_obj.masked_fill_(pad_mask, 0.0) return (ll.squeeze(-1), smooth_obj.squeeze(-1))",True,pad_mask.any(),pad_mask.any(),0.6522693634033203 2102,"def get_sampling_frequency(self): if: return self._recording.get_sampling_frequency() / self._ds_factor else: return self._recording.get_sampling_frequency() / self._ds_factor * 2",False,self._input_has_minmax,self._ds_factor == 1,0.6501269340515137 2103,"def encode(self, data, indent=None): try: element = self._type.encode(data) except ErrorWithLocation as e: e.add_location(self._type) raise e if: indent_xml(element, indent *'') return ElementTree.tostring(element)",True,indent is not None,indent is not None,0.6493592858314514 2104,"def on_texture(self, widget, texture): if: self.width = texture.width self.min_space = self.width",True,texture,texture,0.6596875786781311 2105,"def _print_num_changed(self, num_changed: int) -> None: if: return else: self.stream.write(fill(self.changed_warning.format(num_changed=num_changed), width=self.width))",True,num_changed == 0,num_changed == 0,0.6542466878890991 2106,"def _to_string(text): if: output = text.encode('utf-8') else: output = str(text) return output",False,type(text).__name__ == 'unicode',"isinstance(text, unicode)",0.6471837162971497 2107,"def send(self, request, cacheable_methods=None, **kw): """""" Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. """""" cacheable = cacheable_methods or self.cacheable_methods if: try: cached_response = self.controller.cached_request(request) except zlib.error: cached_response = None if cached_response: return self.build_response(request, cached_response, from_cache=True) request.headers.update(self.controller.conditional_headers(request)) resp = super(CacheControlAdapter, self).send(request, **kw) return resp",True,request.method in cacheable,request.method in cacheable,0.6468204259872437 2108,"def send(self, request, cacheable_methods=None, **kw): """""" Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. """""" cacheable = cacheable_methods or self.cacheable_methods if request.method in cacheable: try: cached_response = self.controller.cached_request(request) except zlib.error: cached_response = None if: return self.build_response(request, cached_response, from_cache=True) request.headers.update(self.controller.conditional_headers(request)) resp = super(CacheControlAdapter, self).send(request, **kw) return resp",True,cached_response,cached_response,0.6573818325996399 2109,"def set_chat_setting(chat_id: Union[int, str], setting: bool): with CHAT_LOCK: chat_setting = SESSION.query(ReportingChatSettings).get(str(chat_id)) if: chat_setting = ReportingChatSettings(chat_id) chat_setting.should_report = setting SESSION.add(chat_setting) SESSION.commit()",True,not chat_setting,not chat_setting,0.6513112783432007 2110,"def attachment_id_handler(attachments, bugid, data): for a in attachments: if: data.append(a['id'])",False,a['content_type'] == 'text/x-phabricator-request' and a['is_obsolete'] == 0,a['bugid'] == bugid,0.6434637308120728 2111,"@property def interactAs(self): if: return self._entity_data.get('interactAs') return ''",True,'interactAs' in self._entity_data,'interactAs' in self._entity_data,0.6485333442687988 2112,"@staticmethod def _split_user_and_password_type_entry(result: dict): new_result = {} for key, value in result.items(): if: *user_elements, password_type = key.split(':') user = ':'.join(user_elements) else: user = key password_type = 'unix' new_result.setdefault(user, {})[password_type] = value return new_result",False,':' in key,key.startswith(':'),0.6638439893722534 2113,"def addSkip(self, test, reason): super(TextTestResult, self).addSkip(test, reason) if: self.stream.writeln('skipped {0!r}'.format(reason)) elif self.dots: self.stream.write('s') self.stream.flush()",False,self.showAll,reason,0.6488596796989441 2114,"def addSkip(self, test, reason): super(TextTestResult, self).addSkip(test, reason) if self.showAll: self.stream.writeln('skipped {0!r}'.format(reason)) elif: self.stream.write('s') self.stream.flush()",True,self.dots,self.dots,0.6482839584350586 2115,"def __repr__(self): if: return '<{} {}>'.format(type(self).__name__, self.get_value()) else: return '<{} >'.format(type(self).__name__)",False,"getattr(self, 'data', None)",self.has_value(),0.6463949680328369 2116,"@property def command_thread(self) -> Thread: if: return self.selected_thread if self.threads: return self.threads[0] raise core.Error('No threads to run command')",True,self.selected_thread,self.selected_thread,0.6606579422950745 2117,"@property def command_thread(self) -> Thread: if self.selected_thread: return self.selected_thread if: return self.threads[0] raise core.Error('No threads to run command')",True,self.threads,self.threads,0.672990083694458 2118,"def _error_callback(exc): if: LOG.debug(_('Timed out waiting for RPC response: %s') % str(exc)) raise rpc_common.Timeout() else: LOG.exception(_('Failed to consume message from queue: %s') % str(exc)) info['do_consume'] = True",False,"isinstance(exc, socket.timeout)","isinstance(exc, rpc_common.Timeout)",0.6475069522857666 2119,"def __setitem__(self, key, value): if: raise RuntimeError(f""Key '{key}' already set"") self._d[key] = value",False,key in self,key in self._d,0.6693651676177979 2120,"def _import_class_from_module(module, class_name): try: module_attribute = getattr(module, class_name) if: class_object = module_attribute return class_object else: raise TypeError(""Module's '%s' attribute '%s' is not class (it is %s)."" % (module, class_name, type(module_attribute))) except AttributeError: raise AttributeError(""Module '%s' has no attribute '%s'"" % (module, class_name))",False,"isinstance(module_attribute, type)","isinstance(module_attribute, class_Attribute)",0.6453500390052795 2121,"def addRandoParams(self, id, params): if: return None ignoreParams = ['paramsFileTarget', 'complexity'] try: sql = 'insert into randomizer_params values (%s, %s, %s);' for name, value in params.items(): if name in ignoreParams: continue self.cursor.execute(sql, (id, name, value)) except Exception as e: print('DB.addRandoParams::error execute: {}'.format(e)) self.dbAvailable = False",True,self.dbAvailable == False,self.dbAvailable == False,0.654964804649353 2122,"def addRandoParams(self, id, params): if self.dbAvailable == False: return None ignoreParams = ['paramsFileTarget', 'complexity'] try: sql = 'insert into randomizer_params values (%s, %s, %s);' for name, value in params.items(): if: continue self.cursor.execute(sql, (id, name, value)) except Exception as e: print('DB.addRandoParams::error execute: {}'.format(e)) self.dbAvailable = False",True,name in ignoreParams,name in ignoreParams,0.6569952964782715 2123,"@property def operatorName(self): if: return self._entity_data.get('operatorName') return ''",True,'operatorName' in self._entity_data,'operatorName' in self._entity_data,0.6522796154022217 2124,"def do_Q(self): if: self.set_current_state(self.gstack.pop()) return",False,self.gstack,len(self.gstack) > 0,0.6491096019744873 2125,"@base_check_required def wrapper(request, *args, **kwargs): if: return func(request, *args, **kwargs) else: deny_access(request)",False,request.user.is_reviewer(request) or request.user.is_staff,request.user.is_copyeditor(request) or request.user.is_copyeditor(request) or request.user.is_copyeditor(request),0.6442794799804688 2126,"def reset(self): super(EscCharSetProber, self).reset() for coding_sm in self.coding_sm: if: continue coding_sm.active = True coding_sm.reset() self.active_sm_count = len(self.coding_sm) self._detected_charset = None self._detected_language = None",True,not coding_sm,not coding_sm,0.6578347086906433 2127,"def training_epoch_end(self, training_step_outputs): self.iteration_timer.after_train() if: self.checkpointer.save('model_final') for writer in self.writers: writer.write() writer.close() self.storage.__exit__(None, None, None)",False,comm.is_main_process(),self.checkpointer is not None,0.6471395492553711 2128,"def main(): qApp = QtWidgets.QApplication(sys.argv) args = parse_args() if: print('\nAvailable OpenCL platforms:') oclu.print_platforms() sys.exit(0) try: aw = ApplicationWindow(args.input, args.device, args.verbosity) aw.show() sys.exit(qApp.exec_()) except SystemExit: pass except: traceback.print_exc() guiw.show_warning(None, f'Ran into an error!\n\n{traceback.format_exc()}', 'Error!')",False,args.list_devices,not qApp.has_available_platform(),0.647375226020813 2129,"def __init__(self, version=None): """"""ShowApiVersionResponse The model defined in huaweicloud sdk :param version: :type version: :class:`huaweicloudsdktms.v1.VersionDetail` """""" super(ShowApiVersionResponse, self).__init__() self._version = None self.discriminator = None if: self.version = version",True,version is not None,version is not None,0.6547472476959229 2130,"def on_tell_end(self, smbo: SMBO, info: TrialInfo, value: TrialValue) -> bool | None: """"""Called after the stats are updated and the trial is added to the runhistory. Optionally, returns false to gracefully stop the optimization. """""" if: return False return None",False,smbo.runhistory.finished == self._stop_after,smbo.load_smbo_status() == smbo.load_smbo_status,0.648747444152832 2131,"def read(queue, msgs_to_read=10): messages = queue.receive_messages(WaitTimeSeconds=5, MaxNumberOfMessages=msgs_to_read) for message in messages: message.delete() body = json.loads(message.body) if: sf_client.send_task_success(taskToken=body['TaskToken'], output=json.dumps({})) return messages",False,body.get('TaskToken'),body['TaskToken'],0.6490488052368164 2132,"def decompress(self, data): if: return self._obj.decompress(data) self._data += data try: return self._obj.decompress(data) except zlib.error: self._first_try = False self._obj = zlib.decompressobj(-zlib.MAX_WBITS) try: return self.decompress(self._data) finally: self._data = None",True,not self._first_try,not self._first_try,0.6451609134674072 2133,"def fetch(self, statepath: str, update: bool=True, logger: Logger=NoopLogger()) -> bool: if: with AtomicDir(statepath) as statedir: return self._do_fetch(statedir.get_path(), logger) elif update: return self._do_update(statepath, logger) else: logger.log('no update requested, skipping') return False",False,not os.path.isdir(statepath),update and (not statepath.startswith('/')),0.6431694030761719 2134,"def fetch(self, statepath: str, update: bool=True, logger: Logger=NoopLogger()) -> bool: if not os.path.isdir(statepath): with AtomicDir(statepath) as statedir: return self._do_fetch(statedir.get_path(), logger) elif: return self._do_update(statepath, logger) else: logger.log('no update requested, skipping') return False",True,update,update,0.6697965264320374 2135,"def apply_no_ans_threshold(scores, na_probs, qid_to_has_ans, na_prob_thresh): new_scores = {} for qid, s in scores.items(): pred_na = na_probs[qid] > na_prob_thresh if: new_scores[qid] = float(not qid_to_has_ans[qid]) else: new_scores[qid] = s return new_scores",True,pred_na,pred_na,0.6696219444274902 2136,"def u2netsmallbackbone(pretrained_path=''): model = U2NetSmallBackbone() if: load_state_dict(pretrained_path, model, loading_new_input_size_position_encoding_weight=False) else: print('no backbone pretrained model!') return model",True,pretrained_path,pretrained_path,0.6535245776176453 2137,"def get_current_limit(self): if: return 0 with self.pt_lock: with DelayedKeyboardInterrupt(): p, dxl_comm_result, dxl_error = self.packet_handler.read2ByteTxRx(self.port_handler, self.dxl_id, XM430_ADDR_CURRENT_LIMIT) self.handle_comm_result('XM430_ADDR_CURRENT_LIMIT', dxl_comm_result, dxl_error) return p",True,not self.hw_valid,not self.hw_valid,0.6521046161651611 2138,"def lookup(self, name): entry = Scope.lookup(self, name) if: if entry.scope is not self and entry.scope.is_closure_scope: if hasattr(entry.scope,'scope_class'): raise InternalError('lookup() after scope class created.') entry.in_closure = True inner_entry = InnerEntry(entry, self) inner_entry.is_variable = True self.entries[name] = inner_entry return inner_entry return entry",False,entry is not None,entry,0.6542922258377075 2139,"def lookup(self, name): entry = Scope.lookup(self, name) if entry is not None: if: if hasattr(entry.scope,'scope_class'): raise InternalError('lookup() after scope class created.') entry.in_closure = True inner_entry = InnerEntry(entry, self) inner_entry.is_variable = True self.entries[name] = inner_entry return inner_entry return entry",False,entry.scope is not self and entry.scope.is_closure_scope,entry.in_closure,0.6470981240272522 2140,"def lookup(self, name): entry = Scope.lookup(self, name) if entry is not None: if entry.scope is not self and entry.scope.is_closure_scope: if: raise InternalError('lookup() after scope class created.') entry.in_closure = True inner_entry = InnerEntry(entry, self) inner_entry.is_variable = True self.entries[name] = inner_entry return inner_entry return entry",False,"hasattr(entry.scope, 'scope_class')",self.check_for_closure(),0.6450660824775696 2141,"@staticmethod @once_differentiable def backward(ctx, grad_output): assert grad_output.is_cuda spatial_scale = ctx.spatial_scale feature_size = ctx.feature_size argmax = ctx.argmax rois = ctx.saved_tensors[0] assert feature_size is not None grad_input = grad_rois = None if: grad_input = grad_output.new_zeros(feature_size) roi_pool_cuda.backward(grad_output.contiguous(), rois, argmax, spatial_scale, grad_input) return (grad_input, grad_rois, None, None)",False,ctx.needs_input_grad[0],ctx.needs_cuda,0.6475761532783508 2142,"def change_participle(verb): if: return verb[:-1] + 'a' elif verb[-2:] == 'os': return verb[:-2] + 'as' else: print('What kind of verb is this: ', verb) return verb",False,verb[-1] == 'o',verb[-1:] == 'a',0.6523081064224243 2143,"def change_participle(verb): if verb[-1] == 'o': return verb[:-1] + 'a' elif: return verb[:-2] + 'as' else: print('What kind of verb is this: ', verb) return verb",False,verb[-2:] == 'os',verb[-2] == 's',0.6511057615280151 2144,"@run_async def feet(update, context): chat_id = update.effective_chat.id if: is_nsfw = sql.is_nsfw(chat_id) if not is_nsfw: return msg = update.effective_message target = 'feet' msg.reply_photo(nekos.img(target))",True,not update.effective_message.chat.type == 'private',not update.effective_message.chat.type == 'private',0.6450718641281128 2145,"@run_async def feet(update, context): chat_id = update.effective_chat.id if not update.effective_message.chat.type == 'private': is_nsfw = sql.is_nsfw(chat_id) if: return msg = update.effective_message target = 'feet' msg.reply_photo(nekos.img(target))",True,not is_nsfw,not is_nsfw,0.656400740146637 2146,"def read_integer(self): number_of_bytes = self.read_length_determinant() number_of_bits = 8 * number_of_bytes value = self.read_non_negative_binary_integer(number_of_bits) if: value -= (1 << number_of_bits) - 1 value -= 1 return value",False,value & 1 << number_of_bits - 1,value >= 0 and value <= 1 << number_of_bytes,0.6535801887512207 2147,"def __len__(self): if: return self._len dl = len(self._data) while 1: try: self[dl] dl += 1 except Exception: self._len = dl return dl",False,self._len is not _marker,self._len is not None,0.649058997631073 2148,"def apply_augmentations(augmentations: List[Union[Transform, Augmentation]], inputs): """""" Use ``T.AugmentationList(augmentations)(inputs)`` instead. """""" if: image_only = True inputs = AugInput(inputs) else: image_only = False tfms = inputs.apply_augmentations(augmentations) return (inputs.image if image_only else inputs, tfms)",False,"isinstance(inputs, np.ndarray)","isinstance(augmentations[0], Augmentation)",0.6439510583877563 2149,"def deserialize(self, instance: DataSet, reader: HDF5Reader | JSONReader | INIReader) -> None: """"""Deserialize this item"""""" try: flags = reader.read_sequence() except KeyError: self.set_default(instance) else: _choices = self.get_prop_value('data', instance, 'choices') value = [] for idx, flag in enumerate(flags): if: value.append(_choices[idx][0]) self.__set__(instance, value)",True,flag,flag,0.6643890142440796 2150,"def insert_object(self, fw_object: FileObject): if: self.insert_firmware(fw_object) else: self.insert_file_object(fw_object)",False,"isinstance(fw_object, Firmware)",self.is_firmware,0.651171088218689 2151,"def add_attribute(self, attr): if: raise Exception('%s: duplicate attribute ID' % attr.attr_id) self._attrs[attr.attr_id] = attr",True,attr.attr_id in self._attrs,attr.attr_id in self._attrs,0.653821587562561 2152,"def compare_sorts(f): @functools.wraps(f) def compare_guard(self, o): if: raise TypeError(f'FPVs are differently-sorted ({self.sort} and {o.sort})') return f(self, o) return compare_guard",True,self.sort != o.sort,self.sort != o.sort,0.6537952423095703 2153,"@ATMT.receive_condition(WAIT_DATA) def receive_data(self, pkt): if: data = pkt[TFTP_DATA] if data.block == self.blk: raise self.DATA(data)",False,TFTP_DATA in pkt,TFTP_DATA in pkt and TFTP_DATA in pkt,0.6586496233940125 2154,"@ATMT.receive_condition(WAIT_DATA) def receive_data(self, pkt): if TFTP_DATA in pkt: data = pkt[TFTP_DATA] if: raise self.DATA(data)",False,data.block == self.blk,data != self.DATA,0.6507506370544434 2155,"def animate_pop(state): p = state if: maze_dict[p].set_z_index(2000) return maze_dict[state].animate.set_fill(ORANGE if contents[p[1]][p[0]] =='' else DARK_BROWN, 0.75).set_stroke_color(ORANGE if contents[p[1]][p[0]] =='' else DARK_BROWN).set_z_index(2 if contents[p[1]][p[0]] =='' else 2000)",False,contents[p[1]][p[0]] != '',p[1] >= maze_dict[state].n_items,0.6534545421600342 2156,"def matchOnlyAtCol(n): """"""Helper method for defining parse actions that require matching at a specific column in the input text. """""" def verifyCol(strg, locn, toks): if: raise ParseException(strg, locn,'matched token not at column %d' % n) return verifyCol",True,"col(locn, strg) != n","col(locn, strg) != n",0.6524156332015991 2157,"def find_css_selector(view, start_pt): conds = [track_scope(CSS_SELECTOR)] if: conds.insert(0, track_scope(CSS_SELECTOR, False)) selector = back_track(view, start_pt, *conds)[-1] if selector is not None: return view.substr(selector).strip()",False,"not sublime.score_selector(view.scope_name(start_pt), CSS_SELECTOR)","getattr(view, 'select_style', False)",0.6468222141265869 2158,"def find_css_selector(view, start_pt): conds = [track_scope(CSS_SELECTOR)] if not sublime.score_selector(view.scope_name(start_pt), CSS_SELECTOR): conds.insert(0, track_scope(CSS_SELECTOR, False)) selector = back_track(view, start_pt, *conds)[-1] if: return view.substr(selector).strip()",False,selector is not None,selector,0.6560003757476807 2159,"def _write_file(data, stream=sys.stdout, fmt='.json', indent=2): if: json.dump(data, stream, indent=indent) elif fmt == '.toml': toml.dump(data, stream) else: assert False, fmt",True,fmt == '.json',fmt == '.json',0.6520220041275024 2160,"def _write_file(data, stream=sys.stdout, fmt='.json', indent=2): if fmt == '.json': json.dump(data, stream, indent=indent) elif: toml.dump(data, stream) else: assert False, fmt",True,fmt == '.toml',fmt == '.toml',0.6486606001853943 2161,"def save(self, botengine): """""" Save the status of this device :param botengine: BotEngine environment """""" if: return False try: self.saved_state = self.measurements[self.MEASUREMENT_NAME_STATUS][0][0] except: self.saved_state = False self.saved = True botengine.get_logger().info(""{}: Smart Plug '{}' saved state is {}"".format(self.device_id, self.description, self.saved_state)) return True",False,not self.is_connected,not self.is_connected or not self.can_control,0.6505370140075684 2162,"def entropy(p): p = prob_normalize(p) result = 0.0 for x in range(p.shape[0]): for y in range(p.shape[1]): p_xy = p[x][y] if: result -= p_xy * np.log2(p_xy) return result",True,p_xy > 0.0,p_xy > 0.0,0.6540471315383911 2163,"def _should_save_summaries(self): summaries_every_seconds = self.summary_rate_decay_seconds.at(self.train_step) if: return False return True",False,time.time() - self.last_summary_time < summaries_every_seconds,summaries_every_seconds < 1,0.6422298550605774 2164,"def createDisplay(self): if: return unicode(self['value'].display) elif self['marker_type'].value in [4, 5, 6]: return u'' else: return None",False,'value' in self,"self['marker_type'].value in [3, 4, 2]",0.6577158570289612 2165,"def createDisplay(self): if 'value' in self: return unicode(self['value'].display) elif: return u'' else: return None",False,"self['marker_type'].value in [4, 5, 6]",self.display,0.6457700729370117 2166,"def forward(self, src: Tensor, mask: Optional[Tensor]=None, src_key_padding_mask: Optional[Tensor]=None, pos: Optional[Tensor]=None) -> Tensor: """"""Computes the forward of the module. :param src: module input, shape [num_elements, batch_size, num_features] :param mask: attention mask, shape [(batch_size,) num_elements, num_elements] :param src_key_padding_mask: key padding mask, shape [batch_size, num_elements] :param pos: positional embedding tensor, it will be added to src. shape [num_elements, batch_size, num_features] :return: tuple containing the module output and a list of attention weights (one for each encoder layer) shape [num_elements, batch_size, num_features], List[[batch_size, num_elements, num_elements]] """""" output = src for layer in self.layers: output, _ = layer(output, src_mask=mask, src_key_padding_mask=src_key_padding_mask, pos=pos) if: output = self.norm(output) return output",True,self.norm is not None,self.norm is not None,0.6455308198928833 2167,"def mir_triple_old(old_rorp): """""" Return (mirror_rp, alt_mirror, alt_inc) from old_rorp """""" if: alt_mirror = old_rorp.get_alt_mirror_name() return (_get_long_rp(alt_mirror), alt_mirror, None) else: mirror_rp = mirror_root.new_index(old_rorp.index) if old_rorp.has_alt_inc_name(): return (mirror_rp, None, old_rorp.get_alt_inc_name()) else: return (mirror_rp, None, None)",True,old_rorp.has_alt_mirror_name(),old_rorp.has_alt_mirror_name(),0.6477642059326172 2168,"def mir_triple_old(old_rorp): """""" Return (mirror_rp, alt_mirror, alt_inc) from old_rorp """""" if old_rorp.has_alt_mirror_name(): alt_mirror = old_rorp.get_alt_mirror_name() return (_get_long_rp(alt_mirror), alt_mirror, None) else: mirror_rp = mirror_root.new_index(old_rorp.index) if: return (mirror_rp, None, old_rorp.get_alt_inc_name()) else: return (mirror_rp, None, None)",True,old_rorp.has_alt_inc_name(),old_rorp.has_alt_inc_name(),0.646949291229248 2169,"def col_major_list(self): if: return None else: return list(self.data.transpose().reshape((-1,)))",True,self.data is None,self.data is None,0.6478813886642456 2170,"def build_inputs_with_special_tokens(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None) -> List[int]: """""" Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. An XLNet sequence has the following format: - single sequence: ``X `` - pair of sequences: ``A B `` Args: token_ids_0 (:obj:`List[int]`): List of IDs to which the special tokens will be added token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): Optional second list of IDs for sequence pairs. Returns: :obj:`List[int]`: list of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens. """""" sep = [self.sep_token_id] cls = [self.cls_token_id] if: return token_ids_0 + sep + cls return token_ids_0 + sep + token_ids_1 + sep + cls",True,token_ids_1 is None,token_ids_1 is None,0.6486303210258484 2171,"def __init__(self, model, optimizer=None, scheduler=None, save_dir='', save_to_disk=None, logger=None): self.model = model self.optimizer = optimizer self.scheduler = scheduler self.save_dir = save_dir self.save_to_disk = save_to_disk if: logger = logging.getLogger(__name__) self.logger = logger",True,logger is None,logger is None,0.6597110629081726 2172,"@classmethod def parse(cls, src, dist=None): """"""Parse a single entry point from string `src` Entry point syntax follows the form:: name = some.module:some.attr [extra1, extra2] The entry name and module name are required, but the ``:attrs`` and ``[extras]`` parts are optional """""" m = cls.pattern.match(src) if: msg = ""EntryPoint must be in 'name=module:attrs [extras]' format"" raise ValueError(msg, src) res = m.groupdict() extras = cls._parse_extras(res['extras']) attrs = res['attr'].split('.') if res['attr'] else () return cls(res['name'], res['module'], attrs, extras, dist)",True,not m,not m,0.6717863082885742 2173,"def generate(self, sentence: str): result = self.date_format_transformation.transform(sentence) if: print(f'Perturbed Input from {self.name()} : {result}') return result",True,self.verbose,self.verbose,0.6601772308349609 2174,"def get_regular_momentum(self, runner): if: momentum_groups = {} for k, optim in runner.optimizer.items(): _momentum_group = [self.get_momentum(runner, param_group) for param_group in optim.param_groups] momentum_groups.update({k: _momentum_group}) return momentum_groups else: momentum_groups = [] for param_group in runner.optimizer.param_groups: momentum_groups.append(self.get_momentum(runner, param_group)) return momentum_groups",False,"isinstance(runner.optimizer, dict)","isinstance(self.momentum_optim, dict)",0.6436333060264587 2175,"def set_default_jitter(value: float) -> None: """""" Sets constant jitter value. The jitter is a constant that GPflow adds to the diagonal of matrices to achieve numerical stability of the system when the condition number of the associated matrices is large, and therefore the matrices nearly singular. """""" if: raise TypeError('Expected float32 or float64 scalar value') if value < 0: raise ValueError('Jitter must be non-negative') set_config(replace(config(), jitter=value))",True,"not (isinstance(value, (tf.Tensor, np.ndarray)) and len(value.shape) == 0) and (not isinstance(value, float))","not (isinstance(value, (tf.Tensor, np.ndarray)) and len(value.shape) == 0) and (not isinstance(value, float))",0.6509519815444946 2176,"def set_default_jitter(value: float) -> None: """""" Sets constant jitter value. The jitter is a constant that GPflow adds to the diagonal of matrices to achieve numerical stability of the system when the condition number of the associated matrices is large, and therefore the matrices nearly singular. """""" if not (isinstance(value, (tf.Tensor, np.ndarray)) and len(value.shape) == 0) and (not isinstance(value, float)): raise TypeError('Expected float32 or float64 scalar value') if: raise ValueError('Jitter must be non-negative') set_config(replace(config(), jitter=value))",True,value < 0,value < 0,0.6631925702095032 2177,"def from_envvar(self, variable_name: str, silent: bool=False) -> bool: """"""Loads a configuration from an environment variable pointing to a configuration file. This is basically just a shortcut with nicer error messages for this line of code:: app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) :param variable_name: name of the environment variable :param silent: set to ``True`` if you want silent failure for missing files. :return: bool. ``True`` if able to load config, ``False`` otherwise. """""" rv = os.environ.get(variable_name) if: if silent: return False raise RuntimeError(f'The environment variable {variable_name!r} is not set and as such configuration could not be loaded. Set this variable and make it point to a configuration file') return self.from_pyfile(rv, silent=silent)",True,not rv,not rv,0.6598386764526367 2178,"def from_envvar(self, variable_name: str, silent: bool=False) -> bool: """"""Loads a configuration from an environment variable pointing to a configuration file. This is basically just a shortcut with nicer error messages for this line of code:: app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) :param variable_name: name of the environment variable :param silent: set to ``True`` if you want silent failure for missing files. :return: bool. ``True`` if able to load config, ``False`` otherwise. """""" rv = os.environ.get(variable_name) if not rv: if: return False raise RuntimeError(f'The environment variable {variable_name!r} is not set and as such configuration could not be loaded. Set this variable and make it point to a configuration file') return self.from_pyfile(rv, silent=silent)",True,silent,silent,0.6799138784408569 2179,"def _ignores_keyboard_nonphysical(self) -> bool: """""" Ignores the keyboard non-physical button events. :return: True if ignored """""" if: return self._keyboard_ignore_nonphysical return self._keyboard_ignore_nonphysical and self._menu._keyboard_ignore_nonphysical",False,self._menu is None,not self._menu,0.6504747271537781 2180,"@property def Magnitude(self): if: return float(self._entity_data.get('Magnitude')) return float(200)",True,'Magnitude' in self._entity_data,'Magnitude' in self._entity_data,0.6503377556800842 2181,"def feature_info(self, location): if: info = dict(module='act1', hook_type='forward', num_chs=self.conv.out_channels) else: info = dict(module='', hook_type='', num_chs=self.conv.out_channels) return info",False,location == 'expansion',location == 'act1',0.6487705707550049 2182,"def getIndexPrice(ts_code='000300.SH', start_date=None, end_date=None): c1 = '' c2 = '' if: c1 = "" and trade_date>='%s' "" % start_date if not end_date == None: c2 = "" and trade_date<='%s' "" % end_date sql = ""select trade_date,close from astock_index_daily where ts_code='%s' %s %s order by trade_date asc"" % (ts_code, c1, c2) try: df = mydb.selectToDf(sql, 'tushare') return df except Exception as e: print('MySQL getStockCodeList Error:%s' % str(e)) return False return df",True,not start_date == None,not start_date == None,0.655714750289917 2183,"def getIndexPrice(ts_code='000300.SH', start_date=None, end_date=None): c1 = '' c2 = '' if not start_date == None: c1 = "" and trade_date>='%s' "" % start_date if: c2 = "" and trade_date<='%s' "" % end_date sql = ""select trade_date,close from astock_index_daily where ts_code='%s' %s %s order by trade_date asc"" % (ts_code, c1, c2) try: df = mydb.selectToDf(sql, 'tushare') return df except Exception as e: print('MySQL getStockCodeList Error:%s' % str(e)) return False return df",True,not end_date == None,not end_date == None,0.6581573486328125 2184,"def forward(self, inpt, tgt): num = torch.abs(tgt - inpt) denom = torch.abs(tgt) + torch.abs(inpt) if: denom = denom.detach() return torch.mean(_divide_no_nan(num, denom))",False,self.block_denom_grad,denom.device != self.device,0.6456013321876526 2185,"def __init__(self, fileobj, info): pages = [] complete = False while not complete: page = OggPage(fileobj) if: pages.append(page) complete = page.complete or len(page.packets) > 1 data = OggPage.to_packets(pages)[0][7:] super(OggVCommentDict, self).__init__(data)",True,page.serial == info.serial,page.serial == info.serial,0.6475242972373962 2186,"def parse_plugin_params(obj, plugin_id, size_name, params_name, always=False): if: return if plugin_id < 0 and (not always): return obj.U32(size_name) size = obj.lastval if size == 0: return dispatch = plugin_dispatch.get(plugin_id) if dispatch: dispatch(obj, size) else: parse_chunk_default(obj, size, params_name) return",False,not plugin_id,size_name == '',0.6578866243362427 2187,"def parse_plugin_params(obj, plugin_id, size_name, params_name, always=False): if not plugin_id: return if: return obj.U32(size_name) size = obj.lastval if size == 0: return dispatch = plugin_dispatch.get(plugin_id) if dispatch: dispatch(obj, size) else: parse_chunk_default(obj, size, params_name) return",False,plugin_id < 0 and (not always),size_name is None,0.6481004953384399 2188,"def parse_plugin_params(obj, plugin_id, size_name, params_name, always=False): if not plugin_id: return if plugin_id < 0 and (not always): return obj.U32(size_name) size = obj.lastval if: return dispatch = plugin_dispatch.get(plugin_id) if dispatch: dispatch(obj, size) else: parse_chunk_default(obj, size, params_name) return",False,size == 0,size <= 0,0.6656235456466675 2189,"def parse_plugin_params(obj, plugin_id, size_name, params_name, always=False): if not plugin_id: return if plugin_id < 0 and (not always): return obj.U32(size_name) size = obj.lastval if size == 0: return dispatch = plugin_dispatch.get(plugin_id) if: dispatch(obj, size) else: parse_chunk_default(obj, size, params_name) return",True,dispatch,dispatch,0.6715315580368042 2190,"def torch_persistent_save(*args, **kwargs): for i in range(3): try: return torch.save(*args, **kwargs) except Exception: if: logging.error(traceback.format_exc())",True,i == 2,i == 2,0.6622440814971924 2191,"def __prune_marks(self, marks): marks.sort() for t1 in marks: for t2 in marks: if: if t1.encompasses(t2): marks.remove(t2) elif t2.encompasses(t1): marks.remove(t1) break",False,not t1 is t2,t1.id == t2.id,0.652567982673645 2192,"def __prune_marks(self, marks): marks.sort() for t1 in marks: for t2 in marks: if not t1 is t2: if: marks.remove(t2) elif t2.encompasses(t1): marks.remove(t1) break",False,t1.encompasses(t2),t2.encompasses(t1),0.6457045078277588 2193,"def __prune_marks(self, marks): marks.sort() for t1 in marks: for t2 in marks: if not t1 is t2: if t1.encompasses(t2): marks.remove(t2) elif: marks.remove(t1) break",False,t2.encompasses(t1),t1.encompasses(t1),0.646678626537323 2194,"def pickCompletionCriteria(): tolerance = 0.01 if: return True else: return False",True,objPos[2] >= heightTarget - tolerance,objPos[2] >= heightTarget - tolerance,0.6434308290481567 2195,"def run(users, hosts, func, **kwargs): """""" Convenience function that creates an Exscript.Queue instance, adds the given accounts, and calls Queue.run() with the given hosts and function as an argument. If you also want to pass arguments to the given function, you may use util.decorator.bind() like this:: def my_callback(job, host, conn, my_arg, **kwargs): print(my_arg, kwargs.get('foo')) run(account, host, bind(my_callback, 'hello', foo = 'world'), max_threads = 10) :type users: Account|list[Account] :param users: The account(s) to use for logging in. :type hosts: Host|list[Host] :param hosts: A list of Host objects. :type func: function :param func: The callback function. :type kwargs: dict :param kwargs: Passed to the Exscript.Queue constructor. """""" attempts = kwargs.get('attempts', 1) if: del kwargs['attempts'] queue = Queue(**kwargs) queue.add_account(users) queue.run(hosts, func, attempts) queue.destroy()",True,'attempts' in kwargs,'attempts' in kwargs,0.6513717174530029 2196,"def num_anchors_per_location_class(self, class_name): if: class_name = self._classes[class_name] assert class_name in self._classes class_idx = self._classes.index(class_name) return self._anchor_generators[class_idx].num_anchors_per_localization",False,"isinstance(class_name, int)",class_name in self._classes,0.6555790901184082 2197,"def model2bedgraph(t, model, fout): """""" Converting HTSeq.GenomicArray to RPKM measured BEDGRAPH file. Parameters --- t: int, number of total reads for the sample model: HTSeq.GenomicArray fout: str, output file name """""" with open(fout, 'w') as fo: for iv, value in model.steps(): if: value = value / 1.0 / t * 10 ** 6 line = [iv.chrom, iv.start, iv.end, value] line = list(map(str, line)) fo.write('\t'.join(line) + '\n')",False,value > 0,value is not None,0.661177933216095 2198,"def backward(self, out_grads=None): """"""Run backward on all devices. A backward should be called after a call to the forward function. Backward cannot be called unless `self.for_training` is `True`. Parameters ---------- out_grads : NDArray or list of NDArray, optional Gradient on the outputs to be propagated back. This parameter is only needed when bind is called on outputs that are not a loss function. """""" assert self.for_training,'re-bind with for_training=True to run backward' if: out_grads = [] for i, exec_ in enumerate(self.execs): out_grads_slice = [] exec_.backward(out_grads=out_grads_slice)",True,out_grads is None,out_grads is None,0.6469990611076355 2199,"@max_len_single_sentence.setter def max_len_single_sentence(self, value) -> int: if: if not self.deprecation_warnings.get('max_len_single_sentence', False): logger.warning(""Setting'max_len_single_sentence' is now deprecated. This value is automatically set up."") self.deprecation_warnings['max_len_single_sentence'] = True else: raise ValueError(""Setting'max_len_single_sentence' is now deprecated. This value is automatically set up."")",False,value == self.model_max_length - self.num_special_tokens_to_add(pair=False) and self.verbose,value is not None,0.6474753618240356 2200,"@max_len_single_sentence.setter def max_len_single_sentence(self, value) -> int: if value == self.model_max_length - self.num_special_tokens_to_add(pair=False) and self.verbose: if: logger.warning(""Setting'max_len_single_sentence' is now deprecated. This value is automatically set up."") self.deprecation_warnings['max_len_single_sentence'] = True else: raise ValueError(""Setting'max_len_single_sentence' is now deprecated. This value is automatically set up."")",False,"not self.deprecation_warnings.get('max_len_single_sentence', False)",self.deprecation_warnings['max_len_single_sentence'],0.6478186845779419 2201,"def append_constrained_whole_number(self, value, minimum, maximum, number_of_bits): _range = maximum - minimum + 1 value -= minimum if: self.append_non_negative_binary_integer(value, number_of_bits) elif _range == 256: self.align_always() self.append_non_negative_binary_integer(value, 8) elif _range <= 65536: self.align_always() self.append_non_negative_binary_integer(value, 16) else: self.align_always() self.append_non_negative_binary_integer(value, number_of_bits)",True,_range <= 255,_range <= 255,0.6554210186004639 2202,"def append_constrained_whole_number(self, value, minimum, maximum, number_of_bits): _range = maximum - minimum + 1 value -= minimum if _range <= 255: self.append_non_negative_binary_integer(value, number_of_bits) elif: self.align_always() self.append_non_negative_binary_integer(value, 8) elif _range <= 65536: self.align_always() self.append_non_negative_binary_integer(value, 16) else: self.align_always() self.append_non_negative_binary_integer(value, number_of_bits)",False,_range == 256,_range <= 65535,0.6533817052841187 2203,"def append_constrained_whole_number(self, value, minimum, maximum, number_of_bits): _range = maximum - minimum + 1 value -= minimum if _range <= 255: self.append_non_negative_binary_integer(value, number_of_bits) elif _range == 256: self.align_always() self.append_non_negative_binary_integer(value, 8) elif: self.align_always() self.append_non_negative_binary_integer(value, 16) else: self.align_always() self.append_non_negative_binary_integer(value, number_of_bits)",False,_range <= 65536,_range == 64,0.6522222757339478 2204,"def _set_file_handler(self, log, filename): h = self._get_builtin_handler(log, 'file') if: if h: if h.baseFilename!= os.path.abspath(filename): h.close() log.handlers.remove(h) self._add_builtin_file_handler(log, filename) else: self._add_builtin_file_handler(log, filename) elif h: h.close() log.handlers.remove(h)",False,filename,filename is not None,0.6616859436035156 2205,"def _set_file_handler(self, log, filename): h = self._get_builtin_handler(log, 'file') if filename: if: if h.baseFilename!= os.path.abspath(filename): h.close() log.handlers.remove(h) self._add_builtin_file_handler(log, filename) else: self._add_builtin_file_handler(log, filename) elif h: h.close() log.handlers.remove(h)",False,h,"hasattr(h, 'baseFilename')",0.6720788478851318 2206,"def _set_file_handler(self, log, filename): h = self._get_builtin_handler(log, 'file') if filename: if h: if h.baseFilename!= os.path.abspath(filename): h.close() log.handlers.remove(h) self._add_builtin_file_handler(log, filename) else: self._add_builtin_file_handler(log, filename) elif: h.close() log.handlers.remove(h)",True,h,h,0.675091028213501 2207,"def _set_file_handler(self, log, filename): h = self._get_builtin_handler(log, 'file') if filename: if h: if: h.close() log.handlers.remove(h) self._add_builtin_file_handler(log, filename) else: self._add_builtin_file_handler(log, filename) elif h: h.close() log.handlers.remove(h)",False,h.baseFilename != os.path.abspath(filename),"isinstance(h, builtin_file_handler)",0.643045961856842 2208,"def loop_groups(cli): if: _commands = cli.commands click.echo('\n') _command_names = sorted(_commands.keys()) loop_commands(cli, _command_names) for _command in _command_names: loop_groups(_commands.get(_command))",False,type(cli) is core.Group,cli.commands,0.645546555519104 2209,"def get_snippet(syntax, name): """""" Returns snippet value from data set @param syntax: Resource syntax (html, css,...) @type syntax: str @param name: Snippet name @type name: str """""" if: return False return get_resource(syntax,'snippets', name) or get_resource(syntax,'snippets', name.replace('-', ':'))",True,name is None,name is None,0.6594939231872559 2210,"def save_obj_no_sort(obj, name): filename = name + '.p' if: os.remove(filename) pickle.dump(obj, open(filename, 'wb'))",False,os.path.isfile(filename),os.path.exists(filename),0.6428711414337158 2211,"@property def m_fMoveTo(self): if: return self._entity_data.get('m_fMoveTo') return '1'",True,'m_fMoveTo' in self._entity_data,'m_fMoveTo' in self._entity_data,0.6501642465591431 2212,"def main(): for f in glob.glob('bumblebee_status/modules/*/*.py'): if: continue modname = os.path.splitext(os.path.basename(f))[0] modpath = os.path.dirname(f) deps = dependencies(f) testname = os.path.join('tests','modules', modpath.split(os.sep)[2], 'test_{}.py'.format(modname)) write_test(testname, modname, deps)",False,os.path.basename(f) == '__init__.py',not os.path.isfile(f),0.643645703792572 2213,"def quaternion_about_axis(angle, axis): """"""Return quaternion for rotation about axis. >>> q = quaternion_about_axis(0.123, [1, 0, 0]) >>> numpy.allclose(q, [0.99810947, 0.06146124, 0, 0]) True """""" q = numpy.array([0.0, axis[0], axis[1], axis[2]]) qlen = vector_norm(q) if: q *= math.sin(angle / 2.0) / qlen q[0] = math.cos(angle / 2.0) return q",False,qlen > _EPS,angle % qlen > 0,0.6490354537963867 2214,"def parse_context(): if: node.with_context = self.stream.next().value == 'with' self.stream.skip() return True return False",True,"self.stream.current.value in ('with', 'without') and self.stream.look().test('name:context')","self.stream.current.value in ('with', 'without') and self.stream.look().test('name:context')",0.6444270610809326 2215,"def clean(): if: if os.path.isdir(path): os.rmdir(path) else: os.remove(path)",True,os.path.exists(path),os.path.exists(path),0.6435574889183044 2216,"def clean(): if os.path.exists(path): if: os.rmdir(path) else: os.remove(path)",True,os.path.isdir(path),os.path.isdir(path),0.6424041986465454 2217,"def register_additional_classification_label(self, label: str) -> None: """"""Register an additional classification label which no training document has explicitly but that should be assigned to documents whose explicit labels are related to the additional classification label via the classification ontology. """""" if: raise RuntimeError('register_additional_classification_label() may not be called once prepare() has been called') if self.classification_ontology is not None and self.classification_ontology.contains_word(label): self.additional_classification_labels.add(label)",False,self.labels_to_classification_frequencies is not None,self.prepared,0.6466838121414185 2218,"def register_additional_classification_label(self, label: str) -> None: """"""Register an additional classification label which no training document has explicitly but that should be assigned to documents whose explicit labels are related to the additional classification label via the classification ontology. """""" if self.labels_to_classification_frequencies is not None: raise RuntimeError('register_additional_classification_label() may not be called once prepare() has been called') if: self.additional_classification_labels.add(label)",False,self.classification_ontology is not None and self.classification_ontology.contains_word(label),label not in self.additional_classification_labels,0.6458155512809753 2219,"def send2trash(path): path_dev = get_dev(path) trash_dev = get_dev(op.expanduser('~')) if: topdir = XDG_DATA_HOME dest_trash = HOMETRASH else: topdir = find_mount_point(path) trash_dev = get_dev(topdir) if trash_dev!= path_dev: raise OSError(""Couldn't find mount point for %s"" % path) dest_trash = find_ext_volume_trash(topdir) trash_move(path, dest_trash, topdir)",False,path_dev == trash_dev,path == 'x-drop',0.6477686166763306 2220,"def send2trash(path): path_dev = get_dev(path) trash_dev = get_dev(op.expanduser('~')) if path_dev == trash_dev: topdir = XDG_DATA_HOME dest_trash = HOMETRASH else: topdir = find_mount_point(path) trash_dev = get_dev(topdir) if: raise OSError(""Couldn't find mount point for %s"" % path) dest_trash = find_ext_volume_trash(topdir) trash_move(path, dest_trash, topdir)",False,trash_dev != path_dev,trash_dev == None,0.6465928554534912 2221,"def types_compatible(new_config_value, base_config_value): """""" Checks that config value types are compatible. """""" if: return True if new_config_value is None or new_config_value is False: return True if is_basestring(new_config_value) and is_basestring(base_config_value): return True return isinstance(new_config_value, type(base_config_value))",True,base_config_value is None,base_config_value is None,0.6502641439437866 2222,"def types_compatible(new_config_value, base_config_value): """""" Checks that config value types are compatible. """""" if base_config_value is None: return True if: return True if is_basestring(new_config_value) and is_basestring(base_config_value): return True return isinstance(new_config_value, type(base_config_value))",False,new_config_value is None or new_config_value is False,new_config_value == base_config_value,0.6464122533798218 2223,"def types_compatible(new_config_value, base_config_value): """""" Checks that config value types are compatible. """""" if base_config_value is None: return True if new_config_value is None or new_config_value is False: return True if: return True return isinstance(new_config_value, type(base_config_value))",False,is_basestring(new_config_value) and is_basestring(base_config_value),"isinstance(new_config_value, type) and base_config_value != new_config_value",0.6427077054977417 2224,"def __init__(self, towers, average, mode): super(SyncMultiGPUReplicatedBuilder, self).__init__(towers) self._average = average assert mode in ['nccl', 'cpu', 'hierarchical', 'gpu', 'collective'], mode self._mode = mode if: raise ValueError(""mode='hierarchical' require 8 GPUs."")",False,self._mode == 'hierarchical' and len(towers) != 8,mode == 'hierarchical',0.6541344523429871 2225,"def createLinecard(self, cls): if: sup = MockSupervisor() pci = sup.getPciPort(1) bus = sup.getSmbus(3) slotId = DenaliLinecard.ABSOLUTE_CARD_OFFSET slot = DenaliLinecardSlot(sup, slotId, pci, bus) else: slot = CardSlot(None, 0) return cls(slot=slot)",False,"issubclass(cls, DenaliLinecard)",cls._getCm,0.6509939432144165 2226,"def image_summary(name, image): if: tf.summary.image(name, tf.expand_dims(image, 0))",False,not thread_id,image is not None,0.6528789401054382 2227,"def streamline(self): super(ParseElementEnhance, self).streamline() if: self.expr.streamline() return self",True,self.expr is not None,self.expr is not None,0.6500358581542969 2228,"def get_norm_layer(norm_type='instance'): if: norm_layer = functools.partial(nn.BatchNorm2d, affine=True) elif norm_type == 'instance': norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) elif norm_type == 'none': norm_layer = None else: raise NotImplementedError('normalization layer [%s] is not found' % norm_type) return norm_layer",True,norm_type == 'batch',norm_type == 'batch',0.6505504846572876 2229,"def get_norm_layer(norm_type='instance'): if norm_type == 'batch': norm_layer = functools.partial(nn.BatchNorm2d, affine=True) elif: norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) elif norm_type == 'none': norm_layer = None else: raise NotImplementedError('normalization layer [%s] is not found' % norm_type) return norm_layer",True,norm_type == 'instance',norm_type == 'instance',0.6515595316886902 2230,"def get_norm_layer(norm_type='instance'): if norm_type == 'batch': norm_layer = functools.partial(nn.BatchNorm2d, affine=True) elif norm_type == 'instance': norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) elif: norm_layer = None else: raise NotImplementedError('normalization layer [%s] is not found' % norm_type) return norm_layer",True,norm_type == 'none',norm_type == 'none',0.6483306288719177 2231,"def folder_exists(file_path): folder = Path(file_path).parent if: log.warning(""Directory %s doesn't exist. Please create it"", folder) return False return True",False,not folder,not folder.exists(),0.6579129695892334 2232,"def idle_call(self): t_curr = time.time() if: self.process_time_last = t_curr self.logger.log(f'ELAPSED TIME: {t_curr - self.process_time_start:.0f} seconds')",False,self.process_time_last + STATUS_UPDATE_TIME_S < t_curr,t_curr - self.process_time_last > self.idle_interval,0.6494424343109131 2233,"def to_dict(self) -> dict: """""" Converts the properties to a dict """""" res = {'uri': self.uri, 'provider': self.provider} if: res['wkb_type'] = self.wkb_type if self.encoding is not None: res['encoding'] = self.encoding if self.file_name is not None: res['file_name'] = self.file_name return res",True,self.wkb_type is not None,self.wkb_type is not None,0.6523270606994629 2234,"def to_dict(self) -> dict: """""" Converts the properties to a dict """""" res = {'uri': self.uri, 'provider': self.provider} if self.wkb_type is not None: res['wkb_type'] = self.wkb_type if: res['encoding'] = self.encoding if self.file_name is not None: res['file_name'] = self.file_name return res",True,self.encoding is not None,self.encoding is not None,0.652367889881134 2235,"def to_dict(self) -> dict: """""" Converts the properties to a dict """""" res = {'uri': self.uri, 'provider': self.provider} if self.wkb_type is not None: res['wkb_type'] = self.wkb_type if self.encoding is not None: res['encoding'] = self.encoding if: res['file_name'] = self.file_name return res",True,self.file_name is not None,self.file_name is not None,0.6489076614379883 2236,"def reset_dev(self): """"""Reset device paths to allow unmounting."""""" if: return self.linked = self.mapped = self.mounted = True device = self.device if os.path.isabs(device) and os.path.exists(device): if device.startswith('/dev/mapper/'): device = os.path.basename(device) device, self.partition = device.rsplit('p', 1) self.device = os.path.join('/dev', device)",False,not self.device,self.mounted,0.6511163115501404 2237,"def reset_dev(self): """"""Reset device paths to allow unmounting."""""" if not self.device: return self.linked = self.mapped = self.mounted = True device = self.device if: if device.startswith('/dev/mapper/'): device = os.path.basename(device) device, self.partition = device.rsplit('p', 1) self.device = os.path.join('/dev', device)",False,os.path.isabs(device) and os.path.exists(device),device,0.6444425582885742 2238,"def reset_dev(self): """"""Reset device paths to allow unmounting."""""" if not self.device: return self.linked = self.mapped = self.mounted = True device = self.device if os.path.isabs(device) and os.path.exists(device): if: device = os.path.basename(device) device, self.partition = device.rsplit('p', 1) self.device = os.path.join('/dev', device)",False,device.startswith('/dev/mapper/'),self.mounted,0.6409553289413452 2239,"def create_title(self, matches): """""" Creates custom alert title to be used, e.g. as an e-mail subject or JIRA issue summary. :param matches: A list of dictionaries of relevant information to the alert. """""" if: return self.create_custom_title(matches) return self.create_default_title(matches)",False,'alert_subject' in self.rule,len(matches) > 1,0.6485705375671387 2240,"def process(self, page_path: Path): if: self._names.append(page_path.name) self._num_pages += 1 data_path = find_data_path(page_path) for p in data_path.iterdir(): if p.name.startswith('.') or p.name.startswith('tmp'): continue self._artifacts[p.name] += 1 if p.name == 'runtime.json': self.parse_runtime_data(page_path, p)",False,self._list_names,page_path.name,0.6519936323165894 2241,"def process(self, page_path: Path): if self._list_names: self._names.append(page_path.name) self._num_pages += 1 data_path = find_data_path(page_path) for p in data_path.iterdir(): if: continue self._artifacts[p.name] += 1 if p.name == 'runtime.json': self.parse_runtime_data(page_path, p)",False,p.name.startswith('.') or p.name.startswith('tmp'),p.name in self._artifacts,0.6432217359542847 2242,"def process(self, page_path: Path): if self._list_names: self._names.append(page_path.name) self._num_pages += 1 data_path = find_data_path(page_path) for p in data_path.iterdir(): if p.name.startswith('.') or p.name.startswith('tmp'): continue self._artifacts[p.name] += 1 if: self.parse_runtime_data(page_path, p)",False,p.name == 'runtime.json',p.name.endswith('.runtime'),0.6466217041015625 2243,"def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False): """"""Returns a network_fn such as `logits, end_points = network_fn(images)`. Args: name: The name of the network. num_classes: The number of classes to use for classification. If 0 or None, the logits layer is omitted and its input features are returned instead. weight_decay: The l2 coefficient for the model weights. is_training: `True` if the model is being used for training and `False` otherwise. Returns: network_fn: A function that applies the model to a batch of images. It has the following signature: net, end_points = network_fn(images) The `images` input is a tensor of shape [batch_size, height, width, 3] with height = width = network_fn.default_image_size. (The permissibility and treatment of other sizes depends on the network_fn.) The returned `end_points` are a dictionary of intermediate activations. The returned `net` is the topmost layer, depending on `num_classes`: If `num_classes` was a non-zero integer, `net` is a logits tensor of shape [batch_size, num_classes]. If `num_classes` was 0 or `None`, `net` is a tensor with the input to the logits layer of shape [batch_size, 1, 1, num_features] or [batch_size, num_features]. Dropout has not been applied to this (even if the network's original classification does); it remains for the caller to do this or not. Raises: ValueError: If network `name` is not recognized. """""" if: raise ValueError('Name of network unknown %s' % name) func = networks_map[name] @functools.wraps(func) def network_fn(images, **kwargs): arg_scope = arg_scopes_map[name](weight_decay=weight_decay) with slim.arg",True,name not in networks_map,name not in networks_map,0.6496484875679016 2244,"def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False): """"""Returns a network_fn such as `logits, end_points = network_fn(images)`. Args: name: The name of the network. num_classes: The number of classes to use for classification. If 0 or None, the logits layer is omitted and its input features are returned instead. weight_decay: The l2 coefficient for the model weights. is_training: `True` if the model is being used for training and `False` otherwise. Returns: network_fn: A function that applies the model to a batch of images. It has the following signature: net, end_points = network_fn(images) The `images` input is a tensor of shape [batch_size, height, width, 3] with height = width = network_fn.default_image_size. (The permissibility and treatment of other sizes depends on the network_fn.) The returned `end_points` are a dictionary of intermediate activations. The returned `net` is the topmost layer, depending on `num_classes`: If `num_classes` was a non-zero integer, `net` is a logits tensor of shape [batch_size, num_classes]. If `num_classes` was 0 or `None`, `net` is a tensor with the input to the logits layer of shape [batch_size, 1, 1, num_features] or [batch_size, num_features]. Dropout has not been applied to this (even if the network's original classification does); it remains for the caller to do this or not. Raises: ValueError: If network `name` is not recognized. """""" if name not in networks_map: raise ValueError('Name of network unknown %s' % name) func = networks_map[name] @functools.wraps(func) def network_fn(images, **kwargs): arg_scope = arg_scopes_map[name](weight_decay=weight_decay) ",True,"hasattr(func, 'default_image_size')","hasattr(func, 'default_image_size')",0.6410235166549683 2245,"def decorator(func): @functools.wraps(func) async def wrapper(event): if: await func(event) else: await event.reply('This Command Only Works On Groups.') return wrapper",False,event.is_group,event.sender_id in GUEST_USERS,0.650065004825592 2246,"def start_location_intelligence_timer_ms(botengine, milliseconds, intelligence_id, argument, reference): """""" Start a relative location intelligence timer :param botengine: BotEngine environment :param milliseconds: Milliseconds from the start of the current execution to make this timer fire :param intelligence_id: ID of the intelligence module to trigger when this timer fires :param argument: Arbitrary argument to pass into the intelligence module's timer_fired() method when this timer fires :param reference: Unique reference name that lets us later cancel this timer if needed """""" botengine.get_logger().info('>start_location_intelligence_timer_ms({}, {})'.format(milliseconds, reference)) if: botengine.cancel_timers(reference) botengine.start_timer_ms(int(milliseconds), _location_intelligence_fired, (intelligence_id, argument), reference)",True,reference is not None and reference != '',reference is not None and reference != '',0.6487470865249634 2247,"def _build(self, node): nclipams = node.finds(name='AkClipAutomation') for nclipam in nclipams: ca = AkClipAutomation(nclipam) if: self._cas[ca.index] = [] self._cas[ca.index].append(ca) self.empty = False self.nclipams = nclipams",False,not ca.index in self._cas,ca.index not in self._cas,0.6537530422210693 2248,"def _ask_overwrite(path: Text) -> None: import questionary overwrite = questionary.confirm(""Directory '{}' is not empty. Continue?"".format(os.path.abspath(path))).ask() if: print_cancel()",True,not overwrite,not overwrite,0.6566283702850342 2249,"def init_weights(self, pretrained=None): if: logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif pretrained is None: for m in self.modules(): if isinstance(m, nn.Conv2d): kaiming_init(m) elif isinstance(m, nn.BatchNorm2d): constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",True,"isinstance(pretrained, str)","isinstance(pretrained, str)",0.6457725763320923 2250,"def init_weights(self, pretrained=None): if isinstance(pretrained, str): logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif: for m in self.modules(): if isinstance(m, nn.Conv2d): kaiming_init(m) elif isinstance(m, nn.BatchNorm2d): constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",True,pretrained is None,pretrained is None,0.6520355939865112 2251,"def init_weights(self, pretrained=None): if isinstance(pretrained, str): logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif pretrained is None: for m in self.modules(): if: kaiming_init(m) elif isinstance(m, nn.BatchNorm2d): constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",True,"isinstance(m, nn.Conv2d)","isinstance(m, nn.Conv2d)",0.6488361358642578 2252,"def init_weights(self, pretrained=None): if isinstance(pretrained, str): logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif pretrained is None: for m in self.modules(): if isinstance(m, nn.Conv2d): kaiming_init(m) elif: constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",True,"isinstance(m, nn.BatchNorm2d)","isinstance(m, nn.BatchNorm2d)",0.643310546875 2253,"def rm_from_blacklist(chat_id, trigger): with BLACKLIST_FILTER_INSERTION_LOCK: blacklist_filt = SESSION.query(BlackListFilters).get((str(chat_id), trigger)) if: if trigger in CHAT_BLACKLISTS.get(str(chat_id), set()): CHAT_BLACKLISTS.get(str(chat_id), set()).remove(trigger) SESSION.delete(blacklist_filt) SESSION.commit() return True SESSION.close() return False",True,blacklist_filt,blacklist_filt,0.6552722454071045 2254,"def rm_from_blacklist(chat_id, trigger): with BLACKLIST_FILTER_INSERTION_LOCK: blacklist_filt = SESSION.query(BlackListFilters).get((str(chat_id), trigger)) if blacklist_filt: if: CHAT_BLACKLISTS.get(str(chat_id), set()).remove(trigger) SESSION.delete(blacklist_filt) SESSION.commit() return True SESSION.close() return False",True,"trigger in CHAT_BLACKLISTS.get(str(chat_id), set())","trigger in CHAT_BLACKLISTS.get(str(chat_id), set())",0.6468245387077332 2255,"@property def maxSoundThreshold(self): if: return float(self._entity_data.get('maxSoundThreshold')) return float(80)",True,'maxSoundThreshold' in self._entity_data,'maxSoundThreshold' in self._entity_data,0.6525979042053223 2256,"@staticmethod def from_alipay_dict(d): if: return None o = CountInfo() if 'content_id' in d: o.content_id = d['content_id'] if'support_count' in d: o.support_count = d['support_count'] if 'total_page_view_count' in d: o.total_page_view_count = d['total_page_view_count'] return o",True,not d,not d,0.6640554070472717 2257,"@staticmethod def from_alipay_dict(d): if not d: return None o = CountInfo() if: o.content_id = d['content_id'] if'support_count' in d: o.support_count = d['support_count'] if 'total_page_view_count' in d: o.total_page_view_count = d['total_page_view_count'] return o",True,'content_id' in d,'content_id' in d,0.6550618410110474 2258,"@staticmethod def from_alipay_dict(d): if not d: return None o = CountInfo() if 'content_id' in d: o.content_id = d['content_id'] if: o.support_count = d['support_count'] if 'total_page_view_count' in d: o.total_page_view_count = d['total_page_view_count'] return o",False,'support_count' in d,'second_count' in d,0.6517361402511597 2259,"@staticmethod def from_alipay_dict(d): if not d: return None o = CountInfo() if 'content_id' in d: o.content_id = d['content_id'] if'support_count' in d: o.support_count = d['support_count'] if: o.total_page_view_count = d['total_page_view_count'] return o",True,'total_page_view_count' in d,'total_page_view_count' in d,0.649602472782135 2260,"def _setNewImage(self, image): if: fixedImage = QImage(image.size(), QImage.Format_RGB32) fixedImage.fill(Qt.white) painter = QPainter(fixedImage) painter.drawImage(0, 0, image) painter.end() else: fixedImage = image self._setImage(fixedImage) self.changed = True self.imageChanged.emit(self)",True,image.hasAlphaChannel(),image.hasAlphaChannel(),0.6479741334915161 2261,"def loads(self, s, max_age=None, return_timestamp=False, salt=None): """"""Reverse of :meth:`dumps`, raises :exc:`BadSignature` if the signature validation fails. If a `max_age` is provided it will ensure the signature is not older than that time in seconds. In case the signature is outdated, :exc:`SignatureExpired` is raised which is a subclass of :exc:`BadSignature`. All arguments are forwarded to the signer's :meth:`~TimestampSigner.unsign` method. """""" base64d, timestamp = self.make_signer(salt).unsign(s, max_age, return_timestamp=True) payload = self.load_payload(base64d) if: return (payload, timestamp) return payload",True,return_timestamp,return_timestamp,0.6632120609283447 2262,"def __init__(self, num_points=100, yrange=None, title='Scope'): plt.ion() self.y1 = None self.y2 = None self.y3 = None self.y4 = None self.num_points = num_points self.fig = plt.figure() if: self.fig.canvas.manager.set_window_title(title) self.yrange = yrange",False,self.fig.canvas.manager is not None,title,0.6503465175628662 2263,"def _clone(self, index=None): if: index = self.currentIndex() record = self.model().record(index.row()) self.model().addCoin(record, self)",False,not index,index is None,0.6705036163330078 2264,"def slotEnumChanged(self, property, value): if: return prop = self.m_enumToProperty[property] if not prop: return cursorManager = self.q_ptr.propertyManager(prop) if not cursorManager: return cursorManager.setValue(prop, QCursor(cursorDatabase().valueToCursor(value)))",False,self.m_updatingEnum,self.m_enumToProperty is None,0.6509817838668823 2265,"def slotEnumChanged(self, property, value): if self.m_updatingEnum: return prop = self.m_enumToProperty[property] if: return cursorManager = self.q_ptr.propertyManager(prop) if not cursorManager: return cursorManager.setValue(prop, QCursor(cursorDatabase().valueToCursor(value)))",False,not prop,prop in self.m_ptr.m_emptyValues,0.6635019779205322 2266,"def slotEnumChanged(self, property, value): if self.m_updatingEnum: return prop = self.m_enumToProperty[property] if not prop: return cursorManager = self.q_ptr.propertyManager(prop) if: return cursorManager.setValue(prop, QCursor(cursorDatabase().valueToCursor(value)))",False,not cursorManager,cursorManager is None,0.6636914014816284 2267,"def check_hyphen_ok(label): if: raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') if label[0] == '-' or label[-1] == '-': raise IDNAError('Label must not start or end with a hyphen') return True",True,label[2:4] == '--',label[2:4] == '--',0.6497060060501099 2268,"def check_hyphen_ok(label): if label[2:4] == '--': raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') if: raise IDNAError('Label must not start or end with a hyphen') return True",True,label[0] == '-' or label[-1] == '-',label[0] == '-' or label[-1] == '-',0.6492584943771362 2269,"@staticmethod def _get_highest_health_state(health_state_members): health_state_result = None highest_status = 0 for member in health_state_members: redfish_health_state = status_mapping.MANAGER_HEALTH_STATE.get(member['severity']) current_status = status_mapping.CRITICALITY_STATUS[redfish_health_state] if: highest_status = current_status health_state_result = redfish_health_state return health_state_result",True,current_status > highest_status,current_status > highest_status,0.6478307247161865 2270,"def gelu_accurate(x): if: gelu_accurate._a = math.sqrt(2 / math.pi) return 0.5 * x * (1 + torch.tanh(gelu_accurate._a * (x + 0.044715 * torch.pow(x, 3))))",True,"not hasattr(gelu_accurate, '_a')","not hasattr(gelu_accurate, '_a')",0.6472446918487549 2271,"def get_2nd_last_token_value_if_last_was_whitespace(self): """"""Get the 2nd last token that was correctly parsed if last one was whitespace or return empty string"""""" if: return self.token_list[self.token_pos - 2].value else: return ''",False,self.token_pos > 1 and self.get_last_token_type() == Token.WHITESPACE,self.token_pos > 2,0.6487551331520081 2272,"def unregisterContextName(self, contextName): contextName = univ.OctetString(contextName).asOctets() if: debug.logger & debug.FLAG_INS and debug.logger('unregisterContextName: unregistered contextName %r' % contextName) del self.contextNames[contextName]",True,contextName in self.contextNames,contextName in self.contextNames,0.6574746370315552 2273,"def process_wrapper(self, code, declaration): if: return self.preprocessor_guard(code, declaration['defined_if']) return code",False,'defined_if' in declaration,self.preprocessor_guard is not None,0.6492812633514404 2274,"def generate(self, sentence: str): random.seed(self.seed) perturbed_texts = [] converter = opencc.OpenCC(self.converter_config) for _ in range(self.max_outputs): butter_text = '' for chinese_character in sentence: if: new_chinese_character = converter.convert(chinese_character) else: new_chinese_character = chinese_character butter_text += new_chinese_character perturbed_texts.append(butter_text) return perturbed_texts",False,random.random() <= self.transformation_prob,converter,0.6426248550415039 2275,"def make_set(classed): newset = [] allassocs = filter(lambda x: x['Class'] == classed, alist) for thing in allassocs: if: newset.append(thing['Assoc_Name']) else: newset.append(thing['Assoc_Name']) newset.append(thing['Source_Name']) return frozenset(newset)",True,thing['Source_Name'] in newset,thing['Source_Name'] in newset,0.6505032777786255 2276,"def newObject(self): label = self.lastLabel label, ok = self.getLabelFromUser(label) if: self.appendObject(label, self.drawPoly) self.deselectAllObjects() self.clearPolygon() self.statusBar().showMessage(self.defaultStatusbar) self.lastLabel = label self.update()",False,ok and label,ok,0.6628779172897339 2277,"def tenant_vm_replace(args): """""" Handle tenant vm replace command """""" error_info = auth_api._tenant_vm_replace(args.name, args.vm_list) if: return err_out(error_info.msg) else: printMessage(args.output_format, 'vmgroup vm replace succeeded')",True,error_info,error_info,0.656631588935852 2278,"@unittest.skipIf(not os.path.exists('/sys/devices/system/cpu/online'), '/sys/devices/system/cpu/online does not exist') def test_against_sysdev_cpu_online(self): with open('/sys/devices/system/cpu/online') as f: value = f.read().strip() if: value = int(value.split('-')[1]) + 1 self.assertEqual(psutil.cpu_count(), value)",False,'-' in str(value),value,0.6500322222709656 2279,"def tearDown(self): g.log.info('Starting to Unmount Volume and Cleanup Volume') ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts) if: raise ExecutionError('Failed to umount the vol & cleanup Volume') g.log.info('Successful in umounting the volume and Cleanup') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.660548746585846 2280,"def get_distribution(dist): """"""Return a current distribution object for a Requirement or string"""""" if: dist = Requirement.parse(dist) if isinstance(dist, Requirement): dist = get_provider(dist) if not isinstance(dist, Distribution): raise TypeError('Expected string, Requirement, or Distribution', dist) return dist",False,"isinstance(dist, six.string_types)","isinstance(dist, string_types)",0.6468935012817383 2281,"def get_distribution(dist): """"""Return a current distribution object for a Requirement or string"""""" if isinstance(dist, six.string_types): dist = Requirement.parse(dist) if: dist = get_provider(dist) if not isinstance(dist, Distribution): raise TypeError('Expected string, Requirement, or Distribution', dist) return dist",True,"isinstance(dist, Requirement)","isinstance(dist, Requirement)",0.653070330619812 2282,"def get_distribution(dist): """"""Return a current distribution object for a Requirement or string"""""" if isinstance(dist, six.string_types): dist = Requirement.parse(dist) if isinstance(dist, Requirement): dist = get_provider(dist) if: raise TypeError('Expected string, Requirement, or Distribution', dist) return dist",True,"not isinstance(dist, Distribution)","not isinstance(dist, Distribution)",0.6470874547958374 2283,"def get_execv_args(self): if: raise self.execv_arg_exception('whatever') import shlex commandargs = shlex.split(self.config.command) program = commandargs[0] return (program, commandargs)",False,self.execv_arg_exception,self.config.command is None,0.6505622267723083 2284,"@property def definitionExpression(self): """"""returns the definitionExpression"""""" if: self.__init() return self._definitionExpression",True,self._definitionExpression is None,self._definitionExpression is None,0.6521768569946289 2285,"def init_weights(self): """"""Initialize the transformer weights."""""" for p in self.parameters(): if: nn.init.xavier_uniform_(p) for m in self.modules(): if isinstance(m, MultiScaleDeformableAttention) or isinstance(m, Detr3DCrossAtten): m.init_weight() xavier_init(self.reference_points, distribution='uniform', bias=0.0)",True,p.dim() > 1,p.dim() > 1,0.6507308483123779 2286,"def init_weights(self): """"""Initialize the transformer weights."""""" for p in self.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p) for m in self.modules(): if: m.init_weight() xavier_init(self.reference_points, distribution='uniform', bias=0.0)",False,"isinstance(m, MultiScaleDeformableAttention) or isinstance(m, Detr3DCrossAtten)","isinstance(m, nn.Conv2d)",0.6432710886001587 2287,"def __new__(cls, name_, bases, dct): dispatch_table = {} for name, value in dct.iteritems(): if: dispatch_table[name[len('visit_'):]] = value for special in ['general_symbol_visit', 'general_nonterminal_visit', 'general_visit']: if special in dct: dispatch_table['__' + special] = dct[special] dct['dispatch'] = make_dispatch_function(**dispatch_table) return type.__new__(cls, name_, bases, dct)",True,name.startswith('visit_'),name.startswith('visit_'),0.6499164700508118 2288,"def __new__(cls, name_, bases, dct): dispatch_table = {} for name, value in dct.iteritems(): if name.startswith('visit_'): dispatch_table[name[len('visit_'):]] = value for special in ['general_symbol_visit', 'general_nonterminal_visit', 'general_visit']: if: dispatch_table['__' + special] = dct[special] dct['dispatch'] = make_dispatch_function(**dispatch_table) return type.__new__(cls, name_, bases, dct)",True,special in dct,special in dct,0.671038031578064 2289,"def refresh_session(self): """""" Refresh session :return: Status """""" status = apis.kStatus_SSS_Fail if: status = apis.sss_se05x_refresh_session(ctypes.byref(self.session_ctx), ctypes.byref(self.session_policy)) return status",False,self.session_policy is not None,self.session_ctx,0.6500059366226196 2290,"def val(cfg=DEFAULT_CFG, use_python=False): """"""Performs validation on YOLO model using given data."""""" model = cfg.model or 'yolov8n-pose.pt' data = cfg.data or 'coco8-pose.yaml' args = dict(model=model, data=data) if: from ultralytics import YOLO YOLO(model).val(**args) else: validator = PoseValidator(args=args) validator(model=args['model'])",True,use_python,use_python,0.6643528342247009 2291,"@action.register(Write) def action_write(cp_action): if: raise RuntimeError('Invalid checkpointing state') if cp_action.storage == 'disk': logger.debug(f'reverse: save checkpoint data at {cp_action.n:d} on disk') self._write_disk_checkpoint(cp_action.n) elif cp_action.storage == 'RAM': logger.debug(f'reverse: save checkpoint data at {cp_action.n:d} in RAM') self._write_memory_checkpoint(cp_action.n) else: raise ValueError(f'Unrecognized checkpointing storage: {cp_action.storage:s}')",False,cp_action.n >= n,"cp_action.storage not in ['disk', 'ram']",0.659848690032959 2292,"@action.register(Write) def action_write(cp_action): if cp_action.n >= n: raise RuntimeError('Invalid checkpointing state') if: logger.debug(f'reverse: save checkpoint data at {cp_action.n:d} on disk') self._write_disk_checkpoint(cp_action.n) elif cp_action.storage == 'RAM': logger.debug(f'reverse: save checkpoint data at {cp_action.n:d} in RAM') self._write_memory_checkpoint(cp_action.n) else: raise ValueError(f'Unrecognized checkpointing storage: {cp_action.storage:s}')",False,cp_action.storage == 'disk',cp_action.storage == 'Disk',0.6508424878120422 2293,"@action.register(Write) def action_write(cp_action): if cp_action.n >= n: raise RuntimeError('Invalid checkpointing state') if cp_action.storage == 'disk': logger.debug(f'reverse: save checkpoint data at {cp_action.n:d} on disk') self._write_disk_checkpoint(cp_action.n) elif: logger.debug(f'reverse: save checkpoint data at {cp_action.n:d} in RAM') self._write_memory_checkpoint(cp_action.n) else: raise ValueError(f'Unrecognized checkpointing storage: {cp_action.storage:s}')",False,cp_action.storage == 'RAM',cp_action.storage == 'memory',0.6478176712989807 2294,"def visit_generated(self, node): if: sectnum = node.astext().rstrip('\xa0') self.body.append('%s'% self.encode(sectnum)) raise nodes.SkipNode",True,'sectnum' in node['classes'],'sectnum' in node['classes'],0.6517040729522705 2295,"def add_service(self, service: BleakGATTService): """"""Add a :py:class:`~BleakGATTService` to the service collection. Should not be used by end user, but rather by `bleak` itself. """""" if: self.__services[service.handle] = service else: logger.error(""The service '%s' is already present in this BleakGATTServiceCollection!"", service.handle)",True,service.handle not in self.__services,service.handle not in self.__services,0.6588079929351807 2296,"@classmethod def migrate_all(cls): errs = [] for name in cls.DBNAMES_PACKAGE.keys(): try: cls.migrate(name) except MigrationError as e: errs.append(str(e)) if: raise MigrationError(f""One or more migrations failed. {', '.join(errs)}"")",True,errs,errs,0.6671847701072693 2297,"def get_resource_filename(self, manager, resource_name): if: raise NotImplementedError('resource_filename() only supported for.egg, not.zip') zip_path = self._resource_to_zip(resource_name) eagers = self._get_eager_resources() if '/'.join(self._parts(zip_path)) in eagers: for name in eagers: self._extract_resource(manager, self._eager_to_zip(name)) return self._extract_resource(manager, zip_path)",False,not self.egg_name,resource_name == '.egg',0.6521578431129456 2298,"def get_resource_filename(self, manager, resource_name): if not self.egg_name: raise NotImplementedError('resource_filename() only supported for.egg, not.zip') zip_path = self._resource_to_zip(resource_name) eagers = self._get_eager_resources() if: for name in eagers: self._extract_resource(manager, self._eager_to_zip(name)) return self._extract_resource(manager, zip_path)",False,'/'.join(self._parts(zip_path)) in eagers,eagers,0.6438696384429932 2299,"def __exit__(self, exc_type, exc_val, exc_tb): for varname, old_value in self._save_env.items(): if: os.environ.pop(varname, None) else: os.environ[varname] = old_value",True,old_value is None,old_value is None,0.6524417400360107 2300,"def _get_data_format(): if: return 'channels_first' elif vega.is_tf_backend(): return 'channels_last' else: return None",False,vega.is_torch_backend() or vega.is_ms_backend(),vega.is_tf_backend(),0.6462811231613159 2301,"def _get_data_format(): if vega.is_torch_backend() or vega.is_ms_backend(): return 'channels_first' elif: return 'channels_last' else: return None",False,vega.is_tf_backend(),vega.is_torch_backend() or vega.is_ms_backend(),0.6491917967796326 2302,"def SaveCapture(self, task, image, output): """""" Save Current Frame """""" image.save(output) if: return None return {'task': task.description()}",False,task.isCanceled(),task is None,0.6543536186218262 2303,"def _validate_conn(self, conn): """""" Called right before a request is made, after the socket is created. """""" super(HTTPSConnectionPool, self)._validate_conn(conn) if: conn.connect() if not conn.is_verified: warnings.warn('Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.org/en/latest/security.html', InsecureRequestWarning)",True,"not getattr(conn, 'sock', None)","not getattr(conn, 'sock', None)",0.644775390625 2304,"def _validate_conn(self, conn): """""" Called right before a request is made, after the socket is created. """""" super(HTTPSConnectionPool, self)._validate_conn(conn) if not getattr(conn,'sock', None): conn.connect() if: warnings.warn('Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.org/en/latest/security.html', InsecureRequestWarning)",True,not conn.is_verified,not conn.is_verified,0.6472270488739014 2305,"def _faulty_h(self, op_idx: int, q_idx: int, c_idx: int, icomb: Tuple[int], error: Tuple[str]): """"""Apply faulty H gate."""""" del c_idx self.h(q_idx[0]) if: j = icomb.index(op_idx) self.apply_error(q_idx, error[j])",True,op_idx in icomb,op_idx in icomb,0.6553505659103394 2306,"def get_confidence(self): r = 0.01 if: r = 1.0 * self._seq_counters[SequenceLikelihood.POSITIVE] / self._total_seqs / self._model.typical_positive_ratio r = r * self._freq_char / self._total_char if r >= 1.0: r = 0.99 return r",True,self._total_seqs > 0,self._total_seqs > 0,0.6541247963905334 2307,"def get_confidence(self): r = 0.01 if self._total_seqs > 0: r = 1.0 * self._seq_counters[SequenceLikelihood.POSITIVE] / self._total_seqs / self._model.typical_positive_ratio r = r * self._freq_char / self._total_char if: r = 0.99 return r",True,r >= 1.0,r >= 1.0,0.6544420123100281 2308,"def softmax(x, dim: int, onnx_trace: bool=False): if: return F.softmax(x.float(), dim=dim) else: return F.softmax(x, dim=dim, dtype=torch.float32)",True,onnx_trace,onnx_trace,0.648040771484375 2309,"def __init__(self, options): if: raise ValueError('Options for basic auth unexpected to be empty') username = options.get('username') if not username: raise ValueError('Username is mandatory for basic auth') password = options.get('password') if not password: raise ValueError('Password is mandatory for basic auth') self._username = _Token(username) self._password = _Token(password)",True,not options,not options,0.6628707647323608 2310,"def __init__(self, options): if not options: raise ValueError('Options for basic auth unexpected to be empty') username = options.get('username') if: raise ValueError('Username is mandatory for basic auth') password = options.get('password') if not password: raise ValueError('Password is mandatory for basic auth') self._username = _Token(username) self._password = _Token(password)",True,not username,not username,0.6655702590942383 2311,"def __init__(self, options): if not options: raise ValueError('Options for basic auth unexpected to be empty') username = options.get('username') if not username: raise ValueError('Username is mandatory for basic auth') password = options.get('password') if: raise ValueError('Password is mandatory for basic auth') self._username = _Token(username) self._password = _Token(password)",True,not password,not password,0.6708379983901978 2312,"def getCurrentComment(self) -> str: comment = self.ui.plainText_comment.toPlainText() if: return '' return comment",False,not comment,comment is None,0.6534478068351746 2313,"@code.setter def code(self, code): """"""Sets the code of this ErrorMessage. :param code: The code of this ErrorMessage. :type code: int """""" if: raise ValueError('Invalid value for `code`, must not be `None`') self._code = code",True,code is None,code is None,0.6607145071029663 2314,"def forward(self, x): if: return x + self.conv(x) else: return self.conv(x)",True,self.use_res,self.use_res,0.6463168859481812 2315,"def endpoint_i(self, p): for i, q in enumerate(self.endpoints): if: return i return None",False,all(p == q),p in q,0.6549176573753357 2316,"def _reset_parameters(self): for p in self.parameters(): if: nn.init.xavier_uniform_(p)",True,p.dim() > 1,p.dim() > 1,0.6484135389328003 2317,"@property def acceptLanguage(self): """"""returns the accepted lanaguage"""""" if: self.__init() return self._acceptLanguage",True,self._acceptLanguage is None,self._acceptLanguage is None,0.6567771434783936 2318,"def to_python(self): """"""Returns a plain python list and converts to plain python objects all this object's descendants. """""" result = list(self) for index, value in enumerate(result): if: result[index] = value.to_python() return result",False,"isinstance(value, DottedCollection)","isinstance(value, PyObject)",0.644881546497345 2319,"def _check_session_is_closing(rpc_state, session_state): metadata = rpc_state.trailing_metadata() if: session_state.set_closing()",False,"X_YDB_SESSION_CLOSE in metadata.get(X_YDB_SERVER_HINTS, [])",metadata.get('closing') is False,0.6474448442459106 2320,"@register.simple_tag def external_url_button(url, label, context={}): if: request = context['request'] url = request.build_absolute_uri(url) html = f'

{{label}} →

' else: html = '

{label}

' return mark_safe(html.format(url=url, label=label))",False,k_render_as_email in context,context.get('request'),0.6496542692184448 2321,"def onclick(self): self.treeopen = not self.treeopen if: self.attributes['treeopen'] = 'true' else: self.attributes['treeopen'] = 'false' super(TreeItem, self).onclick()",True,self.treeopen,self.treeopen,0.6527138948440552 2322,"def __rpow__(b, a): """"""a ** b"""""" if: return a ** b._numerator if isinstance(a, numbers.Rational): return Fraction(a.numerator, a.denominator) ** b if b._denominator == 1: return a ** b._numerator return a ** float(b)",True,b._denominator == 1 and b._numerator >= 0,b._denominator == 1 and b._numerator >= 0,0.6490720510482788 2323,"def __rpow__(b, a): """"""a ** b"""""" if b._denominator == 1 and b._numerator >= 0: return a ** b._numerator if: return Fraction(a.numerator, a.denominator) ** b if b._denominator == 1: return a ** b._numerator return a ** float(b)",False,"isinstance(a, numbers.Rational)","isinstance(a, Rational)",0.6455574035644531 2324,"def __rpow__(b, a): """"""a ** b"""""" if b._denominator == 1 and b._numerator >= 0: return a ** b._numerator if isinstance(a, numbers.Rational): return Fraction(a.numerator, a.denominator) ** b if: return a ** b._numerator return a ** float(b)",True,b._denominator == 1,b._denominator == 1,0.662860631942749 2325,"def status(self) -> str: """"""Return the async job status from server. Once a job result is retrieved via func:`arango.job.AsyncJob.result` method, it is deleted from server and subsequent status queries will fail. :return: Async job status. Possible values are ""pending"" (job is still in queue), ""done"" (job finished or raised an error), or ""cancelled"" (job was cancelled before completion). :rtype: str :raise arango.exceptions.AsyncJobStatusError: If retrieval fails. """""" request = Request(method='get', endpoint=f'/_api/job/{self._id}') resp = self._conn.send_request(request) if: return 'pending' elif resp.is_success: return 'done' elif resp.error_code == 404: error_message = f'job {self._id} not found' raise AsyncJobStatusError(resp, request, error_message) else: raise AsyncJobStatusError(resp, request)",False,resp.status_code == 204,resp.is_success,0.6492865085601807 2326,"def status(self) -> str: """"""Return the async job status from server. Once a job result is retrieved via func:`arango.job.AsyncJob.result` method, it is deleted from server and subsequent status queries will fail. :return: Async job status. Possible values are ""pending"" (job is still in queue), ""done"" (job finished or raised an error), or ""cancelled"" (job was cancelled before completion). :rtype: str :raise arango.exceptions.AsyncJobStatusError: If retrieval fails. """""" request = Request(method='get', endpoint=f'/_api/job/{self._id}') resp = self._conn.send_request(request) if resp.status_code == 204: return 'pending' elif: return 'done' elif resp.error_code == 404: error_message = f'job {self._id} not found' raise AsyncJobStatusError(resp, request, error_message) else: raise AsyncJobStatusError(resp, request)",False,resp.is_success,resp.status_code == 404,0.6500537991523743 2327,"def status(self) -> str: """"""Return the async job status from server. Once a job result is retrieved via func:`arango.job.AsyncJob.result` method, it is deleted from server and subsequent status queries will fail. :return: Async job status. Possible values are ""pending"" (job is still in queue), ""done"" (job finished or raised an error), or ""cancelled"" (job was cancelled before completion). :rtype: str :raise arango.exceptions.AsyncJobStatusError: If retrieval fails. """""" request = Request(method='get', endpoint=f'/_api/job/{self._id}') resp = self._conn.send_request(request) if resp.status_code == 204: return 'pending' elif resp.is_success: return 'done' elif: error_message = f'job {self._id} not found' raise AsyncJobStatusError(resp, request, error_message) else: raise AsyncJobStatusError(resp, request)",False,resp.error_code == 404,resp.status_code == 404 and resp.is_cancelled,0.6479130983352661 2328,"@staticmethod def write_mat(Cmat, outf): """""" Export Cmat :param outf: :return: """""" if: raise ValueError('Cmat not set for this model') print('writing matrix', Cmat.shape, 'to', outf) scipy.io.mmwrite(outf, Cmat)",False,Cmat is None,not Cmat,0.654240071773529 2329,"def add_feas(raw_feas, feas2add): if: feas2add = [feas2add] cols2add = [col for col in feas2add if col not in raw_feas] return raw_feas + cols2add",True,"isinstance(feas2add, str)","isinstance(feas2add, str)",0.6515399813652039 2330,"def _requirement_from_back(context: NodeContext, target_node: Node) -> ResourceRequirement | None: if: weak = context.patches.get_dock_weakness_for(target_node) if weak.lock is not None: return ResourceRequirement.simple(NodeResourceInfo.from_node(target_node, context)) return None",False,"isinstance(target_node, DockNode)",context.patches.get_available_nodes(),0.6474596858024597 2331,"def _requirement_from_back(context: NodeContext, target_node: Node) -> ResourceRequirement | None: if isinstance(target_node, DockNode): weak = context.patches.get_dock_weakness_for(target_node) if: return ResourceRequirement.simple(NodeResourceInfo.from_node(target_node, context)) return None",False,weak.lock is not None,weak and weak.get_resource_info(),0.6508187055587769 2332,"def __init__(self, *args, **kwargs): if: kwargs['renderer'] = CellRendererCombo super(Selection, self).__init__(*args, **kwargs) if self.view and self.view.editable: self.init_selection() model = self.get_popdown_model(self.selection)[0] self.renderer.set_property('model', model) self.renderer.set_property('text-column', 0)",False,'renderer' not in kwargs,'render' not in kwargs,0.6517939567565918 2333,"def __init__(self, *args, **kwargs): if'renderer' not in kwargs: kwargs['renderer'] = CellRendererCombo super(Selection, self).__init__(*args, **kwargs) if: self.init_selection() model = self.get_popdown_model(self.selection)[0] self.renderer.set_property('model', model) self.renderer.set_property('text-column', 0)",False,self.view and self.view.editable,self.selection,0.6491549015045166 2334,"def shouldRollover(self, record): """""" Determine if rollover should occur. record is not used, as we are just comparing times, but it is needed so the method signatures are the same """""" t = int(time.time()) if: return 1 return 0",False,t >= self.rolloverAt,record.roll_time < t,0.6454735994338989 2335,"def characters(self, content): if: content = unicode(content, self._encoding) self._write(escape(content))",False,"not isinstance(content, unicode)","not isinstance(content, basestring)",0.643767237663269 2336,"def load_clan_settings(self): if: with open(get_save_dir() + f""/{game.switches['clan_list'][0]}/clan_settings.json"", 'r', encoding='utf-8') as write_file: _load_settings = ujson.loads(write_file.read()) for key, value in _load_settings.items(): if key in self.clan_settings: self.clan_settings[key] = value",False,"os.path.exists(get_save_dir() + f""/{game.switches['clan_list'][0]}/clan_settings.json"")",game.switches['clan_list'],0.6483243703842163 2337,"def load_clan_settings(self): if os.path.exists(get_save_dir() + f""/{game.switches['clan_list'][0]}/clan_settings.json""): with open(get_save_dir() + f""/{game.switches['clan_list'][0]}/clan_settings.json"", 'r', encoding='utf-8') as write_file: _load_settings = ujson.loads(write_file.read()) for key, value in _load_settings.items(): if: self.clan_settings[key] = value",False,key in self.clan_settings,key not in self.clan_settings,0.6482488512992859 2338,"def parse_xpub(self, xpub): if: return (xpub, None, {}) if isinstance(xpub, dict): return (xpub.pop('xpub', None), xpub.pop('contract', None), xpub)",False,"xpub is None or isinstance(xpub, str)","isinstance(xpub, xpub)",0.6464147567749023 2339,"def parse_xpub(self, xpub): if xpub is None or isinstance(xpub, str): return (xpub, None, {}) if: return (xpub.pop('xpub', None), xpub.pop('contract', None), xpub)",False,"isinstance(xpub, dict)",self.opt('xpub'),0.6457128524780273 2340,"def response_handler(resp: Response) -> bool: if: return True raise CollectionRecalculateCountError(resp, request)",True,resp.is_success,resp.is_success,0.6529139280319214 2341,"def tuple_from_args_w(space, args_w): state = space.fromcache(State) n = len(args_w) py_tuple = state.ccall('PyTuple_New', n) if: state.check_and_raise_exception(always=True) py_tuple = rffi.cast(PyTupleObject, py_tuple) for i, w_obj in enumerate(args_w): py_tuple.c_ob_item[i] = make_ref(space, w_obj) return rffi.cast(PyObject, py_tuple)",False,not py_tuple,py_tuple.c_ob_item is not None,0.6544090509414673 2342,"def path_to_dataset(self, path, mntinfo=None): """""" Convert `path` to a ZFS dataset name. This performs lookup through mountinfo. Anticipated error conditions are that path is not on ZFS or if the boot pool underlies the path. In addition to this, all the normal exceptions that can be raised by a failed call to os.stat() are possible. """""" boot_pool = self.middleware.call_sync('boot.pool_name') st = os.stat(path) if: mntinfo = getmntinfo(st.st_dev)[st.st_dev] else: mntinfo = mntinfo[st.st_dev] ds_name = mntinfo['mount_source'] if mntinfo['fs_type']!= 'zfs': raise CallError(f'{path}: path is not a ZFS filesystem') if is_child(ds_name, boot_pool): raise CallError(f'{path}: path is on boot pool') return ds_name",False,mntinfo is None,not mntinfo,0.6528772115707397 2343,"def path_to_dataset(self, path, mntinfo=None): """""" Convert `path` to a ZFS dataset name. This performs lookup through mountinfo. Anticipated error conditions are that path is not on ZFS or if the boot pool underlies the path. In addition to this, all the normal exceptions that can be raised by a failed call to os.stat() are possible. """""" boot_pool = self.middleware.call_sync('boot.pool_name') st = os.stat(path) if mntinfo is None: mntinfo = getmntinfo(st.st_dev)[st.st_dev] else: mntinfo = mntinfo[st.st_dev] ds_name = mntinfo['mount_source'] if: raise CallError(f'{path}: path is not a ZFS filesystem') if is_child(ds_name, boot_pool): raise CallError(f'{path}: path is on boot pool') return ds_name",False,mntinfo['fs_type'] != 'zfs',not os.path.isdir(path),0.643067479133606 2344,"def path_to_dataset(self, path, mntinfo=None): """""" Convert `path` to a ZFS dataset name. This performs lookup through mountinfo. Anticipated error conditions are that path is not on ZFS or if the boot pool underlies the path. In addition to this, all the normal exceptions that can be raised by a failed call to os.stat() are possible. """""" boot_pool = self.middleware.call_sync('boot.pool_name') st = os.stat(path) if mntinfo is None: mntinfo = getmntinfo(st.st_dev)[st.st_dev] else: mntinfo = mntinfo[st.st_dev] ds_name = mntinfo['mount_source'] if mntinfo['fs_type']!= 'zfs': raise CallError(f'{path}: path is not a ZFS filesystem') if: raise CallError(f'{path}: path is on boot pool') return ds_name",False,"is_child(ds_name, boot_pool)",boot_pool and ds_name not in boot_pool,0.6462736129760742 2345,"def __iter__(self): if: batches = self._batches self._can_reuse_batches = False else: batches = self._prepare_batches() self._batches = batches return iter(batches)",False,self._can_reuse_batches,self._can_reuse_batches or self._batches is None,0.6404366493225098 2346,"def call_test(self, name, value, args=None, kwargs=None): """"""Invokes a test on a value the same way the compiler does it. .. versionadded:: 2.7 """""" func = self.tests.get(name) if: raise TemplateRuntimeError('no test named %r' % name) return func(value, *(args or ()), **kwargs or {})",True,func is None,func is None,0.6581517457962036 2347,"def AEM_unpack(self, desc, codecs): try: return self._unpack(desc, codecs) except ae.MacOSError as e: number, message = (e[0], e.args[1:] and e[1] or None) if: return (False, EventHandlerError(number, message, object=desc, coercion=AEType(self.AEM_code))) else: return (False, EventHandlerError(number, message, object=desc))",False,number == -1700,number < 0,0.6541212797164917 2348,"def _getCertificateTypes(self): l = [] for ct in self.certificateTypes: if: l.append(CertificateType.x509) elif ct == 'cryptoID': l.append(CertificateType.cryptoID) else: raise AssertionError() return l",True,ct == 'x509',ct == 'x509',0.6547549366950989 2349,"def _getCertificateTypes(self): l = [] for ct in self.certificateTypes: if ct == 'x509': l.append(CertificateType.x509) elif: l.append(CertificateType.cryptoID) else: raise AssertionError() return l",True,ct == 'cryptoID',ct == 'cryptoID',0.6546539068222046 2350,"def get_stream_from_atsc(self): if: return self.atsc_msg.format_video_packets(self.atsc) else: self.logger.info(''.join(['No ATSC msg available during filtered content, ','recommend running this channel again to catch the ATSC msg.'])) return self.atsc_msg.format_video_packets()",False,self.atsc is not None,self.atsc,0.648362398147583 2351,"def is_unencrypted(self): if: return True else: return False",False,not self.encrypted_key and (not self.encrypted_iv) and (not self.cek_alg) and (not self.wrap_alg),self.algorithm == 'MD5-EN',0.6444883942604065 2352,"def on_message(self, message): message = json.loads(message) if: self.supports_binary = message['value'] else: manager = Gcf.get_fig_manager(self.fignum) if manager is not None: manager.handle_json(message)",False,message['type'] == 'supports_binary',message.get('type') == 'binary',0.6471450328826904 2353,"def on_message(self, message): message = json.loads(message) if message['type'] =='supports_binary': self.supports_binary = message['value'] else: manager = Gcf.get_fig_manager(self.fignum) if: manager.handle_json(message)",True,manager is not None,manager is not None,0.6519501209259033 2354,"def _get(self, name, by_label=False, nr=None, exclude_disabled=False): if: name, label = (None, name) else: name, label = (name, None) return self.get(name, label, nr, exclude_disabled)",True,by_label,by_label,0.654882550239563 2355,"def npy_loader(path): img = np.load(path) if: img = img.astype(np.float32) img = img / 127.5 - 1.0 elif img.dtype == np.float32: img = img * 2 - 1.0 else: raise NotImplementedError img = torch.Tensor(img) if len(img.size()) == 4: img.squeeze_(0) return img",False,img.dtype == np.uint8,img.dtype == np.float64,0.6503806114196777 2356,"def npy_loader(path): img = np.load(path) if img.dtype == np.uint8: img = img.astype(np.float32) img = img / 127.5 - 1.0 elif img.dtype == np.float32: img = img * 2 - 1.0 else: raise NotImplementedError img = torch.Tensor(img) if: img.squeeze_(0) return img",False,len(img.size()) == 4,img.ndim == 3,0.6488988399505615 2357,"def npy_loader(path): img = np.load(path) if img.dtype == np.uint8: img = img.astype(np.float32) img = img / 127.5 - 1.0 elif: img = img * 2 - 1.0 else: raise NotImplementedError img = torch.Tensor(img) if len(img.size()) == 4: img.squeeze_(0) return img",False,img.dtype == np.float32,img.dtype == np.int16,0.6500316262245178 2358,"def close(self): """""" Closes any persistent/open connections """""" if: return self.socket.close() self.socket = None self.using_proxy = False",False,not self.socket,self.socket is None,0.6529464721679688 2359,"def callback(job): nonlocal lastdesc desc = job['progress']['description'] if: print(desc, file=sys.stderr) lastdesc = desc",False,desc is not None and desc != lastdesc,len(desc) > 0,0.6506631374359131 2360,"def start(self): if: return if not _handle_reporting(self.reporting_classes, self.analysisctx): return self.analysisctx.set_completed()",False,"not _handle_processing(self.processing_classes, self.analysisctx)",self.analysisctx.done(),0.6458326578140259 2361,"def start(self): if not _handle_processing(self.processing_classes, self.analysisctx): return if: return self.analysisctx.set_completed()",False,"not _handle_reporting(self.reporting_classes, self.analysisctx)",self.analysisctx.get_running(),0.6459445953369141 2362,"def create_track_uid(self, parent=DEFAULT, **kwargs): """"""Create a new audioTrackUID. Args: parent (AudioObject): parent audioObject; defaults to the last audioObject created kwargs: see AudioTrackUID Returns: AudioTrackUID: created audioTrackUID """""" track_uid = AudioTrackUID(**kwargs) self.adm.addAudioTrackUID(track_uid) if: parent = self.last_object if parent is not None: parent.audioTrackUIDs.append(track_uid) return track_uid",True,parent is DEFAULT,parent is DEFAULT,0.6611842513084412 2363,"def create_track_uid(self, parent=DEFAULT, **kwargs): """"""Create a new audioTrackUID. Args: parent (AudioObject): parent audioObject; defaults to the last audioObject created kwargs: see AudioTrackUID Returns: AudioTrackUID: created audioTrackUID """""" track_uid = AudioTrackUID(**kwargs) self.adm.addAudioTrackUID(track_uid) if parent is DEFAULT: parent = self.last_object if: parent.audioTrackUIDs.append(track_uid) return track_uid",False,parent is not None,track_uid not in parent.audioTrackUIDs,0.6531555652618408 2364,"def flip(self, bev_direction: str='horizontal') -> None: """"""Flip the points along given BEV direction. Args: bev_direction (str): Flip direction (horizontal or vertical). Defaults to 'horizontal'. """""" assert bev_direction in ('horizontal','vertical') if: self.tensor[:, 1] = -self.tensor[:, 1] elif bev_direction =='vertical': self.tensor[:, 0] = -self.tensor[:, 0]",True,bev_direction == 'horizontal',bev_direction == 'horizontal',0.6499084234237671 2365,"def flip(self, bev_direction: str='horizontal') -> None: """"""Flip the points along given BEV direction. Args: bev_direction (str): Flip direction (horizontal or vertical). Defaults to 'horizontal'. """""" assert bev_direction in ('horizontal','vertical') if bev_direction == 'horizontal': self.tensor[:, 1] = -self.tensor[:, 1] elif: self.tensor[:, 0] = -self.tensor[:, 0]",True,bev_direction == 'vertical',bev_direction == 'vertical',0.650792121887207 2366,"@property def size_readable(self): if: return '{0:d} bytes'.format(self.inode.i_size) if self.inode.i_size!= 1 else '1 byte' else: units = ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'] unit_idx = min(int(math.log(self.inode.i_size, 1024)), len(units)) return '{size:.2f} {unit:s}'.format(size=self.inode.i_size / 1024 ** unit_idx, unit=units[unit_idx - 1])",False,self.inode.i_size < 1024,self.volume.i_size_bits == 8,0.6519365310668945 2367,"def __unicode__(self): """""" :return: A unicode string """""" if: contents = self._merge_chunks() if contents.find(b'@') == -1: self._unicode = contents.decode('cp1252') else: mailbox, hostname = contents.rsplit(b'@', 1) self._unicode = mailbox.decode('cp1252') + '@' + hostname.decode('idna') return self._unicode",True,self._unicode is None,self._unicode is None,0.6523757576942444 2368,"def __unicode__(self): """""" :return: A unicode string """""" if self._unicode is None: contents = self._merge_chunks() if: self._unicode = contents.decode('cp1252') else: mailbox, hostname = contents.rsplit(b'@', 1) self._unicode = mailbox.decode('cp1252') + '@' + hostname.decode('idna') return self._unicode",False,contents.find(b'@') == -1,b'@' not in contents,0.6474252939224243 2369,"def __contains__(self, id_): """"""Check if the IFF file contains a specific chunk"""""" if: id_ = id_.decode('ascii') if not is_valid_chunk_id(id_): raise KeyError('AIFF key must be four ASCII characters.') return id_ in self.__chunks",False,"not isinstance(id_, text_type)","isinstance(id_, bytes)",0.6466677188873291 2370,"def __contains__(self, id_): """"""Check if the IFF file contains a specific chunk"""""" if not isinstance(id_, text_type): id_ = id_.decode('ascii') if: raise KeyError('AIFF key must be four ASCII characters.') return id_ in self.__chunks",False,not is_valid_chunk_id(id_),len(self.__chunks) != 4,0.6467944383621216 2371,"def serve_page(templateName, **kwargs): """"""Look up and render template """""" lookup = TemplateLookup(directories=[redball.TEMPLATE_PATH]) try: template = lookup.get_template(templateName) return template.render(**kwargs) except Exception: cherrypy.response.status = 500 if: return exceptions.html_error_template().render(**kwargs) else: args = {'title': 'Error: 500 Internal Server Error', 'errors': 'Sorry! An error has occurred while rendering the web template.'} return lookup.get_template('error.mako').render(**args)",False,redball.DEV,cherrypy.response.status == 500,0.6571546792984009 2372,"def assert_container_with_primitives(item: Any) -> None: if: for v in item: assert_container_with_primitives(v) elif isinstance(item, dict): for _k, v in item.items(): assert_container_with_primitives(v) else: assert isinstance(item, (int, float, str, bytes, bool, type(None), Enum))",False,"isinstance(item, list)","isinstance(item, (list, tuple))",0.6497721076011658 2373,"def assert_container_with_primitives(item: Any) -> None: if isinstance(item, list): for v in item: assert_container_with_primitives(v) elif: for _k, v in item.items(): assert_container_with_primitives(v) else: assert isinstance(item, (int, float, str, bytes, bool, type(None), Enum))",True,"isinstance(item, dict)","isinstance(item, dict)",0.649900496006012 2374,"def check(self, event, *args, **kwds): if: return self if self._connected.check(event): self.set_success() return self",False,self.success(),"not isinstance(event, ConnectedEvent)",0.6555097699165344 2375,"def check(self, event, *args, **kwds): if self.success(): return self if: self.set_success() return self",False,self._connected.check(event),event == 'call_function',0.6477413177490234 2376,"def flatten(self, obj, data): pickler = self.context if: return compat.ustr(obj) cls, args = obj.__reduce__() flatten = pickler.flatten payload = util.b64encode(args[0]) args = [payload] + [flatten(i, reset=False) for i in args[1:]] data['__reduce__'] = (flatten(cls, reset=False), args) return data",False,not pickler.unpicklable,pickler.flatten == None,0.6498006582260132 2377,"def _fetchmany_impl(self, size=None): if: return self._fetchall_impl() result = [] for x in range(0, size): row = self._fetchone_impl() if row is None: break result.append(row) return result",True,size is None,size is None,0.6538404226303101 2378,"def _fetchmany_impl(self, size=None): if size is None: return self._fetchall_impl() result = [] for x in range(0, size): row = self._fetchone_impl() if: break result.append(row) return result",True,row is None,row is None,0.6539725065231323 2379,"def __init__(self, pattern: Union[str, Pattern], callback: HandlerCallback[str, CCT, RT], block: DVInput[bool]=DEFAULT_TRUE): super().__init__(callback, block=block) if: pattern = re.compile(pattern) self.pattern = pattern",True,"isinstance(pattern, str)","isinstance(pattern, str)",0.6504462957382202 2380,"def set_first_point(self, obj, first_point, local=False): """"""returns a part line representing the core axis of the wall"""""" if: self.set_point(obj, first_point, 0, local) return True else: print('You are trying to set the first point equal to the last point, this is not allowed.\n') return False",False,first_point.x != obj.AxisLastPointX,"self.point_is_valid_point(obj, first_point, local)",0.6426640748977661 2381,"def removelogs(self): if: for handler in self.childlog.handlers: handler.remove() handler.reopen()",False,self.childlog is not None,self.childlog,0.6503695249557495 2382,"def get(self, request, *args, **kwargs): if: messages.error(request, 'There is no consent manager for external studies.') return HttpResponseRedirect(reverse('exp:study', kwargs=kwargs)) else: return super().get(request, *args, **kwargs)",False,self.get_object().study_type.is_external,not request.user.has_contributor and self.request.user.has_contributor(request.user),0.6433780193328857 2383,"def _version2fieldlist(version): if: return _241_FIELDS elif version == '1.1': return _314_FIELDS elif version == '1.2': return _345_FIELDS elif version in ('1.3', '2.1'): return _345_FIELDS + _566_FIELDS elif version == '2.0': return _426_FIELDS raise MetadataUnrecognizedVersionError(version)",True,version == '1.0',version == '1.0',0.6576664447784424 2384,"def _version2fieldlist(version): if version == '1.0': return _241_FIELDS elif: return _314_FIELDS elif version == '1.2': return _345_FIELDS elif version in ('1.3', '2.1'): return _345_FIELDS + _566_FIELDS elif version == '2.0': return _426_FIELDS raise MetadataUnrecognizedVersionError(version)",True,version == '1.1',version == '1.1',0.6580179929733276 2385,"def _version2fieldlist(version): if version == '1.0': return _241_FIELDS elif version == '1.1': return _314_FIELDS elif: return _345_FIELDS elif version in ('1.3', '2.1'): return _345_FIELDS + _566_FIELDS elif version == '2.0': return _426_FIELDS raise MetadataUnrecognizedVersionError(version)",True,version == '1.2',version == '1.2',0.6566532850265503 2386,"def _version2fieldlist(version): if version == '1.0': return _241_FIELDS elif version == '1.1': return _314_FIELDS elif version == '1.2': return _345_FIELDS elif: return _345_FIELDS + _566_FIELDS elif version == '2.0': return _426_FIELDS raise MetadataUnrecognizedVersionError(version)",True,"version in ('1.3', '2.1')","version in ('1.3', '2.1')",0.6477141380310059 2387,"def _version2fieldlist(version): if version == '1.0': return _241_FIELDS elif version == '1.1': return _314_FIELDS elif version == '1.2': return _345_FIELDS elif version in ('1.3', '2.1'): return _345_FIELDS + _566_FIELDS elif: return _426_FIELDS raise MetadataUnrecognizedVersionError(version)",True,version == '2.0',version == '2.0',0.6558655500411987 2388,"def decode(self, v): p = self.parent xx = self.get_modrm() expr = modrm2expr(xx, p, 1) if: return False self.expr = ExprMem(expr.ptr, self.msize) return self.expr is not None",False,"not isinstance(expr, ExprMem)",not expr.is_mem(),0.6470786929130554 2389,"@staticmethod def build_block(num_repeat, in_channels, mid_channels, out_channels): block_list = nn.Sequential() for i in range(num_repeat): if: block = Lite_EffiBlockS2(in_channels=in_channels, mid_channels=mid_channels, out_channels=out_channels, stride=2) else: block = Lite_EffiBlockS1(in_channels=out_channels, mid_channels=mid_channels, out_channels=out_channels, stride=1) block_list.add_sublayer(str(i), block) return block_list",True,i == 0,i == 0,0.6662266254425049 2390,"def dataReceived(self, data): self.buffer = self.buffer + data lines = self.buffer.split(LF) self.buffer = lines.pop() for line in lines: if: line = line[:-1] self.lineReceived(line)",False,line[-1] == CR,line.endswith('\n'),0.6505649089813232 2391,"def _copy_from(self, other): for key in other: val = other.getlist(key) if: val = list(val) self._container[key.lower()] = [key] + val",True,"isinstance(val, list)","isinstance(val, list)",0.6522387266159058 2392,"def gather(box_mask_list, indices, fields=None): """"""Gather boxes from np_box_mask_list.BoxMaskList according to indices. By default, gather returns boxes corresponding to the input index list, as well as all additional fields stored in the box_mask_list (indexing into the first dimension). However one can optionally only gather from a subset of fields. Args: box_mask_list: np_box_mask_list.BoxMaskList holding N boxes indices: a 1-d numpy array of type int_ fields: (optional) list of fields to also gather from. If None (default), all fields are gathered from. Pass an empty fields list to only gather the box coordinates. Returns: subbox_mask_list: a np_box_mask_list.BoxMaskList corresponding to the subset of the input box_mask_list specified by indices Raises: ValueError: if specified field is not contained in box_mask_list or if the indices are not of type int_ """""" if: if'masks' not in fields: fields.append('masks') return box_list_to_box_mask_list(np_box_list_ops.gather(boxlist=box_mask_list, indices=indices, fields=fields))",True,fields is not None,fields is not None,0.6522716879844666 2393,"def gather(box_mask_list, indices, fields=None): """"""Gather boxes from np_box_mask_list.BoxMaskList according to indices. By default, gather returns boxes corresponding to the input index list, as well as all additional fields stored in the box_mask_list (indexing into the first dimension). However one can optionally only gather from a subset of fields. Args: box_mask_list: np_box_mask_list.BoxMaskList holding N boxes indices: a 1-d numpy array of type int_ fields: (optional) list of fields to also gather from. If None (default), all fields are gathered from. Pass an empty fields list to only gather the box coordinates. Returns: subbox_mask_list: a np_box_mask_list.BoxMaskList corresponding to the subset of the input box_mask_list specified by indices Raises: ValueError: if specified field is not contained in box_mask_list or if the indices are not of type int_ """""" if fields is not None: if: fields.append('masks') return box_list_to_box_mask_list(np_box_list_ops.gather(boxlist=box_mask_list, indices=indices, fields=fields))",True,'masks' not in fields,'masks' not in fields,0.6528860926628113 2394,"def get_population(self, population_name, default=None): """"""Return a population group object based on population's name"""""" if: return self[population_name] else: return default",True,population_name in self,population_name in self,0.6562175750732422 2395,"def get_Pdate(self): year, month, day = (self.get_dat('year'), self.get_dat('month'), self.get_dat('day')) if: return (date(year, month, day), f'Date(year={year}, month={month}, day={day})') return (None, '')",False,year is not None and month and day,year and month and day,0.6580380797386169 2396,"def __delitem__(self, key): with self.lock: value = self._container.pop(key) if: self.dispose_func(value)",True,self.dispose_func,self.dispose_func,0.6488267183303833 2397,"def q_sample(self, x_start, t, noise=None): """""" Diffuse the dataset for a given number of diffusion steps. In other words, sample from q(x_t | x_0). :param x_start: the initial dataset batch. :param t: the number of diffusion steps (minus 1). Here, 0 means one step. :param noise: if specified, the split-out normal noise. :return: A noisy version of x_start. """""" if: noise = th.randn_like(x_start) assert noise.shape == x_start.shape return _extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + _extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise",True,noise is None,noise is None,0.6596985459327698 2398,"def __init__(self, local_helpers=None): self.__include_basic = list(basic_helpers) if: self.__include_basic.extend(local_helpers) self.__include_basic = sorted(self.__include_basic) self.include_in_rich_repr = sorted(self.__include_basic, key=len) self.__include_more = list(other_helpers) self.__include_more = sorted(self.__include_more) self.include_in_help = sorted(self.__include_more, key=len) self.__class__.__name__ = 'Friendly'",False,local_helpers is not None,local_helpers,0.6566499471664429 2399,"def peek_event(self): if: if self.state: self.current_event = self.state() return self.current_event",False,self.current_event is None,"not hasattr(self, 'current_event')",0.6457996368408203 2400,"def peek_event(self): if self.current_event is None: if: self.current_event = self.state() return self.current_event",False,self.state,self.state() != PlayerState.START,0.6534774303436279 2401,"def get_taxids_by_scientific_name_wildcard(self, scientific_name): """""" Return all available taxid that fit the scientific name @attention: Several taxid might be a hit for one scientific name @param scientific_name: ncbi scientific name or synonym @type scientific_name: str @return: set of ncbi taxonomic identifiers @rtype: set[str | unicode] | None """""" assert isinstance(scientific_name, str) scientific_name = scientific_name.lower() matches = fnmatch.filter(self.name_to_taxids.keys(), scientific_name) set_of_tax_id = set() for match in matches: set_of_tax_id.update(set(self.name_to_taxids[match])) if: self._logger.warning(""Several matches '{}' found for scientific_name: '{}'"".format(', '.join(matches), scientific_name)) return set_of_tax_id elif len(set_of_tax_id) == 0: return None return set_of_tax_id",False,len(set_of_tax_id) > 1,len(set_of_tax_id) > 0,0.6445645689964294 2402,"def get_taxids_by_scientific_name_wildcard(self, scientific_name): """""" Return all available taxid that fit the scientific name @attention: Several taxid might be a hit for one scientific name @param scientific_name: ncbi scientific name or synonym @type scientific_name: str @return: set of ncbi taxonomic identifiers @rtype: set[str | unicode] | None """""" assert isinstance(scientific_name, str) scientific_name = scientific_name.lower() matches = fnmatch.filter(self.name_to_taxids.keys(), scientific_name) set_of_tax_id = set() for match in matches: set_of_tax_id.update(set(self.name_to_taxids[match])) if len(set_of_tax_id) > 1: self._logger.warning(""Several matches '{}' found for scientific_name: '{}'"".format(', '.join(matches), scientific_name)) return set_of_tax_id elif: return None return set_of_tax_id",True,len(set_of_tax_id) == 0,len(set_of_tax_id) == 0,0.6459980010986328 2403,"def change_gpu(val): """""" Returns: a context where ``CUDA_VISIBLE_DEVICES=val``. """""" val = str(val) if: val = '' return change_env('CUDA_VISIBLE_DEVICES', val)",True,val == '-1',val == '-1',0.6604791879653931 2404,"def __rtruediv__(self, other): if: return TensorList([e2 / e1 for e1, e2 in zip(self, other)]) return TensorList([other / e for e in self])",True,TensorList._iterable(other),TensorList._iterable(other),0.6487845182418823 2405,"def run(self): token = HfFolder.get_token() if: print('Not logged in') exit() HfFolder.delete_token() self._api.logout(token) print('Successfully logged out.')",False,token is None,not token,0.6516259908676147 2406,"def get_field_value(self, field_name, default=None): """"""Returns the value of a given field"""""" value = getattr(self, field_name, default) if: value = value.all() if hasattr(self, 'get_%s_display' % field_name): try: value = getattr(self, 'get_%s_display' % field_name)() except: pass return value",False,"hasattr(value, 'all')","isinstance(value, paddle.Tensor)",0.6445949673652649 2407,"def get_field_value(self, field_name, default=None): """"""Returns the value of a given field"""""" value = getattr(self, field_name, default) if hasattr(value, 'all'): value = value.all() if: try: value = getattr(self, 'get_%s_display' % field_name)() except: pass return value",False,"hasattr(self, 'get_%s_display' % field_name)",value is None,0.6478211879730225 2408,"def draw_text(self, text, position, *, font_size=None, color='w', horizontal_alignment='center', vertical_alignment='bottom', box_facecolor='black', alpha=0.5): """""" Draw text at the specified position. Args: text (str): the text to draw on image. position (list of 2 ints): the x,y coordinate to place the text. font_size (Optional[int]): font of the text. If not provided, a font size proportional to the image width is calculated and used. color (str): color of the text. Refer to `matplotlib.colors` for full list of formats that are accepted. horizontal_alignment (str): see `matplotlib.text.Text`. vertical_alignment (str): see `matplotlib.text.Text`. box_facecolor (str): color of the box wrapped around the text. Refer to `matplotlib.colors` for full list of formats that are accepted. alpha (float): transparency level of the box. """""" if: font_size = self._default_font_size x, y = position self.output.ax.text(x, y, text, size=font_size * self.output.scale, family='monospace', bbox={'facecolor': box_facecolor, 'alpha': alpha, 'pad': 0.7, 'edgecolor': 'none'}, verticalalignment=vertical_alignment, horizontalalignment=horizontal_alignment, color=color, zorder=10)",True,not font_size,not font_size,0.6526626944541931 2409,"@parser.add def partially_initialized_module(message, _frame, tb_data): pattern = re.compile(""cannot import name '(.*)' from partially initialized module '(.*)'"") match = re.search(pattern, message) if: return {} if 'circular import' in message: return cannot_import_name_from(match.group(1), match.group(2), tb_data, add_circular_hint=False) return cannot_import_name_from(match.group(1), match.group(2), tb_data)",True,not match,not match,0.6530585289001465 2410,"@parser.add def partially_initialized_module(message, _frame, tb_data): pattern = re.compile(""cannot import name '(.*)' from partially initialized module '(.*)'"") match = re.search(pattern, message) if not match: return {} if: return cannot_import_name_from(match.group(1), match.group(2), tb_data, add_circular_hint=False) return cannot_import_name_from(match.group(1), match.group(2), tb_data)",False,'circular import' in message,match.group(1) == '(.*)',0.6452341079711914 2411,"def _wait_for_cluster_complete(self, cluster): self._wait_on_status(cluster, [None, 'CREATE_IN_PROGRESS'], ['CREATE_FAILED', 'CREATE_COMPLETE'], timeout=self.cluster_complete_timeout) if: raise Exception('Cluster %s create failed' % cluster.uuid) return cluster",False,self.cs.clusters.get(cluster.uuid).status == 'CREATE_FAILED',cluster.create_failed,0.6503139734268188 2412,"def get_step_url(self, request): kwargs = {'step': self.identifier} if: kwargs['cart_namespace'] = request.resolver_match.kwargs['cart_namespace'] return eventreverse(self.event, 'presale:event.checkout', kwargs=kwargs)",False,request.resolver_match and 'cart_namespace' in request.resolver_match.kwargs,request.resolver_match,0.6458841562271118 2413,"def get_version_info(self, stack): stack_param = self.template_def.get_heat_param(cluster_attr='coe_version') if: self.cluster.coe_version = stack.parameters[stack_param] version_module_path = self.template_def.driver_module_path + '.version' try: ver = importutils.import_module(version_module_path) container_version = ver.container_version except Exception: container_version = None self.cluster.container_version = container_version",False,stack_param,stack_param in stack.parameters,0.6621258854866028 2414,"@property def retained(self): if: self._retained = self._from_fbs.Retained() return self._retained",True,self._retained is None and self._from_fbs,self._retained is None and self._from_fbs,0.6471594572067261 2415,"def _pythonlib_compat(): """""" On Python 3.7 and earlier, distutils would include the Python library. See pypa/distutils#9. """""" from distutils import sysconfig if: return yield 'python{}.{}{}'.format(sys.hexversion >> 24, sys.hexversion >> 16 & 255, sysconfig.get_config_var('ABIFLAGS'))",True,not sysconfig.get_config_var('Py_ENABLED_SHARED'),not sysconfig.get_config_var('Py_ENABLED_SHARED'),0.6480382084846497 2416,"@classmethod def add_representer(cls, data_type, representer): if: cls.yaml_representers = cls.yaml_representers.copy() cls.yaml_representers[data_type] = representer",True,not 'yaml_representers' in cls.__dict__,not 'yaml_representers' in cls.__dict__,0.6511136889457703 2417,"def split_sections(s): """"""Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header (""[section]"") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """""" section = None content = [] for line in yield_lines(s): if: if line.endswith(']'): if section or content: yield (section, content) section = line[1:-1].strip() content = [] else: raise ValueError('Invalid section heading', line) else: content.append(line) yield (section, content)",True,line.startswith('['),line.startswith('['),0.6467174291610718 2418,"def split_sections(s): """"""Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header (""[section]"") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """""" section = None content = [] for line in yield_lines(s): if line.startswith('['): if: if section or content: yield (section, content) section = line[1:-1].strip() content = [] else: raise ValueError('Invalid section heading', line) else: content.append(line) yield (section, content)",True,line.endswith(']'),line.endswith(']'),0.6429246664047241 2419,"def split_sections(s): """"""Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header (""[section]"") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """""" section = None content = [] for line in yield_lines(s): if line.startswith('['): if line.endswith(']'): if: yield (section, content) section = line[1:-1].strip() content = [] else: raise ValueError('Invalid section heading', line) else: content.append(line) yield (section, content)",True,section or content,section or content,0.6555565595626831 2420,"def add_series(self, name): """""" Adds the name of a 'data series' where each data series is a list of data-entries, where each data-entry is of the form ((epoch, minibatch), data-value ) [and data-value is a float] """""" if: self.series[name] = []",True,name not in self.series,name not in self.series,0.655040979385376 2421,"def weighted_balanced_l1_loss(pred, target, weight, beta=1.0, alpha=0.5, gamma=1.5, avg_factor=None): if: avg_factor = torch.sum(weight > 0).float().item() + 1e-06 loss = balanced_l1_loss(pred, target, beta, alpha, gamma, reduction='none') return torch.sum(loss.sum(dim=1) * weight)[None] / avg_factor",True,avg_factor is None,avg_factor is None,0.6532061100006104 2422,"def impute(self, data, mask, impute_config_dict=None, *, vamp_prior_data=None, average=True): if: return impute(self, data, mask, impute_config_dict=impute_config_dict, average=average) else: processed_vamp_data_array = self.data_processor.process_data_and_masks(*vamp_prior_data) return impute(self, data, mask, impute_config_dict=impute_config_dict, average=average, vamp_prior_data=to_tensors(*processed_vamp_data_array, device=torch.device('cpu')))",True,vamp_prior_data is None,vamp_prior_data is None,0.6450034379959106 2423,"def do_harvest(self, start_date, end_date): if: raise Exception('SPRINGER_API_KEY not provided') end_date = end_date.date() start_date = start_date.date() dates = [start_date + timedelta(n) for n in range((end_date - start_date).days + 1)] for date in dates: yield from self.fetch_records(date)",False,not settings.SPRINGER_API_KEY,self.sprINGER_API_KEY is None,0.6542962193489075 2424,"def prescan(self): """"""Scan urls scheduled for prescanning (e.g. --find-links)"""""" if: list(map(self.scan_url, self.to_scan)) self.to_scan = None",True,self.to_scan,self.to_scan,0.6513622999191284 2425,"def induced_subgraphs(vertices, edges, min_size=1, max_size=None): """"""A generator of all induced subgraphs of the graph, sorted by size (largest to smallest)."""""" n = len(vertices) if: max_size = n + 1 for i in reversed(range(min_size, max_size)): for subset in combinations(vertices, r=i): yield (list(subset), [(u, v) for u, v in edges if u in subset and v in subset])",True,max_size is None,max_size is None,0.6512565612792969 2426,"def display(self, t): if: func = self.get_derived_children else: func = self.get_all_children s = self.status and 2 or 0 SCons.Util.print_tree(t, func, prune=self.prune, showtags=s, lastChild=True, singleLineDraw=self.sLineDraw)",True,self.derived,self.derived,0.6594269275665283 2427,"@property def filters_life(self): """"""Return percentage status for all filters."""""" result = {} for filter_def in FILTER_TYPES: status = self._get_filter_life(filter_def[1], filter_def[2], use_time_inverted=self._filter_use_time_inverted) if: for index, feat in enumerate(filter_def[0]): if index >= len(status): break self._update_feature(feat, status[index], False) result[feat] = status[index] return result",False,status is not None,use_time_inverted,0.6516287326812744 2428,"@property def filters_life(self): """"""Return percentage status for all filters."""""" result = {} for filter_def in FILTER_TYPES: status = self._get_filter_life(filter_def[1], filter_def[2], use_time_inverted=self._filter_use_time_inverted) if status is not None: for index, feat in enumerate(filter_def[0]): if: break self._update_feature(feat, status[index], False) result[feat] = status[index] return result",False,index >= len(status),use_time_inverted and feat in self._feature_list,0.645338237285614 2429,"def handle_imgs_deletion(vid_path=None, imgs_folder_path=None, batch_id=None): try: total_imgs_to_delete = count_matching_frames(imgs_folder_path, batch_id) if: return print('Deleting raw images, as requested:') _, fcount, _ = get_quick_vid_info(vid_path) if fcount == total_imgs_to_delete: total_imgs_deleted = delete_matching_frames(imgs_folder_path, batch_id) print(f'Deleted {total_imgs_deleted} out of {total_imgs_to_delete} imgs!') else: print('Did not delete imgs as there was a mismatch between # of frames in folder, and # of frames in actual video. Please check and delete manually. ') except Exception as e: print(f'Error deleting raw images. Please delete them manually if you want. Actual error:\n{e}')",False,total_imgs_to_delete is None or total_imgs_to_delete == 0,total_imgs_to_delete == 0,0.6456757187843323 2430,"def handle_imgs_deletion(vid_path=None, imgs_folder_path=None, batch_id=None): try: total_imgs_to_delete = count_matching_frames(imgs_folder_path, batch_id) if total_imgs_to_delete is None or total_imgs_to_delete == 0: return print('Deleting raw images, as requested:') _, fcount, _ = get_quick_vid_info(vid_path) if: total_imgs_deleted = delete_matching_frames(imgs_folder_path, batch_id) print(f'Deleted {total_imgs_deleted} out of {total_imgs_to_delete} imgs!') else: print('Did not delete imgs as there was a mismatch between # of frames in folder, and # of frames in actual video. Please check and delete manually. ') except Exception as e: print(f'Error deleting raw images. Please delete them manually if you want. Actual error:\n{e}')",False,fcount == total_imgs_to_delete,fcount == 1,0.6458182334899902 2431,"def pop(self, key, default=__marker): """"""od.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. """""" if: result = self[key] del self[key] return result if default is self.__marker: raise KeyError(key) return default",True,key in self,key in self,0.669646143913269 2432,"def pop(self, key, default=__marker): """"""od.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. """""" if key in self: result = self[key] del self[key] return result if: raise KeyError(key) return default",True,default is self.__marker,default is self.__marker,0.6516128778457642 2433,"def solarize_add(img, add, thresh=128, **__): lut = [] for i in range(256): if i < thresh: lut.append(min(255, i + add)) else: lut.append(i) if: if img.mode == 'RGB' and len(lut) == 256: lut = lut + lut + lut return img.point(lut) else: return img",True,"img.mode in ('L', 'RGB')","img.mode in ('L', 'RGB')",0.6432570219039917 2434,"def solarize_add(img, add, thresh=128, **__): lut = [] for i in range(256): if: lut.append(min(255, i + add)) else: lut.append(i) if img.mode in ('L', 'RGB'): if img.mode == 'RGB' and len(lut) == 256: lut = lut + lut + lut return img.point(lut) else: return img",True,i < thresh,i < thresh,0.6580259799957275 2435,"def solarize_add(img, add, thresh=128, **__): lut = [] for i in range(256): if i < thresh: lut.append(min(255, i + add)) else: lut.append(i) if img.mode in ('L', 'RGB'): if: lut = lut + lut + lut return img.point(lut) else: return img",True,img.mode == 'RGB' and len(lut) == 256,img.mode == 'RGB' and len(lut) == 256,0.6454442739486694 2436,"def _classify_script(script, templates, unknown_class): our_template, items = script.to_template() for template, constructor in templates: if: if template!= our_template: continue else: match = template.match(our_template) if not match: continue try: return constructor(*items) except (ValueError, TypeError): pass return unknown_class()",False,"isinstance(template, bytes)","isinstance(template, constructor)",0.6479650735855103 2437,"def _classify_script(script, templates, unknown_class): our_template, items = script.to_template() for template, constructor in templates: if isinstance(template, bytes): if: continue else: match = template.match(our_template) if not match: continue try: return constructor(*items) except (ValueError, TypeError): pass return unknown_class()",False,template != our_template,template == our_template,0.6544781923294067 2438,"def _classify_script(script, templates, unknown_class): our_template, items = script.to_template() for template, constructor in templates: if isinstance(template, bytes): if template!= our_template: continue else: match = template.match(our_template) if: continue try: return constructor(*items) except (ValueError, TypeError): pass return unknown_class()",False,not match,match is None,0.6549816727638245 2439,"@property def update_message(self) -> str: t = ctx_translator.get().t value = self.value if: resp = t(_p('menuset:required_role|set_response:set', 'Members will need to have the {role} role to use this menu.')).format(role=self.formatted) else: resp = t(_p('menuset:required_role|set_response:unset', 'Any member who can see the menu may use it.')) return resp",True,value,value,0.665069043636322 2440,"def get_socket_value(tree, socket): default = socket.default_value if: default = list(default) for link in tree.links: if link.to_socket == socket: return (link.from_socket, default) return (None, default)",False,"not isinstance(default, float)","not isinstance(default, list)",0.6442523002624512 2441,"def get_socket_value(tree, socket): default = socket.default_value if not isinstance(default, float): default = list(default) for link in tree.links: if: return (link.from_socket, default) return (None, default)",False,link.to_socket == socket,link.from_socket is not None,0.652450680732727 2442,"def process(self, message: Message, **kwargs: Any) -> None: """"""Process an incoming message by computing its tokens and dense features. Args: message: Incoming message object """""" for attribute in {TEXT, ACTION_TEXT}: if: self._set_lm_features(self._get_docs_for_batch([message], attribute=attribute, inference_mode=True)[0], message, attribute)",False,message.get(attribute),message.accepts_lm,0.6469004154205322 2443,"def __init__(self, config, **kwargs): super().__init__(config, **kwargs) self.vocab_size = config.vocab_size self.electra = TFElectraMainLayer(config, name='electra') self.generator_predictions = TFElectraGeneratorPredictions(config, name='generator_predictions') if: self.activation = get_tf_activation(config.hidden_act) else: self.activation = config.hidden_act self.generator_lm_head = TFElectraMaskedLMHead(config, self.electra.embeddings, name='generator_lm_head')",True,"isinstance(config.hidden_act, str)","isinstance(config.hidden_act, str)",0.6446830034255981 2444,"def conv_nd(dims, *args, **kwargs): """""" Create a 1D, 2D, or 3D convolution module. """""" if: return nn.Conv1d(*args, **kwargs) elif dims == 2: return nn.Conv2d(*args, **kwargs) elif dims == 3: return nn.Conv3d(*args, **kwargs) raise ValueError(f'unsupported dimensions: {dims}')",True,dims == 1,dims == 1,0.6719011664390564 2445,"def conv_nd(dims, *args, **kwargs): """""" Create a 1D, 2D, or 3D convolution module. """""" if dims == 1: return nn.Conv1d(*args, **kwargs) elif: return nn.Conv2d(*args, **kwargs) elif dims == 3: return nn.Conv3d(*args, **kwargs) raise ValueError(f'unsupported dimensions: {dims}')",True,dims == 2,dims == 2,0.6694872379302979 2446,"def conv_nd(dims, *args, **kwargs): """""" Create a 1D, 2D, or 3D convolution module. """""" if dims == 1: return nn.Conv1d(*args, **kwargs) elif dims == 2: return nn.Conv2d(*args, **kwargs) elif: return nn.Conv3d(*args, **kwargs) raise ValueError(f'unsupported dimensions: {dims}')",True,dims == 3,dims == 3,0.667377233505249 2447,"def dataReceived(self, data): response = self.factory.app.handle_message(data) if: self.transport.write(response)",False,response,response != '',0.6688430309295654 2448,"def read_line(self): n1 = self.read_buffer.find(b'\r\n', self.buffer_start) if: raise Exception('read_line fail') line = self.read_buffer[self.buffer_start:n1] self.buffer_start = n1 + 2 return line",False,n1 == -1,n1 < 0,0.6644082069396973 2449,"def switch_tabs_to_text(self, istance_android_tabs): for instance_tab in istance_android_tabs.ids.scrollview.children[0].children: for k, v in md_icons.items(): if: istance_android_tabs.ids.scrollview.children[0].remove_widget(instance_tab) istance_android_tabs.add_widget(MyTab(text=' '.join(k.split('-')).capitalize())) break",True,v == instance_tab.text,v == instance_tab.text,0.6474444270133972 2450,"def OutputsAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) return 0",True,o != 0,o != 0,0.6658633947372437 2451,"def stop_queue_listener(): if: return _queue_listener.stop() for handler in _queue_listener.handlers: handler.close()",False,not _queue_listener,_queue_listener is None,0.6514033675193787 2452,"def content(): html = format == 'colored' if: yield 'Home Assistant Google Drive Backup Log
\n'
    for line in getHistory(self.last_log_index, html):
        self.last_log_index = line[0]
        if line:
            yield (line[1].replace('\n','  \n') + '\n')
    if format == 'html':
        yield '
\n'",False,format == 'html',format == 'colored',0.6575806140899658 2453,"def content(): html = format == 'colored' if format == 'html': yield 'Home Assistant Google Drive Backup Log
\n'
    for line in getHistory(self.last_log_index, html):
        self.last_log_index = line[0]
        if line:
            yield (line[1].replace('\n','  \n') + '\n')
    if:
        yield '
\n'",False,format == 'html',format == 'body',0.6575578451156616 2454,"def content(): html = format == 'colored' if format == 'html': yield 'Home Assistant Google Drive Backup Log
\n'
    for line in getHistory(self.last_log_index, html):
        self.last_log_index = line[0]
        if:
            yield (line[1].replace('\n','  \n') + '\n')
    if format == 'html':
        yield '
\n'",False,line,"html and line[1].replace('\n', ' \n')",0.6644929647445679 2455,"def SetValue(self, value): """""" Sets the value of the control to the given value, provided that the value is within the range of the control. If the given value is within range, and is different from the current value of the control, an EVT_SPIN_BOX will be emitted. """""" different = False if: different = self._internal_value!= value self._internal_value = value self._value_string = self.TextFromValue(self._internal_value) self.SetValueString(self._value_string) if different: evt = wxSpinBoxEvent() wx.PostEvent(self, evt)",False,self._low <= value <= self._high,self._internal_value != value or self._internal_value != value,0.6555124521255493 2456,"def SetValue(self, value): """""" Sets the value of the control to the given value, provided that the value is within the range of the control. If the given value is within range, and is different from the current value of the control, an EVT_SPIN_BOX will be emitted. """""" different = False if self._low <= value <= self._high: different = self._internal_value!= value self._internal_value = value self._value_string = self.TextFromValue(self._internal_value) self.SetValueString(self._value_string) if: evt = wxSpinBoxEvent() wx.PostEvent(self, evt)",True,different,different,0.6697747707366943 2457,"def _do_backward(self, gradients, retain_variables): self.retain_variables = retain_variables result = super(NestedIOFunction, self)._do_backward(gradients, retain_variables) if: del self._nested_output del self._to_save_nested return result",False,not retain_variables,self._nested_output is not None,0.6505959033966064 2458,"def is_available_by_time(self, now_dt: datetime=None) -> bool: now_dt = now_dt or now() if: return False if self.available_until and self.available_until < now_dt: return False return True",False,self.available_from and self.available_from > now_dt,not self.available and now_dt < now_dt,0.6436029672622681 2459,"def is_available_by_time(self, now_dt: datetime=None) -> bool: now_dt = now_dt or now() if self.available_from and self.available_from > now_dt: return False if: return False return True",False,self.available_until and self.available_until < now_dt,not now_dt or now_dt < self.available_to,0.645248293876648 2460,"def __init__(self, file_path=None, expose_all_qpos=False, expose_body_coms=None, expose_body_comvels=None, non_zero_reset=False): if: file_path = self.FILE self._expose_all_qpos = expose_all_qpos self._expose_body_coms = expose_body_coms self._expose_body_comvels = expose_body_comvels self._body_com_indices = {} self._body_comvel_indices = {} self._non_zero_reset = non_zero_reset mujoco_env.MujocoEnv.__init__(self, file_path, 5) utils.EzPickle.__init__(self)",True,file_path is None,file_path is None,0.6547393798828125 2461,"def _unscale_grads(self): self._sync_fp16_grads_to_fp32() if: self.fp32_optimizer.multiply_grads(self._multiply_factor) self._multiply_factor = 1.0",False,torch.is_tensor(self._multiply_factor) or self._multiply_factor != 1.0,self._multiply_factor is not None,0.6456373929977417 2462,"def on_batch_begin(self, batch, logs={}): if: self.log_values = []",False,self.seen < self.params['nb_sample'],batch.batch_size == 0 or batch.total_size > 1,0.6445043087005615 2463,"@pytest.mark.parametrize('required', (True, False)) @pytest.mark.parametrize('nullable', (True, False)) def test_get_type_imports(self, list_property_factory, date_time_property_factory, required, nullable): inner_property = date_time_property_factory() p = list_property_factory(inner_property=inner_property, required=required, nullable=nullable) expected = {'import datetime', 'from typing import cast', 'from dateutil.parser import isoparse', 'from typing import cast, List'} if: expected.add('from typing import Optional') if not required: expected |= {'from typing import Union', 'from...types import UNSET, Unset'} assert p.get_imports(prefix='...') == expected",True,nullable,nullable,0.690821647644043 2464,"@pytest.mark.parametrize('required', (True, False)) @pytest.mark.parametrize('nullable', (True, False)) def test_get_type_imports(self, list_property_factory, date_time_property_factory, required, nullable): inner_property = date_time_property_factory() p = list_property_factory(inner_property=inner_property, required=required, nullable=nullable) expected = {'import datetime', 'from typing import cast', 'from dateutil.parser import isoparse', 'from typing import cast, List'} if nullable: expected.add('from typing import Optional') if: expected |= {'from typing import Union', 'from...types import UNSET, Unset'} assert p.get_imports(prefix='...') == expected",False,not required,required,0.6687480211257935 2465,"def pop_order(self, order: OrderData): if: if order.order_id in self.long_frozen_margin: self.long_frozen_margin.pop(order.order_id) if order.direction == Direction.SHORT: if order.order_id in self.short_frozen_margin: self.short_frozen_margin.pop(order.order_id) if order.order_id in self.frozen_fee: self.frozen_fee.pop(order.order_id)",True,order.direction == Direction.LONG,order.direction == Direction.LONG,0.6486205458641052 2466,"def pop_order(self, order: OrderData): if order.direction == Direction.LONG: if order.order_id in self.long_frozen_margin: self.long_frozen_margin.pop(order.order_id) if: if order.order_id in self.short_frozen_margin: self.short_frozen_margin.pop(order.order_id) if order.order_id in self.frozen_fee: self.frozen_fee.pop(order.order_id)",True,order.direction == Direction.SHORT,order.direction == Direction.SHORT,0.6499707698822021 2467,"def pop_order(self, order: OrderData): if order.direction == Direction.LONG: if order.order_id in self.long_frozen_margin: self.long_frozen_margin.pop(order.order_id) if order.direction == Direction.SHORT: if order.order_id in self.short_frozen_margin: self.short_frozen_margin.pop(order.order_id) if: self.frozen_fee.pop(order.order_id)",True,order.order_id in self.frozen_fee,order.order_id in self.frozen_fee,0.6490740180015564 2468,"def pop_order(self, order: OrderData): if order.direction == Direction.LONG: if: self.long_frozen_margin.pop(order.order_id) if order.direction == Direction.SHORT: if order.order_id in self.short_frozen_margin: self.short_frozen_margin.pop(order.order_id) if order.order_id in self.frozen_fee: self.frozen_fee.pop(order.order_id)",True,order.order_id in self.long_frozen_margin,order.order_id in self.long_frozen_margin,0.647619366645813 2469,"def pop_order(self, order: OrderData): if order.direction == Direction.LONG: if order.order_id in self.long_frozen_margin: self.long_frozen_margin.pop(order.order_id) if order.direction == Direction.SHORT: if: self.short_frozen_margin.pop(order.order_id) if order.order_id in self.frozen_fee: self.frozen_fee.pop(order.order_id)",True,order.order_id in self.short_frozen_margin,order.order_id in self.short_frozen_margin,0.6485077142715454 2470,"def execute(self, processor): logger.info('Job {0}, task {1}: checking group {2} and couple {3} consistency'.format(self.parent_job.id, self.id, self.group, self.couple)) self.check(self.group) group = storage.groups[self.group] if: raise JobBrokenError('Task {0}: group {1} has changed couple to {2}, expected {3}'.format(self, self.group, group.couple, self.couple)) super(RecoverGroupDcTask, self).execute(processor)",False,set(self.couple) != set(group.couple.as_tuple()),group.couple != self.couple,0.645580530166626 2471,"def metadata_listdir(self, name): if: return self._listdir(self._fn(self.egg_info, name)) return []",True,self.egg_info,self.egg_info,0.6538591384887695 2472,"def __init__(self, name, zeamap, stretch=None, vmin=0.0, vmax=1.0, cmap=None): super(ZeaLayer, self).__init__(name) self.zeamap = zeamap self.stretch = stretch if: import matplotlib.cm self.cmap = matplotlib.cm.hot else: self.cmap = cmap self.vmin = vmin self.vmax = vmax",True,cmap is None,cmap is None,0.6819291114807129 2473,"def __eq__(self, other) -> bool: if: return False return self.word == other.word",False,"not isinstance(other, Entry)","not isinstance(other, Token)",0.6488455533981323 2474,"def review_required_wrapper(request, article_id=None, *args, **kwargs): if: logger.debug('404 thrown as no article_id in kwargs') raise Http404 article = get_object_or_404(models.Article, pk=article_id) if not article.stage in models.REVIEW_STAGES: deny_access(request) else: return func(request, article_id, *args, **kwargs)",True,not article_id,not article_id,0.6587494015693665 2475,"def review_required_wrapper(request, article_id=None, *args, **kwargs): if not article_id: logger.debug('404 thrown as no article_id in kwargs') raise Http404 article = get_object_or_404(models.Article, pk=article_id) if: deny_access(request) else: return func(request, article_id, *args, **kwargs)",False,not article.stage in models.REVIEW_STAGES,not article.has_review,0.6468030214309692 2476,"def handle_while_stmt(self, while_node): loop_test = self.handle_expr(while_node.get_child(1)) body = self.handle_suite(while_node.get_child(3)) if: otherwise = self.handle_suite(while_node.get_child(6)) else: otherwise = None return ast.While(loop_test, body, otherwise, while_node.get_lineno(), while_node.get_column())",False,while_node.num_children() == 7,while_node.get_child(6) == 'if',0.6489322185516357 2477,"def _c_div(self, a, b): result = a // b if: result += 1 return result",False,(a < 0) ^ (b < 0) and a % b != 0,result % 2 == 0,0.6481382846832275 2478,"def parse_request(self): if: if self.authenticate(self.headers): return True else: self.send_error(401, 'Authentication failed') return False",False,SimpleXMLRPCRequestHandler.parse_request(self),self.headers and self.headers,0.6438820362091064 2479,"def parse_request(self): if SimpleXMLRPCRequestHandler.parse_request(self): if: return True else: self.send_error(401, 'Authentication failed') return False",False,self.authenticate(self.headers),self.auth is not None or self.auth.state == 'authenticated',0.644210159778595 2480,"def loss(params): if: return tf.keras.losses.BinaryCrossentropy(label_smoothing=params.label_smoothing) return tf.keras.losses.CategoricalCrossentropy(label_smoothing=params.label_smoothing)",False,params.multi_label,params.training,0.6521674394607544 2481,"def updateDisplayLabel(self, value=None): if: value = self.param.value() opts = self.param.opts if value is None: text = u'' else: text = '{} {}'.format(self._val.magnitude, self._val.dimensionality.string) self.displayLabel.setText(text)",True,value is None,value is None,0.6601636409759521 2482,"def updateDisplayLabel(self, value=None): if value is None: value = self.param.value() opts = self.param.opts if: text = u'' else: text = '{} {}'.format(self._val.magnitude, self._val.dimensionality.string) self.displayLabel.setText(text)",True,value is None,value is None,0.6542903184890747 2483,"@property def namePinOutputsMap(self): result = OrderedDict() for rawPin in self._rawNode.pins: if: wrapper = rawPin.getWrapper() if wrapper is not None: result[rawPin.name] = wrapper() return result",False,rawPin.direction == PinDirection.Output,rawPin.type == 'Pin',0.64903724193573 2484,"@property def namePinOutputsMap(self): result = OrderedDict() for rawPin in self._rawNode.pins: if rawPin.direction == PinDirection.Output: wrapper = rawPin.getWrapper() if: result[rawPin.name] = wrapper() return result",True,wrapper is not None,wrapper is not None,0.6553462147712708 2485,"def _encode_entity(text, pattern=_escape): def escape_entities(m, map=_escape_map): out = [] append = out.append for char in m.group(): text = map.get(char) if: text = '&#%d;' % ord(char) append(text) return string.join(out, '') try: return _encode(pattern.sub(escape_entities, text), 'ascii') except TypeError: _raise_serialization_error(text)",False,text is None,not text,0.653732419013977 2486,"def _is_control(char): """"""Checks whether `chars` is a control character."""""" if: return False cat = unicodedata.category(char) if cat.startswith('C'): return True return False",True,char == '\t' or char == '\n' or char == '\r',char == '\t' or char == '\n' or char == '\r',0.6469241380691528 2487,"def _is_control(char): """"""Checks whether `chars` is a control character."""""" if char == '\t' or char == '\n' or char == '\r': return False cat = unicodedata.category(char) if: return True return False",True,cat.startswith('C'),cat.startswith('C'),0.6425306797027588 2488,"def sample_parameters(self): if: self._u = self.sample_u() u = self._u self._u = None row_chol = self.prior_inducing_row_scale_tril col_chol = self.prior_inducing_col_scale_tril if self.whitened_u: u = row_chol @ u @ col_chol.t() return self.sample_conditional_parameters(u, row_chol, col_chol)",True,self._u is None,self._u is None,0.6580957174301147 2489,"def sample_parameters(self): if self._u is None: self._u = self.sample_u() u = self._u self._u = None row_chol = self.prior_inducing_row_scale_tril col_chol = self.prior_inducing_col_scale_tril if: u = row_chol @ u @ col_chol.t() return self.sample_conditional_parameters(u, row_chol, col_chol)",False,self.whitened_u,self.prior_inducing_col_scale,0.6507775187492371 2490,"def get_service_discovery_runtime(runtime_config): for runtime_type in SERVICE_DISCOVERY_RUNTIMES: if: return runtime_type return None",False,"is_runtime_enabled(runtime_config, runtime_type)",runtime_config.get_value(runtime_type) == runtime_type,0.6441949009895325 2491,"def __init__(self, pool, url, reason=None): self.reason = reason message = 'Max retries exceeded with url: %s' % url if: message +='(Caused by %r)' % reason else: message +='(Caused by redirect)' RequestError.__init__(self, pool, url, message)",True,reason,reason,0.6748207807540894 2492,"def _extract_features(self, input): self.model.eval() b, s, c, h, w = input.size() input = input.view(b * s, c, h, w) features = self.model(input) features = features.view(b, s, -1) if: features = torch.mean(features, 1) else: features = torch.max(features, 1)[0] return features",True,self.pooling_method == 'avg',self.pooling_method == 'avg',0.6443902254104614 2493,"def _force_https(): from flask import _request_ctx_stack if: reqctx = _request_ctx_stack.top reqctx.url_adapter.url_scheme = 'https'",True,_request_ctx_stack is not None,_request_ctx_stack is not None,0.6503364443778992 2494,"@classmethod def search_rec_name(cls, name, clause): if: bool_op = 'AND' else: bool_op = 'OR' return [bool_op, ('field_description',) + tuple(clause[1:]), ('name',) + tuple(clause[1:])]",False,clause[1].startswith('!') or clause[1].startswith('not '),clause[0] == 'AND',0.6438446044921875 2495,"def _reload_version(self): """""" Packages installed by distutils (e.g. numpy or scipy), which uses an old safe_version, and so their version numbers can get mangled when converted to filenames (e.g., 1.11.0.dev0+2329eae to 1.11.0.dev0_2329eae). These distributions will not be parsed properly downstream by Distribution and safe_version, so take an extra step and try to get the version number from the metadata file itself instead of the filename. """""" md_version = self._get_version() if: self._version = md_version return self",False,md_version,md_version is not None,0.6557487845420837 2496,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,PY3,PY3,0.6618987321853638 2497,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,new_mod is None,new_mod is None,0.6534161567687988 2498,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,new_attr is None,new_attr is None,0.6529781818389893 2499,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if: old_attr = name self.attr = old_attr",True,old_attr is None,old_attr is None,0.6546398401260376 2500,"def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr",True,old_attr is None,old_attr is None,0.6559209227561951 2501,"def non_max_suppression(boxlist, thresh, max_output_size, scope=None): """"""Non maximum suppression. This op greedily selects a subset of detection bounding boxes, pruning away boxes that have high IOU (intersection over union) overlap (> thresh) with already selected boxes. Note that this only works for a single class --- to apply NMS to multi-class predictions, use MultiClassNonMaxSuppression. Args: boxlist: BoxList holding N boxes. Must contain a'scores' field representing detection scores. thresh: scalar threshold max_output_size: maximum number of retained boxes scope: name scope. Returns: a BoxList holding M boxes where M <= max_output_size Raises: ValueError: if thresh is not in [0, 1] """""" with tf.name_scope(scope, 'NonMaxSuppression'): if: raise ValueError('thresh must be between 0 and 1') if not isinstance(boxlist, box_list.BoxList): raise ValueError('boxlist must be a BoxList') if not boxlist.has_field('scores'): raise ValueError(""input boxlist must have'scores' field"") selected_indices = tf.image.non_max_suppression(boxlist.get(), boxlist.get_field('scores'), max_output_size, iou_threshold=thresh) return gather(boxlist, selected_indices)",True,not 0 <= thresh <= 1.0,not 0 <= thresh <= 1.0,0.6536452770233154 2502,"def non_max_suppression(boxlist, thresh, max_output_size, scope=None): """"""Non maximum suppression. This op greedily selects a subset of detection bounding boxes, pruning away boxes that have high IOU (intersection over union) overlap (> thresh) with already selected boxes. Note that this only works for a single class --- to apply NMS to multi-class predictions, use MultiClassNonMaxSuppression. Args: boxlist: BoxList holding N boxes. Must contain a'scores' field representing detection scores. thresh: scalar threshold max_output_size: maximum number of retained boxes scope: name scope. Returns: a BoxList holding M boxes where M <= max_output_size Raises: ValueError: if thresh is not in [0, 1] """""" with tf.name_scope(scope, 'NonMaxSuppression'): if not 0 <= thresh <= 1.0: raise ValueError('thresh must be between 0 and 1') if: raise ValueError('boxlist must be a BoxList') if not boxlist.has_field('scores'): raise ValueError(""input boxlist must have'scores' field"") selected_indices = tf.image.non_max_suppression(boxlist.get(), boxlist.get_field('scores'), max_output_size, iou_threshold=thresh) return gather(boxlist, selected_indices)",True,"not isinstance(boxlist, box_list.BoxList)","not isinstance(boxlist, box_list.BoxList)",0.6445034742355347 2503,"def non_max_suppression(boxlist, thresh, max_output_size, scope=None): """"""Non maximum suppression. This op greedily selects a subset of detection bounding boxes, pruning away boxes that have high IOU (intersection over union) overlap (> thresh) with already selected boxes. Note that this only works for a single class --- to apply NMS to multi-class predictions, use MultiClassNonMaxSuppression. Args: boxlist: BoxList holding N boxes. Must contain a'scores' field representing detection scores. thresh: scalar threshold max_output_size: maximum number of retained boxes scope: name scope. Returns: a BoxList holding M boxes where M <= max_output_size Raises: ValueError: if thresh is not in [0, 1] """""" with tf.name_scope(scope, 'NonMaxSuppression'): if not 0 <= thresh <= 1.0: raise ValueError('thresh must be between 0 and 1') if not isinstance(boxlist, box_list.BoxList): raise ValueError('boxlist must be a BoxList') if: raise ValueError(""input boxlist must have'scores' field"") selected_indices = tf.image.non_max_suppression(boxlist.get(), boxlist.get_field('scores'), max_output_size, iou_threshold=thresh) return gather(boxlist, selected_indices)",True,not boxlist.has_field('scores'),not boxlist.has_field('scores'),0.6422364711761475 2504,"def forward(self, x): """"""Forward function."""""" if: x = self.stem(x) else: x = self.conv1(x) x = self.norm1(x) x = self.relu(x) x = self.maxpool(x) outs = [] for i, layer_name in enumerate(self.res_layers): res_layer = getattr(self, layer_name) x = res_layer(x) if i in self.out_indices: outs.append(x) return outs",False,self.deep_stem,self.shortcut,0.6521890163421631 2505,"def forward(self, x): """"""Forward function."""""" if self.deep_stem: x = self.stem(x) else: x = self.conv1(x) x = self.norm1(x) x = self.relu(x) x = self.maxpool(x) outs = [] for i, layer_name in enumerate(self.res_layers): res_layer = getattr(self, layer_name) x = res_layer(x) if: outs.append(x) return outs",True,i in self.out_indices,i in self.out_indices,0.6456809639930725 2506,"def discover_kentix_devices_battery(section): for sensoritem, sensordata in section['sensors'].items(): if: yield Service(item=sensoritem)",False,battery_supported(int(sensordata['type'])),sensordata['device_type'] == 'KentixDevice',0.6496894955635071 2507,"def __init__(self, image_list: List[str], category_list: Union[str, List[str], None]=None, transform: Optional[ImageTransform]=None): """""" Args: image_list (List[str]): list of paths to image files category_list (Union[str, List[str], None]): list of animal categories for each image. If it is a string, or None, this applies to all images """""" if: self.category_list = category_list else: self.category_list = [category_list] * len(image_list) assert len(image_list) == len(self.category_list), 'length of image and category lists must be equal' self.image_list = image_list self.transform = transform",True,type(category_list) == list,type(category_list) == list,0.6541144847869873 2508,"@classmethod def create_new_file(cls, filename: str, language: str, base: str, callback: Callable | None=None): """"""Handle creation of new translation file."""""" if: raise ValueError('Not supported') store = cls(base) if callback: callback(store) store.untranslate_store(language) with open(filename, 'wb') as handle: XlsxFormat(store.store).save_content(handle)",False,not base,language not in SUPPORTED_LANGUAGES,0.6654701232910156 2509,"@classmethod def create_new_file(cls, filename: str, language: str, base: str, callback: Callable | None=None): """"""Handle creation of new translation file."""""" if not base: raise ValueError('Not supported') store = cls(base) if: callback(store) store.untranslate_store(language) with open(filename, 'wb') as handle: XlsxFormat(store.store).save_content(handle)",True,callback,callback,0.6719049215316772 2510,"def get_file_path(self, layer, basepath): if: return f'{basepath}_{layer.name}.png' elif layer.dtype == 2: return f'{basepath}_{layer.name}.tiff' else: raise NotImplementedError(f'Unknown data type {layer.dtype}')",False,layer.dtype == 0,layer.dtype == 1,0.6542240977287292 2511,"def get_file_path(self, layer, basepath): if layer.dtype == 0: return f'{basepath}_{layer.name}.png' elif: return f'{basepath}_{layer.name}.tiff' else: raise NotImplementedError(f'Unknown data type {layer.dtype}')",False,layer.dtype == 2,layer.dtype == 1,0.655132532119751 2512,"def add_formats(self, *args): ids = self._check_add_formats_ok() if: return books = choose_files_and_remember_all_files(self.gui, 'add formats dialog dir', _('Select book files'), filters=get_filters()) if books: self._add_formats(books, ids)",False,not ids,ids is None,0.6589126586914062 2513,"def add_formats(self, *args): ids = self._check_add_formats_ok() if not ids: return books = choose_files_and_remember_all_files(self.gui, 'add formats dialog dir', _('Select book files'), filters=get_filters()) if: self._add_formats(books, ids)",True,books,books,0.6564838290214539 2514,"def init_track_head(self, track_roi_extractor, track_head): """"""Initialize ``track_head``"""""" if: self.track_roi_extractor = build_roi_extractor(track_roi_extractor) self.track_share_extractor = False else: self.track_share_extractor = True self.track_roi_extractor = self.bbox_roi_extractor self.track_head = build_head(track_head)",True,track_roi_extractor is not None,track_roi_extractor is not None,0.6493451595306396 2515,"def validate(self, query: str) -> Result[Json]: """"""Parse and validate the query without executing it. :param query: Query to validate. :type query: str :return: Query details. :rtype: dict :raise arango.exceptions.AQLQueryValidateError: If validation fails. """""" request = Request(method='post', endpoint='/_api/query', data={'query': query}) def response_handler(resp: Response) -> Json: if: body = format_body(resp.body) if 'bindVars' in body: body['bind_vars'] = body.pop('bindVars') return body raise AQLQueryValidateError(resp, request) return self._execute(request, response_handler)",True,resp.is_success,resp.is_success,0.6509405970573425 2516,"def validate(self, query: str) -> Result[Json]: """"""Parse and validate the query without executing it. :param query: Query to validate. :type query: str :return: Query details. :rtype: dict :raise arango.exceptions.AQLQueryValidateError: If validation fails. """""" request = Request(method='post', endpoint='/_api/query', data={'query': query}) def response_handler(resp: Response) -> Json: if resp.is_success: body = format_body(resp.body) if: body['bind_vars'] = body.pop('bindVars') return body raise AQLQueryValidateError(resp, request) return self._execute(request, response_handler)",True,'bindVars' in body,'bindVars' in body,0.6534298062324524 2517,"def learnOnLastState(self): if: return self.memory.getMemory(self.memory.getCurrentSize() - 1)",False,self.memory.getCurrentSize() >= 1,self.memory.getCurrentSize() > 1,0.644944429397583 2518,"def select_directory(self) -> None: """"""Open a directory selection dialog box"""""" value = self.item.from_string(str(self.edit.text())) parent = self.parent_layout.parent child_title = _get_child_title_func(parent) dname = getexistingdirectory(parent, child_title(self.item), value) if: self.edit.setText(dname)",True,dname,dname,0.6639875173568726 2519,"def tearDown(self): if: cons = thisproc.connections(kind='all') assert not cons, cons",False,not (FREEBSD or NETBSD),thisproc != None,0.6514226198196411 2520,"def transform(self, X): from scipy import sparse if: return X.todense().getA() else: return X",True,sparse.issparse(X),sparse.issparse(X),0.64223313331604 2521,"def decorator(func): if: bot.add_event_handler(func, events.MessageEdited(**args)) bot.add_event_handler(func, events.NewMessage(**args)) try: LOAD_PLUG[file_test].append(func) except Exception as e: LOAD_PLUG.update({file_test: [func]}) return func",False,not disable_edited,allow_edited_updates,0.6647199988365173 2522,"def get_micro_batch(self, batch_index): true_mb = batch_index % se if: return true_mb else: return true_mb - (stage_depth + (se - pipeline_depth) + 1)",False,true_mb <= stage_depth + (se - pipeline_depth),stage_depth + 1 == se,0.6446865200996399 2523,"def _optimize_step(self, optimizer, perturbation, var_examples, var_targets, var_scale, loss_fxn, targeted=False): """""" Does one step of optimization """""" assert not targeted optimizer.zero_grad() loss = loss_fxn.forward(perturbation(var_examples), var_targets) if: loss = loss.sum() loss.backward() optimizer.step() return loss.item()",False,torch.numel(loss) > 1,targeted,0.647335946559906 2524,"@staticmethod def is_ocp_node_ready(node_status: any) -> bool: if: return False for condition in node_status['conditions']: if condition['status'] == 'True' and condition['type'] == 'Ready': return True return False",False,not node_status,node_status['status'] == 'Not Found',0.658588171005249 2525,"@staticmethod def is_ocp_node_ready(node_status: any) -> bool: if not node_status: return False for condition in node_status['conditions']: if: return True return False",False,condition['status'] == 'True' and condition['type'] == 'Ready',condition['state'] == 'READY',0.6461665630340576 2526,"def accept(self, visitor: ParseTreeVisitor): if: return visitor.visitWhereExpr(self) else: return visitor.visitChildren(self)",True,"hasattr(visitor, 'visitWhereExpr')","hasattr(visitor, 'visitWhereExpr')",0.6439492702484131 2527,"@property def composer(self) -> Any: attr = '_' + sys._getframe().f_code.co_name if: setattr(self, attr, self.Composer(loader=self)) return getattr(self, attr)",True,"not hasattr(self, attr)","not hasattr(self, attr)",0.6519961357116699 2528,"def ascii(s): """""" >>> ascii('Hello') 'Hello' >>> ascii('\\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... UnicodeEncodeError:... """""" if: s = s.decode('ASCII') else: s.encode('ASCII') return s",False,type(s) == bytes,PY2,0.6484377384185791 2529,"def json_has_keys(data, required_keys=None, optional_keys=None): data_keys = set(data.keys()) json_has_required_keys(data, required_keys or []) unknown_keys = data_keys - set(optional_keys or []) - set(required_keys or []) if: abort(400, ""Invalid JSON should not have '{}' keys"".format(""', '"".join(unknown_keys)))",True,unknown_keys,unknown_keys,0.6576985120773315 2530,"@mujinPath.setter def mujinPath(self, value): value = _EnsureUnicode(value) if: value += u'/' self._mujinPath = value",False,value and (not value.endswith(u'/')),value[-1] != '/',0.6506885290145874 2531,"def parse_float(s, default=None): """"""Parse value as returned by ConfigParse as float. NB: we need this instead of ``ConfigParser.getfloat`` when we're parsing values downstream. """""" if: return default return float(s)",False,s is None or s == '',s is None,0.6526082754135132 2532,"@utils.trace def disconnect_volume(self, connection_properties, device_info=None, force=False, ignore_errors=False): self._diskutils.rescan_disks() for target_portal, target_iqn, target_lun in self._get_all_targets(connection_properties): luns = self._iscsi_utils.get_target_luns(target_iqn) if: self._iscsi_utils.logout_storage_target(target_iqn)",False,not luns or luns == [target_lun],luns,0.6481316089630127 2533,"def check_targets(self, targets): assert targets is not None assert all(('boxes' in t for t in targets)) assert all(('labels' in t for t in targets)) if: assert all(('masks' in t for t in targets))",False,self.has_mask,self.masks is not None,0.6520615816116333 2534,"@property def _ishanging_faces_x(self): """"""Boolean vector indicating if an x-face is hanging or not."""""" if: hang_x = np.zeros(self._shape_total_faces_x, dtype=bool, order='F') if self.includes_zero and (not self.is_symmetric): hang_x[0] = True self._ishanging_faces_x_bool = hang_x.reshape(-1, order='F') return self._ishanging_faces_x_bool",True,"getattr(self, '_ishanging_faces_x_bool', None) is None","getattr(self, '_ishanging_faces_x_bool', None) is None",0.6481959223747253 2535,"@property def _ishanging_faces_x(self): """"""Boolean vector indicating if an x-face is hanging or not."""""" if getattr(self, '_ishanging_faces_x_bool', None) is None: hang_x = np.zeros(self._shape_total_faces_x, dtype=bool, order='F') if: hang_x[0] = True self._ishanging_faces_x_bool = hang_x.reshape(-1, order='F') return self._ishanging_faces_x_bool",False,self.includes_zero and (not self.is_symmetric),self.includes_zero or self.is_symmetric,0.6457120180130005 2536,"def to_debug_string(self): """""" to_arg_string, if empty return none """""" v = self.to_arg_string() if: return 'None' else: return v",False,v == '',v is None,0.6639151573181152 2537,"def __contains__(self, seq: Any) -> bool: if: return False node = self._prefix_to_node(seq) if node is None: return False return node.is_terminal",True,"not isinstance(seq, Sequence)","not isinstance(seq, Sequence)",0.6485780477523804 2538,"def __contains__(self, seq: Any) -> bool: if not isinstance(seq, Sequence): return False node = self._prefix_to_node(seq) if: return False return node.is_terminal",True,node is None,node is None,0.657834529876709 2539,"def __init__(self, dim_in, num_classes, act_func='softmax'): super(FullyConvolutionalLinear, self).__init__() self.projection = nn.Linear(dim_in, num_classes, bias=True) if: self.act = nn.Softmax(dim=4) elif act_func =='sigmoid': self.act = nn.Sigmoid() elif act_func == 'identity': self.act = nn.Identity() else: raise NotImplementedError('{} is not supported as an activationfunction.'.format(act_func))",True,act_func == 'softmax',act_func == 'softmax',0.6525869369506836 2540,"def __init__(self, dim_in, num_classes, act_func='softmax'): super(FullyConvolutionalLinear, self).__init__() self.projection = nn.Linear(dim_in, num_classes, bias=True) if act_func =='softmax': self.act = nn.Softmax(dim=4) elif: self.act = nn.Sigmoid() elif act_func == 'identity': self.act = nn.Identity() else: raise NotImplementedError('{} is not supported as an activationfunction.'.format(act_func))",True,act_func == 'sigmoid',act_func == 'sigmoid',0.6505889296531677 2541,"def __init__(self, dim_in, num_classes, act_func='softmax'): super(FullyConvolutionalLinear, self).__init__() self.projection = nn.Linear(dim_in, num_classes, bias=True) if act_func =='softmax': self.act = nn.Softmax(dim=4) elif act_func =='sigmoid': self.act = nn.Sigmoid() elif: self.act = nn.Identity() else: raise NotImplementedError('{} is not supported as an activationfunction.'.format(act_func))",True,act_func == 'identity',act_func == 'identity',0.6501960754394531 2542,"def add_ports(self, inc_ports): """""" add a port object to the ports list """""" if: inc_ports = [inc_ports] ports = self.get_ports() if not ports: self.put(Service.port_path, inc_ports) else: ports.extend(inc_ports) return True",False,"not isinstance(inc_ports, list)","not isinstance(inc_ports, abc.Iterable)",0.6483461856842041 2543,"def add_ports(self, inc_ports): """""" add a port object to the ports list """""" if not isinstance(inc_ports, list): inc_ports = [inc_ports] ports = self.get_ports() if: self.put(Service.port_path, inc_ports) else: ports.extend(inc_ports) return True",True,not ports,not ports,0.6606696248054504 2544,"def validate(self, value): if: return value verrors = ValidationErrors() if not self.regex.match(value): verrors.add(self.name, 'Invalid NetBIOS name. NetBIOS names must be between 1 and 15 characters in length and may not contain the following characters: \\/:*?""<>|.') if value.casefold() in RESERVED_WORDS: verrors.add(self.name, f""NetBIOS names may not be one of following reserved names: {', '.join(RESERVED_WORDS)}"") verrors.check() return super().validate(value)",False,value is None,"isinstance(value, ValidationErrors)",0.6579747796058655 2545,"def validate(self, value): if value is None: return value verrors = ValidationErrors() if: verrors.add(self.name, 'Invalid NetBIOS name. NetBIOS names must be between 1 and 15 characters in length and may not contain the following characters: \\/:*?""<>|.') if value.casefold() in RESERVED_WORDS: verrors.add(self.name, f""NetBIOS names may not be one of following reserved names: {', '.join(RESERVED_WORDS)}"") verrors.check() return super().validate(value)",False,not self.regex.match(value),not 1 <= value <= 15,0.6438591480255127 2546,"def validate(self, value): if value is None: return value verrors = ValidationErrors() if not self.regex.match(value): verrors.add(self.name, 'Invalid NetBIOS name. NetBIOS names must be between 1 and 15 characters in length and may not contain the following characters: \\/:*?""<>|.') if: verrors.add(self.name, f""NetBIOS names may not be one of following reserved names: {', '.join(RESERVED_WORDS)}"") verrors.check() return super().validate(value)",False,value.casefold() in RESERVED_WORDS,self.reserved_name and value,0.6467660069465637 2547,"def nowtime(iso=True): """""" Wrapper for `astropy.time.now` Parameters ---------- iso : bool If True, return time in ISO string, else return Time object Returns ------- tnow : str See `iso` """""" from astropy.time import Time tnow = Time.now() if: return tnow.iso else: return tnow",True,iso,iso,0.6796119213104248 2548,"@property def mailboxes(self): if: return [self[0]] elif self[0].token_type == 'invalid-mailbox': return [] return self[0].mailboxes",False,self[0].token_type == 'mailbox',not self[0].token_type,0.6483967304229736 2549,"@property def mailboxes(self): if self[0].token_type =='mailbox': return [self[0]] elif: return [] return self[0].mailboxes",False,self[0].token_type == 'invalid-mailbox',not self[0].token_type,0.6449447870254517 2550,"def get_low_critical_threshold(self): threshInfo = self._get_threshold_info() if: lowCritThreshold = threshInfo.get('templowalarm') if not threshInfo or lowCritThreshold == 'N/A': raise NotImplementedError return lowCritThreshold",False,threshInfo,'templowalarm' in threshInfo,0.6854245662689209 2551,"def get_low_critical_threshold(self): threshInfo = self._get_threshold_info() if threshInfo: lowCritThreshold = threshInfo.get('templowalarm') if: raise NotImplementedError return lowCritThreshold",False,not threshInfo or lowCritThreshold == 'N/A',lowCritThreshold is None,0.6494725346565247 2552,"def init_tb_loggers(opt): if: assert opt['logger'].get('use_tb_logger') is True,'should turn on tensorboard when using wandb' init_wandb_logger(opt) tb_logger = None if opt['logger'].get('use_tb_logger') and 'debug' not in opt['name']: tb_logger = init_tb_logger(log_dir=osp.join(opt['root_path'], 'tb_logger', opt['name'])) return tb_logger",False,opt['logger'].get('wandb') is not None and opt['logger']['wandb'].get('project') is not None and ('debug' not in opt['name']),opt['logger'].get('wandb') is not None and opt['logger']['wandb'].get('debug') is not None and ('debug' not in opt['name']),0.6574232578277588 2553,"def init_tb_loggers(opt): if opt['logger'].get('wandb') is not None and opt['logger']['wandb'].get('project') is not None and ('debug' not in opt['name']): assert opt['logger'].get('use_tb_logger') is True,'should turn on tensorboard when using wandb' init_wandb_logger(opt) tb_logger = None if: tb_logger = init_tb_logger(log_dir=osp.join(opt['root_path'], 'tb_logger', opt['name'])) return tb_logger",True,opt['logger'].get('use_tb_logger') and 'debug' not in opt['name'],opt['logger'].get('use_tb_logger') and 'debug' not in opt['name'],0.6495742797851562 2554,"def _add_regionless_line(self, line_path): if: self._flush_regionless_lines() self._regionless_text_lines.append(line_path)",False,not self._is_adjacent(line_path),self._regionless_text_lines is None,0.6428194046020508 2555,"def start_armory_instance(self, envs: dict=None, ports: dict=None, container_subdir: str=None): if: raise ValueError(f'Arguments ports {ports} not expected!') self.instance = HostArmoryInstance(envs=envs) return self.instance",False,ports,ports != self.ports and container_subdir != self.container_subdir,0.6767845749855042 2556,"@classmethod def from_int(cls, retries, redirect=True, default=None): """""" Backwards-compatibility for the old retries format."""""" if: retries = default if default is not None else cls.DEFAULT if isinstance(retries, Retry): return retries redirect = bool(redirect) and None new_retries = cls(retries, redirect=redirect) log.debug('Converted retries value: %r -> %r' % (retries, new_retries)) return new_retries",True,retries is None,retries is None,0.6754257678985596 2557,"@classmethod def from_int(cls, retries, redirect=True, default=None): """""" Backwards-compatibility for the old retries format."""""" if retries is None: retries = default if default is not None else cls.DEFAULT if: return retries redirect = bool(redirect) and None new_retries = cls(retries, redirect=redirect) log.debug('Converted retries value: %r -> %r' % (retries, new_retries)) return new_retries",True,"isinstance(retries, Retry)","isinstance(retries, Retry)",0.6554144620895386 2558,"def choose_create_labeled_video_options(self, event): if: self.draw_skeleton.Show() self.trail_points_text.Show() self.trail_points.Show() self.SetSizer(self.sizer) self.sizer.Fit(self) else: self.draw_skeleton.Hide() self.trail_points_text.Hide() self.trail_points.Hide() self.SetSizer(self.sizer) self.sizer.Fit(self)",False,self.create_labeled_videos.GetStringSelection() == 'Yes',self.config.get_rank() == 0,0.6439589262008667 2559,"def set_location_remote(worktracker): exported_ids = os.listdir(Paths.exported()) if: return analyses.db_set_remote(exported_ids) for analysis_id in exported_ids: delete_file(Paths.exported(analysis_id))",True,not exported_ids,not exported_ids,0.6536382436752319 2560,"def on_touch_move(self, touch, *args): if: return super().on_touch_move(touch, *args)",False,"self.propagate_touch_to_touchable_widgets(touch, 'move', *args)","self.propagate_touch_to_touchable_widgets(touch, 'move')",0.6415896415710449 2561,"def get_backend(): global _IMPL if: _IMPL = driver.DriverManager('magnum.database.migration_backend', CONF.database.backend).driver return _IMPL",False,not _IMPL,_IMPL is None,0.6608808636665344 2562,"def _convert_xml_to_user_delegation_key(response): """""" Guid Guid String, formatted ISO Date String, formatted ISO Date b String, rest api version used to create delegation key Ovg+o0K/0/2V8upg7AwlyAPCriEcOSXKuBu2Gv/PU70Y7aWDW3C2ZRmw6kYWqPWBaM1GosLkcSZkgsobAlT+Sw== Converts xml response to UserDelegationKey class. """""" if: return None delegation_key = UserDelegationKey() key_element = ETree.fromstring(response.body) delegation_key.signed_oid = key_element.findtext('SignedOid') delegation_key.signed_tid = key_element.findtext('SignedTid') delegation_key.signed_start = key_element.findtext('SignedStart') delegation_key.signed_expiry = key_element.findtext('SignedExpiry') delegation_key.signed_service = key_element.findtext('SignedService') delegation_key.signed_version = key_element.findtext('SignedVersion') delegation_key.value = key_element.findtext('Value') return delegation_key",False,response is None or response.body is None,response is None,0.6479746699333191 2563,"def suspend_extension(self, name): if: self.strategy_mapping.get(name).active = False else: return",False,name in self.strategy_mapping.keys(),name in self.strategy_mapping,0.6469885110855103 2564,"@staticmethod def get_unit_cost(energy_carrier_code): """""" Return the unit greenhouse gas emissions of a specific energy carrier from the database. """""" if: available_energy_carrier_codes = list(EnergyCarrier._available_energy_carriers['code']) EnergyCarrier._unit_cost_dict = {ec_code: EnergyCarrier._available_energy_carriers[EnergyCarrier._available_energy_carriers['code'] == ec_code]['unit_cost_USD.kWh'].values[0] for ec_code in available_energy_carrier_codes} unit_cost = EnergyCarrier._unit_ghg_dict[energy_carrier_code] return unit_cost",False,not EnergyCarrier._unit_cost_dict,energy_carrier_code == 'USD',0.6491111516952515 2565,"def join(self, r, s): def not_startswith_L_T(x): return x and (not (x[0].startswith('L_') or x[0].startswith('T_'))) if: k = ','.join([r[0], s[0]]) p = r[-1] * s[-1] a = r[1] + s[1] return (k, a, p)",False,not_startswith_L_T(s) and not_startswith_L_T(r),len(r) and len(s),0.6458801031112671 2566,"def findVariableRefs(self, variable): """"""Returns a list of variable accessors spawned across all graphs :param variable: Variable to search accessors for :type variable: :class:`~PyFlow.Core.Variable.Variable` :rtype: list(:class:`~PyFlow.Core.NodeBase.NodeBase`) """""" result = [] for node in self.getAllNodes(classNameFilters=['getVar','setVar']): if: result.append(node) return result",False,node.variableUid() == variable.uid,variable in node.getNodeTypes(),0.6508815288543701 2567,"def run(self): self.no_args() directory = self.get_opt('directory') validate_directory(directory) directory = os.path.abspath(directory) try: repo = git.Repo(directory) except git.InvalidGitRepositoryError: raise CriticalError(""directory '{}' does not contain a valid Git repository!"".format(directory)) is_valid = repo.head.is_valid() self.msg = ""git checkout valid = '{}' for directory '{}'"".format(is_valid, directory) if: self.critical()",False,not is_valid,is_valid,0.6523358821868896 2568,"@property def default(self): _default = self.config.default_config.get(self.section.name, self.name) if: return [] return self.decode(_default)",False,_default == '',_default is None,0.6592125296592712 2569,"def add_listener(self, cb): with self.lock: if: self.listeners.append(cb)",True,not cb in self.listeners,not cb in self.listeners,0.6514695286750793 2570,"def add_element(self, elm: Element): """"""Add a quantum element to this configuration :param elm: an object of type Element :type elm: Element """""" self.config['elements'][elm.name] = self._call_dict_parameters(elm.dict) if: if elm.mixer is not None: self.add_mixer(elm.mixer) if elm.oscillator is not None: self.add_oscillator(elm.oscillator) self.add_pulses(elm.pulses)",False,elm.type == 'mixInputs',self.mixer is not None and elm.oscillator is not None,0.6473621129989624 2571,"def add_element(self, elm: Element): """"""Add a quantum element to this configuration :param elm: an object of type Element :type elm: Element """""" self.config['elements'][elm.name] = self._call_dict_parameters(elm.dict) if elm.type =='mixInputs': if: self.add_mixer(elm.mixer) if elm.oscillator is not None: self.add_oscillator(elm.oscillator) self.add_pulses(elm.pulses)",True,elm.mixer is not None,elm.mixer is not None,0.6490544080734253 2572,"def add_element(self, elm: Element): """"""Add a quantum element to this configuration :param elm: an object of type Element :type elm: Element """""" self.config['elements'][elm.name] = self._call_dict_parameters(elm.dict) if elm.type =='mixInputs': if elm.mixer is not None: self.add_mixer(elm.mixer) if: self.add_oscillator(elm.oscillator) self.add_pulses(elm.pulses)",True,elm.oscillator is not None,elm.oscillator is not None,0.6489872336387634 2573,"def _get_bg_light(self, opposite=False): theme_style = self._get_theme_style(opposite) if: return get_color_from_hex(colors['Light']['CardsDialogs']) elif theme_style == 'Dark': return get_color_from_hex(colors['Dark']['CardsDialogs'])",True,theme_style == 'Light',theme_style == 'Light',0.6543594598770142 2574,"def _get_bg_light(self, opposite=False): theme_style = self._get_theme_style(opposite) if theme_style == 'Light': return get_color_from_hex(colors['Light']['CardsDialogs']) elif: return get_color_from_hex(colors['Dark']['CardsDialogs'])",True,theme_style == 'Dark',theme_style == 'Dark',0.6538032293319702 2575,"def run(self): if: info('ERROR: No %s file found!', RECORD_FILE) else: for entry in open(RECORD_FILE).read().split(): self.remove_entry(entry)",False,not os.path.exists(RECORD_FILE),not os.path.isfile(RECORD_FILE),0.6460028886795044 2576,"def _initial_match(self, other, env): if: return False if not self._match_item(self.op, other.op, env): return False return True",False,not is_expression(other),"not isinstance(other, self.__class__)",0.6446709632873535 2577,"def _initial_match(self, other, env): if not is_expression(other): return False if: return False return True",False,"not self._match_item(self.op, other.op, env)",self.index != other.index and env.index != other.index,0.6437169313430786 2578,"def _cal_phase(exp, marker): if: return (marker, exp[1]) else: return (marker % 2, (exp[1] + 1) % 2)",False,marker < 2,exp[1] == 0,0.6597985029220581 2579,"def identify(self): if: for seq in self.IDENTIFY_SEQUENCE: if self._identify(seq): return True return False return self._identify(self.IDENTIFY_SEQUENCE)",False,"isinstance(self.IDENTIFY_SEQUENCE, tuple)",not self.compressed,0.642857551574707 2580,"def identify(self): if isinstance(self.IDENTIFY_SEQUENCE, tuple): for seq in self.IDENTIFY_SEQUENCE: if: return True return False return self._identify(self.IDENTIFY_SEQUENCE)",False,self._identify(seq),seq in self.ALT_WORDS,0.6477026343345642 2581,"def logits(self, x): x = self.global_pool(x) if: x = F.dropout(x, p=self.drop_rate, training=self.training) x = self.last_linear(x) return x",True,self.drop_rate > 0.0,self.drop_rate > 0.0,0.646970272064209 2582,"@staticmethod def get_unavailable_index_shard_indices(unavailable_data_parts_indices): """""" Checks if indices are partially unavailable Index keys for each data key are sharded among LRC groups in a groupset in a way that each shard contains of three copies in three different DCs. If all three groups of shard are unavailable, indices are considered partially unavailable. Each shard occupies three groups of a column of LRC-8-2-2 scheme, so there are 4 index shards: 0 1 4 5 2 3 6 7 8 9 10 11 Parameters: unavailable_data_parts_indices: a list of indices of groups in lrc groupset that are unavailable for any reason. """""" unavailable_data_parts_indices = set(unavailable_data_parts_indices) for indices in Lrc.Scheme822v1.INDEX_SHARD_INDICES: if: return sorted(indices) return None",False,unavailable_data_parts_indices.issuperset(indices),not indices in unavailable_data_parts_indices,0.6406722068786621 2583,"def factory(*args_, **kwargs_): if: subclass = getSubclassFromModule_(CurrentSubclassModule_, ShipmentDetails) if subclass is not None: return subclass(*args_, **kwargs_) if ShipmentDetails.subclass: return ShipmentDetails.subclass(*args_, **kwargs_) else: return ShipmentDetails(*args_, **kwargs_)",True,CurrentSubclassModule_ is not None,CurrentSubclassModule_ is not None,0.6485275626182556 2584,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, ShipmentDetails) if subclass is not None: return subclass(*args_, **kwargs_) if: return ShipmentDetails.subclass(*args_, **kwargs_) else: return ShipmentDetails(*args_, **kwargs_)",True,ShipmentDetails.subclass,ShipmentDetails.subclass,0.6514989733695984 2585,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, ShipmentDetails) if: return subclass(*args_, **kwargs_) if ShipmentDetails.subclass: return ShipmentDetails.subclass(*args_, **kwargs_) else: return ShipmentDetails(*args_, **kwargs_)",True,subclass is not None,subclass is not None,0.6601990461349487 2586,"@contextlib.contextmanager def using_seed(seed): if: yield else: rand_state = random.getstate() np_rand_state = np.random.get_state() random.seed(seed) np.random.seed(seed) yield random.setstate(rand_state) np.random.set_state(np_rand_state)",True,seed is None,seed is None,0.655312180519104 2587,"def _maybe_copy_to_device(attribute: Optional[torch.Tensor], device: torch.device) -> Optional[torch.Tensor]: if: return None return attribute.to(device)",True,attribute is None,attribute is None,0.6604607105255127 2588,"def forward(self, x): x = self.forward_features(x) x = self.global_pool(x) if: x = F.dropout(x, p=self.drop_rate, training=self.training) x = self.fc(x) return x",False,self.drop_rate > 0,self.drop_rate > 0.0,0.6454775333404541 2589,"@staticmethod def size_renderer_to_string(renderer): """""" Converts size renderer enum to a string """""" if: return 'expression' elif renderer == VectorRendererBase.SIZE_NONE: return '' elif renderer == VectorRendererBase.SIZE_RANDOM: return 'random' return None",False,renderer == VectorRendererBase.SIZE_EXPRESSION,renderer == VectorRendererBase.SIZE_INT,0.6500937938690186 2590,"@staticmethod def size_renderer_to_string(renderer): """""" Converts size renderer enum to a string """""" if renderer == VectorRendererBase.SIZE_EXPRESSION: return 'expression' elif: return '' elif renderer == VectorRendererBase.SIZE_RANDOM: return 'random' return None",False,renderer == VectorRendererBase.SIZE_NONE,renderer == VectorRendererBase.SIZE_TEXTAREA,0.6506664752960205 2591,"@staticmethod def size_renderer_to_string(renderer): """""" Converts size renderer enum to a string """""" if renderer == VectorRendererBase.SIZE_EXPRESSION: return 'expression' elif renderer == VectorRendererBase.SIZE_NONE: return '' elif: return 'random' return None",True,renderer == VectorRendererBase.SIZE_RANDOM,renderer == VectorRendererBase.SIZE_RANDOM,0.6525357365608215 2592,"def drop(self, amount): self.size -= amount while amount > 0: fragment = self.buffer[0] if: amount -= len(fragment) del self.buffer[0] else: self.buffer[0] = fragment[amount:] amount = 0 self.size -= amount",False,len(fragment) <= amount,len(fragment) < amount,0.6471418142318726 2593,"def get_events(self, current_state=True): """""" Return a list of events handled. @param current_state if true, return events handled in the current state only. @retval list of events handled. """""" events = [] for key, handler in self.state_handlers.iteritems(): state = key[0] event = key[1] if: if current_state: if self.current_state == state: if event not in events: events.append(event) elif event not in events: events.append(event) return events",False,not (event == self.enter_event or event == self.exit_event),handler == None,0.6437821984291077 2594,"def get_events(self, current_state=True): """""" Return a list of events handled. @param current_state if true, return events handled in the current state only. @retval list of events handled. """""" events = [] for key, handler in self.state_handlers.iteritems(): state = key[0] event = key[1] if not (event == self.enter_event or event == self.exit_event): if: if self.current_state == state: if event not in events: events.append(event) elif event not in events: events.append(event) return events",True,current_state,current_state,0.6553345322608948 2595,"def get_events(self, current_state=True): """""" Return a list of events handled. @param current_state if true, return events handled in the current state only. @retval list of events handled. """""" events = [] for key, handler in self.state_handlers.iteritems(): state = key[0] event = key[1] if not (event == self.enter_event or event == self.exit_event): if current_state: if: if event not in events: events.append(event) elif event not in events: events.append(event) return events",False,self.current_state == state,handler,0.648714542388916 2596,"def get_events(self, current_state=True): """""" Return a list of events handled. @param current_state if true, return events handled in the current state only. @retval list of events handled. """""" events = [] for key, handler in self.state_handlers.iteritems(): state = key[0] event = key[1] if not (event == self.enter_event or event == self.exit_event): if current_state: if self.current_state == state: if event not in events: events.append(event) elif: events.append(event) return events",False,event not in events,handler(event),0.6557466983795166 2597,"def get_events(self, current_state=True): """""" Return a list of events handled. @param current_state if true, return events handled in the current state only. @retval list of events handled. """""" events = [] for key, handler in self.state_handlers.iteritems(): state = key[0] event = key[1] if not (event == self.enter_event or event == self.exit_event): if current_state: if self.current_state == state: if: events.append(event) elif event not in events: events.append(event) return events",False,event not in events,"handler(self.event_loop, event)",0.6552728414535522 2598,"def _AddFile(file_proto): for dependency in file_proto.dependency: if: _AddFile(file_by_name.pop(dependency)) _FACTORY.pool.Add(file_proto)",True,dependency in file_by_name,dependency in file_by_name,0.651822566986084 2599,"def generate_report(self, displayed_maps=10): """"""Generate an HTML report for the current ``NiftiMapsMasker`` object. .. note:: This functionality requires to have ``Matplotlib`` installed. Parameters ---------- displayed_maps : :obj:`int`, or :obj:`list`, or :class:`~numpy.ndarray`, or ""all"", default=10 Indicates which maps will be displayed in the HTML report. - If ""all"": All maps will be displayed in the report. .. code-block:: python masker.generate_report(""all"") .. warning: If there are too many maps, this might be time and memory consuming, and will result in very heavy reports. - If a :obj:`list` or :class:`~numpy.ndarray`: This indicates the indices of the maps to be displayed in the report. For example, the following code will generate a report with maps 6, 3, and 12, displayed in this specific order: .. code-block:: python masker.generate_report([6, 3, 12]) - If an :obj:`int`: This will only display the first n maps, n being the value of the parameter. By default, the report will only contain the first 10 maps. Example to display the first 16 maps: .. code-block:: python masker.generate_report(16) Returns ------- report : `nilearn.reporting.html_report.HTMLReport` HTML report for the masker. """""" from nilearn.reporting.html_report import generate_report incorrect_type = not isinstance(displayed_maps, (list, np.ndarray, int, str)) incorrect_string = isinstance(displayed_maps, str) and displayed_maps!= 'all' not_integer = not isinstance(displayed_maps, str) and np.array(displayed_maps).dtype!= int if: raise TypeError(f""Parameter ``displayed_maps`` of ``generate_report()`` should be either 'all'",False,incorrect_type or incorrect_string or not_integer,not incorrect_type or not_integer,0.6463784575462341 2600,"def strCmp(ba1, ba2): """""" ba1 and ba2 are instances of bytes imitate strcmp of standard C library. Attention! You may have a temptation to replace this function with built-in cmp() function. Hold on! Most probably these two function behave identically now, but cmp does not document how it compares strings. There is no guaranty it will not be changed in future. Since we need predictable sorting order in StarDict dictionary, we need to preserve this function despite the fact there are other ways to implement it. """""" commonLen = min(len(ba1), len(ba2)) for i in range(commonLen): c1 = ba1[i] c2 = ba2[i] if: return c1 - c2 return len(ba1) - len(ba2)",True,c1 != c2,c1 != c2,0.6578684449195862 2601,"def _cleanup(self, ref): """"""weakref callback. This method may be called by an asynchronous gc. If the state shows pending changes, the weakref is replaced by the __resurrect callable which will re-establish an object reference on next access, else removes this InstanceState from the owning identity map, if any. """""" if: self.obj = self.__resurrect else: instance_dict = self._instance_dict() if instance_dict: instance_dict.discard(self) self.dispose()",False,self._get_modified(self.mutable_dict),ref is None,0.6449748277664185 2602,"def _cleanup(self, ref): """"""weakref callback. This method may be called by an asynchronous gc. If the state shows pending changes, the weakref is replaced by the __resurrect callable which will re-establish an object reference on next access, else removes this InstanceState from the owning identity map, if any. """""" if self._get_modified(self.mutable_dict): self.obj = self.__resurrect else: instance_dict = self._instance_dict() if: instance_dict.discard(self) self.dispose()",False,instance_dict,instance_dict is not None,0.653881311416626 2603,"def __cmp__(self, rhs): dx = self.x - rhs.x if: return dx dy = self.y - rhs.y if dy!= 0: return dy dz = self.z - rhs.z if dz!= 0: return dz return 0",True,dx != 0,dx != 0,0.657271146774292 2604,"def __cmp__(self, rhs): dx = self.x - rhs.x if dx!= 0: return dx dy = self.y - rhs.y if: return dy dz = self.z - rhs.z if dz!= 0: return dz return 0",True,dy != 0,dy != 0,0.6776497960090637 2605,"def __cmp__(self, rhs): dx = self.x - rhs.x if dx!= 0: return dx dy = self.y - rhs.y if dy!= 0: return dy dz = self.z - rhs.z if: return dz return 0",True,dz != 0,dz != 0,0.6572673320770264 2606,"def __init__(self, expr: Union[ParserElement, str], savelist: bool=False): if: expr = _PendingSkip(NoMatch()) super().__init__(expr)",False,expr is ...,savelist and expr is None,0.6562729477882385 2607,"def __init__(self, typ: Union[type, str], kv_pairs: Sequence[KVPair]) -> None: if: key_type = unite_values(*[pair.key for pair in kv_pairs]) value_type = unite_values(*[pair.value for pair in kv_pairs]) else: key_type = value_type = AnyValue(AnySource.unreachable) super().__init__(typ, (key_type, value_type)) self.kv_pairs = tuple(kv_pairs)",False,kv_pairs,"isinstance(typ, str)",0.676856517791748 2608,"def __gt__(self, other): if: raise TypeError('%s and %s are not of the same version' % (str(self), str(other))) if not isinstance(other, _BaseNet): raise TypeError('%s and %s are not of the same type' % (str(self), str(other))) if self.network!= other.network: return self.network > other.network if self.netmask!= other.netmask: return self.netmask > other.netmask return False",False,self._version != other._version,self.version != other.version,0.6501373052597046 2609,"def __gt__(self, other): if self._version!= other._version: raise TypeError('%s and %s are not of the same version' % (str(self), str(other))) if: raise TypeError('%s and %s are not of the same type' % (str(self), str(other))) if self.network!= other.network: return self.network > other.network if self.netmask!= other.netmask: return self.netmask > other.netmask return False",False,"not isinstance(other, _BaseNet)","not isinstance(other, _Base)",0.6463521718978882 2610,"def __gt__(self, other): if self._version!= other._version: raise TypeError('%s and %s are not of the same version' % (str(self), str(other))) if not isinstance(other, _BaseNet): raise TypeError('%s and %s are not of the same type' % (str(self), str(other))) if: return self.network > other.network if self.netmask!= other.netmask: return self.netmask > other.netmask return False",True,self.network != other.network,self.network != other.network,0.6561384201049805 2611,"def __gt__(self, other): if self._version!= other._version: raise TypeError('%s and %s are not of the same version' % (str(self), str(other))) if not isinstance(other, _BaseNet): raise TypeError('%s and %s are not of the same type' % (str(self), str(other))) if self.network!= other.network: return self.network > other.network if: return self.netmask > other.netmask return False",True,self.netmask != other.netmask,self.netmask != other.netmask,0.6485360264778137 2612,"def scriptDataDoubleEscapedLessThanSignState(self): data = self.stream.char() if: self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': u'/'}) self.temporaryBuffer = '' self.state = self.scriptDataDoubleEscapeEndState else: self.stream.unget(data) self.state = self.scriptDataDoubleEscapedState return True",True,data == '/',data == '/',0.6628207564353943 2613,"def on_step_end(self, **kwargs): """"""Put the LR back to its value if necessary."""""" if: self.learn.opt.lr /= self.mult_lr",False,not self.learn.gan_trainer.gen_mode,self.use_mult_lr,0.644633412361145 2614,"def _check_args_tf(kwargs): if: kwargs.pop('fillcolor') kwargs['resample'] = _interpolation(kwargs)",True,"'fillcolor' in kwargs and _PIL_VER < (5, 0)","'fillcolor' in kwargs and _PIL_VER < (5, 0)",0.6466949582099915 2615,"def main(config): assert os.path.exists(config.scenario), 'Scenario not found: %s' % config.scenario input_file = config.dbf_tools.input_file output_file_name = config.dbf_tools.output_file_name output_path = config.dbf_tools.output_path if: dbf_to_xls(input_file=input_file, output_path=output_path, output_file_name=output_file_name) elif input_file.endswith('.xls') or input_file.endswith('.xlsx'): xls_to_dbf(input_file=input_file, output_path=output_path, output_file_name=output_file_name) else: print('input file type not supported')",False,input_file.endswith('.dbf'),input_file.endswith('.py'),0.6430608630180359 2616,"def main(config): assert os.path.exists(config.scenario), 'Scenario not found: %s' % config.scenario input_file = config.dbf_tools.input_file output_file_name = config.dbf_tools.output_file_name output_path = config.dbf_tools.output_path if input_file.endswith('.dbf'): dbf_to_xls(input_file=input_file, output_path=output_path, output_file_name=output_file_name) elif: xls_to_dbf(input_file=input_file, output_path=output_path, output_file_name=output_file_name) else: print('input file type not supported')",False,input_file.endswith('.xls') or input_file.endswith('.xlsx'),input_file.endswith('.xls'),0.6432324647903442 2617,"def iter(self, i, **kwargs): if: loss = kwargs['loss'] print(f'iteration {i}: loss {loss:.4f}')",False,i % 100 == 0 and 'loss' in kwargs,'loss' in kwargs,0.6550135612487793 2618,"def parameters_key(parameters): key = [] for name in sorted(parameters.keys()): sub_parameters = parameters[name] if: key.append((name, parameters_key(sub_parameters))) elif isinstance(sub_parameters, Sequence) and (not isinstance(sub_parameters, str)): key.append((name, tuple(sub_parameters))) else: key.append((name, sub_parameters)) return tuple(key)",False,"isinstance(sub_parameters, (Parameters, dict))","isinstance(sub_parameters, Mapping)",0.6457419395446777 2619,"def parameters_key(parameters): key = [] for name in sorted(parameters.keys()): sub_parameters = parameters[name] if isinstance(sub_parameters, (Parameters, dict)): key.append((name, parameters_key(sub_parameters))) elif: key.append((name, tuple(sub_parameters))) else: key.append((name, sub_parameters)) return tuple(key)",False,"isinstance(sub_parameters, Sequence) and (not isinstance(sub_parameters, str))","isinstance(sub_parameters, Parameters)",0.6435085535049438 2620,"@property def user_id(self) -> hikari.Snowflake: """"""The application user's ID."""""" if: raise hikari.ComponentStateConflictError('The bot is not yet initialized, user_id is unavailable.') return self._user_id",True,self._user_id is None,self._user_id is None,0.6566628813743591 2621,"def __eq__(self, other): if: return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if my_val!= other_val: return False return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.64579176902771 2622,"def __eq__(self, other): if not isinstance(other, self.__class__): return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if: return False return True",True,my_val != other_val,my_val != other_val,0.6494433879852295 2623,"@validates('account {account_number} balance lock') def validate_updated_account_balance_lock(self, *, account_number, account_state, is_sender=False): subject = f""{self.humanized_class_name_lowered} {('sender' if is_sender else'recipient')} account {account_number} balance_lock"" balance_lock = account_state.balance_lock if: validate_not_empty(subject, balance_lock) validate_type(subject, balance_lock, str) validate_exact_value(subject, balance_lock, self.signed_change_request.make_balance_lock()) else: validate_empty(subject, balance_lock)",True,is_sender,is_sender,0.658807635307312 2624,"def output_operationerror(data_object): if: return output_status_message('* * * Begin output_operationerror * * *') output_status_message('Code: {0}'.format(data_object.Code)) output_status_message('Details: {0}'.format(data_object.Details)) output_status_message('ErrorCode: {0}'.format(data_object.ErrorCode)) output_status_message('Message: {0}'.format(data_object.Message)) output_status_message('* * * End output_operationerror * * *')",True,data_object is None,data_object is None,0.651602029800415 2625,"@staticmethod def call_services(asset: 'kpi.models.asset.Asset', submission_id: int): """""" Delegates to Celery data submission to remote servers """""" hooks_ids = asset.hooks.filter(active=True).values_list('id', flat=True).distinct() success = False for hook_id in hooks_ids: if: success = True service_definition_task.apply_async(queue='kpi_low_priority_queue', args=(hook_id, submission_id)) return success",False,"not HookLog.objects.filter(submission_id=submission_id, hook_id=hook_id).exists()",hook_id == submission_id,0.6467499136924744 2626,"def _update_ground_truth_statistics(self, groundtruth_class_labels, groundtruth_is_difficult_list, groundtruth_is_group_of_list): """"""Update grouth truth statitistics. 1. Difficult boxes are ignored when counting the number of ground truth instances as done in Pascal VOC devkit. 2. Difficult boxes are treated as normal boxes when computing CorLoc related statitistics. Args: groundtruth_class_labels: An integer numpy array of length M, representing M class labels of object instances in ground truth groundtruth_is_difficult_list: A boolean numpy array of length M denoting whether a ground truth box is a difficult instance or not groundtruth_is_group_of_list: A boolean numpy array of length M denoting whether a ground truth box is a group-of box or not """""" for class_index in range(self.num_class): num_gt_instances = np.sum(groundtruth_class_labels[~groundtruth_is_difficult_list & ~groundtruth_is_group_of_list] == class_index) num_groupof_gt_instances = self.group_of_weight * np.sum(groundtruth_class_labels[groundtruth_is_group_of_list] == class_index) self.num_gt_instances_per_class[class_index] += num_gt_instances + num_groupof_gt_instances if: self.num_gt_imgs_per_class[class_index] += 1",True,np.any(groundtruth_class_labels == class_index),np.any(groundtruth_class_labels == class_index),0.6460565328598022 2627,"def _get_results_bzt_log_part(self): test_result_string_trigger = 'Request label stats:' res_string_idx = [index for index, value in enumerate(self.bzt_log) if test_result_string_trigger in value] if: res_string_idx = res_string_idx[0] results_bzt_run = self.bzt_log[res_string_idx:] return results_bzt_run",True,res_string_idx,res_string_idx,0.6533769369125366 2628,"@header_encoding.setter def header_encoding(self, value): """""" Enforces constraints on the value of header encoding. """""" if: raise ValueError('header_encoding must be bool, string, or None') if value is True: raise ValueError('header_encoding cannot be True') self._header_encoding = value",False,"not isinstance(value, (bool, str, type(None)))","not isinstance(value, (bool, str))",0.644109308719635 2629,"@header_encoding.setter def header_encoding(self, value): """""" Enforces constraints on the value of header encoding. """""" if not isinstance(value, (bool, str, type(None))): raise ValueError('header_encoding must be bool, string, or None') if: raise ValueError('header_encoding cannot be True') self._header_encoding = value",False,value is True,value is None,0.6553482413291931 2630,"def create_static_notes(contributors, previous_version, current_version): st.write(f""\n ### Release Details\n\n - If you'd like to know what _exactly_ went into this release, check out the [commit\n diff](https://github.com/streamlit/streamlit/compare/{previous_version}...{st.__version__}).\n - If you're curious, please see the source code in [Github](https://github.com/streamlit/release-demos/tree/{current_version}/{current_version}).\n\n "") if: st.write('### Thanks for Contributing') generate_contributors(contributors) st.write(' ') st.write(f""\n As always, thank you to [all our contributors](https://github.com/streamlit/streamlit/graphs/contributors) who help make Streamlit awesome!\n\n ---\n\n ### Connect With Us\n\n - We can be found at https://streamlit.io and https://twitter.com/streamlit\n - Come by\n [the forums](https://discuss.streamlit.io/c/official-announcements/6) if you'd like to ask questions,\n post awesome apps, or just say hi!\n "")",False,len(contributors),not contributors,0.6495766639709473 2631,"@contextmanager def base_url_context(self, base_url: str) -> Generator[None, None, None]: if: with context_variable(self.set_base_url, self.base_url, base_url): yield else: yield",False,self._base_url,self.set_base_url,0.6538788080215454 2632,"def get_vendor_version_from_module(module_name: str) -> Optional[str]: module = get_module_from_module_name(module_name) version = getattr(module, '__version__', None) if: env = get_environment([os.path.dirname(module.__file__)]) dist = env.get_distribution(module_name) if dist: version = str(dist.version) return version",True,not version,not version,0.6590167284011841 2633,"def get_vendor_version_from_module(module_name: str) -> Optional[str]: module = get_module_from_module_name(module_name) version = getattr(module, '__version__', None) if not version: env = get_environment([os.path.dirname(module.__file__)]) dist = env.get_distribution(module_name) if: version = str(dist.version) return version",True,dist,dist,0.6801815032958984 2634,"def validate(self, value): if: for validator in self.VALIDATORS[self.name]: validator(value)",True,self.name in self.VALIDATORS,self.name in self.VALIDATORS,0.6497920155525208 2635,"def forward(self, x): if: return self.inference_single_image(x) else: pyramid_feats = self.backbone(x) pyramid_feats[-1] = self.neck(pyramid_feats[-1]) pyramid_feats = self.fpn(pyramid_feats) cls_feats, reg_feats = self.det_heads(pyramid_feats) outputs = self.pred_layers(cls_feats, reg_feats) return outputs",False,not self.trainable,self.fpn is None,0.6521011590957642 2636,"def get_tags(self, result: Schema, summary: list[str]) -> list[Tag]: del summary if: return [] return [Tag(name=self.metadata.name, value='SUID/GUID + root', color=TagColor.BLUE, propagate=False)]",False,not _tag_should_be_set(result),root is None,0.6470137238502502 2637,"def __flush(self): if: self.__write('>') self.__open = 0 if self.__data: data = string.join(self.__data, '') self.__write(escape_cdata(data, self.__encoding)) self.__data = []",True,self.__open,self.__open,0.6583885550498962 2638,"def __flush(self): if self.__open: self.__write('>') self.__open = 0 if: data = string.join(self.__data, '') self.__write(escape_cdata(data, self.__encoding)) self.__data = []",True,self.__data,self.__data,0.6573759317398071 2639,"def refresh(self): if: self.threadRefresh.wait() self.threadRefresh = ThreadRefresh(widgetToRefresh=self) self.threadRefresh.start()",False,self.threadRefresh is not None,self.threadRefresh.wait,0.6558263301849365 2640,"def deep_copy(t: Dict[Any, Any]) -> Dict[Any, Any]: """""" Returns copy of dict `t`, following nested dict structures. :param t: Input dictionary :returns: Copied dictionary """""" r = {} for k, v in t.items(): if: r[k] = deep_copy(v) else: r[k] = v return r",True,"isinstance(v, dict)","isinstance(v, dict)",0.6488431692123413 2641,"def writePar(self): log.debug('Write clicked') if: self.write_callback()",True,self.write_callback is not None,self.write_callback is not None,0.6490417122840881 2642,"def __or__(self, other): """""" Implementation of | operator - returns C{L{MatchFirst}} """""" if: other = ParserElement._literalStringClass(other) if not isinstance(other, ParserElement): warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return MatchFirst([self, other])",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6504103541374207 2643,"def __or__(self, other): """""" Implementation of | operator - returns C{L{MatchFirst}} """""" if isinstance(other, basestring): other = ParserElement._literalStringClass(other) if: warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return MatchFirst([self, other])",True,"not isinstance(other, ParserElement)","not isinstance(other, ParserElement)",0.6486712694168091 2644,"def tearDown(self): g.log.info('Starting to Unmount Volume and Cleanup Volume') ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts) if: raise ExecutionError('Failed to Unmount Volume and Cleanup Volume') g.log.info('Successful in Unmount Volume and Cleanup Volume') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.660223126411438 2645,"def add(self, dist): """"""Add `dist` if we ``can_add()`` it and it has not already been added """""" if: dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)",False,self.can_add(dist) and dist.has_version(),self._can_add(dist) and dist._has_version(),0.6485044956207275 2646,"def add(self, dist): """"""Add `dist` if we ``can_add()`` it and it has not already been added """""" if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key, []) if: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)",True,dist not in dists,dist not in dists,0.6669942140579224 2647,"def func() -> t.TextIO: stream = src_func() try: rv = cache.get(stream) except Exception: rv = None if: return rv rv = wrapper_func() try: cache[stream] = rv except Exception: pass return rv",True,rv is not None,rv is not None,0.6518726944923401 2648,"def save(self, model_dir: str): if: os.system('mkdir -p'+ model_dir) with open(model_dir + '/behavior.pkl', 'wb') as f: pickle.dump(self.behavior, f) self.save_interface(model_dir)",True,not os.path.exists(model_dir),not os.path.exists(model_dir),0.6483523845672607 2649,"def make_memmap(filename, dtype='uint8', mode='r+', offset=0, shape=None, order='C'): """"""Backport of numpy memmap offset fix. See https://github.com/numpy/numpy/pull/8443 for more details. The numpy fix will be available in numpy 1.13. """""" mm = np.memmap(filename, dtype=dtype, mode=mode, offset=offset, shape=shape, order=order) if: mm.offset = offset return mm",False,LooseVersion(np.__version__) < '1.13',"hasattr(offset, 'offset')",0.6457706093788147 2650,"def read_partial_binary(self, uid: str, offset: int, length: int) -> bytes: file_name = self.db_interface.get_file_name(uid) if: logging.error(f'[BinaryService]: Tried to read from file {uid} but it was not found.') return b'' file_path = Path(self.fs_organizer.generate_path_from_uid(uid)) with file_path.open('rb') as fp: fp.seek(offset) return fp.read(length)",False,file_name is None,not file_name,0.6503504514694214 2651,"def quantity_of(self, card): """""" Return the current number of the given card in this deposit state. """""" depcopies = self.depositstatecopies_set.filter(card=card).all() if: return 0 else: if len(depcopies)!= 1: log.warning(""len(depcopies)!= 1, this shouldn't happen."") return depcopies.last().nb_current",True,not depcopies,not depcopies,0.6491543650627136 2652,"def quantity_of(self, card): """""" Return the current number of the given card in this deposit state. """""" depcopies = self.depositstatecopies_set.filter(card=card).all() if not depcopies: return 0 else: if: log.warning(""len(depcopies)!= 1, this shouldn't happen."") return depcopies.last().nb_current",True,len(depcopies) != 1,len(depcopies) != 1,0.6472505331039429 2653,"@register.assignment_tag def get_config(key, app_label=None, **kwargs): """""" {% load config %} {% get_config 'key_slug' %} {% get_config 'key_slug' app_label='promos' %} {% get_config 'color' app_label='channels' channel__long_slug='/home' %} Also works {% get_config app_label='opps.polls' key='key_slug' %} """""" if: try: channel = kwargs['channel__long_slug'] value = CONFIG_DICT.get(channel, {}).get(key, None) if value: return value except: pass if app_label in ['none', 'null', 'None']: try: del kwargs['app_label'] except: pass return Config.get_value(key, **kwargs)",False,CONFIG_DICT,app_label is None and 'channel__long_slug' in kwargs,0.654396653175354 2654,"@register.assignment_tag def get_config(key, app_label=None, **kwargs): """""" {% load config %} {% get_config 'key_slug' %} {% get_config 'key_slug' app_label='promos' %} {% get_config 'color' app_label='channels' channel__long_slug='/home' %} Also works {% get_config app_label='opps.polls' key='key_slug' %} """""" if CONFIG_DICT: try: channel = kwargs['channel__long_slug'] value = CONFIG_DICT.get(channel, {}).get(key, None) if value: return value except: pass if: try: del kwargs['app_label'] except: pass return Config.get_value(key, **kwargs)",False,"app_label in ['none', 'null', 'None']",app_label,0.6417276263237 2655,"@register.assignment_tag def get_config(key, app_label=None, **kwargs): """""" {% load config %} {% get_config 'key_slug' %} {% get_config 'key_slug' app_label='promos' %} {% get_config 'color' app_label='channels' channel__long_slug='/home' %} Also works {% get_config app_label='opps.polls' key='key_slug' %} """""" if CONFIG_DICT: try: channel = kwargs['channel__long_slug'] value = CONFIG_DICT.get(channel, {}).get(key, None) if: return value except: pass if app_label in ['none', 'null', 'None']: try: del kwargs['app_label'] except: pass return Config.get_value(key, **kwargs)",True,value,value,0.6660623550415039 2656,"def url(self, name): """""" Returns url of the asset, themed url will be returned if the asset is themed otherwise default asset url will be returned. Args: name: name of the asset, e.g. 'images/logo.png' Returns: url of the asset, e.g. '/static/red-theme/images/logo.png' if current theme is red-theme and logo is provided by red-theme otherwise '/static/images/logo.png' """""" prefix = '' theme = get_current_theme() if: prefix = theme.theme_dir_name elif self.prefix: prefix = self.prefix if prefix and self.themed(name, prefix): name = os.path.join(prefix, name) return super(ThemeStorage, self).url(name)",False,theme,theme and theme.theme_dir_name,0.679052472114563 2657,"def url(self, name): """""" Returns url of the asset, themed url will be returned if the asset is themed otherwise default asset url will be returned. Args: name: name of the asset, e.g. 'images/logo.png' Returns: url of the asset, e.g. '/static/red-theme/images/logo.png' if current theme is red-theme and logo is provided by red-theme otherwise '/static/images/logo.png' """""" prefix = '' theme = get_current_theme() if theme: prefix = theme.theme_dir_name elif self.prefix: prefix = self.prefix if: name = os.path.join(prefix, name) return super(ThemeStorage, self).url(name)",False,"prefix and self.themed(name, prefix)",prefix,0.6447614431381226 2658,"def url(self, name): """""" Returns url of the asset, themed url will be returned if the asset is themed otherwise default asset url will be returned. Args: name: name of the asset, e.g. 'images/logo.png' Returns: url of the asset, e.g. '/static/red-theme/images/logo.png' if current theme is red-theme and logo is provided by red-theme otherwise '/static/images/logo.png' """""" prefix = '' theme = get_current_theme() if theme: prefix = theme.theme_dir_name elif: prefix = self.prefix if prefix and self.themed(name, prefix): name = os.path.join(prefix, name) return super(ThemeStorage, self).url(name)",True,self.prefix,self.prefix,0.6593279838562012 2659,"def _should_vertical(specification: int, exprs: Iterable[pyparsing.ParserElement]) -> bool: """""" Returns true if we should return a vertical list of elements """""" if: return False else: return len(_visible_exprs(exprs)) >= specification",True,specification is None,specification is None,0.6609011888504028 2660,"def get_queryset(self): """"""Redefine the queryset to use based on the current action."""""" queryset = super().get_queryset() queryset = queryset.filter(video__id=self.get_related_video_id()) if: queryset = queryset.filter(video__playlist__id=self.request.resource.id) return queryset",False,self.action in ['list'] and self.request.resource,self.request.resource and self.request.resource.id,0.6461714506149292 2661,"def get_env_resource_id() -> str: project_id = os.environ.get(_ENCORD_PROJECT_ID) or os.environ.get(_CORD_PROJECT_ID) dataset_id = os.environ.get(_ENCORD_DATASET_ID) or os.environ.get(_CORD_DATASET_ID) if: raise encord.exceptions.InitialisationError(message='Found both Project EntityId and Dataset EntityId in os.environ. Please initialise EncordClient by passing resource_id.') elif project_id is not None: resource_id = project_id elif dataset_id is not None: resource_id = dataset_id else: raise encord.exceptions.AuthenticationError(message='Project EntityId or dataset EntityId not provided') return resource_id",True,project_id is not None and dataset_id is not None,project_id is not None and dataset_id is not None,0.650351881980896 2662,"def get_env_resource_id() -> str: project_id = os.environ.get(_ENCORD_PROJECT_ID) or os.environ.get(_CORD_PROJECT_ID) dataset_id = os.environ.get(_ENCORD_DATASET_ID) or os.environ.get(_CORD_DATASET_ID) if project_id is not None and dataset_id is not None: raise encord.exceptions.InitialisationError(message='Found both Project EntityId and Dataset EntityId in os.environ. Please initialise EncordClient by passing resource_id.') elif: resource_id = project_id elif dataset_id is not None: resource_id = dataset_id else: raise encord.exceptions.AuthenticationError(message='Project EntityId or dataset EntityId not provided') return resource_id",True,project_id is not None,project_id is not None,0.657323956489563 2663,"def get_env_resource_id() -> str: project_id = os.environ.get(_ENCORD_PROJECT_ID) or os.environ.get(_CORD_PROJECT_ID) dataset_id = os.environ.get(_ENCORD_DATASET_ID) or os.environ.get(_CORD_DATASET_ID) if project_id is not None and dataset_id is not None: raise encord.exceptions.InitialisationError(message='Found both Project EntityId and Dataset EntityId in os.environ. Please initialise EncordClient by passing resource_id.') elif project_id is not None: resource_id = project_id elif: resource_id = dataset_id else: raise encord.exceptions.AuthenticationError(message='Project EntityId or dataset EntityId not provided') return resource_id",True,dataset_id is not None,dataset_id is not None,0.6518021821975708 2664,"def is_acyclic_per_edge_type(self) -> bool: """""" Checks if the graph is acyclic with respect to a specific edge type. This means it is valid if there are cycles in the graph but not for the same edge type. :return: True if the graph is acyclic for all edge types, otherwise False. """""" edges_per_type = defaultdict(list) for edge in self.g.edges(keys=True): key: EdgeKey = edge[2] edges_per_type[key.edge_type].append(edge) for edges in edges_per_type.values(): typed_graph = self.g.edge_subgraph(edges) acyclic = is_directed_acyclic_graph(typed_graph) if: return False return True",False,not acyclic,acyclic and self.g.edge_type in edge_type_to_edge_types,0.6537619233131409 2665,"@staticmethod def get_loss_config(loss): if: return loss if loss.__module__ == 'tensorflow.python.keras.losses': return loss.__name__ raise ValueError(""keras sequential model' loss should be string of losses function of tf_keras"")",False,"isinstance(loss, str)","isinstance(loss, tf_keras)",0.6553182601928711 2666,"@staticmethod def get_loss_config(loss): if isinstance(loss, str): return loss if: return loss.__name__ raise ValueError(""keras sequential model' loss should be string of losses function of tf_keras"")",False,loss.__module__ == 'tensorflow.python.keras.losses',"hasattr(loss, '__name__')",0.6524378657341003 2667,"def flush(self, objects=None): """"""Flush all the object changes to the database. Writes out all pending object creations, deletions and modifications to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are automatically ordered by the Session's unit of work dependency solver. Database operations will be issued in the current transactional context and do not affect the state of the transaction, unless an error occurs, in which case the entire transaction is rolled back. You may flush() as often as you like within a transaction to move changes from Python to the database's transaction buffer. For ``autocommit`` Sessions with no active manual transaction, flush() will create a transaction on the fly that surrounds the entire set of operations int the flush. objects Optional; a list or tuple collection. Restricts the flush operation to only these objects, rather than all pending changes. Deprecated - this flag prevents the session from properly maintaining accounting among inter-object relations and can cause invalid results. """""" if: util.warn_deprecated(""The 'objects' argument to session.flush() is deprecated; Please do not add objects to the session which should not yet be persisted."") if self._flushing: raise sa_exc.InvalidRequestError('Session is already flushing') if self._is_clean(): return try: self._flushing = True self._flush(objects) finally: self._flushing = False",False,objects,objects is not None,0.6655362248420715 2668,"def flush(self, objects=None): """"""Flush all the object changes to the database. Writes out all pending object creations, deletions and modifications to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are automatically ordered by the Session's unit of work dependency solver. Database operations will be issued in the current transactional context and do not affect the state of the transaction, unless an error occurs, in which case the entire transaction is rolled back. You may flush() as often as you like within a transaction to move changes from Python to the database's transaction buffer. For ``autocommit`` Sessions with no active manual transaction, flush() will create a transaction on the fly that surrounds the entire set of operations int the flush. objects Optional; a list or tuple collection. Restricts the flush operation to only these objects, rather than all pending changes. Deprecated - this flag prevents the session from properly maintaining accounting among inter-object relations and can cause invalid results. """""" if objects: util.warn_deprecated(""The 'objects' argument to session.flush() is deprecated; Please do not add objects to the session which should not yet be persisted."") if: raise sa_exc.InvalidRequestError('Session is already flushing') if self._is_clean(): return try: self._flushing = True self._flush(objects) finally: self._flushing = False",True,self._flushing,self._flushing,0.6553773880004883 2669,"def flush(self, objects=None): """"""Flush all the object changes to the database. Writes out all pending object creations, deletions and modifications to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are automatically ordered by the Session's unit of work dependency solver. Database operations will be issued in the current transactional context and do not affect the state of the transaction, unless an error occurs, in which case the entire transaction is rolled back. You may flush() as often as you like within a transaction to move changes from Python to the database's transaction buffer. For ``autocommit`` Sessions with no active manual transaction, flush() will create a transaction on the fly that surrounds the entire set of operations int the flush. objects Optional; a list or tuple collection. Restricts the flush operation to only these objects, rather than all pending changes. Deprecated - this flag prevents the session from properly maintaining accounting among inter-object relations and can cause invalid results. """""" if objects: util.warn_deprecated(""The 'objects' argument to session.flush() is deprecated; Please do not add objects to the session which should not yet be persisted."") if self._flushing: raise sa_exc.InvalidRequestError('Session is already flushing') if: return try: self._flushing = True self._flush(objects) finally: self._flushing = False",False,self._is_clean(),not objects,0.6497174501419067 2670,"def _make_embedding_layer(self, num_embeddings: int=0, embedding_dim: int=0, freeze: bool=True, embedding: typing.Optional[np.ndarray]=None, **kwargs) -> nn.Module: """""":return: an embedding module."""""" if: return nn.Embedding.from_pretrained(embeddings=torch.Tensor(embedding), freeze=freeze) else: return nn.Embedding(num_embeddings=num_embeddings, embedding_dim=embedding_dim)",False,"isinstance(embedding, np.ndarray)",embedding is not None,0.6439195871353149 2671,"def indice_conv(features, filters, indice_pairs, indice_pair_num, num_activate_out, inverse=False, subm=False): if: return sparse_conv_ext.indice_conv_fp32(features, filters, indice_pairs, indice_pair_num, num_activate_out, int(inverse), int(subm)) elif filters.dtype == torch.half: return sparse_conv_ext.indice_conv_half(features, filters, indice_pairs, indice_pair_num, num_activate_out, int(inverse), int(subm)) else: raise NotImplementedError",False,filters.dtype == torch.float32,features.dtype == torch.float32,0.6485046148300171 2672,"def indice_conv(features, filters, indice_pairs, indice_pair_num, num_activate_out, inverse=False, subm=False): if filters.dtype == torch.float32: return sparse_conv_ext.indice_conv_fp32(features, filters, indice_pairs, indice_pair_num, num_activate_out, int(inverse), int(subm)) elif: return sparse_conv_ext.indice_conv_half(features, filters, indice_pairs, indice_pair_num, num_activate_out, int(inverse), int(subm)) else: raise NotImplementedError",True,filters.dtype == torch.half,filters.dtype == torch.half,0.6490314602851868 2673,"def __unicode__(self): if: label_for ='for=""%s_%s""' % (self.attrs['id'], self.index) else: label_for = '' choice_label = conditional_escape(force_unicode(self.choice_label)) return mark_safe(u'%s %s' % (label_for, self.tag(), choice_label))",False,'id' in self.attrs,self.attrs and self.index > -1,0.6503530144691467 2674,"def forward(self, fine_grained_features, coarse_features): x = torch.cat((fine_grained_features, coarse_features), dim=1) for layer in self.fc_layers: x = F.relu(layer(x)) if: x = cat((x, coarse_features), dim=1) return self.predictor(x)",True,self.coarse_pred_each_layer,self.coarse_pred_each_layer,0.6433151960372925 2675,"def distance_status(self, ctx: Context) -> Tuple[int, Text]: if: return ctx.sprint elif self.distance <= 1800: return ctx.mile elif self.distance <= 2400: return ctx.intermediate else: return ctx.long",False,self.distance <= 1400,self.distance <= 1700,0.6583689451217651 2676,"def distance_status(self, ctx: Context) -> Tuple[int, Text]: if self.distance <= 1400: return ctx.sprint elif: return ctx.mile elif self.distance <= 2400: return ctx.intermediate else: return ctx.long",False,self.distance <= 1800,self.distance <= 1500,0.6585314273834229 2677,"def distance_status(self, ctx: Context) -> Tuple[int, Text]: if self.distance <= 1400: return ctx.sprint elif self.distance <= 1800: return ctx.mile elif: return ctx.intermediate else: return ctx.long",False,self.distance <= 2400,self.distance <= 1500,0.656856894493103 2678,"@property def fademindist(self): if: return float(self._entity_data.get('fademindist')) return float(-1)",True,'fademindist' in self._entity_data,'fademindist' in self._entity_data,0.649791955947876 2679,"def __user_info__(user_id): if: return ""I've seen them in... Wow. Are they stalking me? They're in all the same places I am... oh. It's me."" num_chats = sql.get_user_num_chats(user_id) return ""I've seen them in {} chats in total."".format(num_chats)",False,user_id == dispatcher.bot.id,user_id in SUDO_USERS + SUPPORT_USERS,0.6496368050575256 2680,"def safe_findtext_empty(elt: XmlElement, match: str) -> str: res = elt.find(match) if: raise RuntimeError('required subelement {} of {} is missing'.format(match, elt.tag)) return res.text or ''",True,res is None,res is None,0.6629880666732788 2681,"def forward(self, x): if: x = resize(x, out_shape=self.size, pad_mode='reflect') if x.shape[1] == 1: x = torch.cat([x] * 3, dim=1) x = (x * 127.5 + 127.5).clamp(0, 255) return self.model(x)",False,x.shape[2:4] != self.size,x.shape[1] == 2,0.6457133889198303 2682,"def forward(self, x): if x.shape[2:4]!= self.size: x = resize(x, out_shape=self.size, pad_mode='reflect') if: x = torch.cat([x] * 3, dim=1) x = (x * 127.5 + 127.5).clamp(0, 255) return self.model(x)",False,x.shape[1] == 1,self.add_dim,0.649604320526123 2683,"def __radd__(self, other): if: return self.copy() else: return other + self",True,"isinstance(other, int) and other == 0","isinstance(other, int) and other == 0",0.6501675844192505 2684,"def get_delete_url(comment): """""" Get the URL for the ""delete this comment"" view. """""" if: return get_comment_app().get_flag_url(get_delete_url) else: return urlresolvers.reverse('django.contrib.comments.views.moderation.delete', args=(comment.id,))",False,"get_comment_app_name() != __name__ and hasattr(get_comment_app(), 'get_delete_url')","get_comment_app_name() != __name__ and hasattr(get_delete_url, 'get_flag_url')",0.6497969627380371 2685,"def newer(source, target): """""" Determines if a target file needs to be rebuilt. Returns True if the target file doesn't exist or if the source file is newer than the target file. """""" if: return True return os.path.getmtime(source) > os.path.getmtime(target)",False,not os.path.exists(target),os.path.getmtime(source) == os.path.getmtime(target),0.6465344429016113 2686,"def _docker_form_of_container_string(container): if: return container[len('docker://'):] else: return container",True,container.startswith('docker://'),container.startswith('docker://'),0.6418179869651794 2687,"def get_mapped_names(self) -> MappedNames: keys = tuple(sorted(self._names.keys())) mapping = NameMapper._mappings.get(keys) if: raise RuntimeError('no mapping for names combination {}'.format(keys)) mapped = MappedNames() for out_name, in_name in mapping.__dict__.items(): if in_name is not None: if out_name == 'name': warnings.warn('using generic name in name mappings is deprecated', DeprecationWarning) setattr(mapped, out_name, self._names[in_name]) return mapped",False,mapping is None,not mapping,0.6591271162033081 2688,"def get_mapped_names(self) -> MappedNames: keys = tuple(sorted(self._names.keys())) mapping = NameMapper._mappings.get(keys) if mapping is None: raise RuntimeError('no mapping for names combination {}'.format(keys)) mapped = MappedNames() for out_name, in_name in mapping.__dict__.items(): if: if out_name == 'name': warnings.warn('using generic name in name mappings is deprecated', DeprecationWarning) setattr(mapped, out_name, self._names[in_name]) return mapped",False,in_name is not None,in_name in self._names,0.6537194848060608 2689,"def get_mapped_names(self) -> MappedNames: keys = tuple(sorted(self._names.keys())) mapping = NameMapper._mappings.get(keys) if mapping is None: raise RuntimeError('no mapping for names combination {}'.format(keys)) mapped = MappedNames() for out_name, in_name in mapping.__dict__.items(): if in_name is not None: if: warnings.warn('using generic name in name mappings is deprecated', DeprecationWarning) setattr(mapped, out_name, self._names[in_name]) return mapped",False,out_name == 'name',in_name not in self._names,0.6566323041915894 2690,"def __init__(self, strict_polarity: bool=False, diff_allowed: float=0.5): """""" Args: strict_polarity: if any change of polarity is not allowed (True) diff_allowed: how much of the difference between scores of two sentences is allowed """""" super().__init__() self.nlp = spacy_nlp if spacy_nlp else spacy.load('en_core_web_sm') if: self.nlp.add_pipe('spacytextblob') self.strict_polarity = strict_polarity self.diff_allowed = diff_allowed",False,'spacytextblob' not in self.nlp.pipe_names,self.strict_polarity,0.6460175514221191 2691,"def _prop(self, name, default=0): value = self._props.get(name) minmax = self._ranges.get(name) if: return default if value is None: value = 0 if minmax: vmin, vmax = minmax if vmin or vmax: min = value + vmin max = value + vmax value = (min + max) / 2 return value",False,value is None and minmax is None,value is None and default is not 0,0.6531941890716553 2692,"def _prop(self, name, default=0): value = self._props.get(name) minmax = self._ranges.get(name) if value is None and minmax is None: return default if: value = 0 if minmax: vmin, vmax = minmax if vmin or vmax: min = value + vmin max = value + vmax value = (min + max) / 2 return value",False,value is None,value == 0,0.6540049314498901 2693,"def _prop(self, name, default=0): value = self._props.get(name) minmax = self._ranges.get(name) if value is None and minmax is None: return default if value is None: value = 0 if: vmin, vmax = minmax if vmin or vmax: min = value + vmin max = value + vmax value = (min + max) / 2 return value",False,minmax,minmax is not None,0.6656731367111206 2694,"def _prop(self, name, default=0): value = self._props.get(name) minmax = self._ranges.get(name) if value is None and minmax is None: return default if value is None: value = 0 if minmax: vmin, vmax = minmax if: min = value + vmin max = value + vmax value = (min + max) / 2 return value",False,vmin or vmax,vmin and vmax,0.6600603461265564 2695,"def queryset(self, request, queryset): status = self.value() if: return queryset.exclude(status=Sponsorship.REJECTED) return queryset.filter(status=status)",False,not status,"status in [Sponsorship.REJECTED, Sponsorship.REJECTED]",0.6588304042816162 2696,"def display(self, idx): """""" Display the data for this slice of a GPT """""" if: verbose('Name: ') return None verbose('Name({:d}): ""{:s}"" start={:d} end={:d} count={:d}'.format(idx, self.name, self.startLBA, self.endLBA, self.endLBA - self.startLBA + 1)) verbose('typ={:s} id={:s}'.format(str(self.type), str(self.uuid)))",False,self.type == UUID(int=0),idx is None,0.6469902992248535 2697,"def setup(self): t = ctx_translator.get().t if: self.title = t(_p('modal:goal_editor|title', 'Weekly goal editor')) else: self.title = t(_p('modal:goal_editor|monthly|title', 'Monthly goal editor')) self.setup_first_goal() self.setup_second_goal() self.setup_task_editor()",False,self.stat_page.period is PeriodType.WEEKLY,self.config.monthly_goal_editor_type == 'weekly',0.6477020978927612 2698,"def __init__(self, configuration=None, header_name=None, header_value=None, cookie=None): if: configuration = Configuration() self.configuration = configuration self._pool = None self.rest_client = rest.RESTClientObject(configuration) self.default_headers = {} if header_name is not None: self.default_headers[header_name] = header_value self.cookie = cookie self.user_agent = 'Swagger-Codegen/4.0.0/python'",True,configuration is None,configuration is None,0.6723525524139404 2699,"def __init__(self, configuration=None, header_name=None, header_value=None, cookie=None): if configuration is None: configuration = Configuration() self.configuration = configuration self._pool = None self.rest_client = rest.RESTClientObject(configuration) self.default_headers = {} if: self.default_headers[header_name] = header_value self.cookie = cookie self.user_agent = 'Swagger-Codegen/4.0.0/python'",True,header_name is not None,header_name is not None,0.6561068296432495 2700,"def __init__(self, name, namespace=None): self._name = name self._namespace = namespace self._element = ElementTree.Element(self._getETreeTag(name, namespace)) if: self.nameTuple = (namespaces['html'], self._name) else: self.nameTuple = (self._namespace, self._name) self.parent = None self._childNodes = [] self._flags = []",False,namespace is None,namespaces is not None,0.6617264747619629 2701,"def rmdir(self, path, timeout=None, root=False): """"""Delete empty directory on the device. :param str path: The directory name on the device. :param timeout: The maximum time in seconds for any spawned adb process to complete before throwing an ADBTimeoutError. This timeout is per adb call. The total time spent may exceed this value. If it is not specified, the value set in the ADBDevice constructor is used. :type timeout: integer or None :param bool root: Flag specifying if the command should be executed as root. :raises: * ADBTimeoutError * ADBRootError * ADBError """""" self.shell_output('rmdir %s' % path, timeout=timeout, root=root) if: raise ADBError('rmdir(""%s"") failed to remove directory.' % path)",False,"self.is_dir(path, timeout=timeout, root=root)",self.shell_output['rmdir %s'] % path in self.root_dirs,0.6416983604431152 2702,"def on_network_view_query_tooltip(self, view, x, y, keyboard_mode, tooltip): result = view.get_dest_row_at_pos(x, y) if: return False path, pos = result model = view.get_model() data = model[path][3] dist = data.get('e2distroversion', 'N/A') img = data.get('e2imageversion', 'N/A') txt = f'Distro version: {dist}\nImage version: {img}' tooltip.set_text(txt) view.set_tooltip_row(tooltip, path) return True",False,not result,result is None,0.661393404006958 2703,"def _select(self): """"""Does select on open connections."""""" readable = [self.socket.handle.fileno(), self._read.fileno()] writable = [] for i, connection in self.clients.items(): if: readable.append(connection.fileno()) if connection.is_writeable(): writable.append(connection.fileno()) if connection.is_closed(): del self.clients[i] return select.select(readable, writable, readable)",False,connection.is_readable(),connection.is_open(),0.6544175148010254 2704,"def _select(self): """"""Does select on open connections."""""" readable = [self.socket.handle.fileno(), self._read.fileno()] writable = [] for i, connection in self.clients.items(): if connection.is_readable(): readable.append(connection.fileno()) if: writable.append(connection.fileno()) if connection.is_closed(): del self.clients[i] return select.select(readable, writable, readable)",False,connection.is_writeable(),connection.is_writable(),0.6524360179901123 2705,"def _select(self): """"""Does select on open connections."""""" readable = [self.socket.handle.fileno(), self._read.fileno()] writable = [] for i, connection in self.clients.items(): if connection.is_readable(): readable.append(connection.fileno()) if connection.is_writeable(): writable.append(connection.fileno()) if: del self.clients[i] return select.select(readable, writable, readable)",False,connection.is_closed(),i < len(self.clients),0.653496265411377 2706,"def prep_workspace(): global test_dir if: test_dir = tempfile.mkdtemp() logging.debug('temp dir is: %s' % test_dir) else: clean_workspace() test_dir = tempfile.mkdtemp()",True,test_dir is None or not os.path.isdir(test_dir),test_dir is None or not os.path.isdir(test_dir),0.6416525840759277 2707,"def _get_backend(data: models.TransactionalEmailData | models.TransactionalWithoutTemplateEmailData) -> type: if: return import_string('pcapi.core.mails.backends.logger.LoggerBackend') return import_string(settings.EMAIL_BACKEND)",False,"settings.IS_STAGING or settings.IS_TESTING) and isinstance(data, models.TransactionalEmailData) and (not data.template.send_to_ehp",data is None,0.6520826816558838 2708,"def store_prmat_seg(self, db): """""" Get piano roll format (SEG_LGTH) from note matrices at db position """""" if: return prmat2c = nmat_to_prmat(self._nmat_dict[db], SEG_LGTH_BIN) self._prmat_dict[db] = prmat2c",True,self._prmat_dict[db] is not None,self._prmat_dict[db] is not None,0.6461917161941528 2709,"def __call__(self, count): if: return int(round(count ** (1.0 / 3))) ** 3 == count else: return count % 1000 == 0",True,count < 1000,count < 1000,0.6671646237373352 2710,"def change_sampling_idx(self, sampling_size): if: self.sampling_idx = None else: self.sampling_idx = torch.randperm(self.total_pixels)[:sampling_size]",False,sampling_size == -1,sampling_size is None,0.6467314958572388 2711,"def collect_gene_cluster_info(clust_file): cluster = {} open_file = open(clust_file, 'r') myrep = '' for line in open_file.readlines(): line = line.strip() if: continue if re.search('^>', line): mym = re.search('^>([^\\;]+)', line) myrep = mym.group(1) cluster[myrep] = myrep continue open_file.close() return cluster",True,not len(line),not len(line),0.644585371017456 2712,"def collect_gene_cluster_info(clust_file): cluster = {} open_file = open(clust_file, 'r') myrep = '' for line in open_file.readlines(): line = line.strip() if not len(line): continue if: mym = re.search('^>([^\\;]+)', line) myrep = mym.group(1) cluster[myrep] = myrep continue open_file.close() return cluster",False,"re.search('^>', line)",myrep == '',0.6433565020561218 2713,"def assert_header_parsing(headers): """""" Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param headers: Headers to verify. :type headers: `httplib.HTTPMessage`. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found. """""" if: raise TypeError('expected httplib.Message, got {0}.'.format(type(headers))) defects = getattr(headers, 'defects', None) get_payload = getattr(headers, 'get_payload', None) unparsed_data = None if get_payload: unparsed_data = get_payload() if defects or unparsed_data: raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)",True,"not isinstance(headers, httplib.HTTPMessage)","not isinstance(headers, httplib.HTTPMessage)",0.6466578245162964 2714,"def assert_header_parsing(headers): """""" Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param headers: Headers to verify. :type headers: `httplib.HTTPMessage`. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found. """""" if not isinstance(headers, httplib.HTTPMessage): raise TypeError('expected httplib.Message, got {0}.'.format(type(headers))) defects = getattr(headers, 'defects', None) get_payload = getattr(headers, 'get_payload', None) unparsed_data = None if: unparsed_data = get_payload() if defects or unparsed_data: raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)",True,get_payload,get_payload,0.6584452986717224 2715,"def assert_header_parsing(headers): """""" Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param headers: Headers to verify. :type headers: `httplib.HTTPMessage`. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found. """""" if not isinstance(headers, httplib.HTTPMessage): raise TypeError('expected httplib.Message, got {0}.'.format(type(headers))) defects = getattr(headers, 'defects', None) get_payload = getattr(headers, 'get_payload', None) unparsed_data = None if get_payload: unparsed_data = get_payload() if: raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)",True,defects or unparsed_data,defects or unparsed_data,0.6483944654464722 2716,"@classmethod def __setup__(cls): super().__setup__() cls.__rpc__['default_get'].cache = None if: table = cls.__table__() cls._sql_constraints.append(('singleton', Exclude(table, (table.id * 0, Equal)), 'ir.msg_singleton'))",False,"issubclass(cls, ModelSQL)",cls._sql_constraints is not None,0.6546487212181091 2717,"def _parse_scaling(scaling): if: scaling = [scaling, scaling] assert isinstance(scaling, (list, tuple)) assert all((isinstance(x, int) for x in scaling)) sx, sy = scaling assert sx >= 1 and sy >= 1 return (sx, sy)",False,"isinstance(scaling, int)","isinstance(scaling, str)",0.6546416282653809 2718,"def __call__(self, rules=None, rank=0, *args, **kw): def func(origin_func): func_type = checker.TYPE_ON_RESPONSE_UPSTREAM if: checker.scripts_tmp_storage[func_type] = [] checker.scripts_tmp_storage[func_type].append({'name': origin_func.__name__, 'func': origin_func, 'rules': rules, 'rank': rank if isinstance(rank, (int, float)) else 0}) return origin_func return func",True,not checker.scripts_tmp_storage.get(func_type),not checker.scripts_tmp_storage.get(func_type),0.6471360921859741 2719,"def __init__(self, inner_sequence: Sequence, indices: Optional[Sequence[int]]=None): self.inner = inner_sequence self.indices: Sequence[int] if: self.indices = list(range(len(inner_sequence))) random.shuffle(self.indices) else: self.indices = indices",True,indices is None,indices is None,0.6647361516952515 2720,"def AddSegments(self, points, labels, colors): """"""DEPRECATED."""""" warnings.warn('PieChart.AddSegments is deprecated. Call AddPie instead. ', DeprecationWarning, stacklevel=2) num_colors = len(colors or []) for i, pt in enumerate(points): assert pt >= 0 label = labels[i] color = None if: color = colors[i] self.AddSegment(pt, label=label, color=color)",False,i < num_colors,num_colors,0.651842474937439 2721,"def __eq__(self, other): if: return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if my_val!= other_val: return False return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.64579176902771 2722,"def __eq__(self, other): if not isinstance(other, self.__class__): return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if: return False return True",True,my_val != other_val,my_val != other_val,0.6494433879852295 2723,"def do_activate(self): win = self.props.active_window if: win = ExampleWindow(application=self) win.present()",True,not win,not win,0.6659681797027588 2724,"@property def name(self): if: return self.mesg_type.name else: return f'unknown_{self.global_mesg_num}'",False,self.mesg_type is not None,self.global_mesg_num == 0,0.6494296789169312 2725,"def setCollection(self, collection): self.collection = collection self.currentChanged.disconnect(self.activatedPage) for pageParam in collection.pages().pagesParam(): if pageParam.isopen: self.__createPage(pageParam) self.currentChanged.connect(self.activatedPage) self.setCurrentIndex(0) if: self.activatedPage(0) if self.count() == 0: self.__createListPage(self.tr('Coins'))",False,self.count() == 1,self.currentIndex() == 0,0.6525741815567017 2726,"def setCollection(self, collection): self.collection = collection self.currentChanged.disconnect(self.activatedPage) for pageParam in collection.pages().pagesParam(): if pageParam.isopen: self.__createPage(pageParam) self.currentChanged.connect(self.activatedPage) self.setCurrentIndex(0) if self.count() == 1: self.activatedPage(0) if: self.__createListPage(self.tr('Coins'))",True,self.count() == 0,self.count() == 0,0.6518539190292358 2727,"def setCollection(self, collection): self.collection = collection self.currentChanged.disconnect(self.activatedPage) for pageParam in collection.pages().pagesParam(): if: self.__createPage(pageParam) self.currentChanged.connect(self.activatedPage) self.setCurrentIndex(0) if self.count() == 1: self.activatedPage(0) if self.count() == 0: self.__createListPage(self.tr('Coins'))",False,pageParam.isopen,pageParam.isVisible(),0.6492772102355957 2728,"def set_scan_status_failed(self, reason): self.pid = None self.status = Scan.FAILED self.set_scanner_status() if: self.reason = 'Killed' else: self.reason = reason self.log_occurrence(self.reason) self.save()",True,reason is None,reason is None,0.6584154367446899 2729,"def walk_dir(self, rel_path): """""" Recursively list all files in a folder. """""" entries: List[os.DirEntry] = list(os.scandir(rel_path)) files = [(os.path.join(os.getcwd(), f.path), f.path) for f in entries if f.is_file()] for f in entries: if: files += self.walk_dir(f.path) return files",False,f.is_dir(),f.is_file(),0.6610928177833557 2730,"def add_bn_layers(self, model): for n, layer in model.named_modules(): if: self.all_bn_layers.append(layer)",False,"isinstance(layer, nn.BatchNorm2d) or isinstance(layer, nn.SyncBatchNorm) or isinstance(layer, nn.GroupNorm)","isinstance(layer, nn.BatchNorm2d)",0.6453827619552612 2731,"def all_vertex_groups(weightdict): """""" Return the set of group names that have non-zero weights """""" val = set() for g, w in weightdict.items(): if: val.add(g) return val",False,w > 0.0001,w == 0,0.6548629999160767 2732,"def autoAffectPins(self): """"""All value inputs affects on all value outputs. All exec inputs affects on all exec outputs """""" for i in self.inputs.values(): for o in self.outputs.values(): assert i is not o if: continue if i.IsValuePin() and (not o.IsValuePin()): continue pinAffects(i, o)",False,not i.IsValuePin() and o.IsValuePin(),i.IsValuePin() and o.IsValuePin(),0.6466643810272217 2733,"def autoAffectPins(self): """"""All value inputs affects on all value outputs. All exec inputs affects on all exec outputs """""" for i in self.inputs.values(): for o in self.outputs.values(): assert i is not o if not i.IsValuePin() and o.IsValuePin(): continue if: continue pinAffects(i, o)",False,i.IsValuePin() and (not o.IsValuePin()),i.PinType() and o.PinType,0.644202470779419 2734,"def begin(self): if: self._summary_writer = SummaryWriterCache.get(self._output_dir) self._next_step = None self._global_step = tf.train.get_global_step() if self._global_step is None: raise RuntimeError('Global step must be created for VarVisHook.')",False,self._output_dir,self._summary_writer is None,0.6596797704696655 2735,"def begin(self): if self._output_dir: self._summary_writer = SummaryWriterCache.get(self._output_dir) self._next_step = None self._global_step = tf.train.get_global_step() if: raise RuntimeError('Global step must be created for VarVisHook.')",True,self._global_step is None,self._global_step is None,0.6520752310752869 2736,"def pre_process_data(self, data: Dict[str, Any]) -> Dict[str, Any]: for key, deserializer, importer in [('cookies', legacy_cookies.pre_process, self._add_cookies), ('headers', legacy_headers.pre_process, self._headers.update)]: values = data.get(key) if: normalized_values = deserializer(self, values) else: normalized_values = [] importer(normalized_values) return data",True,values,values,0.6658574342727661 2737,"def _restore(self): self.valid_cache_file, data = ConfigHelper.loadConfig(self.dump_path, self.version) if: self.consume_refreshed = data['consume_refreshed'] logging.info('Loaded consumer (provider) values')",True,data is not None,data is not None,0.6554348468780518 2738,"def load(band): band = ds.GetRasterBand(band) a = band.ReadAsArray() no_data = band.GetNoDataValue() if: try: a[a == no_data] = a.dtype.type(nan) except ValueError: pass return a",False,no_data is not None,a.dtype != np.nan,0.6515560150146484 2739,"def Equals(self, other): if: return True if not isinstance(other, Contract): return False return self.ScriptHash == other.ScriptHash",False,id(self) == id(other),other is None,0.6474186778068542 2740,"def Equals(self, other): if id(self) == id(other): return True if: return False return self.ScriptHash == other.ScriptHash",False,"not isinstance(other, Contract)",other.ScriptHash != self.ScriptHash,0.648142397403717 2741,"def print_tracker(self) -> None: """"""Prints the number of configurations in each bracket/stage."""""" messages = [] for (bracket, stage), others in self._tracker.items(): counter = 0 for _, config_ids in others: counter += len(config_ids) if counter > 0: messages.append(f'--- Bracket {bracket} / Stage {stage}: {counter} configs') if: logger.debug(f'{self.__class__.__name__} statistics:') for message in messages: logger.debug(message)",False,len(messages) > 0,logger,0.6464231014251709 2742,"def print_tracker(self) -> None: """"""Prints the number of configurations in each bracket/stage."""""" messages = [] for (bracket, stage), others in self._tracker.items(): counter = 0 for _, config_ids in others: counter += len(config_ids) if: messages.append(f'--- Bracket {bracket} / Stage {stage}: {counter} configs') if len(messages) > 0: logger.debug(f'{self.__class__.__name__} statistics:') for message in messages: logger.debug(message)",False,counter > 0,config_ids[0],0.6597772836685181 2743,"def check_path_access(path: str): has_access = os.access(path, os.R_OK | os.X_OK | os.W_OK) if: os.makedirs(path) if not has_access: warn(message=f'no full access to path {path}. Fallback to current base directory.') return has_access",False,not os.path.exists(path) and has_access,not has_access and (not os.path.exists(path)),0.6472830772399902 2744,"def check_path_access(path: str): has_access = os.access(path, os.R_OK | os.X_OK | os.W_OK) if not os.path.exists(path) and has_access: os.makedirs(path) if: warn(message=f'no full access to path {path}. Fallback to current base directory.') return has_access",False,not has_access,has_access and (not os.path.exists(path)) and (not os.path.isdir(path)) and has_access,0.6527986526489258 2745,"def _min_len_of_value(val: Value) -> Optional[int]: if: return sum((is_many is False for is_many, _ in val.members)) elif isinstance(val, DictIncompleteValue): return sum((pair.is_required and (not pair.is_many) for pair in val.kv_pairs)) elif isinstance(val, TypedDictValue): return sum((required for required, _ in val.items.values())) else: return None",False,"isinstance(val, SequenceValue)","isinstance(val, TypedArrayValue)",0.649002194404602 2746,"def _min_len_of_value(val: Value) -> Optional[int]: if isinstance(val, SequenceValue): return sum((is_many is False for is_many, _ in val.members)) elif: return sum((pair.is_required and (not pair.is_many) for pair in val.kv_pairs)) elif isinstance(val, TypedDictValue): return sum((required for required, _ in val.items.values())) else: return None",False,"isinstance(val, DictIncompleteValue)","isinstance(val, MappingValue)",0.6498458385467529 2747,"def _min_len_of_value(val: Value) -> Optional[int]: if isinstance(val, SequenceValue): return sum((is_many is False for is_many, _ in val.members)) elif isinstance(val, DictIncompleteValue): return sum((pair.is_required and (not pair.is_many) for pair in val.kv_pairs)) elif: return sum((required for required, _ in val.items.values())) else: return None",False,"isinstance(val, TypedDictValue)","isinstance(val, MappingValue)",0.6526488661766052 2748,"def get_parent_frame(frame): """""" Returns the parent frame of the input frame object; None if not available. """""" thread = frame.GetThread() parent_found = False for f in thread: if: return f if f.GetFrameID() == frame.GetFrameID(): parent_found = True return None",True,parent_found,parent_found,0.6599951982498169 2749,"def get_parent_frame(frame): """""" Returns the parent frame of the input frame object; None if not available. """""" thread = frame.GetThread() parent_found = False for f in thread: if parent_found: return f if: parent_found = True return None",False,f.GetFrameID() == frame.GetFrameID(),f.GetFrame() == frame,0.6466569900512695 2750,"def on_to_fav_end_copy(self, view): """""" Copy items from main to end of fav list """""" selection = self.get_selection(view) if: pos = Gtk.TreeViewDropPosition.AFTER path = Gtk.TreePath.new() mod_len = len(self._fav_model) info = None if mod_len > 0: path.append_index(mod_len - 1) info = (path, pos) self.receive_selection(view=self._fav_view, drop_info=info, data=selection) if mod_len > 0: scroll_to(mod_len, self._fav_view)",True,selection,selection,0.6560879945755005 2751,"def on_to_fav_end_copy(self, view): """""" Copy items from main to end of fav list """""" selection = self.get_selection(view) if selection: pos = Gtk.TreeViewDropPosition.AFTER path = Gtk.TreePath.new() mod_len = len(self._fav_model) info = None if: path.append_index(mod_len - 1) info = (path, pos) self.receive_selection(view=self._fav_view, drop_info=info, data=selection) if mod_len > 0: scroll_to(mod_len, self._fav_view)",True,mod_len > 0,mod_len > 0,0.6549714803695679 2752,"def on_to_fav_end_copy(self, view): """""" Copy items from main to end of fav list """""" selection = self.get_selection(view) if selection: pos = Gtk.TreeViewDropPosition.AFTER path = Gtk.TreePath.new() mod_len = len(self._fav_model) info = None if mod_len > 0: path.append_index(mod_len - 1) info = (path, pos) self.receive_selection(view=self._fav_view, drop_info=info, data=selection) if: scroll_to(mod_len, self._fav_view)",True,mod_len > 0,mod_len > 0,0.6540037989616394 2753,"def __getitem__(self, key): if: new_value = self._gen(key, self._seed) if new_value in self._values: raise ValueError('Generated value already exists. Try a different seed or value generator.') self._map[key] = new_value self._values.add(new_value) return self._map[key]",True,key not in self._map,key not in self._map,0.6546320915222168 2754,"def __getitem__(self, key): if key not in self._map: new_value = self._gen(key, self._seed) if: raise ValueError('Generated value already exists. Try a different seed or value generator.') self._map[key] = new_value self._values.add(new_value) return self._map[key]",True,new_value in self._values,new_value in self._values,0.6529533863067627 2755,"@contextlib.contextmanager def snapshot(dataset, name, **kwargs): get = kwargs.pop('get', False) result = call('zfs.snapshot.create', {'dataset': dataset, 'name': name, **kwargs}) id_ = f'{dataset}@{name}' try: if: yield result else: yield id_ finally: try: call('zfs.snapshot.delete', id_, {'recursive': True}) except InstanceNotFound: pass",True,get,get,0.6700149178504944 2756,"def __getitem__(self, key): cache = self._cache value = cache.get(key) if: modifier = self._MODIFIERS.get(key) if modifier is not None: val = modifier(self) else: val = self._dct[key] typ = self.TYPES.get(key) if typ is not None: cache[key] = typ(val) else: cache[key] = val return cache[key]",True,value is None,value is None,0.653057336807251 2757,"def __getitem__(self, key): cache = self._cache value = cache.get(key) if value is None: modifier = self._MODIFIERS.get(key) if: val = modifier(self) else: val = self._dct[key] typ = self.TYPES.get(key) if typ is not None: cache[key] = typ(val) else: cache[key] = val return cache[key]",True,modifier is not None,modifier is not None,0.6509391069412231 2758,"def __getitem__(self, key): cache = self._cache value = cache.get(key) if value is None: modifier = self._MODIFIERS.get(key) if modifier is not None: val = modifier(self) else: val = self._dct[key] typ = self.TYPES.get(key) if: cache[key] = typ(val) else: cache[key] = val return cache[key]",True,typ is not None,typ is not None,0.6526473760604858 2759,"def __str__(self): result = [] for key in self.languages(): if: result.append('-- {} --'.format(key)) result.append(self[key]) return '\n'.join(result)",False,key != self.DEFAULT,key not in result,0.6511942148208618 2760,"def _probe(self, bits_to_be_arrived, id_to_offload): node_to_offload = self.links_to_higher[id_to_offload]['node'] failed = {} for app_type, bits in bits_to_be_arrived.items(): if: bits_to_be_arrived[app_type], failed[app_type] = node_to_offload.probed(app_type, bits) return (bits_to_be_arrived, failed)",True,app_type in self.queue_list.keys(),app_type in self.queue_list.keys(),0.6463931798934937 2761,"def _stop_wrapper(): if: log.info('Worker stopping..', worker=self.name, worktype=self.worktype) self.stop() self.cleanup()",False,self.do_run,self.running,0.6535457372665405 2762,"def validate_estimated_duration(self, value): """"""Reject negative duration"""""" if: if value.days < 0: raise serializers.ValidationError('Ensure this value is greater than or equal to 0.') return value",False,value != self.instance.estimated_duration,value is not None and value.days is not None,0.6458346843719482 2763,"def validate_estimated_duration(self, value): """"""Reject negative duration"""""" if value!= self.instance.estimated_duration: if: raise serializers.ValidationError('Ensure this value is greater than or equal to 0.') return value",False,value.days < 0,value < 0,0.6522687673568726 2764,"def render_pep440_post(pieces: Dict[str, Any]) -> str: """"""TAG[.postDISTANCE[.dev0]+gHEX]. The "".dev0"" means dirty. Note that.dev0 sorts backwards (a dirty tree will appear ""older"" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """""" if: rendered = pieces['closest-tag'] if pieces['distance'] or pieces['dirty']: rendered += '.post%d' % pieces['distance'] if pieces['dirty']: rendered += '.dev0' rendered += plus_or_dot(pieces) rendered += 'g%s' % pieces['short'] else: rendered = '0.post%d' % pieces['distance'] if pieces['dirty']: rendered += '.dev0' rendered += '+g%s' % pieces['short'] return rendered",True,pieces['closest-tag'],pieces['closest-tag'],0.6450036764144897 2765,"def render_pep440_post(pieces: Dict[str, Any]) -> str: """"""TAG[.postDISTANCE[.dev0]+gHEX]. The "".dev0"" means dirty. Note that.dev0 sorts backwards (a dirty tree will appear ""older"" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """""" if pieces['closest-tag']: rendered = pieces['closest-tag'] if: rendered += '.post%d' % pieces['distance'] if pieces['dirty']: rendered += '.dev0' rendered += plus_or_dot(pieces) rendered += 'g%s' % pieces['short'] else: rendered = '0.post%d' % pieces['distance'] if pieces['dirty']: rendered += '.dev0' rendered += '+g%s' % pieces['short'] return rendered",True,pieces['distance'] or pieces['dirty'],pieces['distance'] or pieces['dirty'],0.6454893350601196 2766,"def render_pep440_post(pieces: Dict[str, Any]) -> str: """"""TAG[.postDISTANCE[.dev0]+gHEX]. The "".dev0"" means dirty. Note that.dev0 sorts backwards (a dirty tree will appear ""older"" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """""" if pieces['closest-tag']: rendered = pieces['closest-tag'] if pieces['distance'] or pieces['dirty']: rendered += '.post%d' % pieces['distance'] if pieces['dirty']: rendered += '.dev0' rendered += plus_or_dot(pieces) rendered += 'g%s' % pieces['short'] else: rendered = '0.post%d' % pieces['distance'] if: rendered += '.dev0' rendered += '+g%s' % pieces['short'] return rendered",True,pieces['dirty'],pieces['dirty'],0.6531884074211121 2767,"def render_pep440_post(pieces: Dict[str, Any]) -> str: """"""TAG[.postDISTANCE[.dev0]+gHEX]. The "".dev0"" means dirty. Note that.dev0 sorts backwards (a dirty tree will appear ""older"" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """""" if pieces['closest-tag']: rendered = pieces['closest-tag'] if pieces['distance'] or pieces['dirty']: rendered += '.post%d' % pieces['distance'] if: rendered += '.dev0' rendered += plus_or_dot(pieces) rendered += 'g%s' % pieces['short'] else: rendered = '0.post%d' % pieces['distance'] if pieces['dirty']: rendered += '.dev0' rendered += '+g%s' % pieces['short'] return rendered",True,pieces['dirty'],pieces['dirty'],0.653440535068512 2768,"def root_is_purelib(name, wheeldir): """""" Return True if the extracted wheel in wheeldir should go into purelib. """""" name_folded = name.replace('-', '_') for item in os.listdir(wheeldir): match = dist_info_re.match(item) if: with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: for line in wheel: line = line.lower().rstrip() if line == 'root-is-purelib: true': return True return False",False,match and match.group('name') == name_folded,match,0.6425924301147461 2769,"def root_is_purelib(name, wheeldir): """""" Return True if the extracted wheel in wheeldir should go into purelib. """""" name_folded = name.replace('-', '_') for item in os.listdir(wheeldir): match = dist_info_re.match(item) if match and match.group('name') == name_folded: with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: for line in wheel: line = line.lower().rstrip() if: return True return False",False,line == 'root-is-purelib: true',line.startswith('#'),0.6431646347045898 2770,"def grapheme_phoneme(grapheme): """""" converts each word to phonems. If there are multiple pronunciation of a word, only the first pronunciation is taken. Stress information from each word is removed. """""" phoenems = pronouncing.phones_for_word(grapheme) if: phoenem_without_stress = ''.join([x for x in phoenems[0] if x.isalpha()]).lower() transformed_word = phoenem_without_stress else: transformed_word = grapheme return transformed_word",False,len(phoenems) > 0,phoenems,0.6490671634674072 2771,"def turn_on_tracking() -> None: """"""Adds attribute to track where tasks/mixtures were registered."""""" global _PROVIDER_PROVENANCE_LOOKUP if: _PROVIDER_PROVENANCE_LOOKUP = {} _PROVIDER_PROVENANCE_LOOKUP = {}",True,_PROVIDER_PROVENANCE_LOOKUP is None,_PROVIDER_PROVENANCE_LOOKUP is None,0.6525768041610718 2772,"def check_if_error_is_recoverable_and_log(error_context, status_code, error_desc, recoverable_message): if: error_desc = http_error_description(status_code) if status_code and (not is_http_error_recoverable(status_code)): log.error('Error %s (giving up permanently): %s' % (error_context, error_desc)) return False log.warning('Error %s (%s): %s' % (error_context, recoverable_message, error_desc)) return True",False,status_code and error_desc is None,error_desc is None,0.6493393778800964 2773,"def check_if_error_is_recoverable_and_log(error_context, status_code, error_desc, recoverable_message): if status_code and error_desc is None: error_desc = http_error_description(status_code) if: log.error('Error %s (giving up permanently): %s' % (error_context, error_desc)) return False log.warning('Error %s (%s): %s' % (error_context, recoverable_message, error_desc)) return True",False,status_code and (not is_http_error_recoverable(status_code)),recoverable_message is None,0.6433325409889221 2774,"def on_touch_up(self, *args): if: if self.refresh_callback: self.refresh_callback() if not self.refresh_spinner: self.refresh_spinner = RefreshSpinner(_refresh_layout=self) self.root_layout.add_widget(self.refresh_spinner) self.refresh_spinner.start_anim_spinner() self._work_spinnrer = True self._did_overscroll = False return True return super().on_touch_up(*args)",False,self._did_overscroll and (not self._work_spinnrer),self._work_spinnrer,0.6456690430641174 2775,"def on_touch_up(self, *args): if self._did_overscroll and (not self._work_spinnrer): if: self.refresh_callback() if not self.refresh_spinner: self.refresh_spinner = RefreshSpinner(_refresh_layout=self) self.root_layout.add_widget(self.refresh_spinner) self.refresh_spinner.start_anim_spinner() self._work_spinnrer = True self._did_overscroll = False return True return super().on_touch_up(*args)",True,self.refresh_callback,self.refresh_callback,0.6522043347358704 2776,"def on_touch_up(self, *args): if self._did_overscroll and (not self._work_spinnrer): if self.refresh_callback: self.refresh_callback() if: self.refresh_spinner = RefreshSpinner(_refresh_layout=self) self.root_layout.add_widget(self.refresh_spinner) self.refresh_spinner.start_anim_spinner() self._work_spinnrer = True self._did_overscroll = False return True return super().on_touch_up(*args)",False,not self.refresh_spinner,"not isinstance(self.refresh_spinner, RefreshSpinner)",0.6509924530982971 2777,"def configure_custom(self, config): """"""Configure an object with a user-supplied factory."""""" c = config.pop('()') if: c = self.resolve(c) props = config.pop('.', None) kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) result = c(**kwargs) if props: for name, value in props.items(): setattr(result, name, value) return result",True,not callable(c),not callable(c),0.6494231820106506 2778,"def configure_custom(self, config): """"""Configure an object with a user-supplied factory."""""" c = config.pop('()') if not callable(c): c = self.resolve(c) props = config.pop('.', None) kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) result = c(**kwargs) if: for name, value in props.items(): setattr(result, name, value) return result",True,props,props,0.6706264615058899 2779,"def torch2np(data): if: np_data = {} for k, v in data.items(): np_data[k] = v.detach().numpy() return np_data else: return {'output': data.detach().numpy()}",True,"isinstance(data, dict)","isinstance(data, dict)",0.6468664407730103 2780,"def __exit__(self, exc_type, exc_val, exc_tb): if: self.unlock()",False,"isinstance(self.db, PooledMySQLDatabase)",self.lock,0.6429920196533203 2781,"@property def imageFilePath(self): addr = self.m('imageFilePath').obj_offset addr = self._read_ptr(addr) if: return '' buf = self.obj_vm.read(addr, 256) if buf: idx = buf.find('\x00') if idx!= -1: buf = buf[:idx] return buf",False,addr == None,addr == -1,0.6634215116500854 2782,"@property def imageFilePath(self): addr = self.m('imageFilePath').obj_offset addr = self._read_ptr(addr) if addr == None: return '' buf = self.obj_vm.read(addr, 256) if: idx = buf.find('\x00') if idx!= -1: buf = buf[:idx] return buf",False,buf,self.m('imageFilePath'),0.6807945370674133 2783,"@property def imageFilePath(self): addr = self.m('imageFilePath').obj_offset addr = self._read_ptr(addr) if addr == None: return '' buf = self.obj_vm.read(addr, 256) if buf: idx = buf.find('\x00') if: buf = buf[:idx] return buf",True,idx != -1,idx != -1,0.6649454236030579 2784,"@classmethod def poll(cls, context): if: log.error('Must have an active object to export.') return False if context.object.mode!= 'POSE': log.error('Must be in POSE Mode to export skeleton bones') return False try: if len([x for x in context.object.pose.bones if x.bone.select]) == 0: log.error('Must select one or more bones in pose mode to export') return False except: log.error('Must have a selected armature with selected bones.') return False return True",False,not context.object,not context,0.6574079394340515 2785,"@classmethod def poll(cls, context): if not context.object: log.error('Must have an active object to export.') return False if: log.error('Must be in POSE Mode to export skeleton bones') return False try: if len([x for x in context.object.pose.bones if x.bone.select]) == 0: log.error('Must select one or more bones in pose mode to export') return False except: log.error('Must have a selected armature with selected bones.') return False return True",False,context.object.mode != 'POSE',context.object.pose.bones != 'POSE',0.6484944820404053 2786,"@classmethod def poll(cls, context): if not context.object: log.error('Must have an active object to export.') return False if context.object.mode!= 'POSE': log.error('Must be in POSE Mode to export skeleton bones') return False try: if: log.error('Must select one or more bones in pose mode to export') return False except: log.error('Must have a selected armature with selected bones.') return False return True",False,len([x for x in context.object.pose.bones if x.bone.select]) == 0,"context.object.mode not in ('ARMATURE', 'ARMATURE')",0.6464313268661499 2787,"def __eq__(self, other): if: return False return self.ordered_children == other.ordered_children",False,"not isinstance(other, Ambig)","not isinstance(other, self.__class__)",0.6464385986328125 2788,"def accept(self, visitor: ParseTreeVisitor): if: return visitor.visitInterfaceMemberDeclaration3(self) else: return visitor.visitChildren(self)",True,"hasattr(visitor, 'visitInterfaceMemberDeclaration3')","hasattr(visitor, 'visitInterfaceMemberDeclaration3')",0.6444666385650635 2789,"def reset_parameters(self): n = self.in_channels for k in self.kernel_size: n *= k stdv = 1.0 / math.sqrt(n) self.weight.data.uniform_(-stdv, stdv) if: self.bias.data.uniform_(-stdv, stdv)",False,self.bias is not None,self.bias is not N_,0.6499782800674438 2790,"@classmethod def from_exif(cls, file_value): datetime_string, sub_sec_string = file_value if: return cls([]) while datetime_string[-2:] ==' ': datetime_string = datetime_string[:-3] return cls.from_ISO_8601(datetime_string, sub_sec_string=sub_sec_string)",False,not datetime_string,datetime_string == '',0.6556483507156372 2791,"def query_pipeline_index(pipeline_index: Optional[int]=Query(None, title='Pipeline index', ge=0), config: AzimuthConfig=Depends(get_config)) -> Optional[int]: """"""Get and validate the pipeline index from query parameters. Args: pipeline_index: Which pipeline to select in the config. config: App config Returns: Validated pipeline_index. Raises: HTTPException(400) on validation error. """""" if: return pipeline_index elif config.pipelines is None: raise HTTPException(HTTP_400_BAD_REQUEST, detail=f'Current config has no pipeline specified, but pipeline index {pipeline_index} was requested.') elif len(config.pipelines) < pipeline_index: raise HTTPException(HTTP_400_BAD_REQUEST, detail=f'Current config has {len(config.pipelines)} models specified, but pipeline index {pipeline_index} was requested.') return pipeline_index",True,pipeline_index is None,pipeline_index is None,0.6510292291641235 2792,"def query_pipeline_index(pipeline_index: Optional[int]=Query(None, title='Pipeline index', ge=0), config: AzimuthConfig=Depends(get_config)) -> Optional[int]: """"""Get and validate the pipeline index from query parameters. Args: pipeline_index: Which pipeline to select in the config. config: App config Returns: Validated pipeline_index. Raises: HTTPException(400) on validation error. """""" if pipeline_index is None: return pipeline_index elif: raise HTTPException(HTTP_400_BAD_REQUEST, detail=f'Current config has no pipeline specified, but pipeline index {pipeline_index} was requested.') elif len(config.pipelines) < pipeline_index: raise HTTPException(HTTP_400_BAD_REQUEST, detail=f'Current config has {len(config.pipelines)} models specified, but pipeline index {pipeline_index} was requested.') return pipeline_index",False,config.pipelines is None,len(config.pipelines) == 0,0.6467784643173218 2793,"def query_pipeline_index(pipeline_index: Optional[int]=Query(None, title='Pipeline index', ge=0), config: AzimuthConfig=Depends(get_config)) -> Optional[int]: """"""Get and validate the pipeline index from query parameters. Args: pipeline_index: Which pipeline to select in the config. config: App config Returns: Validated pipeline_index. Raises: HTTPException(400) on validation error. """""" if pipeline_index is None: return pipeline_index elif config.pipelines is None: raise HTTPException(HTTP_400_BAD_REQUEST, detail=f'Current config has no pipeline specified, but pipeline index {pipeline_index} was requested.') elif: raise HTTPException(HTTP_400_BAD_REQUEST, detail=f'Current config has {len(config.pipelines)} models specified, but pipeline index {pipeline_index} was requested.') return pipeline_index",False,len(config.pipelines) < pipeline_index,len(config.pipelines) != len(config.pipelines),0.6432350873947144 2794,"def __write_start_wrap(self, name): if: self.__write_obj.write('mi%s\n' % name) self.__write_obj.write('mi: if saver: saver = saver[0] else: saver = None if saver is None and variables.global_variables(): saver = tf_saver.Saver() ops.add_to_collection(ops.GraphKeys.SAVERS, saver) return saver",False,saver is not None,len(saver) > 0,0.6534674167633057 2796,"@deprecated('2017-03-25', 'Please use Estimator.export_savedmodel() instead.') def _get_saver(): """"""Lazy init and return saver."""""" saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS) if saver is not None: if saver: saver = saver[0] else: saver = None if: saver = tf_saver.Saver() ops.add_to_collection(ops.GraphKeys.SAVERS, saver) return saver",False,saver is None and variables.global_variables(),saver,0.6426378488540649 2797,"@deprecated('2017-03-25', 'Please use Estimator.export_savedmodel() instead.') def _get_saver(): """"""Lazy init and return saver."""""" saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS) if saver is not None: if: saver = saver[0] else: saver = None if saver is None and variables.global_variables(): saver = tf_saver.Saver() ops.add_to_collection(ops.GraphKeys.SAVERS, saver) return saver",False,saver,len(saver) == 1,0.6637371182441711 2798,"def getNextVarBinds(self, varBinds, errorIndex=None): errorIndication = None rspVarBinds = [] if: return (errorIndication, rspVarBinds) for idx, varBind in enumerate(varBinds): if varBind[1].tagSet in (rfc1905.NoSuchObject.tagSet, rfc1905.NoSuchInstance.tagSet, rfc1905.EndOfMibView.tagSet): continue rspVarBinds.append((varBind[0], null)) return (errorIndication, rspVarBinds)",False,errorIndex,errorIndex is None,0.663931131362915 2799,"def getNextVarBinds(self, varBinds, errorIndex=None): errorIndication = None rspVarBinds = [] if errorIndex: return (errorIndication, rspVarBinds) for idx, varBind in enumerate(varBinds): if: continue rspVarBinds.append((varBind[0], null)) return (errorIndication, rspVarBinds)",False,"varBind[1].tagSet in (rfc1905.NoSuchObject.tagSet, rfc1905.NoSuchInstance.tagSet, rfc1905.EndOfMibView.tagSet)",idx == errorIndex,0.6500200629234314 2800,"def read_data_model(self, file_name): """""" Return a data model read from a named file """""" if: print('Opening a JSON file') f = open(file_name, 'r') json_model = f.read() data_model = json.loads(json_model) else: print('Opening a Pickle file') f = open(file_name, 'r') pickled_model = f.read() data_model = self.unpickle_data_model(pickled_model) return data_model",False,file_name[-5:].lower() == '.json',PY3,0.6451271772384644 2801,"def __init__(self, tensors: List[torch.Tensor], transforms: List[Optional[Callable[[torch.Tensor], torch.Tensor]]]): super().__init__(*tensors) if: raise ValueError('Must provide a transform (which may be None) for every data tensor.') self.transforms = transforms",False,len(tensors) != len(transforms),transforms is None,0.6479178667068481 2802,"def find_f_curve_for_data_path_and_index(object_or_action, data_path: str, index: int) -> Optional[FCurve]: """"""Finds the first F-curve in the F-curves of the action whose data path and index match the given arguments. Parameters: object_or_action: the object or action data_path: the data path of the F-curve we are looking for index: the index of the F-curve we are looking for Returns: the first such F-curve or `None` if no F-curve controls the given data path and index """""" if: action = get_action_for_object(object_or_action) if not action: return None else: action = object_or_action for curve in action.fcurves: if curve.data_path == data_path and curve.array_index == index: return curve return None",False,"not isinstance(object_or_action, Action)","isinstance(object_or_action, FcurveAction)",0.6507428884506226 2803,"def find_f_curve_for_data_path_and_index(object_or_action, data_path: str, index: int) -> Optional[FCurve]: """"""Finds the first F-curve in the F-curves of the action whose data path and index match the given arguments. Parameters: object_or_action: the object or action data_path: the data path of the F-curve we are looking for index: the index of the F-curve we are looking for Returns: the first such F-curve or `None` if no F-curve controls the given data path and index """""" if not isinstance(object_or_action, Action): action = get_action_for_object(object_or_action) if: return None else: action = object_or_action for curve in action.fcurves: if curve.data_path == data_path and curve.array_index == index: return curve return None",False,not action,action is None,0.6695296764373779 2804,"def find_f_curve_for_data_path_and_index(object_or_action, data_path: str, index: int) -> Optional[FCurve]: """"""Finds the first F-curve in the F-curves of the action whose data path and index match the given arguments. Parameters: object_or_action: the object or action data_path: the data path of the F-curve we are looking for index: the index of the F-curve we are looking for Returns: the first such F-curve or `None` if no F-curve controls the given data path and index """""" if not isinstance(object_or_action, Action): action = get_action_for_object(object_or_action) if not action: return None else: action = object_or_action for curve in action.fcurves: if: return curve return None",False,curve.data_path == data_path and curve.array_index == index,curve.data_path == data_path and (not curve.data_path == data_path) and (index == curve.index),0.6469627618789673 2805,"def transform(self, transformation, size): width = size[0] * 16 height = size[1] * 16 if: self.Xpos = width - self.Xpos - self.width",False,transformation == Transform.Mirror,transformation.get_x() != None and width != self.Xpos or height != self.width,0.6520874500274658 2806,"def forward_embedding(self, txt_tokens): x = self.embed_scale * self.embed_tokens(txt_tokens) if: positions = self.embed_positions(txt_tokens) x = x + positions x = F.dropout(x, p=self.dropout, training=self.training) return x",False,hparams['use_pos_embed'],self.embed_positions is not None,0.6463130712509155 2807,"def recv_getSimpleChannelContacts(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getSimpleChannelContacts_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'getSimpleChannelContacts failed: unknown result')",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.6533805727958679 2808,"def recv_getSimpleChannelContacts(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getSimpleChannelContacts_result() result.read(iprot) iprot.readMessageEnd() if: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'getSimpleChannelContacts failed: unknown result')",True,result.success is not None,result.success is not None,0.6496027708053589 2809,"def recv_getSimpleChannelContacts(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getSimpleChannelContacts_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'getSimpleChannelContacts failed: unknown result')",True,result.e is not None,result.e is not None,0.650999903678894 2810,"def safe_arg_scope(funcs, **kwargs): """"""Returns `slim.arg_scope` with all None arguments removed. Arguments: funcs: Functions to pass to `arg_scope`. **kwargs: Arguments to pass to `arg_scope`. Returns: arg_scope or No-op context manager. Note: can be useful if None value should be interpreted as ""do not overwrite this parameter value"". """""" filtered_args = {name: value for name, value in kwargs.items() if value is not None} if: return slim.arg_scope(funcs, **filtered_args) else: return NoOpScope()",True,filtered_args,filtered_args,0.6565251350402832 2811,"def __init__(self, points_to_min_max: Optional[Dict[IntensityPoint, Tuple[float, float]]]=None): if: points_to_min_max = dict() self.points = points_to_min_max",True,points_to_min_max is None,points_to_min_max is None,0.6563054323196411 2812,"@property def saturday(self): if: raise IntervalError('Scheduling.saturday() jobs is only allowed for weekly jobs. Using.saturday() on a job scheduled to run every 2 or more weeks is not supported.') self.start_day ='saturday' return self.weeks",True,self.interval != 1,self.interval != 1,0.6600439548492432 2813,"def latest_checkpoint(model_dir, model_name): """"""return path of latest checkpoint in a model_dir Args: model_dir: string, indicate your model dir(save ckpts, summarys, logs, etc). model_name: name of your model. we find ckpts by name Returns: path: None if isn't exist or latest checkpoint path. """""" ckpt_info_path = Path(model_dir) / 'checkpoints.json' if: return None with open(ckpt_info_path, 'r') as f: ckpt_dict = json.loads(f.read()) if model_name not in ckpt_dict['latest_ckpt']: return None latest_ckpt = ckpt_dict['latest_ckpt'][model_name] ckpt_file_name = Path(model_dir) / latest_ckpt if not ckpt_file_name.is_file(): return None return str(ckpt_file_name)",True,not ckpt_info_path.is_file(),not ckpt_info_path.is_file(),0.6462277770042419 2814,"def latest_checkpoint(model_dir, model_name): """"""return path of latest checkpoint in a model_dir Args: model_dir: string, indicate your model dir(save ckpts, summarys, logs, etc). model_name: name of your model. we find ckpts by name Returns: path: None if isn't exist or latest checkpoint path. """""" ckpt_info_path = Path(model_dir) / 'checkpoints.json' if not ckpt_info_path.is_file(): return None with open(ckpt_info_path, 'r') as f: ckpt_dict = json.loads(f.read()) if: return None latest_ckpt = ckpt_dict['latest_ckpt'][model_name] ckpt_file_name = Path(model_dir) / latest_ckpt if not ckpt_file_name.is_file(): return None return str(ckpt_file_name)",False,model_name not in ckpt_dict['latest_ckpt'],model_name not in ckpt_dict,0.646318256855011 2815,"def latest_checkpoint(model_dir, model_name): """"""return path of latest checkpoint in a model_dir Args: model_dir: string, indicate your model dir(save ckpts, summarys, logs, etc). model_name: name of your model. we find ckpts by name Returns: path: None if isn't exist or latest checkpoint path. """""" ckpt_info_path = Path(model_dir) / 'checkpoints.json' if not ckpt_info_path.is_file(): return None with open(ckpt_info_path, 'r') as f: ckpt_dict = json.loads(f.read()) if model_name not in ckpt_dict['latest_ckpt']: return None latest_ckpt = ckpt_dict['latest_ckpt'][model_name] ckpt_file_name = Path(model_dir) / latest_ckpt if: return None return str(ckpt_file_name)",True,not ckpt_file_name.is_file(),not ckpt_file_name.is_file(),0.6469262838363647 2816,"def update_attr(module, name, val): """"""Update module attribute."""""" if: return if name not in module._attrs_ori: return module._attrs_ori[name] = val",False,"not hasattr(module, '_attrs_ori')","isinstance(module, Base)",0.6470276117324829 2817,"def update_attr(module, name, val): """"""Update module attribute."""""" if not hasattr(module, '_attrs_ori'): return if: return module._attrs_ori[name] = val",False,name not in module._attrs_ori,"name in ['norm', 'norm_type']",0.6503814458847046 2818,"def produce_fake_losses(self, densepose_predictor_outputs: Any, embedder: nn.Module) -> LossDict: meshname_to_embed_losses = self.embed_loss.fake_values(densepose_predictor_outputs, embedder=embedder) embed_loss_dict = {f'loss_densepose_E{mesh_name}': meshname_to_embed_losses[mesh_name] for mesh_name in meshname_to_embed_losses} all_loss_dict = {'loss_densepose_S': self.segm_loss.fake_value(densepose_predictor_outputs), **embed_loss_dict} if: all_loss_dict['loss_shape2shape'] = self.shape2shape_loss.fake_value(embedder) if self.do_pix2shape: all_loss_dict['loss_pix2shape'] = self.pix2shape_loss.fake_value(densepose_predictor_outputs, embedder) return all_loss_dict",True,self.do_shape2shape,self.do_shape2shape,0.649524986743927 2819,"def produce_fake_losses(self, densepose_predictor_outputs: Any, embedder: nn.Module) -> LossDict: meshname_to_embed_losses = self.embed_loss.fake_values(densepose_predictor_outputs, embedder=embedder) embed_loss_dict = {f'loss_densepose_E{mesh_name}': meshname_to_embed_losses[mesh_name] for mesh_name in meshname_to_embed_losses} all_loss_dict = {'loss_densepose_S': self.segm_loss.fake_value(densepose_predictor_outputs), **embed_loss_dict} if self.do_shape2shape: all_loss_dict['loss_shape2shape'] = self.shape2shape_loss.fake_value(embedder) if: all_loss_dict['loss_pix2shape'] = self.pix2shape_loss.fake_value(densepose_predictor_outputs, embedder) return all_loss_dict",True,self.do_pix2shape,self.do_pix2shape,0.6504276990890503 2820,"def throw_by_ir_beacon(self): if: self.catapult_motor.run_to_rel_pos(speed_sp=1000, position_sp=-150, stop_action=Motor.STOP_ACTION_HOLD) self.catapult_motor.wait_while(Motor.STATE_RUNNING) self.catapult_motor.run_to_rel_pos(speed_sp=1000, position_sp=150, stop_action=Motor.STOP_ACTION_HOLD) self.catapult_motor.wait_while(Motor.STATE_RUNNING) while self.beacon.beacon: pass",True,self.beacon.beacon,self.beacon.beacon,0.6469663381576538 2821,"def __init__(self, default_cluster: str, default_org: Optional[str], default_project: str, registry_urls: Dict[str, URL]): self._default_cluster = default_cluster self._default_org_name = default_org self._default_project_name = default_project self._registries = {} for cluster_name, registry_url in registry_urls.items(): if: raise ValueError(f""Empty hostname in registry URL '{registry_url}': please consider updating configuration"") self._registries[cluster_name] = _get_url_authority(registry_url)",False,not registry_url.host,not registry_url,0.6479030251502991 2822,"def _strip_flavors_from_stem(stem: str, flavors: Iterable[str]) -> str: flavors_set = set((flavor.lstrip('-') for flavor in flavors if flavor.startswith('-'))) stem_parts = stem.split('-') while len(stem_parts) > 1: if: stem_parts.pop() else: break return '-'.join(stem_parts)",False,stem_parts[-1] in flavors_set,"flavors_set.intersection(stem_parts[0]) in ['-', ']",0.6450999975204468 2823,"def to_tensor(self, dtype, device): """"""See :func:`BaseInstanceMasks.to_tensor`."""""" if: return torch.empty((0, self.height, self.width), dtype=dtype, device=device) ndarray_masks = self.to_ndarray() return torch.tensor(ndarray_masks, dtype=dtype, device=device)",True,len(self.masks) == 0,len(self.masks) == 0,0.6471322178840637 2824,"def final_eval(self): """"""Evaluate trained model and save validation results."""""" for f in (self.last, self.best): if: strip_optimizer(f) LOGGER.info(f""Results saved to {colorstr('bold', self.save_dir)}"")",True,f.exists(),f.exists(),0.6563878059387207 2825,"@property def special_tokens_map(self) -> Dict[str, Union[str, List[str]]]: """""" :obj:`Dict[str, Union[str, List[str]]]`: A dictionary mapping special token class attributes (:obj:`cls_token`, :obj:`unk_token`, etc.) to their values (:obj:`''`, :obj:`''`, etc.). Convert potential tokens of :obj:`tokenizers.AddedToken` type to string. """""" set_attr = {} for attr in self.SPECIAL_TOKENS_ATTRIBUTES: attr_value = getattr(self, '_' + attr) if: set_attr[attr] = str(attr_value) return set_attr",True,attr_value,attr_value,0.6590654850006104 2826,"def test_Float_divmod(): zero = 0.0 try: divmod(1, zero) except ZeroDivisionError as e: message = str(e) friendly.explain_traceback(redirect='capture') result = friendly.get_output() assert 'ZeroDivisionError: float divmod()' in result if: assert 'The second argument of the `divmod()`' in result return (result, message)",True,friendly.get_lang() == 'en',friendly.get_lang() == 'en',0.647597074508667 2827,"def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): r = self._createComponent(asn1Spec, tagSet, '') if: return substrateFun(r, substrate, length) while substrate: component, substrate = decodeFun(substrate, self.protoComponent, allowEoo=True) if eoo.endOfOctets.isSameTypeWith(component) and component == eoo.endOfOctets: break r = r + component else: raise error.SubstrateUnderrunError('No EOO seen before substrate ends') return (r, substrate)",True,substrateFun,substrateFun,0.6480517983436584 2828,"def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): r = self._createComponent(asn1Spec, tagSet, '') if substrateFun: return substrateFun(r, substrate, length) while substrate: component, substrate = decodeFun(substrate, self.protoComponent, allowEoo=True) if: break r = r + component else: raise error.SubstrateUnderrunError('No EOO seen before substrate ends') return (r, substrate)",True,eoo.endOfOctets.isSameTypeWith(component) and component == eoo.endOfOctets,eoo.endOfOctets.isSameTypeWith(component) and component == eoo.endOfOctets,0.6416057348251343 2829,"def initPoss(self, poss=None, nDim=None, lvec=None, pixPerAngstrome=10): if: self.initSampling(lvec, pixPerAngstrome=10, nDim=None) self.prepareBuffers(poss=poss)",False,poss is None,lvec is not None,0.65427565574646 2830,"def IDAPython_UnLoadProcMod(script, g): """""" Unload processor module. """""" pname = g['__name__'] if g and g.has_key('__name__') else '__main__' parent = sys.modules[pname] scriptname = os.path.split(script)[1] procmod_name = os.path.splitext(scriptname)[0] if: delattr(parent, procmod_name) del sys.modules[procmod_name] PY_COMPILE_ERR = None return PY_COMPILE_ERR",False,"getattr(parent, procmod_name, None)",parent and procmod_name,0.6466964483261108 2831,"def getMailboxDatebase(request): try: result = getmailboxdatabasenovalue() new_result = {'isSuccess': result['isSuccess'],'message': []} if: for i in result['message']: new_result['message'].append(ast.literal_eval(i)) response = HttpResponse() response['Content-Type'] = 'text/javascript' response.write(json.dumps(new_result)) return response except Exception as e: print(e) return render_to_response('ad/directorytree.html', locals())",False,result['isSuccess'],'message' in result,0.6556758284568787 2832,"def load_SCFP_potential(self, locator, building_codes): self.type = 'SolarCollectorFP' self.scale = 'Building' scfp_potential_files = np.vectorize(locator.SC_results)(building_codes, 'FP') potentials = self._get_building_potentials(scfp_potential_files, building_codes, 'Q_SC_gen_kWh', 'T_SC_re_C') if: main_energy_carrier = EnergyCarrier.temp_to_thermal_ec('water', potentials['average_temp']) self.main_potential.generate('source','secondary', main_energy_carrier, potentials['main_profile']) self.main_building_profiles = potentials['main_building_profiles'] return self else: return None",False,potentials,potentials['main_profile'] is not None,0.6685618162155151 2833,"def display(self, task): """""" Either display a single frame (BGR image) to a window or write to an output file if output path is provided. Args: task (TaskInfo object): task object that contain the necessary information for prediction visualization. (e.g. visualized frames.) """""" for frame in task.frames[task.num_buffer_frames:]: if: cv2.imshow('SlowFast', frame) time.sleep(1 / self.output_fps) else: self.output_file.write(frame)",True,self.output_file is None,self.output_file is None,0.6448152661323547 2834,"def __init__(self, mu=0.02, bins=10, momentum=0, loss_weight=1.0): super(GHMR, self).__init__() self.mu = mu self.bins = bins edges = torch.arange(bins + 1).float() / bins self.register_buffer('edges', edges) self.edges[-1] = 1000.0 self.momentum = momentum if: acc_sum = torch.zeros(bins) self.register_buffer('acc_sum', acc_sum) self.loss_weight = loss_weight",True,momentum > 0,momentum > 0,0.6645728945732117 2835,"def hRpcOpenPrinterEx(dce, printerName, pDatatype=NULL, pDevModeContainer=NULL, accessRequired=SERVER_READ, pClientInfo=NULL): """""" RpcOpenPrinterEx retrieves a handle for a printer, port, port monitor, print job, or print server Full Documentation: https://msdn.microsoft.com/en-us/library/cc244809.aspx :param DCERPC_v5 dce: a connected DCE instance. :param string printerName: A string for a printer connection, printer object, server object, job object, port object, or port monitor object. This MUST be a Domain Name System (DNS), NetBIOS, Internet Protocol version 4 (IPv4), Internet Protocol version 6 (IPv6), or Universal Naming Convention (UNC) name that remote procedure call (RPC) binds to, and it MUST uniquely identify a print server on the network. :param string pDatatype: A string that specifies the data type to be associated with the printer handle. :param DEVMODE_CONTAINER pDevModeContainer: A DEVMODE_CONTAINER structure. This parameter MUST adhere to the specification in DEVMODE_CONTAINER Parameters (section 3.1.4.1.8.1). :param int accessRequired: The access level that the client requires for interacting with the object to which a handle is being opened. :param SPLCLIENT_CONTAINER pClientInfo: This parameter MUST adhere to the specification in SPLCLIENT_CONTAINER Parameters. :return: a RpcOpenPrinterExResponse instance, raises DCERPCSessionError on error. """""" request = RpcOpenPrinterEx() request['pPrinterName'] = checkNullString(printerName) request['pDatatype'] = pDatatype if: request['pDevModeContainer']['pDevMode'] = NULL else: request['pDevModeContainer'] = pDevModeContainer request['AccessRequired'] = accessRequired if pClientInfo is NULL: raise Exception('pClientInfo cannot be NULL') request['pClientInfo'] = pClientInfo return dce.request(request)",True,pDevModeContainer is NULL,pDevModeContainer is NULL,0.6528207063674927 2836,"def hRpcOpenPrinterEx(dce, printerName, pDatatype=NULL, pDevModeContainer=NULL, accessRequired=SERVER_READ, pClientInfo=NULL): """""" RpcOpenPrinterEx retrieves a handle for a printer, port, port monitor, print job, or print server Full Documentation: https://msdn.microsoft.com/en-us/library/cc244809.aspx :param DCERPC_v5 dce: a connected DCE instance. :param string printerName: A string for a printer connection, printer object, server object, job object, port object, or port monitor object. This MUST be a Domain Name System (DNS), NetBIOS, Internet Protocol version 4 (IPv4), Internet Protocol version 6 (IPv6), or Universal Naming Convention (UNC) name that remote procedure call (RPC) binds to, and it MUST uniquely identify a print server on the network. :param string pDatatype: A string that specifies the data type to be associated with the printer handle. :param DEVMODE_CONTAINER pDevModeContainer: A DEVMODE_CONTAINER structure. This parameter MUST adhere to the specification in DEVMODE_CONTAINER Parameters (section 3.1.4.1.8.1). :param int accessRequired: The access level that the client requires for interacting with the object to which a handle is being opened. :param SPLCLIENT_CONTAINER pClientInfo: This parameter MUST adhere to the specification in SPLCLIENT_CONTAINER Parameters. :return: a RpcOpenPrinterExResponse instance, raises DCERPCSessionError on error. """""" request = RpcOpenPrinterEx() request['pPrinterName'] = checkNullString(printerName) request['pDatatype'] = pDatatype if pDevModeContainer is NULL: request['pDevModeContainer']['pDevMode'] = NULL else: request['pDevModeContainer'] = pDevModeContainer request['AccessRequired'] = accessRequired if: raise Exception('pClientInfo cannot be NULL') request['pClientInfo'] = pClientInfo return dce.request(request)",True,pClientInfo is NULL,pClientInfo is NULL,0.6525269746780396 2837,"def populate_column(self, colname, is_checked): item = QListWidgetItem(self.gui.library_view.model().headers[colname], self) item.setData(Qt.UserRole, colname) flags = Qt.ItemIsEnabled | Qt.ItemIsSelectable if: flags |= Qt.ItemIsUserCheckable item.setFlags(flags) if colname!= 'ondevice': item.setCheckState(Qt.Checked if is_checked else Qt.Unchecked)",False,colname != 'ondevice',is_checked,0.6556457877159119 2838,"def populate_column(self, colname, is_checked): item = QListWidgetItem(self.gui.library_view.model().headers[colname], self) item.setData(Qt.UserRole, colname) flags = Qt.ItemIsEnabled | Qt.ItemIsSelectable if colname!= 'ondevice': flags |= Qt.ItemIsUserCheckable item.setFlags(flags) if: item.setCheckState(Qt.Checked if is_checked else Qt.Unchecked)",False,colname != 'ondevice',is_checked,0.6555706858634949 2839,"def parse_docstring(doc: str, markup: str, processtypes: bool=False) -> ParsedDocstring: parse = get_parser_by_name(markup) if: if markup in ('google', 'numpy'): raise AssertionError(""don't process types twice."") parse = pydoctor.epydoc.markup.processtypes(parse) errors: List[ParseError] = [] parsed = parse(doc, errors) assert not errors, [f'{e.linenum()}:{e.descr()}' for e in errors] return parsed",False,processtypes,parse is not None and processtypes,0.6574243903160095 2840,"def parse_docstring(doc: str, markup: str, processtypes: bool=False) -> ParsedDocstring: parse = get_parser_by_name(markup) if processtypes: if: raise AssertionError(""don't process types twice."") parse = pydoctor.epydoc.markup.processtypes(parse) errors: List[ParseError] = [] parsed = parse(doc, errors) assert not errors, [f'{e.linenum()}:{e.descr()}' for e in errors] return parsed",False,"markup in ('google', 'numpy')",parse is not None,0.6483191251754761 2841,"def gqa_init(self): imgfeat_linear_size = utils.MCAN_GQA_PARAMS['FRCN_FEAT_SIZE'][1] if: self.bbox_linear = nn.Linear(5, utils.MCAN_GQA_PARAMS['BBOXFEAT_EMB_SIZE']) imgfeat_linear_size += utils.MCAN_GQA_PARAMS['BBOXFEAT_EMB_SIZE'] self.frcn_linear = nn.Linear(imgfeat_linear_size, utils.MCAN_GQA_PARAMS['HIDDEN_SIZE']) if utils.MCAN_GQA_PARAMS['USE_AUX_FEAT']: self.grid_linear = nn.Linear(utils.MCAN_GQA_PARAMS['GRID_FEAT_SIZE'][1], utils.MCAN_GQA_PARAMS['HIDDEN_SIZE'])",True,utils.MCAN_GQA_PARAMS['USE_BBOX_FEAT'],utils.MCAN_GQA_PARAMS['USE_BBOX_FEAT'],0.649954080581665 2842,"def gqa_init(self): imgfeat_linear_size = utils.MCAN_GQA_PARAMS['FRCN_FEAT_SIZE'][1] if utils.MCAN_GQA_PARAMS['USE_BBOX_FEAT']: self.bbox_linear = nn.Linear(5, utils.MCAN_GQA_PARAMS['BBOXFEAT_EMB_SIZE']) imgfeat_linear_size += utils.MCAN_GQA_PARAMS['BBOXFEAT_EMB_SIZE'] self.frcn_linear = nn.Linear(imgfeat_linear_size, utils.MCAN_GQA_PARAMS['HIDDEN_SIZE']) if: self.grid_linear = nn.Linear(utils.MCAN_GQA_PARAMS['GRID_FEAT_SIZE'][1], utils.MCAN_GQA_PARAMS['HIDDEN_SIZE'])",False,utils.MCAN_GQA_PARAMS['USE_AUX_FEAT'],utils.MCAN_GQA_PARAMS['USE_GRID_FEAT'],0.6483941078186035 2843,"def format_path_status(self, path): if: return 'File does not exist' if not os.path.isfile(path): return 'Not a file' if not os.access(path, os.R_OK): return 'File is not readable' return 'File exists'",False,not os.path.exists(path),not path,0.6469056606292725 2844,"def format_path_status(self, path): if not os.path.exists(path): return 'File does not exist' if: return 'Not a file' if not os.access(path, os.R_OK): return 'File is not readable' return 'File exists'",True,not os.path.isfile(path),not os.path.isfile(path),0.6442776322364807 2845,"def format_path_status(self, path): if not os.path.exists(path): return 'File does not exist' if not os.path.isfile(path): return 'Not a file' if: return 'File is not readable' return 'File exists'",False,"not os.access(path, os.R_OK)",os.stat(path).st_mode & 63 != 0,0.6452008485794067 2846,"def get(self): if: return (self.feats, self.targets, self.ptr) else: return (self.feats[:self.ptr], self.targets[:self.ptr], self.ptr)",False,self.full_flag,self.ptr == 0,0.6453484892845154 2847,"def check_value(self): if: return True else: return self.peek(1) in '\x00 \t\r\n\x85\u2028\u2029'",False,self.flow_level,self.peek() == 'u2028\u2029',0.6500926613807678 2848,"def _ext_crldistripoints(self, extension, extensions): crl_points = [] for i, point in enumerate(extension.value): if: continue for full_name in point.full_name: crl_points.append(full_name.value) extensions[extension.oid._name] = crl_points",False,not point.full_name,point.oid._name in extensions,0.649628758430481 2849,"def init_weights(self, pretrained=None): """"""Weight initialization for model."""""" self.backbone.init_weights(pretrained) self.mesh_head.init_weights() if: self.discriminator.init_weights()",False,self.with_gan,self.discriminator,0.6473178863525391 2850,"def image_transform_perspective(image_cv2, M, depth=None): if: return cv2.warpPerspective(image_cv2, M, (image_cv2.shape[1], image_cv2.shape[0]), borderMode=cv2.BORDER_REFLECT_101) else: return render_3d_perspective(image_cv2, depth, M)",True,depth is None,depth is None,0.6565160751342773 2851,"def clear(self, update=True): """"""Clears the display memory"""""" self.__buffer = [0, 0, 0, 0, 0, 0, 0, 0] if: self.writeDisplay()",True,update,update,0.6734596490859985 2852,"def deserialize_path(path): if: return None p = tuple(chain(*[(mapperutil.class_mapper(cls), key) for cls, key in path])) if p and p[-1] is None: p = p[0:-1] return p",True,path is None,path is None,0.657701313495636 2853,"def deserialize_path(path): if path is None: return None p = tuple(chain(*[(mapperutil.class_mapper(cls), key) for cls, key in path])) if: p = p[0:-1] return p",False,p and p[-1] is None,len(p) > 1,0.6535248160362244 2854,"def getboolean(self, section, prop, default=None): """""" Gets the value of a field as a boolean. If default is None, then this will throw a ConfigMissingSectionError if the section does not exist and a ConfigMissingPropertyError if the property does not exist in the section. Args: section - The section of the settings file to look in. prop - The property of the section to look for. Return: The value of the given property in the given section as a boolean, or default if the section or property is undefined and default is not None. """""" try: return self.configparser.getboolean(section, prop) except NoSectionError: if: raise ConfigMissingSectionError(self.file_path, section, prop) from None else: return default except NoOptionError: if default is None: raise ConfigMissingPropertyError(self.file_path, section, prop) from None else: return default",True,default is None,default is None,0.6551001071929932 2855,"def getboolean(self, section, prop, default=None): """""" Gets the value of a field as a boolean. If default is None, then this will throw a ConfigMissingSectionError if the section does not exist and a ConfigMissingPropertyError if the property does not exist in the section. Args: section - The section of the settings file to look in. prop - The property of the section to look for. Return: The value of the given property in the given section as a boolean, or default if the section or property is undefined and default is not None. """""" try: return self.configparser.getboolean(section, prop) except NoSectionError: if default is None: raise ConfigMissingSectionError(self.file_path, section, prop) from None else: return default except NoOptionError: if: raise ConfigMissingPropertyError(self.file_path, section, prop) from None else: return default",True,default is None,default is None,0.6540915966033936 2856,"def read(self, sz): ret = self.__rbuf.read(sz) if: return ret self.__rbuf = BufferIO(self.__trans.read(max(sz, self.__rbuf_size))) return self.__rbuf.read(sz)",False,len(ret) != 0,ret != 0,0.650590717792511 2857,"def connection_from_host(self, host, port=None, scheme='http'): """""" Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. """""" if: raise LocationValueError('No host specified.') request_context = self.connection_pool_kw.copy() request_context['scheme'] = scheme or 'http' if not port: port = port_by_scheme.get(request_context['scheme'].lower(), 80) request_context['port'] = port request_context['host'] = host return self.connection_from_context(request_context)",True,not host,not host,0.6686007380485535 2858,"def connection_from_host(self, host, port=None, scheme='http'): """""" Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. """""" if not host: raise LocationValueError('No host specified.') request_context = self.connection_pool_kw.copy() request_context['scheme'] = scheme or 'http' if: port = port_by_scheme.get(request_context['scheme'].lower(), 80) request_context['port'] = port request_context['host'] = host return self.connection_from_context(request_context)",False,not port,port is None,0.6639223098754883 2859,"def _payment_values(self, amount): values = {'payment_reference': self.number, 'communication': self.number +'-'+ self.name, 'journal_id': self.move_id.journal_id.payroll_payment_journal_id.id, 'payment_method_id': self.move_id.journal_id.payroll_payment_method_id.id, 'partner_type':'supplier', 'partner_id': self.employee_id.address_home_id.id, 'payment_type': 'outbound', 'amount': -amount} if: values.update({'payment_type': 'inbound', 'amount': amount, 'payment_method_id': self.move_id.journal_id.payroll_payment_method_refund_id.id}) return values",False,amount > 0.0,self.move_id.journal_id.payroll_payment_method_refund_id.id,0.6560810804367065 2860,"def run(x, bb): if: x = bb(x, emb) elif isinstance(bb, SpatialTransformer): x = bb(x, context) else: x = bb(x) return x",False,"isinstance(bb, ResBlock)","isinstance(bb, TimestepBlock)",0.6451005935668945 2861,"def run(x, bb): if isinstance(bb, ResBlock): x = bb(x, emb) elif: x = bb(x, context) else: x = bb(x) return x",False,"isinstance(bb, SpatialTransformer)","isinstance(bb, Context)",0.6470016837120056 2862,"def suffix_reserved_words(func): """""" Given a function that is called with a reseved word, rewrite the keyword with an underscore suffix. """""" @partial_safe_wraps(func) def inner(*args, **kwargs): for word in RESERVED_WORDS: if: key = '{0}_'.format(word) kwargs[key] = kwargs.pop(word) return func(*args, **kwargs) return inner",False,word in kwargs,word.isupper(),0.6578152179718018 2863,"def annotations(self, tag, issue, issue_obj): url = issue['html_url'] annotations = [] if: comments = self._comments(tag, issue['number']) log.debug(' got comments for %s', issue['html_url']) annotations = ((c['user']['login'], c['body']) for c in comments) return self.build_annotations(annotations, issue_obj.get_processed_url(url))",False,self.main_config.annotation_comments,self.comments,0.646681547164917 2864,"def __init__(self, data=None, **kwargs): self._store = dict() if: data = {} self.update(data, **kwargs)",True,data is None,data is None,0.6581403613090515 2865,"def _map_path(self, filepath: Union[str, Path]) -> str: """"""Map ``filepath`` to a string path whose prefix will be replaced by :attr:`self.path_mapping`. Args: filepath (str or Path): Path to be mapped. """""" filepath = str(filepath) if: for k, v in self.path_mapping.items(): filepath = filepath.replace(k, v, 1) return filepath",True,self.path_mapping is not None,self.path_mapping is not None,0.6488661170005798 2866,"def get_extension_string(self): """""" Returns the WebSocket extension configuration string as sent to the server. :returns: PMCE configuration string. :rtype: str """""" pmce_string = self.EXTENSION_NAME if: pmce_string += '; client_no_context_takeover' if self.request_no_context_takeover: pmce_string += '; server_no_context_takeover' return pmce_string",False,self.accept_no_context_takeover,self.client_no_context_takeover,0.6462266445159912 2867,"def get_extension_string(self): """""" Returns the WebSocket extension configuration string as sent to the server. :returns: PMCE configuration string. :rtype: str """""" pmce_string = self.EXTENSION_NAME if self.accept_no_context_takeover: pmce_string += '; client_no_context_takeover' if: pmce_string += '; server_no_context_takeover' return pmce_string",False,self.request_no_context_takeover,self.server_no_context_takeover,0.6469260454177856 2868,"def callback(flag): if: text = pipe.reader.readline() if text =='super beans\n': event.set() else: self.assertEqual(flag, select.POLLHUP)",False,flag == select.POLLIN,flag == select.POLLHUP,0.6503331065177917 2869,"def callback(flag): if flag == select.POLLIN: text = pipe.reader.readline() if: event.set() else: self.assertEqual(flag, select.POLLHUP)",False,text == 'super beans\n',len(text) == 1,0.6510034799575806 2870,"def truncate_paragraphs(paragraphs, max_length): truncated_paragraphs = [] current_length = 0 for paragraph in paragraphs: if: continue paragraph = paragraph.strip() if current_length + len(paragraph) <= max_length: truncated_paragraphs.append(paragraph) current_length += len(paragraph) else: remaining_length = max_length - current_length truncated_paragraph = paragraph[:remaining_length] truncated_paragraphs.append(truncated_paragraph) break return truncated_paragraphs",False,len(paragraph) == 0,not paragraph,0.6497855186462402 2871,"def truncate_paragraphs(paragraphs, max_length): truncated_paragraphs = [] current_length = 0 for paragraph in paragraphs: if len(paragraph) == 0: continue paragraph = paragraph.strip() if: truncated_paragraphs.append(paragraph) current_length += len(paragraph) else: remaining_length = max_length - current_length truncated_paragraph = paragraph[:remaining_length] truncated_paragraphs.append(truncated_paragraph) break return truncated_paragraphs",False,current_length + len(paragraph) <= max_length,max_length <= 0,0.6448981761932373 2872,"def _get_stream(device): """"""Gets a background stream for copying between CPU and GPU"""""" global _streams if: return None if _streams is None: _streams = [None] * torch.cuda.device_count() if _streams[device] is None: _streams[device] = torch.cuda.Stream(device) return _streams[device]",True,device == -1,device == -1,0.6725316047668457 2873,"def _get_stream(device): """"""Gets a background stream for copying between CPU and GPU"""""" global _streams if device == -1: return None if: _streams = [None] * torch.cuda.device_count() if _streams[device] is None: _streams[device] = torch.cuda.Stream(device) return _streams[device]",True,_streams is None,_streams is None,0.6621301174163818 2874,"def _get_stream(device): """"""Gets a background stream for copying between CPU and GPU"""""" global _streams if device == -1: return None if _streams is None: _streams = [None] * torch.cuda.device_count() if: _streams[device] = torch.cuda.Stream(device) return _streams[device]",True,_streams[device] is None,_streams[device] is None,0.6521532535552979 2875,"def dParam(self, taskname='', cl=1): """"""Dump the task parameters in executable form Default is to write CL version of code; if cl parameter is false, writes Python executable code instead. """""" if: taskname = taskname + '.' for i in range(len(self.__pars)): p = self.__pars[i] if p.name!= '$nargs': print(f'{taskname}{p.dpar(cl=cl)}') if cl: print('# EOF')",False,taskname and taskname[-1:] != '.',not taskname.endswith('.'),0.656425952911377 2876,"def dParam(self, taskname='', cl=1): """"""Dump the task parameters in executable form Default is to write CL version of code; if cl parameter is false, writes Python executable code instead. """""" if taskname and taskname[-1:]!= '.': taskname = taskname + '.' for i in range(len(self.__pars)): p = self.__pars[i] if p.name!= '$nargs': print(f'{taskname}{p.dpar(cl=cl)}') if: print('# EOF')",False,cl,not cl,0.6888899803161621 2877,"def dParam(self, taskname='', cl=1): """"""Dump the task parameters in executable form Default is to write CL version of code; if cl parameter is false, writes Python executable code instead. """""" if taskname and taskname[-1:]!= '.': taskname = taskname + '.' for i in range(len(self.__pars)): p = self.__pars[i] if: print(f'{taskname}{p.dpar(cl=cl)}') if cl: print('# EOF')",False,p.name != '$nargs',p.set_dir(),0.6520150899887085 2878,"def forward_features(self, x): x_size = (x.shape[2], x.shape[3]) x = self.patch_embed(x) if: x = x + self.absolute_pos_embed x = self.pos_drop(x) for layer in self.layers: x = layer(x, x_size) x = self.norm(x) x = self.patch_unembed(x, x_size) return x",True,self.ape,self.ape,0.6509541273117065 2879,"def __iter__(self): batches = list(self._sampler) part_batches = batches[self._start:self._end] if: candidates = self._rng.choice(len(batches), size=self._part_len - len(part_batches)) for idx in candidates: part_batches.append(batches[idx]) logging.info('In ShardedIterator. Constructed part batches!') for batch in part_batches: yield batch",False,self._even_size and len(part_batches) < self._part_len,self._part_len > 0,0.6438008546829224 2880,"def get_implied(self, permission): permission_key = self.__get_key(permission.content_type.app_label, permission.content_type.model) implied_permissions = self.__implied_permissions.get(permission_key) if: return implied_permissions.get(permission.codename, []) return []",True,implied_permissions,implied_permissions,0.6537160873413086 2881,"def capybara(x): if: y = True else: y = False if y is True: assert_is_value(y, KnownValue(True)) y = bool(x) assert_is_value(y, TypedValue(bool), skip_annotated=True)",False,x,x is None,0.672295093536377 2882,"def capybara(x): if x: y = True else: y = False if: assert_is_value(y, KnownValue(True)) y = bool(x) assert_is_value(y, TypedValue(bool), skip_annotated=True)",False,y is True,y,0.6595953702926636 2883,"def _update_current_domain(self, sys_action, goal: Goal): for diaact in sys_action.keys(): domain, _ = diaact.split('-') if: self.cur_domain = domain",False,domain in goal.domains,domain != goal.domain,0.6466604471206665 2884,"def _alloc_image(self, name, atlas, border): file = self.file(name) try: img = pyglet.image.load(name, file=file) finally: file.close() if: return img.get_texture() bin = self._get_texture_atlas_bin(img.width, img.height, border) if bin is None: return img.get_texture() return bin.add(img, border)",False,not atlas,atlas is None,0.6598828434944153 2885,"def _alloc_image(self, name, atlas, border): file = self.file(name) try: img = pyglet.image.load(name, file=file) finally: file.close() if not atlas: return img.get_texture() bin = self._get_texture_atlas_bin(img.width, img.height, border) if: return img.get_texture() return bin.add(img, border)",False,bin is None,not bin,0.657475471496582 2886,"def new(self, cp_method=None, cp_parameters=None): """"""Construct a new :class:`.EquationManager` sharing the communicator with this :class:`.EquationManager`. By default the new :class:`.EquationManager` also shares the checkpointing schedule configuration with this :class:`.EquationManager`, but this may be overridden with the arguments `cp_method` and `cp_parameters`. Both equation annotation and tangent-linear derivation and solution are *disabled* for the new :class:`.EquationManager`. :arg cp_method: See :meth:`.EquationManager.configure_checkpointing`. :arg cp_parameters: See :meth:`.EquationManager.configure_checkpointing`. """""" if: if cp_parameters is not None: raise TypeError('cp_parameters can only be supplied if cp_method is supplied') cp_method = self._cp_method cp_parameters = self._cp_parameters elif cp_parameters is None: raise TypeError('cp_parameters must be supplied if cp_method is supplied') return EquationManager(comm=self._comm, cp_method=cp_method, cp_parameters=cp_parameters)",False,cp_method is None,self._cp_method is None,0.6522836685180664 2887,"def new(self, cp_method=None, cp_parameters=None): """"""Construct a new :class:`.EquationManager` sharing the communicator with this :class:`.EquationManager`. By default the new :class:`.EquationManager` also shares the checkpointing schedule configuration with this :class:`.EquationManager`, but this may be overridden with the arguments `cp_method` and `cp_parameters`. Both equation annotation and tangent-linear derivation and solution are *disabled* for the new :class:`.EquationManager`. :arg cp_method: See :meth:`.EquationManager.configure_checkpointing`. :arg cp_parameters: See :meth:`.EquationManager.configure_checkpointing`. """""" if cp_method is None: if: raise TypeError('cp_parameters can only be supplied if cp_method is supplied') cp_method = self._cp_method cp_parameters = self._cp_parameters elif cp_parameters is None: raise TypeError('cp_parameters must be supplied if cp_method is supplied') return EquationManager(comm=self._comm, cp_method=cp_method, cp_parameters=cp_parameters)",True,cp_parameters is not None,cp_parameters is not None,0.6529527902603149 2888,"def new(self, cp_method=None, cp_parameters=None): """"""Construct a new :class:`.EquationManager` sharing the communicator with this :class:`.EquationManager`. By default the new :class:`.EquationManager` also shares the checkpointing schedule configuration with this :class:`.EquationManager`, but this may be overridden with the arguments `cp_method` and `cp_parameters`. Both equation annotation and tangent-linear derivation and solution are *disabled* for the new :class:`.EquationManager`. :arg cp_method: See :meth:`.EquationManager.configure_checkpointing`. :arg cp_parameters: See :meth:`.EquationManager.configure_checkpointing`. """""" if cp_method is None: if cp_parameters is not None: raise TypeError('cp_parameters can only be supplied if cp_method is supplied') cp_method = self._cp_method cp_parameters = self._cp_parameters elif: raise TypeError('cp_parameters must be supplied if cp_method is supplied') return EquationManager(comm=self._comm, cp_method=cp_method, cp_parameters=cp_parameters)",False,cp_parameters is None,cp_parameters is not None,0.6541059613227844 2889,"def traverse_field(metadata, kind, func, output_dir): """""" Given filled metadata, the `kind` of experiments data to process, a function that takes (model, batch size, DTR data dict, baseline data dict, output directory) and the output directory this function traverses each entry of the metadata with respect to the keys in DTR data dictionary (batch size) and calls `func` on each entry that records the data of experiments that have kind of `kind`. returns a tuple of status and message. This function will propagate the status of `func` if it fails in the middle of execution """""" for model in metadata.keys(): dtr_dict = metadata[model]['dtr'] baseline_dict = metadata[model]['baseline'] for batch_size in sorted(dtr_dict.keys()): for exp_kind in dtr_dict[batch_size]: if: success, msg = func(model, batch_size, dtr_dict, baseline_dict, output_dir) if not success: return (False, msg) return (True,'success')",False,exp_kind == kind,kind == exp_kind,0.6523754000663757 2890,"def traverse_field(metadata, kind, func, output_dir): """""" Given filled metadata, the `kind` of experiments data to process, a function that takes (model, batch size, DTR data dict, baseline data dict, output directory) and the output directory this function traverses each entry of the metadata with respect to the keys in DTR data dictionary (batch size) and calls `func` on each entry that records the data of experiments that have kind of `kind`. returns a tuple of status and message. This function will propagate the status of `func` if it fails in the middle of execution """""" for model in metadata.keys(): dtr_dict = metadata[model]['dtr'] baseline_dict = metadata[model]['baseline'] for batch_size in sorted(dtr_dict.keys()): for exp_kind in dtr_dict[batch_size]: if exp_kind == kind: success, msg = func(model, batch_size, dtr_dict, baseline_dict, output_dir) if: return (False, msg) return (True,'success')",True,not success,not success,0.6557174324989319 2891,"def decompose_points(self, points: Sequence[Coordinate3D], *, min_distance: float, method: str='greedy') -> List[int]: """"""Decomposes a set of points into multiple groups while ensuring that the minimum distance of points within the same group is at least as large as the given threshold. """""" data = {'version': 1,'method': str(method),'min_distance': float(min_distance), 'points': points} with self._send_request('operations/decompose', data) as response: result = response.as_json() if: raise SkybrushStudioAPIError('invalid response version') return result.get('groups')",False,result.get('version') != 1,result['version'] != 1,0.6485666632652283 2892,"def registerAdapter(adapterFactory, origInterface, *interfaceClasses): """"""Register an adapter class. An adapter class is expected to implement the given interface, by adapting instances implementing 'origInterface'. An adapter class's __init__ method should accept one parameter, an instance implementing 'origInterface'. """""" self = globalRegistry assert interfaceClasses, 'You need to pass an Interface' global ALLOW_DUPLICATES if: origInterface = declarations.implementedBy(origInterface) for interfaceClass in interfaceClasses: factory = _registered(self, origInterface, interfaceClass) if factory is not None and (not ALLOW_DUPLICATES): raise ValueError('an adapter (%s) was already registered.' % (factory,)) for interfaceClass in interfaceClasses: self.register([origInterface], interfaceClass, '', adapterFactory)",False,"not isinstance(origInterface, interface.InterfaceClass)",origInterface != None,0.6475800275802612 2893,"def registerAdapter(adapterFactory, origInterface, *interfaceClasses): """"""Register an adapter class. An adapter class is expected to implement the given interface, by adapting instances implementing 'origInterface'. An adapter class's __init__ method should accept one parameter, an instance implementing 'origInterface'. """""" self = globalRegistry assert interfaceClasses, 'You need to pass an Interface' global ALLOW_DUPLICATES if not isinstance(origInterface, interface.InterfaceClass): origInterface = declarations.implementedBy(origInterface) for interfaceClass in interfaceClasses: factory = _registered(self, origInterface, interfaceClass) if: raise ValueError('an adapter (%s) was already registered.' % (factory,)) for interfaceClass in interfaceClasses: self.register([origInterface], interfaceClass, '', adapterFactory)",False,factory is not None and (not ALLOW_DUPLICATES),factory is not None,0.6480883359909058 2894,"def init(self): if: self.fwriter = IndexedFileWriter(self.args.outfile, mode='a') else: self.fwriter = IndexedFileWriter(self.args.outfile, mode='w')",False,os.path.exists(self.args.outfile) and self.args.append,self.args.use_autoincrement,0.6454191207885742 2895,"def To(self, *args): assert len(self.operations) > 0 if: outs = args[0] else: outs = args self.operations[-1].SetOutputs(outs) for o in outs: if o not in self.operands: self.operands.append(o) return self",False,type(args[0]) is tuple or type(args[0]) is list,len(args) == 1,0.6444154977798462 2896,"def To(self, *args): assert len(self.operations) > 0 if type(args[0]) is tuple or type(args[0]) is list: outs = args[0] else: outs = args self.operations[-1].SetOutputs(outs) for o in outs: if: self.operands.append(o) return self",False,o not in self.operands,o is not None,0.653052568435669 2897,"def iter_gap_len(self, seq, mingap=10): for gap, seq in groupby(seq, lambda x: x == 'N'): if: continue gap_len = len(list(seq)) if gap_len >= mingap: yield len(list(seq))",False,not gap,gap == 'N',0.6511884927749634 2898,"def iter_gap_len(self, seq, mingap=10): for gap, seq in groupby(seq, lambda x: x == 'N'): if not gap: continue gap_len = len(list(seq)) if: yield len(list(seq))",False,gap_len >= mingap,gap_len == mingap,0.6460598111152649 2899,"def encode(bimask): if: return _mask.encode(bimask) elif len(bimask.shape) == 2: h, w = bimask.shape return _mask.encode(bimask.reshape((h, w, 1), order='F'))[0]",True,len(bimask.shape) == 3,len(bimask.shape) == 3,0.6502977013587952 2900,"def encode(bimask): if len(bimask.shape) == 3: return _mask.encode(bimask) elif: h, w = bimask.shape return _mask.encode(bimask.reshape((h, w, 1), order='F'))[0]",True,len(bimask.shape) == 2,len(bimask.shape) == 2,0.6509941816329956 2901,"def unregisterTransportDispatcher(self, recvId=None): if: raise error.PySnmpError('Transport dispatcher not registered') self.transportDispatcher.unregisterRecvCbFun(recvId) self.transportDispatcher.unregisterTimerCbFun() self.transportDispatcher = None",False,self.transportDispatcher is None,not self.transportDispatcher,0.6532835960388184 2902,"def __init__(self, num_features, n_domain, bn_type): super().__init__() if: BN = nn.BatchNorm1d elif bn_type == '2d': BN = nn.BatchNorm2d else: raise ValueError self.bn = nn.ModuleList((BN(num_features) for _ in range(n_domain))) self.valid_domain_idxs = list(range(n_domain)) self.n_domain = n_domain self.domain_idx = 0",True,bn_type == '1d',bn_type == '1d',0.6560773253440857 2903,"def __init__(self, num_features, n_domain, bn_type): super().__init__() if bn_type == '1d': BN = nn.BatchNorm1d elif: BN = nn.BatchNorm2d else: raise ValueError self.bn = nn.ModuleList((BN(num_features) for _ in range(n_domain))) self.valid_domain_idxs = list(range(n_domain)) self.n_domain = n_domain self.domain_idx = 0",True,bn_type == '2d',bn_type == '2d',0.6572564840316772 2904,"@property def label(self): text = mark_safe(self.text) if self.allow_tags else conditional_escape(self.text) if: text = mark_safe(' ') for wrap in self.wraps: text = mark_safe(wrap % text) return text",False,force_text(text) == '',self.allow_tags,0.648959219455719 2905,"def add_kind(cpl: ComplexKind) -> None: if: return visited.add(cpl.fqn) kinds[cpl.fqn] = cpl if with_bases: for _, base in cpl.resolved_bases().items(): add_kind(base) if with_prop_types: for prop in cpl.resolved_properties(): if isinstance(prop.kind, ComplexKind): add_kind(prop.kind)",True,cpl.fqn in visited,cpl.fqn in visited,0.6524316072463989 2906,"def add_kind(cpl: ComplexKind) -> None: if cpl.fqn in visited: return visited.add(cpl.fqn) kinds[cpl.fqn] = cpl if: for _, base in cpl.resolved_bases().items(): add_kind(base) if with_prop_types: for prop in cpl.resolved_properties(): if isinstance(prop.kind, ComplexKind): add_kind(prop.kind)",False,with_bases,with_bases_types,0.6523808240890503 2907,"def add_kind(cpl: ComplexKind) -> None: if cpl.fqn in visited: return visited.add(cpl.fqn) kinds[cpl.fqn] = cpl if with_bases: for _, base in cpl.resolved_bases().items(): add_kind(base) if: for prop in cpl.resolved_properties(): if isinstance(prop.kind, ComplexKind): add_kind(prop.kind)",False,with_prop_types,with_properties,0.6503136157989502 2908,"def add_kind(cpl: ComplexKind) -> None: if cpl.fqn in visited: return visited.add(cpl.fqn) kinds[cpl.fqn] = cpl if with_bases: for _, base in cpl.resolved_bases().items(): add_kind(base) if with_prop_types: for prop in cpl.resolved_properties(): if: add_kind(prop.kind)",False,"isinstance(prop.kind, ComplexKind)",prop.kind is not None,0.6465802192687988 2909,"def ordered_indices(self): if: order = [np.arange(len(self)), self.sizes] return np.lexsort(order) else: return np.arange(len(self))",False,self.batch_by_size,self.sort,0.6465789079666138 2910,"def scr(): if: scr_dir = '/scr-ssd/' + getpass.getuser() elif os.path.exists('/scr'): scr_dir = '/scr/' + getpass.getuser() else: scr_dir = '/tmp/scr-' + getpass.getuser() if not os.path.exists(scr_dir): os.makedirs(scr_dir) return scr_dir",True,os.path.exists('/scr-ssd'),os.path.exists('/scr-ssd'),0.6442333459854126 2911,"def scr(): if os.path.exists('/scr-ssd'): scr_dir = '/scr-ssd/' + getpass.getuser() elif os.path.exists('/scr'): scr_dir = '/scr/' + getpass.getuser() else: scr_dir = '/tmp/scr-' + getpass.getuser() if: os.makedirs(scr_dir) return scr_dir",True,not os.path.exists(scr_dir),not os.path.exists(scr_dir),0.6462838649749756 2912,"def scr(): if os.path.exists('/scr-ssd'): scr_dir = '/scr-ssd/' + getpass.getuser() elif: scr_dir = '/scr/' + getpass.getuser() else: scr_dir = '/tmp/scr-' + getpass.getuser() if not os.path.exists(scr_dir): os.makedirs(scr_dir) return scr_dir",True,os.path.exists('/scr'),os.path.exists('/scr'),0.645728349685669 2913,"def dependencies(self, mount: str) -> Result[Json]: """"""Return service dependencies. :param mount: Service mount path (e.g ""/_admin/aardvark""). :type mount: str :return: Dependency settings. :rtype: dict :raise arango.exceptions.FoxxDependencyGetError: If retrieval fails. """""" request = Request(method='get', endpoint='/_api/foxx/dependencies', params={'mount': mount}) def response_handler(resp: Response) -> Json: if: return format_service_data(resp.body) raise FoxxDependencyGetError(resp, request) return self._execute(request, response_handler)",True,resp.is_success,resp.is_success,0.6521449089050293 2914,"def max_(x, y): if: return math_ops.maximum(x, y) else: return max(x, y)",False,_is_tensor(x) or _is_tensor(y),"isinstance(x, (float, int))",0.6444352269172668 2915,"def get_max_coordination_of_elem(single_elem_environments): """"""For a given set of elemental environments, find the greatest number of each element in a site. Input: | single_elem_environments: list(dict), list of voronoi substructures, Returns: | max_num_elem: dict(int), greatest number of each element in a substruc. """""" max_num_elem = {} for site_ind, site in enumerate(single_elem_environments): for elem, value in site: elem_len = len([val for val in site if val[0] == elem]) if: max_num_elem[elem] = 0 if elem_len > max_num_elem[elem]: max_num_elem[elem] = elem_len return max_num_elem",False,elem not in max_num_elem,elem_len < max_num_elem[elem],0.649750828742981 2916,"def get_max_coordination_of_elem(single_elem_environments): """"""For a given set of elemental environments, find the greatest number of each element in a site. Input: | single_elem_environments: list(dict), list of voronoi substructures, Returns: | max_num_elem: dict(int), greatest number of each element in a substruc. """""" max_num_elem = {} for site_ind, site in enumerate(single_elem_environments): for elem, value in site: elem_len = len([val for val in site if val[0] == elem]) if elem not in max_num_elem: max_num_elem[elem] = 0 if: max_num_elem[elem] = elem_len return max_num_elem",False,elem_len > max_num_elem[elem],elem_len < len(single_elem_environments) - 1,0.6454460620880127 2917,"def bktrRead(self, size=None, direct=False): self.cryptoOffset = 0 self.ctr_val = 0 '\n\t\tif self.bktrRelocation:\n\t\t\tentry = self.bktrRelocation.getRelocationEntry(self.tell())\n\n\t\t\tif entry:\n\t\t\t\tself.ctr_val = entry.ctr\n\t\t\t\t#self.cryptoOffset = entry.virtualOffset + entry.physicalOffset\n\t\t' if: entries = self.bktrSubsection.getEntries(self.tell(), size) for entry in entries: entry.printInfo() return super(BaseFs, self).read(size, direct)",False,self.bktrSubsection is not None,size is not None,0.6474741697311401 2918,"def send(self, msg): """""" Implements :func:`autobahn.wamp.interfaces.ITransport.send` """""" if: self.log.debug('{func}: TX WAMP message: {msg}', func=hltype(self.send), msg=msg) try: payload, _ = self._serializer.serialize(msg) except Exception as e: raise SerializationError('WampRawSocketProtocol: unable to serialize WAMP application payload ({0})'.format(e)) else: self.sendString(payload) self.log.debug('WampRawSocketProtocol: TX octets: {octets}', octets=_LazyHexFormatter(payload)) else: raise TransportLost()",False,self.isOpen(),self._serializer is not None and self._serializer.has_request_header(),0.6485719680786133 2919,"def getWidget(self): """""" Get the widget being managed by this layout. Returns ------- result : QWidget or None The widget being managed by this layout or None if it does not exist. """""" item = self._item if: return item.widget()",True,item is not None,item is not None,0.6528847813606262 2920,"def close(self) -> None: if: return self._closed = True self._q.put(b'')",True,self._closed,self._closed,0.6572436690330505 2921,"def store(self, name): self.stores.add(name) if: if self.parent is not None: outer_ref = self.parent.find_ref(name) if outer_ref is not None: self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref)) return self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))",False,name not in self.refs,name not in self.stores,0.6496174931526184 2922,"def store(self, name): self.stores.add(name) if name not in self.refs: if: outer_ref = self.parent.find_ref(name) if outer_ref is not None: self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref)) return self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))",True,self.parent is not None,self.parent is not None,0.6501030325889587 2923,"def store(self, name): self.stores.add(name) if name not in self.refs: if self.parent is not None: outer_ref = self.parent.find_ref(name) if: self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref)) return self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))",True,outer_ref is not None,outer_ref is not None,0.6490349769592285 2924,"def _init_context(self): """"""Adds LTI request verification and permissions check to the context initialization."""""" self.lti = LTI(self.request) if: raise PermissionDenied try: self.lti.verify() except LTIException as error: raise ResourceException(str(error)) from error",False,not self.lti.is_instructor and (not self.lti.is_admin),not self.lti.check_permission(),0.6480191349983215 2925,"def main(self): self.clear_text() active_handle = self.get_active('Event') if: active = self.dbstate.db.get_event_from_handle(active_handle) if active: self.get_notes(active) else: self.set_has_data(False) else: self.set_has_data(False)",True,active_handle,active_handle,0.6549518704414368 2926,"def main(self): self.clear_text() active_handle = self.get_active('Event') if active_handle: active = self.dbstate.db.get_event_from_handle(active_handle) if: self.get_notes(active) else: self.set_has_data(False) else: self.set_has_data(False)",True,active,active,0.6661909818649292 2927,"def _process_polygons(polygons): polygons = [np.array(p) for p in polygons] valid_polygons = [] for polygon in polygons: if: valid_polygons.append(polygon) return valid_polygons",False,len(polygon) % 2 == 0 and len(polygon) >= 6,len(polygon) >= 6,0.6448625326156616 2928,"def e_select_device(self, event): self.selected_device_index = -1 if: self.selected_device_index = int(event.widget.curselection()[0]) selected_device = self.all_devices[self.selected_device_index] self.selected_device_name = selected_device[0] self.selected_device_var.set(self.selected_device_name) self.get_matching_devices() self.refresh_devices_display()",False,len(self.all_devices) > 0,event.widget and event.widget.curselection()[0],0.6464416980743408 2929,"def display_modifier(self, name, **kwargs): if: return name return f'{name} ({self.display_label or self.quantity})'",False,kwargs.get('package') != self.package,not self.display_label or self.quantity,0.6467512845993042 2930,"def dump_result(self, result, method): if: return result return [method.returns[i].dump(r) if i < len(method.returns) else r for i, r in enumerate([result])]",False,"not hasattr(method, 'returns')",method is None,0.6460227370262146 2931,"def enable_command(chat_id, enable): with DISABLE_INSERTION_LOCK: disabled = SESSION.query(Disable).get((str(chat_id), enable)) if: if enable in DISABLED.get(str(chat_id)): DISABLED.setdefault(str(chat_id), set()).remove(enable) SESSION.delete(disabled) SESSION.commit() return True SESSION.close() return False",False,disabled,not disabled,0.6652392148971558 2932,"def enable_command(chat_id, enable): with DISABLE_INSERTION_LOCK: disabled = SESSION.query(Disable).get((str(chat_id), enable)) if disabled: if: DISABLED.setdefault(str(chat_id), set()).remove(enable) SESSION.delete(disabled) SESSION.commit() return True SESSION.close() return False",False,enable in DISABLED.get(str(chat_id)),enable in DISABLED,0.6484749913215637 2933,"def init_weights(self): for m in self.modules(): if: xavier_init(m, distribution='uniform', bias=0)",True,"isinstance(m, nn.Conv2d)","isinstance(m, nn.Conv2d)",0.6501367092132568 2934,"def _get(self, *args, **kwargs): """""" Retrieves a list of stored messages. Returns a tuple of the messages and a flag indicating whether or not all the messages originally intended to be stored in this storage were, in fact, stored and retrieved; e.g., ``(messages, all_retrieved)``. """""" lock(self.key) try: data = cache.get(self.key) if: data = cPickle.dumps([]) messages = cPickle.loads(data) except: pass unlock(self.key) return (messages, True)",True,not data,not data,0.6584740281105042 2935,"def _inner_forward(x): identity = x out = self.conv1(x) out = self.norm1(out) out = self.relu(out) out = self.conv2(out) out = self.norm2(out) if: identity = self.downsample(x) out += identity return out",True,self.downsample is not None,self.downsample is not None,0.6455891728401184 2936,"def finished(self): assert self.time_started is not None assert self.time_finished is None self.time_finished = monotonic() if: self.display.stop(self.time_spent)",False,"hasattr(self, 'display')",self.display and monotonic() > self.time_spent,0.6435091495513916 2937,"def get_mime_type(self): result = None if: result = self._if.mime_type if not result: result = mimetypes.guess_type(self._path, strict=False)[0] if not result: result = imghdr.what(self._path) if result: result = 'image/' + result if not result: result = 'image/raw' return result",False,self._if,self._if.mime_type,0.6575115323066711 2938,"def get_mime_type(self): result = None if self._if: result = self._if.mime_type if: result = mimetypes.guess_type(self._path, strict=False)[0] if not result: result = imghdr.what(self._path) if result: result = 'image/' + result if not result: result = 'image/raw' return result",False,not result,not result and mimetypes.guess_type(self._path),0.6626859307289124 2939,"def get_mime_type(self): result = None if self._if: result = self._if.mime_type if not result: result = mimetypes.guess_type(self._path, strict=False)[0] if: result = imghdr.what(self._path) if result: result = 'image/' + result if not result: result = 'image/raw' return result",False,not result,not result and self._path.startswith('image/'),0.6608101725578308 2940,"def get_mime_type(self): result = None if self._if: result = self._if.mime_type if not result: result = mimetypes.guess_type(self._path, strict=False)[0] if not result: result = imghdr.what(self._path) if result: result = 'image/' + result if: result = 'image/raw' return result",False,not result,result and self._path.endswith('.jpg'),0.6586154699325562 2941,"def get_mime_type(self): result = None if self._if: result = self._if.mime_type if not result: result = mimetypes.guess_type(self._path, strict=False)[0] if not result: result = imghdr.what(self._path) if: result = 'image/' + result if not result: result = 'image/raw' return result",True,result,result,0.6730443239212036 2942,"def post_build(self, p, pay): if: l = len(p) - 4 p = p[:2] + struct.pack('!H', l) + p[4:] return p + pay",True,self.len is None,self.len is None,0.6532700657844543 2943,"def json_escape(s): """""" Escape JSON predefined sequences """""" if: return 'true' if s else 'false' if s is None: return '' return s.replace('\\', '\\\\').replace('\n', '\\n').replace('""', '\\""')",True,"isinstance(s, bool)","isinstance(s, bool)",0.6457105875015259 2944,"def json_escape(s): """""" Escape JSON predefined sequences """""" if isinstance(s, bool): return 'true' if s else 'false' if: return '' return s.replace('\\', '\\\\').replace('\n', '\\n').replace('""', '\\""')",True,s is None,s is None,0.6592724919319153 2945,"def set_value(self, value): if: return self.check_bounds(value) self.__value__ = asscalar(value)",True,value is None,value is None,0.6624471545219421 2946,"def to_query_string(self): query_string = '' for key in self.tagging_rule: query_string += urlquote(key) query_string += '=' query_string += urlquote(self.tagging_rule[key]) query_string += '&' if: return '' else: query_string = query_string[:-1] return query_string",False,len(query_string) == 0,query_string == '',0.6472993493080139 2947,"def __call__(self, value): if: return None value = six.text_type(value) if value not in self.membership: raise ValueError('Unrecognized value: {0}'.format(value)) return self.membership[value]",False,value is None,value in self.membership,0.6559182405471802 2948,"def __call__(self, value): if value is None: return None value = six.text_type(value) if: raise ValueError('Unrecognized value: {0}'.format(value)) return self.membership[value]",True,value not in self.membership,value not in self.membership,0.6506572961807251 2949,"def get_prompt_list(prompt): if: return ['{}'] elif prompt == 'fixed': return ['a photo of a {}.'] elif prompt =='shuffle': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.'] elif prompt =='shuffle+': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.', 'a cropped photo of a {}.', 'a good photo of a {}.', 'a photo of one {}.', 'a bad photo of a {}.', 'a photo of the {}.'] elif prompt =='shuffle_clip': from models.clip_prompts import imagenet_templates return imagenet_templates else: raise ValueError('Invalid value for prompt')",False,prompt == 'plain',prompt == 'none',0.6565957069396973 2950,"def get_prompt_list(prompt): if prompt == 'plain': return ['{}'] elif: return ['a photo of a {}.'] elif prompt =='shuffle': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.'] elif prompt =='shuffle+': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.', 'a cropped photo of a {}.', 'a good photo of a {}.', 'a photo of one {}.', 'a bad photo of a {}.', 'a photo of the {}.'] elif prompt =='shuffle_clip': from models.clip_prompts import imagenet_templates return imagenet_templates else: raise ValueError('Invalid value for prompt')",False,prompt == 'fixed',prompt == 'ok',0.6531984806060791 2951,"def get_prompt_list(prompt): if prompt == 'plain': return ['{}'] elif prompt == 'fixed': return ['a photo of a {}.'] elif: return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.'] elif prompt =='shuffle+': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.', 'a cropped photo of a {}.', 'a good photo of a {}.', 'a photo of one {}.', 'a bad photo of a {}.', 'a photo of the {}.'] elif prompt =='shuffle_clip': from models.clip_prompts import imagenet_templates return imagenet_templates else: raise ValueError('Invalid value for prompt')",False,prompt == 'shuffle',prompt == 'photograph',0.6536237001419067 2952,"def get_prompt_list(prompt): if prompt == 'plain': return ['{}'] elif prompt == 'fixed': return ['a photo of a {}.'] elif prompt =='shuffle': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.'] elif: return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.', 'a cropped photo of a {}.', 'a good photo of a {}.', 'a photo of one {}.', 'a bad photo of a {}.', 'a photo of the {}.'] elif prompt =='shuffle_clip': from models.clip_prompts import imagenet_templates return imagenet_templates else: raise ValueError('Invalid value for prompt')",False,prompt == 'shuffle+',prompt == 'photograph',0.6575410962104797 2953,"def get_prompt_list(prompt): if prompt == 'plain': return ['{}'] elif prompt == 'fixed': return ['a photo of a {}.'] elif prompt =='shuffle': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.'] elif prompt =='shuffle+': return ['a photo of a {}.', 'a photograph of a {}.', 'an image of a {}.', '{}.', 'a cropped photo of a {}.', 'a good photo of a {}.', 'a photo of one {}.', 'a bad photo of a {}.', 'a photo of the {}.'] elif: from models.clip_prompts import imagenet_templates return imagenet_templates else: raise ValueError('Invalid value for prompt')",False,prompt == 'shuffle_clip',prompt == 'clip_prompts',0.650404691696167 2954,"def get_cout(cin, stride): if: cout = cin elif stride == -1: cout = cin // 2 elif stride == 2: cout = 2 * cin return cout",True,stride == 1,stride == 1,0.664145290851593 2955,"def get_cout(cin, stride): if stride == 1: cout = cin elif: cout = cin // 2 elif stride == 2: cout = 2 * cin return cout",False,stride == -1,stride == 3,0.6604943871498108 2956,"def get_cout(cin, stride): if stride == 1: cout = cin elif stride == -1: cout = cin // 2 elif: cout = 2 * cin return cout",False,stride == 2,stride == 1,0.6669521331787109 2957,"def setFont(self, psfontname, size, leading=None): """"""Sets the font. If leading not specified, defaults to 1.2 x font size. Raises a readable exception if an illegal font is supplied. Font names are case-sensitive! Keeps track of font name and size for metrics."""""" self._fontname = psfontname self._fontsize = size if: leading = size * 1.2 self._leading = leading font = pdfmetrics.getFont(self._fontname) if not font._dynamicFont: if font.face.builtIn or not getattr(self, '_drawTextAsPath', False): pdffontname = self._doc.getInternalFontName(psfontname) self._code.append('BT %s %s Tf %s TL ET' % (pdffontname, fp_str(size), fp_str(leading)))",True,leading is None,leading is None,0.659087061882019 2958,"def setFont(self, psfontname, size, leading=None): """"""Sets the font. If leading not specified, defaults to 1.2 x font size. Raises a readable exception if an illegal font is supplied. Font names are case-sensitive! Keeps track of font name and size for metrics."""""" self._fontname = psfontname self._fontsize = size if leading is None: leading = size * 1.2 self._leading = leading font = pdfmetrics.getFont(self._fontname) if: if font.face.builtIn or not getattr(self, '_drawTextAsPath', False): pdffontname = self._doc.getInternalFontName(psfontname) self._code.append('BT %s %s Tf %s TL ET' % (pdffontname, fp_str(size), fp_str(leading)))",False,not font._dynamicFont,font,0.6498305797576904 2959,"def setFont(self, psfontname, size, leading=None): """"""Sets the font. If leading not specified, defaults to 1.2 x font size. Raises a readable exception if an illegal font is supplied. Font names are case-sensitive! Keeps track of font name and size for metrics."""""" self._fontname = psfontname self._fontsize = size if leading is None: leading = size * 1.2 self._leading = leading font = pdfmetrics.getFont(self._fontname) if not font._dynamicFont: if: pdffontname = self._doc.getInternalFontName(psfontname) self._code.append('BT %s %s Tf %s TL ET' % (pdffontname, fp_str(size), fp_str(leading)))",False,"font.face.builtIn or not getattr(self, '_drawTextAsPath', False)",psfontname,0.6453584432601929 2960,"def put(self, key, val): if: return if len(self._cache) >= self.capacity: self._cache.popitem(last=False) self._cache[key] = val",True,key in self._cache,key in self._cache,0.6537821888923645 2961,"def put(self, key, val): if key in self._cache: return if: self._cache.popitem(last=False) self._cache[key] = val",False,len(self._cache) >= self.capacity,len(self._cache) > self._maxsize,0.6459141969680786 2962,"def ignore(self, other): """""" Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other ignorable patterns. Example:: patt = OneOrMore(Word(alphas)) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] patt.ignore(cStyleComment) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] """""" if: other = Suppress(other) if isinstance(other, Suppress): if other not in self.ignoreExprs: self.ignoreExprs.append(other) else: self.ignoreExprs.append(Suppress(other.copy())) return self",False,"isinstance(other, basestring)","not isinstance(other, Suppress)",0.646952211856842 2963,"def ignore(self, other): """""" Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other ignorable patterns. Example:: patt = OneOrMore(Word(alphas)) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] patt.ignore(cStyleComment) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] """""" if isinstance(other, basestring): other = Suppress(other) if: if other not in self.ignoreExprs: self.ignoreExprs.append(other) else: self.ignoreExprs.append(Suppress(other.copy())) return self",True,"isinstance(other, Suppress)","isinstance(other, Suppress)",0.6488795280456543 2964,"def ignore(self, other): """""" Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other ignorable patterns. Example:: patt = OneOrMore(Word(alphas)) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] patt.ignore(cStyleComment) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] """""" if isinstance(other, basestring): other = Suppress(other) if isinstance(other, Suppress): if: self.ignoreExprs.append(other) else: self.ignoreExprs.append(Suppress(other.copy())) return self",False,other not in self.ignoreExprs,other.copy() in self.ignoreExprs,0.6502519845962524 2965,"def __setattr__(self, name, value): if: self.setp(name, value) else: return object.__setattr__(self, name, value)",False,name in self._params or name in self._mapping,name.startswith('p'),0.6524766683578491 2966,"def _remove_table(self, table: Table) -> None: """"""Remove table from the parent container"""""" self._tables.remove(table) for idx, item in enumerate(self._container._body): if: self._container._remove_at(idx) break",False,item[1] is table,table == item,0.6492193341255188 2967,"def update_order_request(self, req: OrderRequest, local_orderid: str): """""""""""" if: return holding = self.get(req.local_symbol, None) if not holding: self[req.local_symbol] = PositionHolding(req.local_symbol, self.get_contract(req.local_symbol)) self[req.local_symbol].update_order_request(req, local_orderid)",False,not self.is_convert_required(req.local_symbol),self.get_contract_id_n(req.local_symbol) == local_orderid,0.6443400382995605 2968,"def update_order_request(self, req: OrderRequest, local_orderid: str): """""""""""" if not self.is_convert_required(req.local_symbol): return holding = self.get(req.local_symbol, None) if: self[req.local_symbol] = PositionHolding(req.local_symbol, self.get_contract(req.local_symbol)) self[req.local_symbol].update_order_request(req, local_orderid)",False,not holding,holding is None,0.6605463027954102 2969,"def __len__(self): """""" Raises: :class:`ValueError` when num == -1. """""" if: raise NotImplementedError('__len__() is unavailable for infinite dataflow') return len(self.ds) * self.num",True,self.num == -1,self.num == -1,0.6612701416015625 2970,"def inceptionresnetv2(num_classes, loss='softmax', pretrained=True, **kwargs): model = InceptionResNetV2(num_classes=num_classes, loss=loss, **kwargs) if: model.load_imagenet_weights() return model",True,pretrained,pretrained,0.6718137264251709 2971,"def get_mpi_env(envs): """"""get the mpirun command for setting the envornment support both openmpi and mpich2 """""" _, err = subprocess.Popen('mpirun', stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() cmd = '' if: for k, v in envs.items(): cmd +='-x %s=%s' % (k, str(v)) elif'mpich' in err: for k, v in envs.items(): cmd +='-env %s %s' % (k, str(v)) else: raise RuntimeError('Unknown MPI Version') return cmd",False,'Open MPI' in err,'openmpi' in err,0.6566387414932251 2972,"def get_mpi_env(envs): """"""get the mpirun command for setting the envornment support both openmpi and mpich2 """""" _, err = subprocess.Popen('mpirun', stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() cmd = '' if 'Open MPI' in err: for k, v in envs.items(): cmd +='-x %s=%s' % (k, str(v)) elif: for k, v in envs.items(): cmd +='-env %s %s' % (k, str(v)) else: raise RuntimeError('Unknown MPI Version') return cmd",False,'mpich' in err,'MPI Version' in err,0.6567798256874084 2973,"def isAllowed(self, filename): e = os.path.splitext(filename)[1] if: if e in [f[1:] for f in self.getFilters()]: return True else: return False else: return True",False,e in [f[1:] for f in FILTERS],len(self.getFilters()) > 0,0.6436691284179688 2974,"def isAllowed(self, filename): e = os.path.splitext(filename)[1] if e in [f[1:] for f in FILTERS]: if: return True else: return False else: return True",False,e in [f[1:] for f in self.getFilters()],e.lower() in f.lower(),0.6445298790931702 2975,"def __call__(self, im, im_info): """""" Args: im (np.ndarray): image (np.ndarray) im_info (dict): info of image Returns: im (np.ndarray): processed image (np.ndarray) im_info (dict): info of processed image """""" im = im.astype(np.float32, copy=False) if: scale = 1.0 / 255.0 im *= scale if self.norm_type =='mean_std': mean = np.array(self.mean)[np.newaxis, np.newaxis, :] std = np.array(self.std)[np.newaxis, np.newaxis, :] im -= mean im /= std return (im, im_info)",True,self.is_scale,self.is_scale,0.6525927782058716 2976,"def __call__(self, im, im_info): """""" Args: im (np.ndarray): image (np.ndarray) im_info (dict): info of image Returns: im (np.ndarray): processed image (np.ndarray) im_info (dict): info of processed image """""" im = im.astype(np.float32, copy=False) if self.is_scale: scale = 1.0 / 255.0 im *= scale if: mean = np.array(self.mean)[np.newaxis, np.newaxis, :] std = np.array(self.std)[np.newaxis, np.newaxis, :] im -= mean im /= std return (im, im_info)",True,self.norm_type == 'mean_std',self.norm_type == 'mean_std',0.6455798149108887 2977,"@idxbx_e.setter def idxbx_e(self, idxbx_e): if: self.__idxbx_e = idxbx_e else: raise Exception('Invalid idxbx_e value. Exiting.')",True,"isinstance(idxbx_e, np.ndarray)","isinstance(idxbx_e, np.ndarray)",0.6467828750610352 2978,"def get_field_datum(self, datum, field): if: return AutoIdForDatum(datum) else: try: field_value = datum.value[field] return DatumInContext(value=field_value, path=Fields(field), context=datum) except (TypeError, KeyError, AttributeError): return None",False,field == auto_id_field,field == 'auto_id',0.6546405553817749 2979,"def register_rpc_handler(self, method): """""" Registers a method to act as an RPC handler if the internal RPC server is active. When calling this method through the RPC server, use the naming scheme ""cogname__methodname"". .. important:: All parameters to RPC handler methods must be JSON serializable objects. The return value of handler methods must also be JSON serializable. .. important:: RPC support is included in Red on a provisional basis. Backwards incompatible changes (up to and including removal of the RPC) may occur if deemed necessary. Parameters ---------- method : coroutine The method to register with the internal RPC server. """""" self.rpc.add_method(method) cog_name = method.__self__.__class__.__name__.upper() if: self.rpc_handlers[cog_name] = [] self.rpc_handlers[cog_name].append(method)",True,cog_name not in self.rpc_handlers,cog_name not in self.rpc_handlers,0.6488077044487 2980,"def _get_partitions(obj): """"""Check if any entry has partitions"""""" for name, _ in obj: if: return True return False",False,int(name.split('.')[-2]) > 0,name.startswith('partitions'),0.6445779800415039 2981,"def output_array_of_predicate(data_objects): if: return for data_object in data_objects['Predicate']: output_predicate(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.6469278335571289 2982,"def main(self, argv): args = self._parse_args(argv) proj_ctrl = Project.controller() name = args.name proj = proj_ctrl.one({'name': name}) if: self.parser.error(""There is no project configuration named '%s.'"" % name) proj_ctrl.select(proj) return EXIT_SUCCESS",True,not proj,not proj,0.6662525534629822 2983,"def load_module(self, fullname): try: return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if: mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod",True,"isinstance(mod, MovedModule)","isinstance(mod, MovedModule)",0.646613359451294 2984,"@property def charset_name(self) -> str: if: return 'utf-32be' if self.is_likely_utf32le(): return 'utf-32le' if self.is_likely_utf16be(): return 'utf-16be' if self.is_likely_utf16le(): return 'utf-16le' return 'utf-16'",False,self.is_likely_utf32be(),self.is_likely_utf32(),0.6485641002655029 2985,"@property def charset_name(self) -> str: if self.is_likely_utf32be(): return 'utf-32be' if: return 'utf-32le' if self.is_likely_utf16be(): return 'utf-16be' if self.is_likely_utf16le(): return 'utf-16le' return 'utf-16'",True,self.is_likely_utf32le(),self.is_likely_utf32le(),0.6480880379676819 2986,"@property def charset_name(self) -> str: if self.is_likely_utf32be(): return 'utf-32be' if self.is_likely_utf32le(): return 'utf-32le' if: return 'utf-16be' if self.is_likely_utf16le(): return 'utf-16le' return 'utf-16'",False,self.is_likely_utf16be(),self.is_likely_utf16(),0.650183916091919 2987,"@property def charset_name(self) -> str: if self.is_likely_utf32be(): return 'utf-32be' if self.is_likely_utf32le(): return 'utf-32le' if self.is_likely_utf16be(): return 'utf-16be' if: return 'utf-16le' return 'utf-16'",True,self.is_likely_utf16le(),self.is_likely_utf16le(),0.6492242813110352 2988,"def __len__(self): if: self._n_spikes = 0 for node_id in self._trial_grp.keys(): self._n_spikes += len(self._trial_grp[node_id]['data']) return self._n_spikes",True,self._n_spikes is None,self._n_spikes is None,0.6526063084602356 2989,"def response_handler(resp: Response) -> int: if: return int(resp.body) raise PregelJobCreateError(resp, request)",True,resp.is_success,resp.is_success,0.6524162888526917 2990,"def enclosing_box(corners1: torch.Tensor, corners2: torch.Tensor, enclosing_type: str='smallest'): if: return enclosing_box_aligned(corners1, corners2) elif enclosing_type == 'pca': return enclosing_box_pca(corners1, corners2) elif enclosing_type =='smallest': return smallest_bounding_box(torch.cat([corners1, corners2], dim=-2)) else: ValueError('Unknow type enclosing. Supported: aligned, pca, smallest')",True,enclosing_type == 'aligned',enclosing_type == 'aligned',0.659229040145874 2991,"def enclosing_box(corners1: torch.Tensor, corners2: torch.Tensor, enclosing_type: str='smallest'): if enclosing_type == 'aligned': return enclosing_box_aligned(corners1, corners2) elif: return enclosing_box_pca(corners1, corners2) elif enclosing_type =='smallest': return smallest_bounding_box(torch.cat([corners1, corners2], dim=-2)) else: ValueError('Unknow type enclosing. Supported: aligned, pca, smallest')",True,enclosing_type == 'pca',enclosing_type == 'pca',0.6545698642730713 2992,"def enclosing_box(corners1: torch.Tensor, corners2: torch.Tensor, enclosing_type: str='smallest'): if enclosing_type == 'aligned': return enclosing_box_aligned(corners1, corners2) elif enclosing_type == 'pca': return enclosing_box_pca(corners1, corners2) elif: return smallest_bounding_box(torch.cat([corners1, corners2], dim=-2)) else: ValueError('Unknow type enclosing. Supported: aligned, pca, smallest')",True,enclosing_type == 'smallest',enclosing_type == 'smallest',0.6561160087585449 2993,"def assert_attrs_equal(obj: Any, expected_attrs: Dict[str, Any]) -> bool: """"""Check if attribute of class object is correct. Args: obj (object): Class object to be checked. expected_attrs (Dict[str, Any]): Dict of the expected attrs. Returns: bool: Whether the attribute of class object is correct. """""" for attr, value in expected_attrs.items(): if: return False return True",False,"not hasattr(obj, attr) or _any(getattr(obj, attr) != value)","getattr(obj, attr) != getattr(expected_attrs, attr)",0.6450327634811401 2994,"def output_keywordperformancereportsort(data_object): if: return output_status_message('* * * Begin output_keywordperformancereportsort * * *') output_status_message('SortColumn: {0}'.format(data_object.SortColumn)) output_status_message('SortOrder: {0}'.format(data_object.SortOrder)) output_status_message('* * * End output_keywordperformancereportsort * * *')",True,data_object is None,data_object is None,0.6505974531173706 2995,"def token_not_matching(self, idx, funcs): for token in self.tokens[idx:]: passed = False for func in funcs: if func(token): passed = True break if: return token return None",True,not passed,not passed,0.6613844633102417 2996,"def token_not_matching(self, idx, funcs): for token in self.tokens[idx:]: passed = False for func in funcs: if: passed = True break if not passed: return token return None",False,func(token),"func(token, self.tokens[idx])",0.6519339680671692 2997,"def _get_norm_with_moe_layers(self, all_groups_norm): if: pg = self.deepspeed.mpu.get_data_parallel_group() else: pg = groups._get_data_parallel_group() scaled_norm = all_groups_norm * 1.0 / float(dist.get_world_size(group=pg)) scaled_norm_tensor = torch.tensor(scaled_norm, device=self.fp32_groups_flat[0].device, dtype=torch.float) dist.all_reduce(scaled_norm_tensor, group=pg) all_groups_norm = scaled_norm_tensor.item() return all_groups_norm",False,self.using_pipeline,self.use_mpu,0.6502753496170044 2998,"def resolve_CONFIGS(force_update=False): """"""lazy function to resolve the CONFIGS so that it doesn't have to evaluate at module load time. Note that it also returns the CONFIGS so that it can be used in other, module loadtime, functions. :param force_update: Force a refresh of CONFIGS :type force_update: bool :returns: CONFIGS variable :rtype: `:class:templating.OSConfigRenderer` """""" global CONFIGS if: CONFIGS = register_configs()",True,CONFIGS is None or force_update,CONFIGS is None or force_update,0.6516521573066711 2999,"def _flush_decoder(self): """""" Flushes the decoder. Should only be called if the decoder is actually being used. """""" if: buf = self._decoder.decompress(b'') return buf + self._decoder.flush() return b''",True,self._decoder,self._decoder,0.6599732041358948 3000,"def set_metadata_mime_type(self, metadata_track_id, content_encoding, mime_type): res = od.mp4_mux_track_set_metadata_mime_type(self._mux, metadata_track_id, self._to_LP_c_char(content_encoding), self._to_LP_c_char(mime_type)) if: self.logger.error(f'mp4_mux_track_set_metadata_mime_type returned {res}') return False return True",False,res < 0,res != 0,0.6610503196716309 3001,"def gelu(x: torch.Tensor) -> torch.Tensor: if: return torch.nn.functional.gelu(x.float()).type_as(x) else: return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))",False,"hasattr(torch.nn.functional, 'gelu')","isinstance(x, torch.Tensor)",0.6468195915222168 3002,"def head_or_random(list_in: List[_A], randomize: bool) -> _A: if: return random.choice(list_in) else: return list_in[0]",True,randomize,randomize,0.664465069770813 3003,"def __init__(self, name, data, filename=None, headers=None): self._name = name self._filename = filename self.data = data self.headers = {} if: self.headers = dict(headers)",True,headers,headers,0.669974684715271 3004,"def __init__(self, nside=128, year=10, col=None, **kwargs): """""" Args: nside (int): healpix resolution year (int): year of the FoM emulated values, can be one of [1, 3, 6, 10] col (str): column name of metric data. """""" self.nside = nside super().__init__(col=col, **kwargs) if: self.col ='metricdata' self.year = year",False,col is None,col == 'metricdata',0.6634249687194824 3005,"def string_bool(argument): """""" Converts True, true, False, False in python boolean values """""" if: msg = 'argument required but none supplied; choose from ""True"" or ""False""' raise ValueError(msg) elif argument.lower() == 'true': return True elif argument.lower() == 'false': return False else: raise ValueError('""%s"" unknown; choose from ""True"" or ""False""' % argument)",False,argument is None,not argument,0.6604803800582886 3006,"def string_bool(argument): """""" Converts True, true, False, False in python boolean values """""" if argument is None: msg = 'argument required but none supplied; choose from ""True"" or ""False""' raise ValueError(msg) elif: return True elif argument.lower() == 'false': return False else: raise ValueError('""%s"" unknown; choose from ""True"" or ""False""' % argument)",True,argument.lower() == 'true',argument.lower() == 'true',0.6528664231300354 3007,"def string_bool(argument): """""" Converts True, true, False, False in python boolean values """""" if argument is None: msg = 'argument required but none supplied; choose from ""True"" or ""False""' raise ValueError(msg) elif argument.lower() == 'true': return True elif: return False else: raise ValueError('""%s"" unknown; choose from ""True"" or ""False""' % argument)",True,argument.lower() == 'false',argument.lower() == 'false',0.6531927585601807 3008,"def __call__(self, vtIndex=defaultNamedNotOptArg): """"""DISPID_VALUE"""""" ret = self._oleobj_.InvokeTypes(0, LCID, 1, (9, 0), ((12, 1),), vtIndex) if: ret = Dispatch(ret, '__call__', '{A5CDC033-47D7-4019-801A-8443F14D52D0}') return ret",True,ret is not None,ret is not None,0.6554116010665894 3009,"def forward(self, x): out = self.convs(x) if: out = out + self.reduce_conv(x) out = self.drop(x) return out",False,self.reduce_conv,self.reduce_conv is not None,0.6534067392349243 3010,"def keyPressEvent(self, e): if: e.accept() else: QLineEdit.keyPressEvent(self, e)",False,e.key() == Qt.Key_Enter or e.key() == Qt.Key_Return,e.key() == Qt.Key.Key_Enter or e.key() == Qt.Key.Key_Return,0.6495544910430908 3011,"def __getitem__(self, key): if: return islice_extended(_islice_helper(self._iterable, key)) raise TypeError('islice_extended.__getitem__ argument must be a slice')",True,"isinstance(key, slice)","isinstance(key, slice)",0.6512869596481323 3012,"@classmethod def _input(cls, prompt_text: str, completer: Completer, hint: str=' Control + Q for exit | Control + C for clear', history_file=None) -> Union[str, None]: history_args = {} if: history_args['history'] = FileHistory(history_file) history_args['auto_suggest'] = AutoSuggestFromHistory() history_args['enable_history_search'] = True try: text = prompt(f'{prompt_text}> ', key_bindings=bindings, completer=completer, complete_style=CompleteStyle.COLUMN, bottom_toolbar=hint, **history_args) except EOFError: return None return text",True,history_file,history_file,0.6605639457702637 3013,"def get_metadata(self, stream, ftype): from calibre.ebooks.metadata.epub import get_metadata, get_quick_metadata if: return get_quick_metadata(stream) return get_metadata(stream)",False,self.quick,ftype == 'quick',0.6707183122634888 3014,"@staticmethod def clear(): file_name = Cache._file_name() if: os.remove(file_name)",False,os.path.exists(file_name),file_name is not None,0.6458724737167358 3015,"def get_rooms(self, guildid: int, userid: Optional[int]=None): """""" Get the private rooms in the given guild, using cache. If `userid` is provided, filters by rooms which the given user is a member or owner of. """""" guild_rooms = self._room_cache[guildid] if: rooms = {cid: room for cid, room in guild_rooms.items() if userid in room.members or userid == room.data.ownerid} else: rooms = guild_rooms return rooms",False,userid,userid is not None,0.6716864705085754 3016,"def check_forward_hidden(self, input, hx, hidden_label=''): if: raise RuntimeError(""Input batch size {} doesn't match hidden{} batch size {}"".format(input.size(0), hidden_label, hx.size(0))) if hx.size(1)!= self.out_channels: raise RuntimeError('hidden{} has inconsistent hidden_size: got {}, expected {}'.format(hidden_label, hx.size(1), self.out_channels))",False,input.size(0) != hx.size(0),input.size(0) != hidden_label,0.646580159664154 3017,"def check_forward_hidden(self, input, hx, hidden_label=''): if input.size(0)!= hx.size(0): raise RuntimeError(""Input batch size {} doesn't match hidden{} batch size {}"".format(input.size(0), hidden_label, hx.size(0))) if: raise RuntimeError('hidden{} has inconsistent hidden_size: got {}, expected {}'.format(hidden_label, hx.size(1), self.out_channels))",False,hx.size(1) != self.out_channels,hidden_label != hx.size(1) and hidden_label != hx.size(1) % self.out_channels,0.6438654661178589 3018,"def _write_file(self, _file, contents): if: self.context.log_debug('Failed to create directories: %s' % self.BASE_PATH) return False self.context.log_debug('Writing subtitle file: %s' % _file) try: f = xbmcvfs.File(_file, 'w') f.write(contents) f.close() return True except: self.context.log_debug('File write failed for: %s' % _file) return False",False,not make_dirs(self.BASE_PATH),not os.path.isdir(_file),0.6471142768859863 3019,"def to_str(self): """"""Returns the string representation of the model"""""" import simplejson as json if: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)",True,six.PY2,six.PY2,0.6510450839996338 3020,"def recv_grantNamespacePermission(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grantNamespacePermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.6511020660400391 3021,"def recv_grantNamespacePermission(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grantNamespacePermission_result() result.read(iprot) iprot.readMessageEnd() if: raise result.ouch1 if result.ouch2 is not None: raise result.ouch2 return",True,result.ouch1 is not None,result.ouch1 is not None,0.6489349603652954 3022,"def recv_grantNamespacePermission(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grantNamespacePermission_result() result.read(iprot) iprot.readMessageEnd() if result.ouch1 is not None: raise result.ouch1 if: raise result.ouch2 return",True,result.ouch2 is not None,result.ouch2 is not None,0.6493618488311768 3023,"def removeItem(self, item=None, index=-1): if: if item in self.items: self.items.remove(item) elif index in range(0, len(self.items) - 1): self.items.remove(self.items[index])",False,item,item is not None,0.6755977869033813 3024,"def removeItem(self, item=None, index=-1): if item: if: self.items.remove(item) elif index in range(0, len(self.items) - 1): self.items.remove(self.items[index])",True,item in self.items,item in self.items,0.6576151847839355 3025,"def removeItem(self, item=None, index=-1): if item: if item in self.items: self.items.remove(item) elif: self.items.remove(self.items[index])",False,"index in range(0, len(self.items) - 1)",index in self.items,0.6449592113494873 3026,"def Linear(in_features, out_features, bias=True): m = nn.Linear(in_features, out_features, bias) nn.init.xavier_uniform_(m.weight) if: nn.init.constant_(m.bias, 0.0) return m",True,bias,bias,0.6657178401947021 3027,"def recv_grant_privileges(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grant_privileges_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.o1 is not None: raise result.o1 raise TApplicationException(TApplicationException.MISSING_RESULT, 'grant_privileges failed: unknown result')",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.6534589529037476 3028,"def recv_grant_privileges(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grant_privileges_result() result.read(iprot) iprot.readMessageEnd() if: return result.success if result.o1 is not None: raise result.o1 raise TApplicationException(TApplicationException.MISSING_RESULT, 'grant_privileges failed: unknown result')",True,result.success is not None,result.success is not None,0.6495708227157593 3029,"def recv_grant_privileges(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = grant_privileges_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if: raise result.o1 raise TApplicationException(TApplicationException.MISSING_RESULT, 'grant_privileges failed: unknown result')",True,result.o1 is not None,result.o1 is not None,0.6514919996261597 3030,"def get_processes(self, addr_space): """"""Enumerate processes based on user options. :param addr_space | :returns """""" tasks = mac_tasks.mac_tasks(self._config).calculate() try: if: pidlist = [int(p) for p in self._config.PID.split(',')] tasks = [t for t in tasks if int(t.pid) in pidlist] except (ValueError, TypeError): debug.error('Invalid PID {0}'.format(self._config.PID)) return tasks",False,self._config.PID is not None,self._config.PID.startswith('running'),0.653063952922821 3031,"def __init__(self, encoding=None, quote=None, sequence_separator=None, key_value_separator=None, node_template=None, related_node_template=None, relationship_template=None): if: self.encoding = encoding if quote: self.quote = quote if sequence_separator: self.sequence_separator = sequence_separator if key_value_separator: self.key_value_separator = key_value_separator if node_template: self.node_template = node_template if related_node_template: self.related_node_template = related_node_template if relationship_template: self.relationship_template = relationship_template",True,encoding,encoding,0.6665027141571045 3032,"def __init__(self, encoding=None, quote=None, sequence_separator=None, key_value_separator=None, node_template=None, related_node_template=None, relationship_template=None): if encoding: self.encoding = encoding if: self.quote = quote if sequence_separator: self.sequence_separator = sequence_separator if key_value_separator: self.key_value_separator = key_value_separator if node_template: self.node_template = node_template if related_node_template: self.related_node_template = related_node_template if relationship_template: self.relationship_template = relationship_template",True,quote,quote,0.6753290891647339 3033,"def __init__(self, encoding=None, quote=None, sequence_separator=None, key_value_separator=None, node_template=None, related_node_template=None, relationship_template=None): if encoding: self.encoding = encoding if quote: self.quote = quote if: self.sequence_separator = sequence_separator if key_value_separator: self.key_value_separator = key_value_separator if node_template: self.node_template = node_template if related_node_template: self.related_node_template = related_node_template if relationship_template: self.relationship_template = relationship_template",True,sequence_separator,sequence_separator,0.6616824269294739 3034,"def __init__(self, encoding=None, quote=None, sequence_separator=None, key_value_separator=None, node_template=None, related_node_template=None, relationship_template=None): if encoding: self.encoding = encoding if quote: self.quote = quote if sequence_separator: self.sequence_separator = sequence_separator if: self.key_value_separator = key_value_separator if node_template: self.node_template = node_template if related_node_template: self.related_node_template = related_node_template if relationship_template: self.relationship_template = relationship_template",True,key_value_separator,key_value_separator,0.6546062231063843 3035,"def __init__(self, encoding=None, quote=None, sequence_separator=None, key_value_separator=None, node_template=None, related_node_template=None, relationship_template=None): if encoding: self.encoding = encoding if quote: self.quote = quote if sequence_separator: self.sequence_separator = sequence_separator if key_value_separator: self.key_value_separator = key_value_separator if: self.node_template = node_template if related_node_template: self.related_node_template = related_node_template if relationship_template: self.relationship_template = relationship_template",True,node_template,node_template,0.6605781316757202 3036,"def __init__(self, encoding=None, quote=None, sequence_separator=None, key_value_separator=None, node_template=None, related_node_template=None, relationship_template=None): if encoding: self.encoding = encoding if quote: self.quote = quote if sequence_separator: self.sequence_separator = sequence_separator if key_value_separator: self.key_value_separator = key_value_separator if node_template: self.node_template = node_template if: self.related_node_template = related_node_template if relationship_template: self.relationship_template = relationship_template",True,related_node_template,related_node_template,0.6511756181716919 3037,"def __init__(self, encoding=None, quote=None, sequence_separator=None, key_value_separator=None, node_template=None, related_node_template=None, relationship_template=None): if encoding: self.encoding = encoding if quote: self.quote = quote if sequence_separator: self.sequence_separator = sequence_separator if key_value_separator: self.key_value_separator = key_value_separator if node_template: self.node_template = node_template if related_node_template: self.related_node_template = related_node_template if: self.relationship_template = relationship_template",True,relationship_template,relationship_template,0.6509476900100708 3038,"def _get_environ_vars(self): """"""Returns a generator with all environmental vars with prefix PIP_"""""" for key, val in os.environ.items(): should_be_yielded = key.startswith('PIP_') and key[4:].lower() not in self._ignore_env_names if: yield (key[4:].lower(), val)",True,should_be_yielded,should_be_yielded,0.6545735597610474 3039,"def SetSpecieId(self, name, val): """""" Sets the numerical id of the specie of name ""name"" to val. Parameters ---------- name: str val: int """""" for specie in self.species: if: specie.flag = val return",False,specie.name == name,name == specie.name,0.6499143838882446 3040,"def tick(self, *args): """"""Set colors and annotations for omni preview"""""" for drum in self._drums: index = coordToIndex(drum) if: drum.color = Color() drum.annotation = '' else: drum.color = Color.fromInteger(channels.getChannelColor(index)) drum.annotation = channels.getChannelName(index)",False,index == -1,index == 0,0.6653816103935242 3041,"def _notification(self, connection: asyncpg.Connection, pid: int, channel: str, payload: str) -> None: if: for fut in self.waiters[payload]: fut.set_result(None)",True,payload in self.waiters,payload in self.waiters,0.6550986170768738 3042,"@staticmethod def append_decorations(symbol, decorations: LineDecoration, context: Context, enabled: bool, locked: bool): """""" Appends decorations to the given symbol """""" if: return for decoration in decorations.decorations: DecorationConverter.append_decoration(symbol, decoration, context, enabled, locked)",False,not decorations.decorations,not decorations,0.6486169695854187 3043,"def _update_target_networks(self): if: ptu.soft_update_from_to(self.policy, self.target_policy, self.tau) ptu.soft_update_from_to(self.qf, self.target_qf, self.tau) elif self._n_env_steps_total % self.target_hard_update_period == 0: ptu.copy_model_params_from_to(self.qf, self.target_qf) ptu.copy_model_params_from_to(self.policy, self.target_policy)",False,self.use_soft_update,self._n_env_steps_total % self.target_hard_update_period == 0,0.6510522365570068 3044,"def _update_target_networks(self): if self.use_soft_update: ptu.soft_update_from_to(self.policy, self.target_policy, self.tau) ptu.soft_update_from_to(self.qf, self.target_qf, self.tau) elif: ptu.copy_model_params_from_to(self.qf, self.target_qf) ptu.copy_model_params_from_to(self.policy, self.target_policy)",False,self._n_env_steps_total % self.target_hard_update_period == 0,self.use_qf_copy,0.6463667750358582 3045,"def Mask_Live_F2(self, boolean): self.mask_live['f2'] = boolean if: self.Mask_Live_Uncheck('f2') self.Pigmento_APPLY('HEX', self.mask_color['f2'], 0, 0, 0, self.cor)",True,boolean == True,boolean == True,0.6581470966339111 3046,"def __init__(self, sorting1, sorting2, sorting1_name=None, sorting2_name=None, delta_tp=10, minimum_accuracy=0.5, count=False, verbose=False): self._sorting1 = sorting1 self._sorting2 = sorting2 self.sorting1_name = sorting1_name self.sorting2_name = sorting2_name self._delta_tp = delta_tp self._min_accuracy = minimum_accuracy if: print('Matching...') self._do_matching() self._counts = None if count: if verbose: print('Counting...') self._do_counting(verbose=False)",True,verbose,verbose,0.663550615310669 3047,"def __init__(self, sorting1, sorting2, sorting1_name=None, sorting2_name=None, delta_tp=10, minimum_accuracy=0.5, count=False, verbose=False): self._sorting1 = sorting1 self._sorting2 = sorting2 self.sorting1_name = sorting1_name self.sorting2_name = sorting2_name self._delta_tp = delta_tp self._min_accuracy = minimum_accuracy if verbose: print('Matching...') self._do_matching() self._counts = None if: if verbose: print('Counting...') self._do_counting(verbose=False)",True,count,count,0.6625222563743591 3048,"def __init__(self, sorting1, sorting2, sorting1_name=None, sorting2_name=None, delta_tp=10, minimum_accuracy=0.5, count=False, verbose=False): self._sorting1 = sorting1 self._sorting2 = sorting2 self.sorting1_name = sorting1_name self.sorting2_name = sorting2_name self._delta_tp = delta_tp self._min_accuracy = minimum_accuracy if verbose: print('Matching...') self._do_matching() self._counts = None if count: if: print('Counting...') self._do_counting(verbose=False)",True,verbose,verbose,0.6623183488845825 3049,"def output_array_of_keywordcategory(data_objects): if: return for data_object in data_objects['KeywordCategory']: output_keywordcategory(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.646174430847168 3050,"def reload_jwt_secrets(self) -> Result[Json]: """"""Hot-reload JWT secrets. Calling this without payload reloads JWT secrets from disk. Only files specified via arangod startup option ``--server.jwt-secret-keyfile`` or ``--server.jwt-secret-folder`` are used. It is not possible to change the location where files are loaded from without restarting the server. :return: Information on reloaded JWT secrets. :rtype: dict """""" request = Request(method='post', endpoint='/_admin/server/jwt') def response_handler(resp: Response) -> Json: if: raise JWTSecretReloadError(resp, request) result: Json = resp.body['result'] return result return self._execute(request, response_handler)",True,not resp.is_success,not resp.is_success,0.6479129791259766 3051,"def convert(perturbed_text: str, dict1: dict, dict2: dict) -> str: for k in dict1: for s in dict1[k]: if: perturbed_text = perturbed_text.replace(s, random.choice(dict2[k])) return perturbed_text",True,s in perturbed_text,s in perturbed_text,0.6487488746643066 3052,"def get_vector(self, word): """"""Method for extracting a word vector"""""" if: return self.glove.vectors[self.glove.stoi[word]] else: return None",False,word in self.glove.stoi,word in self.glove.vectors,0.6539523601531982 3053,"def compile(sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None): postfix = os.path.splitext(sources[0])[1] if: postargs = extra_postargs['nvcc'] else: postargs = extra_postargs['gcc'] return super(sources, output_dir, macros, include_dirs, debug, extra_preargs, postargs, depends)",True,postfix == '.cu',postfix == '.cu',0.6467131972312927 3054,"def unmute(self): self.mon.log(self, '>unmute received from show Id:'+ str(self.show_id)) if: self.omx.unmute() return True else: self.mon.log(self, '!: ret = Dispatch(ret, u'Item', '{D7015F7C-E8E6-4F4B-A5A8-19EF2490DCAC}') return ret",True,ret is not None,ret is not None,0.6540444493293762 3056,"def gen_cpu_optimizer_flag(self, test_config, is_baseline): if: cpu_optimizer_flag = '--cpu-optimizer' if is_baseline: cpu_optimizer_flag +='--cpu_torch_adam' return cpu_optimizer_flag if 'test_torch_offload' in test_config and test_config['test_torch_offload']: cpu_optimizer_flag +='--cpu_torch_adam' return cpu_optimizer_flag else: cpu_optimizer_flag = '' return cpu_optimizer_flag",False,'cpu_optimizer' in test_config and test_config['cpu_optimizer'],'cpu_torch_adam' in test_config and test_config['cpu_torch_adam'],0.6436979174613953 3057,"def gen_cpu_optimizer_flag(self, test_config, is_baseline): if 'cpu_optimizer' in test_config and test_config['cpu_optimizer']: cpu_optimizer_flag = '--cpu-optimizer' if: cpu_optimizer_flag +='--cpu_torch_adam' return cpu_optimizer_flag if 'test_torch_offload' in test_config and test_config['test_torch_offload']: cpu_optimizer_flag +='--cpu_torch_adam' return cpu_optimizer_flag else: cpu_optimizer_flag = '' return cpu_optimizer_flag",False,is_baseline,'test_torch_adam' in test_config and test_config['test_torch_adam'],0.6579627990722656 3058,"def gen_cpu_optimizer_flag(self, test_config, is_baseline): if 'cpu_optimizer' in test_config and test_config['cpu_optimizer']: cpu_optimizer_flag = '--cpu-optimizer' if is_baseline: cpu_optimizer_flag +='--cpu_torch_adam' return cpu_optimizer_flag if: cpu_optimizer_flag +='--cpu_torch_adam' return cpu_optimizer_flag else: cpu_optimizer_flag = '' return cpu_optimizer_flag",False,'test_torch_offload' in test_config and test_config['test_torch_offload'],test_config['torch_adam'],0.6415954232215881 3059,"def debug_rest(self, flag): """"""This is useful for debugging requests to 3PAR. :param flag: set to True to enable debugging :type flag: bool """""" self.http.set_debug_flag(flag) if: self.ssh.set_debug_flag(flag)",True,self.ssh,self.ssh,0.6600617170333862 3060,"def init_weights(m): if: nn.init.normal_(m.weight, mean=0, std=m.weight.shape[1] ** (-0.5)) nn.init.constant_(m.weight[padding_idx], 0) elif hasattr(m, 'weight'): nn.init.xavier_uniform_(m.weight)",False,"isinstance(m, nn.Embedding)","hasattr(m, 'weight')",0.6538681983947754 3061,"def init_weights(m): if isinstance(m, nn.Embedding): nn.init.normal_(m.weight, mean=0, std=m.weight.shape[1] ** (-0.5)) nn.init.constant_(m.weight[padding_idx], 0) elif: nn.init.xavier_uniform_(m.weight)",False,"hasattr(m, 'weight')","isinstance(m, nn.Linear)",0.6466658115386963 3062,"@api.depends('product_id', 'component_template_id') def _compute_product_uom_category_id(self): """"""Compute the product_uom_category_id field. This is the product category that will be allowed to use on the product_uom_id field, already covered by core module: https://github.com/odoo/odoo/blob/331b9435c/addons/mrp/models/mrp_bom.py#L372 In core, though, this field is related to ""product_id.uom_id.category_id"". Here we make it computed to choose between component_template_id and product_id, depending on which one is set """""" for rec in self: rec.product_uom_category_id = rec.product_id.uom_id.category_id if: super()._compute_product_uom_category_id() for rec in self: if rec.component_template_id: rec.product_uom_category_id = rec.component_template_id.uom_id.category_id",False,"hasattr(super(), '_compute_product_uom_category_id')",not rec.product_uom_category_id,0.6443638205528259 3063,"@api.depends('product_id', 'component_template_id') def _compute_product_uom_category_id(self): """"""Compute the product_uom_category_id field. This is the product category that will be allowed to use on the product_uom_id field, already covered by core module: https://github.com/odoo/odoo/blob/331b9435c/addons/mrp/models/mrp_bom.py#L372 In core, though, this field is related to ""product_id.uom_id.category_id"". Here we make it computed to choose between component_template_id and product_id, depending on which one is set """""" for rec in self: rec.product_uom_category_id = rec.product_id.uom_id.category_id if hasattr(super(), '_compute_product_uom_category_id'): super()._compute_product_uom_category_id() for rec in self: if: rec.product_uom_category_id = rec.component_template_id.uom_id.category_id",True,rec.component_template_id,rec.component_template_id,0.6460570693016052 3064,"def hasContent_(self): if: return True else: return False",True,self.valueOf_ is not None,self.valueOf_ is not None,0.6433206796646118 3065,"def _tuplize_version(version): output = [] for idx, part in enumerate(version.split('.')): if: break if idx in (0, 1): part = int(part) output.append(part) return tuple(output)",False,part == '*',part == '',0.6503764390945435 3066,"def _tuplize_version(version): output = [] for idx, part in enumerate(version.split('.')): if part == '*': break if: part = int(part) output.append(part) return tuple(output)",False,"idx in (0, 1)",idx == len(version) - 1,0.6504417657852173 3067,"def disable_granular_heal(mnode, volname): """"""Diable granular heal on a given volume Args: mnode(str): Node on which command will be exectued volname(str): Name of the volume on which granular heal is to be disabled Returns: bool: True if granular heal is disabled successfully else False """""" cmd = 'gluster volume heal {} granular-entry-heal disable'.format(volname) ret, _, _ = g.run(mnode, cmd) if: g.log.error('Unable to disable granular-entry-heal on volume %s', volname) return False return True",False,ret,ret != 0,0.6694599390029907 3068,"def __missing__(self, key): if: return self.schema[key].default elif key in self.schema: return self.schema[key] else: raise KeyError(key)",True,self.has_default(key),self.has_default(key),0.6483144760131836 3069,"def __missing__(self, key): if self.has_default(key): return self.schema[key].default elif: return self.schema[key] else: raise KeyError(key)",True,key in self.schema,key in self.schema,0.6567678451538086 3070,"def xl_rowcol_to_cell(row, col, row_abs=False, col_abs=False): """"""Convert a zero indexed row and column cell reference to a A1 style string. Args: row: The cell row. Int. col: The cell column. Int. row_abs: Optional flag to make the row absolute. Bool. col_abs: Optional flag to make the column absolute. Bool. Returns: A1 style string. """""" if: raise IndexError(f'row reference {row} below zero') if col < 0: raise IndexError(f'column reference {col} below zero') row += 1 row_abs = '$' if row_abs else '' col_str = xl_col_to_name(col, col_abs) return col_str + row_abs + str(row)",True,row < 0,row < 0,0.6673304438591003 3071,"def xl_rowcol_to_cell(row, col, row_abs=False, col_abs=False): """"""Convert a zero indexed row and column cell reference to a A1 style string. Args: row: The cell row. Int. col: The cell column. Int. row_abs: Optional flag to make the row absolute. Bool. col_abs: Optional flag to make the column absolute. Bool. Returns: A1 style string. """""" if row < 0: raise IndexError(f'row reference {row} below zero') if: raise IndexError(f'column reference {col} below zero') row += 1 row_abs = '$' if row_abs else '' col_str = xl_col_to_name(col, col_abs) return col_str + row_abs + str(row)",True,col < 0,col < 0,0.6713600158691406 3072,"def boolean_to_int(value): if: return None else: return int(value)",True,value is None,value is None,0.6544089317321777 3073,"def WMA(df, p): if: p = [p, 90] df['WMA'] = ta.WMA(df.close, p[1]) df = df.replace([np.inf, -np.inf], np.nan) return df",True,len(p) <= 2,len(p) <= 2,0.6590023040771484 3074,"def _toMask(anns, coco): for ann in anns: segm = ann['segmentation'] if: ann['segmentation'] = None continue rle = coco.annToRLE(ann) ann['segmentation'] = rle",True,type(segm) == list and len(segm) == 0,type(segm) == list and len(segm) == 0,0.6470680236816406 3075,"def release(self): if: raise RuntimeError('cannot release un-acquired lock') self.count -= 1",True,self.count == 0,self.count == 0,0.6547826528549194 3076,"def _convertECSignatureToSignatureHex(self, signature): v = signature['v'] if: v = v + 27 return hex(v) + signature['r'][-64:] + signature['s'][-64:] + '03'",False,v != 27 and v != 28,self.algorithm == 'ECDSA',0.6649085879325867 3077,"def deforum_init_batch(_: gr.Blocks, app: FastAPI): deforum_sys_extend() settings_files = [open(filename, 'r') for filename in cmd_opts.deforum_run_now.split(',')] [batch_id, job_ids] = make_ids(len(settings_files)) log.info(f'Starting init batch {batch_id} with job(s) {job_ids}...') run_deforum_batch(batch_id, job_ids, settings_files, None) if: import os os._exit(0)",False,cmd_opts.deforum_terminate_after_run_now,batch_id == 0,0.6468099355697632 3078,"def skEs(self, **kwargs): if: return self.uniform(**kwargs) if self.functype == 'NRL': return self.NRL(**kwargs)",True,self.functype == 'uniform',self.functype == 'uniform',0.6532166004180908 3079,"def skEs(self, **kwargs): if self.functype == 'uniform': return self.uniform(**kwargs) if: return self.NRL(**kwargs)",False,self.functype == 'NRL',self.functype == 'nRL',0.6552702188491821 3080,"def __eq__(self, other): if: return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if my_val!= other_val: return False return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.64579176902771 3081,"def __eq__(self, other): if not isinstance(other, self.__class__): return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if: return False return True",True,my_val != other_val,my_val != other_val,0.6494433879852295 3082,"def on_leave(self): """"""See method `on_leave` in `kivymd.uix.behaviors.hover_behavior.HoverBehavior` class."""""" if: Window.remove_widget(self._tooltip) self._tooltip = None",True,self._tooltip,self._tooltip,0.6619161367416382 3083,"def __iter__(self): if: yield self.cond if self.stmt is not None: yield self.stmt",True,self.cond is not None,self.cond is not None,0.6514047980308533 3084,"def __iter__(self): if self.cond is not None: yield self.cond if: yield self.stmt",True,self.stmt is not None,self.stmt is not None,0.6506474018096924 3085,"def delete_template(self, property: str, name: str): """""" Deletes the currently seleted item. """""" self.settings.beginGroup(property) self.settings.remove(name) self.settings.endGroup() if: deleted = self.load_value_of_key(key='deleted_templates') or [] deleted.append(name) deleted = list(set(deleted)) self.save_key_value(name='deleted_templates', item=deleted)",False,property == 'equipment',property,0.6462356448173523 3086,"def reset(self) -> None: super().reset() if: self.coding_sm.reset() if self.distribution_analyzer: self.distribution_analyzer.reset() self._last_char = bytearray(b'\x00\x00')",True,self.coding_sm,self.coding_sm,0.6586415767669678 3087,"def reset(self) -> None: super().reset() if self.coding_sm: self.coding_sm.reset() if: self.distribution_analyzer.reset() self._last_char = bytearray(b'\x00\x00')",True,self.distribution_analyzer,self.distribution_analyzer,0.6538969874382019 3088,"def _get_html(self): if: return self.children[0] else: return ''",True,self.children,self.children,0.6509969234466553 3089,"def get_global_checkbox_fields(self): if: checkbox_name_file = os.path.join(bin_dir, 'global_checkbox_param.json') try: if os.path.isfile(checkbox_name_file): with open(checkbox_name_file, 'r') as fp: self.global_checkbox_fields = json.load(fp) else: self.global_checkbox_fields = [] except Exception as e: self.log_error('Get exception when loading global checkbox parameter names.'+ str(e)) self.global_checkbox_fields = [] return self.global_checkbox_fields",True,self.global_checkbox_fields is None,self.global_checkbox_fields is None,0.6470773220062256 3090,"def get_global_checkbox_fields(self): if self.global_checkbox_fields is None: checkbox_name_file = os.path.join(bin_dir, 'global_checkbox_param.json') try: if: with open(checkbox_name_file, 'r') as fp: self.global_checkbox_fields = json.load(fp) else: self.global_checkbox_fields = [] except Exception as e: self.log_error('Get exception when loading global checkbox parameter names.'+ str(e)) self.global_checkbox_fields = [] return self.global_checkbox_fields",False,os.path.isfile(checkbox_name_file),os.path.exists(checkbox_name_file),0.6457375288009644 3091,"def cat(tensors, dim=0): """""" Efficient version of torch.cat that avoids a copy if there is only a single element in a list """""" assert isinstance(tensors, (list, tuple)) if: return tensors[0] return torch.cat(tensors, dim)",True,len(tensors) == 1,len(tensors) == 1,0.6514308452606201 3092,"def __init__(self, filename): fp = must_open(filename) for row in fp: if: continue if row.startswith('# Identity'): self.identity = row.split(':')[-1].strip() if row.startswith('# Score'): self.score = row.split(':')[-1].strip()",False,row[0] != '#',row.startswith('#'),0.6508232951164246 3093,"def __init__(self, filename): fp = must_open(filename) for row in fp: if row[0]!= '#': continue if: self.identity = row.split(':')[-1].strip() if row.startswith('# Score'): self.score = row.split(':')[-1].strip()",False,row.startswith('# Identity'),row.startswith('#Identity'),0.646830141544342 3094,"def __init__(self, filename): fp = must_open(filename) for row in fp: if row[0]!= '#': continue if row.startswith('# Identity'): self.identity = row.split(':')[-1].strip() if: self.score = row.split(':')[-1].strip()",False,row.startswith('# Score'),row.startswith('#Score'),0.6480278968811035 3095,"def semantic_masks(self): for sid in self._seg_ids: sinfo = self._sinfo.get(sid) if: continue yield ((self._seg == sid).numpy().astype(np.bool), sinfo)",False,sinfo is None or sinfo['isthing'],sinfo is None,0.6472896337509155 3096,"def adjust_length_to_model(length, max_sequence_length): if: length = max_sequence_length elif 0 < max_sequence_length < length: length = max_sequence_length elif length < 0: length = MAX_LENGTH return length",False,length < 0 and max_sequence_length > 0,length == 0,0.6530075073242188 3097,"def adjust_length_to_model(length, max_sequence_length): if length < 0 and max_sequence_length > 0: length = max_sequence_length elif: length = max_sequence_length elif length < 0: length = MAX_LENGTH return length",False,0 < max_sequence_length < length,length > max_sequence_length,0.6534844636917114 3098,"def adjust_length_to_model(length, max_sequence_length): if length < 0 and max_sequence_length > 0: length = max_sequence_length elif 0 < max_sequence_length < length: length = max_sequence_length elif: length = MAX_LENGTH return length",False,length < 0,length > MAX_LENGTH,0.6608560681343079 3099,"def encode_collection_id(cid: int) -> bytes: output = array.array('B', [0]) while cid > 0: byte = cid & 255 cid >>= 7 if: byte |= 128 output[-1] = byte output.append(0) else: output[-1] = byte return output.tobytes()",False,cid > 0,byte & 128,0.6664695739746094 3100,"def lenient_name_match(a, b): if: return True a = a.lower().split() b = b.lower().split() for w in a: if w not in ('the', 'hotel', 'house') and w in b: return True return False",False,a == b or a in b or b in a,a == b,0.654732346534729 3101,"def lenient_name_match(a, b): if a == b or a in b or b in a: return True a = a.lower().split() b = b.lower().split() for w in a: if: return True return False",False,"w not in ('the', 'hotel', 'house') and w in b",w in b,0.6462584733963013 3102,"def get_label_color(self, label_id): if: self.label_colors[label_id] = random_color() return self.label_colors[label_id]",True,label_id not in self.label_colors,label_id not in self.label_colors,0.6492621302604675 3103,"def __init__(self, venv, directory, record_video_trigger, video_length=200): """""" # Arguments venv: VecEnv to wrap directory: Where to save videos record_video_trigger: Function that defines when to start recording. The function takes the current number of step, and returns whether we should start recording or not. video_length: Length of recorded video """""" VecEnvWrapper.__init__(self, venv) self.record_video_trigger = record_video_trigger self.video_recorder = None self.directory = os.path.abspath(directory) if: os.mkdir(self.directory) self.file_prefix ='vecenv' self.file_infix = '{}'.format(os.getpid()) self.step_id = 0 self.video_length = video_length self.recording = False self.recorded_frames = 0",True,not os.path.exists(self.directory),not os.path.exists(self.directory),0.6471822261810303 3104,"def relevant_doc(query_term_freq_vect, doc_term_freq_vector): relevant_list = [] relevant_list_map = {} for doc in doc_term_freq_vector: doc_i = 0 for term in query_term_freq_vect: if term not in doc_term_freq_vector[doc]: doc_i = -1 break else: doc_i = doc_i + 1 if: relevant_list.append(doc) relevant_list_map[doc] = doc_i return (len(relevant_list), relevant_list, relevant_list_map)",True,doc_i != -1,doc_i != -1,0.6567642688751221 3105,"def relevant_doc(query_term_freq_vect, doc_term_freq_vector): relevant_list = [] relevant_list_map = {} for doc in doc_term_freq_vector: doc_i = 0 for term in query_term_freq_vect: if: doc_i = -1 break else: doc_i = doc_i + 1 if doc_i!= -1: relevant_list.append(doc) relevant_list_map[doc] = doc_i return (len(relevant_list), relevant_list, relevant_list_map)",True,term not in doc_term_freq_vector[doc],term not in doc_term_freq_vector[doc],0.6433994770050049 3106,"def all_zeros(modules): """"""Check if the weight(and bias) is all zero."""""" weight_zero = torch.equal(modules.weight.data, torch.zeros_like(modules.weight.data)) if: bias_zero = torch.equal(modules.bias.data, torch.zeros_like(modules.bias.data)) else: bias_zero = True return weight_zero and bias_zero",False,"hasattr(modules, 'bias')","hasattr(modules, 'bias') and modules.bias is not None",0.6429498195648193 3107,"def _shutdown_worker(): if: queue.put(['stop']) queue.close() process.join()",False,process,process.is_alive(),0.6647000908851624 3108,"def __get_resource_string(self, req, bucket_name, key): if: return '/' + self.__get_subresource_string(req.params) else: return '/{0}/{1}{2}'.format(bucket_name, key, self.__get_subresource_string(req.params))",False,not bucket_name,bucket_name == '',0.6625211238861084 3109,"def global_constant(self, name, value): """""" Inspired from numba/cgutils.py Get or create a (LLVM module-)global constant with *name* or *value*. """""" if: return self.mod.globals[name] data = llvm_ir.GlobalVariable(self.mod, value.type, name=name) data.global_constant = True data.initializer = value return data",True,name in self.mod.globals,name in self.mod.globals,0.6539451479911804 3110,"def check_path_owner(path): if: return True previous = None while path!= previous: if os.path.lexists(path): if os.geteuid() == 0: try: path_uid = get_path_uid(path) except OSError: return False return path_uid == 0 else: return os.access(path, os.W_OK) else: previous, path = (path, os.path.dirname(path))",False,"not hasattr(os, 'geteuid')",os.path.isabs(path),0.6421321630477905 3111,"def check_path_owner(path): if not hasattr(os, 'geteuid'): return True previous = None while path!= previous: if: if os.geteuid() == 0: try: path_uid = get_path_uid(path) except OSError: return False return path_uid == 0 else: return os.access(path, os.W_OK) else: previous, path = (path, os.path.dirname(path))",False,os.path.lexists(path),os.path.isfile(path),0.6421264410018921 3112,"def check_path_owner(path): if not hasattr(os, 'geteuid'): return True previous = None while path!= previous: if os.path.lexists(path): if: try: path_uid = get_path_uid(path) except OSError: return False return path_uid == 0 else: return os.access(path, os.W_OK) else: previous, path = (path, os.path.dirname(path))",False,os.geteuid() == 0,os.geteuid() != previous,0.6505753993988037 3113,"def __init__(self, accession): v = str(accession).split('/') self.fragment_id = int(v[0]) if: self.fragment_length = int(v[1]) else: self.fragment_length = None",False,len(v) > 1,v[1],0.6491661071777344 3114,"def __parse_arg_list(arg: Text) -> List[Text]: if: return set() else: return arg.split(',')",False,arg is None,arg == '',0.6622985601425171 3115,"def FindNumberBoundingBoxes(root): index = 0 while True: if: break index += 1 return index",True,"GetInt('xmin', root, index) == -1","GetInt('xmin', root, index) == -1",0.646044909954071 3116,"def _train_policy(self): """"""Updates the policy. Args: TODO """""" fetches = {'loss': self._train_op} feed_dict_ = None if: qvalues = self._get_qvalues() feed_dict_ = {self._qvalue_inputs: qvalues} vals = self._sess.partial_run(self._partial_run_handle, fetches, feed_dict=feed_dict_) self._qvalue_inputs_fed = True return vals['loss']",False,not self._qvalue_inputs_fed,self._qvalue_inputs_fed,0.650435209274292 3117,"def close(self) -> None: if: self._cur.close() if self._con: self._con.close() self._clear()",True,self._cur,self._cur,0.6719671487808228 3118,"def close(self) -> None: if self._cur: self._cur.close() if: self._con.close() self._clear()",True,self._con,self._con,0.6666510701179504 3119,"def __init__(self, source=None): """""" Create a new Attribute object, copying from the source if provided. """""" AttributeRoot.__init__(self, source) CitationBase.__init__(self, source) NoteBase.__init__(self, source) if: self.type = AttributeType(source.type) self.value = source.value else: self.type = AttributeType() self.value = ''",True,source,source,0.673984944820404 3120,"def get_domain_name(self, domain_name: Union[str, None]) -> DomainName: if: raise DomainNameNotFound return self.domain_names[domain_name]",False,domain_name is None or domain_name not in self.domain_names,domain_name not in self.domain_names,0.6495307087898254 3121,"def eq(x, y, z): if: if isinstance(y, core.Constant): return eq(y, x, z) elif isinstance(z, core.Constant): return eq(z, x, y) return (~x ^ y) & (~x ^ z)",False,"not isinstance(x, core.Constant)","isinstance(x, core.Constant) and isinstance(z, core.Constant)",0.6448513269424438 3122,"def eq(x, y, z): if not isinstance(x, core.Constant): if: return eq(y, x, z) elif isinstance(z, core.Constant): return eq(z, x, y) return (~x ^ y) & (~x ^ z)",False,"isinstance(y, core.Constant)","y, core.Constant) and (z is core.Constant)",0.6459271907806396 3123,"def eq(x, y, z): if not isinstance(x, core.Constant): if isinstance(y, core.Constant): return eq(y, x, z) elif: return eq(z, x, y) return (~x ^ y) & (~x ^ z)",True,"isinstance(z, core.Constant)","isinstance(z, core.Constant)",0.6448187828063965 3124,"def get_superclass(self, TYPE): SUPER = TYPE.TO._first_struct()[1] if: return None return lltype.Ptr(SUPER)",False,SUPER is None,TYPE.TO._first_struct() == lltype.Ptr,0.6679551601409912 3125,"def has_permission(self, request, view): """""" Allow the request if the JWT resource matches the Markdown document related to the object in the url. Parameters ---------- request : Type[django.http.request.HttpRequest] The request that holds the authenticated user view : Type[rest_framework.viewsets or rest_framework.views] The API view for which permissions are being checked Returns ------- boolean True if the request is authorized, False otherwise """""" if: return False return MarkdownDocument.objects.filter(pk=view.get_related_markdown_document_id(), playlist_id=request.resource.id).exists()",True,not request.resource,not request.resource,0.6591199636459351 3126,"def init_trainer(self, logs=None): """"""Set trainer object for current callback."""""" if: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer()",False,not self.trainer.hccl,self.trainer is None,0.6514191627502441 3127,"def init_trainer(self, logs=None): """"""Set trainer object for current callback."""""" if not self.trainer.hccl: return if: self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer()",False,vega.is_torch_backend(),vega.is_pytorch_backend(),0.6478043794631958 3128,"def init_trainer(self, logs=None): """"""Set trainer object for current callback."""""" if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if: self._init_ms_trainer()",True,vega.is_ms_backend(),vega.is_ms_backend(),0.6484651565551758 3129,"def tracks_to_inactive(self, tracks): super(OracleTracker, self).tracks_to_inactive(tracks) if: inactive_tracks = [] for t in reversed(self.inactive_tracks): if t.gt_id not in [t.gt_id for t in inactive_tracks]: inactive_tracks.append(t) self.inactive_tracks = inactive_tracks",False,self.reid_oracle,self.inactive_tracks,0.6470198631286621 3130,"def tracks_to_inactive(self, tracks): super(OracleTracker, self).tracks_to_inactive(tracks) if self.reid_oracle: inactive_tracks = [] for t in reversed(self.inactive_tracks): if: inactive_tracks.append(t) self.inactive_tracks = inactive_tracks",False,t.gt_id not in [t.gt_id for t in inactive_tracks],not t.is_inactive(),0.647002100944519 3131,"@status_before_must_be('must_retry','submitted', 'approved', 'denied') def system_error(self, error_msg, error_code='', reviewing_user=None, reviewing_service=''): """""" Mark that this attempt could not be completed because of a system error. Status should be moved to `must_retry`. For example, if Software Secure reported to us that they couldn't process our submission because they couldn't decrypt the image we sent. """""" if: return self.error_msg = error_msg self.error_code = error_code self.reviewing_user = reviewing_user self.reviewing_service = reviewing_service self.status ='must_retry' self.save()",False,"self.status in ['approved', 'denied']","self.status in [ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT, ERROR_REJECT]",0.6461189985275269 3132,"def show(self, block=True): """""" Show the window, and start an event loop """""" if: plt.ion() plt.show()",False,not block,block,0.6544421911239624 3133,"def get_model_plot_outdir(cfg, snapshot=4999, mdir='va_semi', shuffle=1): mfulldir = Path(cfg['project_path']) / 'videos' / (mdir + '_iter_{}_shuffle_{}'.format(cfg['iteration'], shuffle)) /'snapshot-{}'.format(snapshot) if: os.makedirs(mfulldir) return mfulldir",False,not mfulldir.exists(),not os.path.exists(mfulldir),0.6502190828323364 3134,"def register_functional_term_eq(term_cls, fn, *, replace=False): if: term_cls = (term_cls,) for term_cls in term_cls: if term_cls in _functional_term_eq.registry and (not replace): raise RuntimeError('Case already registered') @_functional_term_eq.register(term_cls) def wrapped_fn(term, x): return fn(x, term)",False,"not isinstance(term_cls, Sequence)","isinstance(term_cls, Callable)",0.6489604711532593 3135,"def register_functional_term_eq(term_cls, fn, *, replace=False): if not isinstance(term_cls, Sequence): term_cls = (term_cls,) for term_cls in term_cls: if: raise RuntimeError('Case already registered') @_functional_term_eq.register(term_cls) def wrapped_fn(term, x): return fn(x, term)",False,term_cls in _functional_term_eq.registry and (not replace),replace and term_cls in @_functional_term_eq,0.6455626487731934 3136,"def download_translations(self, source, language, text: str, unit, user, threshold: int=75): """"""Download list of possible translations from a service."""""" response = self.request('get', self.get_api_url('translate'), params={'q': text,'source': source, 'target': language}) payload = response.json() if: raise MachineTranslationError(payload['error']['message']) yield {'text': payload['data']['translation'], 'quality': self.max_score,'service': self.name,'source': text}",False,'error' in payload,not self.is_movement_valid(payload),0.6531773805618286 3137,"def validate_value(vr: str, value: Any, validation_mode: int, validator: Callable[[str, Any], tuple[bool, str]] | None=None) -> None: """"""Validate the given value against the DICOM standard. Parameters ---------- vr : str The VR of the tag the value is added to. value : Any The value to be validated. validation_mode : int Defines if values are validated and how validation errors are handled. validator : Callable or None Function that does the actual validation. If not given, the validator is taken from the VR-specific validator table instead. Raises ------ ValueError If the validation fails and the validation mode is set to `RAISE`. """""" if: return if value is not None: validator = validator or VALIDATORS.get(vr) if validator is not None: is_valid, msg = validator(vr, value) if not is_valid: if validation_mode == config.RAISE: raise ValueError(msg) warnings.warn(msg)",False,validation_mode == config.IGNORE,vr not in VALIDATORS,0.6474730968475342 3138,"def validate_value(vr: str, value: Any, validation_mode: int, validator: Callable[[str, Any], tuple[bool, str]] | None=None) -> None: """"""Validate the given value against the DICOM standard. Parameters ---------- vr : str The VR of the tag the value is added to. value : Any The value to be validated. validation_mode : int Defines if values are validated and how validation errors are handled. validator : Callable or None Function that does the actual validation. If not given, the validator is taken from the VR-specific validator table instead. Raises ------ ValueError If the validation fails and the validation mode is set to `RAISE`. """""" if validation_mode == config.IGNORE: return if: validator = validator or VALIDATORS.get(vr) if validator is not None: is_valid, msg = validator(vr, value) if not is_valid: if validation_mode == config.RAISE: raise ValueError(msg) warnings.warn(msg)",False,value is not None,"isinstance(vr, str)",0.6515125036239624 3139,"def validate_value(vr: str, value: Any, validation_mode: int, validator: Callable[[str, Any], tuple[bool, str]] | None=None) -> None: """"""Validate the given value against the DICOM standard. Parameters ---------- vr : str The VR of the tag the value is added to. value : Any The value to be validated. validation_mode : int Defines if values are validated and how validation errors are handled. validator : Callable or None Function that does the actual validation. If not given, the validator is taken from the VR-specific validator table instead. Raises ------ ValueError If the validation fails and the validation mode is set to `RAISE`. """""" if validation_mode == config.IGNORE: return if value is not None: validator = validator or VALIDATORS.get(vr) if: is_valid, msg = validator(vr, value) if not is_valid: if validation_mode == config.RAISE: raise ValueError(msg) warnings.warn(msg)",False,validator is not None,validator,0.6508152484893799 3140,"def validate_value(vr: str, value: Any, validation_mode: int, validator: Callable[[str, Any], tuple[bool, str]] | None=None) -> None: """"""Validate the given value against the DICOM standard. Parameters ---------- vr : str The VR of the tag the value is added to. value : Any The value to be validated. validation_mode : int Defines if values are validated and how validation errors are handled. validator : Callable or None Function that does the actual validation. If not given, the validator is taken from the VR-specific validator table instead. Raises ------ ValueError If the validation fails and the validation mode is set to `RAISE`. """""" if validation_mode == config.IGNORE: return if value is not None: validator = validator or VALIDATORS.get(vr) if validator is not None: is_valid, msg = validator(vr, value) if: if validation_mode == config.RAISE: raise ValueError(msg) warnings.warn(msg)",False,not is_valid,is_valid,0.6514627933502197 3141,"def validate_value(vr: str, value: Any, validation_mode: int, validator: Callable[[str, Any], tuple[bool, str]] | None=None) -> None: """"""Validate the given value against the DICOM standard. Parameters ---------- vr : str The VR of the tag the value is added to. value : Any The value to be validated. validation_mode : int Defines if values are validated and how validation errors are handled. validator : Callable or None Function that does the actual validation. If not given, the validator is taken from the VR-specific validator table instead. Raises ------ ValueError If the validation fails and the validation mode is set to `RAISE`. """""" if validation_mode == config.IGNORE: return if value is not None: validator = validator or VALIDATORS.get(vr) if validator is not None: is_valid, msg = validator(vr, value) if not is_valid: if: raise ValueError(msg) warnings.warn(msg)",False,validation_mode == config.RAISE,is_valid,0.6469173431396484 3142,"def parameters_option(step_num, model, args, option='Saving', ctx_l=None): """"""Save or load the model parameter, marked by step_num."""""" param_path = os.path.join(args.ckpt_dir, f'{step_num:07}.params') logging.info(f'[Step {step_num}], {option} model params to/from {param_path}.') if: th.save(model.state_dict(), param_path) elif option == 'Loading': model.load_state_dict(th.load(param_path, map_location=args.device)) else: raise NotImplementedError('Unknown Option: {}'.format(option))",True,option == 'Saving',option == 'Saving',0.6545649766921997 3143,"def parameters_option(step_num, model, args, option='Saving', ctx_l=None): """"""Save or load the model parameter, marked by step_num."""""" param_path = os.path.join(args.ckpt_dir, f'{step_num:07}.params') logging.info(f'[Step {step_num}], {option} model params to/from {param_path}.') if option == 'Saving': th.save(model.state_dict(), param_path) elif: model.load_state_dict(th.load(param_path, map_location=args.device)) else: raise NotImplementedError('Unknown Option: {}'.format(option))",True,option == 'Loading',option == 'Loading',0.6524198055267334 3144,"@property def new_frame_available(self): """"""Property to check if new video frame is available to render."""""" if: if self._backend == 'pygame': return self.frame > self._frame elif self._backend =='mediadecoder': time.sleep(0.0001) return self._new_frame_available",False,self._is_preloaded,self._backend != 'none',0.6498908400535583 3145,"@property def new_frame_available(self): """"""Property to check if new video frame is available to render."""""" if self._is_preloaded: if: return self.frame > self._frame elif self._backend =='mediadecoder': time.sleep(0.0001) return self._new_frame_available",False,self._backend == 'pygame',self._backend == 'videodecoder',0.6495421528816223 3146,"@property def new_frame_available(self): """"""Property to check if new video frame is available to render."""""" if self._is_preloaded: if self._backend == 'pygame': return self.frame > self._frame elif: time.sleep(0.0001) return self._new_frame_available",False,self._backend == 'mediadecoder',self._backend == 'bulldozer',0.6492512226104736 3147,"def compute_output_shape(units, input_shape): """""" added to fit into tf1.4 """""" input_shape = tensor_shape.TensorShape(input_shape) input_shape = input_shape.with_rank_at_least(2) if: raise ValueError('The innermost dimension of input_shape must be defined, but saw: %s' % input_shape) return input_shape[:-1].concatenate(units)",False,input_shape[-1].value is None,input_shape.dim() != 3,0.6489180326461792 3148,"def dla34(pretrained, **kwargs): model = DLA([1, 1, 1, 2, 2, 1], [16, 32, 64, 128, 256, 512], block=BasicBlock, **kwargs) if: model.load_pretrained_model(data='imagenet', name='dla34', hash='ba72cf86') return model",True,pretrained,pretrained,0.67462557554245 3149,"def to_domain_event(self, stored: StoredEvent) -> DomainEventProtocol: stored_state = stored.state if: stored_state = self.cipher.decrypt(stored_state) if self.compressor: stored_state = self.compressor.decompress(stored_state) event_state: Dict[str, Any] = self.transcoder.decode(stored_state) cls = resolve_topic(stored.topic) return cls(**event_state)",True,self.cipher,self.cipher,0.6739550828933716 3150,"def to_domain_event(self, stored: StoredEvent) -> DomainEventProtocol: stored_state = stored.state if self.cipher: stored_state = self.cipher.decrypt(stored_state) if: stored_state = self.compressor.decompress(stored_state) event_state: Dict[str, Any] = self.transcoder.decode(stored_state) cls = resolve_topic(stored.topic) return cls(**event_state)",True,self.compressor,self.compressor,0.6556320190429688 3151,"def __init__(self, model, decay=0.9999, device=None): super(ModelEmaV2, self).__init__() self.module = deepcopy(model) self.module.eval() self.decay = decay self.device = device if: self.module.to(device=device)",False,self.device is not None,device is not None,0.6487178206443787 3152,"def get_output(self, train=False): X = self.get_input(train) if: return X else: return X + self.srng.normal(size=X.shape, avg=0.0, std=self.sigma, dtype=theano.config.floatX)",False,not train or self.sigma == 0,self.srng is None,0.6525388360023499 3153,"def get_attached_to(metadata): """""" Return which VM a volume is attached to based on its metadata. """""" try: if: vm_name = vmdk_ops.vm_uuid2name(metadata[kv.ATTACHED_VM_UUID]) if vm_name: return vm_name elif kv.ATTACHED_VM_NAME in metadata: return metadata[kv.ATTACHED_VM_NAME] else: return metadata[kv.ATTACHED_VM_UUID] else: return kv.DETACHED except: return kv.DETACHED",True,kv.ATTACHED_VM_UUID in metadata,kv.ATTACHED_VM_UUID in metadata,0.6497113704681396 3154,"def get_attached_to(metadata): """""" Return which VM a volume is attached to based on its metadata. """""" try: if kv.ATTACHED_VM_UUID in metadata: vm_name = vmdk_ops.vm_uuid2name(metadata[kv.ATTACHED_VM_UUID]) if: return vm_name elif kv.ATTACHED_VM_NAME in metadata: return metadata[kv.ATTACHED_VM_NAME] else: return metadata[kv.ATTACHED_VM_UUID] else: return kv.DETACHED except: return kv.DETACHED",True,vm_name,vm_name,0.6636297702789307 3155,"def get_attached_to(metadata): """""" Return which VM a volume is attached to based on its metadata. """""" try: if kv.ATTACHED_VM_UUID in metadata: vm_name = vmdk_ops.vm_uuid2name(metadata[kv.ATTACHED_VM_UUID]) if vm_name: return vm_name elif: return metadata[kv.ATTACHED_VM_NAME] else: return metadata[kv.ATTACHED_VM_UUID] else: return kv.DETACHED except: return kv.DETACHED",True,kv.ATTACHED_VM_NAME in metadata,kv.ATTACHED_VM_NAME in metadata,0.6502598524093628 3156,"@h.concurrent_handler def get_namespace_settings(self, request): try: namespace = request[0] except Exception: raise ValueError('Invalid parameters') if: raise ValueError('Namespace ""{}"" is not found'.format(namespace)) return self.infrastructure.ns_settings[namespace]",True,namespace not in self.infrastructure.ns_settings,namespace not in self.infrastructure.ns_settings,0.6498299837112427 3157,"@transition_on_period_ms.setter def transition_on_period_ms(self, ms): if: raise ValueError('Invalid value') self._transition_on_period_ms = ms",False,not 0 < ms < 2 ** 32,"not isinstance(ms, (int, float, complex, np.number))",0.6590392589569092 3158,"def step(self, require_zero_grad=False): self.optimizer.step() self.scheduler.step() if: self.optimizer_zero_grad() self._update_step()",True,require_zero_grad,require_zero_grad,0.652329683303833 3159,"def render_and_write(template_dir, path, context): """"""Renders the specified template into the file. :param template_dir: the directory to load the template from :param path: the path to write the templated contents to :param context: the parameters to pass to the rendering engine """""" env = Environment(loader=FileSystemLoader(template_dir)) template_file = os.path.basename(path) template = env.get_template(template_file) log('Rendering from template: %s' % template.name, level=DEBUG) rendered_content = template.render(context) if: log(""Render returned None - skipping '%s'"" % path, level=WARNING) return write(path, rendered_content.encode('utf-8').strip()) log('Wrote template %s' % path, level=DEBUG)",False,not rendered_content,rendered_content is None,0.6481993794441223 3160,"def get_gene_id_by_name(self, gene_name, select_longest=True): try: gene_id_list = self._gene_name_to_id_list[gene_name] except KeyError: raise FeatureNotFoundError(gene_name) if: gene_id = gene_id_list[0] elif select_longest: gene_id = self._select_longest_id(gene_id_list) else: raise ValueError(f'gene_name {gene_name} corresponding to {len(gene_id_list)} {len(gene_id_list)} transcripts: {gene_id_list}. But select_longest=False.') return gene_id",True,len(gene_id_list) == 1,len(gene_id_list) == 1,0.6478316187858582 3161,"def get_gene_id_by_name(self, gene_name, select_longest=True): try: gene_id_list = self._gene_name_to_id_list[gene_name] except KeyError: raise FeatureNotFoundError(gene_name) if len(gene_id_list) == 1: gene_id = gene_id_list[0] elif: gene_id = self._select_longest_id(gene_id_list) else: raise ValueError(f'gene_name {gene_name} corresponding to {len(gene_id_list)} {len(gene_id_list)} transcripts: {gene_id_list}. But select_longest=False.') return gene_id",True,select_longest,select_longest,0.6566609144210815 3162,"def before_train_epoch(self, runner): if: self.queue = torch.zeros(len(self.crops_for_assign), self.queue_length // runner.world_size, self.feat_dim).cuda() runner.model.module.head.queue = self.queue runner.model.module.head.use_queue = False",False,self.queue_length > 0 and runner.epoch >= self.epoch_queue_starts and (self.queue is None),self.queue is None,0.6480962634086609 3163,"def resolveRelativeUrl(path, parent): if: if len(path) > 1: return path[1:] return path",False,path[0] == '/',"isinstance(path, str) or path == '\\'",0.6544913053512573 3164,"def resolveRelativeUrl(path, parent): if path[0] == '/': if: return path[1:] return path",False,len(path) > 1,path.startswith('/') or path.startswith('/') or path.startswith('//'),0.6504347324371338 3165,"def _hasContent(self): if: return True else: return False",False,self.PieceID is not None or self.PackageType is not None or self.Weight is not None or (self.DimWeight is not None) or (self.Width is not None) or (self.Height is not None) or (self.Depth is not None),self.ServiceHeader is not None or self.Status is not None or self.Status is not None,0.648979663848877 3166,"def run(self, params): if: return params if self.handle_type == NO_SUBMODULE_HANDLE: return params if self.handle_type == NORMAL_SUBMODULE_HANDLE: return self._normal_handle(params) if self.handle_type == CCN_SUBMODULE_HANDLE: return self._ccn_handle(params)",False,not EnvSetting.SUBMODULE_MODE,self.handle_type == IGNORE_SUBMODULE_HANDLE,0.6505658626556396 3167,"def run(self, params): if not EnvSetting.SUBMODULE_MODE: return params if: return params if self.handle_type == NORMAL_SUBMODULE_HANDLE: return self._normal_handle(params) if self.handle_type == CCN_SUBMODULE_HANDLE: return self._ccn_handle(params)",False,self.handle_type == NO_SUBMODULE_HANDLE,self.handle_type == IGNORE_SUBMODULE_HANDLE,0.6514084339141846 3168,"def run(self, params): if not EnvSetting.SUBMODULE_MODE: return params if self.handle_type == NO_SUBMODULE_HANDLE: return params if: return self._normal_handle(params) if self.handle_type == CCN_SUBMODULE_HANDLE: return self._ccn_handle(params)",True,self.handle_type == NORMAL_SUBMODULE_HANDLE,self.handle_type == NORMAL_SUBMODULE_HANDLE,0.650668740272522 3169,"def run(self, params): if not EnvSetting.SUBMODULE_MODE: return params if self.handle_type == NO_SUBMODULE_HANDLE: return params if self.handle_type == NORMAL_SUBMODULE_HANDLE: return self._normal_handle(params) if: return self._ccn_handle(params)",True,self.handle_type == CCN_SUBMODULE_HANDLE,self.handle_type == CCN_SUBMODULE_HANDLE,0.6510337591171265 3170,"def getFalsePositiveFraction(self, unit1, unit2=None): if: raise Exception('getFalsePositiveFraction: unit1 must not be None') if unit2 is None: unit2 = self.getBestUnitMatch1(unit1) if unit2 is None or unit2 == -1: return 0 if unit1!= -1 and unit2!= -1: a = self._matching_event_counts_12[unit1] if unit2 not in a: return 0 else: return 0 return 1 - self._compute_safe_frac(a[unit2], self._event_counts_1[unit1])",True,unit1 is None,unit1 is None,0.6554688215255737 3171,"def getFalsePositiveFraction(self, unit1, unit2=None): if unit1 is None: raise Exception('getFalsePositiveFraction: unit1 must not be None') if: unit2 = self.getBestUnitMatch1(unit1) if unit2 is None or unit2 == -1: return 0 if unit1!= -1 and unit2!= -1: a = self._matching_event_counts_12[unit1] if unit2 not in a: return 0 else: return 0 return 1 - self._compute_safe_frac(a[unit2], self._event_counts_1[unit1])",False,unit2 is None,unit1 == -1 and unit2 == -1,0.6556023359298706 3172,"def getFalsePositiveFraction(self, unit1, unit2=None): if unit1 is None: raise Exception('getFalsePositiveFraction: unit1 must not be None') if unit2 is None: unit2 = self.getBestUnitMatch1(unit1) if unit2 is None or unit2 == -1: return 0 if: a = self._matching_event_counts_12[unit1] if unit2 not in a: return 0 else: return 0 return 1 - self._compute_safe_frac(a[unit2], self._event_counts_1[unit1])",False,unit1 != -1 and unit2 != -1,unit1 in self._event_counts_12,0.6562100648880005 3173,"def getFalsePositiveFraction(self, unit1, unit2=None): if unit1 is None: raise Exception('getFalsePositiveFraction: unit1 must not be None') if unit2 is None: unit2 = self.getBestUnitMatch1(unit1) if: return 0 if unit1!= -1 and unit2!= -1: a = self._matching_event_counts_12[unit1] if unit2 not in a: return 0 else: return 0 return 1 - self._compute_safe_frac(a[unit2], self._event_counts_1[unit1])",False,unit2 is None or unit2 == -1,unit1 == unit2,0.6525620222091675 3174,"def getFalsePositiveFraction(self, unit1, unit2=None): if unit1 is None: raise Exception('getFalsePositiveFraction: unit1 must not be None') if unit2 is None: unit2 = self.getBestUnitMatch1(unit1) if unit2 is None or unit2 == -1: return 0 if unit1!= -1 and unit2!= -1: a = self._matching_event_counts_12[unit1] if: return 0 else: return 0 return 1 - self._compute_safe_frac(a[unit2], self._event_counts_1[unit1])",False,unit2 not in a,a is None,0.6594944000244141 3175,"def get_versions(requirements=True): """"""Get the versions of PLUGIN_NAME and it's requirements Parameters ---------- requirements: bool If True, the requirements are imported and it's versions are included """""" ret = {'version': plugin_version} if: pass return ret",False,requirements,requirements and plugin_version in requirements and (not requirements),0.6877959370613098 3176,"def parse_body(body: bytes) -> None: res_json = parse_json(body) if: raise TiebaServerError(code, res_json['error_msg']) if (code := (int(res_json['data']['is_push_success'])!= 1)): raise TiebaServerError(code, res_json['data']['msg'])",True,code := int(res_json['error_code']),code := int(res_json['error_code']),0.646769642829895 3177,"def parse_body(body: bytes) -> None: res_json = parse_json(body) if (code := int(res_json['error_code'])): raise TiebaServerError(code, res_json['error_msg']) if: raise TiebaServerError(code, res_json['data']['msg'])",False,code := (int(res_json['data']['is_push_success'])!= 1),code := int(res_json['data']),0.6463968753814697 3178,"def trunc_normal_init(module: nn.Module, mean: float=0, std: float=1, a: float=-2, b: float=2, bias: float=0) -> None: if: trunc_normal_(module.weight, mean, std, a, b) if hasattr(module, 'bias') and module.bias is not None: nn.init.constant_(module.bias, bias)",False,"hasattr(module, 'weight') and module.weight is not None","hasattr(module, 'weight')",0.6462951898574829 3179,"def trunc_normal_init(module: nn.Module, mean: float=0, std: float=1, a: float=-2, b: float=2, bias: float=0) -> None: if hasattr(module, 'weight') and module.weight is not None: trunc_normal_(module.weight, mean, std, a, b) if: nn.init.constant_(module.bias, bias)",True,"hasattr(module, 'bias') and module.bias is not None","hasattr(module, 'bias') and module.bias is not None",0.6437869668006897 3180,"def decode_list(self, ids): numres = self._num_reserved_ids decoded_ids = [] int2byte = six.int2byte for id_ in ids: if: decoded_ids.append(RESERVED_TOKENS_BYTES[int(id_)]) else: decoded_ids.append(int2byte(id_ - numres)) return decoded_ids",False,0 <= id_ < numres,id_ < numres,0.6514174342155457 3181,"def previous(self): if: return None return self.seq[self.pos - 1]",True,self.pos == 0,self.pos == 0,0.6545200347900391 3182,"def _normalize_choice(self, idx: int, choice_tuple: tuple[Any,...]) -> tuple[Any, Any, Any]: assert isinstance(choice_tuple, tuple) if: key, value, img = choice_tuple else: key = idx value, img = choice_tuple return (key, value, img)",False,len(choice_tuple) == 3,idx == 0,0.6503686308860779 3183,"def get_host(self, task_host): if: return task_host else: return self.ssl_sock.host",True,task_host,task_host,0.6695533990859985 3184,"def fanin_init(tensor): size = tensor.size() if: fan_in = size[0] elif len(size) > 2: fan_in = np.prod(size[1:]) else: raise Exception('Shape must be have dimension at least 2.') bound = 1.0 / np.sqrt(fan_in) return tensor.data.uniform_(-bound, bound)",False,len(size) == 2,len(size) == 1,0.6554105281829834 3185,"def fanin_init(tensor): size = tensor.size() if len(size) == 2: fan_in = size[0] elif: fan_in = np.prod(size[1:]) else: raise Exception('Shape must be have dimension at least 2.') bound = 1.0 / np.sqrt(fan_in) return tensor.data.uniform_(-bound, bound)",False,len(size) > 2,len(size) == 3,0.6533804535865784 3186,"def prep_image(prefix, key, image): """""" Prepare image for wandb logging Parameters ---------- prefix : str Prefix added to the key for logging key : str Key from data containing the inverse depth map image : torch.Tensor [3,H,W] Image to be logged Returns ------- output : dict Dictionary with key and value for logging """""" if: image = image.detach().permute(1, 2, 0).cpu().numpy() prefix_key = '{}-{}'.format(prefix, key) return {prefix_key: wandb.Image(image, caption=key)}",False,is_tensor(image),image.dim() == 3,0.6501303911209106 3187,"def clear_que_idle(self): for i in range(0, 15): for q in que: if: que.remove(q)",False,q[1] is self and q[0] == 'clear_idle',q[1] == self and q[0] == 'idle',0.646538496017456 3188,"def __call__(self, im, im_info): """""" Args: im (np.ndarray): image (np.ndarray) im_info (dict): info of image Returns: im (np.ndarray): processed image (np.ndarray) im_info (dict): info of processed image """""" im = im.astype(np.float32, copy=False) if: scale = 1.0 / 255.0 im *= scale if self.norm_type =='mean_std': mean = np.array(self.mean)[np.newaxis, np.newaxis, :] std = np.array(self.std)[np.newaxis, np.newaxis, :] im -= mean im /= std return (im, im_info)",True,self.is_scale,self.is_scale,0.6525927782058716 3189,"def __call__(self, im, im_info): """""" Args: im (np.ndarray): image (np.ndarray) im_info (dict): info of image Returns: im (np.ndarray): processed image (np.ndarray) im_info (dict): info of processed image """""" im = im.astype(np.float32, copy=False) if self.is_scale: scale = 1.0 / 255.0 im *= scale if: mean = np.array(self.mean)[np.newaxis, np.newaxis, :] std = np.array(self.std)[np.newaxis, np.newaxis, :] im -= mean im /= std return (im, im_info)",True,self.norm_type == 'mean_std',self.norm_type == 'mean_std',0.6455798149108887 3190,"def prune(self): """""" Release all broken connections marked as ""in use"" and then close all free connections. """""" for cx in list(self._in_use_list): if: cx.release() self.__close(self._free_list)",False,cx.broken,cx.is_bound,0.651585578918457 3191,"def _get_project_members(self): if: self.members = utils.get_project_users(self.k_client, self.project, role=CONF.keystone.member_role_id) return self.members",False,self.members is None,not self.members,0.6492447853088379 3192,"def __eq__(self, other): if: return False return self.inputs == other.inputs",False,type(self) is not type(other),"not isinstance(other, self.__class__)",0.6462202072143555 3193,"def __init__(self, option_strings, dest=argparse.SUPPRESS, nargs=None, default=argparse.SUPPRESS, **kwargs): if: raise ValueError('nargs not allowed') kwargs['help'] = 'Print the versions of all plugins and requirements and exit' kwargs['default'] = default super(AllVersionsAction, self).__init__(option_strings, nargs=0, dest=dest, **kwargs)",True,nargs is not None,nargs is not None,0.6560962200164795 3194,"def __str__(self): retval = ': retval +='filename=""' + self.filename + '""' if hasattr(self, 'code'): retval +='code=""' + self.code + '""' retval += '>' return retval",True,"hasattr(self, 'filename')","hasattr(self, 'filename')",0.6494921445846558 3195,"def __str__(self): retval = ': retval +='code=""' + self.code + '""' retval += '>' return retval",True,"hasattr(self, 'code')","hasattr(self, 'code')",0.6502418518066406 3196,"def append_table(self, source_table: BaseTable, target_table: BaseTable, source_to_target_columns_map: dict[str, str]) -> None: """""" Append the source table rows into a destination table. :param source_table: Contains the rows to be appended to the target_table :param target_table: Contains the destination table in which the rows will be appended :param source_to_target_columns_map: Dict of source_table columns names to target_table columns names """""" if: warnings.warn('Warning: Databricks does not support ""partial"" inserts. You will need to cast all columns if you wish to use this feature') append_query = f""INSERT INTO `{self.get_table_qualified_name(target_table)}` ({','.join(source_to_target_columns_map.keys())}) SELECT {','.join(source_to_target_columns_map.values())} FROM `{self.get_table_qualified_name(source_table)}`"" else: append_query = f'INSERT INTO `{self.get_table_qualified_name(target_table)}` SELECT * FROM `{self.get_table_qualified_name(source_table)}`' self.run_sql(append_query)",False,source_to_target_columns_map,target_table.dialect.dialect.partial,0.6461104154586792 3197,"def _get_target_single(self, rois, rel_roi_points, pos_assigned_gt_inds, gt_masks, cfg): """"""Get training target of MaskPointHead for each image."""""" num_pos = rois.size(0) num_points = cfg.num_points if: gt_masks_th = gt_masks.to_tensor(rois.dtype, rois.device).index_select(0, pos_assigned_gt_inds) gt_masks_th = gt_masks_th.unsqueeze(1) rel_img_points = rel_roi_point_to_rel_img_point(rois, rel_roi_points, gt_masks_th.shape[2:]) point_targets = point_sample(gt_masks_th, rel_img_points).squeeze(1) else: point_targets = rois.new_zeros((0, num_points)) return point_targets",True,num_pos > 0,num_pos > 0,0.6512205600738525 3198,"def _create_vg(name, path): """""" This method creates a volume group :param name: Name of the volume group :param path: list of PVs :return: """""" cmd = ['vgcreate', name] + [path[i] for i in range(len(path))] out, err, rc = run_command(cmd) if: raise OperationFailed('GINVG00009E', {'err': err}) return",True,rc != 0,rc != 0,0.6573634147644043 3199,"def on_data_flood(self): """""" Ensure the process (REPL or runner) causing the data flood is stopped *before* the base on_data_flood is called to turn off the plotter and tell the user what to fix. """""" self.set_buttons(run=True, repl=True, debug=True) if: self.remove_repl() elif self.runner: self.run_toggle(None) super().on_data_flood()",False,self.kernel_runner,self.repl,0.6515999436378479 3200,"def on_data_flood(self): """""" Ensure the process (REPL or runner) causing the data flood is stopped *before* the base on_data_flood is called to turn off the plotter and tell the user what to fix. """""" self.set_buttons(run=True, repl=True, debug=True) if self.kernel_runner: self.remove_repl() elif: self.run_toggle(None) super().on_data_flood()",False,self.runner,self.toggle_kernel_runner,0.6562711000442505 3201,"def encode_timeout(timeout: float) -> str: if: return '{}S'.format(int(timeout)) elif timeout > 0.01: return '{}m'.format(int(timeout * 10 ** 3)) elif timeout > 1e-05: return '{}u'.format(int(timeout * 10 ** 6)) else: return '{}n'.format(int(timeout * 10 ** 9))",False,timeout > 10,timeout > 0.02,0.6720567941665649 3202,"def encode_timeout(timeout: float) -> str: if timeout > 10: return '{}S'.format(int(timeout)) elif: return '{}m'.format(int(timeout * 10 ** 3)) elif timeout > 1e-05: return '{}u'.format(int(timeout * 10 ** 6)) else: return '{}n'.format(int(timeout * 10 ** 9))",False,timeout > 0.01,timeout > 0,0.6550772786140442 3203,"def encode_timeout(timeout: float) -> str: if timeout > 10: return '{}S'.format(int(timeout)) elif timeout > 0.01: return '{}m'.format(int(timeout * 10 ** 3)) elif: return '{}u'.format(int(timeout * 10 ** 6)) else: return '{}n'.format(int(timeout * 10 ** 9))",False,timeout > 1e-05,timeout > 0.02,0.6487107276916504 3204,"def pop_obsolete_local_facts(local_facts): """"""Remove unused keys from local_facts"""""" keys_to_remove = {'master': ('etcd_port',)} for role in keys_to_remove: if: for key in keys_to_remove[role]: local_facts[role].pop(key, None)",True,role in local_facts,role in local_facts,0.6558643579483032 3205,"def iter_child_nodes(self, exclude=None, only=None): """"""Iterates over all direct child nodes of the node. This iterates over all fields and yields the values of they are nodes. If the value of a field is a list all the nodes in that list are returned. """""" for field, item in self.iter_fields(exclude, only): if: for n in item: if isinstance(n, Node): yield n elif isinstance(item, Node): yield item",True,"isinstance(item, list)","isinstance(item, list)",0.6434664726257324 3206,"def iter_child_nodes(self, exclude=None, only=None): """"""Iterates over all direct child nodes of the node. This iterates over all fields and yields the values of they are nodes. If the value of a field is a list all the nodes in that list are returned. """""" for field, item in self.iter_fields(exclude, only): if isinstance(item, list): for n in item: if isinstance(n, Node): yield n elif: yield item",True,"isinstance(item, Node)","isinstance(item, Node)",0.6470112800598145 3207,"def iter_child_nodes(self, exclude=None, only=None): """"""Iterates over all direct child nodes of the node. This iterates over all fields and yields the values of they are nodes. If the value of a field is a list all the nodes in that list are returned. """""" for field, item in self.iter_fields(exclude, only): if isinstance(item, list): for n in item: if: yield n elif isinstance(item, Node): yield item",False,"isinstance(n, Node)",field in n,0.6481305360794067 3208,"@property def correspond_onnx_op(self): if: return {'type': 'MeanVarianceNormalization','version': 9} else: return super(CaffeMVNOp, self).correspond_onnx_op",False,self.normalize_variance,self._cfg.MODEL.Caffe_MVNOp.mean_norm_type == 'mean_variance',0.6554127931594849 3209,"def loader(path): if: return (None, None) path = posixpath.join(package_path, path) if not provider.has_resource(path): return (None, None) basename = posixpath.basename(path) if filesystem_bound: return (basename, self._opener(provider.get_resource_filename(manager, path))) return (basename, lambda: (provider.get_resource_stream(manager, path), loadtime, 0))",True,path is None,path is None,0.657390832901001 3210,"def loader(path): if path is None: return (None, None) path = posixpath.join(package_path, path) if: return (None, None) basename = posixpath.basename(path) if filesystem_bound: return (basename, self._opener(provider.get_resource_filename(manager, path))) return (basename, lambda: (provider.get_resource_stream(manager, path), loadtime, 0))",True,not provider.has_resource(path),not provider.has_resource(path),0.6460205316543579 3211,"def loader(path): if path is None: return (None, None) path = posixpath.join(package_path, path) if not provider.has_resource(path): return (None, None) basename = posixpath.basename(path) if: return (basename, self._opener(provider.get_resource_filename(manager, path))) return (basename, lambda: (provider.get_resource_stream(manager, path), loadtime, 0))",True,filesystem_bound,filesystem_bound,0.6570329666137695 3212,"def grid_sample(input, grid): if: return _GridSample2dForward.apply(input, grid) return torch.nn.functional.grid_sample(input=input, grid=grid, mode='bilinear', padding_mode='zeros', align_corners=False)",False,_should_use_custom_op(),input.ndim == 2,0.6496963500976562 3213,"def add_chat(chat_id): stark = lydia.find_one({'chat_id': chat_id}) if: return False else: lydia.insert_one({'chat_id': chat_id}) return True",False,stark,not stark,0.6637530326843262 3214,"def __init__(self, *args, **kwargs): if: self.logname = '%s.%s' % (settings.SHORT_NAME, self.schema_slugs[0]) super(NewsItemListDetailScraper, self).__init__(*args, **kwargs) self._schema_cache = None self._schemas_cache = None self._lookups_cache = None self._schema_fields_cache = None self._schema_field_mapping_cache = None self._geocoder = SmartGeocoder()",False,self.logname is None,len(self.schema_slugs) > 0,0.6549073457717896 3215,"def mod_channel(channel): if: return channel old_az = channel.polar_position.azimuth new_azimuth = np.sign(old_az) * (45 if np.abs(old_az) > 30 else 15) return evolve(channel, polar_nominal_position=PolarPosition(azimuth=new_azimuth, elevation=0.0, distance=1.0))",False,"channel.name not in ('M+SC', 'M-SC')",channel.polar_position is None,0.646004855632782 3216,"def _augment(self, img, gamma): old_dtype = img.dtype lut = ((np.arange(256, dtype='float32') / 255) ** (1.0 / (1.0 + gamma)) * 255).astype('uint8') img = np.clip(img, 0, 255).astype('uint8') ret = cv2.LUT(img, lut).astype(old_dtype) if: ret = ret[:, :, np.newaxis] return ret",False,img.ndim == 3 and ret.ndim == 2,self.copy_axis,0.6469398736953735 3217,"def write_plain_text(self, text, start, end): if: self.wrapped.write(text[start:end]) self.wrapped.flush()",True,start < end,start < end,0.6589024066925049 3218,"def from_key_val_list(value): """"""Take an object and test to see if it can be represented as a dictionary. Unless it can not be represented as such, return an OrderedDict, e.g., :: >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') Traceback (most recent call last): ... ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) :rtype: OrderedDict """""" if: return None if isinstance(value, (str, bytes, bool, int)): raise ValueError('cannot encode objects that are not 2-tuples') return OrderedDict(value)",True,value is None,value is None,0.6520352363586426 3219,"def from_key_val_list(value): """"""Take an object and test to see if it can be represented as a dictionary. Unless it can not be represented as such, return an OrderedDict, e.g., :: >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') Traceback (most recent call last): ... ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) :rtype: OrderedDict """""" if value is None: return None if: raise ValueError('cannot encode objects that are not 2-tuples') return OrderedDict(value)",True,"isinstance(value, (str, bytes, bool, int))","isinstance(value, (str, bytes, bool, int))",0.6426099538803101 3220,"def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) for file_name, excluded_configs in exclusions.items(): if: pass else: for config_name, config in excluded_configs: p.AddFileConfig(file_name, _ConfigFullName(config_name, config), {'ExcludedFromBuild': 'true'})",False,not list_excluded and len(excluded_configs) == len(spec['configurations']),not excluded_configs,0.642126739025116 3221,"def random_generator(start, end, count, step): random_num_dict = {} for i in range(start, start + count): random_num = random.randrange(start, end) if: random_num_dict[random_num] = random_num elif i > 0: i = i - 1 return random_num_dict",True,random_num not in random_num_dict,random_num not in random_num_dict,0.6445322036743164 3222,"def random_generator(start, end, count, step): random_num_dict = {} for i in range(start, start + count): random_num = random.randrange(start, end) if random_num not in random_num_dict: random_num_dict[random_num] = random_num elif: i = i - 1 return random_num_dict",True,i > 0,i > 0,0.6606062650680542 3223,"def print_statistics(name: str) -> str: if: return '' printed_stats = [f'{k}: {v}' for k, v in statistics[name].items()] return ', '.join(printed_stats)",True,name not in statistics,name not in statistics,0.6647918224334717 3224,"def copy_token_to_repeater(s, l, t): matchTokens = _flatten(t.as_list()) def must_match_these_tokens(s, l, t): theseTokens = _flatten(t.as_list()) if: raise ParseException(s, l, 'Expected {}, found{}'.format(matchTokens, theseTokens)) rep.set_parse_action(must_match_these_tokens, callDuringTry=True)",True,theseTokens != matchTokens,theseTokens != matchTokens,0.6551246643066406 3225,"def encode(self, data): value = format_bytes(data[0]).upper() if: value = {'value': value, 'length': data[1]} return value",False,self.size is None,len(data) > 1,0.6492758989334106 3226,"def trim_if_longer_than(text, n): if: return text return text[:n] + '...'",False,len(text) <= n,n == 0,0.6536369323730469 3227,"def preprocess_precache(self, dataset: tf.data.Dataset, seed: Optional[int]=None) -> tf.data.Dataset: """"""Runs preprocessing steps before the optional CacheDatasetPlaceholder."""""" if: return dataset with utils.map_seed_manager(seed): return self._preprocess_dataset(dataset, self.preprocessors[:self._cache_step_idx])",False,not self.supports_caching,seed is None,0.6476250290870667 3228,"def has_handlers(logger): _logger = logger _has_handler = False while _logger: if: _has_handler = True break if not _logger.propagate: break else: _logger = _logger.parent return _has_handler",False,_logger.handlers,_logger.has_handlers(),0.651807427406311 3229,"def has_handlers(logger): _logger = logger _has_handler = False while _logger: if _logger.handlers: _has_handler = True break if: break else: _logger = _logger.parent return _has_handler",False,not _logger.propagate,_logger.root,0.6438626646995544 3230,"@api.one def action_confirm(self): res = super(SaleExceptionConfirm, self).action_confirm() if: return self.sale_id.action_button_confirm() return res",False,self.ignore,self.sale_id,0.6543232798576355 3231,"def __eq__(self, other): """""" Does an equivalence test on the OGR type with the given other OGRGeomType, the short-hand string, or the integer. """""" if: return self.num == other.num elif isinstance(other, six.string_types): return self.name.lower() == other.lower() elif isinstance(other, int): return self.num == other else: return False",True,"isinstance(other, OGRGeomType)","isinstance(other, OGRGeomType)",0.6488220691680908 3232,"def __eq__(self, other): """""" Does an equivalence test on the OGR type with the given other OGRGeomType, the short-hand string, or the integer. """""" if isinstance(other, OGRGeomType): return self.num == other.num elif: return self.name.lower() == other.lower() elif isinstance(other, int): return self.num == other else: return False",False,"isinstance(other, six.string_types)","isinstance(other, str)",0.6445688605308533 3233,"def __eq__(self, other): """""" Does an equivalence test on the OGR type with the given other OGRGeomType, the short-hand string, or the integer. """""" if isinstance(other, OGRGeomType): return self.num == other.num elif isinstance(other, six.string_types): return self.name.lower() == other.lower() elif: return self.num == other else: return False",True,"isinstance(other, int)","isinstance(other, int)",0.6473619341850281 3234,"def function(cell): if: return levels - 1 r = cell.center - 0.5 dist = np.sqrt(r.dot(r)) if dist < 0.2: return levels return levels - 1",False,'notatree' in mesh_type,cell.center is None,0.6514195203781128 3235,"def function(cell): if 'notatree' in mesh_type: return levels - 1 r = cell.center - 0.5 dist = np.sqrt(r.dot(r)) if: return levels return levels - 1",False,dist < 0.2,dist < 1e-10,0.6602287888526917 3236,"def force_unicode(self, x): if: x = x.decode(preferred_encoding,'replace') return x",True,"not isinstance(x, str)","not isinstance(x, str)",0.6433157324790955 3237,"def __new__(cls): if: cls.instance = cls.__OrderableNone() return cls.instance",False,not cls.instance,cls.instance is None,0.6613869667053223 3238,"def get_comps(self, sourcename): components = [] if: for c in self.sd[sourcename]['Children']: if c in self.cd and 'Deleted' not in self.cd[c]: components.append(c) else: self.log('Source', sourcename, 'has no children -- this should not happen!') return components",True,'Children' in self.sd[sourcename],'Children' in self.sd[sourcename],0.6500914096832275 3239,"def get_comps(self, sourcename): components = [] if 'Children' in self.sd[sourcename]: for c in self.sd[sourcename]['Children']: if: components.append(c) else: self.log('Source', sourcename, 'has no children -- this should not happen!') return components",True,c in self.cd and 'Deleted' not in self.cd[c],c in self.cd and 'Deleted' not in self.cd[c],0.6479125022888184 3240,"def run(self): url = 'https://www.libaclub.com/facade.php?act=search&searchAction=keyword&keyword={}&sId=&timetype=2&timeBegin=1563938285&timeEnd=1566530285&sid=0&searchScope=0&orderBy=0&page=1' url_list = get_config_para('nike_daily_keywords') logger.log(31, url_list) for item in url_list: keyword = item['keywords'] logger.log(31, keyword) if: search_url = url.format(keyword) try: self.get_search_page(search_url, keyword) except: logger.error(traceback.format_exc())",True,keyword,keyword,0.6718394756317139 3241,"def _setFont(self, psfontname, size): """"""Sets the font and fontSize Raises a readable exception if an illegal font is supplied. Font names are case-sensitive! Keeps track of font anme and size for metrics."""""" self._fontname = psfontname self._fontsize = size font = pdfmetrics.getFont(self._fontname) if: self._curSubset = -1 else: pdffontname = self._canvas._doc.getInternalFontName(psfontname) self._code.append('%s %s Tf' % (pdffontname, fp_str(size)))",False,font._dynamicFont,font.name != psfontname,0.6506326794624329 3242,"def _LSAGuessPayloadClass(p, **kargs): """""" Guess the correct LSA class for a given payload """""" cls = conf.raw_layer if: typ = struct.unpack('!B', p[3])[0] clsname = _OSPF_LSclasses.get(typ, 'Raw') cls = globals()[clsname] return cls(p, **kargs)",True,len(p) >= 4,len(p) >= 4,0.6595719456672668 3243,"def __exit__(self, type, value, traceback): self.close() if: return True",False,"self.stop and issubclass(type, StopForward)",type is None,0.6432678699493408 3244,"def __validate_index__(self, idx): """""" Raise an exception if the specified index will result in an invalid pattern. """""" if: raise ValueError('Duplicate pattern entries detected.') from_idx, to_idx = self.split_multiindex(idx, self.from_slice, self.to_slice) if to_idx.duplicated().any(): raise ValueError('Fan-in pattern entries detected.') if set(from_idx).intersection(to_idx): raise ValueError('Ports cannot both receive input and send output.')",False,idx.duplicated().any(),idx.duplicated(),0.6450552940368652 3245,"def __validate_index__(self, idx): """""" Raise an exception if the specified index will result in an invalid pattern. """""" if idx.duplicated().any(): raise ValueError('Duplicate pattern entries detected.') from_idx, to_idx = self.split_multiindex(idx, self.from_slice, self.to_slice) if: raise ValueError('Fan-in pattern entries detected.') if set(from_idx).intersection(to_idx): raise ValueError('Ports cannot both receive input and send output.')",False,to_idx.duplicated().any(),from_idx.in_pattern != to_idx.in_pattern,0.6430460214614868 3246,"def __validate_index__(self, idx): """""" Raise an exception if the specified index will result in an invalid pattern. """""" if idx.duplicated().any(): raise ValueError('Duplicate pattern entries detected.') from_idx, to_idx = self.split_multiindex(idx, self.from_slice, self.to_slice) if to_idx.duplicated().any(): raise ValueError('Fan-in pattern entries detected.') if: raise ValueError('Ports cannot both receive input and send output.')",False,set(from_idx).intersection(to_idx),from_idx.in_channels > 1 and to_idx.in_channels > 1,0.6415694952011108 3247,"def get(self, nr): result = [x for x in self.leaves if x.id == nr] if: self.logger('trying to access an unexisting leaf') return result[0]",False,len(result) != 1,len(result) == 0,0.6490288972854614 3248,"@pull_keys_from_obj('type', 'items') @skip_if_any_kwargs_empty('type') @suffix_reserved_words def validate_items_required_if_array_type(type_, items, **kwargs): types = pluralize(type_) if: raise ValidationError(MESSAGES['items']['items_required_for_type_array'])",False,ARRAY in types and items is EMPTY,not is_array_type(items),0.654911458492279 3249,"def add_operator_storage(klass): """""" Add a member to the class named '__op_storage__'. This member will be added as needed, and can be used to store instance specific data needed by the operators. The value of the storage will be a sortedmap. """""" members = klass.members() if: m = Typed(sortedmap, ()) m.set_name('__op_storage__') m.set_index(len(members)) members['__op_storage__'] = m",False,'__op_storage__' not in members,members,0.6559680700302124 3250,"def __init__(self, datasets, eval_key=None): super().__init__() assert isinstance(datasets, OrderedDict) self.datasets = datasets self.eval_key = eval_key self.longest_dataset = None self.longest_dataset_key = None for key, dataset in datasets.items(): assert isinstance(dataset, FairseqDataset) if: self.longest_dataset = dataset self.longest_dataset_key = key self._ordered_indices = None",True,self.longest_dataset is None or len(dataset) > len(self.longest_dataset),self.longest_dataset is None or len(dataset) > len(self.longest_dataset),0.646739661693573 3251,"def __init__(self, *args, **kwargs): super(GroupNodeBackendsSet, self).__init__(*args, **kwargs) if: raise ValueError('Node backends set should contain only unique elements')",False,len(self) != len(set(self)),len(self.backends) > 0,0.6459528207778931 3252,"def reward_v_w_center_linear(self, vel_cmd, center): """""" Applies a linear regression between v and w Supposing there is a lineal relationship V and W. So, formula w = B_0 + x*v. Data for Formula1: Max W = 5 r/s we take max abs value. Correctly it is w left or right Max V = 100 m/s Min V = 20 m/s B_0 = -B_1 * Max V B_1 = -(W Max / (V Max - V Min)) w target = B_0 + B_1 * v error = w_actual - w_target reward = 1/exp(reward + center))) where Max value = 1 Args: linear and angular velocity center Returns: reward """""" done = False if: done = True reward = self.rewards['penal'] else: w_target = self.beta_0 + self.beta_1 * abs(vel_cmd.linear.x) error = abs(w_target - abs(vel_cmd.angular.z)) reward = 1 / math.exp(error + center) return (reward, done)",False,center > 0.9,'penal' in self.rewards,0.6535540819168091 3253,"def start(self) -> bool: if: return run([join(sep, 'usr','sbin', 'nginx'), '-e', '/var/log/bunkerweb/error.log'], stdin=DEVNULL, stderr=STDOUT, check=False).returncode == 0 return self.apiCaller.send_to_apis('POST', '/start')",True,self._type == 'local',self._type == 'local',0.6525919437408447 3254,"@classmethod def _replenish_product(cls, product, product_qty=1, product_uom=None): if: product_uom = cls.uom_unit wiz = cls.env['product.replenish'].with_context(default_product_id=product.id).create({'quantity': product_qty, 'product_uom_id': product_uom.id}) wiz.launch_replenishment()",True,product_uom is None,product_uom is None,0.6585710048675537 3255,"def set_onmouseleave(self, onmouseleave: CallbackMouseType) -> 'Widget': """""" Set ``onmouseleave`` callback. This method is executed in :py:meth:`pygame_menu.widgets.core.widget.Widget.mouseleave` method. The callback function receives the following arguments: .. code-block:: python onmouseleave(widget, event) onmouseleave() :param onmouseleave: Callback executed if user leaves the Widget with the mouse; it can be a function or None :return: Self reference """""" if: assert callable(onmouseleave), 'onmouseleave must be callable (function-type) or None' self._onmouseleave = onmouseleave return self",True,onmouseleave,onmouseleave,0.6610784530639648 3256,"def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): if: return (pred * weight).sum() assert reduction_override in (None, 'none','mean','sum') reduction = reduction_override if reduction_override else self.reduction loss = self.loss_weight * bounded_iou_loss(pred, target, weight, beta=self.beta, eps=self.eps, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss",True,weight is not None and (not torch.any(weight > 0)),weight is not None and (not torch.any(weight > 0)),0.6488144397735596 3257,"def remove_record(self, record): if: self.hydrate() if record._type == 'NS' and record.name == '': self._root_ns = None self._records[record.name].discard(record)",False,self._origin,not self.loaded,0.657575249671936 3258,"def remove_record(self, record): if self._origin: self.hydrate() if: self._root_ns = None self._records[record.name].discard(record)",False,record._type == 'NS' and record.name == '',record.name not in self._records,0.648290753364563 3259,"def visual(self, output, img_info, cls_conf=0.35): ratio = img_info['ratio'] img = img_info['raw_img'] if: return img output = output.numpy() bboxes = output[:, 0:4] / ratio cls = output[:, 6] scores = output[:, 4] * output[:, 5] vis_res = vis(img, bboxes, scores, cls, cls_conf, self.cls_names) return vis_res",True,output is None,output is None,0.6602973937988281 3260,"def clean(self): cleaned_data = super().clean() child = cleaned_data['child'] if: return cleaned_data else: raise ValidationError(""Child data doesn't match user authentication state."")",False,"not child or ',' in child or (self.user.is_authenticated and child.isnumeric())",child.user_authenticated and child.user_authenticated,0.6428632736206055 3261,"def build_inputs_with_special_tokens(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None) -> List[int]: """""" Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. A BERT sequence has the following format: - single sequence: ``[CLS] X [SEP]`` - pair of sequences: ``[CLS] A [SEP] B [SEP]`` Args: token_ids_0 (:obj:`List[int]`): List of IDs to which the special tokens will be added. token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. Returns: :obj:`List[int]`: List of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens. """""" if: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + token_ids_1 + sep",True,token_ids_1 is None,token_ids_1 is None,0.6495926380157471 3262,"def flash_block(self, data, seq, timeout=DEFAULT_TIMEOUT): """"""Write block to flash, retry if fail"""""" for attempts_left in range(WRITE_BLOCK_ATTEMPTS - 1, -1, -1): try: self.check_command('write to target Flash after seq %d' % seq, self.ESP_FLASH_DATA, struct.pack(': self.trace('Block write failed, retrying with {} attempts left'.format(attempts_left)) else: raise",False,attempts_left,attempts_left == WRITE_BLOCK_ATTEMPTS - 1,0.6562485694885254 3263,"def find_nondom_weight_params(node_group, node_adaptor): if: node_adaptor.weights_non_dom = types.MethodType(lambda self, node: node['weights_non_dom'], node_adaptor) elif 'weight_non_dom_0' in node_group.all_columns and 'weight_non_dom_1' in node_group.all_columns: node_adaptor.weights_non_dom = types.MethodType(lambda self, node: [node['weight_non_dom_0'], node['weight_non_dom_1']], node_adaptor) else: node_adaptor.weights_non_dom = types.MethodType(return_none, node_adaptor)",True,'weights_non_dom' in node_group.all_columns,'weights_non_dom' in node_group.all_columns,0.6459236145019531 3264,"def find_nondom_weight_params(node_group, node_adaptor): if 'weights_non_dom' in node_group.all_columns: node_adaptor.weights_non_dom = types.MethodType(lambda self, node: node['weights_non_dom'], node_adaptor) elif: node_adaptor.weights_non_dom = types.MethodType(lambda self, node: [node['weight_non_dom_0'], node['weight_non_dom_1']], node_adaptor) else: node_adaptor.weights_non_dom = types.MethodType(return_none, node_adaptor)",False,'weight_non_dom_0' in node_group.all_columns and 'weight_non_dom_1' in node_group.all_columns,'weight_non_dom_0' in node_group.all_columns,0.6467881202697754 3265,"def reverse_write_file_extract(filename): output_file = filename if: output_file = filename[:len(filename) - 4] output_file = output_file + '_reverse.txt' fh = open(output_file, 'w') for line in reversed(list(open(filename))): line_ex = extract_line_af(line, symbol_seq) fh.write(line_ex) fh.close() return output_file",True,filename[len(filename) - 4:] == '.txt',filename[len(filename) - 4:] == '.txt',0.6434508562088013 3266,"def api_client(self) -> 'K8sClient': if: account_id = account.id if (cfg := K8sConfig.current_config()): return cfg.client_for(account_id) raise AttributeError(f'No API client for account: {account} or no client for account.')",False,account := self.account(),account,0.6618865728378296 3267,"def api_client(self) -> 'K8sClient': if (account := self.account()): account_id = account.id if: return cfg.client_for(account_id) raise AttributeError(f'No API client for account: {account} or no client for account.')",False,cfg := K8sConfig.current_config(),account_id and cfg.client_for(account_id),0.6562936305999756 3268,"def vgg16_bn(pretrained=False, **kwargs): """"""VGG 16-layer model (configuration ""D"") with batch normalization Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """""" if: kwargs['init_weights'] = False model = VGG(make_layers(cfg['D'], batch_norm=True), **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn'])) return model",True,pretrained,pretrained,0.6736469268798828 3269,"def vgg16_bn(pretrained=False, **kwargs): """"""VGG 16-layer model (configuration ""D"") with batch normalization Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """""" if pretrained: kwargs['init_weights'] = False model = VGG(make_layers(cfg['D'], batch_norm=True), **kwargs) if: model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn'])) return model",True,pretrained,pretrained,0.6694509983062744 3270,"def add_metaclass(metaclass): """"""Class decorator for creating a class with a metaclass."""""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper",True,slots is not None,slots is not None,0.6484737396240234 3271,"def add_metaclass(metaclass): """"""Class decorator for creating a class with a metaclass."""""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if: slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper",True,"isinstance(slots, str)","isinstance(slots, str)",0.6445169448852539 3272,"def plus_or_dot(pieces: Dict[str, Any]) -> str: """"""Return a + if we don't already have one, else return a."""""" if: return '.' return '+'",True,"'+' in pieces.get('closest-tag', '')","'+' in pieces.get('closest-tag', '')",0.6470862627029419 3273,"def _fit(self, transformers): """"""Sub-routine to fit sub-learner"""""" xtemp, ytemp = slice_array(self.in_array, self.targets, self.in_index) t0 = time() if: xtemp, ytemp = transformers.transform(xtemp, ytemp) self.estimator.fit(xtemp, ytemp) self.fit_time_ = time() - t0",False,transformers,transformers is not None,0.6613075733184814 3274,"def check_status(): """"""Make sure all files and modules are in place and print some information if they're not """""" for ref_dir in ['iref']: if: print('\nNo ${0} set! Make a directory and point to it in ~/.bashrc or ~/.cshrc.\nFor example,\n\n $ mkdir $GRIZLI/{0}\n $ export {0}=""$GRIZLI/{0}/"" # put this in ~/.bashrc\n'.format(ref_dir)) else: if not os.getenv('iref').endswith('/'): print(""Warning: $iref should end with a '/' character [{0}]"".format(os.getenv('iref'))) test_file = 'iref$uc72113oi_pfl.fits'.replace('iref$', os.getenv('iref')) if not os.path.exists(test_file): print('\n HST calibrations not found in $iref [{0}]\n\n To fetch them, run\n\n >>> import grizli.utils\n >>> grizli.utils.fetch_default_calibs()\n\n '.format(os.getenv('iref')))",False,not os.getenv(ref_dir),os.getenv('iref') == '',0.6450818777084351 3275,"def check_status(): """"""Make sure all files and modules are in place and print some information if they're not """""" for ref_dir in ['iref']: if not os.getenv(ref_dir): print('\nNo ${0} set! Make a directory and point to it in ~/.bashrc or ~/.cshrc.\nFor example,\n\n $ mkdir $GRIZLI/{0}\n $ export {0}=""$GRIZLI/{0}/"" # put this in ~/.bashrc\n'.format(ref_dir)) else: if: print(""Warning: $iref should end with a '/' character [{0}]"".format(os.getenv('iref'))) test_file = 'iref$uc72113oi_pfl.fits'.replace('iref$', os.getenv('iref')) if not os.path.exists(test_file): print('\n HST calibrations not found in $iref [{0}]\n\n To fetch them, run\n\n >>> import grizli.utils\n >>> grizli.utils.fetch_default_calibs()\n\n '.format(os.getenv('iref')))",False,not os.getenv('iref').endswith('/'),os.getenv('iref') and (not os.path.isslash(iref)),0.6447300314903259 3276,"def check_status(): """"""Make sure all files and modules are in place and print some information if they're not """""" for ref_dir in ['iref']: if not os.getenv(ref_dir): print('\nNo ${0} set! Make a directory and point to it in ~/.bashrc or ~/.cshrc.\nFor example,\n\n $ mkdir $GRIZLI/{0}\n $ export {0}=""$GRIZLI/{0}/"" # put this in ~/.bashrc\n'.format(ref_dir)) else: if not os.getenv('iref').endswith('/'): print(""Warning: $iref should end with a '/' character [{0}]"".format(os.getenv('iref'))) test_file = 'iref$uc72113oi_pfl.fits'.replace('iref$', os.getenv('iref')) if: print('\n HST calibrations not found in $iref [{0}]\n\n To fetch them, run\n\n >>> import grizli.utils\n >>> grizli.utils.fetch_default_calibs()\n\n '.format(os.getenv('iref')))",True,not os.path.exists(test_file),not os.path.exists(test_file),0.6444414854049683 3277,"def to_json_string(self, use_diff: bool=True) -> str: """""" Serializes this instance to a JSON string. Args: use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`): If set to ``True``, only the difference between the config instance and the default ``PretrainedConfig()`` is serialized to JSON string. Returns: :obj:`str`: String containing all the attributes that make up this configuration instance in JSON format. """""" if: config_dict = self.to_diff_dict() else: config_dict = self.to_dict() return json.dumps(config_dict, indent=2, sort_keys=True) + '\n'",False,use_diff is True,use_diff,0.6502054929733276 3278,"@property def requirers_str(self): if: return 'the application' return ', '.join(self.requirers)",True,not self.requirers,not self.requirers,0.6549382209777832 3279,"def _parse_read_user_config(self, response, prompt): """""" Parse the response from the instrument for a read user command. @param response The response string from the instrument @param prompt The prompt received from the instrument @retval return The user configuration parsed into a dict. Names include: @raise InstrumentProtocolException When a bad response is encountered """""" log.debug('_parse_read_user_config: response=%s', response.encode('hex')) if: log.warn('_parse_read_user_config: Bad read user response from instrument (%s)', response.encode('hex')) raise InstrumentProtocolException('Invalid read user response. (%s)' % response.encode('hex')) return response",False,"not self._check_configuration(response, USER_CONFIG_SYNC_BYTES, USER_CONFIG_LEN)",response != self.REQUEST_CODE,0.6444648504257202 3280,"def faad(self): if: return value = os.system('faad %s -o %s > %s 2> %s' % (self.filename, devnull, devnull, devnull)) self.failIf(value and value!= NOTFOUND)",False,not have_faad,not self.exists,0.65619957447052 3281,"@restore_manager def gradient(self, M): if: dJ, = self.gradient((M,)) return dJ set_manager(self._manager) _ = self.objective(M, force=self._manager._cp_schedule.is_exhausted) dJ = compute_gradient(self._J, self._M) for dJ_i in dJ: if not issubclass(var_dtype(dJ_i), (float, np.floating)): raise ValueError('Invalid dtype') return dJ",False,is_var(M),self._manager._J is None,0.6510542631149292 3282,"@restore_manager def gradient(self, M): if is_var(M): dJ, = self.gradient((M,)) return dJ set_manager(self._manager) _ = self.objective(M, force=self._manager._cp_schedule.is_exhausted) dJ = compute_gradient(self._J, self._M) for dJ_i in dJ: if: raise ValueError('Invalid dtype') return dJ",False,"not issubclass(var_dtype(dJ_i), (float, np.floating))",dJ_i.dtype != 'NHWC',0.6440389752388 3283,"def end_base64(self, data): value = Binary() value.decode(data.encode('ascii')) if: value = value.data self.append(value) self._value = 0",False,self._use_bytes,value.data,0.6510063409805298 3284,"def _wrap_string_slot(string): """"""Converts __slots__ = 'a' into __slots__ = ('a',) """""" if: return (string,) return string",False,"isinstance(string, string_types)","string is None or isinstance(string, str)",0.6476110219955444 3285,"def run_subprocess(command, **kwargs): global _subprocesses pro = subprocess.Popen(command, preexec_fn=os.setsid, stderr=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True, **kwargs) _subprocesses.append(pro) output, error = pro.communicate() ret = pro.returncode if: sys.stderr.write(error + '\n') raise OSError('subprocess failed with return code %d: %s\n%s' % (ret,''.join(command), error)) return output",True,ret != 0,ret != 0,0.6620335578918457 3286,"def start(self): """""" Start the timer queue to make it start function """""" if: return self._started = True self._thr.start() log.logger.info('TimerQueue started.')",True,self._started,self._started,0.661342203617096 3287,"def exe_exists(exe: str) -> bool: found = parse_shebang.find_executable(exe) if: return False homedir = os.path.expanduser('~') try: common: str | None = os.path.commonpath((found, homedir)) except ValueError: common = None return not SHIMS_RE.search(found) and (os.path.dirname(homedir) == homedir or common!= homedir)",False,found is None,not found,0.6525626182556152 3288,"def __init__(self, elements=None): self.n_elts = 0 self.n_comps = 0 self._next = 0 self._elts = [] self._indx = {} self._par = [] self._siz = [] if: elements = [] for elt in elements: self.add(elt)",True,elements is None,elements is None,0.6557098031044006 3289,"def approx_equal(self, other, epsilon=0.0001): """""" Determines if two tensors are approximately equal Args: - self: tensor - other: tensor Returns: - bool """""" if: raise RuntimeError('Size mismatch between self ({self}) and other ({other})'.format(self=self.size(), other=other.size())) return torch.max((self - other).abs()) <= epsilon",True,self.size() != other.size(),self.size() != other.size(),0.6498256921768188 3290,"def _set_py_limited_api(Extension, kwds): """""" Add py_limited_api to kwds if setuptools >= 26 is in use. Do not alter the setting if it already exists. Setuptools takes care of ignoring the flag on Python 2 and PyPy. CPython itself should ignore the flag in a debugging version (by not listing.abi3.so in the extensions it supports), but it doesn't so far, creating troubles. That's why we check for ""not hasattr(sys, 'gettotalrefcount')"" (the 2.7 compatible equivalent of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) On Windows, with CPython <= 3.4, it's better not to use py_limited_api because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. For now we'll skip py_limited_api on all Windows versions to avoid an inconsistent mess. """""" if: import setuptools try: setuptools_major_version = int(setuptools.__version__.partition('.')[0]) if setuptools_major_version >= 26: kwds['py_limited_api'] = True except ValueError: kwds['py_limited_api'] = True return kwds",False,"'py_limited_api' not in kwds and (not hasattr(sys, 'gettotalrefcount')) and (sys.platform != 'win32')","Extension.is_win32 and (not hasattr(sys, 'gettotalrefcount'))",0.6496493220329285 3291,"def _set_py_limited_api(Extension, kwds): """""" Add py_limited_api to kwds if setuptools >= 26 is in use. Do not alter the setting if it already exists. Setuptools takes care of ignoring the flag on Python 2 and PyPy. CPython itself should ignore the flag in a debugging version (by not listing.abi3.so in the extensions it supports), but it doesn't so far, creating troubles. That's why we check for ""not hasattr(sys, 'gettotalrefcount')"" (the 2.7 compatible equivalent of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) On Windows, with CPython <= 3.4, it's better not to use py_limited_api because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. For now we'll skip py_limited_api on all Windows versions to avoid an inconsistent mess. """""" if 'py_limited_api' not in kwds and (not hasattr(sys, 'gettotalrefcount')) and (sys.platform!= 'win32'): import setuptools try: setuptools_major_version = int(setuptools.__version__.partition('.')[0]) if: kwds['py_limited_api'] = True except ValueError: kwds['py_limited_api'] = True return kwds",True,setuptools_major_version >= 26,setuptools_major_version >= 26,0.6473472118377686 3292,"def createDescription(self): if: return 'PowerPoint Object Container; instance %s, type %s' % (self['instance'].value, self['type'].display) return 'PowerPoint Object; version %s, instance %s, type %s' % (self['version'].value, self['instance'].value, self['type'].display)",False,self['version'].value == 15,self['instance'].value,0.6561750769615173 3293,"def forward(self, x, x_mask): """"""Subsample x. :param torch.Tensor x: input tensor :param torch.Tensor x_mask: input mask :return: subsampled x and mask :rtype Tuple[torch.Tensor, torch.Tensor] """""" x = x.unsqueeze(1) x = self.conv(x) b, c, t, f = x.size() x = self.out(x.transpose(1, 2).contiguous().view(b, t, c * f)) if: return (x, None) return (x, x_mask[:, :, :-2:2][:, :, :-4:3])",True,x_mask is None,x_mask is None,0.6477604508399963 3294,"def __init__(self, as_uuid=False): """"""Construct a UUID type. :param as_uuid=False: if True, values will be interpreted as Python uuid objects, converting to/from string via the DBAPI. """""" if: raise NotImplementedError('This version of Python does not support the native UUID type.') self.as_uuid = as_uuid",False,as_uuid and _python_UUID is None,not as_uuid,0.6509877443313599 3295,"def minimum_width(self, col): cell = self.cell_from_index(col) if: return 0 return cell.minimum_width()",False,not cell,cell is None,0.6695544719696045 3296,"def get_junctions(nodes): """"""Return the nodes with traffic lights. Parameters ---------- nodes : list of dict all available nodes Returns ------- list of dict the nodes with traffic lights """""" junctions = [] for node in nodes: if: if node['type'] == 'traffic_light': junctions.append(node) return junctions",True,'type' in node,'type' in node,0.657156229019165 3297,"def get_junctions(nodes): """"""Return the nodes with traffic lights. Parameters ---------- nodes : list of dict all available nodes Returns ------- list of dict the nodes with traffic lights """""" junctions = [] for node in nodes: if 'type' in node: if: junctions.append(node) return junctions",False,node['type'] == 'traffic_light',node['light'],0.6467031240463257 3298,"def drawer_thread(self, queue, lock, key, is_running): self.init(lock) while is_running.value == 1: self.drawer_refresh(queue, lock) if: time.sleep(kPlotSleep) print(mp.current_process().name, 'closing fig ', self.fig) plt.close(self.fig)",False,kUseFigCanvasDrawIdle,key == 'plot',0.6471734046936035 3299,"@api.model def _bom_line_find(self, product_tmpl=None, product=None, picking_type=None, company_id=False, bom_type=False): """"""Finds BoM lines for particular product, picking and company"""""" if: return self.env['mrp.bom.line'] domain = self._bom_line_find_domain(product_tmpl=product_tmpl, product=product, picking_type=picking_type, company_id=company_id, bom_type=bom_type) if domain is False: return self.env['mrp.bom.line'] return self.search(domain, order='sequence, product_id')",False,product and product.type == 'service' or (product_tmpl and product_tmpl.type == 'service'),product is False,0.6449494361877441 3300,"@api.model def _bom_line_find(self, product_tmpl=None, product=None, picking_type=None, company_id=False, bom_type=False): """"""Finds BoM lines for particular product, picking and company"""""" if product and product.type =='service' or (product_tmpl and product_tmpl.type =='service'): return self.env['mrp.bom.line'] domain = self._bom_line_find_domain(product_tmpl=product_tmpl, product=product, picking_type=picking_type, company_id=company_id, bom_type=bom_type) if: return self.env['mrp.bom.line'] return self.search(domain, order='sequence, product_id')",False,domain is False,not domain,0.6588637828826904 3301,"def evaluate(self, xl_item): if: keyword, operator, value = self.values return operator(xl_item[keyword], value) FilterOperator1, op, FilterOperator2 = self.operations if FilterOperator2 is None: return op(FilterOperator1.evaluate(xl_item)) else: return op(FilterOperator1.evaluate(xl_item), FilterOperator2.evaluate(xl_item))",True,len(self.operations) == 0,len(self.operations) == 0,0.6500109434127808 3302,"def evaluate(self, xl_item): if len(self.operations) == 0: keyword, operator, value = self.values return operator(xl_item[keyword], value) FilterOperator1, op, FilterOperator2 = self.operations if: return op(FilterOperator1.evaluate(xl_item)) else: return op(FilterOperator1.evaluate(xl_item), FilterOperator2.evaluate(xl_item))",False,FilterOperator2 is None,FilterOperator1 is None or FilterOperator2 is None,0.6496834754943848 3303,"def __init__(self, positive_fraction=0.5): """"""Constructs a minibatch sampler. Args: positive_fraction: desired fraction of positive examples (scalar in [0,1]) Raises: ValueError: if positive_fraction < 0, or positive_fraction > 1 """""" if: raise ValueError('positive_fraction should be in range [0,1]. Received: %s.' % positive_fraction) self._positive_fraction = positive_fraction",True,positive_fraction < 0 or positive_fraction > 1,positive_fraction < 0 or positive_fraction > 1,0.6507483720779419 3304,"def hget_unsafe(self, name: str, key: str, default=None) -> Optional[bytes]: name = self._add_namespace(name) if: get_ret = self._redis.hget(name, key) if get_ret.decode('UTF-8') == 'None': return None else: return get_ret else: return default",False,"self.hexists_unsafe(name, key)",self._redis,0.6468687057495117 3305,"def hget_unsafe(self, name: str, key: str, default=None) -> Optional[bytes]: name = self._add_namespace(name) if self.hexists_unsafe(name, key): get_ret = self._redis.hget(name, key) if: return None else: return get_ret else: return default",False,get_ret.decode('UTF-8') == 'None',get_ret is None,0.6423614025115967 3306,"def forward(self, x): shortcut = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) out = self.relu(out) if: shortcut = self.downsample(x) out = self.se_module(out) + shortcut out = self.relu(out) return out",True,self.downsample is not None,self.downsample is not None,0.646532416343689 3307,"def _get_env(environment, name): value = environment.get(name, _undefined) if: raise UndefinedEnvironmentName('{0!r} does not exist in evaluation environment.'.format(name)) return value",False,"isinstance(value, Undefined)",value is _undefined,0.644119381904602 3308,"def cityline_login(driver, account, password): is_email_sent = assign_text(driver, By.CSS_SELECTOR, 'input[type=""text""]', account, submit=True) if: is_click_here_pressed = force_press_button(driver, By.CSS_SELECTOR, '.otp-box > ul > li:nth-child(3) > a') is_password_sent = False if is_email_sent: is_password_sent = assign_text(driver, By.CSS_SELECTOR, 'div > input[type=""password""]', password, submit=True) return is_password_sent",False,is_email_sent,is_email_sent and password and (not is_click_here_pressed),0.6517529487609863 3309,"def cityline_login(driver, account, password): is_email_sent = assign_text(driver, By.CSS_SELECTOR, 'input[type=""text""]', account, submit=True) if is_email_sent: is_click_here_pressed = force_press_button(driver, By.CSS_SELECTOR, '.otp-box > ul > li:nth-child(3) > a') is_password_sent = False if: is_password_sent = assign_text(driver, By.CSS_SELECTOR, 'div > input[type=""password""]', password, submit=True) return is_password_sent",False,is_email_sent,is_click_here_pressed,0.6500408053398132 3310,"def filter_variables(variables, filter_regex_list, invert=False): """"""Filters out the variables matching the filter_regex. Filter out the variables whose name matches the any of the regular expressions in filter_regex_list and returns the remaining variables. Optionally, if invert=True, the complement set is returned. Args: variables: a list of tensorflow variables. filter_regex_list: a list of string regular expressions. invert: (boolean). If True, returns the complement of the filter set; that is, all variables matching filter_regex are kept and all others discarded. Returns: a list of filtered variables. """""" kept_vars = [] variables_to_ignore_patterns = list(filter(None, filter_regex_list)) for var in variables: add = True for pattern in variables_to_ignore_patterns: if re.match(pattern, var.op.name): add = False break if: kept_vars.append(var) return kept_vars",True,add != invert,add != invert,0.6594606637954712 3311,"def filter_variables(variables, filter_regex_list, invert=False): """"""Filters out the variables matching the filter_regex. Filter out the variables whose name matches the any of the regular expressions in filter_regex_list and returns the remaining variables. Optionally, if invert=True, the complement set is returned. Args: variables: a list of tensorflow variables. filter_regex_list: a list of string regular expressions. invert: (boolean). If True, returns the complement of the filter set; that is, all variables matching filter_regex are kept and all others discarded. Returns: a list of filtered variables. """""" kept_vars = [] variables_to_ignore_patterns = list(filter(None, filter_regex_list)) for var in variables: add = True for pattern in variables_to_ignore_patterns: if: add = False break if add!= invert: kept_vars.append(var) return kept_vars",True,"re.match(pattern, var.op.name)","re.match(pattern, var.op.name)",0.6412944793701172 3312,"def run_read(self): api_result = self.api_read() if: return 'Apache SSL key and Certificate not found.' else: rows = [] rows.append(['Description', 'Value']) rows.append([]) for key_name in api_result: for desc, value in api_result[key_name].iteritems(): rows.append([desc, value]) result_table = table(rows) result_table.draw(80) return rows",False,not api_result['apache_ssl_certificate'] and (not api_result['apache_ssl_key']),not api_result,0.6446303129196167 3313,"def __new__(cls, horizontal=DEFAULT_ALIGNMENT[0], vertical=DEFAULT_ALIGNMENT[1]): if: horizontal = horizontal.lower() if horizontal not in HORIZONTAL_MAP: raise TypeError('invalid horizontal alignment') horizontal = HORIZONTAL_MAP[horizontal] if isinstance(vertical, str): vertical = vertical.lower() if vertical not in VERTICAL_MAP: raise TypeError('invalid vertical alignment') vertical = VERTICAL_MAP[vertical] return super(_Alignment, cls).__new__(cls, (horizontal, vertical))",True,"isinstance(horizontal, str)","isinstance(horizontal, str)",0.650280237197876 3314,"def __new__(cls, horizontal=DEFAULT_ALIGNMENT[0], vertical=DEFAULT_ALIGNMENT[1]): if isinstance(horizontal, str): horizontal = horizontal.lower() if horizontal not in HORIZONTAL_MAP: raise TypeError('invalid horizontal alignment') horizontal = HORIZONTAL_MAP[horizontal] if: vertical = vertical.lower() if vertical not in VERTICAL_MAP: raise TypeError('invalid vertical alignment') vertical = VERTICAL_MAP[vertical] return super(_Alignment, cls).__new__(cls, (horizontal, vertical))",True,"isinstance(vertical, str)","isinstance(vertical, str)",0.6554739475250244 3315,"def __new__(cls, horizontal=DEFAULT_ALIGNMENT[0], vertical=DEFAULT_ALIGNMENT[1]): if isinstance(horizontal, str): horizontal = horizontal.lower() if: raise TypeError('invalid horizontal alignment') horizontal = HORIZONTAL_MAP[horizontal] if isinstance(vertical, str): vertical = vertical.lower() if vertical not in VERTICAL_MAP: raise TypeError('invalid vertical alignment') vertical = VERTICAL_MAP[vertical] return super(_Alignment, cls).__new__(cls, (horizontal, vertical))",True,horizontal not in HORIZONTAL_MAP,horizontal not in HORIZONTAL_MAP,0.6584954857826233 3316,"def __new__(cls, horizontal=DEFAULT_ALIGNMENT[0], vertical=DEFAULT_ALIGNMENT[1]): if isinstance(horizontal, str): horizontal = horizontal.lower() if horizontal not in HORIZONTAL_MAP: raise TypeError('invalid horizontal alignment') horizontal = HORIZONTAL_MAP[horizontal] if isinstance(vertical, str): vertical = vertical.lower() if: raise TypeError('invalid vertical alignment') vertical = VERTICAL_MAP[vertical] return super(_Alignment, cls).__new__(cls, (horizontal, vertical))",True,vertical not in VERTICAL_MAP,vertical not in VERTICAL_MAP,0.6631019115447998 3317,"def pop_int(self, key: str, default: Any=DEFAULT) -> Optional[int]: """""" Performs a pop and coerces to an int. """""" value = self.pop(key, default) if: return None else: return int(value)",True,value is None,value is None,0.6537577509880066 3318,"def to_string_tuple(self, string_encoder=None): """""" Convert the record into a tuple of UTF-8 encoded byte strings. This format is convenient for use with Luigi since it expects tuples of strings as output from reduce functions. Arguments: string_encoder : The string encoder to encode the record fields with. """""" if: string_encoder = HiveTsvEncoder() field_values = [] for field_name, field_obj in self.get_fields().items(): val = getattr(self, field_name) if val is not None: val = field_obj.serialize_to_string(val) field_values.append(string_encoder.encode(val, field_obj)) return tuple(field_values)",True,string_encoder is None,string_encoder is None,0.6539633870124817 3319,"def to_string_tuple(self, string_encoder=None): """""" Convert the record into a tuple of UTF-8 encoded byte strings. This format is convenient for use with Luigi since it expects tuples of strings as output from reduce functions. Arguments: string_encoder : The string encoder to encode the record fields with. """""" if string_encoder is None: string_encoder = HiveTsvEncoder() field_values = [] for field_name, field_obj in self.get_fields().items(): val = getattr(self, field_name) if: val = field_obj.serialize_to_string(val) field_values.append(string_encoder.encode(val, field_obj)) return tuple(field_values)",False,val is not None,"hasattr(field_obj, 'serialize_to_string')",0.6518678665161133 3320,"def _is_whitespace(char): """"""Checks whether `chars` is a whitespace character."""""" if: return True cat = unicodedata.category(char) if cat == 'Zs': return True return False",True,char == '' or char == '\t' or char == '\n' or (char == '\r'),char == '' or char == '\t' or char == '\n' or (char == '\r'),0.6456636190414429 3321,"def _is_whitespace(char): """"""Checks whether `chars` is a whitespace character."""""" if char =='' or char == '\t' or char == '\n' or (char == '\r'): return True cat = unicodedata.category(char) if: return True return False",True,cat == 'Zs',cat == 'Zs',0.6528396606445312 3322,"def _start(self): self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.s.connect((self.server, self.port)) if: self.s.send(self.options + '\n') self.s.recv(8192)",True,self.options,self.options,0.6547831296920776 3323,"def wait_process(self, print_viewer_output=True): """""" If the viewer was created locally in a separate process wait for it to exit and optionally print the standard output of the remote viewer. :param print_viewer_output: if True print the output of the remote viewer. """""" self.close_connection() if: self.process.wait() if print_viewer_output: print('\nRemote viewer output:') print(self.process.stdout.read().decode())",True,self.process is not None,self.process is not None,0.6485069990158081 3324,"def wait_process(self, print_viewer_output=True): """""" If the viewer was created locally in a separate process wait for it to exit and optionally print the standard output of the remote viewer. :param print_viewer_output: if True print the output of the remote viewer. """""" self.close_connection() if self.process is not None: self.process.wait() if: print('\nRemote viewer output:') print(self.process.stdout.read().decode())",True,print_viewer_output,print_viewer_output,0.6512216329574585 3325,"def itemByIndex(self, index): if: return None return super().__getitem__(index)",False,index < 0 or index >= self.__len__(),index is None,0.6484782695770264 3326,"def extract_gltf(self, eid, extract_dir, save_to_one_dir, include_skeleton, texture_format): if: try: nodes_export_gltf(self.vfs, self.vfs_view_current(), extract_dir, allow_overwrite=False, save_to_one_dir=save_to_one_dir, include_skeleton=include_skeleton, texture_format=texture_format) except EDecaFileExists as exce: self.error_dialog('{} Canceled: File Exists: {}'.format(eid, exce.args))",False,self.vfs_view_current().node_selected_count() > 0,self.vfs is not None,0.6426528692245483 3327,"def move(self, to_path: Union[str, Path]): from_path = self.get() to_path = Path(to_path) if: if from_path.samefile(to_path): return self else: to_path.parent.mkdir(mode=488, exist_ok=True, parents=True) from_path.rename(to_path) self._path = to_path self._temporary = False return self",False,to_path.exists(),from_path.exists() and to_path.exists(),0.6529403924942017 3328,"def move(self, to_path: Union[str, Path]): from_path = self.get() to_path = Path(to_path) if to_path.exists(): if: return self else: to_path.parent.mkdir(mode=488, exist_ok=True, parents=True) from_path.rename(to_path) self._path = to_path self._temporary = False return self",False,from_path.samefile(to_path),from_path.exists() or to_path.is_file(),0.6434625387191772 3329,"def no_tracing(func): """"""Decorator to temporarily turn off tracing for the duration of a test."""""" if: return func else: @functools.wraps(func) def wrapper(*args, **kwargs): original_trace = sys.gettrace() try: sys.settrace(None) return func(*args, **kwargs) finally: sys.settrace(original_trace) return wrapper",False,"not hasattr(sys, 'gettrace')",DEBUG,0.6425193548202515 3330,"def component(self, x): """"""Find the connected component containing the given element. Parameters ---------- x : immutable object Returns ------- set Raises ------ ValueError If the given element is not found. """""" if: raise ValueError('{} is not an element'.format(x)) elts = np.array(self._elts) vfind = np.vectorize(self.find) roots = vfind(elts) return set(elts[roots == self.find(x)])",False,x not in self,"not isinstance(x, Element)",0.6567975878715515 3331,"def index_by_id(self, queue_id: str, queue_item_id: str) -> int | None: """"""Get index by queue_item_id."""""" queue_items = self._queue_items[queue_id] for index, item in enumerate(queue_items): if: return index return None",False,item.queue_item_id == queue_item_id,item['queue_item_id'] == queue_item_id,0.648495614528656 3332,"def single_proc_run(config): from lib.ddp_data_loaders import make_data_loader train_loader = make_data_loader(config, int(config.trainer.batch_size / config.misc.num_gpus), num_threads=int(config.misc.train_num_thread / config.misc.num_gpus)) Trainer = get_trainer(config.trainer.trainer) trainer = Trainer(config=config, data_loader=train_loader) if: trainer.train() else: trainer.test()",False,config.misc.is_train,config.misc.train_num_thread > 0,0.6511161923408508 3333,"@root.command('edit-chute-configuration') @click.argument('chute') @click.pass_context def edit_chute_configuration(ctx, chute): """""" Interactively edit the chute configuration and restart it. CHUTE must be the name of an installed chute. Open the text editor specified by the EDITOR environment variable with the current chute configuration. If you save and exit, the new configuration will be applied and the chute restarted. """""" client = ctx.obj['client'] old_data = client.get_chute_config(chute) new_data, changed = util.open_yaml_editor(old_data, 'chute'+ chute) if: result = client.set_chute_config(chute, new_data) ctx.invoke(watch_change_logs, change_id=result['change_id']) return new_data",True,changed,changed,0.6757489442825317 3334,"def zero_hessian(self): """""" Zeros out the accumalated hessian traces. """""" for p in self.get_params(): if: p.hess.zero_()",True,"not isinstance(p.hess, float) and self.state[p]['hessian step'] % self.update_each == 0","not isinstance(p.hess, float) and self.state[p]['hessian step'] % self.update_each == 0",0.6468917727470398 3335,"def get(self, url_base_path: str, request: Optional[Message]=None, used_params: Optional[List[str]]=None) -> bytes: """""" Send a GET request. :param url_base_path: URL base path :param request: Protobuf coded request :param used_params: Parameters to be removed from request after converting it to dict :raises RuntimeError: if response code is not 200 :return: Content of response """""" url = self._make_url(url_base_path=url_base_path, request=request, used_params=used_params) response = self._session.get(url=url) if: raise RuntimeError(f'Error when sending a GET request.\n Response: {response.status_code}, {str(response.content)})') return response.content",True,response.status_code != 200,response.status_code != 200,0.6573207378387451 3336,"def nms(dets, thresh, force_cpu=False): """"""Dispatch to either CPU or GPU NMS implementations."""""" if: return [] if cfg.USE_GPU_NMS and (not force_cpu): return gpu_nms(dets, thresh, device_id=0) else: return cpu_nms(dets, thresh)",True,dets.shape[0] == 0,dets.shape[0] == 0,0.6480503082275391 3337,"def nms(dets, thresh, force_cpu=False): """"""Dispatch to either CPU or GPU NMS implementations."""""" if dets.shape[0] == 0: return [] if: return gpu_nms(dets, thresh, device_id=0) else: return cpu_nms(dets, thresh)",False,cfg.USE_GPU_NMS and (not force_cpu),force_cpu,0.644140362739563 3338,"@property def plasma(self): if: try: import pyarrow.plasma as plasma self._plasma = plasma except ImportError: self._plasma = None return self._plasma",False,self._plasma is None and (not self.disable),self._plasma is None,0.6454801559448242 3339,"@classmethod def _adapt_string_for_cast(self, type_): type_ = sqltypes.to_instance(type_) if: return type_ elif isinstance(type_, _StringType): return CHAR(length=type_.length, charset=type_.charset, collation=type_.collation, ascii=type_.ascii, binary=type_.binary, unicode=type_.unicode, national=False) else: return CHAR(length=type_.length)",False,"isinstance(type_, sqltypes.CHAR)","isinstance(type_, sqltypes.IntType)",0.6476126909255981 3340,"@classmethod def _adapt_string_for_cast(self, type_): type_ = sqltypes.to_instance(type_) if isinstance(type_, sqltypes.CHAR): return type_ elif: return CHAR(length=type_.length, charset=type_.charset, collation=type_.collation, ascii=type_.ascii, binary=type_.binary, unicode=type_.unicode, national=False) else: return CHAR(length=type_.length)",False,"isinstance(type_, _StringType)","isinstance(type_.length, int)",0.6457464694976807 3341,"def get_name(self, obj): if: return obj.item_en.name elif self.context['language'] == 'ja': return obj.item_ja.name else: return obj.item_en.name",True,'language' not in self.context,'language' not in self.context,0.648820161819458 3342,"def get_name(self, obj): if 'language' not in self.context: return obj.item_en.name elif: return obj.item_ja.name else: return obj.item_en.name",True,self.context['language'] == 'ja',self.context['language'] == 'ja',0.6479569673538208 3343,"def __next__(self) -> Report: """""" Allow to get the next data :raise: StopIteration in stream mode when no report was found. """""" file_object = open(self.filename, 'r') json_str = file_object.read() file_object.close() if: raise StopIteration() if json_str == self.previousJson: logging.error('Error : Report did not change since last read') raise StopIteration() self.previousJson = json_str return self.report_type.from_json(json.loads(json_str))",False,json_str is None,json_str == '',0.650860071182251 3344,"def __next__(self) -> Report: """""" Allow to get the next data :raise: StopIteration in stream mode when no report was found. """""" file_object = open(self.filename, 'r') json_str = file_object.read() file_object.close() if json_str is None: raise StopIteration() if: logging.error('Error : Report did not change since last read') raise StopIteration() self.previousJson = json_str return self.report_type.from_json(json.loads(json_str))",False,json_str == self.previousJson,self.previousJson != json_str,0.6470224857330322 3345,"def reload_cellblender(self, scn): print('Disabling CellBlender Application') bpy.ops.wm.addon_disable(module='cellblender') print('Delete MCell RNA properties if needed') if: print('Deleting MCell RNA properties') del scn['mcell'] print('Enabling CellBlender Application') bpy.ops.wm.addon_enable(module='cellblender')",False,scn.get('mcell'),'mcell' in scn,0.649645209312439 3346,"def tensor2img_fast(tensor, rgb2bgr=True, min_max=(0, 1)): """"""This implementation is slightly faster than tensor2img. It now only supports torch tensor with shape (1, c, h, w). Args: tensor (Tensor): Now only support torch tensor with (1, c, h, w). rgb2bgr (bool): Whether to change rgb to bgr. Default: True. min_max (tuple[int]): min and max values for clamp. """""" output = tensor.squeeze(0).detach().clamp_(*min_max).permute(1, 2, 0) output = (output - min_max[0]) / (min_max[1] - min_max[0]) * 255 output = output.type(torch.uint8).cpu().numpy() if: output = cv2.cvtColor(output, cv2.COLOR_RGB2BGR) return output",True,rgb2bgr,rgb2bgr,0.6550439596176147 3347,"def load_completions(self, force=False): with self._WLOCK: if: return self._started = True t = threading.Thread(target=cwl_parsing_handler, args=(self._on_completions,)) t.daemon = True t.start()",False,self._started or (self._completed and (not force)),self._started and (not force),0.651587724685669 3348,"def __init__(self, o: Union[Callable[[], T], Iterable[T]], sentinel: Optional[T]=None, modifier: Optional[Callable[[T], T]]=None) -> None: """""" Parameters ---------- o : Iterable or Callable ``o`` is interpreted very differently depending on the presence of ``sentinel``. If ``sentinel`` is not given, then ``o`` must be a collection object which supports either the iteration protocol or the sequence protocol. If ``sentinel`` is given, then ``o`` must be a callable object. sentinel : object, optional If given, the iterator will call ``o`` with no arguments for each call to its `next` method; if the value returned is equal to ``sentinel``, :exc:`StopIteration` will be raised, otherwise the value will be returned. modifier : callable, optional The function that will be used to modify each item returned by the iterator. ``modifier`` should take a single argument and return a single value. Defaults to ``lambda x: x``. If ``sentinel`` is not given, `modifier` must be passed as a keyword argument. """""" if: self.modifier = modifier else: self.modifier = lambda x: x if not callable(self.modifier): raise TypeError('modify_iter(): modifier must be callable') super().__init__(o, sentinel)",False,modifier,"isinstance(modifier, Callable)",0.664461612701416 3349,"def __init__(self, o: Union[Callable[[], T], Iterable[T]], sentinel: Optional[T]=None, modifier: Optional[Callable[[T], T]]=None) -> None: """""" Parameters ---------- o : Iterable or Callable ``o`` is interpreted very differently depending on the presence of ``sentinel``. If ``sentinel`` is not given, then ``o`` must be a collection object which supports either the iteration protocol or the sequence protocol. If ``sentinel`` is given, then ``o`` must be a callable object. sentinel : object, optional If given, the iterator will call ``o`` with no arguments for each call to its `next` method; if the value returned is equal to ``sentinel``, :exc:`StopIteration` will be raised, otherwise the value will be returned. modifier : callable, optional The function that will be used to modify each item returned by the iterator. ``modifier`` should take a single argument and return a single value. Defaults to ``lambda x: x``. If ``sentinel`` is not given, `modifier` must be passed as a keyword argument. """""" if modifier: self.modifier = modifier else: self.modifier = lambda x: x if: raise TypeError('modify_iter(): modifier must be callable') super().__init__(o, sentinel)",False,not callable(self.modifier),not callable(o),0.6431137323379517 3350,"def __init__(self, root, relabel_root, load_bytes=False, transform=None, class_map=''): class_to_idx = None if: class_to_idx = load_class_map(class_map, root) images, class_to_idx = find_images_and_targets(root, class_to_idx=class_to_idx) if len(images) == 0: raise RuntimeError(f""Found 0 images in subfolders of {root}. Supported image extensions are {', '.join(IMG_EXTENSIONS)}"") self.root = root self.relabel_root = relabel_root self.samples = images self.imgs = self.samples self.class_to_idx = class_to_idx self.load_bytes = load_bytes self.transform = transform",True,class_map,class_map,0.6694598197937012 3351,"def __init__(self, root, relabel_root, load_bytes=False, transform=None, class_map=''): class_to_idx = None if class_map: class_to_idx = load_class_map(class_map, root) images, class_to_idx = find_images_and_targets(root, class_to_idx=class_to_idx) if: raise RuntimeError(f""Found 0 images in subfolders of {root}. Supported image extensions are {', '.join(IMG_EXTENSIONS)}"") self.root = root self.relabel_root = relabel_root self.samples = images self.imgs = self.samples self.class_to_idx = class_to_idx self.load_bytes = load_bytes self.transform = transform",False,len(images) == 0,images is None,0.654414713382721 3352,"def transform_entity_synonyms(synonyms, known_synonyms: Optional[Dict[Text, Any]]=None) -> Dict[Text, Any]: """"""Transforms the entity synonyms into a text->value dictionary"""""" entity_synonyms = known_synonyms if known_synonyms else {} for s in synonyms: if: for synonym in s['synonyms']: entity_synonyms[synonym] = s['value'] return entity_synonyms",False,'value' in s and 'synonyms' in s,'synonyms' in s,0.65416419506073 3353,"def label_row(self, row): if: return 'Fail' elif row.RD_prob >= 0.9 and row.PE_prob >= 0.9 and (row.SR1_prob >= 0.4): return 'Pass' else: return 'Unlabeled'",False,row.RD_prob < 0.4 or row.PE_prob < 0.4,row.RD_prob < 0.4 and row.PE_prob < 0.4 and (row.SR1_prob < 0.4),0.6469786167144775 3354,"def label_row(self, row): if row.RD_prob < 0.4 or row.PE_prob < 0.4: return 'Fail' elif: return 'Pass' else: return 'Unlabeled'",False,row.RD_prob >= 0.9 and row.PE_prob >= 0.9 and (row.SR1_prob >= 0.4),row.RD_prob >= 0.9 and row.PE_prob >= 0.9,0.6475220918655396 3355,"def _get_n_args(self, args, example, n): """"""Helper to make sure the command got the right number of arguments """""" if: msg = 'Got unexpected number of arguments, expected {}. (example: ""{} config {}"")'.format(n, get_prog(), example) raise PipError(msg) if n == 1: return args[0] else: return args",False,len(args) != n,n != 0,0.6545162796974182 3356,"def _get_n_args(self, args, example, n): """"""Helper to make sure the command got the right number of arguments """""" if len(args)!= n: msg = 'Got unexpected number of arguments, expected {}. (example: ""{} config {}"")'.format(n, get_prog(), example) raise PipError(msg) if: return args[0] else: return args",False,n == 1,len(args) == 1,0.6659960746765137 3357,"def __init__(self, PSF, geometry): super(BlurringOperator, self).__init__(domain_geometry=geometry, range_geometry=geometry) if: self.PSF = PSF else: raise TypeError('PSF must be a number array with same number of dimensions as geometry.') if not (isinstance(geometry, cil.framework.framework.ImageGeometry) or isinstance(geometry, cil.framework.framework.AcquisitionGeometry)): raise TypeError('geometry must be an ImageGeometry or AcquisitionGeometry.')",False,"isinstance(PSF, np.ndarray)","isinstance(psf, numbers.Number)",0.6505733728408813 3358,"def __init__(self, PSF, geometry): super(BlurringOperator, self).__init__(domain_geometry=geometry, range_geometry=geometry) if isinstance(PSF, np.ndarray): self.PSF = PSF else: raise TypeError('PSF must be a number array with same number of dimensions as geometry.') if: raise TypeError('geometry must be an ImageGeometry or AcquisitionGeometry.')",False,"not (isinstance(geometry, cil.framework.framework.ImageGeometry) or isinstance(geometry, cil.framework.framework.AcquisitionGeometry))","not (isinstance(geometry, ImageGeometry) and type(geometry))",0.6508026719093323 3359,"def get_name(self, obj): if: return obj.reg_en elif self.context['language'] == 'ja': return obj.reg_ja elif self.context['language'] == 'ko': return obj.reg_ko elif self.context['language'] =='sc': return obj.reg_sc elif self.context['language'] == 'tc': return obj.reg_tc else: return obj.reg_en",True,'language' not in self.context,'language' not in self.context,0.6526510715484619 3360,"def get_name(self, obj): if 'language' not in self.context: return obj.reg_en elif: return obj.reg_ja elif self.context['language'] == 'ko': return obj.reg_ko elif self.context['language'] =='sc': return obj.reg_sc elif self.context['language'] == 'tc': return obj.reg_tc else: return obj.reg_en",True,self.context['language'] == 'ja',self.context['language'] == 'ja',0.6514073610305786 3361,"def get_name(self, obj): if 'language' not in self.context: return obj.reg_en elif self.context['language'] == 'ja': return obj.reg_ja elif: return obj.reg_ko elif self.context['language'] =='sc': return obj.reg_sc elif self.context['language'] == 'tc': return obj.reg_tc else: return obj.reg_en",True,self.context['language'] == 'ko',self.context['language'] == 'ko',0.6539989113807678 3362,"def get_name(self, obj): if 'language' not in self.context: return obj.reg_en elif self.context['language'] == 'ja': return obj.reg_ja elif self.context['language'] == 'ko': return obj.reg_ko elif: return obj.reg_sc elif self.context['language'] == 'tc': return obj.reg_tc else: return obj.reg_en",True,self.context['language'] == 'sc',self.context['language'] == 'sc',0.6513539552688599 3363,"def get_name(self, obj): if 'language' not in self.context: return obj.reg_en elif self.context['language'] == 'ja': return obj.reg_ja elif self.context['language'] == 'ko': return obj.reg_ko elif self.context['language'] =='sc': return obj.reg_sc elif: return obj.reg_tc else: return obj.reg_en",True,self.context['language'] == 'tc',self.context['language'] == 'tc',0.6505173444747925 3364,"def get_tunnel_list(remote=False): channels_cmd = ops.cmd.getDszCommand('commands', prefixes=['stopaliasing'], all=False, any=False, astyped=False, verbose=False, dszquiet=True, remote=remote) channels_data = channels_cmd.execute() tunnel_list = [] for command_data in channels_data.command: if: tunnel_list.append(command_data) return tunnel_list",False,convert_str(command_data.name) == 'redirect',command_data.command.type == 'tunnel',0.6442177295684814 3365,"@property def cliplen(self): if: return self._cliplen elif self._config.has_option(self.classname, 'cliplen'): return self._config.getint(self, 'cliplen') else: return 0",False,'_cliplen' in self.__dict__,self._cliplen,0.6528490781784058 3366,"@property def cliplen(self): if '_cliplen' in self.__dict__: return self._cliplen elif: return self._config.getint(self, 'cliplen') else: return 0",False,"self._config.has_option(self.classname, 'cliplen')",self._config is not None,0.6476914882659912 3367,"def delete_pickle(self, _class_name): self.logger.debug('Deleting Pickle File {}'.format(_class_name)) file_path = self.get_file_path(_class_name) if: os.remove(file_path) else: self.logger.warning('Deleting pickle file does not exist: {}'.format(file_path))",True,os.path.exists(file_path),os.path.exists(file_path),0.644834041595459 3368,"def __eq__(self, other: Any) -> bool: """"""Compares object with other object."""""" if: return NotImplemented return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.6477192640304565 3369,"def _log_reader(self, i): if: log_path = self._log_paths[i] self._log_readers[i] = LogReader(log_path, sort_by_time=self.sort_by_time) return self._log_readers[i]",False,self._log_readers[i] is None and self._log_paths[i] is not None,i not in self._log_readers,0.648859977722168 3370,"def set_fix_params(self, params): if: params = [] self.fix_params = params for p in self.params: p.active = p.name not in params",True,params is None,params is None,0.6655040979385376 3371,"@property def links(self): """"""Returns the parsed header links of the response, if any."""""" header = self.headers.get('link') l = {} if: links = parse_header_links(header) for link in links: key = link.get('rel') or link.get('url') l[key] = link return l",True,header,header,0.6690298318862915 3372,"def _populateDriverData(self): driverlist = ops.db.Database(ops.db.DRIVERLIST) curs = driverlist.connection.execute('SELECT * FROM drivers WHERE name =?', (self.filename,)) row = curs.fetchone() if: self._friendly = row['comment'] self._drivertype = row['type'] else: self._friendly = '' self._drivertype = ''",False,row is not None,row,0.6575003862380981 3373,"def run(self): """""" Run visualization asynchronously. """""" while True: task = self.task_queue.get() if: break frames = draw_predictions(task, self.video_vis) task.frames = np.array(frames) self.result_queue.put(task)",True,"isinstance(task, _StopToken)","isinstance(task, _StopToken)",0.6459864974021912 3374,"def clean_name(self): """"""Ensures a contact with the same name doesn't already exists"""""" instance = getattr(self, 'instance', None) name = self.cleaned_data['name'] if: slug = unicode(name).replace(' ', '-') slug = defaultfilters.slugify(unidecode(slug)) if ContactType.objects.filter(slug=slug).exists(): raise forms.ValidationError(_('Contact Type with such name already exists.')) return name",False,instance and (not instance.id),instance and name and (not instance.is_contact_with_custom_name),0.6469849944114685 3375,"def clean_name(self): """"""Ensures a contact with the same name doesn't already exists"""""" instance = getattr(self, 'instance', None) name = self.cleaned_data['name'] if instance and (not instance.id): slug = unicode(name).replace(' ', '-') slug = defaultfilters.slugify(unidecode(slug)) if: raise forms.ValidationError(_('Contact Type with such name already exists.')) return name",False,ContactType.objects.filter(slug=slug).exists(),name and (not instance.exists()),0.6437221765518188 3376,"def dtc(clear=False, **kwargs): """""" Reads and clears Diagnostics Trouble Codes (DTCs). Optional arguments: - clear (bool): clear DTC codes """""" if: return query('clear_dtc', **kwargs) return query('get_dtc', _converter='dtc', **kwargs)",True,clear,clear,0.6965453028678894 3377,"def to_alipay_dict(self): params = dict() if: if hasattr(self.image_content, 'to_alipay_dict'): params['image_content'] = self.image_content.to_alipay_dict() else: params['image_content'] = self.image_content return params",True,self.image_content,self.image_content,0.6522769927978516 3378,"def to_alipay_dict(self): params = dict() if self.image_content: if: params['image_content'] = self.image_content.to_alipay_dict() else: params['image_content'] = self.image_content return params",False,"hasattr(self.image_content, 'to_alipay_dict')","isinstance(self.image_content, torch.Tensor)",0.6424386501312256 3379,"def __mul__(self, other): if: raise TypeError('Distance instance must be multiplicated with numbers.') else: return self.__class__(self.kilometers * other)",False,"isinstance(other, Distance)","not isinstance(other, (int, float))",0.6566572189331055 3380,"def loss(self, pred, gt): gt_var = torch.FloatTensor(gt) if: gt_var = gt_var.cuda() pred = pred / torch.sqrt((pred ** 2).sum((1, 2, 3), keepdim=True)) gt_var = gt_var / torch.sqrt((gt_var ** 2).sum((1, 2, 3), keepdim=True)) return self.loss_fn(pred, gt_var)",True,self.gpu,self.gpu,0.66664719581604 3381,"def update_features(self, feat): feat /= np.linalg.norm(feat) self.curr_feat = feat if: self.smooth_feat = feat else: self.smooth_feat = self.alpha * self.smooth_feat + (1 - self.alpha) * feat self.features.append(feat) self.smooth_feat /= np.linalg.norm(self.smooth_feat)",True,self.smooth_feat is None,self.smooth_feat is None,0.6455026865005493 3382,"@property def tiles(self): """"""gets the value tiles"""""" if: self.init() return self._tiles",True,self._tiles is None,self._tiles is None,0.6620444059371948 3383,"def __init__(self, node=None, sos_token=None): if: self.root = node elif sos_token: self.root = Tree(sos_token)",True,node,node,0.6820021867752075 3384,"def __init__(self, node=None, sos_token=None): if node: self.root = node elif: self.root = Tree(sos_token)",True,sos_token,sos_token,0.6608739495277405 3385,"def summary(self, sort_by: Optional[Union[str, List[str]]]=None) -> pd.DataFrame: if: self._generate_summary(sort_by) return self.table",False,self.table is None or sort_by,sort_by is not None,0.6507714986801147 3386,"def write(self, oprot): if: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('get_file_metadata_args') if self.req is not None: oprot.writeFieldBegin('req', TType.STRUCT, 1) self.req.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot._fast_encode is not None and self.thrift_spec is not None,oprot._fast_encode is not None and self.thrift_spec is not None,0.6465106010437012 3387,"def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('get_file_metadata_args') if: oprot.writeFieldBegin('req', TType.STRUCT, 1) self.req.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,self.req is not None,self.req is not None,0.649897038936615 3388,"def mobius_add(self, x: torch.Tensor, y: torch.Tensor, *, dim=-1, project=True) -> torch.Tensor: res = _mobius_add(x, y, c=self.c, dim=dim) if: return _project(res, c=self.c, dim=dim) else: return res",True,project,project,0.6792160272598267 3389,"def output_apifault(data_object): if: return output_status_message('* * * Begin output_apifault * * *') output_status_message('OperationErrors:') output_array_of_operationerror(data_object.OperationErrors) if data_object.Type == 'ApiBatchFault': output_apibatchfault(data_object) output_status_message('* * * End output_apifault * * *')",True,data_object is None,data_object is None,0.6532562971115112 3390,"def output_apifault(data_object): if data_object is None: return output_status_message('* * * Begin output_apifault * * *') output_status_message('OperationErrors:') output_array_of_operationerror(data_object.OperationErrors) if: output_apibatchfault(data_object) output_status_message('* * * End output_apifault * * *')",False,data_object.Type == 'ApiBatchFault',data_object.apatchfault is not None,0.6452023983001709 3391,"def __init__(self, mol, backflow_kernel, backflow_kernel_kwargs={}, cuda=False): """"""Transform the electorn coordinates into backflow coordinates. see : Orbital-dependent backflow wave functions for real-space quantum Monte Carlo https://arxiv.org/abs/1910.07167 .. math: \\bold{q}_i = \\bold{r}_i + \\sum_{j eq i} \\eta(r_{ij})(\\bold{r}_i - \\bold{r}_j) """""" super().__init__() self.edist = ElectronElectronDistance(mol.nelec) self.nelec = mol.nelec self.nao = mol.basis.nao self.backflow_kernel = OrbitalDependentBackFlowKernel(backflow_kernel, backflow_kernel_kwargs, mol, cuda) self.ndim = 3 self.cuda = cuda self.device = torch.device('cpu') if: self.device = torch.device('cuda')",True,self.cuda,self.cuda,0.6558609008789062 3392,"@staticmethod def getLogicFromIPS(ips): patch = IPS_Patch.load(ips) for logic, data in RomReader.flavorPatches.items(): address = data['address'] value = patch.getValue(address) if: return logic return 'vanilla'",False,value is not None and value == data['value'],value,0.6465359926223755 3393,"def reduce_signal(ana, t_start=None, t_stop=None): """""" reduce signal to time limits """""" if: ana = ana[ana.times >= t_start] ana.t_start = max(t_start, ana.t_start) if t_stop is not None: ana = ana[ana.times <= t_stop] return ana",True,t_start is not None,t_start is not None,0.6558539867401123 3394,"def reduce_signal(ana, t_start=None, t_stop=None): """""" reduce signal to time limits """""" if t_start is not None: ana = ana[ana.times >= t_start] ana.t_start = max(t_start, ana.t_start) if: ana = ana[ana.times <= t_stop] return ana",True,t_stop is not None,t_stop is not None,0.656186044216156 3395,"def restore_from_classification_checkpoint_fn(self, first_stage_feature_extractor_scope, second_stage_feature_extractor_scope): """"""Returns a map of variables to load from a foreign checkpoint. Note that this overrides the default implementation in faster_rcnn_meta_arch.FasterRCNNFeatureExtractor which does not work for InceptionResnetV2 checkpoints. TODO: revisit whether it's possible to force the `Repeat` namescope as created in `_extract_box_classifier_features` to start counting at 2 (e.g. `Repeat_2`) so that the default restore_fn can be used. Args: first_stage_feature_extractor_scope: A scope name for the first stage feature extractor. second_stage_feature_extractor_scope: A scope name for the second stage feature extractor. Returns: A dict mapping variable names (to load from a checkpoint) to variables in the model graph. """""" variables_to_restore = {} for variable in tf.global_variables(): if: var_name = variable.op.name.replace(first_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable if variable.op.name.startswith(second_stage_feature_extractor_scope): var_name = variable.op.name.replace(second_stage_feature_extractor_scope + '/InceptionResnetV2/Repeat', 'InceptionResnetV2/Repeat_2') var_name = var_name.replace(second_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable return variables_to_restore",True,variable.op.name.startswith(first_stage_feature_extractor_scope),variable.op.name.startswith(first_stage_feature_extractor_scope),0.643566370010376 3396,"def restore_from_classification_checkpoint_fn(self, first_stage_feature_extractor_scope, second_stage_feature_extractor_scope): """"""Returns a map of variables to load from a foreign checkpoint. Note that this overrides the default implementation in faster_rcnn_meta_arch.FasterRCNNFeatureExtractor which does not work for InceptionResnetV2 checkpoints. TODO: revisit whether it's possible to force the `Repeat` namescope as created in `_extract_box_classifier_features` to start counting at 2 (e.g. `Repeat_2`) so that the default restore_fn can be used. Args: first_stage_feature_extractor_scope: A scope name for the first stage feature extractor. second_stage_feature_extractor_scope: A scope name for the second stage feature extractor. Returns: A dict mapping variable names (to load from a checkpoint) to variables in the model graph. """""" variables_to_restore = {} for variable in tf.global_variables(): if variable.op.name.startswith(first_stage_feature_extractor_scope): var_name = variable.op.name.replace(first_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable if: var_name = variable.op.name.replace(second_stage_feature_extractor_scope + '/InceptionResnetV2/Repeat', 'InceptionResnetV2/Repeat_2') var_name = var_name.replace(second_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable return variables_to_restore",True,variable.op.name.startswith(second_stage_feature_extractor_scope),variable.op.name.startswith(second_stage_feature_extractor_scope),0.643777072429657 3397,"def is_our_script(filename: str) -> bool: if: return False with open(filename, 'rb') as f: contents = f.read() return any((h in contents for h in (CURRENT_HASH,) + PRIOR_HASHES))",False,not os.path.exists(filename),filename.endswith('.py') or filename.endswith('.py'),0.645397424697876 3398,"@cached_property def open_payment(self): lp = self.order.payments.last() if: return lp",False,"lp and lp.state not in (OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED)",lp,0.653232216835022 3399,"def tearDown(self): for f in [self.OUT_PRFX + '_state.json']: if: os.remove(f)",True,os.path.isfile(f),os.path.isfile(f),0.6426050662994385 3400,"def set_dis_optimizer(self, discriminator_optimizer): """"""Set discriminator optimizer (MUST)."""""" self._dis_optimizer = discriminator_optimizer if: self._dis_optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer(self._dis_optimizer, 'dynamic')",False,self._is_discriminator_mixed_precision,self._dis_optimizer is not None,0.6432698965072632 3401,"def start_state_machine_unload(self): if: self.play_state = 'unloaded' elif self.play_state == 'loaded': self.play_state ='start_unload' self.unloading_count = 0 self.unload_signal = True self.tick_timer = self.canvas.after(50, self.load_state_machine) elif self.play_state == 'loading': self.unload_signal = True else: self.mon.err(self, 'illegal state in unload method:'+ self.play_state) self.end('error', 'illegal state in unload method:'+ self.play_state)",False,"self.play_state in ('closed', 'initialised', 'unloaded')",self.play_state == 'unloaded',0.6454126834869385 3402,"def start_state_machine_unload(self): if self.play_state in ('closed', 'initialised', 'unloaded'): self.play_state = 'unloaded' elif: self.play_state ='start_unload' self.unloading_count = 0 self.unload_signal = True self.tick_timer = self.canvas.after(50, self.load_state_machine) elif self.play_state == 'loading': self.unload_signal = True else: self.mon.err(self, 'illegal state in unload method:'+ self.play_state) self.end('error', 'illegal state in unload method:'+ self.play_state)",False,self.play_state == 'loaded',self.play_state == 'start_unload',0.6504601240158081 3403,"def start_state_machine_unload(self): if self.play_state in ('closed', 'initialised', 'unloaded'): self.play_state = 'unloaded' elif self.play_state == 'loaded': self.play_state ='start_unload' self.unloading_count = 0 self.unload_signal = True self.tick_timer = self.canvas.after(50, self.load_state_machine) elif: self.unload_signal = True else: self.mon.err(self, 'illegal state in unload method:'+ self.play_state) self.end('error', 'illegal state in unload method:'+ self.play_state)",False,self.play_state == 'loading',self.play_state == 'unloaded',0.6499567031860352 3404,"def complete_aggregate_definition(self): """""" Checks that all the components have been established, to avoid redundant calculations :return: True or False """""" if: return False else: return True",False,self.get_all_component_percents().aggregate(Sum('percent'))['percent__sum'] < 0.98,self._aggregate_criterion is not None or self._aggregate_criterion.count() == 0,0.6436593532562256 3405,"def get_next_block_number(self) -> int: last_block_chunk_file_path = self._get_last_block_chunk_file_path() if: blockchain_state = self.get_last_blockchain_state() assert blockchain_state return blockchain_state.next_block_number return self._get_block_chunk_last_block_number(last_block_chunk_file_path) + 1",True,last_block_chunk_file_path is None,last_block_chunk_file_path is None,0.6442198753356934 3406,"def get_entry_map(self, group=None): """"""Return the entry point map for `group`, or the full entry map"""""" try: ep_map = self._ep_map except AttributeError: ep_map = self._ep_map = EntryPoint.parse_map(self._get_metadata('entry_points.txt'), self) if: return ep_map.get(group, {}) return ep_map",True,group is not None,group is not None,0.6525303721427917 3407,"@cached def role_and_interface_to_relations(role, interface_name): """""" Given a role and interface name, return a list of relation names for the current charm that use that interface under that role (where role is one of ``provides``, ``requires``, or ``peers``). :returns: A list of relation names. """""" _metadata = metadata() results = [] for relation_name, relation in _metadata.get(role, {}).items(): if: results.append(relation_name) return results",True,relation['interface'] == interface_name,relation['interface'] == interface_name,0.6527340412139893 3408,"def __init__(self, url, checkout='master', **kwargs): """""" checkout: branch, tag, or commit hash to checkout (default: ""master""). """""" super(GithubDownloader, self).__init__(url, **kwargs) if: self.checkout = checkout else: self.checkout ='master'",True,checkout,checkout,0.6733279228210449 3409,"def accept(self, visitor: ParseTreeVisitor): if: return visitor.visitPrimaryNoNewArray10(self) else: return visitor.visitChildren(self)",True,"hasattr(visitor, 'visitPrimaryNoNewArray10')","hasattr(visitor, 'visitPrimaryNoNewArray10')",0.645148515701294 3410,"def log_error(self, message): if: raise Exception(message) if self.verbose_level >= 1: print(message) self.error_log += message + '\n'",False,self.disallow_error,self.strict,0.6473442316055298 3411,"def log_error(self, message): if self.disallow_error: raise Exception(message) if: print(message) self.error_log += message + '\n'",False,self.verbose_level >= 1,self.verbose,0.6479179859161377 3412,"def generate_doxygen_xml(app): build_dir = os.path.join(app.confdir, '.build') if: os.mkdir(build_dir) try: subprocess.call(['doxygen', '--version']) retcode = subprocess.call(['doxygen'], cwd=app.confdir) if retcode < 0: sys.stderr.write(f'doxygen error code: {-retcode}\n') except OSError as e: sys.stderr.write(f'doxygen execution failed: {e}\n')",True,not os.path.exists(build_dir),not os.path.exists(build_dir),0.6445726156234741 3413,"def generate_doxygen_xml(app): build_dir = os.path.join(app.confdir, '.build') if not os.path.exists(build_dir): os.mkdir(build_dir) try: subprocess.call(['doxygen', '--version']) retcode = subprocess.call(['doxygen'], cwd=app.confdir) if: sys.stderr.write(f'doxygen error code: {-retcode}\n') except OSError as e: sys.stderr.write(f'doxygen execution failed: {e}\n')",False,retcode < 0,retcode != 0,0.6594671010971069 3414,"def _on_key_down(event): if: return ecore.ECORE_CALLBACK_PASS_ON if key_down_func: return key_down_func(event) return ecore.ECORE_CALLBACK_DONE",False,"isinstance(win.focused_object, EmcTextEntry) and event.key not in ('Up', 'Down')",pass_on,0.6460008025169373 3415,"def _on_key_down(event): if isinstance(win.focused_object, EmcTextEntry) and event.key not in ('Up', 'Down'): return ecore.ECORE_CALLBACK_PASS_ON if: return key_down_func(event) return ecore.ECORE_CALLBACK_DONE",False,key_down_func,key_down_func is not None,0.6544012427330017 3416,"def addBookmarkDestination(self, dest, parent=None): destRef = self._addObject(dest) outlineRef = self.getOutlineRoot() if: parent = outlineRef parent = parent.getObject() parent.addChild(destRef, self) return destRef",False,parent == None,parent is None,0.6635028719902039 3417,"def zcl_iaszone_zonestatuschangenotif(pkt): config.row['zcl_iaszone_zonestatuschangenotif_zonestatus'] = '0x' + pkt[ZCLIASZoneZoneStatusChangeNotification].zone_status.hex() config.row['zcl_iaszone_zonestatuschangenotif_extendedstatus'] = '0x' + pkt[ZCLIASZoneZoneStatusChangeNotification].extended_status.hex() config.row['zcl_iaszone_zonestatuschangenotif_zoneid'] = '0x{:02x}'.format(pkt[ZCLIASZoneZoneStatusChangeNotification].zone_id) config.row['zcl_iaszone_zonestatuschangenotif_delay'] = '0x{:04x}'.format(pkt[ZCLIASZoneZoneStatusChangeNotification].delay) if: config.row['error_msg'] = 'Unexpected payload' return",True,len(bytes(pkt[ZCLIASZoneZoneStatusChangeNotification].payload)) != 0,len(bytes(pkt[ZCLIASZoneZoneStatusChangeNotification].payload)) != 0,0.6522179841995239 3418,"def load_all(stream, Loader=None): """""" Parse all YAML documents in a stream and produce corresponding Python objects. """""" if: load_warning('load_all') Loader = FullLoader loader = Loader(stream) try: while loader.check_data(): yield loader.get_data() finally: loader.dispose()",True,Loader is None,Loader is None,0.6556233167648315 3419,"def tearDown(self): ret, _, _ = peer_probe(self.mnode, self.servers[1]) if: raise ExecutionError('Failed to detach %s' % self.servers[1]) g.log.info('Peer detach successful %s', self.servers[1]) self.get_super_method(self, 'tearDown')()",False,ret,ret != 0,0.6716450452804565 3420,"def mkarg(x): if: return "" '"" + x + ""'"" s ='""' for c in x: if c in '\\$""`': s = s + '\\' s = s + c s = s + '""' return s",False,"""'"" not in x","isinstance(x, str)",0.6582803726196289 3421,"def mkarg(x): if ""'"" not in x: return "" '"" + x + ""'"" s ='""' for c in x: if: s = s + '\\' s = s + c s = s + '""' return s",False,"c in '\\$""`'",c in '',0.65412437915802 3422,"def __init__(self, g=None): if: self.data = nx.DiGraph() else: assert type(g) == nx.DiGraph self.data = g",True,g is None,g is None,0.6647627353668213 3423,"def is_using_venv() -> bool: """"""Check for venv-based virtual environment which sets sys.base_prefix"""""" if: result = True else: result = sys.prefix!= getattr(sys, 'base_prefix', sys.prefix) return result",False,"getattr(sys, 'real_prefix', None) is not None","hasattr(sys, 'real_prefix')",0.6466532349586487 3424,"def __getitem__(self, condition): if: pdist = self._probdist_factory(FreqDist(), *self._factory_args, **self._factory_kw_args) self._pdists[condition] = pdist return self._pdists[condition]",True,condition not in self._pdists,condition not in self._pdists,0.6675348281860352 3425,"@property def compression_format(self): """"""Returns a string representing the compression format"""""" if: return self.dx_10.dxgi_format.name return self.pixel_format.four_c_c.name",False,self.pixel_format.four_c_c == FourCC.DX10,self.pixel_format.four_c_c is None,0.6469659805297852 3426,"def apply_prepare_for_onnx_export_(module): if: seen.add(module) module.prepare_for_onnx_export_(**kwargs)",True,"module != self and hasattr(module, 'prepare_for_onnx_export_') and (module not in seen)","module != self and hasattr(module, 'prepare_for_onnx_export_') and (module not in seen)",0.6449085474014282 3427,"def can_pm_to_private_or_empty(self) -> bool: if: return False field_pm_to_private_or_empty = self.conditions.get(FIELD_PM_TO_PRIVATE_OR_EMPTY) return field_pm_to_private_or_empty is not None and bool(field_pm_to_private_or_empty)",False,self.conditions is None,not self.conditions,0.6469719409942627 3428,"def add(self, param: Param): """""":param param: parameter to add."""""" if: raise TypeError('Only accepts a Param instance.') if param.name in self._params: msg = 'Parameter named %s already exists.\nTo re-assign parameter %s value, use `params[""%s""] = value` instead.'.format(param.name, param.name, param.name) raise ValueError(msg) self._params[param.name] = param",True,"not isinstance(param, Param)","not isinstance(param, Param)",0.6505089998245239 3429,"def add(self, param: Param): """""":param param: parameter to add."""""" if not isinstance(param, Param): raise TypeError('Only accepts a Param instance.') if: msg = 'Parameter named %s already exists.\nTo re-assign parameter %s value, use `params[""%s""] = value` instead.'.format(param.name, param.name, param.name) raise ValueError(msg) self._params[param.name] = param",False,param.name in self._params,self.has_parameter(param.name),0.6545289158821106 3430,"def _evaluate_pg_loss(self): fetches = {'loss': self._pg_loss} feed_dict_ = None if: qvalues = self._get_qvalues() feed_dict_ = {self._qvalue_inputs: qvalues} vals = self._sess.partial_run(self._partial_run_handle, fetches, feed_dict=feed_dict_) self._qvalue_inputs_fed = True return vals['loss']",False,not self._qvalue_inputs_fed,self._qvalue_inputs_fed,0.6534574031829834 3431,"def get_offset(seq): for i in range(len(seq)): if: return (seq[i:], i)",False,seq[i] != '-',seq[i] > 0,0.6573563814163208 3432,"def short_hash(name): if: raise ValueError('Pretrained model for {name} is not available.'.format(name=name)) return _model_sha256[name][:8]",True,name not in _model_sha256,name not in _model_sha256,0.6568353772163391 3433,"@sourceLangName.setter def sourceLangName(self, langName: str) -> None: if: self._info[c_sourceLang] = '' return lang = self._getLangByStr(langName) if lang is None: return self._info[c_sourceLang] = lang.name",False,not langName,langName is None,0.6765367984771729 3434,"@sourceLangName.setter def sourceLangName(self, langName: str) -> None: if not langName: self._info[c_sourceLang] = '' return lang = self._getLangByStr(langName) if: return self._info[c_sourceLang] = lang.name",True,lang is None,lang is None,0.6601065397262573 3435,"def GetDataInterfaceFromXMLFile(self, bstrFileName=defaultNamedNotOptArg): """"""GetDataInterfaceFromXMLFile"""""" ret = self._oleobj_.InvokeTypes(2, LCID, 1, (9, 0), ((8, 1),), bstrFileName) if: ret = Dispatch(ret, u'GetDataInterfaceFromXMLFile', None) return ret",True,ret is not None,ret is not None,0.6539720296859741 3436,"def get_line(self, obj): if: return obj.linename.line_en elif self.context['language'] == 'ja': return obj.linename.line_ja elif self.context['language'] =='sc': return obj.linename.line_sc elif self.context['language'] == 'tc': return obj.linename.line_tc else: return obj.linename.line_en",True,'language' not in self.context,'language' not in self.context,0.652808666229248 3437,"def get_line(self, obj): if 'language' not in self.context: return obj.linename.line_en elif: return obj.linename.line_ja elif self.context['language'] =='sc': return obj.linename.line_sc elif self.context['language'] == 'tc': return obj.linename.line_tc else: return obj.linename.line_en",True,self.context['language'] == 'ja',self.context['language'] == 'ja',0.6516512632369995 3438,"def get_line(self, obj): if 'language' not in self.context: return obj.linename.line_en elif self.context['language'] == 'ja': return obj.linename.line_ja elif: return obj.linename.line_sc elif self.context['language'] == 'tc': return obj.linename.line_tc else: return obj.linename.line_en",True,self.context['language'] == 'sc',self.context['language'] == 'sc',0.6511927843093872 3439,"def get_line(self, obj): if 'language' not in self.context: return obj.linename.line_en elif self.context['language'] == 'ja': return obj.linename.line_ja elif self.context['language'] =='sc': return obj.linename.line_sc elif: return obj.linename.line_tc else: return obj.linename.line_en",True,self.context['language'] == 'tc',self.context['language'] == 'tc',0.650489091873169 3440,"def view_blog(webdriver, datasets): random_blog = random.choice(datasets[BLOGS]) blog_id = random_blog[0] blog_description = random_blog[2] blog = Page(webdriver, page_id=blog_id) datasets['view_blog'] = random_blog def measure(): blog.go_to() blog.wait_for_page_loaded() measure_dom_requests(webdriver, interaction=f'selenium_view_blog', description=blog_description) if: measure_browser_navi_metrics(webdriver, datasets, expected_metrics=browser_metrics['selenium_view_blog']) measure()",True,CONFLUENCE_SETTINGS.extended_metrics,CONFLUENCE_SETTINGS.extended_metrics,0.6436656713485718 3441,"@passivity_tolerance.setter def passivity_tolerance(self, value): if: self._passivity_tolerance = value",False,"isinstance(value, (int, float))","isinstance(value, (int, float, complex, np.number))",0.6471701860427856 3442,"@property def available(self) -> bool: """"""Return True if entity is available."""""" is_avail = True if: is_avail = self.entity_description.available_fn(self._api) return self._api.available and is_avail",False,self.entity_description.available_fn is not None,self.entity_description and self._api.available_fn,0.647518515586853 3443,"@property def linear_frequency_y(self): if: return float(self._entity_data.get('linear_frequency_y')) return float(0)",True,'linear_frequency_y' in self._entity_data,'linear_frequency_y' in self._entity_data,0.6473568081855774 3444,"def able_to_open(self, filepath: Path) -> bool: if: show_general_warning(self, 'The file you tried to open does not exist!') return False if filepath.stat().st_size == 0: show_general_warning(self, 'This file has zero byte size!') return False return True",False,not filepath.exists(),not path.exists(),0.6516550779342651 3445,"def able_to_open(self, filepath: Path) -> bool: if not filepath.exists(): show_general_warning(self, 'The file you tried to open does not exist!') return False if: show_general_warning(self, 'This file has zero byte size!') return False return True",False,filepath.stat().st_size == 0,len(filepath.read()) == 0,0.6471143960952759 3446,"def set_welc_preference(chat_id, should_welcome): with INSERTION_LOCK: curr = SESSION.query(Welcome).get(str(chat_id)) if: curr = Welcome(str(chat_id), should_welcome=should_welcome) else: curr.should_welcome = should_welcome SESSION.add(curr) SESSION.commit()",True,not curr,not curr,0.6591516733169556 3447,"def get_targets(self, anchor_list, valid_flag_list, gt_bboxes, img_metas, featmap_sizes, gt_bboxes_ignore=None, label_channels=1): """"""Compute regression and classification targets for anchors. Args: anchor_list (list[list]): Multi level anchors of each image. valid_flag_list (list[list]): Multi level valid flags of each image. gt_bboxes (list[Tensor]): Ground truth bboxes of each image. img_metas (list[dict]): Meta info of each image. featmap_sizes (list[Tensor]): Feature mapsize each level gt_bboxes_ignore (list[Tensor]): Ignore bboxes of each images label_channels (int): Channel of label. Returns: cls_reg_targets (tuple) """""" if: cls_reg_targets = self.region_targets(anchor_list, valid_flag_list, gt_bboxes, img_metas, featmap_sizes, gt_bboxes_ignore_list=gt_bboxes_ignore, label_channels=label_channels) else: cls_reg_targets = super(StageCascadeRPNHead, self).get_targets(anchor_list, valid_flag_list, gt_bboxes, img_metas, gt_bboxes_ignore_list=gt_bboxes_ignore, label_channels=label_channels) return cls_reg_targets",False,"isinstance(self.assigner, RegionAssigner)","isinstance(self.region_config, RegionConfig)",0.6438440084457397 3448,"def interpolate_mobject(self, alpha: float) -> None: new_alpha = self.rate_func(alpha) if: self.original.set_z_index(self.z_index)",False,new_alpha > 0.5,new_alpha > self.z_index,0.6495963335037231 3449,"def __init__(self, *args, **kwargs): """""" Save the original SIGINT handler for later. """""" super(InterruptibleMixin, self).__init__(*args, **kwargs) self.original_handler = signal(SIGINT, self.handle_sigint) if: self.original_handler = default_int_handler",False,self.original_handler is None,self.original_handler == signal,0.6518441438674927 3450,"def createFields(self): yield UInt32(self,'size', 'Size') yield String(self, 'tag', 4, 'Tag', charset='ASCII') size = self['size'].value if: if self.parse_func: for field in self.parse_func(self): yield field else: yield RawBytes(self, 'content', size, 'Data') yield textHandler(UInt32(self, 'crc32', 'CRC32'), hexadecimal)",False,size != 0,size,0.6587303876876831 3451,"def createFields(self): yield UInt32(self,'size', 'Size') yield String(self, 'tag', 4, 'Tag', charset='ASCII') size = self['size'].value if size!= 0: if: for field in self.parse_func(self): yield field else: yield RawBytes(self, 'content', size, 'Data') yield textHandler(UInt32(self, 'crc32', 'CRC32'), hexadecimal)",False,self.parse_func,self.parse_func is not None,0.6481972336769104 3452,"def generate_final_hkx(self): stat = subprocess.run([hkxcmd_path, 'CONVERT', '-V:WIN32', self.xml_filepath, self.filepath_short], capture_output=True, check=True) if: s = stat.stderr.decode('utf-8').strip() self.error(s) return None if not os.path.exists(self.filepath_short): self.error(f'Failed to create {self.filepath_short}') return None self.info(f'Created HKX file: {self.filepath_short}')",False,stat.returncode,stat.returncode != 0,0.6527742743492126 3453,"def generate_final_hkx(self): stat = subprocess.run([hkxcmd_path, 'CONVERT', '-V:WIN32', self.xml_filepath, self.filepath_short], capture_output=True, check=True) if stat.returncode: s = stat.stderr.decode('utf-8').strip() self.error(s) return None if: self.error(f'Failed to create {self.filepath_short}') return None self.info(f'Created HKX file: {self.filepath_short}')",True,not os.path.exists(self.filepath_short),not os.path.exists(self.filepath_short),0.6449844837188721 3454,"@staticmethod def _initialize_component_maps(): if: if Component._integration_version: component_maps = execute_git(None, integration_dir(), ['show', f'{Component._integration_version}:component-maps.yml'], capture=True, capture_stderr=False) Component.COMPONENT_MAPS = yaml.safe_load(component_maps) else: with open(os.path.join(integration_dir(), 'component-maps.yml')) as fd: Component.COMPONENT_MAPS = yaml.safe_load(fd)",False,Component.COMPONENT_MAPS is None,not Component.component_MAPS,0.6486815214157104 3455,"@staticmethod def _initialize_component_maps(): if Component.COMPONENT_MAPS is None: if: component_maps = execute_git(None, integration_dir(), ['show', f'{Component._integration_version}:component-maps.yml'], capture=True, capture_stderr=False) Component.COMPONENT_MAPS = yaml.safe_load(component_maps) else: with open(os.path.join(integration_dir(), 'component-maps.yml')) as fd: Component.COMPONENT_MAPS = yaml.safe_load(fd)",False,Component._integration_version,is_git_available(),0.6500644683837891 3456,"def tearDown(self): self.handler.flush() LOGGER.removeHandler(self.handler) if: if self.POPULATE_EXPECTED: dbutil.TESTSERVER.save(self.test_dbname, self.EXPECTED_DIR, 'hypercube_tile_contents.sql') else: kkk = -1 LOGGER.info('About to drop %s', self.test_dbname) dbutil.TESTSERVER.drop(self.test_dbname)",False,self.test_dbname,self.db_connection is not None,0.6508885025978088 3457,"def tearDown(self): self.handler.flush() LOGGER.removeHandler(self.handler) if self.test_dbname: if: dbutil.TESTSERVER.save(self.test_dbname, self.EXPECTED_DIR, 'hypercube_tile_contents.sql') else: kkk = -1 LOGGER.info('About to drop %s', self.test_dbname) dbutil.TESTSERVER.drop(self.test_dbname)",False,self.POPULATE_EXPECTED,kkk == -1,0.6499350070953369 3458,"def deco(func_or_class, name=name): if: name = func_or_class.__name__ self._do_register(name, func_or_class) return func_or_class",True,name is None,name is None,0.6647344827651978 3459,"def _diagonal(self: Float[LinearOperator, '... M N']) -> Float[torch.Tensor, '... N']: if: return (self.root.tensor ** 2).sum(-1) else: return super()._diagonal()",False,"isinstance(self.root, DenseLinearOperator)",self.root is not None,0.6472099423408508 3460,"@property def selected_audio_track(self): """""" index of the selected audio track """""" idx = 0 for e in self._OMP.list_audio(): if: return idx idx += 1 return 0",False,e.endswith(':active'),e.audio_type == 'audio',0.6445705890655518 3461,"@lru_cache(maxsize=512) def parse_string(text: str, parser: str): if: raise ValueError('Invalid query string.') return PARSERS[parser].parse_string(text, parse_all=True)",False,'\x00' in text,parser not in PARSERS,0.6579679250717163 3462,"@lru_cache(maxsize=4) def get_damping_aniso_ss_perp(self, om, data_name=None): if: ks = self.camb.ks else: ks = self.data_dict[data_name]['ks_input'] return np.exp(-np.outer(ks ** 2, 1.0 - self.mu ** 2) * self.get_pregen('sigma_ss', om) / 2.0)",True,data_name is None,data_name is None,0.6511674523353577 3463,"def __get_bssid_hasheable_type(self, bssid): if: raise Exception('BSSID datatype must be a tuple, list or array') return tuple(bssid)",False,"not isinstance(bssid, (list, tuple, array))","not isinstance(bssid, (tuple, list))",0.649648904800415 3464,"def draw(self, renderer, *args, **kwargs): """""" Draw the Artist using the given renderer. This method will be overridden in the Artist subclasses. Typically, it is implemented to not have any effect if the Artist is not visible (`.Artist.get_visible` is *False*). Parameters ---------- renderer : `.RendererBase` subclass. """""" if: return self.stale = False",True,not self.get_visible(),not self.get_visible(),0.6483052968978882 3465,"def _compare_import_locations(self, stmt1, stmt2): def get_location(stmt): if: return stmt.get_new_start() else: return stmt.get_old_location()[0] return cmp(get_location(stmt1), get_location(stmt2))",True,stmt.get_new_start() is not None,stmt.get_new_start() is not None,0.6498054265975952 3466,"def setup(self, start=None): """"""Prepare for parsing. This *must* be called before starting to parse. The optional argument is an alternative start symbol; it defaults to the grammar's start symbol. You can use a Parser instance to parse any number of programs; each time you call setup() the parser is reset to an initial state determined by the (implicit or explicit) start symbol. """""" if: start = self.grammar.start newnode = (start, None, None, []) stackentry = (self.grammar.dfas[start], 0, newnode) self.stack = [stackentry] self.rootnode = None self.used_names = set()",True,start is None,start is None,0.6629326343536377 3467,"def tree(cls, level=0, last_sibling=True): yield (cls, level, last_sibling) chidren = subclasses(cls) if: last = chidren[-1] for child in chidren: yield from tree(child, level + 1, child is last)",False,chidren,last_sibling,0.6570907235145569 3468,"def forward(self, x): en = self.encoding(x) b, c, _, _ = x.size() gamma = self.fc(en) y = gamma.view(b, c, 1, 1) outputs = [F.relu_(x + x * y)] if: outputs.append(self.selayer(en)) return tuple(outputs)",False,self.se_loss,self.selayer is not None,0.6559070348739624 3469,"def _print(self, msg, msg_args): """"""Display the message on stout or stderr depending on verbosity"""""" if: return if self.verbose > 50: writer = sys.stderr.write else: writer = sys.stdout.write msg = msg % msg_args writer('[%s]: %s\n' % (self, msg))",False,not self.verbose,self.verbose > 10,0.652661919593811 3470,"def _print(self, msg, msg_args): """"""Display the message on stout or stderr depending on verbosity"""""" if not self.verbose: return if: writer = sys.stderr.write else: writer = sys.stdout.write msg = msg % msg_args writer('[%s]: %s\n' % (self, msg))",False,self.verbose > 50,self.verbosity > 2,0.6565552949905396 3471,"def remove(self, arr): """"""Removes an array from the list Parameters ---------- arr: str or :class:`InteractiveBase` The array name or the data object in this list to remove Raises ------ ValueError If no array with the specified array name is in the list"""""" name = arr if isinstance(arr, six.string_types) else arr.psy.arr_name if: raise ValueError('Array {0} not in the list'.format(name)) for i, arr in enumerate(self): if arr.psy.arr_name == name: del self[i] return raise ValueError('No array found with name {0}'.format(name))",False,arr not in self,name not in self,0.6663435101509094 3472,"def remove(self, arr): """"""Removes an array from the list Parameters ---------- arr: str or :class:`InteractiveBase` The array name or the data object in this list to remove Raises ------ ValueError If no array with the specified array name is in the list"""""" name = arr if isinstance(arr, six.string_types) else arr.psy.arr_name if arr not in self: raise ValueError('Array {0} not in the list'.format(name)) for i, arr in enumerate(self): if: del self[i] return raise ValueError('No array found with name {0}'.format(name))",False,arr.psy.arr_name == name,name == arr,0.651111364364624 3473,"def set_data(self, data): """"""Set table data"""""" if: self.model.set_data(data, self.dictfilter) self.sortByColumn(0, Qt.AscendingOrder)",False,data is not None,data,0.6532381176948547 3474,"def _selected(self, attribute): """""" Returns the selected value of the collection indicated by attribute. Raises an exception if selected value exists and there is not exactly one value in the collection. :param attribute: The name of a collection of the instance, such as 'built_forms"" :return: The selected value or the only value of the collection if its a one-item collection """""" selection = self.selections['sets'].get(attribute, None) if: return selection if len(self._computed_related(attribute)) == 1: return self._computed_related(attribute)[0] raise Exception('Instance {0} has no {1} selected does not return exactly one set, but returns {2}'.format(self, attribute, len(self._computed_related(attribute))))",True,selection,selection,0.6585071086883545 3475,"def _selected(self, attribute): """""" Returns the selected value of the collection indicated by attribute. Raises an exception if selected value exists and there is not exactly one value in the collection. :param attribute: The name of a collection of the instance, such as 'built_forms"" :return: The selected value or the only value of the collection if its a one-item collection """""" selection = self.selections['sets'].get(attribute, None) if selection: return selection if: return self._computed_related(attribute)[0] raise Exception('Instance {0} has no {1} selected does not return exactly one set, but returns {2}'.format(self, attribute, len(self._computed_related(attribute))))",True,len(self._computed_related(attribute)) == 1,len(self._computed_related(attribute)) == 1,0.6479767560958862 3476,"def put_unpack_buffer_aux_into_scope(buf_entry, code): buffer_aux, mode = (buf_entry.buffer_aux, buf_entry.type.mode) pybuffernd_struct = buffer_aux.buflocal_nd_var.cname fldnames = ['strides','shape'] if: fldnames.append('suboffsets') ln = [] for i in range(buf_entry.type.ndim): for fldname in fldnames: ln.append('%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];' % (pybuffernd_struct, i, fldname, pybuffernd_struct, fldname, i)) code.putln(' '.join(ln))",False,mode == 'full',mode == 'suboffsets',0.6561431884765625 3477,"def shuffle_data(self): print(self.split, ': re-build dataset data(lang), should_shuffle=', self.should_shuffle, flush=True) self.scanvqa_new = self.split_scene_new(self.scanvqa) if: random.shuffle(self.scanvqa_new) if self.scanvqa_new_len == -1: self.scanvqa_new_len = len(self.scanvqa_new) assert len(self.scanvqa_new) == self.scanvqa_new_len, 'assert scanvqa length right' print(self.split, ': build dataset done', flush=True)",True,self.should_shuffle,self.should_shuffle,0.652734637260437 3478,"def shuffle_data(self): print(self.split, ': re-build dataset data(lang), should_shuffle=', self.should_shuffle, flush=True) self.scanvqa_new = self.split_scene_new(self.scanvqa) if self.should_shuffle: random.shuffle(self.scanvqa_new) if: self.scanvqa_new_len = len(self.scanvqa_new) assert len(self.scanvqa_new) == self.scanvqa_new_len, 'assert scanvqa length right' print(self.split, ': build dataset done', flush=True)",False,self.scanvqa_new_len == -1,self.scanvqa_new_len is None,0.648914098739624 3479,"def __init__(self, argv=None, aliases=None, flags=None, **kw): """"""Create a key value pair config loader. Parameters ---------- argv : list A list that has the form of sys.argv[1:] which has unicode elements of the form u""key=value"". If this is None (default), then sys.argv[1:] will be used. aliases : dict A dict of aliases for configurable traits. Keys are the short aliases, Values are the resolved trait. Of the form: `{'alias' : 'Configurable.trait'}` flags : dict A dict of flags, keyed by str name. Vaues can be Config objects, dicts, or ""key=value"" strings. If Config or dict, when the flag is triggered, The flag is loaded as `self.config.update(m)`. Returns ------- config : Config The resulting Config object. Examples -------- >>> from traitlets.config.loader import KeyValueConfigLoader >>> cl = KeyValueConfigLoader() >>> d = cl.load_config([""--A.name='brian'"",""--B.number=0""]) >>> sorted(d.items()) [('A', {'name': 'brian'}), ('B', {'number': 0})] """""" super(KeyValueConfigLoader, self).__init__(**kw) if: argv = sys.argv[1:] self.argv = argv self.aliases = aliases or {} self.flags = flags or {}",True,argv is None,argv is None,0.6644856929779053 3480,"@staticmethod def get_body_can_parser(CP): if: signals = [('BSM_ALERT', 'BSM_STATUS_RIGHT'), ('BSM_ALERT', 'BSM_STATUS_LEFT')] checks = [('BSM_STATUS_LEFT', 3), ('BSM_STATUS_RIGHT', 3)] bus_body = 0 return CANParser(DBC[CP.carFingerprint]['body'], signals, checks, bus_body) return None",False,CP.enableBsm,CP.carFingerprint in RAM_CARS,0.6586025953292847 3481,"@staticmethod def cal_inverse_perm(src_perm): """"""Calculate perm after inverse."""""" if: return None elif isinstance(src_perm, (list, tuple)): if len(src_perm) == 0: return None else: return [src_perm.index(i) for i in range(len(src_perm))]",True,src_perm is None,src_perm is None,0.657792866230011 3482,"@staticmethod def cal_inverse_perm(src_perm): """"""Calculate perm after inverse."""""" if src_perm is None: return None elif: if len(src_perm) == 0: return None else: return [src_perm.index(i) for i in range(len(src_perm))]",False,"isinstance(src_perm, (list, tuple))","isinstance(src_perm, list)",0.6468716859817505 3483,"@staticmethod def cal_inverse_perm(src_perm): """"""Calculate perm after inverse."""""" if src_perm is None: return None elif isinstance(src_perm, (list, tuple)): if: return None else: return [src_perm.index(i) for i in range(len(src_perm))]",True,len(src_perm) == 0,len(src_perm) == 0,0.6520488262176514 3484,"def ResetVirtualSites_fast(positions, vsinfo): """"""Given a set of OpenMM-compatible positions and a System object, compute the correct virtual site positions according to the System."""""" isvsites, vsfuncs, vsidxs, vswts = vsinfo if: pos = np.array(positions.value_in_unit(nanometer)) for i in range(len(positions)): if isvsites[i]: pos[i] = vsfuncs[i](pos, vsidxs[i], vswts[i]) newpos = [Vec3(*i) for i in pos] * nanometer return newpos else: return positions",False,any(isvsites),np.isscalar(nanometer),0.6478077173233032 3485,"def ResetVirtualSites_fast(positions, vsinfo): """"""Given a set of OpenMM-compatible positions and a System object, compute the correct virtual site positions according to the System."""""" isvsites, vsfuncs, vsidxs, vswts = vsinfo if any(isvsites): pos = np.array(positions.value_in_unit(nanometer)) for i in range(len(positions)): if: pos[i] = vsfuncs[i](pos, vsidxs[i], vswts[i]) newpos = [Vec3(*i) for i in pos] * nanometer return newpos else: return positions",True,isvsites[i],isvsites[i],0.6522972583770752 3486,"def __str__(self) -> str: result = self.name if: result = f'{result}:{self.tag}' if self._is_in_neuro_registry: assert self.cluster_name is not None base = '' if self.org_name: base = f'/{self.org_name}' result = str(URL.build(scheme='image', host=self.cluster_name, path=f'{base}/{self.project_name}/{result}')) return result",False,self.tag,self.tag is not None,0.658315896987915 3487,"def __str__(self) -> str: result = self.name if self.tag: result = f'{result}:{self.tag}' if: assert self.cluster_name is not None base = '' if self.org_name: base = f'/{self.org_name}' result = str(URL.build(scheme='image', host=self.cluster_name, path=f'{base}/{self.project_name}/{result}')) return result",False,self._is_in_neuro_registry,result == '',0.6458683013916016 3488,"def __str__(self) -> str: result = self.name if self.tag: result = f'{result}:{self.tag}' if self._is_in_neuro_registry: assert self.cluster_name is not None base = '' if: base = f'/{self.org_name}' result = str(URL.build(scheme='image', host=self.cluster_name, path=f'{base}/{self.project_name}/{result}')) return result",True,self.org_name,self.org_name,0.657263994216919 3489,"def handler(self, *args, **kwargs): """"""Use this tool as a CherryPy page handler. For example:: class Root: nav = tools.staticdir.handler(section=""/nav"", dir=""nav"", root=absDir) """""" def handle_func(*a, **kw): handled = self.callable(*args, **self._merged_args(kwargs)) if: raise cherrypy.NotFound() return cherrypy.serving.response.body handle_func.exposed = True return handle_func",True,not handled,not handled,0.656199038028717 3490,"@property def checkout_nb_initial(self): """""" The quantity of cards at the beginning of the ongoing deposit state. Not the quantity at the deposit creation. """""" if: return 0 return self.ongoing_depostate.nb_initial",False,not self.depositstate_set.count(),not self.deposit_creation_set.count(),0.6427438259124756 3491,"def __new__(cls, value=None): value = value or [] temp = [] for item in value: if: item = cls.item_type(item) temp.append(item) while temp and (not temp[-1]): temp = temp[:-1] return super(MD_StructArray, cls).__new__(cls, temp)",False,"not isinstance(item, cls.item_type)",cls.item_type is not None,0.6468790769577026 3492,"def http_status_to_exit_status(http_status: int, follow=False) -> ExitStatus: """""" Translate HTTP status code to exit status code. (Relevant only when invoked with --check-status or --download.) """""" if: return ExitStatus.ERROR_HTTP_3XX elif 400 <= http_status <= 499: return ExitStatus.ERROR_HTTP_4XX elif 500 <= http_status <= 599: return ExitStatus.ERROR_HTTP_5XX else: return ExitStatus.SUCCESS",False,300 <= http_status <= 399 and (not follow),3 <= http_status <= 3,0.6471736431121826 3493,"def http_status_to_exit_status(http_status: int, follow=False) -> ExitStatus: """""" Translate HTTP status code to exit status code. (Relevant only when invoked with --check-status or --download.) """""" if 300 <= http_status <= 399 and (not follow): return ExitStatus.ERROR_HTTP_3XX elif: return ExitStatus.ERROR_HTTP_4XX elif 500 <= http_status <= 599: return ExitStatus.ERROR_HTTP_5XX else: return ExitStatus.SUCCESS",False,400 <= http_status <= 499,4 <= http_status <= 4 and follow,0.6525832414627075 3494,"def http_status_to_exit_status(http_status: int, follow=False) -> ExitStatus: """""" Translate HTTP status code to exit status code. (Relevant only when invoked with --check-status or --download.) """""" if 300 <= http_status <= 399 and (not follow): return ExitStatus.ERROR_HTTP_3XX elif 400 <= http_status <= 499: return ExitStatus.ERROR_HTTP_4XX elif: return ExitStatus.ERROR_HTTP_5XX else: return ExitStatus.SUCCESS",False,500 <= http_status <= 599,400 <= http_status <= 599,0.6528547406196594 3495,"def _config_checks(self): if: raise ValueError('Training type not supported: {}'.format(self.training_type))",True,self.training_type != 'selfsupervised',self.training_type != 'selfsupervised',0.6469779014587402 3496,"def gifid(update: Update, context: CallbackContext): msg = update.effective_message if: update.effective_message.reply_text(f'Gif ID:\n{msg.reply_to_message.animation.file_id}', parse_mode=ParseMode.HTML) else: update.effective_message.reply_text('Please reply to a gif to get its ID.')",False,msg.reply_to_message and msg.reply_to_message.animation,msg.reply_to_message and msg.reply_to_message.animation and msg.reply_to_message.animation.file_id,0.6433804035186768 3497,"def current_task(loop=None): """"""Return a currently executed task."""""" if: loop = events.get_running_loop() return _current_tasks.get(loop)",True,loop is None,loop is None,0.6584993600845337 3498,"def url_decode_stream(stream, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None, limit=None, return_iterator=False): """"""Works like :func:`url_decode` but decodes a stream. The behavior of stream and limit follows functions like :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is directly fed to the `cls` so you can consume the data while it's parsed. .. versionadded:: 0.8 :param stream: a stream with the encoded querystring :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param decode_keys: Used on Python 2.x to control whether keys should be forced to be unicode objects. If set to `True`, keys will be unicode in all cases. Otherwise, they remain `str` if they fit into ASCII. :param include_empty: Set to `False` if you don't want empty values to appear in the dict. :param errors: the decoding error behavior. :param separator: the pair separator to be used, defaults to ``&`` :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. :param limit: the content length of the URL data. Not necessary if a limited stream is provided. :param return_iterator: if set to `True` the `cls` argument is ignored and an iterator over all decoded pairs is returned """""" from.wsgi import make_chunk_iter pair_iter = make_chunk_iter(stream, separator, limit) decoder = _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors) if: return decoder if cls is None: from.datastructures import MultiDict cls = MultiDict return cls(decoder)",True,return_iterator,return_iterator,0.6528314352035522 3499,"def url_decode_stream(stream, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None, limit=None, return_iterator=False): """"""Works like :func:`url_decode` but decodes a stream. The behavior of stream and limit follows functions like :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is directly fed to the `cls` so you can consume the data while it's parsed. .. versionadded:: 0.8 :param stream: a stream with the encoded querystring :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param decode_keys: Used on Python 2.x to control whether keys should be forced to be unicode objects. If set to `True`, keys will be unicode in all cases. Otherwise, they remain `str` if they fit into ASCII. :param include_empty: Set to `False` if you don't want empty values to appear in the dict. :param errors: the decoding error behavior. :param separator: the pair separator to be used, defaults to ``&`` :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. :param limit: the content length of the URL data. Not necessary if a limited stream is provided. :param return_iterator: if set to `True` the `cls` argument is ignored and an iterator over all decoded pairs is returned """""" from.wsgi import make_chunk_iter pair_iter = make_chunk_iter(stream, separator, limit) decoder = _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors) if return_iterator: return decoder if: from.datastructures import MultiDict cls = MultiDict return cls(decoder)",True,cls is None,cls is None,0.6559515595436096 3500,"def create_playlist_item(self, result_set): """""" Creates a new MediaItem for a playlist of seasons. This method creates a new MediaItem from the Regular Expression or Json results . The method should be implemented by derived classes and are specific to the channel. :param dict result_set: The result_set of the self.episodeItemRegex :return: A new MediaItem of type 'folder'. :rtype: MediaItem|None """""" url = '#playlist={}'.format(result_set['name']) item = MediaItem(result_set['title'], url) item.poster = result_set.get('imageUrl') items = result_set['items'] for sub_item in items: video_item = self.create_typed_item(sub_item) if: item.items.append(video_item) return item",True,video_item,video_item,0.6601381301879883 3501,"def __init__(self, arch, vc_ver=None, vc_min_ver=0): self.pi = PlatformInfo(arch) self.ri = RegistryInfo(self.pi) self.si = SystemInfo(self.ri, vc_ver) if: err = 'No suitable Microsoft Visual C++ version found' raise distutils.errors.DistutilsPlatformError(err)",False,self.vc_ver < vc_min_ver,vc_min_ver < 0,0.6476607322692871 3502,"def write(self, text: str, *, bold: bool=False) -> None: if: text = '.B'+ text self.source.append(text)",True,bold,bold,0.680109441280365 3503,"def parse_full_cfg(parser: argparse.ArgumentParser, argv: Optional[List[str]]=None) -> argparse.Namespace: """"""Given a parser, parse all arguments and return the final configuration."""""" if: argv = sys.argv[1:] args = parser.parse_args(argv) args = postprocess_args(args, argv, parser) return args",True,argv is None,argv is None,0.6778140068054199 3504,"@property def targetcs(self): """"""Native Component Coordinate System. Returns ------- str Native Component Coordinate System. """""" if: return self.props['TargetCS'] else: return 'Global'",False,'TargetCS' in list(self.props.keys()),'TargetCS' in self.props,0.6449006199836731 3505,"def __init__(self, name, data, filename=None, headers=None): self._name = name self._filename = filename self.data = data self.headers = {} if: self.headers = dict(headers)",True,headers,headers,0.669974684715271 3506,"def vercmpparts(e1, v1, r1, e2, v2, r2): rc = vercmppart(e1, e2) if: rc = vercmppart(v1, v2) if not rc: rc = vercmppart(r1, r2) return rc",True,not rc,not rc,0.6644295454025269 3507,"def vercmpparts(e1, v1, r1, e2, v2, r2): rc = vercmppart(e1, e2) if not rc: rc = vercmppart(v1, v2) if: rc = vercmppart(r1, r2) return rc",True,not rc,not rc,0.6675032377243042 3508,"@action(detail=True, methods=['GET'], renderer_classes=[renderers.JSONRenderer], permission_classes=[EditLinkSubmissionPermission], url_path='(enketo/)?edit') def enketo_edit(self, request, pk, *args, **kwargs): submission_id = positive_int(pk) enketo_response = self._get_enketo_link(request, submission_id, 'edit') if: EnketoSessionAuthentication.prepare_response_with_csrf_cookie(request, enketo_response) return enketo_response",False,"enketo_response.status_code in (status.HTTP_201_CREATED, status.HTTP_200_OK)",settings.NEED_SESSION_Authentication,0.6481843590736389 3509,"def execute_rebalance_plan(self, moves: List[Json], version: int=1) -> Result[bool]: """"""Execute the given set of move shard operations. You can use :meth:`Cluster.calculate_rebalance_plan` to calculate these operations to improve the balance of shards, leader shards, and follower shards. :param moves: List of move shard operations. :type moves: [dict] :param version: Must be set to 1. :type version: int :return: True if the methods have been accepted and scheduled for execution. :rtype: bool :raise: arango.exceptions.ClusterRebalanceError: If request fails. """""" data: Json = dict(version=version, moves=moves) request = Request(method='post', endpoint='/_admin/cluster/rebalance/execute', data=data) def response_handler(resp: Response) -> bool: if: raise ClusterRebalanceError(resp, request) result: bool = resp.body['code'] == 202 return result return self._execute(request, response_handler)",True,not resp.is_success,not resp.is_success,0.647933840751648 3510,"def listen(self, backlog=None): if: backlog = DefaultListenBacklog dgram = self.sock.type == socket.SOCK_DGRAM if not dgram and (not self.skip): self.sock.listen(backlog)",True,backlog is None,backlog is None,0.6558690071105957 3511,"def listen(self, backlog=None): if backlog is None: backlog = DefaultListenBacklog dgram = self.sock.type == socket.SOCK_DGRAM if: self.sock.listen(backlog)",False,not dgram and (not self.skip),dgram and backlog,0.6435003280639648 3512,"@zope.component.adapter(OFS.interfaces.IItem, OFS.interfaces.IObjectWillBeMovedEvent) def dispatchObjectWillBeMovedEvent(ob, event): """"""Multi-subscriber for IItem + IObjectWillBeMovedEvent. """""" if: dispatchToSublocations(ob, event) callManageBeforeDelete(ob, event.object, event.oldParent)",False,OFS.interfaces.IObjectManager.providedBy(ob),event.sublocations,0.6477490663528442 3513,"def generate_super_empty_image(target_dir, output_super_empty): """"""Generates super_empty image from target package. Args: target_dir: Path to the target file package which contains misc_info.txt for detailed information for super image. output_super_empty: If provided, copies a super_empty.img file from the target files package to this path. """""" misc_info_txt = os.path.join(target_dir, 'META','misc_info.txt') use_dynamic_partitions = common.LoadDictionaryFromFile(misc_info_txt).get('use_dynamic_partitions') if: raise ValueError('Building super_empty.img requires use_dynamic_partitions=true.') elif use_dynamic_partitions == 'true': super_empty_img = os.path.join(target_dir, 'IMAGES','super_empty.img') build_super_image_args = [misc_info_txt, super_empty_img] build_super_image.main(build_super_image_args) if output_super_empty: shutil.copyfile(super_empty_img, output_super_empty)",False,use_dynamic_partitions != 'true' and output_super_empty,use_dynamic_partitions == 'false',0.641771674156189 3514,"def generate_super_empty_image(target_dir, output_super_empty): """"""Generates super_empty image from target package. Args: target_dir: Path to the target file package which contains misc_info.txt for detailed information for super image. output_super_empty: If provided, copies a super_empty.img file from the target files package to this path. """""" misc_info_txt = os.path.join(target_dir, 'META','misc_info.txt') use_dynamic_partitions = common.LoadDictionaryFromFile(misc_info_txt).get('use_dynamic_partitions') if use_dynamic_partitions!= 'true' and output_super_empty: raise ValueError('Building super_empty.img requires use_dynamic_partitions=true.') elif: super_empty_img = os.path.join(target_dir, 'IMAGES','super_empty.img') build_super_image_args = [misc_info_txt, super_empty_img] build_super_image.main(build_super_image_args) if output_super_empty: shutil.copyfile(super_empty_img, output_super_empty)",True,use_dynamic_partitions == 'true',use_dynamic_partitions == 'true',0.6448710560798645 3515,"def generate_super_empty_image(target_dir, output_super_empty): """"""Generates super_empty image from target package. Args: target_dir: Path to the target file package which contains misc_info.txt for detailed information for super image. output_super_empty: If provided, copies a super_empty.img file from the target files package to this path. """""" misc_info_txt = os.path.join(target_dir, 'META','misc_info.txt') use_dynamic_partitions = common.LoadDictionaryFromFile(misc_info_txt).get('use_dynamic_partitions') if use_dynamic_partitions!= 'true' and output_super_empty: raise ValueError('Building super_empty.img requires use_dynamic_partitions=true.') elif use_dynamic_partitions == 'true': super_empty_img = os.path.join(target_dir, 'IMAGES','super_empty.img') build_super_image_args = [misc_info_txt, super_empty_img] build_super_image.main(build_super_image_args) if: shutil.copyfile(super_empty_img, output_super_empty)",True,output_super_empty,output_super_empty,0.6507792472839355 3516,"def __setattr__(self, k, v): if: object.__setattr__(self, k, v) else: super().__setattr__(k, v)",False,k == '__default__',k == 'dict',0.6533061861991882 3517,"def _open(self, req, data=None): result = self._call_chain(self.handle_open, 'default', 'default_open', req) if: return result protocol = req.get_type() result = self._call_chain(self.handle_open, protocol, protocol + '_open', req) if result: return result return self._call_chain(self.handle_open, 'unknown', 'unknown_open', req)",True,result,result,0.6752873659133911 3518,"def _open(self, req, data=None): result = self._call_chain(self.handle_open, 'default', 'default_open', req) if result: return result protocol = req.get_type() result = self._call_chain(self.handle_open, protocol, protocol + '_open', req) if: return result return self._call_chain(self.handle_open, 'unknown', 'unknown_open', req)",True,result,result,0.6751552820205688 3519,"def is_only_one_item_selected(paths, app): if: app.show_error_message('Please, select only one item!') return False if not paths: app.show_error_message('No selected item!') return False return True",False,len(paths) > 1,len(paths) < 1,0.6459823250770569 3520,"def is_only_one_item_selected(paths, app): if len(paths) > 1: app.show_error_message('Please, select only one item!') return False if: app.show_error_message('No selected item!') return False return True",False,not paths,not paths[0].has_selected,0.655068039894104 3521,"def getOpenTagName(text, uri=None): """"""getOpenTagName return the current tag name """""" tree = koXMLTreeService.getService().getTreeForURI(uri, text) if: return None return tree.tagname(tree.current)",False,tree.current is None,tree is None,0.6473177075386047 3522,"@staticmethod def sieve_function(raw_data): """""" The method that splits samples and status """""" matchers = [] return_list = [] matchers.append(D1000TemperatureDataParticle.regex_compiled()) for matcher in matchers: for match in matcher.finditer(raw_data): return_list.append((match.start(), match.end())) if: log.debug('sieve_function: raw_data=%s, return_list=%s', raw_data, return_list) return return_list",False,not return_list,log.isEnabledFor(logging.DEBUG),0.6493517160415649 3523,"def decompose(self): dval = self.stop and [self.patterns['start']] or [] dval += [self.patterns[c] for c in self.encoded] if: dval.append(self.patterns['stop']) self.decomposed = ''.join(dval) return self.decomposed",True,self.stop,self.stop,0.6630008220672607 3524,"@Gtk.Template.Callback() def on_switch_button_clicked(self, switch, g_param): if: print('Button checked') else: print('Button unchecked')",False,switch.get_active(),switch == 'no',0.6507995128631592 3525,"def match_asg_tags(tags_to_match, asg): for key, value in tags_to_match.items(): for tag in asg['Tags']: if: break else: return False return True",False,key == tag['Key'] and value == tag['Value'],tag['Name'] == value,0.644949197769165 3526,"def init_weights(self): if: for m in self.heads: if hasattr(m, 'init_weights'): m.init_weights() self._is_init = True else: warnings.warn(f'init_weights of {self.__class__.__name__} has been called more than once.')",False,not self._is_init,self._is_init,0.6545224189758301 3527,"def init_weights(self): if not self._is_init: for m in self.heads: if: m.init_weights() self._is_init = True else: warnings.warn(f'init_weights of {self.__class__.__name__} has been called more than once.')",False,"hasattr(m, 'init_weights')","isinstance(m, nn.Conv2d)",0.6466503143310547 3528,"@action.register(Reverse) def action_reverse(cp_action): logger.debug(f'reverse: adjoint step back to {cp_action.n0:d}') if: raise RuntimeError('Invalid checkpointing state') if cp_action.n0 > n: raise RuntimeError('Invalid checkpointing state')",False,cp_action.n1 != n + 1,cp_action.n0 < n,0.6580827832221985 3529,"@action.register(Reverse) def action_reverse(cp_action): logger.debug(f'reverse: adjoint step back to {cp_action.n0:d}') if cp_action.n1!= n + 1: raise RuntimeError('Invalid checkpointing state') if: raise RuntimeError('Invalid checkpointing state')",False,cp_action.n0 > n,cp_action.n2 != n - 1,0.6570297479629517 3530,"def _get_end_date(self, instance, kwargs): datetime_string = kwargs.get('end') if: return None datetime_string = self._format_datetime(datetime_string) return datetime_string",True,not datetime_string,not datetime_string,0.6488219499588013 3531,"def route(self, *pattern): def wrap(func): for p in pattern: if: p = re.compile(p.replace('RE:', '')) self.url2func[p] = func return func return wrap",False,p.startswith('RE:'),p != '',0.6464273929595947 3532,"def notify(self, msg, data=None): """"""Notify a message."""""" logger.debug('notify: {}'.format(msg)) if: return cond = self._conditions.get(msg, None) self._data[msg] = data if cond is None: self._conditions[msg] = 1 elif isinstance(cond, int): return else: cond.acquire() cond.notifyAll() cond.release() self._conditions.pop(msg)",False,msg in self._disabled,not self._conditions,0.6544349789619446 3533,"def notify(self, msg, data=None): """"""Notify a message."""""" logger.debug('notify: {}'.format(msg)) if msg in self._disabled: return cond = self._conditions.get(msg, None) self._data[msg] = data if: self._conditions[msg] = 1 elif isinstance(cond, int): return else: cond.acquire() cond.notifyAll() cond.release() self._conditions.pop(msg)",True,cond is None,cond is None,0.656232476234436 3534,"def notify(self, msg, data=None): """"""Notify a message."""""" logger.debug('notify: {}'.format(msg)) if msg in self._disabled: return cond = self._conditions.get(msg, None) self._data[msg] = data if cond is None: self._conditions[msg] = 1 elif: return else: cond.acquire() cond.notifyAll() cond.release() self._conditions.pop(msg)",False,"isinstance(cond, int)",not msg,0.6458117961883545 3535,"def nodes_where_temporary_select_uid(self, temporary): mask = node_flag_temporary_file if: value = node_flag_temporary_file else: value = 0 return self.nodes_where_flag_select_uid(mask, value, dbg='nodes_where_temporary_select_uid')",True,temporary,temporary,0.6744039058685303 3536,"def _get_table_data(self): """"""Return clipboard processed as data"""""" data = self._simplify_shape(self.table_widget.get_data()) if: return array(data) elif pd and self.table_widget.df_btn.isChecked(): info = self.table_widget.pd_info buf = io.StringIO(self.table_widget.pd_text) return pd.read_csv(buf, **info) return data",False,self.table_widget.array_btn.isChecked(),len(data) > 0,0.648117184638977 3537,"def _get_table_data(self): """"""Return clipboard processed as data"""""" data = self._simplify_shape(self.table_widget.get_data()) if self.table_widget.array_btn.isChecked(): return array(data) elif: info = self.table_widget.pd_info buf = io.StringIO(self.table_widget.pd_text) return pd.read_csv(buf, **info) return data",False,pd and self.table_widget.df_btn.isChecked(),self.table_widget.pd_text is not None,0.6488419771194458 3538,"def GetQuantity(self, name): """""" Returns the specified quantity called ""name"" from the stream, or none if the specified name does not exist. Parameters ---------- name: str Returns ------- quant: float """""" for quant in self.quantities: if: return quant return None",False,quant.name == name,name == quant.name,0.6509833335876465 3539,"def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): """"""Return full path to the user-specific state dir for this application. ""appname"" is the name of application. If None, just the system directory is returned. ""appauthor"" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. ""version"" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ""."". Only applied when appname is present. ""roaming"" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user state directories are: Mac OS X: same as user_data_dir Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined Win *: same as user_data_dir For Unix, we follow this Debian proposal to extend the XDG spec and support $XDG_STATE_HOME. That means, by default ""~/.local/state/"". """""" if: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_STATE_HOME', os.path.expanduser('~/.local/state')) if appname: path = os.path.join(path, appname) if appname and version: ",True,"system in ['win32', 'darwin']","system in ['win32', 'darwin']",0.6420186758041382 3540,"def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): """"""Return full path to the user-specific state dir for this application. ""appname"" is the name of application. If None, just the system directory is returned. ""appauthor"" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. ""version"" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ""."". Only applied when appname is present. ""roaming"" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user state directories are: Mac OS X: same as user_data_dir Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined Win *: same as user_data_dir For Unix, we follow this Debian proposal to extend the XDG spec and support $XDG_STATE_HOME. That means, by default ""~/.local/state/"". """""" if system in ['win32', 'darwin']: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_STATE_HOME', os.path.expanduser('~/.local/state')) if appname: path = os.path.join(path, appname)",True,appname and version,appname and version,0.6510002613067627 3541,"def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): """"""Return full path to the user-specific state dir for this application. ""appname"" is the name of application. If None, just the system directory is returned. ""appauthor"" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. ""version"" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ""."". Only applied when appname is present. ""roaming"" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user state directories are: Mac OS X: same as user_data_dir Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined Win *: same as user_data_dir For Unix, we follow this Debian proposal to extend the XDG spec and support $XDG_STATE_HOME. That means, by default ""~/.local/state/"". """""" if system in ['win32', 'darwin']: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_STATE_HOME', os.path.expanduser('~/.local/state')) if: path = os.path.join(path, appname)",True,appname,appname,0.6609145402908325 3542,"def getobjects(bunchdt, data, commdct, key, places=7, **kwargs): """"""get all the objects of key that matches the fields in ``**kwargs``"""""" idfobjects = bunchdt[key] allobjs = [] for obj in idfobjects: if: allobjs.append(obj) return allobjs",False,"__objecthasfields(bunchdt, data, commdct, obj, places=places, **kwargs)",obj['field_name'] == data[key] and len(obj['field_names']) == places,0.6432439684867859 3543,"@ATMT.receive_condition(WAIT_DATA) def receive_data(self, pkt): if: data = pkt[TFTP_DATA] if data.block == self.blk: raise self.DATA(data)",False,TFTP_DATA in pkt,TFTP_DATA in pkt and TFTP_DATA in pkt,0.6586496233940125 3544,"@ATMT.receive_condition(WAIT_DATA) def receive_data(self, pkt): if TFTP_DATA in pkt: data = pkt[TFTP_DATA] if: raise self.DATA(data)",False,data.block == self.blk,data != self.DATA,0.6507506370544434 3545,"@logfile.setter def logfile(self, value): """""" Assigns an opened file object to the device for logging If there is an open logfile, and 'value' is ``None`` or ``False`` then close the existing file. :param file value: An open ``file`` object. :returns: the new logfile ``file`` object :raises ValueError: When **value** is not a ``file`` object """""" if: rc = self._logfile.close() self._logfile = False return rc if sys.version < '3': if not isinstance(value, file): raise ValueError('value must be a file object') else: import io if not isinstance(value, io.TextIOWrapper): raise ValueError('value must be a file object') self._logfile = value return self._logfile",False,not value and self._logfile is not None,self._logfile,0.6507032513618469 3546,"@logfile.setter def logfile(self, value): """""" Assigns an opened file object to the device for logging If there is an open logfile, and 'value' is ``None`` or ``False`` then close the existing file. :param file value: An open ``file`` object. :returns: the new logfile ``file`` object :raises ValueError: When **value** is not a ``file`` object """""" if not value and self._logfile is not None: rc = self._logfile.close() self._logfile = False return rc if: if not isinstance(value, file): raise ValueError('value must be a file object') else: import io if not isinstance(value, io.TextIOWrapper): raise ValueError('value must be a file object') self._logfile = value return self._logfile",False,sys.version < '3',"isinstance(value, (file, Path))",0.6474345326423645 3547,"@logfile.setter def logfile(self, value): """""" Assigns an opened file object to the device for logging If there is an open logfile, and 'value' is ``None`` or ``False`` then close the existing file. :param file value: An open ``file`` object. :returns: the new logfile ``file`` object :raises ValueError: When **value** is not a ``file`` object """""" if not value and self._logfile is not None: rc = self._logfile.close() self._logfile = False return rc if sys.version < '3': if: raise ValueError('value must be a file object') else: import io if not isinstance(value, io.TextIOWrapper): raise ValueError('value must be a file object') self._logfile = value return self._logfile",True,"not isinstance(value, file)","not isinstance(value, file)",0.6437522172927856 3548,"@logfile.setter def logfile(self, value): """""" Assigns an opened file object to the device for logging If there is an open logfile, and 'value' is ``None`` or ``False`` then close the existing file. :param file value: An open ``file`` object. :returns: the new logfile ``file`` object :raises ValueError: When **value** is not a ``file`` object """""" if not value and self._logfile is not None: rc = self._logfile.close() self._logfile = False return rc if sys.version < '3': if not isinstance(value, file): raise ValueError('value must be a file object') else: import io if: raise ValueError('value must be a file object') self._logfile = value return self._logfile",False,"not isinstance(value, io.TextIOWrapper)","not isinstance(value, io.File)",0.6462374329566956 3549,"def colorize(self, color_key, text): if: return text else: return self.codes[color_key] + text + self.codes['reset']",False,not sys.stdout.isatty(),color_key not in self.codes,0.6457770466804504 3550,"def _prop_name(self, nfld): fmt = nfld.get_attrs().get('valuefmt') if: return None try: index = fmt.index('[') fmt = fmt[index:] except ValueError: pass return fmt",False,not fmt,fmt is None,0.6615540981292725 3551,"@property def canvas(self): """"""Canvas managed by FigureManager."""""" if: return None return self._figure.canvas",False,not self._figure,self._figure is None,0.6527836322784424 3552,"def _find_no_duplicates(self, name, domain=None, path=None): """"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally. Takes as args name and optional domain and path. Returns a cookie.value. Throws KeyError if cookie is not found and CookieConflictError if there are multiple cookies that match name and optionally domain and path."""""" toReturn = None for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: if toReturn is not None: raise CookieConflictError('There are multiple cookies with name, %r' % name) toReturn = cookie.value if: return toReturn raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,toReturn,toReturn,0.6551533341407776 3553,"def _find_no_duplicates(self, name, domain=None, path=None): """"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally. Takes as args name and optional domain and path. Returns a cookie.value. Throws KeyError if cookie is not found and CookieConflictError if there are multiple cookies that match name and optionally domain and path."""""" toReturn = None for cookie in iter(self): if: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: if toReturn is not None: raise CookieConflictError('There are multiple cookies with name, %r' % name) toReturn = cookie.value if toReturn: return toReturn raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,cookie.name == name,cookie.name == name,0.653222918510437 3554,"def _find_no_duplicates(self, name, domain=None, path=None): """"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally. Takes as args name and optional domain and path. Returns a cookie.value. Throws KeyError if cookie is not found and CookieConflictError if there are multiple cookies that match name and optionally domain and path."""""" toReturn = None for cookie in iter(self): if cookie.name == name: if: if path is None or cookie.path == path: if toReturn is not None: raise CookieConflictError('There are multiple cookies with name, %r' % name) toReturn = cookie.value if toReturn: return toReturn raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,domain is None or cookie.domain == domain,domain is None or cookie.domain == domain,0.6449606418609619 3555,"def _find_no_duplicates(self, name, domain=None, path=None): """"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally. Takes as args name and optional domain and path. Returns a cookie.value. Throws KeyError if cookie is not found and CookieConflictError if there are multiple cookies that match name and optionally domain and path."""""" toReturn = None for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if: if toReturn is not None: raise CookieConflictError('There are multiple cookies with name, %r' % name) toReturn = cookie.value if toReturn: return toReturn raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,path is None or cookie.path == path,path is None or cookie.path == path,0.6460728049278259 3556,"def _find_no_duplicates(self, name, domain=None, path=None): """"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally. Takes as args name and optional domain and path. Returns a cookie.value. Throws KeyError if cookie is not found and CookieConflictError if there are multiple cookies that match name and optionally domain and path."""""" toReturn = None for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: if: raise CookieConflictError('There are multiple cookies with name, %r' % name) toReturn = cookie.value if toReturn: return toReturn raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,toReturn is not None,toReturn is not None,0.6483529806137085 3557,"def forward(self, x): x0 = x x = self.conv2d1(x) if: x = self.bn1(x) x = self.activation(x) x = self.conv2d2(x) x = self.average_pooling(x) x0 = self.average_pooling(x0) if self.d_spectral_norm is False: x0 = self.bn0(x0) x0 = self.conv2d0(x0) out = x + x0 return out",True,self.d_spectral_norm is False,self.d_spectral_norm is False,0.6415021419525146 3558,"def forward(self, x): x0 = x x = self.conv2d1(x) if self.d_spectral_norm is False: x = self.bn1(x) x = self.activation(x) x = self.conv2d2(x) x = self.average_pooling(x) x0 = self.average_pooling(x0) if: x0 = self.bn0(x0) x0 = self.conv2d0(x0) out = x + x0 return out",True,self.d_spectral_norm is False,self.d_spectral_norm is False,0.6421926617622375 3559,"def forward(self, inputs): sz = inputs.size() padding = torch.autograd.Variable(torch.zeros(sz[0], self.pad, sz[2], sz[3]), requires_grad=False) if: padding = padding.cuda() padded = torch.cat((inputs, padding), 1) return padded",False,inputs.is_cuda,self.gpu,0.6499390602111816 3560,"def privilege_exist(privileges, datastore_url): """""" Check whether a entry with given datastore_name exists in privileges """""" for p in privileges: if: return True return False",False,datastore_url == p.datastore_url,p['name'] == datastore_url,0.6507198810577393 3561,"def __set__(self, instance, value): headers = self._get_headers(instance) if: headers = instance._attrs[HEADERS] = {} headers[self.name] = value instance.set_headers(headers)",False,headers is None,HEADERS not in instance._attrs,0.6610451936721802 3562,"def match(self, *args: Any, **kwargs: Any) -> bool: """"""Does this rrset match the specified attributes? Behaves as :py:func:`full_match()` if the first argument is a ``dns.name.Name``, and as :py:func:`dns.rdataset.Rdataset.match()` otherwise. (This behavior fixes a design mistake where the signature of this method became incompatible with that of its superclass. The fix makes RRsets matchable as Rdatasets while preserving backwards compatibility.) """""" if: return self.full_match(*args, **kwargs) else: return super().match(*args, **kwargs)",False,"isinstance(args[0], dns.name.Name)",self.full_match is not None,0.647274374961853 3563,"def ensure_bytes_address(*objs: str or bytes or Account): result = list() for obj in objs: if: result.append(obj) elif isinstance(obj, str): result.append(Address.b58decode(obj).to_bytes()) elif isinstance(obj, Account): result.append(obj.get_address_bytes()) else: raise SDKException(ErrorCode.param_error) return tuple(result)",True,"isinstance(obj, bytes)","isinstance(obj, bytes)",0.6461296081542969 3564,"def ensure_bytes_address(*objs: str or bytes or Account): result = list() for obj in objs: if isinstance(obj, bytes): result.append(obj) elif: result.append(Address.b58decode(obj).to_bytes()) elif isinstance(obj, Account): result.append(obj.get_address_bytes()) else: raise SDKException(ErrorCode.param_error) return tuple(result)",True,"isinstance(obj, str)","isinstance(obj, str)",0.6450813412666321 3565,"def ensure_bytes_address(*objs: str or bytes or Account): result = list() for obj in objs: if isinstance(obj, bytes): result.append(obj) elif isinstance(obj, str): result.append(Address.b58decode(obj).to_bytes()) elif: result.append(obj.get_address_bytes()) else: raise SDKException(ErrorCode.param_error) return tuple(result)",True,"isinstance(obj, Account)","isinstance(obj, Account)",0.6470848917961121 3566,"def step(self, action): if: raise RuntimeError('Need to reset before first step().') if self.timestep > self.episode_length: raise RuntimeError('Episode is over. Need to step().') if np.array(action) not in self.action_space: raise ValueError(f'Invalid action {action}') t, self.timestep = (self.timestep, self.timestep + 1) done = t == self.episode_length return (t, t * 10, done, False, {})",True,self.timestep is None,self.timestep is None,0.6511002779006958 3567,"def step(self, action): if self.timestep is None: raise RuntimeError('Need to reset before first step().') if: raise RuntimeError('Episode is over. Need to step().') if np.array(action) not in self.action_space: raise ValueError(f'Invalid action {action}') t, self.timestep = (self.timestep, self.timestep + 1) done = t == self.episode_length return (t, t * 10, done, False, {})",True,self.timestep > self.episode_length,self.timestep > self.episode_length,0.6483337879180908 3568,"def step(self, action): if self.timestep is None: raise RuntimeError('Need to reset before first step().') if self.timestep > self.episode_length: raise RuntimeError('Episode is over. Need to step().') if: raise ValueError(f'Invalid action {action}') t, self.timestep = (self.timestep, self.timestep + 1) done = t == self.episode_length return (t, t * 10, done, False, {})",True,np.array(action) not in self.action_space,np.array(action) not in self.action_space,0.6453639268875122 3569,"def search_fed_by_id(fed_id): get = FEDERATION_BYFEDID.get(fed_id) if: return False else: return get result = False for Q in curr: if Q.fed_id == fed_id: result = Q.fed_id return result",False,get == None,get is None,0.6577320694923401 3570,"def search_fed_by_id(fed_id): get = FEDERATION_BYFEDID.get(fed_id) if get == None: return False else: return get result = False for Q in curr: if: result = Q.fed_id return result",True,Q.fed_id == fed_id,Q.fed_id == fed_id,0.64959716796875 3571,"def _makeObjectWithBBT(self): from ZPublisher.interfaces import UseTraversalDefault class _DummyResult: """""" """""" def __init__(self, tag): self.tag = tag class DummyObjectWithBBT(self._makeBasicObjectClass()): """""" Dummy class with __bobo_traverse__ """""" default = _DummyResult('Default') def __bobo_traverse__(self, REQUEST, name): if: return _DummyResult('Normal') elif name == 'default': raise UseTraversalDefault raise AttributeError(name) return DummyObjectWithBBT()",True,name == 'normal',name == 'normal',0.6576862335205078 3572,"def _makeObjectWithBBT(self): from ZPublisher.interfaces import UseTraversalDefault class _DummyResult: """""" """""" def __init__(self, tag): self.tag = tag class DummyObjectWithBBT(self._makeBasicObjectClass()): """""" Dummy class with __bobo_traverse__ """""" default = _DummyResult('Default') def __bobo_traverse__(self, REQUEST, name): if name == 'normal': return _DummyResult('Normal') elif: raise UseTraversalDefault raise AttributeError(name) return DummyObjectWithBBT()",True,name == 'default',name == 'default',0.6564491391181946 3573,"def get_organization_courses(organization_id): """""" Client API operation adapter/wrapper """""" if: return [] from organizations import api as organizations_api return organizations_api.get_organization_courses(organization_id)",False,not organizations_enabled(),not organization_id,0.6494138240814209 3574,"def parse(self, req): if: raise UnknownError('non-JSON returned by Atlas metadata server instance at {0}:{1}'.format(self.host, self.port)) _ = json.loads(req.content) status = self.get_key(_, 'Status') if status == 'ACTIVE': pass elif self.high_availability and status == 'PASSIVE': pass elif status in ('BECOMING_ACTIVE', 'BECOMING_PASSIVE'): self.warning() else: self.critical() return status",False,not isJson(req.content),not req.content,0.6477764844894409 3575,"def parse(self, req): if not isJson(req.content): raise UnknownError('non-JSON returned by Atlas metadata server instance at {0}:{1}'.format(self.host, self.port)) _ = json.loads(req.content) status = self.get_key(_, 'Status') if: pass elif self.high_availability and status == 'PASSIVE': pass elif status in ('BECOMING_ACTIVE', 'BECOMING_PASSIVE'): self.warning() else: self.critical() return status",False,status == 'ACTIVE',self.low_availability and status == 'PASSIVE',0.6539421081542969 3576,"def parse(self, req): if not isJson(req.content): raise UnknownError('non-JSON returned by Atlas metadata server instance at {0}:{1}'.format(self.host, self.port)) _ = json.loads(req.content) status = self.get_key(_, 'Status') if status == 'ACTIVE': pass elif: pass elif status in ('BECOMING_ACTIVE', 'BECOMING_PASSIVE'): self.warning() else: self.critical() return status",False,self.high_availability and status == 'PASSIVE',status == 'PASSIVE',0.6457910537719727 3577,"def parse(self, req): if not isJson(req.content): raise UnknownError('non-JSON returned by Atlas metadata server instance at {0}:{1}'.format(self.host, self.port)) _ = json.loads(req.content) status = self.get_key(_, 'Status') if status == 'ACTIVE': pass elif self.high_availability and status == 'PASSIVE': pass elif: self.warning() else: self.critical() return status",False,"status in ('BECOMING_ACTIVE', 'BECOMING_PASSIVE')",self.low_availability and status == 'WARNING',0.6417731642723083 3578,"def _new_conn(self): """""" Establish a socket connection and set nodelay settings on it. :return: New socket connection. """""" extra_kw = {} if: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: conn = connection.create_connection((self.host, self.port), self.timeout, **extra_kw) except SocketTimeout as e: raise ConnectTimeoutError(self, 'Connection to %s timed out. (connect timeout=%s)' % (self.host, self.timeout)) except SocketError as e: raise NewConnectionError(self, 'Failed to establish a new connection: %s' % e) return conn",True,self.source_address,self.source_address,0.6524739265441895 3579,"def _new_conn(self): """""" Establish a socket connection and set nodelay settings on it. :return: New socket connection. """""" extra_kw = {} if self.source_address: extra_kw['source_address'] = self.source_address if: extra_kw['socket_options'] = self.socket_options try: conn = connection.create_connection((self.host, self.port), self.timeout, **extra_kw) except SocketTimeout as e: raise ConnectTimeoutError(self, 'Connection to %s timed out. (connect timeout=%s)' % (self.host, self.timeout)) except SocketError as e: raise NewConnectionError(self, 'Failed to establish a new connection: %s' % e) return conn",True,self.socket_options,self.socket_options,0.6547433137893677 3580,"def start(self, id, machine_type, cluster_id=None, shutdown_timeout=None, is_preemptible=None, tags=None): """"""Start existing notebook :param str|int id: :param str machine_type: :param str cluster_id: :param str name: :param int shutdown_timeout: :param bool is_preemptible: :param list[str] tags: List of tags :return: Notebook ID :rtype str: """""" notebook = models.NotebookStart(notebook_id=id, machine_type=machine_type, cluster_id=cluster_id, shutdown_timeout=shutdown_timeout, is_preemptible=is_preemptible) repository = self.build_repository(repositories.StartNotebook) handle = repository.start(notebook) if: self.add_tags(entity_id=handle, tags=tags) return handle",True,tags,tags,0.662625789642334 3581,"def setTimeout(self, ms): if: self._timeout = None else: self._timeout = ms / 1000.0 if self.handle is not None: self.handle.settimeout(self._timeout)",False,ms is None,ms == 0,0.654640257358551 3582,"def setTimeout(self, ms): if ms is None: self._timeout = None else: self._timeout = ms / 1000.0 if: self.handle.settimeout(self._timeout)",False,self.handle is not None,self._timeout > 0,0.6474205851554871 3583,"def get_queryset(self): if: return Sponsorship.objects.select_related('sponsor').all() return self.request.user.sponsorships.select_related('sponsor')",True,self.request.user.is_superuser,self.request.user.is_superuser,0.6456594467163086 3584,"def view_dashboard(webdriver, datasets): dashboard_page = Dashboard(webdriver) def measure(): dashboard_page.go_to() dashboard_page.wait_for_page_loaded() measure_dom_requests(webdriver, interaction='selenium_view_dashboard') if: measure_browser_navi_metrics(webdriver, datasets, expected_metrics=browser_metrics['selenium_view_dashboard']) measure()",True,CONFLUENCE_SETTINGS.extended_metrics,CONFLUENCE_SETTINGS.extended_metrics,0.6449378728866577 3585,"def cb_isCopyable(self): if: return 0 if not self.cb_userHasCopyOrMovePermission(): return 0 return 1",False,"not (hasattr(self, '_canCopy') and self._canCopy(0))",not self.cb_userHasCopyOrMovePermission(),0.6487075090408325 3586,"def cb_isCopyable(self): if not (hasattr(self, '_canCopy') and self._canCopy(0)): return 0 if: return 0 return 1",False,not self.cb_userHasCopyOrMovePermission(),not self.hasCopyable(),0.6446583271026611 3587,"def _validate_indexer(self, form, key, kind): """""" If we are positional indexer, validate that we have appropriate typed bounds must be an integer. """""" assert kind in ['ix', 'loc', 'getitem', 'iloc'] if: pass elif is_integer(key): pass elif kind in ['iloc', 'getitem']: self._invalid_indexer(form, key) return key",False,key is None,is_positional(key),0.662952721118927 3588,"def _validate_indexer(self, form, key, kind): """""" If we are positional indexer, validate that we have appropriate typed bounds must be an integer. """""" assert kind in ['ix', 'loc', 'getitem', 'iloc'] if key is None: pass elif: pass elif kind in ['iloc', 'getitem']: self._invalid_indexer(form, key) return key",False,is_integer(key),"kind in ['loc', 'loc']",0.6512137651443481 3589,"def _validate_indexer(self, form, key, kind): """""" If we are positional indexer, validate that we have appropriate typed bounds must be an integer. """""" assert kind in ['ix', 'loc', 'getitem', 'iloc'] if key is None: pass elif is_integer(key): pass elif: self._invalid_indexer(form, key) return key",False,"kind in ['iloc', 'getitem']",kind == 'positional',0.6505460739135742 3590,"def _get_bg_dark(self, opposite=False): theme_style = self._get_theme_style(opposite) if: return get_color_from_hex(colors['Light']['AppBar']) elif theme_style == 'Dark': return get_color_from_hex(colors['Dark']['AppBar'])",True,theme_style == 'Light',theme_style == 'Light',0.6538368463516235 3591,"def _get_bg_dark(self, opposite=False): theme_style = self._get_theme_style(opposite) if theme_style == 'Light': return get_color_from_hex(colors['Light']['AppBar']) elif: return get_color_from_hex(colors['Dark']['AppBar'])",True,theme_style == 'Dark',theme_style == 'Dark',0.6532330513000488 3592,"def suite_from_file(filename, expected): f = None try: f = open(os.path.join(os.path.dirname(__file__), filename)) uas = f.readlines() finally: if: f.close() suite = MobileDetectionFactory(uas=uas, expected=expected) return suite",True,f,f,0.6726229190826416 3593,"def to_dict(self) -> Dict[str, Any]: a_number = self.a_number field_dict: Dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.to_dict() field_dict.update({}) if: field_dict['a_number'] = a_number return field_dict",True,a_number is not UNSET,a_number is not UNSET,0.6519166827201843 3594,"def ProcessingInstruction(target, text=None): element = Element(ProcessingInstruction) element.text = target if: element.text = element.text +'' + text return element",True,text,text,0.6718730926513672 3595,"def append_data(self, samples_data): super().append_data(samples_data) if: self._buffer_full = True",False,self.idx == 0,not self._buffer_full,0.6522127389907837 3596,"def gobble(self, value, left): if: return (value[left:], 0) else: return ('', left - len(value))",False,left < len(value),len(value) > left,0.648518443107605 3597,"def __add__(self, other): """"""Implementation of + operator - returns C{L{And}}"""""" if: other = ParserElement.literalStringClass(other) if not isinstance(other, ParserElement): warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return And([self, other])",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6513049602508545 3598,"def __add__(self, other): """"""Implementation of + operator - returns C{L{And}}"""""" if isinstance(other, basestring): other = ParserElement.literalStringClass(other) if: warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return And([self, other])",True,"not isinstance(other, ParserElement)","not isinstance(other, ParserElement)",0.649897038936615 3599,"def close_wallet(self): if: self._walletdb_loop.stop() self._walletdb_loop = None self.wallet = None self.wallet_mutex.release()",True,self.wallet is not None,self.wallet is not None,0.673790454864502 3600,"def f(x): if: g = g1 else: g = g2 return g()",False,x,x == '1',0.6649773120880127 3601,"def __import_data__(chat_id, data): locks = data.get('locks', {}) for itemlock in locks: if: sql.update_lock(chat_id, itemlock, locked=True) elif itemlock in LOCK_CHAT_RESTRICTION: sql.update_restriction(chat_id, itemlock, locked=True) else: pass",True,itemlock in LOCK_TYPES,itemlock in LOCK_TYPES,0.6509253978729248 3602,"def __import_data__(chat_id, data): locks = data.get('locks', {}) for itemlock in locks: if itemlock in LOCK_TYPES: sql.update_lock(chat_id, itemlock, locked=True) elif: sql.update_restriction(chat_id, itemlock, locked=True) else: pass",True,itemlock in LOCK_CHAT_RESTRICTION,itemlock in LOCK_CHAT_RESTRICTION,0.6439691781997681 3603,"def check_sale_order_paid(self, cr, uid, ids, context=None): """"""Esta funcion la hacemos para verificar si toda la orden de venta fue pagada en el caso de 'pago antes de la entrega' porque el problema es el siguiente, de manera original openerp genera una factura que queda vinculada por el subflow avisando cuando fue pagada a la orden de venta, el problema es que en este caso tendriamos mas de una factura ligada, por eso el chequeo hay que hacerlo aparte """""" sale_order_obj = self.pool.get('sale.order') so_ids = sale_order_obj.search(cr, uid, [('invoice_ids', 'in', ids)], context=context) for so in sale_order_obj.browse(cr, uid, so_ids, context=context): if: so.signal_workflow('subflow.paid') return True",False,so.order_policy == 'prepaid' and so.invoiced,so.get_subflow_status() == 'subflow.paid',0.6433925628662109 3604,"def imsave(img, img_path): img = np.squeeze(img) if: img = img[:, :, [2, 1, 0]] cv2.imwrite(img_path, img)",True,img.ndim == 3,img.ndim == 3,0.6512410640716553 3605,"def _expand_vars(scheme, vars): res = {} if: vars = {} _extend_dict(vars, get_config_vars()) for key, value in _SCHEMES.items(scheme): if os.name in ('posix', 'nt'): value = os.path.expanduser(value) res[key] = os.path.normpath(_subst_vars(value, vars)) return res",True,vars is None,vars is None,0.6588072776794434 3606,"def _expand_vars(scheme, vars): res = {} if vars is None: vars = {} _extend_dict(vars, get_config_vars()) for key, value in _SCHEMES.items(scheme): if: value = os.path.expanduser(value) res[key] = os.path.normpath(_subst_vars(value, vars)) return res",False,"os.name in ('posix', 'nt')",key.startswith('user:'),0.6439158320426941 3607,"def assert_equal_with_expected_fixture(actual, fixture_filename, update_fixture=False): """"""Utility to check that actual is the same as the pre-generated fixture To update all fixtures automatically, pass --update-fixtures option when invoking pytest. """""" if: Path(os.path.dirname(fixture_filename)).mkdir(parents=True, exist_ok=True) with open(fixture_filename, 'w', encoding='utf-8') as f_handle: f_handle.write(actual) f_handle.write('\n') with open(fixture_filename, encoding='utf-8') as f_handle: expected = f_handle.read() assert actual.strip() == expected.strip()",True,update_fixture,update_fixture,0.6524592638015747 3608,"def _get_off_value(flag: FeatureFlag, reason: dict) -> EvaluationDetail: off_var = flag.off_variation if: return EvaluationDetail(None, None, reason) return _get_variation(flag, off_var, reason)",True,off_var is None,off_var is None,0.6541421413421631 3609,"def _create_fc(num_features, num_classes, use_conv=False): if: fc = nn.Identity() elif use_conv: fc = nn.Conv2d(num_features, num_classes, 1, bias=True) else: fc = Linear(num_features, num_classes, bias=True) return fc",True,num_classes <= 0,num_classes <= 0,0.6545335054397583 3610,"def _create_fc(num_features, num_classes, use_conv=False): if num_classes <= 0: fc = nn.Identity() elif: fc = nn.Conv2d(num_features, num_classes, 1, bias=True) else: fc = Linear(num_features, num_classes, bias=True) return fc",True,use_conv,use_conv,0.6607420444488525 3611,"def schedule_deletion_key(self, key, pending_days, **params): """"""Schedule a key deletion :param key: key id or an instance of :class:`~openstack.kms.v1.key.Key` :param pending_days: Pending days before deletion, allow 7 to 1096 :param dict kwargs: Keyword arguments which will be used to schedule a key deletion. sequence is allowed. :rtype: :class:`~openstack.kms.v1.key.Key` """""" params.update({'pending_days': pending_days}) if: key_obj = key else: params.update({'key_id': key}) key_obj = _key.Key.new(**params) return key_obj.schedule_deletion(self._session, **params)",True,"isinstance(key, _key.Key)","isinstance(key, _key.Key)",0.6466625928878784 3612,"def replace_inline_objects(layout, fs): if: return for i, layout_object in enumerate(layout.fields): if isinstance(layout_object, Inline) and layout_object.model in fs: layout.fields[i] = fs.pop(layout_object.model) elif hasattr(layout_object, 'get_field_names'): replace_inline_objects(layout_object, fs)",False,not fs,not layout,0.6590580940246582 3613,"def replace_inline_objects(layout, fs): if not fs: return for i, layout_object in enumerate(layout.fields): if: layout.fields[i] = fs.pop(layout_object.model) elif hasattr(layout_object, 'get_field_names'): replace_inline_objects(layout_object, fs)",False,"isinstance(layout_object, Inline) and layout_object.model in fs","isinstance(layout_object, BaseModel)",0.6455196142196655 3614,"def replace_inline_objects(layout, fs): if not fs: return for i, layout_object in enumerate(layout.fields): if isinstance(layout_object, Inline) and layout_object.model in fs: layout.fields[i] = fs.pop(layout_object.model) elif: replace_inline_objects(layout_object, fs)",False,"hasattr(layout_object, 'get_field_names')","isinstance(layout_object, File)",0.6444813013076782 3615,"def alignment_details(a, b): nmatch = 0 nmismatch = 0 ngaps = 0 assert len(a) == len(b) l = len(a) for i in range(l): if: nmatch += 1 elif a[i] == '-' or b[i] == '-': ngaps += 1 else: nmismatch += 1 pctid = 100.0 * nmatch / l return (pctid, nmismatch, ngaps)",False,a[i] == b[i],a[i] == '-' and b[i] == '-',0.6502428650856018 3616,"def alignment_details(a, b): nmatch = 0 nmismatch = 0 ngaps = 0 assert len(a) == len(b) l = len(a) for i in range(l): if a[i] == b[i]: nmatch += 1 elif: ngaps += 1 else: nmismatch += 1 pctid = 100.0 * nmatch / l return (pctid, nmismatch, ngaps)",False,a[i] == '-' or b[i] == '-',a[i] > b[i],0.6474218964576721 3617,"def get_spendbundle(self, bundle_hash: bytes32) -> Optional[SpendBundle]: """"""Returns a full SpendBundle if it's inside one the mempools"""""" if: return self.mempool.spends[bundle_hash].spend_bundle return None",False,bundle_hash in self.mempool.spends,self.mempool,0.6498591303825378 3618,"@abstractmethod def __call__(self, estimator, X, y, sample_weight=None): if: warnings.warn(self._deprecation_msg, category=DeprecationWarning, stacklevel=2)",True,self._deprecation_msg is not None,self._deprecation_msg is not None,0.6497873663902283 3619,"def url_to_filename(url, etag=None): """""" Convert `url` into a hashed filename in a repeatable way. If `etag` is specified, append its hash to the URL's, delimited by a period. """""" url_bytes = url.encode('utf-8') url_hash = sha256(url_bytes) filename = url_hash.hexdigest() if: etag_bytes = etag.encode('utf-8') etag_hash = sha256(etag_bytes) filename += '.' + etag_hash.hexdigest() return filename",True,etag,etag,0.6827841997146606 3620,"def recv_getBlockedBuddyMembers(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getBlockedBuddyMembers_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'getBlockedBuddyMembers failed: unknown result')",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.6535661220550537 3621,"def recv_getBlockedBuddyMembers(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getBlockedBuddyMembers_result() result.read(iprot) iprot.readMessageEnd() if: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'getBlockedBuddyMembers failed: unknown result')",True,result.success is not None,result.success is not None,0.649708092212677 3622,"def recv_getBlockedBuddyMembers(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = getBlockedBuddyMembers_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'getBlockedBuddyMembers failed: unknown result')",True,result.e is not None,result.e is not None,0.651020884513855 3623,"def _get_compressor_options(self, side: str, agreed_parameters: Dict[str, Any], compression_options: Optional[Dict[str, Any]]=None) -> Dict[str, Any]: """"""Converts a websocket agreed_parameters set to keyword arguments for our compressor objects. """""" options = dict(persistent=side + '_no_context_takeover' not in agreed_parameters) wbits_header = agreed_parameters.get(side + '_max_window_bits', None) if: options['max_wbits'] = zlib.MAX_WBITS else: options['max_wbits'] = int(wbits_header) options['compression_options'] = compression_options return options",True,wbits_header is None,wbits_header is None,0.6501567959785461 3624,"@property def is_nonterminal(self): """""" Here we use a simple but not robust method to judge whether self is a nonterminal action (Select), or a terminal action (C/T) :return: """""" if: return True else: return False",False,"isinstance(self.ins_id, int)",self._current.terminal and self._current.terminal.is_terminal(),0.6437879800796509 3625,"def take_from_pile(self, pile_group: str, given_amount): """""" Take the amount from a specific pile group and returns the rest of the original needed amount. Parameters ---------- pile_group : str the name of the pile group given_amount : int|float the amount which should be consumed Returns ---------- remaining_amount : int|float the amount which could not be consumed from the given pile group """""" if: return given_amount remaining_amount = given_amount if self.pile[pile_group] >= given_amount: self.pile[pile_group] -= given_amount self.total_amount -= given_amount remaining_amount = 0 elif self.pile[pile_group] > 0: remaining_amount = given_amount - self.pile[pile_group] self.total_amount -= self.pile[pile_group] self.pile[pile_group] = 0 return remaining_amount",False,given_amount == 0,given_amount is None,0.6540267467498779 3626,"def take_from_pile(self, pile_group: str, given_amount): """""" Take the amount from a specific pile group and returns the rest of the original needed amount. Parameters ---------- pile_group : str the name of the pile group given_amount : int|float the amount which should be consumed Returns ---------- remaining_amount : int|float the amount which could not be consumed from the given pile group """""" if given_amount == 0: return given_amount remaining_amount = given_amount if: self.pile[pile_group] -= given_amount self.total_amount -= given_amount remaining_amount = 0 elif self.pile[pile_group] > 0: remaining_amount = given_amount - self.pile[pile_group] self.total_amount -= self.pile[pile_group] self.pile[pile_group] = 0 return remaining_amount",False,self.pile[pile_group] >= given_amount,self.pile[pile_group] < given_amount,0.6443711519241333 3627,"def take_from_pile(self, pile_group: str, given_amount): """""" Take the amount from a specific pile group and returns the rest of the original needed amount. Parameters ---------- pile_group : str the name of the pile group given_amount : int|float the amount which should be consumed Returns ---------- remaining_amount : int|float the amount which could not be consumed from the given pile group """""" if given_amount == 0: return given_amount remaining_amount = given_amount if self.pile[pile_group] >= given_amount: self.pile[pile_group] -= given_amount self.total_amount -= given_amount remaining_amount = 0 elif: remaining_amount = given_amount - self.pile[pile_group] self.total_amount -= self.pile[pile_group] self.pile[pile_group] = 0 return remaining_amount",False,self.pile[pile_group] > 0,self.pile[pile_group] < given_amount,0.6450752019882202 3628,"def cast_any_ptr(EXPECTED_TYPE, ptr): PTRTYPE = lltype.typeOf(ptr) if: return ptr elif EXPECTED_TYPE == WeakRefPtr: return cast_ptr_to_weakrefptr(ptr) elif PTRTYPE == WeakRefPtr: ptr = cast_weakrefptr_to_ptr(None, ptr) return cast_any_ptr(EXPECTED_TYPE, ptr) elif isinstance(EXPECTED_TYPE.TO, lltype.OpaqueType) or isinstance(PTRTYPE.TO, lltype.OpaqueType): return lltype.cast_opaque_ptr(EXPECTED_TYPE, ptr) else: return lltype.cast_pointer(EXPECTED_TYPE, ptr)",False,PTRTYPE == EXPECTED_TYPE,EXPECTED_TYPE == lltype.ValueType,0.6539304256439209 3629,"def cast_any_ptr(EXPECTED_TYPE, ptr): PTRTYPE = lltype.typeOf(ptr) if PTRTYPE == EXPECTED_TYPE: return ptr elif: return cast_ptr_to_weakrefptr(ptr) elif PTRTYPE == WeakRefPtr: ptr = cast_weakrefptr_to_ptr(None, ptr) return cast_any_ptr(EXPECTED_TYPE, ptr) elif isinstance(EXPECTED_TYPE.TO, lltype.OpaqueType) or isinstance(PTRTYPE.TO, lltype.OpaqueType): return lltype.cast_opaque_ptr(EXPECTED_TYPE, ptr) else: return lltype.cast_pointer(EXPECTED_TYPE, ptr)",False,EXPECTED_TYPE == WeakRefPtr,PTRTYPE == WeakReferencePtr,0.6572035551071167 3630,"def cast_any_ptr(EXPECTED_TYPE, ptr): PTRTYPE = lltype.typeOf(ptr) if PTRTYPE == EXPECTED_TYPE: return ptr elif EXPECTED_TYPE == WeakRefPtr: return cast_ptr_to_weakrefptr(ptr) elif: ptr = cast_weakrefptr_to_ptr(None, ptr) return cast_any_ptr(EXPECTED_TYPE, ptr) elif isinstance(EXPECTED_TYPE.TO, lltype.OpaqueType) or isinstance(PTRTYPE.TO, lltype.OpaqueType): return lltype.cast_opaque_ptr(EXPECTED_TYPE, ptr) else: return lltype.cast_pointer(EXPECTED_TYPE, ptr)",False,PTRTYPE == WeakRefPtr,EXPECTED_TYPE == WeakRef,0.6624172925949097 3631,"def cast_any_ptr(EXPECTED_TYPE, ptr): PTRTYPE = lltype.typeOf(ptr) if PTRTYPE == EXPECTED_TYPE: return ptr elif EXPECTED_TYPE == WeakRefPtr: return cast_ptr_to_weakrefptr(ptr) elif PTRTYPE == WeakRefPtr: ptr = cast_weakrefptr_to_ptr(None, ptr) return cast_any_ptr(EXPECTED_TYPE, ptr) elif: return lltype.cast_opaque_ptr(EXPECTED_TYPE, ptr) else: return lltype.cast_pointer(EXPECTED_TYPE, ptr)",False,"isinstance(EXPECTED_TYPE.TO, lltype.OpaqueType) or isinstance(PTRTYPE.TO, lltype.OpaqueType)",PTRTYPE == OPaque_TYPE,0.6444809436798096 3632,"def get_order(self, byte_str): first_char = byte_str[0] if: return 94 * (first_char - 176) + byte_str[1] - 161 else: return -1",True,first_char >= 176,first_char >= 176,0.6502796411514282 3633,"def iddp_rid(eps, m, n, matvect): """""" Compute ID of a real matrix to a specified relative precision using random matrix-vector multiplication. :param eps: Relative precision. :type eps: float :param m: Matrix row dimension. :type m: int :param n: Matrix column dimension. :type n: int :param matvect: Function to apply the matrix transpose to a vector, with call signature `y = matvect(x)`, where `x` and `y` are the input and output vectors, respectively. :type matvect: function :return: Rank of ID. :rtype: int :return: Column index array. :rtype: :class:`numpy.ndarray` :return: Interpolation coefficients. :rtype: :class:`numpy.ndarray` """""" proj = np.empty(m + 1 + 2 * n * (min(m, n) + 1), order='F') k, idx, proj, ier = _id.iddp_rid(eps, m, n, matvect, proj) if: raise _RETCODE_ERROR proj = proj[:k * (n - k)].reshape((k, n - k), order='F') return (k, idx, proj)",True,ier != 0,ier != 0,0.657044529914856 3634,"def load_long_binput(self): i, = unpack(': raise ValueError('negative LONG_BINPUT argument') self.memo[i] = self.stack[-1]",False,i > maxsize,i & 128,0.6851222515106201 3635,"@classmethod def from_config(cls, cfg=None): if: cfg = OmegaConf.create() image_size = cfg.get('image_size', 224) mean = cfg.get('mean', None) std = cfg.get('std', None) return cls(image_size=image_size, mean=mean, std=std)",True,cfg is None,cfg is None,0.6700588464736938 3636,"def exclude_from_weight_decay(name): if: return True bias_suffix = ['_bias', '_b', '.b_0'] for suffix in bias_suffix: if name.endswith(suffix): return True return False",True,name.find('layer_norm') > -1,name.find('layer_norm') > -1,0.644814133644104 3637,"def exclude_from_weight_decay(name): if name.find('layer_norm') > -1: return True bias_suffix = ['_bias', '_b', '.b_0'] for suffix in bias_suffix: if: return True return False",True,name.endswith(suffix),name.endswith(suffix),0.6435736417770386 3638,"def restart_task_callback(self, _func: AsyncFunc) -> Callable: def handler(task: asyncio.Task) -> None: name = task.get_name() + '-' + getattr(task.get_coro(), '__name__', '') if: return if asyncio.iscoroutinefunction(_func): task = asyncio.create_task(_func()) task.add_done_callback(self.restart_task_callback(_func)) logging.info('%s restarting', name) return handler",False,self.sigints > 1,self.restart_task_callback(name),0.6511092185974121 3639,"def restart_task_callback(self, _func: AsyncFunc) -> Callable: def handler(task: asyncio.Task) -> None: name = task.get_name() + '-' + getattr(task.get_coro(), '__name__', '') if self.sigints > 1: return if: task = asyncio.create_task(_func()) task.add_done_callback(self.restart_task_callback(_func)) logging.info('%s restarting', name) return handler",False,asyncio.iscoroutinefunction(_func),not self.running,0.6433484554290771 3640,"def visible(self, user=None): if: return True if user is None: return False if user.is_staff: return True if user == self.nominator: return True if self.nominee and user == self.nominee.user: return True return False",False,self.accepted and self.approved and (not self.election.nominations_open_at),self.nominator is None,0.6442563533782959 3641,"def visible(self, user=None): if self.accepted and self.approved and (not self.election.nominations_open_at): return True if: return False if user.is_staff: return True if user == self.nominator: return True if self.nominee and user == self.nominee.user: return True return False",False,user is None,not user,0.6534185409545898 3642,"def visible(self, user=None): if self.accepted and self.approved and (not self.election.nominations_open_at): return True if user is None: return False if: return True if user == self.nominator: return True if self.nominee and user == self.nominee.user: return True return False",False,user.is_staff,self.accepted and user not in self.election.nominations_open_at,0.6456810235977173 3643,"def visible(self, user=None): if self.accepted and self.approved and (not self.election.nominations_open_at): return True if user is None: return False if user.is_staff: return True if: return True if self.nominee and user == self.nominee.user: return True return False",False,user == self.nominator,not self.nominee,0.6502575278282166 3644,"def visible(self, user=None): if self.accepted and self.approved and (not self.election.nominations_open_at): return True if user is None: return False if user.is_staff: return True if user == self.nominator: return True if: return True return False",False,self.nominee and user == self.nominee.user,not user.nominator.has_staff_access_token(),0.6461234092712402 3645,"@run_async def help_connect_chat(update, context): args = context.args if: send_message(update.effective_message, 'PM me with that command to get help.') return else: send_message(update.effective_message, CONN_HELP, parse_mode='markdown')",False,update.effective_message.chat.type != 'private',args.command is None,0.6443170309066772 3646,"def get_position_type(position_data: PositionData, position: Position) -> Optional[str]: """"""Gets the type of the cell in UPPERCASE, interpreted as the intestinal organoid cell type."""""" type = position_data.get_position_data(position, 'type') if: return None return type.upper()",True,type is None,type is None,0.6623108983039856 3647,"def __post_init__(self): if: return initial_copy_offsets = set() for i, byte in enumerate(self.input_utterance): if byte & 128 == 0 or byte & 192 == 192: initial_copy_offsets.add(i) self.initial_copy_offsets = frozenset(initial_copy_offsets)",False,"not isinstance(self.input_utterance, bytes)","not hasattr(self, 'input_utterance')",0.64739990234375 3648,"def __post_init__(self): if not isinstance(self.input_utterance, bytes): return initial_copy_offsets = set() for i, byte in enumerate(self.input_utterance): if: initial_copy_offsets.add(i) self.initial_copy_offsets = frozenset(initial_copy_offsets)",False,byte & 128 == 0 or byte & 192 == 192,"byte in (b'\x00', u'\x00')",0.6590814590454102 3649,"def project(hidden_states, proj_layer, key_value_states, past_key_value): """""" projects hidden states correctly to key/query states """""" if: hidden_states = shape(proj_layer(hidden_states)) elif is_None(past_key_value): hidden_states = shape(proj_layer(key_value_states)) if is_not_None(past_key_value): if is_None(key_value_states): hidden_states = torch.cat([past_key_value, hidden_states], dim=2) else: hidden_states = past_key_value return hidden_states",True,is_None(key_value_states),is_None(key_value_states),0.6443248987197876 3650,"def project(hidden_states, proj_layer, key_value_states, past_key_value): """""" projects hidden states correctly to key/query states """""" if is_None(key_value_states): hidden_states = shape(proj_layer(hidden_states)) elif is_None(past_key_value): hidden_states = shape(proj_layer(key_value_states)) if: if is_None(key_value_states): hidden_states = torch.cat([past_key_value, hidden_states], dim=2) else: hidden_states = past_key_value return hidden_states",False,is_not_None(past_key_value),past_key_value is not None,0.6433552503585815 3651,"def project(hidden_states, proj_layer, key_value_states, past_key_value): """""" projects hidden states correctly to key/query states """""" if is_None(key_value_states): hidden_states = shape(proj_layer(hidden_states)) elif: hidden_states = shape(proj_layer(key_value_states)) if is_not_None(past_key_value): if is_None(key_value_states): hidden_states = torch.cat([past_key_value, hidden_states], dim=2) else: hidden_states = past_key_value return hidden_states",True,is_None(past_key_value),is_None(past_key_value),0.644720733165741 3652,"def project(hidden_states, proj_layer, key_value_states, past_key_value): """""" projects hidden states correctly to key/query states """""" if is_None(key_value_states): hidden_states = shape(proj_layer(hidden_states)) elif is_None(past_key_value): hidden_states = shape(proj_layer(key_value_states)) if is_not_None(past_key_value): if: hidden_states = torch.cat([past_key_value, hidden_states], dim=2) else: hidden_states = past_key_value return hidden_states",False,is_None(key_value_states),past_key_value is None,0.6437866687774658 3653,"def __eq__(self, other): """""" A lattice is considered to be equal to another if the internal matrix representation satisfies np.allclose(matrix1, matrix2) to be True. """""" if: return False return np.allclose(self._matrix, other._matrix)",False,other is None,type(other) != type(self),0.655837893486023 3654,"def create_encoder(enc_type, output_stride=8, pretrained=True): if: return resnet(enc_type, pretrained) elif enc_type.startswith('resnext'): return resnext(enc_type, pretrained) elif enc_type.startswith('se'): return se_net(enc_type, pretrained) elif enc_type == 'xception65': return Xception65(output_stride) elif enc_type =='mobilenetv2': return MobileNetV2(pretrained) else: raise NotImplementedError",False,enc_type.startswith('resnet'),enc_type.startswith('res'),0.6432164311408997 3655,"def create_encoder(enc_type, output_stride=8, pretrained=True): if enc_type.startswith('resnet'): return resnet(enc_type, pretrained) elif: return resnext(enc_type, pretrained) elif enc_type.startswith('se'): return se_net(enc_type, pretrained) elif enc_type == 'xception65': return Xception65(output_stride) elif enc_type =='mobilenetv2': return MobileNetV2(pretrained) else: raise NotImplementedError",True,enc_type.startswith('resnext'),enc_type.startswith('resnext'),0.6429550647735596 3656,"def create_encoder(enc_type, output_stride=8, pretrained=True): if enc_type.startswith('resnet'): return resnet(enc_type, pretrained) elif enc_type.startswith('resnext'): return resnext(enc_type, pretrained) elif: return se_net(enc_type, pretrained) elif enc_type == 'xception65': return Xception65(output_stride) elif enc_type =='mobilenetv2': return MobileNetV2(pretrained) else: raise NotImplementedError",False,enc_type.startswith('se'),enc_type == 'se_net',0.644889771938324 3657,"def create_encoder(enc_type, output_stride=8, pretrained=True): if enc_type.startswith('resnet'): return resnet(enc_type, pretrained) elif enc_type.startswith('resnext'): return resnext(enc_type, pretrained) elif enc_type.startswith('se'): return se_net(enc_type, pretrained) elif: return Xception65(output_stride) elif enc_type =='mobilenetv2': return MobileNetV2(pretrained) else: raise NotImplementedError",True,enc_type == 'xception65',enc_type == 'xception65',0.6477757692337036 3658,"def create_encoder(enc_type, output_stride=8, pretrained=True): if enc_type.startswith('resnet'): return resnet(enc_type, pretrained) elif enc_type.startswith('resnext'): return resnext(enc_type, pretrained) elif enc_type.startswith('se'): return se_net(enc_type, pretrained) elif enc_type == 'xception65': return Xception65(output_stride) elif: return MobileNetV2(pretrained) else: raise NotImplementedError",False,enc_type == 'mobilenetv2',enc_type == 'MobileNetV2',0.6453396081924438 3659,"def _detect_bom(input): """"""Return (bom_encoding, input), with any BOM removed from the input."""""" if: return (_UTF16LE, input[2:]) if input.startswith(b'\xfe\xff'): return (_UTF16BE, input[2:]) if input.startswith(b'\xef\xbb\xbf'): return (UTF8, input[3:]) return (None, input)",False,input.startswith(b'\xff\xfe'),input.startswith(b'\x8b\x08'),0.6454650163650513 3660,"def _detect_bom(input): """"""Return (bom_encoding, input), with any BOM removed from the input."""""" if input.startswith(b'\xff\xfe'): return (_UTF16LE, input[2:]) if: return (_UTF16BE, input[2:]) if input.startswith(b'\xef\xbb\xbf'): return (UTF8, input[3:]) return (None, input)",False,input.startswith(b'\xfe\xff'),input.startswith(b'\x00\x00\x00'),0.6470751166343689 3661,"def _detect_bom(input): """"""Return (bom_encoding, input), with any BOM removed from the input."""""" if input.startswith(b'\xff\xfe'): return (_UTF16LE, input[2:]) if input.startswith(b'\xfe\xff'): return (_UTF16BE, input[2:]) if: return (UTF8, input[3:]) return (None, input)",False,input.startswith(b'\xef\xbb\xbf'),input.startswith(b'\x00\x00'),0.640681266784668 3662,"def deserialize_key(key, dataType=None): d = json.loads(key) if: return int(d) elif dataType == 'list': return [x.strip() for x in d.split(',')] return d",True,dataType == 'int',dataType == 'int',0.6536798477172852 3663,"def deserialize_key(key, dataType=None): d = json.loads(key) if dataType == 'int': return int(d) elif: return [x.strip() for x in d.split(',')] return d",False,dataType == 'list',dataType == 'string',0.6525486707687378 3664,"def __init__(self, *args, **kwargs): if: kwargs['target_type'] = SEND_TO_TRACK super().__init__(*args, **kwargs)",False,not args,'target_type' in kwargs and kwargs['target_type'] == SEND_TO_TRACK,0.6699532270431519 3665,"def R_op(self, inputs, eval_points): if: return [None] return self.make_node(eval_points[0], *inputs[1:]).outputs",False,eval_points[0] is None,len(inputs) == 0 or inputs[0] == 0 or self.is_empty(eval_points),0.6504210233688354 3666,"@property def sqlalchemy_uri_decrypted(self): conn = sqla.engine.url.make_url(self.sqlalchemy_uri) if: conn.password = custom_password_store(conn) else: conn.password = self.password return str(conn)",False,custom_password_store,self.password is None,0.6502010226249695 3667,"def append_priority_rule(self, domain=None, category=None, sponsored_source=None, has_field=None, query_params_match=None, query_person_match=None): """"""Append a new priority rule for the records returned in the response. IMPORTANT: This method can be called multiple times per request for adding multiple priority rules, each call can be with only one argument and the order of the calls matter (the first rule added is the highest priority, the second is second priority etc). For example: >>> from osrframework.thirdparties.pipl_com.lib.search import SearchAPIRequest >>> from osrframework.thirdparties.pipl_com.lib import Phone >>> request = SearchAPIRequest('samplekey', username='eric123') >>> request.append_priority_rule(domain='linkedin') >>> request.append_priority_rule(has_field=Phone) In the response to the above request records from LinkedIn will be returned before records that aren't from LinkedIn and records with phone will be returned before records without phone. Please note that in case there are too many results for the query, adding priority rules to the request does not only affect the order of the records but can significantly improve the number of useful results; when you define which records interest you, you'll get records that would have otherwise be cut-off by the limit on the number of records per query. Args: domain -- str, for example ""linkedin.com"", ""linkedin"" is also possible and it'll match ""linkedin.*"". category -- str, any one of the categories defined in osrframework.thirdparties.pipl_com.lib.source.Source.categories. sponsored_source -- bool, True will bring the records that come from a sponsored source first and False will bring the non-sponsored records first. has_fields -- A field class from osrframework.thirdparties.pipl_com.lib.fields. For example: has",False,len(params) > 1,t.get_setting('priority_rule') is None,0.6457390785217285 3668,"def append_priority_rule(self, domain=None, category=None, sponsored_source=None, has_field=None, query_params_match=None, query_person_match=None): """"""Append a new priority rule for the records returned in the response. IMPORTANT: This method can be called multiple times per request for adding multiple priority rules, each call can be with only one argument and the order of the calls matter (the first rule added is the highest priority, the second is second priority etc). For example: >>> from osrframework.thirdparties.pipl_com.lib.search import SearchAPIRequest >>> from osrframework.thirdparties.pipl_com.lib import Phone >>> request = SearchAPIRequest('samplekey', username='eric123') >>> request.append_priority_rule(domain='linkedin') >>> request.append_priority_rule(has_field=Phone) In the response to the above request records from LinkedIn will be returned before records that aren't from LinkedIn and records with phone will be returned before records without phone. Please note that in case there are too many results for the query, adding priority rules to the request does not only affect the order of the records but can significantly improve the number of useful results; when you define which records interest you, you'll get records that would have otherwise be cut-off by the limit on the number of records per query. Args: domain -- str, for example ""linkedin.com"", ""linkedin"" is also possible and it'll match ""linkedin.*"". category -- str, any one of the categories defined in osrframework.thirdparties.pipl_com.lib.source.Source.categories. sponsored_source -- bool, True will bring the records that come from a sponsored source first and False will bring the non-sponsored records first. has_fields -- A field class from osrframework.thirdparties.pipl_com.lib.fields. For example: has",False,params,t.get_setting('priority_rule') is None,0.6664526462554932 3669,"def lowday(df, window): window = int(window) grouped = df.groupby('ts_code') ts_all = [] for name, group in grouped: if: ts_array = np_lowday(np.nan_to_num(group.values), len(group)) else: ts_array = np_lowday(np.nan_to_num(group.values), window) ts_series = group ts_series.values[:] = ts_array ts_all.append(ts_series) df = pd.concat(ts_all) return df",True,len(group) < window,len(group) < window,0.6477931141853333 3670,"@specialize.argtype(0) def do_pack_fastpath_maybe(fmtiter, value): try: do_pack_fastpath(fmtiter, value) except CannotWrite: if: raise ValueError('fastpath not taken :(') return False else: return True",False,not ALLOW_SLOWPATH,value,0.6483132839202881 3671,"def factory(*args_, **kwargs_): if: subclass = getSubclassFromModule_(CurrentSubclassModule_, ErrorType) if subclass is not None: return subclass(*args_, **kwargs_) if ErrorType.subclass: return ErrorType.subclass(*args_, **kwargs_) else: return ErrorType(*args_, **kwargs_)",True,CurrentSubclassModule_ is not None,CurrentSubclassModule_ is not None,0.6517500281333923 3672,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, ErrorType) if subclass is not None: return subclass(*args_, **kwargs_) if: return ErrorType.subclass(*args_, **kwargs_) else: return ErrorType(*args_, **kwargs_)",True,ErrorType.subclass,ErrorType.subclass,0.6648430824279785 3673,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, ErrorType) if: return subclass(*args_, **kwargs_) if ErrorType.subclass: return ErrorType.subclass(*args_, **kwargs_) else: return ErrorType(*args_, **kwargs_)",True,subclass is not None,subclass is not None,0.6660475134849548 3674,"def validate_api_response(schema, raw_response, request_method='get', raw_request=None): """""" Validate the response of an api call against a swagger schema. """""" request = None if: request = normalize_request(raw_request) response = None if raw_response is not None: response = normalize_response(raw_response, request=request) if response is not None: validate_response(response=response, request_method=request_method, schema=schema)",True,raw_request is not None,raw_request is not None,0.652461051940918 3675,"def validate_api_response(schema, raw_response, request_method='get', raw_request=None): """""" Validate the response of an api call against a swagger schema. """""" request = None if raw_request is not None: request = normalize_request(raw_request) response = None if: response = normalize_response(raw_response, request=request) if response is not None: validate_response(response=response, request_method=request_method, schema=schema)",True,raw_response is not None,raw_response is not None,0.6512972116470337 3676,"def validate_api_response(schema, raw_response, request_method='get', raw_request=None): """""" Validate the response of an api call against a swagger schema. """""" request = None if raw_request is not None: request = normalize_request(raw_request) response = None if raw_response is not None: response = normalize_response(raw_response, request=request) if: validate_response(response=response, request_method=request_method, schema=schema)",True,response is not None,response is not None,0.6513687372207642 3677,"def get_resource_definition_version(self, resource_definition_id: str, resource_definition_version_id: str) -> FakeResourceDefinitionVersion: if: raise IdNotFoundException('That resources definition does not exist.') if resource_definition_version_id not in self.resource_definition_versions[resource_definition_id]: raise VersionNotFoundException(f'Version {resource_definition_version_id} of Resource List Definition {resource_definition_id} does not exist.') return self.resource_definition_versions[resource_definition_id][resource_definition_version_id]",True,resource_definition_id not in self.resource_definition_versions,resource_definition_id not in self.resource_definition_versions,0.6519389748573303 3678,"def get_resource_definition_version(self, resource_definition_id: str, resource_definition_version_id: str) -> FakeResourceDefinitionVersion: if resource_definition_id not in self.resource_definition_versions: raise IdNotFoundException('That resources definition does not exist.') if: raise VersionNotFoundException(f'Version {resource_definition_version_id} of Resource List Definition {resource_definition_id} does not exist.') return self.resource_definition_versions[resource_definition_id][resource_definition_version_id]",True,resource_definition_version_id not in self.resource_definition_versions[resource_definition_id],resource_definition_version_id not in self.resource_definition_versions[resource_definition_id],0.6483204364776611 3679,"def __iadd__(self, other): if: other = ParserElement._literalStringClass(other) return self.append(other)",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.649983286857605 3680,"@property def authenticated_safe(self): if: content = self['auth_safe']['content'] if isinstance(content, SignedData): content = content['content_info']['content'] self._authenticated_safe = AuthenticatedSafe.load(content.native) return self._authenticated_safe",False,self._authenticated_safe is None,not self._authenticated_safe,0.65299391746521 3681,"@property def authenticated_safe(self): if self._authenticated_safe is None: content = self['auth_safe']['content'] if: content = content['content_info']['content'] self._authenticated_safe = AuthenticatedSafe.load(content.native) return self._authenticated_safe",False,"isinstance(content, SignedData)",content and 'content_info' in content,0.653107762336731 3682,"@classmethod def taropen(cls, name, mode='r', fileobj=None, **kwargs): """"""Open uncompressed tar archive name for reading or writing. """""" if: raise ValueError(""mode must be 'r', 'a' or 'w'"") return cls(name, mode, fileobj, **kwargs)",False,len(mode) > 1 or mode not in 'raw',"mode not in ['r', 'a', 'w']",0.6498502492904663 3683,"def __init__(self, name, parent=None, **kw): """"""When initialized, it copies the class defaults; then takes a copy of the attributes of the parent if any. All the work is done in init - styles should cost little to use at runtime."""""" assert 'name' not in self.defaults, ""Class Defaults may not contain a 'name' attribute"" assert 'parent' not in self.defaults, ""Class Defaults may not contain a 'parent' attribute"" if: assert parent.__class__ == self.__class__, 'Parent style %s must have same class as new style %s' % (parent.__class__.__name__, self.__class__.__name__) self.name = name self.parent = parent self.__dict__.update(self.defaults) self.refresh() self._setKwds(**kw)",False,parent,parent is not None,0.692046046257019 3684,"def get_query_set(self): qs = super(QueryManager, self).get_query_set().filter(self._q) if: return qs.order_by(*self._order_by) return qs",False,"hasattr(self, '_order_by')",self._order_by,0.6467907428741455 3685,"def replaced_consumer(word, idx): if: replaced.update([word])",True,idx == dict.unk_index and word != dict.unk_word,idx == dict.unk_index and word != dict.unk_word,0.6470600366592407 3686,"def getTransport(self, trans, compresslevel=9): """"""Wrap a transport, trans, with the TZlibTransport compressed transport class, returning a new transport to the caller. @param compresslevel: The zlib compression level, ranging from 0 (no compression) to 9 (best compression). Defaults to 9. @type compresslevel: int This method returns a TZlibTransport which wraps the passed C{trans} TTransport derived instance. """""" if: return self._last_z ztrans = TZlibTransport(trans, compresslevel) self._last_trans = trans self._last_z = ztrans return ztrans",False,trans == self._last_trans,self._last_z is not None,0.6486721634864807 3687,"def assert_doc_style(name, member, doc, args, exceptions): lines = doc.split('\n') first_line = lines[0] if: exceptions.append((""{} first line should end with a '.'"".format(member), member.__module__))",False,first_line != '' and first_line.strip()[-1] != '.',member and first_line.endswith('.'),0.6463074088096619 3688,"def confirm_legacy_keyring_cleanup(self, migration_results) -> bool: """""" Ask the user whether we should remove keys from the legacy keyring. In the case of CryptFileKeyring, we can't just delete the file because other python processes might use the same keyring file. """""" keyring_name: str = '' legacy_keyring_type: Type = type(migration_results.legacy_keyring) if: keyring_name = str(migration_results.legacy_keyring.file_path) elif legacy_keyring_type is MacKeyring: keyring_name ='macOS Keychain' elif legacy_keyring_type is WinKeyring: keyring_name = 'Windows Credential Manager' prompt = 'Remove keys from old keyring' if len(keyring_name) > 0: prompt += f' ({keyring_name})?' else: prompt += '?' prompt +='(y/n) ' return prompt_yes_no(prompt)",True,legacy_keyring_type is CryptFileKeyring,legacy_keyring_type is CryptFileKeyring,0.6465514898300171 3689,"def confirm_legacy_keyring_cleanup(self, migration_results) -> bool: """""" Ask the user whether we should remove keys from the legacy keyring. In the case of CryptFileKeyring, we can't just delete the file because other python processes might use the same keyring file. """""" keyring_name: str = '' legacy_keyring_type: Type = type(migration_results.legacy_keyring) if legacy_keyring_type is CryptFileKeyring: keyring_name = str(migration_results.legacy_keyring.file_path) elif legacy_keyring_type is MacKeyring: keyring_name ='macOS Keychain' elif legacy_keyring_type is WinKeyring: keyring_name = 'Windows Credential Manager' prompt = 'Remove keys from old keyring' if: prompt += f' ({keyring_name})?' else: prompt += '?' prompt +='(y/n) ' return prompt_yes_no(prompt)",False,len(keyring_name) > 0,keyring_name,0.6459912061691284 3690,"def confirm_legacy_keyring_cleanup(self, migration_results) -> bool: """""" Ask the user whether we should remove keys from the legacy keyring. In the case of CryptFileKeyring, we can't just delete the file because other python processes might use the same keyring file. """""" keyring_name: str = '' legacy_keyring_type: Type = type(migration_results.legacy_keyring) if legacy_keyring_type is CryptFileKeyring: keyring_name = str(migration_results.legacy_keyring.file_path) elif: keyring_name ='macOS Keychain' elif legacy_keyring_type is WinKeyring: keyring_name = 'Windows Credential Manager' prompt = 'Remove keys from old keyring' if len(keyring_name) > 0: prompt += f' ({keyring_name})?' else: prompt += '?' prompt +='(y/n) ' return prompt_yes_no(prompt)",False,legacy_keyring_type is MacKeyring,legacy_keyring_type is MacOSKeyring,0.6432498693466187 3691,"def confirm_legacy_keyring_cleanup(self, migration_results) -> bool: """""" Ask the user whether we should remove keys from the legacy keyring. In the case of CryptFileKeyring, we can't just delete the file because other python processes might use the same keyring file. """""" keyring_name: str = '' legacy_keyring_type: Type = type(migration_results.legacy_keyring) if legacy_keyring_type is CryptFileKeyring: keyring_name = str(migration_results.legacy_keyring.file_path) elif legacy_keyring_type is MacKeyring: keyring_name ='macOS Keychain' elif: keyring_name = 'Windows Credential Manager' prompt = 'Remove keys from old keyring' if len(keyring_name) > 0: prompt += f' ({keyring_name})?' else: prompt += '?' prompt +='(y/n) ' return prompt_yes_no(prompt)",False,legacy_keyring_type is WinKeyring,legacy_keyring_type is WindowsCredentialManager,0.6472768187522888 3692,"def record_cuda_stream(batch): if: batch.record_stream(torch.cuda.current_stream()) elif isinstance(batch, list) or isinstance(batch, tuple): for t in batch: record_cuda_stream(t) elif isinstance(batch, dict): for t in batch.values(): record_cuda_stream(t) else: pass",False,"isinstance(batch, torch.Tensor)","isinstance(batch, torch.cuda.DataParallel)",0.6447043418884277 3693,"def record_cuda_stream(batch): if isinstance(batch, torch.Tensor): batch.record_stream(torch.cuda.current_stream()) elif: for t in batch: record_cuda_stream(t) elif isinstance(batch, dict): for t in batch.values(): record_cuda_stream(t) else: pass",False,"isinstance(batch, list) or isinstance(batch, tuple)","isinstance(batch, list)",0.6406697034835815 3694,"def record_cuda_stream(batch): if isinstance(batch, torch.Tensor): batch.record_stream(torch.cuda.current_stream()) elif isinstance(batch, list) or isinstance(batch, tuple): for t in batch: record_cuda_stream(t) elif: for t in batch.values(): record_cuda_stream(t) else: pass",True,"isinstance(batch, dict)","isinstance(batch, dict)",0.6440598368644714 3695,"def find_module(self, fullname, path=None): """""" Return self when fullname starts with root_name and the target module is one vendored through this importer. """""" root, base, target = fullname.partition(self.root_name + '.') if: return if not any(map(target.startswith, self.vendored_names)): return return self",False,root,target is None,0.6632030010223389 3696,"def find_module(self, fullname, path=None): """""" Return self when fullname starts with root_name and the target module is one vendored through this importer. """""" root, base, target = fullname.partition(self.root_name + '.') if root: return if: return return self",False,"not any(map(target.startswith, self.vendored_names))",target,0.6414666771888733 3697,"def get_output(self, train=False): if: return self.encoder.get_output(train) return self.decoder.get_output(train)",False,not train and (not self.output_reconstruction),self.mode == 'encoder',0.6449680328369141 3698,"def conv2d(input, weight, bias=None, stride=1, padding=0, dilation=1, groups=1): if: return _conv2d_gradfix(transpose=False, weight_shape=weight.shape, stride=stride, padding=padding, output_padding=0, dilation=dilation, groups=groups).apply(input, weight, bias) return torch.nn.functional.conv2d(input=input, weight=weight, bias=bias, stride=stride, padding=padding, dilation=dilation, groups=groups)",False,_should_use_custom_op(input),_needs_gradfix(input),0.6440956592559814 3699,"@classmethod def __build_error_message(class_object, type, code, data, originating_packet_data): icmp_packet = ICMP6() icmp_packet.set_type(type) icmp_packet.set_code(code) icmp_bytes = array.array('B', data).tostring() if: icmp_bytes += array.array('B', originating_packet_data).tostring() icmp_payload = Data() icmp_payload.set_data(icmp_bytes) icmp_packet.contains(icmp_payload) return icmp_packet",True,originating_packet_data is not None,originating_packet_data is not None,0.6496696472167969 3700,"def _qf_name(name, storage): if: return name return '{:s}:{:s}'.format(storage.upper(), name)",False,storage.lower() == 'ufs',storage is None,0.6483246088027954 3701,"def limit_clause(self, select): if: return '' elif select._offset: raise exc.InvalidRequestError('MaxDB does not support LIMIT with an offset.') else: return'\n LIMIT %s' % (select._limit,)",False,self.is_subquery(),select._limit is None,0.6486127376556396 3702,"def limit_clause(self, select): if self.is_subquery(): return '' elif: raise exc.InvalidRequestError('MaxDB does not support LIMIT with an offset.') else: return'\n LIMIT %s' % (select._limit,)",False,select._offset,select._limit is None,0.6552879810333252 3703,"def __setattr__(self, name, value): """"""Memberships are immutable, with the exception of last activity date. """""" creating_model = name == '_state' or self._state.adding if: current_value = getattr(self, name, None) if value == current_value: return elif current_value is not None: raise ImmutableMembershipFieldException(f""Field {name!r} shouldn't change from {current_value!r} to {value!r}"") super().__setattr__(name, value)",False,not creating_model and name in self.immutable_fields,creating_model,0.646111011505127 3704,"def __setattr__(self, name, value): """"""Memberships are immutable, with the exception of last activity date. """""" creating_model = name == '_state' or self._state.adding if not creating_model and name in self.immutable_fields: current_value = getattr(self, name, None) if: return elif current_value is not None: raise ImmutableMembershipFieldException(f""Field {name!r} shouldn't change from {current_value!r} to {value!r}"") super().__setattr__(name, value)",False,value == current_value,current_value is None and value is None,0.6514878273010254 3705,"def __setattr__(self, name, value): """"""Memberships are immutable, with the exception of last activity date. """""" creating_model = name == '_state' or self._state.adding if not creating_model and name in self.immutable_fields: current_value = getattr(self, name, None) if value == current_value: return elif: raise ImmutableMembershipFieldException(f""Field {name!r} shouldn't change from {current_value!r} to {value!r}"") super().__setattr__(name, value)",True,current_value is not None,current_value is not None,0.6491760611534119 3706,"def get_schema_from_template(ddb_template, logical_identifier): resource = ddb_template['Resources'].get(logical_identifier) if: raise KeyError('Unable to find resource with identifier %s', logical_identifier) return {k['KeyType']: k['AttributeName'] for k in resource['Properties']['KeySchema']}",True,not resource,not resource,0.6743624210357666 3707,"def load(self, root_dir: Optional[Path]) -> None: """"""Load internals of this config from the specified directory. :param root_dir: directory where to look for the matching config file """""" self._storage = {} self._loads((DATA_DIR / 'options.yaml').read_text()) if: user_path = _get_user_path(root_dir) if user_path.exists(): try: self._loads(user_path.read_text()) except ConfigError as ex: raise ConfigError(f'error loading {user_path}: {ex}') from ex self.changed.emit()",True,root_dir,root_dir,0.6537748575210571 3708,"def load(self, root_dir: Optional[Path]) -> None: """"""Load internals of this config from the specified directory. :param root_dir: directory where to look for the matching config file """""" self._storage = {} self._loads((DATA_DIR / 'options.yaml').read_text()) if root_dir: user_path = _get_user_path(root_dir) if: try: self._loads(user_path.read_text()) except ConfigError as ex: raise ConfigError(f'error loading {user_path}: {ex}') from ex self.changed.emit()",True,user_path.exists(),user_path.exists(),0.6517106890678406 3709,"def __exit__(self, ex_type, ex_val, tb): try: if: self.outer._notifier.remove_listener(self._listener) except: log.exception('Failed to remove listener from bus notifier')",False,self.outer._notifier,self._listener,0.649738609790802 3710,"def encode_lines(self, lines): """""" Encode a set of lines. All lines will be encoded together. """""" enc_lines = [] for line in lines: line = line.strip() if: return ['EMPTY', None] tokens = self.encode(line) enc_lines.append(' '.join(tokens)) return ['PASS', enc_lines]",False,len(line) == 0 and (not self.args.keep_empty),not line,0.6442418098449707 3711,"def __iter__(self): """"""Get an iterator over the file's lines. """""" while True: line = self.readline() if: break yield line",True,not line,not line,0.6551963090896606 3712,"def clearStackToTableBodyContext(self): while self.tree.openElements[-1].name not in ('tbody', 'tfoot', 'thead', 'html'): self.tree.openElements.pop() if: assert self.parser.innerHTML",True,self.tree.openElements[-1].name == 'html',self.tree.openElements[-1].name == 'html',0.6455088257789612 3713,"def recursive_average(obj, weight: torch.Tensor, distributed: bool=False): obj = recursive_sum(obj, weight, distributed) weight = weight.sum() if: torch.distributed.all_reduce(weight, op=ReduceOp.SUM) obj = recursive_divide(obj, weight) return (obj, weight)",True,distributed,distributed,0.6567447185516357 3714,"def build_x(args, *extra_args, **extra_kwargs): choice = getattr(args, registry_name, None) if: return None cls = REGISTRY[choice] if hasattr(cls, 'build_' + registry_name): builder = getattr(cls, 'build_' + registry_name) else: builder = cls set_defaults(args, cls) return builder(args, *extra_args, **extra_kwargs)",True,choice is None,choice is None,0.6583075523376465 3715,"def build_x(args, *extra_args, **extra_kwargs): choice = getattr(args, registry_name, None) if choice is None: return None cls = REGISTRY[choice] if: builder = getattr(cls, 'build_' + registry_name) else: builder = cls set_defaults(args, cls) return builder(args, *extra_args, **extra_kwargs)",False,"hasattr(cls, 'build_' + registry_name)","registry_name and hasattr(cls, 'build_' + registry_name)",0.6470337510108948 3716,"def buildChildren(self, child_, nodeName_): if: obj_ = docParamName.factory() obj_.build(child_) self.parametername.append(obj_)",True,child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'parametername',child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'parametername',0.6493064165115356 3717,"def handle_extra_path(self): if: return orig.install.handle_extra_path(self) self.path_file = None self.extra_dirs = ''",True,self.root or self.single_version_externally_managed,self.root or self.single_version_externally_managed,0.6462392807006836 3718,"def __parse_data(self, atom, data): pos = 0 while pos < atom.length - 8: length, name, flags = struct.unpack('>I4sI', data[pos:pos + 12]) if: raise MP4MetadataError('unexpected atom %r inside %r' % (name, atom.name)) yield (flags, data[pos + 16:pos + length]) pos += length",False,name != b'data',flags & 1 << 8 or flags & 1 << 8,0.6551677584648132 3719,"def stop(self): for stream in (sys.stdout, sys.stderr): if: stream.reset()",False,"hasattr(stream, 'reset')",stream.isatty() and stream.write() == 'done',0.6424166560173035 3720,"def get_server(zkclient, server_id, placement=False): """"""Return server object."""""" data = zkutils.get_default(zkclient, z.path.server(server_id), {}) if: placement_data = zkutils.get_default(zkclient, z.path.placement(server_id)) if placement_data: data.update(placement_data) return data",True,placement,placement,0.6779534220695496 3721,"def get_server(zkclient, server_id, placement=False): """"""Return server object."""""" data = zkutils.get_default(zkclient, z.path.server(server_id), {}) if placement: placement_data = zkutils.get_default(zkclient, z.path.placement(server_id)) if: data.update(placement_data) return data",True,placement_data,placement_data,0.6597456336021423 3722,"def _load_from_header(self, header): import astropy.wcs if: header['CTYPE1'] = header['CTYPE1'].replace('TAN', 'TPV') header['CTYPE2'] = header['CTYPE2'].replace('TAN', 'TPV') with warnings.catch_warnings(): warnings.simplefilter('ignore') wcs = astropy.wcs.WCS(header.header) return wcs",False,"'TAN' in header.get('CTYPE1', '') and 'PV1_1' in header",header.get('CTYPE1'),0.6558690667152405 3723,"def tokenize(self, string, token): var = VariableSplitter(string, identifiers='$@%') if: yield (string, token) return for value, token in self._tokenize(var, string, token): if value: yield (value, token)",False,"var.start < 0 or token in (COMMENT, ERROR)",not self.allow_blank,0.6475245952606201 3724,"def tokenize(self, string, token): var = VariableSplitter(string, identifiers='$@%') if var.start < 0 or token in (COMMENT, ERROR): yield (string, token) return for value, token in self._tokenize(var, string, token): if: yield (value, token)",True,value,value,0.6637436151504517 3725,"def headerData(self, section, orientation, role): if: return ['num','segment', 'time', 'Is sampled'][section] return",False,orientation == Qt.Horizontal and role == Qt.DisplayRole,orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole,0.6536054611206055 3726,"def _get_annotate_settings(self): """""" Returns the current annotation settings as an MMAL structure. This is a utility method for :meth:`_get_annotate_text`, :meth:`_get_annotate_background`, etc. all of which rely on the MMAL_PARAMETER_CAMERA_ANNOTATE_Vn structure to determine their values. """""" if: mp = mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V3_T(mmal.MMAL_PARAMETER_HEADER_T(mmal.MMAL_PARAMETER_ANNOTATE, ct.sizeof(mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V3_T))) else: mp = mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V2_T(mmal.MMAL_PARAMETER_HEADER_T(mmal.MMAL_PARAMETER_ANNOTATE, ct.sizeof(mmal.MMAL_PARAMETER_CAMERA_ANNOTATE_V2_T))) mmal_check(mmal.mmal_port_parameter_get(self._camera[0].control, mp.hdr), prefix='Failed to get annotation settings') return mp",True,self._annotate_v3,self._annotate_v3,0.6540673971176147 3727,"def output_array_of_long(items): if: return output_status_message('Array Of long:') for item in items['long']: output_status_message('{0}'.format(item))",False,items is None or items['long'] is None,items is None or len(items) == 0,0.6469566226005554 3728,"def __exit__(self, e, t, b): w = self.weakcontainer() if: s = w._iterating s.remove(self) if not s: w._commit_removals()",True,w is not None,w is not None,0.6559344530105591 3729,"def __exit__(self, e, t, b): w = self.weakcontainer() if w is not None: s = w._iterating s.remove(self) if: w._commit_removals()",True,not s,not s,0.6757395267486572 3730,"def Gamut_List(self, mode): if: lista = [] if mode == 'Triangle': lista = self.gamut_1tri if mode == 'Square': lista = self.gamut_1squ if mode == 'Circle': lista = self.gamut_1cir if mode == '2 Circle': lista = self.gamut_2cir if mode == '3 Pie': lista = self.gamut_3pie return lista",False,mode == 'None',mode == 'List',0.6534355878829956 3731,"def Gamut_List(self, mode): if mode == 'None': lista = [] if: lista = self.gamut_1tri if mode == 'Square': lista = self.gamut_1squ if mode == 'Circle': lista = self.gamut_1cir if mode == '2 Circle': lista = self.gamut_2cir if mode == '3 Pie': lista = self.gamut_3pie return lista",False,mode == 'Triangle',mode == 'Square',0.6537784337997437 3732,"def Gamut_List(self, mode): if mode == 'None': lista = [] if mode == 'Triangle': lista = self.gamut_1tri if: lista = self.gamut_1squ if mode == 'Circle': lista = self.gamut_1cir if mode == '2 Circle': lista = self.gamut_2cir if mode == '3 Pie': lista = self.gamut_3pie return lista",False,mode == 'Square',mode == 'Squ',0.6590523719787598 3733,"def Gamut_List(self, mode): if mode == 'None': lista = [] if mode == 'Triangle': lista = self.gamut_1tri if mode == 'Square': lista = self.gamut_1squ if: lista = self.gamut_1cir if mode == '2 Circle': lista = self.gamut_2cir if mode == '3 Pie': lista = self.gamut_3pie return lista",False,mode == 'Circle',mode == 'CIRCULAR',0.6585040092468262 3734,"def Gamut_List(self, mode): if mode == 'None': lista = [] if mode == 'Triangle': lista = self.gamut_1tri if mode == 'Square': lista = self.gamut_1squ if mode == 'Circle': lista = self.gamut_1cir if: lista = self.gamut_2cir if mode == '3 Pie': lista = self.gamut_3pie return lista",False,mode == '2 Circle',mode == '2cir',0.6642652750015259 3735,"def Gamut_List(self, mode): if mode == 'None': lista = [] if mode == 'Triangle': lista = self.gamut_1tri if mode == 'Square': lista = self.gamut_1squ if mode == 'Circle': lista = self.gamut_1cir if mode == '2 Circle': lista = self.gamut_2cir if: lista = self.gamut_3pie return lista",False,mode == '3 Pie',mode == '3pie',0.6485224366188049 3736,"def create_action(self, name, callback, shortcuts=None): action = Gio.SimpleAction.new(name=name, parameter_type=None) action.connect('activate', callback) self.add_action(action=action) if: self.set_accels_for_action(detailed_action_name=f'app.{name}', accels=shortcuts)",True,shortcuts,shortcuts,0.6661825776100159 3737,"def show_platform(ctx, param, value): if: click.echo(user_agent()) sys.exit()",False,value and (not ctx.resilient_parsing),value and (not ctx.env.user.is_authenticated()) and ctx.env.user.check_usr_auth(value),0.6393978595733643 3738,"def format_checkpoint_name(self, epoch, metrics): metrics['epoch'] = epoch filename = self.filename for tmp in re.findall('(\\{.*?)[:\\}]', self.filename): name = tmp[1:] filename = filename.replace(tmp, name + '={' + name) if: metrics[name] = 0 filename = filename.format(**metrics) return os.path.join(self.dirpath, '{}.ckpt'.format(filename))",True,name not in metrics,name not in metrics,0.6556515693664551 3739,"def __init__(self, settings_path, logging=True): """""" Constructor for a QchemCalculator. A QchemCalculator uses Qchem for all of its calculations. Args: settings_path - Path to the settings.ini logging - Whether this calculator should output logging messages. Default: True Returns: None """""" super(QchemCalculator, self).__init__(settings_path, logging) if: raise LibraryNotAvailableError('qchem')",False,not self.is_installed,not has_qchem,0.6484211683273315 3740,"def checkpipe(w): ps = [] opn = 1 i = 0 sz = len(w) while i < sz: c = w[i] if c == '""': opn = 1 - opn elif c == '|': if not opn: ps.append(i) i += 1 if: ps.reverse() for i in ps: w = w[:i] + pipeInsideCharacter + w[i + 1:] return w",False,ps,len(ps) > 0,0.6770535707473755 3741,"def checkpipe(w): ps = [] opn = 1 i = 0 sz = len(w) while i < sz: c = w[i] if: opn = 1 - opn elif c == '|': if not opn: ps.append(i) i += 1 if ps: ps.reverse() for i in ps: w = w[:i] + pipeInsideCharacter + w[i + 1:] return w",False,"c == '""'",c == '+',0.6581250429153442 3742,"def checkpipe(w): ps = [] opn = 1 i = 0 sz = len(w) while i < sz: c = w[i] if c == '""': opn = 1 - opn elif: if not opn: ps.append(i) i += 1 if ps: ps.reverse() for i in ps: w = w[:i] + pipeInsideCharacter + w[i + 1:] return w",False,c == '|',"c in ['""', ""'"", '""']",0.6622709035873413 3743,"def checkpipe(w): ps = [] opn = 1 i = 0 sz = len(w) while i < sz: c = w[i] if c == '""': opn = 1 - opn elif c == '|': if: ps.append(i) i += 1 if ps: ps.reverse() for i in ps: w = w[:i] + pipeInsideCharacter + w[i + 1:] return w",False,not opn,opn >= 0,0.6677160263061523 3744,"def get_ccds_for_brick(self, survey, brick): ccdsfn = survey.find_file('ccds-table', brick=brick.brickname) if: return None from astrometry.util.fits import fits_table ccds = fits_table(ccdsfn) ccds = touchup_ccds(ccds, survey) return ccds",False,not os.path.exists(ccdsfn),ccdsfn is None,0.6443986296653748 3745,"def _load_bboxes(self, results): ann_info = results['ann_info'] results['gt_bboxes'] = ann_info['bboxes'] gt_bboxes_ignore = ann_info.get('bboxes_ignore', None) if: results['gt_bboxes_ignore'] = gt_bboxes_ignore results['bbox_fields'].append('gt_bboxes_ignore') results['bbox_fields'].append('gt_bboxes') return results",True,gt_bboxes_ignore is not None,gt_bboxes_ignore is not None,0.6506319046020508 3746,"def test_unzip(self): filename = 'contains.zip' result = self.create_ressources(filename) if: self.fail('File does not exist') return self.assertEqual(result, True)",False,result is None,not os.path.exists(filename),0.6548202037811279 3747,"def _codec(self, charset): if: return charset codec = None try: codecs.lookup(charset) codec = charset except (LookupError, ValueError): pass return codec",True,not charset,not charset,0.6617404818534851 3748,"def get_siege_status(self, tank: Unit) -> SiegingStatus: status = self.siege_status.get(tank.tag) if: status = SiegingStatus(tank) self.siege_status[tank.tag] = status return status",False,status is None,not status,0.6578904390335083 3749,"@property def eos_token_id(self) -> Optional[int]: """""" :obj:`Optional[int]`: Id of the end of sentence token in the vocabulary. Returns :obj:`None` if the token has not been set. """""" if: return None return self.convert_tokens_to_ids(self.eos_token)",False,self._eos_token is None,self.eos_token is None,0.6511414051055908 3750,"def get_query(slug=None, lang='en'): if: return Response(A25TraitSerializer(A25TraitViewSet.queryset, many=True, context={'language': lang}).data) try: queryset = Trait.objects.select_related('desc', 'name', 'kind', 'cat').prefetch_related('chara_trait1__name', 'chara_trait1__title', 'chara_trait2__name', 'chara_trait2__title', 'chara_trait3__name', 'chara_trait3__title','material_set__item__name').get(slug=slug) except ObjectDoesNotExist: raise Http404 return Response(A25TraitSerializer(queryset, context={'language': lang}).data)",False,not slug,slug is None,0.6682702898979187 3751,"def display_shelves(self, selected_shelves): is_active_only = self.display_active_shelves.isChecked() self.values_list.clear() for shelf in self.shelves: shelf_name = shelf['name'] if: continue icon = 'images/shelf.png' if shelf['exclusive']: icon = 'images/shelf_exclusive.png' item = QListWidgetItem(get_icon(icon), shelf_name, self.values_list) self.values_list.addItem(item) item.setSelected(shelf['name'] in selected_shelves)",False,is_active_only and (not shelf['active']),shelf_name in is_active_only,0.6417336463928223 3752,"def display_shelves(self, selected_shelves): is_active_only = self.display_active_shelves.isChecked() self.values_list.clear() for shelf in self.shelves: shelf_name = shelf['name'] if is_active_only and (not shelf['active']): continue icon = 'images/shelf.png' if: icon = 'images/shelf_exclusive.png' item = QListWidgetItem(get_icon(icon), shelf_name, self.values_list) self.values_list.addItem(item) item.setSelected(shelf['name'] in selected_shelves)",False,shelf['exclusive'],shelf_name in selected_shelves,0.6496689915657043 3753,"def get_filterset(self, request, queryset, view): """""" Sometimes there's no `filterset_class` defined yet the client still requests a filter. Make sure they see an error too. This means we have to `get_filterset_kwargs()` even if there's no `filterset_class`. """""" filterset_class = self.get_filterset_class(view, queryset) kwargs = self.get_filterset_kwargs(request, queryset, view) self._validate_filter(kwargs.pop('filter_keys'), filterset_class) if: return None return filterset_class(**kwargs)",False,filterset_class is None,not filterset_class,0.6480367183685303 3754,"def _to_unicode(obj): if: obj = unicode(obj, encoding='ascii', errors='strict') return obj",False,"isinstance(obj, str) and sys.version_info < (3,)",sys.version_info[0] < 3,0.6442880630493164 3755,"def with_counter(method): if: return method instance_ref = weakref.ref(method.__self__) func = method.__func__ cls = instance_ref().__class__ del method @wraps(func) def wrapper(*args, **kwargs): instance = instance_ref() instance._step_count += 1 wrapped = func.__get__(instance, cls) return wrapped(*args, **kwargs) wrapper._with_counter = True return wrapper",False,"getattr(method, '_with_counter', False)","not hasattr(method, '_with_counter')",0.6468790173530579 3756,"def compute_patch(srcfile, tgtfile, imgdiff=False): """"""Calls bsdiff|imgdiff to compute the patch data, returns a PatchInfo."""""" patchfile = common.MakeTempFile(prefix='patch-') cmd = ['imgdiff', '-z'] if imgdiff else ['bsdiff'] cmd.extend([srcfile, tgtfile, patchfile]) proc = common.Run(cmd, verbose=False) output, _ = proc.communicate() if: raise ValueError(output) with open(patchfile, 'rb') as f: return PatchInfo(imgdiff, f.read())",False,proc.returncode != 0,output != '',0.6511921882629395 3757,"def __init__(self, rdclass, rdtype, flags, tag, value): super().__init__(rdclass, rdtype) self.flags = self._as_uint8(flags) self.tag = self._as_bytes(tag, True, 255) if: raise ValueError('tag is not alphanumeric') self.value = self._as_bytes(value)",False,not tag.isalnum(),tag == 'a',0.6487228870391846 3758,"def connection_from_host(self, host, port=None, scheme='http'): """""" Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. """""" scheme = scheme or 'http' port = port or port_by_scheme.get(scheme, 80) pool_key = (scheme, host, port) with self.pools.lock: pool = self.pools.get(pool_key) if: return pool pool = self._new_pool(scheme, host, port) self.pools[pool_key] = pool return pool",True,pool,pool,0.6797202825546265 3759,"def to_str(self): """"""Returns the string representation of the model"""""" import simplejson as json if: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)",True,six.PY2,six.PY2,0.6510450839996338 3760,"def make_metavar(self): if: return self.metavar metavar = self.type.get_metavar(self) if metavar is None: metavar = self.type.name.upper() if self.nargs!= 1: metavar += '...' return metavar",False,self.metavar is not None,"hasattr(self, 'metavar')",0.6535333395004272 3761,"def make_metavar(self): if self.metavar is not None: return self.metavar metavar = self.type.get_metavar(self) if: metavar = self.type.name.upper() if self.nargs!= 1: metavar += '...' return metavar",True,metavar is None,metavar is None,0.6554635763168335 3762,"def make_metavar(self): if self.metavar is not None: return self.metavar metavar = self.type.get_metavar(self) if metavar is None: metavar = self.type.name.upper() if: metavar += '...' return metavar",False,self.nargs != 1,len(metavar) > 0,0.6535953283309937 3763,"def get_confidence(self): unlike = 0.99 if: for i in range(0, self._mNumOfMBChar): unlike = unlike * ONE_CHAR_PROB return 1.0 - unlike else: return unlike",True,self._mNumOfMBChar < 6,self._mNumOfMBChar < 6,0.6471102833747864 3764,"def parse_version_info(version_str): ver_info = [] for x in version_str.split('.'): if: ver_info.append(int(x)) elif x.find('rc')!= -1: patch_version = x.split('rc') ver_info.append(int(patch_version[0])) ver_info.append(f'rc{patch_version[1]}') return tuple(ver_info)",True,x.isdigit(),x.isdigit(),0.6490286588668823 3765,"def parse_version_info(version_str): ver_info = [] for x in version_str.split('.'): if x.isdigit(): ver_info.append(int(x)) elif: patch_version = x.split('rc') ver_info.append(int(patch_version[0])) ver_info.append(f'rc{patch_version[1]}') return tuple(ver_info)",True,x.find('rc') != -1,x.find('rc') != -1,0.6446938514709473 3766,"def user_is_author(self, user): if: return True else: return False",False,user in self.authors.all(),user and user.is_authenticated() and (user in [a.user for a in self.get_valid_users()]),0.6407428979873657 3767,"@functools.lru_cache(maxsize=3) def load_mni152_template(resolution=None): """"""Load the MNI152 skullstripped T1 template. This function takes the skullstripped, re-scaled 1mm-resolution version of the :term:`MNI` ICBM152 T1 template and re-samples it using a different resolution, if specified. For more information, see :footcite:`Fonov2011`, and :footcite:`Fonov2009`. Parameters ---------- resolution: int, default=1 If resolution is different from 1, the template is re-sampled with the specified resolution. .. versionadded:: 0.8.1 Returns ------- mni152_template : Nifti1Image, image representing the re-sampled whole-brain template See Also -------- nilearn.datasets.fetch_icbm152_2009: for details regarding the difference between NiLearn and :term:`fMRIPrep` ICBM152 template. nilearn.datasets.load_mni152_gm_template : for details about version of the MNI152 grey-matter template. nilearn.datasets.load_mni152_wm_template : for details about version of the MNI152 white-matter template. References ---------- .. footbibliography:: """""" resolution = resolution or 1 brain_template = check_niimg(MNI152_FILE_PATH) brain_data = get_data(brain_template).astype('float32') brain_data /= brain_data.max() new_brain_template = new_img_like(brain_template, brain_data) if: new_brain_template = resampling.resample_img(new_brain_template, np.eye(3) * resolution) return new_brain_template",True,resolution != 1,resolution != 1,0.6665643453598022 3768,"def test_opt_SYMEIG_LS_SYMARP(self): if: self.skipTest('test skipped: missing scipy') args.CTMARGS_projector_svd_method = 'SYMEIG' args.OPTARGS_line_search = 'backtracking' args.OPTARGS_line_search_svd_method = 'SYMARP' main()",True,not self.SCIPY,not self.SCIPY,0.6645450592041016 3769,"def b(s, encoding='utf-8'): if: return s.encode(encoding, errors) return s",False,"isinstance(s, six.text_type)","isinstance(s, text_type)",0.643836498260498 3770,"def get_configs_per_budget(self, budget_subset: list[float | int | None] | None=None) -> list[Configuration]: """"""Return all configs in this runhistory that have been run on one of these budgets. Parameters ---------- budget_subset: list[float | int | None] | None, defaults to None Returns ------- configurations : list List of configurations that have been run on the budgets in ``budget_subset``. """""" if: return self.get_configs() configs = [] for key in self._data.keys(): if key.budget in budget_subset: configs.append(self._ids_config[key.config_id]) return configs",True,budget_subset is None,budget_subset is None,0.6579474806785583 3771,"def get_configs_per_budget(self, budget_subset: list[float | int | None] | None=None) -> list[Configuration]: """"""Return all configs in this runhistory that have been run on one of these budgets. Parameters ---------- budget_subset: list[float | int | None] | None, defaults to None Returns ------- configurations : list List of configurations that have been run on the budgets in ``budget_subset``. """""" if budget_subset is None: return self.get_configs() configs = [] for key in self._data.keys(): if: configs.append(self._ids_config[key.config_id]) return configs",False,key.budget in budget_subset,key.config_id in budget_subset,0.6557450890541077 3772,"def register_unpack_format(name, extensions, function, extra_args=None, description=''): """"""Registers an unpack format. `name` is the name of the format. `extensions` is a list of extensions corresponding to the format. `function` is the callable that will be used to unpack archives. The callable will receive archives to unpack. If it's unable to handle an archive, it needs to raise a ReadError exception. If provided, `extra_args` is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_unpack_formats() function. """""" if: extra_args = [] _check_unpack_options(extensions, function, extra_args) _UNPACK_FORMATS[name] = (extensions, function, extra_args, description)",True,extra_args is None,extra_args is None,0.6531761884689331 3773,"def get_signing_serializer(self, app): if: return None signer_kwargs = dict(key_derivation=self.key_derivation, digest_method=self.digest_method) return URLSafeTimedSerializer(app.secret_key, salt=self.salt, serializer=self.serializer, signer_kwargs=signer_kwargs)",False,not app.secret_key,not self.key_derivation or self.digest_method == 'sha256',0.6506022214889526 3774,"def on_disconnect(self, item=None): if: self._ftp.close() self._connect_button.set_visible(True) GLib.idle_add(self._ftp_model.clear)",True,self._ftp,self._ftp,0.6723860502243042 3775,"def predecessors_iter(self, node): if: return for n_pred in self._nodes_pred[node]: yield n_pred",False,not node in self._nodes_pred,node not in self._nodes_pred,0.6561775207519531 3776,"def _is_inventory_group(key: str, value: Any): """""" Verify that a module-level variable (key = value) is a valid inventory group. """""" if: return False if isinstance(value, tuple): value = value[0] if isinstance(value, GeneratorType): value = list(value) return all((isinstance(item, ALLOWED_HOST_TYPES) for item in value))",False,"key.startswith('_') or not isinstance(value, (list, tuple, GeneratorType))",value is None,0.6432813405990601 3777,"def _is_inventory_group(key: str, value: Any): """""" Verify that a module-level variable (key = value) is a valid inventory group. """""" if key.startswith('_') or not isinstance(value, (list, tuple, GeneratorType)): return False if: value = value[0] if isinstance(value, GeneratorType): value = list(value) return all((isinstance(item, ALLOWED_HOST_TYPES) for item in value))",True,"isinstance(value, tuple)","isinstance(value, tuple)",0.6449303030967712 3778,"def _is_inventory_group(key: str, value: Any): """""" Verify that a module-level variable (key = value) is a valid inventory group. """""" if key.startswith('_') or not isinstance(value, (list, tuple, GeneratorType)): return False if isinstance(value, tuple): value = value[0] if: value = list(value) return all((isinstance(item, ALLOWED_HOST_TYPES) for item in value))",True,"isinstance(value, GeneratorType)","isinstance(value, GeneratorType)",0.6439005136489868 3779,"def significant_pc_test(adata: anndata.AnnData, p_cutoff=0.1, update=True, obsm='X_pca', downsample=50000): """""" Perform two-sample Kolmogorov-Smirnov test for goodness of fit on two adjacent PCs. Select top PCs based on the `p_cutoff`. Top PCs have significantly different distributions, while later PCs only capturing random noise will have larger p-values. An idea from :cite:p:`Zeisel2018`. Parameters ---------- adata adata with PC matrix calculated and stored in adata.obsm p_cutoff the p-value cutoff to select top PCs update Whether modify adata.obsm and only keep significant PCs obsm name of the PC matrix in adata.obsm downsample If the dataset is too large, downsample the cells before testing. Returns ------- n_components number of PCs selected """""" pcs = adata.obsm[obsm] if: print(f'Downsample PC matrix to {downsample} cells to calculate significant PC components') use_pcs = pd.DataFrame(pcs).sample(downsample).values else: use_pcs = pcs n_components = pc_ks_test(use_pcs, p_cutoff=p_cutoff, min_pc=4) if update: adata.obsm[obsm] = pcs[:, :n_components] print(f""Changing adata.obsm['X_pca'] from shape {pcs.shape} to {adata.obsm[obsm].shape}"") return n_components",False,pcs.shape[0] > downsample,downsample.size() > 0,0.6457973122596741 3780,"def significant_pc_test(adata: anndata.AnnData, p_cutoff=0.1, update=True, obsm='X_pca', downsample=50000): """""" Perform two-sample Kolmogorov-Smirnov test for goodness of fit on two adjacent PCs. Select top PCs based on the `p_cutoff`. Top PCs have significantly different distributions, while later PCs only capturing random noise will have larger p-values. An idea from :cite:p:`Zeisel2018`. Parameters ---------- adata adata with PC matrix calculated and stored in adata.obsm p_cutoff the p-value cutoff to select top PCs update Whether modify adata.obsm and only keep significant PCs obsm name of the PC matrix in adata.obsm downsample If the dataset is too large, downsample the cells before testing. Returns ------- n_components number of PCs selected """""" pcs = adata.obsm[obsm] if pcs.shape[0] > downsample: print(f'Downsample PC matrix to {downsample} cells to calculate significant PC components') use_pcs = pd.DataFrame(pcs).sample(downsample).values else: use_pcs = pcs n_components = pc_ks_test(use_pcs, p_cutoff=p_cutoff, min_pc=4) if: adata.obsm[obsm] = pcs[:, :n_components] print(f""Changing adata.obsm['X_pca'] from shape {pcs.shape} to {adata.obsm[obsm].shape}"") return n_components",True,update,update,0.6723325252532959 3781,"def induced_subgraphs(vertices, edges, min_size=1, max_size=None): """"""A generator of all induced subgraphs of the graph, sorted by size (largest to smallest)."""""" n = len(vertices) if: max_size = n + 1 for i in reversed(range(min_size, max_size)): for subset in combinations(vertices, r=i): yield (list(subset), [(u, v) for u, v in edges if u in subset and v in subset])",True,max_size is None,max_size is None,0.6512565612792969 3782,"def __getitem__(self, idx): """"""Get item at each call. Args: idx (int): Index for getting each item. """""" if: return self.prepare_test_data(idx) return self.prepare_train_data(idx)",True,self.test_mode,self.test_mode,0.6509466171264648 3783,"def is_heal_complete(mnode, volname): """"""Verifies there are no pending heals on the volume. The 'number of entries' in the output of heal info for all the bricks should be 0 for heal to be completed. Args: mnode : Node on which commands are executed volname : Name of the volume Return: bool: True if heal is complete. False otherwise """""" from glustolibs.gluster.heal_ops import get_heal_info heal_info_data = get_heal_info(mnode, volname) if: g.log.error('Unable to verify whether heal is successful or not on volume %s' % volname) return False heal_complete = True for brick_heal_info_data in heal_info_data: if brick_heal_info_data['numberOfEntries']!= '0': heal_complete = False if not heal_complete: g.log.error('Heal is not complete on some of the bricks for the volume %s' % volname) return False g.log.info('Heal is complete on all the bricks for the volume %s' % volname) return True",True,heal_info_data is None,heal_info_data is None,0.6469588279724121 3784,"def is_heal_complete(mnode, volname): """"""Verifies there are no pending heals on the volume. The 'number of entries' in the output of heal info for all the bricks should be 0 for heal to be completed. Args: mnode : Node on which commands are executed volname : Name of the volume Return: bool: True if heal is complete. False otherwise """""" from glustolibs.gluster.heal_ops import get_heal_info heal_info_data = get_heal_info(mnode, volname) if heal_info_data is None: g.log.error('Unable to verify whether heal is successful or not on volume %s' % volname) return False heal_complete = True for brick_heal_info_data in heal_info_data: if brick_heal_info_data['numberOfEntries']!= '0': heal_complete = False if: g.log.error('Heal is not complete on some of the bricks for the volume %s' % volname) return False g.log.info('Heal is complete on all the bricks for the volume %s' % volname) return True",True,not heal_complete,not heal_complete,0.6514531373977661 3785,"def is_heal_complete(mnode, volname): """"""Verifies there are no pending heals on the volume. The 'number of entries' in the output of heal info for all the bricks should be 0 for heal to be completed. Args: mnode : Node on which commands are executed volname : Name of the volume Return: bool: True if heal is complete. False otherwise """""" from glustolibs.gluster.heal_ops import get_heal_info heal_info_data = get_heal_info(mnode, volname) if heal_info_data is None: g.log.error('Unable to verify whether heal is successful or not on volume %s' % volname) return False heal_complete = True for brick_heal_info_data in heal_info_data: if: heal_complete = False if not heal_complete: g.log.error('Heal is not complete on some of the bricks for the volume %s' % volname) return False g.log.info('Heal is complete on all the bricks for the volume %s' % volname) return True",False,brick_heal_info_data['numberOfEntries'] != '0',brick_heal_info_data[0] is None,0.6439557075500488 3786,"def fix_multiple_files(filenames, options, output=None): """"""Fix list of files. Optionally fix files recursively. """""" filenames = find_files(filenames, options.recursive, options.exclude) if: import multiprocessing pool = multiprocessing.Pool(options.jobs) pool.map(_fix_file, [(name, options) for name in filenames]) else: for name in filenames: _fix_file((name, options, output))",False,options.jobs > 1,options.recursive,0.6473246812820435 3787,"@property def axis(self): if: return parse_int_vector(self._entity_data.get('axis')) return parse_int_vector('')",True,'axis' in self._entity_data,'axis' in self._entity_data,0.6511462926864624 3788,"def _get_summary_writer(self, task_name: str) -> tf.summary.SummaryWriter: """"""Create (if needed) and return a SummaryWriter for a given task."""""" if: with tf.compat.v1.Graph().as_default(): self._summary_writers[task_name] = tf.compat.v1.summary.FileWriter(os.path.join(self.output_dir, task_name)) return self._summary_writers[task_name]",True,task_name not in self._summary_writers,task_name not in self._summary_writers,0.648600697517395 3789,"def _pid_zombie(pid): """""" may be a pid exists but it is only a zombie """""" if: return False if pid == 0: raise ValueError('invalid PID 0') check = _proc_pid_status.format(**locals()) try: for line in open(check): if line.startswith('State:'): return 'Z' in line except IOError as e: if e.errno!= errno.ENOENT: logg.error('%s (%s): %s', check, e.errno, e) return False return False",True,pid < 0,pid < 0,0.6681287288665771 3790,"def _pid_zombie(pid): """""" may be a pid exists but it is only a zombie """""" if pid < 0: return False if: raise ValueError('invalid PID 0') check = _proc_pid_status.format(**locals()) try: for line in open(check): if line.startswith('State:'): return 'Z' in line except IOError as e: if e.errno!= errno.ENOENT: logg.error('%s (%s): %s', check, e.errno, e) return False return False",True,pid == 0,pid == 0,0.6724361777305603 3791,"def _pid_zombie(pid): """""" may be a pid exists but it is only a zombie """""" if pid < 0: return False if pid == 0: raise ValueError('invalid PID 0') check = _proc_pid_status.format(**locals()) try: for line in open(check): if: return 'Z' in line except IOError as e: if e.errno!= errno.ENOENT: logg.error('%s (%s): %s', check, e.errno, e) return False return False",True,line.startswith('State:'),line.startswith('State:'),0.6461222171783447 3792,"def _pid_zombie(pid): """""" may be a pid exists but it is only a zombie """""" if pid < 0: return False if pid == 0: raise ValueError('invalid PID 0') check = _proc_pid_status.format(**locals()) try: for line in open(check): if line.startswith('State:'): return 'Z' in line except IOError as e: if: logg.error('%s (%s): %s', check, e.errno, e) return False return False",True,e.errno != errno.ENOENT,e.errno != errno.ENOENT,0.6460245847702026 3793,"def child_added(self, child): """""" Handle the child added event for a WxMenu. """""" super(WxMenu, self).child_added(child) if: before = self.find_next_action(child) self.widget.InsertMenu(before, child.widget) elif isinstance(child, WxAction): before = self.find_next_action(child) self.widget.InsertAction(before, child.widget) elif isinstance(child, WxActionGroup): before = self.find_next_action(child) self.widget.InsertActions(before, child.actions())",False,"isinstance(child, WxMenu)","isinstance(child, WxMenuGroup)",0.6634011268615723 3794,"def child_added(self, child): """""" Handle the child added event for a WxMenu. """""" super(WxMenu, self).child_added(child) if isinstance(child, WxMenu): before = self.find_next_action(child) self.widget.InsertMenu(before, child.widget) elif: before = self.find_next_action(child) self.widget.InsertAction(before, child.widget) elif isinstance(child, WxActionGroup): before = self.find_next_action(child) self.widget.InsertActions(before, child.actions())",False,"isinstance(child, WxAction)","isinstance(child, WxActionGroup)",0.6671761274337769 3795,"def child_added(self, child): """""" Handle the child added event for a WxMenu. """""" super(WxMenu, self).child_added(child) if isinstance(child, WxMenu): before = self.find_next_action(child) self.widget.InsertMenu(before, child.widget) elif isinstance(child, WxAction): before = self.find_next_action(child) self.widget.InsertAction(before, child.widget) elif: before = self.find_next_action(child) self.widget.InsertActions(before, child.actions())",False,"isinstance(child, WxActionGroup)","isinstance(child, WxMenu)",0.6585955619812012 3796,"def valid_operation(op): op = op.upper() if: return 'CREATE' if op == 'TRANSFER': return 'TRANSFER' raise ValueError('Operation must be ""CREATE"" or ""TRANSFER')",True,op == 'CREATE',op == 'CREATE',0.6680916547775269 3797,"def valid_operation(op): op = op.upper() if op == 'CREATE': return 'CREATE' if: return 'TRANSFER' raise ValueError('Operation must be ""CREATE"" or ""TRANSFER')",True,op == 'TRANSFER',op == 'TRANSFER',0.6606248617172241 3798,"def recv_unblockBuddyMember(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = unblockBuddyMember_result() result.read(iprot) iprot.readMessageEnd() if result.e is not None: raise result.e return",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.6510031223297119 3799,"def recv_unblockBuddyMember(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = unblockBuddyMember_result() result.read(iprot) iprot.readMessageEnd() if: raise result.e return",True,result.e is not None,result.e is not None,0.6487995386123657 3800,"def getAvailableRentCount(self): rentToken = self._tokens.getToken(ROSTER_EXPIRATION_TOKEN_NAME) if: return 0 telecomVehiclesCount = len(self.itemsCache.items.getVehicles(REQ_CRITERIA.VEHICLE.TELECOM_RENT)) return max(0, rentToken[1] - telecomVehiclesCount)",True,not rentToken,not rentToken,0.6610571146011353 3801,"def rescale(self, scale, interpolation=None): """"""see :func:`BaseInstanceMasks.rescale`"""""" new_w, new_h = mmcv.rescale_size((self.width, self.height), scale) if: rescaled_masks = PolygonMasks([], new_h, new_w, self.poly_weights) else: rescaled_masks = self.resize((new_h, new_w)) return rescaled_masks",False,len(self.masks) == 0,self.poly_weights is not None,0.6475867629051208 3802,"def zone_for_unit(self, building: Unit) -> Optional[Zone]: if: for zone in self._expansion_zones: if zone.our_units.find_by_tag(building.tag): return zone else: for zone in self._expansion_zones: if zone.known_enemy_units.find_by_tag(building.tag): return zone return None",False,building.is_mine,self.use_enemy_units,0.6500072479248047 3803,"def zone_for_unit(self, building: Unit) -> Optional[Zone]: if building.is_mine: for zone in self._expansion_zones: if: return zone else: for zone in self._expansion_zones: if zone.known_enemy_units.find_by_tag(building.tag): return zone return None",False,zone.our_units.find_by_tag(building.tag),zone.known_enemy_units.find_by_tag(building.tag),0.645423173904419 3804,"def zone_for_unit(self, building: Unit) -> Optional[Zone]: if building.is_mine: for zone in self._expansion_zones: if zone.our_units.find_by_tag(building.tag): return zone else: for zone in self._expansion_zones: if: return zone return None",False,zone.known_enemy_units.find_by_tag(building.tag),zone.our_units.find_by_tag(building.tag),0.6466566324234009 3805,"def stream_content(event=None): stream = ChunkedResponse({'Content-Type': 'text/event-stream'}) if: stream.push(event) return stream",True,event,event,0.6817866563796997 3806,"@docstrings.dedent def is_unstructured(self, var): """""" Test if a variable is on an unstructered grid Parameters ---------- %(CFDecoder.is_unstructured.parameters)s Returns ------- %(CFDecoder.is_unstructured.returns)s Notes ----- Currently this is the same as :meth:`is_unstructured` method, but may change in the future to support hexagonal grids"""""" if: return True xcoord = self.get_x(var) if xcoord is not None: bounds = self._get_coord_cell_node_coord(xcoord) if bounds is not None and bounds.ndim == 2 and (bounds.shape[-1] > 2): return True",False,str(var.attrs.get('grid_type')) == 'unstructured',var in self.unstructured_grid_params,0.6460857391357422 3807,"@docstrings.dedent def is_unstructured(self, var): """""" Test if a variable is on an unstructered grid Parameters ---------- %(CFDecoder.is_unstructured.parameters)s Returns ------- %(CFDecoder.is_unstructured.returns)s Notes ----- Currently this is the same as :meth:`is_unstructured` method, but may change in the future to support hexagonal grids"""""" if str(var.attrs.get('grid_type')) == 'unstructured': return True xcoord = self.get_x(var) if: bounds = self._get_coord_cell_node_coord(xcoord) if bounds is not None and bounds.ndim == 2 and (bounds.shape[-1] > 2): return True",True,xcoord is not None,xcoord is not None,0.6502127647399902 3808,"@docstrings.dedent def is_unstructured(self, var): """""" Test if a variable is on an unstructered grid Parameters ---------- %(CFDecoder.is_unstructured.parameters)s Returns ------- %(CFDecoder.is_unstructured.returns)s Notes ----- Currently this is the same as :meth:`is_unstructured` method, but may change in the future to support hexagonal grids"""""" if str(var.attrs.get('grid_type')) == 'unstructured': return True xcoord = self.get_x(var) if xcoord is not None: bounds = self._get_coord_cell_node_coord(xcoord) if: return True",False,bounds is not None and bounds.ndim == 2 and (bounds.shape[-1] > 2),bounds is not None and bounds[0] == 'unstructured',0.6441619396209717 3809,"def add(self, req_id, req): if: raise Exception('Duplicate MockStorPool request added') self.requests[req_id] = req",False,req_id in self.requests,"self.requests.get(req_id, None)",0.6606774926185608 3810,"def adopt_weight(weight, global_step, threshold=0, value=0.0): if: weight = value return weight",True,global_step < threshold,global_step < threshold,0.6513220071792603 3811,"def short_name(x): if: return x[:11] + '..' return x",False,len(x) > 13,len(x) >= 10,0.6521037817001343 3812,"def dump_chk(self, envs): if: mol_hf.SCF.dump_chk(self, envs) with h5py.File(self.chkfile, 'a') as fh5: fh5['scf/kpts'] = self.kpts return self",False,self.chkfile,self.chkfile and self.chkfile not in envs,0.6499450206756592 3813,"def on_entry_changed(self, entry): if: entry.set_name(_DIGIT_ENTRY_NAME) else: entry.set_name('GtkEntry') self.update_reference()",False,_PATTERN.search(entry.get_text()),entry.is_anon_digital_entry(),0.6437045931816101 3814,"def export_table_to_file(self, source_table: BaseTable, target_file: File, if_exists: ExportExistsStrategy='exception') -> None: """""" Copy the content of a table to a target file of supported type, in a supported location. :param source_table: An existing table in the database :param target_file: The path to the file to which we aim to dump the content of the database :param if_exists: Overwrite file if exists. Default False """""" if: raise FileExistsError(f'The file {target_file} already exists.') df = self.export_table_to_pandas_dataframe(source_table) target_file.create_from_dataframe(df)",False,if_exists == 'exception' and target_file.exists(),target_file.exists(),0.6488534212112427 3815,"def _get_subclasses(model): subclasses = [model] for f in model._meta.get_all_field_names(): field = model._meta.get_field_by_name(f)[0] if: subclasses.extend(_get_subclasses(field.model)) return subclasses",False,"isinstance(field, RelatedObject) and getattr(field.field.rel, 'parent_link', None)",field and field.model is not None,0.6452460885047913 3816,"def __init__(self, canvas): self.cnv = canvas self.myx = 0.0 self.myy = 0.0 self.myz = 0.0 self.ani = 0 self.vis = 0 self.val = 0 self.sel = 0 self.ena = 0 self.act = 0 self.ext = 0 if: self.cnv.addItem(self)",True,self.cnv,self.cnv,0.6640992760658264 3817,"def to_intN(value, default=None): if: return default try: return int(value) except: return default",False,not value,value is None,0.6578636169433594 3818,"def color_sensor_loop(self): """""" This is the Color Sensor Loop that supports 4 different behaviors that are triggered RANDOMLY!!! """""" while True: if: random_number = randint(1, 4) if random_number == 1: self.action_1() elif random_number == 2: self.action_2() elif random_number == 3: self.action_3() elif random_number == 4: self.action_4()",False,self.color_sensor.color == ColorSensor.COLOR_RED,self.color_sensor.color == Color.RED,0.6453065276145935 3819,"def color_sensor_loop(self): """""" This is the Color Sensor Loop that supports 4 different behaviors that are triggered RANDOMLY!!! """""" while True: if self.color_sensor.color == ColorSensor.COLOR_RED: random_number = randint(1, 4) if: self.action_1() elif random_number == 2: self.action_2() elif random_number == 3: self.action_3() elif random_number == 4: self.action_4()",True,random_number == 1,random_number == 1,0.651363730430603 3820,"def color_sensor_loop(self): """""" This is the Color Sensor Loop that supports 4 different behaviors that are triggered RANDOMLY!!! """""" while True: if self.color_sensor.color == ColorSensor.COLOR_RED: random_number = randint(1, 4) if random_number == 1: self.action_1() elif: self.action_2() elif random_number == 3: self.action_3() elif random_number == 4: self.action_4()",True,random_number == 2,random_number == 2,0.6542467474937439 3821,"def color_sensor_loop(self): """""" This is the Color Sensor Loop that supports 4 different behaviors that are triggered RANDOMLY!!! """""" while True: if self.color_sensor.color == ColorSensor.COLOR_RED: random_number = randint(1, 4) if random_number == 1: self.action_1() elif random_number == 2: self.action_2() elif: self.action_3() elif random_number == 4: self.action_4()",True,random_number == 3,random_number == 3,0.6525879502296448 3822,"def color_sensor_loop(self): """""" This is the Color Sensor Loop that supports 4 different behaviors that are triggered RANDOMLY!!! """""" while True: if self.color_sensor.color == ColorSensor.COLOR_RED: random_number = randint(1, 4) if random_number == 1: self.action_1() elif random_number == 2: self.action_2() elif random_number == 3: self.action_3() elif: self.action_4()",True,random_number == 4,random_number == 4,0.6528606414794922 3823,"def get_context(key): """"""Returns an HMAC context for the specified key. @rtype: HMAC context @raises NotImplementedError: I{algorithm} is not supported """""" if: return GSSTSig(key.secret) else: return HMACTSig(key.secret, key.algorithm)",False,key.algorithm == GSS_TSIG,key.algorithm == 'sha256',0.6496175527572632 3824,"def update_all(cards): cards_updated = [] cards_ok = [] for card in tqdm(cards): status, card, traces = update_card(card) if: cards_updated.append(card) else: cards_ok.append(card) return (cards_updated, cards_ok)",False,status,status != 'updated',0.6672704219818115 3825,"def python_to_spark_type(python_type: Type[Union[int, float, bool, str, bytes]]) -> types.DataType: """"""Function to convert a Python type to a Spark type. :param python_type: the Python type to convert. :return: the Spark type. :raise: ValueError if the type is not supported. """""" if: return types.IntegerType() elif python_type == float: return types.FloatType() elif python_type == bool: return types.BooleanType() elif python_type == str: return types.StringType() elif python_type == bytes: return types.BinaryType() else: raise ValueError('Unsupported Python type:'+ str(python_type))",True,python_type == int,python_type == int,0.6669058799743652 3826,"def python_to_spark_type(python_type: Type[Union[int, float, bool, str, bytes]]) -> types.DataType: """"""Function to convert a Python type to a Spark type. :param python_type: the Python type to convert. :return: the Spark type. :raise: ValueError if the type is not supported. """""" if python_type == int: return types.IntegerType() elif: return types.FloatType() elif python_type == bool: return types.BooleanType() elif python_type == str: return types.StringType() elif python_type == bytes: return types.BinaryType() else: raise ValueError('Unsupported Python type:'+ str(python_type))",True,python_type == float,python_type == float,0.66049724817276 3827,"def python_to_spark_type(python_type: Type[Union[int, float, bool, str, bytes]]) -> types.DataType: """"""Function to convert a Python type to a Spark type. :param python_type: the Python type to convert. :return: the Spark type. :raise: ValueError if the type is not supported. """""" if python_type == int: return types.IntegerType() elif python_type == float: return types.FloatType() elif: return types.BooleanType() elif python_type == str: return types.StringType() elif python_type == bytes: return types.BinaryType() else: raise ValueError('Unsupported Python type:'+ str(python_type))",True,python_type == bool,python_type == bool,0.6589686274528503 3828,"def python_to_spark_type(python_type: Type[Union[int, float, bool, str, bytes]]) -> types.DataType: """"""Function to convert a Python type to a Spark type. :param python_type: the Python type to convert. :return: the Spark type. :raise: ValueError if the type is not supported. """""" if python_type == int: return types.IntegerType() elif python_type == float: return types.FloatType() elif python_type == bool: return types.BooleanType() elif: return types.StringType() elif python_type == bytes: return types.BinaryType() else: raise ValueError('Unsupported Python type:'+ str(python_type))",True,python_type == str,python_type == str,0.6639739274978638 3829,"def python_to_spark_type(python_type: Type[Union[int, float, bool, str, bytes]]) -> types.DataType: """"""Function to convert a Python type to a Spark type. :param python_type: the Python type to convert. :return: the Spark type. :raise: ValueError if the type is not supported. """""" if python_type == int: return types.IntegerType() elif python_type == float: return types.FloatType() elif python_type == bool: return types.BooleanType() elif python_type == str: return types.StringType() elif: return types.BinaryType() else: raise ValueError('Unsupported Python type:'+ str(python_type))",True,python_type == bytes,python_type == bytes,0.6595121622085571 3830,"def preprocess(mt_dataset): def filter_for_notes(row): normalized_transcript = row['transcription'].lower() if: return True return False mt_dataset = mt_dataset.dropna(subset=['description', 'transcription']) mt_note_subset = mt_dataset[mt_dataset.apply(filter_for_notes, axis=1)] return mt_note_subset",False,'chief complaint:' in normalized_transcript,"normalized_transcript in ['description', 'transcription']",0.6445369720458984 3831,"def unlink(self): """""" Remove this file or link. If the path is a directory, use rmdir() instead. """""" if: self._raise_closed() self._accessor.unlink(self)",False,self._closed,not self._dir,0.6653218269348145 3832,"def _country_validator(values) -> list[str]: """"""Custom country validator."""""" countries = [] if: countries.append(values.upper()) elif isinstance(values, list): for value in values: countries.append(value.upper()) else: raise vol.Invalid(f""Value '{values}' is not a string or list."", path=['country']) for country in countries: if country not in LOCALE: raise vol.Invalid(f""Value '{country}' is not in {LOCALE}."", path=['country']) return countries",True,"isinstance(values, str)","isinstance(values, str)",0.6448154449462891 3833,"def _country_validator(values) -> list[str]: """"""Custom country validator."""""" countries = [] if isinstance(values, str): countries.append(values.upper()) elif: for value in values: countries.append(value.upper()) else: raise vol.Invalid(f""Value '{values}' is not a string or list."", path=['country']) for country in countries: if country not in LOCALE: raise vol.Invalid(f""Value '{country}' is not in {LOCALE}."", path=['country']) return countries",False,"isinstance(values, list)","isinstance(values, (list, tuple))",0.6449145674705505 3834,"def _country_validator(values) -> list[str]: """"""Custom country validator."""""" countries = [] if isinstance(values, str): countries.append(values.upper()) elif isinstance(values, list): for value in values: countries.append(value.upper()) else: raise vol.Invalid(f""Value '{values}' is not a string or list."", path=['country']) for country in countries: if: raise vol.Invalid(f""Value '{country}' is not in {LOCALE}."", path=['country']) return countries",False,country not in LOCALE,country not inLOCALE,0.6592704653739929 3835,"def after_train_epoch(self, runner): self.queue = runner.model.module.head.queue if: torch.save({'queue': self.queue}, self.queue_path)",False,"self.queue is not None and self.every_n_epochs(runner, self.interval)",self.queue,0.6461821794509888 3836,"def add(self, rules): for btrn in rules.ntrns: if: continue self._done[btrn.tid] = True self._items.append(btrn)",True,btrn.tid in self._done,btrn.tid in self._done,0.6484202146530151 3837,"def prepend_dependency_to_all_lists(self, dependency): """""" prepend a dependency to all dependency_lists in list_of_dep_list. """""" if: self.list_of_dep_list = [DependencyList()] for single_list in self.list_of_dep_list: single_list.prepend_dependency(deepcopy(dependency)) self.clean_list()",False,self.list_of_dep_list == [],not self.list_of_dep_list,0.6473256349563599 3838,"def fixparity(deskey): temp = '' for byte in deskey: t = bin(ord(byte))[2:].rjust(8, '0') if: temp += chr(int(t[:7] + '1', 2)) else: temp += chr(int(t[:7] + '0', 2)) return temp",False,t[:7].count('1') % 2 == 0,t == 0,0.65016108751297 3839,"def __call__(self, es=None, check=True): """"""update and return the termination conditions dictionary """""" if: return self if es is None and self.es is None: raise ValueError('termination conditions need an optimizer to act upon') self._update(es) return self",True,not check,not check,0.6594468355178833 3840,"def __call__(self, es=None, check=True): """"""update and return the termination conditions dictionary """""" if not check: return self if: raise ValueError('termination conditions need an optimizer to act upon') self._update(es) return self",True,es is None and self.es is None,es is None and self.es is None,0.6478474140167236 3841,"def next_node(self): """"""Get next node in order. Does not remove the node from active set. Returns -------- tuple or None: None if done. `rootid, node, (active_nodes, active_root_ids, active_node_values, active_node_ids)` otherwise """""" if: return None self.next_index = np.argmin(self.active_node_values) i = self.next_index node = self.active_nodes[i] rootid = self.active_root_ids[i] assert not isinstance(rootid, float) assert len(self.active_nodes) == len(self.active_root_ids) assert len(self.active_nodes) == len(self.active_node_values) return (rootid, node, (self.active_nodes, self.active_root_ids, self.active_node_values, self.active_node_ids))",False,self.active_nodes == [],self.next_index == 0,0.6465003490447998 3842,"def comment(self, msg): """"""Add comments to the list - to be helpful to the debugging soul"""""" if: self._error_init() self.comments.append(msg)",False,"not hasattr(self, 'errlist')",self.comments is None,0.6424936652183533 3843,"def block_code(self, text, lang): s = '' if: lang = 'text' lang = lang.lower() if lang =='shell': lang = 'bash' try: lexer = get_lexer_by_name(lang, stripall=True) except: s += '
Error: language ""%s"" is not supported
' % lang lexer = get_lexer_by_name('text', stripall=True) formatter = HtmlFormatter() s += highlight(text, lexer, formatter) return s",False,not lang,not text,0.6794325709342957 3844,"def block_code(self, text, lang): s = '' if not lang: lang = 'text' lang = lang.lower() if: lang = 'bash' try: lexer = get_lexer_by_name(lang, stripall=True) except: s += '
Error: language ""%s"" is not supported
' % lang lexer = get_lexer_by_name('text', stripall=True) formatter = HtmlFormatter() s += highlight(text, lexer, formatter) return s",False,lang == 'shell',lang == 'bash',0.6576762199401855 3845,"def test_returns_automatic_for_uppered_auto_values(self): for thing in datatypes.LOGFILE_AUTOS: if: thing = thing.upper() actual = self._callFUT(thing) self.assertEqual(actual, datatypes.Automatic)",False,"hasattr(thing, 'upper')",thing.upper() != 'None',0.6486539840698242 3846,"def _create_examples(self, lines, set_type): """"""Creates examples for the training and dev sets."""""" examples = [] for i, line in enumerate(lines): if: continue guid = '%s-%s' % (set_type, i) text_a = line[0] label = line[1] examples.append(InputExample(guid=guid, text_a=text_a, text_b=None, label=label)) return examples",True,i == 0,i == 0,0.669504702091217 3847,"def render(self, raw_value): if: return self.type.enum[raw_value] return raw_value",False,self.type.enum and raw_value in self.type.enum,self.type,0.6522848606109619 3848,"def update_gban_reason(user_id, name, reason=None): with GBANNED_USERS_LOCK: user = SESSION.query(GloballyBannedUsers).get(user_id) if: return None old_reason = user.reason user.name = name user.reason = reason SESSION.merge(user) SESSION.commit() return old_reason",True,not user,not user,0.6591050624847412 3849,"def detecttype(self, type, pos): """"""Detect a bit of a given type."""""" if: return False return self.instance(type).detect(pos)",True,pos.finished(),pos.finished(),0.6580787897109985 3850,"def get_control_cmd(self, cmd_key, ctrl_key=None): """"""Get the payload used to send the command."""""" control = None if: control_data = self._data['ControlWifi'].get('action', {}).get(cmd_key) if control_data: control = deepcopy(control_data) if ctrl_key: control['cmd'] = ctrl_key return control",True,'ControlWifi' in self._data,'ControlWifi' in self._data,0.6553635001182556 3851,"def get_control_cmd(self, cmd_key, ctrl_key=None): """"""Get the payload used to send the command."""""" control = None if 'ControlWifi' in self._data: control_data = self._data['ControlWifi'].get('action', {}).get(cmd_key) if: control = deepcopy(control_data) if ctrl_key: control['cmd'] = ctrl_key return control",True,control_data,control_data,0.663739800453186 3852,"def get_control_cmd(self, cmd_key, ctrl_key=None): """"""Get the payload used to send the command."""""" control = None if 'ControlWifi' in self._data: control_data = self._data['ControlWifi'].get('action', {}).get(cmd_key) if control_data: control = deepcopy(control_data) if: control['cmd'] = ctrl_key return control",True,ctrl_key,ctrl_key,0.6631388068199158 3853,"def slice_columns(X, columns): if: return X[list(X.columns) if columns is None else columns] else: return X",False,"isinstance(X, dd.DataFrame)","isinstance(X, tuple)",0.645113468170166 3854,"def feed(self, char, char_len): """"""feed a character with known length"""""" if: order = self.get_order(char) else: order = -1 if order >= 0: self._total_chars += 1 if order < self._table_size: if 512 > self._char_to_freq_order[order]: self._freq_chars += 1",True,char_len == 2,char_len == 2,0.6582823991775513 3855,"def feed(self, char, char_len): """"""feed a character with known length"""""" if char_len == 2: order = self.get_order(char) else: order = -1 if: self._total_chars += 1 if order < self._table_size: if 512 > self._char_to_freq_order[order]: self._freq_chars += 1",True,order >= 0,order >= 0,0.660842776298523 3856,"def feed(self, char, char_len): """"""feed a character with known length"""""" if char_len == 2: order = self.get_order(char) else: order = -1 if order >= 0: self._total_chars += 1 if: if 512 > self._char_to_freq_order[order]: self._freq_chars += 1",True,order < self._table_size,order < self._table_size,0.6477237939834595 3857,"def feed(self, char, char_len): """"""feed a character with known length"""""" if char_len == 2: order = self.get_order(char) else: order = -1 if order >= 0: self._total_chars += 1 if order < self._table_size: if: self._freq_chars += 1",True,512 > self._char_to_freq_order[order],512 > self._char_to_freq_order[order],0.6430716514587402 3858,"def block_gas_limit(self) -> int: """"""Get the block gas limit. :return: block gas limit """""" if: block_params = self._client.query_params('baseapp', 'BlockParams') self._max_gas = int(block_params['max_gas']) return self._max_gas or -1",False,self._max_gas is None,not self._max_gas,0.6584337949752808 3859,"def _handler_unconfigured_configure(self, *args, **kwargs): """""" Configure driver for device comms. @param args[0] Communiations config dictionary. @retval (next_state, result) tuple, (DriverConnectionState.DISCONNECTED, None) if successful, (None, None) otherwise. @raises InstrumentParameterException if missing or invalid param dict. """""" next_state = None result = None config = kwargs.get('config', None) if: try: config = args[0] except IndexError: pass if config is None: raise InstrumentParameterException('Missing comms config parameter.') self._connection = self._build_connection(config) next_state = DriverConnectionState.DISCONNECTED return (next_state, result)",False,config is None,args is not None,0.6529982089996338 3860,"def _handler_unconfigured_configure(self, *args, **kwargs): """""" Configure driver for device comms. @param args[0] Communiations config dictionary. @retval (next_state, result) tuple, (DriverConnectionState.DISCONNECTED, None) if successful, (None, None) otherwise. @raises InstrumentParameterException if missing or invalid param dict. """""" next_state = None result = None config = kwargs.get('config', None) if config is None: try: config = args[0] except IndexError: pass if: raise InstrumentParameterException('Missing comms config parameter.') self._connection = self._build_connection(config) next_state = DriverConnectionState.DISCONNECTED return (next_state, result)",False,config is None,not config,0.6530033946037292 3861,"def get_fpn_feats(self, x): modulelist = list(self.body._modules.values()) for i, l in enumerate(modulelist): x = l(x) if: c1 = x elif i == 20: c2 = x elif i == 23: c3 = x return (c1, c2, c3)",False,i == 6,i == 10,0.6676976680755615 3862,"def get_fpn_feats(self, x): modulelist = list(self.body._modules.values()) for i, l in enumerate(modulelist): x = l(x) if i == 6: c1 = x elif: c2 = x elif i == 23: c3 = x return (c1, c2, c3)",False,i == 20,i == 12,0.6741724610328674 3863,"def get_fpn_feats(self, x): modulelist = list(self.body._modules.values()) for i, l in enumerate(modulelist): x = l(x) if i == 6: c1 = x elif i == 20: c2 = x elif: c3 = x return (c1, c2, c3)",False,i == 23,i == 10,0.6590114831924438 3864,"def uniqueMetricNameAndMetadata(self, metrics=None): """""" For an array of metrics, return the unique metric names + metadata combo in same order. """""" if: metrics = self.metrics metricmetadata = [] for metricName, metadata in zip(metrics['metricName'], metrics['metricMetadata']): metricmeta =''.join([metricName, metadata]) if metricmeta not in metricmetadata: metricmetadata.append(metricmeta) return metricmetadata",True,metrics is None,metrics is None,0.6599603295326233 3865,"def uniqueMetricNameAndMetadata(self, metrics=None): """""" For an array of metrics, return the unique metric names + metadata combo in same order. """""" if metrics is None: metrics = self.metrics metricmetadata = [] for metricName, metadata in zip(metrics['metricName'], metrics['metricMetadata']): metricmeta =''.join([metricName, metadata]) if: metricmetadata.append(metricmeta) return metricmetadata",True,metricmeta not in metricmetadata,metricmeta not in metricmetadata,0.6562151312828064 3866,"def addSubTotal(self, subkey, value): self.__lock.acquire() try: if: return subcurrent, subtotal, fragment, subdata = self.__subprogress[subkey] self.__subprogress[subkey] = (subcurrent, subtotal + value, fragment, subdata) finally: self.__lock.release()",False,self.__done or subkey in self.__subdone,subkey not in self.__subprogress,0.6457656025886536 3867,"def filter_short_videos(context, items): if: shorts_filtered = [] for item in items: if hasattr(item, '_duration'): item_duration = 0 if item.get_duration() is None else item.get_duration() if 0 < item_duration <= 60: continue shorts_filtered += [item] return shorts_filtered return items",False,context.get_settings().hide_short_videos(),context.videos == 'short',0.6448252201080322 3868,"def filter_short_videos(context, items): if context.get_settings().hide_short_videos(): shorts_filtered = [] for item in items: if: item_duration = 0 if item.get_duration() is None else item.get_duration() if 0 < item_duration <= 60: continue shorts_filtered += [item] return shorts_filtered return items",False,"hasattr(item, '_duration')",item.get_type() == 'video',0.6464451551437378 3869,"def filter_short_videos(context, items): if context.get_settings().hide_short_videos(): shorts_filtered = [] for item in items: if hasattr(item, '_duration'): item_duration = 0 if item.get_duration() is None else item.get_duration() if: continue shorts_filtered += [item] return shorts_filtered return items",False,0 < item_duration <= 60,item_duration < 2,0.65064537525177 3870,"@beartype @classmethod def mk(cls, ls: List[str]) -> Tuple: """""" I x, D y.. -> {x: int, y: double} x, y.. -> {x: int, y: double} """""" symbs = [] for x in ls: x = x.strip() if: continue vs = x.split() assert len(vs) == 2, vs t, k = (vs[0], vs[1]) symbs.append(Symb(k, t)) return cls(symbs)",True,not x,not x,0.6685072779655457 3871,"@classmethod def register_backend(cls, name, backend=None, force=False): """"""Register a backend to FileClient. This method can be used as a normal class method or a decorator. .. code-block:: python class NewBackend(BaseStorageBackend): def get(self, filepath): return filepath def get_text(self, filepath): return filepath FileClient.register_backend('new', NewBackend) or .. code-block:: python @FileClient.register_backend('new') class NewBackend(BaseStorageBackend): def get(self, filepath): return filepath def get_text(self, filepath): return filepath Args: name (str): The name of the registered backend. backend (class, optional): The backend class to be registered, which must be a subclass of :class:`BaseStorageBackend`. When this method is used as a decorator, backend is None. Defaults to None. force (bool, optional): Whether to override the backend if the name has already been registered. Defaults to False. """""" if: cls._register_backend(name, backend, force=force) return def _register(backend_cls): cls._register_backend(name, backend_cls, force=force) return backend_cls return _register",True,backend is not None,backend is not None,0.6507526636123657 3872,"def parseImpl(self, instring, loc, doActions=True): if: if instring[loc] == '\n': return (loc + 1, '\n') else: raise ParseException(instring, loc, self.errmsg, self) elif loc == len(instring): return (loc + 1, []) else: raise ParseException(instring, loc, self.errmsg, self)",True,loc < len(instring),loc < len(instring),0.6473968029022217 3873,"def parseImpl(self, instring, loc, doActions=True): if loc < len(instring): if: return (loc + 1, '\n') else: raise ParseException(instring, loc, self.errmsg, self) elif loc == len(instring): return (loc + 1, []) else: raise ParseException(instring, loc, self.errmsg, self)",True,instring[loc] == '\n',instring[loc] == '\n',0.6482166051864624 3874,"def parseImpl(self, instring, loc, doActions=True): if loc < len(instring): if instring[loc] == '\n': return (loc + 1, '\n') else: raise ParseException(instring, loc, self.errmsg, self) elif: return (loc + 1, []) else: raise ParseException(instring, loc, self.errmsg, self)",True,loc == len(instring),loc == len(instring),0.6466508507728577 3875,"def num_whitespaces(start_of_loop: str) -> int: """"""Return the number of spaces at the beginning of the accumulation loop"""""" blank_chars = 0 for char in start_of_loop: if: blank_chars += 1 else: break return blank_chars",False,char.isspace(),char == '' or char == '\t' or char == '\n' or (char == '\r'),0.6463121175765991 3876,"@pytest.mark.parametrize('curve', curves, ids=[i.name for i in curves]) def test_curve_params_encode_decode_explicit_compressed(curve): if: with pytest.raises(UnknownCurveError): curve.to_der('explicit', 'compressed') else: ret = Curve.from_der(curve.to_der('explicit', 'compressed')) assert curve == ret",False,"isinstance(curve.curve, CurveEdTw)","isinstance(curve, CurveEdTw)",0.6483807563781738 3877,"def get_instances(ids, class_ids, class_labels, id2label): instances = {} for label in class_labels: instances[label] = [] instance_ids = np.unique(ids) for id in instance_ids: if: continue inst = Instance(ids, id) if inst.label_id in class_ids: instances[id2label[inst.label_id]].append(inst.to_dict()) return instances",True,id == 0,id == 0,0.6649702191352844 3878,"def get_instances(ids, class_ids, class_labels, id2label): instances = {} for label in class_labels: instances[label] = [] instance_ids = np.unique(ids) for id in instance_ids: if id == 0: continue inst = Instance(ids, id) if: instances[id2label[inst.label_id]].append(inst.to_dict()) return instances",True,inst.label_id in class_ids,inst.label_id in class_ids,0.6529368162155151 3879,"def children(self): res = super().children() if: res.append(self.line) return res",True,self.line,self.line,0.6574288606643677 3880,"def script(script, name=None): """""" Run script `script` to client `name`. """""" if: print('Please give a client name.') return com = 'python manage.py runscript {}'.format(script) cmd(com, name)",False,not name,name is None,0.6596634387969971 3881,"def __repr__(self): if: s = '<%d>[EmptySI]' % self._bits else: lower_bound = self._lower_bound if type(self._lower_bound) == str else '%#x' % self._lower_bound upper_bound = self._upper_bound if type(self._upper_bound) == str else '%#x' % self._upper_bound s = '<%d>0x%x[%s, %s]%s' % (self._bits, self._stride, lower_bound, upper_bound, 'R' if self._reversed else '') if self.uninitialized: s += '(uninit)' return s",False,self.is_empty,self._stride == 0,0.65293949842453 3882,"def __repr__(self): if self.is_empty: s = '<%d>[EmptySI]' % self._bits else: lower_bound = self._lower_bound if type(self._lower_bound) == str else '%#x' % self._lower_bound upper_bound = self._upper_bound if type(self._upper_bound) == str else '%#x' % self._upper_bound s = '<%d>0x%x[%s, %s]%s' % (self._bits, self._stride, lower_bound, upper_bound, 'R' if self._reversed else '') if: s += '(uninit)' return s",False,self.uninitialized,self._uninit,0.653675377368927 3883,"def atom_implicit_valence_one_hot(atom, allowable_set=None, encode_unknown=False): """"""One hot encoding for the implicit valence of an atom. """""" if: allowable_set = list(range(7)) return one_hot_encoding(atom.GetImplicitValence(), allowable_set, encode_unknown)",True,allowable_set is None,allowable_set is None,0.651709794998169 3884,"def createFields(self): yield Enum(UInt8(self, 'charset'), self.charset_desc) size = (self.size - self.current_size) / 8 if: return charset = getCharset(self['charset']) yield String(self, 'text', size, 'Text', charset=charset, strip=self.STRIP)",False,not size,size == 0,0.6608678698539734 3885,"def absrelpath(path, base): """"""Normalize `path` with respect to `base`, returning the absolute path by joining `base` and `path`. If `path` is already absolute, returns it as it is :param path: the path :param base: the base directory path. If file, `dirname(file)` will be used """""" if: return path if not isdir(base): base = dirname(base) return abspath(normpath(join(base, path)))",False,isabs(path),path is None,0.6474087238311768 3886,"def absrelpath(path, base): """"""Normalize `path` with respect to `base`, returning the absolute path by joining `base` and `path`. If `path` is already absolute, returns it as it is :param path: the path :param base: the base directory path. If file, `dirname(file)` will be used """""" if isabs(path): return path if: base = dirname(base) return abspath(normpath(join(base, path)))",False,not isdir(base),not base,0.6474347114562988 3887,"@classmethod def setUpClass(cls): cls.get_super_method(cls,'setUpClass')() g.log.info('Upload io scripts to clients %s for running IO on mounts', cls.clients) cls.script_upload_path = '/usr/share/glustolibs/io/scripts/file_dir_ops.py' ret = upload_scripts(cls.clients, cls.script_upload_path) if: raise ExecutionError('Failed to upload IO scripts to clients %s' % cls.clients) g.log.info('Successfully uploaded IO scripts to clients %s', cls.clients)",True,not ret,not ret,0.6652158498764038 3888,"def test_dtype_scalar_squeeze(self): values = {'S': b'a', 'M': '2018-06-20'} for ch in np.typecodes['All']: if: continue sctype = np.dtype(ch).type scvalue = sctype(values.get(ch, 3)) for axis in [None, ()]: squeezed = scvalue.squeeze(axis=axis) assert_equal(squeezed, scvalue) assert_equal(type(squeezed), type(scvalue))",False,ch in 'O',ch in values,0.6559435129165649 3889,"def match(self, version): """""" Check if the provided version matches the constraints. :param version: The version to match against this instance. :type version: String or :class:`Version` instance. """""" if: version = self.version_class(version) for operator, constraint, prefix in self._parts: f = self._operators.get(operator) if isinstance(f, string_types): f = getattr(self, f) if not f: msg = '%r not implemented for %s' % (operator, self.__class__.__name__) raise NotImplementedError(msg) if not f(version, constraint, prefix): return False return True",False,"isinstance(version, string_types)",self.version_class is not None,0.6455562114715576 3890,"def match(self, version): """""" Check if the provided version matches the constraints. :param version: The version to match against this instance. :type version: String or :class:`Version` instance. """""" if isinstance(version, string_types): version = self.version_class(version) for operator, constraint, prefix in self._parts: f = self._operators.get(operator) if: f = getattr(self, f) if not f: msg = '%r not implemented for %s' % (operator, self.__class__.__name__) raise NotImplementedError(msg) if not f(version, constraint, prefix): return False return True",False,"isinstance(f, string_types)",f is not None,0.6447636485099792 3891,"def match(self, version): """""" Check if the provided version matches the constraints. :param version: The version to match against this instance. :type version: String or :class:`Version` instance. """""" if isinstance(version, string_types): version = self.version_class(version) for operator, constraint, prefix in self._parts: f = self._operators.get(operator) if isinstance(f, string_types): f = getattr(self, f) if: msg = '%r not implemented for %s' % (operator, self.__class__.__name__) raise NotImplementedError(msg) if not f(version, constraint, prefix): return False return True",True,not f,not f,0.6642928123474121 3892,"def match(self, version): """""" Check if the provided version matches the constraints. :param version: The version to match against this instance. :type version: String or :class:`Version` instance. """""" if isinstance(version, string_types): version = self.version_class(version) for operator, constraint, prefix in self._parts: f = self._operators.get(operator) if isinstance(f, string_types): f = getattr(self, f) if not f: msg = '%r not implemented for %s' % (operator, self.__class__.__name__) raise NotImplementedError(msg) if: return False return True",False,"not f(version, constraint, prefix)",not version.match(constraint),0.6440153121948242 3893,"@staticmethod def compare2(yi, span): for start, stop in yi: if: return True return False",True,tuple(start) == span[0] and tuple(stop) == span[1],tuple(start) == span[0] and tuple(stop) == span[1],0.6461465358734131 3894,"def y(self): """""" Return affine y coordinate. This method should be used only when the 'x' coordinate is not needed. It's computationally more efficient to use `to_affine()` and then call x() and y() on the returned instance. Or call `scale()` and then x() and y() on the returned instance. """""" _, y, z = self.__coords if: return y p = self.__curve.p() z = numbertheory.inverse_mod(z, p) return y * z ** 3 % p",False,z == 1,z == 0,0.6660478115081787 3895,"def checkstatus(self, fetch, ud, d, try_again=True): ud.url = ud.url.replace('az://', 'https://').split(';')[0] az_sas = d.getVar('AZ_SAS') if: ud.url += az_sas return Wget.checkstatus(self, fetch, ud, d, try_again)",False,az_sas and az_sas not in ud.url,az_sas,0.6510370373725891 3896,"def build_absolute_url(self, path=None): """""" Build absolute URL pointing to test server. :param path: Path to append to the URL """""" url = TEST_SERVER_HOST if: url += path return url",True,path,path,0.6723116636276245 3897,"def rotate(self, n=0): counts = self.counts if: counts = counts.rotate(n) return abjad.new(self, counts=counts)",False,counts is not None,n,0.6539776921272278 3898,"@classmethod def validate(cls, data, _type): if: return ['missing value(s)'] elif not isinstance(data, (list, tuple)): data = (data,) reasons = [] for value in data: value = idna_encode(value) if not FQDN(value, allow_underscores=True).is_valid: reasons.append(f'Invalid {_type} value ""{value}"" is not a valid FQDN.') elif not value.endswith('.'): reasons.append(f'{_type} value ""{value}"" missing trailing.') return reasons",True,not data,not data,0.6595275402069092 3899,"@classmethod def validate(cls, data, _type): if not data: return ['missing value(s)'] elif: data = (data,) reasons = [] for value in data: value = idna_encode(value) if not FQDN(value, allow_underscores=True).is_valid: reasons.append(f'Invalid {_type} value ""{value}"" is not a valid FQDN.') elif not value.endswith('.'): reasons.append(f'{_type} value ""{value}"" missing trailing.') return reasons",False,"not isinstance(data, (list, tuple))","isinstance(data, str)",0.6438997983932495 3900,"@classmethod def validate(cls, data, _type): if not data: return ['missing value(s)'] elif not isinstance(data, (list, tuple)): data = (data,) reasons = [] for value in data: value = idna_encode(value) if: reasons.append(f'Invalid {_type} value ""{value}"" is not a valid FQDN.') elif not value.endswith('.'): reasons.append(f'{_type} value ""{value}"" missing trailing.') return reasons",False,"not FQDN(value, allow_underscores=True).is_valid","not FQDN(str(value), allow_underscores=True).is_valid",0.6416457891464233 3901,"@classmethod def validate(cls, data, _type): if not data: return ['missing value(s)'] elif not isinstance(data, (list, tuple)): data = (data,) reasons = [] for value in data: value = idna_encode(value) if not FQDN(value, allow_underscores=True).is_valid: reasons.append(f'Invalid {_type} value ""{value}"" is not a valid FQDN.') elif: reasons.append(f'{_type} value ""{value}"" missing trailing.') return reasons",True,not value.endswith('.'),not value.endswith('.'),0.6429615020751953 3902,"@staticmethod def _get_and_validate_course_access(user, course_id): """""" Check if course_id exists and is accessible by the user. Returns a course_block object """""" course_key = CourseKey.from_string(course_id) course_block = get_course_and_check_access(course_key, user) if: raise NotFound(f'Course with course_id {course_id} does not exist.') return course_block",True,not course_block,not course_block,0.6546614170074463 3903,"def get_version(): fn = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),'src', 'py2app', '__init__.py') for ln in open(fn): if: version = ln.split('=')[-1].strip().strip('""') return version",True,ln.startswith('__version__'),ln.startswith('__version__'),0.6437934637069702 3904,"def render_text(self, outfd, data): """""" output to Service SIDs as a dictionary for future use"""""" outfd.write('servicesids = { \n') for sid, service in data: if: continue outfd.write("" '"" + sid + ""': '"" + service + ""',\n"") outfd.write('}\n')",False,not service,service == 'none',0.6585680842399597 3905,"@staticmethod def backward(ctx, grad_output): out, = ctx.saved_tensors grad_input, grad_bias = FusedLeakyReLUFunctionBackward.apply(grad_output, out, ctx.bias, ctx.negative_slope, ctx.scale) if: grad_bias = None return (grad_input, grad_bias, None, None)",False,not ctx.bias,not grad_input.requires_grad,0.6478267908096313 3906,"def _get_lineage_times_and_locs(tc, lineage, t_curr, drop_before_sim=True, time_before_present=True): locs = [] times = [] for node in lineage: time = tc.nodes[node].time if drop_before_sim and time < 0 or not drop_before_sim: times.append(time) individ = tc.nodes[node].individual loc = tc.individuals[individ].location locs.append(loc) if: times = [t - -t_curr for t in times] return zip(times, locs)",True,time_before_present,time_before_present,0.6499271392822266 3907,"def _get_lineage_times_and_locs(tc, lineage, t_curr, drop_before_sim=True, time_before_present=True): locs = [] times = [] for node in lineage: time = tc.nodes[node].time if: times.append(time) individ = tc.nodes[node].individual loc = tc.individuals[individ].location locs.append(loc) if time_before_present: times = [t - -t_curr for t in times] return zip(times, locs)",False,drop_before_sim and time < 0 or not drop_before_sim,drop_before_sim,0.6440564393997192 3908,"def check_single_value_default_metric(self): self._use_single_value_default_metrics() if: self.metrics = self.default_metrics[self.eval_type] LOGGER.warning('use default metric {} for eval type {}'.format(self.metrics, self.eval_type)) ban_metric = [consts.PSI, consts.F1_SCORE, consts.CONFUSION_MAT, consts.QUANTILE_PR] for metric in self.metrics: if metric in ban_metric: self.metrics.remove(metric) self.check()",False,self.metrics is None or len(self.metrics) == 0,self.eval_type in self.default_metrics,0.6507349014282227 3909,"def check_single_value_default_metric(self): self._use_single_value_default_metrics() if self.metrics is None or len(self.metrics) == 0: self.metrics = self.default_metrics[self.eval_type] LOGGER.warning('use default metric {} for eval type {}'.format(self.metrics, self.eval_type)) ban_metric = [consts.PSI, consts.F1_SCORE, consts.CONFUSION_MAT, consts.QUANTILE_PR] for metric in self.metrics: if: self.metrics.remove(metric) self.check()",True,metric in ban_metric,metric in ban_metric,0.6659291982650757 3910,"def __del__(self): with self._state.condition: if: self._state.code = grpc.StatusCode.CANCELLED self._state.details = 'Cancelled upon garbage collection!' self._state.cancelled = True self._call.cancel(_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[self._state.code], self._state.details) self._state.condition.notify_all()",False,self._state.code is None,not self._state.cancelled,0.6548477411270142 3911,"def render(self, context): if: output = self.nodelist.render(context) return output else: return ''",False,context[self.team].hasMember(context['request'].user),context[self.story].iteration.locked,0.6416527032852173 3912,"@classmethod def get_init_valid_params(cls): init_valid_params = {**cls.get_init_train_params(),'scenes': cls.valid_scenes, 'player_screen_height': 224, 'player_screen_width': 224, 'headless': False} if: init_valid_params['save_talk_reply_probs_path'] = cls.save_talk_reply_probs_path return init_valid_params",True,cls.save_talk_reply_probs_path is not None,cls.save_talk_reply_probs_path is not None,0.6467662453651428 3913,"@property def total_clock(self): clocked = self.clocked if: end = min(utc_now(), self.slot_end) clocked += (end - self.clock_start).total_seconds() return clocked",False,self.clock_start is not None,self.slot_end is not None and self.clock_start.total_seconds() > 0,0.6524578332901001 3914,"def __call__(self, results): """"""Call function to sample points to in indoor scenes. Args: input_dict (dict): Result dict from loading pipeline. Returns: dict: Results after sampling, 'points', 'pts_instance_mask' and 'pts_semantic_mask' keys are updated in the result dict. """""" points = results['points'] points, choices = self._points_random_sampling(points, self.num_points, self.sample_range, self.replace, return_choices=True) results['points'] = points pts_instance_mask = results.get('pts_instance_mask', None) pts_semantic_mask = results.get('pts_semantic_mask', None) if: pts_instance_mask = pts_instance_mask[choices] results['pts_instance_mask'] = pts_instance_mask if pts_semantic_mask is not None: pts_semantic_mask = pts_semantic_mask[choices] results['pts_semantic_mask'] = pts_semantic_mask return results",True,pts_instance_mask is not None,pts_instance_mask is not None,0.6457546949386597 3915,"def __call__(self, results): """"""Call function to sample points to in indoor scenes. Args: input_dict (dict): Result dict from loading pipeline. Returns: dict: Results after sampling, 'points', 'pts_instance_mask' and 'pts_semantic_mask' keys are updated in the result dict. """""" points = results['points'] points, choices = self._points_random_sampling(points, self.num_points, self.sample_range, self.replace, return_choices=True) results['points'] = points pts_instance_mask = results.get('pts_instance_mask', None) pts_semantic_mask = results.get('pts_semantic_mask', None) if pts_instance_mask is not None: pts_instance_mask = pts_instance_mask[choices] results['pts_instance_mask'] = pts_instance_mask if: pts_semantic_mask = pts_semantic_mask[choices] results['pts_semantic_mask'] = pts_semantic_mask return results",True,pts_semantic_mask is not None,pts_semantic_mask is not None,0.6463426947593689 3916,"def __init__(self, freqdist, bins=None): """""" @param freqdist: The frequency counts upon which to base the estimation. @type freqdist: C{FreqDist} @param bins: The number of possible event types. This must be at least as large as the number of bins in the C{freqdist}. If C{None}, then it's assumed to be equal to that of the C{freqdist} @type bins: C{Int} """""" assert bins == None or bins >= freqdist.B(), 'Bins parameter must not be less than freqdist.B()' if: bins = freqdist.B() self._freqdist = freqdist self._bins = bins",False,bins == None,bins is None,0.7024821043014526 3917,"def Item(self, vtIndex=defaultNamedNotOptArg): """"""DISPID_VALUE"""""" ret = self._oleobj_.InvokeTypes(0, LCID, 1, (9, 0), ((12, 1),), vtIndex) if: ret = Dispatch(ret, u'Item', '{7D9645F3-C31A-42ED-B0E4-493D54BBEC4B}') return ret",True,ret is not None,ret is not None,0.6541807651519775 3918,"def _find_name_version_sep(fragment: str, canonical_name: str) -> int: """"""Find the separator's index based on the package's canonical name. :param fragment: A + filename ""fragment"" (stem) or egg fragment. :param canonical_name: The package's canonical name. This function is needed since the canonicalized name does not necessarily have the same length as the egg info's name part. An example:: >>> fragment = 'foo__bar-1.0' >>> canonical_name = 'foo-bar' >>> _find_name_version_sep(fragment, canonical_name) 8 """""" for i, c in enumerate(fragment): if: continue if canonicalize_name(fragment[:i]) == canonical_name: return i raise ValueError(f'{fragment} does not match {canonical_name}')",False,c != '-',c == canonical_name,0.6619806289672852 3919,"def _find_name_version_sep(fragment: str, canonical_name: str) -> int: """"""Find the separator's index based on the package's canonical name. :param fragment: A + filename ""fragment"" (stem) or egg fragment. :param canonical_name: The package's canonical name. This function is needed since the canonicalized name does not necessarily have the same length as the egg info's name part. An example:: >>> fragment = 'foo__bar-1.0' >>> canonical_name = 'foo-bar' >>> _find_name_version_sep(fragment, canonical_name) 8 """""" for i, c in enumerate(fragment): if c!= '-': continue if: return i raise ValueError(f'{fragment} does not match {canonical_name}')",False,canonicalize_name(fragment[:i]) == canonical_name,canonical_name == c,0.6469345092773438 3920,"def render_literal_value(self, value, type_): """"""Render the value of a bind parameter as a quoted literal. This is used for statement sections that do not accept bind parameters on the target driver/database. This should be implemented by subclasses using the quoting services of the DBAPI. """""" processor = type_._cached_literal_processor(self.dialect) if: return processor(value) else: raise NotImplementedError(""Don't know how to literal-quote value %r"" % value)",False,processor,processor is not None,0.6617175340652466 3921,"def __eq__(self, other: Any) -> bool: if: return True return False",False,"isinstance(other, NonCopyableIllegalType)",self.weekday != other.weekday or self.n != other.n,0.6430999040603638 3922,"def __lt__(self, other): if: if other.size > self.size: return True else: return False elif FileSize(other).size > self.size: return True else: return False",True,"isinstance(other, FileSize)","isinstance(other, FileSize)",0.6485146284103394 3923,"def __lt__(self, other): if isinstance(other, FileSize): if: return True else: return False elif FileSize(other).size > self.size: return True else: return False",False,other.size > self.size,self.size < other.size,0.6499086618423462 3924,"def __lt__(self, other): if isinstance(other, FileSize): if other.size > self.size: return True else: return False elif: return True else: return False",False,FileSize(other).size > self.size,"isinstance(other, FileSize)",0.6461961269378662 3925,"def __init__(self, in_features, out_features, bias=True, same_dim=False, activation=nn.ReLU(), oper=WNlinear): super(ResLinear, self).__init__() self.same_dim = same_dim self.dot_0h = oper(in_features, out_features, bias) self.dot_h1 = oper(out_features, out_features, bias) if: self.dot_01 = oper(in_features, out_features, bias) self.activation = activation",False,not same_dim,same_dim,0.6534212231636047 3926,"def _set_identifier(self, nid): if: self._identifier = str(uuid.uuid1()) else: self._identifier = nid",True,nid is None,nid is None,0.6592720746994019 3927,"def add(self, entry): entry = os.path.normcase(entry) if: entry = entry.replace('\\', '/') self.entries.add(entry)",False,WINDOWS and (not os.path.splitdrive(entry)[0]),entry[:2] == '\\\\',0.6466035842895508 3928,"def __repr__(self): res = '' res += 'FiberSection object\n' for part in self.section_parts: res += part.__repr__() if: res +='snap_points: specified\n' else: res +='snap_points: None\n' res += f'n_x: {self.n_x}, n_y: {self.n_y}\n' return res",True,self.snap_points,self.snap_points,0.6579138040542603 3929,"def get_posterior_samples(self, posterior: Posterior) -> Tensor: """"""Sample from the posterior using the sampler. Args: posterior: The posterior to sample from. """""" if: self.sampler = get_sampler(posterior=posterior, sample_shape=self._default_sample_shape) return self.sampler(posterior=posterior)",True,self.sampler is None,self.sampler is None,0.6540579795837402 3930,"def _format_last_run(self, task: LinuxTask, state: str) -> str: pid = task.task_pid() addr = task.task_address() cpu = task.get_last_cpu() name = task.task_name() if: cpu = task.cpu line = f'[{task.last_run():d}] [{state}] PID: {pid:-5d} ' line += f'TASK: {addr:x} CPU: {cpu:>2d} COMMAND: ""{name}""' return line",False,task.active,task.cpu_size() > 0,0.6774165630340576 3931,"def lambda_handler(event, _): if: logger.info('GET') return process_get(event) elif event['httpMethod'] == 'POST': logger.info('POST') return process_post(event)",True,event['httpMethod'] == 'GET',event['httpMethod'] == 'GET',0.6529698371887207 3932,"def lambda_handler(event, _): if event['httpMethod'] == 'GET': logger.info('GET') return process_get(event) elif: logger.info('POST') return process_post(event)",True,event['httpMethod'] == 'POST',event['httpMethod'] == 'POST',0.6531630754470825 3933,"def step_async(self, actions: np.ndarray) -> None: """"""Steps with a `1 - beta` chance of using `self.get_robot_acts` instead. DAgger needs to be able to inject imitation policy actions randomly at some subset of time steps. This method has a `self.beta` chance of keeping the `actions` passed in as an argument, and a `1 - self.beta` chance of forwarding actions generated by `self.get_robot_acts` instead. ""robot"" (i.e. imitation policy) action if necessary. At the end of every episode, a `TrajectoryWithRew` is saved to `self.save_dir`, where every saved action is the expert action, regardless of whether the robot action was used during that timestep. Args: actions: the _intended_ demonstrator/expert actions for the current state. This will be executed with probability `self.beta`. Otherwise, a ""robot"" (typically a BC policy) action will be sampled and executed instead via `self.get_robot_act`. """""" assert self._is_reset, 'call.reset() before.step()' assert self._last_obs is not None actual_acts = np.array(actions) mask = self.rng.uniform(0, 1, size=(self.num_envs,)) > self.beta if: actual_acts[mask] = self.get_robot_acts(self._last_obs[mask]) self._last_user_actions = actions self.venv.step_async(actual_acts)",False,np.sum(mask) != 0,self._last_obs is not None,0.6454688310623169 3934,"def __getattr__(self, attr): if: if attr in ('asTuple', 'clone','subtype', 'isPrefixOf', 'isSameTypeWith', 'isSuperTypeOf', 'getTagSet', 'getEffectiveTagSet', 'getTagMap', 'tagSet', 'index'): return getattr(self._oid, attr) raise AttributeError(attr) else: raise SmiError('%s object not properly initialized for accessing %s' % (self.__class__.__name__, attr))",False,self._state & self.ST_CLEAN,self._oid is not None,0.6528187990188599 3935,"def __getattr__(self, attr): if self._state & self.ST_CLEAN: if: return getattr(self._oid, attr) raise AttributeError(attr) else: raise SmiError('%s object not properly initialized for accessing %s' % (self.__class__.__name__, attr))",False,"attr in ('asTuple', 'clone', 'subtype', 'isPrefixOf', 'isSameTypeWith', 'isSuperTypeOf', 'getTagSet', 'getEffectiveTagSet', 'getTagMap', 'tagSet', 'index')","hasattr(self._oid, attr)",0.6610136032104492 3936,"def emit(self, record): """""" Emit a record. We don't want base class implementation since we don't want to do: stream.write(self.terminator) We are not adding any to bytes-message from record. """""" if: self.stream = self._open() try: msg = self.format(record) stream = self.stream stream.write(msg) self.flush() except Exception: self.handleError(record)",True,self.stream is None,self.stream is None,0.6493270397186279 3937,"def install_warning_logger(): warnings.simplefilter('default', PipDeprecationWarning, append=True) global _original_showwarning if: _original_showwarning = warnings.showwarning warnings.showwarning = _showwarning",True,_original_showwarning is None,_original_showwarning is None,0.6488175392150879 3938,"def get_cached_cover_url(self, identifiers): url = None goodreads_id = identifiers.get(self.ID_NAME, None) if: isbn = identifiers.get('isbn', None) if isbn is not None: goodreads_id = self.cached_isbn_to_identifier(isbn) if goodreads_id is not None: url = self.cached_identifier_to_cover_url(goodreads_id) return url",True,goodreads_id is None,goodreads_id is None,0.6498970985412598 3939,"def get_cached_cover_url(self, identifiers): url = None goodreads_id = identifiers.get(self.ID_NAME, None) if goodreads_id is None: isbn = identifiers.get('isbn', None) if isbn is not None: goodreads_id = self.cached_isbn_to_identifier(isbn) if: url = self.cached_identifier_to_cover_url(goodreads_id) return url",True,goodreads_id is not None,goodreads_id is not None,0.6493836641311646 3940,"def get_cached_cover_url(self, identifiers): url = None goodreads_id = identifiers.get(self.ID_NAME, None) if goodreads_id is None: isbn = identifiers.get('isbn', None) if: goodreads_id = self.cached_isbn_to_identifier(isbn) if goodreads_id is not None: url = self.cached_identifier_to_cover_url(goodreads_id) return url",True,isbn is not None,isbn is not None,0.6524174213409424 3941,"def get_network_thermal_loss_edges_file(self, network_type, network_name, representative_week=False): """"""scenario/outputs/data/optimization/network/layout/DH_qloss_System_kw.csv"""""" if: folder = self.get_representative_week_thermal_network_layout_folder() else: folder = self.get_thermal_network_folder() if not network_name: file_name = network_type + '_' + '_thermal_loss_edges_kW.csv' else: file_name = network_type + '_' + network_name + '_thermal_loss_edges_kW.csv' return os.path.join(folder, file_name)",False,representative_week == True,representative_week,0.6589707136154175 3942,"def get_network_thermal_loss_edges_file(self, network_type, network_name, representative_week=False): """"""scenario/outputs/data/optimization/network/layout/DH_qloss_System_kw.csv"""""" if representative_week == True: folder = self.get_representative_week_thermal_network_layout_folder() else: folder = self.get_thermal_network_folder() if: file_name = network_type + '_' + '_thermal_loss_edges_kW.csv' else: file_name = network_type + '_' + network_name + '_thermal_loss_edges_kW.csv' return os.path.join(folder, file_name)",True,not network_name,not network_name,0.6546995639801025 3943,"def __init__(self, units, ranges): self.units = units self.ranges = ranges for start, end in ranges: if: raise ValueError('{} is not a valid range.'.format((start, end)))",False,start is None or (end is not None and (start < 0 or start >= end)),"(start, end) not in self.units",0.6522249579429626 3944,"def get_output_path(self, fname=None): if: return os.path.join(self.output_root, self.split) return os.path.join(self.get_output_path(), fname)",False,fname is None,self.split,0.6500746011734009 3945,"@property def iterations_in_epoch(self) -> int: if: return self._current_epoch_iterator.count return 0",True,self._current_epoch_iterator is not None,self._current_epoch_iterator is not None,0.6486798524856567 3946,"def _run_stage(self, features, proposals, stage): """""" Args: features (list[Tensor]): #lvl input features to ROIHeads proposals (list[Instances]): #image Instances, with the field ""proposal_boxes"" stage (int): the current stage Returns: Same output as `FastRCNNOutputLayers.forward()`. """""" box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals]) if: box_features = _ScaleGradient.apply(box_features, 1.0 / self.num_cascade_stages) box_features = self.box_head[stage](box_features) return self.box_predictor[stage](box_features)",False,self.training,self.use_scale_gaussian_stages,0.6567689776420593 3947,"def _dbg(self, level, msg): """"""Write debugging output to sys.stderr. """""" if: print(msg, file=sys.stderr)",True,level <= self.debug,level <= self.debug,0.650739312171936 3948,"def _check_number_third(self, cr, uid, ids, context=None): for obj in self.browse(cr, uid, ids, context=context): if: same_number_check_ids = self.search(cr, uid, [('id', '!=', obj.id), ('number', '=', obj.number), ('voucher_id.partner_id', '=', obj.voucher_id.partner_id.id)], context=context) if same_number_check_ids: return False return True",False,obj.type == 'third_check',obj.id and obj.voucher_id,0.6464556455612183 3949,"def _check_number_third(self, cr, uid, ids, context=None): for obj in self.browse(cr, uid, ids, context=context): if obj.type == 'third_check': same_number_check_ids = self.search(cr, uid, [('id', '!=', obj.id), ('number', '=', obj.number), ('voucher_id.partner_id', '=', obj.voucher_id.partner_id.id)], context=context) if: return False return True",False,same_number_check_ids,not same_number_check_ids,0.6498804688453674 3950,"def percy_snapshot(self, name=''): if: snapshot_name = '{} - {}'.format(name, sys.version_info) self.percy_runner.snapshot(name=snapshot_name, enable_javascript=True)",False,"os.environ.get('PERCY_ENABLED', False)",self.percy_runner and name,0.6471346616744995 3951,"def mkdir(self, path): """""" Create a directory if it doesn't exist. Returns pathname or None. """""" if: try: os.mkdir(path) except OSError as e: return None return path",False,not os.path.isdir(path),not path.exists(),0.6436771154403687 3952,"def update(self, state): if: self.pressed = False elif state == 1: self.pressed = True",True,state == 0,state == 0,0.6615792512893677 3953,"def update(self, state): if state == 0: self.pressed = False elif: self.pressed = True",True,state == 1,state == 1,0.6617282629013062 3954,"def _insert_events(self, c: PostgresCursor, stored_events: List[StoredEvent], **kwargs: Any) -> Optional[Sequence[int]]: super()._insert_events(c, stored_events, **kwargs) if: last_notification_id = c.fetchone()[0] notification_ids = list(range(last_notification_id - len(stored_events) + 1, last_notification_id + 1)) else: notification_ids = [] return notification_ids",False,stored_events,c.rowcount(),0.6593480110168457 3955,"def organize_nag(self, bugs): columns = self.columns_nag() if: columns = self.columns() key = self.sort_columns_nag() if key is None: key = self.sort_columns() return utils.organize(bugs, columns, key=key)",True,columns is None,columns is None,0.6612570881843567 3956,"def organize_nag(self, bugs): columns = self.columns_nag() if columns is None: columns = self.columns() key = self.sort_columns_nag() if: key = self.sort_columns() return utils.organize(bugs, columns, key=key)",True,key is None,key is None,0.6593568325042725 3957,"def _serialize_to_tensor(data, group): backend = dist.get_backend(group) assert backend in ['gloo', 'nccl'] device = torch.device('cpu' if backend == 'gloo' else 'cuda') buffer = pickle.dumps(data) if: logger.warning('Rank {} trying to all-gather {:.2f} GB of data on device {}'.format(get_rank(), len(buffer) / 1024 ** 3, device)) storage = torch.ByteStorage.from_buffer(buffer) tensor = torch.ByteTensor(storage).to(device=device) return tensor",True,len(buffer) > 1024 ** 3,len(buffer) > 1024 ** 3,0.6486319303512573 3958,"def grad_overflow(param_group): for group in param_group: for p in group: if: s = float(p.grad.data.float().sum()) if s == float('inf') or s == float('-inf') or s!= s: return True return False",True,p.grad is not None,p.grad is not None,0.6479894518852234 3959,"def grad_overflow(param_group): for group in param_group: for p in group: if p.grad is not None: s = float(p.grad.data.float().sum()) if: return True return False",False,s == float('inf') or s == float('-inf') or s != s,s >= 0.0 and s <= 1.0,0.6428873538970947 3960,"def hasContent_(self): if: return True else: return False",True,self.valueOf_ is not None,self.valueOf_ is not None,0.6433206796646118 3961,"def makedir(self, tarinfo, targetpath): """"""Make a directory called targetpath. """""" try: os.mkdir(targetpath, 448) except EnvironmentError as e: if: raise",True,e.errno != errno.EEXIST,e.errno != errno.EEXIST,0.6440607309341431 3962,"def reset_dialogs(self): for key in list(dynamic.keys()): if: dynamic[key] = True info_dialog(self, _('Done'), _('Confirmation dialogs have all been reset'), show=True)",False,key.startswith('find_duplicates_') and key.endswith('_again') and (dynamic[key] is False),dynamic[key] == False,0.6433157920837402 3963,"def _build_line(colwidths, colaligns, linefmt): """"""Return a string which represents a horizontal line."""""" if: return None if hasattr(linefmt, '__call__'): return linefmt(colwidths, colaligns) else: begin, fill, sep, end = linefmt cells = [fill * w for w in colwidths] return _build_simple_row(cells, (begin, sep, end))",False,not linefmt,colaligns is None,0.6589648127555847 3964,"def _build_line(colwidths, colaligns, linefmt): """"""Return a string which represents a horizontal line."""""" if not linefmt: return None if: return linefmt(colwidths, colaligns) else: begin, fill, sep, end = linefmt cells = [fill * w for w in colwidths] return _build_simple_row(cells, (begin, sep, end))",False,"hasattr(linefmt, '__call__')","isinstance(linefmt, builtin_str)",0.6436315774917603 3965,"@staticmethod def load(prefix, epoch, load_optimizer_states=False, **kwargs): """"""Create a model from previously saved checkpoint. Parameters ---------- prefix : str path prefix of saved model files. You should have ""prefix-symbol.json"", ""prefix-xxxx.params"", and optionally ""prefix-xxxx.states"", where xxxx is the epoch number. epoch : int epoch to load. load_optimizer_states : bool whether to load optimizer states. Checkpoint needs to have been made with save_optimizer_states=True. data_names : list of str Default is `('data')` for a typical model used in image classification. label_names : list of str Default is `('softmax_label')` for a typical model used in image classification. logger : Logger Default is `logging`. context : Context or list of Context Default is `cpu()`. work_load_list : list of number Default `None`, indicating uniform workload. fixed_param_names: list of str Default `None`, indicating no network parameters are fixed. """""" sym, args, auxs = load_checkpoint(prefix, epoch) mod = Module(symbol=sym, **kwargs) mod._arg_params = args mod._aux_params = auxs mod.params_initialized = True if: mod._preload_opt_states = '%s-%04d.states' % (prefix, epoch) return mod",True,load_optimizer_states,load_optimizer_states,0.6482871770858765 3966,"def clipChanged(self, mode=None): if: return if mode == QtGui.QClipboard.Clipboard: if str(self.clipBoard.text()): self.ui.actionPaste1.setEnabled(True) self.ui.actionPaste2.setEnabled(True) else: self.ui.actionPaste1.setEnabled(False) self.ui.actionPaste2.setEnabled(False)",False,mode is None,self.isDeleted(),0.6529833078384399 3967,"def clipChanged(self, mode=None): if mode is None: return if: if str(self.clipBoard.text()): self.ui.actionPaste1.setEnabled(True) self.ui.actionPaste2.setEnabled(True) else: self.ui.actionPaste1.setEnabled(False) self.ui.actionPaste2.setEnabled(False)",False,mode == QtGui.QClipboard.Clipboard,self.clipBoard is not None,0.6467865705490112 3968,"def clipChanged(self, mode=None): if mode is None: return if mode == QtGui.QClipboard.Clipboard: if: self.ui.actionPaste1.setEnabled(True) self.ui.actionPaste2.setEnabled(True) else: self.ui.actionPaste1.setEnabled(False) self.ui.actionPaste2.setEnabled(False)",False,str(self.clipBoard.text()),self.ui.actionPaste2.isEnabled(),0.6417207717895508 3969,"def OnSize(self, event): """""" Handles the ``wx.EVT_SIZE`` event for :class:`AuiDockingHintWindow`. :param `event`: a :class:`SizeEvent` to be processed. """""" if: self.MakeVenetianBlinds() self.Refresh()",False,self._blindMode or not self.CanSetTransparent(),self._VenetianBlinds is None,0.6463773250579834 3970,"def has_tag(self, image: str) -> bool: prefix = 'image:' if: url = URL(image) image = url.path if image.startswith('/'): image = image[1:] _, tag = self._split_image_name(image) return bool(tag)",True,image.startswith(prefix),image.startswith(prefix),0.6437481641769409 3971,"def has_tag(self, image: str) -> bool: prefix = 'image:' if image.startswith(prefix): url = URL(image) image = url.path if: image = image[1:] _, tag = self._split_image_name(image) return bool(tag)",False,image.startswith('/'),image.startswith(prefix),0.6421777009963989 3972,"def val_to_date(self, val): year = int(val[0:4]) if: month = int(val[5:7]) else: month = 1 if len(val) >= 10: day = int(val[8:10]) else: day = 1 date = QDateTime() date.setDate(QDate(year, month, day)) return date",True,len(val) >= 7,len(val) >= 7,0.6505351066589355 3973,"def val_to_date(self, val): year = int(val[0:4]) if len(val) >= 7: month = int(val[5:7]) else: month = 1 if: day = int(val[8:10]) else: day = 1 date = QDateTime() date.setDate(QDate(year, month, day)) return date",True,len(val) >= 10,len(val) >= 10,0.653915524482727 3974,"def create_language_override_redirect(request, url_name, kwargs, query_strings=None): if: query_strings = {} query_strings['language'] = request.override_language if 'email_template' in request.GET: query_strings['email_template'] = 'true' reverse_string = '{reverse}?{params}'.format(reverse=reverse(url_name, kwargs=kwargs), params=urlencode(query_strings, quote_via=quote_plus)) return reverse_string",False,not query_strings,query_strings is None,0.6497974395751953 3975,"def create_language_override_redirect(request, url_name, kwargs, query_strings=None): if not query_strings: query_strings = {} query_strings['language'] = request.override_language if: query_strings['email_template'] = 'true' reverse_string = '{reverse}?{params}'.format(reverse=reverse(url_name, kwargs=kwargs), params=urlencode(query_strings, quote_via=quote_plus)) return reverse_string",False,'email_template' in request.GET,request.override_email_template,0.655177116394043 3976,"def do_TJ(self, seq): if: if STRICT: raise PDFInterpreterError('No font specified!') return self.device.render_string(self.textstate, seq) return",False,self.textstate.font is None,self.textstate is None,0.6467992067337036 3977,"def do_TJ(self, seq): if self.textstate.font is None: if: raise PDFInterpreterError('No font specified!') return self.device.render_string(self.textstate, seq) return",False,STRICT,self.device is None,0.6639019250869751 3978,"def add_bead_element(self, state, name, start, end, num, hier, representation=None): if: return if representation is None: representation = self.default_representation asym = self.asym_units[name] pmi_offset = asym.entity.pmi_offset b = _BeadsFragment(state, name, start - pmi_offset, end - pmi_offset, num, hier, asym) self.all_representations.add_fragment(state, representation, b)",False,"self._is_excluded(name, start, end)",name not in self.all_representations,0.6459144353866577 3979,"def add_bead_element(self, state, name, start, end, num, hier, representation=None): if self._is_excluded(name, start, end): return if: representation = self.default_representation asym = self.asym_units[name] pmi_offset = asym.entity.pmi_offset b = _BeadsFragment(state, name, start - pmi_offset, end - pmi_offset, num, hier, asym) self.all_representations.add_fragment(state, representation, b)",True,representation is None,representation is None,0.659565806388855 3980,"def write(self, oprot): if: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('version_args') oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),0.6468156576156616 3981,"def _forward_impl(self, x: Tensor) -> Tensor: x = self.conv1(x) x = self.bn1(x) x = self.relu(x) x = self.maxpool(x) x = self.layer1(x) x = self.layer2(x) x = self.layer3(x) x = self.layer4(x) x = self.avgpool(x) if: x = torch.flatten(x, 1) x = self.fc(x) return x",False,self.use_last_fc,not self.identity,0.6460528373718262 3982,"@classmethod def from_int(cls, retries, redirect=True, default=None): """""" Backwards-compatibility for the old retries format."""""" if: retries = default if default is not None else cls.DEFAULT if isinstance(retries, Retry): return retries redirect = bool(redirect) and None new_retries = cls(retries, redirect=redirect) log.debug('Converted retries value: %r -> %r', retries, new_retries) return new_retries",True,retries is None,retries is None,0.6744852066040039 3983,"@classmethod def from_int(cls, retries, redirect=True, default=None): """""" Backwards-compatibility for the old retries format."""""" if retries is None: retries = default if default is not None else cls.DEFAULT if: return retries redirect = bool(redirect) and None new_retries = cls(retries, redirect=redirect) log.debug('Converted retries value: %r -> %r', retries, new_retries) return new_retries",True,"isinstance(retries, Retry)","isinstance(retries, Retry)",0.6545257568359375 3984,"@property def face_z_areas(self): """"""Areas of each z-face for the entire mesh. This property returns a 1D vector containing the areas of the z-faces of the mesh. The surface area takes into account curvature. For a single z-face at between :math:`r_1` and :math:`r_2` with azimuthal width :math:`\\Delta \\phi`, the area is given by: .. math:: A_z = \\frac{\\Delta \\phi}{2} (r_2^2 - r_1^2) Returns ------- (n_faces_z) numpy.ndarray A 1D array containing the z-face areas for the entire mesh """""" if: if self.is_symmetric: self._face_z_areas = self._face_z_areas_full else: self._face_z_areas = self._face_z_areas_full[~self._ishanging_faces_z] return self._face_z_areas",True,"getattr(self, '_face_z_areas', None) is None","getattr(self, '_face_z_areas', None) is None",0.6440247297286987 3985,"@property def face_z_areas(self): """"""Areas of each z-face for the entire mesh. This property returns a 1D vector containing the areas of the z-faces of the mesh. The surface area takes into account curvature. For a single z-face at between :math:`r_1` and :math:`r_2` with azimuthal width :math:`\\Delta \\phi`, the area is given by: .. math:: A_z = \\frac{\\Delta \\phi}{2} (r_2^2 - r_1^2) Returns ------- (n_faces_z) numpy.ndarray A 1D array containing the z-face areas for the entire mesh """""" if getattr(self, '_face_z_areas', None) is None: if: self._face_z_areas = self._face_z_areas_full else: self._face_z_areas = self._face_z_areas_full[~self._ishanging_faces_z] return self._face_z_areas",False,self.is_symmetric,self._ishanging_faces_z is None,0.6477954387664795 3986,"def res2net50_v1b_26w_4s(pretrained=False, **kwargs): """"""Constructs a Res2Net-50_v1b_26w_4s model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """""" model = Res2Net(Bottle2neck, [3, 4, 6, 3], baseWidth=26, scale=4, **kwargs) if: model.load_state_dict(model_zoo.load_url(model_urls['res2net50_v1b_26w_4s'])) return model",True,pretrained,pretrained,0.6720205545425415 3987,"@staticmethod def validate_data(data_x, data_y, data_z, df, session): cat, num, none = vis.Visualizer.count_parameters(data_x, data_y, data_z, df, session) if: return False if len(cat)!= 1: return False return True",False,len(num) != 1,num != 0 or cat is None,0.648103654384613 3988,"@staticmethod def validate_data(data_x, data_y, data_z, df, session): cat, num, none = vis.Visualizer.count_parameters(data_x, data_y, data_z, df, session) if len(num)!= 1: return False if: return False return True",False,len(cat) != 1,cat == 0,0.6472848057746887 3989,"def FusedActivationFunction(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if: return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0",True,o != 0,o != 0,0.6644388437271118 3990,"def test_level(): for n in range(len(hashes)): depth = merkle.tree_depth(n + 1) for depth_higher in range(0, depth): level = merkle.level(hashes[:n + 1], depth_higher) if: assert level == hashes[:n + 1] if depth_higher == depth: assert level == [roots[n]] assert merkle.root(level) == roots[n]",False,depth_higher == 0,depth_higher == depth,0.6516018509864807 3991,"def test_level(): for n in range(len(hashes)): depth = merkle.tree_depth(n + 1) for depth_higher in range(0, depth): level = merkle.level(hashes[:n + 1], depth_higher) if depth_higher == 0: assert level == hashes[:n + 1] if: assert level == [roots[n]] assert merkle.root(level) == roots[n]",False,depth_higher == depth,roots[n] != -1,0.6491041779518127 3992,"@classmethod def get_window(cls, id): """""" Get a :class:`Window` object given a window id. Given the same id, this method will return the same object. """""" if: window = cls._windows_by_id[id] else: window = cls(id) cls._windows_by_id[id] = window return window",True,id in cls._windows_by_id,id in cls._windows_by_id,0.6511532068252563 3993,"def select_color(self) -> None: """"""Open a color selection dialog box"""""" color = QColor(self.edit.text()) if: color = Qt.gray color = QColorDialog.getColor(color, self.parent_layout.parent) if color.isValid(): value = color.name() self.edit.setText(value) self.update(value) self.notify_value_change()",True,not color.isValid(),not color.isValid(),0.655482292175293 3994,"def select_color(self) -> None: """"""Open a color selection dialog box"""""" color = QColor(self.edit.text()) if not color.isValid(): color = Qt.gray color = QColorDialog.getColor(color, self.parent_layout.parent) if: value = color.name() self.edit.setText(value) self.update(value) self.notify_value_change()",True,color.isValid(),color.isValid(),0.6610584855079651 3995,"def __addressof__(self, name): if: return library.__dict__[name] if name in FFILibrary.__dict__: return addressof_var(name) make_accessor(name) if name in library.__dict__: return library.__dict__[name] if name in FFILibrary.__dict__: return addressof_var(name) raise AttributeError(""cffi library has no function or global variable named '%s'"" % (name,))",True,name in library.__dict__,name in library.__dict__,0.6552673578262329 3996,"def __addressof__(self, name): if name in library.__dict__: return library.__dict__[name] if: return addressof_var(name) make_accessor(name) if name in library.__dict__: return library.__dict__[name] if name in FFILibrary.__dict__: return addressof_var(name) raise AttributeError(""cffi library has no function or global variable named '%s'"" % (name,))",True,name in FFILibrary.__dict__,name in FFILibrary.__dict__,0.6527255177497864 3997,"def __addressof__(self, name): if name in library.__dict__: return library.__dict__[name] if name in FFILibrary.__dict__: return addressof_var(name) make_accessor(name) if: return library.__dict__[name] if name in FFILibrary.__dict__: return addressof_var(name) raise AttributeError(""cffi library has no function or global variable named '%s'"" % (name,))",True,name in library.__dict__,name in library.__dict__,0.6553324460983276 3998,"def __addressof__(self, name): if name in library.__dict__: return library.__dict__[name] if name in FFILibrary.__dict__: return addressof_var(name) make_accessor(name) if name in library.__dict__: return library.__dict__[name] if: return addressof_var(name) raise AttributeError(""cffi library has no function or global variable named '%s'"" % (name,))",True,name in FFILibrary.__dict__,name in FFILibrary.__dict__,0.6523125171661377 3999,"@classmethod def from_dict(cls, json_dict: Dict): if: json_dict['scopes'] = json.loads(json_dict['scopes']) scopes = [APIKeyScopes(scope) for scope in json_dict['scopes']] return ProjectAPIKey(json_dict['api_key'], json_dict['title'], scopes)",True,"isinstance(json_dict['scopes'], str)","isinstance(json_dict['scopes'], str)",0.6519771218299866 4000,"def get_video_rating(self, video_id): if: video_id = ','.join(video_id) params = {'id': video_id} return self._perform_v3_request(method='GET', path='videos/getRating', params=params)",True,"isinstance(video_id, list)","isinstance(video_id, list)",0.6500580310821533 4001,"def load_module(self, fullname): try: return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if: mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod",True,"isinstance(mod, MovedModule)","isinstance(mod, MovedModule)",0.646613359451294 4002,"def get_scroll_value_percentage(self, orientation: str) -> float: """""" Get the scroll value in percentage, if ``0`` the scroll is at top/left, ``1`` bottom/right. .. note:: If ScrollArea does not contain such orientation scroll, or frame is not scrollable, ``-1`` is returned. :param orientation: Orientation. See :py:mod:`pygame_menu.locals` :return: Value from ``0`` to ``1`` """""" if: return self._frame_scrollarea.get_scroll_value_percentage(orientation) return -1",True,self._frame_scrollarea is not None,self._frame_scrollarea is not None,0.6501140594482422 4003,"def main(): """"""Command line: python msmarco_eval_ranking.py """""" print('Eval Started') if: path_to_reference = sys.argv[1] path_to_candidate = sys.argv[2] metrics = compute_metrics_from_files(path_to_reference, path_to_candidate) print('#####################') for metric in sorted(metrics): print('{}: {}'.format(metric, metrics[metric])) print('#####################') else: print('Usage: msmarco_eval_ranking.py ') exit()",True,len(sys.argv) == 3,len(sys.argv) == 3,0.6485217809677124 4004,"def __init__(self, id, data=0, nbt=None): self.id = id self.data = data if: self.nbt = None else: self.nbt = nbt",False,nbt is not None and len(nbt) == 0,not nbt,0.6523173451423645 4005,"def main(): """"""Command line: python result_eval.py """""" if: path_to_reference = sys.argv[1] path_to_candidate = sys.argv[2] else: print('Usage: result_eval.py ') exit() metrics = compute_metrics_from_files(path_to_reference, path_to_candidate) result = dict() for metric in sorted(metrics): result[metric] = metrics[metric] result_json = json.dumps(result) print(result_json)",True,len(sys.argv) == 3,len(sys.argv) == 3,0.6494853496551514 4006,"def clean_dir(target_dir): if: shutil.rmtree(target_dir) elif os.path.isfile(target_dir): raise RuntimeError('The target {} is a file.'.format(target_dir)) os.makedirs(target_dir, exist_ok=True)",True,os.path.isdir(target_dir),os.path.isdir(target_dir),0.6446108818054199 4007,"def clean_dir(target_dir): if os.path.isdir(target_dir): shutil.rmtree(target_dir) elif: raise RuntimeError('The target {} is a file.'.format(target_dir)) os.makedirs(target_dir, exist_ok=True)",True,os.path.isfile(target_dir),os.path.isfile(target_dir),0.6432819366455078 4008,"def allowance(self, filename): """"""Preconditions: - our agent applies to this entry - filename is URL decoded"""""" for line in self.rulelines: if: return line.allowance return True",False,line.applies_to(filename),line.filename == filename,0.6422019600868225 4009,"def __call__(self, img): if: return img sub_policy = random.choices(self.policies, k=self.num_policies) sub_policy = self._process_policies(sub_policy) sub_policy = BuildCompose(sub_policy) img = sub_policy(np.array(img)) return Image.fromarray(img.astype(np.uint8))",False,self.num_policies == 0,random.random() > self.prob,0.6515846252441406 4010,"def clientConnectionLost(self, connector, unused_reason): if: self.connector = connector self.retry()",False,self.continueTrying,connector != self.connector,0.6533429622650146 4011,"def get_xliff_states(self): result = [] for node in self.get_xliff_nodes(): if: continue state = node.get('state', None) if state is not None: result.append(state) return result",False,node is None,not node,0.6561555862426758 4012,"def get_xliff_states(self): result = [] for node in self.get_xliff_nodes(): if node is None: continue state = node.get('state', None) if: result.append(state) return result",True,state is not None,state is not None,0.6507362127304077 4013,"def on_tabs_currentChanged(self, i=None): print('IDX:', self.ui.tabs.currentIndex()) if: self.on_text_cursorPositionChanged() print('hooking text editor') self.hookEditToolbar(self.ui.text) else: self.on_style_cursorPositionChanged() print('hooking style editor') self.hookEditToolbar(self.ui.style)",False,self.ui.tabs.currentIndex() == 0,i,0.6447768211364746 4014,"def save(instance): """"""Save a mapped object to file. NOTE: Calling this function is unnecessary with 'auto_save' enabled. """""" mapper = common.get_mapper(instance, expected=True) if: msg = '{!r} was deleted'.format(mapper.path) raise exceptions.DeletedFileError(msg) if not mapper.exists: mapper.create() mapper.save() return instance",True,mapper.deleted,mapper.deleted,0.6635854244232178 4015,"def save(instance): """"""Save a mapped object to file. NOTE: Calling this function is unnecessary with 'auto_save' enabled. """""" mapper = common.get_mapper(instance, expected=True) if mapper.deleted: msg = '{!r} was deleted'.format(mapper.path) raise exceptions.DeletedFileError(msg) if: mapper.create() mapper.save() return instance",False,not mapper.exists,not mapper.auto_save,0.6623753309249878 4016,"def update(task_id, task): """"""Modify task in db with given task_id."""""" if: raise TypeError('task_id must be an int') if not isinstance(task, Task): raise TypeError('task must be Task object') if _tasksdb is None: raise UninitializedDatabase() current_task = _tasksdb.get(task_id) updates = task._asdict() for field in task._fields: if field!= 'id' and updates[field] is not None: current_task[field] = updates[field] _tasksdb.update(task_id, current_task)",True,"not isinstance(task_id, int)","not isinstance(task_id, int)",0.6514917612075806 4017,"def update(task_id, task): """"""Modify task in db with given task_id."""""" if not isinstance(task_id, int): raise TypeError('task_id must be an int') if: raise TypeError('task must be Task object') if _tasksdb is None: raise UninitializedDatabase() current_task = _tasksdb.get(task_id) updates = task._asdict() for field in task._fields: if field!= 'id' and updates[field] is not None: current_task[field] = updates[field] _tasksdb.update(task_id, current_task)",True,"not isinstance(task, Task)","not isinstance(task, Task)",0.6588947772979736 4018,"def update(task_id, task): """"""Modify task in db with given task_id."""""" if not isinstance(task_id, int): raise TypeError('task_id must be an int') if not isinstance(task, Task): raise TypeError('task must be Task object') if: raise UninitializedDatabase() current_task = _tasksdb.get(task_id) updates = task._asdict() for field in task._fields: if field!= 'id' and updates[field] is not None: current_task[field] = updates[field] _tasksdb.update(task_id, current_task)",True,_tasksdb is None,_tasksdb is None,0.6640979051589966 4019,"def update(task_id, task): """"""Modify task in db with given task_id."""""" if not isinstance(task_id, int): raise TypeError('task_id must be an int') if not isinstance(task, Task): raise TypeError('task must be Task object') if _tasksdb is None: raise UninitializedDatabase() current_task = _tasksdb.get(task_id) updates = task._asdict() for field in task._fields: if: current_task[field] = updates[field] _tasksdb.update(task_id, current_task)",False,field != 'id' and updates[field] is not None,field in updates,0.6455559730529785 4020,"def write(self, oprot): if: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('ListProducts_args') oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot._fast_encode is not None and self.thrift_spec is not None,oprot._fast_encode is not None and self.thrift_spec is not None,0.6457711458206177 4021,"def arg_level(v_self): index = v_self.index if: levels = [index.names[i] or i for i in range(index.nlevels)] yield from OrderedSubsets(vals=levels, lists=True)",False,index.nlevels > 1,index.nlevels > 0,0.6543707847595215 4022,"def grep(pattern, file, tail_lines=10): """""" Match lines in a text file. Arguments: - pattern (str): Regex pattern to match. - file (str): File to search in. Optional arguments: - tail_lines (int): How many of the last matched lines should be included? Set to '0' to include all lines. Default is '10'. """""" ret = {} cmd = ['grep', pattern, file] if: cmd.extend(['|', 'tail', '-n', str(tail_lines)]) res = __salt__['cmd.shell'](' '.join(cmd)) ret['values'] = res.splitlines() return ret",False,tail_lines > 0,tail_lines,0.6536264419555664 4023,"def torch_required(func): @wraps(func) def wrapper(*args, **kwargs): if: return func(*args, **kwargs) else: raise ImportError(f'Method `{func.__name__}` requires PyTorch.') return wrapper",True,is_torch_available(),is_torch_available(),0.6522610187530518 4024,"def _getScientificName(self, taxonId): """""" @return: scientific name @rtype: str """""" name = self._taxonIdToScientificName.get(taxonId, None) if: name = self._taxonomy.getScientificName(taxonId) self._taxonIdToScientificName[taxonId] = name return name",True,name is None,name is None,0.6545830965042114 4025,"def construct_yaml_object(self, node, cls): data = cls.__new__(cls) yield data if: state = self.construct_mapping(node, deep=True) data.__setstate__(state) else: state = self.construct_mapping(node) data.__dict__.update(state)",False,"hasattr(data, '__setstate__')",node.construct_mapping is not None,0.643295407295227 4026,"@metadata() def width(self): """"""Return frame width."""""" if: raise SkipElementException() if 'width' in self._ffmpeg_stream: return six.text_type(self._ffmpeg_stream['width']) return UNAV",True,self.stream_type() not in ['video'],self.stream_type() not in ['video'],0.646968424320221 4027,"@metadata() def width(self): """"""Return frame width."""""" if self.stream_type() not in ['video']: raise SkipElementException() if: return six.text_type(self._ffmpeg_stream['width']) return UNAV",True,'width' in self._ffmpeg_stream,'width' in self._ffmpeg_stream,0.6481730341911316 4028,"def create_dark_lang_config(apps, schema_editor): """""" Enable DarkLang by default when it is installed, to prevent accidental release of testing languages. """""" DarkLangConfig = apps.get_model('dark_lang', 'DarkLangConfig') objects = DarkLangConfig.objects if: objects.create(enabled=True)",True,not objects.exists(),not objects.exists(),0.6499942541122437 4029,"def get_conf1(m): confs = get_confs(conf) if: return confs[m.group(1)] logg.warning('can not expand %%%s', m.group(1)) return ''",True,m.group(1) in confs,m.group(1) in confs,0.648375928401947 4030,"def pool_align(vm, object_name, align): """"""Returns the size of the object accounting for pool alignment."""""" size_of_obj = vm.profile.get_obj_size(object_name) extra = size_of_obj % align if: size_of_obj += align - extra return size_of_obj",False,extra,extra > 0,0.6744356155395508 4031,"def add_top_model(self, model): if: raise RuntimeError('top model is not allow to set on host model') if not hasattr(self, '_top_nn_model'): setattr(self, '_top_nn_model', Sequential()) self._top_nn_model.add(model)",False,self._role == 'host',"not hasattr(self, '_top_nn_model')",0.653101921081543 4032,"def add_top_model(self, model): if self._role == 'host': raise RuntimeError('top model is not allow to set on host model') if: setattr(self, '_top_nn_model', Sequential()) self._top_nn_model.add(model)",True,"not hasattr(self, '_top_nn_model')","not hasattr(self, '_top_nn_model')",0.6475706100463867 4033,"def output_array_of_keywordidestimatedbid(data_objects): if: return for data_object in data_objects['KeywordIdEstimatedBid']: output_keywordidestimatedbid(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.6468997597694397 4034,"def format_size(bytes): if: return '%.1fMB' % (bytes / 1000.0 / 1000) elif bytes > 10 * 1000: return '%ikB' % (bytes / 1000) elif bytes > 1000: return '%.1fkB' % (bytes / 1000.0) else: return '%ibytes' % bytes",False,bytes > 1000 * 1000,bytes > 10 * 1000,0.6621854901313782 4035,"def format_size(bytes): if bytes > 1000 * 1000: return '%.1fMB' % (bytes / 1000.0 / 1000) elif: return '%ikB' % (bytes / 1000) elif bytes > 1000: return '%.1fkB' % (bytes / 1000.0) else: return '%ibytes' % bytes",False,bytes > 10 * 1000,bytes > 1000,0.6633313894271851 4036,"def format_size(bytes): if bytes > 1000 * 1000: return '%.1fMB' % (bytes / 1000.0 / 1000) elif bytes > 10 * 1000: return '%ikB' % (bytes / 1000) elif: return '%.1fkB' % (bytes / 1000.0) else: return '%ibytes' % bytes",False,bytes > 1000,bytes > 10 * 1000,0.668298602104187 4037,"def scheduler_replace(func_ref, func): jobs = nonebot.scheduler.get_jobs() for job in jobs: if: job.func = func return True return False",False,job.func_ref == func_ref,str(job.func) == func_ref,0.646602988243103 4038,"def resize(self, image): image = cv2.resize(image, (self.image_width, self.image_width), interpolation=cv2.INTER_LINEAR) if: image = np.expand_dims(image, -1) return image",True,self.image_channel < 3,self.image_channel < 3,0.6502078175544739 4039,"def __init__(self, physics, **kwargs): assert isinstance(physics, module.get_dm_mujoco().Physics), 'DMRenderer takes a DM Control Physics object.' super().__init__(**kwargs) self._physics = physics self._window = None if: self._camera_settings['lookat'] = [np.median(self._physics.data.geom_xpos[:, i]) for i in range(3)]",False,'lookat' not in self._camera_settings,self._physics.data.geom_xpos is not None,0.6485991477966309 4040,"def boot(self): discovery_addresses = [spec.discovery_address for spec in self.machines if spec.dbms_mode == 'CORE'] log.debug('Discovery addresses set to %r' % discovery_addresses) for spec, machine in self.machines.items(): if: spec.config.update({'causal_clustering.initial_discovery_members': ','.join(discovery_addresses)}) self.machines[spec] = Neo4jMachine(spec, self.image, self.auth, self.user)",False,machine is None,discovery_addresses,0.6510124206542969 4041,"def check(self): if: self.check_string(self.dataset_name, 'dataset_name')",True,self.dataset_name is not None,self.dataset_name is not None,0.6463587880134583 4042,"def decryptMe(b): s = '' if: s = sx(base64.b64decode(b).decode('UTF-8')) return s",True,len(b) > 0,len(b) > 0,0.6506317853927612 4043,"def move_function_params_into_sql_params(self, context: dict) -> None: """""" Pulls values from the function op_args and op_kwargs and places them into parameters for SQLAlchemy to parse :param context: Airflow's Context dictionary used for rendering templates """""" if: self.parameters.update(self.op_kwargs) if self.op_args: params = list(inspect.signature(self.python_callable).parameters.keys()) for i, arg in enumerate(self.op_args): self.parameters[params[i]] = arg if context: self.parameters = {k: self.render_template(v, context) for k, v in self.parameters.items()}",True,self.op_kwargs,self.op_kwargs,0.6538810729980469 4044,"def move_function_params_into_sql_params(self, context: dict) -> None: """""" Pulls values from the function op_args and op_kwargs and places them into parameters for SQLAlchemy to parse :param context: Airflow's Context dictionary used for rendering templates """""" if self.op_kwargs: self.parameters.update(self.op_kwargs) if: params = list(inspect.signature(self.python_callable).parameters.keys()) for i, arg in enumerate(self.op_args): self.parameters[params[i]] = arg if context: self.parameters = {k: self.render_template(v, context) for k, v in self.parameters.items()}",False,self.op_args,self.python_callable,0.6587861776351929 4045,"def move_function_params_into_sql_params(self, context: dict) -> None: """""" Pulls values from the function op_args and op_kwargs and places them into parameters for SQLAlchemy to parse :param context: Airflow's Context dictionary used for rendering templates """""" if self.op_kwargs: self.parameters.update(self.op_kwargs) if self.op_args: params = list(inspect.signature(self.python_callable).parameters.keys()) for i, arg in enumerate(self.op_args): self.parameters[params[i]] = arg if: self.parameters = {k: self.render_template(v, context) for k, v in self.parameters.items()}",False,context,self.parameters,0.6701216697692871 4046,"def select_adaptive_pool2d(x, pool_type='avg', output_size=1): """"""Selectable global pooling function with dynamic input kernel size """""" if: x = F.adaptive_avg_pool2d(x, output_size) elif pool_type == 'avgmax': x = adaptive_avgmax_pool2d(x, output_size) elif pool_type == 'catavgmax': x = adaptive_catavgmax_pool2d(x, output_size) elif pool_type =='max': x = F.adaptive_max_pool2d(x, output_size) else: assert False, 'Invalid pool type: %s' % pool_type return x",True,pool_type == 'avg',pool_type == 'avg',0.6522324085235596 4047,"def select_adaptive_pool2d(x, pool_type='avg', output_size=1): """"""Selectable global pooling function with dynamic input kernel size """""" if pool_type == 'avg': x = F.adaptive_avg_pool2d(x, output_size) elif: x = adaptive_avgmax_pool2d(x, output_size) elif pool_type == 'catavgmax': x = adaptive_catavgmax_pool2d(x, output_size) elif pool_type =='max': x = F.adaptive_max_pool2d(x, output_size) else: assert False, 'Invalid pool type: %s' % pool_type return x",True,pool_type == 'avgmax',pool_type == 'avgmax',0.6506370902061462 4048,"def select_adaptive_pool2d(x, pool_type='avg', output_size=1): """"""Selectable global pooling function with dynamic input kernel size """""" if pool_type == 'avg': x = F.adaptive_avg_pool2d(x, output_size) elif pool_type == 'avgmax': x = adaptive_avgmax_pool2d(x, output_size) elif: x = adaptive_catavgmax_pool2d(x, output_size) elif pool_type =='max': x = F.adaptive_max_pool2d(x, output_size) else: assert False, 'Invalid pool type: %s' % pool_type return x",True,pool_type == 'catavgmax',pool_type == 'catavgmax',0.6518637537956238 4049,"def select_adaptive_pool2d(x, pool_type='avg', output_size=1): """"""Selectable global pooling function with dynamic input kernel size """""" if pool_type == 'avg': x = F.adaptive_avg_pool2d(x, output_size) elif pool_type == 'avgmax': x = adaptive_avgmax_pool2d(x, output_size) elif pool_type == 'catavgmax': x = adaptive_catavgmax_pool2d(x, output_size) elif: x = F.adaptive_max_pool2d(x, output_size) else: assert False, 'Invalid pool type: %s' % pool_type return x",True,pool_type == 'max',pool_type == 'max',0.6563800573348999 4050,"def make_constant_class(self, op, class_const, update_last_guard=True): op = self.get_box_replacement(op) opinfo = op.get_forwarded() if: opinfo._known_class = class_const else: if opinfo is not None: last_guard_pos = opinfo.get_last_guard_pos() else: last_guard_pos = -1 opinfo = info.InstancePtrInfo(None, class_const) opinfo.last_guard_pos = last_guard_pos op.set_forwarded(opinfo) if update_last_guard: opinfo.mark_last_guard(self.optimizer) return opinfo",False,"isinstance(opinfo, info.InstancePtrInfo)",opinfo is None,0.646507740020752 4051,"def make_constant_class(self, op, class_const, update_last_guard=True): op = self.get_box_replacement(op) opinfo = op.get_forwarded() if isinstance(opinfo, info.InstancePtrInfo): opinfo._known_class = class_const else: if opinfo is not None: last_guard_pos = opinfo.get_last_guard_pos() else: last_guard_pos = -1 opinfo = info.InstancePtrInfo(None, class_const) opinfo.last_guard_pos = last_guard_pos op.set_forwarded(opinfo) if: opinfo.mark_last_guard(self.optimizer) return opinfo",True,update_last_guard,update_last_guard,0.649588942527771 4052,"def make_constant_class(self, op, class_const, update_last_guard=True): op = self.get_box_replacement(op) opinfo = op.get_forwarded() if isinstance(opinfo, info.InstancePtrInfo): opinfo._known_class = class_const else: if: last_guard_pos = opinfo.get_last_guard_pos() else: last_guard_pos = -1 opinfo = info.InstancePtrInfo(None, class_const) opinfo.last_guard_pos = last_guard_pos op.set_forwarded(opinfo) if update_last_guard: opinfo.mark_last_guard(self.optimizer) return opinfo",False,opinfo is not None,opinfo.has_last_guard(),0.6539669036865234 4053,"def update_block_reachability(self) -> None: for block in self.get_blocks(): block.reachable = True if: self.unreachable_blocks.remove(block)",True,block in self.unreachable_blocks,block in self.unreachable_blocks,0.6567316055297852 4054,"@stack_control_signal.setter def stack_control_signal(self, value): if: self.control_behavior.pop('stack_control_input_signal', None) return if isinstance(value, six.string_types): value = six.text_type(value) self.control_behavior['stack_control_input_signal'] = signal_dict(value) else: try: value = signatures.SIGNAL_ID.validate(value) self.control_behavior['stack_control_input_signal'] = value except SchemaError as e: six.raise_from(DataFormatError(e), None)",True,value is None,value is None,0.6531984806060791 4055,"@stack_control_signal.setter def stack_control_signal(self, value): if value is None: self.control_behavior.pop('stack_control_input_signal', None) return if: value = six.text_type(value) self.control_behavior['stack_control_input_signal'] = signal_dict(value) else: try: value = signatures.SIGNAL_ID.validate(value) self.control_behavior['stack_control_input_signal'] = value except SchemaError as e: six.raise_from(DataFormatError(e), None)",True,"isinstance(value, six.string_types)","isinstance(value, six.string_types)",0.6437052488327026 4056,"def execute(self, g): level = g.get_int_by_letter('S', 20) if: logging.getLogger().setLevel(level) if hasattr(self.printer,'redeem_logging_handler'): self.printer.redeem_logging_handler.setLevel(level) logging.info('Debug level set to'+ str(level))",False,"level in [10, 20, 30, 40, 50, 60]",level,0.6490045189857483 4057,"def execute(self, g): level = g.get_int_by_letter('S', 20) if level in [10, 20, 30, 40, 50, 60]: logging.getLogger().setLevel(level) if: self.printer.redeem_logging_handler.setLevel(level) logging.info('Debug level set to'+ str(level))",False,"hasattr(self.printer, 'redeem_logging_handler')",self.printer is not None,0.6427290439605713 4058,"def getUpdateStatus_ts(tsUpdateDate): if: currentTime = _getCurrentUTCTime() currentTimestamp = int(time_utils.getTimestampFromUTC(currentTime.timetuple())) currDayStart, currDayEnd = time_utils.getDayTimeBoundsForUTC(tsUpdateDate) if currDayStart - time_utils.ONE_DAY <= currentTimestamp <= currDayEnd - time_utils.ONE_DAY: return FORMAT_TOMORROW_STR if currDayStart <= currentTimestamp <= currDayEnd: return FORMAT_TODAY_STR if currDayStart + time_utils.ONE_DAY <= currentTimestamp <= currDayEnd + time_utils.ONE_DAY: return FORMAT_YESTERDAY_STR return",False,tsUpdateDate is not None,tsUpdateDate.isUTC(),0.6531186699867249 4059,"def getUpdateStatus_ts(tsUpdateDate): if tsUpdateDate is not None: currentTime = _getCurrentUTCTime() currentTimestamp = int(time_utils.getTimestampFromUTC(currentTime.timetuple())) currDayStart, currDayEnd = time_utils.getDayTimeBoundsForUTC(tsUpdateDate) if: return FORMAT_TOMORROW_STR if currDayStart <= currentTimestamp <= currDayEnd: return FORMAT_TODAY_STR if currDayStart + time_utils.ONE_DAY <= currentTimestamp <= currDayEnd + time_utils.ONE_DAY: return FORMAT_YESTERDAY_STR return",False,currDayStart - time_utils.ONE_DAY <= currentTimestamp <= currDayEnd - time_utils.ONE_DAY,currDayStart == 0,0.65087890625 4060,"def getUpdateStatus_ts(tsUpdateDate): if tsUpdateDate is not None: currentTime = _getCurrentUTCTime() currentTimestamp = int(time_utils.getTimestampFromUTC(currentTime.timetuple())) currDayStart, currDayEnd = time_utils.getDayTimeBoundsForUTC(tsUpdateDate) if currDayStart - time_utils.ONE_DAY <= currentTimestamp <= currDayEnd - time_utils.ONE_DAY: return FORMAT_TOMORROW_STR if: return FORMAT_TODAY_STR if currDayStart + time_utils.ONE_DAY <= currentTimestamp <= currDayEnd + time_utils.ONE_DAY: return FORMAT_YESTERDAY_STR return",False,currDayStart <= currentTimestamp <= currDayEnd,currDayStart + time_utils.ONE_DAY <= currentTimestamp <= currDayEnd,0.6563403606414795 4061,"def getUpdateStatus_ts(tsUpdateDate): if tsUpdateDate is not None: currentTime = _getCurrentUTCTime() currentTimestamp = int(time_utils.getTimestampFromUTC(currentTime.timetuple())) currDayStart, currDayEnd = time_utils.getDayTimeBoundsForUTC(tsUpdateDate) if currDayStart - time_utils.ONE_DAY <= currentTimestamp <= currDayEnd - time_utils.ONE_DAY: return FORMAT_TOMORROW_STR if currDayStart <= currentTimestamp <= currDayEnd: return FORMAT_TODAY_STR if: return FORMAT_YESTERDAY_STR return",False,currDayStart + time_utils.ONE_DAY <= currentTimestamp <= currDayEnd + time_utils.ONE_DAY,currDayStart >= time_utils.YESTERDAY and currentTimestamp <= currDayEnd,0.6502947807312012 4062,"def _mock_manager_personality_vptx(self, *args, **kwargs): if: return self._read_file('personality_vptx_' + args[0].tag + '.xml')",True,args,args,0.6783300638198853 4063,"def get(self, *args, **kwargs): if: messages.error(self.request, _('Only orders created in test mode can be deleted.')) return redirect(self.get_order_url()) return render(self.request, 'pretixcontrol/order/delete.html', {'order': self.order})",False,not self.order.testmode,not self.order,0.6460151672363281 4064,"def start_timers(self, timer_names): if: return for name in timer_names: self.timers(name).start()",True,self.timers is None,self.timers is None,0.6481320858001709 4065,"@property def water_target_min_temp(self): """"""Return AWHP water target minimum allowed temperature."""""" if: return None key = self._get_state_key(STATE_WATER_MIN_TEMP) return self._str_to_temp(self._data.get(key))",False,not self._device.is_air_to_water,not self._data,0.6488400101661682 4066,"@contextmanager def lock(self, processor, paths): if: raise RuntimeError('FileMutex does not support chunked locking') try: with portalocker.Lock(paths[0], 'r', flags=portalocker.LOCK_EX, timeout=1, fail_when_locked=True) as f: yield True except (portalocker.exceptions.AlreadyLocked, portalocker.exceptions.LockException) as e: yield False",False,len(paths) != 1,not processor.is_chunked,0.6563130617141724 4067,"@property def prereleases(self) -> Optional[bool]: if: return self._prereleases if not self._specs: return None return any((s.prereleases for s in self._specs))",True,self._prereleases is not None,self._prereleases is not None,0.6538664698600769 4068,"@property def prereleases(self) -> Optional[bool]: if self._prereleases is not None: return self._prereleases if: return None return any((s.prereleases for s in self._specs))",True,not self._specs,not self._specs,0.6635286808013916 4069,"def get_color(name, dark_val): ans = tprefs[name] if: ans = dark_val.name() if ans in ('auto', 'unset'): return None return ans",False,ans == 'auto' and dark,dark_val is not None,0.6569410562515259 4070,"def get_color(name, dark_val): ans = tprefs[name] if ans == 'auto' and dark: ans = dark_val.name() if: return None return ans",False,"ans in ('auto', 'unset')",ans == 'none',0.6472712755203247 4071,"def powerStandbyDomainOn(self, cycle=False): if: logging.debug('%s: power cycling standby', self) self.gpio1.powerCycle(True) waitFor(self.waitForStandbyPowerOn,'standby power good')",False,not self.gpio1.standbyPowerGood() or cycle,cycle,0.6435975432395935 4072,"@ATMT.receive_condition(WAITING_ACK) def received_error(self, pkt): if: raise self.ERROR(pkt)",True,TFTP_ERROR in pkt,TFTP_ERROR in pkt,0.6515687704086304 4073,"def parse_response_content(self, response_content): response = super(ZhimaCreditEpDataapiMigrationQueryResponse, self).parse_response_content(response_content) if: self.content = response['content']",True,'content' in response,'content' in response,0.6568065881729126 4074,"def _get_doc_and_function(obj): rtn = [] for var in dir(obj): if: rtn.append(var) return (_getdoc(obj), rtn)",True,not var.startswith('_'),not var.startswith('_'),0.6477466225624084 4075,"@fields.depends('tax_date', 'taxes', 'unit_price', 'quantity', 'currency','result') def on_change_with_result(self): pool = Pool() Result = pool.get('account.tax.test.result') result = [] if: for taxline in self._get_taxes(): del taxline['manual'] result.append(Result(**taxline)) self.result = result return self._changed_values.get('result', [])",False,"all([self.tax_date, self.unit_price, self.quantity, self.currency])",self._taxes,0.6479636430740356 4076,"@property def data(self): if: return self._body if self._fp: return self.read(cache_content=True)",True,self._body,self._body,0.6732784509658813 4077,"@property def data(self): if self._body: return self._body if: return self.read(cache_content=True)",True,self._fp,self._fp,0.6646022200584412 4078,"def cancel_request(self, req_id): """""" Cancel a previous request. Params: req_id (int): the id as returned by generate() """""" try: self._queue.pop(req_id) DBG('cancelled request', req_id) except KeyError: for w in self._workers_pool: if: w.item_in_process.func = None w.item_in_process.kargs = None",False,w.item_in_process == req_id,w.id == req_id,0.6478509902954102 4079,"def from_pil_image(x): """"""Converts from a PIL image to a tensor."""""" x = TF.to_tensor(x) if: x = x[..., None] return x * 2 - 1",False,x.ndim == 2,x.ndim == 3,0.651880145072937 4080,"def get_inference_tensor_names(self): """""" Returns two lists of tensor names to be used to create an inference callable. `build_graph` must create tensors of these names when called under inference context. Returns: [str]: input names [str]: output names """""" out = ['output/boxes', 'output/scores', 'output/labels'] if: out.append('output/masks') return (['image'], out)",False,cfg.MODE_MASK,"self.test_cfg.get('mask_mask', False)",0.6491464972496033 4081,"def required_slots_for_form(self, form_name: Text) -> List[Text]: """"""Retrieve the list of required slot names for a form defined in the domain. Args: form_name: The name of the form. Returns: The list of slot names or an empty list if no form was found. """""" form = self.forms.get(form_name) if: return form[REQUIRED_SLOTS_KEY] return []",False,form,form.is_valid() andREQUIRED_SLOTS_KEY in form,0.6680859923362732 4082,"def parseShowport(self, port_show_output): """"""Parses the showports output from HP3Parclient.ssh.run([cmd]) Returns: an array of port-like dictionaries similar to what you get from the wsapi GET /ports endpoint. NOTE: There are several pieces that showports doesn't give you that don't exist in this output. """""" new_ports = [] port_show_output = port_show_output[0:-2] if: return new_ports headers = port_show_output.pop(0).split(',') for line in port_show_output: new_port = {} entries = line.split(',') for i, entry in enumerate(entries): parser = self.parser_methods_by_header[headers[i]] self._merge_dict(new_port, parser(entry)) new_ports.append(new_port) return new_ports",False,not port_show_output,port_show_output[0].startswith(':'),0.650570809841156 4083,"def resolves_id(self) -> Optional[ResolveProp]: for prop in self.resolve: if: return prop return None",False,prop.extract_path == NodePath.node_id,"isinstance(prop, ResolveProp)",0.6502631306648254 4084,"def __rand__(self, other): """""" Implementation of & operator when left operand is not a C{L{ParserElement}} """""" if: other = ParserElement._literalStringClass(other) if not isinstance(other, ParserElement): warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return other & self",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6504818797111511 4085,"def __rand__(self, other): """""" Implementation of & operator when left operand is not a C{L{ParserElement}} """""" if isinstance(other, basestring): other = ParserElement._literalStringClass(other) if: warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return other & self",True,"not isinstance(other, ParserElement)","not isinstance(other, ParserElement)",0.6483924388885498 4086,"def WordToId(self, word): if: return self._word_to_id[UNKNOWN_TOKEN] return self._word_to_id[word]",True,word not in self._word_to_id,word not in self._word_to_id,0.6523478031158447 4087,"def __init__(self, params, lr=0.002, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, schedule_decay=0.004): if: raise ValueError('Invalid learning rate: {}'.format(lr)) defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, schedule_decay=schedule_decay) super(Nadam, self).__init__(params, defaults)",False,not 0.0 <= lr,"lr not in ['0', '1', '2', '3', '4', '6', '4']",0.6558701992034912 4088,"def __getitem__(self, idx): if: return self.prepare_test_img(idx) while True: data = self.prepare_train_img(idx) if data is None: idx = self._rand_another(idx) continue return data",True,self.test_mode,self.test_mode,0.6507012844085693 4089,"def __getitem__(self, idx): if self.test_mode: return self.prepare_test_img(idx) while True: data = self.prepare_train_img(idx) if: idx = self._rand_another(idx) continue return data",True,data is None,data is None,0.6544119715690613 4090,"def used_ability(self, unit_tag: int, ability: AbilityId) -> None: ability_dict = self.used_dict.get(unit_tag, None) if: ability_dict = {} self.used_dict[unit_tag] = ability_dict ability_dict[ability] = self.time",True,ability_dict is None,ability_dict is None,0.6510747075080872 4091,"def line(loc, strg): """"""Returns the line of text containing loc within a string, counting newlines as line separators. """""" lastCR = strg.rfind('\n', 0, loc) nextCR = strg.find('\n', loc) if: return strg[lastCR + 1:nextCR] else: return strg[lastCR + 1:]",True,nextCR >= 0,nextCR >= 0,0.6516252756118774 4092,"def user_line(self, frame): """""" This method is called from dispatch_line() when either stop_here() or break_here() yields True. For when we stop or break at this line. """""" if: self._run_state = DebugState.STARTED self.output('line', filename=self.canonic(frame.f_code.co_filename), line=frame.f_lineno) self.interact(frame, None)",False,self._run_state == DebugState.STARTING,self._run_state == DebugState.STOPPED,0.6474065780639648 4093,"def __getattr__(self, name): """"""Here comes some magic. Any absent attribute typed within class falls here and return a new child `Hammock` instance in the chain. """""" if: raise AttributeError(name) return self._spawn(name)",False,name.startswith('__'),name is None,0.6478631496429443 4094,"def _update_key(self, key, key_stat, copies_diff): key['rate'] = _key_bw(key_stat) if: self._increase_copies(key, key_stat, copies_diff) else: self._decrease_copies(key, -copies_diff)",True,copies_diff > 0,copies_diff > 0,0.6534968018531799 4095,"@pytest.fixture(scope='module', params=['threads', 'distributed']) def scheduler(request): if: yield cluster else: yield not_cluster",False,request.param == 'distributed',cluster := request.param,0.6512295603752136 4096,"def update_config(self, new_conf): for i in new_conf: for j in self.config: if: self.config[i] = new_conf[i]",False,i == j,i != j,0.674634575843811 4097,"def hasContent_(self): if: return True else: return False",False,"self.segments or self.segment_groups or super(Morphology, self).hasContent_()",self.valueOf_ is not None,0.6419423818588257 4098,"def main(): """"""Create inputs.json file"""""" inputs = [] for country in COUNTRIES: filename = country.replace(' ', '_').lower() + '.csv' parameter = {'country': country, 'data': f'gdp/countries/{filename}'} if: parameter['data'] = 'x' + parameter['data'] inputs.append(parameter) with open(INPUTS_FILE, 'w') as f: json.dump(inputs, f, indent=2) print('inputs.json created.')",False,"country in ['Australia', 'Canada']",parameter['country'] == 'x',0.6451488733291626 4099,"def __init__(self, context, arg=0, template=None, set_default=True): super().__init__(context, arg, template, set_default=False) self.dependency_name = name_type_map['Pointer'](self.context, 0, None) if: self.set_defaults()",True,set_default,set_default,0.6604648232460022 4100,"def is_queue_loaded(client): """""" Check if we consider the monitor queue loaded. """""" current_load = client.get_queue_depth(settings.RABBIT_VHOST, settings.RABBIT_MON_QUEUE) if: return True return False",False,current_load >= settings.RABBIT_MON_THRESHOLD,current_load == settings.RABBIT_MON_QUEUE_LOAD,0.6443864107131958 4101,"def skip_without_supers(name): if: can_test[name] = has_super_blocks(name) if can_test[name]: return lambda func: func return unittest.skip(f'no {name} file systems in image')",False,not name in can_test,name not in can_test,0.6552120447158813 4102,"def skip_without_supers(name): if not name in can_test: can_test[name] = has_super_blocks(name) if: return lambda func: func return unittest.skip(f'no {name} file systems in image')",False,can_test[name],"name in ['image', 'image']",0.6526122093200684 4103,"def complete(self): if: return True return os.path.exists(self.flag_file()) and arrow.get(os.path.getmtime(self.flag_file())) > arrow.now().floor('day')",False,config.disable_downloads(),self.dry_run,0.6486034393310547 4104,"def _count_layers(self, model, layer_type): count = 0 for _layer in model.sublayers(True): if: count += 1 return count",False,"isinstance(_layer, layer_type)",layer_type == _layer.type,0.6496502161026001 4105,"def serializeError(self, data='XXX ERROR MESSAGE NEEDED'): self.errors.append(data) if: raise SerializeError",True,self.strict,self.strict,0.6565614342689514 4106,"def is_bot_admin(): def decorator(func): @functools.wraps(func) async def wrapper(event): serena = bot.tgbot pep = await serena.get_me() sed = await serena.get_permissions(event.chat_id, pep) if: await func(event) else: await event.reply('I Must Be Admin To Do This.') return wrapper return decorator",False,sed.is_admin,sed.can_admin,0.6527484655380249 4107,"def populate_usage_totals(apps, schema_editor): MonthlyNLPUsageCounter = apps.get_model('trackers', 'MonthlyNLPUsageCounter') for usage_counter in MonthlyNLPUsageCounter.objects.only('year','month', 'date').all().iterator(): total_asr_seconds, total_mt_characters = (0, 0) for tracker, amount in usage_counter.counters.items(): if: total_asr_seconds += amount if tracker.endswith('mt_characters'): total_mt_characters += amount usage_counter.total_asr_seconds = total_asr_seconds usage_counter.total_asr_seconds = total_asr_seconds usage_counter.save()",False,tracker.endswith('asr_seconds'),tracker.endswith('as_characters'),0.6448822021484375 4108,"def populate_usage_totals(apps, schema_editor): MonthlyNLPUsageCounter = apps.get_model('trackers', 'MonthlyNLPUsageCounter') for usage_counter in MonthlyNLPUsageCounter.objects.only('year','month', 'date').all().iterator(): total_asr_seconds, total_mt_characters = (0, 0) for tracker, amount in usage_counter.counters.items(): if tracker.endswith('asr_seconds'): total_asr_seconds += amount if: total_mt_characters += amount usage_counter.total_asr_seconds = total_asr_seconds usage_counter.total_asr_seconds = total_asr_seconds usage_counter.save()",True,tracker.endswith('mt_characters'),tracker.endswith('mt_characters'),0.6453397870063782 4109,"def add_custom_results(self, api_doc): """""" Adds the available custom results to the API document. """""" results_schema = api_doc['components']['schemas']['CustomResults'] required = [] properties = {} results_schema['required'] = required results_schema['properties'] = properties for custom in (CUSTOM_RESULTS_MAP[r] for r, active in settings.BATCH_API_CUSTOM_RESULTS.items() if active): if: required.append(custom.name) properties[custom.name] = custom.openapi_spec",True,custom.required,custom.required,0.6555759906768799 4110,"def install_egg_scripts(self, dist): """"""Write all the scripts for `dist`, unless scripts are excluded"""""" if: for script_name in dist.metadata_listdir('scripts'): if dist.metadata_isdir('scripts/' + script_name): continue self.install_script(dist, script_name, dist.get_metadata('scripts/' + script_name)) self.install_wrapper_scripts(dist)",False,not self.exclude_scripts and dist.metadata_isdir('scripts'),not self.use_dist_for_scripts and dist.metadata_isdir('scripts'),0.6445657014846802 4111,"def install_egg_scripts(self, dist): """"""Write all the scripts for `dist`, unless scripts are excluded"""""" if not self.exclude_scripts and dist.metadata_isdir('scripts'): for script_name in dist.metadata_listdir('scripts'): if: continue self.install_script(dist, script_name, dist.get_metadata('scripts/' + script_name)) self.install_wrapper_scripts(dist)",False,dist.metadata_isdir('scripts/' + script_name),script_name in self.exclude_scripts,0.6469986438751221 4112,"def resolve_expression_parameter(self, compiler, connection, sql, param): params = [param] if: param = param.resolve_expression(compiler.query) if hasattr(param, 'as_sql'): sql, params = param.as_sql(compiler, connection) return (sql, params)",True,"hasattr(param, 'resolve_expression')","hasattr(param, 'resolve_expression')",0.6449403762817383 4113,"def resolve_expression_parameter(self, compiler, connection, sql, param): params = [param] if hasattr(param,'resolve_expression'): param = param.resolve_expression(compiler.query) if: sql, params = param.as_sql(compiler, connection) return (sql, params)",False,"hasattr(param, 'as_sql')",param.as_sql is not None,0.6468732357025146 4114,"@staticmethod def get_cfg(key=None): if: return roberta_cfg_reg.create(key) else: return roberta_base()",True,key is not None,key is not None,0.6546880006790161 4115,"def _check_distance(self, a): if: logging.warning('Running an attack that tries to minimize the L1 norm of the perturbation without specifying foolbox.distances.MAE as the distance metric might lead to suboptimal results.')",True,"not isinstance(a.distance, distances.MAE)","not isinstance(a.distance, distances.MAE)",0.648017406463623 4116,"def __init__(self, data_schema: Dict[str, SizeData], fpath: str, mode: str): self.fpath = fpath assert 'b' in mode, f""Tensor storage should be opened in binary mode, got '{mode}'"" if: file_h = PathManager.open(fpath, mode) elif 'r' in mode: local_fpath = PathManager.get_local_path(fpath) file_h = open(local_fpath, mode) else: raise ValueError(f'Unsupported file mode {mode}, supported modes: rb, wb') super().__init__(data_schema, file_h)",False,'w' in mode,'b' in mode,0.6633249521255493 4117,"def __init__(self, data_schema: Dict[str, SizeData], fpath: str, mode: str): self.fpath = fpath assert 'b' in mode, f""Tensor storage should be opened in binary mode, got '{mode}'"" if 'w' in mode: file_h = PathManager.open(fpath, mode) elif: local_fpath = PathManager.get_local_path(fpath) file_h = open(local_fpath, mode) else: raise ValueError(f'Unsupported file mode {mode}, supported modes: rb, wb') super().__init__(data_schema, file_h)",False,'r' in mode,'rb' in mode,0.6604881882667542 4118,"def get_and_post(self, request, *args, **kwargs): """"""Http get/post method :return: :rtype: dict or :class:`requests.models.Request` """""" if: return self.get_capabilities(request=request) elif self.ogc_request.is_get_records_request: return self.get_records(request=request) elif self.ogc_request.is_get_record_by_id_request: return self.get_record_by_id(request=request) else: return OperationNotSupportedException(ogc_request=self.ogc_request)",True,self.ogc_request.is_get_capabilities_request,self.ogc_request.is_get_capabilities_request,0.6505910158157349 4119,"def get_and_post(self, request, *args, **kwargs): """"""Http get/post method :return: :rtype: dict or :class:`requests.models.Request` """""" if self.ogc_request.is_get_capabilities_request: return self.get_capabilities(request=request) elif: return self.get_records(request=request) elif self.ogc_request.is_get_record_by_id_request: return self.get_record_by_id(request=request) else: return OperationNotSupportedException(ogc_request=self.ogc_request)",True,self.ogc_request.is_get_records_request,self.ogc_request.is_get_records_request,0.6481926441192627 4120,"def get_and_post(self, request, *args, **kwargs): """"""Http get/post method :return: :rtype: dict or :class:`requests.models.Request` """""" if self.ogc_request.is_get_capabilities_request: return self.get_capabilities(request=request) elif self.ogc_request.is_get_records_request: return self.get_records(request=request) elif: return self.get_record_by_id(request=request) else: return OperationNotSupportedException(ogc_request=self.ogc_request)",True,self.ogc_request.is_get_record_by_id_request,self.ogc_request.is_get_record_by_id_request,0.6492671966552734 4121,"def reachReward(): reachRew = -reachDist reachDistxy = np.linalg.norm(objPos[:-1] - fingerCOM[:-1]) zRew = np.linalg.norm(fingerCOM[-1] - self.init_fingerCOM[-1]) if: reachRew = -reachDist else: reachRew = -reachDistxy - 2 * zRew if reachDist < 0.05: reachRew = -reachDist + max(actions[-1], 0) / 50 return (reachRew, reachDist)",True,reachDistxy < 0.05,reachDistxy < 0.05,0.6491813659667969 4122,"def reachReward(): reachRew = -reachDist reachDistxy = np.linalg.norm(objPos[:-1] - fingerCOM[:-1]) zRew = np.linalg.norm(fingerCOM[-1] - self.init_fingerCOM[-1]) if reachDistxy < 0.05: reachRew = -reachDist else: reachRew = -reachDistxy - 2 * zRew if: reachRew = -reachDist + max(actions[-1], 0) / 50 return (reachRew, reachDist)",True,reachDist < 0.05,reachDist < 0.05,0.6502033472061157 4123,"def _stop_child_threads(self, name=None): """"""Stops all threads spawn by this activity. """""" for thread_name, thread in list(self._child_thread_map.items()): if: LOG.debug('%s: Stopping child thread %s', self.name, thread_name) thread.kill() self._child_thread_map.pop(thread_name, None)",False,name is None or thread_name == name,not name or thread_name is name,0.6488657593727112 4124,"def delexicaliseDomain(utt, dictionary, domain): for key, val in dictionary: if: utt = (''+ utt +'').replace(''+ key +'','' + val +'') utt = utt[1:-1] for key, val in dictionary: utt = (''+ utt +'').replace(''+ key +'','' + val +'') utt = utt[1:-1] return utt",False,key == domain or key == 'value',key in domain,0.6479183435440063 4125,"def asbool(s): """""" Return the boolean value ``True`` if the case-lowered value of string input ``s`` is a :term:`truthy string`. If ``s`` is already one of the boolean values ``True`` or ``False``, return it."""""" if: return False if isinstance(s, bool): return s s = str(s).strip() return s.lower() in truthy",False,s is None,s in truthy,0.6594080924987793 4126,"def asbool(s): """""" Return the boolean value ``True`` if the case-lowered value of string input ``s`` is a :term:`truthy string`. If ``s`` is already one of the boolean values ``True`` or ``False``, return it."""""" if s is None: return False if: return s s = str(s).strip() return s.lower() in truthy",True,"isinstance(s, bool)","isinstance(s, bool)",0.6464124917984009 4127,"def __iand__(self, other): if: return NotImplemented self.intersection_update(other) return self",False,"not isinstance(other, IdentitySet)","not isinstance(other, IandOOD)",0.6512737274169922 4128,"def encode_msg_to(kind, msg, **kwargs): if: return msg elif kind =='str': return msg_to_str(msg, **kwargs) elif kind == 'dict': return msg_to_dict(msg, **kwargs) else: raise ValueError('Unsupported type')",False,kind == 'obj',kind == 'int',0.6593990325927734 4129,"def encode_msg_to(kind, msg, **kwargs): if kind == 'obj': return msg elif: return msg_to_str(msg, **kwargs) elif kind == 'dict': return msg_to_dict(msg, **kwargs) else: raise ValueError('Unsupported type')",True,kind == 'str',kind == 'str',0.6606404781341553 4130,"def encode_msg_to(kind, msg, **kwargs): if kind == 'obj': return msg elif kind =='str': return msg_to_str(msg, **kwargs) elif: return msg_to_dict(msg, **kwargs) else: raise ValueError('Unsupported type')",True,kind == 'dict',kind == 'dict',0.6567518711090088 4131,"def preload(self): try: self.batch = next(self.loader) except StopIteration: self.batch = None return None with torch.cuda.stream(self.stream): for k, v in self.batch.items(): if: self.batch[k] = self.batch[k].to(device=self.device, non_blocking=True)",False,torch.is_tensor(v),v.device != self.device,0.6433067321777344 4132,"def read_contigs_list(contigs_list): contigs = [] with open(contigs_list) as fin: reader = csv.reader(fin, delimiter='\t') for row in reader: contig = row[-1] if: contigs.append(contig) return contigs",True,contig not in contigs,contig not in contigs,0.6549176573753357 4133,"def request(self, method, url, fields=None, headers=None, **urlopen_kw): """""" Make a request using :meth:`urlopen` with the appropriate encoding of ``fields`` based on the ``method`` used. This is a convenience method that requires the least amount of manual effort. It can be used in most situations, while still having the option to drop down to more specific methods when necessary, such as :meth:`request_encode_url`, :meth:`request_encode_body`, or even the lowest level :meth:`urlopen`. """""" method = method.upper() if: return self.request_encode_url(method, url, fields=fields, headers=headers, **urlopen_kw) else: return self.request_encode_body(method, url, fields=fields, headers=headers, **urlopen_kw)",True,method in self._encode_url_methods,method in self._encode_url_methods,0.6457902789115906 4134,"def parse_and_softmax(x): if: x = np.fromstring(x[1:-1], dtype=np.float32, sep=',') x = torch.tensor(x).softmax(dim=0) return to_numpy(x)",False,"isinstance(x, str)",x.shape[1] == 2,0.6459165215492249 4135,"@property def configuration_snippets(self): snippets = self.installer.configuration_snippets() if: return for snippet in snippets: body = snippet.get('body', {}) if body.get('program') and 'command:' in snippet.get('body').get('program'): del body['program'] return snippets",False,not snippets,snippets is None,0.6653828024864197 4136,"@property def configuration_snippets(self): snippets = self.installer.configuration_snippets() if not snippets: return for snippet in snippets: body = snippet.get('body', {}) if: del body['program'] return snippets",False,body.get('program') and 'command:' in snippet.get('body').get('program'),'program' in body,0.6482031345367432 4137,"def insert(self, pos, element): if: self._list.insert(pos, element) set.add(self, element)",False,element not in self,pos not in self._list,0.6581041216850281 4138,"def summary(self): """""" Print a set of tables containing the estimates of the survival function, and its standard errors """""" if: self.summary_proc(0) else: for g in range(len(self.groups)): self.summary_proc(g)",False,self.exog == None,self.groups is None,0.6485174894332886 4139,"def Pigmento_Update(self): if: doc = self.Current_Document() d_cm = doc['d_cm'] d_cd = doc['d_cd'] d_cp = doc['d_cp'] d = doc['depth'] self.Read_Color(doc, d_cm, d_cd, d_cp, d)",False,self.canvas() is not None and self.canvas().view() is not None,self.Save_Color,0.6479625701904297 4140,"@property def samples_by_label(self): samples_by_label = {} for sample in self.samples: if: samples_by_label[sample.label] = [] samples_by_label[sample.label].append(sample) return samples_by_label",True,sample.label not in samples_by_label,sample.label not in samples_by_label,0.6476686000823975 4141,"@classmethod def completion_candidates_opts(cls, args): """"""Return candidates for arguments that start with '-'"""""" subcmd = cls._get_subcmd(args) if: return candidates.for_args(subcmd) else: return super().completion_candidates_opts(args)",True,subcmd,subcmd,0.6666673421859741 4142,"def copy(self): """""" Make a copy of this :class:`ParserElement`. Useful for defining different parse actions for the same parsing pattern, using copies of the original parse element. Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress(""K"") integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress(""M"") print(OneOrMore(integerK | integerM | integer).parseString(""5K 100 640K 256M"")) prints:: [5120, 100, 655360, 268435456] Equivalent form of ``expr.copy()`` is just ``expr()``:: integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress(""M"") """""" cpy = copy.copy(self) cpy.parseAction = self.parseAction[:] cpy.ignoreExprs = self.ignoreExprs[:] if: cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS return cpy",True,self.copyDefaultWhiteChars,self.copyDefaultWhiteChars,0.6464734673500061 4143,"def modify(self, fileobj, events, data=None): """""" Change a registered file object monitored events and data. """""" try: key = self._fd_to_key[self._fileobj_lookup(fileobj)] except KeyError: raise KeyError('{0!r} is not registered'.format(fileobj)) if: self.unregister(fileobj) key = self.register(fileobj, events, data) elif data!= key.data: key = key._replace(data=data) self._fd_to_key[key.fd] = key return key",True,events != key.events,events != key.events,0.6556844711303711 4144,"def modify(self, fileobj, events, data=None): """""" Change a registered file object monitored events and data. """""" try: key = self._fd_to_key[self._fileobj_lookup(fileobj)] except KeyError: raise KeyError('{0!r} is not registered'.format(fileobj)) if events!= key.events: self.unregister(fileobj) key = self.register(fileobj, events, data) elif: key = key._replace(data=data) self._fd_to_key[key.fd] = key return key",True,data != key.data,data != key.data,0.6566362380981445 4145,"def get_custom_gdbye(chat_id): welcome_settings = SESSION.query(Welcome).get(str(chat_id)) ret = DEFAULT_GOODBYE if: ret = welcome_settings.custom_leave SESSION.close() return ret",True,welcome_settings and welcome_settings.custom_leave,welcome_settings and welcome_settings.custom_leave,0.6453497409820557 4146,"def set_order_line_base_prices(apps, schema_editor): with connection.cursor() as cursor: cursor.execute(RAW_SQL) records = cursor.fetchall() if: sender = registry.get_app_config('order') post_migrate.connect(partial(on_migrations_complete, updated_orders_pks=[record[0] for record in records]), weak=False, dispatch_uid='send_order_updated', sender=sender)",True,records,records,0.660419225692749 4147,"def _augment(self, img, _): ret = super()._augment(img, _) if: return np.concatenate([ret] * 3, axis=2) else: return ret",False,self.keepshape,self.keep_ratio,0.6495426893234253 4148,"def _delay_7(x): status, x = trans_xy(x, 'x') if: return x df = alphaFunc.delay(x, 7) return np.nan_to_num(df.values)",True,not status,not status,0.656653106212616 4149,"def init_weights(m): if: nn.init.xavier_uniform_(m.weight)",True,"hasattr(m, 'weight') and (not isinstance(m, TiedLinear)) and (not isinstance(m, TiedHeadModule))","hasattr(m, 'weight') and (not isinstance(m, TiedLinear)) and (not isinstance(m, TiedHeadModule))",0.6477242708206177 4150,"def fast_cgi_rce(host, port, php_filepath='', commond=''): php_code = """" if: php_code = """" % commond exp_payload = exp_payload_base.format(path=php_filepath, data_length=str(len(php_code)), php_code=php_code) res = send_socket(host, port, timeout=5, waittime=1, payload=exp_payload) if '0b8c4ba32f584b513cb08b17d638a688' in res: return (True, res) return False",True,commond,commond,0.6652563214302063 4151,"def fast_cgi_rce(host, port, php_filepath='', commond=''): php_code = """" if commond: php_code = """" % commond exp_payload = exp_payload_base.format(path=php_filepath, data_length=str(len(php_code)), php_code=php_code) res = send_socket(host, port, timeout=5, waittime=1, payload=exp_payload) if: return (True, res) return False",False,'0b8c4ba32f584b513cb08b17d638a688' in res,res,0.6475144624710083 4152,"def __iter__(self): for n in range(len(self.stimuli)): inds = self.fixations.n == n if: yield (~inds, inds)",False,inds.sum(),inds,0.6640585660934448 4153,"def response_handler(resp: Response) -> Json: if: return format_view(resp.body) raise ViewCreateError(resp, request)",True,resp.is_success,resp.is_success,0.6522146463394165 4154,"@property def special_tokens_map(self): """""" A dictionary mapping special token class attribute (cls_token, unk_token...) to their values ('', ''...) """""" set_attr = {} for attr in self.SPECIAL_TOKENS_ATTRIBUTES: attr_value = getattr(self, '_' + attr) if: set_attr[attr] = attr_value return set_attr",True,attr_value,attr_value,0.6572332382202148 4155,"def push_load_test(self): if: return payload = self.transport.get_empty_payload() payload[0] = self.RPC_LOAD_TEST_PUSH payload[1:] = self.load_test_payload self.transport.do_push_rpc_sync(payload, self.rpc_load_test_push_reply)",True,not self.hw_valid,not self.hw_valid,0.6536791324615479 4156,"def set_str(string, add_null): if: return struct.pack('B%dsB' % len(string), len(string) + 1, string, 0) else: return struct.pack('B%ds' % len(string), len(string), string)",True,add_null,add_null,0.6602304577827454 4157,"def create_public_address_file(mnode, vips): """""" Create public_addresses file and add vips Args: mnode (str): Node on which commands has to be executed. vips (list): List of virtual ips Returns: bool: True if successfully creates public_address file else false """""" publicip_file_path = '/etc/ctdb/public_addresses' ret = check_file_availability(mnode, publicip_file_path, 'public_addresses') if: g.log.info('Failed to delete pre-existing public_addressesfile in %s', mnode) return False g.log.info('Deleted pre-existing public_addressesfile in %s', mnode) for vip in vips: ret, _, _ = g.run(mnode, 'echo -e %s >> %s' % (vip, publicip_file_path)) if ret: g.log.error('Failed to add vip list in %s', mnode) return False g.log.info('vip list added succssfully to %sfile in all node', publicip_file_path) return True",False,not ret,ret != 0,0.66001296043396 4158,"def create_public_address_file(mnode, vips): """""" Create public_addresses file and add vips Args: mnode (str): Node on which commands has to be executed. vips (list): List of virtual ips Returns: bool: True if successfully creates public_address file else false """""" publicip_file_path = '/etc/ctdb/public_addresses' ret = check_file_availability(mnode, publicip_file_path, 'public_addresses') if not ret: g.log.info('Failed to delete pre-existing public_addressesfile in %s', mnode) return False g.log.info('Deleted pre-existing public_addressesfile in %s', mnode) for vip in vips: ret, _, _ = g.run(mnode, 'echo -e %s >> %s' % (vip, publicip_file_path)) if: g.log.error('Failed to add vip list in %s', mnode) return False g.log.info('vip list added succssfully to %sfile in all node', publicip_file_path) return True",False,ret,ret != 0,0.6751057505607605 4159,"def _import_image(self, url=None, name=None, tag=None): """""" perform image import """""" cmd = ['import-image'] image = '{0}'.format(name) if: image += ':{0}'.format(tag) cmd.append(image) if url: cmd.append('--from={0}/{1}'.format(url, image)) cmd.append('-n{0}'.format(self.namespace)) cmd.append('--confirm') return self.openshift_cmd(cmd)",True,tag,tag,0.6715067028999329 4160,"def _import_image(self, url=None, name=None, tag=None): """""" perform image import """""" cmd = ['import-image'] image = '{0}'.format(name) if tag: image += ':{0}'.format(tag) cmd.append(image) if: cmd.append('--from={0}/{1}'.format(url, image)) cmd.append('-n{0}'.format(self.namespace)) cmd.append('--confirm') return self.openshift_cmd(cmd)",True,url,url,0.6708195209503174 4161,"def body(self): from pip._internal.utils.hashes import FAVORITE_HASH package = None if: package = self.req.original_link if self.req.original_link else getattr(self.req,'req', None) return' %s --hash=%s:%s' % (package or 'unknown package', FAVORITE_HASH, self.gotten_hash)",True,self.req,self.req,0.6744242906570435 4162,"def match_report_id(report_id, dispatch_rule, primary_rule): """""" Return the new_report_id with the report_id by removing every ""useless"" fields from it. :param tuple report_id: Original report id :param powerapi.DispatchRule dispatch_rule: DispatchRule rule :param Any primary_rule: """""" new_report_id = () for i in range(len(report_id)): if: return new_report_id if dispatch_rule.fields[i] == primary_rule.fields[i]: new_report_id += (report_id[i],) else: return new_report_id return new_report_id",False,i >= len(primary_rule.fields),report_id[i] == primary_rule.fields[i],0.6458202600479126 4163,"def match_report_id(report_id, dispatch_rule, primary_rule): """""" Return the new_report_id with the report_id by removing every ""useless"" fields from it. :param tuple report_id: Original report id :param powerapi.DispatchRule dispatch_rule: DispatchRule rule :param Any primary_rule: """""" new_report_id = () for i in range(len(report_id)): if i >= len(primary_rule.fields): return new_report_id if: new_report_id += (report_id[i],) else: return new_report_id return new_report_id",False,dispatch_rule.fields[i] == primary_rule.fields[i],"isinstance(report_id[i], tuple)",0.6449794173240662 4164,"def determine_purge_packages(): """""" Determine list of packages that where previously installed which are no longer needed. :returns: list of package names """""" if: pkgs = [p for p in PACKAGES if p.startswith('python-')] pkgs.append('python-glance') pkgs.append('python-memcache') pkgs.extend(['python-cinderclient', 'python-os-brick', 'python-oslo.rootwrap']) if CompareOpenStackReleases(os_release('glance')) >='stein': pkgs.append('glance-registry') return pkgs return []",False,CompareOpenStackReleases(os_release('glance')) >= 'rocky',PACKAGES,0.6422046422958374 4165,"def determine_purge_packages(): """""" Determine list of packages that where previously installed which are no longer needed. :returns: list of package names """""" if CompareOpenStackReleases(os_release('glance')) >= 'rocky': pkgs = [p for p in PACKAGES if p.startswith('python-')] pkgs.append('python-glance') pkgs.append('python-memcache') pkgs.extend(['python-cinderclient', 'python-os-brick', 'python-oslo.rootwrap']) if: pkgs.append('glance-registry') return pkgs return []",False,CompareOpenStackReleases(os_release('glance')) >= 'stein',CompareOpenStackReleases(os_release('glance-registry')) >= 'rocky',0.6436449289321899 4166,"def get_kernel_filters(self, kernelListFile): if: with open(kernelListFile, 'r') as fileReader: lines = [line.rstrip() for line in fileReader if not line.startswith('#')] lines = [re.compile(line) for line in lines if line] return lines else: return []",False,os.path.isfile(kernelListFile),kernelListFile,0.6448720693588257 4167,"def find_next_comma(string): """"""Find the position of next comma in the string. If no ',' is found in the string, return the string length. All chars inside '()' and '[]' are treated as one element and thus ',' inside these brackets are ignored. """""" assert string.count('(') == string.count(')') and string.count('[') == string.count(']'), f'Imbalanced brackets exist in {string}' end = len(string) for idx, char in enumerate(string): pre = string[:idx] if: end = idx break return end",False,"char == ',' and pre.count('(') == pre.count(')') and (pre.count('[') == pre.count(']'))","char == ',' and pre.count('(') == pre.count(')') and (pre.count(')') == pre.count(')'))",0.6470401287078857 4168,"def _path_absolute(path): """"""Replacement for os.path.abspath."""""" if: path = _os.getcwd() try: return _os._getfullpathname(path) except AttributeError: if path.startswith('/'): return path else: return _path_join(_os.getcwd(), path)",False,not path,path is None,0.6629112958908081 4169,"def _path_absolute(path): """"""Replacement for os.path.abspath."""""" if not path: path = _os.getcwd() try: return _os._getfullpathname(path) except AttributeError: if: return path else: return _path_join(_os.getcwd(), path)",False,path.startswith('/'),os.path.isabs(path),0.6424508094787598 4170,"def glob0(dirname, basename): if: if os.path.isdir(dirname): return [basename] elif os.path.lexists(os.path.join(dirname, basename)): return [basename] return []",True,not basename,not basename,0.6620343327522278 4171,"def glob0(dirname, basename): if not basename: if: return [basename] elif os.path.lexists(os.path.join(dirname, basename)): return [basename] return []",True,os.path.isdir(dirname),os.path.isdir(dirname),0.646787703037262 4172,"def glob0(dirname, basename): if not basename: if os.path.isdir(dirname): return [basename] elif: return [basename] return []",True,"os.path.lexists(os.path.join(dirname, basename))","os.path.lexists(os.path.join(dirname, basename))",0.6476970911026001 4173,"def run(): """"""Run Markdown from the command line."""""" options, logging_level = parse_options() if: sys.exit(2) logger.setLevel(logging_level) console_handler = logging.StreamHandler() logger.addHandler(console_handler) if logging_level <= WARNING: warnings.filterwarnings('default') logging.captureWarnings(True) warn_logger = logging.getLogger('py.warnings') warn_logger.addHandler(console_handler) markdown.markdownFromFile(**options)",True,not options,not options,0.657294511795044 4174,"def run(): """"""Run Markdown from the command line."""""" options, logging_level = parse_options() if not options: sys.exit(2) logger.setLevel(logging_level) console_handler = logging.StreamHandler() logger.addHandler(console_handler) if: warnings.filterwarnings('default') logging.captureWarnings(True) warn_logger = logging.getLogger('py.warnings') warn_logger.addHandler(console_handler) markdown.markdownFromFile(**options)",False,logging_level <= WARNING,options['warnings'],0.6525025963783264 4175,"def set_destination(self, destination=None, destination_path=None): if: destination_path = self.settings._destination else: self.settings._destination = destination_path self.destination = self.destination_class(destination=destination, destination_path=destination_path, encoding=self.settings.output_encoding, error_handler=self.settings.output_encoding_error_handler)",True,destination_path is None,destination_path is None,0.6540553569793701 4176,"def test_native_padding(self): align = np.dtype('i').alignment for j in range(8): if: s = 'bi' else: s = 'b%dxi' % j self._check('@' + s, {'f0': ('i1', 0), 'f1': ('i', align * (1 + j // align))}) self._check('=' + s, {'f0': ('i1', 0), 'f1': ('i', 1 + j)})",True,j == 0,j == 0,0.6681411266326904 4177,"def _set_months(self, months): self.months = months if: s = _sign(self.months) div, mod = divmod(self.months * s, 12) self.months = mod * s self.years = div * s else: self.years = 0",False,abs(self.months) > 11,self.months,0.6496173143386841 4178,"def test_find_timebase_success(self): """"""test_find_timebase_success note: test assumes you have set test_helpers.drivers_with_device_connected"""""" if: return drivers_to_use = drivers_with_device_connected def test(driver): with driver.open_unit() as device: five_milliseconds = 0.005 config = TimebaseOptions(max_time_interval=five_milliseconds, no_of_samples=None, min_collection_time=five_milliseconds * 30, oversample=1) timebase_info = device.find_timebase(config) self.assertValidTimebases(config, timebase_info) self.run_snippet_and_count_problems(drivers_to_use, test)",False,not drivers_with_device_connected,drivers_with_device_connected is None,0.650317370891571 4179,"def refactor(source, fixer_names, ignore=None, filename=''): """"""Return refactored code using lib2to3. Skip if ignore string is produced in the refactored code. """""" from lib2to3 import pgen2 try: new_text = refactor_with_2to3(source, fixer_names=fixer_names, filename=filename) except (pgen2.parse.ParseError, SyntaxError, UnicodeDecodeError, UnicodeEncodeError): return source if: if ignore in new_text and ignore not in source: return source return new_text",False,ignore,ignore is not None,0.6673616766929626 4180,"def refactor(source, fixer_names, ignore=None, filename=''): """"""Return refactored code using lib2to3. Skip if ignore string is produced in the refactored code. """""" from lib2to3 import pgen2 try: new_text = refactor_with_2to3(source, fixer_names=fixer_names, filename=filename) except (pgen2.parse.ParseError, SyntaxError, UnicodeDecodeError, UnicodeEncodeError): return source if ignore: if: return source return new_text",False,ignore in new_text and ignore not in source,new_text == '',0.649689793586731 4181,"@property def norm(self): if: return getattr(self, self.norm_name) else: return None",False,self.norm_name,self.norm_name is not None,0.6507500410079956 4182,"def get_names(self): if: try: FileHandler.names = tuple(socket.gethostbyname_ex('localhost')[2] + socket.gethostbyname_ex(socket.gethostname())[2]) except socket.gaierror: FileHandler.names = (socket.gethostbyname('localhost'),) return FileHandler.names",True,FileHandler.names is None,FileHandler.names is None,0.6493005752563477 4183,"def get_nextflow_dir(proj_location, nf_dir_arg): """"""Define nextflow directory."""""" if: default_dir = os.path.join(proj_location, NF_DIR_NAME) os.mkdir(default_dir) if not os.path.isdir(default_dir) else None return default_dir else: os.mkdir(nf_dir_arg) if not os.path.isdir(nf_dir_arg) else None return nf_dir_arg",True,nf_dir_arg is None,nf_dir_arg is None,0.6471732258796692 4184,"def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs): input_shape = input_ids.shape if: attention_mask = input_ids.new_ones(input_shape) if is_not_None(past): input_ids = input_ids[:, -1:] return {'input_ids': input_ids, 'attention_mask': attention_mask}",False,is_None(attention_mask),attention_mask is None,0.6466373205184937 4185,"def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs): input_shape = input_ids.shape if is_None(attention_mask): attention_mask = input_ids.new_ones(input_shape) if: input_ids = input_ids[:, -1:] return {'input_ids': input_ids, 'attention_mask': attention_mask}",False,is_not_None(past),past is not None,0.6460896730422974 4186,"@culture.setter def culture(self, value): """"""gets/sets the property value culture"""""" if: self._culture = value",True,value is not None,value is not None,0.6539340019226074 4187,"def get_readonly_fields(self, request, obj=None): if: return ['sponsorship_benefit', 'benefit_internal_value'] return []",False,obj and (not obj.open_for_editing),"isinstance(obj, models.SponsorshipBenefit)",0.6463111639022827 4188,"def check_statuses(self): field_names = ('jobid','state') cmd = f'''squeue -u {self.USER} --Format ""{','.join(field_names)}"" -h''' output = {} ret = run_command(cmd, output, num_retries=6, retry_delay_s=10) if: logger.error('Failed to run squeue command=[%s] ret=%s err=%s', cmd, ret, output['stderr']) raise ExecutionError(f'squeue command failed: {ret}') return self._get_statuses_from_output(output['stdout'])",True,ret != 0,ret != 0,0.663162350654602 4189,"def do_transform(self, datum, approved_sets=None, blocked_sets=None, **kwargs): if: return (None, None) return super().do_transform(datum, **kwargs)",False,"not oai_allowed_by_sets(datum, blocked_sets, approved_sets)",datum.id in self.db_id,0.6416930556297302 4190,"def valueText(self, property): if: v = self.propertyToData[property].value p = v.value() return self.tr('(%.2f, %.2f)' % (p.x(), p.y())) return super(VariantManager, self).valueText(property)",False,self.propertyToData.contains(property),property in self.propertyToData,0.6451529264450073 4191,"def save_pretrained(self, save_directory_or_file): """"""Save a model card object to the directory or file `save_directory_or_file`."""""" if: output_model_card_file = os.path.join(save_directory_or_file, MODEL_CARD_NAME) else: output_model_card_file = save_directory_or_file self.to_json_file(output_model_card_file) logger.info('Model card saved in {}'.format(output_model_card_file))",False,os.path.isdir(save_directory_or_file),"isinstance(save_directory_or_file, str)",0.6441595554351807 4192,"def remove_weight_norm(self): print('Removing weight norm...') for l in self.ups: if: remove_weight_norm(l[-1]) else: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post)",False,self.h.sampling_rate == 24000,"isinstance(l, nn.Conv2d)",0.6443573236465454 4193,"def forward(self, x, style, noise=None, return_noise=False): out = self.conv(x, style) if: out, noise = self.noise_injector(out, noise=noise, return_noise=return_noise) else: out = self.noise_injector(out, noise=noise, return_noise=return_noise) out = self.activate(out) if return_noise: return (out, noise) else: return out",False,return_noise,noise is None,0.6571168303489685 4194,"def forward(self, x, style, noise=None, return_noise=False): out = self.conv(x, style) if return_noise: out, noise = self.noise_injector(out, noise=noise, return_noise=return_noise) else: out = self.noise_injector(out, noise=noise, return_noise=return_noise) out = self.activate(out) if: return (out, noise) else: return out",True,return_noise,return_noise,0.6567704677581787 4195,"def __call__(self, key, *args, **kwargs): """"""Get the attribute(s) values for the data key."""""" if: return [self._lookup_single(k, *args, **kwargs) for k in key] else: return self._lookup_single(key, *args, **kwargs)",False,"isinstance(key, (list, np.ndarray, pd.Series))","isinstance(key, list)",0.6448241472244263 4196,"def __new__(clstype, name, bases, clsdict): base = ([x for x in bases if type(x) is MetaHelper] + [None])[0] clsdict.setdefault('_baseclass', base) preinit = getattr(base, '_classpreinit', None) if: clstype, name, bases, clsdict = preinit(clstype, name, bases, clsdict) return type.__new__(clstype, name, bases, clsdict)",True,preinit is not None,preinit is not None,0.6530123949050903 4197,"def assert_boolean(value): """"""Return param value it is boolean otherwise raise an RPCError."""""" if: return value raise RPCError(BAD_REQUEST, f'{value} should be a boolean value')",False,"value in (False, True)","isinstance(value, bool)",0.6535387635231018 4198,"def forward(self, pred, target, weight=None, avg_factor=1.0): if: loss_cls = self.loss_weight * sigmoid_focal_loss(pred, target, weight, gamma=self.gamma, alpha=self.alpha) else: raise NotImplementedError if self.reduction =='mean': loss_cls = loss_cls.mean() elif self.reduction =='sum': loss_cls = loss_cls.sum() return loss_cls / avg_factor",True,self.use_sigmoid,self.use_sigmoid,0.6496390104293823 4199,"def forward(self, pred, target, weight=None, avg_factor=1.0): if self.use_sigmoid: loss_cls = self.loss_weight * sigmoid_focal_loss(pred, target, weight, gamma=self.gamma, alpha=self.alpha) else: raise NotImplementedError if: loss_cls = loss_cls.mean() elif self.reduction =='sum': loss_cls = loss_cls.sum() return loss_cls / avg_factor",True,self.reduction == 'mean',self.reduction == 'mean',0.6481670141220093 4200,"def forward(self, pred, target, weight=None, avg_factor=1.0): if self.use_sigmoid: loss_cls = self.loss_weight * sigmoid_focal_loss(pred, target, weight, gamma=self.gamma, alpha=self.alpha) else: raise NotImplementedError if self.reduction =='mean': loss_cls = loss_cls.mean() elif: loss_cls = loss_cls.sum() return loss_cls / avg_factor",True,self.reduction == 'sum',self.reduction == 'sum',0.6480669975280762 4201,"@property def VSTools(self): """""" Microsoft Visual Studio Tools """""" paths = ['Common7\\IDE', 'Common7\\Tools'] if: arch_subdir = self.pi.current_dir(hidex86=True, x64=True) paths += ['Common7\\IDE\\CommonExtensions\\Microsoft\\TestWindow'] paths += ['Team Tools\\Performance Tools'] paths += ['Team Tools\\Performance Tools%s' % arch_subdir] return [os.path.join(self.si.VSInstallDir, path) for path in paths]",False,self.vc_ver >= 14.0,self.pi,0.6479980945587158 4202,"def check_running(self): """""" A function to check if the instance associated with this object is live. Outputs: (bool): a boolean representing if the current instance is in the state ""running"" or not. """""" if: condition = False print('No instance declared') else: self.instance.load() if self.instance.state['Name'] == 'running': condition = True print('Instance {} exists and is active, safe to test'.format(self.instance.instance_id)) else: condition = False print('Instance {} is {}, not safe to test.'.format(self.instance.instance_id, self.instance.state['Name'])) return condition",True,self.instance is None,self.instance is None,0.6501725316047668 4203,"def check_running(self): """""" A function to check if the instance associated with this object is live. Outputs: (bool): a boolean representing if the current instance is in the state ""running"" or not. """""" if self.instance is None: condition = False print('No instance declared') else: self.instance.load() if: condition = True print('Instance {} exists and is active, safe to test'.format(self.instance.instance_id)) else: condition = False print('Instance {} is {}, not safe to test.'.format(self.instance.instance_id, self.instance.state['Name'])) return condition",False,self.instance.state['Name'] == 'running',self.instance.state['Running'],0.6467366218566895 4204,"def setDefault(self, key: str, default_value: Any) -> None: """"""Changes the default value of a preference. If the preference is currently set to the old default, the value of the preference will be set to the new default. :param key: The key of the preference to set the default of. :param default_value: The new default value of the preference. """""" preference = self._findPreference(key) if: Logger.log('w', 'Tried to set the default value of non-existing setting %s.', key) return if preference.getValue() == preference.getDefault(): self.setValue(key, default_value) preference.setDefault(default_value)",True,not preference,not preference,0.65184485912323 4205,"def setDefault(self, key: str, default_value: Any) -> None: """"""Changes the default value of a preference. If the preference is currently set to the old default, the value of the preference will be set to the new default. :param key: The key of the preference to set the default of. :param default_value: The new default value of the preference. """""" preference = self._findPreference(key) if not preference: Logger.log('w', 'Tried to set the default value of non-existing setting %s.', key) return if: self.setValue(key, default_value) preference.setDefault(default_value)",False,preference.getValue() == preference.getDefault(),not self.getValue(key),0.6454864740371704 4206,"def free(self): """""" Release allocated key object :return: None """""" if: log.error('Free %s', status_to_str(apis.kStatus_SSS_Fail)) return apis.sss_key_object_free(ctypes.byref(self.keyobject))",True,self.keyobject is None,self.keyobject is None,0.6492781639099121 4207,"def preprocess_obs_dict(obs_dict): """""" Apply internal replay buffer representation changes: save images as bytes """""" for obs_key, obs in obs_dict.items(): if: obs_dict[obs_key] = unnormalize_image(obs) return obs_dict",True,'image' in obs_key and obs is not None,'image' in obs_key and obs is not None,0.6439459323883057 4208,"def end_features(self, x): x = self.pool(x) x = x.view(x.size(0), -1) if: x = self.final_layer(x) return x",True,self.final_layer is not None,self.final_layer is not None,0.6462841629981995 4209,"def read_exclusions(exclusions_file): """"""Reads a CSV file of excluded timestamps. Args: exclusions_file: A file object containing a csv of video-id,timestamp. Returns: A set of strings containing excluded image keys, e.g. ""aaaaaaaaaaa,0904"", or an empty set if exclusions file is None. """""" excluded = set() if: with pathmgr.open(exclusions_file, 'r') as f: reader = csv.reader(f) for row in reader: assert len(row) == 2, 'Expected only 2 columns, got:'+ row excluded.add(make_image_key(row[0], row[1])) return excluded",True,exclusions_file,exclusions_file,0.65013587474823 4210,"def get_plans(self, plan=None): plans = [Plan(None, {'billing_day_of_month': None, 'id':'mozilla-concrete-mortar', 'price': '1', 'trial_period': None}), Plan(None, {'billing_day_of_month': None, 'id':'mozilla-concrete-brick', 'price': '10', 'trial_period': None})] if: plans.append(plan) self.mocks['plans'].all.return_value = plans return config.get_plans(get_client())",True,plan,plan,0.6812646389007568 4211,"def generate(self, sentence: str): output_sentence = sentence new_sentence, changed = auxiliary_negation_removal(sentence, self.nlp) if: output_sentence = new_sentence return [output_sentence]",True,changed,changed,0.675764799118042 4212,"def sample_n(self, n, return_pre_tanh_value=False): z = self.normal.sample_n(n) if: return (torch.tanh(z), z) else: return torch.tanh(z)",True,return_pre_tanh_value,return_pre_tanh_value,0.6452785730361938 4213,"def get_tasks_by_name(self, _name=None, _instance=None): if: _name = '%' if not _instance: return self.get_dict(DB_TASK_TABLE + '_by_name', (_name,)) else: return self.get_dict(DB_TASK_TABLE + '_by_instance', (_name, _instance))",True,not _name,not _name,0.6641800403594971 4214,"def get_tasks_by_name(self, _name=None, _instance=None): if not _name: _name = '%' if: return self.get_dict(DB_TASK_TABLE + '_by_name', (_name,)) else: return self.get_dict(DB_TASK_TABLE + '_by_instance', (_name, _instance))",False,not _instance,_instance is None,0.6548538208007812 4215,"def do_POST(self): parsed = urlparse(self.path) self.server.rest_server.trace.append(f'POST:{parsed.path}') if: return for endpoint, fns in endpoints: if re.search(endpoint, parsed.path) is not None and 'POST' in fns: return self.handle_fn(fns['POST'], parsed.path, parsed.query) self.not_found()",False,not self.authenticated(),not parsed.path,0.6498814821243286 4216,"def do_POST(self): parsed = urlparse(self.path) self.server.rest_server.trace.append(f'POST:{parsed.path}') if not self.authenticated(): return for endpoint, fns in endpoints: if: return self.handle_fn(fns['POST'], parsed.path, parsed.query) self.not_found()",True,"re.search(endpoint, parsed.path) is not None and 'POST' in fns","re.search(endpoint, parsed.path) is not None and 'POST' in fns",0.6447993516921997 4217,"def __init__(self, oid, value): if: raise TypeError('oid must be an ObjectIdentifier') self._oid = oid self._value = value",True,"not isinstance(oid, ObjectIdentifier)","not isinstance(oid, ObjectIdentifier)",0.657292902469635 4218,"def grokparse(input): fullpath = dsz.ui.GetString('Please enter the full path to the file you want parse: ', '') if: dsz.ui.Echo('No string entered', dsz.ERROR) return False success = parsefile(fullpath) if not success: return False return True",True,fullpath == '',fullpath == '',0.6566593647003174 4219,"def grokparse(input): fullpath = dsz.ui.GetString('Please enter the full path to the file you want parse: ', '') if fullpath == '': dsz.ui.Echo('No string entered', dsz.ERROR) return False success = parsefile(fullpath) if: return False return True",True,not success,not success,0.6566974520683289 4220,"def __init__(self, args, dicts, training): super().__init__(args) self.dicts = dicts self.training = training if: self.lang_pairs = args.lang_pairs args.source_lang, args.target_lang = args.lang_pairs[0].split('-') else: self.lang_pairs = ['{}-{}'.format(args.source_lang, args.target_lang)] self.eval_lang_pairs = self.lang_pairs self.model_lang_pairs = self.lang_pairs self.langs = list(dicts.keys())",False,training,args.lang_pairs is not None,0.6730592250823975 4221,"def linestrings(geom): if: return if geom.geom_type == 'LineString': yield geom elif geom.geom_type in ('MultiLineString', 'GeometryCollection'): for g in geom.geoms: for ls in linestrings(g): yield ls else: raise RuntimeError('unexpected geom_type %s' % geom.geom_type)",False,geom is None,geom.geom_type == 'MultiLineString',0.6695955991744995 4222,"def linestrings(geom): if geom is None: return if: yield geom elif geom.geom_type in ('MultiLineString', 'GeometryCollection'): for g in geom.geoms: for ls in linestrings(g): yield ls else: raise RuntimeError('unexpected geom_type %s' % geom.geom_type)",True,geom.geom_type == 'LineString',geom.geom_type == 'LineString',0.6518832445144653 4223,"def linestrings(geom): if geom is None: return if geom.geom_type == 'LineString': yield geom elif: for g in geom.geoms: for ls in linestrings(g): yield ls else: raise RuntimeError('unexpected geom_type %s' % geom.geom_type)",False,"geom.geom_type in ('MultiLineString', 'GeometryCollection')",geom.geom_type == 'MultiLineString',0.6458837389945984 4224,"def instance_bce_with_logits(logits, labels, reduction='mean'): assert logits.dim() == 2 loss = F.binary_cross_entropy_with_logits(logits, labels, reduction=reduction) if: loss *= labels.size(1) return loss",False,reduction == 'mean',labels.dim() == 2,0.6509321928024292 4225,"def reraise(tp, value, tb=None): if: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value",True,value is None,value is None,0.6635467410087585 4226,"def reraise(tp, value, tb=None): if value is None: value = tp() if: raise value.with_traceback(tb) raise value",True,value.__traceback__ is not tb,value.__traceback__ is not tb,0.6544689536094666 4227,"def calculate_uncertainty(logits, classes): """""" We estimate uncerainty as L1 distance between 0.0 and the logit prediction in 'logits' for the foreground class in `classes`. Args: logits (Tensor): A tensor of shape (R, C,...) or (R, 1,...) for class-specific or class-agnostic, where R is the total number of predicted masks in all images and C is the number of foreground classes. The values are logits. classes (list): A list of length R that contains either predicted of ground truth class for eash predicted mask. Returns: scores (Tensor): A tensor of shape (R, 1,...) that contains uncertainty scores with the most uncertain locations having the highest uncertainty score. """""" if: gt_class_logits = logits.clone() else: gt_class_logits = logits[torch.arange(logits.shape[0], device=logits.device), classes].unsqueeze(1) return -torch.abs(gt_class_logits)",True,logits.shape[1] == 1,logits.shape[1] == 1,0.651202917098999 4228,"def level_to_args(level): level = level / MAX_LEVEL * 30 if: level = -level return (level, replace_value)",False,np.random.random() < 0.5,random.random() > 0.5,0.6478989124298096 4229,"def get_mujoco_py_mjlib(): """"""Returns the mujoco_py mjlib module."""""" class MjlibDelegate: """"""Wrapper that forwards mjlib calls."""""" def __init__(self, lib): self._lib = lib def __getattr__(self, name: str): if: return getattr(self._lib, '_' + name) raise AttributeError(name) return MjlibDelegate(get_mujoco_py().cymj)",False,name.startswith('mj'),name.startswith('mujoco_'),0.6457219123840332 4230,"def __iadd__(self, other): if: other = Literal(other) return self.append(other)",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6489107608795166 4231,"def gfind_delete(mnode, volname, sessname, debug=False): """"""Deletes the given session Args: mnode (str): Node on which cmd has to be executed. volname (str): volume name sessname (str): session name Kwargs: debug (bool): If this option is set to True, then the command will be run with debug mode. If this option is set to False, then the command will not be run with debug mode. Returns: tuple: Tuple containing three elements (ret, out, err). The first element'ret' is of type 'int' and is the return value of command execution. The second element 'out' is of type'str' and is the stdout value of the command execution. The third element 'err' is of type'str' and is the stderr value of the command execution. Example: gfind_delete(""abc.com"", testvol, testsession) """""" params = '' if: params = params +'--debug' cmd = 'glusterfind delete %s %s %s' % (sessname, volname, params) return g.run(mnode, cmd)",True,debug,debug,0.6690095663070679 4232,"def to_bytes(self): if: raise SDKException(ErrorCode.unsupported_signature_scheme) return bytes.fromhex(self.__value)",False,self.__scheme != SignatureScheme.SHA256withECDSA,self.__value is None,0.6601133346557617 4233,"def observe_event(self, ev_cls, states=None): brick = _lookup_service_brick_by_ev_cls(ev_cls) if: brick.register_observer(ev_cls, self.name, states)",True,brick is not None,brick is not None,0.6583319902420044 4234,"@staticmethod def _get_successors(program_warp, op, successors): for peer in program_warp.forward_edges[op]: if: successors.add(peer) Reformer._get_successors(program_warp, peer, successors)",True,peer not in successors,peer not in successors,0.6489831209182739 4235,"def test_video_exists(page_url): logger.info(""(page_url='%s')"" % page_url) response = httptools.downloadpage(page_url) if: return (False, '[Vidzi] El archivo no existe o ha sido borrado') return (True, '')",False,not response.sucess or 'File was deleted or expired' in response.data,'Object not found' in response,0.6465617418289185 4236,"def batch_iterator() -> Iterator[types.TransitionMapping]: for epoch_num in itertools.islice(itertools.count(), self.n_epochs): some_batch_was_yielded = False for batch in self.batch_loader: yield batch some_batch_was_yielded = True if: raise AssertionError(f'Data loader returned no data during epoch {epoch_num} -- did it reset correctly?') if self.on_epoch_end is not None: self.on_epoch_end(epoch_num)",False,not some_batch_was_yielded,some_batch_was_yielded,0.6492682695388794 4237,"def batch_iterator() -> Iterator[types.TransitionMapping]: for epoch_num in itertools.islice(itertools.count(), self.n_epochs): some_batch_was_yielded = False for batch in self.batch_loader: yield batch some_batch_was_yielded = True if not some_batch_was_yielded: raise AssertionError(f'Data loader returned no data during epoch {epoch_num} -- did it reset correctly?') if: self.on_epoch_end(epoch_num)",True,self.on_epoch_end is not None,self.on_epoch_end is not None,0.6460894346237183 4238,"def __init__(self, mailer, default_device=None, allow_duplicates=False): self.mailer = mailer self.default_device = default_device self.filename = 'jobs.sqlite' self.allow_duplicates = allow_duplicates if: conn = self._conn() conn.execute('create table jobs (id integer primary key, created text, last_attempt text, build_url text, build_id text, build_type text, build_abi text, build_platform text, build_sdk text, changeset text, changeset_dirs text, tree text, revision text, builder_type text, enable_unittests int, attempts int, device text)') conn.execute('create table tests (id integer primary key, name text, config_file text, chunk int, guid text, repos text, jobid integer)') conn.execute('create table treeherder (id integer primary key, attempts int, last_attempt text, machine text,project text,job_collection text)') conn.commit() conn.close()",False,not os.path.exists(self.filename),allow_duplicates,0.6436935663223267 4239,"def validate_address(self, data): try: data['country_code'] = data.get('country', '') if: data['street_address'] = f""{data['street_address_1']}\n{data['street_address_2']}"" normalized_data = i18naddress.normalize_address(data) if getattr(self, 'enable_normalization', True): data = normalized_data del data['sorting_code'] except i18naddress.InvalidAddress as exc: self.add_field_errors(exc.errors) return data",False,data['street_address_1'] or data['street_address_2'],'Street_address_1' in data and 'street_address_2' in data,0.6422083377838135 4240,"def validate_address(self, data): try: data['country_code'] = data.get('country', '') if data['street_address_1'] or data['street_address_2']: data['street_address'] = f""{data['street_address_1']}\n{data['street_address_2']}"" normalized_data = i18naddress.normalize_address(data) if: data = normalized_data del data['sorting_code'] except i18naddress.InvalidAddress as exc: self.add_field_errors(exc.errors) return data",False,"getattr(self, 'enable_normalization', True)",len(normalized_data) > 0,0.6435102224349976 4241,"def possible_objects(self, *args, **kwargs): """"""return an iterator on possible objects in this registry for the given context """""" for objects in self.values(): obj = self._select_best(objects, *args, **kwargs) if: continue yield obj",True,obj is None,obj is None,0.6531797647476196 4242,"def check_update(self, update): if: return self.filters(update)",True,"isinstance(update, Update) and update.effective_message","isinstance(update, Update) and update.effective_message",0.6439880132675171 4243,"def create(name, *args, **kwargs): """""" Create a model instance. Parameters ---------- name : str Model name. Can be one of 'inception','resnet18','resnet34', 'resnet50','resnet101', and'resnet152'. pretrained : bool, optional Only applied for'resnet*' models. If True, will use ImageNet pretrained model. Default: True cut_at_pooling : bool, optional If True, will cut the model before the last global pooling layer and ignore the remaining kwargs. Default: False num_features : int, optional If positive, will append a Linear layer after the global pooling layer, with this number of output units, followed by a BatchNorm layer. Otherwise these layers will not be appended. Default: 256 for 'inception', 0 for'resnet*' norm : bool, optional If True, will normalize the feature to be unit L2-norm for each sample. Otherwise will append a ReLU layer after the above Linear layer if num_features > 0. Default: False dropout : float, optional If positive, will append a Dropout layer with this dropout rate. Default: 0 num_classes : int, optional If positive, will append a Linear layer at the end as the classifier with this number of output units. Default: 0 """""" if: raise KeyError('Unknown model:', name) return __factory[name](*args, **kwargs)",True,name not in __factory,name not in __factory,0.6516941785812378 4244,"def as_list(seq): if: return list(seq) else: return [seq]",False,is_sequence(seq),"isinstance(seq, (list, tuple))",0.6495057940483093 4245,"def __read_file(self, strInfo, strFilePath, listStore): locations = fileUtils.read_file(strInfo, strFilePath) if: for strLoc in locations.keys(): listStore.append([strLoc, locations[strLoc][0], locations[strLoc][1], locations[strLoc][2]]) return listStore",False,locations,not listStore,0.6684142351150513 4246,"def batch_completed(self, batch_size, duration): """"""Callback indicate how long it took to run a batch"""""" if: old_duration = self._smoothed_batch_duration if old_duration == 0: new_duration = duration else: new_duration = 0.8 * old_duration + 0.2 * duration self._smoothed_batch_duration = new_duration",False,batch_size == self._effective_batch_size,self._smoothed_batch_duration != 0 or self._smoothed_batch_duration != 0,0.6449193954467773 4247,"def batch_completed(self, batch_size, duration): """"""Callback indicate how long it took to run a batch"""""" if batch_size == self._effective_batch_size: old_duration = self._smoothed_batch_duration if: new_duration = duration else: new_duration = 0.8 * old_duration + 0.2 * duration self._smoothed_batch_duration = new_duration",False,old_duration == 0,old_duration is None or old_duration < 0.1,0.6602780818939209 4248,"def referenceButtonClicked(self): file, _selectedFilter = QFileDialog.getOpenFileName(self, self.tr('Select reference'), self.reference.text(), '*.ref') if: self.reference.setText(file)",True,file,file,0.6638280749320984 4249,"def after_run(self, run_context, run_values): duration = time.time() - self._start_time loss_value = run_values.results if: num_examples_per_step = FLAGS.batch_size examples_per_sec = num_examples_per_step / duration sec_per_batch = float(duration) format_str = '%s: step %d, loss = %.2f (%.1f examples/sec; %.3f sec/batch)' print(format_str % (datetime.now(), self._step, loss_value, examples_per_sec, sec_per_batch))",False,self._step % 10 == 0,duration > 0,0.6597130298614502 4250,"@property def MaxDist(self): if: return int(self._entity_data.get('MaxDist')) return int(2000)",True,'MaxDist' in self._entity_data,'MaxDist' in self._entity_data,0.6485474109649658 4251,"def _check_log_interval(self): """"""Log to tensorboard."""""" if: for metric_name in self.list_metrics_name: logging.info(f'(Step: {self.steps}) train_{metric_name} = {self.train_metrics[metric_name].result():.4f}.') self._write_to_tensorboard(self.train_metrics, stage='train') self.reset_states_train()",False,self.steps % self.config['log_interval_steps'] == 0,self.steps > 0 and self.list_metrics_name,0.646125853061676 4252,"def fetchone(self): if: return None try: return next(self.rows) except StopIteration: return None",True,self.rows is None,self.rows is None,0.6488251686096191 4253,"def stop(self): """"""Stop the video stimulus."""""" if: self._file.stop() self.rewind() if self._backend =='mediadecoder' and self._file.audioformat: self._audio.close_stream()",False,self._is_preloaded,self._backend == 'videodecoder',0.6489619612693787 4254,"def stop(self): """"""Stop the video stimulus."""""" if self._is_preloaded: self._file.stop() self.rewind() if: self._audio.close_stream()",False,self._backend == 'mediadecoder' and self._file.audioformat,self._audio,0.644709587097168 4255,"def _cueout_duration(line): param, value = line.split(':', 1) res = re.match('DURATION=(.*)', value) if: return (None, res.group(1))",True,res,res,0.6716028451919556 4256,"def process_mmdet_results(mmdet_results, cat_id=1): """"""Process mmdet results, and return a list of bboxes. :param mmdet_results: :param cat_id: category id (default: 1 for human) :return: a list of detected bounding boxes """""" if: det_results = mmdet_results[0] else: det_results = mmdet_results bboxes = det_results[cat_id - 1] person_results = [] for bbox in bboxes: person = {} person['bbox'] = bbox person_results.append(person) return person_results",False,"isinstance(mmdet_results, tuple)",len(mmdet_results) == 1,0.644173264503479 4257,"def json(self): """"""Get the result of simplejson.loads if possible."""""" if: raise AttributeError('Not a JSON response') try: from simplejson import loads except ImportError: from json import loads return loads(self.data)",False,'json' not in self.mimetype,not self.data,0.6501449942588806 4258,"def insertPlando(self, params): if: return None try: sql = 'insert into plando_repo (plando_name, init_time, author, long_desc, suggested_preset, update_key, ips_max_size) values (%s, now(), %s, %s, %s, %s, %s);' self.cursor.execute(sql, params) self.commit() except Exception as e: print('DB.insertPlando::error execute: {} error: {}'.format(sql, e)) self.dbAvailable = False",True,self.dbAvailable == False,self.dbAvailable == False,0.6538499593734741 4259,"def __str__(self): """"""returns object as string"""""" if: self.__init() return self._json",True,self._json is None,self._json is None,0.6558138132095337 4260,"def setUp(self): self.get_super_method(self,'setUp')() self.volume['voltype']['dist_count'] = 1 ret = self.setup_volume_and_mount_volume([self.mounts[0]]) if: raise ExecutionError('Volume creation or mount failed: %s' % self.volname) self.first_client = self.mounts[0].client_system",True,not ret,not ret,0.6643942594528198 4261,"def find_volume_by_name(self, volume, mounts=False): """""" return the index of a volume """""" volumes = [] if: volumes = self.get_volume_mounts() else: volumes = self.get_volumes() for exist_volume in volumes: if exist_volume['name'] == volume['name']: return exist_volume return None",True,mounts,mounts,0.6683362722396851 4262,"def find_volume_by_name(self, volume, mounts=False): """""" return the index of a volume """""" volumes = [] if mounts: volumes = self.get_volume_mounts() else: volumes = self.get_volumes() for exist_volume in volumes: if: return exist_volume return None",True,exist_volume['name'] == volume['name'],exist_volume['name'] == volume['name'],0.6477402448654175 4263,"def guess_byte_order(self): self.mat_stream.seek(0) mopt = read_dtype(self.mat_stream, np.dtype('i4')) self.mat_stream.seek(0) if: return '<' if mopt < 0 or mopt > 5000: return SYS_LITTLE_ENDIAN and '>' or '<' return SYS_LITTLE_ENDIAN and '<' or '>'",False,mopt == 0,mopt < 10000,0.6639753580093384 4264,"def guess_byte_order(self): self.mat_stream.seek(0) mopt = read_dtype(self.mat_stream, np.dtype('i4')) self.mat_stream.seek(0) if mopt == 0: return '<' if: return SYS_LITTLE_ENDIAN and '>' or '<' return SYS_LITTLE_ENDIAN and '<' or '>'",False,mopt < 0 or mopt > 5000,mopt == 1,0.6549861431121826 4265,"@heading_underline.setter def heading_underline(self, value): """"""Setter for heading_underline."""""" if: raise AttributeError(TextScreen._getter_exception_message.format('heading_underline')) else: self._heading_underline = value",True,self.has_surface,self.has_surface,0.6558957099914551 4266,"def get_minwidth_str(self): """""" computes theme.minwidth string based on traffic.format and traffic.graphlen parameters """""" minwidth_str = '' if: graph_len = int(self._graphlen / 2) graph_prefix = '0' * graph_len minwidth_str += graph_prefix minwidth_str += '1000' try: length = int(re.match('{:\\.(\\d+)f}', self._format).group(1)) if length > 0: minwidth_str += '.' + '0' * length except AttributeError: return '1000.00KiB/s' finally: minwidth_str += 'KiB/s' return minwidth_str",True,self._graphlen > 0,self._graphlen > 0,0.6530315279960632 4267,"def get_minwidth_str(self): """""" computes theme.minwidth string based on traffic.format and traffic.graphlen parameters """""" minwidth_str = '' if self._graphlen > 0: graph_len = int(self._graphlen / 2) graph_prefix = '0' * graph_len minwidth_str += graph_prefix minwidth_str += '1000' try: length = int(re.match('{:\\.(\\d+)f}', self._format).group(1)) if: minwidth_str += '.' + '0' * length except AttributeError: return '1000.00KiB/s' finally: minwidth_str += 'KiB/s' return minwidth_str",True,length > 0,length > 0,0.6603907942771912 4268,"def execute_cli(self): r = [] v = self.cli('show arp') if: for match in self.rx_line.finditer(v): r += [{'ip': match.group('ip'),'mac': match.group('mac'), 'interface': 'ip%s' % match.group('interface')}] else: for match in self.rx_line1.finditer(v): r += [match.groupdict()] return r",False,not self.is_iscom2624g,self.use_mac_address,0.6441394090652466 4269,"def create_session(self, image_in, params=None): """""" Create New Session :param image_in: filepath for image to be sent to server as part of session creation :param params: additional JSON params as part of session reqeust :return: json response which contains session id and other details """""" selector = '/session/' params = self._update_client_id(params) status, response, _ = MONAILabelUtils.http_upload('PUT', self._server_url, selector, params, [image_in], headers=self._headers) if: raise MONAILabelClientException(MONAILabelError.SERVER_ERROR, f'Status: {status}; Response: {bytes_to_str(response)}', status, response) response = bytes_to_str(response) logging.debug(f'Response: {response}') return json.loads(response)",True,status != 200,status != 200,0.6772538423538208 4270,"def _set_experiment_name(config): experiment_name = 'id_' + hashlib.sha1(repr(sorted(config.items())).encode()).hexdigest() if: experiment_name += '_' + FLAGS.expt_name_suffix config['experiment_name'] = experiment_name",True,FLAGS.expt_name_suffix,FLAGS.expt_name_suffix,0.648231029510498 4271,"def __eq__(self, other): """"""Compares the current instance with another one."""""" if: return True if not isinstance(other, self.__class__): raise TypeError('Can only compare repeated composite fields against other repeated composite fields.') return self._values == other._values",True,self is other,self is other,0.6568971872329712 4272,"def __eq__(self, other): """"""Compares the current instance with another one."""""" if self is other: return True if: raise TypeError('Can only compare repeated composite fields against other repeated composite fields.') return self._values == other._values",False,"not isinstance(other, self.__class__)",self.__class__ != other.__class__,0.6458083391189575 4273,"def __init__(self, config): super().__init__(config) if: logger.warning('If you want to use `RobertaForMaskedLM` make sure `config.is_decoder=False` for bi-directional self-attention.') self.roberta = RobertaModel(config, add_pooling_layer=False) self.lm_head = RobertaLMHead(config) self.update_keys_to_ignore(config, ['lm_head.decoder.weight']) self.init_weights()",False,config.is_decoder,not config.is_decoder,0.6593718528747559 4274,"def safe_repr(obj, short=False): try: result = repr(obj) except Exception: result = object.__repr__(obj) if: return result return result[:_MAX_LENGTH] +'[truncated]...'",False,not short or len(result) < _MAX_LENGTH,short,0.6463885307312012 4275,"def default(self, obj): if: return obj.isoformat()",False,"isinstance(obj, (datetime.date, datetime.datetime))","isinstance(obj, datetime.date)",0.6442791223526001 4276,"def check_user(self, username, password): if: from glances.password import GlancesPassword pwd = GlancesPassword(username=username, config=self.config) return pwd.check_password(self.server.user_dict[username], password) else: return False",False,username in self.server.user_dict,username and password,0.6453099250793457 4277,"def __delitem__(self, key): if: filename = self.filename(key) if os.path.exists(filename): os.remove(filename) del self._cache[key]",False,self.cache_location is not None,key in self._cache,0.6463392972946167 4278,"def __delitem__(self, key): if self.cache_location is not None: filename = self.filename(key) if: os.remove(filename) del self._cache[key]",False,os.path.exists(filename),filename,0.6454284191131592 4279,"def parses(self, sentence: str, max_depth: Optional[int]=None) -> Iterator[Tree]: """"""Parses to an iterator of unambiguous Trees"""""" if: max_depth = len(sentence) * 10 start_pos = CharPosition(sentence) chart = EarleyLRChart(self, start_pos, use_backpointers=True) meta = chart.final_meta() if meta: return _generate_from_meta(meta=meta, max_depth=max_depth) else: return _find_longest_parsable_prefix(chart)",True,max_depth is None,max_depth is None,0.6578086614608765 4280,"def parses(self, sentence: str, max_depth: Optional[int]=None) -> Iterator[Tree]: """"""Parses to an iterator of unambiguous Trees"""""" if max_depth is None: max_depth = len(sentence) * 10 start_pos = CharPosition(sentence) chart = EarleyLRChart(self, start_pos, use_backpointers=True) meta = chart.final_meta() if: return _generate_from_meta(meta=meta, max_depth=max_depth) else: return _find_longest_parsable_prefix(chart)",True,meta,meta,0.6684451699256897 4281,"def _fill_virsh_allocated_ips_and_bridges(self): with LibvirtController.connection_context(libvirt_uri=self._libvirt_uri) as conn: for net in conn.listAllNetworks(): try: for lease in net.DHCPLeases(): net_bridge = lease.get('iface') if: self._add_allocated_net_bridge(net_bridge) ipaddr = lease.get('ipaddr') if ipaddr: self._add_allocated_ip(IPAddress(ipaddr)) except libvirt.libvirtError: log.info(f'Can not get dhcp leases from {net.name()}')",True,net_bridge,net_bridge,0.6605123281478882 4282,"def _fill_virsh_allocated_ips_and_bridges(self): with LibvirtController.connection_context(libvirt_uri=self._libvirt_uri) as conn: for net in conn.listAllNetworks(): try: for lease in net.DHCPLeases(): net_bridge = lease.get('iface') if net_bridge: self._add_allocated_net_bridge(net_bridge) ipaddr = lease.get('ipaddr') if: self._add_allocated_ip(IPAddress(ipaddr)) except libvirt.libvirtError: log.info(f'Can not get dhcp leases from {net.name()}')",True,ipaddr,ipaddr,0.6730599403381348 4283,"def set_bn_fix(m): classname = m.__class__.__name__ if: for p in m.parameters(): p.requires_grad = False",True,classname.find('BatchNorm') != -1,classname.find('BatchNorm') != -1,0.6449122428894043 4284,"def set(chat_id: Union[str, int], admins_: List[User]): if: chat_id = str(chat_id) admins[chat_id] = admins_",True,"isinstance(chat_id, int)","isinstance(chat_id, int)",0.6513153910636902 4285,"def parse_sources(sources: dict) -> Union[SourcesV1, SourcesV2, SourcesV3]: """"""Parse sources.json Args: sources: A dict of sources.json Returns: Union[SourcesV1, SourcesV2, SourcesV3] """""" dbt_schema_version = get_dbt_schema_version(artifact_json=sources) if: return SourcesV1(**sources) elif dbt_schema_version == ArtifactTypes.SOURCES_V2.value.dbt_schema_version: return SourcesV2(**sources) elif dbt_schema_version == ArtifactTypes.SOURCES_V3.value.dbt_schema_version: return SourcesV3(**sources) raise ValueError('Not a manifest.json')",True,dbt_schema_version == ArtifactTypes.SOURCES_V1.value.dbt_schema_version,dbt_schema_version == ArtifactTypes.SOURCES_V1.value.dbt_schema_version,0.645764946937561 4286,"def parse_sources(sources: dict) -> Union[SourcesV1, SourcesV2, SourcesV3]: """"""Parse sources.json Args: sources: A dict of sources.json Returns: Union[SourcesV1, SourcesV2, SourcesV3] """""" dbt_schema_version = get_dbt_schema_version(artifact_json=sources) if dbt_schema_version == ArtifactTypes.SOURCES_V1.value.dbt_schema_version: return SourcesV1(**sources) elif: return SourcesV2(**sources) elif dbt_schema_version == ArtifactTypes.SOURCES_V3.value.dbt_schema_version: return SourcesV3(**sources) raise ValueError('Not a manifest.json')",True,dbt_schema_version == ArtifactTypes.SOURCES_V2.value.dbt_schema_version,dbt_schema_version == ArtifactTypes.SOURCES_V2.value.dbt_schema_version,0.6448081731796265 4287,"def parse_sources(sources: dict) -> Union[SourcesV1, SourcesV2, SourcesV3]: """"""Parse sources.json Args: sources: A dict of sources.json Returns: Union[SourcesV1, SourcesV2, SourcesV3] """""" dbt_schema_version = get_dbt_schema_version(artifact_json=sources) if dbt_schema_version == ArtifactTypes.SOURCES_V1.value.dbt_schema_version: return SourcesV1(**sources) elif dbt_schema_version == ArtifactTypes.SOURCES_V2.value.dbt_schema_version: return SourcesV2(**sources) elif: return SourcesV3(**sources) raise ValueError('Not a manifest.json')",True,dbt_schema_version == ArtifactTypes.SOURCES_V3.value.dbt_schema_version,dbt_schema_version == ArtifactTypes.SOURCES_V3.value.dbt_schema_version,0.6446651220321655 4288,"@property def n_to_1_limit(self): """""" Maximum number of interference combinations to run per receiver for N to 1. - A value of ``0`` disables N to 1 entirely. - A value of ``-1`` allows unlimited N to 1. (N is set to the maximum.) Examples -------- >>> aedtapp.results.current_revision.n_to_1_limit = 2**20 >>> aedtapp.results.current_revision.n_to_1_limit 1048576 """""" if: raise RuntimeError('This function only supported in AEDT version 2024.1 and later.') if self.revision_loaded: engine = self.emit_project._emit_api.get_engine() max_instances = engine.n_to_1_limit else: max_instances = None return max_instances",False,self.emit_project._aedt_version < '2024.1',self.aedt_version < '2024.1',0.6489173173904419 4289,"@property def n_to_1_limit(self): """""" Maximum number of interference combinations to run per receiver for N to 1. - A value of ``0`` disables N to 1 entirely. - A value of ``-1`` allows unlimited N to 1. (N is set to the maximum.) Examples -------- >>> aedtapp.results.current_revision.n_to_1_limit = 2**20 >>> aedtapp.results.current_revision.n_to_1_limit 1048576 """""" if self.emit_project._aedt_version < '2024.1': raise RuntimeError('This function only supported in AEDT version 2024.1 and later.') if: engine = self.emit_project._emit_api.get_engine() max_instances = engine.n_to_1_limit else: max_instances = None return max_instances",False,self.revision_loaded,self.emit_project._emit_api,0.6475289463996887 4290,"def Authextra(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if: x = self._tab.Indirect(o + self._tab.Pos) from wamp.Map import Map obj = Map() obj.Init(self._tab.Bytes, x) return obj return None",True,o != 0,o != 0,0.6666703224182129 4291,"@property def mixup_enabled(self): if: return self.loader.collate_fn.mixup_enabled else: return False",False,"isinstance(self.loader.collate_fn, FastCollateMixup)","hasattr(self.loader, 'collate_fn')",0.6440582275390625 4292,"def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True): all_encoder_layers = [] for layer_module in self.layer: hidden_states = layer_module(hidden_states, attention_mask) if output_all_encoded_layers: all_encoder_layers.append(hidden_states) if: all_encoder_layers.append(hidden_states) return all_encoder_layers",False,not output_all_encoded_layers,output_all_encoded_layers,0.6459535360336304 4293,"def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True): all_encoder_layers = [] for layer_module in self.layer: hidden_states = layer_module(hidden_states, attention_mask) if: all_encoder_layers.append(hidden_states) if not output_all_encoded_layers: all_encoder_layers.append(hidden_states) return all_encoder_layers",True,output_all_encoded_layers,output_all_encoded_layers,0.646918535232544 4294,"def _vxc_mat(self, cell, ao, wv, mask, xctype, shls_slice, ao_loc, hermi): if: wv[0] *= 0.5 if xctype == 'MGGA': tau_idx = 4 wv[tau_idx] *= 0.5 return _vxc_mat(cell, ao, wv, mask, xctype, shls_slice, ao_loc, hermi)",False,hermi == 1,xctype == 'GCC',0.6569929122924805 4295,"def _vxc_mat(self, cell, ao, wv, mask, xctype, shls_slice, ao_loc, hermi): if hermi == 1: wv[0] *= 0.5 if: tau_idx = 4 wv[tau_idx] *= 0.5 return _vxc_mat(cell, ao, wv, mask, xctype, shls_slice, ao_loc, hermi)",False,xctype == 'MGGA',ao_loc == 0,0.6487569808959961 4296,"def get_dependencies(self, string=True): """""" Return all the dependencies within the distribution. Parameters ---------- string : bool If True, return dependencies as strings. Otherwise, as a tuple of tuples. """""" if: f = self._stringify else: f = lambda d: d return set(map(f, self.atoms.keys()))",True,string,string,0.671151876449585 4297,"def readline(self): s = self.char or b'' self.char = None c = self.fp.read(1) while c not in b'\r\n': s = s + c c = self.fp.read(1) self.char = self.fp.read(1) if: self.char = None return s.decode('latin-1')",False,self.char in b'\r\n',s == b'\x00',0.6482902765274048 4298,"def lyft_data_prep(root_path, info_prefix, version, dataset_name, out_dir, max_sweeps=10): """"""Prepare data related to Lyft dataset. Related data consists of '.pkl' files recording basic infos, and 2D annotations. Although the ground truth database is not used in Lyft, it can also be generated like nuScenes. Args: root_path (str): Path of dataset root. info_prefix (str): The prefix of info filenames. version (str): Dataset version. dataset_name (str): The dataset class name. out_dir (str): Output directory of the groundtruth database info. Not used here if the groundtruth database is not generated. max_sweeps (int): Number of input consecutive frames. Default: 10 """""" lyft_converter.create_lyft_infos(root_path, info_prefix, version=version, max_sweeps=max_sweeps) if: return train_info_name = f'{info_prefix}_infos_train' val_info_name = f'{info_prefix}_infos_val' info_train_path = osp.join(root_path, f'{train_info_name}.pkl') info_val_path = osp.join(root_path, f'{val_info_name}.pkl') lyft_converter.export_2d_annotation(root_path, info_train_path, version=version) lyft_converter.export_2d_annotation(root_path, info_val_path, version=version)",False,version == 'v1.01-test',osp.exists(root_path),0.6432472467422485 4299,"def read_tx(self): version = self._read_le_int32() time = self._read_le_uint32() inputs = self._read_inputs() outputs = self._read_outputs() locktime = self._read_le_uint32() if: txcomment = self._read_varbytes() else: txcomment = b'' return TxTrezarcoin(version, time, inputs, outputs, locktime, txcomment)",True,version >= 2,version >= 2,0.6646723747253418 4300,"def register_run(self, targets: Iterable[Step], name: Optional[str]=None) -> Run: if: name = petname.generate() steps: Dict[str, StepInfo] = {} for step in targets: step_info = StepInfo.new_from_step(step) self.unique_id_to_info[step.unique_id] = step_info steps[step.unique_id] = step_info run = Run(name, steps, utc_now_datetime()) self.runs[name] = run return run",True,name is None,name is None,0.6606816649436951 4301,"def layer_url_template(counter, layer, conf): if: return 'http://ed-map-fi.wide.basefarm.net/ol_tiles/fi/%s/%i/%i/%i.jpeg' else: return 'http://ed-map-fi.wide.basefarm.net/ol_tiles/fi/%s/%i/%i/%i.png'",False,layer == LAYER_SAT,layer == 'image',0.6479644775390625 4302,"def close(self): if: self.viewer = None self._viewers = {}",True,self.viewer is not None,self.viewer is not None,0.6494806408882141 4303,"def load(module, prefix=''): local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {}) module._load_from_state_dict(state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs) for name, child in module._modules.items(): if: load(child, prefix + name + '.')",True,child is not None,child is not None,0.6521399021148682 4304,"def run(self, sess): callback_log('Trigger OnceCallback') if: self._func(sess, self._func_kwargs) else: raise ValueError('No callback function to execute.')",True,self._func,self._func,0.6654449105262756 4305,"def toolsPaths(): """""" return all the tools paths in the hierarqui, sorted correctly! """""" toolsPaths = [pipe.roots.tools()] if: toolsPaths.append(pipe.admin.job().path('tools')) if pipe.admin.job.shot(): toolsPaths.append(pipe.admin.job.shot().path('tools')) toolsPaths.append(pipe.admin.job.shot.user().path('tools')) toolsPaths.reverse() return toolsPaths",False,pipe.admin.job.current(),pipe.admin.job().exists(),0.6484575271606445 4306,"def toolsPaths(): """""" return all the tools paths in the hierarqui, sorted correctly! """""" toolsPaths = [pipe.roots.tools()] if pipe.admin.job.current(): toolsPaths.append(pipe.admin.job().path('tools')) if: toolsPaths.append(pipe.admin.job.shot().path('tools')) toolsPaths.append(pipe.admin.job.shot.user().path('tools')) toolsPaths.reverse() return toolsPaths",False,pipe.admin.job.shot(),pipe.admin.job.shot.user().path(),0.6496037244796753 4307,"def columnCount(self, index=QModelIndex()): """"""DataFrame column number"""""" if: return 2 elif self.total_cols <= self.cols_loaded: return self.total_cols + 1 else: return self.cols_loaded + 1",False,len(self.df.shape) == 1,self.total_cols <= 2,0.6491976380348206 4308,"def columnCount(self, index=QModelIndex()): """"""DataFrame column number"""""" if len(self.df.shape) == 1: return 2 elif: return self.total_cols + 1 else: return self.cols_loaded + 1",False,self.total_cols <= self.cols_loaded,len(self.df.shape) == 2,0.6501741409301758 4309,"def delete_from_spine(self, item): """""" Given a manifest item, remove it from the spine """""" item_id = item.get('id') itemrefs = self.opf.xpath('//opf:spine/opf:itemref[@idref=""%s""]' % item_id, namespaces={'opf': OPF_NS}) if: self.log('\t Spine itemref removed:', item_id) itemref = itemrefs[0] self.fix_tail_before_delete(itemref) itemref.getparent().remove(itemref) self.set(self.opf_name, self.opf)",False,len(itemrefs) > 0,itemrefs,0.6484655737876892 4310,"def _rle_to_string(rle: Sequence[int]) -> str: """""" COCO-compatible RLE-encoded mask to string serialisation """""" rle_string = '' for i, x in enumerate(rle): if: x -= rle[i - 2] more = 1 while more: c = x & 31 x >>= 5 if c & 16: more = x!= -1 else: more = x!= 0 if more: c |= 32 c += 48 rle_string += chr(c) return rle_string",False,i > 2,x & rle[i - 2] > 0,0.668708086013794 4311,"def _rle_to_string(rle: Sequence[int]) -> str: """""" COCO-compatible RLE-encoded mask to string serialisation """""" rle_string = '' for i, x in enumerate(rle): if i > 2: x -= rle[i - 2] more = 1 while more: c = x & 31 x >>= 5 if: more = x!= -1 else: more = x!= 0 if more: c |= 32 c += 48 rle_string += chr(c) return rle_string",False,c & 16,c & 1 << 6,0.6675059795379639 4312,"def _rle_to_string(rle: Sequence[int]) -> str: """""" COCO-compatible RLE-encoded mask to string serialisation """""" rle_string = '' for i, x in enumerate(rle): if i > 2: x -= rle[i - 2] more = 1 while more: c = x & 31 x >>= 5 if c & 16: more = x!= -1 else: more = x!= 0 if: c |= 32 c += 48 rle_string += chr(c) return rle_string",True,more,more,0.675827145576477 4313,"@property def resources(self): """"""returns the admin sites resources"""""" if: self.__init() return self._resources",True,self._resources is None,self._resources is None,0.6574187874794006 4314,"def __init__(self, step, gamma=0.1, min_lr=None, **kwargs): if: assert deep3dmap.core.utils.is_list_of(step, int) assert all([s > 0 for s in step]) elif isinstance(step, int): assert step > 0 else: raise TypeError('""step"" must be a list or integer') self.step = step self.gamma = gamma self.min_lr = min_lr super(StepLrUpdaterHook, self).__init__(**kwargs)",True,"isinstance(step, list)","isinstance(step, list)",0.6502507925033569 4315,"def __init__(self, step, gamma=0.1, min_lr=None, **kwargs): if isinstance(step, list): assert deep3dmap.core.utils.is_list_of(step, int) assert all([s > 0 for s in step]) elif: assert step > 0 else: raise TypeError('""step"" must be a list or integer') self.step = step self.gamma = gamma self.min_lr = min_lr super(StepLrUpdaterHook, self).__init__(**kwargs)",True,"isinstance(step, int)","isinstance(step, int)",0.6511112451553345 4316,"def migrate_chat(old_chat_id, new_chat_id): with INSERTION_LOCK: chat = SESSION.query(Chats).get(str(old_chat_id)) if: chat.chat_id = str(new_chat_id) SESSION.add(chat) SESSION.flush() chat_members = SESSION.query(ChatMembers).filter(ChatMembers.chat == str(old_chat_id)).all() for member in chat_members: member.chat = str(new_chat_id) SESSION.add(member) SESSION.commit()",True,chat,chat,0.6685153841972351 4317,"@pyqtSlot(QtWidgets.QAction) def viewChangeTriggered(self, action): """"""Change widget visibility in response to View menu actions Args: action (QAction): QAction in View menu """""" action_text = action.text() self.logger.info('View change action = %s' % action_text) if: self.main_frame.foldFilterButtonClicked() elif action_text == 'Toggle Tab Pane': self.main_frame.foldTabButtonClicked() elif action_text == 'Toggle Status bar': self.main_frame.statusbarViewChange() else: self.main_frame.metaTabViewChange(action_text) return",False,action_text == 'Toggle Filter List',action_text == 'Filter',0.6537272930145264 4318,"@pyqtSlot(QtWidgets.QAction) def viewChangeTriggered(self, action): """"""Change widget visibility in response to View menu actions Args: action (QAction): QAction in View menu """""" action_text = action.text() self.logger.info('View change action = %s' % action_text) if action_text == 'Toggle Filter List': self.main_frame.foldFilterButtonClicked() elif: self.main_frame.foldTabButtonClicked() elif action_text == 'Toggle Status bar': self.main_frame.statusbarViewChange() else: self.main_frame.metaTabViewChange(action_text) return",False,action_text == 'Toggle Tab Pane',action_text == 'Toggle TabList',0.647758424282074 4319,"@pyqtSlot(QtWidgets.QAction) def viewChangeTriggered(self, action): """"""Change widget visibility in response to View menu actions Args: action (QAction): QAction in View menu """""" action_text = action.text() self.logger.info('View change action = %s' % action_text) if action_text == 'Toggle Filter List': self.main_frame.foldFilterButtonClicked() elif action_text == 'Toggle Tab Pane': self.main_frame.foldTabButtonClicked() elif: self.main_frame.statusbarViewChange() else: self.main_frame.metaTabViewChange(action_text) return",False,action_text == 'Toggle Status bar',action_text == 'Statusbar',0.6514468193054199 4320,"def mask_boxes_outside_range_numpy(boxes, limit_range, min_num_corners=1): """""" Args: boxes: (N, 7) [x, y, z, dx, dy, dz, heading,...], (x, y, z) is the box center limit_range: [minx, miny, minz, maxx, maxy, maxz] min_num_corners: Returns: """""" if: boxes = boxes[:, 0:7] corners = boxes_to_corners_3d(boxes) mask = ((corners >= limit_range[0:3]) & (corners <= limit_range[3:6])).all(axis=2) mask = mask.sum(axis=1) >= min_num_corners return mask",False,boxes.shape[1] > 7,boxes.shape[1] == 2,0.6487515568733215 4321,"def build_index(vcf_file, csi=False): """"""A helper function for indexing VCF files. Args: vcf_file: string. Path to the VCF file to be indexed. csi: bool. If true, index using the CSI format. """""" if: tabix.build_csi_index(vcf_file, min_shift=14) else: tabix.build_index(vcf_file)",True,csi,csi,0.67081218957901 4322,"def extra_info(self): msg = '' if: msg = "", build: '{}'"".format(self.build) if self.expected_build: if not re.match(self.expected_build, self.build): self.critical() msg += "" (expected '{}')"".format(self.expected_build) return msg",True,self.build,self.build,0.6560017466545105 4323,"def extra_info(self): msg = '' if self.build: msg = "", build: '{}'"".format(self.build) if: if not re.match(self.expected_build, self.build): self.critical() msg += "" (expected '{}')"".format(self.expected_build) return msg",True,self.expected_build,self.expected_build,0.655515193939209 4324,"def extra_info(self): msg = '' if self.build: msg = "", build: '{}'"".format(self.build) if self.expected_build: if: self.critical() msg += "" (expected '{}')"".format(self.expected_build) return msg",False,"not re.match(self.expected_build, self.build)",not self.critical,0.6438734531402588 4325,"def query_task_info(task_id): """"""Query task info."""""" pids = get_vega_pids() if: for id, pid in enumerate(pids): info = query_process(pid) if isinstance(info, dict) and info.get('task_id', None) == task_id: return info return None",True,pids,pids,0.6745076775550842 4326,"def query_task_info(task_id): """"""Query task info."""""" pids = get_vega_pids() if pids: for id, pid in enumerate(pids): info = query_process(pid) if: return info return None",False,"isinstance(info, dict) and info.get('task_id', None) == task_id",info.get_id() == task_id,0.6495307087898254 4327,"def connection_from_url(url, **kw): """""" Given a url, return an :class:`.ConnectionPool` instance of its host. This is a shortcut for not having to parse out the scheme, host, and port of the url before creating an :class:`.ConnectionPool` instance. :param url: Absolute URL string that must include the scheme. Port is optional. :param \\**kw: Passes additional parameters to the constructor of the appropriate :class:`.ConnectionPool`. Useful for specifying things like timeout, maxsize, headers, etc. Example:: >>> conn = connection_from_url('http://google.com/') >>> r = conn.request('GET', '/') """""" scheme, host, port = get_host(url) port = port or port_by_scheme.get(scheme, 80) if: return HTTPSConnectionPool(host, port=port, **kw) else: return HTTPConnectionPool(host, port=port, **kw)",True,scheme == 'https',scheme == 'https',0.6492980718612671 4328,"def _on_match(self, pattern_id, matched_from, matched_to, flags, ctx): loaded_pattern = self._sigs.pattern_id_pattern.get(pattern_id) if: return self.matchtracker.add_match(pattern_id, MatchContext(matched_str=ctx[0], orig_str=ctx[1], event=ctx[2], kind=ctx[3], processing_ctx=ctx[4], subtype=ctx[5], extra_safelistdata=ctx[6]))",False,loaded_pattern.subtype and loaded_pattern.subtype != ctx[5],loaded_pattern is None,0.6389764547348022 4329,"def post_val_loop(self, step: int, epoch: int, val_metric: float, best_val_metric: float) -> None: if: wandb.log({f'val/{self.train_config.val_metric_name}': val_metric, f'val/best_{self.train_config.val_metric_name}': best_val_metric, 'epoch': epoch}, step=step + 1)",False,self.is_local_main_process,self.train_config.val_metric_enabled,0.6476960182189941 4330,"def emit(self, o, level=0): """"""Emit an include history file, and its children."""""" if: spaces =''* (level - 1) o.write('# %s%s' % (spaces, self.filename)) if len(self.children) > 0: o.write(' includes:') else: o.write('#\n# INCLUDE HISTORY:\n#') level = level + 1 for child in self.children: o.write('\n') child.emit(o, level)",False,level,self.filename,0.6668748259544373 4331,"def emit(self, o, level=0): """"""Emit an include history file, and its children."""""" if level: spaces =''* (level - 1) o.write('# %s%s' % (spaces, self.filename)) if: o.write(' includes:') else: o.write('#\n# INCLUDE HISTORY:\n#') level = level + 1 for child in self.children: o.write('\n') child.emit(o, level)",False,len(self.children) > 0,self.include_includes,0.644345760345459 4332,"def output_array_of_apifaultdetail(data_objects): if: return for data_object in data_objects['ApiFaultDetail']: output_apifaultdetail(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.647541880607605 4333,"def _recur_strip(s): if: s_tokens = s.split() if eos_token in s_tokens: return''.join(s_tokens[:s_tokens.index(eos_token)]) else: return s else: s_ = [_recur_strip(si) for si in s] return _maybe_list_to_array(s_, s)",True,is_str(s),is_str(s),0.6461538076400757 4334,"def _recur_strip(s): if is_str(s): s_tokens = s.split() if: return''.join(s_tokens[:s_tokens.index(eos_token)]) else: return s else: s_ = [_recur_strip(si) for si in s] return _maybe_list_to_array(s_, s)",True,eos_token in s_tokens,eos_token in s_tokens,0.6518728733062744 4335,"def _read_headers(self): def remove_bom(unicode_str): unicode_bom = u'\ufeff' if: unicode_str = unicode_str[1:] return unicode_str headers = next(self._csv_reader) return [remove_bom(header) for header in headers]",False,unicode_str and unicode_str[0] == unicode_bom,unicode_str.startswith('|'),0.6461989283561707 4336,"def dump(self, indent=0): print(''* indent + self.name,'start') self.expression.dump(indent + 1) self.if_block.dump(indent + 1) if: self.else_block.dump(indent + 1) print(''* indent + self.name, 'end.')",False,self.else_block is not None,"isinstance(self.expression, IfExpression)",0.6470222473144531 4337,"def _validate_key_id(self, key_id: str) -> None: """"""Determine whether a key ID exists. - raw key ID - key ARN - alias name - alias ARN """""" is_alias_arn = key_id.startswith('arn:') and ':alias/' in key_id is_alias_name = key_id.startswith('alias/') if: self._validate_alias(key_id) return self._validate_cmk_id(key_id)",True,is_alias_arn or is_alias_name,is_alias_arn or is_alias_name,0.6505614519119263 4338,"def tft_load_model(self, episode): path = f'./Checkpoints/checkpoint_{episode}' if: self.load_state_dict(torch.load(path)) self.eval()",False,os.path.isfile(path),os.path.exists(path),0.6439727544784546 4339,"def response(self, response, content): challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {}) if: return True return False",True,"challenge.get('reason') in ['integrity', 'stale']","challenge.get('reason') in ['integrity', 'stale']",0.6471117734909058 4340,"def _tzd_to_seconds(tzd): """""" Given w3c compliant TZD, return how far ahead UTC is, else raise ValueError exception. """""" if: return 0 if not (len(tzd) == 6 and (tzd[0] == '-' or tzd[0] == '+') and (tzd[3] == ':' or tzd[3] == '-')): raise ValueError(""Only timezones like +08:00 are accepted and not '{tzd}'."".format(tzd=tzd)) return -60 * (60 * int(tzd[:3]) + int(tzd[4:]))",False,tzd == 'Z',tzd is None,0.6556777954101562 4341,"def _tzd_to_seconds(tzd): """""" Given w3c compliant TZD, return how far ahead UTC is, else raise ValueError exception. """""" if tzd == 'Z': return 0 if: raise ValueError(""Only timezones like +08:00 are accepted and not '{tzd}'."".format(tzd=tzd)) return -60 * (60 * int(tzd[:3]) + int(tzd[4:]))",False,not (len(tzd) == 6 and (tzd[0] == '-' or tzd[0] == '+') and (tzd[3] == ':' or tzd[3] == '-')),"tzd not in ('+08', 'Z')",0.651579737663269 4342,"def _freeze_stages(self): if: self.bn1.eval() for m in [self.conv1, self.bn1]: for param in m.parameters(): param.requires_grad = False for i in range(1, self.frozen_stages + 1): m = getattr(self, 'layer{}'.format(i)) print('layer{}'.format(i)) m.eval() for param in m.parameters(): param.requires_grad = False",True,self.frozen_stages >= 0,self.frozen_stages >= 0,0.6462111473083496 4343,"def _complete_hpc_job_id(self, job_id, serialize=True): self._job_status.hpc_job_ids.remove(job_id) logger.info('Completed HPC job_id=%s', job_id) if: self._serialize_jobs('complete_hpc_job_id')",True,serialize,serialize,0.6840291023254395 4344,"def _eval_op(lhs: str, op: Op, rhs: str) -> bool: try: spec = Specifier(''.join([op.serialize(), rhs])) except InvalidSpecifier: pass else: return spec.contains(lhs) oper: Optional[Operator] = _operators.get(op.serialize()) if: raise UndefinedComparison(f'Undefined {op!r} on {lhs!r} and {rhs!r}.') return oper(lhs, rhs)",True,oper is None,oper is None,0.6557482481002808 4345,"def __init__(self, filename_or_file): if: self.file = open(filename_or_file, 'wt') self.own_file = True else: assert hasattr(filename_or_file,'read'), 'expected file or str, got %s' % filename_or_file self.file = filename_or_file self.own_file = False",True,"isinstance(filename_or_file, str)","isinstance(filename_or_file, str)",0.6490728855133057 4346,"def get_expiration_time(self, app, session): """"""A helper method that returns an expiration date for the session or `None` if the session is linked to the browser session. The default implementation returns now + the permanent session lifetime configured on the application. """""" if: return datetime.utcnow() + app.permanent_session_lifetime",False,session.permanent,self.is_browser_session(app),0.6478058099746704 4347,"def write_pkl(save_path: str, pkl_data: Any, create_dir: bool=False, use_torch=False): """"""Serialize data into a pickle file."""""" if: os.makedirs(os.path.dirname(save_path), exist_ok=True) if use_torch: torch.save(pkl_data, save_path, pickle_protocol=pickle.HIGHEST_PROTOCOL) else: with open(save_path, 'wb') as handle: pickle.dump(pkl_data, handle, protocol=pickle.HIGHEST_PROTOCOL)",True,create_dir,create_dir,0.6575853824615479 4348,"def write_pkl(save_path: str, pkl_data: Any, create_dir: bool=False, use_torch=False): """"""Serialize data into a pickle file."""""" if create_dir: os.makedirs(os.path.dirname(save_path), exist_ok=True) if: torch.save(pkl_data, save_path, pickle_protocol=pickle.HIGHEST_PROTOCOL) else: with open(save_path, 'wb') as handle: pickle.dump(pkl_data, handle, protocol=pickle.HIGHEST_PROTOCOL)",True,use_torch,use_torch,0.6482206583023071 4349,"def wrapper(): all_dev_batches = [] for epoch_index in range(epoch): if: self.current_example = 0 self.current_epoch = epoch_index if shuffle: np.random.shuffle(examples) for batch_data in self._prepare_batch_data(examples, batch_size, phase=phase, read_id=read_id): if len(all_dev_batches) < dev_count: all_dev_batches.append(batch_data) if len(all_dev_batches) == dev_count: for batch in all_dev_batches: yield batch all_dev_batches = []",True,phase == 'train',phase == 'train',0.6497775912284851 4350,"def wrapper(): all_dev_batches = [] for epoch_index in range(epoch): if phase == 'train': self.current_example = 0 self.current_epoch = epoch_index if: np.random.shuffle(examples) for batch_data in self._prepare_batch_data(examples, batch_size, phase=phase, read_id=read_id): if len(all_dev_batches) < dev_count: all_dev_batches.append(batch_data) if len(all_dev_batches) == dev_count: for batch in all_dev_batches: yield batch all_dev_batches = []",True,shuffle,shuffle,0.6593450903892517 4351,"def wrapper(): all_dev_batches = [] for epoch_index in range(epoch): if phase == 'train': self.current_example = 0 self.current_epoch = epoch_index if shuffle: np.random.shuffle(examples) for batch_data in self._prepare_batch_data(examples, batch_size, phase=phase, read_id=read_id): if: all_dev_batches.append(batch_data) if len(all_dev_batches) == dev_count: for batch in all_dev_batches: yield batch all_dev_batches = []",True,len(all_dev_batches) < dev_count,len(all_dev_batches) < dev_count,0.6407375335693359 4352,"def wrapper(): all_dev_batches = [] for epoch_index in range(epoch): if phase == 'train': self.current_example = 0 self.current_epoch = epoch_index if shuffle: np.random.shuffle(examples) for batch_data in self._prepare_batch_data(examples, batch_size, phase=phase, read_id=read_id): if len(all_dev_batches) < dev_count: all_dev_batches.append(batch_data) if: for batch in all_dev_batches: yield batch all_dev_batches = []",True,len(all_dev_batches) == dev_count,len(all_dev_batches) == dev_count,0.641424298286438 4353,"def _get_lr(self, t): if: lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps] else: lrs = [v * self.decay_rate ** (t // self.decay_t) for v in self.base_values] return lrs",False,t < self.warmup_t,self.warmup_steps,0.6539220809936523 4354,"@staticmethod def getBlueDoors(doors): for door in DoorsManager.doors.values(): if: doors.append(door.id)",False,not door.canRandom,door.id not in doors,0.6479164361953735 4355,"def set_temp_thresh(self): if: min_temp = self.threshold_config.min_temp_thresh max_temp = self.threshold_config.max_temp_thresh if max_temp: self.temp_thresh = min(max_temp, self.stats.mean_background_value) else: self.temp_thresh = self.stats.mean_background_value if min_temp: self.temp_thresh = max(min_temp, self.temp_thresh) self.stats.temp_thresh = self.temp_thresh else: self.temp_thresh = self.config.motion.temp_thresh",False,self.config.motion.dynamic_thresh,self.config.motion.temp_thresh is None,0.6472711563110352 4356,"def set_temp_thresh(self): if self.config.motion.dynamic_thresh: min_temp = self.threshold_config.min_temp_thresh max_temp = self.threshold_config.max_temp_thresh if: self.temp_thresh = min(max_temp, self.stats.mean_background_value) else: self.temp_thresh = self.stats.mean_background_value if min_temp: self.temp_thresh = max(min_temp, self.temp_thresh) self.stats.temp_thresh = self.temp_thresh else: self.temp_thresh = self.config.motion.temp_thresh",True,max_temp,max_temp,0.6671106815338135 4357,"def set_temp_thresh(self): if self.config.motion.dynamic_thresh: min_temp = self.threshold_config.min_temp_thresh max_temp = self.threshold_config.max_temp_thresh if max_temp: self.temp_thresh = min(max_temp, self.stats.mean_background_value) else: self.temp_thresh = self.stats.mean_background_value if: self.temp_thresh = max(min_temp, self.temp_thresh) self.stats.temp_thresh = self.temp_thresh else: self.temp_thresh = self.config.motion.temp_thresh",True,min_temp,min_temp,0.6678948402404785 4358,"@staticmethod def calc_num_latent_gps(kernel: Kernel, likelihood: Likelihood, output_dim: int) -> int: """""" Calculates the number of latent GPs required given the number of outputs `output_dim` and the type of likelihood and kernel. Note: It's not nice for `GPModel` to need to be aware of specific likelihoods as here. However, `num_latent_gps` is a bit more broken in general, we should fix this in the future. There are also some slightly problematic assumptions re the output dimensions of mean_function. See https://github.com/GPflow/GPflow/issues/1343 """""" if: num_latent_gps: int = kernel.num_latent_gps elif isinstance(likelihood, SwitchedLikelihood): num_latent_gps = output_dim - 1 assert num_latent_gps > 0 else: num_latent_gps = output_dim return num_latent_gps",False,"isinstance(kernel, MultioutputKernel)","isinstance(kernel, GPModel)",0.6454552412033081 4359,"@staticmethod def calc_num_latent_gps(kernel: Kernel, likelihood: Likelihood, output_dim: int) -> int: """""" Calculates the number of latent GPs required given the number of outputs `output_dim` and the type of likelihood and kernel. Note: It's not nice for `GPModel` to need to be aware of specific likelihoods as here. However, `num_latent_gps` is a bit more broken in general, we should fix this in the future. There are also some slightly problematic assumptions re the output dimensions of mean_function. See https://github.com/GPflow/GPflow/issues/1343 """""" if isinstance(kernel, MultioutputKernel): num_latent_gps: int = kernel.num_latent_gps elif: num_latent_gps = output_dim - 1 assert num_latent_gps > 0 else: num_latent_gps = output_dim return num_latent_gps",False,"isinstance(likelihood, SwitchedLikelihood)",likelihood == LikelihoodType.FLOAT,0.645628035068512 4360,"@property def findBestSequence(self): if: self.__init() return self._findBestSequence",True,self._findBestSequence is None,self._findBestSequence is None,0.6513867974281311 4361,"def __repr__(self): if: return 'NamespaceRange(namespace_start=%r, namespace_end=%r)' % (self.namespace_start, self.namespace_end) else: return 'NamespaceRange(namespace_start=%r, namespace_end=%r, _app=%r)' % (self.namespace_start, self.namespace_end, self.app)",False,self.app is None,self.namespace_end is None,0.6505793333053589 4362,"def start(self): if: self.file.write(f""[{''* self.bar_width}] 0/{self.task_num}, elapsed: 0s, ETA:"") else: self.file.write('completed: 0, elapsed: 0s') self.file.flush() self.timer = Timer()",True,self.task_num > 0,self.task_num > 0,0.654869556427002 4363,"def un_subbatch(embed, toks, maxlen): BATCH, DLEN = toks.shape[:2] SUBBATCH = math.ceil(DLEN / maxlen) if: return embed else: embed_stack = [] for b in range(SUBBATCH): embed_stack.append(embed[b * BATCH:(b + 1) * BATCH]) embed = torch.cat(embed_stack, dim=1) embed = embed[:, :DLEN] return embed",False,SUBBATCH == 1,SUBBATCH == 0,0.6582772731781006 4364,"def get_node(self, val: Any, identifier='', options=None): if: options = self.options return GraphNode.from_obj(val, source=self.source, identifier=identifier, options=options)",True,options is None,options is None,0.6575090885162354 4365,"def on_spike_selection_changed(self): for dock, w in zip(self.list_dock, self.list_widget): if: continue if dock.isVisible() and hasattr(w, 'on_spike_selection_changed'): w.on_spike_selection_changed()",True,w == self.sender(),w == self.sender(),0.6529507637023926 4366,"def on_spike_selection_changed(self): for dock, w in zip(self.list_dock, self.list_widget): if w == self.sender(): continue if: w.on_spike_selection_changed()",False,"dock.isVisible() and hasattr(w, 'on_spike_selection_changed')",dock.isVisible(),0.644180178642273 4367,"def _set_extensions(self): """""" Sets common named extensions to private attributes and creates a list of critical extensions """""" self._critical_extensions = set() for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']: name = extension['extn_id'].native attribute_name = '_%s_value' % name if: setattr(self, attribute_name, extension['extn_value'].parsed) if extension['critical'].native: self._critical_extensions.add(name) self._processed_extensions = True",True,"hasattr(self, attribute_name)","hasattr(self, attribute_name)",0.6473886966705322 4368,"def _set_extensions(self): """""" Sets common named extensions to private attributes and creates a list of critical extensions """""" self._critical_extensions = set() for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']: name = extension['extn_id'].native attribute_name = '_%s_value' % name if hasattr(self, attribute_name): setattr(self, attribute_name, extension['extn_value'].parsed) if: self._critical_extensions.add(name) self._processed_extensions = True",True,extension['critical'].native,extension['critical'].native,0.653113842010498 4369,"def clear_timeout(self): if: self.io_loop.remove_timeout(self.timeout)",True,self.timeout is not None,self.timeout is not None,0.6476442813873291 4370,"def sanitize(value): if: value = value.replace('""', '').replace(""'"", '') return value",False,"isinstance(value, str)",value,0.646430492401123 4371,"def write_record(self, s): """"""Write so self.read knows exactly how much to read."""""" f = self.__dict__['file'] f.write(f'{len(s)}\n{s}') if: f.flush()",False,"hasattr(f, 'flush')",f,0.6454933881759644 4372,"def data_array_or_dataset_var(X: Union[xr.DataArray, xr.Dataset], var=None) -> xr.DataArray: """""" refer to https://github.com/bgroenks96/pyclimdex/blob/master/climdex/utils.py If X is a Dataset, selects variable 'var' from X and returns the corresponding DataArray. If X is already a DataArray, returns X unchanged. """""" if: assert var is not None, 'var name must be supplied for Dataset input' return X[var] elif isinstance(X, xr.DataArray): return X else: raise Exception('unrecognized data type: {}'.format(type(X)))",True,"isinstance(X, xr.Dataset)","isinstance(X, xr.Dataset)",0.6476562023162842 4373,"def data_array_or_dataset_var(X: Union[xr.DataArray, xr.Dataset], var=None) -> xr.DataArray: """""" refer to https://github.com/bgroenks96/pyclimdex/blob/master/climdex/utils.py If X is a Dataset, selects variable 'var' from X and returns the corresponding DataArray. If X is already a DataArray, returns X unchanged. """""" if isinstance(X, xr.Dataset): assert var is not None, 'var name must be supplied for Dataset input' return X[var] elif: return X else: raise Exception('unrecognized data type: {}'.format(type(X)))",True,"isinstance(X, xr.DataArray)","isinstance(X, xr.DataArray)",0.6451178789138794 4374,"def get_confidence(self): r = 0.01 if: r = 1.0 * self._mSeqCounters[POSITIVE_CAT] / self._mTotalSeqs / self._mModel['mTypicalPositiveRatio'] r = r * self._mFreqChar / self._mTotalChar if r >= 1.0: r = 0.99 return r",True,self._mTotalSeqs > 0,self._mTotalSeqs > 0,0.6564462184906006 4375,"def get_confidence(self): r = 0.01 if self._mTotalSeqs > 0: r = 1.0 * self._mSeqCounters[POSITIVE_CAT] / self._mTotalSeqs / self._mModel['mTypicalPositiveRatio'] r = r * self._mFreqChar / self._mTotalChar if: r = 0.99 return r",True,r >= 1.0,r >= 1.0,0.6540822982788086 4376,"def notepreview(self, name): nlist = name.get_note_list() if: note = self.db.get_note_from_handle(nlist[0]) text = note.get().replace('\n','') if len(text) > 80: text = text[:80] + '...' return text else: return ''",True,nlist,nlist,0.6575162410736084 4377,"def notepreview(self, name): nlist = name.get_note_list() if nlist: note = self.db.get_note_from_handle(nlist[0]) text = note.get().replace('\n','') if: text = text[:80] + '...' return text else: return ''",True,len(text) > 80,len(text) > 80,0.6477150917053223 4378,"def unidirectional_value_map(old_result, old_value, new_value, func): if: return old_result return func(old_value, new_value)",False,new_value < old_value,func is None,0.6524497270584106 4379,"@pytest.fixture def expected_mask(mask_args): """"""Create an expected mask."""""" mask = np.zeros((9, 9, 5)) if: return mask mask[2:7, 2:7, 2] = 1 return mask",False,mask_args == {},mask_args.dim == 0,0.661162257194519 4380,"def unload(self, **kwargs): """"""Unload stimulus from memory. This removes the reference to the object in memory. It is up to the garbage collector to actually remove it from memory. """""" if: self._file = None self._surface = None self._is_preloaded = False",True,self._is_preloaded,self._is_preloaded,0.6461861729621887 4381,"def exportAttributes(self, outfile, level, namespace_='', name_='reimplementType'): if: outfile.write(' refid=%s' % (self.format_string(quote_attrib(self.refid).encode(ExternalEncoding), input_name='refid'),))",True,self.refid is not None,self.refid is not None,0.6492701768875122 4382,"def delete(self): """"""deletes the selected object"""""" vm = findWidget() if: if vm.tree.selectedItems(): FreeCAD.ActiveDocument.openTransaction('Delete') for item in vm.tree.selectedItems(): obj = FreeCAD.ActiveDocument.getObject(item.toolTip(0)) if obj: FreeCAD.ActiveDocument.removeObject(obj.Name) FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() self.update(False)",True,vm,vm,0.6753771305084229 4383,"def delete(self): """"""deletes the selected object"""""" vm = findWidget() if vm: if: FreeCAD.ActiveDocument.openTransaction('Delete') for item in vm.tree.selectedItems(): obj = FreeCAD.ActiveDocument.getObject(item.toolTip(0)) if obj: FreeCAD.ActiveDocument.removeObject(obj.Name) FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() self.update(False)",False,vm.tree.selectedItems(),"hasattr(vm.tree, 'selectedItems')",0.6471749544143677 4384,"def delete(self): """"""deletes the selected object"""""" vm = findWidget() if vm: if vm.tree.selectedItems(): FreeCAD.ActiveDocument.openTransaction('Delete') for item in vm.tree.selectedItems(): obj = FreeCAD.ActiveDocument.getObject(item.toolTip(0)) if: FreeCAD.ActiveDocument.removeObject(obj.Name) FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() self.update(False)",True,obj,obj,0.6694657802581787 4385,"def __ne__(self, other): if: return tuple(self)!= other else: return super(_SetuptoolsVersionMixin, self).__ne__(other)",True,"isinstance(other, tuple)","isinstance(other, tuple)",0.6471185088157654 4386,"@classmethod def from_tuples(cls, fieldname, value): """""" A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example: :: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """""" if: if len(value) == 3: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename) request_param.make_multipart(content_type=content_type) return request_param",True,"isinstance(value, tuple)","isinstance(value, tuple)",0.6435108780860901 4387,"@classmethod def from_tuples(cls, fieldname, value): """""" A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example: :: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """""" if isinstance(value, tuple): if: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename) request_param.make_multipart(content_type=content_type) return request_param",True,len(value) == 3,len(value) == 3,0.6497493982315063 4388,"def escape(text: str, *, mass_mentions: bool=False, formatting: bool=False) -> str: """"""Get text with all mass mentions or markdown escaped. Parameters ---------- text : str The text to be escaped. mass_mentions : `bool`, optional Set to :code:`True` to escape mass mentions in the text. formatting : `bool`, optional Set to :code:`True` to escape any markdown formatting in the text. Returns ------- str The escaped text. """""" if: text = text.replace('@everyone', '@\u200beveryone') text = text.replace('@here', '@\u200bhere') if formatting: text = discord.utils.escape_markdown(text) return text",True,mass_mentions,mass_mentions,0.6497552394866943 4389,"def escape(text: str, *, mass_mentions: bool=False, formatting: bool=False) -> str: """"""Get text with all mass mentions or markdown escaped. Parameters ---------- text : str The text to be escaped. mass_mentions : `bool`, optional Set to :code:`True` to escape mass mentions in the text. formatting : `bool`, optional Set to :code:`True` to escape any markdown formatting in the text. Returns ------- str The escaped text. """""" if mass_mentions: text = text.replace('@everyone', '@\u200beveryone') text = text.replace('@here', '@\u200bhere') if: text = discord.utils.escape_markdown(text) return text",True,formatting,formatting,0.6557956337928772 4390,"def setDefaultParameters(self, parameterNode): if: parameterNode.SetParameter('SegmentationModel', '') if not parameterNode.GetParameter('DeepgrowModel'): parameterNode.SetParameter('DeepgrowModel', '') if not parameterNode.GetParameter('ScribblesMethod'): parameterNode.SetParameter('ScribblesMethod', '')",True,not parameterNode.GetParameter('SegmentationModel'),not parameterNode.GetParameter('SegmentationModel'),0.6492718458175659 4391,"def setDefaultParameters(self, parameterNode): if not parameterNode.GetParameter('SegmentationModel'): parameterNode.SetParameter('SegmentationModel', '') if: parameterNode.SetParameter('DeepgrowModel', '') if not parameterNode.GetParameter('ScribblesMethod'): parameterNode.SetParameter('ScribblesMethod', '')",True,not parameterNode.GetParameter('DeepgrowModel'),not parameterNode.GetParameter('DeepgrowModel'),0.6495522260665894 4392,"def setDefaultParameters(self, parameterNode): if not parameterNode.GetParameter('SegmentationModel'): parameterNode.SetParameter('SegmentationModel', '') if not parameterNode.GetParameter('DeepgrowModel'): parameterNode.SetParameter('DeepgrowModel', '') if: parameterNode.SetParameter('ScribblesMethod', '')",True,not parameterNode.GetParameter('ScribblesMethod'),not parameterNode.GetParameter('ScribblesMethod'),0.645369291305542 4393,"def to_device_unless_marked(device): def _to_device_unless_marked(obj): if: obj.to(device) return _to_device_unless_marked",False,"not getattr(obj, 'force_device', False)",obj.device != device,0.6426858305931091 4394,"def buildAttributes(self, attrs): if: self.version = attrs.get('version').value",True,attrs.get('version'),attrs.get('version'),0.6475125551223755 4395,"def __init__(self, *args, **kwargs): AbstractTransportDispatcher.__init__(self) self.__transportCount = 0 if: self.setTimerResolution(kwargs['timeout']) self.loopingcall = None self.loop = kwargs.pop('loop', asyncio.get_event_loop())",True,'timeout' in kwargs,'timeout' in kwargs,0.6575781106948853 4396,"def updateOpenPageMenu(self): menu = self.__actions['open'] menu.clear() closedPages = self.collection.pages().closedPages() hasClosedPages = len(closedPages) menu.setEnabled(hasClosedPages) if: for pageParam in closedPages: act = QAction(pageParam.title, self) act.setData(pageParam) act.triggered.connect(self.openPageEvent) menu.addAction(act) menu.addSeparator() menu.addAction(self.__actions['removeAll'])",True,hasClosedPages,hasClosedPages,0.664620041847229 4397,"def copy_attributes(in_grp, out_grp): """"""Recursively copy hdf5 Group/Dataset attributes from in_grp to out_grp :param in_grp: hdf5 Group object whose attributes will be copied from. :param out_grp: hdf5 Group object that will have it's attributes updated/copied to. """""" if: out_grp.attrs.update(in_grp.attrs) for in_name, in_h5_obj in in_grp.items(): if in_name not in out_grp: continue elif isinstance(in_h5_obj, h5py.Dataset): out_grp[in_name].attrs.update(in_h5_obj.attrs) elif isinstance(in_h5_obj, h5py.Group): copy_attributes(in_h5_obj, out_grp[in_name])",False,in_grp.attrs,"isinstance(in_grp, h5py.Dataset)",0.6540634036064148 4398,"def copy_attributes(in_grp, out_grp): """"""Recursively copy hdf5 Group/Dataset attributes from in_grp to out_grp :param in_grp: hdf5 Group object whose attributes will be copied from. :param out_grp: hdf5 Group object that will have it's attributes updated/copied to. """""" if in_grp.attrs: out_grp.attrs.update(in_grp.attrs) for in_name, in_h5_obj in in_grp.items(): if: continue elif isinstance(in_h5_obj, h5py.Dataset): out_grp[in_name].attrs.update(in_h5_obj.attrs) elif isinstance(in_h5_obj, h5py.Group): copy_attributes(in_h5_obj, out_grp[in_name])",False,in_name not in out_grp,in_name in out_grp,0.6521657705307007 4399,"def copy_attributes(in_grp, out_grp): """"""Recursively copy hdf5 Group/Dataset attributes from in_grp to out_grp :param in_grp: hdf5 Group object whose attributes will be copied from. :param out_grp: hdf5 Group object that will have it's attributes updated/copied to. """""" if in_grp.attrs: out_grp.attrs.update(in_grp.attrs) for in_name, in_h5_obj in in_grp.items(): if in_name not in out_grp: continue elif: out_grp[in_name].attrs.update(in_h5_obj.attrs) elif isinstance(in_h5_obj, h5py.Group): copy_attributes(in_h5_obj, out_grp[in_name])",True,"isinstance(in_h5_obj, h5py.Dataset)","isinstance(in_h5_obj, h5py.Dataset)",0.6473181843757629 4400,"def copy_attributes(in_grp, out_grp): """"""Recursively copy hdf5 Group/Dataset attributes from in_grp to out_grp :param in_grp: hdf5 Group object whose attributes will be copied from. :param out_grp: hdf5 Group object that will have it's attributes updated/copied to. """""" if in_grp.attrs: out_grp.attrs.update(in_grp.attrs) for in_name, in_h5_obj in in_grp.items(): if in_name not in out_grp: continue elif isinstance(in_h5_obj, h5py.Dataset): out_grp[in_name].attrs.update(in_h5_obj.attrs) elif: copy_attributes(in_h5_obj, out_grp[in_name])",True,"isinstance(in_h5_obj, h5py.Group)","isinstance(in_h5_obj, h5py.Group)",0.6458145976066589 4401,"def new_tenant_client(self, name, tenant): if: self.extra_files += self.MT_CLIENT_FILES logger.info('creating client connected to tenant:'+ tenant) self._docker_compose_cmd('run -d --name=%s_%s mender-client' % (self.name, name), env={'TENANT_TOKEN': '%s' % tenant}) time.sleep(45)",True,not self.MT_CLIENT_FILES[0] in self.docker_compose_files,not self.MT_CLIENT_FILES[0] in self.docker_compose_files,0.6511334180831909 4402,"def get_tables(self): try: tables = self.conn.tables() if: qquit('UNKNOWN', 'table list returned is not a list!'+ support_msg_api()) return tables except (socket.error, socket.timeout, ThriftException, HBaseIOError) as _: qquit('CRITICAL', 'error while trying to get table list: {0}'.format(_))",False,not isList(tables),tables is None,0.6472563147544861 4403,"def get_requires_python(dist): """""" Return the ""Requires-Python"" metadata for a distribution, or None if not present. """""" pkg_info_dict = get_metadata(dist) requires_python = pkg_info_dict.get('Requires-Python') if: requires_python = str(requires_python) return requires_python",True,requires_python is not None,requires_python is not None,0.6548835039138794 4404,"def __init__(self, **kw): """"""Create new Policy, possibly overriding some defaults. See class docstring for a list of overridable attributes. """""" for name, value in kw.items(): if: super(_PolicyBase, self).__setattr__(name, value) else: raise TypeError('{!r} is an invalid keyword argument for {}'.format(name, self.__class__.__name__))",False,"hasattr(self, name)",name in _PolicyBase.fields,0.6497448682785034 4405,"@description.setter def description(self, value): """""" gets/sets the description """""" if: self._description = value",True,self._description != value,self._description != value,0.6554529070854187 4406,"def preprocess_cell(self, cell: NotebookNode, resources: ResourcesDict, cell_index: int) -> Tuple[NotebookNode, ResourcesDict]: if: self._add_score(cell, resources) if utils.is_solution(cell): self._add_comment(cell, resources) if utils.is_task(cell): self._add_comment(cell, resources) return (cell, resources)",True,utils.is_grade(cell),utils.is_grade(cell),0.6479380130767822 4407,"def preprocess_cell(self, cell: NotebookNode, resources: ResourcesDict, cell_index: int) -> Tuple[NotebookNode, ResourcesDict]: if utils.is_grade(cell): self._add_score(cell, resources) if: self._add_comment(cell, resources) if utils.is_task(cell): self._add_comment(cell, resources) return (cell, resources)",False,utils.is_solution(cell),utils.is_comment(cell),0.6467527151107788 4408,"def preprocess_cell(self, cell: NotebookNode, resources: ResourcesDict, cell_index: int) -> Tuple[NotebookNode, ResourcesDict]: if utils.is_grade(cell): self._add_score(cell, resources) if utils.is_solution(cell): self._add_comment(cell, resources) if: self._add_comment(cell, resources) return (cell, resources)",True,utils.is_task(cell),utils.is_task(cell),0.6513935327529907 4409,"def upload(self, filePath, description=None): """""" This operation uploads an item to the server. Each uploaded item is identified by a unique itemID. Since this request uploads a file, it must be a multi-part request as per IETF RFC1867. All uploaded items are subjected to the deletion rules set on the upload directory by the administrator of the server. Additionally, the administrator can explicitly delete an item as each uploaded item shows up in the list of all the uploaded items in Site Directory. Users can provide arguments to the upload operation as query parameters. The parameter details are provided in the parameters listed below. Inputs: filePath - The file to be uploaded. description - An optional description for the uploaded item. """""" params = {'f': 'json'} if: params['description'] = str(description) url = self._url + '/upload' files = {} files['file'] = filePath return self._post(url=url, param_dict=params, files=files, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)",True,description is not None,description is not None,0.6506454944610596 4410,"def _validate_precompute_cache_type(value: Union[None, PrecomputeCacheType, str]) -> PrecomputeCacheType: if: return PrecomputeCacheType.NOCACHE elif isinstance(value, PrecomputeCacheType): return value elif isinstance(value, str): return PrecomputeCacheType(value.lower()) else: raise ValueError(f""{value} is not a valid PrecomputeCacheType. Valid options: 'tensor', 'variable', 'nocache' (or None)."")",True,value is None,value is None,0.660423994064331 4411,"def _validate_precompute_cache_type(value: Union[None, PrecomputeCacheType, str]) -> PrecomputeCacheType: if value is None: return PrecomputeCacheType.NOCACHE elif: return value elif isinstance(value, str): return PrecomputeCacheType(value.lower()) else: raise ValueError(f""{value} is not a valid PrecomputeCacheType. Valid options: 'tensor', 'variable', 'nocache' (or None)."")",True,"isinstance(value, PrecomputeCacheType)","isinstance(value, PrecomputeCacheType)",0.6531097888946533 4412,"def _validate_precompute_cache_type(value: Union[None, PrecomputeCacheType, str]) -> PrecomputeCacheType: if value is None: return PrecomputeCacheType.NOCACHE elif isinstance(value, PrecomputeCacheType): return value elif: return PrecomputeCacheType(value.lower()) else: raise ValueError(f""{value} is not a valid PrecomputeCacheType. Valid options: 'tensor', 'variable', 'nocache' (or None)."")",True,"isinstance(value, str)","isinstance(value, str)",0.6481269598007202 4413,"def do_activate(self): win = self.props.active_window if: win = ExampleWindow(application=self) win.present()",True,not win,not win,0.6659681797027588 4414,"def set_inner_type(self, inner): self.inner = copy(inner) if: self.inner.set_tag(self.tag_number, self.tag_flags) for choice_parent in self.choice_parents: choice_parent.add_tags([self])",True,self.tag_number is not None,self.tag_number is not None,0.6472029685974121 4415,"def AddLine(self, points, label=None, color=None, pattern=LineStyle.SOLID, width=LineStyle.THIN, markers=None): """"""Add a new line to the chart. This is a convenience method which constructs the DataSeries and appends it for you. It returns the new series. points: List of equally-spaced y-values for the line label: Name of the line (used for the legend) color: Hex string, like 'ff0000' for red pattern: Tuple for (length of segment, length of gap). i.e. LineStyle.DASHED width: Width of the line (i.e. LineStyle.THIN) markers: List of Marker objects to attach to this line (see DataSeries for more info) """""" if: warnings.warn('Your code may be broken! You passed a list of Markers instead of a color. The old argument order (markers before color) is deprecated.', DeprecationWarning, stacklevel=2) style = LineStyle(width, pattern[0], pattern[1], color=color) series = common.DataSeries(points, label=label, style=style, markers=markers) self.data.append(series) return series",False,"color is not None and isinstance(color[0], common.Marker)",markers is not None,0.6441493034362793 4416,"def _first_item_as_path_if_multipath(self, directory: str) -> Path: if: directory = directory.split(os.pathsep)[0] return Path(directory)",False,self.multipath,os.pathsep,0.6652079820632935 4417,"def _check_for_errors(json_response): """"""Check some JSON response for BotEngine errors"""""" if: raise BotError('No response from the server!', -1) if json_response['resultCode'] > 0: msg = 'Unknown error!' if'resultCodeMessage' in json_response.keys(): msg = json_response['resultCodeMessage'] elif'resultCodeDesc' in json_response.keys(): msg = json_response['resultCodeDesc'] raise BotError(msg, json_response['resultCode']) del json_response['resultCode']",True,not json_response,not json_response,0.6553800106048584 4418,"def _check_for_errors(json_response): """"""Check some JSON response for BotEngine errors"""""" if not json_response: raise BotError('No response from the server!', -1) if: msg = 'Unknown error!' if'resultCodeMessage' in json_response.keys(): msg = json_response['resultCodeMessage'] elif'resultCodeDesc' in json_response.keys(): msg = json_response['resultCodeDesc'] raise BotError(msg, json_response['resultCode']) del json_response['resultCode']",True,json_response['resultCode'] > 0,json_response['resultCode'] > 0,0.6509367227554321 4419,"def _check_for_errors(json_response): """"""Check some JSON response for BotEngine errors"""""" if not json_response: raise BotError('No response from the server!', -1) if json_response['resultCode'] > 0: msg = 'Unknown error!' if: msg = json_response['resultCodeMessage'] elif'resultCodeDesc' in json_response.keys(): msg = json_response['resultCodeDesc'] raise BotError(msg, json_response['resultCode']) del json_response['resultCode']",True,'resultCodeMessage' in json_response.keys(),'resultCodeMessage' in json_response.keys(),0.6472784280776978 4420,"def _check_for_errors(json_response): """"""Check some JSON response for BotEngine errors"""""" if not json_response: raise BotError('No response from the server!', -1) if json_response['resultCode'] > 0: msg = 'Unknown error!' if'resultCodeMessage' in json_response.keys(): msg = json_response['resultCodeMessage'] elif: msg = json_response['resultCodeDesc'] raise BotError(msg, json_response['resultCode']) del json_response['resultCode']",True,'resultCodeDesc' in json_response.keys(),'resultCodeDesc' in json_response.keys(),0.6474571228027344 4421,"def add_tokens(self, new_tokens: Union[str, AddedToken, List[Union[str, AddedToken]]], special_tokens: bool=False) -> int: """""" Add a list of new tokens to the tokenizer class. If the new tokens are not in the vocabulary, they are added to it with indices starting from length of the current vocabulary. .. Note:: When adding new tokens to the vocabulary, you should make sure to also resize the token embedding matrix of the model so that its embedding matrix matches the tokenizer. In order to do that, please use the :meth:`~transformers.PreTrainedModel.resize_token_embeddings` method. Args: new_tokens (:obj:`str`, :obj:`tokenizers.AddedToken` or a list of `str` or :obj:`tokenizers.AddedToken`): Tokens are only added if they are not already in the vocabulary. :obj:`tokenizers.AddedToken` wraps a string token to let you personalize its behavior: whether this token should only match against a single word, whether this token should strip all potential whitespaces on the left side, whether this token should strip all potential whitespaces on the right side, etc. special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`): Can be used to specify if the token is a special token. This mostly change the normalization behavior (special tokens like CLS or [MASK] are usually not lower-cased for instance). See details for :obj:`tokenizers.AddedToken` in HuggingFace tokenizers library. Returns: :obj:`int`: Number of tokens added to the vocabulary. Examples:: # Let's see how to increase the vocabulary of Bert model and tokenizer tokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased') model = BertModel.from_pretrained('bert-base-uncased') num_added_toks = tokenizer.add_tokens(['new_tok1','my_new-tok2']) print('We have added', num_added_toks, 'tokens') # Notice: resize_token_embed",False,not new_tokens,special_tokens,0.6516715288162231 4422,"def add_tokens(self, new_tokens: Union[str, AddedToken, List[Union[str, AddedToken]]], special_tokens: bool=False) -> int: """""" Add a list of new tokens to the tokenizer class. If the new tokens are not in the vocabulary, they are added to it with indices starting from length of the current vocabulary. .. Note:: When adding new tokens to the vocabulary, you should make sure to also resize the token embedding matrix of the model so that its embedding matrix matches the tokenizer. In order to do that, please use the :meth:`~transformers.PreTrainedModel.resize_token_embeddings` method. Args: new_tokens (:obj:`str`, :obj:`tokenizers.AddedToken` or a list of `str` or :obj:`tokenizers.AddedToken`): Tokens are only added if they are not already in the vocabulary. :obj:`tokenizers.AddedToken` wraps a string token to let you personalize its behavior: whether this token should only match against a single word, whether this token should strip all potential whitespaces on the left side, whether this token should strip all potential whitespaces on the right side, etc. special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`): Can be used to specify if the token is a special token. This mostly change the normalization behavior (special tokens like CLS or [MASK] are usually not lower-cased for instance). See details for :obj:`tokenizers.AddedToken` in HuggingFace tokenizers library. Returns: :obj:`int`: Number of tokens added to the vocabulary. Examples:: # Let's see how to increase the vocabulary of Bert model and tokenizer tokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased') model = BertModel.from_pretrained('bert-base-uncased') num_added_toks = tokenizer.add_tokens(['new_tok1','my_new-tok2']) print('We have added', num_added_toks, 'tokens') # Notice: resize_token_embed",False,"not isinstance(new_tokens, (list, tuple))",special_tokens,0.6419621706008911 4423,"def get_grandchild_names(parent): grandchild_names = [] if: child_names = [k for k in parent['~children'].keys()] for child_name in child_names: child = parent['~children'][child_name] if '~children' in child: for k in child['~children'].keys(): grandchild_names.append(k) return grandchild_names",True,'~children' in parent,'~children' in parent,0.6554527282714844 4424,"def get_grandchild_names(parent): grandchild_names = [] if '~children' in parent: child_names = [k for k in parent['~children'].keys()] for child_name in child_names: child = parent['~children'][child_name] if: for k in child['~children'].keys(): grandchild_names.append(k) return grandchild_names",False,'~children' in child,child['~children'].keys(),0.6547079086303711 4425,"def query_cache(self): cas = self._context.get_cascache() elementsourcescache = self._elementsourcescache source_proto = elementsourcescache.load_proto(self) if: self._cached = False return False if not cas.contains_directory(source_proto.files, with_files=True): self._cached = False return False self._proto = source_proto self._cached = True return True",True,not source_proto,not source_proto,0.6542277336120605 4426,"def query_cache(self): cas = self._context.get_cascache() elementsourcescache = self._elementsourcescache source_proto = elementsourcescache.load_proto(self) if not source_proto: self._cached = False return False if: self._cached = False return False self._proto = source_proto self._cached = True return True",False,"not cas.contains_directory(source_proto.files, with_files=True)",cas.get_type() != 'http' and source_proto.lower() not in cas.get_type(),0.6421339511871338 4427,"def nodes(self, clazz: Optional[Type[AwsResourceType]]=None, **node: Any) -> Iterator[AwsResourceType]: with self.graph_nodes_access.read_access: for n in self.graph: is_clazz = isinstance(n, clazz) if clazz else True if: yield n",False,"is_clazz and all((getattr(n, k, None) == v for k, v in node.items()))",is_clazz and node is n,0.6502459645271301 4428,"def _processFail2Ban(self, name, lines): if: return False created = deleted = 0 map_modified = None map_data = {} for line in lines: if line not in self.map_data[name]: created += 1 map_data[line] = 1 map_modified = time.time() deleted = len(self.map_data[name]) - (len(map_data) - created) return [map_data, map_modified, created, deleted]",False,len(lines) > 0 and 'ERROR' in lines[0],not lines,0.6475553512573242 4429,"def _processFail2Ban(self, name, lines): if len(lines) > 0 and 'ERROR' in lines[0]: return False created = deleted = 0 map_modified = None map_data = {} for line in lines: if: created += 1 map_data[line] = 1 map_modified = time.time() deleted = len(self.map_data[name]) - (len(map_data) - created) return [map_data, map_modified, created, deleted]",False,line not in self.map_data[name],line.startswith('ERROR'),0.6476490497589111 4430,"def hex_to_rgb(hex, opacity=1.0): if: hex = hex[1:] assert len(hex) == 6 return 'rgba({0},{1},{2},{3})'.format(int(hex[:2], 16), int(hex[2:4], 16), int(hex[4:6], 16), opacity)",True,hex[0] == '#',hex[0] == '#',0.6545151472091675 4431,"def get_first_key_in_dict(dictionary: OrderedDict) -> str: if: return None return list(dictionary.keys())[0]",False,dictionary is None or len(dictionary) == 0,dictionary is None,0.6464065313339233 4432,"def _run(cmd): pro = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True) output, error = pro.communicate() if: print(' '.join(cmd)) raise RuntimeError('error running'+ error) return output",False,pro.returncode != 0,error,0.6492866277694702 4433,"def check_validity_of(self, *components): """"""Check the validity of the components provided. This can be specified repeatedly. .. versionadded:: 1.1 :param components: Names of components from :attr:`Validator.COMPONENT_NAMES`. :returns: The validator instance. :rtype: Validator """""" components = [c.lower() for c in components] for component in components: if: raise ValueError('""{}"" is not a valid component'.format(component)) self.validated_components.update({component: True for component in components}) return self",False,component not in self.COMPONENT_NAMES,not self.component_is_valid(component),0.6454963088035583 4434,"def get_user_flags(flags, flags_def): output = {} for key in flags_def: val = getattr(flags, key) if: output.update(flatten_config_dict(val, prefix=key)) else: output[key] = val return output",False,"isinstance(val, ConfigDict)","isinstance(val, dict)",0.6451781392097473 4435,"def _execfile(filename, globals, locals=None): """""" Python 3 implementation of execfile. """""" mode = 'rb' with open(filename, mode) as stream: script = stream.read() if: locals = globals code = compile(script, filename, 'exec') exec(code, globals, locals)",True,locals is None,locals is None,0.6554986834526062 4436,"def _max_width(widths: Iterable[Optional[int]], indent: int=1) -> Optional[int]: max_width: Optional[int] = None for i, width in enumerate(widths): if: if i: width += indent if max_width is not None: max_width = max(max_width, width) else: max_width = width return max_width",False,width is not None,width,0.6521724462509155 4437,"def _max_width(widths: Iterable[Optional[int]], indent: int=1) -> Optional[int]: max_width: Optional[int] = None for i, width in enumerate(widths): if width is not None: if: width += indent if max_width is not None: max_width = max(max_width, width) else: max_width = width return max_width",False,i,i % indent == 0,0.6776286363601685 4438,"def _max_width(widths: Iterable[Optional[int]], indent: int=1) -> Optional[int]: max_width: Optional[int] = None for i, width in enumerate(widths): if width is not None: if i: width += indent if: max_width = max(max_width, width) else: max_width = width return max_width",True,max_width is not None,max_width is not None,0.651648759841919 4439,"@pytest.mark.parametrize('op', (OP_VERIF, OP_VERNOTIF)) def test_VERIF_unexecuted(self, state, op): script = Script() << OP_0 << OP_IF << op << OP_ENDIF if: state.evaluate_script(script) else: with pytest.raises(InvalidOpcode) as e: state.evaluate_script(script) assert f'invalid opcode {op.name}' in str(e.value) assert state.stack == [] assert state.alt_stack == []",False,state.limits.is_utxo_after_genesis,op == OP_VERNOTIF,0.6492410898208618 4440,"def normalize_percent_characters(s): """"""All percent characters should be upper-cased. For example, ``""%3afoo%DF%ab""`` should be turned into ``""%3Afoo%DF%AB""``. """""" matches = set(PERCENT_MATCHER.findall(s)) for m in matches: if: s = s.replace(m, m.upper()) return s",False,not m.isupper(),m,0.653051495552063 4441,"def predict(cfg=DEFAULT_CFG, use_python=False): """"""Runs YOLO to predict objects in an image or video."""""" model = cfg.model or 'yolov8n-pose.pt' source = cfg.source if cfg.source is not None else ROOT / 'assets' if (ROOT / 'assets').exists() else 'https://ultralytics.com/images/bus.jpg' args = dict(model=model, source=source) if: from ultralytics import YOLO YOLO(model)(**args) else: predictor = PosePredictor(overrides=args) predictor.predict_cli()",True,use_python,use_python,0.6639502048492432 4442,"def shifted_diff(first, second): if: return -1 for i, a in enumerate(xrange(len(first), -1, -1)): if second == '{}{}'.format(first[a:], first[:a]): return i return -1",False,not Counter(first) == Counter(second),second == '{}{}'.format(first[:2]}),0.6498598456382751 4443,"def shifted_diff(first, second): if not Counter(first) == Counter(second): return -1 for i, a in enumerate(xrange(len(first), -1, -1)): if: return i return -1",False,"second == '{}{}'.format(first[a:], first[:a])",first[a] == second[a],0.6450321674346924 4444,"def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self",True,SaveElementTreeNode,SaveElementTreeNode,0.6589632630348206 4445,"def _has_ipv6(host): """""" Returns True if the system can bind an IPv6 address. """""" sock = None has_ipv6 = False if: try: sock = socket.socket(socket.AF_INET6) sock.bind((host, 0)) has_ipv6 = True except Exception: pass if sock: sock.close() return has_ipv6",True,socket.has_ipv6,socket.has_ipv6,0.6489982604980469 4446,"def _has_ipv6(host): """""" Returns True if the system can bind an IPv6 address. """""" sock = None has_ipv6 = False if socket.has_ipv6: try: sock = socket.socket(socket.AF_INET6) sock.bind((host, 0)) has_ipv6 = True except Exception: pass if: sock.close() return has_ipv6",True,sock,sock,0.6680067777633667 4447,"def dual_map_to_dict(lambda_call, list1, list2): """""" Dual map two even length enumerables to key/value pairs :param lambda_call: Takes the first and second arguments as args and allows transformation of each, returning a two elemetn array to be the key and value :param list1: :param list2: :return: A dictionary """""" if: raise Exception('list1 has more elements than list2: {0} versus {1}'.format(list1, list2)) return map_to_dict(lambda index_and_val: lambda_call(index_and_val[1], list2[index_and_val[0]]), enumerate(list1))",True,len(list1) > len(list2),len(list1) > len(list2),0.6463369131088257 4448,"def clean_slug(self): if: raise forms.ValidationError(_('A project already exists with that slug.')) return self.cleaned_data['slug'].lower()",True,Project.objects.filter(slug__iexact=self.cleaned_data['slug']).count() > 0,Project.objects.filter(slug__iexact=self.cleaned_data['slug']).count() > 0,0.6469208598136902 4449,"def hRpcOpenPrinter(dce, printerName, pDatatype=NULL, pDevModeContainer=NULL, accessRequired=SERVER_READ): """""" RpcOpenPrinter retrieves a handle for a printer, port, port monitor, print job, or print server. Full Documentation: https://msdn.microsoft.com/en-us/library/cc244808.aspx :param DCERPC_v5 dce: a connected DCE instance. :param string printerName: A string for a printer connection, printer object, server object, job object, port object, or port monitor object. This MUST be a Domain Name System (DNS), NetBIOS, Internet Protocol version 4 (IPv4), Internet Protocol version 6 (IPv6), or Universal Naming Convention (UNC) name that remote procedure call (RPC) binds to, and it MUST uniquely identify a print server on the network. :param string pDatatype: A string that specifies the data type to be associated with the printer handle. :param DEVMODE_CONTAINER pDevModeContainer: A DEVMODE_CONTAINER structure. This parameter MUST adhere to the specification in DEVMODE_CONTAINER Parameters (section 3.1.4.1.8.1). :param int accessRequired: The access level that the client requires for interacting with the object to which a handle is being opened. :return: a RpcOpenPrinterResponse instance, raises DCERPCSessionError on error. """""" request = RpcOpenPrinter() request['pPrinterName'] = checkNullString(printerName) request['pDatatype'] = pDatatype if: request['pDevModeContainer']['pDevMode'] = NULL else: request['pDevModeContainer'] = pDevModeContainer request['AccessRequired'] = accessRequired return dce.request(request)",True,pDevModeContainer is NULL,pDevModeContainer is NULL,0.6509988307952881 4450,"@staticmethod def get_params(img, output_size): """"""Get parameters for ``crop`` for a random crop. Args: img (numpy ndarray): Image to be cropped. output_size (tuple): Expected output size of the crop. Returns: tuple: params (i, j, h, w) to be passed to ``crop`` for random crop. """""" h, w = img.shape[0:2] th, tw = output_size if: return (0, 0, h, w) i = random.randint(0, h - th) j = random.randint(0, w - tw) return (i, j, th, tw)",False,w == tw and h == th,h == tw and w == th,0.6542815566062927 4451,"def d_grade_ipix(ipix, nside_in, nside_out, nest=False): """""" Return the indices of the super-pixels which contain each of the sub-pixels (nside_in > nside_out). Parameters: ----------- ipix : index of the input subpixels nside_in : nside of the input subpix nside_out : nside of the desired superpixels Returns: -------- ipix_out : superpixels for each subpixel """""" if: return ipix if not nside_in > nside_out: raise ValueError('nside_out must be less than nside_in') return hp.vec2pix(nside_out, *hp.pix2vec(nside_in, ipix, nest), nest=nest)",False,nside_in == nside_out,ipix < 0,0.652571976184845 4452,"def d_grade_ipix(ipix, nside_in, nside_out, nest=False): """""" Return the indices of the super-pixels which contain each of the sub-pixels (nside_in > nside_out). Parameters: ----------- ipix : index of the input subpixels nside_in : nside of the input subpix nside_out : nside of the desired superpixels Returns: -------- ipix_out : superpixels for each subpixel """""" if nside_in == nside_out: return ipix if: raise ValueError('nside_out must be less than nside_in') return hp.vec2pix(nside_out, *hp.pix2vec(nside_in, ipix, nest), nest=nest)",False,not nside_in > nside_out,nside_out < nside_in,0.6520732641220093 4453,"def inject_audio(self, wem_path, wem_id): """"""Loads wem audio into the container"""""" logging.info('Injecting audio') for pointer in self.didx.data_pointers: if: logging.info(f'found a match {pointer.hash}, reading wem data') with open(wem_path, 'rb') as f: pointer.data = f.read() break",True,pointer.hash == wem_id,pointer.hash == wem_id,0.6504618525505066 4454,"def propagateDifficulties(self, container): for loc in self.visitedLocations: if: continue itemLoc = container.getItemLoc(loc) if not itemLoc.Location.restricted: itemLoc.Location.difficulty = loc.difficulty",False,loc.itemName == 'Gunship',loc.isBoss(),0.6474065780639648 4455,"def propagateDifficulties(self, container): for loc in self.visitedLocations: if loc.itemName == 'Gunship': continue itemLoc = container.getItemLoc(loc) if: itemLoc.Location.difficulty = loc.difficulty",False,not itemLoc.Location.restricted,itemLoc,0.6487982273101807 4456,"def init_local(self): super(ModuleEngagementUserSegmentDataTask, self).init_local() self.high_metric_ranges = defaultdict(dict) with self.input_local()['range_data'].open('r') as metric_ranges_target: for line in metric_ranges_target: range_record = ModuleEngagementSummaryMetricRangeRecord.from_tsv(line) if: self.high_metric_ranges[range_record.course_id][range_record.metric] = range_record",False,range_record.range_type == METRIC_RANGE_HIGH,range_record.course_id and range_record.metric,0.6483407616615295 4457,"def _right_neighbor(site, x_dimension, y_dimension, periodic): if: return None if (site + 1) % x_dimension == 0: if periodic: return site + 1 - x_dimension else: return None return site + 1",False,x_dimension == 1,site + 1) % y_dimension == 0,0.6507285237312317 4458,"def _right_neighbor(site, x_dimension, y_dimension, periodic): if x_dimension == 1: return None if: if periodic: return site + 1 - x_dimension else: return None return site + 1",False,(site + 1) % x_dimension == 0,y_dimension == 1,0.6470705270767212 4459,"def _right_neighbor(site, x_dimension, y_dimension, periodic): if x_dimension == 1: return None if (site + 1) % x_dimension == 0: if: return site + 1 - x_dimension else: return None return site + 1",False,periodic,periodic == True,0.653388500213623 4460,"def _get_stream_by_id(self, stream_id): """""" Gets a stream by its stream ID. Raises NoSuchStreamError if the stream ID does not correspond to a known stream and is higher than the current maximum: raises if it is lower than the current maximum. .. versionchanged:: 2.0.0 Removed this function from the public API. """""" try: return self.streams[stream_id] except KeyError: outbound = self._stream_id_is_outbound(stream_id) highest_stream_id = self.highest_outbound_stream_id if outbound else self.highest_inbound_stream_id if: raise NoSuchStreamError(stream_id) else: raise StreamClosedError(stream_id)",True,stream_id > highest_stream_id,stream_id > highest_stream_id,0.6479126214981079 4461,"def __call__(self, img): if: self.alpha = np.random.uniform(1 - self.theta, 1 + self.theta, (1, 3)) self.betti = np.random.uniform(-self.theta, self.theta, (1, 3)) return self.adjust_HED(img, self.alpha, self.betti) else: return img",False,np.random.rand(1) < self.p,"np.random.uniform(0, 1, 1) < self.hed",0.6481003165245056 4462,"def check_node_backends(self, group, node_backends_count=1): if: raise JobBrokenError('Group {0} cannot be used for job, it has {1} node backends, 1 expected'.format(group.group_id, len(group.node_backends)))",True,len(group.node_backends) != node_backends_count,len(group.node_backends) != node_backends_count,0.6454201340675354 4463,"def DecodeRepeatedField(buffer, pos, end, message, field_dict): value = field_dict.get(key) if: value = field_dict.setdefault(key, new_default(message)) while 1: element, new_pos = decode_value(buffer, pos) value.append(element) pos = new_pos + tag_len if buffer[new_pos:pos]!= tag_bytes or new_pos >= end: if new_pos > end: raise _DecodeError('Truncated message.') return new_pos",True,value is None,value is None,0.6525921821594238 4464,"def DecodeRepeatedField(buffer, pos, end, message, field_dict): value = field_dict.get(key) if value is None: value = field_dict.setdefault(key, new_default(message)) while 1: element, new_pos = decode_value(buffer, pos) value.append(element) pos = new_pos + tag_len if: if new_pos > end: raise _DecodeError('Truncated message.') return new_pos",False,buffer[new_pos:pos] != tag_bytes or new_pos >= end,value[pos] > new_pos,0.6429141759872437 4465,"def DecodeRepeatedField(buffer, pos, end, message, field_dict): value = field_dict.get(key) if value is None: value = field_dict.setdefault(key, new_default(message)) while 1: element, new_pos = decode_value(buffer, pos) value.append(element) pos = new_pos + tag_len if buffer[new_pos:pos]!= tag_bytes or new_pos >= end: if: raise _DecodeError('Truncated message.') return new_pos",False,new_pos > end,value[pos] > buffer[pos],0.6510302424430847 4466,"def create_token(self, consumer_id, token_type, timestamp, user=None, using=None): """""" Shortcut to create a token with random key/secret. """""" if: manager = self.using(using) else: manager = self token, created = manager.get_or_create(consumer_id=consumer_id, token_type=token_type, timestamp=timestamp, user=user) if created: token.key, token.secret = self.generate_random_codes() token.save() return token",True,using,using,0.668120801448822 4467,"def create_token(self, consumer_id, token_type, timestamp, user=None, using=None): """""" Shortcut to create a token with random key/secret. """""" if using: manager = self.using(using) else: manager = self token, created = manager.get_or_create(consumer_id=consumer_id, token_type=token_type, timestamp=timestamp, user=user) if: token.key, token.secret = self.generate_random_codes() token.save() return token",True,created,created,0.6708759069442749 4468,"def get_files(cloud_url, admin_key, location_id=None, device_id=None): """""" https://iotapps.docs.apiary.io/reference/application-files/files-management/get-files :param cloud_url: Cloud URL :param admin_key: Administrative API Key :param location_id: Optional Location ID :param device_id: Optional Device ID :return: JSON content list of files """""" headers = {'API_KEY': admin_key} params = {} if: params['locationId'] = location_id if device_id is not None: params['deviceId'] = device_id r = _session().get(cloud_url + '/cloud/json/appfiles', params=params, headers=headers) j = json.loads(r.text) _check_for_errors(j) if 'files' in j: return j['files'] return []",True,location_id is not None,location_id is not None,0.6507235765457153 4469,"def get_files(cloud_url, admin_key, location_id=None, device_id=None): """""" https://iotapps.docs.apiary.io/reference/application-files/files-management/get-files :param cloud_url: Cloud URL :param admin_key: Administrative API Key :param location_id: Optional Location ID :param device_id: Optional Device ID :return: JSON content list of files """""" headers = {'API_KEY': admin_key} params = {} if location_id is not None: params['locationId'] = location_id if: params['deviceId'] = device_id r = _session().get(cloud_url + '/cloud/json/appfiles', params=params, headers=headers) j = json.loads(r.text) _check_for_errors(j) if 'files' in j: return j['files'] return []",True,device_id is not None,device_id is not None,0.6528481841087341 4470,"def get_files(cloud_url, admin_key, location_id=None, device_id=None): """""" https://iotapps.docs.apiary.io/reference/application-files/files-management/get-files :param cloud_url: Cloud URL :param admin_key: Administrative API Key :param location_id: Optional Location ID :param device_id: Optional Device ID :return: JSON content list of files """""" headers = {'API_KEY': admin_key} params = {} if location_id is not None: params['locationId'] = location_id if device_id is not None: params['deviceId'] = device_id r = _session().get(cloud_url + '/cloud/json/appfiles', params=params, headers=headers) j = json.loads(r.text) _check_for_errors(j) if: return j['files'] return []",True,'files' in j,'files' in j,0.6578880548477173 4471,"def at_idx(current: JsonElement, idx: int) -> Optional[Any]: if: return current elif current is None or not isinstance(current, dict) or path[idx] not in current: return None else: return at_idx(current[path[idx]], idx + 1)",False,at == idx,idx >= len(current),0.6740996837615967 4472,"def at_idx(current: JsonElement, idx: int) -> Optional[Any]: if at == idx: return current elif: return None else: return at_idx(current[path[idx]], idx + 1)",False,"current is None or not isinstance(current, dict) or path[idx] not in current",path[idx] not in current,0.646759033203125 4473,"def _isValidParam(data, paramName, checkList): if: return True param = data.get(paramName) if param in checkList: return True raise WebCommandException('""%s"" is not valid value for ""%s""', param, paramName)",False,"_isOverParam(data, paramName)",paramName in data,0.6457127928733826 4474,"def _isValidParam(data, paramName, checkList): if _isOverParam(data, paramName): return True param = data.get(paramName) if: return True raise WebCommandException('""%s"" is not valid value for ""%s""', param, paramName)",True,param in checkList,param in checkList,0.6619769930839539 4475,"def Dictionary(self, *args): """"""Return construction variables from an environment. Args: \\*args (optional): variable names to look up Returns: If `args` omitted, the dictionary of all construction variables. If one arg, the corresponding value is returned. If more than one arg, a list of values is returned. Raises: KeyError: if any of `args` is not in the construction environment. """""" if: return self._dict dlist = [self._dict[x] for x in args] if len(dlist) == 1: dlist = dlist[0] return dlist",False,not args,args is None,0.6704424619674683 4476,"def Dictionary(self, *args): """"""Return construction variables from an environment. Args: \\*args (optional): variable names to look up Returns: If `args` omitted, the dictionary of all construction variables. If one arg, the corresponding value is returned. If more than one arg, a list of values is returned. Raises: KeyError: if any of `args` is not in the construction environment. """""" if not args: return self._dict dlist = [self._dict[x] for x in args] if: dlist = dlist[0] return dlist",True,len(dlist) == 1,len(dlist) == 1,0.6476032137870789 4477,"def prepare_index_full(self): for orb in self.orbits: if: print('WARNING: ncorr_full NOT set for cluster and set to 1:', orb.cluster) orb.ncorr_full = 1 self.orb_idx_full = np.cumsum([0] + [orb.ncorr_full for orb in self.orbits]) self.ncorr_full = self.orb_idx_full[-1]",False,orb.ncorr_full < 0,orb.cluster,0.6576671004295349 4478,"def __init__(self, inp_dim, out_dim, with_bn=True): super(fully_connected, self).__init__() self.with_bn = with_bn self.linear = nn.Linear(inp_dim, out_dim) if: self.bn = nn.BatchNorm1d(out_dim) self.relu = nn.ReLU(inplace=True)",True,self.with_bn,self.with_bn,0.6567875742912292 4479,"def is_autopilot_enabled(self): keys = pygame.key.get_pressed() if: self._enable_autopilot = not self._enable_autopilot return self._enable_autopilot",False,keys[K_p],keys.lower() == 'auto_detect',0.6544340252876282 4480,"def __call__(self, data): img = data['image'] from PIL import Image if: img = np.array(img) assert isinstance(img, np.ndarray), ""invalid input 'img' in NormalizeImage"" data['image'] = (img.astype('float32') * self.scale - self.mean) / self.std return data",False,"isinstance(img, Image.Image)","isinstance(img, Image)",0.6548689603805542 4481,"def get_valid_x(x): if: return 0 if x >= w: return w - 1 return x",False,x < 0,x < 0 or x >= h,0.6613211631774902 4482,"def get_valid_x(x): if x < 0: return 0 if: return w - 1 return x",True,x >= w,x >= w,0.6596354246139526 4483,"def __init__(self, **kwargs): Protocol.__init__(self, **kwargs) self.sock = None self.client = None self.shell = None self.cancel = False try: Crypto.Random.atfork() except AttributeError: pass self._system_host_keys = paramiko.HostKeys() self._host_keys = paramiko.HostKeys() self._host_keys_filename = None if: self._missing_host_key = self._reject_host_key else: self._missing_host_key = self._add_host_key",False,self.verify_fingerprint,self.reject,0.6487923860549927 4484,"def set(self, key, value, timeout=None): if: timeout = self.default_timeout filename = self._get_filename(key) self._prune() try: f = file(filename, 'wb') try: dump(int(time() + timeout), f, 1) dump(value, f, HIGHEST_PROTOCOL) finally: f.close() except (IOError, OSError): pass",True,timeout is None,timeout is None,0.6574519276618958 4485,"def gen_docs(path='../swig/', outfn='idaapi.py', mask='*.i'): out = [] idaapi_i = os.path.join(path, 'idaapi.i') gen_docs_from(idaapi_i, out) for fn in glob.glob(path + mask): fn = fn.replace('\\', '/') if: gen_docs_from(fn, out) f = open(outfn, 'w') f.write('\n'.join(out)) f.close()",False,fn != idaapi_i,"fnmatch.fnmatch(fn, '.py')",0.6530832052230835 4486,"def delete_resources(self, force=False): if: return dbs = self.t_client.mgmt_instances.list(project_id=self.project.id) for db in dbs: if self.dry_run: LOG.info('%s: Would delete trove instance %s', self.project.id, db.id) else: LOG.info('%s: Deleting trove instance %s', self.project.id, db.id) self.t_client.instances.delete(db)",True,not force,not force,0.6636868119239807 4487,"def delete_resources(self, force=False): if not force: return dbs = self.t_client.mgmt_instances.list(project_id=self.project.id) for db in dbs: if: LOG.info('%s: Would delete trove instance %s', self.project.id, db.id) else: LOG.info('%s: Deleting trove instance %s', self.project.id, db.id) self.t_client.instances.delete(db)",False,self.dry_run,force,0.6543712615966797 4488,"def _get_config_directory(): """"""Find the predefined detector config directory."""""" try: repo_dpath = dirname(dirname(dirname(__file__))) except NameError: import mmdet repo_dpath = dirname(dirname(mmdet.__file__)) config_dpath = join(repo_dpath, 'configs') if: raise Exception('Cannot find config path') return config_dpath",True,not exists(config_dpath),not exists(config_dpath),0.6436673402786255 4489,"def run(self, ssoObs, orb, Hval): vis = _setVis(ssoObs, self.snrLimit, self.snrCol, self.visCol) if: return 0 arc = ssoObs[self.mjdCol][vis].max() - ssoObs[self.mjdCol][vis].min() return arc",True,len(vis) == 0,len(vis) == 0,0.6540035009384155 4490,"def tearDown(self): """""" tearDown for every test """""" ret = is_peer_connected(self.mnode, self.servers) if: ret = peer_probe_servers(self.mnode, self.random_server) if not ret: raise ExecutionError('Failed to peer probe failed in servers %s' % self.random_server) g.log.info('All peers are in connected state') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.6610760688781738 4491,"def tearDown(self): """""" tearDown for every test """""" ret = is_peer_connected(self.mnode, self.servers) if not ret: ret = peer_probe_servers(self.mnode, self.random_server) if: raise ExecutionError('Failed to peer probe failed in servers %s' % self.random_server) g.log.info('All peers are in connected state') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.6615009307861328 4492,"def __call__(self, ds: tf.data.Dataset, task_feature_lengths: Mapping[str, int]) -> tf.data.Dataset: if: return self.prefixsuffixlm_feature_converter(ds, task_feature_lengths) if 'inputs' in task_feature_lengths: return self.prefixlm_feature_converter(ds, task_feature_lengths) else: return self.strictlm_feature_converter(ds, task_feature_lengths)",True,'suffixes' in task_feature_lengths,'suffixes' in task_feature_lengths,0.6533792614936829 4493,"def __call__(self, ds: tf.data.Dataset, task_feature_lengths: Mapping[str, int]) -> tf.data.Dataset: if'suffixes' in task_feature_lengths: return self.prefixsuffixlm_feature_converter(ds, task_feature_lengths) if: return self.prefixlm_feature_converter(ds, task_feature_lengths) else: return self.strictlm_feature_converter(ds, task_feature_lengths)",True,'inputs' in task_feature_lengths,'inputs' in task_feature_lengths,0.6535411477088928 4494,"def _aug_mapper(self, x): check_dtype(x) with self._exception_handler.catch(): if: x = copy_mod.deepcopy(x) return self.augs.augment(x)",True,self._copy,self._copy,0.6640074253082275 4495,"def print_race(Race_list, position): racemsg = '' for id in range(1, 6): cid = Race_list[id - 1] c = runchara.Run_chara(str(cid)) icon = c.geticon() for n in range(1, ROADLENGTH + 1): if n!= position[id - 1]: racemsg = racemsg + ROAD else: racemsg = racemsg + str(icon) if: racemsg = racemsg + '\n' return racemsg",False,id != 5,position[id - 1] != '\n',0.6624009609222412 4496,"def print_race(Race_list, position): racemsg = '' for id in range(1, 6): cid = Race_list[id - 1] c = runchara.Run_chara(str(cid)) icon = c.geticon() for n in range(1, ROADLENGTH + 1): if: racemsg = racemsg + ROAD else: racemsg = racemsg + str(icon) if id!= 5: racemsg = racemsg + '\n' return racemsg",False,n != position[id - 1],icon == 'RAD' or n == 0,0.6494740843772888 4497,"def getTimezone(self, tzid): for timezone in self.getComponents(definitions.cICalComponent_VTIMEZONE): if: return timezone else: return None",False,timezone.getID() == tzid,timezone.getId() == tzid,0.6492871046066284 4498,"def ensure_empty_line_above(self, starts_with, ends_with): index = len(self.__lines) - 2 while index >= 0: potentialEmptyLine = self.__lines[index] if: break elif not potentialEmptyLine.item(0).startswith(starts_with) and potentialEmptyLine.item(-1)!= ends_with: self.__lines.insert(index + 1, OutputLine(self)) self.previous_line = self.__lines[-2] break index -= 1",False,potentialEmptyLine.is_empty(),potentialEmptyLine is None,0.6441745758056641 4499,"def ensure_empty_line_above(self, starts_with, ends_with): index = len(self.__lines) - 2 while index >= 0: potentialEmptyLine = self.__lines[index] if potentialEmptyLine.is_empty(): break elif: self.__lines.insert(index + 1, OutputLine(self)) self.previous_line = self.__lines[-2] break index -= 1",False,not potentialEmptyLine.item(0).startswith(starts_with) and potentialEmptyLine.item(-1) != ends_with,potentialEmptyLine.startswith(starts_with) and potentialEmptyLine.endswith(ends_with),0.6445999145507812 4500,"@property def imports(self) -> Tuple[Import,...]: if: return chain_as_tuple(super().imports, (IMPORT_FIELD,)) return super().imports",False,any((f for f in self.fields if f.field)),self.field is not None,0.6539884805679321 4501,"def __init__(self, exprs, savelist=False): super(Or, self).__init__(exprs, savelist) if: self.mayReturnEmpty = any((e.mayReturnEmpty for e in self.exprs)) else: self.mayReturnEmpty = True",True,self.exprs,self.exprs,0.6554183959960938 4502,"def __init__(self, state=None): if: return super().__init__(state) self.count = 0 self._intersection = None",True,state is not None,state is not None,0.6596710085868835 4503,"def noepyCheckType(data): """"""Verify that the format is supported by this plugin. Default yes"""""" bs = NoeBitStream(data) idstring = noeStrFromBytes(bs.readBytes(4)) if: return 1 return 0",False,idstring == 'BSOB',idstring == 'MOXBIKSA',0.647636890411377 4504,"@property def netloc(self): """"""Network location including host and port"""""" if: return '%s:%d' % (self.host, self.port) return self.host",True,self.port,self.port,0.6600240468978882 4505,"def __init__(self, secret_key, expires_in=None, **kwargs): JSONWebSignatureSerializer.__init__(self, secret_key, **kwargs) if: expires_in = self.DEFAULT_EXPIRES_IN self.expires_in = expires_in",True,expires_in is None,expires_in is None,0.6524479389190674 4506,"def eval_func(func: Callable[..., tf.Tensor]) -> tf.Tensor: feval = func(mc_Xr, **Ys) feval = tf.reshape(feval, (S, N, -1)) if: log_S = tf.math.log(to_default_float(S)) return tf.reduce_logsumexp(feval, axis=0) - log_S else: return tf.reduce_mean(feval, axis=0)",False,logspace,LOG_MODE,0.6649727821350098 4507,"def align(self, align: AlignMethod, width: int, character: str=' ') -> None: """"""Align text to a given width. Args: align (AlignMethod): One of ""left"", ""center"", or ""right"". width (int): Desired width. character (str, optional): Character to pad with. Defaults to "" "". """""" self.truncate(width) excess_space = width - cell_len(self.plain) if: if align == 'left': self.pad_right(excess_space, character) elif align == 'center': left = excess_space // 2 self.pad_left(left, character) self.pad_right(excess_space - left, character) else: self.pad_left(excess_space, character)",False,excess_space,excess_space > 0,0.6537415981292725 4508,"def align(self, align: AlignMethod, width: int, character: str=' ') -> None: """"""Align text to a given width. Args: align (AlignMethod): One of ""left"", ""center"", or ""right"". width (int): Desired width. character (str, optional): Character to pad with. Defaults to "" "". """""" self.truncate(width) excess_space = width - cell_len(self.plain) if excess_space: if: self.pad_right(excess_space, character) elif align == 'center': left = excess_space // 2 self.pad_left(left, character) self.pad_right(excess_space - left, character) else: self.pad_left(excess_space, character)",True,align == 'left',align == 'left',0.6542656421661377 4509,"def align(self, align: AlignMethod, width: int, character: str=' ') -> None: """"""Align text to a given width. Args: align (AlignMethod): One of ""left"", ""center"", or ""right"". width (int): Desired width. character (str, optional): Character to pad with. Defaults to "" "". """""" self.truncate(width) excess_space = width - cell_len(self.plain) if excess_space: if align == 'left': self.pad_right(excess_space, character) elif: left = excess_space // 2 self.pad_left(left, character) self.pad_right(excess_space - left, character) else: self.pad_left(excess_space, character)",False,align == 'center',align == 'right',0.6552609205245972 4510,"def __init__(self, masks, height, width): assert isinstance(masks, list) if: assert isinstance(masks[0], list) assert isinstance(masks[0][0], np.ndarray) self.height = height self.width = width self.masks = masks",False,len(masks) > 0,len(masks) == 1,0.6555612683296204 4511,"def on_search_btn_clicked(self): self.ui.stackedWidget.setCurrentIndex(5) search_text = self.ui.search_input.text().strip() if: self.ui.label_9.setText(search_text)",True,search_text,search_text,0.6528464555740356 4512,"def getPower(self): if: return self.power.read() return self._tryComputeMul(self.current, self.voltage)",False,self.power.exists(),self.power is not None,0.6530599594116211 4513,"def can_build_robot(ores, cost): for o, c in zip(ores, cost): if: return False return True",True,o < c,o < c,0.6644717454910278 4514,"def _get_offset(value, index): if: try: offset = int(value[1:]) except ValueError: offset = 0 offset += index else: try: offset = int(value) except ValueError: offset = None return offset",False,index is not None and value.startswith('p'),value.startswith('/'),0.6415404081344604 4515,"def init(with_threads=1): """"""Initialize threading. Don't bother calling this. If it needs to happen, it will happen. """""" global threaded, _synchLockCreator, XLock if: if not threaded: if threadmodule is not None: threaded = True class XLock(threadingmodule._RLock, object): def __reduce__(self): return (unpickle_lock, ()) _synchLockCreator = XLock() else: raise RuntimeError('Cannot initialize threading, platform lacks thread support') elif threaded: raise RuntimeError('Cannot uninitialize threads') else: pass",True,with_threads,with_threads,0.6598089933395386 4516,"def init(with_threads=1): """"""Initialize threading. Don't bother calling this. If it needs to happen, it will happen. """""" global threaded, _synchLockCreator, XLock if with_threads: if: if threadmodule is not None: threaded = True class XLock(threadingmodule._RLock, object): def __reduce__(self): return (unpickle_lock, ()) _synchLockCreator = XLock() else: raise RuntimeError('Cannot initialize threading, platform lacks thread support') elif threaded: raise RuntimeError('Cannot uninitialize threads') else: pass",False,not threaded,not _synchLockCreator,0.6622679233551025 4517,"def init(with_threads=1): """"""Initialize threading. Don't bother calling this. If it needs to happen, it will happen. """""" global threaded, _synchLockCreator, XLock if with_threads: if not threaded: if threadmodule is not None: threaded = True class XLock(threadingmodule._RLock, object): def __reduce__(self): return (unpickle_lock, ()) _synchLockCreator = XLock() else: raise RuntimeError('Cannot initialize threading, platform lacks thread support') elif: raise RuntimeError('Cannot uninitialize threads') else: pass",True,threaded,threaded,0.6657181978225708 4518,"def init(with_threads=1): """"""Initialize threading. Don't bother calling this. If it needs to happen, it will happen. """""" global threaded, _synchLockCreator, XLock if with_threads: if not threaded: if: threaded = True class XLock(threadingmodule._RLock, object): def __reduce__(self): return (unpickle_lock, ()) _synchLockCreator = XLock() else: raise RuntimeError('Cannot initialize threading, platform lacks thread support') elif threaded: raise RuntimeError('Cannot uninitialize threads') else: pass",True,threadmodule is not None,threadmodule is not None,0.6526422500610352 4519,"def get_objects_from_zip_stream(zipf, file_handler, object_handler): for filename in zipf.namelist(): if: blob = zipf.read(filename) extract_iwa_archives(blob, filename, file_handler, object_handler) else: blob = zipf.read(filename) file_handler(filename, blob)",False,filename.endswith('.iwa'),"isinstance(filename, str)",0.644232451915741 4520,"def unsign(wheelfile): """""" Remove RECORD.jws from a wheel by truncating the zip file. RECORD.jws must be at the end of the archive. The zip file must be an ordinary archive, with the compressed files and the directory in the same order, and without any non-zip content after the truncation point. """""" vzf = VerifyingZipFile(wheelfile, 'a') info = vzf.infolist() if: raise WheelError('RECORD.jws not found at end of archive.') vzf.pop() vzf.close()",False,not (len(info) and info[-1].filename.endswith('/RECORD.jws')),info['end'] != info['zip'],0.6426606178283691 4521,"def __init__(self, dp_replace=None, request_limit=0): """""" :param dp_replace: optional tuple of (regex, replacement) indicating the location of data files :param request_limit: optional limit (in bytes) on how much data can be extracted at once """""" if: self.dp_replace_re = re.compile(dp_replace[0]) self.dp_replace_sub = dp_replace[1] else: self.dp_replace_re = None self.dp_replace_sub = None self.request_limit = request_limit",True,dp_replace,dp_replace,0.6659560799598694 4522,"def AddSuperSplit(output_zip): """"""Create split super_*.img and store it in output_zip."""""" outdir = os.path.join(OPTIONS.input_tmp, 'OTA') built = build_super_image.BuildSuperImage(OPTIONS.input_tmp, outdir) if: for dev in OPTIONS.info_dict['super_block_devices'].strip().split(): img = OutputFile(output_zip, OPTIONS.input_tmp, 'OTA','super_' + dev + '.img') img.Write()",True,built,built,0.6638088226318359 4523,"def resnet50(num_classes, loss='softmax', pretrained=True, **kwargs): model = ResNet(num_classes=num_classes, loss=loss, block=Bottleneck, layers=[3, 4, 6, 3], width_ratio=1.0, height_ratio=0.3, **kwargs) if: init_pretrained_weights(model, model_urls['resnet50']) return model",True,pretrained,pretrained,0.6702910661697388 4524,"def visit(self, buffer): pos = 0 n_buffer = len(buffer) bufn = (buffer, n_buffer) magic, pos = ff_read(bufn, pos, 4) if: raise Exception('Bad MAGIC {}'.format(magic)) version, pos = ff_read_u32(bufn, pos) self.visit_node(bufn, pos, 0)",False,magic != b'RTPC',magic != 'MODL',0.6465376019477844 4525,"def _appendToHistory(self, txt): if: return if self._hasHistory: self._histText.insert(tkinter.END, '\n' + txt.strip(), force=True) else: self._histText.insert(tkinter.END, txt.strip(), force=True) self._hasHistory = True if self._histFrame.winfo_ismapped(): self._histText.see(tkinter.END) self._expBttn.configure(state=tkinter.NORMAL)",False,len(txt.strip()) < 1,txt is None,0.6463403701782227 4526,"def _appendToHistory(self, txt): if len(txt.strip()) < 1: return if: self._histText.insert(tkinter.END, '\n' + txt.strip(), force=True) else: self._histText.insert(tkinter.END, txt.strip(), force=True) self._hasHistory = True if self._histFrame.winfo_ismapped(): self._histText.see(tkinter.END) self._expBttn.configure(state=tkinter.NORMAL)",False,self._hasHistory,txt[0] == '\n',0.6540970802307129 4527,"def _appendToHistory(self, txt): if len(txt.strip()) < 1: return if self._hasHistory: self._histText.insert(tkinter.END, '\n' + txt.strip(), force=True) else: self._histText.insert(tkinter.END, txt.strip(), force=True) self._hasHistory = True if: self._histText.see(tkinter.END) self._expBttn.configure(state=tkinter.NORMAL)",False,self._histFrame.winfo_ismapped(),self._expBttn.status == tkinter.END,0.6451781988143921 4528,"def add_noise(self, noise=0): """""" :param noise: magnitude of the noise :type noise: float :return: a copy of state with noisy on-site tensors. For default value of ``noise`` being zero ``self`` is returned. :rtype: IPEPS_ABELIAN_C4V Create a new state by adding random uniform noise with magnitude ``noise`` to all copies of on-site tensors. The noise is added to all blocks making up the individual on-site tensors. """""" if: return self _tmp = self.site() t_data, D_data = _tmp.get_leg_charges_and_dims(native=True) t_noise = yastn.rand(config=_tmp.config, s=_tmp.s, n=_tmp.n, t=t_data, D=D_data, isdiag=_tmp.isdiag) site = _tmp + noise * t_noise state = IPEPS_ABELIAN_C4V(self.engine, site, self.irrep) state = state.symmetrize() return state",True,noise == 0,noise == 0,0.6646823883056641 4529,"def schedule(req): if: return if req.constraint: return ordered_reqs.add(req) for dep in self._dependencies[req]: schedule(dep) order.append(req)",False,req.satisfied_by or req in ordered_reqs,req in ordered_reqs,0.6483808755874634 4530,"def schedule(req): if req.satisfied_by or req in ordered_reqs: return if: return ordered_reqs.add(req) for dep in self._dependencies[req]: schedule(dep) order.append(req)",False,req.constraint,req in self._dependencies,0.6606564521789551 4531,"def validate_unique(self, exclude=None): if: qs = ReleaseFile.objects.filter(release=self.release, os=self.os, download_button=True).exclude(pk=self.id) if qs.count() > 0: raise ValidationError('Only one Release File per OS can have ""Download button"" enabled') super(ReleaseFile, self).validate_unique(exclude=exclude)",False,self.download_button,exclude is None,0.6536102294921875 4532,"def validate_unique(self, exclude=None): if self.download_button: qs = ReleaseFile.objects.filter(release=self.release, os=self.os, download_button=True).exclude(pk=self.id) if: raise ValidationError('Only one Release File per OS can have ""Download button"" enabled') super(ReleaseFile, self).validate_unique(exclude=exclude)",False,qs.count() > 0,len(qs) > 1,0.6513723134994507 4533,"def datagram_received(self, datagram, transportAddress): if: raise error.CarrierError('Unable to call cbFun') else: self.loop.call_soon(self._cbFun, self, transportAddress, datagram)",False,self._cbFun is None,not self._cbFun,0.6486669778823853 4534,"def _get_optimizer_ucl(self, lr=None, lr_rho=None): if: lr = self.lr if lr_rho is None: lr_rho = self.lr_rho if self.args.optimizer == 'Adam': return torch.optim.Adam(self.model.parameters(), lr=lr, lr_rho=lr_rho, param_name=self.param_name) if self.args.optimizer == 'SGD': return torch.optim.SGD(self.model.parameters(), lr=lr)",True,lr is None,lr is None,0.664311408996582 4535,"def _get_optimizer_ucl(self, lr=None, lr_rho=None): if lr is None: lr = self.lr if: lr_rho = self.lr_rho if self.args.optimizer == 'Adam': return torch.optim.Adam(self.model.parameters(), lr=lr, lr_rho=lr_rho, param_name=self.param_name) if self.args.optimizer == 'SGD': return torch.optim.SGD(self.model.parameters(), lr=lr)",True,lr_rho is None,lr_rho is None,0.6596890091896057 4536,"def _get_optimizer_ucl(self, lr=None, lr_rho=None): if lr is None: lr = self.lr if lr_rho is None: lr_rho = self.lr_rho if: return torch.optim.Adam(self.model.parameters(), lr=lr, lr_rho=lr_rho, param_name=self.param_name) if self.args.optimizer == 'SGD': return torch.optim.SGD(self.model.parameters(), lr=lr)",True,self.args.optimizer == 'Adam',self.args.optimizer == 'Adam',0.6493844985961914 4537,"def _get_optimizer_ucl(self, lr=None, lr_rho=None): if lr is None: lr = self.lr if lr_rho is None: lr_rho = self.lr_rho if self.args.optimizer == 'Adam': return torch.optim.Adam(self.model.parameters(), lr=lr, lr_rho=lr_rho, param_name=self.param_name) if: return torch.optim.SGD(self.model.parameters(), lr=lr)",True,self.args.optimizer == 'SGD',self.args.optimizer == 'SGD',0.6511936187744141 4538,"def irc_KICK(self, prefix, params): """""" Called when a user is kicked from a channel. """""" kicker = prefix.split('!')[0] channel = params[0] kicked = params[1] message = params[-1] if: self.kickedFrom(channel, kicker, message) else: self.userKicked(kicked, channel, kicker, message)",False,kicked.lower() == self.nickname.lower(),kicked == 'from',0.6449493169784546 4539,"@property def shape_nodes(self): """"""Return the number of nodes along each axis. This property returns a tuple containing the number of nodes along the :math:`x` (radial), :math:`y` (azimuthal) and :math:`z` (vertical) directions, respectively. In the case where the mesh is symmetric, the number of nodes defining the discretization in the azimuthal direction is *0* ; see :py:attr:`~.CylindricalMesh.is_symmetric`. Returns ------- (dim) tuple of int Number of nodes in the :math:`x` (radial), :math:`y` (azimuthal) and :math:`z` (vertical) directions, respectively. """""" vnC = self.shape_cells if: if self.includes_zero: return (vnC[0], 0, vnC[2] + 1) return (vnC[0] + 1, 0, vnC[2] + 1) elif self.is_wrapped: return (vnC[0] + 1, vnC[1], vnC[2] + 1) else: return super().shape_nodes",True,self.is_symmetric,self.is_symmetric,0.6489313244819641 4540,"@property def shape_nodes(self): """"""Return the number of nodes along each axis. This property returns a tuple containing the number of nodes along the :math:`x` (radial), :math:`y` (azimuthal) and :math:`z` (vertical) directions, respectively. In the case where the mesh is symmetric, the number of nodes defining the discretization in the azimuthal direction is *0* ; see :py:attr:`~.CylindricalMesh.is_symmetric`. Returns ------- (dim) tuple of int Number of nodes in the :math:`x` (radial), :math:`y` (azimuthal) and :math:`z` (vertical) directions, respectively. """""" vnC = self.shape_cells if self.is_symmetric: if: return (vnC[0], 0, vnC[2] + 1) return (vnC[0] + 1, 0, vnC[2] + 1) elif self.is_wrapped: return (vnC[0] + 1, vnC[1], vnC[2] + 1) else: return super().shape_nodes",False,self.includes_zero,self.is_wrapped,0.6477648615837097 4541,"@property def shape_nodes(self): """"""Return the number of nodes along each axis. This property returns a tuple containing the number of nodes along the :math:`x` (radial), :math:`y` (azimuthal) and :math:`z` (vertical) directions, respectively. In the case where the mesh is symmetric, the number of nodes defining the discretization in the azimuthal direction is *0* ; see :py:attr:`~.CylindricalMesh.is_symmetric`. Returns ------- (dim) tuple of int Number of nodes in the :math:`x` (radial), :math:`y` (azimuthal) and :math:`z` (vertical) directions, respectively. """""" vnC = self.shape_cells if self.is_symmetric: if self.includes_zero: return (vnC[0], 0, vnC[2] + 1) return (vnC[0] + 1, 0, vnC[2] + 1) elif: return (vnC[0] + 1, vnC[1], vnC[2] + 1) else: return super().shape_nodes",False,self.is_wrapped,self.includes_one,0.6511040925979614 4542,"def __init__(self, vehicle, shotmgr): self.vehicle = vehicle self.shotmgr = shotmgr self.waypoints = [] self.roi = None self.pathHandler = None self.camPitch = 0 self.camYaw = 0 self.camDir = 1 self.canResetCam = False self.setButtonMappings() self.altLimit = self.shotmgr.getParam('FENCE_ALT_MAX', DEFAULT_FENCE_ALT_MAX) if: self.altLimit = None logger.log('[Selfie]: Altitude Limit is disabled.')",False,"self.shotmgr.getParam('FENCE_ENABLE', DEFAULT_FENCE_ENABLE) == 0",self.altLimit is None or self.altLimit < 0,0.650628924369812 4543,"def _get_miscstats_string(self): """"""Return portion of extended stat string about misc attributes"""""" misc_string = '' tdsc = self._get_total_dest_size_change() if: misc_string += 'TotalDestinationSizeChange %s (%s)\n' % (tdsc, self.get_byte_summary_string(tdsc)) if self.Errors is not None: misc_string += 'Errors %d\n' % self.Errors return misc_string",True,tdsc is not None,tdsc is not None,0.6628061532974243 4544,"def _get_miscstats_string(self): """"""Return portion of extended stat string about misc attributes"""""" misc_string = '' tdsc = self._get_total_dest_size_change() if tdsc is not None: misc_string += 'TotalDestinationSizeChange %s (%s)\n' % (tdsc, self.get_byte_summary_string(tdsc)) if: misc_string += 'Errors %d\n' % self.Errors return misc_string",False,self.Errors is not None,self.Errors,0.6499463319778442 4545,"def get_corresponding_lineno(self, lineno): """"""Return the source line number of a line number in the generated bytecode as they are not in sync. """""" for template_line, code_line in reversed(self.debug_info): if: return template_line return 1",False,code_line <= lineno,code_line == lineno,0.6536411046981812 4546,"@property def symmetry(self): if: self._symmetry = load_mesh_symmetry(self.mesh_info.symmetry, self.device) return self._symmetry",True,self._symmetry is None and self.mesh_info is not None,self._symmetry is None and self.mesh_info is not None,0.646405816078186 4547,"def preprocess(i): os_info = i['os_info'] if: return {'return': 1, 'error': 'Windows is not supported in this script yet'} env = i['env'] automation = i['automation'] recursion_spaces = i['recursion_spaces'] need_version = env.get('CM_VERSION', '') if need_version == '': return {'return': 1, 'error': 'internal problem - CM_VERSION is not defined in env'} print(recursion_spaces +' # Requested version: {}'.format(need_version)) return {'return': 0}",True,os_info['platform'] == 'windows',os_info['platform'] == 'windows',0.6515749096870422 4548,"def preprocess(i): os_info = i['os_info'] if os_info['platform'] == 'windows': return {'return': 1, 'error': 'Windows is not supported in this script yet'} env = i['env'] automation = i['automation'] recursion_spaces = i['recursion_spaces'] need_version = env.get('CM_VERSION', '') if: return {'return': 1, 'error': 'internal problem - CM_VERSION is not defined in env'} print(recursion_spaces +' # Requested version: {}'.format(need_version)) return {'return': 0}",False,need_version == '',need_version is None,0.6547970771789551 4549,"def _infer_return_type(*args): """"""Look at the type of all args and divine their implied return type."""""" return_type = None for arg in args: if arg is None: continue if isinstance(arg, bytes): if return_type is str: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = bytes else: if return_type is bytes: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = str if: return str return return_type",True,return_type is None,return_type is None,0.6477899551391602 4550,"def _infer_return_type(*args): """"""Look at the type of all args and divine their implied return type."""""" return_type = None for arg in args: if: continue if isinstance(arg, bytes): if return_type is str: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = bytes else: if return_type is bytes: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = str if return_type is None: return str return return_type",True,arg is None,arg is None,0.6522343158721924 4551,"def _infer_return_type(*args): """"""Look at the type of all args and divine their implied return type."""""" return_type = None for arg in args: if arg is None: continue if: if return_type is str: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = bytes else: if return_type is bytes: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = str if return_type is None: return str return return_type",False,"isinstance(arg, bytes)",arg == 'mix_bytes',0.6436149477958679 4552,"def _infer_return_type(*args): """"""Look at the type of all args and divine their implied return type."""""" return_type = None for arg in args: if arg is None: continue if isinstance(arg, bytes): if: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = bytes else: if return_type is bytes: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = str if return_type is None: return str return return_type",False,return_type is str,return_type is None,0.6503479480743408 4553,"def _infer_return_type(*args): """"""Look at the type of all args and divine their implied return type."""""" return_type = None for arg in args: if arg is None: continue if isinstance(arg, bytes): if return_type is str: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = bytes else: if: raise TypeError(""Can't mix bytes and non-bytes in path components."") return_type = str if return_type is None: return str return return_type",False,return_type is bytes,return_type is str,0.6504719257354736 4554,"def nif_name(self, blender_name): if: return self.nif.nif_name(blender_name) else: return blender_name",True,self.do_rename_bones or self.rename_bones_nift,self.do_rename_bones or self.rename_bones_nift,0.6467915773391724 4555,"def __del__(self): if: warnings.warn(f'unclosed transport {self!r}', ResourceWarning, source=self) self._pipe.close()",True,self._pipe is not None,self._pipe is not None,0.6556999683380127 4556,"def load_vocab(vocab_file): """"""Loads a vocabulary file into a dictionary."""""" vocab = collections.OrderedDict() index = 0 with open(vocab_file, 'r', encoding='utf-8') as reader: while True: token = convert_to_unicode(reader.readline()) if: break token = token.strip() vocab[token] = index index += 1 return vocab",True,not token,not token,0.6575927138328552 4557,"@property def domain(self): if: return None return self[-1].domain",False,len(self) < 3,not self,0.6508684158325195 4558,"def destroy(self): if: self.toggle_radar() sensors = [self.camera_manager.sensor, self.collision_sensor.sensor, self.lane_invasion_sensor.sensor, self.gnss_sensor.sensor, self.imu_sensor.sensor] for sensor in sensors: if sensor is not None: sensor.stop() sensor.destroy() if self.player is not None: self.player.destroy()",False,self.radar_sensor is not None,self.is_radar,0.648886501789093 4559,"def destroy(self): if self.radar_sensor is not None: self.toggle_radar() sensors = [self.camera_manager.sensor, self.collision_sensor.sensor, self.lane_invasion_sensor.sensor, self.gnss_sensor.sensor, self.imu_sensor.sensor] for sensor in sensors: if sensor is not None: sensor.stop() sensor.destroy() if: self.player.destroy()",True,self.player is not None,self.player is not None,0.6479796171188354 4560,"def destroy(self): if self.radar_sensor is not None: self.toggle_radar() sensors = [self.camera_manager.sensor, self.collision_sensor.sensor, self.lane_invasion_sensor.sensor, self.gnss_sensor.sensor, self.imu_sensor.sensor] for sensor in sensors: if: sensor.stop() sensor.destroy() if self.player is not None: self.player.destroy()",False,sensor is not None,sensor.enabled,0.6564157009124756 4561,"def atom_degree_one_hot(atom, allowable_set=None, encode_unknown=False): """"""One hot encoding for the degree of an atom. """""" if: allowable_set = list(range(11)) return one_hot_encoding(atom.GetDegree(), allowable_set, encode_unknown)",True,allowable_set is None,allowable_set is None,0.6521517038345337 4562,"def _finish_pending_start_element(self, endElement=False): if: self._write('>') self._pending_start_element = False",False,self._pending_start_element,endElement,0.647447943687439 4563,"def getIrafVerTup(): """""" Return current IRAF version as a tuple (ints until last item) """""" verlist = getIrafVer().split('.') outlist = [] for v in verlist: if: outlist.append(int(v)) else: outlist.append(v) return tuple(outlist)",True,v.isdigit(),v.isdigit(),0.6490563154220581 4564,"def index(self, subStr, _fromIndex=0): s = self.__str__() ls = len(s) if: _fromIndex = 0 if _fromIndex >= ls: _fromIndex = ls - 1 s = s[_fromIndex:] if self.__contains__(subStr): return super().index(subStr) return -1",True,_fromIndex < 0,_fromIndex < 0,0.6544984579086304 4565,"def index(self, subStr, _fromIndex=0): s = self.__str__() ls = len(s) if _fromIndex < 0: _fromIndex = 0 if: _fromIndex = ls - 1 s = s[_fromIndex:] if self.__contains__(subStr): return super().index(subStr) return -1",False,_fromIndex >= ls,ls > 1,0.6538748741149902 4566,"def index(self, subStr, _fromIndex=0): s = self.__str__() ls = len(s) if _fromIndex < 0: _fromIndex = 0 if _fromIndex >= ls: _fromIndex = ls - 1 s = s[_fromIndex:] if: return super().index(subStr) return -1",False,self.__contains__(subStr),subStr in s,0.6470857858657837 4567,"@classmethod def _init_multihandler(cls): with cls._multi_handler_lock: if: return cls._multi_handler = MultiLogfileHandler(cls._MAP_KEY) cls._multi_handler.setLevel(logging.DEBUG)",True,cls._multi_handler,cls._multi_handler,0.6533100605010986 4568,"def patch_device(module): try: graphs = [module.graph] if hasattr(module, 'graph') else [] except RuntimeError: graphs = [] if: graphs.append(module.forward1.graph) for graph in graphs: for node in graph.findAllNodes('prim::Constant'): if 'value' in node.attributeNames() and str(node['value']).startswith('cuda'): node.copyAttributes(device_node)",True,"hasattr(module, 'forward1')","hasattr(module, 'forward1')",0.6467068195343018 4569,"def patch_device(module): try: graphs = [module.graph] if hasattr(module, 'graph') else [] except RuntimeError: graphs = [] if hasattr(module, 'forward1'): graphs.append(module.forward1.graph) for graph in graphs: for node in graph.findAllNodes('prim::Constant'): if: node.copyAttributes(device_node)",True,'value' in node.attributeNames() and str(node['value']).startswith('cuda'),'value' in node.attributeNames() and str(node['value']).startswith('cuda'),0.6435933113098145 4570,"def parse(self, source, sink, **args): assert sink.store.context_aware g = ConjunctiveGraph(store=sink.store) self._parser = create_parser(g) content_handler = self._parser.getContentHandler() preserve_bnode_ids = args.get('preserve_bnode_ids', None) if: content_handler.preserve_bnode_ids = preserve_bnode_ids self._parser.parse(source)",True,preserve_bnode_ids is not None,preserve_bnode_ids is not None,0.6490976810455322 4571,"def wrap_socket(self, socket, server_hostname=None): warnings.warn('A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.', InsecurePlatformWarning) kwargs = {'keyfile': self.keyfile, 'certfile': self.certfile, 'ca_certs': self.ca_certs, 'cert_reqs': self.verify_mode,'ssl_version': self.protocol} if: return wrap_socket(socket, ciphers=self.ciphers, **kwargs) else: return wrap_socket(socket, **kwargs)",True,self.supports_set_ciphers,self.supports_set_ciphers,0.6450991630554199 4572,"def _replacement(self): if: self._tlm_adjoint__replacement = ReplacementConstant(self) return self._tlm_adjoint__replacement",True,"not hasattr(self, '_tlm_adjoint__replacement')","not hasattr(self, '_tlm_adjoint__replacement')",0.6492080688476562 4573,"def slugify(self, tag, i=None): slug = 'category-%s' % tag.lower() if: slug += '-%d' % i return slug",True,i is not None,i is not None,0.6546849012374878 4574,"def disable_torque(self): if: return with self.pt_lock: with DelayedKeyboardInterrupt(): dxl_comm_result, dxl_error = self.packet_handler.write1ByteTxRx(self.port_handler, self.dxl_id, XL430_ADDR_TORQUE_ENABLE, 0) self.handle_comm_result('XL430_ADDR_TORQUE_ENABLE', dxl_comm_result, dxl_error)",True,not self.hw_valid,not self.hw_valid,0.6542553305625916 4575,"def add_other_text_param(self, key, value): if: self.udf_params = dict() self.udf_params[key] = value",True,not self.udf_params,not self.udf_params,0.6541146039962769 4576,"def run(self): src_dir = self.input().path os.system('mkdir -p ""%s""' % self.output().path) pattern = join(src_dir, '*.zip') zip_files = glob.glob(pattern) if: logging.warning('Expected to find one or more daily med SPL files') extract_dir = self.output().path for zip_file in zip_files: common.shell_cmd_quiet('unzip -oq -d %(extract_dir)s %(zip_file)s' % locals())",True,len(zip_files) == 0,len(zip_files) == 0,0.6471353769302368 4577,"def set_request(self, request, *args, **kwargs): """""" Set request to coroutine `request` Any positional or keyword arguments are passed to `request` at each call. """""" self.skip_ongoing_request() self._debug_info['request'] = _func_call_str(request, *args, **kwargs) log.debug('Setting new request: %s', self) if: self._request = functools.partial(request, *args, **kwargs) else: self._request = request",False,args or kwargs,self._do_partial,0.6627829670906067 4578,"def runCreateBackup(self): self.defineCurrentProject() if: return if self.isLayerOnEdition(): return utils = QGISRedUtils(self.ProjectDirectory, self.NetworkName, self.iface) path = utils.saveBackup() self.iface.messageBar().pushMessage('QGISRed', 'Backup stored in:'+ path, level=0, duration=5)",True,not self.isValidProject(),not self.isValidProject(),0.651165246963501 4579,"def runCreateBackup(self): self.defineCurrentProject() if not self.isValidProject(): return if: return utils = QGISRedUtils(self.ProjectDirectory, self.NetworkName, self.iface) path = utils.saveBackup() self.iface.messageBar().pushMessage('QGISRed', 'Backup stored in:'+ path, level=0, duration=5)",True,self.isLayerOnEdition(),self.isLayerOnEdition(),0.6473019123077393 4580,"def get_data(self, obj): kwargs = {'parent_lookup_asset': obj.uid} format = self.context.get('format') if: kwargs['format'] = format return reverse('submission-list', kwargs=kwargs, request=self.context.get('request', None))",True,format,format,0.6767302751541138 4581,"def fetch_trades(self, symbol, since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = {'symbol': market['id']} if: request['limit'] = limit if since is not None: request['sort'] = 'ASC' request['from'] = self.iso8601(since) response = self.publicGetTradesSymbol(self.extend(request, params)) return self.parse_trades(response, market, since, limit)",True,limit is not None,limit is not None,0.6618914008140564 4582,"def fetch_trades(self, symbol, since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = {'symbol': market['id']} if limit is not None: request['limit'] = limit if: request['sort'] = 'ASC' request['from'] = self.iso8601(since) response = self.publicGetTradesSymbol(self.extend(request, params)) return self.parse_trades(response, market, since, limit)",True,since is not None,since is not None,0.6591042280197144 4583,"def valid(self): if: return False now = (datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds() if self.EXPIRE - 432000 < now: self.refresh_token() return True",False,self.TOKEN == '',not self.token,0.6534029841423035 4584,"def valid(self): if self.TOKEN == '': return False now = (datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds() if: self.refresh_token() return True",False,self.EXPIRE - 432000 < now,now >= self.TOKEN_REFRESH_INTERVAL,0.6478370428085327 4585,"@property def published_to(self): if: return self.published_at.data.split(' - ')[1].strip() +'23:59:59' else: return self.published_from",True,self.published_at.data and '-' in self.published_at.data,self.published_at.data and '-' in self.published_at.data,0.6485066413879395 4586,"def __str__(self): if: return self.name if self.strRepr is None: self.strRepr = '{' +''.join((_ustr(e) for e in self.exprs)) + '}' return self.strRepr",True,"hasattr(self, 'name')","hasattr(self, 'name')",0.6499038934707642 4587,"def __str__(self): if hasattr(self, 'name'): return self.name if: self.strRepr = '{' +''.join((_ustr(e) for e in self.exprs)) + '}' return self.strRepr",True,self.strRepr is None,self.strRepr is None,0.6494103670120239 4588,"def setUp(self): """""" Setup and mount volume or raise ExecutionError """""" self.get_super_method(self,'setUp')() ret = self.setup_volume_and_mount_volume([self.mounts[0]]) if: raise ExecutionError('Failed to Setup and Mount Volume') self.add_brick_list = form_bricks_list_to_add_brick(self.mnode, self.volname, self.servers, self.all_servers_info, distribute_count=1) if not self.add_brick_list: raise ExecutionError('Volume %s: Failed to form bricks list for add-brick' % self.volname) g.log.info('Volume %s: Formed bricks list for add-brick operation', (self.add_brick_list, self.volname))",True,not ret,not ret,0.6630406379699707 4589,"def setUp(self): """""" Setup and mount volume or raise ExecutionError """""" self.get_super_method(self,'setUp')() ret = self.setup_volume_and_mount_volume([self.mounts[0]]) if not ret: raise ExecutionError('Failed to Setup and Mount Volume') self.add_brick_list = form_bricks_list_to_add_brick(self.mnode, self.volname, self.servers, self.all_servers_info, distribute_count=1) if: raise ExecutionError('Volume %s: Failed to form bricks list for add-brick' % self.volname) g.log.info('Volume %s: Formed bricks list for add-brick operation', (self.add_brick_list, self.volname))",True,not self.add_brick_list,not self.add_brick_list,0.6485990285873413 4590,"def output_array_of_mediametadata(data_objects): if: return for data_object in data_objects['MediaMetaData']: output_mediametadata(data_object)",True,data_objects is None or len(data_objects) == 0,data_objects is None or len(data_objects) == 0,0.6468867063522339 4591,"def get_next_row(self): row = self.get_dict_next() namespace = None instance = None day = None if: namespace = row['namespace'] instance = row['instance'] day = row['day'] file = row['file'] blob = self.get_file(file) if blob: json_data = json.loads(blob) else: json_data = [] row = json_data return (row, namespace, instance, day)",False,row,row['namespace'],0.6863338351249695 4592,"def get_next_row(self): row = self.get_dict_next() namespace = None instance = None day = None if row: namespace = row['namespace'] instance = row['instance'] day = row['day'] file = row['file'] blob = self.get_file(file) if: json_data = json.loads(blob) else: json_data = [] row = json_data return (row, namespace, instance, day)",True,blob,blob,0.6800668239593506 4593,"def truncate(self, size=None): if: raise ValueError('I/O operation on closed file') raise IOError(9, 'Bad file descriptor')",True,self.closed,self.closed,0.662164032459259 4594,"def _escape_cdata(text, encoding=None, replace=string.replace): try: if: try: text = _encode(text, encoding) except UnicodeError: return _encode_entity(text) text = replace(text, '&', '&') text = replace(text, '<', '<') text = replace(text, '>', '>') return text except (TypeError, AttributeError): _raise_serialization_error(text)",True,encoding,encoding,0.6617276072502136 4595,"def __init__(self, config, parent_name=None, name='ssd_feature_extractor', **kwargs): super(SSDFeatureExtractor, self).__init__(config, name=name, **kwargs) if: raise ValueError('Invalid architecture ""{}""'.format(self._architecture)) self.parent_name = parent_name self.activation_fn = tf.nn.relu",False,self._architecture not in VALID_SSD_ARCHITECTURES,self._architecture not in VALID_ARCHITECTURES,0.6456208229064941 4596,"def _parse_command_response(self, response, prompt): """""" Instrument will send an 'unrecognized command' response if an error occurred while sending a command. Raise an exception if this occurs. """""" log.debug('%% IN _parse_command_response RESPONSE = %r', response) if: log.debug('command was not recognized') raise InstrumentCommandException('unrecognized command') return response",False,'unrecognized command' in response,response == commands.CMD_NO_COMMAND and (not prompt),0.654981791973114 4597,"def quote_attrib(inStr): s1 = isinstance(inStr, str) and inStr or '%s' % inStr s1 = s1.replace('&', '&') s1 = s1.replace('<', '<') s1 = s1.replace('>', '>') if: if ""'"" in s1: s1 = '""%s""' % s1.replace('""', '"') else: s1 = ""'%s'"" % s1 else: s1 = '""%s""' % s1 return s1",True,"'""' in s1","'""' in s1",0.6593708992004395 4598,"def quote_attrib(inStr): s1 = isinstance(inStr, str) and inStr or '%s' % inStr s1 = s1.replace('&', '&') s1 = s1.replace('<', '<') s1 = s1.replace('>', '>') if '""' in s1: if: s1 = '""%s""' % s1.replace('""', '"') else: s1 = ""'%s'"" % s1 else: s1 = '""%s""' % s1 return s1",True,"""'"" in s1","""'"" in s1",0.6581223011016846 4599,"def output_array_of_deviceosreportfilter(value_sets): if: return output_status_message('Array Of DeviceOSReportFilter:\n') for value_set in value_sets['DeviceOSReportFilter']: output_deviceosreportfilter(value_set)",True,value_sets is None or len(value_sets) == 0,value_sets is None or len(value_sets) == 0,0.643896222114563 4600,"def halves_of_4x4_CTM_MOVE_UP(coord, state, env, verbosity=0): """""" :param coord: site for which to build two halfs of 2x2 subsystem embedded in environment :type coord: tuple(int,int) :param state: wavefunction :type state: IPEPS_ABELIAN :param env: environment :type env: ENV_ABELIAN :return: right and left half of the system as matrices :rtype: yastn.Tensor, yastn.Tensor Builds right and left half of 2x2 subsystem embedded into environment. The `coord` specifies the upper-right site of the 2x2 subsystem. Performs following contraction and then reshaping the resulting tensors into matrices:: C T T C = C2x2_LU(coord+(-1,0)) C2x2(coord) T A B(coord) T C2x2_LD(coord+(-1,1)) C2x2(coord+(0,1)) T C D T C T T C C2x2--1->0 0--C2x2(coord) = _0 0_ |0 1| | | |0 0| half2 half1 C2x2--1 1--C2x2 |_1 1_| """""" tensors = c2x2_RU_t(coord, state, env) + c2x2_RD_t((coord[0], coord[1] + 1), state, env) + c2x2_LU_t((coord[0] - 1, coord[1]), state, env) + c2x2_LD_t((coord[0] - 1, coord[1] + 1), state, env) if: raise RuntimeError('Checkpointing not implemented') else: return halves_of_4x4_CTM_MOVE_UP_c(*tensors)",False,ctm_args.fwd_checkpoint_halves,verbosity > 0,0.6442754864692688 4601,"def vertex_normals_at(self, frame_id): """"""Get or compute the vertex normals at the given frame."""""" if: vn, _ = self.compute_vertex_and_face_normals(frame_id, normalize=True) else: assert len(self._vertex_normals.shape) == 3, f'Got shape {self._vertex_normals.shape}' vn = self._vertex_normals[frame_id] return vn",True,self._vertex_normals is None,self._vertex_normals is None,0.6508874893188477 4602,"def check_arg_type(argument, arg_typ: type): """"""Check the type of an argument"""""" if: raise TypeError('Argument must be %s' % str(arg_typ))",False,"not isinstance(argument, arg_typ)",type(argument) != arg_typ,0.6508891582489014 4603,"def __mod__(self, y): cls = self.__class__ if: cls = self.maxcast(y) return cls(self.arg - y * (self // y))",False,"isinstance(y, moduint)",not cls.maxcast,0.6511019468307495 4604,"def instance(self, **layer_kwargs) -> AbstractModule: kwargs = self.kwargs.copy() kwargs.update(layer_kwargs) stride = kwargs.pop('stride') ops = [self.cls(*self.args, stride=stride if i == 0 else 1, **kwargs) for i in range(self.stacked)] if: return ops[0] return SequentialModulesF(ops)",False,self.stacked == 1,len(ops) == 1,0.6570013761520386 4605,"@classmethod def _parse_hextet(cls, hextet_str): """"""Convert an IPv6 hextet string into an integer. Args: hextet_str: A string, the number to parse. Returns: The hextet as an integer. Raises: ValueError: if the input isn't strictly a hex number from [0..FFFF]. """""" if: raise ValueError('Only hex digits permitted in %r' % hextet_str) if len(hextet_str) > 4: msg = 'At most 4 characters permitted in %r' raise ValueError(msg % hextet_str) return int(hextet_str, 16)",False,not cls._HEX_DIGITS.issuperset(hextet_str),len(hextet_str) > 33554432,0.6426379680633545 4606,"@classmethod def _parse_hextet(cls, hextet_str): """"""Convert an IPv6 hextet string into an integer. Args: hextet_str: A string, the number to parse. Returns: The hextet as an integer. Raises: ValueError: if the input isn't strictly a hex number from [0..FFFF]. """""" if not cls._HEX_DIGITS.issuperset(hextet_str): raise ValueError('Only hex digits permitted in %r' % hextet_str) if: msg = 'At most 4 characters permitted in %r' raise ValueError(msg % hextet_str) return int(hextet_str, 16)",False,len(hextet_str) > 4,len(hextet_str) >= 4,0.646833062171936 4607,"def wait_for_namespace_deleted(namespace): field_selector ='metadata.name={}'.format(namespace) for _ in range(KUBERNETES_RESOURCE_OP_MAX_POLLS): namespaces = core_api().list_namespace(field_selector=field_selector).items if: return cli_logger.verbose('Waiting for namespace delete operation to finish...') time.sleep(KUBERNETES_RESOURCE_OP_POLL_INTERVAL) raise RuntimeError(""Namespace deletion doesn't completed after {} seconds."".format(KUBERNETES_RESOURCE_OP_MAX_POLLS * KUBERNETES_RESOURCE_OP_POLL_INTERVAL))",False,len(namespaces) == 0,namespaces.delete(),0.6500070691108704 4608,"def end(status, message): """"""Exits the plugin with first arg as the return code and the second arg as the message to output"""""" check = 'Portage ' if: print('%sOK: %s' % (check, message)) sys.exit(OK) elif status == WARNING: print('%sWARNING: %s' % (check, message)) sys.exit(WARNING) elif status == CRITICAL: print('%sCRITICAL: %s' % (check, message)) sys.exit(CRITICAL) else: print('UNKNOWN: %s' % message) sys.exit(UNKNOWN)",True,status == OK,status == OK,0.6644324660301208 4609,"def end(status, message): """"""Exits the plugin with first arg as the return code and the second arg as the message to output"""""" check = 'Portage ' if status == OK: print('%sOK: %s' % (check, message)) sys.exit(OK) elif: print('%sWARNING: %s' % (check, message)) sys.exit(WARNING) elif status == CRITICAL: print('%sCRITICAL: %s' % (check, message)) sys.exit(CRITICAL) else: print('UNKNOWN: %s' % message) sys.exit(UNKNOWN)",True,status == WARNING,status == WARNING,0.6635311245918274 4610,"def end(status, message): """"""Exits the plugin with first arg as the return code and the second arg as the message to output"""""" check = 'Portage ' if status == OK: print('%sOK: %s' % (check, message)) sys.exit(OK) elif status == WARNING: print('%sWARNING: %s' % (check, message)) sys.exit(WARNING) elif: print('%sCRITICAL: %s' % (check, message)) sys.exit(CRITICAL) else: print('UNKNOWN: %s' % message) sys.exit(UNKNOWN)",True,status == CRITICAL,status == CRITICAL,0.6625660061836243 4611,"def setup(self, recreate=True, env=None): cmd = 'up -d' if: cmd +='--no-recreate' self._docker_compose_cmd(cmd, env=env) self._wait_for_containers()",False,not recreate,recreate,0.675291895866394 4612,"@classmethod def parser(cls, buf, offset): type_, len_, experimenter = struct.unpack_from(ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR, buf, offset) data = buf[offset + ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE:offset + len_] if: obj = NXAction.parse(data) else: obj = OFPActionExperimenterUnknown(experimenter, data) obj.len = len_ return obj",False,experimenter == ofproto_common.NX_EXPERIMENTER_ID,type_ == 1,0.6462374329566956 4613,"def save(self, save_path): """"""Save the parameters of a tensorflow model. Parameters ---------- save_path : str Prefix of filenames created for the checkpoint """""" self.saver.save(self.sess, save_path, global_step=self.steps) if: self.policy_tf.replay_buffer.save(save_path + '-{}.rb'.format(self.steps))",False,self.save_replay_buffer,self.steps > 0,0.6437937021255493 4614,"def dumps(self, buff): label = self.label body = self.body if: buff.append(f' {label:s};\n') else: buff.append(f' {label:s} {body:s};\n')",True,body is None,body is None,0.6541808247566223 4615,"def sort_method(tpl: WorkStatus): if: return tpl.available return tpl.available * 10",False,tpl.unit.type_id in buildings_5x5,tpl.available == 1,0.6438885927200317 4616,"@recipe('mark importlib as zipsafe', modules=['importlib']) def mark_importlib_zipsafe(graph: ModuleGraph, options: RecipeOptions) -> None: node = graph.find_node('importlib') if: return node.init_module.extension_attributes['py2app.zipsafe'] = True",True,node is None,node is None,0.6596956253051758 4617,"@staticmethod def from_alipay_dict(d): if: return None o = MybankMarketingCampaignBkpromosceneorderPointRefundModel() if 'biz_id' in d: o.biz_id = d['biz_id'] if 'deducted_request_id' in d: o.deducted_request_id = d['deducted_request_id'] if 'lm_order_id' in d: o.lm_order_id = d['lm_order_id'] if'request_id' in d: o.request_id = d['request_id'] if 'user_id' in d: o.user_id = d['user_id'] return o",True,not d,not d,0.6630702018737793 4618,"@staticmethod def from_alipay_dict(d): if not d: return None o = MybankMarketingCampaignBkpromosceneorderPointRefundModel() if: o.biz_id = d['biz_id'] if 'deducted_request_id' in d: o.deducted_request_id = d['deducted_request_id'] if 'lm_order_id' in d: o.lm_order_id = d['lm_order_id'] if'request_id' in d: o.request_id = d['request_id'] if 'user_id' in d: o.user_id = d['user_id'] return o",True,'biz_id' in d,'biz_id' in d,0.6539384126663208 4619,"@staticmethod def from_alipay_dict(d): if not d: return None o = MybankMarketingCampaignBkpromosceneorderPointRefundModel() if 'biz_id' in d: o.biz_id = d['biz_id'] if: o.deducted_request_id = d['deducted_request_id'] if 'lm_order_id' in d: o.lm_order_id = d['lm_order_id'] if'request_id' in d: o.request_id = d['request_id'] if 'user_id' in d: o.user_id = d['user_id'] return o",False,'deducted_request_id' in d,'ddeducted_request_id' in d,0.6533161401748657 4620,"@staticmethod def from_alipay_dict(d): if not d: return None o = MybankMarketingCampaignBkpromosceneorderPointRefundModel() if 'biz_id' in d: o.biz_id = d['biz_id'] if 'deducted_request_id' in d: o.deducted_request_id = d['deducted_request_id'] if: o.lm_order_id = d['lm_order_id'] if'request_id' in d: o.request_id = d['request_id'] if 'user_id' in d: o.user_id = d['user_id'] return o",True,'lm_order_id' in d,'lm_order_id' in d,0.6572126150131226 4621,"@staticmethod def from_alipay_dict(d): if not d: return None o = MybankMarketingCampaignBkpromosceneorderPointRefundModel() if 'biz_id' in d: o.biz_id = d['biz_id'] if 'deducted_request_id' in d: o.deducted_request_id = d['deducted_request_id'] if 'lm_order_id' in d: o.lm_order_id = d['lm_order_id'] if: o.request_id = d['request_id'] if 'user_id' in d: o.user_id = d['user_id'] return o",True,'request_id' in d,'request_id' in d,0.6529725790023804 4622,"@staticmethod def from_alipay_dict(d): if not d: return None o = MybankMarketingCampaignBkpromosceneorderPointRefundModel() if 'biz_id' in d: o.biz_id = d['biz_id'] if 'deducted_request_id' in d: o.deducted_request_id = d['deducted_request_id'] if 'lm_order_id' in d: o.lm_order_id = d['lm_order_id'] if'request_id' in d: o.request_id = d['request_id'] if: o.user_id = d['user_id'] return o",True,'user_id' in d,'user_id' in d,0.6527937650680542 4623,"def have_close_members(self, c0, c1, rmsd_cutoff, metric): """""" returns true if c0 and c1 have members that are closer than rmsd_cutoff """""" print('check close members for clusters'+ str(c0.cluster_id) +'and'+ str(c1.cluster_id)) for n0, n1 in itertools.product(c0.members[1:], c1.members): _ = self.stath0[n0] _ = self.stath1[n1] rmsd, _ = self.rmsd(metric=metric) if: return True return False",False,rmsd < rmsd_cutoff,rmsd != None,0.6460832357406616 4624,"def characters(self, content): if: self.data += content elif self.current == 'phrases': self.total = int(content) elif self.current == 'translated': self.translated = int(content) pc = int(float(self.translated) / self.total * 100) self.data +=':'+ str(pc) + '%\n' elif self.current =='message': self.data += content",False,self.current == 'name',self.current == 'string',0.6537771821022034 4625,"def characters(self, content): if self.current == 'name': self.data += content elif: self.total = int(content) elif self.current == 'translated': self.translated = int(content) pc = int(float(self.translated) / self.total * 100) self.data +=':'+ str(pc) + '%\n' elif self.current =='message': self.data += content",False,self.current == 'phrases',self.current == 'total',0.6486866474151611 4626,"def characters(self, content): if self.current == 'name': self.data += content elif self.current == 'phrases': self.total = int(content) elif: self.translated = int(content) pc = int(float(self.translated) / self.total * 100) self.data +=':'+ str(pc) + '%\n' elif self.current =='message': self.data += content",True,self.current == 'translated',self.current == 'translated',0.6516928672790527 4627,"def characters(self, content): if self.current == 'name': self.data += content elif self.current == 'phrases': self.total = int(content) elif self.current == 'translated': self.translated = int(content) pc = int(float(self.translated) / self.total * 100) self.data +=':'+ str(pc) + '%\n' elif: self.data += content",False,self.current == 'message',self.current == 'content',0.6510397791862488 4628,"def html_tag_and_attrbutes(self): attributes = f'id=""{self.css_id}""' if self.css_id else '' tag ='s' if: tag = 'a' id = self.layout.register_on_click_handler(on_click) attributes += f' href=""{id}""' if (title := self.kwargs.get('title')): attributes += f' title=""{title}""' return (tag, attributes)",False,on_click := self.kwargs.get('on_click'),self.layout.get_on_click_handler,0.6460857391357422 4629,"def html_tag_and_attrbutes(self): attributes = f'id=""{self.css_id}""' if self.css_id else '' tag ='s' if (on_click := self.kwargs.get('on_click')): tag = 'a' id = self.layout.register_on_click_handler(on_click) attributes += f' href=""{id}""' if: attributes += f' title=""{title}""' return (tag, attributes)",False,title := self.kwargs.get('title'),title,0.6470811367034912 4630,"def dot_is_none(self, *args): dot_alpha = 0 if self.selected % self.multiples_of == 0 else 1 if: self._selection_dot_color.a = dot_alpha",False,self._selection_dot_color,self._selection_dot_color is not None,0.6451257467269897 4631,"def Boolean_getValue(self): b = trch.Parameter_Boolean_getValue(self.param) if: return 'True' else: return 'False'",True,b,b,0.6761996746063232 4632,"def int_to_bytes(val, length=None, byteorder='big'): """"""Return number converted to bytes"""""" if: length = byte_length(val) if byteorder == 'big': return bytearray((val >> i & 255 for i in reversed(range(0, length * 8, 8)))) if byteorder == 'little': return bytearray((val >> i & 255 for i in range(0, length * 8, 8))) raise ValueError(""Only 'big' or 'little' endian supported"")",True,length is None,length is None,0.6638147830963135 4633,"def int_to_bytes(val, length=None, byteorder='big'): """"""Return number converted to bytes"""""" if length is None: length = byte_length(val) if: return bytearray((val >> i & 255 for i in reversed(range(0, length * 8, 8)))) if byteorder == 'little': return bytearray((val >> i & 255 for i in range(0, length * 8, 8))) raise ValueError(""Only 'big' or 'little' endian supported"")",True,byteorder == 'big',byteorder == 'big',0.6608785390853882 4634,"def int_to_bytes(val, length=None, byteorder='big'): """"""Return number converted to bytes"""""" if length is None: length = byte_length(val) if byteorder == 'big': return bytearray((val >> i & 255 for i in reversed(range(0, length * 8, 8)))) if: return bytearray((val >> i & 255 for i in range(0, length * 8, 8))) raise ValueError(""Only 'big' or 'little' endian supported"")",True,byteorder == 'little',byteorder == 'little',0.661827564239502 4635,"@property def log(self): """""" class property can be used to override the class global logging setting Returns: bool: True if logging is enable False otherwise """""" if: return self.__log return JobsTableView.classLog()",True,self.__log is not None,self.__log is not None,0.6499423384666443 4636,"def check_firmware_updates(device_info: Dict[FirmwareTarget, DeviceInfoCache], targets: Optional[Set[FirmwareTarget]]=None, force: bool=False) -> Dict[FirmwareTarget, Tuple[int, str]]: """"""Returns a dict of NodeIds that require a firmware update and the path to the file to update them."""""" known_firmware = load_firmware_manifest() if: log.error('Could not load the known firmware.') return devices_to_check = _devices_to_check(device_info, targets) update_types = _update_types_from_devices(devices_to_check) update_info = _info_for_required_updates(force, known_firmware, update_types) update_files = _update_files_from_types(update_info) return {node: (next_version, filepath) for node, next_version, filepath in update_files}",True,known_firmware is None,known_firmware is None,0.6629600524902344 4637,"def create_session(self, image_in, params=None): """""" Create New Session :param image_in: filepath for image to be sent to server as part of session creation :param params: additional JSON params as part of session reqeust :return: json response which contains session id and other details """""" selector = '/session/' params = self._update_client_id(params) status, response, _ = MONAILabelUtils.http_upload('PUT', self._server_url, selector, params, [image_in], headers=self._headers) if: raise MONAILabelClientException(MONAILabelError.SERVER_ERROR, f'Status: {status}; Response: {bytes_to_str(response)}', status, response) response = bytes_to_str(response) logging.debug(f'Response: {response}') return json.loads(response)",True,status != 200,status != 200,0.6772538423538208 4638,"def to_key_val_list(value): """"""Take an object and test to see if it can be represented as a dictionary. If it can be, return a list of tuples, e.g., :: >>> to_key_val_list([('key', 'val')]) [('key', 'val')] >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') ValueError: cannot encode objects that are not 2-tuples. """""" if: return None if isinstance(value, (str, bytes, bool, int)): raise ValueError('cannot encode objects that are not 2-tuples') if isinstance(value, collections.Mapping): value = value.items() return list(value)",True,value is None,value is None,0.6520777344703674 4639,"def to_key_val_list(value): """"""Take an object and test to see if it can be represented as a dictionary. If it can be, return a list of tuples, e.g., :: >>> to_key_val_list([('key', 'val')]) [('key', 'val')] >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') ValueError: cannot encode objects that are not 2-tuples. """""" if value is None: return None if: raise ValueError('cannot encode objects that are not 2-tuples') if isinstance(value, collections.Mapping): value = value.items() return list(value)",True,"isinstance(value, (str, bytes, bool, int))","isinstance(value, (str, bytes, bool, int))",0.6435593366622925 4640,"def to_key_val_list(value): """"""Take an object and test to see if it can be represented as a dictionary. If it can be, return a list of tuples, e.g., :: >>> to_key_val_list([('key', 'val')]) [('key', 'val')] >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') ValueError: cannot encode objects that are not 2-tuples. """""" if value is None: return None if isinstance(value, (str, bytes, bool, int)): raise ValueError('cannot encode objects that are not 2-tuples') if: value = value.items() return list(value)",False,"isinstance(value, collections.Mapping)","isinstance(value, Mapping)",0.640528678894043 4641,"def https_open(self, req): def http_class_wrapper(host, **kwargs): full_kwargs = dict(self._connection_args) full_kwargs.update(kwargs) return ValidatingHTTPSConnection(host, **full_kwargs) try: return self.do_open(http_class_wrapper, req) except URLError as e: if: raise InvalidCertificateException(req.host, '', e.reason.args[1]) raise",False,type(e.reason) == ssl.SSLError and e.reason.args[0] == 1,e.reason and e.reason.args[1] in SSL_SSL_ERROR_IGNORE_CERTIFICATES,0.6458232998847961 4642,"def _get_hdfs_urls(self, source): """"""Recursively list all files inside the source directory on the hdfs filesystem."""""" if: for source in luigi.contrib.hdfs.listdir(source, recursive=True): yield source",False,luigi.contrib.hdfs.exists(source),os.path.isdir(source),0.6473086476325989 4643,"def forward(self, x): if: x = self.relu(self.fuse_equivalent_conv(x)) return x if self.identity: identity_out = self.identity(x) else: identity_out = 0 x = self.relu(self.conv3x3(x) + self.conv1x1(x) + identity_out) return x",False,self.deploy,self.fuse_equivalent_conv is not None,0.6556750535964966 4644,"def forward(self, x): if self.deploy: x = self.relu(self.fuse_equivalent_conv(x)) return x if: identity_out = self.identity(x) else: identity_out = 0 x = self.relu(self.conv3x3(x) + self.conv1x1(x) + identity_out) return x",False,self.identity,self.identity is not None,0.654172420501709 4645,"def receive_alt_svc(self, frame): """""" An Alternative Service frame was received on the stream. This frame inherits the origin associated with this stream. """""" self.config.logger.debug('Receive Alternative Service frame on stream %r', self) if: return ([], []) events = self.state_machine.process_input(StreamInputs.RECV_ALTERNATIVE_SERVICE) if events: assert isinstance(events[0], AlternativeServiceAvailable) events[0].origin = self._authority events[0].field_value = frame.field return ([], events)",False,frame.origin,frame.origin != self._authority,0.6588755249977112 4646,"def receive_alt_svc(self, frame): """""" An Alternative Service frame was received on the stream. This frame inherits the origin associated with this stream. """""" self.config.logger.debug('Receive Alternative Service frame on stream %r', self) if frame.origin: return ([], []) events = self.state_machine.process_input(StreamInputs.RECV_ALTERNATIVE_SERVICE) if: assert isinstance(events[0], AlternativeServiceAvailable) events[0].origin = self._authority events[0].field_value = frame.field return ([], events)",True,events,events,0.6710010766983032 4647,"def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib_name, build_info in self.libraries: sources = build_info.get('sources') if: raise DistutilsSetupError(""in 'libraries' option (library '%s'),'sources' must be present and must be a list of source filenames"" % lib_name) filenames.extend(sources) return filenames",False,"sources is None or not isinstance(sources, (list, tuple))",not sources,0.6466606855392456 4648,"def _shellquote(s): """"""Return a shell-escaped version of the string *s*."""""" if: return ""''"" if _find_unsafe(s) is None: return s return ""'"" + s.replace(""'"", '\'""\'""\'') + ""'""",False,not s,"s == '' or s == '\'""'",0.6767115592956543 4649,"def _shellquote(s): """"""Return a shell-escaped version of the string *s*."""""" if not s: return ""''"" if: return s return ""'"" + s.replace(""'"", '\'""\'""\'') + ""'""",False,_find_unsafe(s) is None,"re.search('[^\\w]', s)",0.6458202600479126 4650,"def add_scalars(self, scalars: Dict[str, Union[float, int]]) -> None: for k in scalars: if: if k not in self._sums: self._sums[k] = float(scalars[k]) self._counts[k] = 1 else: self._sums[k] += float(scalars[k]) self._counts[k] += 1",False,np.isscalar(scalars[k]),k in scalars,0.6465378999710083 4651,"def add_scalars(self, scalars: Dict[str, Union[float, int]]) -> None: for k in scalars: if np.isscalar(scalars[k]): if: self._sums[k] = float(scalars[k]) self._counts[k] = 1 else: self._sums[k] += float(scalars[k]) self._counts[k] += 1",False,k not in self._sums,k not in self._counts,0.6533949375152588 4652,"def init_weights(self): for m in self.modules(): if: kaiming_init(m) for m in self.modules(): if isinstance(m, nn.ConvTranspose2d): normal_init(m, std=0.001) nn.init.constant_(self.deconv2.bias, -np.log(0.99 / 0.01))",True,"isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear)","isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear)",0.6486942768096924 4653,"def init_weights(self): for m in self.modules(): if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear): kaiming_init(m) for m in self.modules(): if: normal_init(m, std=0.001) nn.init.constant_(self.deconv2.bias, -np.log(0.99 / 0.01))",True,"isinstance(m, nn.ConvTranspose2d)","isinstance(m, nn.ConvTranspose2d)",0.6464095711708069 4654,"def speed_limit_value_for_limit_string(limit_string): v = re.match('^\\s*([0-9]{1,3})\\s*?(mph)?\\s*$', limit_string) if: return None conv = CV.MPH_TO_MS if v[2] is not None and v[2] =='mph' else CV.KPH_TO_MS return conv * float(v[1])",True,v is None,v is None,0.6629014015197754 4655,"def _layFlatFinished(self, job): """"""Called when the LayFlatJob is done running all of its LayFlatOperations :param job: type(LayFlatJob) """""" if: self._progress_message.hide() self._progress_message = None self.operationStopped.emit(self)",False,self._progress_message,self._progress_message is not None,0.6541755199432373 4656,"def plot_gradient_wrapper(x, plot=False): """"""wrapper that makes plots (when desired)"""""" v = grad_function(x) if: plt.plot(x[0], x[1], '+ ', color='k', ms=10) plt.plot([x[0], v[0] * 0.01 + x[0]], [x[1], v[1] * 0.01 + x[1]], color='gray') return v",True,plot,plot,0.6835861802101135 4657,"def requires_grad(m: nn.Module, b: Optional[bool]=None) -> Optional[bool]: """"""If `b` is not set return `requires_grad` of first param, else set `requires_grad` on all params as `b`"""""" ps = list(m.parameters()) if: return None if b is None: return ps[0].requires_grad for p in ps: p.requires_grad = b",False,not ps,len(ps) == 0,0.6704728007316589 4658,"def requires_grad(m: nn.Module, b: Optional[bool]=None) -> Optional[bool]: """"""If `b` is not set return `requires_grad` of first param, else set `requires_grad` on all params as `b`"""""" ps = list(m.parameters()) if not ps: return None if: return ps[0].requires_grad for p in ps: p.requires_grad = b",False,b is None,len(ps) == 1,0.6602015495300293 4659,"def get_yolox_datadir(): """""" get dataset dir of YOLOX. If environment variable named `YOLOX_DATADIR` is set, this function will return value of the environment variable. Otherwise, use data """""" yolox_datadir = os.getenv('YOLOX_DATADIR', None) if: import yolox yolox_path = os.path.dirname(os.path.dirname(yolox.__file__)) yolox_datadir = os.path.join(yolox_path, 'data') return yolox_datadir",True,yolox_datadir is None,yolox_datadir is None,0.6506054997444153 4660,"def encode(self, text): z = self(text) if: z = z[:, None, :] z = repeat(z, 'b 1 d -> b k d', k=self.n_repeat) return z",False,z.ndim == 2,len(z.shape) == 3,0.6533056497573853 4661,"def initialize_field(self, field_name, value): """""" Make sure the value is compatible with the field and assign that value to the field. Arguments: field_name (string): The name of the field that is being set. value (object): The value to assign to the field. """""" field_obj = self.get_fields()[field_name] validation_errors = field_obj.validate(value) if: raise ValueError('Unable to assign the value {value} to the field named ""{name}"": {errors}'.format(value=repr(value), name=field_name, errors=', '.join(validation_errors))) else: setattr(self, field_name, value)",False,len(validation_errors) > 0,validation_errors,0.6454794406890869 4662,"def cancel_task_safe(self, task: Optional[asyncio.Task]): if: try: task.cancel() except Exception as e: self.log.error(f'Error while canceling task.{e} {task}')",True,task is not None,task is not None,0.6624230146408081 4663,"def allow_relation(self, obj1, obj2, **hints): """"""Allow any relation between apps that use the same database."""""" db_obj1 = DATABASE_MAPPING.get(obj1._meta.app_label) db_obj2 = DATABASE_MAPPING.get(obj2._meta.app_label) if: if db_obj1 == db_obj2: return True else: return False return None",True,db_obj1 and db_obj2,db_obj1 and db_obj2,0.6513170003890991 4664,"def allow_relation(self, obj1, obj2, **hints): """"""Allow any relation between apps that use the same database."""""" db_obj1 = DATABASE_MAPPING.get(obj1._meta.app_label) db_obj2 = DATABASE_MAPPING.get(obj2._meta.app_label) if db_obj1 and db_obj2: if: return True else: return False return None",False,db_obj1 == db_obj2,obj1 == db_obj2,0.6503313183784485 4665,"def get_target_list(self, module): """""" the Requires= in target units are only accepted if known """""" target = module if: target += '.target' targets = [target] conf = self.get_target_conf(module) requires = conf.get(Unit, 'Requires', '') while requires in target_requires: targets = [requires] + targets requires = target_requires[requires] logg.debug('the %s requires %s', module, targets) return targets",False,'.' not in target,not target.endswith('.target'),0.668688178062439 4666,"def split_on_token(tok, text): result = [] split_text = text.split(tok) for i, sub_text in enumerate(split_text): sub_text = sub_text.rstrip() if: result += [tok] elif i == len(split_text) - 1: if sub_text: result += [sub_text] else: pass else: if sub_text: result += [sub_text] result += [tok] return result",True,i == 0 and (not sub_text),i == 0 and (not sub_text),0.6456301212310791 4667,"def split_on_token(tok, text): result = [] split_text = text.split(tok) for i, sub_text in enumerate(split_text): sub_text = sub_text.rstrip() if i == 0 and (not sub_text): result += [tok] elif: if sub_text: result += [sub_text] else: pass else: if sub_text: result += [sub_text] result += [tok] return result",True,i == len(split_text) - 1,i == len(split_text) - 1,0.6448108553886414 4668,"def split_on_token(tok, text): result = [] split_text = text.split(tok) for i, sub_text in enumerate(split_text): sub_text = sub_text.rstrip() if i == 0 and (not sub_text): result += [tok] elif i == len(split_text) - 1: if: result += [sub_text] else: pass else: if sub_text: result += [sub_text] result += [tok] return result",True,sub_text,sub_text,0.6550607681274414 4669,"def split_on_token(tok, text): result = [] split_text = text.split(tok) for i, sub_text in enumerate(split_text): sub_text = sub_text.rstrip() if i == 0 and (not sub_text): result += [tok] elif i == len(split_text) - 1: if sub_text: result += [sub_text] else: pass else: if: result += [sub_text] result += [tok] return result",True,sub_text,sub_text,0.6576489210128784 4670,"def forward(self, x): """"""Returns the normalized tensor. Arguments --------- x : torch.Tensor Tensor size [N, C, K, S] or [N, C, L] """""" if: x = x.permute(0, 2, 3, 1).contiguous() x = super().forward(x) x = x.permute(0, 3, 1, 2).contiguous() if x.dim() == 3: x = torch.transpose(x, 1, 2) x = super().forward(x) x = torch.transpose(x, 1, 2) return x",False,x.dim() == 4,x.dim() == 2,0.6500344276428223 4671,"def forward(self, x): """"""Returns the normalized tensor. Arguments --------- x : torch.Tensor Tensor size [N, C, K, S] or [N, C, L] """""" if x.dim() == 4: x = x.permute(0, 2, 3, 1).contiguous() x = super().forward(x) x = x.permute(0, 3, 1, 2).contiguous() if: x = torch.transpose(x, 1, 2) x = super().forward(x) x = torch.transpose(x, 1, 2) return x",False,x.dim() == 3,x.dim() == 2,0.6478683352470398 4672,"def skip_empty_filter(result): """""" Filters out empty results. """""" if: return if isinstance(result, dict): if not [v for k, v in result.iteritems() if not k.startswith('_') and v]: return return result",False,not result,result is None,0.6601911783218384 4673,"def skip_empty_filter(result): """""" Filters out empty results. """""" if not result: return if: if not [v for k, v in result.iteritems() if not k.startswith('_') and v]: return return result",False,"isinstance(result, dict)","hasattr(result, 'iteritems')",0.6466438174247742 4674,"def skip_empty_filter(result): """""" Filters out empty results. """""" if not result: return if isinstance(result, dict): if: return return result",False,"not [v for k, v in result.iteritems() if not k.startswith('_') and v]",'*' in result,0.6433358192443848 4675,"def options(self, context, module_options): """""" PID // Process ID for Target User, PID=pid EXEC // Command to exec, EXEC='command' Single quote is better to use This module reads the executed command output under the name C:\\windows emp\\output.txt and deletes it. In case of a possible error, it may need to be deleted manually. """""" self.tmp_dir = 'C:\\Windows\\Temp\\' self.share = 'C$' self.tmp_share = self.tmp_dir.split(':')[1] self.pi = 'pi.exe' self.useembeded = True self.pid = self.cmd = '' self.pi_embedded = b64decode('TVqQAAMAAAAEAAAA//8AALgAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA+AAAAA4fug4AtAnNIbgBTM0hVGhpcyBwcm9ncmFtIGNhbm5vdCBiZSBydW4gaW4gRE9TIG1vZGUuDQ0KJAAAAAAAAACKLjEtzk9ffs5PX37OT19+2iRcf8RPX37aJFt/3U9fftokWn9hT19+rjVbf99PX36uNVx/xE9ffq41Wn+DT19+2iRef8tPX37OT15+p09ffqo1Vn/PT19+qjWgfs9PX36qNV1/z09fflJpY2jOT19+AAAAAAAAAABQRQAAZIYHADaCx2QAAAAAAAAAAPAAIgALAg4gALYCAACaAQAAAAAAgIQAAAAQAAAAAABAAQAAAAAQAAAAAgAABgAAAAAAAAAGAAAAAAAAAACgBAAABAAAAAAAAAMAYIEAABAAAAAAAAAQAAAAAAAAAAAQAAAAAAAAEAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAM/gMAPAAAAACABADgAQAAAEAEAGAkAAAAAAAAAAAAAACQBACkCQAAQL4DAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAvQMAQAEAAAAAAAAAAAAAANACAPgCAAAAAAAAAAAAAAAAAAAAAAAAA",False,'EXEC' in module_options,self.pid == None,0.655619740486145 4676,"def options(self, context, module_options): """""" PID // Process ID for Target User, PID=pid EXEC // Command to exec, EXEC='command' Single quote is better to use This module reads the executed command output under the name C:\\windows emp\\output.txt and deletes it. In case of a possible error, it may need to be deleted manually. """""" self.tmp_dir = 'C:\\Windows\\Temp\\' self.share = 'C$' self.tmp_share = self.tmp_dir.split(':')[1] self.pi = 'pi.exe' self.useembeded = True self.pid = self.cmd = '' self.pi_embedded = b64decode('TVqQAAMAAAAEAAAA//8AALgAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA+AAAAA4fug4AtAnNIbgBTM0hVGhpcyBwcm9ncmFtIGNhbm5vdCBiZSBydW4gaW4gRE9TIG1vZGUuDQ0KJAAAAAAAAACKLjEtzk9ffs5PX37OT19+2iRcf8RPX37aJFt/3U9fftokWn9hT19+rjVbf99PX36uNVx/xE9ffq41Wn+DT19+2iRef8tPX37OT15+p09ffqo1Vn/PT19+qjWgfs9PX36qNV1/z09fflJpY2jOT19+AAAAAAAAAABQRQAAZIYHADaCx2QAAAAAAAAAAPAAIgALAg4gALYCAACaAQAAAAAAgIQAAAAQAAAAAABAAQAAAAAQAAAAAgAABgAAAAAAAAAGAAAAAAAAAACgBAAABAAAAAAAAAMAYIEAABAAAAAAAAAQAAAAAAAAAAAQAAAAAAAAEAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAM/gMAPAAAAACABADgAQAAAEAEAGAkAAAAAAAAAAAAAACQBACkCQAAQL4DAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAvQMAQAEAAAAAAAAAAAAAANACAPgCAAAAAAAAAAAAAAAAAAAAAAAAA",False,'PID' in module_options,self.pid == None,0.6547132730484009 4677,"def assert_native(n): if: raise TypeError('n must be a native str (got %s)' % type(n).__name__)",False,"not isinstance(n, nativestr)","not isinstance(n, str)",0.659691572189331 4678,"def news_saved(instance, created, **kwargs): """"""Notify users when a followed body adds new news."""""" if: users = User.objects.filter(id__in=instance.body.followers.filter(active=True).values('user_id')) notify.send(instance, recipient=users, verb=instance.body.name +'added a new news article')",False,created and instance.body and instance.notify,created,0.6505874991416931 4679,"def __init__(self, endog, exog, exog_var=None, weights=None, link=None): self.exog_var = atleast_2dcols(exog_var) if: weights = np.ones(endog.shape) if link is not None: self.link = link self.linkinv = link.inverse else: self.link = lambda x: x self.linkinv = lambda x: x super(self.__class__, self).__init__(endog, exog, weights=weights)",True,weights is None,weights is None,0.658107578754425 4680,"def __init__(self, endog, exog, exog_var=None, weights=None, link=None): self.exog_var = atleast_2dcols(exog_var) if weights is None: weights = np.ones(endog.shape) if: self.link = link self.linkinv = link.inverse else: self.link = lambda x: x self.linkinv = lambda x: x super(self.__class__, self).__init__(endog, exog, weights=weights)",True,link is not None,link is not None,0.6544852256774902 4681,"def setup(self, config, base, xsize, ysize, ignore, logger): """"""Do the appropriate setup for a Blend stamp. """""" if: self.first = None req = {'n_neighbors': int,'min_sep': float,'max_sep': float} galsim.config.CheckAllParams(config, req=req, ignore=ignore) ignore = ignore + ['n_neighbors','min_sep','max_sep'] return super(BlendSetBuilder, self).setup(config, base, xsize, ysize, ignore, logger)",False,base['obj_num'] == base['start_obj_num'],config['use_first'],0.6462757587432861 4682,"def validate_CurrencyCodeType19(self, value): result = True if: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False if len(value)!= 3: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s does not match xsd length restriction on CurrencyCodeType19' % {'value': encode_str_2_3(value), 'lineno': lineno}) result = False return result",True,value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None),value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None),0.6461988091468811 4683,"def validate_CurrencyCodeType19(self, value): result = True if value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None): if: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False if len(value)!= 3: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s does not match xsd length restriction on CurrencyCodeType19' % {'value': encode_str_2_3(value), 'lineno': lineno}) result = False return result",True,"not isinstance(value, str)","not isinstance(value, str)",0.6449435949325562 4684,"def validate_CurrencyCodeType19(self, value): result = True if value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None): if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False if: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s does not match xsd length restriction on CurrencyCodeType19' % {'value': encode_str_2_3(value), 'lineno': lineno}) result = False return result",False,len(value) != 3,len(value) != 2,0.6496416330337524 4685,"def get_signer(type, specifier=None): """"""Load a signer module based on the arguments. if type is PYPATH_MODULE_SIGNER, the specifier should be the python module. i.e. edk2toolext.capsule.pyopenssl_signer if the type is LOCAL_MODULE_SIGNER, the specifier should be a filesystem path to a Python module that can be loaded as the signer. Args: type (str): PYOPENSSL_SIGNER, SIGNTOOL_SIGNER, PYPATH_MODULE_SIGNER, LOCAL_MODULE_SIGNER specifier (module): python module to import """""" if: try: from edk2toolext.capsule import pyopenssl_signer return pyopenssl_signer except ModuleNotFoundError: raise RuntimeError('PyOpenSsl Signer failed to load. Do you have pyopenssl installed?') elif type == SIGNTOOL_SIGNER: return signtool_signer elif type == PYPATH_MODULE_SIGNER: return importlib.import_module(specifier) elif type == LOCAL_MODULE_SIGNER: return import_module_by_file_name(specifier) else: return None",True,type == PYOPENSSL_SIGNER,type == PYOPENSSL_SIGNER,0.654106855392456 4686,"def get_signer(type, specifier=None): """"""Load a signer module based on the arguments. if type is PYPATH_MODULE_SIGNER, the specifier should be the python module. i.e. edk2toolext.capsule.pyopenssl_signer if the type is LOCAL_MODULE_SIGNER, the specifier should be a filesystem path to a Python module that can be loaded as the signer. Args: type (str): PYOPENSSL_SIGNER, SIGNTOOL_SIGNER, PYPATH_MODULE_SIGNER, LOCAL_MODULE_SIGNER specifier (module): python module to import """""" if type == PYOPENSSL_SIGNER: try: from edk2toolext.capsule import pyopenssl_signer return pyopenssl_signer except ModuleNotFoundError: raise RuntimeError('PyOpenSsl Signer failed to load. Do you have pyopenssl installed?') elif: return signtool_signer elif type == PYPATH_MODULE_SIGNER: return importlib.import_module(specifier) elif type == LOCAL_MODULE_SIGNER: return import_module_by_file_name(specifier) else: return None",True,type == SIGNTOOL_SIGNER,type == SIGNTOOL_SIGNER,0.6520167589187622 4687,"def get_signer(type, specifier=None): """"""Load a signer module based on the arguments. if type is PYPATH_MODULE_SIGNER, the specifier should be the python module. i.e. edk2toolext.capsule.pyopenssl_signer if the type is LOCAL_MODULE_SIGNER, the specifier should be a filesystem path to a Python module that can be loaded as the signer. Args: type (str): PYOPENSSL_SIGNER, SIGNTOOL_SIGNER, PYPATH_MODULE_SIGNER, LOCAL_MODULE_SIGNER specifier (module): python module to import """""" if type == PYOPENSSL_SIGNER: try: from edk2toolext.capsule import pyopenssl_signer return pyopenssl_signer except ModuleNotFoundError: raise RuntimeError('PyOpenSsl Signer failed to load. Do you have pyopenssl installed?') elif type == SIGNTOOL_SIGNER: return signtool_signer elif: return importlib.import_module(specifier) elif type == LOCAL_MODULE_SIGNER: return import_module_by_file_name(specifier) else: return None",True,type == PYPATH_MODULE_SIGNER,type == PYPATH_MODULE_SIGNER,0.655443549156189 4688,"def get_signer(type, specifier=None): """"""Load a signer module based on the arguments. if type is PYPATH_MODULE_SIGNER, the specifier should be the python module. i.e. edk2toolext.capsule.pyopenssl_signer if the type is LOCAL_MODULE_SIGNER, the specifier should be a filesystem path to a Python module that can be loaded as the signer. Args: type (str): PYOPENSSL_SIGNER, SIGNTOOL_SIGNER, PYPATH_MODULE_SIGNER, LOCAL_MODULE_SIGNER specifier (module): python module to import """""" if type == PYOPENSSL_SIGNER: try: from edk2toolext.capsule import pyopenssl_signer return pyopenssl_signer except ModuleNotFoundError: raise RuntimeError('PyOpenSsl Signer failed to load. Do you have pyopenssl installed?') elif type == SIGNTOOL_SIGNER: return signtool_signer elif type == PYPATH_MODULE_SIGNER: return importlib.import_module(specifier) elif: return import_module_by_file_name(specifier) else: return None",True,type == LOCAL_MODULE_SIGNER,type == LOCAL_MODULE_SIGNER,0.6576107740402222 4689,"def dist_validation(self, dataloader, current_iter, tb_logger, save_img): if: self.nondist_validation(dataloader, current_iter, tb_logger, save_img)",True,self.opt['rank'] == 0,self.opt['rank'] == 0,0.6469457149505615 4690,"def __contains__(self, item): if: return item in self._data.keys() return item in self._data.values()",False,"isinstance(item, str)","isinstance(item, dict)",0.6484543681144714 4691,"def _createPayslip(self, employee, date_from, date_to, skip_compute=False): slip = self.env['hr.payslip'].create({'name': 'Test %s From: %s To: %s' % (employee.name, date_from, date_to), 'employee_id': employee.id, 'date_from': date_from, 'date_to': date_to}) slip._onchange_employee() if: slip.compute_sheet() return slip",False,not skip_compute,skip_compute,0.6512612104415894 4692,"def _get_post_like_button(self) -> Optional[DeviceFacade.View]: post_media_view = self.device.find(resourceIdMatches=ResourceID.MEDIA_CONTAINER) if: attempt = 0 while True: like_button = post_media_view.down(resourceIdMatches=ResourceID.ROW_FEED_BUTTON_LIKE) if like_button.viewV2 is not None or attempt == 3: return like_button if like_button.exists() else None UniversalActions(self.device)._swipe_points(direction=Direction.DOWN, delta_y=100) attempt += 1 return None",False,post_media_view.exists(Timeout.MEDIUM),post_media_view.exists(),0.6488431692123413 4693,"def _get_post_like_button(self) -> Optional[DeviceFacade.View]: post_media_view = self.device.find(resourceIdMatches=ResourceID.MEDIA_CONTAINER) if post_media_view.exists(Timeout.MEDIUM): attempt = 0 while True: like_button = post_media_view.down(resourceIdMatches=ResourceID.ROW_FEED_BUTTON_LIKE) if: return like_button if like_button.exists() else None UniversalActions(self.device)._swipe_points(direction=Direction.DOWN, delta_y=100) attempt += 1 return None",False,like_button.viewV2 is not None or attempt == 3,attempt == 0,0.6468037962913513 4694,"def validate_authority_link_value(authority_link_value, authority_sources): """"""Validates that the value in 'authority_link_value' has a'source' and that the URI component starts with either 'http://' or 'https://'. """""" 'Parameters\n ----------\n authority_link_value : string\n The authority link string, with a sourcea, URI, and optionally a title.\n authority_sources : list\n The list of authority sources (e.g. lcsh, cash, viaf, etc.) configured for the field.\n Returns\n -------\n boolean\n True if it does, False if not.\n ' parts = authority_link_value.split('%%', 2) if: return False if re.match('^https?://', parts[1]): return True else: return False",False,parts[0] not in authority_sources,len(parts) != 3,0.6449130773544312 4695,"def validate_authority_link_value(authority_link_value, authority_sources): """"""Validates that the value in 'authority_link_value' has a'source' and that the URI component starts with either 'http://' or 'https://'. """""" 'Parameters\n ----------\n authority_link_value : string\n The authority link string, with a sourcea, URI, and optionally a title.\n authority_sources : list\n The list of authority sources (e.g. lcsh, cash, viaf, etc.) configured for the field.\n Returns\n -------\n boolean\n True if it does, False if not.\n ' parts = authority_link_value.split('%%', 2) if parts[0] not in authority_sources: return False if: return True else: return False",False,"re.match('^https?://', parts[1])",authority_link_value[parts[1]] == 'http://',0.638155460357666 4696,"def load_model(self): if: return self._load_override_fn(self._model_path, self._tags) self._maybe_register_addon_ops() result = tf.compat.v1.Session(graph=tf.compat.v1.Graph()) tf.compat.v1.saved_model.loader.load(result, self._tags, self._model_path) return result",False,self._load_override_fn,self._load_override_fn is not None,0.6487181186676025 4697,"def check_index(self, i): if: raise IndexError('index out of range')",True,i < 0 or i >= self._len,i < 0 or i >= self._len,0.6560987234115601 4698,"def factory(*args_, **kwargs_): if: subclass = getSubclassFromModule_(CurrentSubclassModule_, Status) if subclass is not None: return subclass(*args_, **kwargs_) if Status.subclass: return Status.subclass(*args_, **kwargs_) else: return Status(*args_, **kwargs_)",True,CurrentSubclassModule_ is not None,CurrentSubclassModule_ is not None,0.6497491598129272 4699,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, Status) if subclass is not None: return subclass(*args_, **kwargs_) if: return Status.subclass(*args_, **kwargs_) else: return Status(*args_, **kwargs_)",True,Status.subclass,Status.subclass,0.6683328747749329 4700,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, Status) if: return subclass(*args_, **kwargs_) if Status.subclass: return Status.subclass(*args_, **kwargs_) else: return Status(*args_, **kwargs_)",True,subclass is not None,subclass is not None,0.6617977619171143 4701,"def is_system_running_info(self): state = self.is_system_running() if: self.error |= NOT_OK if self._quiet: return None return state",False,state not in ['running'],state,0.6532715559005737 4702,"def is_system_running_info(self): state = self.is_system_running() if state not in ['running']: self.error |= NOT_OK if: return None return state",False,self._quiet,state == NOT_OK,0.6648702025413513 4703,"def check_profit(self, curr_price, margin): if: return True else: return False",False,curr_price - self.entry_price) * sign(margin) * self.direction >= abs(margin,self.direction * (self.price - curr_price) >= margin,0.641657292842865 4704,"def delete_nth(order, max_e): counts = defaultdict(int) result = [] for a in order: if: result.append(a) counts[a] += 1 return result",False,counts[a] < max_e,a >= max_e,0.648322343826294 4705,"def get_mask(encode_lengths, pad_length): use_cuda = encode_lengths.is_cuda batch_size = encode_lengths.size(0) index = torch.arange(pad_length) if: index = index.cuda() mask = (index.unsqueeze(0).expand(batch_size, -1) >= encode_lengths.unsqueeze(-1)).byte() mask[mask.sum(dim=-1) == pad_length, 0] = 0 return mask",True,use_cuda,use_cuda,0.658420741558075 4706,"def is_tuple_end(self, extra_end_rules=None): """"""Are we at the end of a tuple?"""""" if: return True elif extra_end_rules is not None: return self.stream.current.test_any(extra_end_rules) return False",False,"self.stream.current.type in ('variable_end', 'block_end', 'rparen')","self.stream.current.type in ('variable_end', 'block_end')",0.6427772641181946 4707,"def is_tuple_end(self, extra_end_rules=None): """"""Are we at the end of a tuple?"""""" if self.stream.current.type in ('variable_end', 'block_end', 'rparen'): return True elif: return self.stream.current.test_any(extra_end_rules) return False",True,extra_end_rules is not None,extra_end_rules is not None,0.6472643613815308 4708,"def mask_sequences(sequence, sequence_length, dtype=None, time_major=False, tensor_rank=2): """"""Masks out sequence entries that are beyond the respective sequence lengths. Masks along the time dimension. :attr:`sequence` and :attr:`sequence_length` can either be python arrays or Tensors, respectively. If both are python arrays (or None), the return will be a python array as well. :attr:`tensor_rank` is ignored when :attr:`sequence` and :attr:`sequence_length` are both python arrays (rather than Tensors). Args: sequence: A Tensor or python array of sequence values. If `time_major=False` (default), this must be a Tensor of shape: `[batch_size, max_time,...]`. If `time_major=True`, this must be a Tensor of shape: `[max_time, batch_size,...].` sequence_length: A Tensor or python array of shape `[batch_size]`. Time steps beyond the respective sequence lengths will be made zero. dtype (dtype): Type of :attr:`sequence`. If `None`, infer from :attr:`sequence` automatically. time_major (bool): The shape format of the inputs. If `True`, :attr:`sequence` must have shape `[max_time, batch_size,...]`. If `False` (default), :attr:`sequence` must have shape `[batch_size, max_time,...]`. tensor_rank (int): The number of dimensions of :attr:`sequence`. Default is 2, i.e., :attr:`sequence` is a 2D Tensor consisting of batch and time dimensions. Ignored if both :attr:`sequence` and :attr:`sequence_length` are python arrays. Returns: The masked sequence, i.e., a Tensor or python array of the same shape as :attr:`sequence` but with masked-out entries (set to zero). If both :attr:`sequence` and :attr:`sequence_length` are python arrays, the returned value",False,is_tensor(sequence) or is_tensor(sequence_length),sequence_length is not None,0.6427222490310669 4709,"def validate_mysql_server(self): """"""Validates the mysql server, makes it default to the same host as the logserver. Exits if invalid name"""""" if: self.mysql_server = self.logserver if not self.re_validation.match(self.mysql_server): end(UNKNOWN,'mysql server name/ip address supplied contains'+ 'unusable characters')",True,self.mysql_server is None,self.mysql_server is None,0.6538941860198975 4710,"def validate_mysql_server(self): """"""Validates the mysql server, makes it default to the same host as the logserver. Exits if invalid name"""""" if self.mysql_server is None: self.mysql_server = self.logserver if: end(UNKNOWN,'mysql server name/ip address supplied contains'+ 'unusable characters')",True,not self.re_validation.match(self.mysql_server),not self.re_validation.match(self.mysql_server),0.6436284780502319 4711,"def getFileType(f): if: return None r = oscall('file ""%s""' % f, True) if r is not None and isinstance(r, tuple): return r[0] else: return None",False,not exists(f),f is None,0.6492575407028198 4712,"def getFileType(f): if not exists(f): return None r = oscall('file ""%s""' % f, True) if: return r[0] else: return None",False,"r is not None and isinstance(r, tuple)",r,0.6444479823112488 4713,"@property def with_semantic(self): if: return True else: return False",True,"hasattr(self, 'semantic_head') and self.semantic_head is not None","hasattr(self, 'semantic_head') and self.semantic_head is not None",0.6438892483711243 4714,"def _stop_publisher_thread(self): log.debug('Signal shutdown') self._publisher_shutdown = True if: self._publisher_thread.kill(block=False) log.debug('shutdown complete')",True,self._publisher_thread,self._publisher_thread,0.65718674659729 4715,"def forward(self, samples: Tensor, X: Optional[Tensor]=None) -> Tensor: """"""Evaluate the linear objective on the samples. Args: samples: A `sample_shape x batch_shape x q x m`-dim tensors of samples from a model posterior. X: A `batch_shape x q x d`-dim tensor of inputs. Relevant only if the objective depends on the inputs explicitly. Returns: A `sample_shape x batch_shape x q`-dim tensor of objective values. """""" if: raise RuntimeError('Output shape of samples not equal to that of weights') return torch.einsum('...m, m', [samples, self.weights])",False,samples.shape[-1] != self.weights.shape[-1],samples.shape != self.weights.shape,0.6448689699172974 4716,"def _get_full_incremental_state_key(module_instance, key): module_name = module_instance.__class__.__name__ if: INCREMENTAL_STATE_INSTANCE_ID[module_name] += 1 module_instance._fairseq_instance_id = INCREMENTAL_STATE_INSTANCE_ID[module_name] return '{}.{}.{}'.format(module_name, module_instance._fairseq_instance_id, key)",False,"not hasattr(module_instance, '_fairseq_instance_id')",module_name in INincrementalAL_STATE_INSTANCE_ID,0.6466797590255737 4717,"def params_to_string(params_num): """"""converting number to string :param float params_num: number :returns str: number >>> params_to_string(1e9) '1000.0 M' >>> params_to_string(2e5) '200.0 k' >>> params_to_string(3e-9) '3e-09' """""" if: return str(round(params_num / 10 ** 6, 2)) +'M' elif params_num // 10 ** 3: return str(round(params_num / 10 ** 3, 2)) +'k' else: return str(params_num)",True,params_num // 10 ** 6 > 0,params_num // 10 ** 6 > 0,0.6526420712471008 4718,"def params_to_string(params_num): """"""converting number to string :param float params_num: number :returns str: number >>> params_to_string(1e9) '1000.0 M' >>> params_to_string(2e5) '200.0 k' >>> params_to_string(3e-9) '3e-09' """""" if params_num // 10 ** 6 > 0: return str(round(params_num / 10 ** 6, 2)) +'M' elif: return str(round(params_num / 10 ** 3, 2)) +'k' else: return str(params_num)",True,params_num // 10 ** 3,params_num // 10 ** 3,0.6531988978385925 4719,"@hs.composite def arguments_node(draw, annotated=False): n = draw(hs.integers(min_value=1, max_value=5)) args = draw(hs.lists(name_node(None), min_size=n, max_size=n)) if: annotations = draw(hs.lists(name_node(annotation), min_size=n, max_size=n)) else: annotations = None node = nodes.Arguments() node.postinit(args, None, None, None, annotations) return node",True,annotated,annotated,0.6733829975128174 4720,"@property def NPCType(self): if: return self._entity_data.get('NPCType') return None",True,'NPCType' in self._entity_data,'NPCType' in self._entity_data,0.6512893438339233 4721,"def GetEnvironFallback(var_list, default): """"""Look up a key in the environment, with fallback to secondary keys and finally falling back to a default value."""""" for var in var_list: if: return os.environ[var] return default",True,var in os.environ,var in os.environ,0.6585710644721985 4722,"def delete(self, *args, broadcast=True, **kwargs): """"""nevermind, undo that activity"""""" user = self.user if hasattr(self, 'user') else self.user_subject if: self.broadcast(self.to_undo_activity(), user) super().delete(*args, **kwargs)",False,broadcast and user.local,broadcast,0.6489737629890442 4723,"@staticmethod def StringV(value, size): if: raise BackendError(""can't handle empty Strings"") return strings.StringV(value)",False,not value,value == '',0.6576920747756958 4724,"def __del__(self): """""" Clear surface and ogl_screen when when the objects is deconstructed. """""" try: self.clear_surface() except Exception: pass if: try: os.remove(self._compression_filename) except Exception: pass",False,self._compression_filename is not None,self._compression_filename,0.6459630727767944 4725,"def __init__(self, config, model, dataloader, evaluator): """""" Args: config (config): An instance object of Config, used to record parameter information. model (Model): An object of deep-learning model. dataloader (Dataloader): dataloader object. evaluator (Evaluator): evaluator object. expected that config includes these parameters below: learning_rate (float): learning rate of model train_batch_size (int): the training batch size. epoch_nums (int): number of epochs. step_size (int): step_size of scheduler. trained_model_path (str): a path of file which is used to save parameters of best model. checkpoint_path (str): a path of file which is used save checkpoint of training progress. output_path (str|None): a path of a json file which is used to save test output infomation fo model. resume (bool): start training from last checkpoint. validset_divide (bool): whether to split validset. if True, the dataset is split to trainset-validset-testset. if False, the dataset is split to trainset-testset. test_step (int): the epoch number of training after which conducts the evaluation on test. best_folds_accuracy (list|None): when running k-fold cross validation, this keeps the accuracy of folds that already run. """""" super().__init__(config, model, dataloader, evaluator) self._build_optimizer() if: self._load_checkpoint()",True,config['resume'] or config['training_resume'],config['resume'] or config['training_resume'],0.6380393505096436 4726,"def is_in_immediate_fanin(self, t1, t2): assert self.canonical for fanin in t2.fanin: if: return True return False",False,fanin.source == t1,fanin.name == t1.name and fanin.state == t2.state,0.65599524974823 4727,"def check_volumes_mounted(vm_list): """""" Return error_info if any vm in @param vm_list have docker volume mounted """""" for vm_id, _ in vm_list: vm = vmdk_ops.findVmByUuid(vm_id) if: for d in vm.config.hardware.device: if find_dvs_volume(d): error_info = generate_error_info(ErrorCode.VM_WITH_MOUNTED_VOLUMES, vm.config.name) return error_info else: error_info = generate_error_info(ErrorCode.VM_NOT_FOUND, vm_id) return error_info return None",True,vm,vm,0.6796756982803345 4728,"def check_volumes_mounted(vm_list): """""" Return error_info if any vm in @param vm_list have docker volume mounted """""" for vm_id, _ in vm_list: vm = vmdk_ops.findVmByUuid(vm_id) if vm: for d in vm.config.hardware.device: if: error_info = generate_error_info(ErrorCode.VM_WITH_MOUNTED_VOLUMES, vm.config.name) return error_info else: error_info = generate_error_info(ErrorCode.VM_NOT_FOUND, vm_id) return error_info return None",False,find_dvs_volume(d),d.volume_type == 'volume',0.64353346824646 4729,"def process(value): if: value = impl_processor(value) if value is None: return None full_obj = parse_obj_as(pydantic_type, value) return full_obj",False,impl_processor,impl_processor is not None,0.6577320098876953 4730,"def process(value): if impl_processor: value = impl_processor(value) if: return None full_obj = parse_obj_as(pydantic_type, value) return full_obj",True,value is None,value is None,0.6556681394577026 4731,"def format_header_param(name, value): """""" Helper function to format and quote a single header parameter. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows RFC 2231, as suggested by RFC 2388 Section 4.4. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. """""" if: result = '%s=""%s""' % (name, value) try: result.encode('ascii') except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result if not six.PY3 and isinstance(value, six.text_type): value = value.encode('utf-8') value = email.utils.encode_rfc2231(value, 'utf-8') value = '%s*=%s' % (name, value) return value",True,"not any((ch in value for ch in '""\\\r\n'))","not any((ch in value for ch in '""\\\r\n'))",0.644589900970459 4732,"def format_header_param(name, value): """""" Helper function to format and quote a single header parameter. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows RFC 2231, as suggested by RFC 2388 Section 4.4. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. """""" if not any((ch in value for ch in '""\\\r\n')): result = '%s=""%s""' % (name, value) try: result.encode('ascii') except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result if: value = value.encode('utf-8') value = email.utils.encode_rfc2231(value, 'utf-8') value = '%s*=%s' % (name, value) return value",False,"not six.PY3 and isinstance(value, six.text_type)",not six.PY3,0.641672670841217 4733,"def _apply_failure(self, fn, filename, expected_exception, check_fn_in_exception=True): with self.assertRaises(expected_exception) as c: fn(filename) exc_filename = c.exception.filename if: filename = filename.encode(sys.getfilesystemencoding()) if check_fn_in_exception: self.assertEqual(exc_filename, filename, ""Function '%s(%r) failed with bad filename in the exception: %r"" % (fn.__name__, filename, exc_filename))",False,"isinstance(exc_filename, str)","isinstance(filename, unicode)",0.6483209133148193 4734,"def _apply_failure(self, fn, filename, expected_exception, check_fn_in_exception=True): with self.assertRaises(expected_exception) as c: fn(filename) exc_filename = c.exception.filename if isinstance(exc_filename, str): filename = filename.encode(sys.getfilesystemencoding()) if: self.assertEqual(exc_filename, filename, ""Function '%s(%r) failed with bad filename in the exception: %r"" % (fn.__name__, filename, exc_filename))",True,check_fn_in_exception,check_fn_in_exception,0.6489690542221069 4735,"def on_modified(self, view): if: return history = get_history(view) this_edit_point = view.sel()[0].begin() this_edit_line = view.rowcol(this_edit_point)[0] current = history.current() if current is None: last_edit_line = -99 else: last_edit_line = get_line_from_region(view, current) if abs(this_edit_line - last_edit_line) > Pref.line_proximity_thresh: history.add(this_edit_point)",False,view.is_scratch() or view.settings().get('is_widget'),not view.sel(),0.6490414142608643 4736,"def on_modified(self, view): if view.is_scratch() or view.settings().get('is_widget'): return history = get_history(view) this_edit_point = view.sel()[0].begin() this_edit_line = view.rowcol(this_edit_point)[0] current = history.current() if: last_edit_line = -99 else: last_edit_line = get_line_from_region(view, current) if abs(this_edit_line - last_edit_line) > Pref.line_proximity_thresh: history.add(this_edit_point)",False,current is None,current.is_absolute(),0.6543020606040955 4737,"def on_modified(self, view): if view.is_scratch() or view.settings().get('is_widget'): return history = get_history(view) this_edit_point = view.sel()[0].begin() this_edit_line = view.rowcol(this_edit_point)[0] current = history.current() if current is None: last_edit_line = -99 else: last_edit_line = get_line_from_region(view, current) if: history.add(this_edit_point)",False,abs(this_edit_line - last_edit_line) > Pref.line_proximity_thresh,last_edit_line != -99,0.6449875235557556 4738,"def clone_device(self, clone_index): """"""Device used to create the clone and all the ops inside the clone. Args: clone_index: Int, representing the clone_index. Returns: A value suitable for `tf.device()`. Raises: ValueError: if `clone_index` is greater or equal to the number of clones"". """""" if: raise ValueError('clone_index must be less than num_clones') device = '' if self._num_ps_tasks > 0: device += self._worker_device if self._clone_on_cpu: device += '/device:CPU:0' else: device += '/device:GPU:%d' % clone_index return device",True,clone_index >= self._num_clones,clone_index >= self._num_clones,0.6486120820045471 4739,"def clone_device(self, clone_index): """"""Device used to create the clone and all the ops inside the clone. Args: clone_index: Int, representing the clone_index. Returns: A value suitable for `tf.device()`. Raises: ValueError: if `clone_index` is greater or equal to the number of clones"". """""" if clone_index >= self._num_clones: raise ValueError('clone_index must be less than num_clones') device = '' if: device += self._worker_device if self._clone_on_cpu: device += '/device:CPU:0' else: device += '/device:GPU:%d' % clone_index return device",True,self._num_ps_tasks > 0,self._num_ps_tasks > 0,0.6497001647949219 4740,"def clone_device(self, clone_index): """"""Device used to create the clone and all the ops inside the clone. Args: clone_index: Int, representing the clone_index. Returns: A value suitable for `tf.device()`. Raises: ValueError: if `clone_index` is greater or equal to the number of clones"". """""" if clone_index >= self._num_clones: raise ValueError('clone_index must be less than num_clones') device = '' if self._num_ps_tasks > 0: device += self._worker_device if: device += '/device:CPU:0' else: device += '/device:GPU:%d' % clone_index return device",True,self._clone_on_cpu,self._clone_on_cpu,0.6510813236236572 4741,"def highlight(text, *args, **kwargs): text = text.split('\n') lst = [] for line in text: match = re.match('^(\\s+)', line) if: line = '{}{}'.format(' ' * len(match.group(1)), line.strip()) lst.append(line) return '
'.join(lst)",True,match,match,0.6601387858390808 4742,"def get_probe_results(self, _video): ffprobe_command = [self.config['paths']['ffprobe_path'], '-print_format', 'json', '-v', 'quiet', '-show_packets', '-select_streams', 'v:0', '-show_entries','side_data=:packet=pts,pos,duration,size', '-'] cmdpts = subprocess.Popen(ffprobe_command, stdin=subprocess.PIPE, stdout=subprocess.PIPE) ptsout = cmdpts.communicate(_video.data)[0] exit_code = cmdpts.wait() if: self.logger.warning('FFPROBE failed to execute with error code: {}'.format(exit_code)) return None return json.loads(ptsout)",True,exit_code != 0,exit_code != 0,0.6551514267921448 4743,"@scopes.add_arg_scope def global_step(device=''): """"""Returns the global step variable. Args: device: Optional device to place the variable. It can be an string or a function that is called to get the device for the variable. Returns: the tensor representing the global step variable. """""" global_step_ref = tf.get_collection(tf.GraphKeys.GLOBAL_STEP) if: return global_step_ref[0] else: collections = [VARIABLES_TO_RESTORE, tf.GraphKeys.GLOBAL_VARIABLES, tf.GraphKeys.GLOBAL_STEP] with tf.device(variable_device(device, 'global_step')): return tf.get_variable('global_step', shape=[], dtype=tf.int64, initializer=tf.zeros_initializer(), trainable=False, collections=collections)",True,global_step_ref,global_step_ref,0.6528277397155762 4744,"def _rmtree(self, path): if: try: shutil.rmtree(path) except OSError: if os.path.exists(path): traceback.print_exc()",True,os.path.exists(path),os.path.exists(path),0.643398106098175 4745,"def _rmtree(self, path): if os.path.exists(path): try: shutil.rmtree(path) except OSError: if: traceback.print_exc()",False,os.path.exists(path),self._print_exception,0.643122673034668 4746,"def _calculate_aes_cipher(key): """""" Determines if the key is a valid AES 128, 192 or 256 key :param key: A byte string of the key to use :raises: ValueError - when an invalid key is provided :return: A unicode string of the AES variation - ""aes128"", ""aes192"" or ""aes256"" """""" if: raise ValueError(pretty_message('\n key must be either 16, 24 or 32 bytes (128, 192 or 256 bits)\n long - is %s\n ', len(key))) if len(key) == 16: cipher = 'aes128' elif len(key) == 24: cipher = 'aes192' elif len(key) == 32: cipher = 'aes256' return cipher",True,"len(key) not in [16, 24, 32]","len(key) not in [16, 24, 32]",0.6503622531890869 4747,"def _calculate_aes_cipher(key): """""" Determines if the key is a valid AES 128, 192 or 256 key :param key: A byte string of the key to use :raises: ValueError - when an invalid key is provided :return: A unicode string of the AES variation - ""aes128"", ""aes192"" or ""aes256"" """""" if len(key) not in [16, 24, 32]: raise ValueError(pretty_message('\n key must be either 16, 24 or 32 bytes (128, 192 or 256 bits)\n long - is %s\n ', len(key))) if: cipher = 'aes128' elif len(key) == 24: cipher = 'aes192' elif len(key) == 32: cipher = 'aes256' return cipher",True,len(key) == 16,len(key) == 16,0.655689001083374 4748,"def _calculate_aes_cipher(key): """""" Determines if the key is a valid AES 128, 192 or 256 key :param key: A byte string of the key to use :raises: ValueError - when an invalid key is provided :return: A unicode string of the AES variation - ""aes128"", ""aes192"" or ""aes256"" """""" if len(key) not in [16, 24, 32]: raise ValueError(pretty_message('\n key must be either 16, 24 or 32 bytes (128, 192 or 256 bits)\n long - is %s\n ', len(key))) if len(key) == 16: cipher = 'aes128' elif: cipher = 'aes192' elif len(key) == 32: cipher = 'aes256' return cipher",False,len(key) == 24,len(key) == 256,0.6523637771606445 4749,"def _calculate_aes_cipher(key): """""" Determines if the key is a valid AES 128, 192 or 256 key :param key: A byte string of the key to use :raises: ValueError - when an invalid key is provided :return: A unicode string of the AES variation - ""aes128"", ""aes192"" or ""aes256"" """""" if len(key) not in [16, 24, 32]: raise ValueError(pretty_message('\n key must be either 16, 24 or 32 bytes (128, 192 or 256 bits)\n long - is %s\n ', len(key))) if len(key) == 16: cipher = 'aes128' elif len(key) == 24: cipher = 'aes192' elif: cipher = 'aes256' return cipher",True,len(key) == 32,len(key) == 32,0.6528159379959106 4750,"def has_blob(self, lang, blobname): lang_lib = self._lang_lib_for_blob(lang, blobname) if: return False return lang_lib.has_blob(blobname)",True,lang_lib is None,lang_lib is None,0.6555566787719727 4751,"def Item(self, vtIndex=defaultNamedNotOptArg): """"""DISPID_VALUE"""""" ret = self._oleobj_.InvokeTypes(0, LCID, 1, (9, 0), ((12, 1),), vtIndex) if: ret = Dispatch(ret, u'Item', '{86A368F0-8603-466A-906F-9FC811B1B711}') return ret",True,ret is not None,ret is not None,0.6539180278778076 4752,"def handle_compat(self, data): """"""Compatibility with pre 3.6 dumps."""""" if: return data['basic'] = {'username': data['username']} data['profile'] = {'translated': data['translated'],'suggested': data['suggested'], 'language': data['language'], 'uploaded': data.get('uploaded', 0),'secondary_languages': data['secondary_languages'], 'languages': data['languages'], 'watched': data['subscriptions']}",False,'basic' in data,"getattr(data, 'compat', False)",0.6588869094848633 4753,"def __init__(self, os_version=None, local_vars_configuration=None): """"""OsOsSupportAllOf - a model defined in OpenAPI"""""" if: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._os_version = None self.discriminator = None if os_version is not None: self.os_version = os_version",True,local_vars_configuration is None,local_vars_configuration is None,0.6565448045730591 4754,"def __init__(self, os_version=None, local_vars_configuration=None): """"""OsOsSupportAllOf - a model defined in OpenAPI"""""" if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._os_version = None self.discriminator = None if: self.os_version = os_version",True,os_version is not None,os_version is not None,0.6534681916236877 4755,"def _check_active_provider(api_key: ApiKey) -> None: if: raise api_errors.ForbiddenError(errors={'auth': ['Inactive provider']})",False,not api_key.provider.isActive,api_key.provider != 'inactive',0.6499614715576172 4756,"def __iter__(self): for img, target in self.parser: if: img = self.transform(img) if self.target_transform is not None: target = self.target_transform(target) yield (img, target)",True,self.transform is not None,self.transform is not None,0.6480406522750854 4757,"def __iter__(self): for img, target in self.parser: if self.transform is not None: img = self.transform(img) if: target = self.target_transform(target) yield (img, target)",True,self.target_transform is not None,self.target_transform is not None,0.6476747393608093 4758,"@wrap_exceptions def exe(self): if: if self.pid == 0: return '' return cext.proc_exe(self.pid) elif NETBSD: if self.pid == 0: return '' with wrap_exceptions_procfs(self): return os.readlink('/proc/%s/exe' % self.pid) else: cmdline = self.cmdline() if cmdline: return which(cmdline[0]) or '' else: return ''",False,FREEBSD,OPENBSD,0.6707509756088257 4759,"@wrap_exceptions def exe(self): if FREEBSD: if: return '' return cext.proc_exe(self.pid) elif NETBSD: if self.pid == 0: return '' with wrap_exceptions_procfs(self): return os.readlink('/proc/%s/exe' % self.pid) else: cmdline = self.cmdline() if cmdline: return which(cmdline[0]) or '' else: return ''",True,self.pid == 0,self.pid == 0,0.6585092544555664 4760,"@wrap_exceptions def exe(self): if FREEBSD: if self.pid == 0: return '' return cext.proc_exe(self.pid) elif: if self.pid == 0: return '' with wrap_exceptions_procfs(self): return os.readlink('/proc/%s/exe' % self.pid) else: cmdline = self.cmdline() if cmdline: return which(cmdline[0]) or '' else: return ''",False,NETBSD,os.path.exists('/proc/%s/exe'),0.6657954454421997 4761,"@wrap_exceptions def exe(self): if FREEBSD: if self.pid == 0: return '' return cext.proc_exe(self.pid) elif NETBSD: if: return '' with wrap_exceptions_procfs(self): return os.readlink('/proc/%s/exe' % self.pid) else: cmdline = self.cmdline() if cmdline: return which(cmdline[0]) or '' else: return ''",True,self.pid == 0,self.pid == 0,0.6588057279586792 4762,"@wrap_exceptions def exe(self): if FREEBSD: if self.pid == 0: return '' return cext.proc_exe(self.pid) elif NETBSD: if self.pid == 0: return '' with wrap_exceptions_procfs(self): return os.readlink('/proc/%s/exe' % self.pid) else: cmdline = self.cmdline() if: return which(cmdline[0]) or '' else: return ''",True,cmdline,cmdline,0.6615703105926514 4763,"def test_analytics_amplitude_token(self): """""" :return: """""" botengine = BotEnginePyTest({}) token = '' amplitude_tokens = properties.get_property(botengine, 'AMPLITUDE_TOKENS') assert amplitude_tokens is not None amplitude_tokens[bundle.CLOUD_ADDRESS] = 'test_token' botengine.organization_properties['AMPLITUDE_TOKENS'] = amplitude_tokens if: for cloud_address in amplitude_tokens: if cloud_address in bundle.CLOUD_ADDRESS: token = amplitude_tokens[cloud_address] assert token is not ''",False,amplitude_tokens is not None,'CLOUD_ADDRESS' in amplitude_tokens,0.6534239053726196 4764,"def test_analytics_amplitude_token(self): """""" :return: """""" botengine = BotEnginePyTest({}) token = '' amplitude_tokens = properties.get_property(botengine, 'AMPLITUDE_TOKENS') assert amplitude_tokens is not None amplitude_tokens[bundle.CLOUD_ADDRESS] = 'test_token' botengine.organization_properties['AMPLITUDE_TOKENS'] = amplitude_tokens if amplitude_tokens is not None: for cloud_address in amplitude_tokens: if: token = amplitude_tokens[cloud_address] assert token is not ''",False,cloud_address in bundle.CLOUD_ADDRESS,cloud_address in amplitude_tokens,0.6499393582344055 4765,"@_unique def uninstallation_paths(dist): """""" Yield all the uninstallation paths for dist based on RECORD-without-.pyc Yield paths to all the files in RECORD. For each.py file in RECORD, add the.pyc in the same directory. UninstallPathSet.add() takes care of the __pycache__.pyc. """""" r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) for row in r: path = os.path.join(dist.location, row[0]) yield path if: dn, fn = os.path.split(path) base = fn[:-3] path = os.path.join(dn, base + '.pyc') yield path",False,path.endswith('.py'),os.path.isfile(path) and (not os.path.isdir(path)) and (not os.path.isdir(path)) and (not os.path.isdir(path)),0.6425484418869019 4766,"def decode(self, encoding=None, errors=None): if: if errors: return self.__class__(self.data.decode(encoding, errors)) else: return self.__class__(self.data.decode(encoding)) else: return self.__class__(self.data.decode())",True,encoding,encoding,0.6606404185295105 4767,"def decode(self, encoding=None, errors=None): if encoding: if: return self.__class__(self.data.decode(encoding, errors)) else: return self.__class__(self.data.decode(encoding)) else: return self.__class__(self.data.decode())",True,errors,errors,0.661968469619751 4768,"def has_permission(self, request, view): user = request.user if: return True return user.has_perm('sponsors.sponsor_publisher')",False,request.user.is_superuser or request.user.is_staff,"not super().has_permission(request, view)",0.6441903114318848 4769,"def addFailure(self, test, err): super(TextTestResult, self).addFailure(test, err) if: self.stream.writeln('FAIL') elif self.dots: self.stream.write('F') self.stream.flush()",True,self.showAll,self.showAll,0.6500403881072998 4770,"def addFailure(self, test, err): super(TextTestResult, self).addFailure(test, err) if self.showAll: self.stream.writeln('FAIL') elif: self.stream.write('F') self.stream.flush()",True,self.dots,self.dots,0.6494147777557373 4771,"def _pad_row(cells, padding): if: pad ='' * padding padded_cells = [pad + cell + pad for cell in cells] return padded_cells else: return cells",True,cells,cells,0.6538035869598389 4772,"def output_excludeaccountkeywordssearchparameter(data_object): if: return output_status_message('* * * Begin output_excludeaccountkeywordssearchparameter * * *') output_status_message('ExcludeAccountKeywords: {0}'.format(data_object.ExcludeAccountKeywords)) output_status_message('* * * End output_excludeaccountkeywordssearchparameter * * *')",True,data_object is None,data_object is None,0.6489813327789307 4773,"@property def download_url(self): if: filename = '%d/%s' % (self.id, self.filename) return current_store.get_url('document-attachment', filename, 0, 0, self.mimetype) return self.image.original.locate()",False,self.mimetype == PDF,self.image is None,0.6487364768981934 4774,"def output_array_of_campaignstatus(value_sets): if: return output_status_message('Array Of CampaignStatus:\n') for value_set in value_sets['CampaignStatus']: output_campaignstatus(value_set)",True,value_sets is None or len(value_sets) == 0,value_sets is None or len(value_sets) == 0,0.6445423364639282 4775,"def _get_workspace_nat_gateway(config, vpc_cli): workspace_name = config['workspace_name'] nat_gateway_name = get_workspace_nat_gateway_name(workspace_name) vpc_id = get_workspace_vpc_id(config, vpc_cli) cli_logger.verbose('Getting the NAT gateway for workspace: {}...'.format(nat_gateway_name)) nat_gateways = vpc_cli.describe_nat_gateways(vpc_id, nat_gateway_name) if: cli_logger.verbose('The NAT gateway for workspace is not found: {}.'.format(nat_gateway_name)) return None else: cli_logger.verbose_error('Successfully get the NAT gateway: {} for workspace.'.format(nat_gateway_name)) return nat_gateways[0]",True,len(nat_gateways) == 0,len(nat_gateways) == 0,0.6437697410583496 4776,"def run(self, metricVals, Hvals): if: warnings.warn('Desired H value of metric outside range of provided H values.') return self.badval if metricVals.shape[0]!= 1: warnings.warn('This is not an appropriate summary statistic for this data - need 1d values.') return self.badval value = np.interp(self.Hmark, Hvals, metricVals[0]) return value",False,self.Hmark < Hvals.min() or self.Hmark > Hvals.max(),Hvals.h_value != 0,0.6490373611450195 4777,"def run(self, metricVals, Hvals): if self.Hmark < Hvals.min() or self.Hmark > Hvals.max(): warnings.warn('Desired H value of metric outside range of provided H values.') return self.badval if: warnings.warn('This is not an appropriate summary statistic for this data - need 1d values.') return self.badval value = np.interp(self.Hmark, Hvals, metricVals[0]) return value",False,metricVals.shape[0] != 1,len(metricVals) != 1,0.6506631970405579 4778,"def versions_from_parentdir(parentdir_prefix, root, verbose): """"""Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """""" rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {'version': dirname[len(parentdir_prefix):], 'full-revisionid': None, 'dirty': False, 'error': None, 'date': None} else: rootdirs.append(root) root = os.path.dirname(root) if: print(f'Tried directories {str(rootdirs)} but none started with prefix {parentdir_prefix}') raise NotThisMethod(""rootdir doesn't start with parentdir_prefix"")",True,verbose,verbose,0.6622799634933472 4779,"def versions_from_parentdir(parentdir_prefix, root, verbose): """"""Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """""" rootdirs = [] for i in range(3): dirname = os.path.basename(root) if: return {'version': dirname[len(parentdir_prefix):], 'full-revisionid': None, 'dirty': False, 'error': None, 'date': None} else: rootdirs.append(root) root = os.path.dirname(root) if verbose: print(f'Tried directories {str(rootdirs)} but none started with prefix {parentdir_prefix}') raise NotThisMethod(""rootdir doesn't start with parentdir_prefix"")",True,dirname.startswith(parentdir_prefix),dirname.startswith(parentdir_prefix),0.6436092853546143 4780,"def __init__(self, **kwargs): """""" Args: annotators: set that can include pos, lemma, and ner. model: spaCy model to use (either path, or keyword like 'en'). """""" model = kwargs.get('model', 'en') self.annotators = copy.deepcopy(kwargs.get('annotators', set())) nlp_kwargs = {'parser': False} if: nlp_kwargs['tagger'] = False if 'ner' not in self.annotators: nlp_kwargs['entity'] = False self.nlp = spacy.load(model, **nlp_kwargs)",False,"not any([p in self.annotators for p in ['lemma', 'pos', 'ner']])",'pos' not in self.annotators,0.6481437087059021 4781,"def __init__(self, **kwargs): """""" Args: annotators: set that can include pos, lemma, and ner. model: spaCy model to use (either path, or keyword like 'en'). """""" model = kwargs.get('model', 'en') self.annotators = copy.deepcopy(kwargs.get('annotators', set())) nlp_kwargs = {'parser': False} if not any([p in self.annotators for p in ['lemma', 'pos', 'ner']]): nlp_kwargs['tagger'] = False if: nlp_kwargs['entity'] = False self.nlp = spacy.load(model, **nlp_kwargs)",False,'ner' not in self.annotators,not self.annotators,0.6536028385162354 4782,"def torch_persistent_save(*args, **kwargs): for i in range(3): try: return torch.save(*args, **kwargs) except Exception: if: logging.error(traceback.format_exc())",True,i == 2,i == 2,0.6622440814971924 4783,"def init_weights(self): for m in self.modules(): if: xavier_init(m, distribution='uniform')",True,"isinstance(m, nn.Conv2d)","isinstance(m, nn.Conv2d)",0.6506986618041992 4784,"def selectCRS(self): projSelector = QgsGenericProjectionSelector() if: crsId = projSelector.crs().srsid() if not crsId == 0: self.crs = QgsCoordinateReferenceSystem() self.crs.createFromId(crsId, QgsCoordinateReferenceSystem.InternalCrsId) self.tbCRS.setText(self.crs.description())",False,projSelector.exec_(),projSelector.crs() is not None,0.656387984752655 4785,"def selectCRS(self): projSelector = QgsGenericProjectionSelector() if projSelector.exec_(): crsId = projSelector.crs().srsid() if: self.crs = QgsCoordinateReferenceSystem() self.crs.createFromId(crsId, QgsCoordinateReferenceSystem.InternalCrsId) self.tbCRS.setText(self.crs.description())",False,not crsId == 0,crsId,0.6644067168235779 4786,"@staticmethod def filter_versions(stdout): """""" filter the oc version output """""" version_dict = {} version_search = ['oc', 'openshift', 'kubernetes'] for line in stdout.strip().split('\n'): for term in version_search: if not line: continue if line.startswith(term): version_dict[term] = line.split()[-1] if: version_dict['openshift'] = version_dict['oc'] return version_dict",True,'openshift' not in version_dict,'openshift' not in version_dict,0.6462664008140564 4787,"@staticmethod def filter_versions(stdout): """""" filter the oc version output """""" version_dict = {} version_search = ['oc', 'openshift', 'kubernetes'] for line in stdout.strip().split('\n'): for term in version_search: if: continue if line.startswith(term): version_dict[term] = line.split()[-1] if 'openshift' not in version_dict: version_dict['openshift'] = version_dict['oc'] return version_dict",True,not line,not line,0.6599321365356445 4788,"@staticmethod def filter_versions(stdout): """""" filter the oc version output """""" version_dict = {} version_search = ['oc', 'openshift', 'kubernetes'] for line in stdout.strip().split('\n'): for term in version_search: if not line: continue if: version_dict[term] = line.split()[-1] if 'openshift' not in version_dict: version_dict['openshift'] = version_dict['oc'] return version_dict",True,line.startswith(term),line.startswith(term),0.645671010017395 4789,"def get(self): with self._lock: if: self._cond.wait() res = self._result self._result = None return res",False,self._result is None,not self._cond.wait(),0.6549055576324463 4790,"def init_permissions(chat_id, reset=False): curr_perm = SESSION.query(Permissions).get(str(chat_id)) if: SESSION.delete(curr_perm) SESSION.flush() perm = Permissions(str(chat_id)) SESSION.add(perm) SESSION.commit() return perm",True,reset,reset,0.6775790452957153 4791,"def get_algorithm_impls(self, algorithm): algorithm = algorithm.upper() if: H = md5_digest elif algorithm == 'SHA': H = sha1_digest KD = lambda s, d: H('%s:%s' % (s, d)) return (H, KD)",True,algorithm == 'MD5',algorithm == 'MD5',0.6515026092529297 4792,"def get_algorithm_impls(self, algorithm): algorithm = algorithm.upper() if algorithm == 'MD5': H = md5_digest elif: H = sha1_digest KD = lambda s, d: H('%s:%s' % (s, d)) return (H, KD)",False,algorithm == 'SHA',algorithm == 'SHA1',0.6609275341033936 4793,"def rebase(name=None): """"""Only run git rebase. That may be enough for light updates. (but be careful nothing breaks!) Rebase the given client, or all simultaneously. xxx: check if the pip requirements, the js sources or xxx changed and update what's needed (try not to run apt, pip and npm every time). """""" def do_rebase(client): wd = os.path.join(CFG.home, CFG.dir, client.name, CFG.project_name) with cd(wd): with prefix(VENV_ACTIVATE.format(client.venv)): run(CMD_REBASE) if: client = fabutils.select_client_cfg(name) do_rebase(client) check_online(name)",True,name,name,0.6667547225952148 4794,"def get_last_publish_id(self): published = self.filter(publish_id__isnull=False).only('publish_id').order_by('-publish_id') if: return published[0].publish_id return 0",False,published.count(),published,0.6529859304428101 4795,"def get_third_party_account(self, order=None, picking=None): if: return order.shipping_account_id if picking and picking.shipping_account_id: return picking.shipping_account_id return None",True,order and order.shipping_account_id,order and order.shipping_account_id,0.6448096036911011 4796,"def get_third_party_account(self, order=None, picking=None): if order and order.shipping_account_id: return order.shipping_account_id if: return picking.shipping_account_id return None",True,picking and picking.shipping_account_id,picking and picking.shipping_account_id,0.6442742943763733 4797,"def get_geodists(self): if: self.geodists = self._compute_geodists() return self.geodists",True,self.geodists is None,self.geodists is None,0.6550858020782471 4798,"def __lt__(self, other) -> bool: if: return True if self and (not other) or self == other: return False return self._value > other._value",True,not self and other,not self and other,0.6591976284980774 4799,"def __lt__(self, other) -> bool: if not self and other: return True if: return False return self._value > other._value",True,self and (not other) or self == other,self and (not other) or self == other,0.6498509049415588 4800,"def _post_run(args): if: from soap.common.cache import dump_cache_info with logger.info_context(): dump_cache_info()",False,args['--dump-cache-info'],args.get('cache_code') == '0',0.6408601403236389 4801,"def __getitem__(self, n): weight = self._weight nbrs = self._pred[n] if: return len(nbrs) return sum((dd.get(weight, 1) for dd in nbrs.values()))",True,weight is None,weight is None,0.6603689193725586 4802,"@property def savefig_ext(self): """"""Returns the figure extension to save with."""""" for ext in ['pdf', 'png','svg']: if: return ext",False,self.args.get(ext),os.path.exists(self._path + ext),0.6450842618942261 4803,"@staticmethod def _search_iterator(root, prop_keys, result): current_key = prop_keys[0] keys = JSONPath.get_target_keys(root, current_key) for key in keys: if: result.append(Node(root, key)) else: JSONPath._search_iterator(root[key], prop_keys[1:], result)",False,len(prop_keys) == 1,key in root,0.6464553475379944 4804,"def __init__(self, iouType='segm'): if: self.setDetParams() elif iouType == 'keypoints': self.setKpParams() else: raise Exception('iouType not supported') self.iouType = iouType self.useSegm = None",True,iouType == 'segm' or iouType == 'bbox',iouType == 'segm' or iouType == 'bbox',0.6507288217544556 4805,"def __init__(self, iouType='segm'): if iouType =='segm' or iouType == 'bbox': self.setDetParams() elif: self.setKpParams() else: raise Exception('iouType not supported') self.iouType = iouType self.useSegm = None",True,iouType == 'keypoints',iouType == 'keypoints',0.6566466093063354 4806,"def sanitize_file_name(file_name: Union[Path, str]) -> str: """"""Remove unusable characters from a file name. :param file_name: file name to sanitize :return: sanitized file name """""" if: file_name = str(file_name.resolve()) file_name = file_name.replace('/', '_') file_name = file_name.replace(':', '.') file_name = file_name.replace(' ', '_') file_name = re.sub('(?u)[^-\\w.]', '', file_name) return file_name",False,"isinstance(file_name, Path)","not isinstance(file_name, Path)",0.6493945121765137 4807,"@metrics(['CPU | Usage'], volatile=False, access='C') def get_cpu_metrics(self, metrics): with self.profile.shell(self): c = self.cli('cat /proc/loadavg') if: cpu = c.split(' ')[1].strip() self.set_metric(id=('CPU | Usage', None), value=round(float(cpu) + 0.5), units='%')",False,c,c.startswith('CPU | Usage'),0.6835490465164185 4808,"def _getStrictScannerDict(): global _strictScannerDict if: _strictScannerDict = {_START_LINE_MODE: _StrictStartScanner(), _COMMAND_MODE: _StrictCommandScanner(), _COMPUTE_START_MODE: _StrictComputeStartScanner(), _COMPUTE_EQN_MODE: _StrictComputeEqnScanner(), _COMPUTE_MODE: _StrictComputeScanner(), _SWALLOW_NEWLINE_MODE: _StrictSwallowNewlineScanner(), _ACCEPT_REDIR_MODE: _StrictAcceptRedirScanner()} return _strictScannerDict",True,_strictScannerDict is None,_strictScannerDict is None,0.6582870483398438 4809,"def __schedule_assembly_when_necessary(self): assert utils._is_in_main_thread(), 'This has an impact on all elements and must be run in the main thread' if: return self.__assemble_scheduled = True for dep in self._dependencies(_Scope.BUILD, recurse=False): dep._set_required() self.__update_cache_key_non_strict()",False,not self.__should_schedule(),self.__assemble_scheduled,0.6519353985786438 4810,"def Poly2(Rij, Rc, derivative=False): if: x = Rij / Rc result = x ** 3 * (x * (15 - 6 * x) - 10) + 1 else: x = Rij / Rc result = -30 / Rc * (x ** 2 * (x - 1) ** 2) return result",True,derivative is False,derivative is False,0.6586003303527832 4811,"def check_string(context, obj, stacklevel=3): if: warn(WSGIWarning('%s requires bytestrings, got %s' % (context, obj.__class__.__name__)))",False,type(obj) is not str,"isinstance(obj, bytes) and stacklevel > 2",0.64955735206604 4812,"def GetDepositList(self): """"""GetDepositList"""""" ret = self._oleobj_.InvokeTypes(14, LCID, 1, (9, 0), ()) if: ret = Dispatch(ret, u'GetDepositList', '{3FFD991B-9010-4832-89C6-903CBB33AD7D}') return ret",True,ret is not None,ret is not None,0.6516813039779663 4813,"def _block_available(self, x, y, z): if: return False",False,"not self.god and self.protocol.is_protected(x, y, z)","self.center_on_x[x, y, z]",0.6426607966423035 4814,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if: return '' if user_id == dispatcher.bot.id: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",False,"user_id in [777000, 1087968824]",not user_id,0.6433091163635254 4815,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",False,user_id == dispatcher.bot.id,user_id in DRAGONS or user_id in WOLVES,0.6474183797836304 4816,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if user_id == dispatcher.bot.id: return '' if: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",False,int(user_id) in DRAGONS + TIGERS + WOLVES,not user_id,0.6471975445747375 4817,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if user_id == dispatcher.bot.id: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",True,is_blacklisted,is_blacklisted,0.6510672569274902 4818,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if user_id == dispatcher.bot.id: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if: text += f'\nReason: {reason}' else: text = text.format('No') return text",True,reason,reason,0.6717069745063782 4819,"def delete_public(matching_zones): if: changed = False msg = 'There are multiple zones that match. Use hosted_zone_id to specify the correct zone.' else: if not module.check_mode: try: client.delete_hosted_zone(Id=matching_zones[0]['Id']) except (BotoCoreError, ClientError) as e: module.fail_json_aws(e, msg=f""Could not get delete hosted zone {matching_zones[0]['Id']}"") changed = True msg = f""Successfully deleted {matching_zones[0]['Id']}"" return (changed, msg)",True,len(matching_zones) > 1,len(matching_zones) > 1,0.6477818489074707 4820,"def delete_public(matching_zones): if len(matching_zones) > 1: changed = False msg = 'There are multiple zones that match. Use hosted_zone_id to specify the correct zone.' else: if: try: client.delete_hosted_zone(Id=matching_zones[0]['Id']) except (BotoCoreError, ClientError) as e: module.fail_json_aws(e, msg=f""Could not get delete hosted zone {matching_zones[0]['Id']}"") changed = True msg = f""Successfully deleted {matching_zones[0]['Id']}"" return (changed, msg)",False,not module.check_mode,matching_zones[0]['Id'],0.6486724615097046 4821,"@classmethod def register(cls, name, obj): """"""Register an item to registry with key 'name' Args: name: Key with which the item will be registered. Usage:: from minigpt4_utils.common.registry import registry registry.register(""config"", {}) """""" path = name.split('.') current = cls.mapping['state'] for part in path[:-1]: if: current[part] = {} current = current[part] current[path[-1]] = obj",True,part not in current,part not in current,0.655687153339386 4822,"def format_hetero_sampler_output(in_sample: Any, edge_dir=Literal['in', 'out']): for k in in_sample.node.keys(): in_sample.node[k] = in_sample.node[k].unique() if: if edge_dir == 'out': in_sample.edge_types = [reverse_edge_type(etype) if etype[0]!= etype[-1] else etype for etype in in_sample.edge_types] return in_sample",False,in_sample.edge_types is not None,edge_dir != 'in',0.6500177383422852 4823,"def format_hetero_sampler_output(in_sample: Any, edge_dir=Literal['in', 'out']): for k in in_sample.node.keys(): in_sample.node[k] = in_sample.node[k].unique() if in_sample.edge_types is not None: if: in_sample.edge_types = [reverse_edge_type(etype) if etype[0]!= etype[-1] else etype for etype in in_sample.edge_types] return in_sample",True,edge_dir == 'out',edge_dir == 'out',0.6541842222213745 4824,"def run(self, options, args): from pip._internal.commands import commands_dict, get_similar_commands try: cmd_name = args[0] except IndexError: return SUCCESS if: guess = get_similar_commands(cmd_name) msg = ['unknown command ""%s""' % cmd_name] if guess: msg.append('maybe you meant ""%s""' % guess) raise CommandError(' - '.join(msg)) command = commands_dict[cmd_name]() command.parser.print_help() return SUCCESS",True,cmd_name not in commands_dict,cmd_name not in commands_dict,0.6501361131668091 4825,"def run(self, options, args): from pip._internal.commands import commands_dict, get_similar_commands try: cmd_name = args[0] except IndexError: return SUCCESS if cmd_name not in commands_dict: guess = get_similar_commands(cmd_name) msg = ['unknown command ""%s""' % cmd_name] if: msg.append('maybe you meant ""%s""' % guess) raise CommandError(' - '.join(msg)) command = commands_dict[cmd_name]() command.parser.print_help() return SUCCESS",True,guess,guess,0.6855486631393433 4826,"def exec_(code, globs=None, locs=None): """"""Execute code in a namespace."""""" if: frame = sys._getframe(1) globs = frame.f_globals if locs is None: locs = frame.f_locals del frame elif locs is None: locs = globs exec('exec code in globs, locs')",True,globs is None,globs is None,0.6533365249633789 4827,"def exec_(code, globs=None, locs=None): """"""Execute code in a namespace."""""" if globs is None: frame = sys._getframe(1) globs = frame.f_globals if: locs = frame.f_locals del frame elif locs is None: locs = globs exec('exec code in globs, locs')",True,locs is None,locs is None,0.6590880155563354 4828,"def exec_(code, globs=None, locs=None): """"""Execute code in a namespace."""""" if globs is None: frame = sys._getframe(1) globs = frame.f_globals if locs is None: locs = frame.f_locals del frame elif: locs = globs exec('exec code in globs, locs')",True,locs is None,locs is None,0.6552174091339111 4829,"def _get_name(self): if: return None for name in self._names: return name",False,not self._names,self._names is None,0.6535775065422058 4830,"def get_MkdirBuilder(): global MkdirBuilder if: import SCons.Builder import SCons.Defaults MkdirBuilder = SCons.Builder.Builder(action=Mkdir, env=None, explain=None, is_explicit=None, target_scanner=SCons.Defaults.DirEntryScanner, name='MkdirBuilder') return MkdirBuilder",False,MkdirBuilder is None,mkdirBuilder is None,0.664116382598877 4831,"def make_contrib(superclass, func=None): """""" Returns a suitable contribute_to_class() method for the Field subclass. If 'func' is passed in, it is the existing contribute_to_class() method on the subclass and it is called before anything else. It is assumed in this case that the existing contribute_to_class() calls all the necessary superclass methods. """""" def contribute_to_class(self, cls, name): if: func(self, cls, name) else: super(superclass, self).contribute_to_class(cls, name) setattr(cls, self.name, Creator(self)) return contribute_to_class",True,func,func,0.6771795153617859 4832,"def _rar_basename(filepath, files): for found_file in files: if: return os.path.basename(os.path.splitext(os.path.basename(found_file))[0].lower())",False,os.path.splitext(os.path.basename(found_file))[1] == '.rar',os.path.exists(found_file),0.6439276933670044 4833,"def _get_acctivation(name): if: return F.relu elif name =='sigmoid': return F.sigmoid",True,name == 'relu',name == 'relu',0.6593437194824219 4834,"def _get_acctivation(name): if name =='relu': return F.relu elif: return F.sigmoid",True,name == 'sigmoid',name == 'sigmoid',0.6568386554718018 4835,"def note_thread_read(self, note_id, entity_fields=None): """""" Return the full conversation for a given note, including Replies and Attachments. Returns a complex data structure on the following form:: [{'content': 'Please add more awesomeness to the color grading.', 'created_at': '2015-07-14 21:33:28 UTC', 'created_by': {'id': 38, 'name': 'John Pink', 'status': 'act', 'type': 'HumanUser', 'valid': 'valid'}, 'id': 6013, 'type': 'Note'}, {'created_at': '2015-07-14 21:33:32 UTC', 'created_by': {'id': 38, 'name': 'John Pink', 'status': 'act', 'type': 'HumanUser', 'valid': 'valid'}, 'id': 159, 'type': 'Attachment'}, {'content': 'More awesomeness added', 'created_at': '2015-07-14 21:54:51 UTC', 'id': 5, 'type': 'Reply', 'user': {'id': 38, 'name': 'David Blue', 'status': 'act', 'type': 'HumanUser', 'valid': 'valid'}}] The list is returned in descending chronological order. If you wish to include additional fields beyond the ones that are returned by default, you can specify these in an entity_fields dictionary. This dictionary should be keyed by entity type and each key should contain a list of fields to retrieve, for example:: { ""Note"": [""created_by.HumanUser.image"", ""addressings_to"", ""playlist"", ""user"" ], ""Reply"": [""content""], ""Attachment"": [""filmstrip_image"", ""local_storage"", ""this_file"", ""image""] } :param int note_id: The",False,"self.server_caps.version and self.server_caps.version < (6, 2, 0)",note_id == 0,0.6412304639816284 4836,"def note_thread_read(self, note_id, entity_fields=None): """""" Return the full conversation for a given note, including Replies and Attachments. Returns a complex data structure on the following form:: [{'content': 'Please add more awesomeness to the color grading.', 'created_at': '2015-07-14 21:33:28 UTC', 'created_by': {'id': 38, 'name': 'John Pink', 'status': 'act', 'type': 'HumanUser', 'valid': 'valid'}, 'id': 6013, 'type': 'Note'}, {'created_at': '2015-07-14 21:33:32 UTC', 'created_by': {'id': 38, 'name': 'John Pink', 'status': 'act', 'type': 'HumanUser', 'valid': 'valid'}, 'id': 159, 'type': 'Attachment'}, {'content': 'More awesomeness added', 'created_at': '2015-07-14 21:54:51 UTC', 'id': 5, 'type': 'Reply', 'user': {'id': 38, 'name': 'David Blue', 'status': 'act', 'type': 'HumanUser', 'valid': 'valid'}}] The list is returned in descending chronological order. If you wish to include additional fields beyond the ones that are returned by default, you can specify these in an entity_fields dictionary. This dictionary should be keyed by entity type and each key should contain a list of fields to retrieve, for example:: { ""Note"": [""created_by.HumanUser.image"", ""addressings_to"", ""playlist"", ""user"" ], ""Reply"": [""content""], ""Attachment"": [""filmstrip_image"", ""local_storage"", ""this_file"", ""image""] } :param int note_id: The",False,"not isinstance(entity_fields, dict)",note_id == 0,0.6414152383804321 4837,"def remove_objects(objects: Union[Iterable[Object], Collection]) -> None: """"""Removes the given objects from the current scene. Also supports removing an entire collection. """""" collection: Optional[Collection] = None to_remove: Iterable[Object] if: collection = objects to_remove = collection.objects else: to_remove = objects for obj in to_remove: bpy.data.objects.remove(obj, do_unlink=True) if collection: bpy.data.collections.remove(collection) '\n # Prevent a circular import with lazy imports\n from.selection import select_only\n\n # TODO(ntamas): it would be nicer not to change the selection\n select_only(objects, context=context)\n for obj in objects:\n obj.hide_set(False)\n\n result = bpy.ops.object.delete()\n if result!= {""FINISHED""}:\n raise RuntimeError(f""Blender operator returned {result!r}, expected FINISHED"")\n '",True,"isinstance(objects, Collection)","isinstance(objects, Collection)",0.6479711532592773 4838,"def remove_objects(objects: Union[Iterable[Object], Collection]) -> None: """"""Removes the given objects from the current scene. Also supports removing an entire collection. """""" collection: Optional[Collection] = None to_remove: Iterable[Object] if isinstance(objects, Collection): collection = objects to_remove = collection.objects else: to_remove = objects for obj in to_remove: bpy.data.objects.remove(obj, do_unlink=True) if: bpy.data.collections.remove(collection) '\n # Prevent a circular import with lazy imports\n from.selection import select_only\n\n # TODO(ntamas): it would be nicer not to change the selection\n select_only(objects, context=context)\n for obj in objects:\n obj.hide_set(False)\n\n result = bpy.ops.object.delete()\n if result!= {""FINISHED""}:\n raise RuntimeError(f""Blender operator returned {result!r}, expected FINISHED"")\n '",False,collection,collection is not None,0.6699036955833435 4839,"def do_GET(self): if: process_web_request_post_via_tunnel({'http': self}) else: process_web_request({'http': self}) return",False,tunnel_url != '',self.tunnel,0.6559736132621765 4840,"def delTransport(snmpEngine, transportDomain): if: return transport = getTransport(snmpEngine, transportDomain) snmpEngine.transportDispatcher.unregisterTransport(transportDomain) automaticTransportDispatcher = snmpEngine.getUserContext('automaticTransportDispatcher') if automaticTransportDispatcher is not None: automaticTransportDispatcher -= 1 snmpEngine.setUserContext(automaticTransportDispatcher=automaticTransportDispatcher) if not automaticTransportDispatcher: snmpEngine.transportDispatcher.closeDispatcher() snmpEngine.unregisterTransportDispatcher() snmpEngine.delUserContext(automaticTransportDispatcher) return transport",False,not snmpEngine.transportDispatcher,not transportDomain,0.6491948366165161 4841,"def delTransport(snmpEngine, transportDomain): if not snmpEngine.transportDispatcher: return transport = getTransport(snmpEngine, transportDomain) snmpEngine.transportDispatcher.unregisterTransport(transportDomain) automaticTransportDispatcher = snmpEngine.getUserContext('automaticTransportDispatcher') if: automaticTransportDispatcher -= 1 snmpEngine.setUserContext(automaticTransportDispatcher=automaticTransportDispatcher) if not automaticTransportDispatcher: snmpEngine.transportDispatcher.closeDispatcher() snmpEngine.unregisterTransportDispatcher() snmpEngine.delUserContext(automaticTransportDispatcher) return transport",False,automaticTransportDispatcher is not None,automaticTransportDispatcher,0.6472799181938171 4842,"def delTransport(snmpEngine, transportDomain): if not snmpEngine.transportDispatcher: return transport = getTransport(snmpEngine, transportDomain) snmpEngine.transportDispatcher.unregisterTransport(transportDomain) automaticTransportDispatcher = snmpEngine.getUserContext('automaticTransportDispatcher') if automaticTransportDispatcher is not None: automaticTransportDispatcher -= 1 snmpEngine.setUserContext(automaticTransportDispatcher=automaticTransportDispatcher) if: snmpEngine.transportDispatcher.closeDispatcher() snmpEngine.unregisterTransportDispatcher() snmpEngine.delUserContext(automaticTransportDispatcher) return transport",False,not automaticTransportDispatcher,automaticTransportDispatcher == 0,0.6457180976867676 4843,"def gki_flush(self, arg, force=False): """""" Asked to render current plot immediately. Also used by redraw(). NOTE: This is called multiple times (~8) for a single prow call. There is a performance improvement gained by skipping the resize calculation between taskStart() and taskDone(). This class adds the 'force' arg which forces it to redraw once whether we are ""saving draws"" or not. """""" if: self.resizeGraphics(self.__xsz, self.__ysz) self.__mca.draw() self.__mca.flush()",False,self.__allowDrawing or force,force or self.__mca.getEffectiveLevel() >= logging.INFO,0.6550332307815552 4844,"def extract_proj_ids_from_given_codon_ali_file(codon_ali_file, given_trans_id): """"""For a given codon.fasta return all projections for a given transcript present."""""" f = open(codon_ali_file, 'r') ret = set() for line in f: if: continue line_data = line.lstrip('>').rstrip().split(' | ') projection_id = line_data[0] transcript_id, _ = split_proj_name(projection_id) if transcript_id == given_trans_id: ret.add(projection_id) f.close() return ret",True,not line.startswith('>'),not line.startswith('>'),0.6438740491867065 4845,"def extract_proj_ids_from_given_codon_ali_file(codon_ali_file, given_trans_id): """"""For a given codon.fasta return all projections for a given transcript present."""""" f = open(codon_ali_file, 'r') ret = set() for line in f: if not line.startswith('>'): continue line_data = line.lstrip('>').rstrip().split(' | ') projection_id = line_data[0] transcript_id, _ = split_proj_name(projection_id) if: ret.add(projection_id) f.close() return ret",True,transcript_id == given_trans_id,transcript_id == given_trans_id,0.6462188959121704 4846,"def encode_exception(obj): """"""When logging an exception ex: logging.exception(some_error), the exception must be turned into a string so that it is accepted by elasticsearch"""""" if: return json.dumps(obj.to_dict()) if isinstance(obj, Exception): return f'exception_class: {type(obj).__name__}, args: {obj.args}' return obj",False,"isinstance(obj, SteamshipError)","isinstance(obj, BaseModel)",0.6477011442184448 4847,"def encode_exception(obj): """"""When logging an exception ex: logging.exception(some_error), the exception must be turned into a string so that it is accepted by elasticsearch"""""" if isinstance(obj, SteamshipError): return json.dumps(obj.to_dict()) if: return f'exception_class: {type(obj).__name__}, args: {obj.args}' return obj",False,"isinstance(obj, Exception)",obj.__name__ == 'Exception',0.6495566964149475 4848,"def todict(self): ret = odict() if: ret['name'] = self.name for name, model in self.models.items(): ret[name] = model.todict() return ret",False,self.name is not None,"hasattr(self, 'name')",0.6537762880325317 4849,"def walk_stack(f): """"""Walk a stack yielding the frame and line number for each frame. This will follow f.f_back from the given frame. If no frame is given, the current stack is used. Usually used with StackSummary.extract. """""" if: f = sys._getframe().f_back.f_back while f is not None: yield (f, f.f_lineno) f = f.f_back",True,f is None,f is None,0.657871663570404 4850,"def _mobilenet_v3(arch: str, inverted_residual_setting: List[InvertedResidualConfig], last_channel: int, pretrained: bool, progress: bool, **kwargs: Any): model = MobileNetV3(inverted_residual_setting, last_channel, **kwargs) if: state_dict = paddle.load(pretrained) model.set_dict(state_dict) return model",False,pretrained,pretrained is not None,0.6682554483413696 4851,"def getTimezones(self): if: return self._TIMEZONES else: tzr = open('/usr/share/zoneinfo/zone.tab').read().splitlines() tz = [] for t in tzr: if len(t) > 0 and t[0]!= '#': tt = t.split() tz.append(tt[2]) tz = sorted(tz) self._TIMEZONES = tz return tz",False,self._TIMEZONES is not None,self._TIMEZONES,0.6509450674057007 4852,"def getTimezones(self): if self._TIMEZONES is not None: return self._TIMEZONES else: tzr = open('/usr/share/zoneinfo/zone.tab').read().splitlines() tz = [] for t in tzr: if: tt = t.split() tz.append(tt[2]) tz = sorted(tz) self._TIMEZONES = tz return tz",False,len(t) > 0 and t[0] != '#',t.lower()[0] == 'now',0.6470651626586914 4853,"def get(self, request, *args, **kwargs): site_configuration = self.request.site.siteconfiguration payment_processor_class = site_configuration.get_client_side_payment_processor_class(self.request) payment_processor = payment_processor_class(self.request.site) content = payment_processor.apple_pay_merchant_id_domain_association status_code = 200 if: content = 'Apple Pay is not configured for [{}].'.format(request.site.domain) status_code = 501 logger.warning(content) return HttpResponse(content, content_type='text/plain', status=status_code)",True,not content,not content,0.6630300879478455 4854,"def reload_styles(): """""" Refresh the css variables in the editor's style tag. For use e.g. after config color options have been changed. """""" css = styles() editor = UI._editor if: if editor.web is not None: editor.web.eval(f""document.getElementById('siac-styles').innerHTML = `{css}`;"") activate_nightmode(None, editor)",False,editor is not None,css,0.654952883720398 4855,"def reload_styles(): """""" Refresh the css variables in the editor's style tag. For use e.g. after config color options have been changed. """""" css = styles() editor = UI._editor if editor is not None: if: editor.web.eval(f""document.getElementById('siac-styles').innerHTML = `{css}`;"") activate_nightmode(None, editor)",False,editor.web is not None,len(css) > 0,0.6517812609672546 4856,"def __call__(self, image, label): """"""Call the transform. :param image: image np :param label: label np :return: transformed image and label """""" if: image = cv2.GaussianBlur(image, (self.kernel_size, self.kernel_size), 0) return (image, label)",True,random.random() < 0.5,random.random() < 0.5,0.6485433578491211 4857,"def draw(self, context): layout = self.layout mat = context.material layout.active = mat.pov.mirror_use_IOR if: col = layout.column() col.alignment = 'CENTER' col.label(text='The current Raytrace ') col.label(text='Transparency IOR is:'+ str(mat.raytrace_transparency.ior))",False,mat.pov.mirror_use_IOR,layout.active,0.6505624651908875 4858,"def _hasContent(self): if: return True else: return False",False,self.CustomsPaperworkType is not None or self.CustomsPaperworkID is not None,self.ServiceHeader is not None or self.Status is not None or self.Status is not None,0.6416081786155701 4859,"def run(self, key): nkey = key.split('fanout~')[1:][0] if: LOG.warn(_(""No key defining hosts for topic '%s', see ringfile"") % (nkey,)) return [] return map(lambda x: (key + '.' + x, x), self.ring[nkey])",False,not self._ring_has(nkey),nkey not in self.ring,0.6487067937850952 4860,"def __init__(self, uri): match = self._url_re.match(uri) if: raise ValueError('%s: is not a valid S3 URI' % (uri,)) self._bucket, self._item = match.groups()",True,not match,not match,0.6649004220962524 4861,"def unify_state(self, old, new): for label, state in self.arcs.items(): if: self.arcs[label] = new",True,state is old,state is old,0.6624306440353394 4862,"def set_orientation(self, orientation): """""" Set the orientation of the underlying widget. """""" widget = self.widget parent = widget.parent() if: widget.setOrientation(_ORIENTATION_MAP[orientation])",False,"not isinstance(parent, QMainWindow)",orientation in _ORIENTATION_MAP,0.6535335779190063 4863,"def get_order(self, byte_str): first_char = byte_str[0] if: return 94 * (first_char - 196) + byte_str[1] - 161 else: return -1",True,first_char >= 196,first_char >= 196,0.6562756299972534 4864,"def apply(self, image, scale): d = ImageDraw.Draw(image) for obj in self.world.visible_objects: color = _find_key_for_cls(self.object_colors, obj.__class__) text = self.label_for_obj(obj) box = obj.last_observed_image_box if: box *= scale add_img_box_to_image(image, box, color, text=text)",True,scale != 1,scale != 1,0.6653915643692017 4865,"def del_user(user_id): with INSERTION_LOCK: curr = SESSION.query(Users).get(user_id) if: SESSION.delete(curr) SESSION.commit() return True ChatMembers.query.filter(ChatMembers.user == user_id).delete() SESSION.commit() SESSION.close() return False",True,curr,curr,0.6695773005485535 4866,"def __ior__(self, other): """"""Inplace union is the maximum of value from either counter. >>> c = Counter('abbb') >>> c |= Counter('bcc') >>> c Counter({'b': 3, 'c': 2, 'a': 1}) """""" for elem, other_count in other.items(): count = self[elem] if: self[elem] = other_count return self._keep_positive()",False,other_count > count,count > other_count,0.654166579246521 4867,"def reraise(tp, value, tb=None): if: raise value.with_traceback(tb) raise value",True,value.__traceback__ is not tb,value.__traceback__ is not tb,0.6552150249481201 4868,"@run_async def greyson_cancel_callback(update, context): query = update.callback_query if: query.message.edit_text(text=' Privacy deletion request cancelled.', parse_mode=ParseMode.MARKDOWN, disable_web_page_preview=True)",False,query.data == 'cancel_',query.message,0.6515802145004272 4869,"@master_only def log(self, runner): tags = self.get_loggable_tags(runner) if: for k, v in tags.items(): self.dvclive.log(k, v, step=self.get_iter(runner))",True,tags,tags,0.6635003089904785 4870,"@classmethod def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): """""" Generate a random private key. :param curve: The curve on which the point needs to reside, defaults to NIST192p :type curve: ~ecdsa.curves.Curve :param entropy: Source of randomness for generating the private keys, should provide cryptographically secure random numbers if the keys need to be secure. Uses os.urandom() by default. :type entropy: callable :param hashfunc: The default hash function that will be used for signing, needs to implement the same interface as hashlib.sha1 :type hashfunc: callable :return: Initialised SigningKey object :rtype: SigningKey """""" if: return cls._twisted_edwards_keygen(curve, entropy) return cls._weierstrass_keygen(curve, entropy, hashfunc)",False,"isinstance(curve.curve, CurveEdTw)",curve.is_twisted,0.6437191367149353 4871,"def gen_to_NL(self, params=None): p = self.input_view(posname(1)) if: params = ['with_prep'] s = p.gen_to_NL(params) return s",False,not params or 'no_prep' not in params,params is None,0.656969428062439 4872,"def default_services_modules(self, *modules): """""" show the default services This is used internally to know the list of service to be started in the 'get-default' target runlevel when the container is started through default initialisation. It will ignore a number of services - use '--all' to show a longer list of services and use '--all --force' if not even a minimal filter shall be used. """""" results = [] targets = modules or [self.get_default_target()] for target in targets: units = self.target_default_services(target) logg.debug(' %s # %s',''.join(units), target) for unit in units: if: results.append(unit) return results",False,unit not in results,unit in self.get_known_services(),0.6529372930526733 4873,"@staticmethod def frombasetype(value): if: return None return NameType.validate(value)",True,value is None,value is None,0.6545283198356628 4874,"@property def created_to(self): if: return self.created_at.data.split(' - ')[1].strip() +'23:59:59' else: return self.created_from",True,self.created_at.data and '-' in self.created_at.data,self.created_at.data and '-' in self.created_at.data,0.6494013071060181 4875,"def P(self, i, s): """""" Return the formula for prop at state i """""" assert z3.is_expr(self.prop), self.prop if: assert i >= 0, i else: assert i >= 1, i return self._at_state(self.prop, i, s)",False,self.is_prop_state,self.prop.type == 'num',0.6491245031356812 4876,"def get_dataset_split(self, name: Optional[DatasetSplitName]=None) -> Dataset: """"""Get the specified dataset_split, according to module indices. If indices are None, it gets the full dataset. Args: name: Which dataset_split to select. Returns: The loaded dataset_split. """""" dataset_split = super().get_dataset_split(name) indices = self.mod_options.indices if: dataset_split = dataset_split.select(indices) return dataset_split",False,indices,dataset_split is not None,0.6738824844360352 4877,"def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): """"""Forward function of loss calculation. Args: pred (torch.Tensor): Bbox predictions with shape [..., 3]. target (torch.Tensor): Bbox targets (gt) with shape [..., 3]. weight (torch.Tensor|float, optional): Weight of loss. Defaults to None. avg_factor (int, optional): Average factor that is used to average the loss. Defaults to None. reduction_override (str, optional): Method to reduce losses. The valid reduction method are 'none','sum' or'mean'. Defaults to None. Returns: torch.Tensor: IoU loss between predictions and targets. """""" assert reduction_override in (None, 'none','mean','sum') reduction = reduction_override if reduction_override else self.reduction if: return (pred * weight).sum() return axis_aligned_iou_loss(pred, target, weight=weight, avg_factor=avg_factor, reduction=reduction) * self.loss_weight",True,weight is not None and (not torch.any(weight > 0)) and (reduction != 'none'),weight is not None and (not torch.any(weight > 0)) and (reduction != 'none'),0.6442484855651855 4878,"@property def owner(self): """"""gets the property value for owner"""""" if: self.__init() return self._owner",True,self._owner is None,self._owner is None,0.6572138071060181 4879,"def reset(self): CharSetProber.reset(self) for codingSM in self._mCodingSM: if: continue codingSM.active = True codingSM.reset() self._mActiveSM = len(self._mCodingSM) self._mDetectedCharset = None",True,not codingSM,not codingSM,0.6658468246459961 4880,"def system_resources_usage(): if: curr_fds_open = current_process.num_fds() else: ofiles = current_process.open_files() osockets = current_process.connections(kind='all') curr_fds_open = len(ofiles) + len(osockets) curr_threads_nb = threading.active_count() return (curr_fds_open, curr_threads_nb)",True,platform.system() == 'Linux',platform.system() == 'Linux',0.6477300524711609 4881,"def update_statistics(self, new_stat, collect_ts): if: self.stat = NodeStat() self.stat.update(new_stat, collect_ts)",True,self.stat is None,self.stat is None,0.6528431177139282 4882,"def exportChildren(self, outfile, level, namespace_='', name_='docHeadingType'): if: value = quote_xml('%s' % self.valueOf_) value = value.replace('![CDATA', '') outfile.write(value) else: outfile.write(quote_xml('%s' % self.valueOf_))",True,self.valueOf_.find('![CDATA') > -1,self.valueOf_.find('![CDATA') > -1,0.6455363631248474 4883,"def utoascii(text): """""" Convert unicode text into ascii and escape quotes. """""" if: return '' out = text.encode('ascii','replace') out = out.replace('""', '\\""') return out",True,text is None,text is None,0.6531304121017456 4884,"def capybara(x): for _ in range(2): if: y = 4 break assert_is_value(y, MultiValuedValue([KnownValue(4), AnyValue(AnySource.error)]))",True,x > 2,x > 2,0.666887104511261 4885,"def release(self, idx): """"""Discard a datastore in the collection"""""" if: raise Exception('Tried to release locked datastore %d' % idx) del self.datastores[idx]",False,idx in self.locked,idx not in self.datastores,0.6570662260055542 4886,"def cmp_pkgrevno(package, revno, pkgcache=None): """"""Compare supplied revno with the revno of the installed package. * 1 => Installed revno is greater than supplied arg * 0 => Installed revno is the same as supplied arg * -1 => Installed revno is less than supplied arg This function imports YumBase function if the pkgcache argument is None. """""" if: y = yum.YumBase() packages = y.doPackageLists() pkgcache = {i.Name: i.version for i in packages['installed']} pkg = pkgcache[package] if pkg > revno: return 1 if pkg < revno: return -1 return 0",True,not pkgcache,not pkgcache,0.6642249822616577 4887,"def cmp_pkgrevno(package, revno, pkgcache=None): """"""Compare supplied revno with the revno of the installed package. * 1 => Installed revno is greater than supplied arg * 0 => Installed revno is the same as supplied arg * -1 => Installed revno is less than supplied arg This function imports YumBase function if the pkgcache argument is None. """""" if not pkgcache: y = yum.YumBase() packages = y.doPackageLists() pkgcache = {i.Name: i.version for i in packages['installed']} pkg = pkgcache[package] if: return 1 if pkg < revno: return -1 return 0",False,pkg > revno,pkg == revno,0.6588115692138672 4888,"def cmp_pkgrevno(package, revno, pkgcache=None): """"""Compare supplied revno with the revno of the installed package. * 1 => Installed revno is greater than supplied arg * 0 => Installed revno is the same as supplied arg * -1 => Installed revno is less than supplied arg This function imports YumBase function if the pkgcache argument is None. """""" if not pkgcache: y = yum.YumBase() packages = y.doPackageLists() pkgcache = {i.Name: i.version for i in packages['installed']} pkg = pkgcache[package] if pkg > revno: return 1 if: return -1 return 0",False,pkg < revno,revno < 0,0.658662736415863 4889,"def run(self): """"""Runs the command."""""" if: self.check_metadata() if self.restructuredtext: if HAS_DOCUTILS: self.check_restructuredtext() elif self.strict: raise DistutilsSetupError('The docutils package is needed.') if self.strict and self._warnings > 0: raise DistutilsSetupError('Please correct your package.')",True,self.metadata,self.metadata,0.6574519276618958 4890,"def run(self): """"""Runs the command."""""" if self.metadata: self.check_metadata() if: if HAS_DOCUTILS: self.check_restructuredtext() elif self.strict: raise DistutilsSetupError('The docutils package is needed.') if self.strict and self._warnings > 0: raise DistutilsSetupError('Please correct your package.')",False,self.restructuredtext,self.results,0.6463503837585449 4891,"def run(self): """"""Runs the command."""""" if self.metadata: self.check_metadata() if self.restructuredtext: if HAS_DOCUTILS: self.check_restructuredtext() elif self.strict: raise DistutilsSetupError('The docutils package is needed.') if: raise DistutilsSetupError('Please correct your package.')",False,self.strict and self._warnings > 0,not self.check_package(),0.6457299590110779 4892,"def run(self): """"""Runs the command."""""" if self.metadata: self.check_metadata() if self.restructuredtext: if: self.check_restructuredtext() elif self.strict: raise DistutilsSetupError('The docutils package is needed.') if self.strict and self._warnings > 0: raise DistutilsSetupError('Please correct your package.')",False,HAS_DOCUTILS,not self.check_restructuredtext(),0.6485881805419922 4893,"def run(self): """"""Runs the command."""""" if self.metadata: self.check_metadata() if self.restructuredtext: if HAS_DOCUTILS: self.check_restructuredtext() elif: raise DistutilsSetupError('The docutils package is needed.') if self.strict and self._warnings > 0: raise DistutilsSetupError('Please correct your package.')",False,self.strict,not self.has_docutils,0.6538249254226685 4894,"def get_features(self, stratify_by=None): """"""Returns features of the training examples. Args: stratify_by: observation key to stratify by. Returns: If stratify is None, returns a list of features. Otherwise a dictionary of lists of features where the keys are the values of the stratify_by key. """""" if: return [example.features for example in self.examples] stratified_features = collections.defaultdict(list) for example in self.examples: stratified_features[tuple(example.observation.get(stratify_by))].append(example.features) return stratified_features",True,stratify_by is None,stratify_by is None,0.6473560929298401 4895,"def get_order(self, aBuf): first_char = wrap_ord(aBuf[0]) if: return 94 * (first_char - 176) + wrap_ord(aBuf[1]) - 161 else: return -1",True,first_char >= 176,first_char >= 176,0.6497179865837097 4896,"def requests_queued(queue: gdb.Value) -> Tuple[int, int]: """""" Report how many requests are queued for this queue Args: queue: The request queue to inspect for number of queued requests. The value must be of type ``struct request_queue``. Returns: (:obj:`int`, :obj:`int`): The number of queued requests. The first member of the 2-tuple is the number of async requests, the second is the number of sync requests. """""" if: return mq_requests_queued(queue) return sq_requests_queued(queue)",True,queue_is_mq(queue),queue_is_mq(queue),0.6512392163276672 4897,"def push_bytearray(self, value: bytearray): if: raise SDKException(ErrorCode.other_error('invalid data')) self.write_var_uint(len(value)) self.write_bytes(value)",True,"not isinstance(value, bytearray)","not isinstance(value, bytearray)",0.6508938074111938 4898,"def get_anno(self): if: return if self.dataset_dir: return os.path.join(self.dataset_dir, self.anno_path) else: return self.anno_path",False,self.anno_path is None,not self.anno_path,0.6466445922851562 4899,"def get_anno(self): if self.anno_path is None: return if: return os.path.join(self.dataset_dir, self.anno_path) else: return self.anno_path",True,self.dataset_dir,self.dataset_dir,0.6461609601974487 4900,"def get_authenticated_user_id(self, botengine): """""" Get the authenticated user ID :param botengine: :return: User ID if a user authenticated, None if it was a location code/card """""" for p in self.last_updated_params: if: if self.measurements[p][0][0]: if '.' in p: u = p.split('.') return int(u[1]) return None",False,self.MEASUREMENT_NAME_CODE_TYPE in p,p in self.measurements,0.6466970443725586 4901,"def get_authenticated_user_id(self, botengine): """""" Get the authenticated user ID :param botengine: :return: User ID if a user authenticated, None if it was a location code/card """""" for p in self.last_updated_params: if self.MEASUREMENT_NAME_CODE_TYPE in p: if: if '.' in p: u = p.split('.') return int(u[1]) return None",False,self.measurements[p][0][0],p.startswith('access_token '),0.6452220678329468 4902,"def get_authenticated_user_id(self, botengine): """""" Get the authenticated user ID :param botengine: :return: User ID if a user authenticated, None if it was a location code/card """""" for p in self.last_updated_params: if self.MEASUREMENT_NAME_CODE_TYPE in p: if self.measurements[p][0][0]: if: u = p.split('.') return int(u[1]) return None",False,'.' in p,p.startswith('access_token='),0.6622278690338135 4903,"@property def filter_radius(self): if: return float(self._entity_data.get('filter_radius')) return float(0)",True,'filter_radius' in self._entity_data,'filter_radius' in self._entity_data,0.6502079963684082 4904,"def append_dims(x, target_dims): """"""Appends dimensions to the end of a tensor until it has target_dims dimensions. From https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/utils.py"""""" dims_to_append = target_dims - x.ndim if: raise ValueError(f'input has {x.ndim} dims but target_dims is {target_dims}, which is less') return x[(...,) + (None,) * dims_to_append]",True,dims_to_append < 0,dims_to_append < 0,0.6502718925476074 4905,"def get_advanced_field(id): if: return id.split('|') for model, field in ((ServiceProfile, 'total_services'), (SubscriberProfile, 'total_subscribers')): if model.get_by_id(id): return (field, id) return 'total_services'",False,'|' in id,"isinstance(id, str)",0.67225182056427 4906,"def get_advanced_field(id): if '|' in id: return id.split('|') for model, field in ((ServiceProfile, 'total_services'), (SubscriberProfile, 'total_subscribers')): if: return (field, id) return 'total_services'",False,model.get_by_id(id),model == id,0.6478804349899292 4907,"def __init__(self, text_or_uri, tag=None): if: text_or_uri = '{%s}%s' % (text_or_uri, tag) self.text = text_or_uri",True,tag,tag,0.6726609468460083 4908,"def set_bn_fix(m): classname = m.__class__.__name__ if: for p in m.parameters(): p.requires_grad = False",True,classname.find('BatchNorm') != -1,classname.find('BatchNorm') != -1,0.6449122428894043 4909,"def __init__(self, verbose=False, array_check=None, **kwargs): self.verbose = verbose self._transformers = None self._learners = None super(BaseEval, self).__init__(**kwargs) if: warnings.warn('array checking is deprecated. The array_check argument will be removed in 0.2.4.', DeprecationWarning)",False,array_check is not None,array_check is not None and array_check is not None,0.6499980688095093 4910,"def _refresh(self, http_request): """"""Refreshes the access_token. This method first checks by reading the Storage object if available. If a refresh is still needed, it holds the Storage lock until the refresh is completed. Args: http_request: callable, a callable that matches the method signature of httplib2.Http.request, used to make the refresh request. Raises: AccessTokenRefreshError: When the refresh fails. """""" if: self._do_refresh_request(http_request) else: self.store.acquire_lock() try: new_cred = self.store.locked_get() if new_cred and (not new_cred.invalid) and (new_cred.access_token!= self.access_token): logger.info('Updated access_token read from Storage') self._updateFromCredential(new_cred) else: self._do_refresh_request(http_request) finally: self.store.release_lock()",True,not self.store,not self.store,0.6504783630371094 4911,"def _refresh(self, http_request): """"""Refreshes the access_token. This method first checks by reading the Storage object if available. If a refresh is still needed, it holds the Storage lock until the refresh is completed. Args: http_request: callable, a callable that matches the method signature of httplib2.Http.request, used to make the refresh request. Raises: AccessTokenRefreshError: When the refresh fails. """""" if not self.store: self._do_refresh_request(http_request) else: self.store.acquire_lock() try: new_cred = self.store.locked_get() if: logger.info('Updated access_token read from Storage') self._updateFromCredential(new_cred) else: self._do_refresh_request(http_request) finally: self.store.release_lock()",False,new_cred and (not new_cred.invalid) and (new_cred.access_token != self.access_token),new_cred is not None,0.6472350358963013 4912,"def _get_new_header(self, name): header = 'def %s(self' % name if: header += ', host' definition_info = functionutils.DefinitionInfo.read(self.pyfunction) others = definition_info.arguments_to_string(1) if others: header += ','+ others return header + '):'",False,self._is_host_used(),self.host,0.6531659364700317 4913,"def _get_new_header(self, name): header = 'def %s(self' % name if self._is_host_used(): header += ', host' definition_info = functionutils.DefinitionInfo.read(self.pyfunction) others = definition_info.arguments_to_string(1) if: header += ','+ others return header + '):'",True,others,others,0.6819846034049988 4914,"def span_recall(true_span, pred_span): overlap = span_overlap(true_span, pred_span) if: return 0 return span_len(overlap) / span_len(true_span)",True,overlap is None,overlap is None,0.6519275903701782 4915,"def forward(self, x): residual = x x = self.conv_dw(x) x = self.bn1(x) x = self.act1(x) x = self.se(x) x = self.conv_pw(x) x = self.bn2(x) x = self.act2(x) if: if self.drop_connect_rate > 0.0: x = drop_connect(x, self.training, self.drop_connect_rate) x += residual return x",False,self.has_residual,self.training,0.6457797288894653 4916,"def forward(self, x): residual = x x = self.conv_dw(x) x = self.bn1(x) x = self.act1(x) x = self.se(x) x = self.conv_pw(x) x = self.bn2(x) x = self.act2(x) if self.has_residual: if: x = drop_connect(x, self.training, self.drop_connect_rate) x += residual return x",False,self.drop_connect_rate > 0.0,self.drop_connect is not None,0.6463563442230225 4917,"def handle(self, ctx: Context, next: Handler) -> None: if: return next(ctx) if self.methods and ctx.method not in self.methods: return next(ctx) raw_path = ctx.path ctx.path = ctx.path[len(self.prefix):] try: self.inner.handle(ctx, next) except: ctx.path = raw_path raise",False,not ctx.path.startswith(self.prefix),self.no_res,0.6417078971862793 4918,"def handle(self, ctx: Context, next: Handler) -> None: if not ctx.path.startswith(self.prefix): return next(ctx) if: return next(ctx) raw_path = ctx.path ctx.path = ctx.path[len(self.prefix):] try: self.inner.handle(ctx, next) except: ctx.path = raw_path raise",False,self.methods and ctx.method not in self.methods,self.inner is None,0.6436207294464111 4919,"def get_plot_cmds(plot): """"""Helper function for running tests interactively with plots flashing to screen"""""" if: print('Close plot window, then press q to continue') return [SCRIPTNAME] return [SCRIPTNAME, '--dumpimages']",False,plot,plot.get_selected_window() is None,0.6948679089546204 4920,"def backslashreplace_decode_fn(err): raw_bytes = (err.object[i] for i in range(err.start, err.end)) if: raw_bytes = (ord(b) for b in raw_bytes) return (u''.join((u'\\x%x' % c for c in raw_bytes)), err.end)",False,sys.version_info[0] == 2,err.end == 255,0.6478993892669678 4921,"def query_worker(self, task, function): ips = function(task.domain, task.dns_type) if: g.domain_cache.set_ips(task.domain, ips, task.dns_type) task.put(ips)",False,len(ips),not task.dns_cache.contains_ips(ips),0.6571944952011108 4922,"def _raise_timeout(self, err, url, timeout_value): """"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""""" if: raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value) if hasattr(err, 'errno') and err.errno in _blocking_errnos: raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value) if 'timed out' in str(err) or 'did not complete (read)' in str(err): raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value)",True,"isinstance(err, SocketTimeout)","isinstance(err, SocketTimeout)",0.6491681337356567 4923,"def _raise_timeout(self, err, url, timeout_value): """"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""""" if isinstance(err, SocketTimeout): raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value) if: raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value) if 'timed out' in str(err) or 'did not complete (read)' in str(err): raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value)",True,"hasattr(err, 'errno') and err.errno in _blocking_errnos","hasattr(err, 'errno') and err.errno in _blocking_errnos",0.6462153196334839 4924,"def _raise_timeout(self, err, url, timeout_value): """"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""""" if isinstance(err, SocketTimeout): raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value) if hasattr(err, 'errno') and err.errno in _blocking_errnos: raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value) if: raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % timeout_value)",True,'timed out' in str(err) or 'did not complete (read)' in str(err),'timed out' in str(err) or 'did not complete (read)' in str(err),0.647000789642334 4925,"def chat_should_report(chat_id: Union[str, int]) -> bool: try: chat_setting = SESSION.query(ReportingChatSettings).get(str(chat_id)) if: return chat_setting.should_report return False finally: SESSION.close()",True,chat_setting,chat_setting,0.6592684984207153 4926,"@frustums_enabled.setter def frustums_enabled(self, enabled): """"""Setting this to True shows the frustums of active cameras."""""" if: return self._frustums_enabled = enabled for i in self.active_cameras.keys(): if enabled: self.cameras[i].show_frustum(self.cols, self.rows, self.billboard_distance) else: self.cameras[i].hide_frustum()",False,enabled == self._frustums_enabled,not enabled,0.6523323059082031 4927,"@frustums_enabled.setter def frustums_enabled(self, enabled): """"""Setting this to True shows the frustums of active cameras."""""" if enabled == self._frustums_enabled: return self._frustums_enabled = enabled for i in self.active_cameras.keys(): if: self.cameras[i].show_frustum(self.cols, self.rows, self.billboard_distance) else: self.cameras[i].hide_frustum()",True,enabled,enabled,0.6644771099090576 4928,"def loss(self, batch_inputs_dict: Dict[List, torch.Tensor], batch_data_samples: List[Det3DDataSample], **kwargs) -> List[Det3DDataSample]: """""" Args: batch_inputs_dict (dict): The model input dict which include 'points' and `imgs` keys. - points (list[torch.Tensor]): Point cloud of each sample. - imgs (torch.Tensor): Tensor of batch images, has shape (B, C, H,W) batch_data_samples (List[:obj:`Det3DDataSample`]): The Data Samples. It usually includes information such as `gt_instance_3d`,. Returns: dict[str, Tensor]: A dictionary of loss components. """""" batch_input_metas = [item.metainfo for item in batch_data_samples] img_feats, pts_feats = self.extract_feat(batch_inputs_dict, batch_input_metas) losses = dict() if: losses_pts = self.pts_bbox_head.loss(pts_feats, batch_data_samples, **kwargs) losses.update(losses_pts) if img_feats: losses_img = self.loss_imgs(img_feats, batch_data_samples) losses.update(losses_img) return losses",True,pts_feats,pts_feats,0.6492003798484802 4929,"def loss(self, batch_inputs_dict: Dict[List, torch.Tensor], batch_data_samples: List[Det3DDataSample], **kwargs) -> List[Det3DDataSample]: """""" Args: batch_inputs_dict (dict): The model input dict which include 'points' and `imgs` keys. - points (list[torch.Tensor]): Point cloud of each sample. - imgs (torch.Tensor): Tensor of batch images, has shape (B, C, H,W) batch_data_samples (List[:obj:`Det3DDataSample`]): The Data Samples. It usually includes information such as `gt_instance_3d`,. Returns: dict[str, Tensor]: A dictionary of loss components. """""" batch_input_metas = [item.metainfo for item in batch_data_samples] img_feats, pts_feats = self.extract_feat(batch_inputs_dict, batch_input_metas) losses = dict() if pts_feats: losses_pts = self.pts_bbox_head.loss(pts_feats, batch_data_samples, **kwargs) losses.update(losses_pts) if: losses_img = self.loss_imgs(img_feats, batch_data_samples) losses.update(losses_img) return losses",True,img_feats,img_feats,0.6507899761199951 4930,"def inversion(img, p): if: return ImageOps.invert(img) else: return img",False,np.random.sample() <= p,p.image_type == 'rot',0.6462348699569702 4931,"def outer(func): @partial_safe_wraps(func) def inner(*args, **inner_kwargs): if: return return func(*args, **inner_kwargs) return inner",True,any((inner_kwargs.get(key) is EMPTY for key in kwargs)),any((inner_kwargs.get(key) is EMPTY for key in kwargs)),0.6473367214202881 4932,"def find_all_logs(path_walk): """""" find all.log files from target dir """""" for root, ds, files in os.walk(path_walk): for file_name in files: if: full_path = os.path.join(root, file_name) yield (file_name, full_path)",False,"re.match('.*.log', file_name)",ds.exists(file_name),0.6401976346969604 4933,"def valueText(self, property): internProp = propertyToWrappedProperty().get(property, 0) if: if len(internProp.displayText()) > 0: return internProp.displayText() else: return internProp.valueText() else: return ''",True,internProp,internProp,0.6571474075317383 4934,"def valueText(self, property): internProp = propertyToWrappedProperty().get(property, 0) if internProp: if: return internProp.displayText() else: return internProp.valueText() else: return ''",False,len(internProp.displayText()) > 0,propertyIsDisplayable(),0.6453183889389038 4935,"def on_rex(self, rex: Union[str, re.Pattern], only_to_me=False, normalize=True) -> Callable: if: rex = re.compile(rex) def deco(func) -> Callable: sf = ServiceFunc(self, func, only_to_me, normalize) trigger.rex.add(rex, sf) return func return deco",False,"isinstance(rex, str)","not isinstance(rex, str)",0.6470366716384888 4936,"def __call__(self, img): h, w = img.shape[:2] scale = self.resize_short / min(h, w) h_resize = round(h * scale) w_resize = round(w * scale) if: h_resize = math.ceil(h_resize / self.size_divisor) * self.size_divisor w_resize = math.ceil(w_resize / self.size_divisor) * self.size_divisor img = self.resize(img, (w_resize, h_resize), self.interp_dict[self.interp]) return img",False,self.size_divisor is not None,self.size_divisor > 0.0,0.6535716652870178 4937,"def toolbar_button_clicked(self): if: return self.find_book_duplicates() forward = True mods = QApplication.keyboardModifiers() if mods & Qt.ControlModifier or mods & Qt.ShiftModifier: forward = False self.show_next_result(forward)",False,not self.duplicate_finder.has_results(),not self.is_unique(),0.6460052728652954 4938,"def toolbar_button_clicked(self): if not self.duplicate_finder.has_results(): return self.find_book_duplicates() forward = True mods = QApplication.keyboardModifiers() if: forward = False self.show_next_result(forward)",False,mods & Qt.ControlModifier or mods & Qt.ShiftModifier,mods == Qt.ControlModifier,0.6494919061660767 4939,"def is_rate_limited(self, ctx: ContextLike) -> bool: """"""Returns a boolean determining if the ratelimiter is ratelimited or not."""""" now = time.monotonic() if: if data.reset_at <= now: return False return data.remaining <= 0 return False",False,data := self._bucket_data.get(self.get_key(ctx)),"data := self.data.get(ctx, RATE_RATE_AGNOSTIC_KEY)",0.6491033434867859 4940,"def is_rate_limited(self, ctx: ContextLike) -> bool: """"""Returns a boolean determining if the ratelimiter is ratelimited or not."""""" now = time.monotonic() if (data := self._bucket_data.get(self.get_key(ctx))): if: return False return data.remaining <= 0 return False",False,data.reset_at <= now,data is None or data.remaining is None or now - data.remaining > self.rate_limit,0.6523634195327759 4941,"@property def num_faces(self): """"""Returns the number of faces in the mesh."""""" if: return self.face.size(self.__cat_dim__('face', self.face)) return None",True,self.face is not None,self.face is not None,0.6487091779708862 4942,"def iterative_fit(self, maxiter=3): """""" Perform an iterative two-stage procedure to estimate a GLS model. The model is assumed to have AR(p) errors, AR(p) parameters and regression coefficients are estimated simultaneously. Parameters ---------- maxiter : integer, optional the number of iterations """""" for i in range(maxiter - 1): if hasattr(self, 'pinv_wexog'): del self.pinv_wexog self.initialize() results = self.fit() self.rho, _ = yule_walker(results.resid, order=self.order, df=None) if: del self.pinv_wexog self.initialize() results = self.fit() return results",True,"hasattr(self, 'pinv_wexog')","hasattr(self, 'pinv_wexog')",0.6446259021759033 4943,"def iterative_fit(self, maxiter=3): """""" Perform an iterative two-stage procedure to estimate a GLS model. The model is assumed to have AR(p) errors, AR(p) parameters and regression coefficients are estimated simultaneously. Parameters ---------- maxiter : integer, optional the number of iterations """""" for i in range(maxiter - 1): if: del self.pinv_wexog self.initialize() results = self.fit() self.rho, _ = yule_walker(results.resid, order=self.order, df=None) if hasattr(self, 'pinv_wexog'): del self.pinv_wexog self.initialize() results = self.fit() return results",True,"hasattr(self, 'pinv_wexog')","hasattr(self, 'pinv_wexog')",0.6445518732070923 4944,"def merge(self, extracted_config): if: raise ConfigExtractionError(f'Cannot merge configurations from different families: {extracted_config.family} {self.family}') self.sources.update(extracted_config.sources) for key, values in extracted_config.values.items(): existing = self.values.get(key) if existing: existing.update(values) else: self.values[key] = values",True,extracted_config.family != self.family,extracted_config.family != self.family,0.6486971378326416 4945,"def merge(self, extracted_config): if extracted_config.family!= self.family: raise ConfigExtractionError(f'Cannot merge configurations from different families: {extracted_config.family} {self.family}') self.sources.update(extracted_config.sources) for key, values in extracted_config.values.items(): existing = self.values.get(key) if: existing.update(values) else: self.values[key] = values",True,existing,existing,0.6731289625167847 4946,"@property def request_uri(self): """"""Absolute path including the query string."""""" uri = self.path or '/' if: uri += '?' + self.query return uri",True,self.query is not None,self.query is not None,0.6530168056488037 4947,"def __init__(self, class_num, alpha=None, gamma=2, size_average=True): super(FocalLoss, self).__init__() if: self.alpha = torch.ones(class_num, 1) else: self.alpha = alpha self.gamma = gamma self.class_num = class_num self.size_average = size_average",True,alpha is None,alpha is None,0.6624733209609985 4948,"def get_timing_config(self, speech_len=None, lpz_len=None): """"""Obtain parameters to determine time stamps."""""" timing_cfg = {'index_duration': self.config.index_duration} if: if self.samples_to_frames_ratio is None: ratio = self.estimate_samples_to_frames_ratio() self.samples_to_frames_ratio = ratio index_duration = self.samples_to_frames_ratio / self.fs else: assert self.time_stamps == 'auto' samples_to_frames_ratio = speech_len / lpz_len index_duration = samples_to_frames_ratio / self.fs timing_cfg['index_duration'] = index_duration return timing_cfg",False,self.time_stamps == 'fixed',speech_len is None or lpz_len is None,0.6499360799789429 4949,"def get_timing_config(self, speech_len=None, lpz_len=None): """"""Obtain parameters to determine time stamps."""""" timing_cfg = {'index_duration': self.config.index_duration} if self.time_stamps == 'fixed': if: ratio = self.estimate_samples_to_frames_ratio() self.samples_to_frames_ratio = ratio index_duration = self.samples_to_frames_ratio / self.fs else: assert self.time_stamps == 'auto' samples_to_frames_ratio = speech_len / lpz_len index_duration = samples_to_frames_ratio / self.fs timing_cfg['index_duration'] = index_duration return timing_cfg",False,self.samples_to_frames_ratio is None,speech_len is None or lpz_len is None,0.6473634839057922 4950,"def readCellLine(line): line_list = [] for seg in line.split(): if: sp = seg.split('*') seg_arr = np.ones((int(sp[0]),)) * float(sp[1]) else: seg_arr = np.array([float(seg)], float) line_list.append(seg_arr) return np.concatenate(line_list)",False,'*' in seg,type(seg) == str,0.6607280969619751 4951,"def _interpret_inherit(self, body, defs, inherit_template, ns): if: raise TemplateError('You cannot use inheritance without passing in get_template', position=None, name=self.name) templ = self.get_template(inherit_template, self) self_ = TemplateObject(self.name) for name, value in iteritems(defs): setattr(self_, name, value) self_.body = body ns = ns.copy() ns['self'] = self_ return templ.substitute(ns)",False,not self.get_template,inherit_template is None,0.6547220945358276 4952,"@classmethod def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5): """""" A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """""" if: if len(value) == 3: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename, header_formatter=header_formatter) request_param.make_multipart(content_type=content_type) return request_param",True,"isinstance(value, tuple)","isinstance(value, tuple)",0.6432081460952759 4953,"@classmethod def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5): """""" A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """""" if isinstance(value, tuple): if: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename, header_formatter=header_formatter) request_param.make_multipart(content_type=content_type) return request_param",True,len(value) == 3,len(value) == 3,0.6493709087371826 4954,"def expand_blocks_cfg(stage_blocks_cfg: Union[BlocksCfg, Sequence[BlocksCfg]]) -> List[BlocksCfg]: if: stage_blocks_cfg = (stage_blocks_cfg,) block_cfgs = [] for i, cfg in enumerate(stage_blocks_cfg): block_cfgs += [replace(cfg, d=1) for _ in range(cfg.d)] return block_cfgs",True,"not isinstance(stage_blocks_cfg, Sequence)","not isinstance(stage_blocks_cfg, Sequence)",0.6497632265090942 4955,"def __init__(self, config: MPTConfig, mm_vision_tower=None, mm_hidden_size=None): super(LlavaMPTModel, self).__init__(config) if: self.vision_tower = [CLIPVisionModel.from_pretrained(config.mm_vision_tower)] if hasattr(config, 'use_mm_proj'): self.mm_projector = nn.Linear(config.mm_hidden_size, config.d_model)",False,"hasattr(config, 'mm_vision_tower')",mm_vision_tower is not None,0.6478519439697266 4956,"def __init__(self, config: MPTConfig, mm_vision_tower=None, mm_hidden_size=None): super(LlavaMPTModel, self).__init__(config) if hasattr(config,'mm_vision_tower'): self.vision_tower = [CLIPVisionModel.from_pretrained(config.mm_vision_tower)] if: self.mm_projector = nn.Linear(config.mm_hidden_size, config.d_model)",False,"hasattr(config, 'use_mm_proj')","hasattr(config, 'mm_hidden_size')",0.6451375484466553 4957,"@classmethod def collect(cls: Type[AwsResource], json: List[Json], builder: GraphBuilder) -> None: for js in json: if: builder.add_node(instance, js)",False,"instance := cls.from_api(js, builder)","instance := AwsResource.from_api(js, builder)",0.6472501754760742 4958,"def optimize(self, opt: th.optim.Optimizer): if: return self._optimize_fp16(opt) else: return self._optimize_normal(opt)",True,self.use_fp16,self.use_fp16,0.6529273986816406 4959,"def command(self, path, marker=None, begin=None, end=None): self.path = path start = (marker or MARKER_DEFAULT).format(mark=begin or MARKER_BEGIN_DEFAULT) end = (marker or MARKER_DEFAULT).format(mark=end or MARKER_END_DEFAULT) if: raise ValueError(f""delimiters for block must be different but found only '{start}'"") backstop = make_formatted_string_command('(find {0} -type f > /dev/null && echo {1} || echo {2} )', QuoteString(path), QuoteString(f'{EXISTS}{path}'), QuoteString(f'{MISSING}{path}')) cmd = make_formatted_string_command(f""awk \\'/{end}/{{{{f=0}}}} f; /{start}/{{{{f=1}}}}\\' {{0}} || {backstop}"", QuoteString(path)) return cmd",False,start == end,start != end,0.6732081174850464 4960,"def __init__(self, valueOf_='', mixedclass_=None, content_=None): if: self.mixedclass_ = MixedContainer else: self.mixedclass_ = mixedclass_ if content_ is None: self.content_ = [] else: self.content_ = content_",True,mixedclass_ is None,mixedclass_ is None,0.6566738486289978 4961,"def __init__(self, valueOf_='', mixedclass_=None, content_=None): if mixedclass_ is None: self.mixedclass_ = MixedContainer else: self.mixedclass_ = mixedclass_ if: self.content_ = [] else: self.content_ = content_",True,content_ is None,content_ is None,0.657039999961853 4962,"@skipUnlessDBFeature('supports_tablespaces') def test_tablespace_for_model(self): sql = sql_for_table(Scientist).lower() if: self.assertNumContains(sql, 'tbl_tbsp', 1) self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 1) else: self.assertNumContains(sql, 'tbl_tbsp', 2)",True,settings.DEFAULT_INDEX_TABLESPACE,settings.DEFAULT_INDEX_TABLESPACE,0.6489966511726379 4963,"def unquote_header_value(value, is_filename=False): """"""Unquotes a header value. (Reversal of :func:`quote_header_value`). This does not use the real unquoting but what browsers are actually using for quoting. :param value: the header value to unquote. """""" if: value = value[1:-1] if not is_filename or value[:2]!= '\\\\': return value.replace('\\\\', '\\').replace('\\""', '""') return value",True,"value and value[0] == value[-1] == '""'","value and value[0] == value[-1] == '""'",0.6461827754974365 4964,"def unquote_header_value(value, is_filename=False): """"""Unquotes a header value. (Reversal of :func:`quote_header_value`). This does not use the real unquoting but what browsers are actually using for quoting. :param value: the header value to unquote. """""" if value and value[0] == value[-1] == '""': value = value[1:-1] if: return value.replace('\\\\', '\\').replace('\\""', '""') return value",True,not is_filename or value[:2] != '\\\\',not is_filename or value[:2] != '\\\\',0.6455633640289307 4965,"def _to_unicode(obj): if: obj = unicode(obj, encoding='ascii', errors='strict') return obj",False,"isinstance(obj, str) and sys.version_info < (3,)",sys.version_info[0] < 3,0.6442880630493164 4966,"def detect_version(i): r = i['automation'].parse_version({'match_text': 'Python\\s*([\\d.]+)', 'group_number': 1, 'env_key': 'CM_PYTHON_VERSION', 'which_env': i['env']}) if: return r version = r['version'] print(i['recursion_spaces'] +' Detected version: {}'.format(version)) return {'return': 0,'version': version}",True,r['return'] > 0,r['return'] > 0,0.6558247804641724 4967,"def __init__(self, minimizing=False): self.minimizing = minimizing if: self.time = 10000000000.0 else: self.time = 0 self.ncalls = 0",False,self.minimizing,minimizing,0.6558648347854614 4968,"def did_change_hemoglobin(self, botengine): """""" Did the hemoglobin change? (Protein in red blood cell that carries oxygen - low or high oxygenated red blood cell count - tells if one is anemic or not) :param botengine: :return: True if hemoglobin changed """""" if: return HealthDevice.MEASUREMENT_NAME_HEMOGLOBIN in self.last_updated_params",True,HealthDevice.MEASUREMENT_NAME_HEMOGLOBIN in self.measurements,HealthDevice.MEASUREMENT_NAME_HEMOGLOBIN in self.measurements,0.6491186022758484 4969,"@metadata() def sound(self): """"""Return ""Yes"" if sound channels are present, otherwise ""No""."""""" if: raise SkipElementException() if self._tracks[0].count_of_audio_streams is not None and int(self._tracks[0].count_of_audio_streams) > 0: return 'Yes' if self._tracks[0].audio_count is not None and int(self._tracks[0].audio_count) > 0: return 'Yes' return 'No'",False,self.stream_type() not in ['video'],self.stream_type() not in ['audio'],0.6466168165206909 4970,"@metadata() def sound(self): """"""Return ""Yes"" if sound channels are present, otherwise ""No""."""""" if self.stream_type() not in ['video']: raise SkipElementException() if: return 'Yes' if self._tracks[0].audio_count is not None and int(self._tracks[0].audio_count) > 0: return 'Yes' return 'No'",False,self._tracks[0].count_of_audio_streams is not None and int(self._tracks[0].count_of_audio_streams) > 0,len(self._tracks) == 0,0.6482785940170288 4971,"@metadata() def sound(self): """"""Return ""Yes"" if sound channels are present, otherwise ""No""."""""" if self.stream_type() not in ['video']: raise SkipElementException() if self._tracks[0].count_of_audio_streams is not None and int(self._tracks[0].count_of_audio_streams) > 0: return 'Yes' if: return 'Yes' return 'No'",False,self._tracks[0].audio_count is not None and int(self._tracks[0].audio_count) > 0,self._tracks[0].count_of_audio_streams is not None and len(self._tracks[0].count_of_audio_streams) > 0,0.6460508704185486 4972,"def synchronize(): """""" Helper function to synchronize (barrier) among all processes when using distributed training """""" if: return if not dist.is_initialized(): return world_size = dist.get_world_size() if world_size == 1: return dist.barrier()",True,not dist.is_available(),not dist.is_available(),0.6511260867118835 4973,"def synchronize(): """""" Helper function to synchronize (barrier) among all processes when using distributed training """""" if not dist.is_available(): return if: return world_size = dist.get_world_size() if world_size == 1: return dist.barrier()",True,not dist.is_initialized(),not dist.is_initialized(),0.6499569416046143 4974,"def synchronize(): """""" Helper function to synchronize (barrier) among all processes when using distributed training """""" if not dist.is_available(): return if not dist.is_initialized(): return world_size = dist.get_world_size() if: return dist.barrier()",True,world_size == 1,world_size == 1,0.6522853970527649 4975,"def _remove_extra_items(self): if: del self.undo_list[0:len(self.undo_list) - self.max_undos]",True,len(self.undo_list) > self.max_undos,len(self.undo_list) > self.max_undos,0.6466555595397949 4976,"@api_versions.wraps('2.19', '2.89') def update(self, name=None, description=None): """""" Update attributes of this server. :param name: Update the server's name. :param description: Update the server's description. :returns: :class:`Server` """""" update_kwargs = {'name': name} if: update_kwargs['description'] = description return self.manager.update(self, **update_kwargs)",False,description is not None,description,0.6540963649749756 4977,"def on_reviewer_did_answer(reviewer, card, ease): reviewer.web.eval(""if (document.getElementById('siac-link-modal')) { $('#siac-link-modal').remove(); }"") if: return review_interruptor()",False,"not get_config_value_or_default('mix_reviews_and_reading', False) or state.rr_mix_disabled",not ease or card.status != 'draft',0.6453194618225098 4978,"@classmethod def parse_obj(cls: Type[BaseModel], obj: Any) -> Response: obj['data'] = obj.get('data') or obj.get('data_') if: del obj['data_'] return super().parse_obj(obj)",True,'data_' in obj,'data_' in obj,0.6656749248504639 4979,"def create_date_string(date: datetime.datetime) -> Optional[str]: """""" Convert datetime to betfair date string. """""" if: return date.strftime(BETFAIR_DATE_FORMAT)",True,date,date,0.674219012260437 4980,"def init_weights(self, pretrained=None): """"""Initialize the weights of model. Args: pretrained (str, optional): Path to pre-trained weights. Default: None. """""" if: print_log('load model from: {}'.format(pretrained), logger='root') self.backbone.init_weights(pretrained=pretrained) self.neck.init_weights(init_linear='kaiming')",True,pretrained is not None,pretrained is not None,0.6540039777755737 4981,"def applyRule(self, term): """"""applies self's rule to term. If the start of term lines within the span of self, then term may be modified by self"""""" if: return if self.__scope[0] <= term.getStart() <= self.__scope[1]: term.updateModifiedBy(self)",False,not self.getRule() or self.getRule() == 'terminate',not term,0.6528807282447815 4982,"def applyRule(self, term): """"""applies self's rule to term. If the start of term lines within the span of self, then term may be modified by self"""""" if not self.getRule() or self.getRule() == 'terminate': return if: term.updateModifiedBy(self)",False,self.__scope[0] <= term.getStart() <= self.__scope[1],self.start <= term <= self.end,0.6483572721481323 4983,"def get_ldap_username(self): extended_request = ldapasn1_impacket.ExtendedRequest() extended_request['requestName'] = '1.3.6.1.4.1.4203.1.11.3' response = self.ldapConnection.sendReceive(extended_request) for message in response: search_result = message['protocolOp'].getComponent() if: response_value = search_result['responseValue'] if response_value.hasValue(): value = response_value.asOctets().decode(response_value.encoding)[2:] return value.split('\\')[1] return ''",False,search_result['resultCode'] == ldapasn1_impacket.ResultCode('success'),search_result is not None,0.6445242166519165 4984,"def get_ldap_username(self): extended_request = ldapasn1_impacket.ExtendedRequest() extended_request['requestName'] = '1.3.6.1.4.1.4203.1.11.3' response = self.ldapConnection.sendReceive(extended_request) for message in response: search_result = message['protocolOp'].getComponent() if search_result['resultCode'] == ldapasn1_impacket.ResultCode('success'): response_value = search_result['responseValue'] if: value = response_value.asOctets().decode(response_value.encoding)[2:] return value.split('\\')[1] return ''",False,response_value.hasValue(),response_value and response_value.encoding,0.6509354114532471 4985,"def step(self, action, step): from rl_studio.envs.gazebo.f1.models.step import StepFollowLane if: return StepFollowLane.step_followlane_state_image_actions_discretes(self, action, step) else: return StepFollowLane.step_followlane_state_sp_actions_discretes(self, action, step)",False,self.state_space == 'image',self.action_space == 'image',0.6462507247924805 4986,"@property def content(self): """"""Content of the response, in bytes."""""" if: if self._content_consumed: raise RuntimeError('The content for this response was already consumed') if self.status_code == 0 or self.raw is None: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() self._content_consumed = True return self._content",True,self._content is False,self._content is False,0.6552231311798096 4987,"@property def content(self): """"""Content of the response, in bytes."""""" if self._content is False: if: raise RuntimeError('The content for this response was already consumed') if self.status_code == 0 or self.raw is None: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() self._content_consumed = True return self._content",True,self._content_consumed,self._content_consumed,0.6564973592758179 4988,"@property def content(self): """"""Content of the response, in bytes."""""" if self._content is False: if self._content_consumed: raise RuntimeError('The content for this response was already consumed') if: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() self._content_consumed = True return self._content",False,self.status_code == 0 or self.raw is None,self.status_code == 0,0.6442666053771973 4989,"def post_build(self, p, pay): if: l = len(p) - 4 p = p[:2] + struct.pack('!H', l) + p[4:] return p + pay",True,self.len is None,self.len is None,0.6532700657844543 4990,"def sup_loss(self, output, pooled_rep, images, targets): if: outputs = torch.cat([output.clone().unsqueeze(1), output.clone().unsqueeze(1)], dim=1) else: outputs = torch.cat([pooled_rep.clone().unsqueeze(1), pooled_rep.clone().unsqueeze(1)], dim=1) sup_loss = self.sup_con(outputs, targets, args=self.args) return sup_loss",False,self.args.sup_head,images,0.6452671885490417 4991,"def bn_with_conv_layer(self, input_index, output_shape, kernel_size, stride, activation, index=None): if: index = input_index + 1 bn_index = self.bn_with_scale_layer(input_index, activation) conv_index = self.conv_layer(bn_index, output_shape, kernel_size, stride, None) return conv_index",True,index == None,index == None,0.6579506397247314 4992,"def _prepare_proxy(self, conn): """""" Establish tunnel connection early, because otherwise httplib would improperly set Host: header to proxy's IP:port. """""" try: set_tunnel = conn.set_tunnel except AttributeError: set_tunnel = conn._set_tunnel if: set_tunnel(self.host, self.port) else: set_tunnel(self.host, self.port, self.proxy_headers) conn.connect()",False,"sys.version_info <= (2, 6, 4) and (not self.proxy_headers)",self.proxy_headers is None,0.6429418325424194 4993,"def __eq__(self, other): """"""Returns true if both objects are equal"""""" if: return False return self.__dict__ == other.__dict__",False,"not isinstance(other, V1alpha1WorkflowTemplateCreateRequest)","not isinstance(other, V1alpha1WorkflowSpec)",0.6479641199111938 4994,"def clean2(html, strip=False, remove_non_ascii=False): cleaned = unescape(html) if: cleaned = cleanUnicode(cleaned) if remove_non_ascii: return re.sub('[^\\x00-\\x7F]+','', cleaned) else: return cleaned",False,strip == True,strip,0.6570212841033936 4995,"def clean2(html, strip=False, remove_non_ascii=False): cleaned = unescape(html) if strip == True: cleaned = cleanUnicode(cleaned) if: return re.sub('[^\\x00-\\x7F]+','', cleaned) else: return cleaned",True,remove_non_ascii,remove_non_ascii,0.6498267650604248 4996,"def empty_trash(): """"""Empty trash folder. """""" text = '[tl] Empty the trash' if: print('linux: %s' % text) os.system('rm -rf ~/.local/share/Trash/*') elif _platform == 'darwin': print('OS X: %s' % text) os.system('sudo rm -rf ~/.Trash/*') elif _platform == 'win32': print('Windows: %s' % text) try: os.system('rd /s c:\\$Recycle.Bin') except: pass try: os.system('rd /s c:\recycler') except: pass else: print(_platform)",False,_platform == 'linux' or _platform == 'linux2',_platform == 'linux',0.6463195085525513 4997,"def empty_trash(): """"""Empty trash folder. """""" text = '[tl] Empty the trash' if _platform == 'linux' or _platform == 'linux2': print('linux: %s' % text) os.system('rm -rf ~/.local/share/Trash/*') elif: print('OS X: %s' % text) os.system('sudo rm -rf ~/.Trash/*') elif _platform == 'win32': print('Windows: %s' % text) try: os.system('rd /s c:\\$Recycle.Bin') except: pass try: os.system('rd /s c:\recycler') except: pass else: print(_platform)",True,_platform == 'darwin',_platform == 'darwin',0.6493530869483948 4998,"def empty_trash(): """"""Empty trash folder. """""" text = '[tl] Empty the trash' if _platform == 'linux' or _platform == 'linux2': print('linux: %s' % text) os.system('rm -rf ~/.local/share/Trash/*') elif _platform == 'darwin': print('OS X: %s' % text) os.system('sudo rm -rf ~/.Trash/*') elif: print('Windows: %s' % text) try: os.system('rd /s c:\\$Recycle.Bin') except: pass try: os.system('rd /s c:\recycler') except: pass else: print(_platform)",True,_platform == 'win32',_platform == 'win32',0.6504746079444885 4999,"def CalculateNewSplitSize(self): """""" Calculates the size of the new split. """""" tab_ctrl_count = 0 all_panes = self._mgr.GetAllPanes() for pane in all_panes: if pane.name == 'dummy': continue tab_ctrl_count += 1 if: new_split_size = self.GetClientSize() new_split_size.x /= 2 new_split_size.y /= 2 else: new_split_size = wx.Size(180, 180) return new_split_size",False,tab_ctrl_count < 2,tab_ctrl_count == 3,0.6495382785797119 5000,"def CalculateNewSplitSize(self): """""" Calculates the size of the new split. """""" tab_ctrl_count = 0 all_panes = self._mgr.GetAllPanes() for pane in all_panes: if: continue tab_ctrl_count += 1 if tab_ctrl_count < 2: new_split_size = self.GetClientSize() new_split_size.x /= 2 new_split_size.y /= 2 else: new_split_size = wx.Size(180, 180) return new_split_size",False,pane.name == 'dummy',pane.Name.startswith('panel'),0.655375599861145 5001,"def load_url_dist(url, model_dir=None): """"""In distributed setting, this function only download checkpoint at local rank 0."""""" rank, world_size = get_dist_info() rank = int(os.environ.get('LOCAL_RANK', rank)) if: checkpoint = model_zoo.load_url(url, model_dir=model_dir) if world_size > 1: torch.distributed.barrier() if rank > 0: checkpoint = model_zoo.load_url(url, model_dir=model_dir) return checkpoint",False,rank == 0,rank > 0,0.6677994728088379 5002,"def load_url_dist(url, model_dir=None): """"""In distributed setting, this function only download checkpoint at local rank 0."""""" rank, world_size = get_dist_info() rank = int(os.environ.get('LOCAL_RANK', rank)) if rank == 0: checkpoint = model_zoo.load_url(url, model_dir=model_dir) if: torch.distributed.barrier() if rank > 0: checkpoint = model_zoo.load_url(url, model_dir=model_dir) return checkpoint",False,world_size > 1,world_size == 1,0.6483353972434998 5003,"def load_url_dist(url, model_dir=None): """"""In distributed setting, this function only download checkpoint at local rank 0."""""" rank, world_size = get_dist_info() rank = int(os.environ.get('LOCAL_RANK', rank)) if rank == 0: checkpoint = model_zoo.load_url(url, model_dir=model_dir) if world_size > 1: torch.distributed.barrier() if: checkpoint = model_zoo.load_url(url, model_dir=model_dir) return checkpoint",False,rank > 0,world_size > 1,0.6653300523757935 5004,"def __getattr__(self, name): if: return _NormalAttr() if name in self.attrs: return _OtherAttr(name, True) raise AttributeError(name)",True,name == 'normal',name == 'normal',0.6597249507904053 5005,"def __getattr__(self, name): if name == 'normal': return _NormalAttr() if: return _OtherAttr(name, True) raise AttributeError(name)",False,name in self.attrs,name == 'other',0.653833270072937 5006,"def get_type_hints(obj, globalns=None, localns=None, include_extras=False): """"""Return type hints for an object. This is often the same as obj.__annotations__, but it handles forward references encoded as string literals, adds Optional[t] if a default value equal to None is set and recursively replaces all 'Annotated[T,...]' with 'T' (unless 'include_extras=True'). The argument may be a module, class, method, or function. The annotations are returned as a dictionary. For classes, annotations include also inherited members. TypeError is raised if the argument is not of a type that can contain annotations, and an empty dictionary is returned if no annotations are present. BEWARE -- the behavior of globalns and localns is counterintuitive (unless you are familiar with how eval() and exec() work). The search order is locals first, then globals. - If no dict arguments are passed, an attempt is made to use the globals from obj (or the respective module's globals for classes), and these are also used as the locals. If the object does not appear to have globals, an empty dictionary is used. - If one dict argument is passed, it is used for both globals and locals. - If two dict arguments are passed, they specify globals and locals, respectively. """""" hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) if: return hint return {k: _strip_annotations(t) for k, t in hint.items()}",True,include_extras,include_extras,0.656079888343811 5007,"def __call__(self, name, typeList, typeDict=None): if: typeList = copy.deepcopy(typeList) typeList[0] = 'pointer' return self.f(name, typeList, typeDict)",False,len(typeList) and typeList[0] == 'pointer64',typeList is not None,0.6488806009292603 5008,"def __eq__(self, other): if: return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if my_val!= other_val: return False return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.64579176902771 5009,"def __eq__(self, other): if not isinstance(other, self.__class__): return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if: return False return True",True,my_val != other_val,my_val != other_val,0.6494433879852295 5010,"def convert_bed_to_vcf(self): last_contig = None last_start = None batch = [] for line in self.bed: if: continue record = CNVRecord(line) if last_contig == record.contig and last_start == record.start: batch.append(record) else: self.collapse_and_write_batch(batch=batch) last_contig = record.contig last_start = record.start batch = [record] self.collapse_and_write_batch(batch=batch)",False,line.startswith('#'),not line,0.6407938003540039 5011,"def convert_bed_to_vcf(self): last_contig = None last_start = None batch = [] for line in self.bed: if line.startswith('#'): continue record = CNVRecord(line) if: batch.append(record) else: self.collapse_and_write_batch(batch=batch) last_contig = record.contig last_start = record.start batch = [record] self.collapse_and_write_batch(batch=batch)",False,last_contig == record.contig and last_start == record.start,last_contig is None or last_start is None,0.6467423439025879 5012,"@expression.setter def expression(self, value): fallback_val = self._expression self._expression = value if: self._expression = fallback_val if self._app: self._app.logger.error('Failed to update property Expression.')",False,not self._update_var(),fallback_val is not None,0.651776909828186 5013,"@expression.setter def expression(self, value): fallback_val = self._expression self._expression = value if not self._update_var(): self._expression = fallback_val if: self._app.logger.error('Failed to update property Expression.')",False,self._app,fallback_val is None,0.6641743183135986 5014,"def unmap(data, count, inds, fill=0): """"""Unmap a subset of item (data) back to the original set of items (of size count)"""""" if: ret = data.new_full((count,), fill) ret[inds.type(torch.bool)] = data else: new_size = (count,) + data.size()[1:] ret = data.new_full(new_size, fill) ret[inds.type(torch.bool), :] = data return ret",True,data.dim() == 1,data.dim() == 1,0.6512020826339722 5015,"def run(self): if: self.multi_version = True self.uninstall_link() else: self.install_for_development() self.warn_deprecated_options()",True,self.uninstall,self.uninstall,0.6695848703384399 5016,"def add_part_to_set(name, x, y, ID=None): model.GetSetList().CreatePartSet(name) model.GetSetList().GetSet(name).SetMatcherType('Selection') model.GetSetList().GetSet(name).ClearParts() sel = model.GetSetList().GetSet(name).GetSelection() if: sel.SelectPartByPosition(x, y, 0) else: sel.SelectPart(ID) model.GetSetList().GetSet(name).AddSelected(sel)",True,ID is None,ID is None,0.6533036231994629 5017,"def update_keys_to_ignore(self, config, del_keys_to_ignore): """"""Remove some keys from ignore list"""""" if: self._keys_to_ignore_on_save = [k for k in self._keys_to_ignore_on_save if k not in del_keys_to_ignore] self._keys_to_ignore_on_load_missing = [k for k in self._keys_to_ignore_on_load_missing if k not in del_keys_to_ignore]",False,not config.tie_word_embeddings,"config.get('ignore_on_save', False)",0.6424753665924072 5018,"def mousePressEvent(self, event: QMouseEvent) -> None: if: self.begin_drag_mode(DragMode.VIDEO_POSITION, event) elif event.button() == Qt.MouseButton.MiddleButton: self.begin_drag_mode(DragMode.VIDEO_POSITION, event) self.end_drag_mode()",True,event.button() == Qt.MouseButton.LeftButton,event.button() == Qt.MouseButton.LeftButton,0.6507177352905273 5019,"def mousePressEvent(self, event: QMouseEvent) -> None: if event.button() == Qt.MouseButton.LeftButton: self.begin_drag_mode(DragMode.VIDEO_POSITION, event) elif: self.begin_drag_mode(DragMode.VIDEO_POSITION, event) self.end_drag_mode()",True,event.button() == Qt.MouseButton.MiddleButton,event.button() == Qt.MouseButton.MiddleButton,0.6523803472518921 5020,"def create_action(self, name, callback, shortcuts=None): action = Gio.SimpleAction.new(name=name, parameter_type=None) action.connect('activate', callback) self.add_action(action=action) if: self.set_accels_for_action(detailed_action_name=f'app.{name}', accels=shortcuts)",True,shortcuts,shortcuts,0.6661825776100159 5021,"def tearDown(self): g.log.info('Starting to Unmount Volume and Cleanup Volume') ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts) if: raise ExecutionError('Failed to umount the vol & cleanup Volume') g.log.info('Successful in umounting the volume and Cleanup') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.660548746585846 5022,"def decorator(func): def inner(request, *args, **kwargs): if: return HttpResponseNotAllowed(request_method_list) return func(request, *args, **kwargs) return wraps(func)(inner)",True,request.method not in request_method_list,request.method not in request_method_list,0.6502691507339478 5023,"def get_country_name(self, ip_address, debug_message=None): """""" Find country name for a given IP address. The ip address might not provide a country name, so return UNKNOWN_COUNTRY in those cases. """""" try: name = self.geoip.country_name_by_addr(ip_address) except Exception: if debug_message: log.exception(""Encountered exception getting country name for ip_address '%s': %s."", ip_address, debug_message) name = UNKNOWN_COUNTRY if: if debug_message: log.error(""No country name found for ip_address '%s': %s."", ip_address, debug_message) name = UNKNOWN_COUNTRY return name",False,name is None or len(name.strip()) <= 0,name is None,0.6438824534416199 5024,"def get_country_name(self, ip_address, debug_message=None): """""" Find country name for a given IP address. The ip address might not provide a country name, so return UNKNOWN_COUNTRY in those cases. """""" try: name = self.geoip.country_name_by_addr(ip_address) except Exception: if debug_message: log.exception(""Encountered exception getting country name for ip_address '%s': %s."", ip_address, debug_message) name = UNKNOWN_COUNTRY if name is None or len(name.strip()) <= 0: if: log.error(""No country name found for ip_address '%s': %s."", ip_address, debug_message) name = UNKNOWN_COUNTRY return name",True,debug_message,debug_message,0.6556711196899414 5025,"def get_country_name(self, ip_address, debug_message=None): """""" Find country name for a given IP address. The ip address might not provide a country name, so return UNKNOWN_COUNTRY in those cases. """""" try: name = self.geoip.country_name_by_addr(ip_address) except Exception: if: log.exception(""Encountered exception getting country name for ip_address '%s': %s."", ip_address, debug_message) name = UNKNOWN_COUNTRY if name is None or len(name.strip()) <= 0: if debug_message: log.error(""No country name found for ip_address '%s': %s."", ip_address, debug_message) name = UNKNOWN_COUNTRY return name",True,debug_message,debug_message,0.6564831733703613 5026,"def unsecured_rejoinreq(panid): if: row_data = {'pkt_time': '{:.6f}'.format(config.row['pkt_time']), 'description': 'Unsecured rejoin request'} config.db.insert(Table.EVENTS.value, row_data)",False,config.row['error_msg'] is None and config.row['mac_frametype'] == '0b001: MAC Data' and (config.row['mac_dstpanid'] == panid) and (config.row['nwk_frametype'] == '0b01: NWK Command') and (config.row['nwk_security'] == '0b0: NWK Security Disabled') and (config.row['nwk_cmd_id'] == '0x06: NWK Rejoin Request'),config.row['status'] == Table.STATUS_REJOIN_REQ and config.row['status'] == Table.STATUS_UNsecured,0.6665171384811401 5027,"def load_vocab(vocab_file): """"""Loads a vocabulary file into a dictionary."""""" vocab = collections.OrderedDict() with open(vocab_file, encoding='utf8') as fin: for num, line in enumerate(fin): items = convert_to_unicode(line.strip()).split('\t') if: break token = items[0] index = items[1] if len(items) == 2 else num token = token.strip() vocab[token] = int(index) return vocab",True,len(items) > 2,len(items) > 2,0.6483539342880249 5028,"def is_fully_bayesian(model: Model) -> bool: """"""Check if at least one model is a SaasFullyBayesianSingleTaskGP Args: model: A BoTorch model (may be a `ModelList` or `ModelListGP`) d: The dimension of the tensor to index. Returns: True if at least one model is a `SaasFullyBayesianSingleTaskGP` """""" from botorch.models import ModelList from botorch.models.fully_bayesian import SaasFullyBayesianSingleTaskGP from botorch.models.fully_bayesian_multitask import SaasFullyBayesianMultiTaskGP full_bayesian_model_cls = (SaasFullyBayesianSingleTaskGP, SaasFullyBayesianMultiTaskGP) if: return True elif isinstance(model, ModelList): for m in model.models: if is_fully_bayesian(m): return True return False",False,"isinstance(model, full_bayesian_model_cls) or getattr(model, 'is_fully_bayesian', False)","isinstance(model, full_bayesian_model_cls)",0.6462864875793457 5029,"def is_fully_bayesian(model: Model) -> bool: """"""Check if at least one model is a SaasFullyBayesianSingleTaskGP Args: model: A BoTorch model (may be a `ModelList` or `ModelListGP`) d: The dimension of the tensor to index. Returns: True if at least one model is a `SaasFullyBayesianSingleTaskGP` """""" from botorch.models import ModelList from botorch.models.fully_bayesian import SaasFullyBayesianSingleTaskGP from botorch.models.fully_bayesian_multitask import SaasFullyBayesianMultiTaskGP full_bayesian_model_cls = (SaasFullyBayesianSingleTaskGP, SaasFullyBayesianMultiTaskGP) if isinstance(model, full_bayesian_model_cls) or getattr(model, 'is_fully_bayesian', False): return True elif: for m in model.models: if is_fully_bayesian(m): return True return False",True,"isinstance(model, ModelList)","isinstance(model, ModelList)",0.6494989991188049 5030,"def is_fully_bayesian(model: Model) -> bool: """"""Check if at least one model is a SaasFullyBayesianSingleTaskGP Args: model: A BoTorch model (may be a `ModelList` or `ModelListGP`) d: The dimension of the tensor to index. Returns: True if at least one model is a `SaasFullyBayesianSingleTaskGP` """""" from botorch.models import ModelList from botorch.models.fully_bayesian import SaasFullyBayesianSingleTaskGP from botorch.models.fully_bayesian_multitask import SaasFullyBayesianMultiTaskGP full_bayesian_model_cls = (SaasFullyBayesianSingleTaskGP, SaasFullyBayesianMultiTaskGP) if isinstance(model, full_bayesian_model_cls) or getattr(model, 'is_fully_bayesian', False): return True elif isinstance(model, ModelList): for m in model.models: if: return True return False",False,is_fully_bayesian(m),"isinstance(m, SasasMultiTaskGP)",0.6461147665977478 5031,"def check_initialized(inst): """"""Check if a ParallelProcessing instance is initialized properly."""""" if: msg = ""ParallelProcessing is not initialized. Call 'initialize' before calling 'fit'."" raise ParallelProcessingError(msg) if getattr(inst, '__fitted__', None): if inst.layers.raise_on_exception: raise ParallelProcessingError(""This instance is already fitted and its parallel processing jobs has not been terminated. To refit instance, call 'terminate' before calling 'fit'."") else: warnings.warn(""This instance is already fitted and its parallel processing job has not been terminated. Will refit using previous job's cache."", ParallelProcessingWarning)",False,not inst.__initialized__,not inst.is_initialized,0.6533555388450623 5032,"def check_initialized(inst): """"""Check if a ParallelProcessing instance is initialized properly."""""" if not inst.__initialized__: msg = ""ParallelProcessing is not initialized. Call 'initialize' before calling 'fit'."" raise ParallelProcessingError(msg) if: if inst.layers.raise_on_exception: raise ParallelProcessingError(""This instance is already fitted and its parallel processing jobs has not been terminated. To refit instance, call 'terminate' before calling 'fit'."") else: warnings.warn(""This instance is already fitted and its parallel processing job has not been terminated. Will refit using previous job's cache."", ParallelProcessingWarning)",False,"getattr(inst, '__fitted__', None)",not inst.is_fitted() or inst.is_parallel_job_terminated(),0.6460248231887817 5033,"def check_initialized(inst): """"""Check if a ParallelProcessing instance is initialized properly."""""" if not inst.__initialized__: msg = ""ParallelProcessing is not initialized. Call 'initialize' before calling 'fit'."" raise ParallelProcessingError(msg) if getattr(inst, '__fitted__', None): if: raise ParallelProcessingError(""This instance is already fitted and its parallel processing jobs has not been terminated. To refit instance, call 'terminate' before calling 'fit'."") else: warnings.warn(""This instance is already fitted and its parallel processing job has not been terminated. Will refit using previous job's cache."", ParallelProcessingWarning)",False,inst.layers.raise_on_exception,not inst.is_parallel_job_terminated,0.6454683542251587 5034,"@record def test_get_account_information_with_blob_sas(self): if: return blob_name = self._create_block_blob() token = self.bs.generate_blob_shared_access_signature(self.container_name, blob_name, permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=1)) bs_with_sas = BlockBlobService(account_name=self.settings.STORAGE_ACCOUNT_NAME, sas_token=token, protocol=self.settings.PROTOCOL) info = bs_with_sas.get_blob_account_information(self.container_name, blob_name) self.assertIsNotNone(info.sku_name) self.assertIsNotNone(info.account_kind)",True,TestMode.need_recording_file(self.test_mode),TestMode.need_recording_file(self.test_mode),0.643768846988678 5035,"def register_repr(self, registers): regs = [] for i in range(0, 32): if: regs.append(i) return '+'.join((f'REG{b}' for b in regs))",False,registers & 1 << i,registers[i] in self._regs,0.6733225584030151 5036,"def close(self): if: return for meter in self.meters: meter.finalize() for writer in self.writers: writer.close() self.closed = True",True,self.closed,self.closed,0.6597498655319214 5037,"def setTreeStyle(self, mode): if: sysconf.set('package-tree', mode) self.refreshPackages()",True,mode != sysconf.get('package-tree'),mode != sysconf.get('package-tree'),0.6423446536064148 5038,"def __new__(cls, func): if: raise RuntimeError('IterI requires greenlet support') stream = object.__new__(cls) stream._parent = greenlet.getcurrent() stream._buffer = [] stream.closed = False stream.pos = 0 def run(): func(stream) stream.flush() g = greenlet.greenlet(run, stream._parent) while 1: rv = g.switch() if not rv: return yield rv[0]",True,greenlet is None,greenlet is None,0.6633991003036499 5039,"def __new__(cls, func): if greenlet is None: raise RuntimeError('IterI requires greenlet support') stream = object.__new__(cls) stream._parent = greenlet.getcurrent() stream._buffer = [] stream.closed = False stream.pos = 0 def run(): func(stream) stream.flush() g = greenlet.greenlet(run, stream._parent) while 1: rv = g.switch() if: return yield rv[0]",True,not rv,not rv,0.6556086540222168 5040,"def _maybe_add_bearer_token(self, headers: dict[str, str] | None): if: if headers is None: headers = {} headers['Authorization'] = f'Bearer {self.bearer_token}' return headers",True,self.bearer_token,self.bearer_token,0.6514153480529785 5041,"def _maybe_add_bearer_token(self, headers: dict[str, str] | None): if self.bearer_token: if: headers = {} headers['Authorization'] = f'Bearer {self.bearer_token}' return headers",True,headers is None,headers is None,0.6584972143173218 5042,"def handle_testlist_gexp(self, gexp_node): if: return self.handle_genexp(gexp_node) return self.handle_testlist(gexp_node)",False,len(gexp_node.children) > 1 and gexp_node.children[1].type == syms.comp_for,self.mode == 'genexp',0.6459699869155884 5043,"def forward(self, x): """"""Compute the kernel values Args: x (torch.tensor): e-e distance Nbatch, Nele_pairs Returns: torch.tensor: values of the f_ij """""" nbatch, npairs = x.shape x = x.reshape(-1, 1) x = self.fc1(x) x = self.nl_func(x) x = self.fc2(x) x = self.nl_func(x) x = self.fc3(x) x = self.nl_func(x) x = x.reshape(nbatch, npairs) if: x = x + self.var_cusp_weight[self.idx_pair] return x",False,self.include_cusp_weight,self.idx_pair >= 0,0.6454840898513794 5044,"def col(self, indx): if: self.__cols[indx] = self.Column(indx, self) return self.__cols[indx]",True,indx not in self.__cols,indx not in self.__cols,0.6564230918884277 5045,"def get_finger_pos(menu: Optional['pygame_menu.Menu'], event: EventType) -> Tuple2IntType: """""" Return the position from finger (or mouse) event on x-axis and y-axis (x, y). :param menu: Menu object for relative positioning in finger events :param event: Pygame event object :return: Position on x-axis and y-axis (x, y) in px """""" if: assert menu is not None,'menu reference cannot be none while using finger position' display_size = menu.get_window_size() finger_pos = (int(event.x * display_size[0]), int(event.y * display_size[1])) return finger_pos return event.pos",False,"event.type in (FINGERDOWN, FINGERMOTION, FINGERUP)",event.pos is None,0.6516558527946472 5046,"def __getitem__(self, idx): """"""Given index, sample the data from multiple datasets with the given proportion."""""" p = np.random.rand() for i in range(len(self.datasets)): if: index_new = (idx + np.random.rand()) * len(self.datasets[i]) / self.length index_new = int(np.round(index_new)) % len(self.datasets[i]) return self.datasets[i][index_new] return None",False,p <= self.partition[i],p < len(self.datasets[i]),0.6486886739730835 5047,"def endElement(self, name, value, connection): if: self.zone = value elif name == 'groupName': self.group_name = value elif name == 'tenancy': self.tenancy = value else: setattr(self, name, value)",False,name == 'availabilityZone',name == 'zone',0.6537482142448425 5048,"def endElement(self, name, value, connection): if name == 'availabilityZone': self.zone = value elif: self.group_name = value elif name == 'tenancy': self.tenancy = value else: setattr(self, name, value)",True,name == 'groupName',name == 'groupName',0.6582446098327637 5049,"def endElement(self, name, value, connection): if name == 'availabilityZone': self.zone = value elif name == 'groupName': self.group_name = value elif: self.tenancy = value else: setattr(self, name, value)",True,name == 'tenancy',name == 'tenancy',0.654587984085083 5050,"def get_model_stats(model, cfg, mode, use_train_input): """""" Compute statistics for the current model given the config. Args: model (model): model to perform analysis. cfg (CfgNode): configs. Details can be found in slowfast/config/defaults.py mode (str): Options include `flop` or `activation`. Compute either flop (gflops) or activation count (mega). use_train_input (bool): if True, compute statistics for training. Otherwise, compute statistics for testing. Returns: float: the total number of count of the given model. """""" assert mode in ['flop', 'activation'], ""'{}' not supported for model analysis"".format(mode) if: model_stats_fun = flop_count elif mode == 'activation': model_stats_fun = activation_count model_mode = model.training model.eval() inputs = _get_model_analysis_input(cfg, use_train_input) count_dict, *_ = model_stats_fun(model, inputs) count = sum(count_dict.values()) model.train(model_mode) return count",True,mode == 'flop',mode == 'flop',0.6555771827697754 5051,"def get_model_stats(model, cfg, mode, use_train_input): """""" Compute statistics for the current model given the config. Args: model (model): model to perform analysis. cfg (CfgNode): configs. Details can be found in slowfast/config/defaults.py mode (str): Options include `flop` or `activation`. Compute either flop (gflops) or activation count (mega). use_train_input (bool): if True, compute statistics for training. Otherwise, compute statistics for testing. Returns: float: the total number of count of the given model. """""" assert mode in ['flop', 'activation'], ""'{}' not supported for model analysis"".format(mode) if mode == 'flop': model_stats_fun = flop_count elif: model_stats_fun = activation_count model_mode = model.training model.eval() inputs = _get_model_analysis_input(cfg, use_train_input) count_dict, *_ = model_stats_fun(model, inputs) count = sum(count_dict.values()) model.train(model_mode) return count",True,mode == 'activation',mode == 'activation',0.6568681001663208 5052,"def on_button_search_clicked(self, toggle_button): if: self.search_bar.set_search_mode(search_mode=False) else: self.search_bar.set_search_mode(search_mode=True)",False,self.search_bar.get_search_mode(),toggle_button.isChecked() == False,0.6447437405586243 5053,"def concatenate(box_mask_lists, fields=None): """"""Concatenate list of box_mask_lists. This op concatenates a list of input box_mask_lists into a larger box_mask_list. It also handles concatenation of box_mask_list fields as long as the field tensor shapes are equal except for the first dimension. Args: box_mask_lists: list of np_box_mask_list.BoxMaskList objects fields: optional list of fields to also concatenate. By default, all fields from the first BoxMaskList in the list are included in the concatenation. Returns: a box_mask_list with number of boxes equal to sum([box_mask_list.num_boxes() for box_mask_list in box_mask_list]) Raises: ValueError: if box_mask_lists is invalid (i.e., is not a list, is empty, or contains non box_mask_list objects), or if requested fields are not contained in all box_mask_lists """""" if: if'masks' not in fields: fields.append('masks') return box_list_to_box_mask_list(np_box_list_ops.concatenate(boxlists=box_mask_lists, fields=fields))",True,fields is not None,fields is not None,0.6507326364517212 5054,"def concatenate(box_mask_lists, fields=None): """"""Concatenate list of box_mask_lists. This op concatenates a list of input box_mask_lists into a larger box_mask_list. It also handles concatenation of box_mask_list fields as long as the field tensor shapes are equal except for the first dimension. Args: box_mask_lists: list of np_box_mask_list.BoxMaskList objects fields: optional list of fields to also concatenate. By default, all fields from the first BoxMaskList in the list are included in the concatenation. Returns: a box_mask_list with number of boxes equal to sum([box_mask_list.num_boxes() for box_mask_list in box_mask_list]) Raises: ValueError: if box_mask_lists is invalid (i.e., is not a list, is empty, or contains non box_mask_list objects), or if requested fields are not contained in all box_mask_lists """""" if fields is not None: if: fields.append('masks') return box_list_to_box_mask_list(np_box_list_ops.concatenate(boxlists=box_mask_lists, fields=fields))",True,'masks' not in fields,'masks' not in fields,0.6519908905029297 5055,"def __init__(self, context=None, arg=0, template=None, set_default=True): if: self.set_defaults() else: self._value = 0",True,set_default,set_default,0.6618826389312744 5056,"def update_buttons(bar, link=False, pin=False): buttons = bar.buttons() buttons |= bar.MaximizeButton buttons &= ~bar.RestoreButton if: buttons |= bar.LinkButton if pin: buttons |= bar.PinButton bar.setButtons(buttons)",True,link,link,0.6788994073867798 5057,"def update_buttons(bar, link=False, pin=False): buttons = bar.buttons() buttons |= bar.MaximizeButton buttons &= ~bar.RestoreButton if link: buttons |= bar.LinkButton if: buttons |= bar.PinButton bar.setButtons(buttons)",True,pin,pin,0.675557017326355 5058,"def on_stage_start(self, stage, epoch=None): """"""Gets called when a stage (either training, validation, test) starts."""""" if: self.per_metrics = self.hparams.per_stats()",True,stage != sb.Stage.TRAIN,stage != sb.Stage.TRAIN,0.650809645652771 5059,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if: return '' if user_id == dispatcher.bot.id: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",False,"user_id in [777000, 1087968824]",not user_id,0.6433091163635254 5060,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",False,user_id == dispatcher.bot.id,user_id in DRAGONS or user_id in WOLVES,0.6474183797836304 5061,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if user_id == dispatcher.bot.id: return '' if: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",False,int(user_id) in DRAGONS + TIGERS + WOLVES,not user_id,0.6471975445747375 5062,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if user_id == dispatcher.bot.id: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if: text = text.format('Yes') reason = sql.get_reason(user_id) if reason: text += f'\nReason: {reason}' else: text = text.format('No') return text",True,is_blacklisted,is_blacklisted,0.6510672569274902 5063,"def __user_info__(user_id): is_blacklisted = sql.is_user_blacklisted(user_id) text = 'Blacklisted: {}' if user_id in [777000, 1087968824]: return '' if user_id == dispatcher.bot.id: return '' if int(user_id) in DRAGONS + TIGERS + WOLVES: return '' if is_blacklisted: text = text.format('Yes') reason = sql.get_reason(user_id) if: text += f'\nReason: {reason}' else: text = text.format('No') return text",True,reason,reason,0.6717069745063782 5064,"def preprocess(self, data): images = [] for row in data: image = row.get('data') or row.get('body') if: image = base64.b64decode(image) image = mmcv.imfrombytes(image) images.append(image) return images",False,"isinstance(image, str)",self.config.parse_image_size and image.startswith('image://'),0.6439789533615112 5065,"def split(self, depth: int) -> Tuple['Name', 'Name']: """"""Split a name into a prefix and suffix names at the specified depth. *depth* is an ``int`` specifying the number of labels in the suffix Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the name. Returns the tuple ``(prefix, suffix)``. """""" l = len(self.labels) if: return (self, dns.name.empty) elif depth == l: return (dns.name.empty, self) elif depth < 0 or depth > l: raise ValueError('depth must be >= 0 and <= the length of the name') return (Name(self[:-depth]), Name(self[-depth:]))",True,depth == 0,depth == 0,0.665411651134491 5066,"def split(self, depth: int) -> Tuple['Name', 'Name']: """"""Split a name into a prefix and suffix names at the specified depth. *depth* is an ``int`` specifying the number of labels in the suffix Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the name. Returns the tuple ``(prefix, suffix)``. """""" l = len(self.labels) if depth == 0: return (self, dns.name.empty) elif: return (dns.name.empty, self) elif depth < 0 or depth > l: raise ValueError('depth must be >= 0 and <= the length of the name') return (Name(self[:-depth]), Name(self[-depth:]))",False,depth == l,l == 0,0.6667429804801941 5067,"def split(self, depth: int) -> Tuple['Name', 'Name']: """"""Split a name into a prefix and suffix names at the specified depth. *depth* is an ``int`` specifying the number of labels in the suffix Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the name. Returns the tuple ``(prefix, suffix)``. """""" l = len(self.labels) if depth == 0: return (self, dns.name.empty) elif depth == l: return (dns.name.empty, self) elif: raise ValueError('depth must be >= 0 and <= the length of the name') return (Name(self[:-depth]), Name(self[-depth:]))",False,depth < 0 or depth > l,depth < l,0.65338134765625 5068,"def setUp(self): self.get_super_method(self,'setUp')() if: raise ExecutionError('Failed to Setup and mount volume') self.is_io_running = False",False,not self.setup_volume_and_mount_volume([self.mounts[0]]),not self.setup_volume_and_mount_volume(mounts=[self.mounts[0]]),0.6432540416717529 5069,"def authenticate_redirect(self): """""" Informs the browser that this resource requires authentication (status code 401) which should prompt the browser to reply with credentials. The browser will be informed that we support both Negotiate (GSSAPI/SSO) and Basic auth. """""" if: raise Exception(_('Headers have already been written')) self.set_status(401) self.add_header('WWW-Authenticate', 'Negotiate') self.add_header('WWW-Authenticate', 'Basic realm=""%s""' % self.settings['sso_realm']) self.finish() return False",True,self._headers_written,self._headers_written,0.6520984172821045 5070,"def get_interpolation_matrix(self, loc, location_type='cell_centers', zeros_outside=False, **kwargs): if: raise TypeError('The locType keyword argument has been removed, please use location_type. This will be removed in discretize 1.0.0') if 'zerosOutside' in kwargs: raise TypeError('The zerosOutside keyword argument has been removed, please use zeros_outside. This will be removed in discretize 1.0.0') return self._get_interpolation_matrix(loc, location_type, zeros_outside)",True,'locType' in kwargs,'locType' in kwargs,0.6529345512390137 5071,"def get_interpolation_matrix(self, loc, location_type='cell_centers', zeros_outside=False, **kwargs): if 'locType' in kwargs: raise TypeError('The locType keyword argument has been removed, please use location_type. This will be removed in discretize 1.0.0') if: raise TypeError('The zerosOutside keyword argument has been removed, please use zeros_outside. This will be removed in discretize 1.0.0') return self._get_interpolation_matrix(loc, location_type, zeros_outside)",True,'zerosOutside' in kwargs,'zerosOutside' in kwargs,0.6553617715835571 5072,"def unmap(data, count, inds, fill=0): """""" Unmap a subset of item (data) back to the original set of items (of size count) """""" if: ret = data.new_full((count,), fill) ret[inds] = data else: new_size = (count,) + data.size()[1:] ret = data.new_full(new_size, fill) ret[inds, :] = data return ret",True,data.dim() == 1,data.dim() == 1,0.6511886119842529 5073,"def find_next_search(self): if: return if self.navigation_history_state.goto_next(): next_search = self.navigation_history_state.get_current() try: self.is_navigating_history = True self.gui.search.set_search_string(next_search, store_in_history=True) finally: self.is_navigating_history = False",False,self.gui.search.count() == 0,self.is_navigating_history,0.6452270746231079 5074,"def find_next_search(self): if self.gui.search.count() == 0: return if: next_search = self.navigation_history_state.get_current() try: self.is_navigating_history = True self.gui.search.set_search_string(next_search, store_in_history=True) finally: self.is_navigating_history = False",False,self.navigation_history_state.goto_next(),self.is_navigating_history,0.642853856086731 5075,"@classmethod def get_regular_meeting_at_date(cls, session, calendar, end_date, full_day=None): """""" Retrieve the list of recursive meetings happening at the specified end_date. :kwarg full_day: Can be True, False or None. True will restrict to only meetings which take up the full day. False will only select meetings which do not take the full day. None will not restrict. Default to None """""" meetings_tmp = cls.expand_regular_meetings(cls.get_active_regular_meeting(session, calendar, end_date, end_date, full_day), end_date) meetings = [] for meeting in meetings_tmp: if: meetings.append(meeting) meetings.sort(key=operator.attrgetter('meeting_date','meeting_time_start','meeting_name')) return meetings",False,meeting.meeting_date == end_date,meeting.date >= end_date,0.6479058265686035 5076,"@classmethod def mk(cls, tracefile, test_tracefile): assert tracefile.is_file(), tracefile assert test_tracefile is None or test_tracefile.is_file() inv_decls, dtraces = DTraces.vread(tracefile) test_dtraces = None if: _, test_dtraces = DTraces.vread(test_tracefile) return cls(tracefile, inv_decls, dtraces, test_dtraces)",False,test_tracefile,test_tracefile.is_file(),0.6544214487075806 5077,"def modules(self): list_of_modules = [item for sub in self.__args.modules for item in sub] if: list_of_modules = util.format.aslist(self.get('modules', [])) return list_of_modules",False,list_of_modules == [],not list_of_modules,0.6471132040023804 5078,"def del_host_from_chain(ip=None, mac=None): if: os.system('arptables -D IN -s %s --source-mac %s -j ACCEPT' % (ip, mac)) if os.system('arptables -D OUTPUT -d %s -j ACCEPT' % ip): os.system('arptables -D OUT -d %s -j ACCEPT' % ip)",False,"os.system('arptables -D INPUT -s %s --source-mac %s -j ACCEPT' % (ip, mac))",os.system('arptables -D IN -d %s -j ACCEPT' % ip and mac,0.6501554250717163 5079,"def del_host_from_chain(ip=None, mac=None): if os.system('arptables -D INPUT -s %s --source-mac %s -j ACCEPT' % (ip, mac)): os.system('arptables -D IN -s %s --source-mac %s -j ACCEPT' % (ip, mac)) if: os.system('arptables -D OUT -d %s -j ACCEPT' % ip)",False,os.system('arptables -D OUTPUT -d %s -j ACCEPT' % ip),"os.system('arptables -D OUT -d %s -j ACCEPT' % ip, mac)",0.6444172859191895 5080,"def fail(self, msg, lineno=None, exc=TemplateSyntaxError): """"""Convenience method that raises `exc` with the message, passed line number or last line number as well as the current name and filename. """""" if: lineno = self.stream.current.lineno raise exc(msg, lineno, self.name, self.filename)",True,lineno is None,lineno is None,0.6612528562545776 5081,"def before_train_epoch(self, runner): super(Fp16GradAccumulateOptimizerHook, self).before_train_epoch(runner) if: assert self.nominal_batch_size is not None samples_per_gpu = runner.data_loader.sampler.samples_per_gpu _, word_size = get_dist_info() self.accumulation = math.ceil(self.nominal_batch_size / (samples_per_gpu * word_size))",True,self.accumulation is None,self.accumulation is None,0.6536580324172974 5082,"@property def maxzoom(self): """"""gets the value maxzoom"""""" if: self.init() return self._maxzoom",True,self._maxzoom is None,self._maxzoom is None,0.6564142107963562 5083,"def is_engine_available(**kwargs): """""" Check whether the Sphinx engine is available. """""" if: return True try: from.engine import SphinxEngine return True except ImportError as e: _log.warning('Failed to import from Sphinx engine module: %s', e) return False except Exception as e: _log.exception('Exception during import of Sphinx engine module: %s', e) return False return True",False,_engine,kwargs.get('enable_sphinx'),0.6776231527328491 5084,"def _poly2mask(mask_ann, img_h, img_w): if: rles = maskUtils.frPyObjects(mask_ann, img_h, img_w) rle = maskUtils.merge(rles) elif isinstance(mask_ann['counts'], list): rle = maskUtils.frPyObjects(mask_ann, img_h, img_w) else: rle = mask_ann mask = maskUtils.decode(rle) return mask",True,"isinstance(mask_ann, list)","isinstance(mask_ann, list)",0.6463358402252197 5085,"def _poly2mask(mask_ann, img_h, img_w): if isinstance(mask_ann, list): rles = maskUtils.frPyObjects(mask_ann, img_h, img_w) rle = maskUtils.merge(rles) elif: rle = maskUtils.frPyObjects(mask_ann, img_h, img_w) else: rle = mask_ann mask = maskUtils.decode(rle) return mask",True,"isinstance(mask_ann['counts'], list)","isinstance(mask_ann['counts'], list)",0.6476714611053467 5086,"def validate_SignatoryType(self, value): result = True if: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False pass return result",True,value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None),value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None),0.6480987071990967 5087,"def validate_SignatoryType(self, value): result = True if value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None): if: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False pass return result",True,"not isinstance(value, str)","not isinstance(value, str)",0.6478409767150879 5088,"def get_repacked_binary_and_file_name(self, uid: str) -> tuple[bytes | None, str | None]: file_name = self.db_interface.get_file_name(uid) if: return (None, None) repack_service = TarRepack() tar = repack_service.tar_repack(self.fs_organizer.generate_path_from_uid(uid)) name = f'{file_name}.tar.gz' return (tar, name)",False,file_name is None,not file_name,0.6533805131912231 5089,"def build(self) -> Sequence[GeneratorError]: """"""Create the project from templates"""""" if: print(f'Generating {self.package_name}') else: print(f'Generating {self.project_name}') try: self.project_dir.mkdir() except FileExistsError: return [GeneratorError(detail='Directory already exists. Delete it or use the update command.')] self._create_package() self._build_metadata() self._build_models() self._build_api() self._run_post_hooks() return self._get_errors()",False,self.meta == MetaType.NONE,self.package_name,0.649908185005188 5090,"def exportLiteral(self, outfile, level, name_='MorrisLecarCell'): level += 1 already_processed = set() self.exportLiteralAttributes(outfile, level, already_processed, name_) if: self.exportLiteralChildren(outfile, level, name_)",True,self.hasContent_(),self.hasContent_(),0.6507828831672668 5091,"def test_containment(bvh_tree: Optional[BVHTree], point: Coordinate3D) -> bool: """"""Given a point and a BVH-tree, tests whether the point is _probably_ within the mesh represented by the BVH-tree. This is done by casting three rays in the X, Y and Z directions. The point is assumed to be within the mesh if all three rays hit the mesh. Returns True if the BVH-tree is missing. """""" global CONTAINMENT_TEST_AXES if: return True for axis in CONTAINMENT_TEST_AXES: _, _, _, dist = bvh_tree.ray_cast(point, axis) if dist is None or dist == -1: return False return True",False,not bvh_tree,bvh_tree is None,0.6527002453804016 5092,"def test_containment(bvh_tree: Optional[BVHTree], point: Coordinate3D) -> bool: """"""Given a point and a BVH-tree, tests whether the point is _probably_ within the mesh represented by the BVH-tree. This is done by casting three rays in the X, Y and Z directions. The point is assumed to be within the mesh if all three rays hit the mesh. Returns True if the BVH-tree is missing. """""" global CONTAINMENT_TEST_AXES if not bvh_tree: return True for axis in CONTAINMENT_TEST_AXES: _, _, _, dist = bvh_tree.ray_cast(point, axis) if: return False return True",False,dist is None or dist == -1,dist,0.6503852605819702 5093,"def __lt__(self, other): if: return self._oid < other else: raise SmiError('%s object not properly initialized' % self.__class__.__name__)",True,self._state & self.ST_CLEAN,self._state & self.ST_CLEAN,0.6545597314834595 5094,"def write(self, oprot): if: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setNotificationsEnabled_result') if self.e is not None: oprot.writeFieldBegin('e', TType.STRUCT, 1) self.e.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot._fast_encode is not None and self.thrift_spec is not None,oprot._fast_encode is not None and self.thrift_spec is not None,0.6458766460418701 5095,"def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('setNotificationsEnabled_result') if: oprot.writeFieldBegin('e', TType.STRUCT, 1) self.e.write(oprot) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,self.e is not None,self.e is not None,0.6483011841773987 5096,"def std_dev(data): """""" returns standard deviation of values rounded to pl decimal places S = sqrt( (sum(x-xm)^2) / n-1 ) xm = sum(x)/n :param data: list with integer or float values :type data: list >>> l1 = [1.334, 1.322, 1.345, 1.451, 1.000, 1.434, 1.321, 1.322] >>> l2 = [1.234, 1.222, 1.345, 1.451, 2.500, 1.234, 1.321, 1.222] >>> round(std_dev(l1), 8) 0.13797871 >>> round(std_dev(l2), 8) 0.43536797 >>> median(l1) 1.328 >>> mean(l1) 1.316125 """""" if: return 0 K = data[0] n = 0 Sum = 0 Sum_sqr = 0 for x in data: n += 1 Sum += x - K Sum_sqr += (x - K) * (x - K) variance = (Sum_sqr - Sum * Sum / n) / (n - 1) return sqrt(variance)",True,len(data) == 0,len(data) == 0,0.6493635177612305 5097,"def alter_targets(self): """"""Return any corresponding targets in a variant directory. """""" if: return ([], None) return self.fs.variant_dir_target_climb(self, self.dir, [self.name])",False,self.is_derived(),self.dir is None,0.6498351693153381 5098,"def test_get_file_to_path_with_mode(self): if: return with open(FILE_PATH, 'wb') as stream: stream.write(b'abcdef') with self.assertRaises(BaseException): file = self.fs.get_file_to_path(self.share_name, self.directory_name, self.byte_file, FILE_PATH, 'a+b')",True,TestMode.need_recording_file(self.test_mode),TestMode.need_recording_file(self.test_mode),0.644657552242279 5099,"def action_matches(applicable_actions: List[str], action: str) -> bool: alternatives = ALTERNATIVE_ACTIONS[action] if: return True return False",False,any((alt in applicable_actions for alt in alternatives)),any((applicable_actions[action] == alternatives[0]) for action in alternatives)),0.6538166999816895 5100,"def _maybe_add_keypoints(obj: Dict[str, Any], ann_dict: Dict[str, Any]): if: return keypts = ann_dict['keypoints'] for idx, v in enumerate(keypts): if idx % 3!= 2: keypts[idx] = v + 0.5 obj['keypoints'] = keypts",True,'keypoints' not in ann_dict,'keypoints' not in ann_dict,0.6539626717567444 5101,"def _maybe_add_keypoints(obj: Dict[str, Any], ann_dict: Dict[str, Any]): if 'keypoints' not in ann_dict: return keypts = ann_dict['keypoints'] for idx, v in enumerate(keypts): if: keypts[idx] = v + 0.5 obj['keypoints'] = keypts",True,idx % 3 != 2,idx % 3 != 2,0.6632081270217896 5102,"def as_event(cls, *args, **kwds): if: return cls._expect(*args, **kwds) else: return cls._reverse_expect(*args, **kwds)",True,cls.message_type is ArsdkMessageType.EVT,cls.message_type is ArsdkMessageType.EVT,0.6447559595108032 5103,"@staticmethod def service_list_to_str(services: Optional[List[Any]]) -> str: """"""convert the list of services to a concise list of services"""""" if: return 'all' convert = {'gsi': 'Indexing', 'cbas': 'Analytics', 'ft': 'Full Text Search'} return ', '.join([convert[service] if service in convert else service.title() for service in services])",True,not services,not services,0.6701916456222534 5104,"def check_model(self, yml_conf): """""" Raises: ValueError: loaded model not in supported model type """""" for support_model in SUPPORT_MODELS: if: return True raise ValueError('Unsupported arch: {}, expect {}'.format(yml_conf['arch'], SUPPORT_MODELS))",True,support_model in yml_conf['arch'],support_model in yml_conf['arch'],0.6486483216285706 5105,"def __iter__(self): dummy_ptr = self._dummy_key() table = self.field('table') for i in safe_range(self.field('mask') + 1): setentry = table[i] key = setentry['key'] if: yield PyObjectPtr.from_pyobject_ptr(key)",False,key != 0 and key != dummy_ptr,key is not None,0.6533042192459106 5106,"def on_value_changed(self): """""" The signal handler for the 'valueChanged' signal. """""" if: self._guard |= VALUE_FLAG try: self.declaration.value = self.widget.value() finally: self._guard &= ~VALUE_FLAG",True,not self._guard & VALUE_FLAG,not self._guard & VALUE_FLAG,0.6571481227874756 5107,"def expand_bool_array(val): if: return [val] * len(convolutions) return val",False,"isinstance(val, bool)","isinstance(val, bool) or val is None",0.6466636657714844 5108,"def convert_enum_to_dict(obj): """""" @author Roger Unwin @brief converts an enum to a dict """""" dic = {} for i in [v for v in dir(obj) if not callable(getattr(obj, v))]: if: dic[i] = getattr(obj, i) log.debug('enum dictionary ='+ repr(dic)) return dic",False,False == i.startswith('_'),"hasattr(obj, i)",0.6499691009521484 5109,"def createFields(self): yield Bytes(self, 'data', self.datasize / 8) padding = self._size - self.current_size if: yield createPaddingField(self, padding)",False,padding,padding > 0,0.675218403339386 5110,"def get_jobs(self, tag: Optional[Hashable]=None) -> List['Job']: """""" Gets scheduled jobs marked with the given tag, or all jobs if tag is omitted. :param tag: An identifier used to identify a subset of jobs to retrieve """""" if: return self.jobs[:] else: return [job for job in self.jobs if tag in job.tags]",True,tag is None,tag is None,0.6560312509536743 5111,"def default(self, obj): if: if isinstance(obj, (datetime, obj)): return obj.strftime('%Y-%m-%d %H:%M:%S') raise TypeError('Type %s not serializable' % type(obj))",False,obj,obj is not None,0.6779782176017761 5112,"def default(self, obj): if obj: if: return obj.strftime('%Y-%m-%d %H:%M:%S') raise TypeError('Type %s not serializable' % type(obj))",False,"isinstance(obj, (datetime, obj))","isinstance(obj, datetime.datetime)",0.6501161456108093 5113,"def evaluate(ref_file, trans_file, metric, subword_option=None): """"""Pick a metric and evaluate depending on task."""""" if: evaluation_score = _bleu(ref_file, trans_file, subword_option=subword_option) elif metric.lower() == 'rouge': evaluation_score = _rouge(ref_file, trans_file, subword_option=subword_option) elif metric.lower() == 'accuracy': evaluation_score = _accuracy(ref_file, trans_file) elif metric.lower() == 'word_accuracy': evaluation_score = _word_accuracy(ref_file, trans_file) else: raise ValueError('Unknown metric %s' % metric) return evaluation_score",True,metric.lower() == 'bleu',metric.lower() == 'bleu',0.652167558670044 5114,"def evaluate(ref_file, trans_file, metric, subword_option=None): """"""Pick a metric and evaluate depending on task."""""" if metric.lower() == 'bleu': evaluation_score = _bleu(ref_file, trans_file, subword_option=subword_option) elif: evaluation_score = _rouge(ref_file, trans_file, subword_option=subword_option) elif metric.lower() == 'accuracy': evaluation_score = _accuracy(ref_file, trans_file) elif metric.lower() == 'word_accuracy': evaluation_score = _word_accuracy(ref_file, trans_file) else: raise ValueError('Unknown metric %s' % metric) return evaluation_score",True,metric.lower() == 'rouge',metric.lower() == 'rouge',0.6532976627349854 5115,"def evaluate(ref_file, trans_file, metric, subword_option=None): """"""Pick a metric and evaluate depending on task."""""" if metric.lower() == 'bleu': evaluation_score = _bleu(ref_file, trans_file, subword_option=subword_option) elif metric.lower() == 'rouge': evaluation_score = _rouge(ref_file, trans_file, subword_option=subword_option) elif: evaluation_score = _accuracy(ref_file, trans_file) elif metric.lower() == 'word_accuracy': evaluation_score = _word_accuracy(ref_file, trans_file) else: raise ValueError('Unknown metric %s' % metric) return evaluation_score",True,metric.lower() == 'accuracy',metric.lower() == 'accuracy',0.6529915928840637 5116,"def evaluate(ref_file, trans_file, metric, subword_option=None): """"""Pick a metric and evaluate depending on task."""""" if metric.lower() == 'bleu': evaluation_score = _bleu(ref_file, trans_file, subword_option=subword_option) elif metric.lower() == 'rouge': evaluation_score = _rouge(ref_file, trans_file, subword_option=subword_option) elif metric.lower() == 'accuracy': evaluation_score = _accuracy(ref_file, trans_file) elif: evaluation_score = _word_accuracy(ref_file, trans_file) else: raise ValueError('Unknown metric %s' % metric) return evaluation_score",True,metric.lower() == 'word_accuracy',metric.lower() == 'word_accuracy',0.6513720154762268 5117,"def deprecated(reason, replacement, gone_in, issue=None): """"""Helper to deprecate existing functionality. reason: Textual reason shown to the user about why this functionality has been deprecated. replacement: Textual suggestion shown to the user about what alternative functionality they can use. gone_in: The version of pip does this functionality should get removed in. Raises errors if pip's current version is greater than or equal to this. issue: Issue number on the tracker that would serve as a useful place for users to find related discussion and provide feedback. Always pass replacement, gone_in and issue as keyword arguments for clarity at the call site. """""" sentences = [(reason, DEPRECATION_MSG_PREFIX + '{}'), (gone_in, 'pip {} will remove support for this functionality.'), (replacement, 'A possible replacement is {}.'), (issue, 'You can find discussion regarding this at https://github.com/pypa/pip/issues/{}.')] message =''.join((template.format(val) for val, template in sentences if val is not None)) if: raise PipDeprecationWarning(message) warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)",False,gone_in is not None and parse(current_version) >= parse(gone_in),not issue,0.6401240825653076 5118,"def __check_update_ok(self): """"""Checks if an update is possible, raises a DatasetError otherwise. Note that dataset_older_than_database returns a tuple (disk_datetime_processed, database_datetime_processed, tile_ingested_datetime) if no ingestion required"""""" tile_class_filter = (TC_SINGLE_SCENE, TC_SUPERSEDED) time_tuple = self.db.dataset_older_than_database(self.dataset_dict['dataset_id'], self.dataset_dict['datetime_processed'], tile_class_filter) if: disk_datetime_processed, database_datetime_processed, tile_ingested_datetime = time_tuple if disk_datetime_processed == database_datetime_processed: skip_message = 'Dataset has already been ingested' elif disk_datetime_processed < database_datetime_processed: skip_message = 'Dataset on disk is older than dataset in DB' else: skip_message = 'Dataset on disk was created after currently ingested contents' skip_message +='(Disk = %s, DB = %s, Ingested = %s)' % time_tuple raise DatasetSkipError(skip_message)",True,time_tuple is not None,time_tuple is not None,0.6486437320709229 5119,"def __check_update_ok(self): """"""Checks if an update is possible, raises a DatasetError otherwise. Note that dataset_older_than_database returns a tuple (disk_datetime_processed, database_datetime_processed, tile_ingested_datetime) if no ingestion required"""""" tile_class_filter = (TC_SINGLE_SCENE, TC_SUPERSEDED) time_tuple = self.db.dataset_older_than_database(self.dataset_dict['dataset_id'], self.dataset_dict['datetime_processed'], tile_class_filter) if time_tuple is not None: disk_datetime_processed, database_datetime_processed, tile_ingested_datetime = time_tuple if: skip_message = 'Dataset has already been ingested' elif disk_datetime_processed < database_datetime_processed: skip_message = 'Dataset on disk is older than dataset in DB' else: skip_message = 'Dataset on disk was created after currently ingested contents' skip_message +='(Disk = %s, DB = %s, Ingested = %s)' % time_tuple raise DatasetSkipError(skip_message)",False,disk_datetime_processed == database_datetime_processed,disk_datetime_processed is None,0.6510151624679565 5120,"def __check_update_ok(self): """"""Checks if an update is possible, raises a DatasetError otherwise. Note that dataset_older_than_database returns a tuple (disk_datetime_processed, database_datetime_processed, tile_ingested_datetime) if no ingestion required"""""" tile_class_filter = (TC_SINGLE_SCENE, TC_SUPERSEDED) time_tuple = self.db.dataset_older_than_database(self.dataset_dict['dataset_id'], self.dataset_dict['datetime_processed'], tile_class_filter) if time_tuple is not None: disk_datetime_processed, database_datetime_processed, tile_ingested_datetime = time_tuple if disk_datetime_processed == database_datetime_processed: skip_message = 'Dataset has already been ingested' elif: skip_message = 'Dataset on disk is older than dataset in DB' else: skip_message = 'Dataset on disk was created after currently ingested contents' skip_message +='(Disk = %s, DB = %s, Ingested = %s)' % time_tuple raise DatasetSkipError(skip_message)",False,disk_datetime_processed < database_datetime_processed,tile_ingested_datetime > disk_datetime_processed,0.6499894857406616 5121,"def _find(self, name, domain=None, path=None): """"""Requests uses this method internally to get cookie values. If there are conflicting cookies, _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown if there are conflicting cookies. :param name: a string containing name of cookie :param domain: (optional) string containing domain of cookie :param path: (optional) string containing path of cookie :return: cookie.value """""" for cookie in iter(self): if: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: return cookie.value raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,cookie.name == name,cookie.name == name,0.6522976160049438 5122,"def _find(self, name, domain=None, path=None): """"""Requests uses this method internally to get cookie values. If there are conflicting cookies, _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown if there are conflicting cookies. :param name: a string containing name of cookie :param domain: (optional) string containing domain of cookie :param path: (optional) string containing path of cookie :return: cookie.value """""" for cookie in iter(self): if cookie.name == name: if: if path is None or cookie.path == path: return cookie.value raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,domain is None or cookie.domain == domain,domain is None or cookie.domain == domain,0.6444485187530518 5123,"def _find(self, name, domain=None, path=None): """"""Requests uses this method internally to get cookie values. If there are conflicting cookies, _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown if there are conflicting cookies. :param name: a string containing name of cookie :param domain: (optional) string containing domain of cookie :param path: (optional) string containing path of cookie :return: cookie.value """""" for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if: return cookie.value raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))",True,path is None or cookie.path == path,path is None or cookie.path == path,0.6459365487098694 5124,"def get_config_section(self, name): """"""Get a section of a configuration"""""" if: return self.config.items(name) return []",False,self.config.has_section(name),self.config,0.6461418867111206 5125,"@validate(name=ConfWithStats.STATS_TIME) def validate_stats_time(stats_time): if: raise ConfigTypeError(desc='Statistics log timer value has to be of integral type but got: %r' % stats_time) if stats_time < 10: raise ConfigValueError(desc='Statistics log timer cannot be set to less then 10 sec, given timer value %s.' % stats_time) return stats_time",False,"not isinstance(stats_time, numbers.Integral)","not isinstance(stats_time, int) or stats_time not in (0, 1)",0.6462117433547974 5126,"@validate(name=ConfWithStats.STATS_TIME) def validate_stats_time(stats_time): if not isinstance(stats_time, numbers.Integral): raise ConfigTypeError(desc='Statistics log timer value has to be of integral type but got: %r' % stats_time) if: raise ConfigValueError(desc='Statistics log timer cannot be set to less then 10 sec, given timer value %s.' % stats_time) return stats_time",True,stats_time < 10,stats_time < 10,0.6661722660064697 5127,"def normalizeToken(self, token): """""" HTML5 specific normalizations to the token stream """""" if: raw = token['data'] token['data'] = OrderedDict(raw) if len(raw) > len(token['data']): token['data'].update(raw[::-1]) return token",False,token['type'] == tokenTypes['StartTag'],token['type'] == 'json',0.6514172554016113 5128,"def normalizeToken(self, token): """""" HTML5 specific normalizations to the token stream """""" if token['type'] == tokenTypes['StartTag']: raw = token['data'] token['data'] = OrderedDict(raw) if: token['data'].update(raw[::-1]) return token",False,len(raw) > len(token['data']),raw[::-1] != '.',0.6489127278327942 5129,"def create(self, validated_data): data = self.context['request'].data validated_data['status'] = True validated_data['deleted'] = False validated_data['user'] = self.context['request'].user.profile validated_data['product_image'] = ','.join(data['product_image']) if 'product_image' in data else '' if: validated_data['price'] = 0 return super().create(validated_data)",False,validated_data['action'] == 'giveaway','price' not in validated_data,0.6450543403625488 5130,"def ioctl_GWINSZ(fd): try: import fcntl import termios import struct cr = struct.unpack_from('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')) except: return None if: return None return cr",False,"cr == (0, 0)",cr == -1,0.6515616178512573 5131,"def __init__(self, addr, from_base=False): self.ptrsize = current_arch.ptrsize if: self.chunk_base_address = addr self.address = addr + 2 * self.ptrsize else: self.chunk_base_address = int(addr - 2 * self.ptrsize) self.address = addr self.size_addr = int(self.address - self.ptrsize) self.prev_size_addr = self.chunk_base_address return",True,from_base,from_base,0.6588475704193115 5132,"def __getitem__(self, item: Union[int, str]) -> Union[Any, EncodingFast]: """""" If the key is a string, returns the value of the dict associated to :obj:`key` ('input_ids', 'attention_mask', etc.). If the key is an integer, get the :obj:`tokenizers.Encoding` for batch item with index :obj:`key`. """""" if: return self.data[item] elif self._encodings is not None: return self._encodings[item] else: raise KeyError('Indexing with integers (to access backend Encoding for a given batch index) is not available when using Python based tokenizers')",True,"isinstance(item, str)","isinstance(item, str)",0.6499534249305725 5133,"def __getitem__(self, item: Union[int, str]) -> Union[Any, EncodingFast]: """""" If the key is a string, returns the value of the dict associated to :obj:`key` ('input_ids', 'attention_mask', etc.). If the key is an integer, get the :obj:`tokenizers.Encoding` for batch item with index :obj:`key`. """""" if isinstance(item, str): return self.data[item] elif: return self._encodings[item] else: raise KeyError('Indexing with integers (to access backend Encoding for a given batch index) is not available when using Python based tokenizers')",False,self._encodings is not None,"isinstance(item, int)",0.6546435356140137 5134,"def quiet_option(f): def callback(ctx, param, value): state = ctx.ensure_object(State) if: if state.verbose: raise BadArgumentUsage('--verbose and --quiet are mutually exclusive! Please choose one!', ctx=ctx) state.quiet = True setup_verbosity(ctx, param, -1) return option('--quiet', '-q', is_flag=True, expose_value=False, callback=callback, help='Quiet mode.', type=click_types.BOOL)(f)",False,value,value and (not value),0.6665977239608765 5135,"def quiet_option(f): def callback(ctx, param, value): state = ctx.ensure_object(State) if value: if: raise BadArgumentUsage('--verbose and --quiet are mutually exclusive! Please choose one!', ctx=ctx) state.quiet = True setup_verbosity(ctx, param, -1) return option('--quiet', '-q', is_flag=True, expose_value=False, callback=callback, help='Quiet mode.', type=click_types.BOOL)(f)",False,state.verbose,state.quiet and (not state.quiet),0.6574726104736328 5136,"def resolve(self, item, captcha_cb=None, select_cb=None): item = item.copy() if: result = self._resolve_live(item) result = sorted(result, key=lambda x: x['quality'], reverse=True) else: result = self._resolve_vod(item) if len(result) > 0 and select_cb: return select_cb(result) return result",False,'live.html' in item['url'],self.mode == 'live',0.6473560333251953 5137,"def resolve(self, item, captcha_cb=None, select_cb=None): item = item.copy() if 'live.html' in item['url']: result = self._resolve_live(item) result = sorted(result, key=lambda x: x['quality'], reverse=True) else: result = self._resolve_vod(item) if: return select_cb(result) return result",False,len(result) > 0 and select_cb,select_cb,0.6455256342887878 5138,"@property def readonly(self): """"""returns the readonly property"""""" if: self.__init() return self._readonly",True,self._readonly is None,self._readonly is None,0.6549603343009949 5139,"def drawContents(self, *args): if: return apply(QtGui.QFrame.drawContents, (self,) + args) else: painter, clipx, clipy, clipw, cliph = args color = self.eraseColor() painter.fillRect(clipx, clipy, clipw, cliph, QtGui.QBrush(color)) QtGui.QScrollArea.drawContents(self, painter, clipx, clipy, clipw, cliph)",True,len(args) == 1,len(args) == 1,0.652539849281311 5140,"def set_power(self, p_E, p_B): self.p_E = p_E self.p_B = p_B if: self.amplitude_E = None else: self.amplitude_E = np.sqrt(self._generate_power_array(p_E)) / self.pixel_size if p_B is None: self.amplitude_B = None else: self.amplitude_B = np.sqrt(self._generate_power_array(p_B)) / self.pixel_size",True,p_E is None,p_E is None,0.650566577911377 5141,"def set_power(self, p_E, p_B): self.p_E = p_E self.p_B = p_B if p_E is None: self.amplitude_E = None else: self.amplitude_E = np.sqrt(self._generate_power_array(p_E)) / self.pixel_size if: self.amplitude_B = None else: self.amplitude_B = np.sqrt(self._generate_power_array(p_B)) / self.pixel_size",True,p_B is None,p_B is None,0.6508200168609619 5142,"def eval_exprcompose(self, expr, **kwargs): """"""[DEV]: Evaluate an ExprCompose using the current state"""""" args = [] for arg in expr.args: arg = self.eval_expr_visitor(arg, **kwargs) if: return exprid_top(expr) args.append(arg) ret = ExprCompose(*args) return ret",False,arg.is_id(TOPSTR),arg.id == 0,0.6474357843399048 5143,"def expand(self, expanded_h, expanded_w, top, left): """"""See :func:`BaseInstanceMasks.expand`."""""" if: expanded_mask = np.empty((0, expanded_h, expanded_w), dtype=np.uint8) else: expanded_mask = np.zeros((len(self), expanded_h, expanded_w), dtype=np.uint8) expanded_mask[:, top:top + self.height, left:left + self.width] = self.masks return BitmapMasks(expanded_mask, expanded_h, expanded_w)",True,len(self.masks) == 0,len(self.masks) == 0,0.6475925445556641 5144,"def lz(x): if: return extraop.LeadingZeros(x) else: aux = core.Variable('{}_{}lz'.format(prefix, self._i_auxvar), x.width) self._i_auxvar += 1 assertions.append(operation.BvComp(aux, extraop.LeadingZeros(x))) return aux",False,are_cte_differences,self._i_auxvar == 0,0.6470450162887573 5145,"def closeEvent(self, event): """""" Handle the close event for the dock item. This handler will reject the event if the item is not closable. """""" event.ignore() if: event.accept() area = self.rootDockArea() if area is not None and area.dockEventsEnabled(): event = QDockItemEvent(DockItemClosed, self.objectName()) QApplication.postEvent(area, event)",False,self._closable,not self.closable,0.6572868824005127 5146,"def closeEvent(self, event): """""" Handle the close event for the dock item. This handler will reject the event if the item is not closable. """""" event.ignore() if self._closable: event.accept() area = self.rootDockArea() if: event = QDockItemEvent(DockItemClosed, self.objectName()) QApplication.postEvent(area, event)",False,area is not None and area.dockEventsEnabled(),area.isDock(),0.6468573808670044 5147,"def sizeHint(self): """""" Get the size hint for the layout. """""" hint = self._size_hint if: return hint hint = super(QDockFrameLayout, self).sizeHint() if not hint.isValid(): hint = QSize(256, 192) self._size_hint = hint return hint",False,hint.isValid(),hint is not None,0.6576007604598999 5148,"def sizeHint(self): """""" Get the size hint for the layout. """""" hint = self._size_hint if hint.isValid(): return hint hint = super(QDockFrameLayout, self).sizeHint() if: hint = QSize(256, 192) self._size_hint = hint return hint",False,not hint.isValid(),not hint,0.6513206958770752 5149,"def add_sensor(self, sensor): """"""Add a sensor to the player vehicle (see sensor.py)."""""" if: raise ValueError('Sensor not supported') self._sensors.append(sensor)",False,"not isinstance(sensor, carla_sensor.Sensor)",sensor not in self._sensors,0.6480464339256287 5150,"@property def links(self): """"""Returns the parsed header links of the response, if any."""""" header = self.headers.get('link') l = {} if: links = parse_header_links(header) for link in links: key = link.get('rel') or link.get('url') l[key] = link return l",True,header,header,0.6690298318862915 5151,"def __isub__(self, other): """"""Translate point by adding another point's coordinates."""""" if: return NotImplemented self._x -= other._x self._y -= other._y return self",True,"not isinstance(other, Point)","not isinstance(other, Point)",0.6524357199668884 5152,"def garen_ability(champion, data): neighbors = field.find_neighbors(champion.y, champion.x) team = champion.team enemies_around = [] for n in neighbors: c = field.coordinates[n[0]][n[1]] if: enemies_around.append(c) for e in enemies_around: champion.spell(e, stats.ABILITY_DMG[champion.name][champion.stars] / stats.ABILITY_SLICES[champion.name])",False,c and c.team != team and c.champion,c.team != team,0.6506630182266235 5153,"def get_indent(element, default_level): indent = '' if: text = element.nodeValue indent = text[:len(text) - len(text.lstrip())] if not indent or indent == '\n': indent = '\n' +'' * default_level * 4 return indent",False,element is not None and element.nodeType == minidom.Node.TEXT_NODE,element.nodeValue,0.6483150720596313 5154,"def get_indent(element, default_level): indent = '' if element is not None and element.nodeType == minidom.Node.TEXT_NODE: text = element.nodeValue indent = text[:len(text) - len(text.lstrip())] if: indent = '\n' +'' * default_level * 4 return indent",False,not indent or indent == '\n',default_level is not None,0.6529646515846252 5155,"def _merge_multi_context(outputs, major_axis): """"""Merge outputs that lives on multiple context into one, so that they look like living on one context. """""" rets = [] for tensors, axis in zip(outputs, major_axis): if: rets.append(nd.concatenate(tensors, axis=axis, always_copy=False)) else: rets.append(tensors[0]) return rets",False,axis >= 0,len(tensors) > 1,0.6531930565834045 5156,"def get_sampling_benign_loader(dataset_name, image_size, split, batch_size, frac_per_class, start_frac=0.0, **kwargs): if: nclasses, nimgs_per_class = (10, 1000 if split == 'test' else 5000) nchosen_per_class = int(frac_per_class * nimgs_per_class) else: assert False ds_handler = lambda ds: Subset(ds, range(int(nimgs_per_class * start_frac), nclasses * nimgs_per_class, nimgs_per_class // nchosen_per_class)) data_loader = get_benign_loader(dataset_name, image_size, split, batch_size, dataset_handler=ds_handler, **kwargs) return data_loader",False,dataset_name.startswith('CIFAR10'),frac_per_class != None,0.6442797780036926 5157,"@metadata() def height(self): """"""If image exists, return its height, otherwise return (:unav)."""""" if: return UNAV return six.text_type(self._image.height)",True,not self._image,not self._image,0.6559816598892212 5158,"def detect_os(self): """"""Detect operating system string in runtime, just use default if not found."""""" if: with open(RHEL_ID, 'r', encoding='utf-8') as buffer: major = buffer.read().split(' ')[6].split('.')[0].replace(""'"", '') self.osver = 'x86_64_RH_' + str(major) logger.debug('RHEL version found in %s', RHEL_ID)",False,RHEL_ID.is_file(),not self.osver and os.path.exists(RHEL_ID),0.648882269859314 5159,"@staticmethod def _apply_administrative_domain(mo: ManagedObject, r): if: return r['administrative_domain'] = {'id': str(mo.administrative_domain.id), 'name': qs(mo.administrative_domain.name)} if mo.administrative_domain.remote_system and mo.administrative_domain.remote_id: r['administrative_domain']['remote_system'] = {'id': str(mo.administrative_domain.remote_system.id), 'name': qs(mo.administrative_domain.remote_system.name)} r['administrative_domain']['remote_id'] = mo.administrative_domain.remote_id",True,not mo.administrative_domain,not mo.administrative_domain,0.6521909832954407 5160,"@staticmethod def _apply_administrative_domain(mo: ManagedObject, r): if not mo.administrative_domain: return r['administrative_domain'] = {'id': str(mo.administrative_domain.id), 'name': qs(mo.administrative_domain.name)} if: r['administrative_domain']['remote_system'] = {'id': str(mo.administrative_domain.remote_system.id), 'name': qs(mo.administrative_domain.remote_system.name)} r['administrative_domain']['remote_id'] = mo.administrative_domain.remote_id",False,mo.administrative_domain.remote_system and mo.administrative_domain.remote_id,mo.administrative_domain.remote_system,0.6463660001754761 5161,"@user_admin def unsetlog(update: Update, context: CallbackContext): bot = context.bot message = update.effective_message chat = update.effective_chat log_channel = sql.stop_chat_logging(chat.id) if: bot.send_message(log_channel, f'Channel has been unlinked from {chat.title}') message.reply_text('Log channel has been un-set.') else: message.reply_text('No log channel has been set yet!')",True,log_channel,log_channel,0.6625585556030273 5162,"def write(self, artifact: T, dir: PathOrStr): filename = self._get_artifact_path(dir) open_method = _OPEN_FUNCTIONS[self.compress] with open_method(filename, 'wb') as f: pickler = dill.Pickler(file=f) pickler.dump(self.VERSION) if: pickler.dump(True) for item in cast(Iterable, artifact): pickler.dump(item) else: pickler.dump(False) pickler.dump(artifact)",False,"hasattr(artifact, '__next__')","isinstance(artifact, Iterable)",0.6431344151496887 5163,"def check_cuda(use_cuda, err='\nYou can not set use_cuda = True in the model because you are using paddlepaddle-cpu.\n Please: 1. Install paddlepaddle-gpu to run your models on GPU or 2. Set use_cuda = False to run models on CPU.\n'): try: if: log.error(err) sys.exit(1) except Exception as e: pass",True,use_cuda == True and fluid.is_compiled_with_cuda() == False,use_cuda == True and fluid.is_compiled_with_cuda() == False,0.6431101560592651 5164,"def get_str_key(name, required=False): if: default = DEFAULTS[name] else: default = None if not (data := env.str(name, default=default)) and (not required): LOGGER.warn('No str key:'+ name) return None elif not data: LOGGER.critical('No str key:'+ name) sys.exit(2) else: return data",True,name in DEFAULTS,name in DEFAULTS,0.6668882966041565 5165,"def get_str_key(name, required=False): if name in DEFAULTS: default = DEFAULTS[name] else: default = None if: LOGGER.warn('No str key:'+ name) return None elif not data: LOGGER.critical('No str key:'+ name) sys.exit(2) else: return data",True,"not (data := env.str(name, default=default)) and (not required)","not (data := env.str(name, default=default)) and (not required)",0.6426480412483215 5166,"def get_str_key(name, required=False): if name in DEFAULTS: default = DEFAULTS[name] else: default = None if not (data := env.str(name, default=default)) and (not required): LOGGER.warn('No str key:'+ name) return None elif: LOGGER.critical('No str key:'+ name) sys.exit(2) else: return data",True,not data,not data,0.6604516506195068 5167,"def exportLiteral(self, outfile, level, name_='SpikeGeneratorRandom'): level += 1 already_processed = set() self.exportLiteralAttributes(outfile, level, already_processed, name_) if: self.exportLiteralChildren(outfile, level, name_)",True,self.hasContent_(),self.hasContent_(),0.6505838632583618 5168,"def _are_hosts_using_agent_image(hosts: List[Dict[str, Any]], image: str) -> bool: for host in hosts: if: return False return True",False,host.get('discovery_agent_version') != image,host['image'] != image,0.6434395909309387 5169,"@property def content(self): """"""Content of the response, in bytes."""""" if: try: if self._content_consumed: raise RuntimeError('The content for this response was already consumed') if self.status_code == 0: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() except AttributeError: self._content = None self._content_consumed = True return self._content",True,self._content is False,self._content is False,0.6522984504699707 5170,"@property def content(self): """"""Content of the response, in bytes."""""" if self._content is False: try: if: raise RuntimeError('The content for this response was already consumed') if self.status_code == 0: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() except AttributeError: self._content = None self._content_consumed = True return self._content",True,self._content_consumed,self._content_consumed,0.6535818576812744 5171,"@property def content(self): """"""Content of the response, in bytes."""""" if self._content is False: try: if self._content_consumed: raise RuntimeError('The content for this response was already consumed') if: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() except AttributeError: self._content = None self._content_consumed = True return self._content",True,self.status_code == 0,self.status_code == 0,0.6491794586181641 5172,"def setText(self, text: str, color=None): """""" Set text of a Plaintextfield with lines wrapped at newline characters. """""" if: self.setBackground(color) self.setPlainText(text)",True,color,color,0.6694139242172241 5173,"def hook(module, input, output): module_idx = len(layer_output_dict) m_key = list(layer_dict)[module_idx] if: output = F.relu(output) layer_output_dict[m_key] = output",False,force_relu,input.dtype == 'float64',0.6590660810470581 5174,"def __init__(self, iouType='segm'): if: self.setDetParams() elif iouType == 'keypoints': self.setKpParams() else: raise Exception('iouType not supported') self.iouType = iouType self.useSegm = None",True,iouType == 'segm' or iouType == 'bbox',iouType == 'segm' or iouType == 'bbox',0.6507288217544556 5175,"def __init__(self, iouType='segm'): if iouType =='segm' or iouType == 'bbox': self.setDetParams() elif: self.setKpParams() else: raise Exception('iouType not supported') self.iouType = iouType self.useSegm = None",True,iouType == 'keypoints',iouType == 'keypoints',0.6566466093063354 5176,"def get_data(self, mode): if: return (self.train, self.word2id, None) elif mode == 'valid': return (self.dev, self.word2id, None) elif mode == 'test': return (self.test, self.word2id, None) else: print('Mode is not set properly (train/dev/test)') exit()",True,mode == 'train',mode == 'train',0.6605533361434937 5177,"def get_data(self, mode): if mode == 'train': return (self.train, self.word2id, None) elif: return (self.dev, self.word2id, None) elif mode == 'test': return (self.test, self.word2id, None) else: print('Mode is not set properly (train/dev/test)') exit()",True,mode == 'valid',mode == 'valid',0.6520118713378906 5178,"def get_data(self, mode): if mode == 'train': return (self.train, self.word2id, None) elif mode == 'valid': return (self.dev, self.word2id, None) elif: return (self.test, self.word2id, None) else: print('Mode is not set properly (train/dev/test)') exit()",True,mode == 'test',mode == 'test',0.6538553833961487 5179,"def drawWedge(self, wedge): P = wedge.asPolygon() if: self.drawPath(P) else: self.drawPolygon(P)",False,"isinstance(P, Path)",self.drawPath,0.6496634483337402 5180,"def need_more_events(self): if: return True event = self.events[0] if isinstance(event, DocumentStartEvent): return self.need_events(1) elif isinstance(event, SequenceStartEvent): return self.need_events(2) elif isinstance(event, MappingStartEvent): return self.need_events(3) else: return False",False,not self.events,len(self.events) == 0,0.6553441286087036 5181,"def need_more_events(self): if not self.events: return True event = self.events[0] if: return self.need_events(1) elif isinstance(event, SequenceStartEvent): return self.need_events(2) elif isinstance(event, MappingStartEvent): return self.need_events(3) else: return False",False,"isinstance(event, DocumentStartEvent)","isinstance(event, ErrorEvent)",0.6515271067619324 5182,"def need_more_events(self): if not self.events: return True event = self.events[0] if isinstance(event, DocumentStartEvent): return self.need_events(1) elif: return self.need_events(2) elif isinstance(event, MappingStartEvent): return self.need_events(3) else: return False",False,"isinstance(event, SequenceStartEvent)","isinstance(event, ErrorStartEvent)",0.650382399559021 5183,"def need_more_events(self): if not self.events: return True event = self.events[0] if isinstance(event, DocumentStartEvent): return self.need_events(1) elif isinstance(event, SequenceStartEvent): return self.need_events(2) elif: return self.need_events(3) else: return False",False,"isinstance(event, MappingStartEvent)","isinstance(event, SequenceEndEvent)",0.64676433801651 5184,"def __eq__(self, other): """"""Returns true if both objects are equal"""""" if: return False return self.__dict__ == other.__dict__",False,"not isinstance(other, CustomResult)","not isinstance(other, V1alpha1WorkflowSpec)",0.6505078673362732 5185,"def clean_doc(obj): """"""Return the document(s) with all extra system keys stripped. :param obj: document(s) :type obj: list | dict | arango.cursor.Cursor :return: Document(s) with the system keys stripped :rtype: list | dict """""" if: docs = [clean_doc(d) for d in obj] return sorted(docs, key=lambda doc: doc['_key']) if isinstance(obj, dict): return {field: value for field, value in obj.items() if field in {'_key', '_from', '_to'} or not field.startswith('_')}",False,"isinstance(obj, (Cursor, list, deque))","isinstance(obj, list)",0.648979902267456 5186,"def clean_doc(obj): """"""Return the document(s) with all extra system keys stripped. :param obj: document(s) :type obj: list | dict | arango.cursor.Cursor :return: Document(s) with the system keys stripped :rtype: list | dict """""" if isinstance(obj, (Cursor, list, deque)): docs = [clean_doc(d) for d in obj] return sorted(docs, key=lambda doc: doc['_key']) if: return {field: value for field, value in obj.items() if field in {'_key', '_from', '_to'} or not field.startswith('_')}",True,"isinstance(obj, dict)","isinstance(obj, dict)",0.6457338333129883 5187,"def __call__(self, buf): data = buf.maybe_extract_at_most(999999999) if: return None return Data(data=data)",True,data is None,data is None,0.6579583883285522 5188,"def to_xyz(self, num_digits=14): """""" Gets the string representation of this atom in the xyz file format Args: num_digits - The number of digits after the decimal point to include when writing this atom's coordinates. Default: 14 Maximum: 14 Returns: String containing this atom's atomic symbol and coordinates in the xyz format """""" x = round(self.get_x(), num_digits) y = round(self.get_y(), num_digits) z = round(self.get_z(), num_digits) if: x = 0.0 if y == -0.0: y = 0.0 if z == -0.0: z = 0.0 return '{0:2} {1:22.14e} {2:22.14e} {3:22.14e}'.format(self.name, x, y, z)",True,x == -0.0,x == -0.0,0.6596672534942627 5189,"def to_xyz(self, num_digits=14): """""" Gets the string representation of this atom in the xyz file format Args: num_digits - The number of digits after the decimal point to include when writing this atom's coordinates. Default: 14 Maximum: 14 Returns: String containing this atom's atomic symbol and coordinates in the xyz format """""" x = round(self.get_x(), num_digits) y = round(self.get_y(), num_digits) z = round(self.get_z(), num_digits) if x == -0.0: x = 0.0 if: y = 0.0 if z == -0.0: z = 0.0 return '{0:2} {1:22.14e} {2:22.14e} {3:22.14e}'.format(self.name, x, y, z)",True,y == -0.0,y == -0.0,0.6629421710968018 5190,"def to_xyz(self, num_digits=14): """""" Gets the string representation of this atom in the xyz file format Args: num_digits - The number of digits after the decimal point to include when writing this atom's coordinates. Default: 14 Maximum: 14 Returns: String containing this atom's atomic symbol and coordinates in the xyz format """""" x = round(self.get_x(), num_digits) y = round(self.get_y(), num_digits) z = round(self.get_z(), num_digits) if x == -0.0: x = 0.0 if y == -0.0: y = 0.0 if: z = 0.0 return '{0:2} {1:22.14e} {2:22.14e} {3:22.14e}'.format(self.name, x, y, z)",True,z == -0.0,z == -0.0,0.6595704555511475 5191,"def set_shutdown(self): if: return with self.pt_lock: with DelayedKeyboardInterrupt(): dxl_comm_result, dxl_error = self.packet_handler.write1ByteTxRx(self.port_handler, self.dxl_id, XL430_ADDR_SHUTDOWN, id) self.handle_comm_result('XL430_ADDR_SHUTDOWN', dxl_comm_result, dxl_error)",True,not self.hw_valid,not self.hw_valid,0.6533569097518921 5192,"def restore_from_classification_checkpoint_fn(self, first_stage_feature_extractor_scope, second_stage_feature_extractor_scope): """"""Returns a map of variables to load from a foreign checkpoint. Note that this overrides the default implementation in faster_rcnn_meta_arch.FasterRCNNFeatureExtractor which does not work for InceptionResnetV2 checkpoints. TODO(jonathanhuang,rathodv): revisit whether it's possible to force the `Repeat` namescope as created in `_extract_box_classifier_features` to start counting at 2 (e.g. `Repeat_2`) so that the default restore_fn can be used. Args: first_stage_feature_extractor_scope: A scope name for the first stage feature extractor. second_stage_feature_extractor_scope: A scope name for the second stage feature extractor. Returns: A dict mapping variable names (to load from a checkpoint) to variables in the model graph. """""" variables_to_restore = {} for variable in tf.global_variables(): if: var_name = variable.op.name.replace(first_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable if variable.op.name.startswith(second_stage_feature_extractor_scope): var_name = variable.op.name.replace(second_stage_feature_extractor_scope + '/InceptionResnetV2/Repeat', 'InceptionResnetV2/Repeat_2') var_name = var_name.replace(second_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable return variables_to_restore",True,variable.op.name.startswith(first_stage_feature_extractor_scope),variable.op.name.startswith(first_stage_feature_extractor_scope),0.6435510516166687 5193,"def restore_from_classification_checkpoint_fn(self, first_stage_feature_extractor_scope, second_stage_feature_extractor_scope): """"""Returns a map of variables to load from a foreign checkpoint. Note that this overrides the default implementation in faster_rcnn_meta_arch.FasterRCNNFeatureExtractor which does not work for InceptionResnetV2 checkpoints. TODO(jonathanhuang,rathodv): revisit whether it's possible to force the `Repeat` namescope as created in `_extract_box_classifier_features` to start counting at 2 (e.g. `Repeat_2`) so that the default restore_fn can be used. Args: first_stage_feature_extractor_scope: A scope name for the first stage feature extractor. second_stage_feature_extractor_scope: A scope name for the second stage feature extractor. Returns: A dict mapping variable names (to load from a checkpoint) to variables in the model graph. """""" variables_to_restore = {} for variable in tf.global_variables(): if variable.op.name.startswith(first_stage_feature_extractor_scope): var_name = variable.op.name.replace(first_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable if: var_name = variable.op.name.replace(second_stage_feature_extractor_scope + '/InceptionResnetV2/Repeat', 'InceptionResnetV2/Repeat_2') var_name = var_name.replace(second_stage_feature_extractor_scope + '/', '') variables_to_restore[var_name] = variable return variables_to_restore",True,variable.op.name.startswith(second_stage_feature_extractor_scope),variable.op.name.startswith(second_stage_feature_extractor_scope),0.6437842845916748 5194,"def __init__(self, scale_factors=None): """"""Constructor for SquareBoxCoder. Args: scale_factors: List of 3 positive scalars to scale ty, tx, and tl. If set to None, does not perform scaling. For faster RCNN, the open-source implementation recommends using [10.0, 10.0, 5.0]. Raises: ValueError: If scale_factors is not length 3 or contains values less than or equal to 0. """""" if: if len(scale_factors)!= 3: raise ValueError('The argument scale_factors must be a list of length 3.') if any((scalar <= 0 for scalar in scale_factors)): raise ValueError('The values in scale_factors must all be greater than 0.') self._scale_factors = scale_factors",False,scale_factors,scale_factors is not None,0.6546217203140259 5195,"def __init__(self, scale_factors=None): """"""Constructor for SquareBoxCoder. Args: scale_factors: List of 3 positive scalars to scale ty, tx, and tl. If set to None, does not perform scaling. For faster RCNN, the open-source implementation recommends using [10.0, 10.0, 5.0]. Raises: ValueError: If scale_factors is not length 3 or contains values less than or equal to 0. """""" if scale_factors: if: raise ValueError('The argument scale_factors must be a list of length 3.') if any((scalar <= 0 for scalar in scale_factors)): raise ValueError('The values in scale_factors must all be greater than 0.') self._scale_factors = scale_factors",True,len(scale_factors) != 3,len(scale_factors) != 3,0.6481379270553589 5196,"def __init__(self, scale_factors=None): """"""Constructor for SquareBoxCoder. Args: scale_factors: List of 3 positive scalars to scale ty, tx, and tl. If set to None, does not perform scaling. For faster RCNN, the open-source implementation recommends using [10.0, 10.0, 5.0]. Raises: ValueError: If scale_factors is not length 3 or contains values less than or equal to 0. """""" if scale_factors: if len(scale_factors)!= 3: raise ValueError('The argument scale_factors must be a list of length 3.') if: raise ValueError('The values in scale_factors must all be greater than 0.') self._scale_factors = scale_factors",False,any((scalar <= 0 for scalar in scale_factors)),scale_factors[0] < 0,0.6510893106460571 5197,"def contain_nonum(results): for ele in results: if: return True return False",False,not is_number(ele),not ele.count('nonum'),0.6470295190811157 5198,"def _register_module(self, module_class): """"""Register a module. Args: module (:obj:`nn.Module`): Module to be registered. """""" if: raise TypeError('module must be a child of nn.Module, but got {}'.format(module_class)) module_name = module_class.__name__ if module_name in self._module_dict: raise KeyError('{} is already registered in {}'.format(module_name, self.name)) self._module_dict[module_name] = module_class",True,"not issubclass(module_class, nn.Module)","not issubclass(module_class, nn.Module)",0.6559798717498779 5199,"def _register_module(self, module_class): """"""Register a module. Args: module (:obj:`nn.Module`): Module to be registered. """""" if not issubclass(module_class, nn.Module): raise TypeError('module must be a child of nn.Module, but got {}'.format(module_class)) module_name = module_class.__name__ if: raise KeyError('{} is already registered in {}'.format(module_name, self.name)) self._module_dict[module_name] = module_class",True,module_name in self._module_dict,module_name in self._module_dict,0.6494145393371582 5200,"def _from_fields(self, fields): len_t, val_t = self.list_dtype() n = int(_np.dtype(len_t).type(next(fields))) data = _np.loadtxt(list(_islice(fields, n)), val_t, ndmin=1) if: raise StopIteration return data",False,len(data) < n,len(data) == 0,0.6520447731018066 5201,"def _detect_case_sensitive_readwrite(self, subdir): """""" Determine if directory at rp is case sensitive by writing """""" assert self.writable, 'Detection method can only work read-write.' upper_a = subdir.append('A') upper_a.touch() lower_a = subdir.append('a') if: lower_a.delete() upper_a.setdata() if upper_a.lstat(): log.Log.FatalError(""We're sorry but the target file system at path '{pa}' isn't deemed reliable enough for a backup. It takes too long or doesn't register case insensitive deletion of files."".format(pa=subdir)) self.case_sensitive = False else: upper_a.delete() self.case_sensitive = True",True,lower_a.lstat(),lower_a.lstat(),0.6504238843917847 5202,"def _detect_case_sensitive_readwrite(self, subdir): """""" Determine if directory at rp is case sensitive by writing """""" assert self.writable, 'Detection method can only work read-write.' upper_a = subdir.append('A') upper_a.touch() lower_a = subdir.append('a') if lower_a.lstat(): lower_a.delete() upper_a.setdata() if: log.Log.FatalError(""We're sorry but the target file system at path '{pa}' isn't deemed reliable enough for a backup. It takes too long or doesn't register case insensitive deletion of files."".format(pa=subdir)) self.case_sensitive = False else: upper_a.delete() self.case_sensitive = True",False,upper_a.lstat(),upper_a.lstat()['A'] > subdir.lstat()['Z'],0.652103841304779 5203,"def tokenize_input_and_cls_pos(self, input, stem): tokens = stem.norm_layer(stem.proj(input)) assert tokens.ndim == 3 assert tokens.shape[2] == self.embed_dim B = tokens.shape[0] if: class_tokens = self.cls_token.expand(shape=[B, -1, -1]) tokens = paddle.concat(x=(class_tokens, tokens), axis=1) if self.use_pos_embed: tokens = tokens + self.pos_embed return tokens",False,self.num_cls_tokens > 0,self.cls_token is not None,0.6487225294113159 5204,"def tokenize_input_and_cls_pos(self, input, stem): tokens = stem.norm_layer(stem.proj(input)) assert tokens.ndim == 3 assert tokens.shape[2] == self.embed_dim B = tokens.shape[0] if self.num_cls_tokens > 0: class_tokens = self.cls_token.expand(shape=[B, -1, -1]) tokens = paddle.concat(x=(class_tokens, tokens), axis=1) if: tokens = tokens + self.pos_embed return tokens",False,self.use_pos_embed,self.pos_embed is not None,0.6521567106246948 5205,"def extract_nodes_from_tf_state(tf_state, network_names, role): """""" :tags: QE """""" data = {} for domains in [r['instances'] for r in tf_state.resources if r['type'] == 'libvirt_domain' and role in r['module']]: for d in domains: for nic in d['attributes']['network_interface']: if: continue data[nic['mac']] = {'ip': nic['addresses'], 'name': d['attributes']['name'], 'role': role} return data",False,nic['network_name'] not in network_names,nic['address'] in network_names,0.6495248079299927 5206,"def check_compatibility(version, name): """"""Raises errors or warns if called with an incompatible Wheel-Version. pip should refuse to install a Wheel-Version that's a major series ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when installing a version only minor version ahead (e.g 1.2 > 1.1). version: a 2-tuple representing a Wheel-Version (Major, Minor) name: name of wheel or package to raise exception about :raises UnsupportedWheel: when an incompatible Wheel-Version is given """""" if: raise UnsupportedWheel(""{}'s Wheel-Version ({}) is not compatible with this version of pip"".format(name, '.'.join(map(str, version)))) elif version > VERSION_COMPATIBLE: logger.warning('Installing from a newer Wheel-Version (%s)', '.'.join(map(str, version)))",False,version[0] > VERSION_COMPATIBLE[0],name != 'Wheel-Version',0.6487045288085938 5207,"def check_compatibility(version, name): """"""Raises errors or warns if called with an incompatible Wheel-Version. pip should refuse to install a Wheel-Version that's a major series ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when installing a version only minor version ahead (e.g 1.2 > 1.1). version: a 2-tuple representing a Wheel-Version (Major, Minor) name: name of wheel or package to raise exception about :raises UnsupportedWheel: when an incompatible Wheel-Version is given """""" if version[0] > VERSION_COMPATIBLE[0]: raise UnsupportedWheel(""{}'s Wheel-Version ({}) is not compatible with this version of pip"".format(name, '.'.join(map(str, version)))) elif: logger.warning('Installing from a newer Wheel-Version (%s)', '.'.join(map(str, version)))",True,version > VERSION_COMPATIBLE,version > VERSION_COMPATIBLE,0.6514861583709717 5208,"def __ror__(self, left): if: return NotImplemented return typing.Union[left, self]",False,not _is_unionable(left),"not isinstance(left, typing.Union)",0.649090588092804 5209,"def join(a, *p): """"""Join two or more pathname components, inserting '/' as needed. If any component is an absolute path, all previous path components will be discarded. An empty last part will result in a path that ends with a separator."""""" sep = _get_sep(a) path = a try: if: path[:0] + sep for b in p: if b.startswith(sep): path = b elif not path or path.endswith(sep): path += b else: path += sep + b except (TypeError, AttributeError, BytesWarning): genericpath._check_arg_types('join', a, *p) raise return path",False,not p,os.path.isabs(path),0.6625291109085083 5210,"def join(a, *p): """"""Join two or more pathname components, inserting '/' as needed. If any component is an absolute path, all previous path components will be discarded. An empty last part will result in a path that ends with a separator."""""" sep = _get_sep(a) path = a try: if not p: path[:0] + sep for b in p: if: path = b elif not path or path.endswith(sep): path += b else: path += sep + b except (TypeError, AttributeError, BytesWarning): genericpath._check_arg_types('join', a, *p) raise return path",False,b.startswith(sep),path.startswith(b),0.6442696452140808 5211,"def join(a, *p): """"""Join two or more pathname components, inserting '/' as needed. If any component is an absolute path, all previous path components will be discarded. An empty last part will result in a path that ends with a separator."""""" sep = _get_sep(a) path = a try: if not p: path[:0] + sep for b in p: if b.startswith(sep): path = b elif: path += b else: path += sep + b except (TypeError, AttributeError, BytesWarning): genericpath._check_arg_types('join', a, *p) raise return path",False,not path or path.endswith(sep),os.path.isabs(b),0.642837643623352 5212,"def execute(self, context): if: bpy.ops.wm.save_homefile() else: self.report({'ERROR'}, 'Warning: CellBlender is enabled. Use File menu to over-ride.') return {'FINISHED'}",False,context.scene.cellblender_test_suite.test_status == '?',bpy.ops.wm.get_enabled(),0.6468564867973328 5213,"def resetPosition(self, kill=False): self.talkS = talk('Resetting Position?', self.talkS) self.x = selfSpawn[self.level].x self.y = selfSpawn[self.level].y self.target = Point(self.x, self.y) if: self.lives -= 1 return self",True,kill,kill,0.6762955188751221 5214,"def enable_apex(self, val): def _apply_flag(module): if: module.apex_enabled = val self.apply(_apply_flag)",False,"hasattr(module, 'apex_enabled')","isinstance(module, ApexData)",0.6443307399749756 5215,"def pull_keys_from_obj(*keys): def outer(func): @skip_if_empty @partial_safe_wraps(func) def inner(obj, *args, **kwargs): for key in keys: assert key not in kwargs kwargs[key] = obj.get(key, EMPTY) if: return return func(**kwargs) return inner return outer",False,all((v is EMPTY for v in kwargs.values())),not kwargs[key],0.6444041728973389 5216,"def _fix_times(dims): for key, val in dims.items(): if: dims[key] = to_datetime([val])[0]",False,"np.issubdtype(np.asarray(val).dtype, np.datetime64)","isinstance(val, float) and (not val.is_integer())",0.6499564051628113 5217,"def recv_resendPinCodeBySMS(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = resendPinCodeBySMS_result() result.read(iprot) iprot.readMessageEnd() if result.e is not None: raise result.e return",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.650862455368042 5218,"def recv_resendPinCodeBySMS(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = resendPinCodeBySMS_result() result.read(iprot) iprot.readMessageEnd() if: raise result.e return",True,result.e is not None,result.e is not None,0.6484419107437134 5219,"def __next__(self): """"""Return next object in iter, or raise StopIteration"""""" if: self.currently_in_file.close() type = None while not type: type, data = self._get() if type == b'z': raise StopIteration elif type == b'r': return self._get_rorp(data) elif type == b'o': return data else: raise IterFileException('Bad file type %s' % (type,))",True,self.currently_in_file,self.currently_in_file,0.6533421277999878 5220,"def __next__(self): """"""Return next object in iter, or raise StopIteration"""""" if self.currently_in_file: self.currently_in_file.close() type = None while not type: type, data = self._get() if: raise StopIteration elif type == b'r': return self._get_rorp(data) elif type == b'o': return data else: raise IterFileException('Bad file type %s' % (type,))",False,type == b'z',not type,0.6560673713684082 5221,"def __next__(self): """"""Return next object in iter, or raise StopIteration"""""" if self.currently_in_file: self.currently_in_file.close() type = None while not type: type, data = self._get() if type == b'z': raise StopIteration elif: return self._get_rorp(data) elif type == b'o': return data else: raise IterFileException('Bad file type %s' % (type,))",False,type == b'r',type == b'f',0.6586929559707642 5222,"def __next__(self): """"""Return next object in iter, or raise StopIteration"""""" if self.currently_in_file: self.currently_in_file.close() type = None while not type: type, data = self._get() if type == b'z': raise StopIteration elif type == b'r': return self._get_rorp(data) elif: return data else: raise IterFileException('Bad file type %s' % (type,))",False,type == b'o',type == b'f',0.6581172943115234 5223,"def _valid_reshape2(self, shape): changed = False for sh in shape: if: return False if sh!= 0: changed = True return True",False,sh == 0 and changed,changed,0.6566145420074463 5224,"def _valid_reshape2(self, shape): changed = False for sh in shape: if sh == 0 and changed: return False if: changed = True return True",False,sh != 0,shape[sh] == 2 and shape[sh] and (changed,0.6562989354133606 5225,"def add_samples(self, samples): """""" Adds list of samples to dataset :param track_filter: optional filter """""" result = 0 for sample in samples: if: result += 1 return result",False,self.add_clip_sample_mappings(sample),self.filter_filter(sample),0.6428408622741699 5226,"def __init__(self, x, rhs): deps, nl_deps = extract_dependencies(rhs) if: raise ValueError('Invalid dependency') deps, nl_deps = (list(deps.values()), tuple(nl_deps.values())) deps.insert(0, x) super().__init__(x, deps, nl_deps=nl_deps, ic=False, adj_ic=False) self._rhs = rhs",False,var_id(x) in deps,not nl_deps,0.6482714414596558 5227,"def get_batch(self) -> (np.ndarray, np.ndarray): if: raise StopIteration() batch = self.armory_generator.get_batch() self.batches_processed += 1 return batch",False,self.batches_processed == self.num_eval_batches,self.batches_processed >= self.num_armories,0.6469882726669312 5228,"def response_handler(resp: Response) -> bool: if: return True raise PermissionUpdateError(resp, request)",True,resp.is_success,resp.is_success,0.6525986194610596 5229,"def elementClass(self, name, namespace=None): if: node = self.dom.createElement(name) else: node = self.dom.createElementNS(namespace, name) return NodeBuilder(node)",False,namespace is None and self.defaultNamespace is None,namespace is None,0.6449484825134277 5230,"def init_weights(self, pretrained=None): if: logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif pretrained is None: for m in self.modules(): if isinstance(m, nn.Conv2d): kaiming_init(m) elif isinstance(m, (_BatchNorm, nn.GroupNorm)): constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",True,"isinstance(pretrained, str)","isinstance(pretrained, str)",0.6459510326385498 5231,"def init_weights(self, pretrained=None): if isinstance(pretrained, str): logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif: for m in self.modules(): if isinstance(m, nn.Conv2d): kaiming_init(m) elif isinstance(m, (_BatchNorm, nn.GroupNorm)): constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",True,pretrained is None,pretrained is None,0.6522245407104492 5232,"def init_weights(self, pretrained=None): if isinstance(pretrained, str): logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif pretrained is None: for m in self.modules(): if: kaiming_init(m) elif isinstance(m, (_BatchNorm, nn.GroupNorm)): constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",True,"isinstance(m, nn.Conv2d)","isinstance(m, nn.Conv2d)",0.6485409736633301 5233,"def init_weights(self, pretrained=None): if isinstance(pretrained, str): logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif pretrained is None: for m in self.modules(): if isinstance(m, nn.Conv2d): kaiming_init(m) elif: constant_init(m, 1) else: raise TypeError('pretrained must be a str or None')",False,"isinstance(m, (_BatchNorm, nn.GroupNorm))","isinstance(m, nn.BatchNorm2d)",0.6417074203491211 5234,"def positional_decorator(wrapped): def positional_wrapper(*args, **kwargs): if: plural_s = '' if max_positional_args!= 1: plural_s ='s' message = '%s() takes at most %d positional argument%s (%d given)' % (wrapped.__name__, max_positional_args, plural_s, len(args)) if FLAGS.positional_parameters_enforcement == 'EXCEPTION': raise TypeError(message) elif FLAGS.positional_parameters_enforcement == 'WARNING': logger.warning(message) else: pass return wrapped(*args, **kwargs) return positional_wrapper",False,len(args) > max_positional_args,"not hasattr(wrapped, '__call__')",0.6475914716720581 5235,"def positional_decorator(wrapped): def positional_wrapper(*args, **kwargs): if len(args) > max_positional_args: plural_s = '' if: plural_s ='s' message = '%s() takes at most %d positional argument%s (%d given)' % (wrapped.__name__, max_positional_args, plural_s, len(args)) if FLAGS.positional_parameters_enforcement == 'EXCEPTION': raise TypeError(message) elif FLAGS.positional_parameters_enforcement == 'WARNING': logger.warning(message) else: pass return wrapped(*args, **kwargs) return positional_wrapper",False,max_positional_args != 1,FLAGS.positional_parameters_enforcement == 'S',0.6536043882369995 5236,"def positional_decorator(wrapped): def positional_wrapper(*args, **kwargs): if len(args) > max_positional_args: plural_s = '' if max_positional_args!= 1: plural_s ='s' message = '%s() takes at most %d positional argument%s (%d given)' % (wrapped.__name__, max_positional_args, plural_s, len(args)) if: raise TypeError(message) elif FLAGS.positional_parameters_enforcement == 'WARNING': logger.warning(message) else: pass return wrapped(*args, **kwargs) return positional_wrapper",False,FLAGS.positional_parameters_enforcement == 'EXCEPTION',"not isinstance(message, (int, float, complex))",0.644031286239624 5237,"def positional_decorator(wrapped): def positional_wrapper(*args, **kwargs): if len(args) > max_positional_args: plural_s = '' if max_positional_args!= 1: plural_s ='s' message = '%s() takes at most %d positional argument%s (%d given)' % (wrapped.__name__, max_positional_args, plural_s, len(args)) if FLAGS.positional_parameters_enforcement == 'EXCEPTION': raise TypeError(message) elif: logger.warning(message) else: pass return wrapped(*args, **kwargs) return positional_wrapper",True,FLAGS.positional_parameters_enforcement == 'WARNING',FLAGS.positional_parameters_enforcement == 'WARNING',0.6440649032592773 5238,"def float_feature(values): """"""Returns a TF-Feature of floats. Args: values: A scalar of list of values. Returns: A TF-Feature. """""" if: values = [values] return tf.train.Feature(float_list=tf.train.FloatList(value=values))",True,"not isinstance(values, (tuple, list))","not isinstance(values, (tuple, list))",0.6457357406616211 5239,"def best_config(self): """"""Get current best score config. :return: dict {'config_id': int, 'score': float, 'configs': dict} config_id, score, and configs of the current best config. """""" if: idx = random.randint(0, len(self.hyperparameter_list)) result = {'config_id': idx,'score': -1 * float('inf'), 'configs': self.hyperparameter_list[idx]} return result else: best_iter_id = max(self.best_score_dict.items(), key=operator.itemgetter(1))[0] return self.sha_list[best_iter_id].best_config()",False,self.total_propose == 0,self.best_score_dict is None,0.6475984454154968 5240,"def to_str(self): """"""Returns the string representation of the model"""""" import simplejson as json if: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)",True,six.PY2,six.PY2,0.6510450839996338 5241,"@run_async @user_admin def echo(update: Update, context: CallbackContext): args = update.effective_message.text.split(None, 1) message = update.effective_message if: message.reply_to_message.reply_text(args[1], parse_mode='MARKDOWN', disable_web_page_preview=True) else: message.reply_text(args[1], quote=False, parse_mode='MARKDOWN', disable_web_page_preview=True) message.delete()",False,message.reply_to_message,args[1],0.6449340581893921 5242,"@staticmethod def set_address_part(bulk_ad_extension, set_func): if: bulk_ad_extension.location_ad_extension.Address = _CAMPAIGN_OBJECT_FACTORY_V13.create('Address') set_func(bulk_ad_extension.location_ad_extension.Address)",True,bulk_ad_extension.location_ad_extension.Address is None,bulk_ad_extension.location_ad_extension.Address is None,0.6496016979217529 5243,"def info_data(self): data = super(Lrc822v1Groupset, self).info_data() data['type'] = GROUPSET_LRC data['settings'] = {'scheme': self.scheme, 'part_size': self.part_size} if: data['couple'] = str(self.couple) else: data['couple'] = None return data",False,self.couple,self.couple is not None,0.6636109352111816 5244,"def print_numpy(x, val=True, shp=False): """"""Print the mean, min, max, median, std, and size of a numpy array Parameters: val (bool) -- if print the values of the numpy array shp (bool) -- if print the shape of the numpy array """""" x = x.astype(np.float64) if: print('shape,', x.shape) if val: x = x.flatten() print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % (np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x)))",True,shp,shp,0.6623551845550537 5245,"def print_numpy(x, val=True, shp=False): """"""Print the mean, min, max, median, std, and size of a numpy array Parameters: val (bool) -- if print the values of the numpy array shp (bool) -- if print the shape of the numpy array """""" x = x.astype(np.float64) if shp: print('shape,', x.shape) if: x = x.flatten() print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % (np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x)))",True,val,val,0.6730660200119019 5246,"def get_order(self, byte_str): first_char, second_char = (byte_str[0], byte_str[1]) if: if second_char >= 161: return 157 * (first_char - 164) + second_char - 161 + 63 else: return 157 * (first_char - 164) + second_char - 64 else: return -1",True,first_char >= 164,first_char >= 164,0.6514998078346252 5247,"def get_order(self, byte_str): first_char, second_char = (byte_str[0], byte_str[1]) if first_char >= 164: if: return 157 * (first_char - 164) + second_char - 161 + 63 else: return 157 * (first_char - 164) + second_char - 64 else: return -1",True,second_char >= 161,second_char >= 161,0.6575212478637695 5248,"def matches_event(self, event: Dict[str, Any]) -> bool: if: return True event = json.loads(json.dumps(event)) return self._does_event_match(event, self._pattern)",False,not self._pattern,event in self._patterns and event not in self._patterns,0.6509333252906799 5249,"def getColorsAsByteArray(self) -> Optional[bytes]: if: return None return self._colors.tobytes()",True,self._colors is None,self._colors is None,0.6553027629852295 5250,"def existed_persitables(var): if: return False if not os.path.exists(os.path.join(init_checkpoint_path, var.name)): print('Var not exists: [%s]\t%s' % (var.name, os.path.join(init_checkpoint_path, var.name))) return os.path.exists(os.path.join(init_checkpoint_path, var.name))",False,not fluid.io.is_persistable(var),"not isinstance(var, persitables.Base)",0.644503116607666 5251,"def existed_persitables(var): if not fluid.io.is_persistable(var): return False if: print('Var not exists: [%s]\t%s' % (var.name, os.path.join(init_checkpoint_path, var.name))) return os.path.exists(os.path.join(init_checkpoint_path, var.name))",True,"not os.path.exists(os.path.join(init_checkpoint_path, var.name))","not os.path.exists(os.path.join(init_checkpoint_path, var.name))",0.6481183767318726 5252,"def format_value_in(self, skin: CustomSkin, value: Optional[str]) -> str: if: formatted = f'`{value}`' else: formatted = skin.bot.translator.t(_p('skinsettings|colours|format:not_set', 'Not Set')) return formatted",True,value,value,0.6687024235725403 5253,"def __init__(self, document: nodes.document, docstring_linker: 'DocstringLinker'): self._linker = docstring_linker if: if docutils_version_info >= (0, 19): settings = frontend.get_default_settings(html4css1.Writer()) else: settings = frontend.OptionParser([html4css1.Writer()]).get_default_values() self.__class__.settings = settings document.settings = self.settings super().__init__(document) self.section_level += 1",False,self.settings is None,"not hasattr(self, '__class__')",0.6540873050689697 5254,"def __init__(self, document: nodes.document, docstring_linker: 'DocstringLinker'): self._linker = docstring_linker if self.settings is None: if: settings = frontend.get_default_settings(html4css1.Writer()) else: settings = frontend.OptionParser([html4css1.Writer()]).get_default_values() self.__class__.settings = settings document.settings = self.settings super().__init__(document) self.section_level += 1",False,"docutils_version_info >= (0, 19)",frontend.has_default_settings(),0.6456277966499329 5255,"def _assert_no_error(error, exception_class=None): """""" Checks the return code and throws an exception if there is an error to report """""" if: return cf_error_string = Security.SecCopyErrorMessageString(error, None) output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) if output is None or output == u'': output = u'OSStatus %s' % error if exception_class is None: exception_class = ssl.SSLError raise exception_class(output)",False,error == 0,error is None,0.6736668348312378 5256,"def _assert_no_error(error, exception_class=None): """""" Checks the return code and throws an exception if there is an error to report """""" if error == 0: return cf_error_string = Security.SecCopyErrorMessageString(error, None) output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) if: output = u'OSStatus %s' % error if exception_class is None: exception_class = ssl.SSLError raise exception_class(output)",False,output is None or output == u'',error != 0,0.6527950167655945 5257,"def _assert_no_error(error, exception_class=None): """""" Checks the return code and throws an exception if there is an error to report """""" if error == 0: return cf_error_string = Security.SecCopyErrorMessageString(error, None) output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) if output is None or output == u'': output = u'OSStatus %s' % error if: exception_class = ssl.SSLError raise exception_class(output)",True,exception_class is None,exception_class is None,0.6525774598121643 5258,"def handleResponsePdu(self, snmpEngine, sendRequestHandle, errorIndication, PDU, cbCtx): stateReference, reqPDU = cbCtx if: PDU = v2c.apiPDU.getResponse(reqPDU) PDU.setErrorStatus(PDU, 5) self.sendPdu(snmpEngine, stateReference, PDU) self.releaseStateInformation(stateReference)",False,errorIndication,errorIndication == 1,0.6715266108512878 5259,"def rotate_img(self, image, angle): if: image = np.array(image) h, w = image.shape[:2] scale = 1.0 center = (w / 2, h / 2) M = cv2.getRotationMatrix2D(center, angle, scale) image = cv2.warpAffine(image, M, (w, h)) image = Image.fromarray(image) return image",True,not angle == 0.0,not angle == 0.0,0.6520473957061768 5260,"def __init__(self, *args, **kwargs): """""" Initialize a wxProperRadioButton. *args, **kwargs The positional and keyword arguments required to initialize a wx.RadioButton. """""" super(wxProperRadioButton, self).__init__(*args, **kwargs) parent = self.GetParent() if: children = self._parents.setdefault(parent, []) children.append(self) self._last = self.GetValue() self._in_click = False self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.Bind(wx.EVT_LEFT_UP, self.OnLeftUp) self.Bind(wx.EVT_RADIOBUTTON, self.OnToggled)",False,parent,parent and parent not in self._parents,0.6809478998184204 5261,"@classmethod def setWs(cls, Ws, PackagesPath=None): """"""Set WORKSPACE and PACKAGES_PATH environment. Args: cls (obj): The class pointer Ws (str): initialize WORKSPACE variable PackagesPath (str): initialize PackagesPath variable """""" cls.WORKSPACE = Ws if: cls.PACKAGES_PATH = [cls.convertPackagePath(Ws, os.path.normpath(Path.strip())) for Path in PackagesPath.split(os.pathsep)] else: cls.PACKAGES_PATH = []",True,PackagesPath,PackagesPath,0.6665385961532593 5262,"def fetch_flagged_messages(self, max_count: Optional[int]) -> list[FlaggedMessage]: qry = self.db.query(FlaggedMessage) if: qry = qry.limit(max_count) return qry.all()",True,max_count is not None,max_count is not None,0.6597882509231567 5263,"def onMessage(self, payload, isBinary): if: print('Text message received: {}'.format(payload.decode('utf8'))) reactor.callLater(1, self.sendHello)",False,not isBinary,isBinary,0.6503841876983643 5264,"def datetime_from_date(date: Union[str, datetime.datetime, datetime.date]) -> datetime.datetime: """"""Set time to 00:00:00 in a date, keep time info if given a datetime object"""""" if: return date if isinstance(date, str): raise ValueError(f'Is the string {date} a date?') return datetime.datetime.combine(date, datetime.datetime.min.time())",True,"isinstance(date, datetime.datetime)","isinstance(date, datetime.datetime)",0.6478968262672424 5265,"def datetime_from_date(date: Union[str, datetime.datetime, datetime.date]) -> datetime.datetime: """"""Set time to 00:00:00 in a date, keep time info if given a datetime object"""""" if isinstance(date, datetime.datetime): return date if: raise ValueError(f'Is the string {date} a date?') return datetime.datetime.combine(date, datetime.datetime.min.time())",False,"isinstance(date, str)",type(date) != str,0.6487365961074829 5266,"def _show_info(self, resname: Path, hklname: Path, resdata: Union[str, None], hkldata: Union[str, None]) -> None: if: self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {resname.name}.') if hkldata and (not resdata): self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {hklname.name}.') if hkldata and resdata: self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {resname.name} and {hklname.name}.')",False,resdata and (not hkldata),resname and resdata,0.6491628289222717 5267,"def _show_info(self, resname: Path, hklname: Path, resdata: Union[str, None], hkldata: Union[str, None]) -> None: if resdata and (not hkldata): self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {resname.name}.') if: self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {hklname.name}.') if hkldata and resdata: self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {resname.name} and {hklname.name}.')",False,hkldata and (not resdata),hkldata and resdata,0.6496071815490723 5268,"def _show_info(self, resname: Path, hklname: Path, resdata: Union[str, None], hkldata: Union[str, None]) -> None: if resdata and (not hkldata): self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {resname.name}.') if hkldata and (not resdata): self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {hklname.name}.') if: self._statusbar.show_message(f'{self._statusbar.current_message}\nFinished writing data to {resname.name} and {hklname.name}.')",True,hkldata and resdata,hkldata and resdata,0.657315731048584 5269,"def speak(self, text): mic_state = natlink.getMicState() natlink.execScript('TTSPlayString ""%s""' % text) if: natlink.setMicState(mic_state)",False,mic_state != natlink.getMicState(),mic_state != mic_state,0.6455487608909607 5270,"def __init__(self, env_params, sim_params, network, simulator='traci'): """"""See parent class."""""" for p in CLOSED_ENV_PARAMS.keys(): if: raise KeyError('Env parameter ""{}"" not supplied'.format(p)) super(AVClosedEnv, self).__init__(env_params=env_params, sim_params=sim_params, network=network, simulator=simulator)",True,p not in env_params.additional_params,p not in env_params.additional_params,0.6478058099746704 5271,"def urlopen(self, method, url, body=None, headers=None, retries=3, redirect=True, assert_same_host=True): if: headers = {} headers['Connection'] = 'Keep-Alive' return super(NTLMConnectionPool, self).urlopen(method, url, body, headers, retries, redirect, assert_same_host)",True,headers is None,headers is None,0.6546896696090698 5272,"def output_bulk_location_ad_extensions(bulk_entities): for entity in bulk_entities: output_status_message('BulkLocationAdExtension:') output_status_message('Account Id: {0}'.format(entity.account_id)) output_status_message('Client Id: {0}'.format(entity.client_id)) if: output_status_message('LastModifiedTime: {0}'.format(entity.last_modified_time)) output_adextension(entity.location_ad_extension) if entity.has_errors: output_bulk_errors(entity.errors) output_status_message('')",True,entity.last_modified_time is not None,entity.last_modified_time is not None,0.6465399861335754 5273,"def output_bulk_location_ad_extensions(bulk_entities): for entity in bulk_entities: output_status_message('BulkLocationAdExtension:') output_status_message('Account Id: {0}'.format(entity.account_id)) output_status_message('Client Id: {0}'.format(entity.client_id)) if entity.last_modified_time is not None: output_status_message('LastModifiedTime: {0}'.format(entity.last_modified_time)) output_adextension(entity.location_ad_extension) if: output_bulk_errors(entity.errors) output_status_message('')",True,entity.has_errors,entity.has_errors,0.6502421498298645 5274,"def is_disjoint(self, othermask): assert isinstance(othermask, Mask), 'need to compare to another Mask instance' if: return True else: return False return",False,np.sum(self.mask & othermask.mask) == 0,othermask.get_rank() == self.order_by_id,0.6473311185836792 5275,"def _cb_download_progress(dest, dltotal, dlnow, uptotal, upnow, dwl_data, *args, **kargs): complete_cb, progress_cb, min_size = dwl_data if: progress_cb(dest, dltotal, dlnow, *args, **kargs) return 0",False,progress_cb and callable(progress_cb),progress_cb is not None,0.6441696882247925 5276,"def install(packages, options=None, fatal=False): """"""Install one or more packages."""""" cmd = ['yum', '--assumeyes'] if: cmd.extend(options) cmd.append('install') if isinstance(packages, str): cmd.append(packages) else: cmd.extend(packages) log('Installing {} with options: {}'.format(packages, options)) _run_yum_command(cmd, fatal)",False,options is not None,"isinstance(options, dict)",0.6557868719100952 5277,"def install(packages, options=None, fatal=False): """"""Install one or more packages."""""" cmd = ['yum', '--assumeyes'] if options is not None: cmd.extend(options) cmd.append('install') if: cmd.append(packages) else: cmd.extend(packages) log('Installing {} with options: {}'.format(packages, options)) _run_yum_command(cmd, fatal)",True,"isinstance(packages, str)","isinstance(packages, str)",0.6463998556137085 5278,"def item_selected(self, url, user_data): dia = EmcDialog(self._lbl, style='list', done_cb=self._dia_list_selected_cb) for string in self._sli: if: it = dia.list_item_append(string, end='icon/check_on') it.selected = True else: dia.list_item_append(string) dia.list_go()",False,"string == ini.get(self._sec, self._opt)",user_data['button'] == 'check_on',0.6420152187347412 5279,"def __init__(self, target_size): """""" Resize image to target size, convert normalized xywh to pixel xyxy format ([x_center, y_center, width, height] -> [x0, y0, x1, y1]). Args: target_size (int|list): image target size. """""" super(LetterBoxResize, self).__init__() if: target_size = [target_size, target_size] self.target_size = target_size",True,"isinstance(target_size, int)","isinstance(target_size, int)",0.6471319198608398 5280,"def set_bucket_info(self, num_buckets): self.num_buckets = num_buckets if: self._collated_sizes = np.minimum(np.array(self.sizes), self.max_sample_size) self.buckets = get_buckets(self._collated_sizes, self.num_buckets) self._bucketed_sizes = get_bucketed_sizes(self._collated_sizes, self.buckets) logger.info(f'{len(self.buckets)} bucket(s) for the audio dataset: {self.buckets}')",True,self.num_buckets > 0,self.num_buckets > 0,0.6511995792388916 5281,"def __post_init__(self): client = None if: with suppress(RuntimeError, TimeoutError): client = self.get_api_client() self._set('openshift_version', utils.get_openshift_version(allow_default=True, client=client)) Trigger.trigger_configurations([self], get_default_triggers())",False,not self.is_kube_api,self._get('openshift_version') == 0,0.6554040908813477 5282,"def forward(self, x): x = self._conv(x) x = self._batch_norm(x) if: x = F.relu(x) elif self.act =='relu6': x = F.relu6(x) elif self.act == 'leaky': x = F.leaky_relu(x) elif self.act == 'hard_swish': x = hard_swish(x) return x",True,self.act == 'relu',self.act == 'relu',0.6493022441864014 5283,"def forward(self, x): x = self._conv(x) x = self._batch_norm(x) if self.act =='relu': x = F.relu(x) elif: x = F.relu6(x) elif self.act == 'leaky': x = F.leaky_relu(x) elif self.act == 'hard_swish': x = hard_swish(x) return x",True,self.act == 'relu6',self.act == 'relu6',0.6492123603820801 5284,"def forward(self, x): x = self._conv(x) x = self._batch_norm(x) if self.act =='relu': x = F.relu(x) elif self.act =='relu6': x = F.relu6(x) elif: x = F.leaky_relu(x) elif self.act == 'hard_swish': x = hard_swish(x) return x",False,self.act == 'leaky',self.act == 'leaky_relu',0.6488652229309082 5285,"def forward(self, x): x = self._conv(x) x = self._batch_norm(x) if self.act =='relu': x = F.relu(x) elif self.act =='relu6': x = F.relu6(x) elif self.act == 'leaky': x = F.leaky_relu(x) elif: x = hard_swish(x) return x",True,self.act == 'hard_swish',self.act == 'hard_swish',0.6502360105514526 5286,"def role_from_id(self, role_id: int) -> Group: """"""Returns a `Group` for the given role id. Parameters ---------- role_id : int A role id. Returns ------- `Group ` The role's Group object. Raises ------ TypeError If the given role_id parameter is not of type int """""" if: raise TypeError(f'role_id should be of type int, not {role_id.__class__.__name__}') return self._get_base_group(self.ROLE, str(role_id))",True,type(role_id) is not int,type(role_id) is not int,0.6510578393936157 5287,"def setUp(self): for server in self.servers[1:]: ret, _, _ = peer_detach(self.mnode, server) if: raise ExecutionError('Peer detach failed') g.log.info('Peer detach SUCCESSFUL.') self.get_super_method(self,'setUp')() self.node_to_probe = choice(self.servers[1:])",False,ret,ret != 0,0.6703723073005676 5288,"def TEMA(df, p): if: p = [p, 90] df['TEMA'] = ta.TEMA(df.close, p[1]) df = df.replace([np.inf, -np.inf], np.nan) return df",True,len(p) <= 2,len(p) <= 2,0.6595357656478882 5289,"def _flowableSublist(V): """"""if it isn't a list or tuple, wrap it in a list"""""" if: V = V is not None and [V] or [] from reportlab.platypus.doctemplate import LCActionFlowable assert not [x for x in V if isinstance(x, LCActionFlowable)], 'LCActionFlowables not allowed in sublists' return V",False,"not isinstance(V, (list, tuple))","isinstance(V, (list, tuple))",0.6504648923873901 5290,"def get_binary_stream(name: ""te.Literal['stdin','stdout','stderr']"") -> t.BinaryIO: """"""Returns a system stream for byte processing. :param name: the name of the stream to open. Valid names are ``'stdin'``, ``'stdout'`` and ``'stderr'`` """""" opener = binary_streams.get(name) if: raise TypeError(f""Unknown standard stream '{name}'"") return opener()",True,opener is None,opener is None,0.6591485738754272 5291,"def set_value(self, key, value): """"""Modify a value in the configuration. """""" self._ensure_have_load_only() fname, parser = self._get_parser_to_modify() if: section, name = _disassemble_key(key) if not parser.has_section(section): parser.add_section(section) parser.set(section, name, value) self._config[self.load_only][key] = value self._mark_as_modified(fname, parser)",False,parser is not None,value is not None,0.6538031101226807 5292,"def set_value(self, key, value): """"""Modify a value in the configuration. """""" self._ensure_have_load_only() fname, parser = self._get_parser_to_modify() if parser is not None: section, name = _disassemble_key(key) if: parser.add_section(section) parser.set(section, name, value) self._config[self.load_only][key] = value self._mark_as_modified(fname, parser)",True,not parser.has_section(section),not parser.has_section(section),0.6417287588119507 5293,"def __repr__(self): if: return '%s (%s)' % (self, self.location) else: return str(self)",True,self.location,self.location,0.6533442735671997 5294,"def SetPointerType(pointer, cls): if: raise RuntimeError('This type already exists in the cache') if id(pointer) not in _pointer_type_cache: raise RuntimeError(""What's this???"") pointer.set_type(cls) _pointer_type_cache[cls] = pointer del _pointer_type_cache[id(pointer)]",True,"_pointer_type_cache.get(cls, None) is not None","_pointer_type_cache.get(cls, None) is not None",0.6491004228591919 5295,"def SetPointerType(pointer, cls): if _pointer_type_cache.get(cls, None) is not None: raise RuntimeError('This type already exists in the cache') if: raise RuntimeError(""What's this???"") pointer.set_type(cls) _pointer_type_cache[cls] = pointer del _pointer_type_cache[id(pointer)]",True,id(pointer) not in _pointer_type_cache,id(pointer) not in _pointer_type_cache,0.6468887329101562 5296,"def clip_grad_norm(self, max_norm): """"""Clips gradient norm."""""" if: return torch.nn.utils.clip_grad_norm_(self.params, max_norm) else: return math.sqrt(sum((p.grad.data.norm() ** 2 for p in self.params)))",False,max_norm > 0,self.grad is None,0.6522472500801086 5297,"def __init__(self, name, db, cache_size=10): """""" Get access to a table. :param name: The name of the table. :type name: str :param db: The parent database. :type db: tinydb.database.TinyDB :param cache_size: Maximum size of query cache. """""" self.name = name self._db = db self._query_cache = LRUCache(capacity=cache_size) old_ids = self._read().keys() if: self._last_id = max((i for i in old_ids)) else: self._last_id = 0",True,old_ids,old_ids,0.6631596088409424 5298,"def generate_regions(self, start_line=1, end_line=None): block_start = 1 readline = LinesToReadline(self.lines, block_start) shifted = start_line - block_start + 1 try: for start, end in self._logical_lines(readline): real_start = start + block_start - 1 real_start = self._first_non_blank(real_start) if: break real_end = end + block_start - 1 if real_start >= start_line: yield (real_start, real_end) except tokenize.TokenError as e: pass",False,end_line is not None and real_start >= end_line,real_start == end_line,0.6438643932342529 5299,"def generate_regions(self, start_line=1, end_line=None): block_start = 1 readline = LinesToReadline(self.lines, block_start) shifted = start_line - block_start + 1 try: for start, end in self._logical_lines(readline): real_start = start + block_start - 1 real_start = self._first_non_blank(real_start) if end_line is not None and real_start >= end_line: break real_end = end + block_start - 1 if: yield (real_start, real_end) except tokenize.TokenError as e: pass",False,real_start >= start_line,real_end >= shifted,0.6448943614959717 5300,"def is_entrypoint_wrapper(name): if: matchname = name[:-4] elif name.lower().endswith('-script.py'): matchname = name[:-10] elif name.lower().endswith('.pya'): matchname = name[:-4] else: matchname = name return matchname in console or matchname in gui",False,name.lower().endswith('.exe'),name.lower().endswith('-main.py') or name.lower().endswith('-main.py'),0.64222651720047 5301,"def is_entrypoint_wrapper(name): if name.lower().endswith('.exe'): matchname = name[:-4] elif: matchname = name[:-10] elif name.lower().endswith('.pya'): matchname = name[:-4] else: matchname = name return matchname in console or matchname in gui",False,name.lower().endswith('-script.py'),name.lower().endswith('.pyc'),0.6415789127349854 5302,"def is_entrypoint_wrapper(name): if name.lower().endswith('.exe'): matchname = name[:-4] elif name.lower().endswith('-script.py'): matchname = name[:-10] elif: matchname = name[:-4] else: matchname = name return matchname in console or matchname in gui",False,name.lower().endswith('.pya'),name.lower().endswith('-pyw.g.file'),0.6422131061553955 5303,"def allow_migrate(self, db, app_label, model=None, **hints): """""" Make sure the auth app only appears in the 'auth_db' database. """""" if: return DATABASE_MAPPING.get(app_label) == db elif app_label in DATABASE_MAPPING: return False return None",False,db in DATABASE_MAPPING.values(),app_label in DATABASE_MAPPING,0.6508094072341919 5304,"def allow_migrate(self, db, app_label, model=None, **hints): """""" Make sure the auth app only appears in the 'auth_db' database. """""" if db in DATABASE_MAPPING.values(): return DATABASE_MAPPING.get(app_label) == db elif: return False return None",False,app_label in DATABASE_MAPPING,"app_label in ('auth', 'auth_db')",0.6514379978179932 5305,"def maybe_randomize(list_in: List[_A], randomize: bool) -> List[_A]: """""" If randomize is true, return a shuffled copy of the list. Otherwise, just return the list. """""" if: return random.sample(list_in, len(list_in)) else: return list_in",True,randomize,randomize,0.6697658896446228 5306,"def set_vel_I_gain(self, x): if: return with self.pt_lock: with DelayedKeyboardInterrupt(): dxl_comm_result, dxl_error = self.packet_handler.write2ByteTxRx(self.port_handler, self.dxl_id, XL430_ADDR_VELOCITY_I_GAIN, int(x)) self.handle_comm_result('XL430_ADDR_VELOCITY_I_GAIN', dxl_comm_result, dxl_error)",True,not self.hw_valid,not self.hw_valid,0.6529350876808167 5307,"def _resolve_ambiguous_time(self, dt): idx = self._find_last_transition(dt) _fold = self._fold(dt) if: return idx idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) return idx - idx_offset",True,idx is None or idx == 0,idx is None or idx == 0,0.6587058901786804 5308,"def predict(self, X): if: raise NotImplementedError return self.estimator.predict(X)",True,self.estimator is None,self.estimator is None,0.6472927331924438 5309,"def has_timeout(timeout): if: return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT return timeout is not None",False,"hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT')","isinstance(timeout, numbers.Number)",0.648670494556427 5310,"def parse_unary(self): token_type = self.stream.current.type lineno = self.stream.current.lineno if: next(self.stream) node = self.parse_unary() return nodes.Neg(node, lineno=lineno) if token_type == 'add': next(self.stream) node = self.parse_unary() return nodes.Pos(node, lineno=lineno) return self.parse_primary()",False,token_type == 'sub',token_type == 'neg',0.6496208906173706 5311,"def parse_unary(self): token_type = self.stream.current.type lineno = self.stream.current.lineno if token_type =='sub': next(self.stream) node = self.parse_unary() return nodes.Neg(node, lineno=lineno) if: next(self.stream) node = self.parse_unary() return nodes.Pos(node, lineno=lineno) return self.parse_primary()",False,token_type == 'add',token_type == 'l',0.6513705253601074 5312,"def get_server(target, endpoints, serializer=None): assert TRANSPORT is not None if: serializer = ProfilerRequestContextSerializer(serializer) else: serializer = RequestContextSerializer(serializer) access_policy = dispatcher.DefaultRPCAccessPolicy return messaging.get_rpc_server(TRANSPORT, target, endpoints, executor='eventlet', serializer=serializer, access_policy=access_policy)",False,profiler,"isinstance(serializer, basestring)",0.6878862380981445 5313,"def get_desc(self, obj): if: return obj.eff_en.desc elif self.context['language'] == 'ja': return obj.eff_ja.desc else: return obj.eff_en.desc",True,'language' not in self.context,'language' not in self.context,0.6494885683059692 5314,"def get_desc(self, obj): if 'language' not in self.context: return obj.eff_en.desc elif: return obj.eff_ja.desc else: return obj.eff_en.desc",True,self.context['language'] == 'ja',self.context['language'] == 'ja',0.6482892036437988 5315,"def __repr__(self): if: return '' % (self.bucket.name, self.name) else: return '' % self.name",True,self.bucket,self.bucket,0.6655426621437073 5316,"def on_task_cancel(self, app, task): if: task.cancel() self.on_task_done(app, task)",False,"show_dialog(DialogType.QUESTION, self._main_window) == Gtk.ResponseType.OK","self.on_task_cancel_event(app, task)",0.6475565433502197 5317,"def _collect_weights(self): """"""Collects (non-)trainable weights of each of the parallel layers. """""" if: pass for layer in self._layers: if self.trainable: add_variable(layer._trainable_weights, self._trainable_weights) else: add_variable(layer._trainable_weights, self._non_trainable_weights) add_variable(layer._non_trainable_weights, self._non_trainable_weights)",False,self._layers is None,self._non_trainable_weights is None,0.6498070955276489 5318,"def _collect_weights(self): """"""Collects (non-)trainable weights of each of the parallel layers. """""" if self._layers is None: pass for layer in self._layers: if: add_variable(layer._trainable_weights, self._trainable_weights) else: add_variable(layer._trainable_weights, self._non_trainable_weights) add_variable(layer._non_trainable_weights, self._non_trainable_weights)",False,self.trainable,"isinstance(layer, nn.Linear)",0.6576576828956604 5319,"def apply_update_callbacks(self, events: EventListType) -> 'Widget': """""" Apply callbacks on Widget update. .. note:: Readonly widgets or hidden widgets do not apply update callbacks. :param events: Events list :return: Self reference """""" if: return self for callback in self._update_callbacks.values(): callback(events, self, self._menu) return self",False,len(self._update_callbacks) == 0 or self.readonly,not self._update_callbacks,0.6466811299324036 5320,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if isinstance(auth, tuple) and len(auth) == 2: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,auth is None,auth is None,0.6572875380516052 5321,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if: if isinstance(auth, tuple) and len(auth) == 2: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,auth,auth,0.6750684380531311 5322,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,"isinstance(auth, tuple) and len(auth) == 2","isinstance(auth, tuple) and len(auth) == 2",0.6471284627914429 5323,"def gen_post_code(self, attrib, pc_value): out = [] if: out.append('%s = %s;' % (self.C_PC, pc_value)) out.append('dump_gpregs_32(jitcpu->cpu);') return out",False,attrib.log_regs,attrib.get('code') == 'C_PC' and pc_value,0.6464082598686218 5324,"def _read_sections(self): while not self._doc.eof(): data = self._read_to_next_section() name = data[0].strip() if: yield (name, data[1:]) elif len(data) < 2: yield StopIteration else: yield (name, self._strip(data[2:]))",False,name.startswith('..'),len(data) > 1,0.6406940221786499 5325,"def _read_sections(self): while not self._doc.eof(): data = self._read_to_next_section() name = data[0].strip() if name.startswith('..'): yield (name, data[1:]) elif: yield StopIteration else: yield (name, self._strip(data[2:]))",False,len(data) < 2,len(data) == 1,0.6487002372741699 5326,"def set_default_positive_bijector(value: str) -> None: """""" Sets positive bijector type. There are currently two options implemented: ""exp"" and ""softplus"". """""" type_map = positive_bijector_type_map() if: value = value.lower() if value not in type_map: raise ValueError(f'`{value}` not in set of valid bijectors: {sorted(type_map)}') set_config(replace(config(), positive_bijector=value))",True,"isinstance(value, str)","isinstance(value, str)",0.6504300832748413 5327,"def set_default_positive_bijector(value: str) -> None: """""" Sets positive bijector type. There are currently two options implemented: ""exp"" and ""softplus"". """""" type_map = positive_bijector_type_map() if isinstance(value, str): value = value.lower() if: raise ValueError(f'`{value}` not in set of valid bijectors: {sorted(type_map)}') set_config(replace(config(), positive_bijector=value))",True,value not in type_map,value not in type_map,0.652694821357727 5328,"def slotPropertyChanged(self, property, value): editors = self.m_createdEditors.get(property) if: return for editor in editors: editor.setValue(value)",True,not editors,not editors,0.6502171158790588 5329,"def _validate_allowed_url(self, full_url): parsed = botocore.compat.urlparse(full_url) is_whitelisted_host = self._check_if_whitelisted_host(parsed.hostname) if: raise ValueError(""Unsupported host '%s'. Can only retrieve metadata from these hosts: %s"" % (parsed.hostname, ', '.join(self._ALLOWED_HOSTS)))",True,not is_whitelisted_host,not is_whitelisted_host,0.6475695371627808 5330,"def __eq__(self, that): if: return False for nt in self.__annotations__: if getattr(self, nt)!= getattr(that, nt): return False return True",False,type(self) != type(that),self.__annotations__ != that.__annotations__,0.6447950005531311 5331,"def __eq__(self, that): if type(self)!= type(that): return False for nt in self.__annotations__: if: return False return True",False,"getattr(self, nt) != getattr(that, nt)",nt != that and nt != that,0.6451401710510254 5332,"def accept(self, visitor: ParseTreeVisitor): if: return visitor.visitUnaryExpression3(self) else: return visitor.visitChildren(self)",True,"hasattr(visitor, 'visitUnaryExpression3')","hasattr(visitor, 'visitUnaryExpression3')",0.6440174579620361 5333,"def __init__(self, item=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = None if: self.item = [] else: self.item = item self.item_nsprefix_ = 'tns'",True,item is None,item is None,0.6584115028381348 5334,"def forward(self, x): identity = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) if: identity = self.downsample(x) out += identity out = self.relu(out) return out",True,self.downsample is not None,self.downsample is not None,0.6450522541999817 5335,"def whiten_cholesky_stat1(self, mu, sigma): """"""Whiten first-order statistics by using Cholesky decomposition of Sigma :param mu: array, mean vector to be subtracted from the statistics :param sigma: narray, co-variance matrix or covariance super-vector """""" if: inv_sigma = scipy.linalg.inv(sigma) chol_invcov = scipy.linalg.cholesky(inv_sigma).T self.center_stat1(mu) self.stat1 = self.stat1.dot(chol_invcov) elif sigma.ndim == 1: self.center_stat1(mu) self.stat1 = self.stat1 / numpy.sqrt(sigma) else: raise Exception('Wrong dimension of Sigma, must be 1 or 2')",True,sigma.ndim == 2,sigma.ndim == 2,0.6540717482566833 5336,"def whiten_cholesky_stat1(self, mu, sigma): """"""Whiten first-order statistics by using Cholesky decomposition of Sigma :param mu: array, mean vector to be subtracted from the statistics :param sigma: narray, co-variance matrix or covariance super-vector """""" if sigma.ndim == 2: inv_sigma = scipy.linalg.inv(sigma) chol_invcov = scipy.linalg.cholesky(inv_sigma).T self.center_stat1(mu) self.stat1 = self.stat1.dot(chol_invcov) elif: self.center_stat1(mu) self.stat1 = self.stat1 / numpy.sqrt(sigma) else: raise Exception('Wrong dimension of Sigma, must be 1 or 2')",True,sigma.ndim == 1,sigma.ndim == 1,0.6547554731369019 5337,"def _display_human(self, fig: plt.Figure) -> None: if: fig.canvas.draw() if jumanji.environments.is_colab(): plt.show(self._name) else: fig.canvas.draw_idle() fig.canvas.start_event_loop(2.0)",True,plt.isinteractive(),plt.isinteractive(),0.6488767862319946 5338,"def _display_human(self, fig: plt.Figure) -> None: if plt.isinteractive(): fig.canvas.draw() if: plt.show(self._name) else: fig.canvas.draw_idle() fig.canvas.start_event_loop(2.0)",True,jumanji.environments.is_colab(),jumanji.environments.is_colab(),0.6450182795524597 5339,"def manual_slices(self, video): """"""Create slices manually."""""" self.stdout.write('\nCreating recording slices from user inputs...') while True: manual_start_recording = input('\nStart recording? (y/N)') if: start_recording(video) manual_stop_recording = input('Stop recording? (y/N)') if manual_stop_recording.lower() == 'y': stop_recording(video) else: break else: break",True,manual_start_recording.lower() == 'y',manual_start_recording.lower() == 'y',0.6465060710906982 5340,"def manual_slices(self, video): """"""Create slices manually."""""" self.stdout.write('\nCreating recording slices from user inputs...') while True: manual_start_recording = input('\nStart recording? (y/N)') if manual_start_recording.lower() == 'y': start_recording(video) manual_stop_recording = input('Stop recording? (y/N)') if: stop_recording(video) else: break else: break",True,manual_stop_recording.lower() == 'y',manual_stop_recording.lower() == 'y',0.6469433903694153 5341,"def write(self, oprot): if: oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('getMetaProfile_args') oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot._fast_encode is not None and self.thrift_spec is not None,oprot._fast_encode is not None and self.thrift_spec is not None,0.6459730863571167 5342,"def cursor(self): curs = DummyCursor() self._cursors.append(curs) if: curs.close() return curs",False,self._closed or self._closed_on_server,self._is_cursor_closed(),0.6498550176620483 5343,"@classmethod def from_tuples(cls, fieldname, value): """""" A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """""" if: if len(value) == 3: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename) request_param.make_multipart(content_type=content_type) return request_param",True,"isinstance(value, tuple)","isinstance(value, tuple)",0.6433138251304626 5344,"@classmethod def from_tuples(cls, fieldname, value): """""" A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """""" if isinstance(value, tuple): if: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename) request_param.make_multipart(content_type=content_type) return request_param",True,len(value) == 3,len(value) == 3,0.649527907371521 5345,"def check(cmd: 'build_app.py2app', mf: ModuleGraph) -> typing.Optional[RecipeInfo]: to_return: typing.List[str] = [] for python_package, expected_missing in AUTO_MISSING: m = mf.findNode(python_package) if m is None or m.filename is None: continue to_return.extend(expected_missing) if: return {'expected_missing_imports': set(to_return)} return None",True,to_return,to_return,0.6627007722854614 5346,"def check(cmd: 'build_app.py2app', mf: ModuleGraph) -> typing.Optional[RecipeInfo]: to_return: typing.List[str] = [] for python_package, expected_missing in AUTO_MISSING: m = mf.findNode(python_package) if: continue to_return.extend(expected_missing) if to_return: return {'expected_missing_imports': set(to_return)} return None",True,m is None or m.filename is None,m is None or m.filename is None,0.6491450667381287 5347,"def to_xml(self, element, obj): if: return attr = getattr(obj, self.attr_name) if attr!= self.default: new_el = element.makeelement(QName(default_ns, self.adm_name)) new_el.text = self.type.dumps_func(attr) element.append(new_el)",False,self.parse_only,self.attr_name is None,0.6490534543991089 5348,"def to_xml(self, element, obj): if self.parse_only: return attr = getattr(obj, self.attr_name) if: new_el = element.makeelement(QName(default_ns, self.adm_name)) new_el.text = self.type.dumps_func(attr) element.append(new_el)",False,attr != self.default,attr,0.6509677171707153 5349,"def is_waror(context): if: return True",True,"context.version == 20 and context.user_version in (24724, 25108, 24596) and (context.is_dev == 0)","context.version == 20 and context.user_version in (24724, 25108, 24596) and (context.is_dev == 0)",0.6456003189086914 5350,"def _is_mutated(self): """""" :return: A boolean - if the sequence or any children (recursively) have been mutated """""" mutated = self._mutated if: for child in self.children: if isinstance(child, Sequence) or isinstance(child, SequenceOf): mutated = mutated or child._is_mutated() return mutated",True,self.children is not None,self.children is not None,0.6449151039123535 5351,"def _is_mutated(self): """""" :return: A boolean - if the sequence or any children (recursively) have been mutated """""" mutated = self._mutated if self.children is not None: for child in self.children: if: mutated = mutated or child._is_mutated() return mutated",True,"isinstance(child, Sequence) or isinstance(child, SequenceOf)","isinstance(child, Sequence) or isinstance(child, SequenceOf)",0.6417675614356995 5352,"def str2bool(val): """""" Converts strings like, 'false', 'true', '0', and '1' into their boolean equivalents (in Python). If no logical match is found, return False. Examples:: >>> str2bool('false') False >>> str2bool('1') True >>> str2bool('whatever') False """""" if: return True else: return False",False,"isinstance(val, basestring) and val.lower() in ['1', 'true', 'yes']","val in ['false', 'true', '0', '1', 'whatever']",0.641567051410675 5353,"def forward(self, x, y): out = self.bn(x) if: gamma, beta = self.embed(y).chunk(2, dim=1) out = gamma.view(-1, self.num_features, 1, 1) * out + beta.view(-1, self.num_features, 1, 1) else: gamma = self.embed(y) out = gamma.view(-1, self.num_features, 1, 1) * out return out",False,self.bias,"isinstance(y, np.ndarray)",0.6495733261108398 5354,"def compose_scalar_node(self, anchor): event = self.get_event() tag = event.tag if: tag = self.resolve(ScalarNode, event.value, event.implicit) node = ScalarNode(tag, event.value, event.start_mark, event.end_mark, style=event.style) if anchor is not None: self.anchors[anchor] = node return node",False,tag is None or tag == '!',tag is None or tag == '!' or tag == '!',0.6510351896286011 5355,"def compose_scalar_node(self, anchor): event = self.get_event() tag = event.tag if tag is None or tag == '!': tag = self.resolve(ScalarNode, event.value, event.implicit) node = ScalarNode(tag, event.value, event.start_mark, event.end_mark, style=event.style) if: self.anchors[anchor] = node return node",True,anchor is not None,anchor is not None,0.6538481712341309 5356,"def get_hessian(self, sc, with_setup_cell=True): if: self.setup_cell(sc) return self.get_dmnomass(np.zeros((1, 3)))[:, :, 0].real else: mass = np.sqrt(sc.atomic_masses).repeat(3) return self.get_dm_supercell(sc) * np.outer(mass, mass)",True,with_setup_cell,with_setup_cell,0.6536605358123779 5357,"def search_line(start, end): if: return global max_size line_cnt = 0 mid = int((start + end) / 2) for line in lan_lines: line_cnt += line // mid if line_cnt >= N: max_size = max(max_size, mid) search_line(mid + 1, end) else: search_line(start, mid - 1)",False,start > end,start == end,0.6567143201828003 5358,"def search_line(start, end): if start > end: return global max_size line_cnt = 0 mid = int((start + end) / 2) for line in lan_lines: line_cnt += line // mid if: max_size = max(max_size, mid) search_line(mid + 1, end) else: search_line(start, mid - 1)",False,line_cnt >= N,line_cnt > max_size,0.6490465402603149 5359,"@udf_params.setter def udf_params(self, value): if: return self._udf_params = value",True,"not isinstance(value, dict)","not isinstance(value, dict)",0.6461789608001709 5360,"def published_articles(self): articles = submission_models.Article.objects.filter(authors=self, stage=submission_models.STAGE_PUBLISHED, date_published__lte=timezone.now()) request = utils_logic.get_current_request() if: articles.filter(journal=request.journal) return articles",False,request and request.journal,request.journal,0.6562454700469971 5361,"def export(self, outfile, level, namespace_='', name_='docEntryType', namespacedef_=''): showIndent(outfile, level) outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_)) self.exportAttributes(outfile, level, namespace_, name_='docEntryType') if: outfile.write('>\n') self.exportChildren(outfile, level + 1, namespace_, name_) showIndent(outfile, level) outfile.write('\n' % (namespace_, name_)) else: outfile.write(' />\n')",True,self.hasContent_(),self.hasContent_(),0.6511865258216858 5362,"def __new__(cls, value: str, prefix: str) -> 'ClassName': new_value = fix_reserved_words(pascal_case(sanitize(value))) if: value = f'{prefix}{new_value}' new_value = fix_reserved_words(pascal_case(sanitize(value))) return str.__new__(cls, new_value)",False,not new_value.isidentifier(),prefix,0.6496367454528809 5363,"def gcd(a, b): """"""Calculate the Greatest Common Divisor of a and b. Unless b==0, the result will have the same sign as b (so that when b is divided by it, the result comes out positive). """""" import warnings warnings.warn('fractions.gcd() is deprecated. Use math.gcd() instead.', DeprecationWarning, 2) if: if (b or a) < 0: return -math.gcd(a, b) return math.gcd(a, b) return _gcd(a, b)",False,type(a) is int is type(b),a == 0,0.6429922580718994 5364,"def gcd(a, b): """"""Calculate the Greatest Common Divisor of a and b. Unless b==0, the result will have the same sign as b (so that when b is divided by it, the result comes out positive). """""" import warnings warnings.warn('fractions.gcd() is deprecated. Use math.gcd() instead.', DeprecationWarning, 2) if type(a) is int is type(b): if: return -math.gcd(a, b) return math.gcd(a, b) return _gcd(a, b)",False,(b or a) < 0,a == 0,0.651755690574646 5365,"def has_metadata_cell(cells, fn): for c in cells: if: return c",False,"re.search(f""update_nb_metadata\\('{fn}'"", c['source'])",c.cell_type == fn,0.6446504592895508 5366,"def __call__(cls, *args, **kwargs): """""" Overload to prevent `Flat(...)` being used """""" g = cls._expected_grade(*args, **kwargs) if: if g is None: raise TypeError('Must construct as {}[n](...)'.format(cls.__name__)) return cls[g].__call__(*args, **kwargs) if g is not None and g!= cls._grade: raise ValueError('Grade inferred from arguments ({}) does not match class grade ({})'.format(g, cls._grade)) return super().__call__(*args, **kwargs)",False,not cls.__instantiated,cls.__grade__ is None,0.6521517038345337 5367,"def __call__(cls, *args, **kwargs): """""" Overload to prevent `Flat(...)` being used """""" g = cls._expected_grade(*args, **kwargs) if not cls.__instantiated: if g is None: raise TypeError('Must construct as {}[n](...)'.format(cls.__name__)) return cls[g].__call__(*args, **kwargs) if: raise ValueError('Grade inferred from arguments ({}) does not match class grade ({})'.format(g, cls._grade)) return super().__call__(*args, **kwargs)",False,g is not None and g != cls._grade,g != cls._grade,0.6500716209411621 5368,"def __call__(cls, *args, **kwargs): """""" Overload to prevent `Flat(...)` being used """""" g = cls._expected_grade(*args, **kwargs) if not cls.__instantiated: if: raise TypeError('Must construct as {}[n](...)'.format(cls.__name__)) return cls[g].__call__(*args, **kwargs) if g is not None and g!= cls._grade: raise ValueError('Grade inferred from arguments ({}) does not match class grade ({})'.format(g, cls._grade)) return super().__call__(*args, **kwargs)",True,g is None,g is None,0.66280597448349 5369,"def get_platform(): if: return get_host_platform() cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') if cross_compilation_target not in _TARGET_TO_PLAT: return get_host_platform() return _TARGET_TO_PLAT[cross_compilation_target]",True,os.name != 'nt',os.name != 'nt',0.6532081961631775 5370,"def get_platform(): if os.name!= 'nt': return get_host_platform() cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') if: return get_host_platform() return _TARGET_TO_PLAT[cross_compilation_target]",False,cross_compilation_target not in _TARGET_TO_PLAT,cross_compilation_target not in _TARGET_TO_TO_PLAT,0.6486022472381592 5371,"def EncAlgo(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if: return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0",True,o != 0,o != 0,0.6645307540893555 5372,"def __init__(self, superclass, name, register=True): self._imp_table = {} self.name = name self.objc_cls = create_subclass(superclass, name) self._as_parameter_ = self.objc_cls if: self.register()",True,register,register,0.6764043569564819 5373,"def send_from_directory(directory, filename, **options): """"""Send a file from a given directory with :func:`send_file`. This is a secure way to quickly expose static files from an upload folder or something similar. Example usage:: @app.route('/uploads/') def download_file(filename): return send_from_directory(app.config['UPLOAD_FOLDER'], filename, as_attachment=True) .. admonition:: Sending files and Performance It is strongly recommended to activate either `X-Sendfile` support in your webserver or (if no authentication happens) to tell the webserver to serve files for the given path on its own without calling into the web application for improved performance. .. versionadded:: 0.5 :param directory: the directory where all the files are stored. :param filename: the filename relative to that directory to download. :param options: optional keyword arguments that are directly forwarded to :func:`send_file`. """""" filename = posixpath.normpath(filename) if: raise NotFound() filename = os.path.join(directory, filename) if not os.path.isfile(filename): raise NotFound() return send_file(filename, conditional=True, **options)",False,"filename.startswith(('/', '../'))",not os.path.isfile(filename),0.6401010751724243 5374,"def send_from_directory(directory, filename, **options): """"""Send a file from a given directory with :func:`send_file`. This is a secure way to quickly expose static files from an upload folder or something similar. Example usage:: @app.route('/uploads/') def download_file(filename): return send_from_directory(app.config['UPLOAD_FOLDER'], filename, as_attachment=True) .. admonition:: Sending files and Performance It is strongly recommended to activate either `X-Sendfile` support in your webserver or (if no authentication happens) to tell the webserver to serve files for the given path on its own without calling into the web application for improved performance. .. versionadded:: 0.5 :param directory: the directory where all the files are stored. :param filename: the filename relative to that directory to download. :param options: optional keyword arguments that are directly forwarded to :func:`send_file`. """""" filename = posixpath.normpath(filename) if filename.startswith(('/', '../')): raise NotFound() filename = os.path.join(directory, filename) if: raise NotFound() return send_file(filename, conditional=True, **options)",False,not os.path.isfile(filename),not os.path.exists(filename),0.6424171924591064 5375,"def _delete_id_and_check_empty_stat(stats_dict): for stat in stats_dict.copy(): if: del stats_dict[stat]['_id'] if stats_dict[stat] is None: stats_dict[stat] = {}",False,stats_dict[stat] is not None,'_id' in stats_dict[stat],0.647645115852356 5376,"def _delete_id_and_check_empty_stat(stats_dict): for stat in stats_dict.copy(): if stats_dict[stat] is not None: del stats_dict[stat]['_id'] if: stats_dict[stat] = {}",False,stats_dict[stat] is None,stat not in stats_dict,0.6456952095031738 5377,"def __init__(self, max_length=None, min_length=None, *args, **kwargs): self.max_length, self.min_length = (max_length, min_length) super(CharField, self).__init__(*args, **kwargs) if: self.validators.append(validators.MinLengthValidator(int(min_length))) if max_length is not None: self.validators.append(validators.MaxLengthValidator(int(max_length)))",True,min_length is not None,min_length is not None,0.6499724388122559 5378,"def __init__(self, max_length=None, min_length=None, *args, **kwargs): self.max_length, self.min_length = (max_length, min_length) super(CharField, self).__init__(*args, **kwargs) if min_length is not None: self.validators.append(validators.MinLengthValidator(int(min_length))) if: self.validators.append(validators.MaxLengthValidator(int(max_length)))",True,max_length is not None,max_length is not None,0.6499303579330444 5379,"def reset(self): if: return self.reset_camera() elif self.sensor == 'laser': return self.reset_laser()",True,self.sensor == 'camera',self.sensor == 'camera',0.6592477560043335 5380,"def reset(self): if self.sensor == 'camera': return self.reset_camera() elif: return self.reset_laser()",True,self.sensor == 'laser',self.sensor == 'laser',0.6539779901504517 5381,"def setUp(self): """""" setUp method for every test """""" self.get_super_method(self,'setUp')() ret = self.setup_volume() if: raise ExecutionError('Volume creation failed: %s' % self.volname)",True,not ret,not ret,0.6618443727493286 5382,"def TimeMajor(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False",True,o != 0,o != 0,0.6626211404800415 5383,"@torch.jit.script def script_skip_tensor(x: Tensor, mask): if: return x res = x[mask] if x.size(0) == mask.size(0) else x[:, mask] if res.numel() == 0: return x else: return res",False,x.size(0) == 0,x.numel() > mask.numel(),0.6514720916748047 5384,"@torch.jit.script def script_skip_tensor(x: Tensor, mask): if x.size(0) == 0: return x res = x[mask] if x.size(0) == mask.size(0) else x[:, mask] if: return x else: return res",False,res.numel() == 0,res.dim() > 2,0.6486388444900513 5385,"def fetch_deposits(self, code=None, since=None, limit=None, params={}): self.load_markets() request = {} method = 'accountGetDepositHistory' currency = None if: currency = self.currency(code) request['code'] = currency['code'] method += 'Currency' response = getattr(self, method)(self.extend(request, params)) return self.parse_transactions(response, currency, since, limit, params)",True,code is not None,code is not None,0.6574212312698364 5386,"@property def factors_as_text(self): """"""Getter for factors_as_text. Return all factor names and values as string line. """""" all_factors = '' for f in self.factor_names: all_factors = all_factors + u'{0} = {1}\n '.format(f, self.get_factor(f, return_none_if_not_defined=True)) all_factors = all_factors.rstrip() if: all_factors = all_factors[:-1] return all_factors",False,"len(all_factors) >= 1 and all_factors[-1] == ','",all_factors.endswith('\t'),0.6432846784591675 5387,"@staticmethod def _get_lr(param_group, param_state): if: min_step = 1e-06 * param_state['step'] if param_group['warmup_init'] else 0.01 lr_t = min(min_step, 1.0 / math.sqrt(param_state['step'])) param_scale = 1.0 if param_group['scale_parameter']: param_scale = max(param_group['eps_scale'], param_state['RMS']) param_group['lr'] = lr_t * param_scale return param_group['lr']",True,param_group['relative_step'],param_group['relative_step'],0.6478651165962219 5388,"@staticmethod def _get_lr(param_group, param_state): if param_group['relative_step']: min_step = 1e-06 * param_state['step'] if param_group['warmup_init'] else 0.01 lr_t = min(min_step, 1.0 / math.sqrt(param_state['step'])) param_scale = 1.0 if: param_scale = max(param_group['eps_scale'], param_state['RMS']) param_group['lr'] = lr_t * param_scale return param_group['lr']",True,param_group['scale_parameter'],param_group['scale_parameter'],0.6470333337783813 5389,"@staticmethod def extract_bang_question_C_command(subrule_dependency, rule): """"""!?C Reject if word contains?C Effects on Dependency: Add a dependency Args: subrule_dependency: dependency_lists from previous transformations rule: the tokenized transformation. Returns: An instance of SubruleDependency containing all possible dependency_lists """""" if: subrule_dependency.prepend_dependency_to_all_lists(RejectIfContainsNumberChars(set(Dicts.classes[rule[2]]), 1)) else: raise FatalRuntimeError('Unknown Class Type: {}'.format(rule[2])) return subrule_dependency",True,rule[2] in CHAR_CLASSES,rule[2] in CHAR_CLASSES,0.6475943326950073 5390,"def enterRule(self, listener: ParseTreeListener): if: listener.enterSelectClause(self)",True,"hasattr(listener, 'enterSelectClause')","hasattr(listener, 'enterSelectClause')",0.6438581943511963 5391,"def replace(atoms, found, to=17, bond_length=2.0, radial=0.0, prob=0.75): replace_mask = np.random.rand(len(found)) < prob for i, foundi in enumerate(found): if: iatom = foundi[0] bvec = foundi[1] rb = np.linalg.norm(bvec) bvec *= (bond_length - rb) / rb atoms[iatom, 0] = to atoms[iatom, 1:] += bvec return atoms",False,replace_mask[i],foundi[0] == atom_find_zero or replace_mask,0.6522502303123474 5392,"def enterRule(self, listener: ParseTreeListener): if: listener.enterPrimaryNoNewArray_lf_primary_lf_arrayAccess_lf_primary(self)",True,"hasattr(listener, 'enterPrimaryNoNewArray_lf_primary_lf_arrayAccess_lf_primary')","hasattr(listener, 'enterPrimaryNoNewArray_lf_primary_lf_arrayAccess_lf_primary')",0.643952488899231 5393,"def edit(self, fn: Callable[[sublime.Edit], Any]): is_read_only = self.view.is_read_only() if: self.view.set_read_only(False) core.edit(self.view, fn) self.view.set_read_only(True) else: core.edit(self.view, fn)",True,is_read_only,is_read_only,0.6514405608177185 5394,"def on_new_line(self, line, is_full_line): """""" Put your parsing code here. :param line: Line to process, can be only part of line. New line chars are removed from line. :param is_full_line: True if line had new line chars, False otherwise :return: None """""" if: try: self._parse_error(line) self._parse_cut(line) except ParsingDone: pass return super(Cut, self).on_new_line(line, is_full_line)",True,is_full_line,is_full_line,0.6499454975128174 5395,"def collect_additional_context(self, context, update, dispatcher, check_result): if: context.args = update.effective_message.text.split()[1:] else: context.args = check_result[0] if isinstance(check_result[1], dict): context.update(check_result[1])",True,"isinstance(check_result, bool)","isinstance(check_result, bool)",0.6451963186264038 5396,"def collect_additional_context(self, context, update, dispatcher, check_result): if isinstance(check_result, bool): context.args = update.effective_message.text.split()[1:] else: context.args = check_result[0] if: context.update(check_result[1])",True,"isinstance(check_result[1], dict)","isinstance(check_result[1], dict)",0.6433937549591064 5397,"def __xor__(self, other): """""" Implementation of ^ operator - returns C{L{Or}} """""" if: other = ParserElement._literalStringClass(other) if not isinstance(other, ParserElement): warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return Or([self, other])",True,"isinstance(other, basestring)","isinstance(other, basestring)",0.6512212753295898 5398,"def __xor__(self, other): """""" Implementation of ^ operator - returns C{L{Or}} """""" if isinstance(other, basestring): other = ParserElement._literalStringClass(other) if: warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2) return None return Or([self, other])",True,"not isinstance(other, ParserElement)","not isinstance(other, ParserElement)",0.6495002508163452 5399,"def scroll_right(self, _): if: core.event.trigger('output.scroll-right')",False,self.__offset + self.__width < self.__widgetcount,self.get_scroll_right(),0.6471312046051025 5400,"def _raise_for_error(self, body): err = body.get('status') if: code = err['value'] message = err['message'] if message.startswith('user account not enabled to use'): raise GeocoderInsufficientPrivileges(message) if code == 10: raise GeocoderAuthenticationFailure(message) if code in (18, 19, 20): raise GeocoderQuotaExceeded(message) raise GeocoderServiceError(message)",True,err,err,0.6698545217514038 5401,"def _raise_for_error(self, body): err = body.get('status') if err: code = err['value'] message = err['message'] if: raise GeocoderInsufficientPrivileges(message) if code == 10: raise GeocoderAuthenticationFailure(message) if code in (18, 19, 20): raise GeocoderQuotaExceeded(message) raise GeocoderServiceError(message)",False,message.startswith('user account not enabled to use'),code == 0,0.6413052678108215 5402,"def _raise_for_error(self, body): err = body.get('status') if err: code = err['value'] message = err['message'] if message.startswith('user account not enabled to use'): raise GeocoderInsufficientPrivileges(message) if: raise GeocoderAuthenticationFailure(message) if code in (18, 19, 20): raise GeocoderQuotaExceeded(message) raise GeocoderServiceError(message)",False,code == 10,"code in (0, 17, 19, 19)",0.670569658279419 5403,"def _raise_for_error(self, body): err = body.get('status') if err: code = err['value'] message = err['message'] if message.startswith('user account not enabled to use'): raise GeocoderInsufficientPrivileges(message) if code == 10: raise GeocoderAuthenticationFailure(message) if: raise GeocoderQuotaExceeded(message) raise GeocoderServiceError(message)",False,"code in (18, 19, 20)",code == 100,0.6494104862213135 5404,"def result(self): """"""Return whether checks ran without issues (`True`) or there were failures (`False`)."""""" if: raise RuntimeError('Check has not been perfomed yet') return not bool(self.failures)",False,not self._checked,self.failures is None,0.6635111570358276 5405,"def _find_unicode_literals_frame(): import __future__ if: return 0 frm = sys._getframe(1) idx = 1 while frm is not None: if frm.f_globals.get('__name__', '').startswith('click.'): frm = frm.f_back idx += 1 elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: return idx else: break return 0",False,"not hasattr(sys, '_getframe')","not hasattr(sys, 'unicode_literals')",0.6462661027908325 5406,"def _find_unicode_literals_frame(): import __future__ if not hasattr(sys, '_getframe'): return 0 frm = sys._getframe(1) idx = 1 while frm is not None: if: frm = frm.f_back idx += 1 elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: return idx else: break return 0",False,"frm.f_globals.get('__name__', '').startswith('click.')","isinstance(frm, __future__)",0.6471080780029297 5407,"def _find_unicode_literals_frame(): import __future__ if not hasattr(sys, '_getframe'): return 0 frm = sys._getframe(1) idx = 1 while frm is not None: if frm.f_globals.get('__name__', '').startswith('click.'): frm = frm.f_back idx += 1 elif: return idx else: break return 0",False,frm.f_code.co_flags & __future__.unicode_literals.compiler_flag,"__future__.f_globals.get('__name__', '').startswith('error.')",0.6431248784065247 5408,"@metadata() def compression(self): """"""Return the compression type if image exists, otherwise (:unav)."""""" if: return UNAV return self._image.compression",True,not self._image,not self._image,0.6588031053543091 5409,"def _validate_loss_fn_args(loss_fn): """"""Validates loss_fn arguments. Required arguments: labels, logits. Optional arguments: features. Args: loss_fn: The loss function. Raises: ValueError: If the signature is unexpected. """""" loss_fn_args = function_utils.fn_args(loss_fn) for required_arg in ['labels', 'logits']: if required_arg not in loss_fn_args: raise ValueError('loss_fn must contain argument: {}. Given arguments: {}'.format(required_arg, loss_fn_args)) invalid_args = list(set(loss_fn_args) - set(['labels', 'logits', 'features'])) if: raise ValueError('loss_fn has unexpected args: {}'.format(invalid_args))",True,invalid_args,invalid_args,0.6578114032745361 5410,"def _validate_loss_fn_args(loss_fn): """"""Validates loss_fn arguments. Required arguments: labels, logits. Optional arguments: features. Args: loss_fn: The loss function. Raises: ValueError: If the signature is unexpected. """""" loss_fn_args = function_utils.fn_args(loss_fn) for required_arg in ['labels', 'logits']: if: raise ValueError('loss_fn must contain argument: {}. Given arguments: {}'.format(required_arg, loss_fn_args)) invalid_args = list(set(loss_fn_args) - set(['labels', 'logits', 'features'])) if invalid_args: raise ValueError('loss_fn has unexpected args: {}'.format(invalid_args))",True,required_arg not in loss_fn_args,required_arg not in loss_fn_args,0.6494064331054688 5411,"def generic_search(self, search_dict: dict, skip: int=0, limit: int=0, only_fo_parent_firmware: bool=False, inverted: bool=False, as_meta: bool=False): with self.get_read_only_session() as session: query = build_generic_search_query(search_dict, only_fo_parent_firmware, inverted) query = self._apply_offset_and_limit(query, skip, limit) results = session.execute(query).scalars() if: return [self._get_meta_for_entry(element) for element in results] return [element.uid for element in results]",True,as_meta,as_meta,0.659392237663269 5412,"def feed(self, byte_str): byte_str = self.filter_with_english_letters(byte_str) for c in byte_str: char_class = Latin1_CharToClass[c] freq = Latin1ClassModel[self._last_char_class * CLASS_NUM + char_class] if: self._state = ProbingState.NOT_ME break self._freq_counter[freq] += 1 self._last_char_class = char_class return self.state",True,freq == 0,freq == 0,0.6607621908187866 5413,"def time_to_live_unsafe(self, key: str): key = self._add_namespace(key) time_to_live = self._redis.ttl(key) if: return None else: return time_to_live",False,time_to_live == -1 or time_to_live == -2,time_to_live is None,0.646674394607544 5414,"def build_chute(): main = {} chute = {'version': 1,'services': {'main': main}} chute['name'] = get_name() chute['description'] = get_description() main['type'] = get_chute_type() main['source'] = '.' if: main['image'] = get_base_name() main['command'] = get_command(name=chute['name'], use=main['image']) return chute",False,main['type'] == 'light','image' not in chute,0.6552547216415405 5415,"def do_activate(self): win = self.props.active_window if: win = ExampleWindow(application=self) win.present()",True,not win,not win,0.6659681797027588 5416,"def back_whenever_touched(self, speed: float=1000): while True: if: self.front_foot_motor.run_time(speed=-speed, time=1000, then=Stop.COAST, wait=False) self.back_foot_motor.run_time(speed=-speed, time=1000, then=Stop.COAST, wait=True)",True,self.touch_sensor.pressed(),self.touch_sensor.pressed(),0.6464081406593323 5417,"def normalize_final_sql(format_sql_5): format_sql_final = format_sql_5.replace('\n','').replace('. ', '.').replace('group by', 'group_by').replace('order by', 'order_by').replace('! =', '!=').replace('limit value', 'limit_value') if: format_sql_final = format_sql_final.replace('t2.dormid', 'dorm.dormid') format_sql_final = format_sql_final.replace('select city.city_name where city.state_name in ( select state.state_name where state.state_name in ( select river.traverse where river.river_name = value ) and state.area = ( select min ( state.area ) where state.state_name in ( select river.traverse where river.river_name = value ) ) ) order_by population desc limit_value','select city.city_name where city.state_name in ( select state.state_name where state.state_name in ( select river.traverse where river.river_name = value ) and state.area = ( select min ( state.area ) where state.state_name in ( select river.traverse where river.river_name = value ) ) ) order_by city.population desc limit_value') return format_sql_final",False,'t1' in format_sql_final or 't2' in format_sql_final or 't3' in format_sql_final or ('t4' in format_sql_final),settings.Dorm_OID_ON_NONE,0.6516327857971191 5418,"def __iter__(self): for token in base.Filter.__iter__(self): token = self.sanitize_token(token) if: yield token",False,token,self.check_token(token),0.6709120273590088 5419,"def __call__(self, results): """""" Call function to distort the input images' photo metric. Args: results (dict): A result dict contains the img1 and img2. Returns: results (dict): A result dict contains the img1 and img2. """""" for key in ['img1', 'img2']: img = results[key] img = self.brightness(img) mode = random.randint(2) if: img = self.contrast(img) img = self.saturation(img) img = self.hue(img) if mode == 0: img = self.contrast(img) results[key] = img return results",True,mode == 1,mode == 1,0.6586330533027649 5420,"def __call__(self, results): """""" Call function to distort the input images' photo metric. Args: results (dict): A result dict contains the img1 and img2. Returns: results (dict): A result dict contains the img1 and img2. """""" for key in ['img1', 'img2']: img = results[key] img = self.brightness(img) mode = random.randint(2) if mode == 1: img = self.contrast(img) img = self.saturation(img) img = self.hue(img) if: img = self.contrast(img) results[key] = img return results",True,mode == 0,mode == 0,0.658726155757904 5421,"def cleanSelection(self, ind): """""" Clean only selected oscillations and sort them in time. """""" not_selected = [] selected = [] for i in range(len(self.list_oscillation)): if: selected.append(self.list_oscillation[i]) else: not_selected.append(self.list_oscillation[i]) list_cleaned = clean_oscillations_list(selected, minimum_cycle_number=self.minimum_cycle_number, eliminate_simultaneous=self.eliminate_simultaneous, regroup_full_overlap=self.regroup_full_overlap, eliminate_partial_overlap=self.eliminate_partial_overlap) self.list_oscillation = list_cleaned + not_selected self.sortOscillations()",False,i in ind,"ind in [0, 1]",0.6662567853927612 5422,"def CAkFeedbackNode__SetInitialValues(obj, cls): obj = obj.node('FeedbackInitialValues') obj.u32('numSources') for elem in obj.list('pSource', 'AkBankSourceData', obj.lastval): elem.U16('CompanyID') elem.U16('DeviceID') elem.f32('fVolumeOffset') CAkBankMgr__LoadSource(elem, cls, True) CAkParameterNodeBase__SetNodeBaseParams(obj, cls) if: obj.s16('Loop') obj.s16('LoopMod.min') obj.s16('LoopMod.max') else: pass return",True,cls.version <= 56,cls.version <= 56,0.6499956846237183 5423,"def handleFreeLookPointing(self): """"""Handle free look"""""" if: msg = self.vehicle.message_factory.mount_control_encode(0, 1, self.camPitch * 100.0, 0.0, self.camYaw * 100.0, 0) else: msg = self.vehicle.message_factory.command_long_encode(0, 0, mavutil.mavlink.MAV_CMD_CONDITION_YAW, 0, self.camYaw, YAW_SPEED, self.camDir, 0.0, 0, 0, 0) self.vehicle.send_mavlink(msg)",True,self.vehicle.mount_status[0] is not None,self.vehicle.mount_status[0] is not None,0.6467090845108032 5424,"def __str__(self): snippet = self.get_snippet() where =' in ""%s"", line %d, column %d' % (self.name, self.line + 1, self.column + 1) if: where += ':\n' + snippet return where",True,snippet is not None,snippet is not None,0.6652492880821228 5425,"def __init__(self, *args): FieldSet.__init__(self, *args) self._size = self['size'].value * 8 type = self['type'].value if: self._name = self.ATTR_INFO[type][0] self._parser = self.ATTR_INFO[type][2]",False,type in self.ATTR_INFO,self.ATTR_INFO[type][0] in self.ATTR_INFO[type][1],0.6582961082458496 5426,"def log(text, array=None): """"""Prints a text message. And, optionally, if a Numpy array is provided it prints it's shape, min, and max values. """""" if: text = text.ljust(25) text +='shape: {:20} min: {:10.5f} max: {:10.5f} {}'.format(str(array.shape), array.min() if array.size else '', array.max() if array.size else '', array.dtype) print(text)",True,array is not None,array is not None,0.6544446349143982 5427,"def read_state_dict(checkpoint_file, print_global_state=False): _, extension = os.path.splitext(checkpoint_file) if: pl_sd = load_file(checkpoint_file, device='cpu') else: pl_sd = torch.load(checkpoint_file, map_location='cpu') if print_global_state and 'global_step' in pl_sd: print(f""Global Step: {pl_sd['global_step']}"") sd = get_state_dict_from_checkpoint(pl_sd) return sd",False,extension.lower() == '.safetensors',extension.lower() == '.torch',0.646547794342041 5428,"def read_state_dict(checkpoint_file, print_global_state=False): _, extension = os.path.splitext(checkpoint_file) if extension.lower() == '.safetensors': pl_sd = load_file(checkpoint_file, device='cpu') else: pl_sd = torch.load(checkpoint_file, map_location='cpu') if: print(f""Global Step: {pl_sd['global_step']}"") sd = get_state_dict_from_checkpoint(pl_sd) return sd",False,print_global_state and 'global_step' in pl_sd,print_global_state,0.64362633228302 5429,"def torch_persistent_save(*args, **kwargs): for i in range(3): try: return torch.save(*args, **kwargs) except Exception: if: logging.error(traceback.format_exc())",True,i == 2,i == 2,0.6622440814971924 5430,"def first_user_setup(self): if: self.wait_until_clickable(LoginPageLocators.skip_welcome_button).click() if self.get_element(LoginPageLocators.current_step_sel).text == 'Upload your photo': self.wait_until_clickable(LoginPageLocators.skip_photo_upload).click() if self.get_element(LoginPageLocators.current_step_sel).text == 'Find content': self.wait_until_any_element_visible(LoginPageLocators.skip_find_content)[0].click() self.wait_until_clickable(LoginPageLocators.finish_setup).click()",False,self.get_element(LoginPageLocators.current_step_sel).text == 'Welcome',self.get_element(LoginPageLocators.current_step_sel).text == 'User setup',0.6487319469451904 5431,"def first_user_setup(self): if self.get_element(LoginPageLocators.current_step_sel).text == 'Welcome': self.wait_until_clickable(LoginPageLocators.skip_welcome_button).click() if: self.wait_until_clickable(LoginPageLocators.skip_photo_upload).click() if self.get_element(LoginPageLocators.current_step_sel).text == 'Find content': self.wait_until_any_element_visible(LoginPageLocators.skip_find_content)[0].click() self.wait_until_clickable(LoginPageLocators.finish_setup).click()",False,self.get_element(LoginPageLocators.current_step_sel).text == 'Upload your photo',self.get_element(LoginPageLocators.current_step_sel).text == 'Photo upload',0.6473886966705322 5432,"def first_user_setup(self): if self.get_element(LoginPageLocators.current_step_sel).text == 'Welcome': self.wait_until_clickable(LoginPageLocators.skip_welcome_button).click() if self.get_element(LoginPageLocators.current_step_sel).text == 'Upload your photo': self.wait_until_clickable(LoginPageLocators.skip_photo_upload).click() if: self.wait_until_any_element_visible(LoginPageLocators.skip_find_content)[0].click() self.wait_until_clickable(LoginPageLocators.finish_setup).click()",False,self.get_element(LoginPageLocators.current_step_sel).text == 'Find content',self.get_element(LoginPageLocators.current_step_sel) is not None,0.6464476585388184 5433,"def __init__(self, pth_file): if: raise UninstallationError('Cannot remove entries from nonexistent file %s' % pth_file) self.file = pth_file self.entries = set() self._saved_lines = None",False,not os.path.isfile(pth_file),not pth_file,0.6504340171813965 5434,"@text_bold.setter def text_bold(self, value): """"""Setter for text_bold."""""" if: raise AttributeError(TextBox._getter_exception_message.format('text_bold')) else: self._text_bold = value",True,self.has_surface,self.has_surface,0.6553983092308044 5435,"@cached_property def regressed_by(self) -> int | None: """"""The ID of the bug that one of its patches could have caused the crash. If there are multiple bugs, the value will be `None`. """""" bug_ids = self.regressed_by_potential_bug_ids if: return next(iter(bug_ids)) return None",False,len(bug_ids) == 1,bug_ids,0.6485418081283569 5436,"def setupmethod(f: F) -> F: """"""Wraps a method so that it performs a check in debug mode if the first request was already handled. """""" def wrapper_func(self, *args: t.Any, **kwargs: t.Any) -> t.Any: if: raise AssertionError('A setup function was called after the first request was handled. This usually indicates a bug in the application where a module was not imported and decorators or other functionality was called too late.\nTo fix this make sure to import all your view modules, database models, and everything related at a central place before the application starts serving requests.') return f(self, *args, **kwargs) return t.cast(F, update_wrapper(wrapper_func, f))",False,self._is_setup_finished(),self.debug and (not self._debug),0.6514652967453003 5437,"def checkUnindent(s, l, t): if: return curCol = col(l, s) if not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])): raise ParseException(s, l, 'not an unindent') indentStack.pop()",True,l >= len(s),l >= len(s),0.6486468315124512 5438,"def checkUnindent(s, l, t): if l >= len(s): return curCol = col(l, s) if: raise ParseException(s, l, 'not an unindent') indentStack.pop()",True,not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])),not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])),0.6463941931724548 5439,"def __init__(self, config): """""" Args: config (mwptoolkit.config.configuration.Config) expected that config includes these parameters below: rule1 (bool): convert equation according to rule 1. rule2 (bool): convert equation according to rule 2. parse_tree_file_name (str|None): the name of the file to save parse tree information. model (str): model name. dataset (str): dataset name. equation_fix (str): [infix | postfix | prefix], convert equation to specified format. dataset_dir or dataset_path (str): the road path of dataset folder. language (str): a property of dataset, the language of dataset. single (bool): a property of dataset, the equation of dataset is single or not. linear (bool): a property of dataset, the equation of dataset is linear or not. source_equation_fix (str): [infix | postfix | prefix], a property of dataset, the source format of equation of dataset. rebuild (bool): when loading additional dataset information, this can decide to build information anew or load information built before. validset_divide (bool): whether to split validset. if True, the dataset is split to trainset-validset-testset. if False, the dataset is split to trainset-testset. mask_symbol (str): [NUM | number], the symbol to mask numbers in equation. min_word_keep (int): in dataset, words that count greater than the value, will be kept in input vocabulary. min_generate_keep (int): generate number that count greater than the value, will be kept in output symbols. symbol_for_tree (bool): build output symbols for tree or not. share_vocab (bool): encoder and decoder of the model share the same vocabulary, often seen in Seq2Seq models. k_fold (int|None): if it's an integer, it indicates to run k-fold cross validation. if it's None, it indicates to run trainset-validset-testset split. read_local_folds",False,self.parse_tree_path is not None,config['scale_symbol'],0.6413372159004211 5440,"def __init__(self, config): """""" Args: config (mwptoolkit.config.configuration.Config) expected that config includes these parameters below: rule1 (bool): convert equation according to rule 1. rule2 (bool): convert equation according to rule 2. parse_tree_file_name (str|None): the name of the file to save parse tree information. model (str): model name. dataset (str): dataset name. equation_fix (str): [infix | postfix | prefix], convert equation to specified format. dataset_dir or dataset_path (str): the road path of dataset folder. language (str): a property of dataset, the language of dataset. single (bool): a property of dataset, the equation of dataset is single or not. linear (bool): a property of dataset, the equation of dataset is linear or not. source_equation_fix (str): [infix | postfix | prefix], a property of dataset, the source format of equation of dataset. rebuild (bool): when loading additional dataset information, this can decide to build information anew or load information built before. validset_divide (bool): whether to split validset. if True, the dataset is split to trainset-validset-testset. if False, the dataset is split to trainset-testset. mask_symbol (str): [NUM | number], the symbol to mask numbers in equation. min_word_keep (int): in dataset, words that count greater than the value, will be kept in input vocabulary. min_generate_keep (int): generate number that count greater than the value, will be kept in output symbols. symbol_for_tree (bool): build output symbols for tree or not. share_vocab (bool): encoder and decoder of the model share the same vocabulary, often seen in Seq2Seq models. k_fold (int|None): if it's an integer, it indicates to run k-fold cross validation. if it's None, it indicates to run trainset-validset-testset split. read_local_folds",False,not os.path.isabs(self.parse_tree_path),config['scale_symbol'],0.6397731900215149 5441,"@ide.route('/active') @require_admin() @json_response def admin_ide_active(): """""" Get the list of all active Theia ides within the current course context. :return: """""" session = TheiaSession.query.filter(TheiaSession.active, TheiaSession.owner_id == current_user.id, TheiaSession.course_id == course_context.id, TheiaSession.assignment_id == None).first() if: return success_response({'session': None}) return success_response({'session': session.data,'settings': session.settings})",True,session is None,session is None,0.6552146673202515 5442,"def find_id(all_coins, x): for coin in all_coins: if: return coin['symbol']",False,coin['id'] == x,coin['x'] == x['y'],0.6584781408309937 5443,"def do_mailfy(self, query, **kwargs): """"""Verifying a mailfy query in this platform This might be redefined in any class inheriting from Platform. The only condition is that any of this should return an equivalent array. Args: query: The element to be searched. Returns: A list of elements to be appended. A sample output format is as follows: [ { ""attributes"": [ { ""attributes"": [], ""type"": ""com.i3visio.Email"", ""value"": ""contacto@i3visio.com"" }, { ""attributes"": [], ""type"": ""com.i3visio.Alias"", ""value"": ""contacto"" }, { ""attributes"": [], ""type"": ""com.i3visio.Domain"", ""value"": ""i3visio.com"" }, { ""attributes"": [], ""type"": ""com.i3visio.Platform"", ""value"": ""Twitter"" } ], ""type"": ""com.i3visio.Profile"", ""value"": ""Twitter - contacto@i3visio.com"" } ] """""" if: expanded_entities = general.expand_entities_from_email(query) r = {'type': 'com.i3visio.Profile', 'value': self.platformName +'-'+ query, 'attributes': expanded_entities + [{'type': 'com.i3visio.Platform', 'value': self.platformName, 'attributes': []}]} return [r] return []",False,"self.check_mailfy(query, **kwargs)",query,0.6421409845352173 5444,"def _init_cls_convs(self): """"""Initialize classification conv layers of the head."""""" self.cls_convs = nn.ModuleList() for i in range(self.stacked_convs): chn = self.in_channels if i == 0 else self.feat_channels if: conv_cfg = dict(type='DCNv2') else: conv_cfg = self.conv_cfg self.cls_convs.append(ConvModule(chn, self.feat_channels, 3, stride=1, padding=1, conv_cfg=conv_cfg, norm_cfg=self.norm_cfg, bias=self.conv_bias))",True,self.dcn_on_last_conv and i == self.stacked_convs - 1,self.dcn_on_last_conv and i == self.stacked_convs - 1,0.6477503776550293 5445,"def sort_list(l, key, reverse=False): if: return l.sort(lambda a, b: cmp(key(b), key(a))) else: return l.sort(lambda a, b: cmp(key(a), key(b)))",True,reverse,reverse,0.6682181358337402 5446,"def __init__(self, size=0, maxtasksperchild=10000): if: size = multiprocessing.cpu_count() self.size = size self._pool = multiprocessing.Pool(processes=size, maxtasksperchild=maxtasksperchild) self._stopped = False",False,size <= 0,size == 0,0.6749229431152344 5447,"def get_rotor_weight(self, gravity=9.8, stack_length=None): material_density_rho = get_material_data()[0] if: return gravity * self.get_rotor_volume() * material_density_rho else: return gravity * self.get_rotor_volume(stack_length=stack_length) * material_density_rho",True,stack_length is None,stack_length is None,0.6500901579856873 5448,"@property def base_version(self) -> str: parts = [] if: parts.append(f'{self.epoch}!') parts.append('.'.join((str(x) for x in self.release))) return ''.join(parts)",True,self.epoch != 0,self.epoch != 0,0.6532317399978638 5449,"def handle_data(self, data: str) -> None: data = data.strip() data = data.replace('\xa0','') if: self.tags.append((self.tag_stack, data))",False,data,len(data) > 0 and data[0] in self.tag_stack,0.6677725315093994 5450,"@staticmethod def swap_dir_in_path(fullpath, dir1, dir2): """"""Given a pathname fullpath, replace right-most occurrence of dir1 with dir2 and return the result."""""" dirname = fullpath leaf = None newpath_list = [] while leaf!= '': dirname, leaf = os.path.split(dirname) if: newpath_list.append(dir2) break newpath_list.append(leaf) newpath = dirname newpath_list.reverse() for subdirectory in newpath_list: newpath = os.path.join(newpath, subdirectory) return newpath",False,leaf == dir1,leaf == '',0.6640353202819824 5451,"def _setupDelegates(self): """""" Store delegates in self.delegates dictionary by column name and apply delegates to table view """""" self.delegates['status'] = StatusComboBoxDelegate(self.proxyModel) for columnName, delegate in self.delegates.items(): for i, column in enumerate(self.model.dataset.headerName): if: self.tableView.setItemDelegateForColumn(i, delegate)",False,column == columnName,columnName == column,0.6780922412872314 5452,"def default(self, obj): if: return int(obj) elif isinstance(obj, np.floating): return float(obj) elif isinstance(obj, np.ndarray): return obj.tolist() return json.JSONEncoder.default(self, obj)",False,"isinstance(obj, np.integer)","isinstance(obj, int)",0.646202564239502 5453,"def default(self, obj): if isinstance(obj, np.integer): return int(obj) elif: return float(obj) elif isinstance(obj, np.ndarray): return obj.tolist() return json.JSONEncoder.default(self, obj)",False,"isinstance(obj, np.floating)","isinstance(obj, np.float64)",0.6459647417068481 5454,"def default(self, obj): if isinstance(obj, np.integer): return int(obj) elif isinstance(obj, np.floating): return float(obj) elif: return obj.tolist() return json.JSONEncoder.default(self, obj)",True,"isinstance(obj, np.ndarray)","isinstance(obj, np.ndarray)",0.6445704698562622 5455,"def on_spawn_location(self, pos): if: while True: x = randint(0, 511) y = randint(0, 511) z = self.protocol.map.get_z(x, y) if z!= 63 or WATER_SPAWNS: break z -= 2.4 x += 0.5 y += 0.5 return (x, y, z) return connection.on_spawn_location(self, pos)",False,not self.score_hack and self.protocol.free_for_all,self._protocol.map,0.6424878239631653 5456,"def on_spawn_location(self, pos): if not self.score_hack and self.protocol.free_for_all: while True: x = randint(0, 511) y = randint(0, 511) z = self.protocol.map.get_z(x, y) if: break z -= 2.4 x += 0.5 y += 0.5 return (x, y, z) return connection.on_spawn_location(self, pos)",False,z != 63 or WATER_SPAWNS,z < 2.4,0.648476243019104 5457,"def accept(self, visitor: ParseTreeVisitor): if: return visitor.visitMethodReference5(self) else: return visitor.visitChildren(self)",True,"hasattr(visitor, 'visitMethodReference5')","hasattr(visitor, 'visitMethodReference5')",0.6442598104476929 5458,"def addNoiseVariance(self, config, base, im, include_obj_var, logger): if: im += base['current_noise_image'] im += self.getNoiseVariance(config, base, full=True)",False,include_obj_var,include_obj_var == 'current_noise_image',0.6530711650848389 5459,"def _assert_singular_result_list(found_items: Sequence[OntologyElement], title: str, type_: Any) -> None: if: raise OntologyError(f'No item was found in the ontology with the given title `{title}` and type `{type_}`') elif len(found_items) > 1: raise OntologyError(f'More than one item was found in the ontology with the given title `{title}` and type `{type_}`. Use the `get_children_by_title` or `get_child_by_hash` function instead. The found items are `{found_items}`.')",True,len(found_items) == 0,len(found_items) == 0,0.653594970703125 5460,"def _assert_singular_result_list(found_items: Sequence[OntologyElement], title: str, type_: Any) -> None: if len(found_items) == 0: raise OntologyError(f'No item was found in the ontology with the given title `{title}` and type `{type_}`') elif: raise OntologyError(f'More than one item was found in the ontology with the given title `{title}` and type `{type_}`. Use the `get_children_by_title` or `get_child_by_hash` function instead. The found items are `{found_items}`.')",True,len(found_items) > 1,len(found_items) > 1,0.6522406339645386 5461,"@property def ensure_on_navmesh_on_finish(self): if: return self._entity_data.get('ensure_on_navmesh_on_finish') return '1'",True,'ensure_on_navmesh_on_finish' in self._entity_data,'ensure_on_navmesh_on_finish' in self._entity_data,0.6491043567657471 5462,"def __getstate__(self): if: self.content return {attr: getattr(self, attr, None) for attr in self.__attrs__}",True,not self._content_consumed,not self._content_consumed,0.6536366939544678 5463,"def __find_image(self, result_set, fallback=None): if: first_key = list(result_set['images'].keys())[0] images = result_set['images'][first_key] images = images.get('16_9_Landscape', images.get('default', {})) if'styles' in images and 'large' in images['styles']: return images['styles']['large'] return fallback",False,'images' in result_set and result_set['images'],result_set is not None,0.6465675830841064 5464,"def __find_image(self, result_set, fallback=None): if 'images' in result_set and result_set['images']: first_key = list(result_set['images'].keys())[0] images = result_set['images'][first_key] images = images.get('16_9_Landscape', images.get('default', {})) if: return images['styles']['large'] return fallback",False,'styles' in images and 'large' in images['styles'],images['styles'],0.6455250382423401 5465,"def create_embeddings(self): if: W = tf.get_variable(name='W', shape=[self.config['vocab_size'], self.config['embeddings_size']], initializer=tf.constant_initializer(self.embeddings), trainable=True) else: W = tf.get_variable(name='W', shape=[self.config['vocab_size'], self.config['embeddings_size']], trainable=True) word_embeddings = tf.nn.embedding_lookup(W, self._input_placeholder) return word_embeddings",False,self.embeddings != None,self._use_embeddings,0.6492716073989868 5466,"def enterRule(self, listener: ParseTreeListener): if: listener.enterClassType(self)",True,"hasattr(listener, 'enterClassType')","hasattr(listener, 'enterClassType')",0.6455535888671875 5467,"def render_resource_object(self, iri_or_blanknode: _IriOrBlanknode) -> dict: _resource_object = {**self.render_identifier_object(iri_or_blanknode)} _twopledict = self._data.tripledict.get(iri_or_blanknode) or {} if isinstance(iri_or_blanknode, str) else primitive_rdf.twopledict_from_twopleset(iri_or_blanknode) for _pred, _obj_set in _twopledict.items(): if _pred!= RDF.type: self._render_field(_pred, _obj_set, into=_resource_object) if: _resource_object.setdefault('links', {})['self'] = iri_or_blanknode return _resource_object",False,"isinstance(iri_or_blanknode, str)",iri_or_blanknode,0.6461703181266785 5468,"def render_resource_object(self, iri_or_blanknode: _IriOrBlanknode) -> dict: _resource_object = {**self.render_identifier_object(iri_or_blanknode)} _twopledict = self._data.tripledict.get(iri_or_blanknode) or {} if isinstance(iri_or_blanknode, str) else primitive_rdf.twopledict_from_twopleset(iri_or_blanknode) for _pred, _obj_set in _twopledict.items(): if: self._render_field(_pred, _obj_set, into=_resource_object) if isinstance(iri_or_blanknode, str): _resource_object.setdefault('links', {})['self'] = iri_or_blanknode return _resource_object",False,_pred != RDF.type,_obj_set,0.6581794023513794 5469,"def GetList(self): """"""GetList"""""" ret = self._oleobj_.InvokeTypes(14, LCID, 1, (9, 0), ()) if: ret = Dispatch(ret, u'GetList', '{6692DB25-3CF4-44C4-932D-1F6D13DF6E7C}') return ret",True,ret is not None,ret is not None,0.6521528959274292 5470,"def _append_use_existing_client_secretn(action): if: return action action['AuthenticateOidcConfig']['UseExistingClientSecret'] = True return action",False,action['Type'] != 'authenticate-oidc',action['AuthenticateOidcConfig']['UseExistingClientSecret'],0.6485613584518433 5471,"@property def capabilities(self): """"""gets the value capabilities"""""" if: self.init() return self._capabilities",True,self._capabilities is None,self._capabilities is None,0.651340126991272 5472,"def _strip_spaces(text): ns_chars = [] ns_to_s_map = collections.OrderedDict() for i, c in enumerate(text): if: continue ns_to_s_map[len(ns_chars)] = i ns_chars.append(c) ns_text = ''.join(ns_chars) return (ns_text, ns_to_s_map)",False,c == '',len(ns_chars) == 0,0.6554815173149109 5473,"def PayloadEncryptionCryptobox(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False",True,o != 0,o != 0,0.6653809547424316 5474,"def get_all_entity_names(sample, cell_with_links): entity_names = [] for entity in cell_with_links: entity_name_in_table = entity['name'] entity_text_url = entity['url'] entity_text = get_passage(sample, entity_text_url) if: continue entity_name_in_text, _ = get_first_NER(entity_text) if entity_name_in_text is None or entity_name_in_table is None: continue if not phrase_overlap(entity_name_in_text, entity_name_in_table) == True: continue entity['text_name'] = entity_name_in_text entity['table_name'] = entity_name_in_table entity_names.append(entity) return entity_names",True,entity_text is None,entity_text is None,0.6495709419250488 5475,"def get_all_entity_names(sample, cell_with_links): entity_names = [] for entity in cell_with_links: entity_name_in_table = entity['name'] entity_text_url = entity['url'] entity_text = get_passage(sample, entity_text_url) if entity_text is None: continue entity_name_in_text, _ = get_first_NER(entity_text) if: continue if not phrase_overlap(entity_name_in_text, entity_name_in_table) == True: continue entity['text_name'] = entity_name_in_text entity['table_name'] = entity_name_in_table entity_names.append(entity) return entity_names",False,entity_name_in_text is None or entity_name_in_table is None,entity_name_in_table is None,0.646329402923584 5476,"def get_all_entity_names(sample, cell_with_links): entity_names = [] for entity in cell_with_links: entity_name_in_table = entity['name'] entity_text_url = entity['url'] entity_text = get_passage(sample, entity_text_url) if entity_text is None: continue entity_name_in_text, _ = get_first_NER(entity_text) if entity_name_in_text is None or entity_name_in_table is None: continue if: continue entity['text_name'] = entity_name_in_text entity['table_name'] = entity_name_in_table entity_names.append(entity) return entity_names",False,"not phrase_overlap(entity_name_in_text, entity_name_in_table) == True",'classifier' in entity_name_in_text,0.6449264883995056 5477,"def singe_map(st, en): total_p = 0.0 correct_num = 0.0 for index in xrange(st, en): if int(preds[index][2])!= 0: correct_num += 1 total_p += correct_num / (index - st + 1) if: return 0.0 return total_p / correct_num",False,int(correct_num) == 0,correct_num == 0,0.6507935523986816 5478,"def singe_map(st, en): total_p = 0.0 correct_num = 0.0 for index in xrange(st, en): if: correct_num += 1 total_p += correct_num / (index - st + 1) if int(correct_num) == 0: return 0.0 return total_p / correct_num",False,int(preds[index][2]) != 0,index % en == 0,0.6491070985794067 5479,"def AddSystemOther(output_zip): """"""Turn the contents of SYSTEM_OTHER into a system_other image and store it in output_zip."""""" img = OutputFile(output_zip, OPTIONS.input_tmp, 'IMAGES','system_other.img') if: logger.info('system_other.img already exists; no need to rebuild...') return CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict,'system_other', img)",False,os.path.exists(img.name),"os.path.exists(os.path.join(output_zip, 'system_other.img'))",0.6493115425109863 5480,"def construct_python_module(self, suffix, node): value = self.construct_scalar(node) if: raise ConstructorError('while constructing a Python module', node.start_mark, 'expected the empty value, but found %r' % value, node.start_mark) return self.find_python_module(suffix, node.start_mark)",True,value,value,0.6793338656425476 5481,"def errorMessage(self, text): """"""Truncate number of error messages produced in a plot."""""" if: print(text) self._errorMessageCount = self._errorMessageCount + 1 elif self._errorMessageCount == self.MAX_ERROR_COUNT: print('\nAdditional graphics error messages suppressed') self._errorMessageCount = self._errorMessageCount + 1",False,self._errorMessageCount < self.MAX_ERROR_COUNT,self._errorMessageCount < self.MIN_ERROR_COUNT,0.6513015031814575 5482,"def errorMessage(self, text): """"""Truncate number of error messages produced in a plot."""""" if self._errorMessageCount < self.MAX_ERROR_COUNT: print(text) self._errorMessageCount = self._errorMessageCount + 1 elif: print('\nAdditional graphics error messages suppressed') self._errorMessageCount = self._errorMessageCount + 1",False,self._errorMessageCount == self.MAX_ERROR_COUNT,self._errorMessageCount >= self.MAX_ERROR_COUNT,0.650102972984314 5483,"def __rmul__(self, other): if: return FloatWithUnit(float(self) * other, unit_type=self._unit_type, unit=self._unit) return FloatWithUnit(float(self) * other, unit_type=None, unit=self._unit * other._unit)",True,"not isinstance(other, FloatWithUnit)","not isinstance(other, FloatWithUnit)",0.6490469574928284 5484,"def weighted_choice(self, choices): import random total = sum((w for c, w in choices)) r = random.uniform(0, total) upto = 0 for c, w in choices: if: return c upto += w assert False, ""Shouldn't get here""",False,upto + w > r,r <= upto,0.6579726934432983 5485,"def unlink(self, omd): if: self.prev.next = self.next if self.next: self.next.prev = self.prev if omd._first_bucket is self: omd._first_bucket = self.next if omd._last_bucket is self: omd._last_bucket = self.prev",True,self.prev,self.prev,0.6520448923110962 5486,"def unlink(self, omd): if self.prev: self.prev.next = self.next if: self.next.prev = self.prev if omd._first_bucket is self: omd._first_bucket = self.next if omd._last_bucket is self: omd._last_bucket = self.prev",True,self.next,self.next,0.6561459302902222 5487,"def unlink(self, omd): if self.prev: self.prev.next = self.next if self.next: self.next.prev = self.prev if: omd._first_bucket = self.next if omd._last_bucket is self: omd._last_bucket = self.prev",False,omd._first_bucket is self,omts._first_bucket is self,0.6489882469177246 5488,"def unlink(self, omd): if self.prev: self.prev.next = self.next if self.next: self.next.prev = self.prev if omd._first_bucket is self: omd._first_bucket = self.next if: omd._last_bucket = self.prev",False,omd._last_bucket is self,omts._last_bucket is self,0.6480567455291748 5489,"def get(self, request, *args, **kwargs): if: return HttpResponseForbidden() return super().get(request, *args, **kwargs)",False,not request.user.is_authenticated,request.user.is_authenticated or request.user.is_superuser,0.6459367871284485 5490,"def Scanner(function, *args, **kw): """"""Factory function to create a Scanner Object. Creates the appropriate Scanner based on the type of ""function"". TODO: Deprecate this some day. We've moved the functionality inside the Base class and really don't need this factory function any more. It was, however, used by some of our Tool modules, so the call probably ended up in various people's custom modules patterned on SCons code. """""" if: return Selector(function, *args, **kw) else: return Base(function, *args, **kw)",False,SCons.Util.is_Dict(function),"hasattr(Selector, '__call__')",0.6438033580780029 5491,"def cancel_rollback(self, connection): if: return S_NO_ROLLBACK_IN_PROGRESS result = S_ROLLBACK_CANCELLED.format(player=connection.name) self.end_rollback(result)",False,not self.rollback_in_progress,self.is_open_rollback_in_progress(),0.6487672328948975 5492,"def render_GET(self, request): overview = self.parent.get_overview() request.setHeader('content-type', 'png/image') request.setHeader('content-length', str(len(overview))) if: return '' return overview",False,request.method == 'HEAD',not overview,0.6509859561920166 5493,"def __init__(self, padding, filename_tmpl='{:08d}'): if: raise ValueError(f'Wrong padding mode {padding}.Should be ""replicate"", ""reflection"", ""reflection_circle"", ""circle""') self.padding = padding self.filename_tmpl = filename_tmpl",False,"padding not in ('replicate', 'reflection', 'reflection_circle', 'circle')","padding not in ['reflection', 'reflection_circle']",0.6505506634712219 5494,"def _reset_parameters(self): for p in self.parameters(): if: xavier_uniform_(p) else: constant_(p)",True,p.dim() > 1,p.dim() > 1,0.6513280868530273 5495,"@property def is_sound_on(self): """"""Get sound on/off."""""" if: return None return self._update_feature(MicroWaveFeatures.SOUND, status == MODE_VOLON, False)",True,(status := self.data.get(STATE_SOUND)) is None,(status := self.data.get(STATE_SOUND)) is None,0.6499799489974976 5496,"def forward(self, x, size=None): out = self.D(x) src_out = self.cls1(out) tgt_out = self.cls2(out) out = torch.cat((src_out, tgt_out), dim=1) if: out = F.interpolate(out, size=size, mode='bilinear', align_corners=True) return out",True,size is not None,size is not None,0.6492502689361572 5497,"def success(args, hostname): errorStatus, errorIndex, varBinds = args if: print('%s: %s at %s' % (hostname, errorStatus.prettyPrint(), errorIndex and varBinds[int(errorIndex) - 1][0] or '?')) else: for varBind in varBinds: print(' = '.join([x.prettyPrint() for x in varBind]))",True,errorStatus,errorStatus,0.6731153726577759 5498,"def data(self, index, role): if: i = index.row() * self._columns + index.column() if i < len(self._indices): return self.page_name(i).split('/', 1)[-1] if role == QtCore.Qt.DecorationRole: i = index.row() * self._columns + index.column() if i < len(self._indices): return self.page_image(i)",False,role == QtCore.Qt.ToolTipRole,role == QtCore.Qt.DisplayRole,0.6499382257461548 5499,"def data(self, index, role): if role == QtCore.Qt.ToolTipRole: i = index.row() * self._columns + index.column() if i < len(self._indices): return self.page_name(i).split('/', 1)[-1] if: i = index.row() * self._columns + index.column() if i < len(self._indices): return self.page_image(i)",False,role == QtCore.Qt.DecorationRole,role == QtCore.Qt.ImageRole,0.6513864398002625 5500,"def data(self, index, role): if role == QtCore.Qt.ToolTipRole: i = index.row() * self._columns + index.column() if: return self.page_name(i).split('/', 1)[-1] if role == QtCore.Qt.DecorationRole: i = index.row() * self._columns + index.column() if i < len(self._indices): return self.page_image(i)",True,i < len(self._indices),i < len(self._indices),0.6479231119155884 5501,"def data(self, index, role): if role == QtCore.Qt.ToolTipRole: i = index.row() * self._columns + index.column() if i < len(self._indices): return self.page_name(i).split('/', 1)[-1] if role == QtCore.Qt.DecorationRole: i = index.row() * self._columns + index.column() if: return self.page_image(i)",True,i < len(self._indices),i < len(self._indices),0.6471094489097595 5502,"@run_async @bot_admin @can_restrict @typing_action def kickme(update, context): user_id = update.effective_message.from_user.id if: update.effective_message.reply_text('Yeahhh.. not gonna kick an admin.') return res = update.effective_chat.unban_member(user_id) if res: update.effective_message.reply_text(""Yeah, you're right Get Out!.."") else: update.effective_message.reply_text(""Huh? I can't :/"")",True,"is_user_admin(update.effective_chat, user_id)","is_user_admin(update.effective_chat, user_id)",0.6429746150970459 5503,"@run_async @bot_admin @can_restrict @typing_action def kickme(update, context): user_id = update.effective_message.from_user.id if is_user_admin(update.effective_chat, user_id): update.effective_message.reply_text('Yeahhh.. not gonna kick an admin.') return res = update.effective_chat.unban_member(user_id) if: update.effective_message.reply_text(""Yeah, you're right Get Out!.."") else: update.effective_message.reply_text(""Huh? I can't :/"")",True,res,res,0.6656123399734497 5504,"def info(self): selector = '/info/' status, response, _ = MONAILabelUtils.http_method('GET', self._server_url, selector) if: raise MONAILabelClientException(MONAILabelError.SERVER_ERROR, f'Status: {status}; Response: {response}', status, response) response = response.decode('utf-8') if isinstance(response, bytes) else response logging.debug(f'Response: {response}') return json.loads(response)",True,status != 200,status != 200,0.681888222694397 5505,"def unsubscribe(self, channel, target_callback_fn, *args, **kwargs): """""" Unsubscribe callback function from channel """""" if: for any_channel_fn, *_ in self.any_channel: if target_callback_fn == any_channel_fn: self.any_channel.remove([target_callback_fn, *_]) else: callback_fn_list = self.pubsub_channels.get(channel) for callback_fn, *_ in callback_fn_list: if target_callback_fn == callback_fn: callback_fn_list.remove([target_callback_fn, *_])",False,channel == 'any',self.any_channel,0.6500998735427856 5506,"def unsubscribe(self, channel, target_callback_fn, *args, **kwargs): """""" Unsubscribe callback function from channel """""" if channel == 'any': for any_channel_fn, *_ in self.any_channel: if: self.any_channel.remove([target_callback_fn, *_]) else: callback_fn_list = self.pubsub_channels.get(channel) for callback_fn, *_ in callback_fn_list: if target_callback_fn == callback_fn: callback_fn_list.remove([target_callback_fn, *_])",True,target_callback_fn == any_channel_fn,target_callback_fn == any_channel_fn,0.6442728042602539 5507,"def unsubscribe(self, channel, target_callback_fn, *args, **kwargs): """""" Unsubscribe callback function from channel """""" if channel == 'any': for any_channel_fn, *_ in self.any_channel: if target_callback_fn == any_channel_fn: self.any_channel.remove([target_callback_fn, *_]) else: callback_fn_list = self.pubsub_channels.get(channel) for callback_fn, *_ in callback_fn_list: if: callback_fn_list.remove([target_callback_fn, *_])",True,target_callback_fn == callback_fn,target_callback_fn == callback_fn,0.6480973958969116 5508,"def get_lable_data1(self, label_path, gallery_flag): dataset = [] with open(label_path, 'r') as f: data = f.read() data_list = data.split('\n') for num, d in enumerate(data_list): add, label = d.split(',') if: img_path = add else: img_path = add dataset.append((img_path, int(label), 0)) return dataset",True,gallery_flag,gallery_flag,0.6456047892570496 5509,"@unittest.skipIf(not HAS_RLIMIT, 'not supported') def test_rlimit_set(self): p = self.spawn_psproc() p.rlimit(psutil.RLIMIT_NOFILE, (5, 5)) self.assertEqual(p.rlimit(psutil.RLIMIT_NOFILE), (5, 5)) if: with self.assertRaisesRegex(ValueError, ""can't use prlimit""): psutil._psplatform.Process(0).rlimit(0) with self.assertRaises(ValueError): p.rlimit(psutil.RLIMIT_NOFILE, (5, 5, 5))",False,LINUX,psplatform.Process(0).is_prlimit,0.6613467931747437 5510,"@staticmethod def merge_vlans(vlans: List['DiscoveryVLAN']) -> List['DiscoveryVLAN']: """""" Merge object vlans with artifactory ones :param vlans: :return: """""" r = [] processed = [] for v in vlans: if: continue r.append(v) processed.append(v.id) return vlans",True,v.id in processed,v.id in processed,0.6557514667510986 5511,"def dfs(n): global length visited[n] = True next = graph[n] print(dist[n]) length += dist[n] if: dfs(next)",False,not vistied[next],visited[n],0.6515978574752808 5512,"@hooks.hook('storage-backend-relation-changed') @os_requires_version('mitaka', 'glance-common') @restart_on_change(restart_map(), stopstart=True) def storage_backend_hook(): resolve_CONFIGS() if: juju_log('storage-backend relation incomplete. Peer not ready?') return install_packages_for_cinder_store() CONFIGS.write(GLANCE_API_CONF)",False,'storage-backend' not in CONFIGS.complete_contexts(),not configS.check_storage_backend_relation(GLANCE_API_CONF),0.6461843252182007 5513,"@fields.depends('action', '_parent_action.complaint', '_parent_action._parent_complaint.state') def on_change_with_complaint_state(self, name=None): if: return self.action.complaint.state",False,self.action and self.action.complaint,self.action,0.6479032039642334 5514,"@remove_unfiltered_items.setter def remove_unfiltered_items(self, value): if: self.infinity_settings.pop('remove_unfiltered_items', None) elif isinstance(value, bool): self.infinity_settings['remove_unfiltered_items'] = value else: raise TypeError(""'remove_unfiltered_items' must be a bool or None"")",True,value is None,value is None,0.6556593179702759 5515,"@remove_unfiltered_items.setter def remove_unfiltered_items(self, value): if value is None: self.infinity_settings.pop('remove_unfiltered_items', None) elif: self.infinity_settings['remove_unfiltered_items'] = value else: raise TypeError(""'remove_unfiltered_items' must be a bool or None"")",True,"isinstance(value, bool)","isinstance(value, bool)",0.644860029220581 5516,"def forward(self, x): """"""Forward function."""""" outs = list(super(DetectoRS_ResNet, self).forward(x)) if: outs.insert(0, x) return tuple(outs)",True,self.output_img,self.output_img,0.6539658308029175 5517,"def _check_player_is_active(fn): """""" Decorator that execute the decorated function if the dbus connection is alive, otherwise the function call is cached to run when the connection will be available """""" def wrapper(self, *args, **kargs): if: try: return fn(self, *args, **kargs) except dbus.exceptions.DBusException: pass elif self._exe: DBG('WARNING: player not active, caching command: %s' % fn.__name__) self._cached_commands[fn] = (args, kargs) return wrapper",False,self._root_iface and self._exe and (not self._exe.is_deleted()),fn is not None and self._is_alive,0.645082950592041 5518,"def _check_player_is_active(fn): """""" Decorator that execute the decorated function if the dbus connection is alive, otherwise the function call is cached to run when the connection will be available """""" def wrapper(self, *args, **kargs): if self._root_iface and self._exe and (not self._exe.is_deleted()): try: return fn(self, *args, **kargs) except dbus.exceptions.DBusException: pass elif: DBG('WARNING: player not active, caching command: %s' % fn.__name__) self._cached_commands[fn] = (args, kargs) return wrapper",False,self._exe,fn not in self._cached_commands,0.6565351486206055 5519,"def _recv_msg_memory_map_message_end(self, evt, *, msg): if: self._nav_memory_map = self._pending_nav_memory_map self._pending_nav_memory_map = None self.dispatch_event(nav_memory_map.EvtNewNavMemoryMap, nav_memory_map=self._nav_memory_map) else: logger.error('NavMemoryMap end without begin - ignoring')",True,self._pending_nav_memory_map is not None,self._pending_nav_memory_map is not None,0.6424176692962646 5520,"def filter(self, record: logging.LogRecord) -> bool: if: return False return True",False,'Cannot resolve forward reference' in record.msg,record.levelno < logging.INFO,0.6442811489105225 5521,"def get_td_map(self): """"""Return dict map for the summary (to be run in the algorithm)."""""" if: return self._get_td_map_maddpg() else: return self._get_td_map_basic()",False,self.maddpg,self.is_maddpg,0.6553565263748169 5522,"def any_requires_grad(self, x): for tensor in x: if: return True return False",True,tensor.requires_grad,tensor.requires_grad,0.6518598794937134 5523,"def focale35(self): focale35mm = self.tagExif('FocalLengthIn35mmFormat') if: if focale35mm == '0': focale35mm = '' if len(focale35mm) > 1: if focale35mm[0:2] == '0 ': focale35mm = '' return focale35mm",False,len(focale35mm) == 1,len(focale35mm) == 0,0.6516688466072083 5524,"def focale35(self): focale35mm = self.tagExif('FocalLengthIn35mmFormat') if len(focale35mm) == 1: if focale35mm == '0': focale35mm = '' if: if focale35mm[0:2] == '0 ': focale35mm = '' return focale35mm",False,len(focale35mm) > 1,len(focale35mm) > 2,0.6499054431915283 5525,"def focale35(self): focale35mm = self.tagExif('FocalLengthIn35mmFormat') if len(focale35mm) == 1: if: focale35mm = '' if len(focale35mm) > 1: if focale35mm[0:2] == '0 ': focale35mm = '' return focale35mm",False,focale35mm == '0',focale35mm[0:2] == '1',0.6557826995849609 5526,"def focale35(self): focale35mm = self.tagExif('FocalLengthIn35mmFormat') if len(focale35mm) == 1: if focale35mm == '0': focale35mm = '' if len(focale35mm) > 1: if: focale35mm = '' return focale35mm",False,focale35mm[0:2] == '0 ',focale35mm == '1',0.6502692103385925 5527,"def clean_email(self): email = self.cleaned_data['email'] if: raise forms.ValidationError(self.error_messages['duplicate_identifier'], code='duplicate_identifier') return email",False,User.objects.filter(Q(email__iexact=email) & ~Q(pk=self.instance.pk)).exists(),email in self.duplicates,0.6450191736221313 5528,"def __init__(self, max_replay_buffer_size, env, env_info_sizes=None): """""" :param max_replay_buffer_size: :param env: """""" self.env = env self._ob_space = env.observation_space self._action_space = env.action_space if: if hasattr(env, 'info_sizes'): env_info_sizes = env.info_sizes else: env_info_sizes = dict() super().__init__(max_replay_buffer_size=max_replay_buffer_size, observation_dim=get_dim(self._ob_space), action_dim=get_dim(self._action_space), env_info_sizes=env_info_sizes)",True,env_info_sizes is None,env_info_sizes is None,0.6532431840896606 5529,"def __init__(self, max_replay_buffer_size, env, env_info_sizes=None): """""" :param max_replay_buffer_size: :param env: """""" self.env = env self._ob_space = env.observation_space self._action_space = env.action_space if env_info_sizes is None: if: env_info_sizes = env.info_sizes else: env_info_sizes = dict() super().__init__(max_replay_buffer_size=max_replay_buffer_size, observation_dim=get_dim(self._ob_space), action_dim=get_dim(self._action_space), env_info_sizes=env_info_sizes)",True,"hasattr(env, 'info_sizes')","hasattr(env, 'info_sizes')",0.6473445296287537 5530,"@profile('Parameter_Reference.get_as_string_or_value') def get_as_string_or_value(self, plist=None, as_expr=False): """"""Return a string represeting the numeric value or a non-blank expression"""""" rna_par = self.get_param(plist) if: return self.get_expr(plist) else: isint = False s = None if 'user_type' in rna_par and rna_par['user_type'] == 'i': s = '%g' % int(self.get_value(plist)) else: s = '%g' % self.get_value(plist) return s",False,as_expr and len(rna_par.expr.strip()) > 0,as_expr,0.6442834138870239 5531,"@profile('Parameter_Reference.get_as_string_or_value') def get_as_string_or_value(self, plist=None, as_expr=False): """"""Return a string represeting the numeric value or a non-blank expression"""""" rna_par = self.get_param(plist) if as_expr and len(rna_par.expr.strip()) > 0: return self.get_expr(plist) else: isint = False s = None if: s = '%g' % int(self.get_value(plist)) else: s = '%g' % self.get_value(plist) return s",False,'user_type' in rna_par and rna_par['user_type'] == 'i',isint,0.6453817486763 5532,"def acquire(self): """"""Return next available identity or None. """""" if: return self.available.pop() else: return None",True,self.available,self.available,0.659140944480896 5533,"@property def message(self): if: message = self.args[0] if message is not None: return message",True,self.args,self.args,0.6582489609718323 5534,"@property def message(self): if self.args: message = self.args[0] if: return message",True,message is not None,message is not None,0.6516129970550537 5535,"def set_service_name(self, _channel_dict): updated_chnum = utils.wrap_chnum(str(_channel_dict['display_number']), _channel_dict['namespace'], _channel_dict['instance'], self.config) if: service_name = updated_chnum +'' + _channel_dict['display_name'] else: service_name = _channel_dict['display_name'] return service_name",False,self.config['epg']['epg_channel_number'],updated_chnum,0.6432791948318481 5536,"def create_node_from_type_name(self, d_context, name, register, tags=None): name, clevel = get_type_and_clevel(name) t = self.node_types[name] nd = t() nd.created_turn = d_context.get_turn_num() if d_context else 0 nd.constraint_level = clevel nd.inputs.aliases = nd.signature.aliases if: d_context.register_node(nd) if tags: nd.add_tags(tags) nd.context = d_context return nd",False,register and d_context,register,0.6530507802963257 5537,"def create_node_from_type_name(self, d_context, name, register, tags=None): name, clevel = get_type_and_clevel(name) t = self.node_types[name] nd = t() nd.created_turn = d_context.get_turn_num() if d_context else 0 nd.constraint_level = clevel nd.inputs.aliases = nd.signature.aliases if register and d_context: d_context.register_node(nd) if: nd.add_tags(tags) nd.context = d_context return nd",True,tags,tags,0.6713403463363647 5538,"def is_address_in_network(network, address): """""" Determine whether the provided address is within a network range. :param network (str): CIDR presentation format. For example, '192.168.1.0/24'. :param address: An individual IPv4 or IPv6 address without a net mask or subnet prefix. For example, '192.168.1.1'. :returns boolean: Flag indicating whether address is in network. """""" try: network = netaddr.IPNetwork(network) except (netaddr.core.AddrFormatError, ValueError): raise ValueError('Network (%s) is not in CIDR presentation format' % network) try: address = netaddr.IPAddress(address) except (netaddr.core.AddrFormatError, ValueError): raise ValueError('Address (%s) is not in correct presentation format' % address) if: return True else: return False",False,address in network,address >= network and address < network and address + 1 and (network == network),0.6560741662979126 5539,"def key_press(self, widget, event, conf): if: self.btn_save_clicked(0, conf)",True,"event.state & gtk.gdk.CONTROL_MASK != 0 and event.keyval in [83, 115]","event.state & gtk.gdk.CONTROL_MASK != 0 and event.keyval in [83, 115]",0.6460066437721252 5540,"def mutate_sub_blocks(block, target_desc, block_type_iter): """"""Mutate Sub Blocks."""""" if: return target_desc block_type = next(block_type_iter) target_block = BlockGenerator(c_in=block.c_in, block_type=block_type).run(block) if target_block: return mutate_block(target_desc, block, target_block) return target_desc",False,not block.c_in or not block.c_out,block.c_in in target_desc.c_in,0.6492673754692078 5541,"def mutate_sub_blocks(block, target_desc, block_type_iter): """"""Mutate Sub Blocks."""""" if not block.c_in or not block.c_out: return target_desc block_type = next(block_type_iter) target_block = BlockGenerator(c_in=block.c_in, block_type=block_type).run(block) if: return mutate_block(target_desc, block, target_block) return target_desc",False,target_block,"block_type in (BlockType.sub_block, BlockType.sub_out)",0.667129635810852 5542,"@classmethod def get_init_valid_params(cls): init_valid_params = {**cls.get_init_train_params(),'scenes': cls.valid_scenes, 'player_screen_height': 224, 'player_screen_width': 224, 'headless': False} if: init_valid_params['save_talk_reply_probs_path'] = cls.save_talk_reply_probs_path return init_valid_params",True,cls.save_talk_reply_probs_path is not None,cls.save_talk_reply_probs_path is not None,0.6467662453651428 5543,"def test_add_privilege_default_datastore_false(self): if: return tenant = self.tenantMgr.CreateTenant(name=TENANT_NAME, description=TENANT_DESC) p1 = self.create_privilege() p2 = self.create_privilege_2() self.tenantMgr.AddPrivilege(tenant, p1) self.tenantMgr.AddPrivilege(tenant, p2, default_datastore=False) result = self.tenantMgr.GetTenants(name=TENANT_NAME) self.assertTrue(result) self.assertEqual(result[0].default_datastore, self.datastore)",False,not self.datastore2,self.datastore is False,0.6569033265113831 5544,"def _fix(target: dict) -> None: target['region'] = target.pop('world_name') target['area'] = target.pop('area_name') if: target['node'] = target.pop('node_name')",False,'node_name' in target,target['node'],0.6565820574760437 5545,"def log(self, level, msg, **kw): if: msg = msg % kw self.data.append(msg)",False,kw,level <= self.log_level,0.7005583643913269 5546,"def _find_path(self, key, lang, items): if: return self._wem_path paths = items.get(key) if paths: path = self._mod_path + self._find_path_lang(paths, lang) else: path = self._wem_path return path",False,not self._auto_find or key is None,not items,0.6465244293212891 5547,"def _find_path(self, key, lang, items): if not self._auto_find or key is None: return self._wem_path paths = items.get(key) if: path = self._mod_path + self._find_path_lang(paths, lang) else: path = self._wem_path return path",True,paths,paths,0.6694031953811646 5548,"def _update(self): if: raise RuntimeError('Uniform variable is not active') if self._gtype in (gl.GL_FLOAT_MAT2, gl.GL_FLOAT_MAT3, gl.GL_FLOAT_MAT4): transpose = False self._ufunction(self._handle, 1, transpose, self._data) elif self._gtype in (gl.GL_SAMPLER_1D, gl.GL_SAMPLER_2D): gl.glUniform1i(self._handle, self._unit) else: self._ufunction(self._handle, 1, self._data)",False,not self._active,self._handle is None,0.662697970867157 5549,"def _update(self): if not self._active: raise RuntimeError('Uniform variable is not active') if: transpose = False self._ufunction(self._handle, 1, transpose, self._data) elif self._gtype in (gl.GL_SAMPLER_1D, gl.GL_SAMPLER_2D): gl.glUniform1i(self._handle, self._unit) else: self._ufunction(self._handle, 1, self._data)",False,"self._gtype in (gl.GL_FLOAT_MAT2, gl.GL_FLOAT_MAT3, gl.GL_FLOAT_MAT4)","self._gtype in (gl.GL_SAMPLER_1D, gl.GL_SAMPLER_2D)",0.6469180583953857 5550,"def _update(self): if not self._active: raise RuntimeError('Uniform variable is not active') if self._gtype in (gl.GL_FLOAT_MAT2, gl.GL_FLOAT_MAT3, gl.GL_FLOAT_MAT4): transpose = False self._ufunction(self._handle, 1, transpose, self._data) elif: gl.glUniform1i(self._handle, self._unit) else: self._ufunction(self._handle, 1, self._data)",False,"self._gtype in (gl.GL_SAMPLER_1D, gl.GL_SAMPLER_2D)",self._gtype == gl.GL_FLOAT_MAT3,0.6451563835144043 5551,"def fetch_trading_limits(self, symbols=None, params={}): self.load_markets() if: symbols = self.symbols result = {} for i in range(0, len(symbols)): symbol = symbols[i] result[symbol] = self.fetch_trading_limits_by_id(self.market_id(symbol), params) return result",True,symbols is None,symbols is None,0.6584390997886658 5552,"def fix_dependencies(self, deps): for i, dep in enumerate(deps): tex_dtype = self.header.textures.data[i].dtype if: dep.dependency_name.data = ''",False,tex_dtype == FgmDtype.RGBA,"tex_dtype in (tf.Tensor, torch.Tensor.Float)",0.6444164514541626 5553,"def createFlashCache(self, sizeInGib, mode): """"""Creates a new FlashCache :param sizeInGib: Specifies the node pair size of the Flash Cache on the system. :type: int :param: mode : Simulator: 1 Real: 2 (default) :type: int :raises: :class:`~hpe3parclient.exceptions.HTTPBadRequest` - NO_SPACE - Not enough space is available for the operation. :raises: :class:`~hpe3parclient.exceptions.HTTPBadRequest` - INV_INPUT_EXCEEDS_RANGE - A JSON input object contains a name-value pair with a numeric value that exceeds the expected range. Flash Cache exceeds the expected range. The HTTP ref member contains the name. :raises: :class:`~hpe3parclient.exceptions.HTTPConflict` - EXISTENT_FLASH_CACHE - The Flash Cache already exists. :raises: :class:`~hpe3parclient.exceptions.HTTPForbidden` - FLASH_CACHE_NOT_SUPPORTED - Flash Cache is not supported. :raises: :class:`~hpe3parclient.exceptions.HTTPBadRequest` - INV_FLASH_CACHE_SIZE - Invalid Flash Cache size. The size must be a multiple of 16 G. """""" flash_cache = {'sizeGiB': sizeInGib} if: mode = {'mode': mode} flash_cache = self._mergeDict(flash_cache, mode) info = {'flashCache': flash_cache} response, body = self.http.post('/', body=info) return body",False,mode is not None,"isinstance(mode, int)",0.6517804265022278 5554,"def _resize_seg(self, results): """"""Resize semantic segmentation map with ``results['scale']``."""""" for key in results.get('seg_fields', []): if: gt_seg = mmcv.imrescale(results[key], results['scale'], interpolation='nearest', backend=self.backend) else: gt_seg = mmcv.imresize(results[key], results['scale'], interpolation='nearest', backend=self.backend) results['gt_semantic_seg'] = gt_seg",True,self.keep_ratio,self.keep_ratio,0.6498093605041504 5555,"def close(self): if: self.writer.Close() self.writer = None",False,self.writer,self.writer is not None,0.6563953161239624 5556,"def _run(self): """""" While more time on the clock, stay in the inner loop. If not, carry out the timeout function """""" while self.running: if: while self.time_left and self.running: time.sleep(1) self.lock.acquire() self.time_left -= 1 self.lock.release() self._on_timeout() else: time.sleep(1)",False,self.time_left,self._on_timeout is not None,0.6519863605499268 5557,"def check_model(self, yml_conf): """""" Raises: ValueError: loaded model not in supported model type """""" for support_model in SUPPORT_MODELS: if: return True raise ValueError('Unsupported arch: {}, expect {}'.format(yml_conf['arch'], SUPPORT_MODELS))",True,support_model in yml_conf['arch'],support_model in yml_conf['arch'],0.6481178998947144 5558,"def prepare_mask(self, img_num, input_im, input_im_all): with torch.no_grad(): if: self.input_mask_all = [] for i in range(img_num): input_im = input_im_all[i].cuda() self.input_mask_all.append(self.parse_mask(input_im).cpu()) self.input_mask = self.input_mask_all[0].cuda() else: self.input_mask = self.parse_mask(input_im)",False,self.joint_train,input_im_all is not None,0.6502342224121094 5559,"def op_continue(self): if: var = self.switch_continue_stack[-1] self.write(self.indent()) self.write(var +'= True\n') self.write(self.indent()) self.write('break\n') else: self.write(self.indent()) self.write('continue\n')",False,self.switch_expr_stack,self.switch_continue_stack,0.6489455699920654 5560,"def process_view(self, request, callback, callback_args, callback_kwargs): if: self.profiler = cProfile.Profile() args = (request,) + callback_args return self.profiler.runcall(callback, *args, **callback_kwargs)",False,settings.DEBUG and 'prof' in request.GET,self.profiler is None,0.6492940783500671 5561,"def query(self, dbase, task, query): logger.info('Running query...') cmd = 'java -jar casjobs.jar run -t %s -n %s -f %s' % (dbase, task, query) logger.info(cmd) ret = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) if: raise subprocess.CalledProcessError(1, cmd, ret) return ret",False,'ERROR:' in ret,ret != 0,0.6594752669334412 5562,"def _check_slice_time_ref(slice_time_ref): """"""Check that slice_time_ref is a number between 0 and 1."""""" if: raise TypeError(f""'slice_time_ref' must be a float or an integer. Got {type(slice_time_ref)} instead."") if slice_time_ref < 0 or slice_time_ref > 1: raise ValueError(f""'slice_time_ref' must be between 0 and 1. Got {slice_time_ref} instead."")",False,"not isinstance(slice_time_ref, (float, int))","not isinstance(slice_time_ref, float_types)",0.6489183902740479 5563,"def _check_slice_time_ref(slice_time_ref): """"""Check that slice_time_ref is a number between 0 and 1."""""" if not isinstance(slice_time_ref, (float, int)): raise TypeError(f""'slice_time_ref' must be a float or an integer. Got {type(slice_time_ref)} instead."") if: raise ValueError(f""'slice_time_ref' must be between 0 and 1. Got {slice_time_ref} instead."")",True,slice_time_ref < 0 or slice_time_ref > 1,slice_time_ref < 0 or slice_time_ref > 1,0.6446752548217773 5564,"def connect(self): if: logger.log('[button]: Creating a new thread to connect to Artoo.') self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.client.setblocking(0) self.connectingThread = threading.Thread(target=self.connectThread) self.connectingThread.daemon = True self.connectingThread.start()",False,not self.connectingThread or not self.connectingThread.is_alive(),not self.connectingThread or self.connectingThread.running == True,0.6515257954597473 5565,"def __init__(self, expr, stopOn=None): super(_MultipleMatch, self).__init__(expr) self.saveAsList = True ender = stopOn if: ender = ParserElement._literalStringClass(ender) self.not_ender = ~ender if ender is not None else None",True,"isinstance(ender, basestring)","isinstance(ender, basestring)",0.6504501104354858 5566,"def forwardRequires(pkg, set): for req in pkg.requires: if: set.add(req) for prv in req.providedby: if prv not in set: set.add(prv) for prvpkg in prv.packages: if prvpkg not in set: set.add(prvpkg) forwardRequires(prvpkg, set)",False,req not in set,req.providedby,0.6612359285354614 5567,"def forwardRequires(pkg, set): for req in pkg.requires: if req not in set: set.add(req) for prv in req.providedby: if: set.add(prv) for prvpkg in prv.packages: if prvpkg not in set: set.add(prvpkg) forwardRequires(prvpkg, set)",True,prv not in set,prv not in set,0.6570099592208862 5568,"def forwardRequires(pkg, set): for req in pkg.requires: if req not in set: set.add(req) for prv in req.providedby: if prv not in set: set.add(prv) for prvpkg in prv.packages: if: set.add(prvpkg) forwardRequires(prvpkg, set)",True,prvpkg not in set,prvpkg not in set,0.6540502309799194 5569,"def predict(self, features: torch.Tensor, compute_covariance=False): """""" Make predictions Args: features: (Npoints x Nfeatures) tensor containing test features compute_covariance: whether to compute and return a covariance matrix over test targets Returns: pred_mu: a (Npoints) tensor containing predicted values for the test points pred_cov: A (Npoints x Npoints) covariance matrix if compute_covariance is True, else None """""" assert len(self.w) == features.shape[1] assert self.w is not None,'model must be fit before it can make predictions' pred_mu = features @ self.w if: pred_cov = features @ torch.solve(features.T, self.posterior_prec)[0] else: pred_cov = None return (pred_mu, pred_cov)",True,compute_covariance,compute_covariance,0.664837121963501 5570,"def output(vis, fname): if: print(fname) cv2.imshow('window', vis.get_image()[:, :, ::-1]) cv2.waitKey() else: filepath = os.path.join(dirname, fname) print('Saving to {}...'.format(filepath)) vis.save(filepath)",True,args.show,args.show,0.6586213111877441 5571,"def instantiate_embedding_manager(self, config, embedder): model = instantiate_from_config(config, embedder=embedder) if: model.load(config.params.embedding_manager_ckpt) return model",True,"config.params.get('embedding_manager_ckpt', None)","config.params.get('embedding_manager_ckpt', None)",0.6457057595252991 5572,"def _get(self): fastest_time = 9999 fastest_sock = None for sock in self.pool: hs_time = self.pool[sock] if: fastest_time = hs_time fastest_sock = sock self.pool.pop(fastest_sock) return (fastest_time, fastest_sock)",False,hs_time < fastest_time or not fastest_sock,hs_time > fastest_time,0.6450691223144531 5573,"def build_wheel(self, wheel_directory, config_settings=None, metadata_directory=None): """"""Build a wheel from this project. Returns the name of the newly created file. In general, this will call the 'build_wheel' hook in the backend. However, if that was previously called by 'prepare_metadata_for_build_wheel', and the same metadata_directory is used, the previously built wheel will be copied to wheel_directory. """""" if: metadata_directory = abspath(metadata_directory) return self._call_hook('build_wheel', {'wheel_directory': abspath(wheel_directory), 'config_settings': config_settings,'metadata_directory': metadata_directory})",False,metadata_directory is not None,"metadata_directory and (not isinstance(metadata_directory, abspath))",0.6497817039489746 5574,"def MD5collect(signatures): """""" Collects a list of signatures into an aggregate signature. signatures - a list of signatures returns - the aggregate signature """""" if: return signatures[0] else: return MD5signature(', '.join(signatures))",True,len(signatures) == 1,len(signatures) == 1,0.6471107006072998 5575,"def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): if: raise ArgumentsRequired(self.id +'fetchOpenOrders requires a symbol argument') self.load_markets() market = self.market(symbol) request = {'market': market['id']} response = self.privateGetTradesMy(self.extend(request, params)) return self.parse_trades(response, market, since, limit)",True,symbol is None,symbol is None,0.659775972366333 5576,"def getCentroid(r_rmsd, beta=1.0): """""" Gets index of the centroid of a cluster. Follows previous MDTraj procedure. http://mdtraj.org/1.4.2/examples/centroids.html Parameters ----------- r_rmsd : np.ndarray Pairwise RMSD distances used for obtaining centroids beta : float, optinal parameter used in computing the similarity score below default set to 1.0 Returns ----------- int If cluster only has one member, then return an index of 0. Otherwise, return the maximum of the computed similarity score. """""" if: return np.exp(-beta * r_rmsd / r_rmsd.std()).sum(axis=1).argmax() else: return 0",False,len(r_rmsd) > 1,r_rmsd.sum() > 1,0.6473062038421631 5577,"def get_python(path): if: return sys.executable if os.path.isabs(path): return path path = path.replace('.', '') vers = ('26', '26-32', '26-64', '27', '27-32', '27-64', '36', '36-32', '36-64', '37', '37-32', '37-64', '38', '38-32', '38-64', '39-32', '39-64') for v in vers: pypath = 'C:\\\\python%s\\python.exe' % v if path in pypath and os.path.isfile(pypath): return pypath",False,not path,path is None,0.6669911742210388 5578,"def get_python(path): if not path: return sys.executable if: return path path = path.replace('.', '') vers = ('26', '26-32', '26-64', '27', '27-32', '27-64', '36', '36-32', '36-64', '37', '37-32', '37-64', '38', '38-32', '38-64', '39-32', '39-64') for v in vers: pypath = 'C:\\\\python%s\\python.exe' % v if path in pypath and os.path.isfile(pypath): return pypath",False,os.path.isabs(path),os.path.isfile(path),0.6459641456604004 5579,"def get_python(path): if not path: return sys.executable if os.path.isabs(path): return path path = path.replace('.', '') vers = ('26', '26-32', '26-64', '27', '27-32', '27-64', '36', '36-32', '36-64', '37', '37-32', '37-64', '38', '38-32', '38-64', '39-32', '39-64') for v in vers: pypath = 'C:\\\\python%s\\python.exe' % v if: return pypath",False,path in pypath and os.path.isfile(pypath),os.path.exists(pypath),0.6464709639549255 5580,"def _get_seeds_indexes(self): if: index = torch.randperm(self.input_len) else: index = torch.arange(self.input_len) seeds_indexes = [] for rank in range(self.num_workers): start, end = self._worker_seeds_ranges[rank] seeds_indexes.append(index[start:end]) return seeds_indexes",False,self.sampling_config.shuffle,self.shuffle,0.6456879377365112 5581,"def add(self, cls, estimators, preprocessing=None, meta=False, **kwargs): """"""Add layer to ensemble. For full set of optional arguments, see the ensemble API for the specified type. Parameters ---------- cls : str layer class. Accepted types are: * 'blend' : blend ensemble *'subsemble' : subsemble *'stack' : super learner estimators: dict of lists or list or instance estimators constituting the layer. If preprocessing is none and the layer is meant to be the meta estimator, it is permissible to pass a single instantiated estimator. If ``preprocessing`` is ``None`` or ``list``, ``estimators`` should be a ``list``. The list can either contain estimator instances, named tuples of estimator instances, or a combination of both. :: option_1 = [estimator_1, estimator_2] option_2 = [(""est-1"", estimator_1), (""est-2"", estimator_2)] option_3 = [estimator_1, (""est-2"", estimator_2)] If different preprocessing pipelines are desired, a dictionary that maps estimators to preprocessing pipelines must be passed. The names of the estimator dictionary must correspond to the names of the estimator dictionary. :: preprocessing_cases = {""case-1"": [trans_1, trans_2], ""case-2"": [alt_trans_1, alt_trans_2]} estimators = {""case-1"": [est_a, est_b], ""case-2"": [est_c, est_d]} The lists for each dictionary entry can be any of ``option_1``, ``option_2`` and ``option_3``. preprocessing: dict of lists or list, optional (default = None) preprocessing pipelines for given layer. If the same preprocessing applies to all estimators, ``preprocessing`` should be a list of transformer instances. The list can contain the instances directly, named tuples of transformers, or a combination of both",False,cls not in INDEXERS,meta,0.6458154916763306 5582,"def add(self, cls, estimators, preprocessing=None, meta=False, **kwargs): """"""Add layer to ensemble. For full set of optional arguments, see the ensemble API for the specified type. Parameters ---------- cls : str layer class. Accepted types are: * 'blend' : blend ensemble *'subsemble' : subsemble *'stack' : super learner estimators: dict of lists or list or instance estimators constituting the layer. If preprocessing is none and the layer is meant to be the meta estimator, it is permissible to pass a single instantiated estimator. If ``preprocessing`` is ``None`` or ``list``, ``estimators`` should be a ``list``. The list can either contain estimator instances, named tuples of estimator instances, or a combination of both. :: option_1 = [estimator_1, estimator_2] option_2 = [(""est-1"", estimator_1), (""est-2"", estimator_2)] option_3 = [estimator_1, (""est-2"", estimator_2)] If different preprocessing pipelines are desired, a dictionary that maps estimators to preprocessing pipelines must be passed. The names of the estimator dictionary must correspond to the names of the estimator dictionary. :: preprocessing_cases = {""case-1"": [trans_1, trans_2], ""case-2"": [alt_trans_1, alt_trans_2]} estimators = {""case-1"": [est_a, est_b], ""case-2"": [est_c, est_d]} The lists for each dictionary entry can be any of ``option_1``, ``option_2`` and ``option_3``. preprocessing: dict of lists or list, optional (default = None) preprocessing pipelines for given layer. If the same preprocessing applies to all estimators, ``preprocessing`` should be a list of transformer instances. The list can contain the instances directly, named tuples of transformers, or a combination of both",False,cls == 'subsemble' and 'partition_estimator' in kwargs,meta,0.6422111392021179 5583,"def uncompressed_rle(mask): l = mask.flatten(order='F').tolist() counts = [] p = False cnt = 0 for i in l: if: cnt += 1 else: counts.append(cnt) p = i cnt = 1 counts.append(cnt) return {'counts': counts,'size': [mask.shape[0], mask.shape[1]]}",True,i == p,i == p,0.6687400341033936 5584,"def fmt_ctl_handle_mutual_exclude(value, target, other): new = value.split(',') while ':all:' in new: other.clear() target.clear() target.add(':all:') del new[:new.index(':all:') + 1] if: return for name in new: if name == ':none:': target.clear() continue name = canonicalize_name(name) other.discard(name) target.add(name)",False,':none:' not in new,len(new) > 1,0.6518645286560059 5585,"def fmt_ctl_handle_mutual_exclude(value, target, other): new = value.split(',') while ':all:' in new: other.clear() target.clear() target.add(':all:') del new[:new.index(':all:') + 1] if ':none:' not in new: return for name in new: if: target.clear() continue name = canonicalize_name(name) other.discard(name) target.add(name)",False,name == ':none:',name in target,0.6499770879745483 5586,"@classmethod def get_repository_root(cls, location: str) -> Optional[str]: loc = super().get_repository_root(location) if: return loc try: r = cls.run_command(['rev-parse', '--show-toplevel'], cwd=location, show_stdout=False, stdout_only=True, on_returncode='raise', log_failed_cmd=False) except BadCommand: logger.debug('could not determine if %s is under git control because git is not available', location) return None except InstallationError: return None return os.path.normpath(r.rstrip('\r\n'))",True,loc,loc,0.6742532849311829 5587,"@fields.depends('subscription', '_parent_subscription.state') def on_change_with_subscription_state(self, name=None): if: return self.subscription.state",True,self.subscription,self.subscription,0.6606709361076355 5588,"@staticmethod def update_layouts(): for l in Layout.layouts: l.update() if: Layout.render_debug_info()",True,Layout.debug,Layout.debug,0.6598820090293884 5589,"def extra_current_labels(self): """""" return whether there are labels currently stored that user hasn't directly provided """""" extra_labels = self.get_extra_current_labels() if: return True return False",False,len(extra_labels) > 0,extra_labels is not None,0.6428934335708618 5590,"def Add(self): """"""method Add"""""" ret = self._oleobj_.InvokeTypes(2, LCID, 1, (9, 0), ()) if: ret = Dispatch(ret, u'Add', '{F6B86D1A-4FD8-434C-B0B4-9E6B9FB54BFC}') return ret",True,ret is not None,ret is not None,0.6525436639785767 5591,"def addSubMatcher(self, matcher: IControlMatcher, priority: int=0) -> None: """""" Register a control matcher to work as a component of this control matcher This allows for more complex control mappings to be made without the need to implement a full control matcher ### Args: * `matcher` (`IControlMatcher`): control matcher to add * `priority` (`int`): Matcher priority of control (higher priority controls will be matched first) """""" if: self._sub_matchers[priority].append(matcher) else: self._priorities.add(priority) self._sub_matchers[priority] = [matcher]",True,priority in self._sub_matchers,priority in self._sub_matchers,0.646377682685852 5592,"def construct_request_table_expr(self, agg_spec: AggregationSpec, request_table_name: str) -> expressions.Select: """""" Construct a Select statement that forms the processed request table Parameters ---------- agg_spec: AggregationSpec Aggregation specification request_table_name: str Name of the original request table that is determined at runtime Returns ------- expressions.Select """""" selected_columns = [] if: selected_columns.append(quoted_identifier(SpecialColumnName.POINT_IN_TIME)) selected_columns.extend([quoted_identifier(x) for x in agg_spec.serving_names]) select_distinct_expr = select(*selected_columns).distinct().from_(request_table_name) return select_distinct_expr",False,self.is_time_aware,agg_spec.serving_names is None,0.645368218421936 5593,"def pixelwise_fss_function(target_tensor, prediction_tensor): """"""Computes pixelwise fractions skill score (FSS) at a given scale. :param target_tensor: Tensor of target (actual) values. :param prediction_tensor: Tensor of predicted values. :return: pixelwise_fss: Pixelwise FSS (scalar). """""" target_tensor, prediction_tensor = _filter_fields(target_tensor=target_tensor, prediction_tensor=prediction_tensor, spatial_coeff_matrix=spatial_coeff_matrix, frequency_coeff_matrix=frequency_coeff_matrix, orig_num_rows=orig_num_rows, orig_num_columns=orig_num_columns)[:2] fss_value = get_pixelwise_fss(target_tensor=target_tensor, prediction_tensor=prediction_tensor, mask_matrix=mask_matrix) if: return 1.0 - fss_value return fss_value",True,use_as_loss_function,use_as_loss_function,0.6469223499298096 5594,"def describe(net, name=None): num = 0 lines = [] if: lines.append(name) num = len(name) _describe(net, lines, num) return '\n'.join(lines)",False,name is not None,name,0.6518299579620361 5595,"def next(self, length): if: return None elif self._index + length > len(self._data): length = len(self._data) - self._index self._index += length return self._data[self._index - length:self._index]",False,self._index == len(self._data),self._index >= len(self._data),0.6478126049041748 5596,"def next(self, length): if self._index == len(self._data): return None elif: length = len(self._data) - self._index self._index += length return self._data[self._index - length:self._index]",True,self._index + length > len(self._data),self._index + length > len(self._data),0.6475909352302551 5597,"@pytest.mark.parametrize('is_assigned, expected', [(True, 1), (False, 0)]) def test_project_list_external_employee(external_employee_client, is_assigned, expected): ProjectFactory.create_batch(4) project = ProjectFactory.create() if: project.assignees.add(external_employee_client.user) url = reverse('project-list') response = external_employee_client.get(url) assert response.status_code == status.HTTP_200_OK json = response.json() assert len(json['data']) == expected",True,is_assigned,is_assigned,0.661645770072937 5598,"def init_optimizer(self, kvstore='local', optimizer='sgd', optimizer_params=(('learning_rate', 0.01),), force_init=False): assert self.binded and self.params_initialized if: self.logger.warning('optimizer already initialized, ignoring.') return self._curr_module._preload_opt_states = self._preload_opt_states self._curr_module.init_optimizer(kvstore, optimizer, optimizer_params, force_init=force_init) self.optimizer_initialized = True",False,self.optimizer_initialized and (not force_init),self.optimizer_initialized,0.6438612937927246 5599,"def tcachebin(self, i): """"""Return head chunk in tcache[i]."""""" heap_base = HeapBaseFunction.heap_base() addr = dereference(heap_base + 2 * current_arch.ptrsize + self.TCACHE_MAX_BINS + i * current_arch.ptrsize) if: return None return GlibcChunk(long(addr))",False,not addr,addr == 0,0.6686329245567322 5600,"def wrap(api, authorizer): """"""Returns module API wrapped with authorizer function."""""" for action in dir(api): if: continue if authorizer: auth = _authorize(authorizer) attr = getattr(api, action) if hasattr(attr, '__call__'): setattr(api, action, auth(attr)) elif hasattr(attr, '__init__'): setattr(api, action, wrap(attr, authorizer)) else: _LOGGER.want('unknown attribute type: %r, %s', api, action) return api",False,action.startswith('_'),action.startswith('authorize'),0.6478747129440308 5601,"def wrap(api, authorizer): """"""Returns module API wrapped with authorizer function."""""" for action in dir(api): if action.startswith('_'): continue if: auth = _authorize(authorizer) attr = getattr(api, action) if hasattr(attr, '__call__'): setattr(api, action, auth(attr)) elif hasattr(attr, '__init__'): setattr(api, action, wrap(attr, authorizer)) else: _LOGGER.want('unknown attribute type: %r, %s', api, action) return api",False,authorizer,action == 'authorize',0.6750999689102173 5602,"def wrap(api, authorizer): """"""Returns module API wrapped with authorizer function."""""" for action in dir(api): if action.startswith('_'): continue if authorizer: auth = _authorize(authorizer) attr = getattr(api, action) if: setattr(api, action, auth(attr)) elif hasattr(attr, '__init__'): setattr(api, action, wrap(attr, authorizer)) else: _LOGGER.want('unknown attribute type: %r, %s', api, action) return api",False,"hasattr(attr, '__call__')","attr and isinstance(attr, types.AuthType)",0.6449244022369385 5603,"def wrap(api, authorizer): """"""Returns module API wrapped with authorizer function."""""" for action in dir(api): if action.startswith('_'): continue if authorizer: auth = _authorize(authorizer) attr = getattr(api, action) if hasattr(attr, '__call__'): setattr(api, action, auth(attr)) elif: setattr(api, action, wrap(attr, authorizer)) else: _LOGGER.want('unknown attribute type: %r, %s', api, action) return api",False,"hasattr(attr, '__init__')","hasattr(attr, '__call__')",0.6440829038619995 5604,"@torch.no_grad() def to_rgb(self, x): x = x.float() if: self.colorize = torch.randn(3, x.shape[1], 1, 1).to(x) x = nn.functional.conv2d(x, weight=self.colorize) x = 2.0 * (x - x.min()) / (x.max() - x.min()) - 1.0 return x",True,"not hasattr(self, 'colorize')","not hasattr(self, 'colorize')",0.6425018310546875 5605,"def loss(self, pred, label): if: label = label.cuda() return F.cross_entropy(pred, label)",True,self.gpu,self.gpu,0.6573907136917114 5606,"def __contains__(self, item): if: return item in self._cache else: for i in self: if i == item: return True elif i > item: return False return False",True,self._cache_complete,self._cache_complete,0.6481263637542725 5607,"def __contains__(self, item): if self._cache_complete: return item in self._cache else: for i in self: if: return True elif i > item: return False return False",False,i == item,i < item,0.6707817316055298 5608,"def __contains__(self, item): if self._cache_complete: return item in self._cache else: for i in self: if i == item: return True elif: return False return False",False,i > item,i == item,0.6675930023193359 5609,"def shutdown(self, wait: bool=False, cancel_futures: bool=True) -> None: if: self._shutdown_event.set() can_cancel = 'cancel_futures' in inspect.getfullargspec(self._executor_pool.shutdown).kwonlyargs self._executor_pool.shutdown(wait=wait, cancel_futures=cancel_futures) if can_cancel else self._executor_pool.shutdown(wait=False) self._executor_pool = None",True,self._executor_pool,self._executor_pool,0.654017448425293 5610,"def PPO(df, p): if: p = [p, 30, 90] df['PPO'] = ta.PPO(df.close, p[1], p[2]) df = df.replace([np.inf, -np.inf], np.nan) return df",True,len(p) <= 2,len(p) <= 2,0.6596046686172485 5611,"def test14_equals(self): """"""Testing the'same_as' and 'equals' lookup types."""""" if: return pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326) c1 = City.objects.get(point=pnt) c2 = City.objects.get(point__same_as=pnt) c3 = City.objects.get(point__equals=pnt) for c in [c1, c2, c3]: self.assertEqual('Houston', c.name)",False,DISABLE,"isinstance(self, City)",0.6793748140335083 5612,"def __getitem__(self, idxs): samples = self.dataset[idxs, :] if: samples = normalize_image(samples) return np.float32(samples)",False,self.should_normalize,self.normalize,0.648504376411438 5613,"@auto_fp16() def forward(self, x): for conv in self.convs: x = conv(x) if: x = self.downsample_conv(x) x = x.flatten(1) for fc in self.fcs: x = self.relu(fc(x)) mask_pred = self.fc_logits(x).view(x.size(0), self.num_classes, *self.output_size) return mask_pred",False,self.downsample_conv is not None,self.downsample,0.6469526290893555 5614,"def add_flops_counter_hook_function(module): if: if hasattr(module, '__flops_handle__'): return else: handle = module.register_forward_hook(get_modules_mapping()[type(module)]) module.__flops_handle__ = handle",True,is_supported_instance(module),is_supported_instance(module),0.6484450101852417 5615,"def add_flops_counter_hook_function(module): if is_supported_instance(module): if: return else: handle = module.register_forward_hook(get_modules_mapping()[type(module)]) module.__flops_handle__ = handle",True,"hasattr(module, '__flops_handle__')","hasattr(module, '__flops_handle__')",0.6505140066146851 5616,"def add_heatmap(tensor): """""" Add heatmap to 2D tensor. Args: tensor (tensor): a 2D tensor. Tensor value must be in [0..1] range. Returns: heatmap (tensor): a 3D tensor. Result of applying heatmap to the 2D tensor. """""" assert tensor.ndim == 2, 'Only support 2D tensors.' if: arr = tensor.cpu() else: arr = tensor arr = arr.numpy() cm = plt.get_cmap('viridis') heatmap = cm(arr) heatmap = heatmap[:, :, :3] heatmap = torch.Tensor(heatmap).permute(2, 0, 1) return heatmap",True,tensor.device != torch.device('cpu'),tensor.device != torch.device('cpu'),0.6464866995811462 5617,"@staticmethod def _make_close_to_resources(inference_spec_type: model_spec_pb2.InferenceSpecType) -> str: """"""Proximity resources not otherwise known (or visible) to Beam."""""" if: estimated_num_workers = 100 model_path = inference_spec_type.saved_model_spec.model_path model_size_bytes = RunInferenceImpl._model_size_bytes(model_path) return f'{model_path}[{model_size_bytes * estimated_num_workers}]' else: estimated_rpc_traffic_size_bytes = 1 << 40 del estimated_rpc_traffic_size_bytes return ''",False,_using_in_process_inference(inference_spec_type),inference_spec_type.saved_model_spec.model_path is not None,0.644621729850769 5618,"def _adjust_topcrash_criteria(self, topcrash_criteria): factor = 2 new_criteria = [] for criterion in topcrash_criteria: criterion = {**criterion, 'tc_limit': criterion['tc_limit'] * factor} if: criterion['tc_startup_limit'] = criterion['tc_startup_limit'] * factor new_criteria.append(criterion) return new_criteria",False,'tc_startup_limit' in criterion,criterion['tc_startup_limit'] is not None,0.6532732248306274 5619,"@functools.wraps(fn) def wrapped(self, prospective, spec): if: return False return fn(self, prospective, spec)",True,"not isinstance(prospective, Version)","not isinstance(prospective, Version)",0.6451810002326965 5620,"def _get_paths_from_images(path): assert os.path.isdir(path), '{:s} is not a valid directory'.format(path) images = [] for dirpath, _, fnames in sorted(os.walk(path)): for fname in sorted(fnames): if: img_path = os.path.join(dirpath, fname) images.append(img_path) assert images, '{:s} has no valid image file'.format(path) return images",True,is_image_file(fname),is_image_file(fname),0.6424165964126587 5621,"@wraps(func) def pin_rights(update: Update, context: CallbackContext, *args, **kwargs): bot = context.bot chat = update.effective_chat update_chat_title = chat.title message_chat_title = update.effective_message.chat.title if: cant_pin = ""I can't pin messages here!\nMake sure I'm admin and can pin messages."" else: cant_pin = f""I can't pin messages in {update_chat_title}!\nMake sure I'm admin and can pin messages there."" if chat.get_member(bot.id).can_pin_messages: return func(update, context, *args, **kwargs) else: update.effective_message.reply_text(cant_pin, parse_mode=ParseMode.HTML)",True,update_chat_title == message_chat_title,update_chat_title == message_chat_title,0.6492061614990234 5622,"@wraps(func) def pin_rights(update: Update, context: CallbackContext, *args, **kwargs): bot = context.bot chat = update.effective_chat update_chat_title = chat.title message_chat_title = update.effective_message.chat.title if update_chat_title == message_chat_title: cant_pin = ""I can't pin messages here!\nMake sure I'm admin and can pin messages."" else: cant_pin = f""I can't pin messages in {update_chat_title}!\nMake sure I'm admin and can pin messages there."" if: return func(update, context, *args, **kwargs) else: update.effective_message.reply_text(cant_pin, parse_mode=ParseMode.HTML)",True,chat.get_member(bot.id).can_pin_messages,chat.get_member(bot.id).can_pin_messages,0.6447633504867554 5623,"def serialize(self): data = super(DraftService, self).serialize() data['id'] = self.id if: data['serviceId'] = self.service_id data['links']['publish'] = url_for('main.publish_draft_service', draft_id=self.id) data['links']['complete'] = url_for('main.complete_draft_service', draft_id=self.id) data['links']['copy'] = url_for('main.copy_draft_service', draft_id=self.id) return data",True,self.service_id,self.service_id,0.6576943397521973 5624,"def encode_netloc(self): """"""Encodes the netloc part to an ASCII safe URL as bytes."""""" rv = self.ascii_host or '' if: rv = '[%s]' % rv port = self.port if port is not None: rv = '%s:%d' % (rv, port) auth = ':'.join(filter(None, [url_quote(self.raw_username or '', 'utf-8','strict', '/:%'), url_quote(self.raw_password or '', 'utf-8','strict', '/:%')])) if auth: rv = '%s@%s' % (auth, rv) return to_native(rv)",False,':' in rv,rv,0.6613795161247253 5625,"def encode_netloc(self): """"""Encodes the netloc part to an ASCII safe URL as bytes."""""" rv = self.ascii_host or '' if ':' in rv: rv = '[%s]' % rv port = self.port if: rv = '%s:%d' % (rv, port) auth = ':'.join(filter(None, [url_quote(self.raw_username or '', 'utf-8','strict', '/:%'), url_quote(self.raw_password or '', 'utf-8','strict', '/:%')])) if auth: rv = '%s@%s' % (auth, rv) return to_native(rv)",False,port is not None,port,0.6548203229904175 5626,"def encode_netloc(self): """"""Encodes the netloc part to an ASCII safe URL as bytes."""""" rv = self.ascii_host or '' if ':' in rv: rv = '[%s]' % rv port = self.port if port is not None: rv = '%s:%d' % (rv, port) auth = ':'.join(filter(None, [url_quote(self.raw_username or '', 'utf-8','strict', '/:%'), url_quote(self.raw_password or '', 'utf-8','strict', '/:%')])) if: rv = '%s@%s' % (auth, rv) return to_native(rv)",True,auth,auth,0.6771259903907776 5627,"def FMScore(x, p, d): if: return 4 elif x <= d[p][0.5]: return 3 elif x <= d[p][0.75]: return 2 else: return 1",False,x <= d[p][0.25],x <= d[p][0.4],0.6493056416511536 5628,"def FMScore(x, p, d): if x <= d[p][0.25]: return 4 elif: return 3 elif x <= d[p][0.75]: return 2 else: return 1",False,x <= d[p][0.5],x <= d[p][0.2],0.646380603313446 5629,"def FMScore(x, p, d): if x <= d[p][0.25]: return 4 elif x <= d[p][0.5]: return 3 elif: return 2 else: return 1",False,x <= d[p][0.75],x <= d[p][0.6],0.6460409760475159 5630,"def __init__(self, fileobj): self._fileobj = fileobj for m in ['close', 'tell','seek', 'write', 'name']: if: setattr(self, m, getattr(fileobj, m))",True,"hasattr(fileobj, m)","hasattr(fileobj, m)",0.6529086828231812 5631,"def get_metadata(dist): if: metadata = dist.get_metadata('METADATA') elif dist.has_metadata('PKG-INFO'): metadata = dist.get_metadata('PKG-INFO') else: logger.warning('No metadata found in %s', display_path(dist.location)) metadata = '' feed_parser = FeedParser() feed_parser.feed(metadata) return feed_parser.close()",False,"isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata('METADATA')",dist.has_metadata('METADATA'),0.6451722383499146 5632,"def get_metadata(dist): if isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata('METADATA'): metadata = dist.get_metadata('METADATA') elif: metadata = dist.get_metadata('PKG-INFO') else: logger.warning('No metadata found in %s', display_path(dist.location)) metadata = '' feed_parser = FeedParser() feed_parser.feed(metadata) return feed_parser.close()",True,dist.has_metadata('PKG-INFO'),dist.has_metadata('PKG-INFO'),0.6439063549041748 5633,"def create_user(self, username, fullname, args): """"""Creates a new user Args: username: A unique username for the user fullname: The alias or full name of the user args: additional options (see REST API documentation) Returns: (data, etag): new user object and eTag """""" params = {'username': username, 'fullname': fullname} params.update(args) data, etag, resp = self._post_url('domain-types/user_config/collections/all', data=params) if: return (data, etag) resp.raise_for_status()",True,resp.status_code == 200,resp.status_code == 200,0.6536195278167725 5634,"def start_refreshing(self): self._refreshing_enabled = True if: self.cache.load() self._refresh_thread = threading.Thread(target=self._refresh_task) self._refresh_thread.start()",False,self._refresh_thread is None or not self._refresh_thread.is_alive(),self._refresh_enabled,0.6505850553512573 5635,"def post(self, request): """"""edit the site settings"""""" site = models.SiteSettings.objects.get() form = forms.RegistrationForm(request.POST, request.FILES, instance=site) if: data = {'form': form} return TemplateResponse(request,'settings/registration.html', data) site = form.save(request) data = {'form': forms.RegistrationForm(instance=site),'success': True} return TemplateResponse(request,'settings/registration.html', data)",True,not form.is_valid(),not form.is_valid(),0.646659255027771 5636,"def close(self): if: self._fp.close() if self._connection: self._connection.close() if not self.auto_close: io.IOBase.close(self)",True,not self.closed,not self.closed,0.659623384475708 5637,"def close(self): if not self.closed: self._fp.close() if: self._connection.close() if not self.auto_close: io.IOBase.close(self)",True,self._connection,self._connection,0.6678416728973389 5638,"def close(self): if not self.closed: self._fp.close() if self._connection: self._connection.close() if: io.IOBase.close(self)",False,not self.auto_close,not self._io_base,0.64827960729599 5639,"@register.filter(name='rating') def get_rating(book, user): """"""get the overall rating of a book"""""" if: return None return cache.get_or_set(f'book-rating-{book.parent_work.id}', lambda u, b: models.Review.objects.filter(book__parent_work__editions=b, rating__gt=0).aggregate(Avg('rating'))['rating__avg'] or 0, user, book, timeout=15552000)",False,not book.parent_work,not user,0.6516214609146118 5640,"def mini_progress(self, what, i, total): i = i + 1 pec = float(i) / float(total) * 100 sys.stdout.write('\rUpdating %s %d%%' % (what, pec)) sys.stdout.flush() if: sys.stdout.write('\n') sys.stdout.flush()",False,int(pec) == 100,what == 'no',0.6542561054229736 5641,"def __init__(self, value, *hints): from taucmdr.cli.commands.project.create import COMMAND as project_create_cmd from taucmdr.cli.commands.project.select import COMMAND as project_select_cmd from taucmdr.cli.commands.project.list import COMMAND as project_list_cmd if: hints = ('Use `%s` to create a new project configuration.' % project_create_cmd, 'Use `%s ` to select a project configuration.' % project_select_cmd, 'Use `%s` to see available project configurations.' % project_list_cmd) super().__init__(value, *hints)",False,not hints,not hints and (not project_create_cmd(value) or project_select_cmd(value),0.6724509000778198 5642,"def path_for_nlu_lang(self, lang) -> List[Text]: if: return self._nlu_files return [x for x in self._nlu_files if f'nlu/{lang}' in x or f'nlu-{lang}' in x]",False,len(self.nlu_config.keys()) < 2,lang == 'pre',0.6491036415100098 5643,"def __le__(self, other): if: return NotImplemented if len(self) > len(other): return False for elem in self: if elem not in other: return False return True",True,"not isinstance(other, Set)","not isinstance(other, Set)",0.6466336250305176 5644,"def __le__(self, other): if not isinstance(other, Set): return NotImplemented if: return False for elem in self: if elem not in other: return False return True",False,len(self) > len(other),self.size() > other.size(),0.6458858251571655 5645,"def __le__(self, other): if not isinstance(other, Set): return NotImplemented if len(self) > len(other): return False for elem in self: if: return False return True",False,elem not in other,elem > other,0.6584483981132507 5646,"def redirect(location, code=302, Response=None): """"""Returns a response object (a WSGI application) that, if called, redirects the client to the target location. Supported codes are 301, 302, 303, 305, and 307. 300 is not supported because it's not a real redirect and 304 because it's the answer for a request with a request with defined If-Modified-Since headers. .. versionadded:: 0.6 The location can now be a unicode string that is encoded using the :func:`iri_to_uri` function. .. versionadded:: 0.10 The class used for the Response object can now be passed in. :param location: the location the response should redirect to. :param code: the redirect status code. defaults to 302. :param class Response: a Response class to use when instantiating a response. The default is :class:`werkzeug.wrappers.Response` if unspecified. """""" if: from werkzeug.wrappers import Response display_location = escape(location) if isinstance(location, text_type): from werkzeug.urls import iri_to_uri location = iri_to_uri(location, safe_conversion=True) response = Response('\nRedirecting...\n

Redirecting...

\n

You should be redirected automatically to target URL: %s. If not click the link.' % (escape(location), display_location), code, mimetype='text/html') response.headers['Location'] = location return response",False,Response is None,response is None,0.6614083051681519 5647,"def redirect(location, code=302, Response=None): """"""Returns a response object (a WSGI application) that, if called, redirects the client to the target location. Supported codes are 301, 302, 303, 305, and 307. 300 is not supported because it's not a real redirect and 304 because it's the answer for a request with a request with defined If-Modified-Since headers. .. versionadded:: 0.6 The location can now be a unicode string that is encoded using the :func:`iri_to_uri` function. .. versionadded:: 0.10 The class used for the Response object can now be passed in. :param location: the location the response should redirect to. :param code: the redirect status code. defaults to 302. :param class Response: a Response class to use when instantiating a response. The default is :class:`werkzeug.wrappers.Response` if unspecified. """""" if Response is None: from werkzeug.wrappers import Response display_location = escape(location) if: from werkzeug.urls import iri_to_uri location = iri_to_uri(location, safe_conversion=True) response = Response('\nRedirecting...\n

Redirecting...

\n

You should be redirected automatically to target URL: %s. If not click the link.' % (escape(location), display_location), code, mimetype='text/html') response.headers['Location'] = location return response",False,"isinstance(location, text_type)","not isinstance(location, basestring)",0.6443129777908325 5648,"def __call__(self, parser, namespace, values, option_string=None): if: parser.print_help() else: CBHelpAction._show_man_page(self.klass.get_man_page_name()) parser.exit()",False,option_string == '-h',self.klass is None,0.6533405780792236 5649,"def read_header(self, static_header_size): """"""Return the AuxPow block header bytes"""""" start = self.cursor version = self._read_le_uint32() if: self.cursor = start self.cursor += static_header_size self.read_auxpow() header_end = self.cursor else: header_end = start + static_header_size self.cursor = start return self._read_nbytes(header_end - start)",False,version & self.VERSION_AUXPOW,version == 1,0.6510335206985474 5650,"def iter_fields(fields): """""" .. deprecated:: 1.6 Iterate over fields. The addition of :class:`~urllib3.fields.RequestField` makes this function obsolete. Instead, use :func:`iter_field_objects`, which returns :class:`~urllib3.fields.RequestField` objects. Supports list of (k, v) tuples and dicts. """""" if: return ((k, v) for k, v in six.iteritems(fields)) return ((k, v) for k, v in fields)",True,"isinstance(fields, dict)","isinstance(fields, dict)",0.6449024677276611 5651,"def getTagMap(self): if: return Set.getTagMap(self) else: return Set.getComponentTagMap(self)",True,self._tagSet,self._tagSet,0.6560609936714172 5652,"def write_unknown_genders_to_log(self, gender_results): unknown_gender_names = [name for name in gender_results if gender_results[name] == 'unknown'] if: logger.warning('Unknown gender names after trying in all caches and services: {0}'.format(str(unknown_gender_names)))",True,unknown_gender_names,unknown_gender_names,0.6542909741401672 5653,"def diffValue2txt(diff): last = 0 for d in sorted(diff2text.keys()): if: return diff2text[last] last = d return None",False,diff >= last and diff < d,diff2text[d] == diff2text[last],0.6497024297714233 5654,"def gen_packages_items(): if: for info in parse_require_file(require_fpath): parts = [info['package']] if with_version and'version' in info: parts.extend(info['version']) if not sys.version.startswith('3.4'): platform_deps = info.get('platform_deps') if platform_deps is not None: parts.append(';' + platform_deps) item = ''.join(parts) yield item",True,exists(require_fpath),exists(require_fpath),0.6486630439758301 5655,"def gen_packages_items(): if exists(require_fpath): for info in parse_require_file(require_fpath): parts = [info['package']] if: parts.extend(info['version']) if not sys.version.startswith('3.4'): platform_deps = info.get('platform_deps') if platform_deps is not None: parts.append(';' + platform_deps) item = ''.join(parts) yield item",True,with_version and 'version' in info,with_version and 'version' in info,0.6492561101913452 5656,"def gen_packages_items(): if exists(require_fpath): for info in parse_require_file(require_fpath): parts = [info['package']] if with_version and'version' in info: parts.extend(info['version']) if: platform_deps = info.get('platform_deps') if platform_deps is not None: parts.append(';' + platform_deps) item = ''.join(parts) yield item",True,not sys.version.startswith('3.4'),not sys.version.startswith('3.4'),0.641803503036499 5657,"def gen_packages_items(): if exists(require_fpath): for info in parse_require_file(require_fpath): parts = [info['package']] if with_version and'version' in info: parts.extend(info['version']) if not sys.version.startswith('3.4'): platform_deps = info.get('platform_deps') if: parts.append(';' + platform_deps) item = ''.join(parts) yield item",True,platform_deps is not None,platform_deps is not None,0.6494563817977905 5658,"def add_layer(layer, group_node): nonlocal layers results = LayerConverter.layer_to_QgsLayer(layer, input_file, context=context, fallback_crs=QgsCoordinateReferenceSystem('EPSG:4326')) for res in results: node = group_node.addLayer(res) if: node.setItemVisibilityChecked(False) if len(node.children()) > 10: node.setExpanded(False) layers.append(res)",False,not layer.visible,node.isChecked(),0.6598972678184509 5659,"def add_layer(layer, group_node): nonlocal layers results = LayerConverter.layer_to_QgsLayer(layer, input_file, context=context, fallback_crs=QgsCoordinateReferenceSystem('EPSG:4326')) for res in results: node = group_node.addLayer(res) if not layer.visible: node.setItemVisibilityChecked(False) if: node.setExpanded(False) layers.append(res)",False,len(node.children()) > 10,not node.expanded,0.6445847749710083 5660,"def get_save_image(self, to_gray=True, imageName='treasure2.png', lang='en'): a_path = 'imgs_eng_1024x768' if: a_path = 'imgs_chs_1600x900' imgPath = os.path.join('.','resource', a_path, 'img', imageName) src = cv2.imread(imgPath) if to_gray: image = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY) else: image = np.array(src) return image",False,'en' != lang,os.path.exists('imgs_chs_1600x900'),0.6639295816421509 5661,"def get_save_image(self, to_gray=True, imageName='treasure2.png', lang='en'): a_path = 'imgs_eng_1024x768' if 'en'!= lang: a_path = 'imgs_chs_1600x900' imgPath = os.path.join('.','resource', a_path, 'img', imageName) src = cv2.imread(imgPath) if: image = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY) else: image = np.array(src) return image",True,to_gray,to_gray,0.660420298576355 5662,"@classmethod def from_y2k(cls, value): """""" Revert substitution of year 2000. :param value: A datetime.datetime object which is 2000 years in the future. :return: A new extended_datetime or datetime.datetime object. """""" year = value.year - 2000 if: new_cls = datetime else: new_cls = cls return new_cls(year, value.month, value.day, value.hour, value.minute, value.second, value.microsecond, value.tzinfo)",False,year > 0,type(value) == datetime,0.6648703217506409 5663,"def test_samplingpath_oddcase(): x0 = np.array([0.19833663, 0.49931288, 0.62744967, 0.47308545, 0.48858042, 0.49025685, 0.48481497, 0.49068977, 0.49562456, 0.51102634]) v0 = np.array([-0.00053468, -0.00106889, 0.0012165, 0.00737494, 0.00152363, -0.00164736, 0.00371493, 0.02057758, -0.00260349, 0.01266826]) L0 = 0.0 path = SamplingPath(x0, v0, L0) for i in range(-10, 11): if: path.extrapolate(i)",False,i != 0,i % 2 != 0,0.6611816883087158 5664,"def to_rgb(self, x): assert self.image_key =='segmentation' if: self.register_buffer('colorize', torch.randn(3, x.shape[1], 1, 1).to(x)) x = F.conv2d(x, weight=self.colorize) x = 2.0 * (x - x.min()) / (x.max() - x.min()) - 1.0 return x",True,"not hasattr(self, 'colorize')","not hasattr(self, 'colorize')",0.6446002125740051 5665,"def normalise_histogram(self, histo, norm): for i, n in enumerate(norm): if: histo[i] = float(histo[i]) / n return histo",True,n > 0,n > 0,0.665541410446167 5666,"@read_mode.setter def read_mode(self, value): if: self.control_behavior.pop('circuit_hand_read_mode', None) else: self.control_behavior['circuit_hand_read_mode'] = ReadMode(value)",True,value is None,value is None,0.6546565294265747 5667,"def create_url_alias(config, node_id, url_alias): json = {'path': [{'value': '/node/' + str(node_id)}], 'alias': [{'value': url_alias}]} headers = {'Content-Type': 'application/json'} response = issue_request(config, 'POST', config['host'] + '/entity/path_alias?_format=json', headers, json, None) if: logging.error(""URL alias '%s' not created for node %s, HTTP response code was %s (it might already exist)."", url_alias, config['host'] + '/node/' + str(node_id), response.status_code)",False,response.status_code != 201,response.status_code != 200,0.6568155288696289 5668,"def ArrayParse(field): """"""parse an array descriptor"""""" arridx = field.find('[') if: return (field, -1) alen = int(field[arridx + 1:-1]) fieldname = field[:arridx] return (fieldname, alen)",False,arridx == -1,arridx < 0,0.6553341150283813 5669,"def _filter_columns(array, filters, combination='and'): """"""Return indices of recarray entries that match criteria. Parameters ---------- array : numpy array with columns Array in which data will be filtered. filters : list of criteria See _filter_column. combination : string {'and', 'or'}, default='and' String describing the combination operator. Possible values are ""and"" and ""or"". """""" if: fcomb = np.logical_and mask = np.ones(array.shape[0], dtype=bool) elif combination == 'or': fcomb = np.logical_or mask = np.zeros(array.shape[0], dtype=bool) else: raise ValueError(f'Combination mode not known: {combination}') for column in filters: mask = fcomb(mask, _filter_column(array, column, filters[column])) return mask",True,combination == 'and',combination == 'and',0.6495420932769775 5670,"def _filter_columns(array, filters, combination='and'): """"""Return indices of recarray entries that match criteria. Parameters ---------- array : numpy array with columns Array in which data will be filtered. filters : list of criteria See _filter_column. combination : string {'and', 'or'}, default='and' String describing the combination operator. Possible values are ""and"" and ""or"". """""" if combination == 'and': fcomb = np.logical_and mask = np.ones(array.shape[0], dtype=bool) elif: fcomb = np.logical_or mask = np.zeros(array.shape[0], dtype=bool) else: raise ValueError(f'Combination mode not known: {combination}') for column in filters: mask = fcomb(mask, _filter_column(array, column, filters[column])) return mask",True,combination == 'or',combination == 'or',0.6487377882003784 5671,"def emit_op_gc_store_indexed(self, op, arglocs, regalloc, fcond): value_loc, base_loc, index_loc, size_loc, ofs_loc = arglocs assert index_loc.is_core_reg() if: self.mc.ADD_ri(r.ip.value, index_loc.value, imm=ofs_loc.value) index_loc = r.ip scale = get_scale(size_loc.value) self._write_to_mem(value_loc, base_loc, index_loc, imm(scale), fcond) return fcond",False,ofs_loc.value > 0,ofs_loc.value,0.6517384052276611 5672,"def get_seq_len(self, seq_len): x = torch.ceil(seq_len.to(dtype=torch.float) / self.hop_length).to(dtype=torch.int) if: x = torch.ceil(x.float() / self.frame_splicing).to(dtype=torch.int) return x",False,self.frame_splicing > 1,self.frame_splicing > 0.0,0.6486848592758179 5673,"def _do_request(self): server_wrapper = self.server.server_wrapper server_wrapper.requests.put(MockServerRequest(self)) handler = server_wrapper.matchers.get(self.path) if: handler.write(self) else: self.send_error(404)",True,handler,handler,0.6740772724151611 5674,"def search(self, term=None, key=None): """"""Search all episodes in season, returns a list of matching Episode instances. >>> indexer_api = Tvdb() >>> indexer_api['scrubs'][1].search('first day') [] >>> See Show.search documentation for further information on search """""" results = [] for ep in itervalues(self): searchresult = ep.search(term=term, key=key) if: results.append(searchresult) return results",False,searchresult is not None,searchresult,0.6506540179252625 5675,"@Jsbx_e.setter def Jsbx_e(self, Jsbx_e): if: self.__idxsbx_e = J_to_idx_slack(Jsbx_e) else: raise Exception('Invalid Jsbx_e value. Exiting.')",False,"isinstance(Jsbx_e, np.ndarray)","isinstance(Jsbx_e, int)",0.6477773189544678 5676,"def _create_examples(self, lines, set_type): examples = [] for i, line in enumerate(lines): if: continue guid = '%s-%s' % (set_type, line[0]) text_a = line[1] text_b = line[2] label = None if set_type == 'test' else line[-1] examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples",True,i == 0,i == 0,0.6710289716720581 5677,"def accept(self, visitor: ParseTreeVisitor): if: return visitor.visitNumliteral(self) else: return visitor.visitChildren(self)",True,"hasattr(visitor, 'visitNumliteral')","hasattr(visitor, 'visitNumliteral')",0.6436711549758911 5678,"@property def lightprobetexture(self): if: return self._entity_data.get('lightprobetexture') return ''",True,'lightprobetexture' in self._entity_data,'lightprobetexture' in self._entity_data,0.6503129005432129 5679,"def _ensure_file_existence(self): if: return True open(self._filepath, 'w').close() return False",False,os.path.isfile(self._filepath),not os.path.exists(self._filepath),0.6442433595657349 5680,"def get_out_size(self): if: return self.encode_proj.out_features return self.config.hidden_size",False,self.encode_proj,self.encode_proj is not None,0.6581853628158569 5681,"def _make_data_config(self, raw_data, data): if: data['json'] = {} data['name'] = '.Settings'",False,'json' not in data,raw_data is None,0.6566569805145264 5682,"def _interpolation(kwargs): interpolation = kwargs.pop('resample', Image.BILINEAR) if: return random.choice(interpolation) else: return interpolation",True,"isinstance(interpolation, (list, tuple))","isinstance(interpolation, (list, tuple))",0.6434451341629028 5683,"def article_modifier(r: Reference, a: Article) -> Article: assert r.article == a.identifier if: return a nonlocal times_called times_called = times_called + 1 return attr.evolve(a, title='Modified')",False,a.identifier != '2:1/A',attr.evolve_only,0.6493325233459473 5684,"def _check_input_size(self) -> bool: """""" Check input size. :return: ``True`` if the input must be limited """""" if: return False return self._maxchar <= len(self._input_string)",False,self._maxchar == 0,self._maxchar is None or self._maxchar >= len(self._input_string),0.6556239128112793 5685,"def __init__(self, config): super(Embeddings, self).__init__() self.word_embeddings = nn.Embedding(config.vocab_size, config.dim, padding_idx=0) self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.dim) if: create_sinusoidal_embeddings(n_pos=config.max_position_embeddings, dim=config.dim, out=self.position_embeddings.weight) self.LayerNorm = nn.LayerNorm(config.dim, eps=1e-12) self.dropout = nn.Dropout(config.dropout)",False,config.sinusoidal_pos_embds,self.with_sinusoidal,0.6476320028305054 5686,"@property def schema_version(self) -> int: """"""The version of the database schema."""""" if: raise DatabaseStateConflictError('The schema version is not known.') return self._schema_version",True,self._schema_version is None,self._schema_version is None,0.6497884392738342 5687,"def always_iterable(obj, base_type=(str, bytes)): """"""If *obj* is iterable, return an iterator over its items:: >>> obj = (1, 2, 3) >>> list(always_iterable(obj)) [1, 2, 3] If *obj* is not iterable, return a one-item iterable containing *obj*:: >>> obj = 1 >>> list(always_iterable(obj)) [1] If *obj* is ``None``, return an empty iterable: >>> obj = None >>> list(always_iterable(None)) [] By default, binary and text strings are not considered iterable:: >>> obj = 'foo' >>> list(always_iterable(obj)) ['foo'] If *base_type* is set, objects for which ``isinstance(obj, base_type)`` returns ``True`` won't be considered iterable. >>> obj = {'a': 1} >>> list(always_iterable(obj)) # Iterate over the dict's keys ['a'] >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit [{'a': 1}] Set *base_type* to ``None`` to avoid any special handling and treat objects Python considers iterable as iterable: >>> obj = 'foo' >>> list(always_iterable(obj, base_type=None)) ['f', 'o', 'o'] """""" if: return iter(()) if base_type is not None and isinstance(obj, base_type): return iter((obj,)) try: return iter(obj) except TypeError: return iter((obj,))",True,obj is None,obj is None,0.6526253819465637 5688,"def always_iterable(obj, base_type=(str, bytes)): """"""If *obj* is iterable, return an iterator over its items:: >>> obj = (1, 2, 3) >>> list(always_iterable(obj)) [1, 2, 3] If *obj* is not iterable, return a one-item iterable containing *obj*:: >>> obj = 1 >>> list(always_iterable(obj)) [1] If *obj* is ``None``, return an empty iterable: >>> obj = None >>> list(always_iterable(None)) [] By default, binary and text strings are not considered iterable:: >>> obj = 'foo' >>> list(always_iterable(obj)) ['foo'] If *base_type* is set, objects for which ``isinstance(obj, base_type)`` returns ``True`` won't be considered iterable. >>> obj = {'a': 1} >>> list(always_iterable(obj)) # Iterate over the dict's keys ['a'] >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit [{'a': 1}] Set *base_type* to ``None`` to avoid any special handling and treat objects Python considers iterable as iterable: >>> obj = 'foo' >>> list(always_iterable(obj, base_type=None)) ['f', 'o', 'o'] """""" if obj is None: return iter(()) if: return iter((obj,)) try: return iter(obj) except TypeError: return iter((obj,))",False,"base_type is not None and isinstance(obj, base_type)",base_type == 'binary',0.6414353251457214 5689,"def freeze_to(self, n: int) -> None: """"""Freeze layers up to layer group `n`."""""" for g in self.layer_groups[:n]: for l in g: if: requires_grad(l, False) for g in self.layer_groups[n:]: requires_grad(g, True) self.create_opt(defaults.lr)",False,"not self.train_bn or not isinstance(l, bn_types)",l.freeze,0.6444768905639648 5690,"def Item(self, vtIndex=defaultNamedNotOptArg): """"""DISPID_VALUE"""""" ret = self._oleobj_.InvokeTypes(0, LCID, 1, (9, 0), ((12, 1),), vtIndex) if: ret = Dispatch(ret, u'Item', '{797B09FC-E40A-42FA-9045-30B351D937F1}') return ret",True,ret is not None,ret is not None,0.6542524695396423 5691,"def new_tenant_docker_client(self, name, tenant): if: self.extra_files += self.MT_DOCKER_CLIENT_FILES logger.info('creating docker client connected to tenant:'+ tenant) self._docker_compose_cmd('run -d --name=%s_%s mender-client' % (self.name, name), env={'SERVER_URL': 'https://%s' % self.get_mender_gateway(), 'TENANT_TOKEN': '%s' % tenant}) time.sleep(5)",True,not self.MT_DOCKER_CLIENT_FILES[0] in self.docker_compose_files,not self.MT_DOCKER_CLIENT_FILES[0] in self.docker_compose_files,0.653084397315979 5692,"def fn_recursive_set_mem_eff(module: nn.Layer): if: module.set_use_memory_efficient_attention_xformers(valid, attention_op) for child in module.children(): fn_recursive_set_mem_eff(child)",False,"hasattr(module, 'set_use_memory_efficient_attention_xformers')",attention_op is not None,0.6412526369094849 5693,"def flush(self): if: return self._fp.flush()",True,"self._fp is not None and hasattr(self._fp, 'flush')","self._fp is not None and hasattr(self._fp, 'flush')",0.6415743231773376 5694,"def ptr_access_expr(self, baseexpr, fldname, baseexpr_is_const=False): fldname = self.c_struct_field_name(fldname) if: return '%s->%s' % (baseexpr, fldname) return 'RPyField(%s, %s)' % (baseexpr, fldname)",True,baseexpr_is_const,baseexpr_is_const,0.6510125994682312 5695,"def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='IafTauRefCell'): super(IafTauRefCell, self).exportAttributes(outfile, level, already_processed, namespace_, name_='IafTauRefCell') if: already_processed.add('refract') outfile.write(' refract=%s' % (quote_attrib(self.refract),))",True,self.refract is not None and 'refract' not in already_processed,self.refract is not None and 'refract' not in already_processed,0.6456304788589478 5696,"def seek(self, n): if: n = self._file.size elif n < 0: raise ValueError('Seeking negative') self.pos = n",False,n > self._file.size,n == -1,0.6541809439659119 5697,"def seek(self, n): if n > self._file.size: n = self._file.size elif: raise ValueError('Seeking negative') self.pos = n",True,n < 0,n < 0,0.6692816019058228 5698,"@lru_cache(maxsize=4) def get_damping_aniso_perp(self, om, data_name=None): if: ks = self.camb.ks else: ks = self.data_dict[data_name]['ks_input'] return np.exp(-np.outer(ks ** 2, 1.0 - self.mu ** 2) * self.get_pregen('sigma', om) / 2.0)",True,data_name is None,data_name is None,0.6511521935462952 5699,"def __len__(self) -> int: if: log.debug('Try not to use len(reader) as it takes extra time') self._wordCount = fileCountLines(self._filename, newline=b'\n\n') - self._leadingLinesCount return self._wordCount",True,self._wordCount is None,self._wordCount is None,0.6566838622093201 5700,"@classmethod def get_classes(cls, classes=None): """"""Get class names of current dataset. Args: classes (Sequence[str] | str | None): If classes is None, use default CLASSES defined by builtin dataset. If classes is a string, take it as a file name. The file contains the name of classes where each line contains one class name. If classes is a tuple or list, override the CLASSES defined by the dataset. Return: list[str]: A list of class names. """""" if: return cls.CLASSES if isinstance(classes, str): class_names = mmcv.list_from_file(classes) elif isinstance(classes, (tuple, list)): class_names = classes else: raise ValueError(f'Unsupported type {type(classes)} of classes.') return class_names",True,classes is None,classes is None,0.6613746881484985 5701,"@classmethod def get_classes(cls, classes=None): """"""Get class names of current dataset. Args: classes (Sequence[str] | str | None): If classes is None, use default CLASSES defined by builtin dataset. If classes is a string, take it as a file name. The file contains the name of classes where each line contains one class name. If classes is a tuple or list, override the CLASSES defined by the dataset. Return: list[str]: A list of class names. """""" if classes is None: return cls.CLASSES if: class_names = mmcv.list_from_file(classes) elif isinstance(classes, (tuple, list)): class_names = classes else: raise ValueError(f'Unsupported type {type(classes)} of classes.') return class_names",True,"isinstance(classes, str)","isinstance(classes, str)",0.6486306190490723 5702,"@classmethod def get_classes(cls, classes=None): """"""Get class names of current dataset. Args: classes (Sequence[str] | str | None): If classes is None, use default CLASSES defined by builtin dataset. If classes is a string, take it as a file name. The file contains the name of classes where each line contains one class name. If classes is a tuple or list, override the CLASSES defined by the dataset. Return: list[str]: A list of class names. """""" if classes is None: return cls.CLASSES if isinstance(classes, str): class_names = mmcv.list_from_file(classes) elif: class_names = classes else: raise ValueError(f'Unsupported type {type(classes)} of classes.') return class_names",True,"isinstance(classes, (tuple, list))","isinstance(classes, (tuple, list))",0.6470937728881836 5703,"def split_half_float_double(tensors): dtypes = ['torch.cuda.HalfTensor', 'torch.cuda.FloatTensor', 'torch.cuda.DoubleTensor', 'torch.cuda.BFloat16Tensor'] buckets = [] for i, dtype in enumerate(dtypes): bucket = [t for t in tensors if t.type() == dtype] if: buckets.append(bucket) return buckets",False,bucket,i >= 0,0.6896851062774658 5704,"def hook_batchnormNd(m, x, y): num_ele = y.numel() flops = 2 * num_ele if: flops += 2 * num_ele return int(flops)",False,m.affine,m.dim() > 2,0.6528567671775818 5705,"def __call__(self, request): """""" Base implementation to ease the transition to Django 3.2 Prior versions of Django used a method called 'process_request'. In this base implementation we maintain that behaviour by calling the older interface from the new one. The remaining implementation follows the django documentation for 3.2+ """""" if: response = self.process_request(request) if response is not None: return response response = self.get_response(request) if hasattr(self, 'process_response'): self.process_response(request, response) return response",True,"hasattr(self, 'process_request')","hasattr(self, 'process_request')",0.6471612453460693 5706,"def __call__(self, request): """""" Base implementation to ease the transition to Django 3.2 Prior versions of Django used a method called 'process_request'. In this base implementation we maintain that behaviour by calling the older interface from the new one. The remaining implementation follows the django documentation for 3.2+ """""" if hasattr(self, 'process_request'): response = self.process_request(request) if response is not None: return response response = self.get_response(request) if: self.process_response(request, response) return response",False,"hasattr(self, 'process_response')",response is not None,0.64457106590271 5707,"def __call__(self, request): """""" Base implementation to ease the transition to Django 3.2 Prior versions of Django used a method called 'process_request'. In this base implementation we maintain that behaviour by calling the older interface from the new one. The remaining implementation follows the django documentation for 3.2+ """""" if hasattr(self, 'process_request'): response = self.process_request(request) if: return response response = self.get_response(request) if hasattr(self, 'process_response'): self.process_response(request, response) return response",True,response is not None,response is not None,0.653111457824707 5708,"def handle_data(self, data): if: data = unicode(data, self.encoding, 'ignore') self.__builder.data(data)",False,"isinstance(data, type('')) and is_not_ascii(data)",self.encoding,0.6437779068946838 5709,"def __init__(self, stimuli_models, check_stimuli=True, fallback_model=None, **kwargs): super(StimulusDependentScanpathModel, self).__init__(**kwargs) self.stimuli_models = stimuli_models self.fallback_model = fallback_model if: self.check_stimuli()",True,check_stimuli,check_stimuli,0.653899073600769 5710,"def handle_testlist_gexp(self, gexp_node): if: return self.handle_genexp(gexp_node) return self.handle_testlist(gexp_node)",False,len(gexp_node.children) > 1 and gexp_node.children[1].type == syms.comp_for,self.mode == 'genexp',0.6459699869155884 5711,"def CodeInteger(self, value, num_bytes=4): """"""Construct a little endian string for the indicated value. Two and 4 byte integers are the only ones allowed. """""" assert num_bytes == 1 or num_bytes == 2 or num_bytes == 4 value = int(value) s = chr(value & 255) if: s += chr((value & 255 << 8) >> 8) if num_bytes == 4: s += chr((value & 255 << 16) >> 16) s += chr((value & 255 << 24) >> 24) assert len(s) == 4 return s",False,num_bytes >= 2,num_bytes == 2,0.6529914736747742 5712,"def CodeInteger(self, value, num_bytes=4): """"""Construct a little endian string for the indicated value. Two and 4 byte integers are the only ones allowed. """""" assert num_bytes == 1 or num_bytes == 2 or num_bytes == 4 value = int(value) s = chr(value & 255) if num_bytes >= 2: s += chr((value & 255 << 8) >> 8) if: s += chr((value & 255 << 16) >> 16) s += chr((value & 255 << 24) >> 24) assert len(s) == 4 return s",False,num_bytes == 4,num_bytes >= 4,0.6579334139823914 5713,"def check_position(self, callback=_print_warning): """"""Call callback with an error message if the position is outside the azimuth and elevation ranges. """""" if: callback('{name}: azimuth {azimuth} out of range {az_range}.'.format(name=self.name, azimuth=self.polar_position.azimuth, az_range=self.az_range)) if not self.el_range[0] <= self.polar_position.elevation <= self.el_range[1]: callback('{name}: elevation {elevation} out of range {el_range}.'.format(name=self.name, elevation=self.polar_position.elevation, el_range=self.el_range))",False,"not inside_angle_range(self.polar_position.azimuth, *self.az_range)",not self.az_range[0] <= self.polar_position.azimuth <= self.az_range[1],0.64583420753479 5714,"def check_position(self, callback=_print_warning): """"""Call callback with an error message if the position is outside the azimuth and elevation ranges. """""" if not inside_angle_range(self.polar_position.azimuth, *self.az_range): callback('{name}: azimuth {azimuth} out of range {az_range}.'.format(name=self.name, azimuth=self.polar_position.azimuth, az_range=self.az_range)) if: callback('{name}: elevation {elevation} out of range {el_range}.'.format(name=self.name, elevation=self.polar_position.elevation, el_range=self.el_range))",False,not self.el_range[0] <= self.polar_position.elevation <= self.el_range[1],"not inside_angle_range(self.polar_position.elevation, *self.el_range)",0.6489138603210449 5715,"def __init__(self, teacher_model): """"""Initialize duration calculator module. Args: teacher_model (e2e_tts_transformer.Transformer): Pretrained auto-regressive Transformer. """""" super(DurationCalculator, self).__init__() if: self.register_buffer('diag_head_idx', torch.tensor(-1)) elif isinstance(teacher_model, Tacotron2): pass else: raise ValueError('teacher model should be the instance of e2e_tts_transformer.Transformer or e2e_tts_tacotron2.Tacotron2.') self.teacher_model = teacher_model",False,"isinstance(teacher_model, Transformer)",teacher_model is None,0.6485357284545898 5716,"def __init__(self, teacher_model): """"""Initialize duration calculator module. Args: teacher_model (e2e_tts_transformer.Transformer): Pretrained auto-regressive Transformer. """""" super(DurationCalculator, self).__init__() if isinstance(teacher_model, Transformer): self.register_buffer('diag_head_idx', torch.tensor(-1)) elif: pass else: raise ValueError('teacher model should be the instance of e2e_tts_transformer.Transformer or e2e_tts_tacotron2.Tacotron2.') self.teacher_model = teacher_model",False,"isinstance(teacher_model, Tacotron2)",teacher_model is e2e_tts_tacotron2.Tacotron2,0.6466804146766663 5717,"def __eq__(self, other): if: return False if not isinstance(other, type(self)): other = type(self)(other) return dict(((k.lower(), v) for k, v in self.itermerged())) == dict(((k.lower(), v) for k, v in other.itermerged()))",True,"not isinstance(other, Mapping) and (not hasattr(other, 'keys'))","not isinstance(other, Mapping) and (not hasattr(other, 'keys'))",0.6434697508811951 5718,"def __eq__(self, other): if not isinstance(other, Mapping) and (not hasattr(other, 'keys')): return False if: other = type(self)(other) return dict(((k.lower(), v) for k, v in self.itermerged())) == dict(((k.lower(), v) for k, v in other.itermerged()))",True,"not isinstance(other, type(self))","not isinstance(other, type(self))",0.6476306915283203 5719,"@property def numViews(self): """"""gets the property value for numViews"""""" if: self.__init() return self._numViews",True,self._numViews is None,self._numViews is None,0.6616941094398499 5720,"def file_name(prefix, lang): fname = prefix if: fname += '.{lang}'.format(lang=lang) return fname",False,lang is not None,not os.path.exists(fname),0.658955991268158 5721,"def forward(self, x, residual=None): if: residual = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) out += residual out = self.relu(out) return out",True,residual is None,residual is None,0.6576036810874939 5722,"def __set__(self, instance, value): if: instance.__dict__[self.storage_name] = value else: msg = f'{self.storage_name} must be > 0' raise ValueError(msg)",True,value > 0,value > 0,0.6644260883331299 5723,"def enterRule(self, listener: ParseTreeListener): if: listener.enterElementValuePair(self)",True,"hasattr(listener, 'enterElementValuePair')","hasattr(listener, 'enterElementValuePair')",0.64424729347229 5724,"@property def scale_reward(self): if: return self._scale_reward(self._iteration_pl) elif isinstance(self._scale_reward, Number): return self._scale_reward raise ValueError('scale_reward must be either callable or scalar')",False,callable(self._scale_reward),"isinstance(self._scale_reward, Callable)",0.6463238000869751 5725,"@property def scale_reward(self): if callable(self._scale_reward): return self._scale_reward(self._iteration_pl) elif: return self._scale_reward raise ValueError('scale_reward must be either callable or scalar')",False,"isinstance(self._scale_reward, Number)","isinstance(self._scale_reward, np.ndarray)",0.6449823379516602 5726,"def update(self, *inputs): """"""Update the metric."""""" if: raise ValueError('PSNR need 2 inputs (y_pred, y), but got {}'.format(len(inputs))) y_pred = self._convert_data(inputs[0]) y = self._convert_data(inputs[1]) res = self.compute_sr_metric(y_pred, y) n = y_pred.shape[0] self.data_num += n self.sum = self.sum + res * n self.pfm = self.sum / self.data_num",True,len(inputs) != 2,len(inputs) != 2,0.6554058194160461 5727,"def setUp(self): """""" setup volume, mount volume and initialize necessary variables which is used in tests """""" self.get_super_method(self,'setUp')() self.all_mounts_procs = [] self.io_validation_complete = False g.log.info('Starting to Setup Volume and Mount Volume') ret = self.setup_volume_and_mount_volume(mounts=self.mounts) if: raise ExecutionError('Failed to Setup_Volume and Mount_Volume') g.log.info('Successful in Setup Volume and Mount Volume') ret = is_shd_daemonized(self.servers) if not ret: raise ExecutionError('Self Heal Daemon process was still holding parent process.') g.log.info('Self Heal Daemon processes are online') self.glustershd = '/var/lib/glusterd/glustershd/glustershd-server.vol'",True,not ret,not ret,0.661597490310669 5728,"def setUp(self): """""" setup volume, mount volume and initialize necessary variables which is used in tests """""" self.get_super_method(self,'setUp')() self.all_mounts_procs = [] self.io_validation_complete = False g.log.info('Starting to Setup Volume and Mount Volume') ret = self.setup_volume_and_mount_volume(mounts=self.mounts) if not ret: raise ExecutionError('Failed to Setup_Volume and Mount_Volume') g.log.info('Successful in Setup Volume and Mount Volume') ret = is_shd_daemonized(self.servers) if: raise ExecutionError('Self Heal Daemon process was still holding parent process.') g.log.info('Self Heal Daemon processes are online') self.glustershd = '/var/lib/glusterd/glustershd/glustershd-server.vol'",True,not ret,not ret,0.6607242822647095 5729,"def _translate(env, target=None, source=SCons.Environment._null, *args, **kw): """""" Function for `Translate()` pseudo-builder """""" if: target = [] pot = env.POTUpdate(None, source, *args, **kw) po = env.POUpdate(target, pot, *args, **kw) return po",True,target is None,target is None,0.6590232849121094 5730,"def validate(self): if: raise TProtocol.TProtocolException(message='Required field column_family is unset!') return",True,self.column_family is None,self.column_family is None,0.6474336385726929 5731,"def display_note_modal(self, id): """""" Open the edit modal for the given ID. """""" if: showInfo('Cannot edit that note: It is currently opened in the reader.') return if not state.note_editor_shown: add_tmp_hook('user-note-closed', self.refresh_all) dialog = NoteEditor(self, id, add_only=True, read_note_id=None)",False,Reader.note_id == id,self.read_note_id is None,0.6503233909606934 5732,"def display_note_modal(self, id): """""" Open the edit modal for the given ID. """""" if Reader.note_id == id: showInfo('Cannot edit that note: It is currently opened in the reader.') return if: add_tmp_hook('user-note-closed', self.refresh_all) dialog = NoteEditor(self, id, add_only=True, read_note_id=None)",False,not state.note_editor_shown,id > 0 and id < self.note_list.count(),0.6453596353530884 5733,"def __init__(self, in_channels=None, learned=False, mode='bilinear'): super().__init__() self.with_conv = learned self.mode = mode if: print(f'Note: {self.__class__.__name} uses learned downsampling and will ignore the fixed {mode} mode') raise NotImplementedError() assert in_channels is not None self.conv = torch.nn.Conv2d(in_channels, in_channels, kernel_size=4, stride=2, padding=1)",False,self.with_conv,learned,0.6526448726654053 5734,"def value_in(self, skin: CustomSkin) -> Optional[str]: """""" The value of a compound setting is the shared value of the components. """""" value = self.NOTSHARED for setting in self.settings: setting_value = setting.value_in(skin) or setting.default_value_in(skin) if: value = setting_value elif value!= setting_value: value = self.NOTSHARED break return value",False,value is self.NOTSHARED,value is None,0.6527364253997803 5735,"def value_in(self, skin: CustomSkin) -> Optional[str]: """""" The value of a compound setting is the shared value of the components. """""" value = self.NOTSHARED for setting in self.settings: setting_value = setting.value_in(skin) or setting.default_value_in(skin) if value is self.NOTSHARED: value = setting_value elif: value = self.NOTSHARED break return value",False,value != setting_value,value == setting_value,0.6509641408920288 5736,"def sendHtml(self, html): """""" Send HTML page HTTP response. """""" responseBody = html.encode('utf8') response = 'HTTP/1.1 200 OK\r\n' if: response += 'Server: {}\r\n'.format(self.factory.server) response += 'Content-Type: text/html; charset=UTF-8\r\n' response += 'Content-Length: {}\r\n'.format(len(responseBody)) response += '\r\n' self.sendData(response.encode('utf8')) self.sendData(responseBody)",False,self.factory.server is not None and self.factory.server != '',self.factory.server,0.6447383165359497 5737,"def startTLS(self, ctx, normal=True): """""" @see: L{ITLSTransport.startTLS} """""" if: self._tlsWaiting = _TLSDelayed([], ctx, normal) return False startTLS(self, ctx, normal, FileDescriptor)",False,self.dataBuffer or self._tempDataBuffer,self._tlsWaiting is None,0.642928957939148 5738,"def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth, stale=False): """"""Constructs a WWW-Authenticate header for Digest authentication."""""" if: raise ValueError(""Unsupported value for qop: '%s'"" % qop) if algorithm not in valid_algorithms: raise ValueError(""Unsupported value for algorithm: '%s'"" % algorithm) if nonce is None: nonce = synthesize_nonce(realm, key) s = 'Digest realm=""%s"", nonce=""%s"", algorithm=""%s"", qop=""%s""' % (realm, nonce, algorithm, qop) if stale: s += ', stale=""true""' return s",True,qop not in valid_qops,qop not in valid_qops,0.6546849012374878 5739,"def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth, stale=False): """"""Constructs a WWW-Authenticate header for Digest authentication."""""" if qop not in valid_qops: raise ValueError(""Unsupported value for qop: '%s'"" % qop) if: raise ValueError(""Unsupported value for algorithm: '%s'"" % algorithm) if nonce is None: nonce = synthesize_nonce(realm, key) s = 'Digest realm=""%s"", nonce=""%s"", algorithm=""%s"", qop=""%s""' % (realm, nonce, algorithm, qop) if stale: s += ', stale=""true""' return s",False,algorithm not in valid_algorithms,algorithm not in valid_algorithm_auth,0.6516063213348389 5740,"def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth, stale=False): """"""Constructs a WWW-Authenticate header for Digest authentication."""""" if qop not in valid_qops: raise ValueError(""Unsupported value for qop: '%s'"" % qop) if algorithm not in valid_algorithms: raise ValueError(""Unsupported value for algorithm: '%s'"" % algorithm) if: nonce = synthesize_nonce(realm, key) s = 'Digest realm=""%s"", nonce=""%s"", algorithm=""%s"", qop=""%s""' % (realm, nonce, algorithm, qop) if stale: s += ', stale=""true""' return s",True,nonce is None,nonce is None,0.6633433103561401 5741,"def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth, stale=False): """"""Constructs a WWW-Authenticate header for Digest authentication."""""" if qop not in valid_qops: raise ValueError(""Unsupported value for qop: '%s'"" % qop) if algorithm not in valid_algorithms: raise ValueError(""Unsupported value for algorithm: '%s'"" % algorithm) if nonce is None: nonce = synthesize_nonce(realm, key) s = 'Digest realm=""%s"", nonce=""%s"", algorithm=""%s"", qop=""%s""' % (realm, nonce, algorithm, qop) if: s += ', stale=""true""' return s",False,stale,stale and qop_auth,0.674422025680542 5742,"def path(self, env, dir=None, target=None, source=None): if: return () if self.argument is not _null: return self.path_function(env, dir, target, source, self.argument) else: return self.path_function(env, dir, target, source)",False,not self.path_function,self.path_function is None,0.6485538482666016 5743,"def path(self, env, dir=None, target=None, source=None): if not self.path_function: return () if: return self.path_function(env, dir, target, source, self.argument) else: return self.path_function(env, dir, target, source)",False,self.argument is not _null,self.argument,0.6457017660140991 5744,"def optimizeconst(f): def new_func(self, node, frame, **kwargs): if: new_node = self.optimizer.visit(node, frame.eval_ctx) if new_node!= node: return self.visit(new_node, frame) return f(self, node, frame, **kwargs) return update_wrapper(new_func, f)",False,self.optimized and (not frame.eval_ctx.volatile),self.optimizer is not None,0.6431903839111328 5745,"def optimizeconst(f): def new_func(self, node, frame, **kwargs): if self.optimized and (not frame.eval_ctx.volatile): new_node = self.optimizer.visit(node, frame.eval_ctx) if: return self.visit(new_node, frame) return f(self, node, frame, **kwargs) return update_wrapper(new_func, f)",False,new_node != node,new_node is not frame,0.6586867570877075 5746,"def prepend_root(path): if: return path else: return change_root(root, path)",True,root is None or not os.path.isabs(path),root is None or not os.path.isabs(path),0.6412145495414734 5747,"def get_vocab(filename): word_vocab = {} content = read_file_line_by_line(filename) print(' Getting vocabulory from file: ""%s"".........' % filename) for line in content: line = line.strip() line = add_space_punc(line) line = remove_multi_space(line) tokens = split_string(line) for word in tokens: word = word.strip() if: word_vocab[word] = 1 else: word_vocab[word] += 1 return word_vocab",True,word not in word_vocab,word not in word_vocab,0.6497247219085693 5748,"def log_epoch_stats(self, cur_epoch): """""" Log the stats of the current epoch. Args: cur_epoch (int): the number of current epoch. """""" if: self.finalize_metrics(log=False) stats = {'_type': '{}_epoch'.format(self.mode), 'cur_epoch': '{}'.format(cur_epoch + 1),'mode': self.mode,'map': self.full_map, 'gpu_mem': '{:.2f}G'.format(misc.gpu_mem_usage()), 'RAM': '{:.2f}/{:.2f}G'.format(*misc.cpu_mem_usage())} logging.log_json_stats(stats)",True,"self.mode in ['val', 'test']","self.mode in ['val', 'test']",0.6444146633148193 5749,"@metadata() def codec_quality(self): """"""Return codec quality. AIFF-C can contain lossless or lossy compression. We'll set the codec quality based on the codec ID. """""" if: return self._stream.compression_mode.lower() if self._stream.codec_id is not None: if self._stream.codec_id not in ['raw', 'twos','swot', 'fl32', 'fl64', 'in24', 'in32']: return 'lossy' return 'lossless'",True,self._stream.compression_mode is not None,self._stream.compression_mode is not None,0.6484376192092896 5750,"@metadata() def codec_quality(self): """"""Return codec quality. AIFF-C can contain lossless or lossy compression. We'll set the codec quality based on the codec ID. """""" if self._stream.compression_mode is not None: return self._stream.compression_mode.lower() if: if self._stream.codec_id not in ['raw', 'twos','swot', 'fl32', 'fl64', 'in24', 'in32']: return 'lossy' return 'lossless'",True,self._stream.codec_id is not None,self._stream.codec_id is not None,0.6489461660385132 5751,"@metadata() def codec_quality(self): """"""Return codec quality. AIFF-C can contain lossless or lossy compression. We'll set the codec quality based on the codec ID. """""" if self._stream.compression_mode is not None: return self._stream.compression_mode.lower() if self._stream.codec_id is not None: if: return 'lossy' return 'lossless'",False,"self._stream.codec_id not in ['raw', 'twos', 'swot', 'fl32', 'fl64', 'in24', 'in32']",self._stream.codec_id == self._stream.codec_id,0.6467671394348145 5752,"def _merge(*lists) -> list: result: List[Optional[T]] = [] linearizations = DependencyList(*lists) while True: if: return result for head in linearizations.heads: if head and head not in linearizations.tails: result.append(head) linearizations.remove(head) break else: raise ValueError('Cannot compute linearization of the class inheritance hierarchy')",False,linearizations.exhausted,not linearizations,0.6512418985366821 5753,"def _merge(*lists) -> list: result: List[Optional[T]] = [] linearizations = DependencyList(*lists) while True: if linearizations.exhausted: return result for head in linearizations.heads: if: result.append(head) linearizations.remove(head) break else: raise ValueError('Cannot compute linearization of the class inheritance hierarchy')",False,head and head not in linearizations.tails,head in result,0.646652340888977 5754,"def __get__(self, obj: object, type: type=None) -> t.Any: if: return self with self.lock: return super().__get__(obj, type=type)",True,obj is None,obj is None,0.6609925031661987 5755,"def __init__(self, type_url=None, value=None): """"""GoogleProtobufAny - a model defined in Swagger"""""" self._type_url = None self._value = None self.discriminator = None if: self.type_url = type_url if value is not None: self.value = value",True,type_url is not None,type_url is not None,0.6544267535209656 5756,"def __init__(self, type_url=None, value=None): """"""GoogleProtobufAny - a model defined in Swagger"""""" self._type_url = None self._value = None self.discriminator = None if type_url is not None: self.type_url = type_url if: self.value = value",True,value is not None,value is not None,0.6553105115890503 5757,"def _isNewUpdate(self, version, current_updates_r, current_version): if: if version.getBranchString() in current_updates_r and current_updates_r[version.getBranchString()][0].compare(version) < 1: return False return True return False",False,current_version.compare(version) == 1,version.getBranchString() in current_versions,0.6465301513671875 5758,"def _isNewUpdate(self, version, current_updates_r, current_version): if current_version.compare(version) == 1: if: return False return True return False",False,version.getBranchString() in current_updates_r and current_updates_r[version.getBranchString()][0].compare(version) < 1,not current_updates_r.exists(),0.6456043720245361 5759,"def gds_validate_simple_patterns(self, patterns, target): found1 = True target = str(target) for patterns1 in patterns: found2 = False for patterns2 in patterns1: mo = re_.search(patterns2, target) if mo is not None and len(mo.group(0)) == len(target): found2 = True break if: found1 = False break return found1",True,not found2,not found2,0.6500401496887207 5760,"def gds_validate_simple_patterns(self, patterns, target): found1 = True target = str(target) for patterns1 in patterns: found2 = False for patterns2 in patterns1: mo = re_.search(patterns2, target) if: found2 = True break if not found2: found1 = False break return found1",False,mo is not None and len(mo.group(0)) == len(target),mo,0.6430091857910156 5761,"def set_config(config_id: str, config: Json, resotocore_uri: Optional[str]=None, psk: Optional[str]=None, verify: Union[str, bool, None]=None) -> str: resotocore_uri, psk, headers = default_args(resotocore_uri, psk) log.debug(f'Storing config {config_id}') r = requests.put(f'{resotocore_uri}/config/{config_id}', json=config, headers=headers, verify=verify) if: revision = r.headers.get('Resoto-Config-Revision', 'unknown') return revision raise RuntimeError(f""Error storing config {config_id}: {r.content.decode('utf-8')}"")",True,r.status_code == 200,r.status_code == 200,0.6585233807563782 5762,"def auto_pop(self, exc): if: self.preserved = True self._preserved_exc = exc else: self.pop(exc)",False,self.request.environ.get('flask._preserve_context') or (exc is not None and self.app.preserve_context_on_exception),self.request.environ.get('flask._preserve_context') or (self.app.preserve_context_on_exception and exc is not None),0.6475242376327515 5763,"def _append_additional_imports(self, additional_imports: Optional[List[str]]) -> None: if: additional_imports = [] for additional_import_string in additional_imports: new_import = Import.from_full_path(additional_import_string) self.imports.append(new_import)",True,additional_imports is None,additional_imports is None,0.6552847027778625 5764,"def raise_for_status(self): """"""Raises stored :class:`HTTPError`, if one occurred."""""" http_error_msg = '' if: http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url) elif 500 <= self.status_code < 600: http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url) if http_error_msg: raise HTTPError(http_error_msg, response=self)",True,400 <= self.status_code < 500,400 <= self.status_code < 500,0.6525332927703857 5765,"def raise_for_status(self): """"""Raises stored :class:`HTTPError`, if one occurred."""""" http_error_msg = '' if 400 <= self.status_code < 500: http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url) elif 500 <= self.status_code < 600: http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url) if: raise HTTPError(http_error_msg, response=self)",True,http_error_msg,http_error_msg,0.6601170301437378 5766,"def raise_for_status(self): """"""Raises stored :class:`HTTPError`, if one occurred."""""" http_error_msg = '' if 400 <= self.status_code < 500: http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url) elif: http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url) if http_error_msg: raise HTTPError(http_error_msg, response=self)",True,500 <= self.status_code < 600,500 <= self.status_code < 600,0.6515320539474487 5767,"def rollback(self): if: logger.error('Cannot roll back changes to %s, none were made', self.file) return False logger.debug('Rolling %s back to previous state', self.file) with open(self.file, 'wb') as fh: fh.writelines(self._saved_lines) return True",True,self._saved_lines is None,self._saved_lines is None,0.6478904485702515 5768,"def get_cache_key(self, name, filename=None): """"""Returns the unique hash key for this template name."""""" hash = sha1(name.encode('utf-8')) if: filename = '|' + filename if isinstance(filename, unicode): filename = filename.encode('utf-8') hash.update(filename) return hash.hexdigest()",True,filename is not None,filename is not None,0.6519659757614136 5769,"def get_cache_key(self, name, filename=None): """"""Returns the unique hash key for this template name."""""" hash = sha1(name.encode('utf-8')) if filename is not None: filename = '|' + filename if: filename = filename.encode('utf-8') hash.update(filename) return hash.hexdigest()",False,"isinstance(filename, unicode)","not isinstance(filename, bytes)",0.6450446844100952 5770,"@instrument_w_nvtx def __release_param(self, param: Parameter) -> None: if: debug_rank0(f'-release: {param.ds_summary()}') param.partition() self.__n_available_params -= param.ds_numel",False,param.ds_status == ZeroParamStatus.AVAILABLE and (not param.ds_active_sub_modules),param.ds_numel != 0,0.6455931663513184 5771,"def clear_memory(self): """""" Clears all memory that isn't an essential attribute for the calculator """""" attrs = list(vars(self).keys()) for attr in attrs: if: delattr(self, attr) return",False,"attr not in {'_nmax', '_lmax', '_rcut', '_alpha', '_derivative', '_stress', '_cutoff_function', 'weight_on', 'primitive'}",attr.endswith('_ calculator'),0.6498615145683289 5772,"def select_wait_for_socket(sock, read=False, write=False, timeout=None): if: raise RuntimeError('must specify at least one of read=True, write=True') rcheck = [] wcheck = [] if read: rcheck.append(sock) if write: wcheck.append(sock) fn = partial(select.select, rcheck, wcheck, wcheck) rready, wready, xready = _retry_on_intr(fn, timeout) return bool(rready or wready or xready)",True,not read and (not write),not read and (not write),0.6490919589996338 5773,"def select_wait_for_socket(sock, read=False, write=False, timeout=None): if not read and (not write): raise RuntimeError('must specify at least one of read=True, write=True') rcheck = [] wcheck = [] if: rcheck.append(sock) if write: wcheck.append(sock) fn = partial(select.select, rcheck, wcheck, wcheck) rready, wready, xready = _retry_on_intr(fn, timeout) return bool(rready or wready or xready)",True,read,read,0.6767613291740417 5774,"def select_wait_for_socket(sock, read=False, write=False, timeout=None): if not read and (not write): raise RuntimeError('must specify at least one of read=True, write=True') rcheck = [] wcheck = [] if read: rcheck.append(sock) if: wcheck.append(sock) fn = partial(select.select, rcheck, wcheck, wcheck) rready, wready, xready = _retry_on_intr(fn, timeout) return bool(rready or wready or xready)",True,write,write,0.6767053008079529 5775,"def _validate_record(self, record): """""" Checks that the record is valid before saving it. Args: record: `dict` Raises: InvalidRecord when required keys are missing from the record. """""" record_keys = set(record.keys()) if: raise InvalidRecord('Missing keys: {}'.format(REQUIRED_KEYS - record_keys)) if len(record['gt_boxes']) == 0: raise InvalidRecord('Record should have at least one `gt_boxes`') for gt_box in record['gt_boxes']: gt_keys = set(gt_box.keys()) if gt_keys!= REQUIRED_GT_KEYS: raise InvalidRecord('Missing gt boxes keys {}'.format(REQUIRED_GT_KEYS - gt_keys))",True,record_keys != REQUIRED_KEYS,record_keys != REQUIRED_KEYS,0.6579644680023193 5776,"def _validate_record(self, record): """""" Checks that the record is valid before saving it. Args: record: `dict` Raises: InvalidRecord when required keys are missing from the record. """""" record_keys = set(record.keys()) if record_keys!= REQUIRED_KEYS: raise InvalidRecord('Missing keys: {}'.format(REQUIRED_KEYS - record_keys)) if: raise InvalidRecord('Record should have at least one `gt_boxes`') for gt_box in record['gt_boxes']: gt_keys = set(gt_box.keys()) if gt_keys!= REQUIRED_GT_KEYS: raise InvalidRecord('Missing gt boxes keys {}'.format(REQUIRED_GT_KEYS - gt_keys))",False,len(record['gt_boxes']) == 0,'gt_boxes' not in record,0.6465702652931213 5777,"def _validate_record(self, record): """""" Checks that the record is valid before saving it. Args: record: `dict` Raises: InvalidRecord when required keys are missing from the record. """""" record_keys = set(record.keys()) if record_keys!= REQUIRED_KEYS: raise InvalidRecord('Missing keys: {}'.format(REQUIRED_KEYS - record_keys)) if len(record['gt_boxes']) == 0: raise InvalidRecord('Record should have at least one `gt_boxes`') for gt_box in record['gt_boxes']: gt_keys = set(gt_box.keys()) if: raise InvalidRecord('Missing gt boxes keys {}'.format(REQUIRED_GT_KEYS - gt_keys))",True,gt_keys != REQUIRED_GT_KEYS,gt_keys != REQUIRED_GT_KEYS,0.6555675268173218 5778,"@staticmethod def add_custom_versions(versions): """""" create custom versions strings """""" versions_dict = {} for tech, version in versions.items(): if: version = version.split('-')[0] if version.startswith('v'): version = version[1:] versions_dict[tech + '_numeric'] = version.split('+')[0] versions_dict[tech + '_short'] = '{}.{}'.format(*version.split('.')) return versions_dict",True,'-' in version,'-' in version,0.663262128829956 5779,"@staticmethod def add_custom_versions(versions): """""" create custom versions strings """""" versions_dict = {} for tech, version in versions.items(): if '-' in version: version = version.split('-')[0] if: version = version[1:] versions_dict[tech + '_numeric'] = version.split('+')[0] versions_dict[tech + '_short'] = '{}.{}'.format(*version.split('.')) return versions_dict",True,version.startswith('v'),version.startswith('v'),0.6427865624427795 5780,"def _create_fbank_matrix(self, f_central_mat, band_mat): """"""Returns fbank matrix to use for averaging the spectrum with the set of filter-banks. Arguments --------- f_central : Tensor Tensor gathering central frequencies of each filter. band : Tensor Tensor gathering the bands of each filter. smooth_factor: Tensor Smoothing factor of the gaussian filter. It can be used to employ sharper or flatter filters. """""" if: fbank_matrix = self._triangular_filters(self.all_freqs_mat, f_central_mat, band_mat) elif self.filter_shape =='rectangular': fbank_matrix = self._rectangular_filters(self.all_freqs_mat, f_central_mat, band_mat) else: fbank_matrix = self._gaussian_filters(self.all_freqs_mat, f_central_mat, band_mat) return fbank_matrix",True,self.filter_shape == 'triangular',self.filter_shape == 'triangular',0.6467019319534302 5781,"def _create_fbank_matrix(self, f_central_mat, band_mat): """"""Returns fbank matrix to use for averaging the spectrum with the set of filter-banks. Arguments --------- f_central : Tensor Tensor gathering central frequencies of each filter. band : Tensor Tensor gathering the bands of each filter. smooth_factor: Tensor Smoothing factor of the gaussian filter. It can be used to employ sharper or flatter filters. """""" if self.filter_shape == 'triangular': fbank_matrix = self._triangular_filters(self.all_freqs_mat, f_central_mat, band_mat) elif: fbank_matrix = self._rectangular_filters(self.all_freqs_mat, f_central_mat, band_mat) else: fbank_matrix = self._gaussian_filters(self.all_freqs_mat, f_central_mat, band_mat) return fbank_matrix",True,self.filter_shape == 'rectangular',self.filter_shape == 'rectangular',0.6465821266174316 5782,"def predicate(self: ReactionPredicate, r: discord.Reaction, u: discord.abc.User) -> bool: if: return False try: self.result = not bool(self.YES_OR_NO_EMOJIS.index(r.emoji)) except ValueError: return False else: return True",False,"not same_context(r, u)","not self.check_r(r, u)",0.6470916867256165 5783,"def orig_pickReward(): hScale = 100 if: return hScale * heightTarget elif reachDist < 0.04 and objPos[2] > self.objHeight + 0.005: return hScale * min(heightTarget, objPos[2]) else: return 0",True,self.pickCompleted and (not objDropped()),self.pickCompleted and (not objDropped()),0.6451133489608765 5784,"def orig_pickReward(): hScale = 100 if self.pickCompleted and (not objDropped()): return hScale * heightTarget elif: return hScale * min(heightTarget, objPos[2]) else: return 0",False,reachDist < 0.04 and objPos[2] > self.objHeight + 0.005,reachDist < 0.1 and objPos[2] > self.objHeight + 0.005,0.646020770072937 5785,"def __str__(self): s = self.__doc__ if: s = '%s: %s' % (s,''.join(self.args)) s = '%s.' % s return s",True,self.args,self.args,0.6607184410095215 5786,"def get_order(self, byte_str): if: return (-1, 1) first_char = byte_str[0] if first_char == 142 or 161 <= first_char <= 254: char_len = 2 elif first_char == 143: char_len = 3 else: char_len = 1 if len(byte_str) > 1: second_char = byte_str[1] if first_char == 164 and 161 <= second_char <= 243: return (second_char - 161, char_len) return (-1, char_len)",True,not byte_str,not byte_str,0.6532833576202393 5787,"def get_order(self, byte_str): if not byte_str: return (-1, 1) first_char = byte_str[0] if: char_len = 2 elif first_char == 143: char_len = 3 else: char_len = 1 if len(byte_str) > 1: second_char = byte_str[1] if first_char == 164 and 161 <= second_char <= 243: return (second_char - 161, char_len) return (-1, char_len)",True,first_char == 142 or 161 <= first_char <= 254,first_char == 142 or 161 <= first_char <= 254,0.6488200426101685 5788,"def get_order(self, byte_str): if not byte_str: return (-1, 1) first_char = byte_str[0] if first_char == 142 or 161 <= first_char <= 254: char_len = 2 elif first_char == 143: char_len = 3 else: char_len = 1 if: second_char = byte_str[1] if first_char == 164 and 161 <= second_char <= 243: return (second_char - 161, char_len) return (-1, char_len)",True,len(byte_str) > 1,len(byte_str) > 1,0.647244930267334 5789,"def get_order(self, byte_str): if not byte_str: return (-1, 1) first_char = byte_str[0] if first_char == 142 or 161 <= first_char <= 254: char_len = 2 elif: char_len = 3 else: char_len = 1 if len(byte_str) > 1: second_char = byte_str[1] if first_char == 164 and 161 <= second_char <= 243: return (second_char - 161, char_len) return (-1, char_len)",True,first_char == 143,first_char == 143,0.6541718244552612 5790,"def get_order(self, byte_str): if not byte_str: return (-1, 1) first_char = byte_str[0] if first_char == 142 or 161 <= first_char <= 254: char_len = 2 elif first_char == 143: char_len = 3 else: char_len = 1 if len(byte_str) > 1: second_char = byte_str[1] if: return (second_char - 161, char_len) return (-1, char_len)",True,first_char == 164 and 161 <= second_char <= 243,first_char == 164 and 161 <= second_char <= 243,0.6476236581802368 5791,"def get_scope(self): if: self.scope = self._create_scope() return self.scope",False,self.scope is None,not self.scope,0.6516574025154114 5792,"def set_onclose(self, onclose: Optional[Union['_events.MenuAction', Callable[['Menu'], Any], CallableNoArgsType]]) -> 'Menu': """""" Set ``onclose`` callback. Callback can only receive 1 argument maximum (if not ``None``), if so, the Menu instance is provided: .. code-block:: python onclose(menu) onclose() .. note:: This is applied only to the base Menu (not the currently displayed, stored in ``_current`` pointer); for such behaviour apply to :py:meth:`pygame_menu.menu.Menu.get_current` object. :param onclose: Onclose callback, it can be a function, a pygame-menu event, or None :return: Self reference """""" assert callable(onclose) or _events.is_event(onclose) or onclose is None, 'onclose must be a MenuAction (event), callable (function-type), or None' if: onclose = None self._onclose = onclose return self",False,onclose == _events.NONE,onclose is None,0.6505111455917358 5793,"def __get_comment_pages(self, fileobj, info): page = OggPage(fileobj) while info.serial!= page.serial or not page.packets[0].startswith(b'OpusTags'): page = OggPage(fileobj) pages = [page] while not (pages[-1].complete or len(pages[-1].packets) > 1): page = OggPage(fileobj) if: pages.append(page) return pages",False,page.serial == pages[0].serial,page.serial != info.serial,0.6456701755523682 5794,"def set_epoch(self, epoch): super().set_epoch(epoch) if: return self._cur_epoch = epoch rng = np.random.RandomState([42, self.seed % 2 ** 32, self._cur_epoch]) self._cur_indices = plasma_utils.PlasmaArray(rng.choice(len(self.dataset), self.actual_size, replace=self.replace, p=None if self.weights is None else self.weights.array))",False,epoch == self._cur_epoch,self._cur_epoch < 0,0.6560690999031067 5795,"@private def log_disk_info(self, sys_disks): number_of_disks = len(sys_disks) if: log_info = {ok: {ik: iv for ik, iv in ov.items() if ik in ('lunid','serial')} for ok, ov in sys_disks.items()} self.logger.info('Found disks: %r', log_info) else: self.logger.info('Found %d disks', number_of_disks) return number_of_disks",False,number_of_disks <= 25,number_of_disks == 0,0.654829740524292 5796,"def orphannotif(mac_seqnum, mac_srcextendedaddr): if: raise ValueError('Invalid MAC sequence number') elif mac_srcextendedaddr < 0 or mac_srcextendedaddr.bit_length() > 64: raise ValueError('Invalid extended source MAC address') forged_pkt = Dot15d4FCS(fcf_frametype=3, fcf_security=0, fcf_pending=0, fcf_ackreq=0, fcf_panidcompress=False, fcf_destaddrmode=2, fcf_framever=0, fcf_srcaddrmode=3, seqnum=mac_seqnum) / Dot15d4Cmd(dest_panid=65535, dest_addr=65535, src_panid=65535, src_addr=mac_srcextendedaddr, cmd_id=6) return forged_pkt",False,mac_seqnum < 0 or mac_seqnum > 255,mac_seqnum < 0 or mac_seqnum.bit_length() > 32,0.6472730040550232 5797,"def orphannotif(mac_seqnum, mac_srcextendedaddr): if mac_seqnum < 0 or mac_seqnum > 255: raise ValueError('Invalid MAC sequence number') elif: raise ValueError('Invalid extended source MAC address') forged_pkt = Dot15d4FCS(fcf_frametype=3, fcf_security=0, fcf_pending=0, fcf_ackreq=0, fcf_panidcompress=False, fcf_destaddrmode=2, fcf_framever=0, fcf_srcaddrmode=3, seqnum=mac_seqnum) / Dot15d4Cmd(dest_panid=65535, dest_addr=65535, src_panid=65535, src_addr=mac_srcextendedaddr, cmd_id=6) return forged_pkt",False,mac_srcextendedaddr < 0 or mac_srcextendedaddr.bit_length() > 64,mac_seqnum >= 255,0.6449936628341675 5798,"def test_error_message_control_character(): """""" Make sure that no actual illegal control characters are included in the scraper error message, only their hex representation. The error message may be printed into an XML file, and XML files do not allow most control characters. """""" scraper = TextEncodingScraper(filename='tests/data/text_plain/invalid__control_character.txt', mimetype='text/plain', params={'charset': 'UTF-8'}) scraper.scrape_file() assert not partial_message_included('\x1f', scraper.errors()) character = ""'\\x1f'"" if: character = ""''\\x1f'"" assert partial_message_included('Illegal character %s in position 4' % character, scraper.errors())",False,six.PY2,scraper.dialect.lower() == 'C',0.6533843278884888 5799,"def get(self, key, default=None, type=None): for d in self.dicts: if: if type is not None: try: return type(d[key]) except ValueError: continue return d[key] return default",True,key in d,key in d,0.6607194542884827 5800,"def get(self, key, default=None, type=None): for d in self.dicts: if key in d: if: try: return type(d[key]) except ValueError: continue return d[key] return default",True,type is not None,type is not None,0.6523356437683105 5801,"def _check(self, elem, size): if: raise ReaderError(""can't read requested 0x%x bytes at 0x%x"" % (size, self.current()))",False,not elem or len(elem) != size,size < 0,0.648056149482727 5802,"def forward(self, x): output = inplace_abn(x, self.weight, self.bias, self.running_mean, self.running_var, self.training, self.momentum, self.eps, self.act_name, self.act_param) if: output = output[0] return output",True,"isinstance(output, tuple)","isinstance(output, tuple)",0.6446313858032227 5803,"def _init_weights(self, m): if: trunc_normal_(m.weight) if isinstance(m, nn.Linear) and m.bias is not None: zeros_(m.bias) elif isinstance(m, nn.LayerNorm): zeros_(m.bias) ones_(m.weight)",True,"isinstance(m, nn.Linear)","isinstance(m, nn.Linear)",0.653522253036499 5804,"def _init_weights(self, m): if isinstance(m, nn.Linear): trunc_normal_(m.weight) if: zeros_(m.bias) elif isinstance(m, nn.LayerNorm): zeros_(m.bias) ones_(m.weight)",False,"isinstance(m, nn.Linear) and m.bias is not None",m.bias is not None,0.6479496359825134 5805,"def _init_weights(self, m): if isinstance(m, nn.Linear): trunc_normal_(m.weight) if isinstance(m, nn.Linear) and m.bias is not None: zeros_(m.bias) elif: zeros_(m.bias) ones_(m.weight)",True,"isinstance(m, nn.LayerNorm)","isinstance(m, nn.LayerNorm)",0.647905707359314 5806,"def mast_query_from_file_list(files=[], os_open=True): """""" Generate a MAST query on datasets in a list. """""" if: files = glob.glob('*raw.fits') if len(files) == 0: print('No `files` specified.') return False datasets = np.unique([file[:6] + '*' for file in files]).tolist() URL = 'http://archive.stsci.edu/hst/search.php?action=Search&' URL +='sci_data_set_name=' + ','.join(datasets) if os_open: os.system('open ""{0}""'.format(URL)) return URL",False,len(files) == 0,files is None,0.6535266041755676 5807,"def mast_query_from_file_list(files=[], os_open=True): """""" Generate a MAST query on datasets in a list. """""" if len(files) == 0: files = glob.glob('*raw.fits') if: print('No `files` specified.') return False datasets = np.unique([file[:6] + '*' for file in files]).tolist() URL = 'http://archive.stsci.edu/hst/search.php?action=Search&' URL +='sci_data_set_name=' + ','.join(datasets) if os_open: os.system('open ""{0}""'.format(URL)) return URL",True,len(files) == 0,len(files) == 0,0.6505923271179199 5808,"def mast_query_from_file_list(files=[], os_open=True): """""" Generate a MAST query on datasets in a list. """""" if len(files) == 0: files = glob.glob('*raw.fits') if len(files) == 0: print('No `files` specified.') return False datasets = np.unique([file[:6] + '*' for file in files]).tolist() URL = 'http://archive.stsci.edu/hst/search.php?action=Search&' URL +='sci_data_set_name=' + ','.join(datasets) if: os.system('open ""{0}""'.format(URL)) return URL",True,os_open,os_open,0.6615483164787292 5809,"def __init__(self, reader, output_dir, split='data'): """""" Args: reader: output_dir: Directory to save the resulting tfrecords. split: Split being save, which is used as a filename for the resulting file. """""" super(ObjectDetectionWriter, self).__init__() if: raise ValueError('Saver needs a valid ObjectDetectionReader subclass') self._reader = reader self._output_dir = output_dir self._split = split",True,"not isinstance(reader, ObjectDetectionReader)","not isinstance(reader, ObjectDetectionReader)",0.6475815773010254 5810,"def update(self, new_val): """"""Update the estimate. Parameters ---------- new_val: float new observated value of estimated quantity. """""" if: self._value = new_val else: self._value = self._gamma * self._value + (1.0 - self._gamma) * new_val",False,self._value is None,self._gamma is None,0.6493908166885376 5811,"def _recursive_tagfind(self, ltag, rtag, start_index, block): while 1: i = block.find(rtag, start_index) if: return -1 j = block.find(ltag, start_index) if j > i or j == -1: return i + len(rtag) j = block.find('>', j) start_index = self._recursive_tagfind(ltag, rtag, j + 1, block) if start_index == -1: return -1",False,i == -1,i < 0 or i > len(ltag) or i > -1,0.6663340330123901 5812,"def _recursive_tagfind(self, ltag, rtag, start_index, block): while 1: i = block.find(rtag, start_index) if i == -1: return -1 j = block.find(ltag, start_index) if: return i + len(rtag) j = block.find('>', j) start_index = self._recursive_tagfind(ltag, rtag, j + 1, block) if start_index == -1: return -1",False,j > i or j == -1,j == -1,0.6553361415863037 5813,"def _recursive_tagfind(self, ltag, rtag, start_index, block): while 1: i = block.find(rtag, start_index) if i == -1: return -1 j = block.find(ltag, start_index) if j > i or j == -1: return i + len(rtag) j = block.find('>', j) start_index = self._recursive_tagfind(ltag, rtag, j + 1, block) if: return -1",True,start_index == -1,start_index == -1,0.6556721925735474 5814,"def f(x): b = B() c = C() b.n() if: a = b else: a = c a.m()",False,x,c.n(),0.6674495935440063 5815,"@property def action_space(self): """"""See class definition."""""" if: return Discrete(2) else: return Box(low=-1, high=1, shape=(1,), dtype=np.float32)",True,self.discrete,self.discrete,0.6536769270896912 5816,"def EncSerializer(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if: return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0",True,o != 0,o != 0,0.6660686731338501 5817,"def __cast(self, other): if: return other.data else: return other",False,"isinstance(other, UserList)","issubclass(other.__class__, Binary)",0.6497035622596741 5818,"@staticmethod def _filter_in_token(query, app_id, redirect_url, permissions): if: query = query.where(models.Token.app_id == app_id) if redirect_url is not None: query = query.where(models.Token.redirect_url == redirect_url) if permissions is not None: query = query.where(models.Token.permissions.contains(permissions)) return query",True,app_id is not None,app_id is not None,0.6526281237602234 5819,"@staticmethod def _filter_in_token(query, app_id, redirect_url, permissions): if app_id is not None: query = query.where(models.Token.app_id == app_id) if: query = query.where(models.Token.redirect_url == redirect_url) if permissions is not None: query = query.where(models.Token.permissions.contains(permissions)) return query",True,redirect_url is not None,redirect_url is not None,0.6521701812744141 5820,"@staticmethod def _filter_in_token(query, app_id, redirect_url, permissions): if app_id is not None: query = query.where(models.Token.app_id == app_id) if redirect_url is not None: query = query.where(models.Token.redirect_url == redirect_url) if: query = query.where(models.Token.permissions.contains(permissions)) return query",True,permissions is not None,permissions is not None,0.651727557182312 5821,"def step(self, action) -> Tuple[DictOfListsObservations, Sequence, SeqBools, SeqBools, Sequence[Dict]]: action = action[0] obs, rew, terminated, truncated, info = self.env.step(action) if: obs, info['reset_info'] = self.env.reset() return (self._obs(obs), [rew], [terminated], [truncated], [info])",False,terminated | truncated,terminated or truncated,0.6645276546478271 5822,"def get_ubr_pvm(self): vlans = self.cli('show cable l2-vpn dot1q-vc-map') pvm = {} for line in vlans.split('\n'): match = self.rx_vlan_ubr.search(line) if: port = match.group('port') vlan_id = int(match.group('vlan_id')) if port not in pvm: pvm[port] = ['%s' % vlan_id] else: pvm[port] += ['%s' % vlan_id] return pvm",True,match,match,0.6635193824768066 5823,"def get_ubr_pvm(self): vlans = self.cli('show cable l2-vpn dot1q-vc-map') pvm = {} for line in vlans.split('\n'): match = self.rx_vlan_ubr.search(line) if match: port = match.group('port') vlan_id = int(match.group('vlan_id')) if: pvm[port] = ['%s' % vlan_id] else: pvm[port] += ['%s' % vlan_id] return pvm",False,port not in pvm,vlan_id not in pvm,0.6587727665901184 5824,"def collect_points(pc): if: return pc[:NUM_POINT, :] else: return np.concatenate((np.array(pc), np.array(pc[0:NUM_POINT - pc.shape[0], :])), axis=0)",False,pc.shape[0] >= NUM_POINT,NUM_POINT < pc.shape[0],0.653990626335144 5825,"def groundtruth_lists(self, field): """"""Access list of groundtruth tensors. Args: field: a string key, options are fields.BoxListFields.{boxes,classes,masks,keypoints} Returns: a list of tensors holding groundtruth information (see also provide_groundtruth function below), with one entry for each image in the batch. Raises: RuntimeError: if the field has not been provided via provide_groundtruth. """""" if: raise RuntimeError('Groundtruth tensor %s has not been provided', field) return self._groundtruth_lists[field]",True,field not in self._groundtruth_lists,field not in self._groundtruth_lists,0.648036003112793 5826,"def get_env(env, default=None): res = os.environ.get(env, '').strip() if: res = default return res",True,"not res or res == '""""'","not res or res == '""""'",0.6505128145217896 5827,"def run(self): vers = get_versions(verbose=True) print('Version: %s' % vers['version']) print(' full-revisionid: %s' % vers.get('full-revisionid')) print(' dirty: %s' % vers.get('dirty')) print(' date: %s' % vers.get('date')) if: print(' error: %s' % vers['error'])",True,vers['error'],vers['error'],0.6548444628715515 5828,"def _filter_function(self, entry: '_BaseResult') -> bool: molecule: Molecule = Molecule.from_mapped_smiles(entry.cmiles, allow_undefined_stereo=True) total_charge = molecule.total_charge.m_as(unit.elementary_charge) if: return total_charge in self.charges_to_include return total_charge not in self.charges_to_exclude",False,self.charges_to_include is not None,self.include_on_elementary_charge,0.645899772644043 5829,"def handle_message(self, msg): """"""manage message of different type and in the context of path"""""" if: self._parse_template() self.msgs += [str(getattr(msg, field)) for field in self.msgargs]",False,self.header is None,self.template is None,0.6510940790176392 5830,"def _wait_exception(func, args=[], kwargs={}, timeout=15): count = 0 while True: try: func(*args, **kwargs) count += 1 time.sleep(1) except Exception: return if: raise TimeoutError",False,count > timeout,count >= timeout,0.6540802121162415 5831,"def fore(self, fore=None, light=False, on_stderr=False): if: fore = self._default_fore self._fore = fore if light: self._light |= WinStyle.BRIGHT else: self._light &= ~WinStyle.BRIGHT self.set_console(on_stderr=on_stderr)",True,fore is None,fore is None,0.6617927551269531 5832,"def fore(self, fore=None, light=False, on_stderr=False): if fore is None: fore = self._default_fore self._fore = fore if: self._light |= WinStyle.BRIGHT else: self._light &= ~WinStyle.BRIGHT self.set_console(on_stderr=on_stderr)",True,light,light,0.6762998700141907 5833,"def _dec_unique_name(self, unique_name): """""" Decompose unique app name into a list containing the app name, instance id and unique id. Return dummy entries if not an app unique name is passed in params. """""" if: parts = unique_name.split('#') if '/' in parts[-1]: parts[-1:] = parts[-1].split('/', 1) else: parts = unique_name.rsplit('-', 2) len_ = len(parts) if len_!= 3: parts[len_:3] = ['_'] * (3 - len_) return parts",False,'#' in unique_name,self.app_id,0.6541140675544739 5834,"def _dec_unique_name(self, unique_name): """""" Decompose unique app name into a list containing the app name, instance id and unique id. Return dummy entries if not an app unique name is passed in params. """""" if '#' in unique_name: parts = unique_name.split('#') if '/' in parts[-1]: parts[-1:] = parts[-1].split('/', 1) else: parts = unique_name.rsplit('-', 2) len_ = len(parts) if: parts[len_:3] = ['_'] * (3 - len_) return parts",False,len_ != 3,len_ > 3,0.6580660343170166 5835,"def _dec_unique_name(self, unique_name): """""" Decompose unique app name into a list containing the app name, instance id and unique id. Return dummy entries if not an app unique name is passed in params. """""" if '#' in unique_name: parts = unique_name.split('#') if: parts[-1:] = parts[-1].split('/', 1) else: parts = unique_name.rsplit('-', 2) len_ = len(parts) if len_!= 3: parts[len_:3] = ['_'] * (3 - len_) return parts",False,'/' in parts[-1],parts[-1].startswith('/'),0.6464525461196899 5836,"def connect(self): conn = self._new_conn() self._prepare_conn(conn) if: self.ssl_context = create_urllib3_context(ssl_version=resolve_ssl_version(None), cert_reqs=resolve_cert_reqs(None)) self.sock = ssl_wrap_socket(sock=conn, keyfile=self.key_file, certfile=self.cert_file, ssl_context=self.ssl_context)",True,self.ssl_context is None,self.ssl_context is None,0.6521728038787842 5837,"def __init__(self, step_dict: Dict[str, Step]): is_ordered = self._is_ordered(step_dict) if: self.parsed_steps = {step.name: step for step in self.ordered_steps(step_dict)} else: self.parsed_steps = {} for step_name, step in step_dict.items(): step.name = step_name self.parsed_steps[step_name] = step self._sanity_check()",False,not is_ordered,is_ordered,0.6557246446609497 5838,"def tearDown(self): g.log.info('Starting to Unmount Volume and Cleanup Volume') ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts) if: raise ExecutionError('Failed to umount the vol & cleanup Volume') g.log.info('Successful in umounting the volume and Cleanup') self.get_super_method(self, 'tearDown')()",True,not ret,not ret,0.660548746585846 5839,"def validate_update_args(**kwargs: Any) -> Any: if: assert kwargs['ResourceArn'] == topic.arn assert kwargs['Tags'] == [{'Key': 'foo', 'Value': 'bar'}]",False,kwargs['action'] == 'tag-resource',kwargs['ResourceArn'] == topic.api_version,0.6554265022277832 5840,"def cam_cls_seg(self, feat): """"""CAM feature classification."""""" if: feat = self.dropout(feat) output = self.cam_conv_seg(feat) return output",True,self.dropout is not None,self.dropout is not None,0.6502269506454468 5841,"def check_addrs(conn): for addr in (conn.laddr, conn.raddr): if: assert isinstance(addr, tuple), type(addr) if not addr: continue assert isinstance(addr.port, int), type(addr.port) assert 0 <= addr.port <= 65535, addr.port check_net_address(addr.ip, conn.family) elif conn.family == AF_UNIX: assert isinstance(addr, str), type(addr)",False,"conn.family in (AF_INET, AF_INET6)",conn.family == AF_INET,0.646514892578125 5842,"def check_addrs(conn): for addr in (conn.laddr, conn.raddr): if conn.family in (AF_INET, AF_INET6): assert isinstance(addr, tuple), type(addr) if: continue assert isinstance(addr.port, int), type(addr.port) assert 0 <= addr.port <= 65535, addr.port check_net_address(addr.ip, conn.family) elif conn.family == AF_UNIX: assert isinstance(addr, str), type(addr)",False,not addr,addr.ip == AF_INET,0.6643252372741699 5843,"def check_addrs(conn): for addr in (conn.laddr, conn.raddr): if conn.family in (AF_INET, AF_INET6): assert isinstance(addr, tuple), type(addr) if not addr: continue assert isinstance(addr.port, int), type(addr.port) assert 0 <= addr.port <= 65535, addr.port check_net_address(addr.ip, conn.family) elif: assert isinstance(addr, str), type(addr)",False,conn.family == AF_UNIX,conn.family == AF_INET6,0.6486409306526184 5844,"def enterRule(self, listener: ParseTreeListener): if: listener.enterIdentifier(self)",True,"hasattr(listener, 'enterIdentifier')","hasattr(listener, 'enterIdentifier')",0.6437613368034363 5845,"def _set_sharpness(self, value): self._check_camera_open() if: raise PiCameraValueError('Invalid sharpness value: %d (valid range -100..100)' % value) self._camera.control.params[mmal.MMAL_PARAMETER_SHARPNESS] = Fraction(value, 100)",True,not -100 <= value <= 100,not -100 <= value <= 100,0.6660147905349731 5846,"@staticmethod def run_apps(app_lists): """"""Run a set of Ryu applications A convenient method to load and instantiate apps. This blocks until all relevant apps stop. """""" app_mgr = AppManager.get_instance() app_mgr.load_apps(app_lists) contexts = app_mgr.create_contexts() services = app_mgr.instantiate_apps(**contexts) webapp = wsgi.start_service(app_mgr) if: services.append(hub.spawn(webapp)) try: hub.joinall(services) finally: app_mgr.close()",True,webapp,webapp,0.6745538711547852 5847,"def transpose(self, method): if: raise NotImplementedError('Only FLIP_LEFT_RIGHT and FLIP_TOP_BOTTOM implemented') flipped_polygons = [] for polygon in self.polygons: flipped_polygons.append(polygon.transpose(method)) return PolygonList(flipped_polygons, size=self.size)",False,"method not in (FLIP_LEFT_RIGHT, FLIP_TOP_BOTTOM)",self.shape[1] > 1,0.6470310091972351 5848,"def iter_multi_items(mapping): """"""Iterates over the items of a mapping yielding keys and values without dropping any from more complex structures. """""" if: for item in mapping.iteritems(multi=True): yield item elif isinstance(mapping, dict): for key, value in mapping.iteritems(): if isinstance(value, (tuple, list)): for value in value: yield (key, value) else: yield (key, value) else: for item in mapping: yield item",False,"isinstance(mapping, MultiDict)","isinstance(mapping, list)",0.6437191963195801 5849,"def iter_multi_items(mapping): """"""Iterates over the items of a mapping yielding keys and values without dropping any from more complex structures. """""" if isinstance(mapping, MultiDict): for item in mapping.iteritems(multi=True): yield item elif: for key, value in mapping.iteritems(): if isinstance(value, (tuple, list)): for value in value: yield (key, value) else: yield (key, value) else: for item in mapping: yield item",False,"isinstance(mapping, dict)","isinstance(mapping, Mapping)",0.6437994837760925 5850,"def iter_multi_items(mapping): """"""Iterates over the items of a mapping yielding keys and values without dropping any from more complex structures. """""" if isinstance(mapping, MultiDict): for item in mapping.iteritems(multi=True): yield item elif isinstance(mapping, dict): for key, value in mapping.iteritems(): if: for value in value: yield (key, value) else: yield (key, value) else: for item in mapping: yield item",False,"isinstance(value, (tuple, list))","isinstance(value, (list, tuple))",0.6420347094535828 5851,"def is_in_guilds(*guild_ids): def predicate(ctx): guild = ctx.guild if: return False return guild.id in guild_ids return commands.check(predicate)",True,guild is None,guild is None,0.6567844748497009 5852,"def set_current_enabled(self): if: return self.set_current_value(self.current_enable_value) self.current_enabled = True",False,self.current_enabled,not self.current_enabled,0.6471275091171265 5853,"def mouseMoveEvent(self, ev): y = ev.pos().y() prev = self.mouse_hover_result self.mouse_hover_result = self.item_from_y(y) if: self.update()",False,prev != self.mouse_hover_result,not self.item_hover_result or prev == self.item_from_y(y),0.6500537991523743 5854,"@property def ssl_version(self): if: return self.ssl_context.protocol else: return self._ssl_version",False,self._has_ssl_context,self._ssl_version is None,0.6490321755409241 5855,"def onleave1(widget, _) -> None: """""" Onleave event. """""" self.assertEqual(btn1, widget) test[1] = not test[1] if: print('Leave 1')",True,print_events,print_events,0.6531139016151428 5856,"def isNearTarget(self): if: return True elif self.desiredSpeed < 0.0 and location_helpers.getDistanceFromPoints3d(self.vehicle.location.global_relative_frame, self.pt1) < WAYPOINT_NEARNESS_THRESHOLD: return True return False",False,"self.desiredSpeed > 0.0 and location_helpers.getDistanceFromPoints3d(self.vehicle.location.global_relative_frame, self.pt2) < WAYPOINT_NEARNESS_THRESHOLD",self.desiredSpeed == -90.0 and self.pt1 is None,0.6503297686576843 5857,"def isNearTarget(self): if self.desiredSpeed > 0.0 and location_helpers.getDistanceFromPoints3d(self.vehicle.location.global_relative_frame, self.pt2) < WAYPOINT_NEARNESS_THRESHOLD: return True elif: return True return False",False,"self.desiredSpeed < 0.0 and location_helpers.getDistanceFromPoints3d(self.vehicle.location.global_relative_frame, self.pt1) < WAYPOINT_NEARNESS_THRESHOLD","self.desiredSpeed >= 0.0 and location_helpers.getDistanceFromPoints3d(self.vehicle.location.global_relative_frame, self.pt2) < WAYPOINT_NEARNESS_THRESHOLD",0.6499708890914917 5858,"def get_latest_parse_data_language(all_events): events = reversed(all_events) try: while True: event = next(events) if: return event['parse_data']['language'] except StopIteration: return None",False,event['event'] == 'user' and 'parse_data' in event and ('language' in event['parse_data']),event.get('parse_data'),0.6456453800201416 5859,"def download_link(self, obj): download_url = '' if: download_url = obj.archive.url elif obj.archive_link: download_url = obj.archive_link html = '{1}'.format(download_url, unicode(_(u'Download'))) return html",True,obj.archive,obj.archive,0.6601207852363586 5860,"def download_link(self, obj): download_url = '' if obj.archive: download_url = obj.archive.url elif: download_url = obj.archive_link html = '{1}'.format(download_url, unicode(_(u'Download'))) return html",True,obj.archive_link,obj.archive_link,0.6564463376998901 5861,"def delay_exponential(base, growth_factor, attempts): """"""Calculate time to sleep based on exponential function. The format is:: base * growth_factor ^ (attempts - 1) If ``base`` is set to 'rand' then a random number between 0 and 1 will be used as the base. Base must be greater than 0, otherwise a ValueError will be raised. """""" if: base = random.random() elif base <= 0: raise ValueError(""The 'base' param must be greater than 0, got: %s"" % base) time_to_sleep = base * growth_factor ** (attempts - 1) return time_to_sleep",True,base == 'rand',base == 'rand',0.6578077077865601 5862,"def delay_exponential(base, growth_factor, attempts): """"""Calculate time to sleep based on exponential function. The format is:: base * growth_factor ^ (attempts - 1) If ``base`` is set to 'rand' then a random number between 0 and 1 will be used as the base. Base must be greater than 0, otherwise a ValueError will be raised. """""" if base == 'rand': base = random.random() elif: raise ValueError(""The 'base' param must be greater than 0, got: %s"" % base) time_to_sleep = base * growth_factor ** (attempts - 1) return time_to_sleep",False,base <= 0,base < 0,0.6662132740020752 5863,"def _resize_masks(self, results): """"""Resize masks with ``results['scale']``"""""" for key in results.get('mask_fields', []): if: continue if self.keep_ratio: results[key] = results[key].rescale(results['scale']) else: results[key] = results[key].resize(results['img_shape'][:2])",False,results[key] is None,"key in ['scale', 'ratio']",0.648421049118042 5864,"def _resize_masks(self, results): """"""Resize masks with ``results['scale']``"""""" for key in results.get('mask_fields', []): if results[key] is None: continue if: results[key] = results[key].rescale(results['scale']) else: results[key] = results[key].resize(results['img_shape'][:2])",True,self.keep_ratio,self.keep_ratio,0.6518999338150024 5865,"def rebuild_auth(self, prepared_request, response): """""" When being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. """""" headers = prepared_request.headers url = prepared_request.url if: original_parsed = urlparse(response.request.url) redirect_parsed = urlparse(url) if original_parsed.hostname!= redirect_parsed.hostname: del headers['Authorization'] new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: prepared_request.prepare_auth(new_auth) return",True,'Authorization' in headers,'Authorization' in headers,0.6506553888320923 5866,"def rebuild_auth(self, prepared_request, response): """""" When being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. """""" headers = prepared_request.headers url = prepared_request.url if 'Authorization' in headers: original_parsed = urlparse(response.request.url) redirect_parsed = urlparse(url) if original_parsed.hostname!= redirect_parsed.hostname: del headers['Authorization'] new_auth = get_netrc_auth(url) if self.trust_env else None if: prepared_request.prepare_auth(new_auth) return",True,new_auth is not None,new_auth is not None,0.6487936973571777 5867,"def rebuild_auth(self, prepared_request, response): """""" When being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. """""" headers = prepared_request.headers url = prepared_request.url if 'Authorization' in headers: original_parsed = urlparse(response.request.url) redirect_parsed = urlparse(url) if: del headers['Authorization'] new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: prepared_request.prepare_auth(new_auth) return",True,original_parsed.hostname != redirect_parsed.hostname,original_parsed.hostname != redirect_parsed.hostname,0.6436768770217896 5868,"def forward(self, bsp_feature, reference_temporal_iou=None, tmin=None, tmax=None, tmin_score=None, tmax_score=None, video_meta=None, return_loss=True): """"""Define the computation performed at every call."""""" if: return self.forward_train(bsp_feature, reference_temporal_iou) return self.forward_test(bsp_feature, tmin, tmax, tmin_score, tmax_score, video_meta)",True,return_loss,return_loss,0.663973331451416 5869,"def _compute_loss_key_value(self, state: RunnerState, criterion): if: return super()._compute_loss_key_value(state, criterion) lam = state.input['mixup_lambda'] index = state.input['mixup_index'] pred = self._get_output(state.output, self.output_key) y_a = self._get_input(state.input, self.input_key) y_b = y_a[index] loss = lam * criterion(pred, y_a) + (1 - lam) * criterion(pred, y_b) return loss",False,not self.is_needed,not self.has_mixup_key,0.6497228145599365 5870,"def print_text(token_id: int): nonlocal last_token_id if: text = decode_token(last_token_id) stream_response = interface.GenerateStreamResponse(token=interface.Token(text=text, id=last_token_id)) output_queue.put_nowait(stream_response) last_token_id = token_id",False,last_token_id is not None,token_id != 0 and last_token_id != -1,0.6485183238983154 5871,"def Item(self, vtIndex=defaultNamedNotOptArg): """"""DISPID_VALUE"""""" ret = self._oleobj_.InvokeTypes(0, LCID, 1, (9, 0), ((12, 1),), vtIndex) if: ret = Dispatch(ret, u'Item', '{9739A029-5EB0-499A-9E72-967B98D55ECF}') return ret",True,ret is not None,ret is not None,0.6538131833076477 5872,"def enterClassDeclaration(self, ctx: JavaParserLabeled.ClassDeclarationContext): class_identifier = ctx.IDENTIFIER().getText() if: self.is_source_class = True else: self.is_source_class = False",False,class_identifier == self.source_class,class_identifier in self.source_classes,0.6544011831283569 5873,"def aps_verifykey(pkt): if: return config.row['aps_verifykey_extendedaddr'] = format(pkt[ZigbeeAppCommandPayload].address, '016x') config.row['aps_verifykey_keyhash'] = pkt[ZigbeeAppCommandPayload].key_hash.hex() if len(bytes(pkt[ZigbeeAppCommandPayload].payload))!= 0: config.row['error_msg'] = 'PE433: Unexpected payload' return",False,"not config.update_row('aps_verifykey_stdkeytype', pkt[ZigbeeAppCommandPayload].key_type, STANDARD_KEY_TYPES, 'PE423: Unknown standard key type')",not pkt,0.6520046591758728 5874,"def aps_verifykey(pkt): if not config.update_row('aps_verifykey_stdkeytype', pkt[ZigbeeAppCommandPayload].key_type, STANDARD_KEY_TYPES, 'PE423: Unknown standard key type'): return config.row['aps_verifykey_extendedaddr'] = format(pkt[ZigbeeAppCommandPayload].address, '016x') config.row['aps_verifykey_keyhash'] = pkt[ZigbeeAppCommandPayload].key_hash.hex() if: config.row['error_msg'] = 'PE433: Unexpected payload' return",True,len(bytes(pkt[ZigbeeAppCommandPayload].payload)) != 0,len(bytes(pkt[ZigbeeAppCommandPayload].payload)) != 0,0.6506317853927612 5875,"def enqueue_all(self): if: return notes = get_unqueued_notes_for_tag(self.tag_displayed) if len(notes) == 0: return dialog = PriorityDialog(self, None) if dialog.exec_(): prio = dialog.value for n in notes: update_priority_list(n.id, prio) self.parent.refresh_queue_list() self.refresh() tooltip(f'Added all with tag {self.tag_displayed}')",True,self.tag_displayed is None,self.tag_displayed is None,0.6467685103416443 5876,"def enqueue_all(self): if self.tag_displayed is None: return notes = get_unqueued_notes_for_tag(self.tag_displayed) if: return dialog = PriorityDialog(self, None) if dialog.exec_(): prio = dialog.value for n in notes: update_priority_list(n.id, prio) self.parent.refresh_queue_list() self.refresh() tooltip(f'Added all with tag {self.tag_displayed}')",False,len(notes) == 0,notes is None,0.6484568119049072 5877,"def enqueue_all(self): if self.tag_displayed is None: return notes = get_unqueued_notes_for_tag(self.tag_displayed) if len(notes) == 0: return dialog = PriorityDialog(self, None) if: prio = dialog.value for n in notes: update_priority_list(n.id, prio) self.parent.refresh_queue_list() self.refresh() tooltip(f'Added all with tag {self.tag_displayed}')",True,dialog.exec_(),dialog.exec_(),0.6562564373016357 5878,"def load_matrix(path, path_type=constants.PATH_NPY_ARRAY): numpy_data = None if: numpy_data = numpy.load(path) elif path_type == constants.PATH_TEXT_ARRAY: numpy_data = numpy.loadtxt(path) else: raise Exception('Invalid path type given %s' % path_type) matrix = numpy.array(numpy_data) return matrix",True,path_type == constants.PATH_NPY_ARRAY,path_type == constants.PATH_NPY_ARRAY,0.6506789922714233 5879,"def load_matrix(path, path_type=constants.PATH_NPY_ARRAY): numpy_data = None if path_type == constants.PATH_NPY_ARRAY: numpy_data = numpy.load(path) elif: numpy_data = numpy.loadtxt(path) else: raise Exception('Invalid path type given %s' % path_type) matrix = numpy.array(numpy_data) return matrix",False,path_type == constants.PATH_TEXT_ARRAY,path_type == constants.PATH_NPY_STRING,0.651941180229187 5880,"def is_column_type_decimal(schema, column_name): column = find_column(schema, column_name) if: raise ValueError('Column {} not found.'.format(column_name)) return type(column.type) == pa.lib.Decimal128Type",False,not column,column is None,0.6682562828063965 5881,"@staticmethod def from_alipay_dict(d): if: return None o = AlipayEcoMycarDataserviceMaintainvehicleShareModel() if 'vid' in d: o.vid = d['vid'] return o",True,not d,not d,0.667117178440094 5882,"@staticmethod def from_alipay_dict(d): if not d: return None o = AlipayEcoMycarDataserviceMaintainvehicleShareModel() if: o.vid = d['vid'] return o",True,'vid' in d,'vid' in d,0.663202166557312 5883,"def on_stop(self, *args): self.learner.save() if: self.join_batcher_thread() self.stop.emit(self.object_id, {self.object_id: self.learner.timing}) super().on_stop(*args) del self.learner.actor_critic del self.learner",False,not self.cfg.serial_mode,self.learner.join_on_last_run,0.6469846367835999 5884,"def dict(self, *args, **kwargs): data = super().dict(*args, **kwargs) for field in ('use_multi_node_manager','spark_config', 'append_job_name', 'append_output_dir', 'ext'): if: data.pop(field) return data",False,data[field] == GenericCommandParametersModel.__fields__[field].default,field in data,0.6441377997398376 5885,"def _set_function_def_environment(self, node: nodes.FunctionDef) -> None: """"""Method to set environment of a FunctionDef node."""""" node.type_environment = Environment() if: node.type_environment.locals['self'] = ForwardRef(node.parent.name) self._populate_local_env(node) self._populate_local_env_attrs(node) node.type_environment.locals['return'] = self.type_constraints.fresh_tvar(node)",False,"node.args.args and node.args.args[0].name == 'self' and isinstance(node.parent, nodes.ClassDef)",node.parent.name,0.6518294811248779 5886,"def _load_plugins(): """"""Load plugins."""""" if: return None plugins_file = os.path.join(os.environ['TREADMILL_APPROOT'], 'plugins.json') try: with io.open(plugins_file) as f: plugins = json.loads(f.read()) return plugins except OSError as err: return None",False,'TREADMILL_APPROOT' not in os.environ,os.environ['TREADMILL_APPROOT'] is None,0.6480581164360046 5887,"def tearDown(self): if: for queue_name in self.test_queues: try: self.qs.delete_queue(queue_name) except: pass return super(StorageQueueEncodingTest, self).tearDown()",False,not self.is_playback(),not self.q_in_use,0.6429082155227661 5888,"@access_control_allow_credentials.setter def access_control_allow_credentials(self, value: t.Optional[bool]) -> None: if: self.headers['Access-Control-Allow-Credentials'] = 'true' else: self.headers.pop('Access-Control-Allow-Credentials', None)",False,value is True,value is None,0.6570004224777222 5889,"def is_valid(value, matcher, require): """"""Determine if a value is valid based on the provided matcher. :param str value: Value to validate. :param matcher: Compiled regular expression to use to validate the value. :param require: Whether or not the value is required. """""" if: return value is not None and matcher.match(value) return value is None or matcher.match(value)",False,require,require is None,0.6660714149475098 5890,"def forward_embedding(self, x): if: for conv in self.shared_convs: x = conv(x) if self.num_shared_fcs > 0: if self.with_avg_pool: x = self.avg_pool(x) x = x.flatten(1) for fc in self.shared_fcs: x = self.relu(fc(x)) return x",True,self.num_shared_convs > 0,self.num_shared_convs > 0,0.6454466581344604 5891,"def forward_embedding(self, x): if self.num_shared_convs > 0: for conv in self.shared_convs: x = conv(x) if: if self.with_avg_pool: x = self.avg_pool(x) x = x.flatten(1) for fc in self.shared_fcs: x = self.relu(fc(x)) return x",True,self.num_shared_fcs > 0,self.num_shared_fcs > 0,0.643315315246582 5892,"def forward_embedding(self, x): if self.num_shared_convs > 0: for conv in self.shared_convs: x = conv(x) if self.num_shared_fcs > 0: if: x = self.avg_pool(x) x = x.flatten(1) for fc in self.shared_fcs: x = self.relu(fc(x)) return x",True,self.with_avg_pool,self.with_avg_pool,0.6442190408706665 5893,"def _env_info(self): env_info = {'gym_version': version.VERSION} if: env_info['env_id'] = self.env.spec.id return env_info",True,self.env.spec,self.env.spec,0.6505796909332275 5894,"def test_get_blob_to_path_with_mode(self): if: return with open(FILE_PATH, 'wb') as stream: stream.write(b'abcdef') with self.assertRaises(BaseException): blob = self.bs.get_blob_to_path(self.container_name, self.byte_blob, FILE_PATH, 'a+b')",True,TestMode.need_recording_file(self.test_mode),TestMode.need_recording_file(self.test_mode),0.6443670988082886 5895,"def callMultiF(f, n, cache): """""" Try to get n unique results by calling f() multiple times >>> import random >>> random.seed(0) >>> callMultiF(lambda : random.randint(0,10), 9, set()) [9, 8, 4, 2, 5, 3, 6, 10] >>> random.seed(0) >>> callMultiF(lambda : random.randint(0,10), 9, set([8,9,10])) [4, 2, 5, 3, 6, 7] """""" rs = [] rs_s = set() if: for c in cache: rs_s.add(c) for _ in range(n): c = f() c_iter = 0 while c in rs_s and c_iter < 3: c_iter += 1 c = f() if c not in rs_s: rs_s.add(c) rs.append(c) assert len(rs) <= n return rs",True,cache,cache,0.6642289161682129 5896,"def callMultiF(f, n, cache): """""" Try to get n unique results by calling f() multiple times >>> import random >>> random.seed(0) >>> callMultiF(lambda : random.randint(0,10), 9, set()) [9, 8, 4, 2, 5, 3, 6, 10] >>> random.seed(0) >>> callMultiF(lambda : random.randint(0,10), 9, set([8,9,10])) [4, 2, 5, 3, 6, 7] """""" rs = [] rs_s = set() if cache: for c in cache: rs_s.add(c) for _ in range(n): c = f() c_iter = 0 while c in rs_s and c_iter < 3: c_iter += 1 c = f() if: rs_s.add(c) rs.append(c) assert len(rs) <= n return rs",True,c not in rs_s,c not in rs_s,0.655996561050415 5897,"def parse_explicit_events_file(self, explicit_events_file): """"""Parse explicit_events_file and load in memory."""""" events = set() for line in explicit_events_file: if: continue if line.startswith('#'): continue _, event_source, event_type = line.rstrip('\n').split('\t') events.add(ExplicitEventType(event_source, event_type)) return events",False,not line.strip(),not line,0.6508703231811523 5898,"def parse_explicit_events_file(self, explicit_events_file): """"""Parse explicit_events_file and load in memory."""""" events = set() for line in explicit_events_file: if not line.strip(): continue if: continue _, event_source, event_type = line.rstrip('\n').split('\t') events.add(ExplicitEventType(event_source, event_type)) return events",False,line.startswith('#'),explicit_events_file.endswith('\t'),0.6413639783859253 5899,"@udf_params.setter def udf_params(self, value): if: return self._udf_params = value",True,"not isinstance(value, dict)","not isinstance(value, dict)",0.6461789608001709 5900,"def match(rex, str): m = rex.match(str) if: return m.group(0) else: return None",True,m,m,0.6817741394042969 5901,"def _rot_bbox_points(self, input_dict): """"""Private function to rotate bounding boxes and points. Args: input_dict (dict): Result dict from loading pipeline. Returns: dict: Results after rotation, 'points', 'pcd_rotation' and keys in input_dict['bbox3d_fields'] are updated in the result dict. """""" rotation = self.rot_range if: rotation = [-rotation, rotation] noise_rotation = np.random.uniform(rotation[0], rotation[1]) for key in input_dict['bbox3d_fields']: if len(input_dict[key].tensor)!= 0: points, rot_mat_T = input_dict[key].rotate(noise_rotation, input_dict['points']) input_dict['points'] = points input_dict['pcd_rotation'] = rot_mat_T",False,"not isinstance(rotation, list)","np.random.uniform(0, 1) != 0",0.6473472118377686 5902,"def _rot_bbox_points(self, input_dict): """"""Private function to rotate bounding boxes and points. Args: input_dict (dict): Result dict from loading pipeline. Returns: dict: Results after rotation, 'points', 'pcd_rotation' and keys in input_dict['bbox3d_fields'] are updated in the result dict. """""" rotation = self.rot_range if not isinstance(rotation, list): rotation = [-rotation, rotation] noise_rotation = np.random.uniform(rotation[0], rotation[1]) for key in input_dict['bbox3d_fields']: if: points, rot_mat_T = input_dict[key].rotate(noise_rotation, input_dict['points']) input_dict['points'] = points input_dict['pcd_rotation'] = rot_mat_T",False,len(input_dict[key].tensor) != 0,"np.random.uniform(0, 1) < self.rot_range",0.6443102359771729 5903,"@property def supported_features(self) -> int: """"""Return the list of supported features."""""" if: return 0 return ClimateEntityFeature.TARGET_TEMPERATURE",False,not self._wrap_device.device.set_values_allowed,self._device.target_TEMPERATURE not in self._device.available_features,0.6463865041732788 5904,"def is_absolute(self): """"""True if the path is absolute (has both a root and, if applicable, a drive)."""""" if: return False return not self._flavour.has_drv or bool(self._drv)",True,not self._root,not self._root,0.6569713354110718 5905,"def unregister_extension(self, extension_name): """"""Unregisters an extension. Parameters ---------- extension_name : str """""" if: raise InvalidParameter(f""extension {extension_name} isn't registered"") self._extensions.pop(extension_name) self._serialize_registry()",True,extension_name not in self._extensions,extension_name not in self._extensions,0.6507534384727478 5906,"def _GCD(self, values): if: return 1 if len(values) == 1: return values[0] elif len(values) == 2: return math.gcd(values[0], values[1]) else: middle = len(values) // 2 return math.gcd(self._GCD(values[:middle]), self._GCD(values[middle:]))",True,len(values) == 0,len(values) == 0,0.6503927707672119 5907,"def _GCD(self, values): if len(values) == 0: return 1 if: return values[0] elif len(values) == 2: return math.gcd(values[0], values[1]) else: middle = len(values) // 2 return math.gcd(self._GCD(values[:middle]), self._GCD(values[middle:]))",True,len(values) == 1,len(values) == 1,0.6494678258895874 5908,"def _GCD(self, values): if len(values) == 0: return 1 if len(values) == 1: return values[0] elif: return math.gcd(values[0], values[1]) else: middle = len(values) // 2 return math.gcd(self._GCD(values[:middle]), self._GCD(values[middle:]))",True,len(values) == 2,len(values) == 2,0.6504542231559753 5909,"def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CustomTransitTimeDetail', fromsubclass_=False, pretty_print=True): if: eol_ = '\n' else: eol_ = '' if self.Key is not None: namespaceprefix_ = self.Key_nsprefix_ + ':' if UseCapturedNS_ and self.Key_nsprefix_ else '' showIndent(outfile, level, pretty_print) outfile.write('<%sKey>%s%s' % (namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')), namespaceprefix_, eol_))",True,pretty_print,pretty_print,0.6643433570861816 5910,"def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CustomTransitTimeDetail', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if: namespaceprefix_ = self.Key_nsprefix_ + ':' if UseCapturedNS_ and self.Key_nsprefix_ else '' showIndent(outfile, level, pretty_print) outfile.write('<%sKey>%s%s' % (namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')), namespaceprefix_, eol_))",True,self.Key is not None,self.Key is not None,0.6488668918609619 5911,"def get_blacklist_setting(chat_id): try: setting = CHAT_SETTINGS_BLACKLISTS.get(str(chat_id)) if: return (setting['blacklist_type'], setting['value']) return (1, '0') finally: SESSION.close()",True,setting,setting,0.6630433797836304 5912,"def init_weights(self, pretrained=None): """"""Init backbone weights. Args: pretrained (str | None): If pretrained is a string, then it initializes backbone weights by loading the pretrained checkpoint. If pretrained is None, then it follows default initializer or customized initializer in subclasses. """""" if: logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif pretrained is None: pass else: raise TypeError(f'pretrained must be a str or None. But received {type(pretrained)}.')",True,"isinstance(pretrained, str)","isinstance(pretrained, str)",0.6466339826583862 5913,"def init_weights(self, pretrained=None): """"""Init backbone weights. Args: pretrained (str | None): If pretrained is a string, then it initializes backbone weights by loading the pretrained checkpoint. If pretrained is None, then it follows default initializer or customized initializer in subclasses. """""" if isinstance(pretrained, str): logger = logging.getLogger() load_checkpoint(self, pretrained, strict=False, logger=logger) elif: pass else: raise TypeError(f'pretrained must be a str or None. But received {type(pretrained)}.')",True,pretrained is None,pretrained is None,0.6528476476669312 5914,"def points_in_convex_polygon_3d_jit(points, polygon_surfaces, num_surfaces=None): """"""Check points is in 3d convex polygons. Args: points (np.ndarray): Input points with shape of (num_points, 3). polygon_surfaces (np.ndarray): Polygon surfaces with shape of (num_polygon, max_num_surfaces, max_num_points_of_surface, 3). All surfaces' normal vector must direct to internal. Max_num_points_of_surface must at least 3. num_surfaces (np.ndarray, optional): Number of surfaces a polygon contains shape of (num_polygon). Defaults to None. Returns: np.ndarray: Result matrix with the shape of [num_points, num_polygon]. """""" max_num_surfaces, max_num_points_of_surface = polygon_surfaces.shape[1:3] num_polygons = polygon_surfaces.shape[0] if: num_surfaces = np.full((num_polygons,), 9999999, dtype=np.int64) normal_vec, d = surface_equ_3d(polygon_surfaces[:, :, :3, :]) return _points_in_convex_polygon_3d_jit(points, polygon_surfaces, normal_vec, d, num_surfaces)",True,num_surfaces is None,num_surfaces is None,0.6495777368545532 5915,"def get_vel_limit(self): if: return 0 with self.pt_lock: with DelayedKeyboardInterrupt(): p, dxl_comm_result, dxl_error = self.packet_handler.read4ByteTxRx(self.port_handler, self.dxl_id, XL430_ADDR_VELOCITY_LIMIT) self.handle_comm_result('XL430_ADDR_VELOCITY_LIMIT', dxl_comm_result, dxl_error) return p",True,not self.hw_valid,not self.hw_valid,0.6526356935501099 5916,"def mouseMoveEvent(self, event): if: newValue = self.oldValue + (event.pos().x() - self.clickPosition.x()) / 50 self.setValue(newValue) self.valueChanged.emit(newValue)",False,self.allowDrag,event.buttons() & Qt.LeftButton,0.6521770358085632 5917,"def _get_attribute_full_path(self, node): """"""Traverse an attribute to generate a full name e.g. tf.foo.bar. Args: node: A Node of type Attribute. Returns: a '.'-delimited full-name or None if the tree was not a simple form. i.e. `foo()+b).bar` returns None, while `a.b.c` would return ""a.b.c"". """""" curr = node items = [] while not isinstance(curr, ast.Name): if: return None items.append(curr.attr) curr = curr.value items.append(curr.id) return '.'.join(reversed(items))",False,"not isinstance(curr, ast.Attribute)",curr.attr is None,0.644837498664856 5918,"def set_field_null(self, model, f, null): qn = self.connection.ops.quote_name params = (qn(model._meta.db_table), qn(f.column), f.db_type()) if: return 'ALTER TABLE %s MODIFY COLUMN %s %s DEFAULT NULL;' % params else: return 'ALTER TABLE %s MODIFY COLUMN %s %s NOT NULL;' % params",True,null,null,0.6690963506698608 5919,"def wait_for_socket(*args, **kwargs): global wait_for_socket if: wait_for_socket = poll_wait_for_socket elif hasattr(select,'select'): wait_for_socket = select_wait_for_socket else: wait_for_socket = null_wait_for_socket return wait_for_socket(*args, **kwargs)",False,_have_working_poll(),"hasattr(poll, 'poll')",0.651729166507721 5920,"def wait_for_socket(*args, **kwargs): global wait_for_socket if _have_working_poll(): wait_for_socket = poll_wait_for_socket elif: wait_for_socket = select_wait_for_socket else: wait_for_socket = null_wait_for_socket return wait_for_socket(*args, **kwargs)",False,"hasattr(select, 'select')",_have_select_wait_for_socket(),0.6451219320297241 5921,"def get_core_axis(self, obj): """"""returns a part line representing the core axis of the wall"""""" import Part p1 = self.get_first_point(obj) p2 = self.get_last_point(obj) if: print('Points are equal, cannot get the axis') return None else: core_axis = Part.Line(p1, p2) return core_axis",True,p1 == p2,p1 == p2,0.6598876714706421 5922,"def update_available(): response = requests.get('https://pypi.python.org/pypi/gramaddict/json') if: latest_version = response.json()['info']['version'] current_version = parse_version(__version__) latest_version = parse_version(latest_version) return (current_version < latest_version, latest_version) else: return (False, None)",False,response.ok,response.json().get('info'),0.6595999002456665 5923,"def get_version(): try: version_string = subprocess.check_output(['z3', '-version']).decode('utf-8') version_match = re.match('Z3 version (.*)\n', version_string) if: return (False, None, f'Found malformed version string: {version_string}') return (True, version_match.group(1), None) except subprocess.CalledProcessError as ex: return (False, None, f'Not found, error: {ex}') except OSError as ex: return (False, None, f'Not found, error: {ex}')",False,not version_match,version_match is None,0.6474841237068176 5924,"def forward(self, x, style, noise=None): out = self.modulated_conv(x, style) if: b, _, h, w = out.shape noise = out.new_empty(b, 1, h, w).normal_() out = out + self.weight * noise out = self.activate(out) return out",True,noise is None,noise is None,0.6569819450378418 5925,"def on_touch_down(self, touch): if: return False elif not self.collide_point(touch.x, touch.y): return False elif self in touch.ud: return False elif self.disabled: return False else: self.fade_bg = Animation(duration=0.5, _current_button_color=self.md_bg_color_down) self.fade_bg.start(self) return super().on_touch_down(touch)",False,touch.is_mouse_scrolling,"not self.collide_point(touch.x, touch.y)",0.6483708620071411 5926,"def on_touch_down(self, touch): if touch.is_mouse_scrolling: return False elif: return False elif self in touch.ud: return False elif self.disabled: return False else: self.fade_bg = Animation(duration=0.5, _current_button_color=self.md_bg_color_down) self.fade_bg.start(self) return super().on_touch_down(touch)",False,"not self.collide_point(touch.x, touch.y)",not self.collide_point(*touch.pos),0.6454470753669739 5927,"def on_touch_down(self, touch): if touch.is_mouse_scrolling: return False elif not self.collide_point(touch.x, touch.y): return False elif: return False elif self.disabled: return False else: self.fade_bg = Animation(duration=0.5, _current_button_color=self.md_bg_color_down) self.fade_bg.start(self) return super().on_touch_down(touch)",False,self in touch.ud,not self.visible,0.6573796272277832 5928,"def on_touch_down(self, touch): if touch.is_mouse_scrolling: return False elif not self.collide_point(touch.x, touch.y): return False elif self in touch.ud: return False elif: return False else: self.fade_bg = Animation(duration=0.5, _current_button_color=self.md_bg_color_down) self.fade_bg.start(self) return super().on_touch_down(touch)",False,self.disabled,not self.enabled,0.6481961011886597 5929,"def build(self): if: raise RuntimeError('Use save/restore instead of build in eager mode.') self._build(self._filename, build_save=True, build_restore=True)",False,context.executing_eagerly(),self._load_eager,0.6460968255996704 5930,"def get_centre_slice(self): """"""returns a 2D AcquisitionGeometry that corresponds to the centre slice of the input"""""" if: return self AG_2D = copy.deepcopy(self) AG_2D.config.system = self.config.system.get_centre_slice() AG_2D.config.panel.num_pixels[1] = 1 AG_2D.config.panel.pixel_size[1] = abs(self.config.system.detector.direction_y[2]) * self.config.panel.pixel_size[1] return AG_2D",False,self.dimension == '2D',self.config.panel.num_pixels[1] == 1 or self.config.system.num_pixels[2] == 1,0.6538234949111938 5931,"@contextmanager def tmp_cfg(cfg_key, cfg_value='yes'): """""" Yields nothing, but assignes the given configuration key (ck.cfg[cfg_key]) the given value. Restores the old value afterwards. """""" saved_value = ck.cfg.get(cfg_key, None) try: ck.cfg[cfg_key] = cfg_value yield finally: if: ck.cfg.pop(cfg_key, None) else: ck.cfg[cfg_key] = saved_value",True,saved_value is None,saved_value is None,0.6502205729484558 5932,"def add_bed_to_graph(G, bed, families): for seqid, bs in bed.sub_beds(): prev_node, prev_strand = (None, '+') for b in bs: accn = b.accn strand = b.strand node = '='.join(families[accn]) if: G.add_edge(prev_node, node, prev_strand, strand) prev_node, prev_strand = (node, strand) return G",True,prev_node,prev_node,0.6537488698959351 5933,"def save(self, filename=None, ignore_discard=False, ignore_expires=False): if: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) f = open(filename, 'w') try: debug('Saving LWP cookies file') f.write('#LWP-Cookies-2.0\n') f.write(self.as_lwp_str(ignore_discard, ignore_expires)) finally: f.close()",True,filename is None,filename is None,0.6542143821716309 5934,"def save(self, filename=None, ignore_discard=False, ignore_expires=False): if filename is None: if: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) f = open(filename, 'w') try: debug('Saving LWP cookies file') f.write('#LWP-Cookies-2.0\n') f.write(self.as_lwp_str(ignore_discard, ignore_expires)) finally: f.close()",True,self.filename is not None,self.filename is not None,0.6482458114624023 5935,"def add_pdf_note(self, item_clicked): full_path = item_clicked.data(Qt.ItemDataRole.UserRole) if: if self.path_displayed is not None: tab = self def after(): tab.load_folders_unused_pdfs(tab.path_displayed) tab.parent.refresh_queue_list() tab.parent.pdfs_tab.refresh() add_tmp_hook('user-note-created', after) e = NoteEditor(self.parent, add_only=True, source_prefill=full_path) else: tooltip('Close the opened Note dialog first!')",False,not state.note_editor_shown,full_path,0.6435221433639526 5936,"def add_pdf_note(self, item_clicked): full_path = item_clicked.data(Qt.ItemDataRole.UserRole) if not state.note_editor_shown: if: tab = self def after(): tab.load_folders_unused_pdfs(tab.path_displayed) tab.parent.refresh_queue_list() tab.parent.pdfs_tab.refresh() add_tmp_hook('user-note-created', after) e = NoteEditor(self.parent, add_only=True, source_prefill=full_path) else: tooltip('Close the opened Note dialog first!')",False,self.path_displayed is not None,full_path,0.6444161534309387 5937,"def _mock_manager(self, *args, **kwargs): if: device_params = kwargs['device_params'] device_handler = make_device_handler(device_params) session = SSHSession(device_handler) return Manager(session, device_handler) elif args: return self._read_file(args[0].tag + '.xml')",True,kwargs,kwargs,0.6723109483718872 5938,"def _mock_manager(self, *args, **kwargs): if kwargs: device_params = kwargs['device_params'] device_handler = make_device_handler(device_params) session = SSHSession(device_handler) return Manager(session, device_handler) elif: return self._read_file(args[0].tag + '.xml')",True,args,args,0.6775000095367432 5939,"def _get_runner(self, runner): """""" :param runner: Runner :return: Runner instance """""" return_runner = runner if: return_runner = self.connection.get_runner() if return_runner is None: return_runner = get_runner() return return_runner",False,return_runner is None and self.connection is not None,self.connection is not None,0.6478105783462524 5940,"def _get_runner(self, runner): """""" :param runner: Runner :return: Runner instance """""" return_runner = runner if return_runner is None and self.connection is not None: return_runner = self.connection.get_runner() if: return_runner = get_runner() return return_runner",True,return_runner is None,return_runner is None,0.6511318683624268 5941,"def _read_lines(env_file: str) -> Iterator[str]: with open(env_file, encoding='utf-8-sig') as ef: lines = ef.read().splitlines() for line in lines: line = line.lstrip() if: yield line",False,line and (not line.startswith('#')),line,0.6439728736877441 5942,"def _iter_unique_lines(file: io.TextIOWrapper): existed_lines: Set[Text] = set() for line in file: if: continue yield line existed_lines.add(line)",True,line in existed_lines,line in existed_lines,0.6506876945495605 5943,"def previous_element(dct: OrderedDict[Any, Any], key: Any) -> Optional[Any]: """""" Returns the value of the element that comes before the given key in an ordered dictionary. If the key is not in the dictionary, or if it is the first element in the dictionary, returns None. Arguments: dct: An ordered dictionary. key: The key of the element whose previous element we want to find. Returns: The value of the element that comes before the given key in the dictionary, or None if there is no such element. Example: >>> dct = OrderedDict([(1, 'a'), (2, 'b'), (3, 'c')]) >>> previous_element(dct, 2) # Returns 'a' 'a' >>> previous_element(dct, 3) # Returns 'b' 'b' >>> previous_element(dct, 1) # Returns None >>> previous_element(dct, 4) # Returns None """""" if: key_list = list(dct.keys()) idx = key_list.index(key) if idx == 0: ans = None else: ans = dct[key_list[idx - 1]] else: ans = None return ans",True,key in dct,key in dct,0.6620299220085144 5944,"def previous_element(dct: OrderedDict[Any, Any], key: Any) -> Optional[Any]: """""" Returns the value of the element that comes before the given key in an ordered dictionary. If the key is not in the dictionary, or if it is the first element in the dictionary, returns None. Arguments: dct: An ordered dictionary. key: The key of the element whose previous element we want to find. Returns: The value of the element that comes before the given key in the dictionary, or None if there is no such element. Example: >>> dct = OrderedDict([(1, 'a'), (2, 'b'), (3, 'c')]) >>> previous_element(dct, 2) # Returns 'a' 'a' >>> previous_element(dct, 3) # Returns 'b' 'b' >>> previous_element(dct, 1) # Returns None >>> previous_element(dct, 4) # Returns None """""" if key in dct: key_list = list(dct.keys()) idx = key_list.index(key) if: ans = None else: ans = dct[key_list[idx - 1]] else: ans = None return ans",True,idx == 0,idx == 0,0.6615336537361145 5945,"def _load_module(self): if: self._language = self._get_engine_language() self._module = self._load_module_for_language(self._language)",False,not self._language,"not hasattr(self, '_language')",0.6600335836410522 5946,"def setAtoms(pos, vel=None, force=None): n = len(pos) if: vel = np.zeros((n, 2)) if force is None: force = np.zeros((n, 2)) lib.setAtoms(n, pos, vel, force) return (vel, force)",True,vel is None,vel is None,0.6626383066177368 5947,"def setAtoms(pos, vel=None, force=None): n = len(pos) if vel is None: vel = np.zeros((n, 2)) if: force = np.zeros((n, 2)) lib.setAtoms(n, pos, vel, force) return (vel, force)",True,force is None,force is None,0.6572126150131226 5948,"def _set_parent_ns(packageName): parts = packageName.split('.') name = parts.pop() if: parent = '.'.join(parts) setattr(sys.modules[parent], name, sys.modules[packageName])",True,parts,parts,0.6587563157081604 5949,"def read_txt(txt): if: with open(txt, 'r', encoding='utf-8') as f: lines = f.read().splitlines() else: lines = [txt] return lines",False,os.path.isfile(txt),"sys.version_info >= (3, 0)",0.6433552503585815 5950,"def tril_softmax_dropout(self, x): if: x = flow._C.fused_scale_tril_softmax_dropout(x, diagonal=0, scale=self.coeff, fill_value=float('-inf'), rate=self.attention_dropout_rate) else: x = flow._C.fused_scale_tril(x, fill_value=float('-inf'), scale=self.coeff) x = flow._C.softmax(x, dim=x.ndim - 1) x = self.multihead_attn_dropout(x) return x",False,self.scale_tril_softmax_dropout_fusion,self.attention_dropout_rate,0.642555832862854 5951,"def _construct_form(self, i, **kwargs): form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs) if: form.data[form.add_prefix(self._pk_field.name)] = None return form",False,self.save_as_new,self._pk_field.name,0.6507587432861328 5952,"def on_conflict(self, ignore=False): if: self._conflict = RawExpr(sql.SQL('DO NOTHING')) return self",True,ignore,ignore,0.6674989461898804 5953,"def updateOutput(self, input): if: self.mininput = input.new() self.mininput.resize_as_(input).copy_(input).mul_(-1) self._backend.SoftMax_updateOutput(self._backend.library_state, self.mininput, self.output, self._get_dim(input)) return self.output",True,self.mininput is None,self.mininput is None,0.6510879397392273 5954,"def _tasks_from_predictions(self, predictions): """""" Get COCO API ""tasks"" (i.e. iou_type) from COCO-format predictions. """""" tasks = {'bbox'} for pred in predictions: if: tasks.add('segm') if 'keypoints' in pred: tasks.add('keypoints') return sorted(tasks)",False,'segmentation' in pred,'segmentm' in pred,0.6546024680137634 5955,"def _tasks_from_predictions(self, predictions): """""" Get COCO API ""tasks"" (i.e. iou_type) from COCO-format predictions. """""" tasks = {'bbox'} for pred in predictions: if'segmentation' in pred: tasks.add('segm') if: tasks.add('keypoints') return sorted(tasks)",True,'keypoints' in pred,'keypoints' in pred,0.6570204496383667 5956,"def parse_list_header(value): """"""Parse lists as described by RFC 2068 Section 2. In particular, parse comma-separated lists where the elements of the list may include quoted-strings. A quoted-string could contain a comma. A non-quoted string could have quotes in the middle. Quotes are removed automatically after parsing. It basically works like :func:`parse_set_header` just that items may appear multiple times and case sensitivity is preserved. The return value is a standard :class:`list`: >>> parse_list_header('token, ""quoted value""') ['token', 'quoted value'] To create a header from the :class:`list` again, use the :func:`dump_header` function. :param value: a string with a list header. :return: :class:`list` """""" result = [] for item in _parse_list_header(value): if: item = unquote_header_value(item[1:-1]) result.append(item) return result",True,"item[:1] == item[-1:] == '""'","item[:1] == item[-1:] == '""'",0.647624135017395 5957,"def __init__(self, year, *args, **kwargs): """""" :param year: The integer 0 :param args: Other positional arguments; see datetime.datetime. :param kwargs: Other keyword arguments; see datetime.datetime. """""" if: raise ValueError('year must be 0') self._y2k = datetime(2000, *args, **kwargs)",False,year != 0,year < 0,0.6629319787025452 5958,"def webhook_log_msg() -> Union[str, None]: """""" Log message for the webhook. We log the :return: """""" content_type = request.headers.get('Content-Type', None) x_github_event = request.headers.get('X-GitHub-Event', None) if: return request.json.get('repository', {}).get('name', None) return None",False,content_type == 'application/json' and x_github_event == 'push',content_type == 'webhook' and x_github_event and (content_type == 'application/json'),0.6440003514289856 5959,"@api.one @api.onchange('checkbook_id') def onchange_checkbook(self): if: self.number = self.checkbook_id.next_check_number",True,self.checkbook_id,self.checkbook_id,0.6538314819335938 5960,"def list(self, force=False): """"""Return a list of Gist objects."""""" try: self.res = requests.get(GIST_BASE_URL % 'gists?per_page=1000', headers=self.headers, proxies=self.proxies) except requests.exceptions.RequestException as e: if: print('requests request exception:'+ str(e)) return return self.res",False,self.settings['debug_mode'],force,0.6440274715423584 5961,"def forward(self, x): if: x = self.pool(x) y = self.conv1(x) y = self.relu2(y) y = self.conv2(y) if self.stride > 1: x = self.shortcut(x) y = y + x return y",True,self.stride > 1,self.stride > 1,0.6524165868759155 5962,"def forward(self, x): if self.stride > 1: x = self.pool(x) y = self.conv1(x) y = self.relu2(y) y = self.conv2(y) if: x = self.shortcut(x) y = y + x return y",True,self.stride > 1,self.stride > 1,0.6541896462440491 5963,"def __init__(self, pool, url, reason=None): self.reason = reason message = 'Max retries exceeded with url: %s' % url if: message +='(Caused by %s: %s)' % (type(reason), reason) else: message +='(Caused by redirect)' RequestError.__init__(self, pool, url, message)",True,reason,reason,0.6769113540649414 5964,"def _warn_on_replacement(self, metadata): replacement_char = b'\xef\xbf\xbd'.decode('utf-8') if: tmpl = '{self.path} could not be properly decoded in UTF-8' msg = tmpl.format(**locals()) warnings.warn(msg)",False,replacement_char in metadata,replacement_char in self.path,0.6565138101577759 5965,"def cleanup(self): if: return SHUT_RDWR = 2 try: self.sock.shutdown(SHUT_RDWR) self.sock.close() except OSError: pass",False,not self.sock,self.do_run,0.6492595672607422 5966,"def test_nested_dict_mandatory_value_outer(self, key: DictKeyType, match: str) -> None: c = OmegaConf.create({key: {'b': '???'}}) with raises(MissingMandatoryValue, match=match): c[key].b if: with raises(MissingMandatoryValue, match=match): getattr(c, key).b",False,"isinstance(key, str)","hasattr(c, key)",0.6475024819374084 5967,"@classmethod def from_cfg(cls, cfg, dtype=None): if: new_cfg = cfg.clone() new_cfg.defrost() new_cfg.MODEL.dtype = dtype return cls(cfg=new_cfg) else: return cls(cfg=cfg)",True,dtype is not None,dtype is not None,0.6591956615447998 5968,"def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): if: return arch if arch.startswith('ppc'): return 'ppc' return 'i386'",False,not is_32bit,is_32bit,0.6509251594543457 5969,"def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): if not is_32bit: return arch if: return 'ppc' return 'i386'",True,arch.startswith('ppc'),arch.startswith('ppc'),0.6448097229003906 5970,"def close(self): self._stream.close() returncode = self._proc.wait() if: return None if name == 'nt': return returncode else: return returncode << 8",True,returncode == 0,returncode == 0,0.6565254330635071 5971,"def close(self): self._stream.close() returncode = self._proc.wait() if returncode == 0: return None if: return returncode else: return returncode << 8",False,name == 'nt',returncode == 1,0.6546343564987183 5972,"def forward(self, x): x = self.global_pool(x) if: x = F.dropout(x, p=float(self.drop_rate), training=self.training) x = self.fc(x) x = self.flatten(x) return x",True,self.drop_rate,self.drop_rate,0.6477751731872559 5973,"def format(self, val, attachment=[], include_media_url=False, *args, **kwargs): if: val = '' if not include_media_url: return {self.name: val} download_url = attachment[0].get('download_url', '') if attachment else '' return {self.name: val, f'{self.name}_URL': download_url}",True,val is None,val is None,0.6592485904693604 5974,"def format(self, val, attachment=[], include_media_url=False, *args, **kwargs): if val is None: val = '' if: return {self.name: val} download_url = attachment[0].get('download_url', '') if attachment else '' return {self.name: val, f'{self.name}_URL': download_url}",False,not include_media_url,include_media_url,0.649240255355835 5975,"def serialize(self): if: self.length = self._MIN_LEN buf = struct.pack(self._PACK_STR, self.chunk_type(), self.tflag, self.length) return buf",True,0 == self.length,0 == self.length,0.669891357421875 5976,"def _set_result_column(self, column): """"""Set Column to Table based on alter_metadata evaluation."""""" self.process_column(column) if: self.current_name = column.name if self.alter_metadata: self._result_column = column else: self._result_column = column.copy_fixed()",False,"not hasattr(self, 'current_name')",column.name,0.6457271575927734 5977,"def _set_result_column(self, column): """"""Set Column to Table based on alter_metadata evaluation."""""" self.process_column(column) if not hasattr(self, 'current_name'): self.current_name = column.name if: self._result_column = column else: self._result_column = column.copy_fixed()",False,self.alter_metadata,self.use_result_column,0.6534218192100525 5978,"def get_volumes(module, ec2_conn): instance = module.params.get('instance') find_params = dict() if: find_params['Filters'] = ansible_dict_to_boto3_filter_list({'attachment.instance-id': instance}) vols = [] try: vols_response = ec2_conn.describe_volumes(aws_retry=True, **find_params) vols = [camel_dict_to_snake_dict(vol) for vol in vols_response.get('Volumes', [])] except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e: module.fail_json_aws(e, msg='Error while getting EBS volumes') return vols",True,instance,instance,0.6822794675827026 5979,"def _check_branches(self, num_branches, num_blocks, in_channels, num_channels): if: error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(num_branches, len(num_blocks)) raise ValueError(error_msg) if num_branches!= len(num_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(num_branches, len(num_channels)) raise ValueError(error_msg) if num_branches!= len(in_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(num_branches, len(in_channels)) raise ValueError(error_msg)",True,num_branches != len(num_blocks),num_branches != len(num_blocks),0.6441241502761841 5980,"def _check_branches(self, num_branches, num_blocks, in_channels, num_channels): if num_branches!= len(num_blocks): error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(num_branches, len(num_blocks)) raise ValueError(error_msg) if: error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(num_branches, len(num_channels)) raise ValueError(error_msg) if num_branches!= len(in_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(num_branches, len(in_channels)) raise ValueError(error_msg)",True,num_branches != len(num_channels),num_branches != len(num_channels),0.6430579423904419 5981,"def _check_branches(self, num_branches, num_blocks, in_channels, num_channels): if num_branches!= len(num_blocks): error_msg = 'NUM_BRANCHES({}) <> NUM_BLOCKS({})'.format(num_branches, len(num_blocks)) raise ValueError(error_msg) if num_branches!= len(num_channels): error_msg = 'NUM_BRANCHES({}) <> NUM_CHANNELS({})'.format(num_branches, len(num_channels)) raise ValueError(error_msg) if: error_msg = 'NUM_BRANCHES({}) <> NUM_INCHANNELS({})'.format(num_branches, len(in_channels)) raise ValueError(error_msg)",True,num_branches != len(in_channels),num_branches != len(in_channels),0.6431231498718262 5982,"def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None): if: makeDoc = EditorDocument if makeView is None: makeView = EditorView EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView)",True,makeDoc is None,makeDoc is None,0.666877031326294 5983,"def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None): if makeDoc is None: makeDoc = EditorDocument if: makeView = EditorView EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView)",True,makeView is None,makeView is None,0.66578608751297 5984,"def _update_step(self): if: self._step += 1 elif self._mode == 'val': pass else: raise NotImplementedError",True,self._mode == 'train',self._mode == 'train',0.653534471988678 5985,"def _update_step(self): if self._mode == 'train': self._step += 1 elif: pass else: raise NotImplementedError",True,self._mode == 'val',self._mode == 'val',0.6507266759872437 5986,"def setUp(self): self.get_super_method(self,'setUp')() ret = self.validate_peers_are_connected() if: raise ExecutionError('Servers are not in peer probed state') g.log.info('Started creating volume') ret = self.setup_volume_and_mount_volume(self.mounts) if not ret: raise ExecutionError('Volume creation failed: %s' % self.volname) g.log.info('Volume created successfully : %s', self.volname)",True,not ret,not ret,0.664014995098114 5987,"def setUp(self): self.get_super_method(self,'setUp')() ret = self.validate_peers_are_connected() if not ret: raise ExecutionError('Servers are not in peer probed state') g.log.info('Started creating volume') ret = self.setup_volume_and_mount_volume(self.mounts) if: raise ExecutionError('Volume creation failed: %s' % self.volname) g.log.info('Volume created successfully : %s', self.volname)",True,not ret,not ret,0.6598207950592041 5988,"def toXmlName(self, name): nameFirst = name[0] nameRest = name[1:] m = nonXmlNameFirstBMPRegexp.match(nameFirst) if: warnings.warn('Coercing non-XML name', DataLossWarning) nameFirstOutput = self.getReplacementCharacter(nameFirst) else: nameFirstOutput = nameFirst nameRestOutput = nameRest replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) for char in replaceChars: warnings.warn('Coercing non-XML name', DataLossWarning) replacement = self.getReplacementCharacter(char) nameRestOutput = nameRestOutput.replace(char, replacement) return nameFirstOutput + nameRestOutput",True,m,m,0.6751848459243774 5989,"@classmethod def search(cls, handler, query): q = Q(name__icontains=query) sq = ManagedObject.get_search_Q(query) if: q |= sq if not handler.current_user.is_superuser: q &= UserAccess.Q(handler.current_user) r = [] for mo in ManagedObject.objects.filter(q): r += [{'scope':'managedobject', 'id': mo.id, 'label': '%s (%s) [%s]' % (mo.name, mo.address, mo.platform)}] return r",True,sq,sq,0.7205355167388916 5990,"@classmethod def search(cls, handler, query): q = Q(name__icontains=query) sq = ManagedObject.get_search_Q(query) if sq: q |= sq if: q &= UserAccess.Q(handler.current_user) r = [] for mo in ManagedObject.objects.filter(q): r += [{'scope':'managedobject', 'id': mo.id, 'label': '%s (%s) [%s]' % (mo.name, mo.address, mo.platform)}] return r",False,not handler.current_user.is_superuser,handler.current_user,0.6475178003311157 5991,"@classmethod def get_init_valid_params(cls): init_valid_params = {**cls.get_init_train_params(),'scenes': cls.valid_scenes, 'player_screen_height': 224, 'player_screen_width': 224, 'headless': False} if: init_valid_params['save_talk_reply_probs_path'] = cls.save_talk_reply_probs_path return init_valid_params",True,cls.save_talk_reply_probs_path is not None,cls.save_talk_reply_probs_path is not None,0.6467662453651428 5992,"def eval(self, results, epoch): if: fout = open(os.path.join(self.output_dir,'results.txt'), 'a') for res in results: image_id = res[self.key] caption = res[self.value] fout.write('{}\t{}\n'.format(image_id, caption)) fout.close() return results",False,self.output_dir is not None,self.output_dir,0.6494379043579102 5993,"def __init__(self, *args, **kw): super().__init__(*args, **kw) if: self._nusc_infos = list(sorted(self._nusc_infos, key=lambda e: e['timestamp'])) self._nusc_infos = self._nusc_infos[::2]",True,len(self._nusc_infos) > 28000,len(self._nusc_infos) > 28000,0.651129961013794 5994,"def square(number: float) -> float: """"""Return the square of the number."""""" if: return number ** 2 return number ** 3",False,True,number < 1,0.6607375741004944 5995,"def getComponent(self, innerFlag=0): if: raise error.PyAsn1Error('Component not chosen') else: c = self._componentValues[self._currentIdx] if innerFlag and isinstance(c, Choice): return c.getComponent(innerFlag) else: return c",True,self._currentIdx is None,self._currentIdx is None,0.6512144207954407 5996,"def getComponent(self, innerFlag=0): if self._currentIdx is None: raise error.PyAsn1Error('Component not chosen') else: c = self._componentValues[self._currentIdx] if: return c.getComponent(innerFlag) else: return c",False,"innerFlag and isinstance(c, Choice)",innerFlag,0.6452875137329102 5997,"def get_gas_price(self, is_full: bool=False) -> int or dict: """""" This interface is used to get the gas price in current network. Return: the value of gas price. """""" payload = self.generate_json_rpc_payload(RpcMethod.GET_GAS_PRICE) response = self.__post(self._url, payload) if: return response return response['result']['gasprice']",True,is_full,is_full,0.6575177907943726 5998,"def AuthSubTokenFromHttpBody(http_body): """"""Extracts the AuthSub token from an HTTP body string. Used to find the new session token after making a request to upgrade a single use AuthSub token. Args: http_body: str The repsonse from the server which contains the AuthSub key. For example, this function would find the new session token from the server's response to an upgrade token request. Returns: The header value to use for Authorization which contains the AuthSub token. """""" token_value = token_from_http_body(http_body) if: return '%s%s' % (AUTHSUB_AUTH_LABEL, token_value) return None",True,token_value,token_value,0.6543992757797241 5999,"def visit_DockLayout(self, node): """""" The visitor method for a DockLayout node. """""" has_non_floating_area = False for item in node.items: if: if not item.floating: self.warn('non-floating toplevel ItemLayout') elif not item.floating: if has_non_floating_area: self.warn('multiple non-floating AreaLayout items') has_non_floating_area = True self.visit(item)",False,"isinstance(item, ItemLayout)","isinstance(item, DockLayout)",0.6552807092666626 6000,"def visit_DockLayout(self, node): """""" The visitor method for a DockLayout node. """""" has_non_floating_area = False for item in node.items: if isinstance(item, ItemLayout): if: self.warn('non-floating toplevel ItemLayout') elif not item.floating: if has_non_floating_area: self.warn('multiple non-floating AreaLayout items') has_non_floating_area = True self.visit(item)",False,not item.floating,has_non_floating_area,0.6524361371994019 6001,"def visit_DockLayout(self, node): """""" The visitor method for a DockLayout node. """""" has_non_floating_area = False for item in node.items: if isinstance(item, ItemLayout): if not item.floating: self.warn('non-floating toplevel ItemLayout') elif: if has_non_floating_area: self.warn('multiple non-floating AreaLayout items') has_non_floating_area = True self.visit(item)",False,not item.floating,"isinstance(item, AreaLayout)",0.6528353691101074 6002,"def visit_DockLayout(self, node): """""" The visitor method for a DockLayout node. """""" has_non_floating_area = False for item in node.items: if isinstance(item, ItemLayout): if not item.floating: self.warn('non-floating toplevel ItemLayout') elif not item.floating: if: self.warn('multiple non-floating AreaLayout items') has_non_floating_area = True self.visit(item)",True,has_non_floating_area,has_non_floating_area,0.6463700532913208 6003,"def Set_Size(self, widget_width, widget_height): self.widget_width = widget_width self.widget_height = widget_height if: self.other_a = widget_width * 0.4 self.other_b = widget_width * 0.7 else: self.other_a = self.ui_k self.other_b = widget_width - self.ui_k self.update()",False,widget_width <= 200,self.ui_k == -1,0.6612989902496338 6004,"def kern(self) -> None: """""" Insert `Kern` nodes between `Char` nodes to set kerning. The `Char` nodes themselves determine the amount of kerning they need (in `~Char.get_kerning`), and this function just creates the correct linked list. """""" new_children = [] num_children = len(self.children) if: for i in range(num_children): elem = self.children[i] if i < num_children - 1: next = self.children[i + 1] else: next = None new_children.append(elem) kerning_distance = elem.get_kerning(next) if kerning_distance!= 0.0: kern = Kern(kerning_distance) new_children.append(kern) self.children = new_children",False,num_children,num_children > 0,0.6510016918182373 6005,"def kern(self) -> None: """""" Insert `Kern` nodes between `Char` nodes to set kerning. The `Char` nodes themselves determine the amount of kerning they need (in `~Char.get_kerning`), and this function just creates the correct linked list. """""" new_children = [] num_children = len(self.children) if num_children: for i in range(num_children): elem = self.children[i] if: next = self.children[i + 1] else: next = None new_children.append(elem) kerning_distance = elem.get_kerning(next) if kerning_distance!= 0.0: kern = Kern(kerning_distance) new_children.append(kern) self.children = new_children",False,i < num_children - 1,elem.is_kern,0.6486455798149109 6006,"def kern(self) -> None: """""" Insert `Kern` nodes between `Char` nodes to set kerning. The `Char` nodes themselves determine the amount of kerning they need (in `~Char.get_kerning`), and this function just creates the correct linked list. """""" new_children = [] num_children = len(self.children) if num_children: for i in range(num_children): elem = self.children[i] if i < num_children - 1: next = self.children[i + 1] else: next = None new_children.append(elem) kerning_distance = elem.get_kerning(next) if: kern = Kern(kerning_distance) new_children.append(kern) self.children = new_children",False,kerning_distance != 0.0,kerning_distance is not None,0.6498175859451294 6007,"def predict(self, X): """"""Predict with fitted weights."""""" if: raise NotFittedError(""Estimator not fitted. Call 'fit' first."") X = check_array(X, accept_sparse=False) return np.dot(X, self.coef_.T)",False,"not hasattr(self, 'coef_')",self.fitted,0.6468791961669922 6008,"def maybe_record_provenance(frame: Frame, name: str, provider_type: str) -> None: if: _PROVIDER_PROVENANCE_LOOKUP[name] = (provider_type, informative_upstream_callsites_from_frame(frame))",False,registration_provenance_tracking_is_on(),name not in _PROVIDER_PROVENANCE_LOOKUP,0.6502106189727783 6009,"@property def prefix(self): if: return self.path.nlri.addr + '/' + str(self.path.nlri.length) elif isinstance(self.path, Vpnv4Path) or isinstance(self.path, Vpnv6Path) or isinstance(self.path, EvpnPath): return self.path.nlri.prefix else: return None",False,"isinstance(self.path, Ipv4Path) or isinstance(self.path, Ipv6Path)","isinstance(self.path, VpnPath)",0.6449025273323059 6010,"@property def prefix(self): if isinstance(self.path, Ipv4Path) or isinstance(self.path, Ipv6Path): return self.path.nlri.addr + '/' + str(self.path.nlri.length) elif: return self.path.nlri.prefix else: return None",False,"isinstance(self.path, Vpnv4Path) or isinstance(self.path, Vpnv6Path) or isinstance(self.path, EvpnPath)","isinstance(self.path, Path)",0.6482518911361694 6011,"def __init__(self, partition2workers: List[List[str]]): for pidx, rpc_worker_list in enumerate(partition2workers): if: raise ValueError(f""'RpcDataPartitionRouter': no rpc worker is responsible for data partition '{pidx}'."") self.partition2workers = partition2workers self.rpc_worker_indexs = [0 for _ in range(len(partition2workers))]",False,len(rpc_worker_list) == 0,rpc_worker_list[0] is None,0.6543548107147217 6012,"def _get_body_string(self): """""" read body until content-length or MEMFILE_MAX into a string. Raise HTTPError(413) on requests that are to large. """""" clen = self.content_length if: raise HTTPError(413, 'Request to large') if clen < 0: clen = self.MEMFILE_MAX + 1 data = self.body.read(clen) if len(data) > self.MEMFILE_MAX: raise HTTPError(413, 'Request to large') return data",False,clen > self.MEMFILE_MAX,clen > self.content_length,0.6505470275878906 6013,"def _get_body_string(self): """""" read body until content-length or MEMFILE_MAX into a string. Raise HTTPError(413) on requests that are to large. """""" clen = self.content_length if clen > self.MEMFILE_MAX: raise HTTPError(413, 'Request to large') if: clen = self.MEMFILE_MAX + 1 data = self.body.read(clen) if len(data) > self.MEMFILE_MAX: raise HTTPError(413, 'Request to large') return data",False,clen < 0,clen == 0,0.666481614112854 6014,"def _get_body_string(self): """""" read body until content-length or MEMFILE_MAX into a string. Raise HTTPError(413) on requests that are to large. """""" clen = self.content_length if clen > self.MEMFILE_MAX: raise HTTPError(413, 'Request to large') if clen < 0: clen = self.MEMFILE_MAX + 1 data = self.body.read(clen) if: raise HTTPError(413, 'Request to large') return data",True,len(data) > self.MEMFILE_MAX,len(data) > self.MEMFILE_MAX,0.6465579271316528 6015,"def _get_log_stream_buffer_size(filename: str) -> int: if: return -1 if D2_LOG_BUFFER_SIZE_KEY in os.environ: return int(os.environ[D2_LOG_BUFFER_SIZE_KEY]) return DEFAULT_LOG_BUFFER_SIZE",False,'://' not in filename,filename.endswith('.log'),0.6590899229049683 6016,"def _get_log_stream_buffer_size(filename: str) -> int: if '://' not in filename: return -1 if: return int(os.environ[D2_LOG_BUFFER_SIZE_KEY]) return DEFAULT_LOG_BUFFER_SIZE",True,D2_LOG_BUFFER_SIZE_KEY in os.environ,D2_LOG_BUFFER_SIZE_KEY in os.environ,0.6524333953857422 6017,"def set_database_path(_config_obj, _section, _key): if: set_path(_config_obj, _section, _key, _config_obj.data['paths']['data_dir'], 'db')",True,_config_obj.data[_section][_key] is None,_config_obj.data[_section][_key] is None,0.6450611352920532 6018,"def populate_asset_date_deployed(apps, schema_editor): if: print('\n !!! ATTENTION!!!\n If you have existing projects you need to run this management command:\n\n > python manage.py populate_asset_date_deployed\n\n Otherwise, project views API will not be able to sort by `date_deployed`\n ') else: print('\n This might take a while. If it is too slow, you may want to re-run the\n migration with SKIP_HEAVY_MIGRATIONS=True and run the management command\n `populate_asset_date_deployed`.\n ') call_command('populate_asset_date_deployed', verbosity=0)",True,settings.SKIP_HEAVY_MIGRATIONS,settings.SKIP_HEAVY_MIGRATIONS,0.6456403732299805 6019,"def add_params_to_uri(uri, params, fragment=False): """"""Add a list of two-tuples to the uri query components."""""" sch, net, path, par, query, fra = urlparse.urlparse(uri) if: fra = add_params_to_qs(query, params) else: query = add_params_to_qs(query, params) return urlparse.urlunparse((sch, net, path, par, query, fra))",True,fragment,fragment,0.662326455116272 6020,"def get_real_name(self): """"""Returns the real name (object name) of this identifier."""""" dot = self.token_next_match(0, T.Punctuation, '.') if: return self.token_next_by_type(0, T.Name).value else: next_ = self.token_next_by_type(self.token_index(dot), (T.Name, T.Wildcard)) if next_ is None: return None return next_.value",True,dot is None,dot is None,0.6507800817489624 6021,"def get_real_name(self): """"""Returns the real name (object name) of this identifier."""""" dot = self.token_next_match(0, T.Punctuation, '.') if dot is None: return self.token_next_by_type(0, T.Name).value else: next_ = self.token_next_by_type(self.token_index(dot), (T.Name, T.Wildcard)) if: return None return next_.value",False,next_ is None,next_.is_empty or next_.value is None,0.6511273384094238 6022,"def __eq__(self, other): ret = self.cmp_base(other) if: return False return self.name == other.name",True,ret,ret,0.6798872947692871 6023,"def __init__(self, job, timeout=10): scan.__init__(self, job) setattr(self, 'datasize', 0) if: self.type = job[0].split('|')[1] self.port = job[0].split('|')[2] self.scan_type = _whats_your_name() if timeout >= 60: self.timeout = 59 else: self.timeout = timeout",True,len(job) > 1,len(job) > 1,0.6528384685516357 6024,"def __init__(self, job, timeout=10): scan.__init__(self, job) setattr(self, 'datasize', 0) if len(job) > 1: self.type = job[0].split('|')[1] self.port = job[0].split('|')[2] self.scan_type = _whats_your_name() if: self.timeout = 59 else: self.timeout = timeout",False,timeout >= 60,timeout < 10,0.6693509817123413 6025,"def process_docstring(app, what, name, obj, options, lines): """"""Process the docstring for a given python object. Called when autodoc has read and processed a docstring. `lines` is a list of docstring lines that `_process_docstring` modifies in place to change what Sphinx outputs. The following settings in conf.py control what styles of docstrings will be parsed: * ``napoleon_google_docstring`` -- parse Google style docstrings * ``napoleon_numpy_docstring`` -- parse NumPy style docstrings Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process. what : str A string specifying the type of the object to which the docstring belongs. Valid values: ""module"", ""class"", ""exception"", ""function"", ""method"", ""attribute"". name : str The fully qualified name of the object. obj : module, class, exception, function, method, or attribute The object to which the docstring belongs. options : sphinx.ext.autodoc.Options The options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are True if the flag option of same name was given to the auto directive. lines : list of str The lines of the docstring, see above. .. note:: `lines` is modified *in place* Notes ----- This function is (to most parts) taken from the :mod:`sphinx.ext.napoleon` module, sphinx version 1.3.1, and adapted to the classes defined here"""""" result_lines = lines if: docstring = ExtendedNumpyDocstring(result_lines, app.config, app, what, name, obj, options) result_lines = docstring.lines() if app.config.napoleon_google_docstring: docstring = ExtendedGoogleDocstring(result_lines, app.config, app, what, name, obj, options) result_lines = docstring.lines() lines[:] = result_lines",False,app.config.napoleon_numpy_docstring,app.config.numpy_docstring,0.6464402675628662 6026,"def process_docstring(app, what, name, obj, options, lines): """"""Process the docstring for a given python object. Called when autodoc has read and processed a docstring. `lines` is a list of docstring lines that `_process_docstring` modifies in place to change what Sphinx outputs. The following settings in conf.py control what styles of docstrings will be parsed: * ``napoleon_google_docstring`` -- parse Google style docstrings * ``napoleon_numpy_docstring`` -- parse NumPy style docstrings Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process. what : str A string specifying the type of the object to which the docstring belongs. Valid values: ""module"", ""class"", ""exception"", ""function"", ""method"", ""attribute"". name : str The fully qualified name of the object. obj : module, class, exception, function, method, or attribute The object to which the docstring belongs. options : sphinx.ext.autodoc.Options The options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are True if the flag option of same name was given to the auto directive. lines : list of str The lines of the docstring, see above. .. note:: `lines` is modified *in place* Notes ----- This function is (to most parts) taken from the :mod:`sphinx.ext.napoleon` module, sphinx version 1.3.1, and adapted to the classes defined here"""""" result_lines = lines if app.config.napoleon_numpy_docstring: docstring = ExtendedNumpyDocstring(result_lines, app.config, app, what, name, obj, options) result_lines = docstring.lines() if: docstring = ExtendedGoogleDocstring(result_lines, app.config, app, what, name, obj, options) result_lines = docstring.lines() lines[:] = result_lines",True,app.config.napoleon_google_docstring,app.config.napoleon_google_docstring,0.6468740701675415 6027,"def filter(self, record: logging.LogRecord) -> bool: if: record.msg = f'{self.field} {self.value} - {record.msg}' return True",False,self.value,self.field and self.value and (not record.msg.startswith(':')),0.657547652721405 6028,"def validate_AWBNumber(self, value): result = True if: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False if len(value) > 11: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s does not match xsd maxLength restriction on AWBNumber' % {'value': encode_str_2_3(value), 'lineno': lineno}) result = False return result",True,value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None),value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None),0.6462996602058411 6029,"def validate_AWBNumber(self, value): result = True if value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None): if: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False if len(value) > 11: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s does not match xsd maxLength restriction on AWBNumber' % {'value': encode_str_2_3(value), 'lineno': lineno}) result = False return result",True,"not isinstance(value, str)","not isinstance(value, str)",0.6451764106750488 6030,"def validate_AWBNumber(self, value): result = True if value is not None and Validate_simpletypes_ and (self.gds_collector_ is not None): if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s is not of the correct base simple type (str)' % {'value': value, 'lineno': lineno}) return False if: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value ""%(value)s""%(lineno)s does not match xsd maxLength restriction on AWBNumber' % {'value': encode_str_2_3(value), 'lineno': lineno}) result = False return result",False,len(value) > 11,len(value) > 35,0.6492421627044678 6031,"def is_relevant(self): if: return False return super().is_relevant()",False,self.component.project.component_set.exclude(is_glossary=True).count() <= 1,"self.app.config.getoption('check_for_lua_test', False)",0.6433730125427246 6032,"def get_headers(self): self.ensure_one() if: api_key = self.secret_key_test else: api_key = self.secret_key b64_auth_key = b64encode(api_key.encode()).decode().replace('=', '') headers = {'Authorization': 'Basic'+ b64_auth_key, 'Content-Type': 'application/json'} return headers",True,self.test_mode,self.test_mode,0.6480203866958618 6033,"def could_use_op(input): if: return False if input.device.type!= 'cuda': return False if any((torch.__version__.startswith(x) for x in ['1.7.', '1.8.'])): return True warnings.warn(f'conv2d_gradfix not supported on PyTorch {torch.__version__}. Falling back to torch.nn.functional.conv2d().') return False",True,not enabled or not torch.backends.cudnn.enabled,not enabled or not torch.backends.cudnn.enabled,0.6467422246932983 6034,"def could_use_op(input): if not enabled or not torch.backends.cudnn.enabled: return False if: return False if any((torch.__version__.startswith(x) for x in ['1.7.', '1.8.'])): return True warnings.warn(f'conv2d_gradfix not supported on PyTorch {torch.__version__}. Falling back to torch.nn.functional.conv2d().') return False",True,input.device.type != 'cuda',input.device.type != 'cuda',0.6485928297042847 6035,"def could_use_op(input): if not enabled or not torch.backends.cudnn.enabled: return False if input.device.type!= 'cuda': return False if: return True warnings.warn(f'conv2d_gradfix not supported on PyTorch {torch.__version__}. Falling back to torch.nn.functional.conv2d().') return False",False,"any((torch.__version__.startswith(x) for x in ['1.7.', '1.8.']))","any((torch.__version__.startswith(x) for x in ['1.7.', '1.8.', '1.9']))",0.6475076079368591 6036,"@property def average_node_to_edge(self): if: n_edges = self.n_edges ind_ptr = 2 * np.arange(n_edges + 1) col_inds = self._edges.reshape(-1) Aij = np.full(2 * n_edges, 0.5) self._average_node_to_edge = sp.csr_matrix((Aij, col_inds, ind_ptr), shape=(n_edges, self.n_nodes)) return self._average_node_to_edge",True,"getattr(self, '_average_node_to_edge', None) is None","getattr(self, '_average_node_to_edge', None) is None",0.6472365260124207 6037,"def __getitem__(self, i): if: try: index = next(self.iter_sampler) except StopIteration: self.iter_sampler = iter(self.sampler) index = next(self.iter_sampler) else: index = i fname, fname2 = self.samples[index] label = self.targets[index] return (fname, fname2, label)",True,self.use_sampler,self.use_sampler,0.6538046598434448 6038,"def initialization(self): self._database_connection() if: raise PullerInitializationException('No filters') for _, dispatcher in self.state.report_filter.filters: dispatcher.connect_data()",False,not self.state.report_filter.filters,not self.state.report_filter,0.6479483842849731 6039,"def __init__(self, dataclass_types: Union[DataClassType, Iterable[DataClassType]], **kwargs): """""" Args: dataclass_types: Dataclass type, or list of dataclass types for which we will ""fill"" instances with the parsed args. kwargs: (Optional) Passed to `argparse.ArgumentParser()` in the regular way. """""" super().__init__(**kwargs) if: dataclass_types = [dataclass_types] self.dataclass_types = dataclass_types for dtype in self.dataclass_types: self._add_dataclass_arguments(dtype)",False,dataclasses.is_dataclass(dataclass_types),"not isinstance(dataclass_types, DataClassType)",0.6498691439628601 6040,"def translate(self, trans_vector): """"""Translate boxes with the given translation vector. Args: trans_vector (torch.Tensor): Translation vector of size 1x3. """""" if: trans_vector = self.tensor.new_tensor(trans_vector) self.tensor[:, :3] += trans_vector",True,"not isinstance(trans_vector, torch.Tensor)","not isinstance(trans_vector, torch.Tensor)",0.6481316089630127 6041,"def update(self, dis_dict, delta=None): for layer_name in self.distant_dict.keys(): self.distant_dict[layer_name] += dis_dict[layer_name] self.flann_dict[layer_name].build_index(np.array(self.distant_dict[layer_name])) if: self.current += delta else: self.current += self.all_coverage(dis_dict)",True,delta,delta,0.6749968528747559 6042,"@property def get_print_dict(self): evals = self.outputs printdict = {} if: if 'printdata' in evals['semgraph']: printdict = evals['semgraph'] if 'attribute' in evals: printdict['attribute'] = evals['attribute'] if 'deptree' in evals: if 'printdata' in evals['deptree']: printdict = evals['deptree'] return printdict",True,'semgraph' in evals,'semgraph' in evals,0.6681829690933228 6043,"@property def get_print_dict(self): evals = self.outputs printdict = {} if'semgraph' in evals: if 'printdata' in evals['semgraph']: printdict = evals['semgraph'] if 'attribute' in evals: printdict['attribute'] = evals['attribute'] if: if 'printdata' in evals['deptree']: printdict = evals['deptree'] return printdict",False,'deptree' in evals,'dptree' in evals,0.6639930605888367 6044,"@property def get_print_dict(self): evals = self.outputs printdict = {} if'semgraph' in evals: if: printdict = evals['semgraph'] if 'attribute' in evals: printdict['attribute'] = evals['attribute'] if 'deptree' in evals: if 'printdata' in evals['deptree']: printdict = evals['deptree'] return printdict",True,'printdata' in evals['semgraph'],'printdata' in evals['semgraph'],0.6571205854415894 6045,"@property def get_print_dict(self): evals = self.outputs printdict = {} if'semgraph' in evals: if 'printdata' in evals['semgraph']: printdict = evals['semgraph'] if: printdict['attribute'] = evals['attribute'] if 'deptree' in evals: if 'printdata' in evals['deptree']: printdict = evals['deptree'] return printdict",True,'attribute' in evals,'attribute' in evals,0.6661165952682495 6046,"@property def get_print_dict(self): evals = self.outputs printdict = {} if'semgraph' in evals: if 'printdata' in evals['semgraph']: printdict = evals['semgraph'] if 'attribute' in evals: printdict['attribute'] = evals['attribute'] if 'deptree' in evals: if: printdict = evals['deptree'] return printdict",False,'printdata' in evals['deptree'],'dptree' in evals,0.6533496379852295 6047,"def serialize_body(self) -> bytes: if: raise InvalidFrameError('PING frame may not have more than 8 bytes of data, got %r' % self.opaque_data) data = self.opaque_data data += b'\x00' * (8 - len(self.opaque_data)) return data",True,len(self.opaque_data) > 8,len(self.opaque_data) > 8,0.6529414653778076 6048,"def start_cycle_later(self, delay): if: self.delay_call.cancel() self.delay_call = None if self.cycle_loop and self.cycle_loop.running: self.cycle_loop.stop() self.cycle_loop = LoopingCall(self.cycle) if delay > 0.0: self.delay_call = callLater(delay, self.cycle_loop.start, self.speed) else: self.cycle_loop.start(self.speed)",False,self.delay_call and self.delay_call.active(),self.delay_call and self.delay_call.cancelled(),0.6478732824325562 6049,"def start_cycle_later(self, delay): if self.delay_call and self.delay_call.active(): self.delay_call.cancel() self.delay_call = None if: self.cycle_loop.stop() self.cycle_loop = LoopingCall(self.cycle) if delay > 0.0: self.delay_call = callLater(delay, self.cycle_loop.start, self.speed) else: self.cycle_loop.start(self.speed)",True,self.cycle_loop and self.cycle_loop.running,self.cycle_loop and self.cycle_loop.running,0.6465160250663757 6050,"def start_cycle_later(self, delay): if self.delay_call and self.delay_call.active(): self.delay_call.cancel() self.delay_call = None if self.cycle_loop and self.cycle_loop.running: self.cycle_loop.stop() self.cycle_loop = LoopingCall(self.cycle) if: self.delay_call = callLater(delay, self.cycle_loop.start, self.speed) else: self.cycle_loop.start(self.speed)",False,delay > 0.0,delay,0.6543070077896118 6051,"def forward_dummy(self, x, proposals): """"""Dummy forward function."""""" outs = () rois = bbox2roi([proposals]) if: bbox_results = self._bbox_forward(x, rois) outs = outs + (bbox_results['cls_score'], bbox_results['bbox_pred']) if self.with_mask: mask_rois = rois[:100] mask_results = self._mask_forward(x, mask_rois) outs = outs + (mask_results['mask_pred'],) return outs",True,self.with_bbox,self.with_bbox,0.6509759426116943 6052,"def forward_dummy(self, x, proposals): """"""Dummy forward function."""""" outs = () rois = bbox2roi([proposals]) if self.with_bbox: bbox_results = self._bbox_forward(x, rois) outs = outs + (bbox_results['cls_score'], bbox_results['bbox_pred']) if: mask_rois = rois[:100] mask_results = self._mask_forward(x, mask_rois) outs = outs + (mask_results['mask_pred'],) return outs",True,self.with_mask,self.with_mask,0.6510191559791565 6053,"def __init__(self, num_pos_feats: int=64, scale: Optional[float]=None) -> None: super().__init__() if: scale = 1.0 self.register_buffer('positional_encoding_gaussian_matrix', scale * torch.randn((2, num_pos_feats)))",False,scale is None or scale <= 0.0,scale is None,0.6564701199531555 6054,"def run(self): self.remove_output_on_overwrite() super(LastDailyIpAddressOfUserTask, self).run() for date in self.interval: url = self.output_path_for_key(date.isoformat()) target = get_target_from_url(url) if: target.open('w').close()",False,not target.exists(),target and target.exists(),0.6522923707962036 6055,"def heappop(heap): """"""Pop the smallest item off the heap, maintaining the heap invariant."""""" lastelt = heap.pop() if: returnitem = heap[0] heap[0] = lastelt _siftup(heap, 0) else: returnitem = lastelt return returnitem",False,heap,len(heap) > 1,0.673172116279602 6056,"def get_updated_taxid(self, taxid): """""" Return current taxid, in case it was merged @attention: taxid is not accepted as digit!!! @param taxid: ncbi taxonomic identifier @type taxid: str @return: ncbi taxonomic identifier @rtype: str | unicode """""" assert isinstance(taxid, str) if: return taxid if taxid not in NcbiTaxonomy.taxid_old_to_taxid_new: self._logger.error(""Invalid taxid: '{}'"".format(taxid)) raise ValueError('Invalid taxid') taxid_new = NcbiTaxonomy.taxid_old_to_taxid_new[taxid] self._logger.warning(""Merged id: '{}' -> '{}'"".format(taxid, taxid_new)) return taxid_new",False,taxid in NcbiTaxonomy.taxid_to_rank,taxid in NcbiTaxonomy.taxid_empty,0.6482677459716797 6057,"def get_updated_taxid(self, taxid): """""" Return current taxid, in case it was merged @attention: taxid is not accepted as digit!!! @param taxid: ncbi taxonomic identifier @type taxid: str @return: ncbi taxonomic identifier @rtype: str | unicode """""" assert isinstance(taxid, str) if taxid in NcbiTaxonomy.taxid_to_rank: return taxid if: self._logger.error(""Invalid taxid: '{}'"".format(taxid)) raise ValueError('Invalid taxid') taxid_new = NcbiTaxonomy.taxid_old_to_taxid_new[taxid] self._logger.warning(""Merged id: '{}' -> '{}'"".format(taxid, taxid_new)) return taxid_new",True,taxid not in NcbiTaxonomy.taxid_old_to_taxid_new,taxid not in NcbiTaxonomy.taxid_old_to_taxid_new,0.6484701037406921 6058,"def _ensure_rules_action_has_arn(self, rules): """""" If a rule Action has been passed with a Target Group Name instead of ARN, lookup the ARN and replace the name. :param rules: a list of rule dicts :return: the same list of dicts ensuring that each rule Actions dict has TargetGroupArn key. If a TargetGroupName key exists, it is removed. """""" fixed_rules = [] for rule in rules: fixed_actions = [] for action in rule['Actions']: if: action['TargetGroupArn'] = convert_tg_name_to_arn(self.connection, self.module, action['TargetGroupName']) del action['TargetGroupName'] fixed_actions.append(action) rule['Actions'] = fixed_actions fixed_rules.append(rule) return fixed_rules",False,'TargetGroupName' in action,action['TargetGroupArn'] == self.api_host_value,0.6578398942947388 6059,"def _balanced_latin_square_sequence(n_elements, row): """"""helper function: creates a sequence for a balanced latin square Based on ""Bradley, J. V. Complete counterbalancing of immediate sequential effects in a Latin square design. J. Amer. Statist. Ass.,.1958, 53, 525-528. "" """""" result = [] j = 0 h = 0 for i in range(n_elements): if i < 2 or i % 2!= 0: val = j j += 1 else: val = n_elements - h - 1 h += 1 result.append((val + row) % n_elements) if: return list(reversed(result)) else: return result",False,n_elements % 2 != 0 and row % 2 != 0,n_elements > 1,0.6488087177276611 6060,"def _balanced_latin_square_sequence(n_elements, row): """"""helper function: creates a sequence for a balanced latin square Based on ""Bradley, J. V. Complete counterbalancing of immediate sequential effects in a Latin square design. J. Amer. Statist. Ass.,.1958, 53, 525-528. "" """""" result = [] j = 0 h = 0 for i in range(n_elements): if: val = j j += 1 else: val = n_elements - h - 1 h += 1 result.append((val + row) % n_elements) if n_elements % 2!= 0 and row % 2!= 0: return list(reversed(result)) else: return result",False,i < 2 or i % 2 != 0,i == n_elements - 1,0.6541219353675842 6061,"@torch.no_grad() def step(self): for group in self.param_groups: for p in group['params']: if: continue p.sub_(self.state[p]['e_w']) self.base_optimizer.step()",False,p.grad is None,p.requires_grad,0.6495254635810852 6062,"def __init__(self, action_space, reward_fn, observation_space, params): self.initial_params = copy.deepcopy(params) if: reward_fn = rewards.NullReward() super(_BaseAgent, self).__init__(action_space, reward_fn, observation_space) self.rng = np.random.RandomState()",True,reward_fn is None,reward_fn is None,0.6506838798522949 6063,"def writeAntonyms(self, hf: 'T_htmlfile', antonyms: 'list[dict[str, str]] | None') -> None: if: return with hf.element('div'): hf.write('Antonyms: ') for i, item in enumerate(antonyms): if i > 0: hf.write(', ') word = item.get('word') if not word: continue self.addWordLink(hf, word)",False,not antonyms,antonyms is None,0.6479564905166626 6064,"def writeAntonyms(self, hf: 'T_htmlfile', antonyms: 'list[dict[str, str]] | None') -> None: if not antonyms: return with hf.element('div'): hf.write('Antonyms: ') for i, item in enumerate(antonyms): if: hf.write(', ') word = item.get('word') if not word: continue self.addWordLink(hf, word)",False,i > 0,item['href'],0.6676362156867981 6065,"def writeAntonyms(self, hf: 'T_htmlfile', antonyms: 'list[dict[str, str]] | None') -> None: if not antonyms: return with hf.element('div'): hf.write('Antonyms: ') for i, item in enumerate(antonyms): if i > 0: hf.write(', ') word = item.get('word') if: continue self.addWordLink(hf, word)",True,not word,not word,0.6633734703063965 6066,"def update_user_related_records(sender, instance, created, **kwargs): if: return instance.radiususergroup_set.update(username=instance.username) instance.radiuscheck_set.update(username=instance.username) instance.radiusreply_set.update(username=instance.username)",False,created,not created,0.6672160625457764 6067,"def pairs(value, n_bytes): for n in range(n_bytes): yield (0, 0) if: yield (0, value[0]) for n in range(len(value) - 1 - n_bytes): yield (value[n], value[n + 1])",False,n_bytes < len(value),len(value) == 1,0.6498988270759583 6068,"def check_env(config) -> bool: """"""Check security env."""""" if: print('Vega can not run on TensorFlow in the security model.') return False pipeline = config.get('pipeline', []) for step in pipeline: if step in config: if config[step]['pipe_step']['type'] == 'HorovodTrainStep': print('Vega can not run on Horovod in the security model.') return False return True",False,"os.environ.get('BACKEND_TYPE', None) == 'TENSORFLOW'",config.get('pipeline') is None,0.6440946459770203 6069,"def check_env(config) -> bool: """"""Check security env."""""" if os.environ.get('BACKEND_TYPE', None) == 'TENSORFLOW': print('Vega can not run on TensorFlow in the security model.') return False pipeline = config.get('pipeline', []) for step in pipeline: if: if config[step]['pipe_step']['type'] == 'HorovodTrainStep': print('Vega can not run on Horovod in the security model.') return False return True",True,step in config,step in config,0.6600682735443115 6070,"def check_env(config) -> bool: """"""Check security env."""""" if os.environ.get('BACKEND_TYPE', None) == 'TENSORFLOW': print('Vega can not run on TensorFlow in the security model.') return False pipeline = config.get('pipeline', []) for step in pipeline: if step in config: if: print('Vega can not run on Horovod in the security model.') return False return True",False,config[step]['pipe_step']['type'] == 'HorovodTrainStep',"os.environ.get('BACKEND_TYPE', None) != step",0.6414752006530762 6071,"def event_to_time_slot_str_node(event_entry: EventEntry): params = [] if: params.append(f'start={datetime_to_str_node(event_entry.starts_at)}') if event_entry.ends_at is not None: params.append(f'end={datetime_to_str_node(event_entry.ends_at)}') if len(params) > 1: params.append(f'duration={duration_to_str_node(event_entry.starts_at, event_entry.ends_at)}') if params: return f""TimeSlot({', '.join(params)})"" return None",True,event_entry.starts_at is not None,event_entry.starts_at is not None,0.6470268964767456 6072,"def event_to_time_slot_str_node(event_entry: EventEntry): params = [] if event_entry.starts_at is not None: params.append(f'start={datetime_to_str_node(event_entry.starts_at)}') if: params.append(f'end={datetime_to_str_node(event_entry.ends_at)}') if len(params) > 1: params.append(f'duration={duration_to_str_node(event_entry.starts_at, event_entry.ends_at)}') if params: return f""TimeSlot({', '.join(params)})"" return None",True,event_entry.ends_at is not None,event_entry.ends_at is not None,0.6474400758743286 6073,"def event_to_time_slot_str_node(event_entry: EventEntry): params = [] if event_entry.starts_at is not None: params.append(f'start={datetime_to_str_node(event_entry.starts_at)}') if event_entry.ends_at is not None: params.append(f'end={datetime_to_str_node(event_entry.ends_at)}') if: params.append(f'duration={duration_to_str_node(event_entry.starts_at, event_entry.ends_at)}') if params: return f""TimeSlot({', '.join(params)})"" return None",False,len(params) > 1,event_entry.starts_at is not None and event_entry.ends_at is not None,0.6480274200439453 6074,"def event_to_time_slot_str_node(event_entry: EventEntry): params = [] if event_entry.starts_at is not None: params.append(f'start={datetime_to_str_node(event_entry.starts_at)}') if event_entry.ends_at is not None: params.append(f'end={datetime_to_str_node(event_entry.ends_at)}') if len(params) > 1: params.append(f'duration={duration_to_str_node(event_entry.starts_at, event_entry.ends_at)}') if: return f""TimeSlot({', '.join(params)})"" return None",True,params,params,0.6717274785041809 6075,"def _forward_shared(self, x): """"""Forward function for shared part."""""" if: for conv in self.shared_convs: x = conv(x) if self.num_shared_fcs > 0: if self.with_avg_pool: x = self.avg_pool(x) x = x.flatten(1) for fc in self.shared_fcs: x = self.relu(fc(x)) return x",True,self.num_shared_convs > 0,self.num_shared_convs > 0,0.64601069688797 6076,"def _forward_shared(self, x): """"""Forward function for shared part."""""" if self.num_shared_convs > 0: for conv in self.shared_convs: x = conv(x) if: if self.with_avg_pool: x = self.avg_pool(x) x = x.flatten(1) for fc in self.shared_fcs: x = self.relu(fc(x)) return x",True,self.num_shared_fcs > 0,self.num_shared_fcs > 0,0.6438944339752197 6077,"def _forward_shared(self, x): """"""Forward function for shared part."""""" if self.num_shared_convs > 0: for conv in self.shared_convs: x = conv(x) if self.num_shared_fcs > 0: if: x = self.avg_pool(x) x = x.flatten(1) for fc in self.shared_fcs: x = self.relu(fc(x)) return x",True,self.with_avg_pool,self.with_avg_pool,0.6450808048248291 6078,"def hexdigest(self): retval = b2a_hex(self.digest()) if: return retval else: return retval.decode()",False,sys.version_info[0] == 2,retval.version == 2,0.6449540853500366 6079,"def test_lookup_by_markup_type(self): if: self.assertEqual(registry.lookup('html'), LXMLTreeBuilder) self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML) else: self.assertEqual(registry.lookup('xml'), None) if HTML5LIB_PRESENT: self.assertEqual(registry.lookup('html'), HTML5TreeBuilder) else: self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)",True,LXML_PRESENT,LXML_PRESENT,0.650579571723938 6080,"def test_lookup_by_markup_type(self): if LXML_PRESENT: self.assertEqual(registry.lookup('html'), LXMLTreeBuilder) self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML) else: self.assertEqual(registry.lookup('xml'), None) if: self.assertEqual(registry.lookup('html'), HTML5TreeBuilder) else: self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)",False,HTML5LIB_PRESENT,HTML5TreeBuilder,0.6502025127410889 6081,"def hmValidateInput(inVal): if: return ""'"" + inVal + ""' does not exist!"" return None",False,os.path.exists(inVal) is not True,inVal != 'file',0.6458693742752075 6082,"def write(data): if: data = str(data) fp.write(data)",True,"not isinstance(data, basestring)","not isinstance(data, basestring)",0.6453970074653625 6083,"def size(self): """"""Returns the total size of the RangeSet (ie, how many integers are in the set). >>> RangeSet(""10-19 30-34"").size() 15 """""" total = 0 for i, p in enumerate(self.data): if: total += p else: total -= p return total",True,i % 2,i % 2,0.6709581613540649 6084,"def _log_data(self, net_info_type='active_only', pop=None, value=0): """"""Get the evolution and network information of children. :param net_info_type: defaults to 'active_only' :type net_info_type: str :param pop: defaults to None :type pop: list :param value: defaults to 0 :type value: int :return: log_list :rtype: list """""" log_list = [value, pop.parameter, pop.flops] if: log_list.append(pop.active_net_list()) elif net_info_type == 'full': log_list += pop.gene.flatten().tolist() else: pass return log_list",True,net_info_type == 'active_only',net_info_type == 'active_only',0.6464889645576477 6085,"def _log_data(self, net_info_type='active_only', pop=None, value=0): """"""Get the evolution and network information of children. :param net_info_type: defaults to 'active_only' :type net_info_type: str :param pop: defaults to None :type pop: list :param value: defaults to 0 :type value: int :return: log_list :rtype: list """""" log_list = [value, pop.parameter, pop.flops] if net_info_type == 'active_only': log_list.append(pop.active_net_list()) elif: log_list += pop.gene.flatten().tolist() else: pass return log_list",False,net_info_type == 'full',net_info_type == 'gene',0.6457709074020386 6086,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if: if _system_folder1: yield _system_folder1 if _system_folder2: yield _system_folder2 if _system_folder3: yield _system_folder3 if _system_folder4: yield _system_folder4 if _system_folder5: yield _system_folder5 if _system_folder6: yield _system_folder6 if _system_folderX: yield _system_folderX",True,SYSTEMD_UNIT_PATH.endswith(':'),SYSTEMD_UNIT_PATH.endswith(':'),0.6414530873298645 6087,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if: yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if _system_folder1: yield _system_folder1 if _system_folder2: yield _system_folder2 if _system_folder3: yield _system_folder3 if _system_folder4: yield _system_folder4 if _system_folder5: yield _system_folder5 if _system_folder6: yield _system_folder6 if _system_folderX: yield _system_folderX",True,path.strip(),path.strip(),0.6536701321601868 6088,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if: yield _system_folder1 if _system_folder2: yield _system_folder2 if _system_folder3: yield _system_folder3 if _system_folder4: yield _system_folder4 if _system_folder5: yield _system_folder5 if _system_folder6: yield _system_folder6 if _system_folderX: yield _system_folderX",True,_system_folder1,_system_folder1,0.655576765537262 6089,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if _system_folder1: yield _system_folder1 if: yield _system_folder2 if _system_folder3: yield _system_folder3 if _system_folder4: yield _system_folder4 if _system_folder5: yield _system_folder5 if _system_folder6: yield _system_folder6 if _system_folderX: yield _system_folderX",True,_system_folder2,_system_folder2,0.6553324460983276 6090,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if _system_folder1: yield _system_folder1 if _system_folder2: yield _system_folder2 if: yield _system_folder3 if _system_folder4: yield _system_folder4 if _system_folder5: yield _system_folder5 if _system_folder6: yield _system_folder6 if _system_folderX: yield _system_folderX",True,_system_folder3,_system_folder3,0.656520426273346 6091,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if _system_folder1: yield _system_folder1 if _system_folder2: yield _system_folder2 if _system_folder3: yield _system_folder3 if: yield _system_folder4 if _system_folder5: yield _system_folder5 if _system_folder6: yield _system_folder6 if _system_folderX: yield _system_folderX",True,_system_folder4,_system_folder4,0.6530529260635376 6092,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if _system_folder1: yield _system_folder1 if _system_folder2: yield _system_folder2 if _system_folder3: yield _system_folder3 if _system_folder4: yield _system_folder4 if: yield _system_folder5 if _system_folder6: yield _system_folder6 if _system_folderX: yield _system_folderX",True,_system_folder5,_system_folder5,0.6531875133514404 6093,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if _system_folder1: yield _system_folder1 if _system_folder2: yield _system_folder2 if _system_folder3: yield _system_folder3 if _system_folder4: yield _system_folder4 if _system_folder5: yield _system_folder5 if: yield _system_folder6 if _system_folderX: yield _system_folderX",True,_system_folder6,_system_folder6,0.654258131980896 6094,"def system_folders(self): SYSTEMD_UNIT_PATH = self.get_SYSTEMD_UNIT_PATH() for path in SYSTEMD_UNIT_PATH.split(':'): if path.strip(): yield expand_path(path.strip()) if SYSTEMD_UNIT_PATH.endswith(':'): if _system_folder1: yield _system_folder1 if _system_folder2: yield _system_folder2 if _system_folder3: yield _system_folder3 if _system_folder4: yield _system_folder4 if _system_folder5: yield _system_folder5 if _system_folder6: yield _system_folder6 if: yield _system_folderX",True,_system_folderX,_system_folderX,0.6524566411972046 6095,"def gather_elements(self, client, node, style): rows = [client.gen_elements(n) for n in node.children] t = [] for r in rows: if: continue t.append(r) t_style = TableStyle(client.styles['table'].commands) colWidths = client.styles['table'].colWidths return [DelayedTable(t, style=t_style, colWidths=colWidths)]",True,not r,not r,0.6714627742767334 6096,"def __eq__(self, other): """""" Specie is equal to other only if element and oxidation states are exactly the same. """""" if: return False return self.symbol == other.symbol and self._oxi_state == other._oxi_state and (self._properties == other._properties)",False,"not isinstance(other, Specie)","not isinstance(other, Element)",0.6487754583358765 6097,"def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]: """""" The getter yields a tuple (X, y), where: - `X `is a concatenation of all encoded representations of the row. Size: (B, n_features) - `y` is the encoded target. Size: (B, n_features) :param idx: index of the row to access. :return: tuple (X, y) with encoded data. """""" if: X = self.X_cache[idx, :] Y = self.Y_cache[idx] else: X, Y = self._encode_idxs([idx]) if self.use_cache: self.X_cache[idx, :] = X self.Y_cache[idx, :] = Y return (X, Y)",False,self.use_cache and self.X_cache[idx] is not torch.nan,self.use_cache,0.6463552117347717 6098,"def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]: """""" The getter yields a tuple (X, y), where: - `X `is a concatenation of all encoded representations of the row. Size: (B, n_features) - `y` is the encoded target. Size: (B, n_features) :param idx: index of the row to access. :return: tuple (X, y) with encoded data. """""" if self.use_cache and self.X_cache[idx] is not torch.nan: X = self.X_cache[idx, :] Y = self.Y_cache[idx] else: X, Y = self._encode_idxs([idx]) if: self.X_cache[idx, :] = X self.Y_cache[idx, :] = Y return (X, Y)",True,self.use_cache,self.use_cache,0.6520450115203857 6099,"def filter_by_rand(self, p: float, seed: int=None): """"""Keep random sample of `items` with probability `p` and an optional `seed`."""""" if: set_all_seed(seed) return self.filter_by_func(lambda o: rand_bool(p))",True,seed is not None,seed is not None,0.653406023979187 6100,"def output_dsaautotargetperformancereportfilter(data_object): if: return output_status_message('* * * Begin output_dsaautotargetperformancereportfilter * * *') output_status_message('AccountStatus: {0}'.format(data_object.AccountStatus)) output_status_message('AdGroupStatus: {0}'.format(data_object.AdGroupStatus)) output_status_message('BidStrategyType: {0}'.format(data_object.BidStrategyType)) output_status_message('CampaignStatus: {0}'.format(data_object.CampaignStatus)) output_status_message('DynamicAdTargetStatus: {0}'.format(data_object.DynamicAdTargetStatus)) output_status_message('Language: {0}'.format(data_object.Language)) output_status_message('* * * End output_dsaautotargetperformancereportfilter * * *')",True,data_object is None,data_object is None,0.6504815816879272 6101,"@staticmethod def _get_index_handle_from_file(file: File) -> str: for tag in file.tags: if: return tag.value[TagValueKey.STRING_VALUE] raise SteamshipError(f'Could not find index handle on file with id {file.id}')",False,tag.kind == TagKind.CHAT and tag.name == ChatTag.INDEX_HANDLE,tag.name == TagValueKey.INDEX_HARD_Handle,0.648333728313446 6102,"def _cb_item_activated(li, item): callback = item.data['callback'] subitem = item.data['sublist'].selected_item if: callback(subitem.data['url']) else: callback()",False,subitem,subitem and subitem.data['url'],0.6685456037521362 6103,"def get_due(self): if: return self.parse_date(self.record['fields']['duedate']) sprints = self.__get_sprints() for sprint in filter(lambda e: e.get('state', '').lower()!= 'closed', sprints): endDate = sprint['endDate'] if endDate!= '': return self.parse_date(endDate)",False,self.record['fields'].get('duedate'),not self.record['fields'].get('duedate'),0.645400881767273 6104,"def get_due(self): if self.record['fields'].get('duedate'): return self.parse_date(self.record['fields']['duedate']) sprints = self.__get_sprints() for sprint in filter(lambda e: e.get('state', '').lower()!= 'closed', sprints): endDate = sprint['endDate'] if: return self.parse_date(endDate)",False,endDate != '',endDate,0.6466952562332153 6105,"def label_row(self, row): if: return 'Fail' elif row.RD_Median_Separation >= 0.4 and row.BAF1_prob >= 0.9: return 'Pass' else: return 'Unlabeled'",False,row.RD_Median_Separation < 0.15 and row.BAF1_prob < 0.4 and (row.PE_log_pval < -np.log10(0.05)),row.RD_Median_Separation >= 0.3 and row.BAF1_prob >= 0.9,0.6522176861763 6106,"def label_row(self, row): if row.RD_Median_Separation < 0.15 and row.BAF1_prob < 0.4 and (row.PE_log_pval < -np.log10(0.05)): return 'Fail' elif: return 'Pass' else: return 'Unlabeled'",False,row.RD_Median_Separation >= 0.4 and row.BAF1_prob >= 0.9,row.RD_Median_Separation >= 0.9 and row.BAF1_prob >= 0.4 and (row.PE_log_pval >= np.log10(0.05),0.6461036205291748 6107,"def elementClass(self, name, namespace=None): if: node = self.dom.createElement(name) else: node = self.dom.createElementNS(namespace, name) return NodeBuilder(node)",False,namespace is None and self.defaultNamespace is None,namespace is None,0.6449484825134277 6108,"@operation def packages(packages=None, present=True, latest=False, update=False, upgrade=False): """""" Add/remove/update pkgin packages. + packages: list of packages to ensure + present: whether the packages should be installed + latest: whether to upgrade packages without a specified version + update: run ``pkgin update`` before installing packages + upgrade: run ``pkgin upgrade`` before installing packages **Examples:** .. code:: python # Update package list and install packages pkgin.packages( name=""Install tmux and Vim"", packages=[""tmux"", ""vim""], update=True, ) # Install the latest versions of packages (always check) pkgin.packages( name=""Install latest Vim"", packages=[""vim""], latest=True, ) """""" if: yield from _update() if upgrade: yield from _upgrade() yield from ensure_packages(host, packages, host.get_fact(PkginPackages), present, install_command='pkgin -y install', uninstall_command='pkgin -y remove', upgrade_command='pkgin -y upgrade', latest=latest)",True,update,update,0.6699923276901245 6109,"@operation def packages(packages=None, present=True, latest=False, update=False, upgrade=False): """""" Add/remove/update pkgin packages. + packages: list of packages to ensure + present: whether the packages should be installed + latest: whether to upgrade packages without a specified version + update: run ``pkgin update`` before installing packages + upgrade: run ``pkgin upgrade`` before installing packages **Examples:** .. code:: python # Update package list and install packages pkgin.packages( name=""Install tmux and Vim"", packages=[""tmux"", ""vim""], update=True, ) # Install the latest versions of packages (always check) pkgin.packages( name=""Install latest Vim"", packages=[""vim""], latest=True, ) """""" if update: yield from _update() if: yield from _upgrade() yield from ensure_packages(host, packages, host.get_fact(PkginPackages), present, install_command='pkgin -y install', uninstall_command='pkgin -y remove', upgrade_command='pkgin -y upgrade', latest=latest)",True,upgrade,upgrade,0.6674365997314453 6110,"def is_simple_convish(expr): if: return False return is_conv(expr) or is_dense(expr)",False,"not isinstance(expr, r.Call)",expr.type() != 'simple',0.6459879875183105 6111,"@property def mode_jet(self): """"""Return Jet Mode status."""""" if: return None key = self._get_state_key(STATE_MODE_JET) if (value := self.lookup_enum(key, True)) is None: return None try: status = JetMode(value)!= JetMode.OFF except ValueError: status = False return self._update_feature(AirConditionerFeatures.MODE_JET, status, False)",False,self._device.supported_mode_jet == JetModeSupport.NONE,not self._device.is_on,0.6470136642456055 6112,"@property def mode_jet(self): """"""Return Jet Mode status."""""" if self._device.supported_mode_jet == JetModeSupport.NONE: return None key = self._get_state_key(STATE_MODE_JET) if: return None try: status = JetMode(value)!= JetMode.OFF except ValueError: status = False return self._update_feature(AirConditionerFeatures.MODE_JET, status, False)",False,"(value := self.lookup_enum(key, True)) is None",(value := self.lookup_range(key)) is None,0.6449933648109436 6113,"def factory(*args_, **kwargs_): if: subclass = getSubclassFromModule_(CurrentSubclassModule_, DangerousGoodsDetail) if subclass is not None: return subclass(*args_, **kwargs_) if DangerousGoodsDetail.subclass: return DangerousGoodsDetail.subclass(*args_, **kwargs_) else: return DangerousGoodsDetail(*args_, **kwargs_)",True,CurrentSubclassModule_ is not None,CurrentSubclassModule_ is not None,0.6499806642532349 6114,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, DangerousGoodsDetail) if subclass is not None: return subclass(*args_, **kwargs_) if: return DangerousGoodsDetail.subclass(*args_, **kwargs_) else: return DangerousGoodsDetail(*args_, **kwargs_)",True,DangerousGoodsDetail.subclass,DangerousGoodsDetail.subclass,0.6516668200492859 6115,"def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_(CurrentSubclassModule_, DangerousGoodsDetail) if: return subclass(*args_, **kwargs_) if DangerousGoodsDetail.subclass: return DangerousGoodsDetail.subclass(*args_, **kwargs_) else: return DangerousGoodsDetail(*args_, **kwargs_)",True,subclass is not None,subclass is not None,0.6629124879837036 6116,"def _hasContent(self): if: return True else: return False",False,self.AuthenicateAccount is not None or self.PkgInfo is not None,self.ServiceHeader is not None or self.Status is not None or self.Status is not None,0.6426000595092773 6117,"def set_recipient(self, number: str) -> None: """""" Sets recipient for TestResult object + any expected receipts Args: number (str): Number of the bot performing the test """""" logging.info(f'Setting payment recipient as test orchestration account: {number}') self.test_account = number if: for pair in self.expected_receipts: receipt = pair[1] receipt.recipient = number",True,self.expected_receipts,self.expected_receipts,0.6513838768005371 6118,"def get_record(self, step_name, worker_id): """"""Get value from Shared Memory."""""" result = self.client.send(action='get_record', data={'step_name': step_name, 'worker_id': worker_id}) if: raise Exception(f'Failed to get record: {result}') return ReportRecord().load_dict(result['data'])",False,"not isinstance(result, dict) or 'result' not in result or result['result'] != 'success'",result['status'] != 'SUCCESS',0.6481902599334717 6119,"def loadValueDuration(self, value_name): if: dvalue = self.findFirstProperty(value_name).getDurationValue() if dvalue is not None: return dvalue.getValue() return None",True,self.hasProperty(value_name),self.hasProperty(value_name),0.6498695015907288 6120,"def loadValueDuration(self, value_name): if self.hasProperty(value_name): dvalue = self.findFirstProperty(value_name).getDurationValue() if: return dvalue.getValue() return None",True,dvalue is not None,dvalue is not None,0.6492394208908081 6121,"def get_cluster_of_cutoff(self, threshold='unique'): """""" Get all cluster of a threshold @param threshold: Cluster threshold @type threshold: str|unicode | int|float @return: List of cluster @rtype: list[list[str|unicode]]] """""" assert isinstance(threshold, (int, float, str)) if: assert isinstance(threshold, (int, float)) threshold = '{th:.{pre}f}'.format(th=threshold, pre=self._precision) if threshold not in self._cutoff_to_cluster: if self._logger: self._logger.error('Bad cutoff: {}'.format(threshold)) return None return self._cutoff_to_cluster[threshold]['cluster']",True,not threshold == 'unique',not threshold == 'unique',0.6578698754310608 6122,"def get_cluster_of_cutoff(self, threshold='unique'): """""" Get all cluster of a threshold @param threshold: Cluster threshold @type threshold: str|unicode | int|float @return: List of cluster @rtype: list[list[str|unicode]]] """""" assert isinstance(threshold, (int, float, str)) if not threshold == 'unique': assert isinstance(threshold, (int, float)) threshold = '{th:.{pre}f}'.format(th=threshold, pre=self._precision) if: if self._logger: self._logger.error('Bad cutoff: {}'.format(threshold)) return None return self._cutoff_to_cluster[threshold]['cluster']",True,threshold not in self._cutoff_to_cluster,threshold not in self._cutoff_to_cluster,0.6475424766540527 6123,"def get_cluster_of_cutoff(self, threshold='unique'): """""" Get all cluster of a threshold @param threshold: Cluster threshold @type threshold: str|unicode | int|float @return: List of cluster @rtype: list[list[str|unicode]]] """""" assert isinstance(threshold, (int, float, str)) if not threshold == 'unique': assert isinstance(threshold, (int, float)) threshold = '{th:.{pre}f}'.format(th=threshold, pre=self._precision) if threshold not in self._cutoff_to_cluster: if: self._logger.error('Bad cutoff: {}'.format(threshold)) return None return self._cutoff_to_cluster[threshold]['cluster']",True,self._logger,self._logger,0.6633795499801636 6124,"def get_dead_cats(self): self.dead_cats = [game.clan.instructor] if game.clan.instructor.df else [] for the_cat in Cat.all_cats_list: if: self.dead_cats.append(the_cat)",False,the_cat.dead and the_cat.ID != game.clan.instructor.ID and the_cat.df and (not the_cat.faded),the_cat.dead,0.6483384370803833 6125,"def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, assert_hostname=None, assert_fingerprint=None, ca_cert_dir=None): if: cert_reqs = 'CERT_REQUIRED' self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.ca_certs = ca_certs self.ca_cert_dir = ca_cert_dir self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint",False,(ca_certs or ca_cert_dir) and cert_reqs is None,cert_reqs is None,0.6493464708328247 6126,"def get_transponder_services(self, tr_url, sat_position=None, use_pids=False): """""" Returns services for given transponder. @param tr_url: transponder URL. @param sat_position: custom satellite position. Sometimes required to adjust the namespace. @param use_pids: if possible use additional pids [video, audio]. """""" try: self._t_url = tr_url self.init_data(tr_url) except ValueError as e: log(e) return [] else: if: return self.get_lyngsat_services(sat_position, use_pids) elif self._source is SatelliteSource.KINGOFSAT: return self.get_kingofsat_services(sat_position, use_pids) return []",False,self._source is SatelliteSource.LYNGSAT,self._source is SatelliteSource.LYNGAT,0.649201512336731 6127,"def get_transponder_services(self, tr_url, sat_position=None, use_pids=False): """""" Returns services for given transponder. @param tr_url: transponder URL. @param sat_position: custom satellite position. Sometimes required to adjust the namespace. @param use_pids: if possible use additional pids [video, audio]. """""" try: self._t_url = tr_url self.init_data(tr_url) except ValueError as e: log(e) return [] else: if self._source is SatelliteSource.LYNGSAT: return self.get_lyngsat_services(sat_position, use_pids) elif: return self.get_kingofsat_services(sat_position, use_pids) return []",True,self._source is SatelliteSource.KINGOFSAT,self._source is SatelliteSource.KINGOFSAT,0.646328330039978 6128,"def string(self): """"""Read a MacRoman-encoded Pascal string."""""" count = self._str[self._ptr] self._ptr += 1 + count if: return str(self._str[self._ptr - count:self._ptr], 'MacRoman') else: return ''",False,count,self._ptr < count,0.6657518744468689 6129,"def _build_dagger_trainer(tmpdir, venv, beta_schedule, expert_policy, pendulum_expert_rollouts: List[TrajectoryWithRew], custom_logger, rng: np.random.Generator): del expert_policy if: pytest.skip('DAggerTrainer does not use trajectories. Skipping to avoid duplicate test.') bc_trainer = bc.BC(observation_space=venv.observation_space, action_space=venv.action_space, optimizer_kwargs=dict(lr=0.001), custom_logger=custom_logger, rng=rng) return dagger.DAggerTrainer(venv=venv, scratch_dir=tmpdir, beta_schedule=beta_schedule, bc_trainer=bc_trainer, custom_logger=custom_logger, rng=rng)",False,pendulum_expert_rollouts is not None,pendulum_expert_rollouts,0.6486430764198303 6130,"def get_context_words(self, words, idx): """""" Get the context word list of target word. words: the words of the current line idx: input word index window_size: window size """""" target_window = self.random_generator() start_point = idx - target_window if: start_point = 0 end_point = idx + target_window targets = words[start_point:idx] + words[idx + 1:end_point + 1] return targets",True,start_point < 0,start_point < 0,0.6555883288383484 6131,"def __call__(self, req: Request) -> Request: url, username, password = self._get_url_and_credentials(req.url) req.url = url if: req = HTTPBasicAuth(username, password)(req) req.register_hook('response', self.handle_401) return req",False,username is not None and password is not None,username and password,0.6530368328094482 6132,"def render_git_describe(pieces): """"""TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """""" if: rendered = pieces['closest-tag'] if pieces['distance']: rendered += '-%d-g%s' % (pieces['distance'], pieces['short']) else: rendered = pieces['short'] if pieces['dirty']: rendered += '-dirty' return rendered",True,pieces['closest-tag'],pieces['closest-tag'],0.6458100080490112 6133,"def render_git_describe(pieces): """"""TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """""" if pieces['closest-tag']: rendered = pieces['closest-tag'] if pieces['distance']: rendered += '-%d-g%s' % (pieces['distance'], pieces['short']) else: rendered = pieces['short'] if: rendered += '-dirty' return rendered",True,pieces['dirty'],pieces['dirty'],0.6518610715866089 6134,"def render_git_describe(pieces): """"""TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """""" if pieces['closest-tag']: rendered = pieces['closest-tag'] if: rendered += '-%d-g%s' % (pieces['distance'], pieces['short']) else: rendered = pieces['short'] if pieces['dirty']: rendered += '-dirty' return rendered",True,pieces['distance'],pieces['distance'],0.6501687169075012 6135,"def _clear_selection(self): for image in self.images: if: image.set_selected(False)",True,image.get_selected(),image.get_selected(),0.6516220569610596 6136,"def __iter__(self): if: return iter(()) return CumSequence._HoleyIter(iter(self._critical_values.items()))",False,len(self._critical_values) == 0,self._critical_values is None,0.6477198600769043 6137,"def print_callers(self, *amount): width, list = self.get_print_list(amount) if: self.print_call_heading(width, 'was called by...') for func in list: cc, nc, tt, ct, callers = self.stats[func] self.print_call_line(width, func, callers, '<-') print(file=self.stream) print(file=self.stream) return self",False,list,amount,0.6633347868919373 6138,"@staticmethod def _find_object_body(object_name, javascript): object_name = object_name.replace('$', '\\$') match = re.search('var %s={(?P.*?})};' % object_name, javascript, re.S) if: return match.group('object_body') return ''",True,match,match,0.6647512912750244 6139,"def unwrap(self) -> list[dict[str, Any]]: unwrapped = [] for t in self._body: if: unwrapped.append(t.unwrap()) else: unwrapped.append(t) return unwrapped",False,"hasattr(t, 'unwrap')","isinstance(t, UnwrapableString)",0.6471902132034302 6140,"def different_ability(input, disability_names, nlp): text = input.lower() text = replace_punc(text) for name in disability_names.keys(): if: doc = nlp(text) wl, pl = postag(doc) indices = get_index(wl, name) text = placement(indices, wl, pl, input, disability_names, name) text = preserve_capitalization(input, text) text = restore_punc(text) return text",False,name in text,name != 'text',0.6606283187866211 6141,"def remove_unusable_urls(url, use_ascp): if: if url.startswith('fasp'): return url elif url.startswith('http') or url.startswith('ftp'): return url return None",True,use_ascp,use_ascp,0.6524307131767273 6142,"def remove_unusable_urls(url, use_ascp): if use_ascp: if: return url elif url.startswith('http') or url.startswith('ftp'): return url return None",False,url.startswith('fasp'),not url,0.6445889472961426 6143,"def remove_unusable_urls(url, use_ascp): if use_ascp: if url.startswith('fasp'): return url elif: return url return None",False,url.startswith('http') or url.startswith('ftp'),url.endswith('git://'),0.641863226890564 6144,"def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): """""" Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """""" if: raise LocationValueError('No host specified.') request_context = self._merge_pool_kwargs(pool_kwargs) request_context['scheme'] = scheme or 'http' if not port: port = port_by_scheme.get(request_context['scheme'].lower(), 80) request_context['port'] = port request_context['host'] = host return self.connection_from_context(request_context)",True,not host,not host,0.6652770042419434 6145,"def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): """""" Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """""" if not host: raise LocationValueError('No host specified.') request_context = self._merge_pool_kwargs(pool_kwargs) request_context['scheme'] = scheme or 'http' if: port = port_by_scheme.get(request_context['scheme'].lower(), 80) request_context['port'] = port request_context['host'] = host return self.connection_from_context(request_context)",False,not port,port is None,0.6615735292434692 6146,"def findSilence(name, matchers, silences): for silence in silences: if: pairs = zip(matchers, silence['matchers']) has_differences = any((x!= y for x, y in pairs)) if not has_differences: return silence return None",False,silence['comment'] == name,name in silence['name'],0.648257851600647 6147,"def findSilence(name, matchers, silences): for silence in silences: if silence['comment'] == name: pairs = zip(matchers, silence['matchers']) has_differences = any((x!= y for x, y in pairs)) if: return silence return None",False,not has_differences,has_differences,0.647197961807251 6148,"def ranklock(self, guildid): lock = self._rank_locks.get(guildid, None) if: lock = self._rank_locks[guildid] = asyncio.Lock() logger.debug(f'Getting rank lock for guild (locked: {lock.locked()})') return lock",True,lock is None,lock is None,0.6612976789474487 6149,"def resolve_columns(self, row, fields=()): if: row = row[1:] values = [] index_extra_select = len(self.query.extra_select) for value, field in zip_longest(row[index_extra_select:], fields): if field: try: value = self.connection.ops.convert_values(value, field) except ValueError: pass values.append(value) return row[:index_extra_select] + tuple(values)",False,"getattr(self, '_using_row_number', False)",row[0] == '#',0.6452817916870117 6150,"def resolve_columns(self, row, fields=()): if getattr(self, '_using_row_number', False): row = row[1:] values = [] index_extra_select = len(self.query.extra_select) for value, field in zip_longest(row[index_extra_select:], fields): if: try: value = self.connection.ops.convert_values(value, field) except ValueError: pass values.append(value) return row[:index_extra_select] + tuple(values)",True,field,field,0.6650636196136475 6151,"def delete_saml_provider(self, saml_provider_arn: str) -> None: try: for saml_provider in list(self.list_saml_providers()): if: del self.saml_providers[saml_provider.name] except KeyError: raise IAMNotFoundException(f'SAMLProvider {saml_provider_arn} not found')",False,saml_provider.arn == saml_provider_arn,six.string_types(formCode_provider.name) == 'signData',0.6552406549453735 6152,"def __getstate__(self): if: self.content return dict(((attr, getattr(self, attr, None)) for attr in self.__attrs__))",True,not self._content_consumed,not self._content_consumed,0.6558880805969238 6153,"def output_accountmigrationstatusesinfo(data_object): if: return output_status_message('* * * Begin output_accountmigrationstatusesinfo * * *') output_status_message('AccountId: {0}'.format(data_object.AccountId)) output_status_message('MigrationStatusInfos:') output_array_of_migrationstatusinfo(data_object.MigrationStatusInfos) output_status_message('* * * End output_accountmigrationstatusesinfo * * *')",True,data_object is None,data_object is None,0.6513686180114746 6154,"def add_attr(self, key, value): if: pass elif isinstance(value, list) or isinstance(value, np.ndarray): value =''.join([str(val).lower() for val in value]) else: value = str(value).lower() self.attrs[key] = value return self",True,"isinstance(value, str)","isinstance(value, str)",0.6475352048873901 6155,"def add_attr(self, key, value): if isinstance(value, str): pass elif: value =''.join([str(val).lower() for val in value]) else: value = str(value).lower() self.attrs[key] = value return self",True,"isinstance(value, list) or isinstance(value, np.ndarray)","isinstance(value, list) or isinstance(value, np.ndarray)",0.6424102783203125 6156,"def __init__(self, description=None): Exception.__init__(self, '%d %s' % (self.code, self.name)) if: self.description = description",False,description is not None,description,0.6545815467834473 6157,"@property def lower_inf(self): """"""`!True` if the range doesn't have a lower bound."""""" if: return False return self._lower is None",True,self._bounds is None,self._bounds is None,0.6512495875358582 6158,"def sample_goals(self, batch_size): """"""See parent class. The goal is the desired x,y coordinates. """""" if: goals = np.repeat(self.fixed_goal.copy()[None], batch_size, 0) else: goals = np.zeros((batch_size, self.obs_range.low.size)) for b in range(batch_size): if batch_size > 1: logging.warning('This is very slow!') goals[b, :] = self._sample_position(self.obs_range.low, self.obs_range.high) return {'goals': goals}",True,self.fixed_goal is not None,self.fixed_goal is not None,0.647354781627655 6159,"def sample_goals(self, batch_size): """"""See parent class. The goal is the desired x,y coordinates. """""" if self.fixed_goal is not None: goals = np.repeat(self.fixed_goal.copy()[None], batch_size, 0) else: goals = np.zeros((batch_size, self.obs_range.low.size)) for b in range(batch_size): if: logging.warning('This is very slow!') goals[b, :] = self._sample_position(self.obs_range.low, self.obs_range.high) return {'goals': goals}",False,batch_size > 1,b % 2 == 0,0.6533453464508057 6160,"def perturbation_accuracy(self, targeted): if: return perturbation_accuracy(self.benign_task, self.targeted_task, self.perturbation) else: return perturbation_accuracy(self.benign_task, self.adversarial_task, self.perturbation)",True,targeted,targeted,0.6629294157028198 6161,"def terminate(self): self.mon.trace(self, '') self.terminate_signal = True if: self.stop() else: self.end('killed', 'terminate with no track or show open')",False,self.play_state == 'showing',self.show_track or self.show_show,0.6467710733413696 6162,"def do_attribute(self, node): if: valid = False else: key = self.get_attr_key(node) valid = key in self.context or key in self.allowed_values if not valid: raise SyntaxError('invalid expression: %s' % key) if key in self.context: result = self.context[key] else: result = self.allowed_values[key] return result",False,"not isinstance(node.value, ast.Name)",node is None,0.645540714263916 6163,"def do_attribute(self, node): if not isinstance(node.value, ast.Name): valid = False else: key = self.get_attr_key(node) valid = key in self.context or key in self.allowed_values if: raise SyntaxError('invalid expression: %s' % key) if key in self.context: result = self.context[key] else: result = self.allowed_values[key] return result",True,not valid,not valid,0.661990761756897 6164,"def do_attribute(self, node): if not isinstance(node.value, ast.Name): valid = False else: key = self.get_attr_key(node) valid = key in self.context or key in self.allowed_values if not valid: raise SyntaxError('invalid expression: %s' % key) if: result = self.context[key] else: result = self.allowed_values[key] return result",False,key in self.context,self.context,0.6552761793136597 6165,"def _get_self_name(self): param_names = self.info.scope.pyobject.get_param_names() if: return param_names[0]",True,param_names,param_names,0.66180020570755 6166,"def recurse(node): if: return recurse(node._right_child) return node.start_offset",False,node.right_child is not None,"isinstance(node, Child) and hasattr(node._right_child, 'name')",0.6541430950164795 6167,"def get_definition_location(self): """"""Returns a (module, lineno) tuple"""""" if: self.lineno = self.assignments[0].get_lineno() return (self.module, self.lineno)",False,self.lineno is None and self.assignments,self.lineno is None,0.655021607875824 6168,"@classmethod def _cast_from(cls, source): if: source = _cast_source_to_int(source) return cls(bool(source))",False,"not isinstance(source, (int, long, float))","isinstance(source, int)",0.6522344350814819 6169,"@property def local_angles(self): if: return parse_int_vector(self._entity_data.get('local_angles')) return parse_int_vector('None')",True,'local_angles' in self._entity_data,'local_angles' in self._entity_data,0.6517603397369385 6170,"@classmethod def parse(cls, flags): """"""Parse EventFD flags into list of flags. """""" masks = [] remain_flags = flags for flag in cls: if flags & flag.value: remain_flags ^= flag.value masks.append(flag) if: masks.append(remain_flags) return masks",True,remain_flags,remain_flags,0.6550271511077881 6171,"@classmethod def parse(cls, flags): """"""Parse EventFD flags into list of flags. """""" masks = [] remain_flags = flags for flag in cls: if: remain_flags ^= flag.value masks.append(flag) if remain_flags: masks.append(remain_flags) return masks",False,flags & flag.value,flag.value != 0,0.6491988897323608 6172,"def energy(cc, t1, t2, eris): nocc, nvir = t1.shape fock = eris.fock e = einsum('ia,ia', fock[:nocc, nocc:], t1) eris_oovv = np.array(eris.oovv) e += 0.25 * np.einsum('ijab,ijab', t2, eris_oovv) e += 0.5 * np.einsum('ia,jb,ijab', t1, t1, eris_oovv) if: logger.warn(cc, 'Non-zero imaginary part found in GCCSD energy %s', e) return e.real",False,abs(e.imag) > 0.0001,e.imag == 0,0.6477057337760925 6173,"def forward(self, x, return_shared_feat=False): """"""Forward function. Args: x (Tensor): input features return_shared_feat (bool): If True, return cls-reg-shared feature. Return: out (tuple[Tensor]): contain ``cls_score`` and ``bbox_pred``, if ``return_shared_feat`` is True, append ``x_shared`` to the returned tuple. """""" x_shared = self._forward_shared(x) out = self._forward_cls_reg(x_shared) if: out += (x_shared,) return out",True,return_shared_feat,return_shared_feat,0.6491784453392029 6174,"def strip_consts(graph_def, max_const_size=32): """"""Strip large constant values from graph_def."""""" strip_def = tf.GraphDef() for n0 in graph_def.node: n = strip_def.node.add() n.MergeFrom(n0) if: tensor = n.attr['value'].tensor size = len(tensor.tensor_content) if size > max_const_size: tensor.tensor_content = '' % size return strip_def",False,n.op == 'Const','value' in n.attr,0.6499069929122925 6175,"def strip_consts(graph_def, max_const_size=32): """"""Strip large constant values from graph_def."""""" strip_def = tf.GraphDef() for n0 in graph_def.node: n = strip_def.node.add() n.MergeFrom(n0) if n.op == 'Const': tensor = n.attr['value'].tensor size = len(tensor.tensor_content) if: tensor.tensor_content = '' % size return strip_def",True,size > max_const_size,size > max_const_size,0.651700496673584 6176,"def encode_genotype(s, mindepth=3, depth_index=2, nohet=False): """""" >>> encode_genotype(""1/1:128,18,0:6:18"") # homozygote B 'B' >>> encode_genotype(""0/1:0,0,0:0:3"") # missing data '-' >>> encode_genotype(""0/1:128,0,26:7:22"") # heterozygous A/B 'X' """""" atoms = s.split(':') if: return g2x[atoms[0]] inferred = atoms[0] depth = int(atoms[depth_index]) if depth < mindepth: return '-' if inferred == '0/0': return 'A' if inferred == '0/1': return '-' if nohet else 'X' if inferred == '1/1': return 'B' return '-'",False,len(atoms) < 3,atoms[0] in g2x,0.6496390104293823 6177,"def encode_genotype(s, mindepth=3, depth_index=2, nohet=False): """""" >>> encode_genotype(""1/1:128,18,0:6:18"") # homozygote B 'B' >>> encode_genotype(""0/1:0,0,0:0:3"") # missing data '-' >>> encode_genotype(""0/1:128,0,26:7:22"") # heterozygous A/B 'X' """""" atoms = s.split(':') if len(atoms) < 3: return g2x[atoms[0]] inferred = atoms[0] depth = int(atoms[depth_index]) if: return '-' if inferred == '0/0': return 'A' if inferred == '0/1': return '-' if nohet else 'X' if inferred == '1/1': return 'B' return '-'",False,depth < mindepth,depth == mindepth,0.6555042266845703 6178,"def encode_genotype(s, mindepth=3, depth_index=2, nohet=False): """""" >>> encode_genotype(""1/1:128,18,0:6:18"") # homozygote B 'B' >>> encode_genotype(""0/1:0,0,0:0:3"") # missing data '-' >>> encode_genotype(""0/1:128,0,26:7:22"") # heterozygous A/B 'X' """""" atoms = s.split(':') if len(atoms) < 3: return g2x[atoms[0]] inferred = atoms[0] depth = int(atoms[depth_index]) if depth < mindepth: return '-' if: return 'A' if inferred == '0/1': return '-' if nohet else 'X' if inferred == '1/1': return 'B' return '-'",False,inferred == '0/0',inferred == '0/1',0.6484050750732422 6179,"def encode_genotype(s, mindepth=3, depth_index=2, nohet=False): """""" >>> encode_genotype(""1/1:128,18,0:6:18"") # homozygote B 'B' >>> encode_genotype(""0/1:0,0,0:0:3"") # missing data '-' >>> encode_genotype(""0/1:128,0,26:7:22"") # heterozygous A/B 'X' """""" atoms = s.split(':') if len(atoms) < 3: return g2x[atoms[0]] inferred = atoms[0] depth = int(atoms[depth_index]) if depth < mindepth: return '-' if inferred == '0/0': return 'A' if: return '-' if nohet else 'X' if inferred == '1/1': return 'B' return '-'",False,inferred == '0/1',inferred == '1/1',0.6483035087585449 6180,"def encode_genotype(s, mindepth=3, depth_index=2, nohet=False): """""" >>> encode_genotype(""1/1:128,18,0:6:18"") # homozygote B 'B' >>> encode_genotype(""0/1:0,0,0:0:3"") # missing data '-' >>> encode_genotype(""0/1:128,0,26:7:22"") # heterozygous A/B 'X' """""" atoms = s.split(':') if len(atoms) < 3: return g2x[atoms[0]] inferred = atoms[0] depth = int(atoms[depth_index]) if depth < mindepth: return '-' if inferred == '0/0': return 'A' if inferred == '0/1': return '-' if nohet else 'X' if: return 'B' return '-'",False,inferred == '1/1',inferred == '1/B',0.6482664346694946 6181,"def get_zbx_user_query_data(zapi, user_name): """""" If name exists, retrieve it, and build query params. """""" query = {} if: zbx_user = get_user(zapi, user_name) query = {'userid': zbx_user['userid']} return query",True,user_name,user_name,0.66136634349823 6182,"def get_env_device(): """""" Return the device name of running environment. """""" if: return 'gpu' elif 'npu' in paddle.device.get_all_custom_device_type(): return 'npu' elif paddle.is_compiled_with_rocm(): return 'rocm' elif paddle.is_compiled_with_xpu(): return 'xpu' return 'cpu'",False,paddle.is_compiled_with_cuda(),paddle.device.get_all_custom_device_type(),0.6458518505096436 6183,"def get_env_device(): """""" Return the device name of running environment. """""" if paddle.is_compiled_with_cuda(): return 'gpu' elif: return 'npu' elif paddle.is_compiled_with_rocm(): return 'rocm' elif paddle.is_compiled_with_xpu(): return 'xpu' return 'cpu'",False,'npu' in paddle.device.get_all_custom_device_type(),paddle.is_compiled_with_npu(),0.647673487663269 6184,"def get_env_device(): """""" Return the device name of running environment. """""" if paddle.is_compiled_with_cuda(): return 'gpu' elif 'npu' in paddle.device.get_all_custom_device_type(): return 'npu' elif: return 'rocm' elif paddle.is_compiled_with_xpu(): return 'xpu' return 'cpu'",False,paddle.is_compiled_with_rocm(),'rocm' in paddle.device.get_all_custom_device_type(),0.6456636786460876 6185,"def get_env_device(): """""" Return the device name of running environment. """""" if paddle.is_compiled_with_cuda(): return 'gpu' elif 'npu' in paddle.device.get_all_custom_device_type(): return 'npu' elif paddle.is_compiled_with_rocm(): return 'rocm' elif: return 'xpu' return 'cpu'",False,paddle.is_compiled_with_xpu(),paddle.device.get_all_custom_device_type(),0.6442536115646362 6186,"def split_first(s, delims): """""" Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """""" min_idx = None min_delim = None for d in delims: idx = s.find(d) if idx < 0: continue if min_idx is None or idx < min_idx: min_idx = idx min_delim = d if: return (s, '', None) return (s[:min_idx], s[min_idx + 1:], min_delim)",True,min_idx is None or min_idx < 0,min_idx is None or min_idx < 0,0.6452157497406006 6187,"def split_first(s, delims): """""" Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """""" min_idx = None min_delim = None for d in delims: idx = s.find(d) if: continue if min_idx is None or idx < min_idx: min_idx = idx min_delim = d if min_idx is None or min_idx < 0: return (s, '', None) return (s[:min_idx], s[min_idx + 1:], min_delim)",True,idx < 0,idx < 0,0.6565911769866943 6188,"def split_first(s, delims): """""" Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """""" min_idx = None min_delim = None for d in delims: idx = s.find(d) if idx < 0: continue if: min_idx = idx min_delim = d if min_idx is None or min_idx < 0: return (s, '', None) return (s[:min_idx], s[min_idx + 1:], min_delim)",True,min_idx is None or idx < min_idx,min_idx is None or idx < min_idx,0.6470838785171509 6189,"def replacer(self, line, find, repl1, repl2): if: return line if line.count(find) % 2!= 0: return line is_open = False while find in line: if not is_open: repl = repl1 else: repl = repl2 line = line.replace(find, repl, 1) is_open = not is_open return line",False,not line,find not in line,0.6545436978340149 6190,"def replacer(self, line, find, repl1, repl2): if not line: return line if: return line is_open = False while find in line: if not is_open: repl = repl1 else: repl = repl2 line = line.replace(find, repl, 1) is_open = not is_open return line",False,line.count(find) % 2 != 0,find not in line,0.6469787359237671 6191,"def replacer(self, line, find, repl1, repl2): if not line: return line if line.count(find) % 2!= 0: return line is_open = False while find in line: if: repl = repl1 else: repl = repl2 line = line.replace(find, repl, 1) is_open = not is_open return line",False,not is_open,is_open and repl1 and (find == repl2),0.6543519496917725 6192,"def set_version(self, v): if: print('Invalid version for Transport') else: self.version = self.sync_handler.version = self.async_handler.version = v",False,not v == 0 and (not v == 1),not v,0.6522153615951538 6193,"@cached def decompile(mimic, env): if: return expression_factory(mimic.op, *(decompile(a, env) for a in mimic.args)) if isinstance(mimic, Val): return env[mimic.name] if is_constant(mimic): return mimic raise ValueError('Do not know how to convert {!r} to an Expression instance.'.format(mimic))",False,"isinstance(mimic, ExprMimic)","isinstance(mimic, Expression)",0.6516277194023132 6194,"@cached def decompile(mimic, env): if isinstance(mimic, ExprMimic): return expression_factory(mimic.op, *(decompile(a, env) for a in mimic.args)) if: return env[mimic.name] if is_constant(mimic): return mimic raise ValueError('Do not know how to convert {!r} to an Expression instance.'.format(mimic))",False,"isinstance(mimic, Val)",mimic.name in env,0.6541109681129456 6195,"@cached def decompile(mimic, env): if isinstance(mimic, ExprMimic): return expression_factory(mimic.op, *(decompile(a, env) for a in mimic.args)) if isinstance(mimic, Val): return env[mimic.name] if: return mimic raise ValueError('Do not know how to convert {!r} to an Expression instance.'.format(mimic))",False,is_constant(mimic),"isinstance(mimic, Expression)",0.6482985019683838 6196,"def parse_protobuf(self, proto_type): """"""Parse the data into an instance of proto_type."""""" if: raise BadRequest('Not a Protobuf request') obj = proto_type() try: obj.ParseFromString(self.data) except Exception: raise BadRequest('Unable to parse Protobuf request') if self.protobuf_check_initialization and (not obj.IsInitialized()): raise BadRequest('Partial Protobuf request') return obj",False,"'protobuf' not in self.environ.get('CONTENT_TYPE', '')",not self.data,0.6480851173400879 6197,"def parse_protobuf(self, proto_type): """"""Parse the data into an instance of proto_type."""""" if 'protobuf' not in self.environ.get('CONTENT_TYPE', ''): raise BadRequest('Not a Protobuf request') obj = proto_type() try: obj.ParseFromString(self.data) except Exception: raise BadRequest('Unable to parse Protobuf request') if: raise BadRequest('Partial Protobuf request') return obj",False,self.protobuf_check_initialization and (not obj.IsInitialized()),"self.environ.get('CONTENT_TYPE', '').startswith('HEAD')",0.6425554156303406 6198,"@is_semantic_parsed.default def _is_semantic_parsed_default(self) -> bool: if: return False if self.children is not None: for c in self.children: if not isinstance(c, SubArticleElement): break if c.CAN_BE_SEMANTIC_PARSED and (not c.is_semantic_parsed): return False return True",False,self.semantic_data is None,self.semantic_parsed is None,0.6448332071304321 6199,"@is_semantic_parsed.default def _is_semantic_parsed_default(self) -> bool: if self.semantic_data is None: return False if: for c in self.children: if not isinstance(c, SubArticleElement): break if c.CAN_BE_SEMANTIC_PARSED and (not c.is_semantic_parsed): return False return True",False,self.children is not None,"isinstance(self.semantic_data, SemanticData)",0.6465697288513184 6200,"@is_semantic_parsed.default def _is_semantic_parsed_default(self) -> bool: if self.semantic_data is None: return False if self.children is not None: for c in self.children: if: break if c.CAN_BE_SEMANTIC_PARSED and (not c.is_semantic_parsed): return False return True",False,"not isinstance(c, SubArticleElement)","isinstance(c, SemanticData)",0.6409262418746948 6201,"@is_semantic_parsed.default def _is_semantic_parsed_default(self) -> bool: if self.semantic_data is None: return False if self.children is not None: for c in self.children: if not isinstance(c, SubArticleElement): break if: return False return True",False,c.CAN_BE_SEMANTIC_PARSED and (not c.is_semantic_parsed),c.semantic_data == self.semantic_data,0.6446877717971802 6202,"def omax(self, size_left): offset = self.__r.current() if: raise ParseError('object max size 0x%x + 0x%x over parent 0x%x' % (offset, size_left, self._omax), self) self._omax = offset + size_left return self",False,self._omax and self._omax < offset + size_left,offset < 0 or offset >= size_left,0.6480310559272766 6203,"def get_peid_signatures(self): """"""Return a list of matched PEID signatures."""""" if: self._load_pe_sigdb() return self._peid_sigdb.match(self._pe, ep_only=True) or []",False,not self._peid_sigdb,self._peid_sigdb is None,0.6528528928756714 6204,"def _check_input_dim(self, input): if: raise ValueError('expected 2D or 3D input (got {}D input)'.format(input.dim()))",True,input.dim() != 2 and input.dim() != 3,input.dim() != 2 and input.dim() != 3,0.6509600877761841 6205,"@staticmethod def create_dataset(**kwargs): """""" Args: name: dataset name 'VOT2018', 'VOT2016' dataset_root: dataset root Return: dataset """""" assert 'name' in kwargs,'should provide dataset name' name = kwargs['name'] if: dataset = VOTDataset(**kwargs) else: raise Exception('unknow dataset {}'.format(kwargs['name'])) return dataset",False,'VOT2018' == name or 'VOT2016' == name or 'VOT2019' == name,name == 'VOT2016',0.6471201181411743 6206,"def nf(*args, **kwargs): res = f(*args, **kwargs) if: validate_return_type(f, res, nf.returns) return res",False,conf.debug_mode,npy.returns is not None,0.6516700983047485 6207,"def __init__(self, config_file, no_notify=False, reset=False): """"""The 'no_notify' flag suppresses the Slack notifications. The'reset' flag causes the notification state files to be ignored. """""" super().__init__() if: raise SlackConfigError('No Slack config filename provided') self.read(config_file) self.handlers = [] self.no_notify = no_notify self.reset = reset",True,not config_file,not config_file,0.6519981622695923 6208,"def get_free_var_types(e, fvars_needed: FrozenSet[str]) -> PMap[str, Type]: if: return pmap() if e.op == 'variable' and e.name in fvars_needed: return pmap({e.name: e.type}) return reduce(combine_pmaps, [get_free_var_types(ch, fvars_needed.difference([e.bound_var.name]) if e.is_binder and e.binds_in_child(i) else fvars_needed) for i, ch in enumerate(e.children)], pmap())",False,len(fvars_needed) == 0,e is None,0.6518079042434692 6209,"def get_free_var_types(e, fvars_needed: FrozenSet[str]) -> PMap[str, Type]: if len(fvars_needed) == 0: return pmap() if: return pmap({e.name: e.type}) return reduce(combine_pmaps, [get_free_var_types(ch, fvars_needed.difference([e.bound_var.name]) if e.is_binder and e.binds_in_child(i) else fvars_needed) for i, ch in enumerate(e.children)], pmap())",False,e.op == 'variable' and e.name in fvars_needed,e.type is not e.type,0.6496294140815735 6210,"def multi_step(self, actions_as_ints: Tuple[int,...]) -> List[Dict[str, Any]]: step_results = super(FurnLiftNApartStateLoggingEpisode, self).multi_step(actions_as_ints=actions_as_ints) visibility = self.goal_visibility() for i in range(self.environment.num_agents): if: self._first_view_of_target[i] = min(self._first_view_of_target[i], self.num_steps_taken_in_episode()) return step_results",False,visibility[i],self._first_view_of_target[i] > visibility,0.6574292182922363 6211,"def _group_add(self, config): if: config = hnode_misc.NodeConfig() tnode = wtxtp_tree.TxtpNode(self._current, config=config) self._current.append(tnode) self._current = tnode return tnode",False,not config,config is None,0.6618176698684692 6212,"def reload_or_restart_unit_from(self, conf): """""" do'reload' if specified, otherwise do'restart' """""" if: return False with waitlock(conf): logg.info(' reload-or-restart unit %s => %s', conf.name(), strQ(conf.filename())) return self.do_reload_or_restart_unit_from(conf)",True,not conf,not conf,0.659113883972168 6213,"def generate_sample_problems(self, solver): linear, quadratic = self.sapi.problem problems = [('sample_ising', (linear, quadratic)), ('sample_qubo', (quadratic,))] if: bqm = dimod.BQM.from_ising(linear, quadratic) problems.append(('sample_bqm', (bqm,))) return problems",False,dimod,self.model_type == 'quadratic',0.6707044839859009 6214,"def matchPreviousLiteral(expr): """""" Helper to define an expression that is indirectly defined from the tokens matched in a previous expression, that is, it looks for a'repeat' of a previous expression. For example:: first = Word(nums) second = matchPreviousLiteral(first) matchExpr = first + "":"" + second will match C{""1:1""}, but not C{""1:2""}. Because this matches a previous literal, will also match the leading C{""1:1""} in C{""1:10""}. If this is not desired, use C{matchPreviousExpr}. Do I{not} use with packrat parsing enabled. """""" rep = Forward() def copyTokenToRepeater(s, l, t): if: if len(t) == 1: rep << t[0] else: tflat = _flatten(t.asList()) rep << And((Literal(tt) for tt in tflat)) else: rep << Empty() expr.addParseAction(copyTokenToRepeater, callDuringTry=True) rep.setName('(prev)'+ _ustr(expr)) return rep",True,t,t,0.6793944835662842 6215,"def matchPreviousLiteral(expr): """""" Helper to define an expression that is indirectly defined from the tokens matched in a previous expression, that is, it looks for a'repeat' of a previous expression. For example:: first = Word(nums) second = matchPreviousLiteral(first) matchExpr = first + "":"" + second will match C{""1:1""}, but not C{""1:2""}. Because this matches a previous literal, will also match the leading C{""1:1""} in C{""1:10""}. If this is not desired, use C{matchPreviousExpr}. Do I{not} use with packrat parsing enabled. """""" rep = Forward() def copyTokenToRepeater(s, l, t): if t: if: rep << t[0] else: tflat = _flatten(t.asList()) rep << And((Literal(tt) for tt in tflat)) else: rep << Empty() expr.addParseAction(copyTokenToRepeater, callDuringTry=True) rep.setName('(prev)'+ _ustr(expr)) return rep",True,len(t) == 1,len(t) == 1,0.6495482921600342 6216,"def ensure_dir(path): """""" Ensures that the specified directory exists. :param path: Path to the directory. """""" if: os.makedirs(path)",True,not os.path.exists(path),not os.path.exists(path),0.6436993479728699 6217,"def open_random(self): id = get_random_id() if: tooltip('You have no notes') else: self.chosen_id = id self.accept()",False,id is None or id < 0,id == 0,0.6488205194473267 6218,"def _build_name(self, full_name): if: return full_name i = full_name.rfind(self.namespace_separator) if i == -1: return full_name namespace, name = (full_name[:i], full_name[i + 1:]) short_namespace = self.namespaces.get(namespace, namespace) if not short_namespace: return name else: return self.namespace_separator.join((short_namespace, name))",False,not self.namespaces,not full_name,0.6504181623458862 6219,"def _build_name(self, full_name): if not self.namespaces: return full_name i = full_name.rfind(self.namespace_separator) if: return full_name namespace, name = (full_name[:i], full_name[i + 1:]) short_namespace = self.namespaces.get(namespace, namespace) if not short_namespace: return name else: return self.namespace_separator.join((short_namespace, name))",False,i == -1,i < 0,0.6679086685180664 6220,"def _build_name(self, full_name): if not self.namespaces: return full_name i = full_name.rfind(self.namespace_separator) if i == -1: return full_name namespace, name = (full_name[:i], full_name[i + 1:]) short_namespace = self.namespaces.get(namespace, namespace) if: return name else: return self.namespace_separator.join((short_namespace, name))",False,not short_namespace,short_namespace is None,0.6561227440834045 6221,"def test_suite(): if: return TestSuite([makeSuite(t, 'check') for t in TestClasses]) else: return TestSuite([])",False,sys.version[0] == '2',TestClasses,0.6507202386856079 6222,"def cancel_order(self, id, symbol=None, params={}): if: raise ArgumentsRequired(self.id +'cancelOrder requires a symbol argument') self.load_markets() market = self.market(symbol) request = {'market_id': market['id'], 'order_id': id} response = self.privatePostCancelOrder(self.extend(request, params)) data = self.safe_value(response, 'data') return self.parse_order(data)",True,symbol is None,symbol is None,0.6574738025665283 6223,"def _site_url(): urldefault = None if: import cmk.utils.site siteconfig = cmk.utils.site.get_omd_config() urldefault = 'http://%s:%s/%s' % (siteconfig['CONFIG_APACHE_TCP_ADDR'], siteconfig['CONFIG_APACHE_TCP_PORT'], os.environ['OMD_SITE']) return urldefault",False,"os.environ.get('HOME', 'a') == os.environ.get('OMD_ROOT', 'b')",'OMD_SITE' in os.environ,0.6517515778541565 6224,"def assert_fingerprint(cert, fingerprint): """""" Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. """""" fingerprint = fingerprint.replace(':', '').lower() digest_length = len(fingerprint) hashfunc = HASHFUNC_MAP.get(digest_length) if: raise SSLError('Fingerprint of invalid length: {0}'.format(fingerprint)) fingerprint_bytes = unhexlify(fingerprint.encode()) cert_digest = hashfunc(cert).digest() if not _const_compare_digest(cert_digest, fingerprint_bytes): raise SSLError('Fingerprints did not match. Expected ""{0}"", got ""{1}"".'.format(fingerprint, hexlify(cert_digest)))",True,not hashfunc,not hashfunc,0.6531475782394409 6225,"def assert_fingerprint(cert, fingerprint): """""" Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. """""" fingerprint = fingerprint.replace(':', '').lower() digest_length = len(fingerprint) hashfunc = HASHFUNC_MAP.get(digest_length) if not hashfunc: raise SSLError('Fingerprint of invalid length: {0}'.format(fingerprint)) fingerprint_bytes = unhexlify(fingerprint.encode()) cert_digest = hashfunc(cert).digest() if: raise SSLError('Fingerprints did not match. Expected ""{0}"", got ""{1}"".'.format(fingerprint, hexlify(cert_digest)))",True,"not _const_compare_digest(cert_digest, fingerprint_bytes)","not _const_compare_digest(cert_digest, fingerprint_bytes)",0.6415911912918091 6226,"def preprocess_input(x, size=None, BGRTranspose=True): """"""input standardizing function Args: x: numpy.ndarray with shape (H, W, C) size: tuple (H_new, W_new), resized input shape Return: x: numpy.ndarray """""" if: x = resize(x, size) * 255 if BGRTranspose: x = x[..., ::-1] return x",False,size,size is not None,0.6722501516342163 6227,"def preprocess_input(x, size=None, BGRTranspose=True): """"""input standardizing function Args: x: numpy.ndarray with shape (H, W, C) size: tuple (H_new, W_new), resized input shape Return: x: numpy.ndarray """""" if size: x = resize(x, size) * 255 if: x = x[..., ::-1] return x",True,BGRTranspose,BGRTranspose,0.6468619108200073 6228,"def __init__(self, action_space, epsilon, max_sigma=1.0, min_sigma=None, decay_period=1000000): assert len(action_space.shape) == 1 if: min_sigma = max_sigma self._max_sigma = max_sigma self._epsilon = epsilon self._min_sigma = min_sigma self._decay_period = decay_period self._action_space = action_space",True,min_sigma is None,min_sigma is None,0.664386510848999 6229,"def dict2str(opt, indent_l=1): msg = '' for k, v in opt.items(): if: msg +='' * (indent_l * 2) + k + ':[\n' msg += dict2str(v, indent_l + 1) msg +='' * (indent_l * 2) + ']\n' else: msg +='' * (indent_l * 2) + k + ':'+ str(v) + '\n' return msg",True,"isinstance(v, dict)","isinstance(v, dict)",0.6452008485794067 6230,"def forward(self, x): if: x = x.squeeze(3).squeeze(2) cls_score = self.cls_score(x) if not self.training: cls_score = F.softmax(cls_score, dim=1) bbox_pred = self.bbox_pred(x) return (cls_score, bbox_pred)",True,x.dim() == 4,x.dim() == 4,0.649858832359314 6231,"def forward(self, x): if x.dim() == 4: x = x.squeeze(3).squeeze(2) cls_score = self.cls_score(x) if: cls_score = F.softmax(cls_score, dim=1) bbox_pred = self.bbox_pred(x) return (cls_score, bbox_pred)",False,not self.training,self.with_softmax,0.6493474841117859 6232,"def obj_create(self, bundle, request=None, **kwargs): res = super(IterationResource, self).obj_create(bundle, request) if: signals.iteration_updated.send(sender=request, iteration=res.obj, user=request.user) else: signals.iteration_created.send(sender=request, iteration=res.obj, user=request.user) return res",False,'pk' in kwargs.keys(),self.save_prediction_only,0.6479754447937012 6233,"def acceptNavigationRequest(self, url, type_, isMainFrame): if: return False return super().acceptNavigationRequest(url, type_, isMainFrame)",False,type_ == QWebEnginePage.NavigationTypeLinkClicked,url.scheme() != 'http',0.6457794308662415 6234,"def _is_shape(values): if: return False for v in values: if not isinstance(v, int): return False return True",False,"not isinstance(values, (list, tuple))","not isinstance(values, list)",0.6441157460212708 6235,"def _is_shape(values): if not isinstance(values, (list, tuple)): return False for v in values: if: return False return True",False,"not isinstance(v, int)",not _is_shape(v),0.6450600028038025 6236,"def start_application(self, application_id: str) -> None: if: raise ResourceNotFoundException(application_id) self.applications[application_id].state = 'STARTED'",False,application_id not in self.applications.keys(),application_id not in self.applications,0.6491796374320984 6237,"def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True, join_mask=None, from_layer=0): all_encoder_layers = [] for i, layer_module in enumerate(self.layer): if i < from_layer: continue hidden_states = layer_module(hidden_states, attention_mask, join_mask=None if i >= self.join_layer else join_mask) if output_all_encoded_layers: all_encoder_layers.append(hidden_states) if: all_encoder_layers.append(hidden_states) return all_encoder_layers",False,not output_all_encoded_layers,output_all_encoded_layers,0.6458449363708496 6238,"def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True, join_mask=None, from_layer=0): all_encoder_layers = [] for i, layer_module in enumerate(self.layer): if: continue hidden_states = layer_module(hidden_states, attention_mask, join_mask=None if i >= self.join_layer else join_mask) if output_all_encoded_layers: all_encoder_layers.append(hidden_states) if not output_all_encoded_layers: all_encoder_layers.append(hidden_states) return all_encoder_layers",False,i < from_layer,layer_module is None,0.650843620300293 6239,"def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True, join_mask=None, from_layer=0): all_encoder_layers = [] for i, layer_module in enumerate(self.layer): if i < from_layer: continue hidden_states = layer_module(hidden_states, attention_mask, join_mask=None if i >= self.join_layer else join_mask) if: all_encoder_layers.append(hidden_states) if not output_all_encoded_layers: all_encoder_layers.append(hidden_states) return all_encoder_layers",True,output_all_encoded_layers,output_all_encoded_layers,0.6472333669662476 6240,"def copy_metadata_to(self, target_dir): """"""Copy metadata (egg info) to the target_dir"""""" norm_egg_info = os.path.normpath(self.egg_info) prefix = os.path.join(norm_egg_info, '') for path in self.ei_cmd.filelist.files: if: target = os.path.join(target_dir, path[len(prefix):]) ensure_directory(target) self.copy_file(path, target)",True,path.startswith(prefix),path.startswith(prefix),0.6418628692626953 6241,"def _get_matched_text(self, match): mapping = {} for name in self.goal.get_names(): node = match.get_ast(name) if: raise similarfinder.BadNameInCheckError('Unknown name <%s>' % name) force = self._is_expression() and match.ast == node mapping[name] = self._get_node_text(node, force) unindented = self.goal.substitute(mapping) return self._auto_indent(match.get_region()[0], unindented)",True,node is None,node is None,0.6591237783432007 6242,"def activate_environment(environment): """"""Activate an environment by name. Usage: >>> cpenv.activate_environment('MyEnvironment') Arguments: environment (str): Name of Environment Returns: list of Module objects that have been activated """""" for repo in get_repos(): for env in repo.list_environments(): if: return activate(env.requires) else: raise ResolveError('Failed to resolve Environment: %s' % environment)",False,env.name == environment,env.environment == environment,0.6545580625534058 6243,"def test_flow_func_parameters(self): G = self.G fv = 3.0 for interface_func in interface_funcs: for flow_func in flow_funcs: result = interface_func(G, 'x', 'y', flow_func=flow_func) if: result = result[0] assert_equal(fv, result, msg=msgi.format(flow_func.__name__, interface_func.__name__))",False,interface_func in max_min_funcs,"isinstance(result, tuple)",0.6468717455863953 6244,"def create_refresh_action_mask(self, player): """"""Create refresh action mask Invalid: - Player has less than 2 gold Refresh Action Vector: (1) """""" refresh_action_mask = 1 if: refresh_action_mask = 0 return refresh_action_mask",False,player.gold < 2,player.gold_v2,0.6533514857292175 6245,"def __enter__(self): if: self.__time = time = self.__timer() else: time = self.__time self.__nesting += 1 return time",False,self.__nesting == 0,self.__nesting == 1,0.6571447253227234 6246,"def __call__(self, node): method = getattr(self, '_' + node.__class__.__name__, None) if: return method(node) warnings.warn('Unknown node type <%s>; please report!' % node.__class__.__name__, RuntimeWarning) node.region = (self.source.offset, self.source.offset) if self.children: node.sorted_children = ast.get_children(node)",True,method is not None,method is not None,0.6576331853866577 6247,"def __call__(self, node): method = getattr(self, '_' + node.__class__.__name__, None) if method is not None: return method(node) warnings.warn('Unknown node type <%s>; please report!' % node.__class__.__name__, RuntimeWarning) node.region = (self.source.offset, self.source.offset) if: node.sorted_children = ast.get_children(node)",True,self.children,self.children,0.6560337543487549 6248,"@override_method(backend_Function, 'function_space') def Function_function_space(self, orig, orig_args): if: return self._tlm_adjoint__var_interface_attrs['space'] else: return orig_args()",False,"hasattr(self, '_tlm_adjoint__var_interface_attrs') and 'space' in self._tlm_adjoint__var_interface_attrs",self._tlm_adjoint__var_interface_attrs is not None,0.6507883071899414 6249,"def parse_publisher(self, root): publisher_node = root.xpath('//div[@class="" reissues""]/ul/li/text()') self.log('parse_publisher - publisher_node: ', publisher_node) if: if publisher_node[0].strip() == 'First Edition': return publisher_node[1].strip() else: return publisher_node[1].strip()",True,publisher_node,publisher_node,0.6625900864601135 6250,"def parse_publisher(self, root): publisher_node = root.xpath('//div[@class="" reissues""]/ul/li/text()') self.log('parse_publisher - publisher_node: ', publisher_node) if publisher_node: if: return publisher_node[1].strip() else: return publisher_node[1].strip()",False,publisher_node[0].strip() == 'First Edition','LL' in publisher_node[1].tag,0.644852876663208 6251,"def _restart(self, dt): """"""Prefill audio and attempt to replay audio."""""" if: self.refill_source_player() self._xa2_source_voice.play()",False,self._playing and self._xa2_source_voice,self._xa2_source_voice.used,0.6470900177955627 6252,"def exportLiteral(self, outfile, level, name_='compoundRefType'): level += 1 self.exportLiteralAttributes(outfile, level, name_) if: self.exportLiteralChildren(outfile, level, name_)",True,self.hasContent_(),self.hasContent_(),0.6502895951271057 6253,"def __getstate__(self) -> dict[str, Any]: if: s = self.__dict__.copy() s['_parent_seq'] = s['_parent_seq']() return s return self.__dict__",False,self.parent_seq is not None,'_parent_seq' in self.__dict__,0.6539090275764465 6254,"def _read(self, filename, filepath): if: self.config = ConfigParser.ConfigParser() self.config.optionxform = str self.config.read(filepath) return ('normal', filename +'read') else: return ('error', filename +'not found at:'+ filepath)",True,os.path.exists(filepath),os.path.exists(filepath),0.6445389986038208 6255,"def recursive_set(data, keys, value): """"""Recursively slice a nested dictionary by a list of keys and set the value. Parameters: data (dict): nested dictionary to get from. keys (list): list of keys/indices to delve into. value: value to store under key. Raises: KeyError: if any intermediate keys are missing. """""" if: if len(keys) == 1: data[keys[0]] = value else: return recursive_set(data[keys[0]], keys[1:], value) else: data[keys] = value",False,"isinstance(keys, (list, tuple))","isinstance(keys, slice)",0.6425410509109497 6256,"def recursive_set(data, keys, value): """"""Recursively slice a nested dictionary by a list of keys and set the value. Parameters: data (dict): nested dictionary to get from. keys (list): list of keys/indices to delve into. value: value to store under key. Raises: KeyError: if any intermediate keys are missing. """""" if isinstance(keys, (list, tuple)): if: data[keys[0]] = value else: return recursive_set(data[keys[0]], keys[1:], value) else: data[keys] = value",True,len(keys) == 1,len(keys) == 1,0.6482141017913818 6257,"def encode(self, name, data, check_types=True, check_constraints=False, **kwargs): """"""Encode given dictionary `data` as given type `name` and return the encoded data as a bytes object. If `check_types` is ``True`` all objects in `data` are checked against the expected Python type for its ASN.1 type. Set `check_types` to ``False`` to minimize the runtime overhead, but instead get less informative error messages. See `Types`_ for a mapping table from ASN.1 types to Python types. If `check_constraints` is ``True`` all objects in `data` are checked against their ASN.1 type constraints. A ConstraintsError exception is raised if the constraints are not fulfilled. Set `check_constraints` to ``False`` to skip the constraints check and minimize the runtime overhead, but instead get less informative error messages and allow encoding of values not fulfilling the constraints. >>> foo.encode('Question', {'id': 1, 'question': 'Is 1+1=3?'}) b'0\\x0e\\x02\\x01\\x01\\x16\\x09Is 1+1=3?' """""" try: type_ = self._types[name] except KeyError: raise EncodeError(""Type '{}' not found in types dictionary."".format(name)) if: type_.check_types(data) if check_constraints: type_.check_constraints(data) return bytes(type_.encode(data, **kwargs))",True,check_types,check_types,0.6557484269142151 6258,"def encode(self, name, data, check_types=True, check_constraints=False, **kwargs): """"""Encode given dictionary `data` as given type `name` and return the encoded data as a bytes object. If `check_types` is ``True`` all objects in `data` are checked against the expected Python type for its ASN.1 type. Set `check_types` to ``False`` to minimize the runtime overhead, but instead get less informative error messages. See `Types`_ for a mapping table from ASN.1 types to Python types. If `check_constraints` is ``True`` all objects in `data` are checked against their ASN.1 type constraints. A ConstraintsError exception is raised if the constraints are not fulfilled. Set `check_constraints` to ``False`` to skip the constraints check and minimize the runtime overhead, but instead get less informative error messages and allow encoding of values not fulfilling the constraints. >>> foo.encode('Question', {'id': 1, 'question': 'Is 1+1=3?'}) b'0\\x0e\\x02\\x01\\x01\\x16\\x09Is 1+1=3?' """""" try: type_ = self._types[name] except KeyError: raise EncodeError(""Type '{}' not found in types dictionary."".format(name)) if check_types: type_.check_types(data) if: type_.check_constraints(data) return bytes(type_.encode(data, **kwargs))",True,check_constraints,check_constraints,0.6558020114898682 6259,"def list_value(x): x = resolve1(x) if: if STRICT: raise PDFTypeError('List required: %r' % x) return [] return x",False,"not (isinstance(x, list) or isinstance(x, tuple))","not isinstance(x, (list, tuple))",0.6443597078323364 6260,"def list_value(x): x = resolve1(x) if not (isinstance(x, list) or isinstance(x, tuple)): if: raise PDFTypeError('List required: %r' % x) return [] return x",True,STRICT,STRICT,0.6751432418823242 6261,"def ffft(qubits: Sequence[cirq.Qid]) -> cirq.OP_TREE: """"""Performs fast fermionic Fourier transform. Generates a circuit that performs fast fermionic Fourier transform (FFFT) which transforms a set of fermionic creation operators $\\hat{a}_n^\\dagger$, $n \\in 1, 2, \\dots, N$ according to: $$ \\mathit{FFFT}^\\dagger \\tilde{a}_k^\\dagger \\mathit{FFFT} = {1 \\over \\sqrt{N}} \\sum_{n=0}^{N-1} e^{-i {2\\pi \\over N} n k} \\hat{a}^\\dagger_n \\,, $$ where $\\tilde{a}_k^\\dagger$ are transformed operators and $N$ is size of the input `qubits` sequence. This function assumes JWT representation of fermionic modes which are big-endian encoded on consecutive qubits: $a_0^\\dagger \\lvert 0.. \\rangle = \\lvert 1.. \\rangle$, $a_1^\\dagger \\lvert 0.. \\rangle = \\vert 01.. \\rangle$, $a_2^\\dagger \\lvert 0.. \\rangle = \\vert 001.. \\rangle$, $\\dots$. The gate count of generated circuit is $\\theta(N^2)$, generated circuit depth is $\\theta(N)$ and distinct gates count is $\\theta(N_1^2 + N_2^2 + \\dots + N_n^2)$, where $N = N_1 N_2 \\dots N_n$ is prime decomposition of $N$. In a case where $N$ is some power of 2, it reduces to $\\theta(\\log(N))$. An equivalent circuit can be generated using the `openfermion.bogoliubov_transform` function with appropriately prepared `transformation_matrix` argument:",False,n == 0,qubits.size() == 0,0.6610281467437744 6262,"def ffft(qubits: Sequence[cirq.Qid]) -> cirq.OP_TREE: """"""Performs fast fermionic Fourier transform. Generates a circuit that performs fast fermionic Fourier transform (FFFT) which transforms a set of fermionic creation operators $\\hat{a}_n^\\dagger$, $n \\in 1, 2, \\dots, N$ according to: $$ \\mathit{FFFT}^\\dagger \\tilde{a}_k^\\dagger \\mathit{FFFT} = {1 \\over \\sqrt{N}} \\sum_{n=0}^{N-1} e^{-i {2\\pi \\over N} n k} \\hat{a}^\\dagger_n \\,, $$ where $\\tilde{a}_k^\\dagger$ are transformed operators and $N$ is size of the input `qubits` sequence. This function assumes JWT representation of fermionic modes which are big-endian encoded on consecutive qubits: $a_0^\\dagger \\lvert 0.. \\rangle = \\lvert 1.. \\rangle$, $a_1^\\dagger \\lvert 0.. \\rangle = \\vert 01.. \\rangle$, $a_2^\\dagger \\lvert 0.. \\rangle = \\vert 001.. \\rangle$, $\\dots$. The gate count of generated circuit is $\\theta(N^2)$, generated circuit depth is $\\theta(N)$ and distinct gates count is $\\theta(N_1^2 + N_2^2 + \\dots + N_n^2)$, where $N = N_1 N_2 \\dots N_n$ is prime decomposition of $N$. In a case where $N$ is some power of 2, it reduces to $\\theta(\\log(N))$. An equivalent circuit can be generated using the `openfermion.bogoliubov_transform` function with appropriately prepared `transformation_matrix` argument:",False,n == 1,qubits.size() == 0,0.6610519289970398 6263,"def try_int(o: Any) -> Any: """"""Try to convert `o` to int, default to `o` if not possible."""""" if: return o if o.ndim else int(o) if isinstance(o, collections.abc.Sized) or getattr(o, '__array_interface__', False): return o try: return int(o) except: return o",False,"isinstance(o, (np.ndarray, Tensor))","isinstance(o, math.dim)",0.6475276947021484 6264,"def try_int(o: Any) -> Any: """"""Try to convert `o` to int, default to `o` if not possible."""""" if isinstance(o, (np.ndarray, Tensor)): return o if o.ndim else int(o) if: return o try: return int(o) except: return o",False,"isinstance(o, collections.abc.Sized) or getattr(o, '__array_interface__', False)","not isinstance(o, int)",0.6439752578735352 6265,"def create_action(self, name, callback, shortcuts=None): action = Gio.SimpleAction.new(name=name, parameter_type=None) action.connect('activate', callback) self.add_action(action=action) if: self.set_accels_for_action(detailed_action_name=f'app.{name}', accels=shortcuts)",True,shortcuts,shortcuts,0.6661825776100159 6266,"def copy_value(self, orig_key, new_key): """"""Copy value"""""" data = self.model.get_data() if: data.append(data[orig_key]) if isinstance(data, set): data.add(data[orig_key]) else: data[new_key] = data[orig_key] self.set_data(data)",False,"isinstance(data, list)","isinstance(data, dict)",0.6482599973678589 6267,"def copy_value(self, orig_key, new_key): """"""Copy value"""""" data = self.model.get_data() if isinstance(data, list): data.append(data[orig_key]) if: data.add(data[orig_key]) else: data[new_key] = data[orig_key] self.set_data(data)",False,"isinstance(data, set)",not new_key in data,0.6471239328384399 6268,"def cmp_name(event): if: return False return basename == event.name",False,"getattr(event, 'name') is None","not hasattr(event, 'name')",0.64491868019104 6269,"def options(self, context, module_options): """""" COMPUTER Computer name or wildcard ex: WIN-S10, WIN-* etc. Default: * """""" self.computer = None if: self.computer = module_options['COMPUTER']",True,'COMPUTER' in module_options,'COMPUTER' in module_options,0.6576352119445801 6270,"def feed(self, next_bytes): assert self._feeding view = _get_data_from_buffer(next_bytes) if: raise BufferFull if self._buf_checkpoint > 0: del self._buffer[:self._buf_checkpoint] self._buff_i -= self._buf_checkpoint self._buf_checkpoint = 0 self._buffer.extend(view)",False,len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size,self._buff_i < 0,0.6484838128089905 6271,"def feed(self, next_bytes): assert self._feeding view = _get_data_from_buffer(next_bytes) if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size: raise BufferFull if: del self._buffer[:self._buf_checkpoint] self._buff_i -= self._buf_checkpoint self._buf_checkpoint = 0 self._buffer.extend(view)",False,self._buf_checkpoint > 0,self._buf_checkpoint < self._max_buffer_size,0.6581629514694214 6272,"def init_weight(self): for ly in self.children(): if: nn.init.kaiming_normal_(ly.weight, a=1) if not ly.bias is None: nn.init.constant_(ly.bias, 0)",True,"isinstance(ly, nn.Conv2d)","isinstance(ly, nn.Conv2d)",0.6496706008911133 6273,"def init_weight(self): for ly in self.children(): if isinstance(ly, nn.Conv2d): nn.init.kaiming_normal_(ly.weight, a=1) if: nn.init.constant_(ly.bias, 0)",True,not ly.bias is None,not ly.bias is None,0.6541802883148193 6274,"def __call__(self, x, update=True): if: self.rs.push(x) if self.demean: x = x - self.rs.mean if self.destd: x = x / (self.rs.std + 1e-08) if self.clip: x = np.clip(x, -self.clip, self.clip) return x",True,update,update,0.6758875250816345 6275,"def __call__(self, x, update=True): if update: self.rs.push(x) if: x = x - self.rs.mean if self.destd: x = x / (self.rs.std + 1e-08) if self.clip: x = np.clip(x, -self.clip, self.clip) return x",False,self.demean,self.mean,0.6520663499832153 6276,"def __call__(self, x, update=True): if update: self.rs.push(x) if self.demean: x = x - self.rs.mean if: x = x / (self.rs.std + 1e-08) if self.clip: x = np.clip(x, -self.clip, self.clip) return x",True,self.destd,self.destd,0.6571958065032959 6277,"def __call__(self, x, update=True): if update: self.rs.push(x) if self.demean: x = x - self.rs.mean if self.destd: x = x / (self.rs.std + 1e-08) if: x = np.clip(x, -self.clip, self.clip) return x",True,self.clip,self.clip,0.6555068492889404 6278,"def Tab_4_F_Update_Equation(self): modifiers = QtWidgets.QApplication.keyboardModifiers() if: Eval = not App().optionWindow.cb_F_EvalF.isChecked() else: Eval = App().optionWindow.cb_F_EvalF.isChecked() Text = self.Tab_4_FormulaInput.text() AMaS_Object = self.Tab_4_Active_Equation self.Set_AMaS_Flags(AMaS_Object, f_eval=Eval) self.TC('WORK', AMaS_Object, lambda: AC.AMaS.UpdateEquation(AMaS_Object, Text=Text), self.Tab_4_F_Display)",False,modifiers == QtCore.Qt.ControlModifier,modifiers == QtWidgets.QApplication.ModifierType.CONTROL_MASK,0.6535496711730957 6279,"def hasContent_(self): if: return True else: return False",False,self.para is not None or self.sect1 is not None,self.valueOf_ is not None,0.6448482275009155 6280,"@add_python_message def named_arguments_must_follow_bare_star(message='', **_kwargs): _ = current_lang.translate if: return {} hint = _('Did you forget something after `*`?\n') cause = _('Assuming you were defining a function, you need\nto replace `*` by either `*arguments` or\nby `*, named_argument=value`.\n') return {'cause': cause,'suggest': hint}",False,message != 'named arguments must follow bare *','named arguments must follow barestar' not in message,0.6605154871940613 6281,"def load_conf(conf_file, local_dict): with open(conf_file) as fin: for line in fin: group = line.strip().split('=') if: continue local_dict[group[0]] = group[1] return local_dict",False,len(group) != 2,not group or group[0] == '.',0.6462176442146301 6282,"def _make_boundary(self): """""" creates a boundary for multipart post (form post)"""""" if: return '-===============%s==' % uuid.uuid4().get_hex() elif self.PY3: return '-===============%s==' % uuid.uuid4().hex else: from random import choice digits = '0123456789' letters = 'abcdefghijklmnopqrstuvwxyz' return '-===============%s==' % ''.join((choice(letters + digits) for i in range(15)))",True,self.PY2,self.PY2,0.6612326502799988 6283,"def _make_boundary(self): """""" creates a boundary for multipart post (form post)"""""" if self.PY2: return '-===============%s==' % uuid.uuid4().get_hex() elif: return '-===============%s==' % uuid.uuid4().hex else: from random import choice digits = '0123456789' letters = 'abcdefghijklmnopqrstuvwxyz' return '-===============%s==' % ''.join((choice(letters + digits) for i in range(15)))",True,self.PY3,self.PY3,0.6616452932357788 6284,"def get_response(self, task): backend_logs = Path(config.backend.logging.file_backend) if: return backend_logs.read_text().splitlines()[-100:] return []",False,backend_logs.is_file(),backend_logs.exists(),0.6490932703018188 6285,"def _writer(self, key): if: return None if key not in self._writers: self._writers[key] = self.SummaryWriter(os.path.join(self.tensorboard_logdir, key)) self._writers[key].add_text('args', str(vars(self.args))) self._writers[key].add_text('sys.argv',''.join(sys.argv)) return self._writers[key]",True,self.SummaryWriter is None,self.SummaryWriter is None,0.65156489610672 6286,"def _writer(self, key): if self.SummaryWriter is None: return None if: self._writers[key] = self.SummaryWriter(os.path.join(self.tensorboard_logdir, key)) self._writers[key].add_text('args', str(vars(self.args))) self._writers[key].add_text('sys.argv',''.join(sys.argv)) return self._writers[key]",True,key not in self._writers,key not in self._writers,0.6561585664749146 6287,"def meter(self, mkey, *args, **kwargs): if: raise ValueError(f'{mkey} is not registered') self.metrics[mkey]['metric'].record(*args, **kwargs)",True,mkey not in self.metrics,mkey not in self.metrics,0.6540051102638245 6288,"@property def project(self): if: self._project = self.get_project() return self._project",True,self._project is None,self._project is None,0.660429835319519 6289,"def inet_ntoa(packed_ip): """""" Convert an IP address from 32-bit packed binary format to string format. """""" if: raise TypeError('string type expected, not %s' % str(type(packed_ip))) if len(packed_ip)!= 4: raise ValueError('invalid length of packed IP address string') return '%d.%d.%d.%d' % _unpack('4B', packed_ip)",False,not _is_str(packed_ip),"not isinstance(packed_ip, str_cls)",0.6490377187728882 6290,"def inet_ntoa(packed_ip): """""" Convert an IP address from 32-bit packed binary format to string format. """""" if not _is_str(packed_ip): raise TypeError('string type expected, not %s' % str(type(packed_ip))) if: raise ValueError('invalid length of packed IP address string') return '%d.%d.%d.%d' % _unpack('4B', packed_ip)",False,len(packed_ip) != 4,packed_ip.size != 4,0.6518685817718506 6291,"def clone(self, value=None, tagSet=None, subtypeSpec=None, namedValues=None): if: return self if value is None: value = self._value if tagSet is None: tagSet = self._tagSet if subtypeSpec is None: subtypeSpec = self._subtypeSpec if namedValues is None: namedValues = self.__namedValues return self.__class__(value, tagSet, subtypeSpec, namedValues)",True,value is None and tagSet is None and (subtypeSpec is None) and (namedValues is None),value is None and tagSet is None and (subtypeSpec is None) and (namedValues is None),0.6470418572425842 6292,"def clone(self, value=None, tagSet=None, subtypeSpec=None, namedValues=None): if value is None and tagSet is None and (subtypeSpec is None) and (namedValues is None): return self if: value = self._value if tagSet is None: tagSet = self._tagSet if subtypeSpec is None: subtypeSpec = self._subtypeSpec if namedValues is None: namedValues = self.__namedValues return self.__class__(value, tagSet, subtypeSpec, namedValues)",True,value is None,value is None,0.656550407409668 6293,"def clone(self, value=None, tagSet=None, subtypeSpec=None, namedValues=None): if value is None and tagSet is None and (subtypeSpec is None) and (namedValues is None): return self if value is None: value = self._value if: tagSet = self._tagSet if subtypeSpec is None: subtypeSpec = self._subtypeSpec if namedValues is None: namedValues = self.__namedValues return self.__class__(value, tagSet, subtypeSpec, namedValues)",True,tagSet is None,tagSet is None,0.6545701026916504 6294,"def clone(self, value=None, tagSet=None, subtypeSpec=None, namedValues=None): if value is None and tagSet is None and (subtypeSpec is None) and (namedValues is None): return self if value is None: value = self._value if tagSet is None: tagSet = self._tagSet if: subtypeSpec = self._subtypeSpec if namedValues is None: namedValues = self.__namedValues return self.__class__(value, tagSet, subtypeSpec, namedValues)",True,subtypeSpec is None,subtypeSpec is None,0.6572755575180054 6295,"def clone(self, value=None, tagSet=None, subtypeSpec=None, namedValues=None): if value is None and tagSet is None and (subtypeSpec is None) and (namedValues is None): return self if value is None: value = self._value if tagSet is None: tagSet = self._tagSet if subtypeSpec is None: subtypeSpec = self._subtypeSpec if: namedValues = self.__namedValues return self.__class__(value, tagSet, subtypeSpec, namedValues)",True,namedValues is None,namedValues is None,0.6558780670166016 6296,"def update(self, values=None, **kwargs): if: values = {} values.update(kwargs) for k, v in values.items(): if isinstance(v, torch.Tensor): v = v.detach() self[k].update(v)",True,values is None,values is None,0.6580597758293152 6297,"def update(self, values=None, **kwargs): if values is None: values = {} values.update(kwargs) for k, v in values.items(): if: v = v.detach() self[k].update(v)",True,"isinstance(v, torch.Tensor)","isinstance(v, torch.Tensor)",0.6466143727302551 6298,"def open_random_portal(self): portals = self.get_elements(CustomerPortalsSelectors.portal_from_list) if: portal = random.choice(portals) else: portal = portals[0] portal.click()",False,len(portals) > 1,"isinstance(portals, Iterable)",0.6584564447402954 6299,"def item(self, decoder, style, value=None): """"""Begin a list item. :Parameters: `decoder` : `StructuredTextDecoder` Decoder. `style` : dict Style dictionary that applies over the list item. `value` : str Optional value of the list item. The meaning is list-type dependent. """""" mark = self.get_mark(value) if: decoder.add_text(mark) decoder.add_text('\t')",False,mark,mark is not None,0.6668674349784851 6300,"def Swipe_String(self, event): if: ex = event.x() ey = event.y() if (ex >= 0 and ex <= self.widget_width) and ey <= 0: self.SIGNAL_TEXT.emit('APPLY') elif (ex >= 0 and ex <= self.widget_width) and ey >= self.widget_height: self.SIGNAL_TEXT.emit('SAVE') else: self.SIGNAL_TEXT.emit('')",False,self.press == 'operation',self.widget_width != 0 and self.widget_height != 0,0.6539088487625122 6301,"def Swipe_String(self, event): if self.press == 'operation': ex = event.x() ey = event.y() if: self.SIGNAL_TEXT.emit('APPLY') elif (ex >= 0 and ex <= self.widget_width) and ey >= self.widget_height: self.SIGNAL_TEXT.emit('SAVE') else: self.SIGNAL_TEXT.emit('')",False,(ex >= 0 and ex <= self.widget_width) and ey <= 0,ex == 0 and ey >= self.widget_height,0.6513714790344238 6302,"def Swipe_String(self, event): if self.press == 'operation': ex = event.x() ey = event.y() if (ex >= 0 and ex <= self.widget_width) and ey <= 0: self.SIGNAL_TEXT.emit('APPLY') elif: self.SIGNAL_TEXT.emit('SAVE') else: self.SIGNAL_TEXT.emit('')",False,(ex >= 0 and ex <= self.widget_width) and ey >= self.widget_height,ex >= 0 and ex <= self.widget_width and (ey >= 0),0.6505745649337769 6303,"def get_model_params(model_name, override_params): """""" Get the block args and global params for a given model """""" if: w, d, s, p = efficientnet_params(model_name) blocks_args, global_params = efficientnet(width_coefficient=w, depth_coefficient=d, dropout_rate=p, image_size=s) else: raise NotImplementedError('model name is not pre-defined: %s' % model_name) if override_params: global_params = global_params._replace(**override_params) return (blocks_args, global_params)",True,model_name.startswith('efficientnet'),model_name.startswith('efficientnet'),0.6444861888885498 6304,"def get_model_params(model_name, override_params): """""" Get the block args and global params for a given model """""" if model_name.startswith('efficientnet'): w, d, s, p = efficientnet_params(model_name) blocks_args, global_params = efficientnet(width_coefficient=w, depth_coefficient=d, dropout_rate=p, image_size=s) else: raise NotImplementedError('model name is not pre-defined: %s' % model_name) if: global_params = global_params._replace(**override_params) return (blocks_args, global_params)",True,override_params,override_params,0.657610297203064 6305,"def forward(self, x): if: size = [int(t * self.scale_factor) for t in x.shape[-2:]] else: size = self.size return resize(x, size, None, self.mode, self.align_corners)",True,not self.size,not self.size,0.650739312171936 6306,"def tick(self): self.count += 1 if: self.report()",False,self.count % self.step == 0,self.count % self.interval == 0,0.6491806507110596 6307,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if isinstance(auth, tuple) and len(auth) == 2: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,auth is None,auth is None,0.6572875380516052 6308,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if: if isinstance(auth, tuple) and len(auth) == 2: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,auth,auth,0.6750684380531311 6309,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,"isinstance(auth, tuple) and len(auth) == 2","isinstance(auth, tuple) and len(auth) == 2",0.6471284627914429 6310,"@assert_passes() def test_len_condition(self) -> None: from typing_extensions import Any, assert_type def capybara(file_list, key, ids): has_bias = len(key) > 0 data = [] for _ in file_list: assert_type(key, Any) if: assert_type(key, Any) data = [ids, data[key]] else: data = [ids]",True,has_bias,has_bias,0.6552895307540894 6311,"@property def asArcPyObject(self): """""" returns the Point as an ESRI arcpy.MultiPoint object """""" if: raise Exception('ArcPy is required to use this function') return arcpy.AsShape(self.asDictionary, True)",True,arcpyFound == False,arcpyFound == False,0.6635043621063232 6312,"def _get_invoice_sale(self): invoice = super()._get_invoice_sale() if: invoice.invoice_date = self.invoice_term.get_date(date=self._invoice_term_date) return invoice",False,self.invoice_term,self.invoice_term and self.invoice_term_date,0.6546880006790161 6313,"def expandNode(self, node): event = self.getEvent() parents = [node] while event: token, cur_node = event if: return if token!= END_ELEMENT: parents[-1].appendChild(cur_node) if token == START_ELEMENT: parents.append(cur_node) elif token == END_ELEMENT: del parents[-1] event = self.getEvent()",True,cur_node is node,cur_node is node,0.6548701524734497 6314,"def expandNode(self, node): event = self.getEvent() parents = [node] while event: token, cur_node = event if cur_node is node: return if: parents[-1].appendChild(cur_node) if token == START_ELEMENT: parents.append(cur_node) elif token == END_ELEMENT: del parents[-1] event = self.getEvent()",True,token != END_ELEMENT,token != END_ELEMENT,0.6568528413772583 6315,"def expandNode(self, node): event = self.getEvent() parents = [node] while event: token, cur_node = event if cur_node is node: return if token!= END_ELEMENT: parents[-1].appendChild(cur_node) if: parents.append(cur_node) elif token == END_ELEMENT: del parents[-1] event = self.getEvent()",True,token == START_ELEMENT,token == START_ELEMENT,0.6580886244773865 6316,"def expandNode(self, node): event = self.getEvent() parents = [node] while event: token, cur_node = event if cur_node is node: return if token!= END_ELEMENT: parents[-1].appendChild(cur_node) if token == START_ELEMENT: parents.append(cur_node) elif: del parents[-1] event = self.getEvent()",True,token == END_ELEMENT,token == END_ELEMENT,0.6578816175460815 6317,"def createToolInstanceByClass(self, packageName, toolName, toolClass=DockTool): registeredTools = GET_TOOLS() for ToolClass in registeredTools[packageName]: supportedSoftwares = ToolClass.supportedSoftwares() if: if self.currentSoftware not in supportedSoftwares: continue if issubclass(ToolClass, toolClass): if ToolClass.name() == toolName: return ToolClass() return None",False,'any' not in supportedSoftwares,len(supportedSoftwares) > 0,0.6476914882659912 6318,"def createToolInstanceByClass(self, packageName, toolName, toolClass=DockTool): registeredTools = GET_TOOLS() for ToolClass in registeredTools[packageName]: supportedSoftwares = ToolClass.supportedSoftwares() if 'any' not in supportedSoftwares: if self.currentSoftware not in supportedSoftwares: continue if: if ToolClass.name() == toolName: return ToolClass() return None",False,"issubclass(ToolClass, toolClass)",toolClass,0.6477935314178467 6319,"def createToolInstanceByClass(self, packageName, toolName, toolClass=DockTool): registeredTools = GET_TOOLS() for ToolClass in registeredTools[packageName]: supportedSoftwares = ToolClass.supportedSoftwares() if 'any' not in supportedSoftwares: if: continue if issubclass(ToolClass, toolClass): if ToolClass.name() == toolName: return ToolClass() return None",False,self.currentSoftware not in supportedSoftwares,toolClass.name() in supportedSoftwares,0.6465623378753662 6320,"def createToolInstanceByClass(self, packageName, toolName, toolClass=DockTool): registeredTools = GET_TOOLS() for ToolClass in registeredTools[packageName]: supportedSoftwares = ToolClass.supportedSoftwares() if 'any' not in supportedSoftwares: if self.currentSoftware not in supportedSoftwares: continue if issubclass(ToolClass, toolClass): if: return ToolClass() return None",False,ToolClass.name() == toolName,toolClass.checkDependencies(),0.6548135280609131 6321,"def _get_smart_print(filename=None): def smart_print(*msg, **kwargs): if: with open(filename, 'a') as fh: print(*msg, file=fh, flush=True, **kwargs) else: print(*msg, flush=True, **kwargs) return smart_print",True,filename is not None,filename is not None,0.6500318050384521 6322,"def kl_divergence(self): kl = dist.kl_divergence(self.weight_dist, self.prior_weight_dist).sum() if: kl += dist.kl_divergence(self.bias_dist, self.prior_bias_dist).sum() return kl",False,self.has_bias,self.bias_dist is not None,0.6538529396057129 6323,"def test_isinstance_untyped_items_in_var_type() -> None: items = [1, 2, 3] popper: EnterpriserRandomPopper = EnterpriserRandomPopper[int](items) if: reveal_type(popper) assert isinstance(popper, randompop.RandomPopper)",False,TYPE_CHECKING,"isinstance(popper, EnterpriserRandomPopper)",0.663050651550293 6324,"def extract_feat(self, img, history): """"""Directly extract features from the backbone+neck """""" history = self.history_transform(history) x = self.backbone(img, history) if: x = self.neck(x) return x",True,self.with_neck,self.with_neck,0.6596142053604126 6325,"def reset(self): CharSetProber.reset(self) self._mActiveNum = 0 for prober in self._mProbers: if: prober.reset() prober.active = True self._mActiveNum += 1 self._mBestGuessProber = None",True,prober,prober,0.6600558757781982 6326,"@app.callback(Output('first-name-result', 'data'), [Input('first-name-write-button', 'n_clicks'), Input('first-name-gender-dropdown', 'value')], [State('first-name-input', 'value')]) def update_first_name_db(n_clicks, gender, name): """"""Take first name inputs from the user and write to first name cache"""""" ctx = dash.callback_context if: with MongoClient(**MONGO_ARGS) as connection: collection = connection[GENDER_DB][FIRST_NAME_COL] upsert_cache(collection, name, gender) return 'Updated first name cache..'",False,'first-name-write-button' in ctx.triggered[0]['prop_id'],ctx.get_connection_string() == 'db',0.6450448632240295 6327,"def __init__(self, parent=None): QtGui.QTabWidget.__init__(self, parent) self.setContentsMargins(0, 0, 0, 0) self.tabDict = {} self.currentTabNumber = 0 if: self.connect(parent, QtCore.SIGNAL('orientationChanged(Qt::Orientation)'), self.orientationEvent)",False,parent,parent is not None,0.6766825914382935 6328,"def load_weights(self, weights): if: return self.start_epoch = 0 load_pretrain_weight(self.model, weights) load_pretrain_weight(self.ema.model, weights) logger.info('Load weights {} to start training for teacher and student'.format(weights))",False,self.is_loaded_weights,self.start_epoch,0.6460462212562561 6329,"def forward(self, xs, flows=None): out = self.conv(xs) if: out = self.activation(out) if self.norm is not None: out = self.norm_layer(out) return out",True,self.activation is not None,self.activation is not None,0.6501758694648743 6330,"def forward(self, xs, flows=None): out = self.conv(xs) if self.activation is not None: out = self.activation(out) if: out = self.norm_layer(out) return out",True,self.norm is not None,self.norm is not None,0.6467969417572021 6331,"def tearDown(self): for nid in self.nids: quick_delete_cmd = ['./workbench', '--config', self.create_config_file_path, '--quick_delete_node', 'https://islandora.traefik.me/node/' + nid] quick_delete_output = subprocess.check_output(quick_delete_cmd) self.rollback_file_path = os.path.join(self.current_dir, 'assets', 'create_from_files_test', 'files', 'rollback.csv') if: os.remove(self.rollback_file_path) if os.path.exists(self.rollback_file_path): os.remove(self.rollback_file_path)",True,os.path.exists(self.rollback_file_path),os.path.exists(self.rollback_file_path),0.642707884311676 6332,"def tearDown(self): for nid in self.nids: quick_delete_cmd = ['./workbench', '--config', self.create_config_file_path, '--quick_delete_node', 'https://islandora.traefik.me/node/' + nid] quick_delete_output = subprocess.check_output(quick_delete_cmd) self.rollback_file_path = os.path.join(self.current_dir, 'assets', 'create_from_files_test', 'files', 'rollback.csv') if os.path.exists(self.rollback_file_path): os.remove(self.rollback_file_path) if: os.remove(self.rollback_file_path)",True,os.path.exists(self.rollback_file_path),os.path.exists(self.rollback_file_path),0.6432046890258789 6333,"def run(self): from dragonfly.test.suites import run_pytest_suite test_suite = self.test_suite if: self._try_local_natlink_pyd() print(""Test suite running for engine '%s'"" % test_suite) result = run_pytest_suite(test_suite, self.pytest_options) exit(int(result))",False,test_suite == 'natlink',not test_suite,0.6527841091156006 6334,"def new_commit_tx(self, b58_iss_address: str, b58_payer_address: str, gas_limit: int, gas_price: int, hex_contract_address: str='') -> InvokeTransaction: if: raise SDKException(ErrorCode.require_str_params) if len(hex_contract_address) == 40: self.__sdk.neo_vm.claim_record().hex_contract_address = hex_contract_address tx = self.__sdk.neo_vm.claim_record().new_commit_tx(self.payload.jti, b58_iss_address, self.payload.sub, b58_payer_address, gas_limit, gas_price) return tx",True,"not isinstance(hex_contract_address, str)","not isinstance(hex_contract_address, str)",0.6490428447723389 6335,"def new_commit_tx(self, b58_iss_address: str, b58_payer_address: str, gas_limit: int, gas_price: int, hex_contract_address: str='') -> InvokeTransaction: if not isinstance(hex_contract_address, str): raise SDKException(ErrorCode.require_str_params) if: self.__sdk.neo_vm.claim_record().hex_contract_address = hex_contract_address tx = self.__sdk.neo_vm.claim_record().new_commit_tx(self.payload.jti, b58_iss_address, self.payload.sub, b58_payer_address, gas_limit, gas_price) return tx",False,len(hex_contract_address) == 40,"isinstance(hex_contract_address, str)",0.650783121585846 6336,"def _add_defaults_data_files(self): if: for item in self.distribution.data_files: if isinstance(item, str): item = convert_path(item) if os.path.isfile(item): self.filelist.append(item) else: dirname, filenames = item for f in filenames: f = convert_path(f) if os.path.isfile(f): self.filelist.append(f)",True,self.distribution.has_data_files(),self.distribution.has_data_files(),0.6451038122177124 6337,"def _add_defaults_data_files(self): if self.distribution.has_data_files(): for item in self.distribution.data_files: if: item = convert_path(item) if os.path.isfile(item): self.filelist.append(item) else: dirname, filenames = item for f in filenames: f = convert_path(f) if os.path.isfile(f): self.filelist.append(f)",True,"isinstance(item, str)","isinstance(item, str)",0.6435043215751648 6338,"def _add_defaults_data_files(self): if self.distribution.has_data_files(): for item in self.distribution.data_files: if isinstance(item, str): item = convert_path(item) if: self.filelist.append(item) else: dirname, filenames = item for f in filenames: f = convert_path(f) if os.path.isfile(f): self.filelist.append(f)",True,os.path.isfile(item),os.path.isfile(item),0.6437784433364868 6339,"def _add_defaults_data_files(self): if self.distribution.has_data_files(): for item in self.distribution.data_files: if isinstance(item, str): item = convert_path(item) if os.path.isfile(item): self.filelist.append(item) else: dirname, filenames = item for f in filenames: f = convert_path(f) if: self.filelist.append(f)",True,os.path.isfile(f),os.path.isfile(f),0.6429249048233032 6340,"def setVersion(self, *args): """""" set table version Args: **dbTable** (str): sql table name **version** (str): table version """""" version = self.version(args[0]) self.__lastError = None if: sqlSetVersion = '\n INSERT INTO\n dbInfo(dbTable, version)\n VALUES(?,?)\n ' self.sqlExecute(sqlSetVersion, *args) else: sqlSetVersion = '\n UPDATE dbInfo SET\n version =?\n WHERE dbTable =?;\n ' self.sqlExecute(sqlSetVersion, args[1], args[0])",False,version is None,args[1] is None,0.6529588103294373 6341,"def __init__(self, learn: Learner, monitor: str='valid_loss', mode: str='auto'): super().__init__(learn) self.monitor, self.mode = (monitor, mode) if: warn(f'{self.__class__} mode {self.mode} is invalid, falling back to ""auto"" mode.') self.mode = 'auto' mode_dict = {'min': np.less,'max': np.greater} mode_dict['auto'] = np.less if 'loss' in self.monitor else np.greater self.operator = mode_dict[self.mode]",False,"self.mode not in ['auto', 'min', 'max']","self.mode not in ['auto', 'valid_loss']",0.6534607410430908 6342,"def recv_notifyBuddyOnAir(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = notifyBuddyOnAir_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'notifyBuddyOnAir failed: unknown result')",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.6538721323013306 6343,"def recv_notifyBuddyOnAir(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = notifyBuddyOnAir_result() result.read(iprot) iprot.readMessageEnd() if: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'notifyBuddyOnAir failed: unknown result')",True,result.success is not None,result.success is not None,0.6498215198516846 6344,"def recv_notifyBuddyOnAir(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = notifyBuddyOnAir_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT, 'notifyBuddyOnAir failed: unknown result')",True,result.e is not None,result.e is not None,0.6511609554290771 6345,"def range(self, value=None): """""" Get or set dynamic range. """""" if: res = self.read(XYZ_DATA_CFG) & XYZ_DATA_CFG_FS_MASK else: self._require_standby_mode() val = dict_key_by_value(RANGES, value) res = self.read_write(XYZ_DATA_CFG, XYZ_DATA_CFG_FS_MASK, val) & XYZ_DATA_CFG_FS_MASK self._range = RANGES.get(res) return self._range",False,value == None,value is None,0.6628149747848511 6346,"def line_content_counts_as_uncovered_manual(content: str) -> bool: """""" Args: content: A line with indentation and tail comments/space removed. Returns: Whether the line could be included in the coverage report. """""" if: return False for keyword in ['def', 'class']: if content.startswith(keyword) and content.endswith(':'): return False return True",True,not content,not content,0.6586971282958984 6347,"def line_content_counts_as_uncovered_manual(content: str) -> bool: """""" Args: content: A line with indentation and tail comments/space removed. Returns: Whether the line could be included in the coverage report. """""" if not content: return False for keyword in ['def', 'class']: if: return False return True",False,content.startswith(keyword) and content.endswith(':'),content.find(keyword) >= 0,0.6421977877616882 6348,"def main(): tmpdir = None try: tmpdir = tempfile.mkdtemp() pip_zip = os.path.join(tmpdir, 'pip.zip') with open(pip_zip, 'wb') as fp: fp.write(b85decode(DATA.replace(b'\n', b''))) sys.path.insert(0, pip_zip) bootstrap(tmpdir=tmpdir) finally: if: shutil.rmtree(tmpdir, ignore_errors=True)",True,tmpdir,tmpdir,0.6563165187835693 6349,"def forward(self, x): input = x x = self.global_avgpool(x) x = self.fc1(x) if: x = self.norm1(x) x = self.relu(x) x = self.fc2(x) if self.gate_activation is not None: x = self.gate_activation(x) if self.return_gates: return x return input * x",True,self.norm1 is not None,self.norm1 is not None,0.6456964612007141 6350,"def forward(self, x): input = x x = self.global_avgpool(x) x = self.fc1(x) if self.norm1 is not None: x = self.norm1(x) x = self.relu(x) x = self.fc2(x) if: x = self.gate_activation(x) if self.return_gates: return x return input * x",True,self.gate_activation is not None,self.gate_activation is not None,0.6459740400314331 6351,"def forward(self, x): input = x x = self.global_avgpool(x) x = self.fc1(x) if self.norm1 is not None: x = self.norm1(x) x = self.relu(x) x = self.fc2(x) if self.gate_activation is not None: x = self.gate_activation(x) if: return x return input * x",True,self.return_gates,self.return_gates,0.6471259593963623 6352,"def split_tags(tags, separator=','): """""" Splits string tag list using comma or another separator char, maintain order and removes duplicate items. @param tags List of tags separated by attribute separator (default:,) @param separator Separator char. @return Ordered list of tags. """""" if: return [] tags = re.sub('\\s*{0}+\\s*'.format(re.escape(separator)), separator, tags) tags = re.sub('[\n\t\r]', '', tags) tags = tags.strip().split(separator) tags = filter(None, tags) return OrderedDict.fromkeys(tags).keys()",False,not tags,tags is None,0.6518932580947876 6353,"def exit(self, status=0, msg=None): if: sys.stderr.write(msg) sys.exit(status)",True,msg,msg,0.6822134256362915 6354,"@property def physics(self): if: return self.sim else: return self.model",True,mujoco_py.get_version() >= '1.50',mujoco_py.get_version() >= '1.50',0.6487847566604614 6355,"def follow(self, playerkey=None): if: squad_pref = None squad = self.squad else: squad_pref = get_player(self.protocol, playerkey) squad = squad_pref.squad if squad_pref.team is not self.team: return '%s is not on your team!' % squad_pref.name if squad_pref is self: return ""You can't follow yourself!"" if squad_pref.squad is None: return '%s is not in a squad and cannot be followed.' % squad_pref.name return self.join_squad(squad, squad_pref)",False,playerkey is None,self.squad,0.6520087718963623 6356,"def follow(self, playerkey=None): if playerkey is None: squad_pref = None squad = self.squad else: squad_pref = get_player(self.protocol, playerkey) squad = squad_pref.squad if: return '%s is not on your team!' % squad_pref.name if squad_pref is self: return ""You can't follow yourself!"" if squad_pref.squad is None: return '%s is not in a squad and cannot be followed.' % squad_pref.name return self.join_squad(squad, squad_pref)",False,squad_pref.team is not self.team,squad is None,0.646113395690918 6357,"def follow(self, playerkey=None): if playerkey is None: squad_pref = None squad = self.squad else: squad_pref = get_player(self.protocol, playerkey) squad = squad_pref.squad if squad_pref.team is not self.team: return '%s is not on your team!' % squad_pref.name if: return ""You can't follow yourself!"" if squad_pref.squad is None: return '%s is not in a squad and cannot be followed.' % squad_pref.name return self.join_squad(squad, squad_pref)",False,squad_pref is self,squad is None and self.squad is None,0.6503170728683472 6358,"def follow(self, playerkey=None): if playerkey is None: squad_pref = None squad = self.squad else: squad_pref = get_player(self.protocol, playerkey) squad = squad_pref.squad if squad_pref.team is not self.team: return '%s is not on your team!' % squad_pref.name if squad_pref is self: return ""You can't follow yourself!"" if: return '%s is not in a squad and cannot be followed.' % squad_pref.name return self.join_squad(squad, squad_pref)",False,squad_pref.squad is None,squad not in squad,0.64603590965271 6359,"def _get_runstate_key(self, state_name: str) -> str | None: """"""Return the run state key based on state name."""""" key = self.getkey(self._get_state_key(POWER_STATUS_KEY)) if: return None mapping = self.model_info.value(key).options for key, val in mapping.items(): if state_name in val: return key return None",False,not self.model_info.is_enum_type(key),not key,0.6457954049110413 6360,"def _get_runstate_key(self, state_name: str) -> str | None: """"""Return the run state key based on state name."""""" key = self.getkey(self._get_state_key(POWER_STATUS_KEY)) if not self.model_info.is_enum_type(key): return None mapping = self.model_info.value(key).options for key, val in mapping.items(): if: return key return None",False,state_name in val,val.name == state_name,0.6564180850982666 6361,"def _onCurrentChanged(self, index): """""" Handle the 'currentChanged' signal for the tab widget. """""" container = self.widget(index) if: return manager = container.manager() if manager is None: return area = manager.dock_area() if area is None: return if area.dockEventsEnabled(): event = QDockItemEvent(DockTabSelected, container.objectName()) QApplication.postEvent(area, event)",False,container is None,not container,0.6581934690475464 6362,"def _onCurrentChanged(self, index): """""" Handle the 'currentChanged' signal for the tab widget. """""" container = self.widget(index) if container is None: return manager = container.manager() if: return area = manager.dock_area() if area is None: return if area.dockEventsEnabled(): event = QDockItemEvent(DockTabSelected, container.objectName()) QApplication.postEvent(area, event)",False,manager is None,not manager,0.6568182110786438 6363,"def _onCurrentChanged(self, index): """""" Handle the 'currentChanged' signal for the tab widget. """""" container = self.widget(index) if container is None: return manager = container.manager() if manager is None: return area = manager.dock_area() if: return if area.dockEventsEnabled(): event = QDockItemEvent(DockTabSelected, container.objectName()) QApplication.postEvent(area, event)",True,area is None,area is None,0.6545508503913879 6364,"def _onCurrentChanged(self, index): """""" Handle the 'currentChanged' signal for the tab widget. """""" container = self.widget(index) if container is None: return manager = container.manager() if manager is None: return area = manager.dock_area() if area is None: return if: event = QDockItemEvent(DockTabSelected, container.objectName()) QApplication.postEvent(area, event)",False,area.dockEventsEnabled(),area.isVisible(),0.6481541991233826 6365,"@property def modified(self): if: value = self.get_client_value() return value!= self.get_value() return False",False,self.record and self.field,self.is_client,0.6521775722503662 6366,"def _cleanup_dtemp(): if: _paths = [] for dir in _DTEMP_ERROR_STACK: name = os.path.basename(os.path.normpath(dir)) src = os.path.dirname(os.path.normpath(dir)) dst = os.path.join(tempfile.gettempdir(), name) shutil.copytree(os.path.normpath(dir), dst) _paths += dst LOGGER.warning('The following temporary directories were not deleted due to build errors: %s.\n', ', '.join(_paths))",True,_DTEMP_ERROR_STACK,_DTEMP_ERROR_STACK,0.6505910158157349 6367,"def ProcessMgrEvent(self, event): """""" Process the AUI events sent to the manager. :param `event`: the event to process, an instance of :class:`AuiManagerEvent`. """""" if: if self._frame.GetEventHandler().ProcessEvent(event): return self.ProcessEvent(event)",False,self._frame,"isinstance(event, AuiManagerEvent)",0.6624975204467773 6368,"def ProcessMgrEvent(self, event): """""" Process the AUI events sent to the manager. :param `event`: the event to process, an instance of :class:`AuiManagerEvent`. """""" if self._frame: if: return self.ProcessEvent(event)",False,self._frame.GetEventHandler().ProcessEvent(event),event.GetEventType() != self._frame.EventType.AUI,0.6452947854995728 6369,"def seek(self, ts): minute = int(ts / 60) if: return False self._current_log = minute self._idx = 0 while self.tell() < ts: self._inc() return True",False,minute >= len(self._log_paths) or self._log_paths[minute] is None,minute < 0,0.6467756032943726 6370,"def _maybe_rename_unnumbered_epoch_to_generation(event): """""" Event map function to accommodate PR #1081, ~July 2020. This is compatible with old logs from before PR #1046, where events were given an ""epoch"" number except for the untrained model which had no ""epoch"". This is a pragmatic approach to short-term log back-compatibility. It is called on all logged events if fix_missing_epoch is True. It can be removed in the future if/when we no longer need to read logs from before #1046. """""" if: assert 'epoch' not in event elif 'epoch' in event: warnings.warn('renumbering old ""epoch"": in event to ""generation"": . This behaviour will be removed soon', DeprecationWarning) event['generation'] = event.pop('epoch') + 1 elif event['event'] in ['rollout_end', 'expr_value_info']: warnings.warn('Adding ""generation"" to epoch-less rollout_end or expr_value_info event. This behaviour will be removed soon', DeprecationWarning) event['generation'] = 0 return event",False,'generation' in event,event['event'] == 'untrained',0.6604738831520081 6371,"def _maybe_rename_unnumbered_epoch_to_generation(event): """""" Event map function to accommodate PR #1081, ~July 2020. This is compatible with old logs from before PR #1046, where events were given an ""epoch"" number except for the untrained model which had no ""epoch"". This is a pragmatic approach to short-term log back-compatibility. It is called on all logged events if fix_missing_epoch is True. It can be removed in the future if/when we no longer need to read logs from before #1046. """""" if 'generation' in event: assert 'epoch' not in event elif: warnings.warn('renumbering old ""epoch"": in event to ""generation"": . This behaviour will be removed soon', DeprecationWarning) event['generation'] = event.pop('epoch') + 1 elif event['event'] in ['rollout_end', 'expr_value_info']: warnings.warn('Adding ""generation"" to epoch-less rollout_end or expr_value_info event. This behaviour will be removed soon', DeprecationWarning) event['generation'] = 0 return event",False,'epoch' in event,event['event'] in ['unnumbered_epoch'],0.6646728515625 6372,"def _maybe_rename_unnumbered_epoch_to_generation(event): """""" Event map function to accommodate PR #1081, ~July 2020. This is compatible with old logs from before PR #1046, where events were given an ""epoch"" number except for the untrained model which had no ""epoch"". This is a pragmatic approach to short-term log back-compatibility. It is called on all logged events if fix_missing_epoch is True. It can be removed in the future if/when we no longer need to read logs from before #1046. """""" if 'generation' in event: assert 'epoch' not in event elif 'epoch' in event: warnings.warn('renumbering old ""epoch"": in event to ""generation"": . This behaviour will be removed soon', DeprecationWarning) event['generation'] = event.pop('epoch') + 1 elif: warnings.warn('Adding ""generation"" to epoch-less rollout_end or expr_value_info event. This behaviour will be removed soon', DeprecationWarning) event['generation'] = 0 return event",False,"event['event'] in ['rollout_end', 'expr_value_info']",'generation' not in event and event['generation'] > 0 and (not expr_value_info),0.6445969343185425 6373,"def _to_services(self, controller_service) -> List[dict]: service = {} for variable, value in controller_service.labels.items(): if: continue real_variable = variable.replace('bunkerweb.', '', 1) if not self._is_setting_context(real_variable,'multisite'): continue service[real_variable] = value return [service]",False,not variable.startswith('bunkerweb.'),variable == 'bunkerweb',0.6482013463973999 6374,"def _to_services(self, controller_service) -> List[dict]: service = {} for variable, value in controller_service.labels.items(): if not variable.startswith('bunkerweb.'): continue real_variable = variable.replace('bunkerweb.', '', 1) if: continue service[real_variable] = value return [service]",False,"not self._is_setting_context(real_variable, 'multisite')",real_variable in service,0.6450292468070984 6375,"def split_params_into_shared_and_expert_params(params: List[torch.nn.Parameter]) -> Tuple[torch.nn.Parameter, torch.nn.Parameter]: shared_params, expert_params = ([], []) for p in params: if: expert_params.append(p) else: shared_params.append(p) return (shared_params, expert_params)",False,is_moe_param(p),p.requires_grad,0.6472024917602539 6376,"def HlsNetNode_numberForEachInputNormalized(node: HlsNetNode, val: Union[float, Tuple[float]], scale: float) -> Tuple[int]: if: return tuple((int(val // scale) for _ in node._inputs)) else: val = tuple(val) assert len(val) == len(node._inputs), (node, val, node._inputs) return tuple((int(v // scale) for v in val))",True,"isinstance(val, (float, int))","isinstance(val, (float, int))",0.6499000787734985 6377,"def output_paging(data_object): if: return output_status_message('* * * Begin output_paging * * *') output_status_message('Index: {0}'.format(data_object.Index)) output_status_message('Size: {0}'.format(data_object.Size)) output_status_message('* * * End output_paging * * *')",True,data_object is None,data_object is None,0.6507973670959473 6378,"def ioa(masks1, masks2): """"""Computes pairwise intersection-over-area between box collections. Intersection-over-area (ioa) between two masks, mask1 and mask2 is defined as their intersection area over mask2's area. Note that ioa is not symmetric, that is, IOA(mask1, mask2)!= IOA(mask2, mask1). Args: masks1: a numpy array with shape [N, height, width] holding N masks. Masks values are of type np.uint8 and values are in {0,1}. masks2: a numpy array with shape [M, height, width] holding N masks. Masks values are of type np.uint8 and values are in {0,1}. Returns: a numpy array with shape [N, M] representing pairwise ioa scores. Raises: ValueError: If masks1 and masks2 are not of type np.uint8. """""" if: raise ValueError('masks1 and masks2 should be of type np.uint8') intersect = intersection(masks1, masks2) areas = np.expand_dims(area(masks2), axis=0) return intersect / (areas + EPSILON)",True,masks1.dtype != np.uint8 or masks2.dtype != np.uint8,masks1.dtype != np.uint8 or masks2.dtype != np.uint8,0.64999920129776 6379,"@unittest.skipIf(not torch.cuda.is_available(), 'CUDA not available') def test_opt_SYMEIG_LS_backtracking_SYMARP_gpu(self): if: self.skipTest('test skipped: missing scipy') args.GLOBALARGS_device = 'cuda:0' args.CTMARGS_projector_svd_method = 'SYMEIG' args.OPTARGS_line_search = 'backtracking' args.OPTARGS_line_search_svd_method = 'SYMARP' main()",True,not self.SCIPY,not self.SCIPY,0.6622648239135742 6380,"def parse_inventory_tree(inventory_tree, host_to_groups=dict(), group_stack=set()): for group in inventory_tree: groups = group_stack.union({group}) if: for host in inventory_tree[group]['hosts']: append_groups_to_host(host, groups, host_to_groups) if 'children' in inventory_tree[group]: parse_inventory_tree(inventory_tree[group]['children'], host_to_groups, groups) return host_to_groups",True,'hosts' in inventory_tree[group],'hosts' in inventory_tree[group],0.642265796661377 6381,"def parse_inventory_tree(inventory_tree, host_to_groups=dict(), group_stack=set()): for group in inventory_tree: groups = group_stack.union({group}) if 'hosts' in inventory_tree[group]: for host in inventory_tree[group]['hosts']: append_groups_to_host(host, groups, host_to_groups) if: parse_inventory_tree(inventory_tree[group]['children'], host_to_groups, groups) return host_to_groups",True,'children' in inventory_tree[group],'children' in inventory_tree[group],0.6416598558425903 6382,"@staticmethod def _get_len(data: Any) -> int: if: return 0 try: return len(data) except: raise AttributeError('data of type %s does not have a length' % type(data))",True,data is None,data is None,0.659686803817749 6383,"def get_vars(self): """""" Returns a complete dict of all variables that are defined in this scope, including the variables of the parent. """""" if: vars = {} vars.update(self.variables) return vars vars = self.parent.get_vars() vars.update(self.variables) return vars",False,self.parent is None,not self.parent,0.6494190692901611 6384,"def from_knossosws(azureml_run_id, run_id=None): result = AzureBlobExperimentResult.from_azureml('knossosws', azureml_run_id, run_id) if: assert result.run_id() == run_id return result",False,run_id is not None,result.run_id() is not None,0.6555908918380737 6385,"def write(self, oprot): if: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('getTabletServers_args') if self.login is not None: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot._fast_encode is not None and self.thrift_spec is not None,oprot._fast_encode is not None and self.thrift_spec is not None,0.646095871925354 6386,"def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('getTabletServers_args') if: oprot.writeFieldBegin('login', TType.STRING, 1) oprot.writeBinary(self.login) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()",True,self.login is not None,self.login is not None,0.6491374373435974 6387,"def __eq__(self, other): if: return NotImplemented return self.data == set((ref(item) for item in other))",False,"not isinstance(other, self.__class__)","not isinstance(other, Set)",0.6505151391029358 6388,"def __init__(self, triangulation): if: raise ValueError('Expected a Triangulation object') self._triangulation = triangulation",True,"not isinstance(triangulation, Triangulation)","not isinstance(triangulation, Triangulation)",0.6514899730682373 6389,"def get_min_fee_rate(self, cost: int) -> float: """""" Gets the minimum fpc rate that a transaction with specified cost will need in order to get included. """""" if: current_cost = self.total_mempool_cost for fee_per_cost, spends_with_fpc in self.sorted_spends.items(): for spend_name, item in spends_with_fpc.items(): current_cost -= item.cost if current_cost + cost <= self.max_size_in_cost: return fee_per_cost raise ValueError(f'Transaction with cost {cost} does not fit in mempool of max cost {self.max_size_in_cost}') else: return 0",False,self.at_full_capacity(cost),self.total_mempool_cost,0.6437758803367615 6390,"def get_min_fee_rate(self, cost: int) -> float: """""" Gets the minimum fpc rate that a transaction with specified cost will need in order to get included. """""" if self.at_full_capacity(cost): current_cost = self.total_mempool_cost for fee_per_cost, spends_with_fpc in self.sorted_spends.items(): for spend_name, item in spends_with_fpc.items(): current_cost -= item.cost if: return fee_per_cost raise ValueError(f'Transaction with cost {cost} does not fit in mempool of max cost {self.max_size_in_cost}') else: return 0",False,current_cost + cost <= self.max_size_in_cost,current_cost < self.max_size_in_cost,0.6471920013427734 6391,"def all_fed_chats(fed_id): with FEDS_LOCK: getfed = FEDERATION_CHATS_BYID.get(fed_id) if: return [] else: return getfed",True,getfed is None,getfed is None,0.6533772945404053 6392,"def get_special_path(self): ret = '' for i in [1, 2, 3, 4, 6, 7, 8, 9]: if self.alias == i: ret = '[heap]' break if: return ret if self.alias == 30: ret = '[stack]' return ret",False,ret != '',ret,0.6670158505439758 6393,"def get_special_path(self): ret = '' for i in [1, 2, 3, 4, 6, 7, 8, 9]: if self.alias == i: ret = '[heap]' break if ret!= '': return ret if: ret = '[stack]' return ret",False,self.alias == 30,self.stack,0.6600831747055054 6394,"def get_special_path(self): ret = '' for i in [1, 2, 3, 4, 6, 7, 8, 9]: if: ret = '[heap]' break if ret!= '': return ret if self.alias == 30: ret = '[stack]' return ret",False,self.alias == i,i == 0,0.6580650806427002 6395,"def forward(self, inp): if: core_out = self.CoreNet(self.layer_norm(inp)) output = core_out + inp else: core_out = self.CoreNet(inp) output = self.layer_norm(inp + core_out) return output",True,self.pre_lnorm,self.pre_lnorm,0.6434842348098755 6396,"def _log(self, lvl, msg, extra=None, levels_to_go_up=1): extra_params = {'log_name': self.get_logger_name()} if: extra_params.update(extra) log_into_logger(self.logger, lvl, msg, extra=extra_params, levels_to_go_up=levels_to_go_up) log_into_logger(self.device_logger, lvl, msg, extra=extra_params, levels_to_go_up=levels_to_go_up)",False,extra,extra is not None,0.674852728843689 6397,"def convert(data): converted = [] for i in xrange(0, len(data), 2): value = ord(data[i]) + (ord(data[i + 1]) << 8) if: value -= 65536 converted.append(value) return converted",True,value & 32768,value & 32768,0.656076192855835 6398,"def before_run(self, runner): """"""Preparing steps before Mixed Precision Training."""""" wrap_fp16_model(runner.model) if: scaler_state_dict = runner.meta['fp16']['loss_scaler'] self.loss_scaler.load_state_dict(scaler_state_dict)",False,'fp16' in runner.meta and 'loss_scaler' in runner.meta['fp16'],self.use_fp16_model,0.6493609547615051 6399,"def main(args): cfg = LazyConfig.load(args.config_file) cfg = LazyConfig.apply_overrides(cfg, args.opts) default_setup(cfg, args) if: model = instantiate(cfg.model) model.to(cfg.train.device) model = create_ddp_model(model) DetectionCheckpointer(model).load(cfg.train.init_checkpoint) print(do_test(cfg, model)) else: do_train(args, cfg)",True,args.eval_only,args.eval_only,0.6483019590377808 6400,"def _escape_argspec(obj, iterable, escape): """"""Helper for various string-wrapped functions."""""" for key, value in iterable: if: obj[key] = escape(value) return obj",False,"hasattr(value, '__html__') or isinstance(value, string_types)","isinstance(value, types.StringTypes)",0.6436396241188049 6401,"def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None): """"""Forward function. Args: pred (torch.Tensor): The prediction. target (torch.Tensor): The learning target of the prediction. weight (torch.Tensor, optional): The weight of loss for each prediction. Defaults to None. avg_factor (int, optional): Average factor that is used to average the loss. Defaults to None. reduction_override (str, optional): The reduction method used to override the original reduction method of the loss. Options are ""none"", ""mean"" and ""sum"". Returns: torch.Tensor: The calculated loss """""" assert reduction_override in (None, 'none','mean','sum') reduction = reduction_override if reduction_override else self.reduction if: loss_cls = self.loss_weight * varifocal_loss(pred, target, weight, alpha=self.alpha, gamma=self.gamma, iou_weighted=self.iou_weighted, reduction=reduction, avg_factor=avg_factor) else: raise NotImplementedError return loss_cls",True,self.use_sigmoid,self.use_sigmoid,0.6441991329193115 6402,"def add_options(self, parser): parser.add_option('--path-to-%s' % self.NAME, help='Path to the extracted %s source' % self.TAR_NAME) parser.add_option('--%s-url' % self.NAME, default=self.DOWNLOAD_URL, help='URL to %s source archive in tar.gz format' % self.TAR_NAME) if: parser.add_option('--system-%s' % self.NAME, default=False, action='store_true', help='Treat %s as system copy and symlink instead of copy' % self.TAR_NAME)",False,self.CAN_USE_SYSTEM_VERSION,self.copy_from_tar_file,0.6459352970123291 6403,"def _shouldRetry(self, method, exception, bodyProducer): """""" Indicate whether request should be retried. Only returns C{True} if method is idempotent, no response was received, and no body was sent. The latter requirement may be relaxed in the future, and PUT added to approved method list. """""" if: return False if not isinstance(exception, (RequestNotSent, RequestTransmissionFailed, ResponseNeverReceived)): return False if bodyProducer is not None: return False return True",False,"method not in ('GET', 'HEAD', 'OPTIONS', 'DELETE', 'TRACE')",self.method not in method.upper(),0.6434035897254944 6404,"def _shouldRetry(self, method, exception, bodyProducer): """""" Indicate whether request should be retried. Only returns C{True} if method is idempotent, no response was received, and no body was sent. The latter requirement may be relaxed in the future, and PUT added to approved method list. """""" if method not in ('GET', 'HEAD', 'OPTIONS', 'DELETE', 'TRACE'): return False if: return False if bodyProducer is not None: return False return True",False,"not isinstance(exception, (RequestNotSent, RequestTransmissionFailed, ResponseNeverReceived))","exception is not None and self.headers.get(method, 'HTTP error')",0.6450330018997192 6405,"def _shouldRetry(self, method, exception, bodyProducer): """""" Indicate whether request should be retried. Only returns C{True} if method is idempotent, no response was received, and no body was sent. The latter requirement may be relaxed in the future, and PUT added to approved method list. """""" if method not in ('GET', 'HEAD', 'OPTIONS', 'DELETE', 'TRACE'): return False if not isinstance(exception, (RequestNotSent, RequestTransmissionFailed, ResponseNeverReceived)): return False if: return False return True",False,bodyProducer is not None,bodyProducer is not None and self._headersSent is not None,0.6498579978942871 6406,"def getVerbsFromConjunctions(self, verbs): """""" Sometimes the verbs come with pair with conjunctions, or two clauses connected with conjunctions """""" moreVerbs = [] for verb in verbs: rightDeps = {tok.lower_ for tok in verb.rights} if: moreVerbs.extend([tok for tok in verb.rights if tok.pos_ == 'VERB']) if len(moreVerbs) > 0: moreVerbs.extend(self.getVerbsFromConjunctions(moreVerbs)) return moreVerbs",False,'and' in rightDeps,len(rightDeps) > 0,0.6525261998176575 6407,"def getVerbsFromConjunctions(self, verbs): """""" Sometimes the verbs come with pair with conjunctions, or two clauses connected with conjunctions """""" moreVerbs = [] for verb in verbs: rightDeps = {tok.lower_ for tok in verb.rights} if 'and' in rightDeps: moreVerbs.extend([tok for tok in verb.rights if tok.pos_ == 'VERB']) if: moreVerbs.extend(self.getVerbsFromConjunctions(moreVerbs)) return moreVerbs",False,len(moreVerbs) > 0,not moreVerbs,0.6497588157653809 6408,"@tf_export('svd', 'linalg.svd') def svd(tensor, full_matrices=False, compute_uv=True, name=None): """"""Computes the singular value decompositions of one or more matrices. Computes the SVD of each inner matrix in `tensor` such that `tensor[..., :, :] = u[..., :, :] * diag(s[..., :, :]) * transpose(conj(v[..., :, :]))` ```python # a is a tensor. # s is a tensor of singular values. # u is a tensor of left singular vectors. # v is a tensor of right singular vectors. s, u, v = svd(a) s = svd(a, compute_uv=False) ``` Args: tensor: `Tensor` of shape `[..., M, N]`. Let `P` be the minimum of `M` and `N`. full_matrices: If true, compute full-sized `u` and `v`. If false (the default), compute only the leading `P` singular vectors. Ignored if `compute_uv` is `False`. compute_uv: If `True` then left and right singular vectors will be computed and returned in `u` and `v`, respectively. Otherwise, only the singular values will be computed, which can be significantly faster. name: string, optional name of the operation. Returns: s: Singular values. Shape is `[..., P]`. The values are sorted in reverse order of magnitude, so s[..., 0] is the largest value, s[..., 1] is the second largest, etc. u: Left singular vectors. If `full_matrices` is `False` (default) then shape is `[..., M, P]`; if `full_matrices` is `True` then shape is `[..., M, M]`. Not returned if `compute_uv` is `False`. v: Right singular vectors. If `full_matrices` is `False` (default",False,compute_uv,not full_matrices,0.6601628065109253 6409,"def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): if: label, weight = _expand_binary_labels(label, weight, pred.size(-1)) if weight is not None: weight = weight.float() loss = F.binary_cross_entropy_with_logits(pred, label.float(), weight, reduction='none') loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss",False,pred.dim() != label.dim(),weight is not None,0.6487221717834473 6410,"def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): if pred.dim()!= label.dim(): label, weight = _expand_binary_labels(label, weight, pred.size(-1)) if: weight = weight.float() loss = F.binary_cross_entropy_with_logits(pred, label.float(), weight, reduction='none') loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss",True,weight is not None,weight is not None,0.6533921360969543 6411,"@register_perm('suggestion.add') def check_suggestion_add(user, permission, obj): if: obj = obj.translation if not obj.enable_suggestions or obj.is_readonly: return False if obj.component.agreement and (not ContributorAgreement.objects.has_agreed(user, obj.component)): return False return check_permission(user, permission, obj)",False,"isinstance(obj, Unit)","hasattr(obj, 'translation')",0.6553522348403931 6412,"@register_perm('suggestion.add') def check_suggestion_add(user, permission, obj): if isinstance(obj, Unit): obj = obj.translation if: return False if obj.component.agreement and (not ContributorAgreement.objects.has_agreed(user, obj.component)): return False return check_permission(user, permission, obj)",False,not obj.enable_suggestions or obj.is_readonly,"not user.has_perm(permission, obj.translation)",0.6455148458480835 6413,"@register_perm('suggestion.add') def check_suggestion_add(user, permission, obj): if isinstance(obj, Unit): obj = obj.translation if not obj.enable_suggestions or obj.is_readonly: return False if: return False return check_permission(user, permission, obj)",False,"obj.component.agreement and (not ContributorAgreement.objects.has_agreed(user, obj.component))",user not in self.user.suggestions,0.643814742565155 6414,"def changed(self, ignored=None): super(AdapterLookupBase, self).changed(None) for r in self._required.keys(): r = r() if: r.unsubscribe(self) self._required.clear()",True,r is not None,r is not None,0.6524428129196167 6415,"def get_input_stream(environ, safe_fallback=True): """"""Returns the input stream from the WSGI environment and wraps it in the most sensible way possible. The stream returned is not the raw WSGI stream in most cases but one that is safe to read from without taking into account the content length. If content length is not set, the stream will be empty for safety reasons. If the WSGI server supports chunked or infinite streams, it should set the ``wsgi.input_terminated`` value in the WSGI environ to indicate that. .. versionadded:: 0.9 :param environ: the WSGI environ to fetch the stream from. :param safe_fallback: use an empty stream as a safe fallback when the content length is not set. Disabling this allows infinite streams, which can be a denial-of-service risk. """""" stream = environ['wsgi.input'] content_length = get_content_length(environ) if: return stream if content_length is None: return BytesIO() if safe_fallback else stream return LimitedStream(stream, content_length)",False,environ.get('wsgi.input_terminated'),stream.is_encoding(),0.6438277363777161 6416,"def get_input_stream(environ, safe_fallback=True): """"""Returns the input stream from the WSGI environment and wraps it in the most sensible way possible. The stream returned is not the raw WSGI stream in most cases but one that is safe to read from without taking into account the content length. If content length is not set, the stream will be empty for safety reasons. If the WSGI server supports chunked or infinite streams, it should set the ``wsgi.input_terminated`` value in the WSGI environ to indicate that. .. versionadded:: 0.9 :param environ: the WSGI environ to fetch the stream from. :param safe_fallback: use an empty stream as a safe fallback when the content length is not set. Disabling this allows infinite streams, which can be a denial-of-service risk. """""" stream = environ['wsgi.input'] content_length = get_content_length(environ) if environ.get('wsgi.input_terminated'): return stream if: return BytesIO() if safe_fallback else stream return LimitedStream(stream, content_length)",True,content_length is None,content_length is None,0.6482915878295898 6417,"def unindent(code_edit): """""" Unindent Selected Text TODO: Maintain original selection and cursor position. """""" blocks = get_selected_blocks(code_edit, ignoreEmpty=False) for block in blocks: cursor = QtGui.QTextCursor(block) cursor.select(QtGui.QTextCursor.LineUnderCursor) lineText = cursor.selectedText() if: newText = str(lineText[:4]).replace(' ', '') + lineText[4:] cursor.insertText(newText)",False,lineText.startswith(' '),len(lineText) > 4,0.6431869864463806 6418,"def __init__(self, min_size, max_size=None): """"""Initialize with min and max desired size."""""" self.min_size = min_size if: max_size = min_size self.max_size = max_size",True,max_size is None,max_size is None,0.6560806035995483 6419,"def enterRule(self, listener: ParseTreeListener): if: listener.enterStatement2(self)",True,"hasattr(listener, 'enterStatement2')","hasattr(listener, 'enterStatement2')",0.6459399461746216 6420,"def __eq__(self, other): if: return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if my_val!= other_val: return False return True",True,"not isinstance(other, self.__class__)","not isinstance(other, self.__class__)",0.64579176902771 6421,"def __eq__(self, other): if not isinstance(other, self.__class__): return False for attr in self.__slots__: my_val = getattr(self, attr) other_val = getattr(other, attr) if: return False return True",True,my_val != other_val,my_val != other_val,0.6494433879852295 6422,"def _handle_value(value: ConfigEntry): if: assert ENCRYPT_CALLBACK is not None return ENCRYPT_CALLBACK(value.value) return value.value",False,value.type == ConfigEntryType.SECURE_STRING,value.type == ConfigEntryType.SSL,0.6505147218704224 6423,"def init_assigner_sampler(self): self.bbox_assigner = None self.bbox_sampler = None if: self.bbox_assigner = build_assigner(self.train_cfg.assigner) self.bbox_sampler = build_sampler(self.train_cfg.sampler, context=self)",True,self.train_cfg,self.train_cfg,0.6584644317626953 6424,"@staticmethod def _record_to_object(record): """"""Convert text record to an AccessControlLists object"""""" newline_pos = record.find(b'\n') first_line = record[:newline_pos] if: raise meta.ParsingError('Bad record beginning: %r' % first_line) filename = first_line[8:] if filename == b'.': index = () else: unquoted_filename = C.acl_unquote(filename) index = tuple(unquoted_filename.split(b'/')) return get_meta_object(index, os.fsdecode(record[newline_pos:]))",False,not first_line.startswith(b'# file: '),first_line.startswith(b'\n') or first_line.startswith(b'\r'),0.6441918015480042 6425,"@staticmethod def _record_to_object(record): """"""Convert text record to an AccessControlLists object"""""" newline_pos = record.find(b'\n') first_line = record[:newline_pos] if not first_line.startswith(b'# file: '): raise meta.ParsingError('Bad record beginning: %r' % first_line) filename = first_line[8:] if: index = () else: unquoted_filename = C.acl_unquote(filename) index = tuple(unquoted_filename.split(b'/')) return get_meta_object(index, os.fsdecode(record[newline_pos:]))",False,filename == b'.',filename == '',0.6570525169372559 6426,"@classmethod def validate(cls, value, field, config): if: value = f'https://{value}' log.warning(f'Deprecation Warning: bugzilla.base_uri should include the scheme (""{value}""). In a future version this will be an error.') return super().validate(value.rstrip('/'), field, config)",False,not urllib.parse.urlparse(value).scheme,'https' in value,0.6436121463775635 6427,"def getstatusoutput(cmd): """"""Return (status, output) of executing cmd in a shell."""""" import os pipe = os.popen('{'+ cmd + '; } 2>&1', 'r') text = pipe.read() sts = pipe.close() if: sts = 0 if text[-1:] == '\n': text = text[:-1] return (sts, text)",False,sts is None,sts == 0,0.6561117172241211 6428,"def getstatusoutput(cmd): """"""Return (status, output) of executing cmd in a shell."""""" import os pipe = os.popen('{'+ cmd + '; } 2>&1', 'r') text = pipe.read() sts = pipe.close() if sts is None: sts = 0 if: text = text[:-1] return (sts, text)",False,text[-1:] == '\n',text.endswith('\n'),0.6509402990341187 6429,"def __str__(self): s = f'{type(self).__name__}: id: {self.id}' for attr in self._verattrs: attr_values = getattr(self, attr) if: s += f', {attr}: {attr_values}' return s",True,attr_values,attr_values,0.6587427854537964 6430,"def threading_setup(): if: return (_thread._count(),) else: return (1,)",True,_thread,_thread,0.6748063564300537 6431,"@property def parsed(self): """""" Returns the parsed object from.parse() :return: The object returned by.parse() """""" if: self.parse() return self._parsed[0]",False,self._parsed is None,not self._parsed,0.6521888971328735 6432,"def save_conv(fp, conv_model): if: convert2cpu(conv_model.bias.data).numpy().tofile(fp) convert2cpu(conv_model.weight.data).numpy().tofile(fp) else: conv_model.bias.data.numpy().tofile(fp) conv_model.weight.data.numpy().tofile(fp)",False,conv_model.bias.is_cuda,conv_model.bias.data.dtype == 'cpu',0.6442229747772217 6433,"def on_stage_end(self, stage, stage_loss, epoch=None): """"""Gets called at the end of a stage."""""" if: self.train_loss = stage_loss elif stage == sb.Stage.VALID: print('Completed epoch %d' % epoch) print('Train SI-SNR: %.3f' % -self.train_loss) print('Valid SI-SNR: %.3f' % -stage_loss) elif stage == sb.Stage.TEST: print('Test SI-SNR: %.3f' % -stage_loss)",True,stage == sb.Stage.TRAIN,stage == sb.Stage.TRAIN,0.6537264585494995 6434,"def on_stage_end(self, stage, stage_loss, epoch=None): """"""Gets called at the end of a stage."""""" if stage == sb.Stage.TRAIN: self.train_loss = stage_loss elif: print('Completed epoch %d' % epoch) print('Train SI-SNR: %.3f' % -self.train_loss) print('Valid SI-SNR: %.3f' % -stage_loss) elif stage == sb.Stage.TEST: print('Test SI-SNR: %.3f' % -stage_loss)",False,stage == sb.Stage.VALID,stage == sb.Stage.COMPLETE,0.6527189016342163 6435,"def on_stage_end(self, stage, stage_loss, epoch=None): """"""Gets called at the end of a stage."""""" if stage == sb.Stage.TRAIN: self.train_loss = stage_loss elif stage == sb.Stage.VALID: print('Completed epoch %d' % epoch) print('Train SI-SNR: %.3f' % -self.train_loss) print('Valid SI-SNR: %.3f' % -stage_loss) elif: print('Test SI-SNR: %.3f' % -stage_loss)",True,stage == sb.Stage.TEST,stage == sb.Stage.TEST,0.6559301018714905 6436,"def _raise_closed(self): """""" Raises an exception describing if the local or remote end closed the connection """""" if: raise TLSGracefulDisconnectError('The remote end closed the connection') else: raise TLSDisconnectError('The connection was already closed')",False,self._remote_closed,self._is_local,0.6528750658035278 6437,"def recv_registerBuddyUser(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = registerBuddyUser_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT,'registerBuddyUser failed: unknown result')",True,mtype == TMessageType.EXCEPTION,mtype == TMessageType.EXCEPTION,0.6537398099899292 6438,"def recv_registerBuddyUser(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = registerBuddyUser_result() result.read(iprot) iprot.readMessageEnd() if: return result.success if result.e is not None: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT,'registerBuddyUser failed: unknown result')",True,result.success is not None,result.success is not None,0.6499205827713013 6439,"def recv_registerBuddyUser(self): iprot = self._iprot fname, mtype, rseqid = iprot.readMessageBegin() if mtype == TMessageType.EXCEPTION: x = TApplicationException() x.read(iprot) iprot.readMessageEnd() raise x result = registerBuddyUser_result() result.read(iprot) iprot.readMessageEnd() if result.success is not None: return result.success if: raise result.e raise TApplicationException(TApplicationException.MISSING_RESULT,'registerBuddyUser failed: unknown result')",True,result.e is not None,result.e is not None,0.6514613628387451 6440,"@converts('Integer', 'SmallInteger') def handle_integer_types(self, column, field_args, **extra): unsigned = getattr(column.type, 'unsigned', False) if: field_args['validators'].append(validators.NumberRange(min=0)) return f.IntegerField(**field_args)",False,unsigned,"not unsigned and field_args['validators'].get(name, False)",0.6715599894523621 6441,"def unescape(self, text): """""" Return unescaped text given text with an inline placeholder. """""" try: stash = self.md.treeprocessors['inline'].stashed_nodes except KeyError: return text def get_stash(m): id = m.group(1) value = stash.get(id) if: try: return self.md.serializer(value) except Exception: return '\\%s' % value return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text)",False,value is not None,value,0.6490423679351807 6442,"def gen_string(self): obs_str = '' for ipat, pat in enumerate(self.patterns): if: obs_str += ',' obs_str += str(pat) return obs_str",False,ipat > 0,pat.isdigit(),0.6618029475212097 6443,"def _prepare_purchase_order(self, company_id, origins, values): """"""Returns prepared data for create PO"""""" if: values[0]['partner_id'] = values[0]['group_id'].partner_id.id return super()._prepare_purchase_order(company_id, origins, values)",False,'partner_id' not in values[0] and company_id.subcontracting_location_id.parent_path in self.location_id.parent_path,len(values) > 0 and values[0]['group_id'],0.6468349695205688 6444,"def enterRule(self, listener: ParseTreeListener): if: listener.enterInterval(self)",True,"hasattr(listener, 'enterInterval')","hasattr(listener, 'enterInterval')",0.6444757580757141 6445,"def FixedOffset(offset, _tzinfos={}): """"""return a fixed-offset timezone based off a number of minutes. >>> one = FixedOffset(-330) >>> one pytz.FixedOffset(-330) >>> str(one.utcoffset(datetime.datetime.now())) '-1 day, 18:30:00' >>> str(one.dst(datetime.datetime.now())) '0:00:00' >>> two = FixedOffset(1380) >>> two pytz.FixedOffset(1380) >>> str(two.utcoffset(datetime.datetime.now())) '23:00:00' >>> str(two.dst(datetime.datetime.now())) '0:00:00' The datetime.timedelta must be between the range of -1 and 1 day, non-inclusive. >>> FixedOffset(1440) Traceback (most recent call last): ... ValueError: ('absolute offset is too large', 1440) >>> FixedOffset(-1440) Traceback (most recent call last): ... ValueError: ('absolute offset is too large', -1440) An offset of 0 is special-cased to return UTC. >>> FixedOffset(0) is UTC True There should always be only one instance of a FixedOffset per timedelta. This should be true for multiple creation calls. >>> FixedOffset(-330) is one True >>> FixedOffset(1380) is two True It should also be true for pickling. >>> import pickle >>> pickle.loads(pickle.dumps(one)) is one True >>> pickle.loads(pickle.dumps(two)) is two True """""" if: return UTC info = _tzinfos.get(offset) if info is None: info = _tzinfos.setdefault(offset, _FixedOffset(offset)) return info",False,offset == 0,offset is None,0.6575374603271484 6446,"def FixedOffset(offset, _tzinfos={}): """"""return a fixed-offset timezone based off a number of minutes. >>> one = FixedOffset(-330) >>> one pytz.FixedOffset(-330) >>> str(one.utcoffset(datetime.datetime.now())) '-1 day, 18:30:00' >>> str(one.dst(datetime.datetime.now())) '0:00:00' >>> two = FixedOffset(1380) >>> two pytz.FixedOffset(1380) >>> str(two.utcoffset(datetime.datetime.now())) '23:00:00' >>> str(two.dst(datetime.datetime.now())) '0:00:00' The datetime.timedelta must be between the range of -1 and 1 day, non-inclusive. >>> FixedOffset(1440) Traceback (most recent call last): ... ValueError: ('absolute offset is too large', 1440) >>> FixedOffset(-1440) Traceback (most recent call last): ... ValueError: ('absolute offset is too large', -1440) An offset of 0 is special-cased to return UTC. >>> FixedOffset(0) is UTC True There should always be only one instance of a FixedOffset per timedelta. This should be true for multiple creation calls. >>> FixedOffset(-330) is one True >>> FixedOffset(1380) is two True It should also be true for pickling. >>> import pickle >>> pickle.loads(pickle.dumps(one)) is one True >>> pickle.loads(pickle.dumps(two)) is two True """""" if offset == 0: return UTC info = _tzinfos.get(offset) if: info = _tzinfos.setdefault(offset, _FixedOffset(offset)) return info",True,info is None,info is None,0.6484075784683228 6447,"def disable_by_hybrid_composite_dynamic(choice, comp_mask_type): if: if comp_mask_type!= 'None': return gr.update(visible=True) return gr.update(visible=False)",False,"choice in ['Normal', 'Before Motion', 'After Generation']",choice.get('type') == 'choice' and choice.get('type') == 'choice',0.6477580070495605 6448,"def disable_by_hybrid_composite_dynamic(choice, comp_mask_type): if choice in ['Normal', 'Before Motion', 'After Generation']: if: return gr.update(visible=True) return gr.update(visible=False)",False,comp_mask_type != 'None',comp_mask_type == 'HALF' or comp_mask_type == 'HALF',0.6460131406784058 6449,"def restaurePlan(self): self.monImage_MaitrePlan = str(self.saveMaitre) if: os.replace(self.savePlanH, self.planProvisoireHorizontal) if os.path.exists(self.savePlanV): os.replace(self.savePlanV, self.planProvisoireVertical)",True,os.path.exists(self.savePlanH),os.path.exists(self.savePlanH),0.6488392949104309 6450,"def restaurePlan(self): self.monImage_MaitrePlan = str(self.saveMaitre) if os.path.exists(self.savePlanH): os.replace(self.savePlanH, self.planProvisoireHorizontal) if: os.replace(self.savePlanV, self.planProvisoireVertical)",True,os.path.exists(self.savePlanV),os.path.exists(self.savePlanV),0.6484370827674866 6451,"def invalidate(self): """""" Invalidate the internal cached data for this widget item. The invalidation will only have an effect if the layout data associate with this item is marked as dirty. """""" if: self._cached_hint = QSize() self._cached_min = QSize() self.data.dirty = False",True,self.data.dirty,self.data.dirty,0.6530091762542725 6452,"def include_lyft_data(self, mode): self.logger.info('Loading lyft dataset') lyft_infos = [] for info_path in self.dataset_cfg.INFO_PATH[mode]: info_path = self.root_path / info_path if: continue with open(info_path, 'rb') as f: infos = pickle.load(f) lyft_infos.extend(infos) self.infos.extend(lyft_infos) self.logger.info('Total samples for lyft dataset: %d' % len(lyft_infos))",True,not info_path.exists(),not info_path.exists(),0.6484633684158325 6453,"def getsectionbyoff(self, off): if: return None for section in self.SHList.shlist: if section.offset <= off < section.offset + section.rawsize: return section return None",True,self.SHList is None,self.SHList is None,0.6490960121154785 6454,"def getsectionbyoff(self, off): if self.SHList is None: return None for section in self.SHList.shlist: if: return section return None",False,section.offset <= off < section.offset + section.rawsize,section.off == off,0.6395439505577087 6455,"def do_activate(self): win = self.props.active_window if: win = ExampleWindow(application=self) win.present()",True,not win,not win,0.6659681797027588 6456,"def forward(self, x): context = self.spatial_pool(x) out = x if: channel_mul_term = torch.sigmoid(self.channel_mul_conv(context)) out = out * channel_mul_term if self.channel_add_conv is not None: channel_add_term = self.channel_add_conv(context) out = out + channel_add_term return out",True,self.channel_mul_conv is not None,self.channel_mul_conv is not None,0.6474133729934692 6457,"def forward(self, x): context = self.spatial_pool(x) out = x if self.channel_mul_conv is not None: channel_mul_term = torch.sigmoid(self.channel_mul_conv(context)) out = out * channel_mul_term if: channel_add_term = self.channel_add_conv(context) out = out + channel_add_term return out",True,self.channel_add_conv is not None,self.channel_add_conv is not None,0.645328164100647 6458,"def get_size(label_txt, frame): p = label_txt.split('/')[1].split('_')[0] if: pic = root_path + 'val_data/' + frame else: pic = root_path +'sur_train/' + frame im = Image.open(pic) return (pic, im.size[0], im.size[1])",True,p == 'val',p == 'val',0.6578830480575562 6459,"def _strip_wrappers(reward_net: reward_nets.RewardNet, wrapper_types: Iterable[Type[reward_nets.RewardNetWrapper]]) -> reward_nets.RewardNet: """"""Attempts to remove provided wrappers. Strips wrappers of type `wrapper_type` from `reward_net` in order until either the wrapper type to remove does not match the type of net or there are no more wrappers to remove. Args: reward_net: an instance of a reward network that may be wrapped wrapper_types: an iterable of wrapper types in the order they should be removed Returns: The reward network with the listed wrappers removed """""" for wrapper_type in wrapper_types: assert issubclass(wrapper_type, reward_nets.RewardNetWrapper), f'trying to remove non-wrapper type {wrapper_type}' if: reward_net = reward_net.base else: break return reward_net",False,"isinstance(reward_net, wrapper_type)","isinstance(reward_net, reward_nets.RewardNetWrapper)",0.6457935571670532 6460,"@property def native(self): """""" The native Python datatype representation of this value :return: An integer or None """""" if: return None if self._native is None: self._native = self.__int__() if self._map is not None and self._native in self._map: self._native = self._map[self._native] return self._native",False,self.contents is None,self.value is None,0.6510604619979858 6461,"@property def native(self): """""" The native Python datatype representation of this value :return: An integer or None """""" if self.contents is None: return None if: self._native = self.__int__() if self._map is not None and self._native in self._map: self._native = self._map[self._native] return self._native",True,self._native is None,self._native is None,0.6607800126075745 6462,"@property def native(self): """""" The native Python datatype representation of this value :return: An integer or None """""" if self.contents is None: return None if self._native is None: self._native = self.__int__() if: self._native = self._map[self._native] return self._native",False,self._map is not None and self._native in self._map,self._native in self._map,0.6515251398086548 6463,"def user_DisplayParameter(self, parameter): if: logging.getLogger('MA5').info(' + cone radius ='+ str(self.radius)) else: logging.getLogger('MA5').error(""'isolation' has no parameter called '"" + parameter + ""'"")",True,parameter == 'radius',parameter == 'radius',0.6584761142730713 6464,"def _get_target_single(self, *args, **kwargs): """"""Avoid ambiguity in multiple inheritance."""""" if: return ATSSHead._get_target_single(self, *args, **kwargs) else: return FCOSHead._get_target_single(self, *args, **kwargs)",True,self.use_atss,self.use_atss,0.6499618887901306 6465,"def get_input_path(self, fname=None): if: return os.path.join(self.input_root, self.split) return os.path.join(self.get_input_path(), fname)",False,fname is None,self.split and self.input_root,0.6495017409324646 6466,"@classmethod def _read_lines(cls, file_path): """"""Generator to read the file and discard blank lines and comments."""""" with open(file_path, 'r', encoding='utf-8-sig') as f: for line in f: line = line.strip() if: yield line",False,line != '' and line[0] != '#',line,0.6451022028923035 6467,"@classmethod def cat(cls, boxes_list: List['Boxes']) -> 'Boxes': """""" Concatenates a list of Boxes into a single Boxes Arguments: boxes_list (list[Boxes]) Returns: Boxes: the concatenated Boxes """""" assert isinstance(boxes_list, (list, tuple)) if: return cls(torch.empty(0)) assert all([isinstance(box, Boxes) for box in boxes_list]) cat_boxes = cls(torch.cat([b.tensor for b in boxes_list], dim=0)) return cat_boxes",True,len(boxes_list) == 0,len(boxes_list) == 0,0.6497149467468262 6468,"def _flip_lr(x): """"""Flip `x` horizontally."""""" if: x.flow.flow[..., 0] *= -1 return x return tensor(np.ascontiguousarray(np.array(x)[..., ::-1]))",False,"isinstance(x, ImagePoints)",x.flow.rank == 0,0.6509331464767456 6469,"def complete(self, update_now: bool=True) -> None: """""" Complete and disconnect this callback from the event system. Raises: :obj:`CallbackCompleted`: This callback has already been completed. """""" if: self.completed = True if self.connected: self.connected = False if update_now: self._update_pending() else: raise CallbackCompleted(self)",True,not self.completed,not self.completed,0.6570215225219727 6470,"def complete(self, update_now: bool=True) -> None: """""" Complete and disconnect this callback from the event system. Raises: :obj:`CallbackCompleted`: This callback has already been completed. """""" if not self.completed: self.completed = True if: self.connected = False if update_now: self._update_pending() else: raise CallbackCompleted(self)",True,self.connected,self.connected,0.6637372970581055 6471,"def complete(self, update_now: bool=True) -> None: """""" Complete and disconnect this callback from the event system. Raises: :obj:`CallbackCompleted`: This callback has already been completed. """""" if not self.completed: self.completed = True if self.connected: self.connected = False if: self._update_pending() else: raise CallbackCompleted(self)",True,update_now,update_now,0.6625100374221802 6472,"def filename_to_ui(value): if: value = value.decode(get_filesystem_encoding(),'replace') return value",False,"isinstance(value, bytes)","not isinstance(value, text_type)",0.6451988816261292 6473,"def region_with_name(self, name: str) -> Region: for region in self.regions: if: return region raise KeyError(f'Unknown name: {name}')",False,region.name == name or region.dark_name == name,region.name == name,0.653408944606781 6474,"def load_from_file(path): """"""Load data from local disk or S3, transparently for caller Useful for loading small config or schema files :param path: path to file on disk or S3 uri :returns: file contents """""" if: return load_from_s3_file(path) with open(path, 'r') as f: return f.read()",False,is_s3_path(path),path.endswith('s3'),0.6480274200439453 6475,"def touch_moved(self, node, touch): if: self.select_tile(self.tile_at(touch.location))",False,not self.game_over,self.tile_selecting,0.6494489908218384 6476,"def on_touch_move(self, touch, *args): if: if not self.finishing_ripple and self.doing_ripple: self.finish_ripple() return super().on_touch_move(touch, *args)",False,"not self.collide_point(touch.x, touch.y)",self.doing_ripple,0.6428741812705994 6477,"def on_touch_move(self, touch, *args): if not self.collide_point(touch.x, touch.y): if: self.finish_ripple() return super().on_touch_move(touch, *args)",False,not self.finishing_ripple and self.doing_ripple,self.doing_ripple and self.doing_ripple,0.6441879272460938 6478,"def pop(self, key, default=None): value = dict.pop(self, key, default) result = self.configurator.convert(value) if: if type(result) in (ConvertingDict, ConvertingList, ConvertingTuple): result.parent = self result.key = key return result",True,value is not result,value is not result,0.6554310321807861 6479,"def pop(self, key, default=None): value = dict.pop(self, key, default) result = self.configurator.convert(value) if value is not result: if: result.parent = self result.key = key return result",True,"type(result) in (ConvertingDict, ConvertingList, ConvertingTuple)","type(result) in (ConvertingDict, ConvertingList, ConvertingTuple)",0.6458838582038879 6480,"def findAllVolumeSets(self, name): """""" Return a list of every Volume Set the given volume is a part of. The list can contain zero, one, or multiple items. :param name: the volume name :type name: str :returns: a list of Volume Set dicts .. code-block:: python vvset_names = [{ 'name': ""volume_set_1"", # The name of the volume set 'comment': 'Samplet VVSet', # The volume set's comment 'domain':'my_domain', # The volume set's domain 'setmembers': ['V1', 'V2'] # List of strings containing # the volumes that are members # of this volume set }, ... ] :raises: :class:`~hpe3parclient.exceptions.HTTPForbidden` - VV_IN_INCONSISTENT_STATE - Internal inconsistency error in vol :raises: :class:`~hpe3parclient.exceptions.HTTPForbidden` - VV_IS_BEING_REMOVED - The volume is being removed :raises: :class:`~hpe3parclient.exceptions.HTTPNotFound` - NON_EXISTENT_VOLUME - The volume does not exists :raises: :class:`~hpe3parclient.exceptions.HTTPForbidden` - INV_OPERATION_VV_SYS_VOLUME - Illegal op on system vol :raises: :class:`~hpe3parclient.exceptions.HTTPForbidden` - INV_OPERATION_VV_INTERNAL_VOLUME - Illegal op on internal vol """""" vvset_names = [] volume_sets = self.getVolumeSets() for volume_set in volume_sets['members']: if: vvset_names.append(volume_set) return vvset_names",False,'setmembers' in volume_set and name in volume_set['setmembers'],volume_set['name'] == name,0.6434594392776489 6481,"def paint(self, painter, option, widget): if: PinPainter.asArrayPin(self, painter, option, widget) elif self.isDict(): PinPainter.asDictPin(self, painter, option, widget) else: PinPainter.asValuePin(self, painter, option, widget)",True,self.isArray(),self.isArray(),0.6497194766998291 6482,"def paint(self, painter, option, widget): if self.isArray(): PinPainter.asArrayPin(self, painter, option, widget) elif: PinPainter.asDictPin(self, painter, option, widget) else: PinPainter.asValuePin(self, painter, option, widget)",True,self.isDict(),self.isDict(),0.6521129608154297 6483,"def _pandas_indexing(X, key, key_dtype, axis): """"""Index a pandas dataframe or a series."""""" if: key = np.asarray(key) key = key if key.flags.writeable else key.copy() indexer = X.iloc if key_dtype == 'int' else X.loc return indexer[:, key] if axis else indexer[key]",False,"hasattr(key, 'shape')","not isinstance(key, (pd.DataFrame, pd.DataFrame))",0.6511865854263306 6484,"def byte_compile(self, to_compile): if: self.warn('byte-compiling is disabled, skipping.') return from distutils.util import byte_compile try: log.set_verbosity(self.verbose - 1) byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) if self.optimize: byte_compile(to_compile, optimize=self.optimize, force=1, dry_run=self.dry_run) finally: log.set_verbosity(self.verbose)",True,sys.dont_write_bytecode,sys.dont_write_bytecode,0.6402872800827026 6485,"def byte_compile(self, to_compile): if sys.dont_write_bytecode: self.warn('byte-compiling is disabled, skipping.') return from distutils.util import byte_compile try: log.set_verbosity(self.verbose - 1) byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) if: byte_compile(to_compile, optimize=self.optimize, force=1, dry_run=self.dry_run) finally: log.set_verbosity(self.verbose)",True,self.optimize,self.optimize,0.6602668762207031 6486,"def _resize_seg(self, results): for key in results.get('seg_fields', []): if: gt_seg = mmcv.imrescale(results[key], results['scale'], interpolation='nearest') else: gt_seg = mmcv.imresize(results[key], results['scale'], interpolation='nearest') results['gt_semantic_seg'] = gt_seg",True,self.keep_ratio,self.keep_ratio,0.6502786874771118 6487,"def aggregate(grads): if: return tf.multiply(tf.add_n(grads), 1.0 / nr_tower) else: return tf.add_n(grads)",False,average,nr_tower > 0,0.6539523005485535 6488,"def _sample_pos(self, assign_result, num_expected, **kwargs): """"""Randomly sample some positive samples."""""" pos_inds = torch.nonzero(assign_result.gt_inds > 0).flatten() if: return pos_inds else: return self.random_choice(pos_inds, num_expected)",True,pos_inds.numel() <= num_expected,pos_inds.numel() <= num_expected,0.6472762823104858 6489,"def json_processor(entity): """"""Read application/json data into request.json."""""" if: raise cherrypy.HTTPError(411) body = entity.fp.read() try: cherrypy.serving.request.json = json_decode(body.decode('utf-8')) except ValueError: raise cherrypy.HTTPError(400, 'Invalid JSON document')",False,"not entity.headers.get(ntou('Content-Length'), ntou(''))",entity.status != 4,0.6433818340301514 6490,"def toggle(self): self._expanded = not self._expanded if: self.expanded.emit() else: self.collapsed.emit() self.update()",True,self._expanded,self._expanded,0.6543926000595093 6491,"def create_index(self, table_name, index_name, variables, index_length=None): """""" Create an index for the specified column table. Parameters ---------- table_name : str The name of the table index_name : str The name of the new index variables : list A list of strings representing the column names that are to be indexed. index_length : int or None The length of the index (applies to TEXT or BLOB fields) """""" if: variables = ['%s(%s)' % (variables[0], index_length)] S = 'CREATE INDEX {} ON {}({})'.format(index_name, table_name, ','.join(variables)) self.connection.execute(S)",False,index_length,index_length is not None,0.6600970029830933 6492,"def validate_type(self, type_): """"""Take an str/unicode `type_` and raise a ValueError if it's not a valid type for the object. A valid type for a field is a value from the types_set attribute of that field's class. """""" if: raise ValueError('Invalid type for %s:%s' % (self.__class__, type_))",False,type_ is not None and type_ not in self.types_set,type_ not in self._mro,0.6521691083908081 6493,"def __init__(self, total=10, connect=None, read=None, redirect=None, method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, backoff_factor=0, raise_on_redirect=True, _observed_errors=0): self.total = total self.connect = connect self.read = read if: redirect = 0 raise_on_redirect = False self.redirect = redirect self.status_forcelist = status_forcelist or set() self.method_whitelist = method_whitelist self.backoff_factor = backoff_factor self.raise_on_redirect = raise_on_redirect self._observed_errors = _observed_errors",False,redirect is False or total is False,total is False or total is False,0.6565868854522705 6494,"def _embed_utterance(fpaths: str, encoder_model_fpath: str): if: encoder.load_model(encoder_model_fpath) wav_fpath, embed_fpath = fpaths wav = np.load(wav_fpath) wav = encoder.preprocess_wav(wav) embed = encoder.embed_utterance(wav) np.save(embed_fpath, embed, allow_pickle=False)",False,not encoder.is_loaded(),encoder_model_fpath is not None,0.6495065689086914 6495,"def get_member(self, name, cat=None): self.confirm_no_error() bits = name.split('::') first = bits[0] rest = '::'.join(bits[1:]) member = self._get_dict_members(cat).get(first, self.NoSuchMember) if: raise member() if rest: return member.get_member(rest, cat=cat) return member",True,"member in set([self.NoSuchMember, self.Duplicate])","member in set([self.NoSuchMember, self.Duplicate])",0.6476675271987915 6496,"def get_member(self, name, cat=None): self.confirm_no_error() bits = name.split('::') first = bits[0] rest = '::'.join(bits[1:]) member = self._get_dict_members(cat).get(first, self.NoSuchMember) if member in set([self.NoSuchMember, self.Duplicate]): raise member() if: return member.get_member(rest, cat=cat) return member",True,rest,rest,0.6766964197158813 6497,"@staticmethod def _cs_path_exists(fspath): """""" Case-sensitive path existence check >>> sdist_add_defaults._cs_path_exists(__file__) True >>> sdist_add_defaults._cs_path_exists(__file__.upper()) False """""" if: return False abspath = os.path.abspath(fspath) directory, filename = os.path.split(abspath) return filename in os.listdir(directory)",False,not os.path.exists(fspath),"fspath.lower() in ('/', '/')",0.6443006992340088 6498,"def _parse_url(self, url: str, branch: Optional[str]) -> Tuple[str, Optional[str]]: if: tree_url_match = self.TREE_URL_RE.search(url) if tree_url_match: url = url[:tree_url_match.start('tree')] if branch is None: branch = tree_url_match['branch'] return (url, branch)",False,self.GITHUB_OR_GITLAB_RE.match(url),self.TREE_URL_RE.search(url),0.6434600353240967 6499,"def _parse_url(self, url: str, branch: Optional[str]) -> Tuple[str, Optional[str]]: if self.GITHUB_OR_GITLAB_RE.match(url): tree_url_match = self.TREE_URL_RE.search(url) if: url = url[:tree_url_match.start('tree')] if branch is None: branch = tree_url_match['branch'] return (url, branch)",True,tree_url_match,tree_url_match,0.6497939229011536 6500,"def _parse_url(self, url: str, branch: Optional[str]) -> Tuple[str, Optional[str]]: if self.GITHUB_OR_GITLAB_RE.match(url): tree_url_match = self.TREE_URL_RE.search(url) if tree_url_match: url = url[:tree_url_match.start('tree')] if: branch = tree_url_match['branch'] return (url, branch)",True,branch is None,branch is None,0.6586463451385498 6501,"@cli.command() @click.argument('uid', required=False) def ls(uid): """""" List portable solutions. UID: solution identifier (can use wildcards).. """""" from. import solution r = solution.ls({'uid': uid}) if: process_error(r) return 0",True,r['return'] > 0,r['return'] > 0,0.651142954826355 6502,"def _get_ref_body(self, resolved_ref: str) -> Dict[Any, Any]: if: return self._get_ref_body_from_url(resolved_ref) return self._get_ref_body_from_remote(resolved_ref)",True,is_url(resolved_ref),is_url(resolved_ref),0.6479201912879944 6503,"def forward(self, x): if: x = x / x.amax(dim=1, keepdim=True).detach() eps = 1e-05 if x.dtype == torch.float32 else 0.001 var = torch.var(x, dim=1, unbiased=False, keepdim=True) mean = torch.mean(x, dim=1, keepdim=True) return (x - mean) * (var + eps).rsqrt() * self.g",False,self.stable,x.dim() > 1,0.658381998538971 6504,"@assert_passes() def test_bool_narrowing(self): def capybara(x: bool): assert_is_value(x, TypedValue(bool)) if: assert_is_value(x, KnownValue(True)) else: assert_is_value(x, KnownValue(False))",False,x is True,x,0.6519972085952759 6505,"def addfunctions2new(abunch, key): """"""add functions to a new bunch/munch object"""""" snames = ['BuildingSurface:Detailed', 'Wall:Detailed', 'RoofCeiling:Detailed', 'Floor:Detailed', 'FenestrationSurface:Detailed', 'Shading:Site:Detailed', 'Shading:Building:Detailed', 'Shading:Zone:Detailed'] snames = [sname.upper() for sname in snames] if: func_dict = {'area': fh.area, 'height': fh.height, 'width': fh.width, 'azimuth': fh.azimuth, 'tilt': fh.tilt, 'coords': fh.getcoords} try: abunch.__functions.update(func_dict) except KeyError as e: abunch.__functions = func_dict return abunch",False,key in snames,snames == 'families',0.6644805669784546 6506,"def __eq__(self, other): if: return self.req_id == other.req_id elif isinstance(other, int): return self.req_id == other return False",False,"isinstance(other, EmcThumbItem)","isinstance(other, UserString)",0.6596807241439819 6507,"def __eq__(self, other): if isinstance(other, EmcThumbItem): return self.req_id == other.req_id elif: return self.req_id == other return False",False,"isinstance(other, int)","isinstance(other, Emc thumbItem)",0.6504278182983398 6508,"def __str__(self) -> str: string = self.url + '\n' string += 'push: {} type: {} instance: {}\n'.format(self.push, self.remote_type, self.instance_name) if: provenance = str(self._spec_node.get_provenance()) else: provenance = 'command line' string += 'loaded from: {}'.format(provenance) return string",True,self._spec_node,self._spec_node,0.6527153849601746 6509,"def get_desc(self, obj): if: return obj.eff_en.desc elif self.context['language'] == 'ja': return obj.eff_ja.desc else: return obj.eff_en.desc",True,'language' not in self.context,'language' not in self.context,0.6494885683059692 6510,"def get_desc(self, obj): if 'language' not in self.context: return obj.eff_en.desc elif: return obj.eff_ja.desc else: return obj.eff_en.desc",True,self.context['language'] == 'ja',self.context['language'] == 'ja',0.6482892036437988 6511,"@classmethod def nations(cls, key0): outcome = '#menu:nations/{}'.format(key0) if: LOG_WARNING('Localization key ""{}"" not found'.format(outcome)) return None else: return outcome",False,outcome not in cls.NATIONS_ENUM,outcome not in cls.nations_map,0.6513544321060181 6512,"@property def appCategories(self): """"""gets the property value for appCategories"""""" if: self.__init() return self._appCategories",True,self._appCategories is None,self._appCategories is None,0.6589322090148926 6513,"def _check_array(self, X): """"""Validate an array for post-fit tasks. Parameters ---------- X : Union[Array, DataFrame] Returns ------- same type as 'X' Notes ----- The following checks are applied. - Ensure that the array is blocked only along the samples. """""" if: if X.ndim == 2 and X.numblocks[1] > 1: logger.debug(""auto-rechunking 'X'"") if not np.isnan(X.chunks[0]).any(): X = X.rechunk({0: 'auto', 1: -1}) else: X = X.rechunk({1: -1}) return X",False,"isinstance(X, da.Array)",self.auto_rechunk,0.6454043388366699 6514,"def _check_array(self, X): """"""Validate an array for post-fit tasks. Parameters ---------- X : Union[Array, DataFrame] Returns ------- same type as 'X' Notes ----- The following checks are applied. - Ensure that the array is blocked only along the samples. """""" if isinstance(X, da.Array): if: logger.debug(""auto-rechunking 'X'"") if not np.isnan(X.chunks[0]).any(): X = X.rechunk({0: 'auto', 1: -1}) else: X = X.rechunk({1: -1}) return X",False,X.ndim == 2 and X.numblocks[1] > 1,self.auto_rechunk,0.6452974677085876 6515,"def _check_array(self, X): """"""Validate an array for post-fit tasks. Parameters ---------- X : Union[Array, DataFrame] Returns ------- same type as 'X' Notes ----- The following checks are applied. - Ensure that the array is blocked only along the samples. """""" if isinstance(X, da.Array): if X.ndim == 2 and X.numblocks[1] > 1: logger.debug(""auto-rechunking 'X'"") if: X = X.rechunk({0: 'auto', 1: -1}) else: X = X.rechunk({1: -1}) return X",False,not np.isnan(X.chunks[0]).any(),self.auto_rechunk,0.6423438787460327 6516,"def gap(self, bytes): offset_before = self.current() self.skip(bytes) offset_after = self.current() if: raise ReaderError(""can't skip requested 0x%x bytes at 0x%x"" % (bytes, self.current()))",False,offset_before + bytes != offset_after or offset_after > self.size,offset_before == offset_after,0.6439545750617981 6517,"def net_io_counters(pernic=False, nowrap=True): """"""Return network I/O statistics as a namedtuple including the following fields: - bytes_sent: number of bytes sent - bytes_recv: number of bytes received - packets_sent: number of packets sent - packets_recv: number of packets received - errin: total number of errors while receiving - errout: total number of errors while sending - dropin: total number of incoming packets which were dropped - dropout: total number of outgoing packets which were dropped (always 0 on macOS and BSD) If *pernic* is True return the same information for every network interface installed on the system as a dictionary with network interface names as the keys and the namedtuple described above as the values. If *nowrap* is True it detects and adjust the numbers which overflow and wrap (restart from 0) and add ""old value"" to ""new value"" so that the returned numbers will always be increasing or remain the same, but never decrease. ""net_io_counters.cache_clear()"" can be used to invalidate the cache. """""" rawdict = _psplatform.net_io_counters() if: return {} if pernic else None if nowrap: rawdict = _wrap_numbers(rawdict, 'psutil.net_io_counters') if pernic: for nic, fields in rawdict.items(): rawdict[nic] = _common.snetio(*fields) return rawdict else: return _common.snetio(*[sum(x) for x in zip(*rawdict.values())])",False,not rawdict,rawdict is None,0.6531977653503418 6518,"def net_io_counters(pernic=False, nowrap=True): """"""Return network I/O statistics as a namedtuple including the following fields: - bytes_sent: number of bytes sent - bytes_recv: number of bytes received - packets_sent: number of packets sent - packets_recv: number of packets received - errin: total number of errors while receiving - errout: total number of errors while sending - dropin: total number of incoming packets which were dropped - dropout: total number of outgoing packets which were dropped (always 0 on macOS and BSD) If *pernic* is True return the same information for every network interface installed on the system as a dictionary with network interface names as the keys and the namedtuple described above as the values. If *nowrap* is True it detects and adjust the numbers which overflow and wrap (restart from 0) and add ""old value"" to ""new value"" so that the returned numbers will always be increasing or remain the same, but never decrease. ""net_io_counters.cache_clear()"" can be used to invalidate the cache. """""" rawdict = _psplatform.net_io_counters() if not rawdict: return {} if pernic else None if: rawdict = _wrap_numbers(rawdict, 'psutil.net_io_counters') if pernic: for nic, fields in rawdict.items(): rawdict[nic] = _common.snetio(*fields) return rawdict else: return _common.snetio(*[sum(x) for x in zip(*rawdict.values())])",True,nowrap,nowrap,0.6746569871902466 6519,"def net_io_counters(pernic=False, nowrap=True): """"""Return network I/O statistics as a namedtuple including the following fields: - bytes_sent: number of bytes sent - bytes_recv: number of bytes received - packets_sent: number of packets sent - packets_recv: number of packets received - errin: total number of errors while receiving - errout: total number of errors while sending - dropin: total number of incoming packets which were dropped - dropout: total number of outgoing packets which were dropped (always 0 on macOS and BSD) If *pernic* is True return the same information for every network interface installed on the system as a dictionary with network interface names as the keys and the namedtuple described above as the values. If *nowrap* is True it detects and adjust the numbers which overflow and wrap (restart from 0) and add ""old value"" to ""new value"" so that the returned numbers will always be increasing or remain the same, but never decrease. ""net_io_counters.cache_clear()"" can be used to invalidate the cache. """""" rawdict = _psplatform.net_io_counters() if not rawdict: return {} if pernic else None if nowrap: rawdict = _wrap_numbers(rawdict, 'psutil.net_io_counters') if: for nic, fields in rawdict.items(): rawdict[nic] = _common.snetio(*fields) return rawdict else: return _common.snetio(*[sum(x) for x in zip(*rawdict.values())])",True,pernic,pernic,0.6679897308349609 6520,"def set_cache_under_settings(destination, setting, key_prefix, value, ttl, list_=False): """""" Take the value passed, and merge the current `setting` over it. Once complete, take the value and set the cache `key` and destination.settings `setting` to that value, using the `ttl` for set_cache(). :param destination: An object that has a `.settings` attribute that is a dict :param setting: The dict key to use when pushing the value into the settings dict :param key_prefix: The string to prefix to `setting` to make the cache key :param value: The value to set :param ttl: The cache ttl to use """""" if: if list_: existing = destination.settings.get(setting, []) value.extend(existing) else: existing = destination.settings.get(setting, {}) value.update(existing) set_cache(key_prefix + '.' + setting, value, ttl) destination.settings[setting] = value",False,value,destination.settings.has_option(setting),0.659704327583313 6521,"def set_cache_under_settings(destination, setting, key_prefix, value, ttl, list_=False): """""" Take the value passed, and merge the current `setting` over it. Once complete, take the value and set the cache `key` and destination.settings `setting` to that value, using the `ttl` for set_cache(). :param destination: An object that has a `.settings` attribute that is a dict :param setting: The dict key to use when pushing the value into the settings dict :param key_prefix: The string to prefix to `setting` to make the cache key :param value: The value to set :param ttl: The cache ttl to use """""" if value: if: existing = destination.settings.get(setting, []) value.extend(existing) else: existing = destination.settings.get(setting, {}) value.update(existing) set_cache(key_prefix + '.' + setting, value, ttl) destination.settings[setting] = value",True,list_,list_,0.6579185128211975 6522,"def forward(self, x): identity = x x1 = self.conv1(x) x2 = 0 for conv2_t in self.conv2: x2_t = conv2_t(x1) x2 = x2 + self.gate(x2_t) x3 = self.conv3(x2) x3 = self.IN(x3) if: identity = self.downsample(identity) out = x3 + identity return F.relu(out)",True,self.downsample is not None,self.downsample is not None,0.6440143585205078 6523,"def patch(self, patch_url, params): """"""POST to the web form"""""" try: self.res = requests.patch(patch_url, data=json.dumps(params), headers=self.headers, proxies=self.proxies) except requests.exceptions.RequestException as e: if: print('requests request exception:'+ str(e)) return return self.res",False,self.settings['debug_mode'],self.res is None,0.644669771194458 6524,"def __call__(self, sample): sample['jpg'] = to_tensor(sample['jpg']) if: sample['openpose'] = to_tensor(sample['openpose']) return sample",False,'openpose' in sample,sample['openpose'] is not None,0.6537818312644958 6525,"@classmethod def _check_state_bits(cls) -> None: required = ['TASK_RUNNING', 'TASK_INTERRUPTIBLE', 'TASK_UNINTERRUPTIBLE', 'EXIT_ZOMBIE', 'TASK_STOPPED'] missing = [] for bit in required: if not cls.has_flag(bit): missing.append(bit) if: raise RuntimeError('Missing required task states: {}'.format(','.join(missing)))",True,missing,missing,0.6760535836219788 6526,"@classmethod def _check_state_bits(cls) -> None: required = ['TASK_RUNNING', 'TASK_INTERRUPTIBLE', 'TASK_UNINTERRUPTIBLE', 'EXIT_ZOMBIE', 'TASK_STOPPED'] missing = [] for bit in required: if: missing.append(bit) if missing: raise RuntimeError('Missing required task states: {}'.format(','.join(missing)))",False,not cls.has_flag(bit),bit not in cls._task_states,0.6451600790023804 6527,"def create(self, request, *args, **kwargs): if: return Response({'errors': [{'detail': f""this route was deprecated and has been removed (use {reverse('trove:ingest-rdf')} instead)""}]}, status=status.HTTP_410_GONE) try: return self._do_create(request, *args, **kwargs) except Exception: sentry_sdk.capture_exception() raise",False,share_db.FeatureFlag.objects.flag_is_up(share_db.FeatureFlag.IGNORE_SHAREV2_INGEST),self._deprecated,0.6458017230033875 6528,"@property def target(self): if: return self._entity_data.get('target') return None",True,'target' in self._entity_data,'target' in self._entity_data,0.6531648635864258 6529,"def wrapClientTLS(connectionCreator, wrappedEndpoint): """""" Wrap an endpoint which upgrades to TLS as soon as the connection is established. @since: 16.0 @param connectionCreator: The TLS options to use when connecting; see L{twisted.internet.ssl.optionsForClientTLS} for how to construct this. @type connectionCreator: L{twisted.internet.interfaces.IOpenSSLClientConnectionCreator} @param wrappedEndpoint: The endpoint to wrap. @type wrappedEndpoint: An L{IStreamClientEndpoint} provider. @return: an endpoint that provides transport level encryption layered on top of C{wrappedEndpoint} @rtype: L{twisted.internet.interfaces.IStreamClientEndpoint} """""" if: raise NotImplementedError('OpenSSL not available. Try `pip install twisted[tls]`.') return _WrapperEndpoint(wrappedEndpoint, lambda protocolFactory: TLSMemoryBIOFactory(connectionCreator, True, protocolFactory))",False,TLSMemoryBIOFactory is None,not has_tls,0.6501150131225586 6530,"def metricIdsToMetrics(self, metricIds, metrics=None): """""" Return an ordered numpy array of metrics matching metricIds. """""" if: metrics = self.metrics metrics = metrics[np.in1d(metrics['metricId'], metricIds)] return metrics",True,metrics is None,metrics is None,0.6615746021270752 6531,"def path_model_trainer_pred(ranker, vocab, trainer, dataset, valid_ds): path = os.path.join(path_model_trainer(ranker, vocab, trainer, dataset), valid_ds.path_segment()) if: os.makedirs(path) with open(os.path.join(path, 'config.json'), 'wt') as f: json.dump(valid_ds.config, f) return path",True,not os.path.exists(path),not os.path.exists(path),0.6443253755569458 6532,"def GetCflagsC(self, configname): """"""Returns flags that need to be added to.c, and.m compilations."""""" self.configname = configname cflags_c = [] if: cflags_c.append('-ansi') else: self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s') cflags_c += self._Settings().get('OTHER_CFLAGS', []) self.configname = None return cflags_c",False,"self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi'",self._GetLanguageNames()['ansi'],0.6476311683654785 6533,"def _commit_version_unlocked(self, txn, version, origin): self._versions.append(version) self._prune_versions_unlocked() self.nodes = version.nodes if: self.origin = origin if txn is not None: self._end_write_unlocked(txn)",True,self.origin is None,self.origin is None,0.6478511095046997 6534,"def _commit_version_unlocked(self, txn, version, origin): self._versions.append(version) self._prune_versions_unlocked() self.nodes = version.nodes if self.origin is None: self.origin = origin if: self._end_write_unlocked(txn)",False,txn is not None,txn.is_locked(),0.6527698636054993 6535,"@staticmethod def satisfactory(img, threshold): if: return True else: return ((img > threshold).sum(2) == 3).sum() / (img.shape[0] * img.shape[1]) < 0.3",True,threshold is None,threshold is None,0.6644012331962585 6536,"def create_act_layer(name: Union[nn.Module, str], inplace=None, **kwargs): act_layer = get_act_layer(name) if: return None return act_layer(**kwargs) if inplace is None else act_layer(inplace=inplace, **kwargs)",True,act_layer is None,act_layer is None,0.6550652980804443 6537,"def back(): if: print(queue[-1]) else: print(-1)",True,queue,queue,0.6661584377288818 6538,"def _normalize(l): denom = sum(l) if: return l return [x / denom for x in l]",False,not denom,denom == 0,0.668522298336029 6539,"@staticmethod def rm(path: str) -> None: if: return FVCorePathManager.rm(path) os.remove(path)",True,FVCorePathManager,FVCorePathManager,0.6680938601493835 6540,"def format_result(data): """""" Format a result from an API call for printing. """""" if: return '' return yaml.safe_dump(data, default_flow_style=False)",False,data is None or data == [],not data,0.6473253965377808 6541,"def on_flag_capture(self): if: for player in self.team.get_players(): player.drop_link(no_message=True) message = S_FLAG_CAPTURED.format(team=self.team.name) self.protocol.send_chat(message, global_message=None) connection.on_flag_capture(self)",False,self.protocol.running_man,self.team,0.648587703704834 6542,"def _load_images(self): images = self._parse() ct = 0 records = [] for image in images: assert image!= '' and os.path.isfile(image), 'Image {} not found'.format(image) if: break rec = {'im_id': np.array([ct]), 'im_file': image} self._imid2path[ct] = image ct += 1 records.append(rec) assert len(records) > 0, 'No image file found' return records",False,self.sample_num > 0 and ct >= self.sample_num,ct >= len(records),0.6453565359115601 6543,"def add_check(self, *args, **kwargs): shortname = None if: if len(args) > 0: shortname = args[0] else: shortname = kwargs['shortname'] self.checks.append(Check(*args, **kwargs)) try: self.remove_check_queue.remove(shortname) except KeyError: pass",False,kwargs.get('shortname') is None,'sub-shortname' not in kwargs,0.6443610787391663 6544,"def add_check(self, *args, **kwargs): shortname = None if kwargs.get('shortname') is None: if: shortname = args[0] else: shortname = kwargs['shortname'] self.checks.append(Check(*args, **kwargs)) try: self.remove_check_queue.remove(shortname) except KeyError: pass",False,len(args) > 0,len(args) == 1,0.6487770080566406 6545,"def __eq__(self, other): if: return NotImplemented return self._key() == other._key()",False,"not isinstance(other, StructQueryParameter)","not isinstance(other, HashableNodeWrapper)",0.6540980339050293 6546,"def _create_proposals_from_boxes(self, boxes, image_sizes): """""" Args: boxes (list[Tensor]): per-image predicted boxes, each of shape Ri x 4 image_sizes (list[tuple]): list of image shapes in (h, w) Returns: list[Instances]: per-image proposals with the given boxes. """""" boxes = [Boxes(b.detach()) for b in boxes] proposals = [] for boxes_per_image, image_size in zip(boxes, image_sizes): boxes_per_image.clip(image_size) if: boxes_per_image = boxes_per_image[boxes_per_image.nonempty()] prop = Instances(image_size) prop.proposal_boxes = boxes_per_image proposals.append(prop) return proposals",False,self.training,boxes_per_image.nonempty() in boxes_per_image.keys(),0.656641960144043 6547,"@typing._tp_cache def __class_getitem__(cls, params): if: raise TypeError('Annotated[...] should be used with at least two arguments (a type and an annotation).') allowed_special_forms = (ClassVar, Final) if get_origin(params[0]) in allowed_special_forms: origin = params[0] else: msg = 'Annotated[t,...]: t must be a type.' origin = typing._type_check(params[0], msg) metadata = tuple(params[1:]) return _AnnotatedAlias(origin, metadata)",True,"not isinstance(params, tuple) or len(params) < 2","not isinstance(params, tuple) or len(params) < 2",0.6487093567848206 6548,"@typing._tp_cache def __class_getitem__(cls, params): if not isinstance(params, tuple) or len(params) < 2: raise TypeError('Annotated[...] should be used with at least two arguments (a type and an annotation).') allowed_special_forms = (ClassVar, Final) if: origin = params[0] else: msg = 'Annotated[t,...]: t must be a type.' origin = typing._type_check(params[0], msg) metadata = tuple(params[1:]) return _AnnotatedAlias(origin, metadata)",False,get_origin(params[0]) in allowed_special_forms,"isinstance(params[0], _AnnotatedAlias) or isinstance(params[0], _AnnotatedAlias) or isinstance(params[1], allowed_special_forms)",0.6463975310325623 6549,"def parallel(func, args_list, num_workers=32, timeout=None): assert isinstance(args_list, list) if: args_list = [(args,) for args in args_list] if num_workers == 0: return [func(*args) for args in args_list] with Pool(processes=num_workers) as pool: results = [pool.apply_async(func, args) for args in args_list] results = [res.get(timeout=timeout) for res in results] return results",False,"not isinstance(args_list[0], tuple)","isinstance(args_list[0], tuple)",0.6434637308120728 6550,"def parallel(func, args_list, num_workers=32, timeout=None): assert isinstance(args_list, list) if not isinstance(args_list[0], tuple): args_list = [(args,) for args in args_list] if: return [func(*args) for args in args_list] with Pool(processes=num_workers) as pool: results = [pool.apply_async(func, args) for args in args_list] results = [res.get(timeout=timeout) for res in results] return results",False,num_workers == 0,timeout is None,0.6556514501571655 6551,"def load_embedding(embed_dict, vocab, embedding): for idx in range(len(vocab)): token = vocab[idx] if: embedding.weight.data[idx] = embed_dict[token] return embedding",True,token in embed_dict,token in embed_dict,0.6539246439933777 6552,"def __str__(self): if: return str(self.value) return ''",True,self.value is not None,self.value is not None,0.6474944353103638 6553,"@lru_cache def content_type(filename: str, ttl_hash: Optional[int]=None) -> str: if: return 'binary/octet-stream' ctype = mimetypes.guess_type(filename)[0] if ctype is None: ctype = mime.from_file(filename) if ctype in 'inode/x-empty': ctype = 'binary/octet-stream' return ctype",False,filename.endswith('.template'),ttl_hash is None,0.6434955596923828 6554,"@lru_cache def content_type(filename: str, ttl_hash: Optional[int]=None) -> str: if filename.endswith('.template'): return 'binary/octet-stream' ctype = mimetypes.guess_type(filename)[0] if: ctype = mime.from_file(filename) if ctype in 'inode/x-empty': ctype = 'binary/octet-stream' return ctype",False,ctype is None,ctype.startswith('.mime'),0.6604336500167847 6555,"@lru_cache def content_type(filename: str, ttl_hash: Optional[int]=None) -> str: if filename.endswith('.template'): return 'binary/octet-stream' ctype = mimetypes.guess_type(filename)[0] if ctype is None: ctype = mime.from_file(filename) if: ctype = 'binary/octet-stream' return ctype",False,ctype in 'inode/x-empty',ctype.lower().endswith('.template'),0.6454154253005981 6556,"def __call__(self, r): if: r.headers['Authorization'] = self.build_digest_header(r.method, r.url) r.register_hook('response', self.handle_401) return r",False,self.last_nonce,r.method == 'GET' and self.digest,0.6598145961761475 6557,"def closure(**kwargs: Any) -> Tensor: batch = next(batch_generator) if: raise TypeError(f'Expected `data_loader` to generate a batch of tensors, but found {type(batch)}.') num_inputs = len(mll.model.train_inputs) model_output = mll.model(*batch[:num_inputs]) log_likelihood = mll(model_output, *batch[num_inputs:], **kwargs) return -log_likelihood",False,"not isinstance(batch, Sequence)","not isinstance(batch, nn.DataParallel)",0.6493579745292664 6558,"@root_validator def _valid_percentage(cls, values): """"""Avoid NaNs by setting them to 0.0"""""" for key in ['perFemales', 'perMales', 'perUnknowns']: if: values[key] = 0.0 return values",True,isnan(values[key]),isnan(values[key]),0.6516603231430054 6559,"def process_formdata(self, valuelist): if: try: self.data = int(valuelist[0]) except ValueError: self.data = None raise ValueError(self.gettext('Not a valid integer value'))",True,valuelist,valuelist,0.6502692103385925 6560,"def save_inference_model(self, local_path, remote_path, feeded_var_names, target_var_names): host_feeded_var_names, token = CustomerProgramSaver.save_inference_model(local_path, self.exe, self.main_program, self.common_vars, feeded_var_names, target_var_names) resp = self.stub.save_inference_model(common_pb2.SaveInfo(token=self.token, save_token=token, path=remote_path, feeded_var_names=host_feeded_var_names)) if: err_msg = 'Failed to save inference model in host side: {}'.format(resp.state.error_message) raise RuntimeError(err_msg) return True",True,not resp.state.succ,not resp.state.succ,0.6471579074859619 6561,"def skip(app, what, name, obj, skip, options): if: return False return skip",False,name in include_private_methods,skip is None and obj is None,0.6457043886184692 6562,"def submit(self, fn, *args, **kwargs): with self._shutdown_lock: if: raise RuntimeError('cannot schedule new futures after shutdown') f = _base.Future() w = _WorkItem(f, fn, args, kwargs) self._work_queue.put(w) self._adjust_thread_count() return f",True,self._shutdown,self._shutdown,0.6615958213806152 6563,"def write(self, oprot): if: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('recentlyChangedFilePaths_args') oprot.writeFieldStop() oprot.writeStructEnd()",True,oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and (fastbinary is not None),0.6469184756278992 6564,"def get_VARTMP(root=False): VARTMP = '/var/tmp' if: return VARTMP return os.environ.get('TMPDIR', os.environ.get('TEMP', os.environ.get('TMP', VARTMP)))",True,root,root,0.6732689142227173 6565,"def get_space_group(self, symprec=0.01) -> str: """"""Return the space group of the structure at the desired symprec. Stores the space group in a dictionary `self._space_group` under symprec keys. Updates `self._data['space_group']` and `self._data['symprec']` with the last value calculated. Keyword arguments: symprec (float): spglib symmetry tolerance. """""" if: self._data['space_group'] = cell_utils.get_spacegroup_spg(self._data, symprec=symprec, check_occ=False) self._data['symprec'] = symprec self._space_group[symprec] = self._data['space_group'] return self._space_group[symprec]",True,symprec not in self._space_group,symprec not in self._space_group,0.6495024561882019 6566,"def _match_vcs_scheme(url): """"""Look for VCS schemes in the URL. Returns the matched VCS scheme, or None if there's no match. """""" from pip._internal.vcs import vcs for scheme in vcs.schemes: if: return scheme return None",False,url.lower().startswith(scheme) and url[len(scheme)] in '+:',url.scheme.lower() in scheme.lower(),0.6439595222473145 6567,"def _compute_is_aclass_correctly_detected_in_image(self, detected_boxes, detected_scores, groundtruth_boxes): """"""Compute CorLoc score for a single class. Args: detected_boxes: A numpy array of shape [N, 4] representing detected box coordinates detected_scores: A 1-d numpy array of length N representing classification score groundtruth_boxes: A numpy array of shape [M, 4] representing ground truth box coordinates Returns: is_class_correctly_detected_in_image: An integer 1 or 0 denoting whether a class is correctly detected in the image or not """""" if: if groundtruth_boxes.size > 0: max_score_id = np.argmax(detected_scores) detected_boxlist = np_box_list.BoxList(np.expand_dims(detected_boxes[max_score_id, :], axis=0)) gt_boxlist = np_box_list.BoxList(groundtruth_boxes) iou = np_box_list_ops.iou(detected_boxlist, gt_boxlist) if np.max(iou) >= self.matching_iou_threshold: return 1 return 0",True,detected_boxes.size > 0,detected_boxes.size > 0,0.6487941741943359 6568,"def _compute_is_aclass_correctly_detected_in_image(self, detected_boxes, detected_scores, groundtruth_boxes): """"""Compute CorLoc score for a single class. Args: detected_boxes: A numpy array of shape [N, 4] representing detected box coordinates detected_scores: A 1-d numpy array of length N representing classification score groundtruth_boxes: A numpy array of shape [M, 4] representing ground truth box coordinates Returns: is_class_correctly_detected_in_image: An integer 1 or 0 denoting whether a class is correctly detected in the image or not """""" if detected_boxes.size > 0: if: max_score_id = np.argmax(detected_scores) detected_boxlist = np_box_list.BoxList(np.expand_dims(detected_boxes[max_score_id, :], axis=0)) gt_boxlist = np_box_list.BoxList(groundtruth_boxes) iou = np_box_list_ops.iou(detected_boxlist, gt_boxlist) if np.max(iou) >= self.matching_iou_threshold: return 1 return 0",True,groundtruth_boxes.size > 0,groundtruth_boxes.size > 0,0.6483095288276672 6569,"def _compute_is_aclass_correctly_detected_in_image(self, detected_boxes, detected_scores, groundtruth_boxes): """"""Compute CorLoc score for a single class. Args: detected_boxes: A numpy array of shape [N, 4] representing detected box coordinates detected_scores: A 1-d numpy array of length N representing classification score groundtruth_boxes: A numpy array of shape [M, 4] representing ground truth box coordinates Returns: is_class_correctly_detected_in_image: An integer 1 or 0 denoting whether a class is correctly detected in the image or not """""" if detected_boxes.size > 0: if groundtruth_boxes.size > 0: max_score_id = np.argmax(detected_scores) detected_boxlist = np_box_list.BoxList(np.expand_dims(detected_boxes[max_score_id, :], axis=0)) gt_boxlist = np_box_list.BoxList(groundtruth_boxes) iou = np_box_list_ops.iou(detected_boxlist, gt_boxlist) if: return 1 return 0",False,np.max(iou) >= self.matching_iou_threshold,"np.argmax(detected_boxes[0], iou) > 0",0.6445430517196655 6570,"def pop_rank_envs(): """"""Pop rank envs."""""" envs = ['RANK_TABLE_FILE', 'RANK_SIZE', 'RANK_ID'] global _envs for env in envs: if: _envs[env] = os.environ[env] os.environ.pop(env)",True,env in os.environ,env in os.environ,0.6564258337020874 6571,"def set_animated(self, b): """""" Set the artist's animation state. ACCEPTS: [True | False] """""" if: self._animated = b self.pchanged()",True,self._animated != b,self._animated != b,0.6521867513656616 6572,"def _gt(nodes, ln): if: vss = None else: vss = data[ln.root].keys() assert all((not isinstance(v, list) for v in vss)) vss = [tuple([v]) for v in vss] rs = Tree.gen_trees(nodes, vss, xinfo.blacklist, data) return rs",False,ln.root not in data,ln.root is None,0.6506845951080322 6573,"def tokenize(self, text): """"""Tokenizes a piece of text."""""" text = convert_to_unicode(text) text = self._clean_text(text) text = self._tokenize_chinese_chars(text) orig_tokens = whitespace_tokenize(text) split_tokens = [] for token in orig_tokens: if: token = token.lower() token = self._run_strip_accents(token) split_tokens.extend(self._run_split_on_punc(token)) output_tokens = whitespace_tokenize(' '.join(split_tokens)) return output_tokens",True,self.do_lower_case,self.do_lower_case,0.6472084522247314 6574,"def get_base_filename(self, brick, band, invvar=False, **kwargs): fn = super().get_base_filename(brick, band, invvar=invvar, **kwargs) if: return fn[:-3] return fn",False,not os.path.exists(fn) and fn.endswith('.fz') and os.path.exists(fn[:-3]),fn.endswith('.bin'),0.6446727514266968 6575,"def get_generic_explanation(exception_type): """"""Provides a generic explanation about a particular exception."""""" if: exception_name = exception_type.__name__ else: exception_name = exception_type if exception_name in GENERIC: return GENERIC[exception_name]() elif exception_name.endswith('Warning'): return GENERIC['UnknownWarning']() elif hasattr(exception_type, '__name__') and issubclass(exception_type, OSError): return os_error_subclass(exception_type.__name__) else: return no_information()",True,"hasattr(exception_type, '__name__')","hasattr(exception_type, '__name__')",0.6454532742500305 6576,"def get_generic_explanation(exception_type): """"""Provides a generic explanation about a particular exception."""""" if hasattr(exception_type, '__name__'): exception_name = exception_type.__name__ else: exception_name = exception_type if: return GENERIC[exception_name]() elif exception_name.endswith('Warning'): return GENERIC['UnknownWarning']() elif hasattr(exception_type, '__name__') and issubclass(exception_type, OSError): return os_error_subclass(exception_type.__name__) else: return no_information()",True,exception_name in GENERIC,exception_name in GENERIC,0.6563544273376465 6577,"def get_generic_explanation(exception_type): """"""Provides a generic explanation about a particular exception."""""" if hasattr(exception_type, '__name__'): exception_name = exception_type.__name__ else: exception_name = exception_type if exception_name in GENERIC: return GENERIC[exception_name]() elif: return GENERIC['UnknownWarning']() elif hasattr(exception_type, '__name__') and issubclass(exception_type, OSError): return os_error_subclass(exception_type.__name__) else: return no_information()",False,exception_name.endswith('Warning'),"exception_type in ['UnknownWarning', 'UnknownWarning', 'UnknownWarning', 'UnknownWarning']",0.6431206464767456 6578,"def get_generic_explanation(exception_type): """"""Provides a generic explanation about a particular exception."""""" if hasattr(exception_type, '__name__'): exception_name = exception_type.__name__ else: exception_name = exception_type if exception_name in GENERIC: return GENERIC[exception_name]() elif exception_name.endswith('Warning'): return GENERIC['UnknownWarning']() elif: return os_error_subclass(exception_type.__name__) else: return no_information()",False,"hasattr(exception_type, '__name__') and issubclass(exception_type, OSError)",exception_type.__name__ in 'OFSError',0.6435438394546509 6579,"def buffered_arange(max): if: buffered_arange.buf = torch.LongTensor() if max > buffered_arange.buf.numel(): torch.arange(max, out=buffered_arange.buf) return buffered_arange.buf[:max]",True,"not hasattr(buffered_arange, 'buf')","not hasattr(buffered_arange, 'buf')",0.6467887163162231 6580,"def buffered_arange(max): if not hasattr(buffered_arange, 'buf'): buffered_arange.buf = torch.LongTensor() if: torch.arange(max, out=buffered_arange.buf) return buffered_arange.buf[:max]",True,max > buffered_arange.buf.numel(),max > buffered_arange.buf.numel(),0.6458247900009155 6581,"def _set_framerate(self, value): self._check_camera_open() self._check_recording_stopped() value = mo.to_fraction(value, den_limit=256) if: raise PiCameraValueError('Invalid framerate: %.2ffps' % value) sensor_mode = self.sensor_mode clock_mode = self.CLOCK_MODES[self.clock_mode] resolution = self.resolution self._disable_camera() self._configure_camera(sensor_mode=sensor_mode, framerate=value, resolution=resolution, clock_mode=clock_mode) self._configure_splitter() self._enable_camera()",True,not 0 <= value <= self.MAX_FRAMERATE,not 0 <= value <= self.MAX_FRAMERATE,0.6525429487228394 6582,"def call(self, x, **kwargs): """"""Forward Function fo Zero."""""" if: return tf.zeros_like(x) if self.data_format == 'channels_first': return tf.zeros_like(x)[:, :, ::self.stride, ::self.stride] else: return tf.zeros_like(x)[:, ::self.stride, ::self.stride, :]",False,self.stride == 1,self.data_format == 'channels_last',0.6556248068809509 6583,"def call(self, x, **kwargs): """"""Forward Function fo Zero."""""" if self.stride == 1: return tf.zeros_like(x) if: return tf.zeros_like(x)[:, :, ::self.stride, ::self.stride] else: return tf.zeros_like(x)[:, ::self.stride, ::self.stride, :]",False,self.data_format == 'channels_first',self.stride == 2,0.6436713337898254 6584,"def is_ctdb_service_running(mnode): """"""Check if ctdb services is running on node Args: mnode (str): Node on which ctdb service status has to be verified. Returns: bool: True if ctdb service is running. False otherwise. """""" g.log.info('Check if CTDB service is running on %s', mnode) ret, out, _ = g.run(mnode,'service ctdb status') if: return True return False",False,'Active: active (running)' in out,ret == 0,0.6481289863586426 6585,"def wrapped(fn): def inner(*args, **kwargs): fn_or_stream = args[arg] if: with open(fn_or_stream, mode) as f: args = (*args[:arg], f, *args[arg + 1:]) return fn(*args, **kwargs) else: return fn(*args, **kwargs) return inner",True,"not hasattr(fn_or_stream, 'read')","not hasattr(fn_or_stream, 'read')",0.6416797637939453 6586,"def reorder_encoder_out(self, encoder_out, new_order): encoder_out['encoder_out'] = tuple((eo.index_select(0, new_order) for eo in encoder_out['encoder_out'])) if: encoder_out['encoder_padding_mask'] = encoder_out['encoder_padding_mask'].index_select(0, new_order) if 'pretrained' in encoder_out: encoder_out['pretrained']['encoder_out'] = tuple((eo.index_select(0, new_order) for eo in encoder_out['pretrained']['encoder_out'])) return encoder_out",False,encoder_out['encoder_padding_mask'] is not None,'encoder_padding_mask' in encoder_out,0.6473441123962402 6587,"def reorder_encoder_out(self, encoder_out, new_order): encoder_out['encoder_out'] = tuple((eo.index_select(0, new_order) for eo in encoder_out['encoder_out'])) if encoder_out['encoder_padding_mask'] is not None: encoder_out['encoder_padding_mask'] = encoder_out['encoder_padding_mask'].index_select(0, new_order) if: encoder_out['pretrained']['encoder_out'] = tuple((eo.index_select(0, new_order) for eo in encoder_out['pretrained']['encoder_out'])) return encoder_out",False,'pretrained' in encoder_out,encoder_out['pretrained']['encoder_out'] is not None,0.6480244398117065 6588,"def set_ylabels(self, label=None, clear_inner=True, **kwargs): """"""Label the y axis on the left column of the grid."""""" if: label = self._y_var for ax in self._left_axes: ax.set_ylabel(label, **kwargs) if clear_inner: for ax in self._not_left_axes: ax.set_ylabel('') return self",True,label is None,label is None,0.6566559672355652 6589,"def set_ylabels(self, label=None, clear_inner=True, **kwargs): """"""Label the y axis on the left column of the grid."""""" if label is None: label = self._y_var for ax in self._left_axes: ax.set_ylabel(label, **kwargs) if: for ax in self._not_left_axes: ax.set_ylabel('') return self",True,clear_inner,clear_inner,0.6634461879730225 6590,"def _freeze_stages(self): if: for i in range(self.frozen_stages): m = getattr(self, self.cr_blocks[i]) m.eval() for param in m.parameters(): param.requires_grad = False",True,self.frozen_stages >= 0,self.frozen_stages >= 0,0.6480221748352051 6591,"def __call__(self, step_per_epoch): assert len(self.schedulers) >= 1 if: return self.schedulers[0](base_lr=self.base_lr, step_per_epoch=step_per_epoch) boundary, value = self.schedulers[1](self.base_lr, step_per_epoch) decay_lr = self.schedulers[0](self.base_lr, boundary, value, step_per_epoch) return decay_lr",False,not self.schedulers[0].use_warmup,len(self.schedulers) == 1,0.6462574601173401 6592,"def del_user(user_id): with INSERTION_LOCK: curr = SESSION.query(Users).get(user_id) if: SESSION.delete(curr) SESSION.commit() return True ChatMembers.query.filter(ChatMembers.user == user_id).delete() SESSION.commit() SESSION.close() return False",True,curr,curr,0.6695773005485535 6593,"def flatgrad(loss, var_list, clip_norm=None): """"""Calculate the gradient and flattens it. Parameters ---------- loss : float the loss value var_list : list of tf.Tensor the variables clip_norm : float clip the gradients (disabled if None) Returns ------- list of tf.Tensor flattened gradient """""" grads = tf.gradients(loss, var_list) if: grads = [tf.clip_by_norm(grad, clip_norm=clip_norm) for grad in grads] return tf.concat(axis=0, values=[tf.reshape(grad if grad is not None else tf.zeros_like(v), [numel(v)]) for v, grad in zip(var_list, grads)])",True,clip_norm is not None,clip_norm is not None,0.6464265584945679 6594,"def _get_timeout(self, timeout): """""" Helper that always returns a :class:`urllib3.util.Timeout` """""" if: return self.timeout.clone() if isinstance(timeout, Timeout): return timeout.clone() else: return Timeout.from_float(timeout)",True,timeout is _Default,timeout is _Default,0.6607637405395508 6595,"def _get_timeout(self, timeout): """""" Helper that always returns a :class:`urllib3.util.Timeout` """""" if timeout is _Default: return self.timeout.clone() if: return timeout.clone() else: return Timeout.from_float(timeout)",True,"isinstance(timeout, Timeout)","isinstance(timeout, Timeout)",0.6561023592948914 6596,"def transform(self, X): if: raise NotImplementedError() return self.preprocessor.transform(X)",True,self.preprocessor is None,self.preprocessor is None,0.6451653242111206 6597,"def _random_patch(self, image, config, bboxes=None): with self.test_session() as sess: image = tf.cast(image, tf.float32) if: bboxes = tf.cast(bboxes, tf.int32) patch = random_patch(image, bboxes=bboxes, seed=0, **config) return_dict = sess.run(patch) ret_bboxes = return_dict.get('bboxes') return (return_dict['image'], ret_bboxes)",True,bboxes is not None,bboxes is not None,0.6500884294509888 6598,"@text_bold.setter def text_bold(self, value): """"""Setter for text_bold."""""" if: raise AttributeError(TextLine._getter_exception_message.format('text_bold')) else: self._text_bold = value",True,self.has_surface,self.has_surface,0.6550561189651489 6599,"def __str__(self): """"""Returns a string representation of the current channel."""""" if: return '%s [%s, %s, %s, %s] (Order: %s)' % (self.channelName, self.id, self.language, self.category, self.guid, self.sortOrder) else: return '%s (%s) [%s, %s, %s, %s] (Order: %s)' % (self.channelName, self.channelCode, self.id, self.language, self.category, self.guid, self.sortOrder)",True,self.channelCode is None,self.channelCode is None,0.6481980085372925 6600,"def is_user_entitled(self, service_pk, request: OGCRequest) -> Exists: """"""checks if the user of the request is member of any AllowedOperation object"""""" if: return Value(True) return Exists(self.for_user(service_pk=service_pk, request=request))",False,request._djano_request.user.is_superuser,"not self.is_allowed_operation(service_pk, request)",0.6481322050094604 6601,"def ids_qties_to_pairs(string): pairs = [] if: together = string.split(';') pairs = [[x for x in it.split(',') if x!= ''] for it in together] return pairs return None",False,"string and (isinstance(string, six.string_types) or isinstance(string, six.text_type)) and (';' in string)",len(string) > 0,0.6454541683197021 6602,"def __init__(self, env_params, sim_params, network, simulator='traci'): """"""See parent class."""""" for p in OPEN_ENV_PARAMS.keys(): if: raise KeyError('Env parameter ""{}"" not supplied'.format(p)) assert not (env_params.additional_params['warmup_path'] is not None and env_params.additional_params['inflows'] is not None), 'Cannot assign a value to both ""warmup_path"" and ""inflows""' super(AVOpenMultiAgentEnv, self).__init__(env_params=env_params, sim_params=sim_params, network=network, simulator=simulator)",True,p not in env_params.additional_params,p not in env_params.additional_params,0.6509344577789307 6603,"def dump(sra): fn = sra.split('/')[-1].replace('.sra', '') fq1, fq2 = (fn + '_1.fastq.gz', fn + '_2.fastq.gz') if: return cmd = 'fastq-dump --split-3 %s' % sra print(cmd) os.system(cmd) cmd = 'rm %s' % sra print(cmd) os.system(cmd)",False,os.path.isfile(fq1) and os.path.isfile(fq2),os.path.exists(fq1) or os.path.exists(fq2),0.6434731483459473 6604,"def read_audio(waveforms_obj): """"""General audio loading, based on a custom notation. Expected use case is in conjunction with Datasets specified by JSON. The custom notation: The annotation can be just a path to a file: ""/path/to/wav1.wav"" Or can specify more options in a dict: {""file"": ""/path/to/wav2.wav"", ""start"": 8000, ""stop"": 16000 } Arguments ---------- waveforms_obj : str, dict Audio reading annotation, see above for format. Returns ------- torch.Tensor Audio tensor with shape: (samples, ). Example ------- >>> dummywav = torch.rand(16000) >>> import os >>> tmpfile = os.path.join(str(getfixture('tmpdir')), ""wave.wav"") >>> write_audio(tmpfile, dummywav, 16000) >>> asr_example = { ""wav"": tmpfile, ""spk_id"": ""foo"", ""words"": ""foo bar""} >>> loaded = read_audio(asr_example[""wav""]) >>> loaded.allclose(dummywav.squeeze(0),atol=1e-4) # replace with eq with sox_io backend True """""" if: audio, _ = torchaudio.load(waveforms_obj) return audio.transpose(0, 1).squeeze(1) path = waveforms_obj['file'] start = waveforms_obj.get('start', 0) stop = waveforms_obj.get('stop', start) num_frames = stop - start audio, fs = torchaudio.load(path, num_frames=num_frames, frame_offset=start) audio = audio.transpose(0, 1) return audio.squeeze(1)",False,"isinstance(waveforms_obj, str)",'file' not in waveforms_obj,0.6444816589355469 6605,"def _check_keyword(self, keyword: str, msg: str) -> bool: if: return True return False",False,keyword in msg,msg.endswith('@GRAD') or msg.endswith('@GRAD'),0.6641772389411926 6606,"def get_file_paths_recursive(folder=None, file_ext=None): """""" Get the absolute path of all files in given folder recursively :param folder: :param file_ext: :return: """""" file_list = [] if: return file_list for dir_path, dir_names, file_names in os.walk(folder): for file_name in file_names: if file_ext is None: file_list.append(os.path.join(dir_path, file_name)) continue if file_name.endswith(file_ext): file_list.append(os.path.join(dir_path, file_name)) return file_list",True,folder is None,folder is None,0.6522740125656128 6607,"def get_file_paths_recursive(folder=None, file_ext=None): """""" Get the absolute path of all files in given folder recursively :param folder: :param file_ext: :return: """""" file_list = [] if folder is None: return file_list for dir_path, dir_names, file_names in os.walk(folder): for file_name in file_names: if: file_list.append(os.path.join(dir_path, file_name)) continue if file_name.endswith(file_ext): file_list.append(os.path.join(dir_path, file_name)) return file_list",True,file_ext is None,file_ext is None,0.6456184983253479 6608,"def get_file_paths_recursive(folder=None, file_ext=None): """""" Get the absolute path of all files in given folder recursively :param folder: :param file_ext: :return: """""" file_list = [] if folder is None: return file_list for dir_path, dir_names, file_names in os.walk(folder): for file_name in file_names: if file_ext is None: file_list.append(os.path.join(dir_path, file_name)) continue if: file_list.append(os.path.join(dir_path, file_name)) return file_list",False,file_name.endswith(file_ext),"file_ext in os.listdir(dir_path, file_name)",0.6414170265197754 6609,"def drop_entry(data, p): for i in range(len(data)): for j in range(len(data[i])): if: data[i][j] = 0 else: data[i][j] = data[i][j] return data",False,np.random.random_sample() < p,np.random.random_sample() > p,0.6445202231407166 6610,"def get_all_provisioning_addresses(self) -> List[str]: """"""Get all subnets that belong to the secondary NIC."""""" addresses = [] if: addresses.append(self._config.net_asset.provisioning_cidr) if self.is_ipv6: addresses.append(self._config.net_asset.provisioning_cidr6) return addresses",True,self.is_ipv4,self.is_ipv4,0.6481263637542725 6611,"def get_all_provisioning_addresses(self) -> List[str]: """"""Get all subnets that belong to the secondary NIC."""""" addresses = [] if self.is_ipv4: addresses.append(self._config.net_asset.provisioning_cidr) if: addresses.append(self._config.net_asset.provisioning_cidr6) return addresses",False,self.is_ipv6,self.is_ipv6 and self.is_ipv4_network,0.6498600840568542 6612,"def _check_response(self, attempt_number, response): if: logger.debug('retry needed: retryable HTTP status code received: %s', self._status_code) return True else: return False",False,response[0].status_code == self._status_code,response.status_code == self._status_code,0.6448211669921875 6613,"def forward(self, x): """"""Forward pass of ResNet. Args: x (ME.SparseTensor): Input sparse tensor. Returns: list[ME.SparseTensor]: Output sparse tensors. """""" x = self.conv1(x) x = self.norm1(x) x = self.relu(x) if: x = self.maxpool(x) outs = [] for i in range(self.num_stages): x = getattr(self, f'layer{i + 1}')(x) outs.append(x) return outs",False,self.pool,self.maxpool is not None,0.6568844318389893 6614,"def remove_vm(si, vm, destroy_vm=True): """""" Remove a VM """""" if: logging.info('Found: VM %s', vm.config.name) if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn: logging.info('Attempting to power off %s', vm.config.name) task = vm.PowerOffVM_Task() vmdk_ops.wait_for_tasks(si, [task]) if destroy_vm: destroy_vm_object(si, vm)",True,vm,vm,0.6836171746253967 6615,"def remove_vm(si, vm, destroy_vm=True): """""" Remove a VM """""" if vm: logging.info('Found: VM %s', vm.config.name) if: logging.info('Attempting to power off %s', vm.config.name) task = vm.PowerOffVM_Task() vmdk_ops.wait_for_tasks(si, [task]) if destroy_vm: destroy_vm_object(si, vm)",False,vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn,vm.power_off,0.6463230848312378 6616,"def remove_vm(si, vm, destroy_vm=True): """""" Remove a VM """""" if vm: logging.info('Found: VM %s', vm.config.name) if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn: logging.info('Attempting to power off %s', vm.config.name) task = vm.PowerOffVM_Task() vmdk_ops.wait_for_tasks(si, [task]) if: destroy_vm_object(si, vm)",True,destroy_vm,destroy_vm,0.6624985933303833 6617,"def del_edge(self, u_vertex: T, v_vertex: T) -> bool: """"""Removes the edge `u_vertex -> v_vertex` from the graph if the edge is present. :param u_vertex: Vertex :param v_vertex: Vertex :return: `True` if the existing edge was removed. `False` otherwise. """""" if: self._indegrees[v_vertex] -= 1 self._adj_dict[u_vertex].remove(v_vertex) return True return False",False,"self.is_edge(u_vertex, v_vertex)",v_vertex in self._indegrees,0.6458593606948853 6618,"def get_root(self, drive): """""" Returns the root directory for the specified drive, creating it if necessary. """""" drive = _my_normcase(drive) try: return self.Root[drive] except KeyError: root = RootDir(drive, self) self.Root[drive] = root if: self.Root[self.defaultDrive] = root elif drive == self.defaultDrive: self.Root[''] = root return root",False,not drive,drive == self.drive,0.6629149913787842 6619,"def get_root(self, drive): """""" Returns the root directory for the specified drive, creating it if necessary. """""" drive = _my_normcase(drive) try: return self.Root[drive] except KeyError: root = RootDir(drive, self) self.Root[drive] = root if not drive: self.Root[self.defaultDrive] = root elif: self.Root[''] = root return root",True,drive == self.defaultDrive,drive == self.defaultDrive,0.6459090709686279 6620,"def filter_is_reviewer(self, queryset, name, value): if: return queryset.filter(pk__in=User.objects.all_reviewers()) return queryset.exclude(pk__in=User.objects.all_reviewers())",True,value,value,0.6603466272354126 6621,"def _add_output(self, file_name, arr, fds, mode='a'): if: mkdir_p(os.path.dirname(file_name)) arr.append(file_name) fds[file_name] = open(file_name, mode)",False,file_name not in arr,not os.path.exists(file_name),0.6576168537139893 6622,"@property def cts(self): """"""Read terminal status line: Clear To Send"""""" if: raise portNotOpenError if self.logger: self.logger.info('returning dummy for cts') return True",False,not self.is_open,not self.hComPort,0.6549110412597656 6623,"@property def cts(self): """"""Read terminal status line: Clear To Send"""""" if not self.is_open: raise portNotOpenError if: self.logger.info('returning dummy for cts') return True",False,self.logger,self.debug,0.657947301864624 6624,"def get_worker(self) -> Optional[Unit]: if: return None worker = self.cache.by_tag(self.proxy_worker_tag) if worker: return worker available_workers = self.roles.free_workers if not available_workers: return None worker = available_workers.closest_to(self.zone_manager.enemy_start_location) self.proxy_worker_tag = worker.tag return worker",False,self.ai.time < 0 and self.proxy_worker_tag,self.ai.time < 25 and self.proxy_worker_tag,0.6480406522750854 6625,"def get_worker(self) -> Optional[Unit]: if self.ai.time < 0 and self.proxy_worker_tag: return None worker = self.cache.by_tag(self.proxy_worker_tag) if: return worker available_workers = self.roles.free_workers if not available_workers: return None worker = available_workers.closest_to(self.zone_manager.enemy_start_location) self.proxy_worker_tag = worker.tag return worker",True,worker,worker,0.6818000078201294 6626,"def get_worker(self) -> Optional[Unit]: if self.ai.time < 0 and self.proxy_worker_tag: return None worker = self.cache.by_tag(self.proxy_worker_tag) if worker: return worker available_workers = self.roles.free_workers if: return None worker = available_workers.closest_to(self.zone_manager.enemy_start_location) self.proxy_worker_tag = worker.tag return worker",True,not available_workers,not available_workers,0.6525324583053589 6627,"def init_neutrino_data(self, srv): if: return tr_data = get_attributes(srv.transponder) self._transponder_id_entry.set_text(str(int(tr_data.get('id', '0'), 16))) self._network_id_entry.set_text(str(int(tr_data.get('on', '0'), 16))) self.select_active_text(self._invertion_combo_box, Inversion(tr_data.get('inv', '2')).name) self.select_active_text(self._service_type_combo_box, srv.service_type) self.update_reference_entry()",False,self._tr_type is not TrType.Satellite,srv.transponder is None,0.6542978286743164 6628,"def __call__(self, results): """"""Call function to corrupt image. Args: results (dict): Result dict from loading pipeline. Returns: dict: Result dict with images corrupted. """""" if: raise RuntimeError('imagecorruptions is not installed') if 'img_fields' in results: assert results['img_fields'] == ['img'], 'Only single img_fields is allowed' results['img'] = corrupt(results['img'].astype(np.uint8), corruption_name=self.corruption, severity=self.severity) return results",False,corrupt is None,not self.imagecorruptions,0.6578205227851868 6629,"def __call__(self, results): """"""Call function to corrupt image. Args: results (dict): Result dict from loading pipeline. Returns: dict: Result dict with images corrupted. """""" if corrupt is None: raise RuntimeError('imagecorruptions is not installed') if: assert results['img_fields'] == ['img'], 'Only single img_fields is allowed' results['img'] = corrupt(results['img'].astype(np.uint8), corruption_name=self.corruption, severity=self.severity) return results",False,'img_fields' in results,self.check_img_fields,0.6539871692657471 6630,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if isinstance(auth, tuple) and len(auth) == 2: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,auth is None,auth is None,0.6572875380516052 6631,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if: if isinstance(auth, tuple) and len(auth) == 2: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,auth,auth,0.6750684380531311 6632,"def prepare_auth(self, auth, url=''): """"""Prepares the given HTTP auth data."""""" if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if: auth = HTTPBasicAuth(*auth) r = auth(self) self.__dict__.update(r.__dict__) self.prepare_content_length(self.body)",True,"isinstance(auth, tuple) and len(auth) == 2","isinstance(auth, tuple) and len(auth) == 2",0.6471284031867981 6633,"def refine_labels(config, multianimal=False): """""" Refines the labels of the outlier frames extracted from the analyzed videos. Helps in augmenting the training dataset. Use the function ``analyze_video`` to analyze a video and extracts the outlier frames using the function ``extract_outlier_frames`` before refining the labels. Parameters ---------- config : string Full path of the config.yaml file as a string. Screens : int value of the number of Screens in landscape mode, i.e. if you have 2 screens, enter 2. Default is 1. scale_h & scale_w : you can modify how much of the screen the GUI should occupy. The default is.9 and.8, respectively. img_scale : if you want to make the plot of the frame larger, consider changing this to.008 or more. Be careful though, too large and you will not see the buttons fully! Examples -------- >>> deeplabcut.refine_labels('/analysis/project/reaching-task/config.yaml', Screens=2, imag_scale=.0075) -------- """""" startpath = os.getcwd() wd = Path(config).resolve().parents[0] os.chdir(str(wd)) cfg = auxiliaryfunctions.read_config(config) if: from deeplabcut.refine_training_dataset import refinement refinement.show(config) else: from deeplabcut.refine_training_dataset import multiple_individuals_refinement_toolbox multiple_individuals_refinement_toolbox.show(config) os.chdir(startpath)",False,"multianimal == False and (not cfg.get('multianimalproject', False))",multianimal,0.6476901173591614 6634,"def _connect_positions_by_angle(window: Window): if: return for tab in window.get_gui_experiment().get_active_tabs(): from organoid_tracker.connecting import connector_using_angles connections = connector_using_angles.create_connections(tab.experiment, print_progress=True) result_message = tab.undo_redo.do(_ReplaceConnectionsAction(tab.experiment.connections, connections), tab.experiment) window.set_status(result_message)",False,"not dialog.popup_message_cancellable('Automatic neighbor detection', 'This algorithm will look at the 10 nearest cells of any cell. If there are no cells in between that cell and any of the nearest cells, the cells are considered neighbors. The algorithm is not exact, as it uses only the cell center positions, not the full shape of the cells.\n\nNote: this computation might take a few minutes.')",not window.get_gui_experiment().get_active_tabs(),0.6593179702758789 6635,"def __get_stat_display(self, stats, layer): """"""Return a dict of dict with all the stats display. # TODO: Drop extra parameter :param stats: Global stats dict :param layer: ~ cs_status ""None"": standalone or server mode ""Connected"": Client is connected to a Glances server ""SNMP"": Client is connected to a SNMP server ""Disconnected"": Client is disconnected from the server :returns: dict of dict * key: plugin name * value: dict returned by the get_stats_display Plugin method """""" ret = {} for p in stats.getPluginsList(enable=False): if: continue plugin_max_width = None if p in self._left_sidebar: plugin_max_width = max(self._left_sidebar_min_width, self.term_window.getmaxyx()[1] - 105) plugin_max_width = min(self._left_sidebar_max_width, plugin_max_width) ret[p] = stats.get_plugin(p).get_stats_display(args=self.args, max_width=plugin_max_width) return ret",False,p == 'quicklook' or p == 'processlist',p == layer,0.6430870890617371 6636,"def __get_stat_display(self, stats, layer): """"""Return a dict of dict with all the stats display. # TODO: Drop extra parameter :param stats: Global stats dict :param layer: ~ cs_status ""None"": standalone or server mode ""Connected"": Client is connected to a Glances server ""SNMP"": Client is connected to a SNMP server ""Disconnected"": Client is disconnected from the server :returns: dict of dict * key: plugin name * value: dict returned by the get_stats_display Plugin method """""" ret = {} for p in stats.getPluginsList(enable=False): if p == 'quicklook' or p == 'processlist': continue plugin_max_width = None if: plugin_max_width = max(self._left_sidebar_min_width, self.term_window.getmaxyx()[1] - 105) plugin_max_width = min(self._left_sidebar_max_width, plugin_max_width) ret[p] = stats.get_plugin(p).get_stats_display(args=self.args, max_width=plugin_max_width) return ret",False,p in self._left_sidebar,self.args.host_max_yx,0.6472606658935547