| row | index | input | is_correct | expected_cond | predicted_cond | score |
|---|---|---|---|---|---|---|
2 | 0 | def get_source(self, environment, template):
if<mask>:
return (self.overriden_templates[template], template, True)
if template.startswith('admin/'):
template = template[6:]
template = '/'.join(['admin', 'templates', template])
return super(ThemeLoader, self).get_source(environment, template)
template = '/'.join(['user', 'templates', template])
return super(ThemeLoader, self).get_source(environment, template) | True | template in self.overriden_templates | template in self.overriden_templates | 0.6552744507789612 |
3 | 1 | def get_source(self, environment, template):
if template in self.overriden_templates:
return (self.overriden_templates[template], template, True)
if<mask>:
template = template[6:]
template = '/'.join(['admin', 'templates', template])
return super(ThemeLoader, self).get_source(environment, template)
template = '/'.join(['user', 'templates', template])
return super(ThemeLoader, self).get_source(environment, template) | True | template.startswith('admin/') | template.startswith('admin/') | 0.6439536213874817 |
4 | 2 | def __init__(self, ref_list: Iterable[_Reference]) -> None:
self.matching_refs: list[list[_Reference]] = []
for ref in ref_list:
add = True
for other_refs in self.matching_refs:
if other_refs[0].matches(ref):
add = False
other_refs.append(ref)
break
if<mask>:
self.matching_refs.append([ref]) | True | add | add | 0.6711677312850952 |
5 | 3 | def __init__(self, ref_list: Iterable[_Reference]) -> None:
self.matching_refs: list[list[_Reference]] = []
for ref in ref_list:
add = True
for other_refs in self.matching_refs:
if<mask>:
add = False
other_refs.append(ref)
break
if add:
self.matching_refs.append([ref]) | False | other_refs[0].matches(ref) | ref in other_refs | 0.6418271064758301 |
6 | 4 | def generate_sample_problems(self, solver):
linear, quadratic = self.sapi.problem
problems = [('sample_ising', (linear, quadratic)), ('sample_qubo', (quadratic,))]
if<mask>:
bqm = dimod.BQM.from_ising(linear, quadratic)
problems.append(('sample_bqm', (bqm,)))
return problems | False | dimod | self.model_type == 'quadratic' | 0.6707044839859009 |
7 | 5 | def get_temperature_c(self, botengine=None):
"""
Get the latest temperature in Celsius
:param botengine:
:return: temperature in Celsius
"""
if<mask>:
return self.measurements[TemperatureDevice.MEASUREMENT_DEG_C][0][0]
return None | True | TemperatureDevice.MEASUREMENT_DEG_C in self.measurements | TemperatureDevice.MEASUREMENT_DEG_C in self.measurements | 0.649125874042511 |
8 | 6 | def __init__(self, cfg, input_size):
super().__init__(cfg)
self.cfg = cfg
self.is_gru = False
if<mask>:
self.core = nn.GRU(input_size, cfg.rnn_size, cfg.rnn_num_layers)
self.is_gru = True
elif cfg.rnn_type == 'lstm':
self.core = nn.LSTM(input_size, cfg.rnn_size, cfg.rnn_num_layers)
else:
raise RuntimeError(f'Unknown RNN type {cfg.rnn_type}')
self.core_output_size = cfg.rnn_size
self.rnn_num_layers = cfg.rnn_num_layers | True | cfg.rnn_type == 'gru' | cfg.rnn_type == 'gru' | 0.6513108015060425 |
9 | 7 | def __init__(self, cfg, input_size):
super().__init__(cfg)
self.cfg = cfg
self.is_gru = False
if cfg.rnn_type == 'gru':
self.core = nn.GRU(input_size, cfg.rnn_size, cfg.rnn_num_layers)
self.is_gru = True
elif<mask>:
self.core = nn.LSTM(input_size, cfg.rnn_size, cfg.rnn_num_layers)
else:
raise RuntimeError(f'Unknown RNN type {cfg.rnn_type}')
self.core_output_size = cfg.rnn_size
self.rnn_num_layers = cfg.rnn_num_layers | True | cfg.rnn_type == 'lstm' | cfg.rnn_type == 'lstm' | 0.6525049209594727 |
10 | 8 | def add_gain(self, val):
if<mask>:
self.importance += val
else:
self.importance_2 += val | False | self.main_type == 'gain' | self.importance_2 is None | 0.6461501121520996 |
11 | 9 | def delete_all_connections(self, location):
new_data = self.adj_list.copy()
loc_name = UpdateAdjList.get_location_name(location)
if<mask>:
new_data.pop(loc_name)
for key in new_data:
if loc_name in new_data[key]:
new_data[key].pop(loc_name)
with open(self.adj_list_path, 'w') as f:
f.write(str(new_data)) | False | loc_name in new_data | loc_name is not None | 0.6534073352813721 |
12 | 10 | def delete_all_connections(self, location):
new_data = self.adj_list.copy()
loc_name = UpdateAdjList.get_location_name(location)
if loc_name in new_data:
new_data.pop(loc_name)
for key in new_data:
if<mask>:
new_data[key].pop(loc_name)
with open(self.adj_list_path, 'w') as f:
f.write(str(new_data)) | False | loc_name in new_data[key] | key in new_data | 0.6481366157531738 |
13 | 11 | def start_img(self, attributes):
A = self.getAttributes(attributes, _imgAttrMap)
if<mask>:
self._syntax_error('<img> needs src attribute')
A['_selfClosingTag'] = 'img'
self._push('img', **A) | False | not A.get('src') | A['src'] is None | 0.6531503200531006 |
14 | 12 | def get_credential(self, username, password):
q = select(self.CredentialsTable).filter(self.CredentialsTable.c.username == username, self.CredentialsTable.c.password == password)
results = self.sess.execute(q).first()
if<mask>:
return None
else:
return results.id | True | results is None | results is None | 0.6523830890655518 |
15 | 13 | def flipy(self):
if<mask>:
print('%s.%s()' % (self.__class__.__name__, _fn_name()))
return True | False | _debug | self.verbose | 0.6635514497756958 |
16 | 14 | def _find_terminator(self, iterator):
"""The terminator might have some additional newlines before it.
There is at least one application that sends additional newlines
before headers (the python setuptools package).
"""
for line in iterator:
if<mask>:
break
line = line.strip()
if line:
return line
return '' | True | not line | not line | 0.6531288623809814 |
17 | 15 | def _find_terminator(self, iterator):
"""The terminator might have some additional newlines before it.
There is at least one application that sends additional newlines
before headers (the python setuptools package).
"""
for line in iterator:
if not line:
break
line = line.strip()
if<mask>:
return line
return '' | True | line | line | 0.6587799787521362 |
18 | 16 | def canRunGUI() -> bool:
if<mask>:
return bool(os.getenv('DISPLAY'))
if core.sysName == 'darwin':
try:
import tkinter
except ModuleNotFoundError:
return False
return True | False | core.sysName == 'linux' | 'DISPLAY' in os.environ | 0.6488453149795532 |
19 | 17 | def canRunGUI() -> bool:
if core.sysName == 'linux':
return bool(os.getenv('DISPLAY'))
if<mask>:
try:
import tkinter
except ModuleNotFoundError:
return False
return True | True | core.sysName == 'darwin' | core.sysName == 'darwin' | 0.6462803483009338 |
20 | 18 | def unassign_role_from_group(self, session, group, role):
"""Unassigns a role from a group on a domain"""
url = utils.urljoin(self.base_path, self.id, 'groups', group.id, 'roles', role.id)
resp = session.delete(url, endpoint_filter=self.service)
if<mask>:
return True
return False | False | resp.status_code == 204 | resp.status_code == 200 | 0.6449288129806519 |
21 | 19 | def __exit__(self, type, value, traceback):
if<mask>:
self.close()
else:
if not self._extfileobj:
self.fileobj.close()
self.closed = True | True | type is None | type is None | 0.6553890109062195 |
22 | 20 | def __exit__(self, type, value, traceback):
if type is None:
self.close()
else:
if<mask>:
self.fileobj.close()
self.closed = True | True | not self._extfileobj | not self._extfileobj | 0.6471370458602905 |
23 | 21 | def _unassign_params(self, tensor_id):
if<mask>:
del self.id_to_params[tensor_id] | False | tensor_id in self.id_to_params.keys() | tensor_id in self.id_to_params | 0.6464389562606812 |
24 | 22 | def tenant_access_ls(args):
""" Handle tenant access ls command """
name = args.name
error_info, privileges = auth_api._tenant_access_ls(name)
if<mask>:
return err_out(error_info.msg)
header = tenant_access_ls_headers()
error_info, rows = generate_tenant_access_ls_rows(privileges, name)
if error_info:
return err_out(error_info.msg)
else:
printList(args.output_format, header, rows) | True | error_info | error_info | 0.6591681838035583 |
25 | 23 | def tenant_access_ls(args):
""" Handle tenant access ls command """
name = args.name
error_info, privileges = auth_api._tenant_access_ls(name)
if error_info:
return err_out(error_info.msg)
header = tenant_access_ls_headers()
error_info, rows = generate_tenant_access_ls_rows(privileges, name)
if<mask>:
return err_out(error_info.msg)
else:
printList(args.output_format, header, rows) | True | error_info | error_info | 0.6594505906105042 |
26 | 24 | def batch_counter_hook(module, input, output):
batch_size = 1
if<mask>:
input = input[0]
batch_size = len(input)
else:
print('Warning! No positional inputs found for a module, assuming batch size is 1.')
module.__batch_counter__ += batch_size | True | len(input) > 0 | len(input) > 0 | 0.6514883041381836 |
27 | 25 | def __init__(self, default_color=None, *args, **kwargs):
super(QColorButton, self).__init__(*args, **kwargs)
self._color = None
if<mask>:
self.gl_color = default_color
self.setMaximumWidth(32)
self.setColor(self._color)
self.pressed.connect(self.onColorPicker) | False | default_color is not None | default_color | 0.6507023572921753 |
28 | 26 | def decode_rollback(self, element):
frame = self._get_frame_from_depth()
if<mask>:
raise GrammarError('Recognition decoding stack broken')
if frame is self._stack[-1]:
self._index = frame.begin
else:
raise GrammarError('Recognition decoding stack broken')
self._log_step(element, 'rollback') | False | not frame or frame.actor != element | not frame | 0.6501692533493042 |
29 | 27 | def decode_rollback(self, element):
frame = self._get_frame_from_depth()
if not frame or frame.actor != element:
raise GrammarError('Recognition decoding stack broken')
if<mask>:
self._index = frame.begin
else:
raise GrammarError('Recognition decoding stack broken')
self._log_step(element, 'rollback') | False | frame is self._stack[-1] | frame.begin | 0.6485610008239746 |
30 | 28 | def normalize(self):
length2 = self.x * self.x + self.y * self.y
length = math.sqrt(length2)
if<mask>:
self.x /= length
self.y /= length
return length | False | length != 0 | self.z | 0.6626908779144287 |
31 | 29 | def get_person(self, p):
g = ICSCalendar.SUM_PAT.match(p)
if<mask>:
p = g.group(1)
p = p.strip()
return p | True | g | g | 0.6680970191955566 |
32 | 30 | def file_name(prefix, lang):
fname = prefix
if<mask>:
fname += f'.{lang}'
return fname | False | lang is not None | lang | 0.6595156192779541 |
33 | 31 | def get_val(name, section='DEFAULT', default=None, encoding=None):
"""Get a value from the per-user config file
Parameters
----------
name : str
The name of the value to set.
section : str
The section to store the name/value in.
default :
The value to return if `name` is not set.
encoding : str
The config file's encoding, defaults to :py:data:`default_encoding`.
Examples
--------
>>> get_val("junk") is None
True
>>> set_val("junk", "random")
>>> get_val("junk")
u'random'
>>> set_val("junk", None)
>>> get_val("junk") is None
True
"""
if<mask>:
if encoding == None:
encoding = default_encoding
config = ConfigParser.ConfigParser()
f = codecs.open(path(), 'r', encoding)
config.readfp(f, path())
f.close()
try:
return config.get(section, name)
except ConfigParser.NoOptionError:
return default
else:
return default | False | os.path.exists(path()) | name | 0.6423336863517761 |
34 | 32 | def get_val(name, section='DEFAULT', default=None, encoding=None):
"""Get a value from the per-user config file
Parameters
----------
name : str
The name of the value to set.
section : str
The section to store the name/value in.
default :
The value to return if `name` is not set.
encoding : str
The config file's encoding, defaults to :py:data:`default_encoding`.
Examples
--------
>>> get_val("junk") is None
True
>>> set_val("junk", "random")
>>> get_val("junk")
u'random'
>>> set_val("junk", None)
>>> get_val("junk") is None
True
"""
if os.path.exists(path()):
if<mask>:
encoding = default_encoding
config = ConfigParser.ConfigParser()
f = codecs.open(path(), 'r', encoding)
config.readfp(f, path())
f.close()
try:
return config.get(section, name)
except ConfigParser.NoOptionError:
return default
else:
return default | False | encoding == None | encoding is None | 0.6599355936050415 |
35 | 33 | def clean_edges(arg):
if<mask>:
return replace_colon(arg)
try:
return tuple((clean_edges(x) for x in arg))
except TypeError:
return replace_colon(arg) | False | isinstance(arg, str) | isinstance(arg, tuple) | 0.6459245085716248 |
36 | 34 | def Equals(self, other):
"""
Test for equality.
Args:
other (obj):
Returns:
bool: True `other` equals self.
"""
if<mask>:
return False
if other is self:
return True
return self.Hash == other.Hash | False | other is None | type(other) is not type(self) | 0.6544768810272217 |
37 | 35 | def Equals(self, other):
"""
Test for equality.
Args:
other (obj):
Returns:
bool: True `other` equals self.
"""
if other is None:
return False
if<mask>:
return True
return self.Hash == other.Hash | False | other is self | self.Hash == other.Hash | 0.6560910940170288 |
38 | 36 | def mouseMoveEvent(self, event):
if<mask>:
self.setPos(self.mapToParent(event.pos() - event.buttonDownPos(Qt.LeftButton)))
event.setAccepted(True)
else:
event.setAccepted(False) | False | event.buttons() & Qt.LeftButton | event.buttonDownPos(Qt.LeftButton) | 0.652923047542572 |
39 | 37 | def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if<mask>:
self._oprot = oprot
self._seqid = 0 | True | oprot is not None | oprot is not None | 0.6590708494186401 |
40 | 38 | def new_epoch(self):
if<mask>:
self.history.append(self.avg)
self.reset()
self.has_new_data = True
else:
self.has_new_data = False | False | self.count > 0 | self.has_new_data or self.has_new_data is False or self.avg != self.avg | 0.6490698456764221 |
41 | 39 | def all_reduce_operation_in_group_for_variables(variables, operator, group):
for i in range(len(variables)):
if<mask>:
variables[i] = torch.tensor(variables[i]).cuda()
torch.distributed.all_reduce(variables[i], op=operator, group=group)
variables[i] = variables[i].item()
return variables | True | not torch.is_tensor(variables[i]) | not torch.is_tensor(variables[i]) | 0.6446045637130737 |
42 | 40 | def set_accept(self, media_type: Optional[str]=None, ask_version: Optional[str]=None, accept_version: Optional[str]=None, media_type_params: Optional[dict]=None, strict_mode: Optional[bool]=None) -> None:
"""Set the request and expected response media type, going forward."""
self._media_type = media_type
self._ask_version = ask_version
self._accept_version = accept_version
self._media_type_params = media_type_params
if<mask>:
self._strict_mode = strict_mode | True | strict_mode is not None | strict_mode is not None | 0.6540040373802185 |
43 | 41 | def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if<mask>:
if new is None:
new = name
self.mod = new
else:
self.mod = old | True | PY3 | PY3 | 0.6688830256462097 |
44 | 42 | def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if<mask>:
new = name
self.mod = new
else:
self.mod = old | True | new is None | new is None | 0.6595436334609985 |
45 | 43 | def clean(self, value):
value = super(ITSocialSecurityNumberField, self).clean(value)
if<mask>:
return value
value = re.sub('\\s', u'', value).upper()
try:
check_digit = ssn_check_digit(value)
except ValueError:
raise ValidationError(self.error_messages['invalid'])
if not value[15] == check_digit:
raise ValidationError(self.error_messages['invalid'])
return value | False | value == u'' | value is None | 0.6641252040863037 |
46 | 44 | def clean(self, value):
value = super(ITSocialSecurityNumberField, self).clean(value)
if value == u'':
return value
value = re.sub('\\s', u'', value).upper()
try:
check_digit = ssn_check_digit(value)
except ValueError:
raise ValidationError(self.error_messages['invalid'])
if<mask>:
raise ValidationError(self.error_messages['invalid'])
return value | False | not value[15] == check_digit | check_digit < 0 or value > self.settings['DECIMAL_DIGITS'] | 0.6454792618751526 |
47 | 45 | def _convert_token_to_id_with_added_voc(self, token):
id = self._tokenizer.token_to_id(token)
if<mask>:
return self.unk_token_id
return id | True | id is None | id is None | 0.6589533090591431 |
48 | 46 | def tablet(self, x, rot=0):
"""Tablet test objective function"""
if<mask>:
x = rotate(x)
x = [x] if isscalar(x[0]) else x
f = [1000000.0 * x[0] ** 2 + sum(x[1:] ** 2) for x in x]
return f if len(f) > 1 else f[0] | False | rot and rot is not fcts.tablet | rot > 0 | 0.6522568464279175 |
49 | 47 | def exit_on_disconnect(state):
"""Watch for connection events and exit if disconnected."""
_LOGGER.debug('ZK connection state: %s', state)
if<mask>:
_LOGGER.info('Exiting on ZK connection lost.')
utils.sys_exit(-1) | False | state != states.KazooState.CONNECTED | state == b'connected' | 0.6500900983810425 |
50 | 48 | def all_formats(self):
formats = self.conn.get('SELECT DISTINCT format from data')
if<mask>:
return set()
return {f[0] for f in formats} | True | not formats | not formats | 0.6549617052078247 |
51 | 49 | def setInfo(self, info):
self._checkLock()
if<mask>:
self._markAsChanged('info')
self.info = info
return True
return False | False | self.info != info | info != self.info | 0.6478448510169983 |
52 | 50 | def check_for_progressive_training_update(self, is_resume_from_ckpt=False):
for i in range(len(self.opts.progressive_steps)):
if<mask>:
self.net.encoder.set_progressive_stage(ProgressiveStage(i))
if self.global_step == self.opts.progressive_steps[i]:
self.net.encoder.set_progressive_stage(ProgressiveStage(i)) | False | is_resume_from_ckpt and self.global_step >= self.opts.progressive_steps[i] | is_resume_from_ckpt and self.opts.progressive_steps[i] | 0.6433718204498291 |
53 | 51 | def check_for_progressive_training_update(self, is_resume_from_ckpt=False):
for i in range(len(self.opts.progressive_steps)):
if is_resume_from_ckpt and self.global_step >= self.opts.progressive_steps[i]:
self.net.encoder.set_progressive_stage(ProgressiveStage(i))
if<mask>:
self.net.encoder.set_progressive_stage(ProgressiveStage(i)) | False | self.global_step == self.opts.progressive_steps[i] | self.global_step - self.opts.progressive_steps >= 1 | 0.6428817510604858 |
54 | 52 | def __getattr__(self, element):
if<mask>:
return self.__class__(self, self.__selected)
else:
return self.__class__(self, self.__selected + (element,)) | False | element in self.__selected | element is None | 0.6532191038131714 |
55 | 53 | def output_keyword(data_object):
if<mask>:
return
output_status_message('* * * Begin output_keyword * * *')
output_status_message('Id: {0}'.format(data_object.Id))
output_status_message('MatchType: {0}'.format(data_object.MatchType))
output_status_message('Text: {0}'.format(data_object.Text))
output_status_message('* * * End output_keyword * * *') | True | data_object is None | data_object is None | 0.650916576385498 |
56 | 54 | def _dump_wwnames_start(self):
infoname = self._get_dump_infoname()
if<mask>:
return
base_path, base_name = infoname
dump_name = base_name
dump_type = wdumper.TYPE_EMPTY
dumper = wdumper.DumpPrinter(self.parser.get_banks(), dump_type, dump_name)
dumper.dump()
self.names.save_lst(basename=dump_name, path=base_path) | True | not infoname | not infoname | 0.6580004692077637 |
57 | 55 | def encode(self, media, filename, file=None, **kwargs):
"""Attempt to encode a pyglet object to a specified format. All registered
encoders that advertise support for the specific file extension will be tried.
If no encoders are available, an EncodeException will be raised.
"""
first_exception = None
for encoder in self.get_encoders(filename):
try:
return encoder.encode(media, filename, file, **kwargs)
except EncodeException as e:
first_exception = first_exception or e
if<mask>:
raise EncodeException(f"No Encoders are available for this extension: '{filename}'")
raise first_exception | False | not first_exception | not self.encoders | 0.647996723651886 |
58 | 56 | def load_all(stream, Loader=None):
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
"""
if<mask>:
load_warning('load_all')
Loader = FullLoader
loader = Loader(stream)
try:
while loader.check_data():
yield loader.get_data()
finally:
loader.dispose() | True | Loader is None | Loader is None | 0.6556233167648315 |
59 | 57 | def decorate(self, pos, data, is_first=True):
self._table.register(pos)
row = self._table.get_row(pos)
if<mask>:
update_method = row.name.update
decowidget = super().decorate(pos, row.name, is_first=is_first)
decowidget.update = update_method
row.replace('name', decowidget)
file_widget = self._filewidgetcls(data, row)
self._widgets[data['id']] = file_widget
return file_widget | False | row.exists('name') | row.name and row.name.update | 0.6482201814651489 |
60 | 58 | def cancel_backup(self, name=None):
"""See :func:`burpui.misc.parser.interface.BUIparser.cancel_backup`"""
path = self._get_server_backup_path(name)
try:
if<mask>:
os.unlink(path)
else:
return [NOTIF_WARN, 'There is no backup scheduled for this client']
except OSError as exp:
return [NOTIF_ERROR, 'Unable to cancel backup: {}'.format(str(exp))]
return [NOTIF_OK, 'Backup successfully canceled'] | True | os.path.exists(path) | os.path.exists(path) | 0.6445538997650146 |
61 | 59 | def get_adapter(self, url):
"""Returns the appropriate connnection adapter for the given URL."""
for prefix, adapter in self.adapters.items():
if<mask>:
return adapter
raise InvalidSchema("No connection adapters were found for '%s'" % url) | False | url.lower().startswith(prefix) | url.lower().startswith(prefix.lower()) | 0.6471322774887085 |
62 | 60 | @metadata()
def version(self):
"""Return version."""
if<mask>:
return UNAP
return UNAV | False | self.mimetype() in self._supported | self._meta.unap_version | 0.6507033705711365 |
63 | 61 | def license(self):
if<mask>:
os.popen('%s/autodesk/maya%s/vray/bin/setvrlservice -server=127.0.0.1 -port=30305 -server1=0.0.0.0 -port1=30306 -server2=192.168.0.17 -port2=30306' % (self.path(), maya().version())).readlines()
os.system('%s/docker/start.sh &' % self.path()) | False | float(self.version().split('.')[0]) < 4 | maya().version() != '30305' | 0.6497805714607239 |
64 | 62 | @patch('edx_rest_framework_extensions.permissions.log')
@ddt.data(*JWT_AUTH_TYPES)
def test_jwt_no_scopes(self, auth_type, mock_log):
""" Returns 403 when scopes are enforced with JwtHasScope. """
jwt_token = self._create_jwt_token(self.student, auth_type, scopes=[])
resp = self.get_response(AuthType.jwt, token=jwt_token)
is_enforced = auth_type == AuthType.jwt_restricted
assert resp.status_code == (status.HTTP_403_FORBIDDEN if is_enforced else status.HTTP_200_OK)
if<mask>:
self._assert_in_log('JwtHasScope', mock_log.warning) | False | is_enforced | mock_log | 0.6513158082962036 |
65 | 63 | def forward(self, input):
if<mask>:
y = F.linear(input, self.weight, None)
bg = bias_gelu(self.bias, y)
return bg
elif self.fused_tanh:
return bias_tanh(self.bias, F.linear(input, self.weight, None))
else:
return self.act_fn(F.linear(input, self.weight, self.bias)) | True | self.fused_gelu | self.fused_gelu | 0.6473233699798584 |
66 | 64 | def forward(self, input):
if self.fused_gelu:
y = F.linear(input, self.weight, None)
bg = bias_gelu(self.bias, y)
return bg
elif<mask>:
return bias_tanh(self.bias, F.linear(input, self.weight, None))
else:
return self.act_fn(F.linear(input, self.weight, self.bias)) | True | self.fused_tanh | self.fused_tanh | 0.6495752334594727 |
67 | 65 | def ensure_x_visible(self, x):
"""Adjust `view_x` so that the given X coordinate is visible.
The X coordinate is given relative to the current `view_x`.
:Parameters:
`x` : int
X coordinate
"""
x -= self._x
if<mask>:
self.view_x = x
elif x >= self.view_x + self.width:
self.view_x = x - self.width
elif x >= self.view_x + self.width and self.content_width > self.width:
self.view_x = x - self.width
elif self.view_x + self.width > self.content_width:
self.view_x = self.content_width | False | x <= self.view_x | self.view_x is None | 0.6498140096664429 |
68 | 66 | def ensure_x_visible(self, x):
"""Adjust `view_x` so that the given X coordinate is visible.
The X coordinate is given relative to the current `view_x`.
:Parameters:
`x` : int
X coordinate
"""
x -= self._x
if x <= self.view_x:
self.view_x = x
elif<mask>:
self.view_x = x - self.width
elif x >= self.view_x + self.width and self.content_width > self.width:
self.view_x = x - self.width
elif self.view_x + self.width > self.content_width:
self.view_x = self.content_width | False | x >= self.view_x + self.width | x >= self.view_x and x < self.width | 0.6440525054931641 |
69 | 67 | def ensure_x_visible(self, x):
"""Adjust `view_x` so that the given X coordinate is visible.
The X coordinate is given relative to the current `view_x`.
:Parameters:
`x` : int
X coordinate
"""
x -= self._x
if x <= self.view_x:
self.view_x = x
elif x >= self.view_x + self.width:
self.view_x = x - self.width
elif<mask>:
self.view_x = x - self.width
elif self.view_x + self.width > self.content_width:
self.view_x = self.content_width | False | x >= self.view_x + self.width and self.content_width > self.width | x >= self.view_x - self.width | 0.6447702050209045 |
70 | 68 | def ensure_x_visible(self, x):
"""Adjust `view_x` so that the given X coordinate is visible.
The X coordinate is given relative to the current `view_x`.
:Parameters:
`x` : int
X coordinate
"""
x -= self._x
if x <= self.view_x:
self.view_x = x
elif x >= self.view_x + self.width:
self.view_x = x - self.width
elif x >= self.view_x + self.width and self.content_width > self.width:
self.view_x = x - self.width
elif<mask>:
self.view_x = self.content_width | False | self.view_x + self.width > self.content_width | x >= self.view_x and x > self.content_width | 0.6426810026168823 |
71 | 69 | def create_rotation_matrix(self, offset=0):
center = (self.center[0] + offset, self.center[1] + offset)
rm = cv2.getRotationMatrix2D(tuple(center), self.angle, 1)
if<mask>:
rot_im_center = cv2.transform(self.image_center[None, None, :] + offset, rm)[0, 0, :]
new_center = np.array([self.bound_w / 2, self.bound_h / 2]) + offset - rot_im_center
rm[:, 2] += new_center
return rm | False | self.expand | self.image_center is not None | 0.6642165780067444 |
72 | 70 | def get_size_distribution_index(tokens, num_sizes):
start = int(tokens[1])
end = int(tokens[2])
interval_size = end - start
for i in range(num_sizes):
if<mask>:
return i
return len(SIZES) | False | interval_size < SIZES[i] | tokens[i] > interval_size | 0.6498247385025024 |
73 | 71 | @staticmethod
def get_states_by_contract_address(event: dict, hex_contract_address: str):
if<mask>:
raise SDKException(ErrorCode.require_str_params)
notify_list = Event.__get_notify_list_by_contract_address(event, hex_contract_address)
states_list = list()
for notify in notify_list:
states = notify.get('States', list())
states_list.append(states)
states_list.count(list)
if len(states_list) == 1:
states_list = states_list[0]
return states_list | True | not isinstance(hex_contract_address, str) | not isinstance(hex_contract_address, str) | 0.646195650100708 |
74 | 72 | @staticmethod
def get_states_by_contract_address(event: dict, hex_contract_address: str):
if not isinstance(hex_contract_address, str):
raise SDKException(ErrorCode.require_str_params)
notify_list = Event.__get_notify_list_by_contract_address(event, hex_contract_address)
states_list = list()
for notify in notify_list:
states = notify.get('States', list())
states_list.append(states)
states_list.count(list)
if<mask>:
states_list = states_list[0]
return states_list | False | len(states_list) == 1 | states_list > 1 | 0.6476594805717468 |
75 | 73 | def get_gcp_managed_cloud_database_info(config, cloud_provider, info):
workspace_name = config['workspace_name']
database_instance = get_managed_database_instance(cloud_provider, workspace_name)
if<mask>:
db_address = _get_managed_database_address(database_instance)
managed_cloud_database_info = {CLOUDTIK_MANAGED_CLOUD_DATABASE_ENDPOINT: db_address}
info[CLOUDTIK_MANAGED_CLOUD_DATABASE] = managed_cloud_database_info | True | database_instance is not None | database_instance is not None | 0.6524078845977783 |
76 | 74 | def find_value_for_api_version(for_version: APIVersion, values: Dict[str, float]) -> float:
"""
Either parse a dict that looks like
{"2.0": 5,
"2.5": 4}
(aka the flow rate values from pipette config) and return the value for
the highest api level that is at or underneath ``for_version``,
or return the value passed in, if it's only a float.
"""
if<mask>:
return values
sorted_versions = sorted({APIVersion.from_string(k): v for k, v in values.items()})
last = values[str(sorted_versions[0])]
for version in sorted_versions:
if version > for_version:
break
last = values[str(version)]
return last | False | isinstance(values, float) | values.get('api_version') is None or values['api_version'] is None | 0.6466754674911499 |
77 | 75 | def find_value_for_api_version(for_version: APIVersion, values: Dict[str, float]) -> float:
"""
Either parse a dict that looks like
{"2.0": 5,
"2.5": 4}
(aka the flow rate values from pipette config) and return the value for
the highest api level that is at or underneath ``for_version``,
or return the value passed in, if it's only a float.
"""
if isinstance(values, float):
return values
sorted_versions = sorted({APIVersion.from_string(k): v for k, v in values.items()})
last = values[str(sorted_versions[0])]
for version in sorted_versions:
if<mask>:
break
last = values[str(version)]
return last | False | version > for_version | version == for_version | 0.650262176990509 |
78 | 76 | def is_writable(self, path):
result = False
while not result:
if<mask>:
result = os.access(path, os.W_OK)
break
parent = os.path.dirname(path)
if parent == path:
break
path = parent
return result | False | os.path.exists(path) | os.path.isfile(path) | 0.6451069712638855 |
79 | 77 | def is_writable(self, path):
result = False
while not result:
if os.path.exists(path):
result = os.access(path, os.W_OK)
break
parent = os.path.dirname(path)
if<mask>:
break
path = parent
return result | False | parent == path | parent and result | 0.6620122790336609 |
80 | 78 | def fail(self, module):
if<mask>:
module.fail_json_aws(self.exception, msg=self.message, **self.kwargs)
module.fail_json(msg=self.message, **self.kwargs) | True | self.exception | self.exception | 0.658711314201355 |
81 | 79 | def get_cell_input_shapes(self, flatten=False) -> ShapeList:
""" input shape(s) of each cell in order """
if<mask>:
self.cached['all_input_shapes'] = self._get_cell_input_shapes()
shapes = self.get_cached('all_input_shapes')
return shapes.flatten(flatten) | False | self.get_cached('all_input_shapes') is None | 'all_input_shapes' not in self.cached | 0.6453176736831665 |
82 | 80 | def __init__(self, inplanes, use_conv=False):
super(UpSampleBlock, self).__init__()
self.use_conv = use_conv
if<mask>:
self.conv = nn.Conv2d(inplanes, inplanes, kernel_size=3, stride=1, padding=1, groups=1, bias=True) | True | self.use_conv | self.use_conv | 0.6527868509292603 |
83 | 81 | def __init__(self, create_options=None, cron_workflow=None, namespace=None):
"""V1alpha1CreateCronWorkflowRequest - a model defined in Swagger"""
self._create_options = None
self._cron_workflow = None
self._namespace = None
self.discriminator = None
if<mask>:
self.create_options = create_options
if cron_workflow is not None:
self.cron_workflow = cron_workflow
if namespace is not None:
self.namespace = namespace | True | create_options is not None | create_options is not None | 0.6539716124534607 |
84 | 82 | def __init__(self, create_options=None, cron_workflow=None, namespace=None):
"""V1alpha1CreateCronWorkflowRequest - a model defined in Swagger"""
self._create_options = None
self._cron_workflow = None
self._namespace = None
self.discriminator = None
if create_options is not None:
self.create_options = create_options
if<mask>:
self.cron_workflow = cron_workflow
if namespace is not None:
self.namespace = namespace | True | cron_workflow is not None | cron_workflow is not None | 0.6575189232826233 |
85 | 83 | def __init__(self, create_options=None, cron_workflow=None, namespace=None):
"""V1alpha1CreateCronWorkflowRequest - a model defined in Swagger"""
self._create_options = None
self._cron_workflow = None
self._namespace = None
self.discriminator = None
if create_options is not None:
self.create_options = create_options
if cron_workflow is not None:
self.cron_workflow = cron_workflow
if<mask>:
self.namespace = namespace | True | namespace is not None | namespace is not None | 0.6571111083030701 |
86 | 84 | def _generate_tunnel_id(session):
try:
tunnels = session.query(ovs_models_v2.TunnelEndpoint).all()
except exc.NoResultFound:
return 0
tunnel_ids = [tunnel['id'] for tunnel in tunnels]
if<mask>:
id = max(tunnel_ids)
else:
id = 0
return id + 1 | False | tunnel_ids | len(tunnel_ids) > 0 | 0.6630378365516663 |
87 | 85 | def juggle_axes(xs, ys, zs, zdir):
"""
Reorder coordinates so that 2D *xs*, *ys* can be plotted in the plane
orthogonal to *zdir*. *zdir* is normally 'x', 'y' or 'z'. However, if
*zdir* starts with a '-' it is interpreted as a compensation for
`rotate_axes`.
"""
if<mask>:
return (zs, xs, ys)
elif zdir == 'y':
return (xs, zs, ys)
elif zdir[0] == '-':
return rotate_axes(xs, ys, zs, zdir)
else:
return (xs, ys, zs) | True | zdir == 'x' | zdir == 'x' | 0.6569070816040039 |
88 | 86 | def juggle_axes(xs, ys, zs, zdir):
"""
Reorder coordinates so that 2D *xs*, *ys* can be plotted in the plane
orthogonal to *zdir*. *zdir* is normally 'x', 'y' or 'z'. However, if
*zdir* starts with a '-' it is interpreted as a compensation for
`rotate_axes`.
"""
if zdir == 'x':
return (zs, xs, ys)
elif<mask>:
return (xs, zs, ys)
elif zdir[0] == '-':
return rotate_axes(xs, ys, zs, zdir)
else:
return (xs, ys, zs) | True | zdir == 'y' | zdir == 'y' | 0.6588462591171265 |
89 | 87 | def juggle_axes(xs, ys, zs, zdir):
"""
Reorder coordinates so that 2D *xs*, *ys* can be plotted in the plane
orthogonal to *zdir*. *zdir* is normally 'x', 'y' or 'z'. However, if
*zdir* starts with a '-' it is interpreted as a compensation for
`rotate_axes`.
"""
if zdir == 'x':
return (zs, xs, ys)
elif zdir == 'y':
return (xs, zs, ys)
elif<mask>:
return rotate_axes(xs, ys, zs, zdir)
else:
return (xs, ys, zs) | False | zdir[0] == '-' | zdir == 'z' | 0.6553357839584351 |
90 | 88 | @property
def swing_mode(self) -> str | None:
"""Return the swing mode setting."""
if<mask>:
return self._get_swing_mode(True)
return self._get_swing_mode(False) | False | self._set_hor_swing and self._support_hor_swing | self._use_swing_mode | 0.6456040143966675 |
91 | 89 | def get_thumbnail(thumb_size, thumbnails):
if<mask>:
thumbnail_sizes = ['high', 'medium', 'default']
else:
thumbnail_sizes = ['medium', 'high', 'default']
image = ''
for thumbnail_size in thumbnail_sizes:
try:
image = thumbnails.get(thumbnail_size, {}).get('url', '')
except AttributeError:
image = thumbnails.get(thumbnail_size, '')
if image:
break
return image | True | thumb_size == 'high' | thumb_size == 'high' | 0.6561036109924316 |
92 | 90 | def get_thumbnail(thumb_size, thumbnails):
if thumb_size == 'high':
thumbnail_sizes = ['high', 'medium', 'default']
else:
thumbnail_sizes = ['medium', 'high', 'default']
image = ''
for thumbnail_size in thumbnail_sizes:
try:
image = thumbnails.get(thumbnail_size, {}).get('url', '')
except AttributeError:
image = thumbnails.get(thumbnail_size, '')
if<mask>:
break
return image | True | image | image | 0.6701157689094543 |
93 | 91 | def processRecord(self):
self.current['timestamp'] = utcnow()
if<mask>:
self.service.dispatchEvent(self.current)
else:
log.msg(self.current) | True | self.service | self.service | 0.6587178707122803 |
94 | 92 | def getIndicatorPLM(self, indicatorFlags):
ret = None
if<mask>:
ret = colors2plmIndicator[self.color][indicatorsDirection[self.facing]]
return ret | False | indicatorFlags & self.indicator != 0 and self.color in colors2plmIndicator | indicatorFlags & 1 | 0.6515902280807495 |
95 | 93 | @classmethod
@contextlib.contextmanager
def synchronized_changes(cls, timeout, step=0.001, formats=None, initial_clipboard=None):
seq_no = win32clipboard.GetClipboardSequenceNumber()
if<mask>:
initial_clipboard = cls(from_system=True)
try:
yield
finally:
cls._wait_for_change(timeout, step, formats, initial_clipboard, seq_no) | False | formats and (not initial_clipboard) | initial_clipboard | 0.6439756155014038 |
96 | 94 | def quantity(self, card=None, card_id=None):
"""Return the total quantity of copies in it, or the quantity of the given card.
card: card object
card_id: id (int)
return: int.
"""
if<mask>:
return sum([it.nb for it in self.basketcopies_set.all()])
else:
it = card or card_id
return self.basketcopies_set.get(card=it).nb
return -1 | False | not card | card is None | 0.6663801670074463 |
97 | 95 | def select_delta(self, dist_post_update, current_iteration):
"""
Choose the delta at the scale of distance
between x and perturbed sample.
"""
if<mask>:
delta = 0.1 * (self.clip_max - self.clip_min)
elif self.constraint == 'l2':
delta = np.sqrt(self.d) * self.theta * dist_post_update
elif self.constraint == 'linf':
delta = self.d * self.theta * dist_post_update
return delta | False | current_iteration == 1 | self.constraint == 'l1' | 0.6562577486038208 |
98 | 96 | def select_delta(self, dist_post_update, current_iteration):
"""
Choose the delta at the scale of distance
between x and perturbed sample.
"""
if current_iteration == 1:
delta = 0.1 * (self.clip_max - self.clip_min)
elif<mask>:
delta = np.sqrt(self.d) * self.theta * dist_post_update
elif self.constraint == 'linf':
delta = self.d * self.theta * dist_post_update
return delta | False | self.constraint == 'l2' | self.constraint == 'sqrt' | 0.6519610285758972 |
99 | 97 | def select_delta(self, dist_post_update, current_iteration):
"""
Choose the delta at the scale of distance
between x and perturbed sample.
"""
if current_iteration == 1:
delta = 0.1 * (self.clip_max - self.clip_min)
elif self.constraint == 'l2':
delta = np.sqrt(self.d) * self.theta * dist_post_update
elif<mask>:
delta = self.d * self.theta * dist_post_update
return delta | False | self.constraint == 'linf' | self.constraint == 'l3' | 0.6501039862632751 |
100 | 98 | def _maybe_add_iscrowd_annotations(cocoapi) -> None:
for ann in cocoapi.dataset['annotations']:
if<mask>:
ann['iscrowd'] = 0 | True | 'iscrowd' not in ann | 'iscrowd' not in ann | 0.6548173427581787 |
101 | 99 | def to_rgb(self, x):
assert self.image_key == 'segmentation'
if<mask>:
self.register_buffer('colorize', torch.randn(3, x.shape[1], 1, 1).to(x))
x = F.conv2d(x, weight=self.colorize)
x = 2.0 * (x - x.min()) / (x.max() - x.min()) - 1.0
return x | True | not hasattr(self, 'colorize') | not hasattr(self, 'colorize') | 0.6446002125740051 |
102 | 100 | def validate(self):
sync = self.stream.searchBytes('G', 0, 204 * 8)
if<mask>:
return 'Unable to find synchronization byte'
for index in xrange(5):
try:
packet = self['packet[%u]' % index]
except (ParserError, MissingField):
if index and self.eof:
return True
else:
return 'Unable to get packet #%u' % index
err = packet.isValid()
if err:
return 'Packet #%u is invalid: %s' % (index, err)
return True | False | sync is None | sync | 0.6542575359344482 |
103 | 101 | def validate(self):
sync = self.stream.searchBytes('G', 0, 204 * 8)
if sync is None:
return 'Unable to find synchronization byte'
for index in xrange(5):
try:
packet = self['packet[%u]' % index]
except (ParserError, MissingField):
if index and self.eof:
return True
else:
return 'Unable to get packet #%u' % index
err = packet.isValid()
if<mask>:
return 'Packet #%u is invalid: %s' % (index, err)
return True | True | err | err | 0.6731647253036499 |
104 | 102 | def validate(self):
sync = self.stream.searchBytes('G', 0, 204 * 8)
if sync is None:
return 'Unable to find synchronization byte'
for index in xrange(5):
try:
packet = self['packet[%u]' % index]
except (ParserError, MissingField):
if<mask>:
return True
else:
return 'Unable to get packet #%u' % index
err = packet.isValid()
if err:
return 'Packet #%u is invalid: %s' % (index, err)
return True | False | index and self.eof | sync | 0.6574536561965942 |
105 | 103 | def __init__(self, theme, module, widget):
self.__attributes = {}
for key in self.__COMMON_THEME_FIELDS:
tmp = theme.get(key, widget)
if<mask>:
self.__attributes[key] = tmp
self.__attributes['name'] = module.id
self.__attributes['instance'] = widget.id
self.__attributes['prev-bg'] = theme.get('bg', 'previous') | True | tmp is not None | tmp is not None | 0.6631288528442383 |
106 | 104 | def _get_viewer(self):
if<mask>:
self.viewer = mujoco_py.MjViewer(self.sim)
self.viewer_setup()
return self.viewer | True | self.viewer is None | self.viewer is None | 0.6544407606124878 |
107 | 105 | def generate_key(self, url, suffix=''):
"""
Generates a key to store the cache under
:param url:
The URL being cached
:param suffix:
A string to append to the key
:return:
A string key for the URL
"""
if<mask>:
url = url.encode('utf-8')
key = hashlib.md5(url).hexdigest()
return key + suffix | False | isinstance(url, str_cls) | isinstance(url, unicode) | 0.6456655263900757 |
108 | 106 | def denormalize(val):
""" De-normalize a string """
if<mask>:
val = val.replace('_', '-')
return val | False | val.find('_') != -1 | val and '_' in val | 0.6521086692810059 |
109 | 107 | def __init__(self, host, port=None):
if<mask>:
raise LocationValueError('No host specified.')
self.host = _normalize_host(host, scheme=self.scheme)
self._proxy_host = host.lower()
self.port = port | True | not host | not host | 0.6742929220199585 |
110 | 108 | def _test_end(self, msg=None, report=None):
self.test_thread = None
if<mask>:
self.logger.info(msg)
if report:
self._output_test_report(report)
pid = os.getpid()
os.kill(pid, signal.SIGTERM) | True | msg | msg | 0.6828615665435791 |
111 | 109 | def _test_end(self, msg=None, report=None):
self.test_thread = None
if msg:
self.logger.info(msg)
if<mask>:
self._output_test_report(report)
pid = os.getpid()
os.kill(pid, signal.SIGTERM) | True | report | report | 0.6682855486869812 |
112 | 110 | def run_read(self):
api_result = self.api_read()
if<mask>:
return 'Apache root directory not found.'
else:
rows = []
rows.append(['Apache root directories'])
rows.append([])
for key_name in api_result:
for directory in api_result[key_name]:
rows.append([directory])
result_table = table(rows)
result_table.draw(80)
return rows | True | not api_result['apache_root_directory'] | not api_result['apache_root_directory'] | 0.6433871984481812 |
113 | 111 | def _get_version(self, conf_file: str) -> str:
"""Parse the version from the conf_file.
version should be in #!VERSION={value} format
!!! note
"0.0" is returned if no version is found
"""
version = '0.0'
with open(conf_file, 'r') as f:
for line in f.readlines():
if<mask>:
try:
version = str(float(line.split('=')[1].split()[0].strip()))
break
except Exception:
pass
return version | False | line.startswith('#!VERSION=') | line.startswith('#') | 0.6472935676574707 |
114 | 112 | def _eval_dropouts(mod):
module_name = mod.__class__.__name__
if<mask>:
mod.training = False
for module in mod.children():
_eval_dropouts(module) | False | 'Dropout' in module_name or 'BatchNorm' in module_name | 'training' not in module_name.lower() | 0.6447892189025879 |
115 | 113 | def pred_ctxt(self):
device = util.device(self.config, self.logger)
if<mask>:
datasource = self._preload_batches(device)
else:
datasource = self._reload_batches(device)
return PredictorContext(self, datasource, device) | False | self.config['preload'] | self.preload | 0.6453684568405151 |
116 | 114 | def zcl_readattributes(pkt):
config.row['zcl_readattributes_identifiers'] = ','.join(['0x{:04x}'.format(identifier) for identifier in pkt[ZCLGeneralReadAttributes].attribute_identifiers])
if<mask>:
config.row['error_msg'] = 'Unexpected payload'
return | True | len(bytes(pkt[ZCLGeneralReadAttributes].payload)) != 0 | len(bytes(pkt[ZCLGeneralReadAttributes].payload)) != 0 | 0.6488480567932129 |
117 | 115 | def get_first_iter_element(iterable: Iterable[T]) -> Tuple[T, Iterable[T]]:
"""Get first element of an iterable and a new fresh iterable.
The fresh iterable has the first element added back using ``itertools.chain``.
If the iterable is not an iterator, this is equivalent to
``(next(iter(iterable)), iterable)``.
Args:
iterable: The iterable to get the first element of.
Returns:
A tuple containing the first element of the iterable, and a fresh iterable
with all the elements.
Raises:
ValueError: `iterable` is empty -- the first call to it returns no elements.
"""
iterator = iter(iterable)
try:
first_element = next(iterator)
except StopIteration:
raise ValueError(f'iterable {iterable} had no elements to iterate over.')
return_iterable: Iterable[T]
if<mask>:
return_iterable = itertools.chain([first_element], iterator)
else:
return_iterable = iterable
return (first_element, return_iterable) | False | iterator == iterable | isinstance(first_element, Iterable) | 0.656818151473999 |
118 | 116 | def __call__(self, parent, params, response):
"""
:type parent: ServiceResource
:param parent: The resource instance to which this action is attached.
:type params: dict
:param params: Request parameters sent to the service.
:type response: dict
:param response: Low-level operation response.
"""
if<mask>:
response = jmespath.search(self.search_path, response)
return response | True | self.search_path and self.search_path != '$' | self.search_path and self.search_path != '$' | 0.6462432742118835 |
119 | 117 | def set_repository_id(self, repository: HacsRepository, repo_id: str):
"""Update a repository id."""
existing_repo_id = str(repository.data.id)
if<mask>:
return
if existing_repo_id != '0':
raise ValueError(f'The repo id for {repository.data.full_name_lower} is already set to {existing_repo_id}')
repository.data.id = repo_id
self.register(repository) | False | existing_repo_id == repo_id | not repository.data.full_name_lower or existing_repo_id == repo_id | 0.6502279043197632 |
120 | 118 | def set_repository_id(self, repository: HacsRepository, repo_id: str):
"""Update a repository id."""
existing_repo_id = str(repository.data.id)
if existing_repo_id == repo_id:
return
if<mask>:
raise ValueError(f'The repo id for {repository.data.full_name_lower} is already set to {existing_repo_id}')
repository.data.id = repo_id
self.register(repository) | False | existing_repo_id != '0' | repository.data.full_name_lower in existing_repo_id | 0.6515269875526428 |
121 | 119 | def __getitem__(self, name):
"""Returns a BoundField with the given name."""
try:
field = self.fields[name]
except KeyError:
raise KeyError("Key %r not found in '%s'" % (name, self.__class__.__name__))
if<mask>:
self._bound_fields_cache[name] = BoundField(self, field, name)
return self._bound_fields_cache[name] | True | name not in self._bound_fields_cache | name not in self._bound_fields_cache | 0.6495532989501953 |
122 | 120 | def get_incumbent(self) -> Configuration | None:
"""Returns the current incumbent in a single-objective setting."""
if<mask>:
raise ValueError('Cannot get a single incumbent for multi-objective optimization.')
if len(self._incumbents) == 0:
return None
assert len(self._incumbents) == 1
return self._incumbents[0] | False | self._scenario.count_objectives() > 1 | len(self) > 1 | 0.6493242979049683 |
123 | 121 | def get_incumbent(self) -> Configuration | None:
"""Returns the current incumbent in a single-objective setting."""
if self._scenario.count_objectives() > 1:
raise ValueError('Cannot get a single incumbent for multi-objective optimization.')
if<mask>:
return None
assert len(self._incumbents) == 1
return self._incumbents[0] | False | len(self._incumbents) == 0 | not self._incumbents | 0.6508061289787292 |
124 | 122 | def to(self, device: torch.device):
device_symmetry = self._symmetry
if<mask>:
device_symmetry = {key: value.to(device) for key, value in device_symmetry.items()}
return Mesh(_maybe_copy_to_device(self._vertices, device), _maybe_copy_to_device(self._faces, device), _maybe_copy_to_device(self._geodists, device), device_symmetry, _maybe_copy_to_device(self._texcoords, device), self.mesh_info, device) | False | device_symmetry | isinstance(device_symmetry, dict) | 0.6596102714538574 |
125 | 123 | def del_extra_repr(m):
if<mask>:
m.extra_repr = m.original_extra_repr
del m.original_extra_repr
if hasattr(m, 'accumulate_flops'):
del m.accumulate_flops | True | hasattr(m, 'original_extra_repr') | hasattr(m, 'original_extra_repr') | 0.6439371109008789 |
126 | 124 | def del_extra_repr(m):
if hasattr(m, 'original_extra_repr'):
m.extra_repr = m.original_extra_repr
del m.original_extra_repr
if<mask>:
del m.accumulate_flops | True | hasattr(m, 'accumulate_flops') | hasattr(m, 'accumulate_flops') | 0.643975019454956 |
127 | 125 | @njit
def _downsample_sample_count(a, max_count):
a = a.astype(np.float64)
total = a.sum()
p = a / total
if<mask>:
b = max_count * p
else:
b = a
b = b.astype(np.int64)
return b | False | total > max_count | max_count is not None and p > 0 | 0.652897834777832 |
128 | 126 | @classmethod
def from_param(cls, obj):
if<mask>:
return obj
return base.from_param(obj) | False | obj is None | isinstance(obj, cls) | 0.6590157747268677 |
129 | 127 | def post_add(self, datasource):
datasource.refresh_metrics()
security_manager.merge_perm('datasource_access', datasource.get_perm())
if<mask>:
security_manager.merge_perm('schema_access', datasource.schema_perm) | False | datasource.schema | datasource.schema_perm is not None | 0.6557527780532837 |
130 | 128 | def get_property(self, stylenode):
border = None
for propertyname in TABLEPROPERTYNAMES:
border = stylenode.get('{%s}%s' % (CNSD['fo'], propertyname))
if<mask>:
return border
return border | False | border is not None and border != 'none' | border | 0.6498315334320068 |
131 | 129 | def ber_decode(value):
"""Return decoded BER length as integer given bytes."""
if<mask>:
if len(value) > 1:
raise ValueError
return bytes_to_int(value)
else:
if len(value) != value[0] - 127:
raise ValueError
return bytes_to_int(value[1:]) | False | bytes_to_int(value) < 128 | isinstance(value, bytes) | 0.6481873989105225 |
132 | 130 | def ber_decode(value):
"""Return decoded BER length as integer given bytes."""
if bytes_to_int(value) < 128:
if<mask>:
raise ValueError
return bytes_to_int(value)
else:
if len(value) != value[0] - 127:
raise ValueError
return bytes_to_int(value[1:]) | False | len(value) > 1 | len(value) != b'\x00' | 0.6486057639122009 |
133 | 131 | def ber_decode(value):
"""Return decoded BER length as integer given bytes."""
if bytes_to_int(value) < 128:
if len(value) > 1:
raise ValueError
return bytes_to_int(value)
else:
if<mask>:
raise ValueError
return bytes_to_int(value[1:]) | False | len(value) != value[0] - 127 | value.startswith('0') or value.startswith('1') | 0.6444279551506042 |
134 | 132 | def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None):
loss = F.cross_entropy(pred, label, reduction='none')
if<mask>:
weight = weight.float()
loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor)
return loss | True | weight is not None | weight is not None | 0.6550837755203247 |
135 | 133 | def create_code(traits):
"""Assign bits to list of traits.
"""
code = 1
result = {INVALID: code}
if<mask>:
return result
for trait in traits:
code = code << 1
result[trait] = code
return result | True | not traits | not traits | 0.667203426361084 |
136 | 134 | def item_to_buf_log_item(item: gdb.Value) -> gdb.Value:
"""
Converts an xfs_log_item to an xfs_buf_log_item
Args:
item: The log item to convert. The value must be of
type ``struct xfs_log_item``.
Returns:
:obj:`gdb.Value`: The converted log item. The value will be of
type ``struct xfs_buf_log_item``.
Raises:
InvalidArgumentError: The type of log item is not ``XFS_LI_BUF``
:obj:`gdb.NotAvailableError`: The target value was not available.
"""
if<mask>:
raise InvalidArgumentError('item is not a buf log item')
return container_of(item, types.xfs_buf_log_item_type, 'bli_item') | True | item['li_type'] != XFS_LI_BUF | item['li_type'] != XFS_LI_BUF | 0.6482463479042053 |
137 | 135 | def __get_resource_string(self, req, bucket_name, key):
if<mask>:
encoded_uri = v2_uri_encode('/' + bucket_name + '/' + key)
else:
encoded_uri = v2_uri_encode('/')
logger.info('encoded_uri={0} key={1}'.format(encoded_uri, key))
return encoded_uri + self.__get_canonalized_query_string(req) | True | bucket_name | bucket_name | 0.6638178825378418 |
138 | 136 | @property
def minimum(self) -> Optional[SupportsFloat]:
if<mask>:
return None
if not self.metric_single_values_list and (not self.metric_aggregated_list):
return None
metrics = self.metric_single_values_list + [s.min for s in self.metric_aggregated_list]
return min(metrics) | False | 'Minimum' not in self.stats | self.metric_single_values_list is None | 0.6523749828338623 |
139 | 137 | @property
def minimum(self) -> Optional[SupportsFloat]:
if 'Minimum' not in self.stats:
return None
if<mask>:
return None
metrics = self.metric_single_values_list + [s.min for s in self.metric_aggregated_list]
return min(metrics) | False | not self.metric_single_values_list and (not self.metric_aggregated_list) | len(self.metric_aggregated_list) == 0 | 0.6491511464118958 |
140 | 138 | def __init__(self, alpha=1.5, beta=1.5):
"""
CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features, see https://arxiv.org/abs/1905.04899
Cutmix image and gt_bbox/gt_score
Args:
alpha (float): alpha parameter of beta distribute
beta (float): beta parameter of beta distribute
"""
super(Cutmix, self).__init__()
self.alpha = alpha
self.beta = beta
if<mask>:
raise ValueError('alpha should be positive in {}'.format(self))
if self.beta <= 0.0:
raise ValueError('beta should be positive in {}'.format(self)) | True | self.alpha <= 0.0 | self.alpha <= 0.0 | 0.6523735523223877 |
141 | 139 | def __init__(self, alpha=1.5, beta=1.5):
"""
CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features, see https://arxiv.org/abs/1905.04899
Cutmix image and gt_bbox/gt_score
Args:
alpha (float): alpha parameter of beta distribute
beta (float): beta parameter of beta distribute
"""
super(Cutmix, self).__init__()
self.alpha = alpha
self.beta = beta
if self.alpha <= 0.0:
raise ValueError('alpha should be positive in {}'.format(self))
if<mask>:
raise ValueError('beta should be positive in {}'.format(self)) | True | self.beta <= 0.0 | self.beta <= 0.0 | 0.6557456254959106 |
142 | 140 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if<mask>:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr | True | PY3 | PY3 | 0.6618987321853638 |
143 | 141 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if<mask>:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr | True | new_mod is None | new_mod is None | 0.6534161567687988 |
144 | 142 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if<mask>:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr | True | new_attr is None | new_attr is None | 0.6529781818389893 |
145 | 143 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if<mask>:
old_attr = name
self.attr = old_attr | True | old_attr is None | old_attr is None | 0.6546398401260376 |
146 | 144 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if<mask>:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr | True | old_attr is None | old_attr is None | 0.6559209227561951 |
147 | 145 | def process_word(word):
if<mask>:
raise UDError('There is a cycle in a sentence')
if word.parent is None:
head = int(word.columns[HEAD])
if head < 0 or head > len(ud.words) - sentence_start:
raise UDError("HEAD '{}' points outside of the sentence".format(word.columns[HEAD]))
if head:
parent = ud.words[sentence_start + head - 1]
word.parent = 'remapping'
process_word(parent)
word.parent = parent | False | word.parent == 'remapping' | ud.cycle_count | 0.6516072750091553 |
148 | 146 | def process_word(word):
if word.parent == 'remapping':
raise UDError('There is a cycle in a sentence')
if<mask>:
head = int(word.columns[HEAD])
if head < 0 or head > len(ud.words) - sentence_start:
raise UDError("HEAD '{}' points outside of the sentence".format(word.columns[HEAD]))
if head:
parent = ud.words[sentence_start + head - 1]
word.parent = 'remapping'
process_word(parent)
word.parent = parent | False | word.parent is None | HEAD in word.columns | 0.6528672575950623 |
149 | 147 | def process_word(word):
if word.parent == 'remapping':
raise UDError('There is a cycle in a sentence')
if word.parent is None:
head = int(word.columns[HEAD])
if<mask>:
raise UDError("HEAD '{}' points outside of the sentence".format(word.columns[HEAD]))
if head:
parent = ud.words[sentence_start + head - 1]
word.parent = 'remapping'
process_word(parent)
word.parent = parent | False | head < 0 or head > len(ud.words) - sentence_start | head > sentence_start | 0.6500768661499023 |
150 | 148 | def process_word(word):
if word.parent == 'remapping':
raise UDError('There is a cycle in a sentence')
if word.parent is None:
head = int(word.columns[HEAD])
if head < 0 or head > len(ud.words) - sentence_start:
raise UDError("HEAD '{}' points outside of the sentence".format(word.columns[HEAD]))
if<mask>:
parent = ud.words[sentence_start + head - 1]
word.parent = 'remapping'
process_word(parent)
word.parent = parent | False | head | head < len(ud.words) | 0.6833953857421875 |
151 | 149 | def RemoveAllDrawPointOnMap():
""" Remove all features on Point Layer """
pointLyr = selectLayerByName(Point_lyr, groupName)
if<mask>:
return
pointLyr.startEditing()
pointLyr.dataProvider().truncate()
CommonLayer(pointLyr)
return | False | pointLyr is None | not pointLyr | 0.6616418957710266 |
152 | 150 | def assert_current_keychain(self, *keys):
ak = tuple(self.active_keychains)
if<mask>:
self.assertEqual(ak, ())
else:
self.assertEqual(self.km.current_keychain, keys) | False | len(keys) < 1 | ak | 0.6502760648727417 |
153 | 151 | def filter_dont_care(gt: NDArrayObject, class_name: str) -> bool:
"""Fitlers detections that are considered don't care under current LCA evaluation."""
if<mask>:
return True
if gt == class_name:
return True
else:
return False | False | gt == 'ignore' | isinstance(gt, NDArrayObject) and any((gt.get_kind() == 'CAR' for gt in class_name)) | 0.6548080444335938 |
154 | 152 | def filter_dont_care(gt: NDArrayObject, class_name: str) -> bool:
"""Fitlers detections that are considered don't care under current LCA evaluation."""
if gt == 'ignore':
return True
if<mask>:
return True
else:
return False | False | gt == class_name | 'use_lca' in class_name or 'use_lca' in class_name | 0.6553903818130493 |
155 | 153 | def is_solution(cell: NotebookNode) -> bool:
"""Returns True if the cell is a solution cell."""
if<mask>:
return False
return cell.metadata['nbgrader'].get('solution', False) | False | 'nbgrader' not in cell.metadata | not is_solution(cell) | 0.6587323546409607 |
156 | 154 | def gather_elements(self, client, node, style):
if<mask>:
client.pending_targets.append(node['refid'])
return client.gather_elements(node, style) | False | 'refid' in node | node['refid'] not in client.pending_targets | 0.6568597555160522 |
157 | 155 | def join_stream_mode_on(self):
"""
Supervisor behaviour when stream mode is on.
When end raise (for exemple by CRTL+C)
-> Kill all actor in the following order (Puller - Dispatcher/Formula - Pusher)
1. Send SIGTERM
2. Join X seconds
3. If still alive, send SIGKILL
4. Join
"""
for actor in self.supervised_actors:
if<mask>:
self.kill_actors()
return
actor_sentinels = [actor.sentinel for actor in self.supervised_actors]
select.select(actor_sentinels, actor_sentinels, actor_sentinels)
self.kill_actors() | False | not actor.is_alive() | actor.sentinel | 0.6499453783035278 |
158 | 156 | def test_customlabel(self):
"""Limited test of custom custom labeling"""
if<mask>:
tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1)
tbl[1][1].data = np.nan
tbl.label_cells(custom_labeller)
desired = '\n*****************************\n* * header1 * header2 *\n*****************************\n* stub1 * -- * 1 *\n* stub2 * 2.00 * 3 *\n*****************************\n'
actual = '\n%s\n' % tbl.as_text(missing='--')
self.assertEqual(actual, desired) | False | has_numpy | custom_labeller is not None and custom_labeller is not None | 0.6552488207817078 |
159 | 157 | def Dequantize(v, ty):
v -= ty.zeroPoint
if<mask>:
v *= ty.scale
if isinstance(ty.extraParams, SymmPerChannelQuantParams):
v *= ty.extraParams.GetScalesBroadcastArray(ty.dimensions)
return v | False | ty.scale != 0 | isinstance(ty.scale, Number) | 0.6618735790252686 |
160 | 158 | def Dequantize(v, ty):
v -= ty.zeroPoint
if ty.scale != 0:
v *= ty.scale
if<mask>:
v *= ty.extraParams.GetScalesBroadcastArray(ty.dimensions)
return v | False | isinstance(ty.extraParams, SymmPerChannelQuantParams) | ty.extraParams != None | 0.6478604078292847 |
161 | 159 | def submit(self, expect_errors=False, data=None):
if<mask>:
data = {}
submission = {'paymethod': self.paymethod.get_uri(), 'plan_id': 'moz-brick'}
submission.update(data)
form = SubscriptionForm(submission)
if not expect_errors:
assert form.is_valid(), form.errors.as_text()
return form | False | not data | data is None | 0.6652536392211914 |
162 | 160 | def submit(self, expect_errors=False, data=None):
if not data:
data = {}
submission = {'paymethod': self.paymethod.get_uri(), 'plan_id': 'moz-brick'}
submission.update(data)
form = SubscriptionForm(submission)
if<mask>:
assert form.is_valid(), form.errors.as_text()
return form | False | not expect_errors | expect_errors | 0.6541720628738403 |
163 | 161 | def __getitem__(self, i):
ptr, size = self._index[i]
tensor = torch.from_numpy(np.frombuffer(self._bin_buffer, dtype=self._index.dtype, count=size, offset=ptr))
if<mask>:
return tensor
else:
return tensor.long() | False | tensor.dtype == torch.int64 | isinstance(i, slice) | 0.6494283676147461 |
164 | 162 | def moletteLinux4(self, event):
if<mask>:
return
self.redrawDeZoom() | True | event.widget != self.canvas | event.widget != self.canvas | 0.6526194214820862 |
165 | 163 | def random_sampling(pc, num_sample, replace=None, return_choices=False):
""" Input is NxC, output is num_samplexC
"""
if<mask>:
replace = pc.shape[0] < num_sample
choices = np.random.choice(pc.shape[0], num_sample, replace=replace)
if return_choices:
return (pc[choices], choices)
else:
return pc[choices] | True | replace is None | replace is None | 0.6564013957977295 |
166 | 164 | def random_sampling(pc, num_sample, replace=None, return_choices=False):
""" Input is NxC, output is num_samplexC
"""
if replace is None:
replace = pc.shape[0] < num_sample
choices = np.random.choice(pc.shape[0], num_sample, replace=replace)
if<mask>:
return (pc[choices], choices)
else:
return pc[choices] | True | return_choices | return_choices | 0.6621496677398682 |
167 | 165 | def _iterate_parents(self, upto=None):
if<mask>:
return (self,)
else:
if self._parent is None:
raise sa_exc.InvalidRequestError('Transaction %s is not on the active transaction list' % upto)
return (self,) + self._parent._iterate_parents(upto) | False | self._parent is upto | upto is None | 0.6606658697128296 |
168 | 166 | def _iterate_parents(self, upto=None):
if self._parent is upto:
return (self,)
else:
if<mask>:
raise sa_exc.InvalidRequestError('Transaction %s is not on the active transaction list' % upto)
return (self,) + self._parent._iterate_parents(upto) | False | self._parent is None | upto is None | 0.6587779521942139 |
169 | 167 | def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if<mask>:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper | True | slots is not None | slots is not None | 0.6484737396240234 |
170 | 168 | def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if<mask>:
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper | True | isinstance(slots, str) | isinstance(slots, str) | 0.6445169448852539 |
171 | 169 | def main(argv):
args = common.ParseOptions(argv, __doc__)
if<mask>:
common.Usage(__doc__)
sys.exit(1)
common.InitLogging()
CheckPartitionSizes(args[0]) | True | len(args) != 1 | len(args) != 1 | 0.6504237651824951 |
172 | 170 | @property
def directionsLanguage(self):
if<mask>:
self.__init()
return self._directionsLanguage | True | self._directionsLanguage is None | self._directionsLanguage is None | 0.6546432375907898 |
173 | 171 | def __init__(self, module):
super(ZipResourceFinder, self).__init__(module)
archive = self.loader.archive
self.prefix_len = 1 + len(archive)
if<mask>:
self._files = self.loader._files
else:
self._files = zipimport._zip_directory_cache[archive]
self.index = sorted(self._files) | True | hasattr(self.loader, '_files') | hasattr(self.loader, '_files') | 0.6460102796554565 |
174 | 172 | def isModifiedByAnd(self, terms):
"""return True if self is modified by all items in the list terms"""
if<mask>:
return False
if type(terms) == type(''):
return self.isModifiedBy(terms)
for t in terms:
if not self.isModifiedBy(t):
return False
return True | True | not self.__modifiedBy | not self.__modifiedBy | 0.6492145657539368 |
175 | 173 | def isModifiedByAnd(self, terms):
"""return True if self is modified by all items in the list terms"""
if not self.__modifiedBy:
return False
if<mask>:
return self.isModifiedBy(terms)
for t in terms:
if not self.isModifiedBy(t):
return False
return True | True | type(terms) == type('') | type(terms) == type('') | 0.6433017253875732 |
176 | 174 | def isModifiedByAnd(self, terms):
"""return True if self is modified by all items in the list terms"""
if not self.__modifiedBy:
return False
if type(terms) == type(''):
return self.isModifiedBy(terms)
for t in terms:
if<mask>:
return False
return True | True | not self.isModifiedBy(t) | not self.isModifiedBy(t) | 0.6428544521331787 |
177 | 175 | def send_break(self, duration=0.25):
"""Send break condition. Timed, returns to idle state after given duration."""
if<mask>:
raise portNotOpenError
self.sPort.sendBreak(duration * 1000.0) | True | not self.sPort | not self.sPort | 0.6548901200294495 |
178 | 176 | def iter_fields(fields):
"""
.. deprecated:: 1.6
Iterate over fields.
The addition of :class:`~urllib3.fields.RequestField` makes this function
obsolete. Instead, use :func:`iter_field_objects`, which returns
:class:`~urllib3.fields.RequestField` objects.
Supports list of (k, v) tuples and dicts.
"""
if<mask>:
return ((k, v) for k, v in six.iteritems(fields))
return ((k, v) for k, v in fields) | True | isinstance(fields, dict) | isinstance(fields, dict) | 0.6449024677276611 |
179 | 177 | def __eq__(self, other):
if<mask>:
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec == other._spec | True | isinstance(other, string_types) | isinstance(other, string_types) | 0.6474127769470215 |
180 | 178 | def __eq__(self, other):
if isinstance(other, string_types):
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif<mask>:
return NotImplemented
return self._spec == other._spec | True | not isinstance(other, self.__class__) | not isinstance(other, self.__class__) | 0.6466397047042847 |
181 | 179 | @staticmethod
def check_source(opts, source_class, source_spec: str, sink_class, sink_spec: str) -> couchbaseConstants.PUMP_ERROR:
if<mask>:
return f'error: source and sink must be different; source: {source_spec} sink: {sink_spec}'
return 0 | False | source_spec == sink_spec | source_spec != sink_spec | 0.65130615234375 |
182 | 180 | def inspection_types(self):
"""
Lazily loads *all* inspection types into memory and returns a dictionary
keyed by inspection type ID.
"""
if<mask>:
self._inspection_type_cache = dict([(row['InspectionTypeID'], row) for row in self.mdb_table('tblInspectionTypes')])
if not self._inspection_type_cache:
raise ScraperBroken('tblInspectionTypes was either empty or nonexistent')
return self._inspection_type_cache | False | self._inspection_type_cache is None | not hasattr(self, '_inspection_type_cache') | 0.653220534324646 |
183 | 181 | def inspection_types(self):
"""
Lazily loads *all* inspection types into memory and returns a dictionary
keyed by inspection type ID.
"""
if self._inspection_type_cache is None:
self._inspection_type_cache = dict([(row['InspectionTypeID'], row) for row in self.mdb_table('tblInspectionTypes')])
if<mask>:
raise ScraperBroken('tblInspectionTypes was either empty or nonexistent')
return self._inspection_type_cache | True | not self._inspection_type_cache | not self._inspection_type_cache | 0.6526228189468384 |
184 | 182 | def pack_value(value: BitPackValue, metadata: dict | None=None) -> bytes:
if<mask>:
metadata = {}
results = []
for i, (value_argument, value_format) in enumerate(value.bit_pack_encode(metadata)):
if 0 <= value_argument < value_format:
results.append((value_argument, value_format))
else:
raise ValueError(f'At {i}, got {value_argument} which not in range [0, {value_format}[')
return _pack_encode_results(results) | True | metadata is None | metadata is None | 0.6583060026168823 |
185 | 183 | def pack_value(value: BitPackValue, metadata: dict | None=None) -> bytes:
if metadata is None:
metadata = {}
results = []
for i, (value_argument, value_format) in enumerate(value.bit_pack_encode(metadata)):
if<mask>:
results.append((value_argument, value_format))
else:
raise ValueError(f'At {i}, got {value_argument} which not in range [0, {value_format}[')
return _pack_encode_results(results) | False | 0 <= value_argument < value_format | i == 0 | 0.6489828824996948 |
186 | 184 | def do():
try:
if<mask>:
self.run_do()
except Exception as e:
if self.logger:
self.logger.exception(e)
else:
raise e
finally:
if self.lock and self.lock.locked():
self.lock.release() | False | not self.lock or self.lock.acquire(0) | self.run_do | 0.6447834968566895 |
187 | 185 | def do():
try:
if not self.lock or self.lock.acquire(0):
self.run_do()
except Exception as e:
if self.logger:
self.logger.exception(e)
else:
raise e
finally:
if<mask>:
self.lock.release() | False | self.lock and self.lock.locked() | not self.lock | 0.6460902690887451 |
188 | 186 | def do():
try:
if not self.lock or self.lock.acquire(0):
self.run_do()
except Exception as e:
if<mask>:
self.logger.exception(e)
else:
raise e
finally:
if self.lock and self.lock.locked():
self.lock.release() | True | self.logger | self.logger | 0.6533626317977905 |
189 | 187 | def reset(self):
"""
Reset all environments
"""
obs = self.venv.reset()
if<mask>:
obs['vector'] = self._obfilt(obs['vector'])
else:
obs = self._obfilt(obs)
return obs | False | isinstance(obs, dict) | 'vector' in obs | 0.6489845514297485 |
190 | 188 | def _get_subscriber(self):
activation_info = []
entry = {}
cmd = ['/opt/ibm/seprovider/bin/getSubscriber']
output, error, rc = run_command(cmd)
if<mask>:
return activation_info
if rc != 0:
raise OperationFailed('GINSEP0007E')
if len(output) > 1:
for line in output.splitlines():
if len(line) > 0:
entry = SUBSCRIBER.search(line).groupdict()
activation_info.append(entry['hostname'])
return activation_info | False | rc == 1 | error is None | 0.6652562618255615 |
191 | 189 | def _get_subscriber(self):
activation_info = []
entry = {}
cmd = ['/opt/ibm/seprovider/bin/getSubscriber']
output, error, rc = run_command(cmd)
if rc == 1:
return activation_info
if<mask>:
raise OperationFailed('GINSEP0007E')
if len(output) > 1:
for line in output.splitlines():
if len(line) > 0:
entry = SUBSCRIBER.search(line).groupdict()
activation_info.append(entry['hostname'])
return activation_info | False | rc != 0 | error == 2 | 0.6630527973175049 |
192 | 190 | def _get_subscriber(self):
activation_info = []
entry = {}
cmd = ['/opt/ibm/seprovider/bin/getSubscriber']
output, error, rc = run_command(cmd)
if rc == 1:
return activation_info
if rc != 0:
raise OperationFailed('GINSEP0007E')
if<mask>:
for line in output.splitlines():
if len(line) > 0:
entry = SUBSCRIBER.search(line).groupdict()
activation_info.append(entry['hostname'])
return activation_info | False | len(output) > 1 | error == 2 | 0.6492688655853271 |
193 | 191 | def _get_subscriber(self):
activation_info = []
entry = {}
cmd = ['/opt/ibm/seprovider/bin/getSubscriber']
output, error, rc = run_command(cmd)
if rc == 1:
return activation_info
if rc != 0:
raise OperationFailed('GINSEP0007E')
if len(output) > 1:
for line in output.splitlines():
if<mask>:
entry = SUBSCRIBER.search(line).groupdict()
activation_info.append(entry['hostname'])
return activation_info | False | len(line) > 0 | line | 0.6486085653305054 |
194 | 192 | def remove_empty_columns(R, M):
new_R, new_M = ([], [])
for j, sum_column in enumerate(M.sum(axis=0)):
if<mask>:
new_R.append(R[:, j])
new_M.append(M[:, j])
return (numpy.array(new_R).T, numpy.array(new_M).T) | True | sum_column > 0 | sum_column > 0 | 0.6547243595123291 |
195 | 193 | def _get_bpe_word_idx(self, x):
"""
Given a list of BPE tokens, for every index in the tokens list,
return the index of the word grouping that it belongs to.
For example, for input x corresponding to ["how", "are", "y@@", "ou"],
return [[0], [1], [2], [2]].
"""
bpe_end = self.bpe_end[x]
if<mask>:
return np.array([[0]])
word_idx = bpe_end[::-1].cumsum(0)[::-1]
word_idx = word_idx.max(0)[None, :] - word_idx
return word_idx | False | x.size(0) == 1 and x.size(1) == 1 | x.size(0) == 1 and x.size(1) == 1 and (x.size(1) == 1) and (x.size(1) == 1) | 0.6478654742240906 |
196 | 194 | def build_auth_path(self, bucket, key=''):
key = boto.utils.get_utf8_value(key)
path = ''
if<mask>:
path = '/' + bucket
return path + '/%s' % urllib.parse.quote(key) | False | bucket != '' | bucket | 0.6901131272315979 |
197 | 195 | def output_applicationfault(data_object):
if<mask>:
return
output_status_message('* * * Begin output_applicationfault * * *')
output_status_message('TrackingId: {0}'.format(data_object.TrackingId))
if data_object.Type == 'AdApiFaultDetail':
output_adapifaultdetail(data_object)
if data_object.Type == 'ApiFault':
output_apifault(data_object)
output_status_message('* * * End output_applicationfault * * *') | True | data_object is None | data_object is None | 0.6521626710891724 |
198 | 196 | def output_applicationfault(data_object):
if data_object is None:
return
output_status_message('* * * Begin output_applicationfault * * *')
output_status_message('TrackingId: {0}'.format(data_object.TrackingId))
if<mask>:
output_adapifaultdetail(data_object)
if data_object.Type == 'ApiFault':
output_apifault(data_object)
output_status_message('* * * End output_applicationfault * * *') | False | data_object.Type == 'AdApiFaultDetail' | data_object.Type == 'Adapifaultdetail' | 0.6456727981567383 |
199 | 197 | def output_applicationfault(data_object):
if data_object is None:
return
output_status_message('* * * Begin output_applicationfault * * *')
output_status_message('TrackingId: {0}'.format(data_object.TrackingId))
if data_object.Type == 'AdApiFaultDetail':
output_adapifaultdetail(data_object)
if<mask>:
output_apifault(data_object)
output_status_message('* * * End output_applicationfault * * *') | False | data_object.Type == 'ApiFault' | data_object.Type == 'ApApiFault' | 0.6474363207817078 |
200 | 198 | def apply_reorder_incremental_state(module):
if<mask>:
module.reorder_incremental_state(incremental_state, new_order) | True | module != self and hasattr(module, 'reorder_incremental_state') | module != self and hasattr(module, 'reorder_incremental_state') | 0.6419668197631836 |
201 | 199 | @register.simple_tag(takes_context=True)
def tbi_slug(context: dict, label: str, slug: str):
"""Render an internal TBItem link from a slug.
The value must include everything after "/tb/t/".
Usage:
{% tbi_slug "my-slug" %}
"""
url = reverse('topic_blog:view_item_by_slug', args=[slug])
if<mask>:
request = context['request']
url = request.build_absolute_uri(url)
html = '<a href="{url}">{label}</a>'.format(url=url, label=label)
return mark_safe(html) | False | k_render_as_email in context | context.get('request') | 0.6502838134765625 |
202 | 200 | @catch_all
def validate(self, inp, pos):
if<mask>:
return (self.State.Acceptable, inp, pos)
return (self.State.Intermediate, inp, pos) | False | os.path.abspath(inp) == inp | self.State.Acceptable is not None | 0.6479153633117676 |
203 | 201 | @cache.CacheDecorator('tests/connections')
def calculate(self):
addr_space = utils.load_as(self._config)
if<mask>:
debug.error('This command does not support the selected profile.')
return network.determine_connections(addr_space) | False | not self.is_valid_profile(addr_space.profile) | 'networks' not in self._config | 0.6450906991958618 |
204 | 202 | def delete_background_image(sender, instance, **kwargs):
if<mask>:
delete_from_storage_task.delay(img.name) | True | img := instance.background_image | img := instance.background_image | 0.6498156189918518 |
205 | 203 | def _resolve_link(self, link_path, target_path):
self._links[link_path] = target_path
for cached_link_path, cached_target_path in self._links.items():
if<mask>:
self._links[cached_link_path] = target_path | False | self._expand_link(cached_target_path) == link_path | cached_target_path == link_path | 0.6445895433425903 |
206 | 204 | def get_confidence(self):
"""return confidence based on existing data"""
if<mask>:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio)
if r < SURE_YES:
return r
return SURE_YES | True | self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD | self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD | 0.6502353549003601 |
207 | 205 | def get_confidence(self):
"""return confidence based on existing data"""
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if<mask>:
r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio)
if r < SURE_YES:
return r
return SURE_YES | True | self._mTotalChars != self._mFreqChars | self._mTotalChars != self._mFreqChars | 0.6556333303451538 |
208 | 206 | def get_confidence(self):
"""return confidence based on existing data"""
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio)
if<mask>:
return r
return SURE_YES | True | r < SURE_YES | r < SURE_YES | 0.6624365448951721 |
209 | 207 | def __cmp__(self, other):
if<mask>:
return -1
if self.name == 'default':
return -1
return 1 if other.name == 'default' else 1 | False | not isinstance(other, SoundModes.SoundModeDesc) | self.name == other.name | 0.6463820934295654 |
210 | 208 | def __cmp__(self, other):
if not isinstance(other, SoundModes.SoundModeDesc):
return -1
if<mask>:
return -1
return 1 if other.name == 'default' else 1 | False | self.name == 'default' | self.name != other.name | 0.6544499397277832 |
211 | 209 | def _embed_img(display):
"""Embed an image or just return its instance if already embedded.
Parameters
----------
display : obj
A Nilearn plotting object to display.
Returns
-------
embed : str
Binary image string.
"""
if<mask>:
return None
if isinstance(display, str):
return display
return figure_to_svg_base64(display.frame_axes.figure) | True | display is None | display is None | 0.6550337076187134 |
212 | 210 | def _embed_img(display):
"""Embed an image or just return its instance if already embedded.
Parameters
----------
display : obj
A Nilearn plotting object to display.
Returns
-------
embed : str
Binary image string.
"""
if display is None:
return None
if<mask>:
return display
return figure_to_svg_base64(display.frame_axes.figure) | False | isinstance(display, str) | display.embed | 0.645338773727417 |
213 | 211 | def warns(self, before, after, message, unchanged=False):
tree = self._check(before, after)
self.assertIn(message, ''.join(self.fixer_log))
if<mask>:
self.assertTrue(tree.was_changed) | False | not unchanged | unchanged | 0.6654309630393982 |
214 | 212 | def tlv(buf):
n = 4
try:
t, l = struct.unpack('>HH', buf[:n])
except struct.error:
raise dpkt.UnpackError('invalid type, length fields')
v = buf[n:n + l]
if<mask>:
raise dpkt.NeedData('%d left, %d needed' % (len(v), l))
buf = buf[n + l:]
return (t, l, v, buf) | False | len(v) < l | len(v) != l | 0.649031400680542 |
215 | 213 | def validate_minimal(self) -> bool:
"""Checks if the loaded JSON file represents a valid minimal VERSIONINFO resource.
Returns:
(bool): valid or not
"""
valid = True
required = self._minimal_required_fields.copy()
for key in self._version_dict:
if key not in required:
valid = False
logging.error('Invalid minimal parameter: %s.', key)
else:
required.remove(key.upper())
if<mask>:
return False
return validate_version_number(self._version_dict[PEStrings.FILE_VERSION_STR]) | True | not valid | not valid | 0.6595147848129272 |
216 | 214 | def validate_minimal(self) -> bool:
"""Checks if the loaded JSON file represents a valid minimal VERSIONINFO resource.
Returns:
(bool): valid or not
"""
valid = True
required = self._minimal_required_fields.copy()
for key in self._version_dict:
if<mask>:
valid = False
logging.error('Invalid minimal parameter: %s.', key)
else:
required.remove(key.upper())
if not valid:
return False
return validate_version_number(self._version_dict[PEStrings.FILE_VERSION_STR]) | False | key not in required | key.upper() not in required | 0.6601539850234985 |
217 | 215 | def decorated_function(function):
"""Wrap function."""
if<mask>:
return function
else:
def passer(*args, **kwargs):
print('Missing dependencies: {d}.'.format(d=missing))
print('Not running `{}`.'.format(function.__name__))
return passer | False | not missing | is_missing(function) | 0.6559614539146423 |
218 | 216 | def _get_step_config_from_proto(preprocessor_step_config, step_name):
"""Returns the value of a field named step_name from proto.
Args:
preprocessor_step_config: A preprocessor_pb2.PreprocessingStep object.
step_name: Name of the field to get value from.
Returns:
result_dict: a sub proto message from preprocessor_step_config which will be
later converted to a dictionary.
Raises:
ValueError: If field does not exist in proto.
"""
for field, value in preprocessor_step_config.ListFields():
if<mask>:
return value
raise ValueError('Could not get field %s from proto!', step_name) | False | field.name == step_name | field == step_name | 0.6503969430923462 |
219 | 217 | def postprocess_obs_dict(obs_dict):
"""
Undo internal replay buffer representation changes: save images as bytes
"""
for obs_key, obs in obs_dict.items():
if<mask>:
obs_dict[obs_key] = normalize_image(obs)
return obs_dict | True | 'image' in obs_key and obs is not None | 'image' in obs_key and obs is not None | 0.6442729830741882 |
220 | 218 | def check_orphan(self):
res = self.is_orphan()
if<mask>:
self._callback()
return res | False | res and self._callback | self._callback | 0.6531074643135071 |
221 | 219 | def validate_logserver(self):
"""Validates logserver and exits if invalid"""
if<mask>:
end(HELP, 'You must enter a logserver hostname or ip address')
if not self.re_validation.match(self.logserver):
end(UNKNOWN, 'logserver name/ip address supplied contains' + 'unusable characters')
222 | 220 | def validate_logserver(self):
"""Validates logserver and exits if invalid"""
if self.logserver is None:
end(HELP, 'You must enter a logserver hostname or ip address')
if<mask>:
end(UNKNOWN, 'logserver name/ip address supplied contains' + 'unusable characters')
223 | 221 | def to_python_variable_name(name: str):
result = name.lower().replace(' ', '_').replace('-', '_')
if<mask>:
result = 'digit_' + result
result = re.sub('\\W+', '_', result)
result = result.lower()
return result | False | result[0].isdigit() | type(result) == type('') | 0.6494843363761902 |
224 | 222 | def close(self):
if<mask>:
self._debug_conn.safe_shutdown()
self._debug_conn = None | False | self._debug_conn and self._debug_conn is not self._conn | self._debug_conn is not None | 0.6469855308532715 |
225 | 223 | def __fullread(self, size):
""" Read a certain number of bytes from the source file. """
try:
if size < 0:
raise ValueError('Requested bytes (%s) less than zero' % size)
if size > self.__filesize:
raise EOFError('Requested %#x of %#x (%s)' % (int(size), int(self.__filesize), self.filename))
except AttributeError:
pass
data = self._fileobj.read(size)
if<mask>:
raise EOFError
self.__readbytes += size
return data | False | len(data) != size | not data | 0.6459337472915649 |
226 | 224 | def __fullread(self, size):
""" Read a certain number of bytes from the source file. """
try:
if<mask>:
raise ValueError('Requested bytes (%s) less than zero' % size)
if size > self.__filesize:
raise EOFError('Requested %#x of %#x (%s)' % (int(size), int(self.__filesize), self.filename))
except AttributeError:
pass
data = self._fileobj.read(size)
if len(data) != size:
raise EOFError
self.__readbytes += size
return data | True | size < 0 | size < 0 | 0.6645493507385254 |
227 | 225 | def __fullread(self, size):
""" Read a certain number of bytes from the source file. """
try:
if size < 0:
raise ValueError('Requested bytes (%s) less than zero' % size)
if<mask>:
raise EOFError('Requested %#x of %#x (%s)' % (int(size), int(self.__filesize), self.filename))
except AttributeError:
pass
data = self._fileobj.read(size)
if len(data) != size:
raise EOFError
self.__readbytes += size
return data | False | size > self.__filesize | size > self.__fileobj.read(self.__fileobj, size) | 0.6537340879440308 |
228 | 226 | def freeze(self, freeze_at=0):
"""
Freeze the first several stages of the model. Commonly used in fine-tuning.
Layers that produce the same feature map spatial size are defined as one
"stage" by :paper:`FPN`.
Args:
freeze_at (int): number of stages to freeze.
`1` means freezing the stem. `2` means freezing the stem and
one residual stage, etc.
Returns:
nn.Module: this model itself
"""
if<mask>:
self.stem.freeze()
for idx, (stage, _) in enumerate(self.stages_and_names, start=2):
if freeze_at >= idx:
for block in stage.children():
block.freeze()
return self | False | freeze_at >= 1 | freeze_at >= 0 | 0.6577537059783936 |
229 | 227 | def freeze(self, freeze_at=0):
"""
Freeze the first several stages of the model. Commonly used in fine-tuning.
Layers that produce the same feature map spatial size are defined as one
"stage" by :paper:`FPN`.
Args:
freeze_at (int): number of stages to freeze.
`1` means freezing the stem. `2` means freezing the stem and
one residual stage, etc.
Returns:
nn.Module: this model itself
"""
if freeze_at >= 1:
self.stem.freeze()
for idx, (stage, _) in enumerate(self.stages_and_names, start=2):
if<mask>:
for block in stage.children():
block.freeze()
return self | False | freeze_at >= idx | freeze_at >= 0 | 0.6535623073577881 |
230 | 228 | def get_parser():
parser = argparse.ArgumentParser(description='PyTorch Semantic Segmentation')
parser.add_argument('--config', type=str, default='config/ade20k/ade20k_pspnet50.yaml', help='config file')
parser.add_argument('opts', help='see config/ade20k/ade20k_pspnet50.yaml for all options', default=None, nargs=argparse.REMAINDER)
args = parser.parse_args()
assert args.config is not None
cfg = config.load_cfg_from_cfg_file(args.config)
if<mask>:
cfg = config.merge_cfg_from_list(cfg, args.opts)
return cfg | True | args.opts is not None | args.opts is not None | 0.6515011787414551 |
231 | 229 | def ParseNoFromSeason(season, episode):
if<mask>:
return str(episode)
elif season == 0:
return 'S' + str(episode) | False | season >= 1 | season == 1 | 0.6585592031478882 |
232 | 230 | def ParseNoFromSeason(season, episode):
if season >= 1:
return str(episode)
elif<mask>:
return 'S' + str(episode) | False | season == 0 | season == 12 | 0.6639558672904968 |
233 | 231 | def get_fed_id(chat_id):
get = FEDERATION_CHATS.get(str(chat_id))
if<mask>:
return False
else:
return get['fid'] | True | get is None | get is None | 0.6571329832077026 |
234 | 232 | def gds_validate_builtin_ST_(self, validator, value, input_name, min_occurs=None, max_occurs=None, required=None):
if<mask>:
try:
validator(value, input_name=input_name)
except GDSParseError as parse_error:
self.gds_collector_.add_message(str(parse_error)) | True | value is not None | value is not None | 0.6505969166755676 |
235 | 233 | def close(self):
"""
Shuts down the TLS session and socket and forcibly closes it
"""
try:
self.shutdown()
finally:
if<mask>:
try:
self._socket.close()
except socket_.error:
pass
self._socket = None
if self._connection_id in _socket_refs:
del _socket_refs[self._connection_id] | True | self._socket | self._socket | 0.656726598739624 |
236 | 234 | def close(self):
"""
Shuts down the TLS session and socket and forcibly closes it
"""
try:
self.shutdown()
finally:
if self._socket:
try:
self._socket.close()
except socket_.error:
pass
self._socket = None
if<mask>:
del _socket_refs[self._connection_id] | True | self._connection_id in _socket_refs | self._connection_id in _socket_refs | 0.6468115448951721 |
237 | 235 | def append_data_list(file_dir, data_list, only_true_data=False):
if<mask>:
for root, dirs, files in os.walk(file_dir):
for dir in dirs:
data_list.append(os.path.join(file_dir, dir))
break
return data_list | False | file_dir != 'xxx' | only_true_data | 0.6505033373832703 |
238 | 236 | def fix_atomic_specifiers(decl):
"""Atomic specifiers like _Atomic(type) are unusually structured,
conferring a qualifier upon the contained type.
This function fixes a decl with atomic specifiers to have a sane AST
structure, by removing spurious Typename->TypeDecl pairs and attaching
the _Atomic qualifier in the right place.
"""
while True:
decl, found = _fix_atomic_specifiers_once(decl)
if not found:
break
typ = decl
while not isinstance(typ, c_ast.TypeDecl):
try:
typ = typ.type
except AttributeError:
return decl
if<mask>:
decl.quals.append('_Atomic')
if typ.declname is None:
typ.declname = decl.name
return decl | False | '_Atomic' in typ.quals and '_Atomic' not in decl.quals | isinstance(decl, c_ast.TypeDecl) | 0.647423505783081 |
239 | 237 | def fix_atomic_specifiers(decl):
"""Atomic specifiers like _Atomic(type) are unusually structured,
conferring a qualifier upon the contained type.
This function fixes a decl with atomic specifiers to have a sane AST
structure, by removing spurious Typename->TypeDecl pairs and attaching
the _Atomic qualifier in the right place.
"""
while True:
decl, found = _fix_atomic_specifiers_once(decl)
if not found:
break
typ = decl
while not isinstance(typ, c_ast.TypeDecl):
try:
typ = typ.type
except AttributeError:
return decl
if '_Atomic' in typ.quals and '_Atomic' not in decl.quals:
decl.quals.append('_Atomic')
if<mask>:
typ.declname = decl.name
return decl | False | typ.declname is None | hasattr(typ, 'declname') | 0.6491817235946655 |
240 | 238 | def fix_atomic_specifiers(decl):
"""Atomic specifiers like _Atomic(type) are unusually structured,
conferring a qualifier upon the contained type.
This function fixes a decl with atomic specifiers to have a sane AST
structure, by removing spurious Typename->TypeDecl pairs and attaching
the _Atomic qualifier in the right place.
"""
while True:
decl, found = _fix_atomic_specifiers_once(decl)
if<mask>:
break
typ = decl
while not isinstance(typ, c_ast.TypeDecl):
try:
typ = typ.type
except AttributeError:
return decl
if '_Atomic' in typ.quals and '_Atomic' not in decl.quals:
decl.quals.append('_Atomic')
if typ.declname is None:
typ.declname = decl.name
return decl | True | not found | not found | 0.6604551076889038 |
241 | 239 | def __init__(self, logger=None, handler=None, formatter=None):
if<mask>:
logger = logging.getLogger('pyasn1')
logger.setLevel(logging.DEBUG)
if handler is None:
handler = logging.StreamHandler()
if formatter is None:
formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
self.__logger = logger | True | logger is None | logger is None | 0.6581186652183533 |
242 | 240 | def __init__(self, logger=None, handler=None, formatter=None):
if logger is None:
logger = logging.getLogger('pyasn1')
logger.setLevel(logging.DEBUG)
if<mask>:
handler = logging.StreamHandler()
if formatter is None:
formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
self.__logger = logger | True | handler is None | handler is None | 0.6543397903442383 |
243 | 241 | def __init__(self, logger=None, handler=None, formatter=None):
if logger is None:
logger = logging.getLogger('pyasn1')
logger.setLevel(logging.DEBUG)
if handler is None:
handler = logging.StreamHandler()
if<mask>:
formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
self.__logger = logger | True | formatter is None | formatter is None | 0.6560178995132446 |
244 | 242 | def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate, ibn):
super(_DenseBlock, self).__init__()
for i in range(num_layers):
if<mask>:
layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate, True)
else:
layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate, False)
self.add_module('denselayer%d' % (i + 1), layer) | False | ibn and i % 3 == 0 | ibn | 0.6528332829475403 |
245 | 243 | def output_array_of_bidlandscapepoint(data_objects):
if<mask>:
return
for data_object in data_objects['BidLandscapePoint']:
output_bidlandscapepoint(data_object) | True | data_objects is None or len(data_objects) == 0 | data_objects is None or len(data_objects) == 0 | 0.6454564332962036 |
246 | 244 | def wait(self):
self._status_update_count += 1
if<mask>:
raise TimeoutException('Timeout at polling.')
if self._status_update_count >= _PollingBlocker.NUMBER_OF_INITIAL_STATUS_CHECKS:
time.sleep(self._poll_interval_in_milliseconds / 1000.0)
else:
time.sleep(_PollingBlocker.INITIAL_STATUS_CHECK_INTERVAL_IN_MS / 1000.0) | False | self._timeout_stamp is not None and int(round(time.time()) * 1000) > self._timeout_stamp | self._status_update_count >= _polling_interval | 0.6489636898040771 |
247 | 245 | def wait(self):
self._status_update_count += 1
if self._timeout_stamp is not None and int(round(time.time()) * 1000) > self._timeout_stamp:
raise TimeoutException('Timeout at polling.')
if<mask>:
time.sleep(self._poll_interval_in_milliseconds / 1000.0)
else:
time.sleep(_PollingBlocker.INITIAL_STATUS_CHECK_INTERVAL_IN_MS / 1000.0) | False | self._status_update_count >= _PollingBlocker.NUMBER_OF_INITIAL_STATUS_CHECKS | self._status_update_count == 0 | 0.6448144912719727 |
248 | 246 | def OnPaint(self, event):
if<mask>:
dc = wx.BufferedPaintDC(self, self._Buffer)
else:
dc = wx.PaintDC(self)
self.PrepareDC(dc)
dc.DrawBitmap(self._Buffer, 0, 0) | True | USE_BUFFERED_DC | USE_BUFFERED_DC | 0.6554020643234253 |
249 | 247 | @staticmethod
def _get_previous_pitch(music_specifier, previous_pitch_by_music_specifier, voice):
key = (voice, music_specifier)
if<mask>:
previous_pitch_by_music_specifier[key] = None
previous_pitch = previous_pitch_by_music_specifier[key]
return previous_pitch | True | key not in previous_pitch_by_music_specifier | key not in previous_pitch_by_music_specifier | 0.6451401710510254 |
250 | 248 | @property
def _n_hanging_faces_x(self):
"""Number of hanging Fx."""
if<mask>:
return int(np.prod(self.shape_cells[1:]))
else:
return 0 | False | self.includes_zero | self.dim >= 2 | 0.6462757587432861 |
251 | 249 | def decode_sequence(vocab, seq):
N, T = seq.size()
sents = []
for n in range(N):
words = []
for t in range(T):
ix = seq[n, t]
if<mask>:
break
words.append(vocab[ix])
sent = ''.join(words)
sents.append(sent)
return sents | True | ix == 0 | ix == 0 | 0.6593527793884277 |
252 | 250 | def __init__(self, config, model, dataloader, evaluator):
"""
Args:
config (config): An instance object of Config, used to record parameter information.
model (Model): An object of deep-learning model.
dataloader (Dataloader): dataloader object.
evaluator (Evaluator): evaluator object.
expected that config includes these parameters below:
learning_rate (float): learning rate of model
train_batch_size (int): the training batch size.
epoch_nums (int): number of epochs.
step_size (int): step_size of scheduler.
trained_model_path (str): a path of file which is used to save parameters of best model.
checkpoint_path (str): a path of file which is used save checkpoint of training progress.
output_path (str|None): a path of a json file which is used to save test output infomation fo model.
resume (bool): start training from last checkpoint.
validset_divide (bool): whether to split validset. if True, the dataset is split to trainset-validset-testset. if False, the dataset is split to trainset-testset.
test_step (int): the epoch number of training after which conducts the evaluation on test.
best_folds_accuracy (list|None): when running k-fold cross validation, this keeps the accuracy of folds that already run.
"""
super().__init__(config, model, dataloader, evaluator)
self.t_start_epoch = 0
self.s_start_epoch = 0
self.t_epoch_i = 0
self.s_epoch_i = 0
self._build_optimizer()
if<mask>:
self._load_checkpoint() | False | config['resume'] or config['training_resume'] | config['resume'] | 0.638759970664978 |
253 | 251 | def __post_init__(self):
super().__post_init__()
if<mask>:
self.cluster_name = ClusterName()
if self.kubeconfig_path is None:
self.kubeconfig_path = utils.get_kubeconfig_path(self.cluster_name.get()) | False | self.cluster_name is None or isinstance(self.cluster_name, str) | self.cluster_name is None | 0.648577094078064 |
254 | 252 | def __post_init__(self):
super().__post_init__()
if self.cluster_name is None or isinstance(self.cluster_name, str):
self.cluster_name = ClusterName()
if<mask>:
self.kubeconfig_path = utils.get_kubeconfig_path(self.cluster_name.get()) | True | self.kubeconfig_path is None | self.kubeconfig_path is None | 0.6508557200431824 |
255 | 253 | def __init__(self, config, *inputs, **kwargs):
super(PreTrainedModel, self).__init__()
if<mask>:
raise ValueError('Parameter config in `{}(config)` should be an instance of class `PretrainedConfig`. To create a model from a pretrained model use `model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`'.format(self.__class__.__name__, self.__class__.__name__))
self.config = config | False | not isinstance(config, PretrainedConfig) | not isinstance(config, pretrainedConfig) | 0.6529104709625244 |
256 | 254 | def start(self):
ignored = []
selection = self.ctx.result.get('identify', {}).get('selection', [])
for f in selection[:]:
ignore = self._safelist_file(f)
if<mask>:
ignored.append(ignore)
selection.remove(f)
return ignored | True | ignore | ignore | 0.668021559715271 |
257 | 255 | def new_selection(self, selection):
if<mask>:
for key in self.widgets:
self.widgets[key].set_value(None)
self.set_enabled(False)
return
for key in self.widgets:
self._update_widget(key, selection)
self.set_enabled(True) | False | not selection | selection is None | 0.6494703888893127 |
258 | 256 | def __getitem__(self, idx):
image = self.load_image(idx)
annots = self.load_annots(idx)
scale = np.array(1.0).astype(np.float32)
size = np.array([image.shape[0], image.shape[1]]).astype(np.float32)
sample = {'image': image, 'annots': annots, 'scale': scale, 'size': size}
if<mask>:
sample = self.transform(sample)
return sample | True | self.transform | self.transform | 0.6671528816223145 |
259 | 257 | @pytest.mark.parametrize('framework_status', Framework.STATUSES)
def test_services_can_not_be_copied_to_a_framework_that_is_not_open(self, framework_status):
if<mask>:
return
self.set_framework_status('g-cloud-7', framework_status)
res = self.client.put('/draft-services/copy-from/{}'.format(self.service_id), data=json.dumps({**self.updater_json, **self.basic_questions_json, 'targetFramework': 'g-cloud-7'}), content_type='application/json')
assert res.status_code == 400
assert 'Target framework is not open' in res.get_data(as_text=True) | False | framework_status == 'open' | framework_status == Framework.STATUSES | 0.6536476612091064 |
260 | 258 | @property
def braintree_data(self):
data = {'amount': self.cleaned_data['amount'], 'options': {'submit_for_settlement': True}}
if<mask>:
data['payment_method_token'] = self.cleaned_data['paymethod'].provider_id
elif self.cleaned_data.get('nonce'):
data['payment_method_nonce'] = self.cleaned_data['nonce']
return data | True | self.cleaned_data.get('paymethod') | self.cleaned_data.get('paymethod') | 0.6514558792114258 |
261 | 259 | @property
def braintree_data(self):
data = {'amount': self.cleaned_data['amount'], 'options': {'submit_for_settlement': True}}
if self.cleaned_data.get('paymethod'):
data['payment_method_token'] = self.cleaned_data['paymethod'].provider_id
elif<mask>:
data['payment_method_nonce'] = self.cleaned_data['nonce']
return data | True | self.cleaned_data.get('nonce') | self.cleaned_data.get('nonce') | 0.6491628885269165 |
262 | 260 | def _getParent(self, ncbid):
"""
Gets direct parent ncbi taxon id.
"""
parent = self._ncbidToNcbidParent.get(ncbid, None)
if<mask>:
parent = self._taxonomy.getParentNcbid(ncbid)
self._ncbidToNcbidParent[ncbid] = parent
return parent | True | parent is None | parent is None | 0.6548084020614624 |
263 | 261 | def _check_is_max_context(doc_spans, cur_span_index, position):
"""Check if this is the'max context' doc span for the token."""
best_score = None
best_span_index = None
for span_index, doc_span in enumerate(doc_spans):
end = doc_span.start + doc_span.length - 1
if<mask>:
continue
if position > end:
continue
num_left_context = position - doc_span.start
num_right_context = end - position
score = min(num_left_context, num_right_context) + 0.01 * doc_span.length
if best_score is None or score > best_score:
best_score = score
best_span_index = span_index
return cur_span_index == best_span_index | False | position < doc_span.start | span_index == 0 | 0.6510602831840515 |
264 | 262 | def _check_is_max_context(doc_spans, cur_span_index, position):
"""Check if this is the'max context' doc span for the token."""
best_score = None
best_span_index = None
for span_index, doc_span in enumerate(doc_spans):
end = doc_span.start + doc_span.length - 1
if position < doc_span.start:
continue
if<mask>:
continue
num_left_context = position - doc_span.start
num_right_context = end - position
score = min(num_left_context, num_right_context) + 0.01 * doc_span.length
if best_score is None or score > best_score:
best_score = score
best_span_index = span_index
return cur_span_index == best_span_index | False | position > end | end < doc_span.start | 0.6604651212692261 |
265 | 263 | def _check_is_max_context(doc_spans, cur_span_index, position):
"""Check if this is the'max context' doc span for the token."""
best_score = None
best_span_index = None
for span_index, doc_span in enumerate(doc_spans):
end = doc_span.start + doc_span.length - 1
if position < doc_span.start:
continue
if position > end:
continue
num_left_context = position - doc_span.start
num_right_context = end - position
score = min(num_left_context, num_right_context) + 0.01 * doc_span.length
if<mask>:
best_score = score
best_span_index = span_index
return cur_span_index == best_span_index | False | best_score is None or score > best_score | score > best_score | 0.6450030207633972 |
266 | 264 | def __init__(self, optimizer, big_gamma=0.999, epsilon=1e-08, from_grad=True):
""" Apply Gap Aware on computed gradients """
super().__init__(optimizer)
self.big_gamma = big_gamma
self.running_avg_step = init_running_avg_step(optimizer)
self.epsilon = epsilon
for pg in self.optimizer.param_groups:
for p in pg['params']:
if<mask>:
self.optimizer.state[p]['momentum_buffer'] = torch.zeros_like(p) | False | 'momentum_buffer' not in self.optimizer.state[p] | p.requires_grad | 0.6456429958343506 |
267 | 265 | def _names_by_code(states):
d = {}
for name in states.__dict__:
if<mask>:
code = getattr(states, name)
d[code] = name
return d | False | not name.startswith('__') | hasattr(states, name) | 0.6441135406494141 |
268 | 266 | @restart_executor
def map(self, function, *iterables, **kwargs):
"""
Calls *function* for every item in *iterables* then calls *callback* (
if provided as a keyword argument via *kwargs*) with a list containing
the results when complete. The results list will be in the order in
which *iterables* was passed to *function* (not random or based on how
long they took to complete).
Any additional *kwargs* will be passed to the *function* with each
iteration of *iterables*.
"""
callback = kwargs.pop('callback', None)
futures = []
for i in iterables:
futures.append(self.executor.submit(safe_call, function, i, **kwargs))
if<mask>:
callback_when_complete(futures, callback)
return futures | False | callback | callback is not None | 0.6648369431495667 |
269 | 267 | def close(self):
try:
if self.game is not None:
self.game.close()
except RuntimeError as exc:
log.warning('Runtime error in VizDoom game close(): %r', exc)
if<mask>:
import pygame
pygame.display.quit()
pygame.quit() | True | self.screen is not None | self.screen is not None | 0.6483498811721802 |
270 | 268 | def close(self):
try:
if<mask>:
self.game.close()
except RuntimeError as exc:
log.warning('Runtime error in VizDoom game close(): %r', exc)
if self.screen is not None:
import pygame
pygame.display.quit()
pygame.quit() | False | self.game is not None | hasattr(self, 'game') | 0.6549391746520996 |
271 | 269 | @confluence_measure('locust_search_cql:search_results')
def search_cql():
r = locust.get(f"/rest/api/search?cql=siteSearch~'{generate_random_string(3, only_letters=True)}'&start=0&limit=20", catch_response=True)
if<mask>:
logger.locust_info(r.content.decode('utf-8'))
content = r.content.decode('utf-8')
if 'results' not in content:
logger.error(f'Search cql failed: {content}')
assert 'results' in content, 'Search cql failed.'
locust.get('/rest/mywork/latest/status/notification/count', catch_response=True) | False | '{"results":[' not in r.content.decode('utf-8') | r.status_code == 200 | 0.6443426012992859 |
272 | 270 | @confluence_measure('locust_search_cql:search_results')
def search_cql():
r = locust.get(f"/rest/api/search?cql=siteSearch~'{generate_random_string(3, only_letters=True)}'&start=0&limit=20", catch_response=True)
if '{"results":[' not in r.content.decode('utf-8'):
logger.locust_info(r.content.decode('utf-8'))
content = r.content.decode('utf-8')
if<mask>:
logger.error(f'Search cql failed: {content}')
assert 'results' in content, 'Search cql failed.'
locust.get('/rest/mywork/latest/status/notification/count', catch_response=True) | False | 'results' not in content | content not in '{'content' | 0.6513659954071045 |
273 | 271 | def __call__(self, im, im_info):
"""
Args:
im (np.ndarray): image (np.ndarray)
im_info (dict): info of image
Returns:
im (np.ndarray): processed image (np.ndarray)
im_info (dict): info of processed image
"""
coarsest_stride = self.coarsest_stride
if<mask>:
return (im, im_info)
im_c, im_h, im_w = im.shape
pad_h = int(np.ceil(float(im_h) / coarsest_stride) * coarsest_stride)
pad_w = int(np.ceil(float(im_w) / coarsest_stride) * coarsest_stride)
padding_im = np.zeros((im_c, pad_h, pad_w), dtype=np.float32)
padding_im[:, :im_h, :im_w] = im
return (padding_im, im_info) | True | coarsest_stride <= 0 | coarsest_stride <= 0 | 0.6593202352523804 |
274 | 272 | def test_commands_hofvarpnir_config():
if<mask>:
pytest.skip('commands.py not ported to Windows.')
output_dir = '/data/output'
commands = _run_commands_from_flags(output_dir=output_dir, remote=True)
assert len(commands) == 1
expected = EXPECTED_HOFVARPNIR_CONFIG_TEMPLATE.format(output_dir=output_dir)
assert commands[0] == expected | True | os.name == 'nt' | os.name == 'nt' | 0.6564439535140991 |
275 | 273 | def __update_job(self, plugin: str, name: str, success: bool):
with self.__thread_lock:
err = self.__db.update_job(plugin, name, success)
if<mask>:
self.__logger.info(f'Successfully updated database for the job {name} from plugin {plugin}')
else:
self.__logger.warning(f'Failed to update database for the job {name} from plugin {plugin}: {err}') | False | not err | err is None | 0.6602324843406677 |
276 | 274 | def add_style(self, **kwargs) -> Style:
"""Add a new style to the current document. If no style name is
provided, the next available numbered style will be generated.
"""
if<mask>:
raise IndexError(f"style '{kwargs['name']}' already exists")
style = Style(**kwargs)
if style.name is None:
style.name = self._model.custom_style_name()
style._update_styles = True
self._model.styles[style.name] = style
return style | False | 'name' in kwargs and kwargs['name'] is not None and (kwargs['name'] in self._model.styles) | kwargs['name'] in self._model.styles | 0.6507617235183716 |
277 | 275 | def add_style(self, **kwargs) -> Style:
"""Add a new style to the current document. If no style name is
provided, the next available numbered style will be generated.
"""
if 'name' in kwargs and kwargs['name'] is not None and (kwargs['name'] in self._model.styles):
raise IndexError(f"style '{kwargs['name']}' already exists")
style = Style(**kwargs)
if<mask>:
style.name = self._model.custom_style_name()
style._update_styles = True
self._model.styles[style.name] = style
return style | False | style.name is None | not style.name | 0.6527454853057861 |
278 | 276 | def serialize_to_request(self, parameters, operation_model):
shape = operation_model.input_shape
serialized = self._create_default_request()
serialized['method'] = operation_model.http.get('method', self.DEFAULT_METHOD)
body_params = self.MAP_TYPE()
body_params['Action'] = operation_model.name
body_params['Version'] = operation_model.metadata['apiVersion']
if<mask>:
self._serialize(body_params, parameters, shape)
serialized['body'] = body_params
return serialized | True | shape is not None | shape is not None | 0.6587272882461548 |
279 | 277 | def make_safe_filename(name: str) -> str:
if<mask>:
return name
else:
from tango.common.det_hash import det_hash
name_hash = det_hash(name)
name = name.replace(' ', '-').replace('/', '--')
return ''.join((c for c in name if c in SAFE_FILENAME_CHARS)) + f'-{name_hash[:7]}' | False | filename_is_safe(name) | not name.startswith('--') | 0.6487612724304199 |
280 | 278 | def jellyFor(self, jellier):
qual = reflect.qual(PBMind)
if<mask>:
qual = qual.encode('utf-8')
return (qual, jellier.invoker.registerReference(self)) | True | isinstance(qual, unicode) | isinstance(qual, unicode) | 0.6507209539413452 |
281 | 279 | def convert_pytorch_name_to_tf(torch_name, module_name=None):
"""Convert a pytorch weight name in a tensorflow model weight name."""
op_name = torch_name.split('.')[-1]
if<mask>:
op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel'
transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name)
if op_name == 'bias':
op_name = 'bias' if module_name and '/Linear' in module_name else 'beta'
if op_name == 'running_mean':
op_name = 'moving_mean'
if op_name == 'running_var':
op_name = 'moving_variance'
return (module_name + '/' + op_name, transpose) | False | op_name == 'weight' | op_name == 'gamma' | 0.6558821201324463 |
282 | 280 | def convert_pytorch_name_to_tf(torch_name, module_name=None):
"""Convert a pytorch weight name in a tensorflow model weight name."""
op_name = torch_name.split('.')[-1]
if op_name == 'weight':
op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel'
transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name)
if<mask>:
op_name = 'bias' if module_name and '/Linear' in module_name else 'beta'
if op_name == 'running_mean':
op_name = 'moving_mean'
if op_name == 'running_var':
op_name = 'moving_variance'
return (module_name + '/' + op_name, transpose) | True | op_name == 'bias' | op_name == 'bias' | 0.6536109447479248 |
283 | 281 | def convert_pytorch_name_to_tf(torch_name, module_name=None):
"""Convert a pytorch weight name in a tensorflow model weight name."""
op_name = torch_name.split('.')[-1]
if op_name == 'weight':
op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel'
transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name)
if op_name == 'bias':
op_name = 'bias' if module_name and '/Linear' in module_name else 'beta'
if<mask>:
op_name = 'moving_mean'
if op_name == 'running_var':
op_name = 'moving_variance'
return (module_name + '/' + op_name, transpose) | True | op_name == 'running_mean' | op_name == 'running_mean' | 0.6522010564804077 |
284 | 282 | def convert_pytorch_name_to_tf(torch_name, module_name=None):
"""Convert a pytorch weight name in a tensorflow model weight name."""
op_name = torch_name.split('.')[-1]
if op_name == 'weight':
op_name = 'gamma' if module_name and '/BatchNorm2d' in module_name else 'kernel'
transpose = bool(op_name == 'kernel' or 'emb_projs' in op_name or 'out_projs' in op_name)
if op_name == 'bias':
op_name = 'bias' if module_name and '/Linear' in module_name else 'beta'
if op_name == 'running_mean':
op_name = 'moving_mean'
if<mask>:
op_name = 'moving_variance'
return (module_name + '/' + op_name, transpose) | False | op_name == 'running_var' | op_name == 'running_variance' | 0.6520791053771973 |
285 | 283 | @require_GET
def celery_ping(request):
"""Just tells you if Celery is on or not"""
try:
ping = celery.control.inspect().ping()
if<mask>:
return HttpResponse()
except Exception:
pass
return HttpResponse(status=500) | False | ping | ping['status'] != 'on' | 0.6677079200744629 |
286 | 284 | def get_pipe_transport(self, fd):
if<mask>:
return self._pipes[fd].pipe
else:
return None | True | fd in self._pipes | fd in self._pipes | 0.656368613243103 |
287 | 285 | def get_backup_logs(self, number, client, forward=False, deep=False, agent=None):
"""See :func:`burpui.misc.backend.interface.BUIbackend.get_backup_logs`"""
if<mask>:
return {} if number and number != -1 else []
if number == -1:
return self._get_all_backup_logs(client, forward, deep)
return self._get_backup_logs(number, client, forward, deep) | True | not client or not number | not client or not number | 0.6508263945579529 |
288 | 286 | def get_backup_logs(self, number, client, forward=False, deep=False, agent=None):
"""See :func:`burpui.misc.backend.interface.BUIbackend.get_backup_logs`"""
if not client or not number:
return {} if number and number != -1 else []
if<mask>:
return self._get_all_backup_logs(client, forward, deep)
return self._get_backup_logs(number, client, forward, deep) | True | number == -1 | number == -1 | 0.6587668061256409 |
289 | 287 | def try_sample_affine_and_pad(img, p, pad_k, G=None):
batch, _, height, width = img.shape
G_try = G
if<mask>:
G_try = sample_affine(p, batch, height, width)
pad_x1, pad_x2, pad_y1, pad_y2 = get_padding(torch.inverse(G_try), height, width, pad_k)
img_pad = F.pad(img, (pad_x1 + pad_k, pad_x2 + pad_k, pad_y1 + pad_k, pad_y2 + pad_k), mode='reflect')
return (img_pad, G_try, (pad_x1, pad_x2, pad_y1, pad_y2)) | False | G is None | len(p.shape) > 0 | 0.650160551071167 |
290 | 288 | def __get__(self, obj, cls=None):
if<mask>:
return self
value = self.func(obj)
object.__setattr__(obj, self.func.__name__, value)
return value | True | obj is None | obj is None | 0.6594734191894531 |
291 | 289 | def release(self, t):
super().release(t)
s = t.storage
if<mask>:
assert s.ref_int == 0
self._evict(s) | True | s.ref_ext == 0 and s.material and (not s.pinned) | s.ref_ext == 0 and s.material and (not s.pinned) | 0.6471388339996338 |
292 | 290 | def json_has_required_keys(data, keys):
missing_keys = set(keys) - set(data.keys())
if<mask>:
abort(400, "Invalid JSON must have '{}' keys".format("', '".join(missing_keys))) | True | missing_keys | missing_keys | 0.6594750285148621 |
293 | 291 | def __init__(self, **options):
self.consumer_key = options.get('consumer_key')
self.consumer_secret = options.get('consumer_secret')
self.sandbox = options.get('sandbox', True)
if<mask>:
default_service_host = 'sandbox.evernote.com'
else:
default_service_host = 'www.evernote.com'
self.service_host = options.get('service_host', default_service_host)
self.additional_headers = options.get('additional_headers', {})
self.token = options.get('token')
self.secret = options.get('secret') | False | self.sandbox | options.get('sandbox') | 0.675239086151123 |
294 | 292 | def get_subtasks(task_or_mixture):
"""Returns all the Tasks in a Mixture as a list or the Task itself."""
if<mask>:
return [task_or_mixture]
else:
return task_or_mixture.tasks | False | isinstance(task_or_mixture, Task) | not hasattr(task_or_mixture, 'tasks') | 0.6510021686553955 |
295 | 293 | def parse_response_content(self, response_content):
response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content)
if<mask>:
self.activity_id = response['activity_id']
if 'activity_order_id' in response:
self.activity_order_id = response['activity_order_id']
if 'out_biz_no' in response:
self.out_biz_no = response['out_biz_no']
if 'status' in response:
self.status = response['status'] | True | 'activity_id' in response | 'activity_id' in response | 0.658087968826294 |
296 | 294 | def parse_response_content(self, response_content):
response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content)
if 'activity_id' in response:
self.activity_id = response['activity_id']
if<mask>:
self.activity_order_id = response['activity_order_id']
if 'out_biz_no' in response:
self.out_biz_no = response['out_biz_no']
if 'status' in response:
self.status = response['status'] | True | 'activity_order_id' in response | 'activity_order_id' in response | 0.6560535430908203 |
297 | 295 | def parse_response_content(self, response_content):
response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content)
if 'activity_id' in response:
self.activity_id = response['activity_id']
if 'activity_order_id' in response:
self.activity_order_id = response['activity_order_id']
if<mask>:
self.out_biz_no = response['out_biz_no']
if 'status' in response:
self.status = response['status'] | True | 'out_biz_no' in response | 'out_biz_no' in response | 0.6516119241714478 |
298 | 296 | def parse_response_content(self, response_content):
response = super(AlipayUserDtbankcustActivityorderQueryResponse, self).parse_response_content(response_content)
if 'activity_id' in response:
self.activity_id = response['activity_id']
if 'activity_order_id' in response:
self.activity_order_id = response['activity_order_id']
if 'out_biz_no' in response:
self.out_biz_no = response['out_biz_no']
if<mask>:
self.status = response['status'] | True | 'status' in response | 'status' in response | 0.6611331701278687 |
299 | 297 | def __exit__(self, exc_type, exc_val, exc_tb):
try:
self.close()
except BaseException:
if<mask>:
raise | False | exc_val is None | exc_val is not None | 0.6497892141342163 |
300 | 298 | def make_config_from_repo(repo_path, rev=None, hooks=None, check=True):
manifest = load_manifest(os.path.join(repo_path, C.MANIFEST_FILE))
config = {'repo': f'file://{repo_path}', 'rev': rev or git.head_rev(repo_path), 'hooks': hooks or [{'id': hook['id']} for hook in manifest]}
if<mask>:
wrapped = validate({'repos': [config]}, CONFIG_SCHEMA)
wrapped = apply_defaults(wrapped, CONFIG_SCHEMA)
config, = wrapped['repos']
return config
else:
return config | True | check | check | 0.6717772483825684 |
301 | 299 | def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None, class_weight=None):
"""Calculate the CrossEntropy loss.
Args:
pred (torch.Tensor): The prediction with shape (N, C), C is the number
of classes.
label (torch.Tensor): The learning label of the prediction.
weight (torch.Tensor, optional): Sample-wise loss weight.
reduction (str, optional): The method used to reduce the loss.
avg_factor (int, optional): Average factor that is used to average
the loss. Defaults to None.
class_weight (list[float], optional): The weight for each class.
Returns:
torch.Tensor: The calculated loss
"""
loss = F.cross_entropy(pred, label, weight=class_weight, reduction='none')
if<mask>:
weight = weight.float()
loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor)
return loss | True | weight is not None | weight is not None | 0.6537113189697266 |
302 | 300 | def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
key = 'quote'
rate = market[takerOrMaker]
cost = float(self.cost_to_precision(symbol, amount * rate))
if<mask>:
cost *= price
else:
key = 'base'
return {'type': takerOrMaker, 'currency': market[key], 'rate': rate, 'cost': cost} | False | side == 'sell' | type == 'linear' | 0.6546257734298706 |
303 | 301 | def _sum_of_host_slots(normalized_hosts, validate=True):
total_slots = 0
for normalized_host in normalized_hosts:
slots = normalized_host['slots']
if<mask>:
if validate:
raise ValueError('No slots defined for host: {}'.format(normalized_host['ip']))
continue
total_slots += slots
return total_slots | True | slots is None | slots is None | 0.6507751941680908 |
304 | 302 | def _sum_of_host_slots(normalized_hosts, validate=True):
total_slots = 0
for normalized_host in normalized_hosts:
slots = normalized_host['slots']
if slots is None:
if<mask>:
raise ValueError('No slots defined for host: {}'.format(normalized_host['ip']))
continue
total_slots += slots
return total_slots | False | validate | validate and normalized_host['ip'] not in VALID_HOSTS | 0.6601842641830444 |
305 | 303 | def visit_set(self, node: nodes.Set) -> None:
if<mask>:
node.inf_type = TypeInfo(Set[self.type_constraints.fresh_tvar(node)])
else:
elt_inf_type = self._unify_elements(node.elts, node)
node.inf_type = wrap_container(Set, elt_inf_type) | False | not node.elts | self.type_constraints is not None | 0.6602118015289307 |
306 | 304 | def multi_party_run(self, **kwargs):
"""
Run 3 parties with target function or other additional arguments.
:param kwargs:
:return:
"""
target = kwargs['target']
parties = []
for role in range(self.party_num):
kwargs.update({'role': role})
parties.append(Aby3Process(target=target, kwargs=kwargs))
parties[-1].start()
for party in parties:
party.join()
if<mask>:
return party.exception
return (True,) | True | party.exception | party.exception | 0.6527734994888306 |
307 | 305 | def load_build(self):
"""Set the state of a newly created object.
We capture it to replace our place-holder objects,
NDArrayWrapper, by the array we are interested in. We
replace them directly in the stack of pickler.
"""
Unpickler.load_build(self)
if<mask>:
if self.np is None:
raise ImportError("Trying to unpickle an ndarray, but numpy didn't import correctly")
nd_array_wrapper = self.stack.pop()
array = nd_array_wrapper.read(self)
self.stack.append(array) | False | isinstance(self.stack[-1], NDArrayWrapper) | self.stack and self.stack[-1].type == 'array' | 0.643934965133667 |
308 | 306 | def load_build(self):
"""Set the state of a newly created object.
We capture it to replace our place-holder objects,
NDArrayWrapper, by the array we are interested in. We
replace them directly in the stack of pickler.
"""
Unpickler.load_build(self)
if isinstance(self.stack[-1], NDArrayWrapper):
if<mask>:
raise ImportError("Trying to unpickle an ndarray, but numpy didn't import correctly")
nd_array_wrapper = self.stack.pop()
array = nd_array_wrapper.read(self)
self.stack.append(array) | False | self.np is None | not np.p(self.stack[-1]) | 0.6498368978500366 |
309 | 307 | @property
def flashlight_enabled(self):
if<mask>:
return bool(self._entity_data.get('flashlight_enabled'))
return bool(0) | True | 'flashlight_enabled' in self._entity_data | 'flashlight_enabled' in self._entity_data | 0.6500662565231323 |
310 | 308 | def _compileRegex(self) -> None:
for field_name in ('_delete_word_pattern', '_ignore_word_with_pattern', '_alternates_from_word_pattern', '_alternates_from_defi_pattern', '_rule_v1_defi_pattern', '_rule_v5_defi_pattern', '_rule_vs_defi_pattern', '_rule_vk_defi_pattern', '_rule_adji_defi_pattern'):
value = getattr(self, field_name)
if<mask>:
setattr(self, field_name, re.compile(value)) | False | value and isinstance(value, str) | value is not None | 0.6457546949386597 |
311 | 309 | def update(self, result: TaskResult):
if<mask>:
self._progress.update(self._overall_progress_task, advance=1, total=total + len(result.subtasks)) | False | (total := self._progress.tasks[0].total) is not None | self._progress is not None | 0.6477746367454529 |
312 | 310 | def reset_device(self) -> bool:
for _ in range(5):
self.send(b'\xb5b\x06\x04\x04\x00\xff\xff\x00\x00\x0c]')
time.sleep(1)
init_baudrate(self)
self.send_with_ack(b'\xb5b\x06\t\r\x00\x1f\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x17q\xd7')
self.send_with_ack(b'\xb5b\t\x14\x04\x00\x01\x00\x00\x00"\xf0')
self.send(b'\xb5b\t\x14\x00\x00\x1d`')
status = self.wait_for_backup_restore_status()
if<mask>:
return True
return False | False | status == 1 or status == 3 | status == 0 | 0.6511972546577454 |
313 | 311 | def load(self, track_file, liveshow, track_type):
self.track_file = track_file
self.liveshow = liveshow
if<mask>:
self.track_file = '/home/pi/pp_home/media/river.jpg'
self.img = Image.open(self.track_file)
else:
self.img = Image.open(self.track_file)
self.used_file = '/tmp/image_time_ny.jpg'
self.overlay_text()
return ('normal', '', self.used_file) | False | self.liveshow == True | track_type == 'image' | 0.6509339809417725 |
314 | 312 | def validate(self, data: Union[numbers.Real, str, bool, int, float, list, dict]) -> base.ValidationResult:
if<mask>:
data = data.compute()
passes = isinstance(data, self.datatype)
return base.ValidationResult(passes=passes, message=f"Requires data type: {self.datatype}. Got data type: {type(data)}. This {('is' if passes else 'is not')} a match.", diagnostics={'required_data_type': self.datatype, 'actual_data_type': type(data)}) | False | hasattr(data, 'dask') | isinstance(data, numbers.Real) | 0.6471971869468689 |
315 | 313 | def get_email(self, obj):
if<mask>:
if 'email' in obj.extra_data:
return obj.extra_data.get('email')
return obj.extra_data.get('userPrincipalName') | True | obj.extra_data | obj.extra_data | 0.6541895866394043 |
316 | 314 | def get_email(self, obj):
if obj.extra_data:
if<mask>:
return obj.extra_data.get('email')
return obj.extra_data.get('userPrincipalName') | False | 'email' in obj.extra_data | obj.extra_data.get('email') | 0.6492137908935547 |
317 | 315 | def __init__(self, name, num_clients=0):
self.num_clients = num_clients
if<mask>:
raise NotImplementedError('Clients not implemented on setup time, use new_tenant_client')
else:
DockerComposeNamespace.__init__(self, name, self.ENTERPRISE_FILES + self.MENDER_GATEWAY_FILES + self.MENDER_GATEWAY_CLIENT_FILES) | True | self.num_clients > 0 | self.num_clients > 0 | 0.6552126407623291 |
318 | 316 | def get_edge(self, x):
"""See parent class."""
for edge, start_pos in reversed(self.total_edgestarts):
if<mask>:
return (edge, x - start_pos) | False | x >= start_pos | self._is_edge(edge, x) | 0.6512272357940674 |
319 | 317 | def is_valid(arch):
"""Return if the arch in search space.
:param arch: current arch code
:type arch: str
:return: if the model is valid (bool)
"""
stages = arch.split('-')
length = 0
for stage in stages:
if<mask>:
return False
length += len(stage)
return min_block <= length <= max_block | False | len(stage) == 0 | not stage.startswith('-') | 0.649925947189331 |
320 | 318 | def _sort_links(self, links):
"""
Returns elements of links in order, non-egg links first, egg links
second, while eliminating duplicates
"""
eggs, no_eggs = ([], [])
seen = set()
for link in links:
if<mask>:
seen.add(link)
if link.egg_fragment:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs | True | link not in seen | link not in seen | 0.6594653129577637 |
321 | 319 | def _sort_links(self, links):
"""
Returns elements of links in order, non-egg links first, egg links
second, while eliminating duplicates
"""
eggs, no_eggs = ([], [])
seen = set()
for link in links:
if link not in seen:
seen.add(link)
if<mask>:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs | True | link.egg_fragment | link.egg_fragment | 0.6484694480895996 |
322 | 320 | def close(self):
"""Close related fileobj, pass return value"""
if<mask>:
return None
self.closed = 1
if self.fileobj:
return self.fileobj.close()
if self.callback:
self.callback(self.base_rp)
self.base_rp.touch() | True | self.closed | self.closed | 0.6634231209754944 |
323 | 321 | def close(self):
"""Close related fileobj, pass return value"""
if self.closed:
return None
self.closed = 1
if<mask>:
return self.fileobj.close()
if self.callback:
self.callback(self.base_rp)
self.base_rp.touch() | True | self.fileobj | self.fileobj | 0.657850980758667 |
324 | 322 | def close(self):
"""Close related fileobj, pass return value"""
if self.closed:
return None
self.closed = 1
if self.fileobj:
return self.fileobj.close()
if<mask>:
self.callback(self.base_rp)
self.base_rp.touch() | True | self.callback | self.callback | 0.6590456366539001 |
325 | 323 | def ancestors(self, op_name, deps):
"""Get all nodes upstream of the current node."""
explored = set()
queue = deque([op_name])
while len(queue) != 0:
current = queue.popleft()
for parent in deps[current]:
if<mask>:
continue
explored.add(parent)
queue.append(parent)
return explored | True | parent in explored | parent in explored | 0.650050699710846 |
326 | 324 | def close(self) -> None:
if<mask>:
self.run_txt_data['stop_time'] = datetime.datetime.now().isoformat(sep=' ')
with open(os.path.join(self.submit_config.run_dir, 'run.txt'), 'w') as f:
pprint.pprint(self.run_txt_data, stream=f, indent=4, width=200, compact=False)
self.has_closed = True
global _run_context
if _run_context is self:
_run_context = None | False | not self.has_closed | self.has_closed | 0.6480180621147156 |
327 | 325 | def close(self) -> None:
if not self.has_closed:
self.run_txt_data['stop_time'] = datetime.datetime.now().isoformat(sep=' ')
with open(os.path.join(self.submit_config.run_dir, 'run.txt'), 'w') as f:
pprint.pprint(self.run_txt_data, stream=f, indent=4, width=200, compact=False)
self.has_closed = True
global _run_context
if<mask>:
_run_context = None | False | _run_context is self | _run_context.get('cancel_on_close', False) | 0.6482930183410645 |
328 | 326 | def __getitem__(self, idx):
if<mask>:
return self.__class__(self.__baseTag, *getitem(self.__superTags, idx))
return self.__superTags[idx] | False | isinstance(idx, slice) | idx < len(self.__superTags) | 0.6505007743835449 |
329 | 327 | def run():
_, e = p.communicate()
if<mask>:
err.append(e) | True | e | e | 0.6757959127426147 |
330 | 328 | def endElement(self, name, value, connection):
if<mask>:
self.name = value
elif name == 'OwnerAlias':
self.owner_alias = value | False | name == 'GroupName' | name == 'Name' | 0.6580395698547363 |
331 | 329 | def endElement(self, name, value, connection):
if name == 'GroupName':
self.name = value
elif<mask>:
self.owner_alias = value | False | name == 'OwnerAlias' | name == 'ownerAlias' | 0.6533467173576355 |
332 | 330 | def factory(*args_, **kwargs_):
if<mask>:
return docSect2Type.subclass(*args_, **kwargs_)
else:
return docSect2Type(*args_, **kwargs_) | True | docSect2Type.subclass | docSect2Type.subclass | 0.6573271751403809 |
333 | 331 | def save_quantized_model(self, model, path, input_spec=None, **kwargs):
"""
Save the quantized inference model.
Args:
model (Layer): The model to be saved.
path (str): The path prefix to save model. The format is
``dirname/file_prefix`` or ``file_prefix``.
input_spec (list[InputSpec|Tensor], optional): Describes the input
of the saved model's forward method, which can be described by
InputSpec or example Tensor. If None, all input variables of
the original Layer's forward method would be the inputs of
the saved model. Default: None.
kwargs (dict, optional): Other save configuration options for compatibility.
Returns:
None
"""
assert isinstance(model, paddle.nn.Layer), 'The model must be the instance of paddle.nn.Layer.'
training = model.training
if<mask>:
model.eval()
self.ptq.save_quantized_model(model=model, path=path, input_spec=input_spec, **kwargs)
if training:
model.train() | False | training | eval | 0.6665236949920654 |
334 | 332 | def save_quantized_model(self, model, path, input_spec=None, **kwargs):
"""
Save the quantized inference model.
Args:
model (Layer): The model to be saved.
path (str): The path prefix to save model. The format is
``dirname/file_prefix`` or ``file_prefix``.
input_spec (list[InputSpec|Tensor], optional): Describes the input
of the saved model's forward method, which can be described by
InputSpec or example Tensor. If None, all input variables of
the original Layer's forward method would be the inputs of
the saved model. Default: None.
kwargs (dict, optional): Other save configuration options for compatibility.
Returns:
None
"""
assert isinstance(model, paddle.nn.Layer), 'The model must be the instance of paddle.nn.Layer.'
training = model.training
if training:
model.eval()
self.ptq.save_quantized_model(model=model, path=path, input_spec=input_spec, **kwargs)
if<mask>:
model.train() | True | training | training | 0.6648621559143066 |
335 | 333 | @property
def sides2(self):
if<mask>:
return parse_int_vector(self._entity_data.get('sides2'))
return parse_int_vector('None') | True | 'sides2' in self._entity_data | 'sides2' in self._entity_data | 0.6507736444473267 |
336 | 334 | def inner(*args, **kwargs):
out = f(*args, **kwargs)
if<mask>:
self._enqueue(next_name, out)
return out | False | out is not None and next | next_name is not None | 0.6514477729797363 |
337 | 335 | def _trim_dests(self, pdf, dests, pages):
"""
Removes any named destinations that are not a part of the specified
page set.
"""
new_dests = []
prev_header_added = True
for k, o in list(dests.items()):
for j in range(*pages):
if<mask>:
o[NameObject('/Page')] = o['/Page'].getObject()
assert str_(k) == str_(o['/Title'])
new_dests.append(o)
break
return new_dests | False | pdf.getPage(j).getObject() == o['/Page'].getObject() | j == prev_header_added | 0.6477280259132385 |
338 | 336 | def checkip(ipstring):
try:
ipsplit = ipstring.split('.')
if<mask>:
return False
for oct in ipsplit:
if int(oct) > 255 or int(oct) < 0:
return False
except:
return False
return True | False | len(ipsplit) != 4 | len(ipsplit) != 3 | 0.6500380039215088 |
339 | 337 | def checkip(ipstring):
try:
ipsplit = ipstring.split('.')
if len(ipsplit) != 4:
return False
for oct in ipsplit:
if<mask>:
return False
except:
return False
return True | False | int(oct) > 255 or int(oct) < 0 | ord(oct) != ord(255) | 0.6431970596313477 |
340 | 338 | def _find_exe_version(cmd):
"""Find the version of an executable by running `cmd` in the shell.
If the command is not found, or the output does not match
`RE_VERSION`, returns None.
"""
executable = cmd.split()[0]
if<mask>:
return None
out = Popen(cmd, shell=True, stdout=PIPE).stdout
try:
out_string = out.read()
finally:
out.close()
result = RE_VERSION.search(out_string)
if result is None:
return None
return LooseVersion(result.group(1).decode()) | False | find_executable(executable) is None | not os.path.exists(executable) | 0.652163028717041 |
341 | 339 | def _find_exe_version(cmd):
"""Find the version of an executable by running `cmd` in the shell.
If the command is not found, or the output does not match
`RE_VERSION`, returns None.
"""
executable = cmd.split()[0]
if find_executable(executable) is None:
return None
out = Popen(cmd, shell=True, stdout=PIPE).stdout
try:
out_string = out.read()
finally:
out.close()
result = RE_VERSION.search(out_string)
if<mask>:
return None
return LooseVersion(result.group(1).decode()) | True | result is None | result is None | 0.6599942445755005 |
342 | 340 | def _validate_value(self, key, value, scheme=None):
if<mask>:
pattern, exclusions = self.SYNTAX_VALIDATORS[key]
if (scheme or self.scheme) not in exclusions:
m = pattern.match(value)
if not m:
raise MetadataInvalidError("'%s' is an invalid value for the '%s' property" % (value, key)) | True | key in self.SYNTAX_VALIDATORS | key in self.SYNTAX_VALIDATORS | 0.6501220464706421 |
343 | 341 | def _validate_value(self, key, value, scheme=None):
if key in self.SYNTAX_VALIDATORS:
pattern, exclusions = self.SYNTAX_VALIDATORS[key]
if<mask>:
m = pattern.match(value)
if not m:
raise MetadataInvalidError("'%s' is an invalid value for the '%s' property" % (value, key)) | True | (scheme or self.scheme) not in exclusions | (scheme or self.scheme) not in exclusions | 0.6453512907028198 |
344 | 342 | def _validate_value(self, key, value, scheme=None):
if key in self.SYNTAX_VALIDATORS:
pattern, exclusions = self.SYNTAX_VALIDATORS[key]
if (scheme or self.scheme) not in exclusions:
m = pattern.match(value)
if<mask>:
raise MetadataInvalidError("'%s' is an invalid value for the '%s' property" % (value, key)) | True | not m | not m | 0.670219361782074 |
345 | 343 | def getAttr(self, vFlip, hFlip):
byte = 40
if<mask>:
byte |= 128
if hFlip:
byte |= 64
return byte | True | vFlip | vFlip | 0.6632107496261597 |
346 | 344 | def getAttr(self, vFlip, hFlip):
byte = 40
if vFlip:
byte |= 128
if<mask>:
byte |= 64
return byte | True | hFlip | hFlip | 0.6686927080154419 |
347 | 345 | def __index_surname(surn_list):
"""
All non pa/matronymic surnames are used in indexing.
pa/matronymic surnames are not, as they change with every generation!
returns a byte string
"""
from ..lib import NameOriginType
if<mask>:
surn = ''.join([x[0] for x in surn_list if not x[3][0] in [NameOriginType.PATRONYMIC, NameOriginType.MATRONYMIC]])
else:
surn = ''
return surn | True | surn_list | surn_list | 0.6651697158813477 |
348 | 346 | def set_gban_reason(user_id, reason):
with _GBAN_LOCK:
user = SESSION.query(GBan).get(str(user_id))
if<mask>:
return ''
prev_reason = user.reason
user.reason = reason
SESSION.merge(user)
SESSION.commit()
return prev_reason | True | not user | not user | 0.6579863429069519 |
349 | 347 | def is_auto_report_enable(group_id: str):
if<mask>:
return False
if 'report_mode' not in group_config[group_id]:
return False
if group_config[group_id]['report_mode'] == 'yobot_standalone':
return True
elif group_config[group_id]['report_mode'] == 'yobot_embedded':
return True
return False | True | group_id not in group_config | group_id not in group_config | 0.650682270526886 |
350 | 348 | def is_auto_report_enable(group_id: str):
if group_id not in group_config:
return False
if<mask>:
return False
if group_config[group_id]['report_mode'] == 'yobot_standalone':
return True
elif group_config[group_id]['report_mode'] == 'yobot_embedded':
return True
return False | False | 'report_mode' not in group_config[group_id] | group_config[group_id]['auto_report_mode'] == 'auto' | 0.6453258991241455 |
351 | 349 | def is_auto_report_enable(group_id: str):
if group_id not in group_config:
return False
if 'report_mode' not in group_config[group_id]:
return False
if<mask>:
return True
elif group_config[group_id]['report_mode'] == 'yobot_embedded':
return True
return False | False | group_config[group_id]['report_mode'] == 'yobot_standalone' | group_config[group_id]['report_mode'] == 'auto' | 0.6436742544174194 |
352 | 350 | def is_auto_report_enable(group_id: str):
if group_id not in group_config:
return False
if 'report_mode' not in group_config[group_id]:
return False
if group_config[group_id]['report_mode'] == 'yobot_standalone':
return True
elif<mask>:
return True
return False | False | group_config[group_id]['report_mode'] == 'yobot_embedded' | group_config[group_id]['report_mode'] == 'auto' | 0.6437225341796875 |
353 | 351 | def decorator(func):
@functools.wraps(func)
def wrapper(cls, records, *args, **kwargs):
if<mask>:
result = func(cls, records, *args, **kwargs)
cls.write(records, {f: None for f in fields})
if when == 'before':
result = func(cls, records, *args, **kwargs)
return result
return wrapper | True | when == 'after' | when == 'after' | 0.6542140245437622 |
354 | 352 | def decorator(func):
@functools.wraps(func)
def wrapper(cls, records, *args, **kwargs):
if when == 'after':
result = func(cls, records, *args, **kwargs)
cls.write(records, {f: None for f in fields})
if<mask>:
result = func(cls, records, *args, **kwargs)
return result
return wrapper | True | when == 'before' | when == 'before' | 0.6536306142807007 |
355 | 353 | def reset(self):
self.step = 0
self.epoch = 0
for k, v in self.state_dict.items():
if<mask>:
self.state_dict[k] = v
else:
self.state_dict[k] = paddle.zeros_like(v) | False | k in self.ema_black_list | isinstance(v, tuple) | 0.6479570269584656 |
356 | 354 | def __delitem__(self, key):
if<mask>:
del self.attributes[key]
elif isinstance(key, int):
del self.children[key]
elif isinstance(key, slice):
assert key.step in (None, 1), 'cannot handle slice with stride'
del self.children[key.start:key.stop]
else:
raise TypeError('element index must be an integer, a simple slice, or an attribute name string') | False | isinstance(key, str) | isinstance(key, attribute) | 0.6518378257751465 |
357 | 355 | def __delitem__(self, key):
if isinstance(key, str):
del self.attributes[key]
elif<mask>:
del self.children[key]
elif isinstance(key, slice):
assert key.step in (None, 1), 'cannot handle slice with stride'
del self.children[key.start:key.stop]
else:
raise TypeError('element index must be an integer, a simple slice, or an attribute name string') | False | isinstance(key, int) | isinstance(key, integer_types) | 0.6536860466003418 |
358 | 356 | def __delitem__(self, key):
if isinstance(key, str):
del self.attributes[key]
elif isinstance(key, int):
del self.children[key]
elif<mask>:
assert key.step in (None, 1), 'cannot handle slice with stride'
del self.children[key.start:key.stop]
else:
raise TypeError('element index must be an integer, a simple slice, or an attribute name string') | True | isinstance(key, slice) | isinstance(key, slice) | 0.6491026878356934 |
359 | 357 | def paint(self):
self.GLViewWidget.qglColor(self.color)
if<mask>:
if isinstance(self.pos, (list, tuple, np.ndarray)):
for p, text in zip(self.pos, self.text):
self.GLViewWidget.renderText(*p, text, self.font)
else:
self.GLViewWidget.renderText(*self.pos, self.text, self.font) | True | self.pos is not None and self.text is not None | self.pos is not None and self.text is not None | 0.6460979580879211 |
360 | 358 | def paint(self):
self.GLViewWidget.qglColor(self.color)
if self.pos is not None and self.text is not None:
if<mask>:
for p, text in zip(self.pos, self.text):
self.GLViewWidget.renderText(*p, text, self.font)
else:
self.GLViewWidget.renderText(*self.pos, self.text, self.font) | True | isinstance(self.pos, (list, tuple, np.ndarray)) | isinstance(self.pos, (list, tuple, np.ndarray)) | 0.6437488794326782 |
361 | 359 | def get_name(self, obj):
if<mask>:
return obj.cat_en.name
elif self.context['language'] == 'ja':
return obj.cat_ja.name
else:
return obj.cat_en.name | True | 'language' not in self.context | 'language' not in self.context | 0.6512870788574219 |
362 | 360 | def get_name(self, obj):
if 'language' not in self.context:
return obj.cat_en.name
elif<mask>:
return obj.cat_ja.name
else:
return obj.cat_en.name | True | self.context['language'] == 'ja' | self.context['language'] == 'ja' | 0.6506295204162598 |
363 | 361 | def __call__(self, decorated: typing.Callable) -> typing.Callable:
if<mask>:
if not getattr(self, 'CAN_BE_USED_ON_CLASSES', True):
raise TypeError(f'{self.__class__.__name__} cannot be used to decorate a class')
return self.decorate_class(decorated)
if callable(decorated):
return self.decorate_callable(decorated)
raise TypeError(f'Cannot decorate object {decorated}') | True | isinstance(decorated, type) | isinstance(decorated, type) | 0.6492458581924438 |
364 | 362 | def __call__(self, decorated: typing.Callable) -> typing.Callable:
if isinstance(decorated, type):
if not getattr(self, 'CAN_BE_USED_ON_CLASSES', True):
raise TypeError(f'{self.__class__.__name__} cannot be used to decorate a class')
return self.decorate_class(decorated)
if<mask>:
return self.decorate_callable(decorated)
raise TypeError(f'Cannot decorate object {decorated}') | True | callable(decorated) | callable(decorated) | 0.6536573171615601 |
365 | 363 | def __call__(self, decorated: typing.Callable) -> typing.Callable:
if isinstance(decorated, type):
if<mask>:
raise TypeError(f'{self.__class__.__name__} cannot be used to decorate a class')
return self.decorate_class(decorated)
if callable(decorated):
return self.decorate_callable(decorated)
raise TypeError(f'Cannot decorate object {decorated}') | False | not getattr(self, 'CAN_BE_USED_ON_CLASSES', True) | not self.decorate_class | 0.6462966799736023 |
366 | 364 | def unwrap_term_buckets(self, timestamp, term_buckets):
for term_data in term_buckets:
if<mask>:
self.unwrap_interval_buckets(timestamp, term_data['key'], term_data['interval_aggs']['buckets'])
else:
self.check_matches(timestamp, term_data['key'], term_data) | True | 'interval_aggs' in term_data | 'interval_aggs' in term_data | 0.6469130516052246 |
367 | 365 | def save_binary(self, path: str):
"""Saves the loaded model to a binary.mjb file."""
if<mask>:
raise ValueError('[MujocoSimRobot] Path already exists: {}'.format(path))
if not path.endswith('.mjb'):
path = path + '.mjb'
if self._use_dm_backend:
self.model.save_binary(path)
else:
with open(path, 'wb') as f:
f.write(self.model.get_mjb()) | True | os.path.exists(path) | os.path.exists(path) | 0.6452264189720154 |
368 | 366 | def save_binary(self, path: str):
"""Saves the loaded model to a binary.mjb file."""
if os.path.exists(path):
raise ValueError('[MujocoSimRobot] Path already exists: {}'.format(path))
if<mask>:
path = path + '.mjb'
if self._use_dm_backend:
self.model.save_binary(path)
else:
with open(path, 'wb') as f:
f.write(self.model.get_mjb()) | True | not path.endswith('.mjb') | not path.endswith('.mjb') | 0.6442418098449707 |
369 | 367 | def save_binary(self, path: str):
"""Saves the loaded model to a binary.mjb file."""
if os.path.exists(path):
raise ValueError('[MujocoSimRobot] Path already exists: {}'.format(path))
if not path.endswith('.mjb'):
path = path + '.mjb'
if<mask>:
self.model.save_binary(path)
else:
with open(path, 'wb') as f:
f.write(self.model.get_mjb()) | False | self._use_dm_backend | os.path.isfile(path) | 0.6447663903236389 |
370 | 368 | def safe_shutdown(self):
"""
Shutdown TLS and socket. Ignore any exceptions.
"""
try:
if<mask>:
self.shutdown()
if self.sock:
self.sock.shutdown(2)
except (OSError, _nassl.OpenSSLError, AttributeError):
pass
finally:
if self.sock:
self.sock.close()
self.sock = None | False | self.get_underlying_socket() | not self.sock | 0.6479790210723877 |
371 | 369 | def safe_shutdown(self):
"""
Shutdown TLS and socket. Ignore any exceptions.
"""
try:
if self.get_underlying_socket():
self.shutdown()
if<mask>:
self.sock.shutdown(2)
except (OSError, _nassl.OpenSSLError, AttributeError):
pass
finally:
if self.sock:
self.sock.close()
self.sock = None | False | self.sock | self.sock and (not self.sock.shutdown) | 0.6549347639083862 |
372 | 370 | def safe_shutdown(self):
"""
Shutdown TLS and socket. Ignore any exceptions.
"""
try:
if self.get_underlying_socket():
self.shutdown()
if self.sock:
self.sock.shutdown(2)
except (OSError, _nassl.OpenSSLError, AttributeError):
pass
finally:
if<mask>:
self.sock.close()
self.sock = None | True | self.sock | self.sock | 0.6537888646125793 |
373 | 371 | def _get_client(self, name, path):
"""Return client conf and refresh it if necessary
:rtype: Config
"""
if<mask>:
self._clients_conf.clear()
self._load_conf_clients()
if name not in self._clients_conf:
return self._new_client_conf(name, path)
if self._clients_conf[name].changed:
self._clients_conf[name].parse()
return self._clients_conf[name] | False | self._clientconfdir_changed() and name not in self._clients_conf | not hasattr(self, '_clients_conf') | 0.6536871194839478 |
374 | 372 | def _get_client(self, name, path):
"""Return client conf and refresh it if necessary
:rtype: Config
"""
if self._clientconfdir_changed() and name not in self._clients_conf:
self._clients_conf.clear()
self._load_conf_clients()
if<mask>:
return self._new_client_conf(name, path)
if self._clients_conf[name].changed:
self._clients_conf[name].parse()
return self._clients_conf[name] | True | name not in self._clients_conf | name not in self._clients_conf | 0.655162513256073 |
375 | 373 | def _get_client(self, name, path):
"""Return client conf and refresh it if necessary
:rtype: Config
"""
if self._clientconfdir_changed() and name not in self._clients_conf:
self._clients_conf.clear()
self._load_conf_clients()
if name not in self._clients_conf:
return self._new_client_conf(name, path)
if<mask>:
self._clients_conf[name].parse()
return self._clients_conf[name] | False | self._clients_conf[name].changed | self._clients_conf[name].is_initialized | 0.6496726870536804 |
376 | 374 | def function_argspec(self, func, **kw):
if<mask>:
return self.process(func.clause_expr)
else:
return '' | False | func.clauses is not None and len(func.clauses) | func.clause_expr is not None | 0.6485062837600708 |
377 | 375 | def scores(self):
value = self.value()
if<mask>:
return value
else:
assert type(value) in {list, tuple}
return list(zip(self.names(), self.value())) | False | type(value) == dict | isinstance(value, scores.Mapping) | 0.648817241191864 |
378 | 376 | def getMessageCount(jsonData):
if<mask>:
return 0
else:
return jsonData['count'] | False | 'count' not in jsonData | jsonData['count'] == 0 | 0.6522445678710938 |
379 | 377 | def _update_ground_truth_statistics(self, groundtruth_class_labels, groundtruth_is_difficult_list, groundtruth_is_group_of_list):
"""Update grouth truth statitistics.
1. Difficult boxes are ignored when counting the number of ground truth
instances as done in Pascal VOC devkit.
2. Difficult boxes are treated as normal boxes when computing CorLoc related
statitistics.
Args:
groundtruth_class_labels: An integer numpy array of length M,
representing M class labels of object instances in ground truth
groundtruth_is_difficult_list: A boolean numpy array of length M denoting
whether a ground truth box is a difficult instance or not
groundtruth_is_group_of_list: A boolean numpy array of length M denoting
whether a ground truth box is a group-of box or not
"""
for class_index in range(self.num_class):
num_gt_instances = np.sum(groundtruth_class_labels[~groundtruth_is_difficult_list & ~groundtruth_is_group_of_list] == class_index)
num_groupof_gt_instances = self.group_of_weight * np.sum(groundtruth_class_labels[groundtruth_is_group_of_list] == class_index)
self.num_gt_instances_per_class[class_index] += num_gt_instances + num_groupof_gt_instances
if<mask>:
self.num_gt_imgs_per_class[class_index] += 1 | True | np.any(groundtruth_class_labels == class_index) | np.any(groundtruth_class_labels == class_index) | 0.6460565328598022 |
380 | 378 | def cached_func(*args, **kwargs):
if<mask>:
return cfunc(*args, **kwargs)
else:
return func(*args, **kwargs) | False | context.Cache.current_context | cfunc is not None | 0.6489722728729248 |
381 | 379 | def close_container(self, container, event):
""" Handle a close request for a QDockContainer.
This method is called by the framework at the appropriate times
and should not be called directly by user code.
Parameters
----------
container : QDockContainer
The dock container to close.
event : QCloseEvent
The close event passed to the event handler.
"""
item = container.dockItem()
if<mask>:
if not container.isWindow():
container.unplug()
self._free_container(container)
else:
event.ignore() | False | item is None or item.close() | item is not None | 0.6482319235801697 |
382 | 380 | def close_container(self, container, event):
""" Handle a close request for a QDockContainer.
This method is called by the framework at the appropriate times
and should not be called directly by user code.
Parameters
----------
container : QDockContainer
The dock container to close.
event : QCloseEvent
The close event passed to the event handler.
"""
item = container.dockItem()
if item is None or item.close():
if<mask>:
container.unplug()
self._free_container(container)
else:
event.ignore() | False | not container.isWindow() | container.plug | 0.6514645218849182 |
383 | 381 | def get_encoding_from_headers(headers):
"""Returns encodings from given HTTP Header Dict.
:param headers: dictionary to extract encoding from.
"""
content_type = headers.get('content-type')
if<mask>:
return None
content_type, params = cgi.parse_header(content_type)
if 'charset' in params:
return params['charset'].strip('\'"')
if 'text' in content_type:
return 'ISO-8859-1' | True | not content_type | not content_type | 0.6526246070861816 |
384 | 382 | def get_encoding_from_headers(headers):
"""Returns encodings from given HTTP Header Dict.
:param headers: dictionary to extract encoding from.
"""
content_type = headers.get('content-type')
if not content_type:
return None
content_type, params = cgi.parse_header(content_type)
if<mask>:
return params['charset'].strip('\'"')
if 'text' in content_type:
return 'ISO-8859-1' | True | 'charset' in params | 'charset' in params | 0.6576825976371765 |
385 | 383 | def get_encoding_from_headers(headers):
"""Returns encodings from given HTTP Header Dict.
:param headers: dictionary to extract encoding from.
"""
content_type = headers.get('content-type')
if not content_type:
return None
content_type, params = cgi.parse_header(content_type)
if 'charset' in params:
return params['charset'].strip('\'"')
if<mask>:
return 'ISO-8859-1' | True | 'text' in content_type | 'text' in content_type | 0.6484091281890869 |
386 | 384 | def _overlay_text(image_file_name, x_offset_from_left_px, y_offset_from_top_px, text_string, font_size, use_north_gravity):
"""Overlays text on image.
:param image_file_name: Path to image file.
:param x_offset_from_left_px: Left-relative x-coordinate (pixels).
:param y_offset_from_top_px: Top-relative y-coordinate (pixels).
:param text_string: String to overlay.
:param font_size: Font size.
:param use_north_gravity: Boolean flag.
:raises: ValueError: if ImageMagick command (which is ultimately a Unix
command) fails.
"""
command_string = '"{0:s}" "{1:s}"'.format(CONVERT_EXE_NAME, image_file_name)
if<mask>:
command_string += ' -gravity North'
command_string += ' -pointsize {0:d} -font "{1:s}" -fill "rgb(0, 0, 0)" -annotate {2:+d}{3:+d} "{4:s}" "{5:s}"'.format(font_size, TITLE_FONT_NAME, x_offset_from_left_px, y_offset_from_top_px, text_string, image_file_name)
exit_code = os.system(command_string)
if exit_code == 0:
return
raise ValueError(imagemagick_utils.ERROR_STRING) | True | use_north_gravity | use_north_gravity | 0.6455235481262207 |
387 | 385 | def _overlay_text(image_file_name, x_offset_from_left_px, y_offset_from_top_px, text_string, font_size, use_north_gravity):
"""Overlays text on image.
:param image_file_name: Path to image file.
:param x_offset_from_left_px: Left-relative x-coordinate (pixels).
:param y_offset_from_top_px: Top-relative y-coordinate (pixels).
:param text_string: String to overlay.
:param font_size: Font size.
:param use_north_gravity: Boolean flag.
:raises: ValueError: if ImageMagick command (which is ultimately a Unix
command) fails.
"""
command_string = '"{0:s}" "{1:s}"'.format(CONVERT_EXE_NAME, image_file_name)
if use_north_gravity:
command_string += ' -gravity North'
command_string += ' -pointsize {0:d} -font "{1:s}" -fill "rgb(0, 0, 0)" -annotate {2:+d}{3:+d} "{4:s}" "{5:s}"'.format(font_size, TITLE_FONT_NAME, x_offset_from_left_px, y_offset_from_top_px, text_string, image_file_name)
exit_code = os.system(command_string)
if<mask>:
return
raise ValueError(imagemagick_utils.ERROR_STRING) | True | exit_code == 0 | exit_code == 0 | 0.652776837348938 |
388 | 386 | def bad_words_ids(self, input_ids: torch.Tensor, special_ids: List[int]=None) -> List[List[int]]:
"""
Args:
input_ids: Tensor of shape (num_sentences, sentence_length), containing token ids (int).
special_ids: List[int] containing special ids which will not be forbidden.
Returns: List[List[int]]
Returns a list of list of integers, corresponding to sequences of ids.
"""
bad_words_ids = list()
for row in input_ids.tolist():
if<mask>:
row = [item for item in row if item not in special_ids]
for item_ix, item in enumerate(row):
if random.random() < self.drop_chance:
bad_words_ids.append(item)
bad_words_ids = [[item] for item in bad_words_ids]
return bad_words_ids | True | special_ids | special_ids | 0.6551713943481445 |
389 | 387 | def bad_words_ids(self, input_ids: torch.Tensor, special_ids: List[int]=None) -> List[List[int]]:
"""
Args:
input_ids: Tensor of shape (num_sentences, sentence_length), containing token ids (int).
special_ids: List[int] containing special ids which will not be forbidden.
Returns: List[List[int]]
Returns a list of list of integers, corresponding to sequences of ids.
"""
bad_words_ids = list()
for row in input_ids.tolist():
if special_ids:
row = [item for item in row if item not in special_ids]
for item_ix, item in enumerate(row):
if<mask>:
bad_words_ids.append(item)
bad_words_ids = [[item] for item in bad_words_ids]
return bad_words_ids | False | random.random() < self.drop_chance | item_ix % 2 != 0 | 0.6444206237792969 |
390 | 388 | def load_image(test_data, shape):
"""Load calibration images."""
test_np = np.fromfile(test_data, dtype=np.float32)
test_shape = (-1,) + tuple(shape[1:])
test_np = np.reshape(test_np, test_shape)
calib_num = 32
if<mask>:
return test_np[:calib_num]
else:
return test_np | False | test_np.shape[0] > calib_num | calib_num < 32 | 0.6428342461585999 |
391 | 389 | def deserialize(self, value):
"""The method to deserialize the DynamoDB data types.
:param value: A DynamoDB value to be deserialized to a pythonic value.
Here are the various conversions:
DynamoDB Python
-------- ------
{'NULL': True} None
{'BOOL': True/False} True/False
{'N': str(value)} Decimal(str(value))
{'S': string} string
{'B': bytes} Binary(bytes)
{'NS': [str(value)]} set([Decimal(str(value))])
{'SS': [string]} set([string])
{'BS': [bytes]} set([bytes])
{'L': list} list
{'M': dict} dict
:returns: The pythonic value of the DynamoDB type.
"""
if<mask>:
raise TypeError('Value must be a nonempty dictionary whose key is a valid dynamodb type.')
dynamodb_type = list(value.keys())[0]
try:
deserializer = getattr(self, '_deserialize_%s' % dynamodb_type.lower())
except AttributeError:
raise TypeError('Dynamodb type %s is not supported' % dynamodb_type)
return deserializer(value[dynamodb_type]) | True | not value | not value | 0.6604611277580261 |
392 | 390 | def cat_core(list_of_columns: List, sep: str):
"""
Auxiliary function for :meth:`str.cat`
Parameters
----------
list_of_columns : list of numpy arrays
List of arrays to be concatenated with sep;
these arrays may not contain NaNs!
sep : string
The separator string for concatenating the columns.
Returns
-------
nd.array
The concatenation of list_of_columns with sep.
"""
if<mask>:
arr_of_cols = np.asarray(list_of_columns, dtype=object)
return np.sum(arr_of_cols, axis=0)
list_with_sep = [sep] * (2 * len(list_of_columns) - 1)
list_with_sep[::2] = list_of_columns
arr_with_sep = np.asarray(list_with_sep, dtype=object)
return np.sum(arr_with_sep, axis=0) | True | sep == '' | sep == '' | 0.6636082530021667 |
393 | 391 | def _match_ne(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if<mask>:
result = version != constraint
else:
result = not _match_prefix(version, constraint)
return result | True | not prefix | not prefix | 0.6541920900344849 |
394 | 392 | def _snap_exec(commands):
"""
Execute snap commands.
:param commands: List commands
:return: Integer exit code
"""
assert isinstance(commands, list)
retry_count = 0
return_code = None
while return_code is None or return_code == SNAP_NO_LOCK:
try:
return_code = subprocess.check_call(['snap'] + commands, env=os.environ)
except subprocess.CalledProcessError as e:
retry_count += +1
if<mask>:
raise CouldNotAcquireLockException('Could not acquire lock after {} attempts'.format(SNAP_NO_LOCK_RETRY_COUNT))
return_code = e.returncode
log('Snap failed to acquire lock, trying again in {} seconds.'.format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN')
sleep(SNAP_NO_LOCK_RETRY_DELAY)
return return_code | False | retry_count > SNAP_NO_LOCK_RETRY_COUNT | retry_count > 3 | 0.6437193155288696 |
395 | 393 | def selected_alpn_protocol(self):
proto = self._conn.get_alpn_proto_negotiated()
if<mask>:
proto = proto.decode('ascii')
return proto if proto else None | True | isinstance(proto, bytes) | isinstance(proto, bytes) | 0.6463732719421387 |
396 | 394 | def _truncate_text(self, text, max_length):
if<mask>:
if text.strip('$').isdigit():
text = int(text.strip('$'))
return '${:.2E}'.format(text)
return text[:max_length - 3] + '...'
return text | True | len(text) > max_length | len(text) > max_length | 0.6456489562988281 |
397 | 395 | def _truncate_text(self, text, max_length):
if len(text) > max_length:
if<mask>:
text = int(text.strip('$'))
return '${:.2E}'.format(text)
return text[:max_length - 3] + '...'
return text | True | text.strip('$').isdigit() | text.strip('$').isdigit() | 0.6489506959915161 |
398 | 396 | def is_header(self, line: IndentedLine, previous_line: IndentedLine) -> bool:
if<mask>:
return False
if not self.strict:
return is_uppercase_hun(line.content[0]) or self.extract_identifier(line) is not None
if previous_line == EMPTY_LINE and is_uppercase_hun(line.content[0]):
return True
return super().is_header(line, previous_line) | False | not line.bold | not line or not previous_line | 0.6590715646743774 |
399 | 397 | def is_header(self, line: IndentedLine, previous_line: IndentedLine) -> bool:
if not line.bold:
return False
if<mask>:
return is_uppercase_hun(line.content[0]) or self.extract_identifier(line) is not None
if previous_line == EMPTY_LINE and is_uppercase_hun(line.content[0]):
return True
return super().is_header(line, previous_line) | False | not self.strict | not previous_line | 0.6520365476608276 |
400 | 398 | def is_header(self, line: IndentedLine, previous_line: IndentedLine) -> bool:
if not line.bold:
return False
if not self.strict:
return is_uppercase_hun(line.content[0]) or self.extract_identifier(line) is not None
if<mask>:
return True
return super().is_header(line, previous_line) | False | previous_line == EMPTY_LINE and is_uppercase_hun(line.content[0]) | previous_line.content and self.is_identifier(line) | 0.6457589864730835 |
401 | 399 | def align_code(groups, buff=ALIGN_SPACING):
dir = groups[0][0]
align = groups[0][1]
g = VGroup(*[g if type(g)!= list else align_code(g) for g in groups[1:]])
if<mask>:
g.arrange_in_grid(rows=1, row_alignments=align, buff=buff)
else:
g.arrange_in_grid(cols=1, col_alignments=align, buff=buff)
return g | True | dir == '-' | dir == '-' | 0.6616206169128418 |
402 | 400 | def get_warns(user_id, chat_id):
try:
user = SESSION.query(Warns).get((user_id, str(chat_id)))
if<mask>:
return None
reasons = user.reasons
num = user.num_warns
return (num, reasons)
finally:
SESSION.close() | True | not user | not user | 0.6579923629760742 |
403 | 401 | def _get_weight_regex(self, text_in):
if<mask>:
replaces = {'(': '\\(', ')': '\\)', '[': '\\[', ']': '\\]', '.': '\\.', '*': '.*?'}
regex_in = text_in
for key, val in replaces.items():
regex_in = regex_in.replace(key, val)
regex = re.compile(regex_in, re.IGNORECASE)
else:
regex = re.compile(re.escape(text_in), re.IGNORECASE)
return regex | False | '*' in text_in | isinstance(text_in, tuple) | 0.6528275012969971 |
404 | 402 | def on_touch_down(self, touch):
if<mask>:
self.h_picker_touch = False
else:
self.h_picker_touch = True
super().on_touch_down(touch) | True | not self._h_picker.collide_point(*touch.pos) | not self._h_picker.collide_point(*touch.pos) | 0.6440072059631348 |
405 | 403 | def pathmagic(string):
parts = string.split('\\')
if<mask>:
return string
elif len(parts) == 3:
return os.path.join(*parts)
else:
return string | True | len(parts) == 1 | len(parts) == 1 | 0.6467896699905396 |
406 | 404 | def pathmagic(string):
parts = string.split('\\')
if len(parts) == 1:
return string
elif<mask>:
return os.path.join(*parts)
else:
return string | True | len(parts) == 3 | len(parts) == 3 | 0.6465815901756287 |
407 | 405 | def checkUnindent(s, l, t):
if<mask>:
return
curCol = col(l, s)
if not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])):
raise ParseException(s, l, 'not an unindent')
indentStack.pop() | True | l >= len(s) | l >= len(s) | 0.6486468315124512 |
408 | 406 | def checkUnindent(s, l, t):
if l >= len(s):
return
curCol = col(l, s)
if<mask>:
raise ParseException(s, l, 'not an unindent')
indentStack.pop() | True | not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])) | not (indentStack and curCol < indentStack[-1] and (curCol <= indentStack[-2])) | 0.6463941931724548 |
409 | 407 | def execute(self, notification: SponsorEmailNotificationTemplate, sponsorships, contact_types, **kwargs):
msg_kwargs = {'to_primary': SponsorContact.PRIMARY_CONTACT in contact_types, 'to_administrative': SponsorContact.ADMINISTRATIVE_CONTACT in contact_types, 'to_accounting': SponsorContact.ACCOUTING_CONTACT in contact_types, 'to_manager': SponsorContact.MANAGER_CONTACT in contact_types}
for sponsorship in sponsorships:
email = notification.get_email_message(sponsorship, **msg_kwargs)
if<mask>:
continue
email.send()
self.notify(notification=notification, sponsorship=sponsorship, contact_types=contact_types, request=kwargs.get('request')) | False | not email | email is None | 0.6633118987083435 |
410 | 408 | def close_w(self) -> None:
if<mask>:
os.close(self.w)
self.w = None | True | self.w is not None | self.w is not None | 0.6481721997261047 |
411 | 409 | def __ixor__(self, other):
if<mask>:
other = ParserElement._literalStringClass(other)
return self.append(other) | True | isinstance(other, basestring) | isinstance(other, basestring) | 0.6507259011268616 |
412 | 410 | def _truncate_seq_pair(self, tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
while True:
total_length = len(tokens_a) + len(tokens_b)
if<mask>:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop() | True | total_length <= max_length | total_length <= max_length | 0.6482000350952148 |
413 | 411 | def _truncate_seq_pair(self, tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if<mask>:
tokens_a.pop()
else:
tokens_b.pop() | True | len(tokens_a) > len(tokens_b) | len(tokens_a) > len(tokens_b) | 0.64232337474823 |
414 | 412 | def on_actionLoad_Style_triggered(self, b=None):
if<mask>:
return
fname = QtGui.QFileDialog.getOpenFileName(self, 'Open File', os.getcwd(), 'style files (*.json *.style)')
self.style_fname = fname
self.disableHL()
self.ui.style.setPlainText(codecs.open(self.style_fname, 'rb', 'utf-8').read())
self.enableHL() | False | b is None | b is not None | 0.6597837209701538 |
415 | 413 | def __getitem__(self, name):
if<mask>:
raise NotImplementedError
else:
return self.element.attributes[name].value | False | isinstance(name, tuple) | name not in self.element.attributes.keys() | 0.65006422996521 |
416 | 414 | def get_RESTART_warning(self):
"""Print a warning if the RESTART keyword is detected"""
if<mask>:
print('WARNING: DUMPFLUX file contains a RESTART.\n')
print('This may cause problems with execution of DUMPFLUX run.\n')
print('Please check the RESTART file path before you proceed!') | False | self.has_KW('RESTART') | self.restart_file is None | 0.646465003490448 |
417 | 415 | def _black_or_white_by_color_brightness(color):
if<mask>:
return 'black'
else:
return 'white' | True | _color_brightness(color) >= 500 | _color_brightness(color) >= 500 | 0.6438709497451782 |
418 | 416 | def classify_cert(cert_meta, now, time_remaining, expire_window, cert_list):
"""Given metadata about a certificate under examination, classify it
into one of three categories, 'ok', 'warning', and 'expired'.
Params:
- `cert_meta` dict - A dict with certificate metadata. Required fields
include: 'cert_cn', 'path', 'expiry', 'days_remaining', 'health'.
- `now` (datetime) - a datetime object of the time to calculate the certificate 'time_remaining' against
- `time_remaining` (datetime.timedelta) - a timedelta for how long until the cert expires
- `expire_window` (datetime.timedelta) - a timedelta for how long the warning window is
- `cert_list` list - A list to shove the classified cert into
Return:
- `cert_list` - The updated list of classified certificates
"""
expiry_str = str(cert_meta['expiry'])
if<mask>:
cert_meta['health'] = 'expired'
elif time_remaining < expire_window:
cert_meta['health'] = 'warning'
else:
cert_meta['health'] = 'ok'
cert_meta['expiry'] = expiry_str
cert_meta['serial_hex'] = hex(int(cert_meta['serial']))
cert_list.append(cert_meta)
return cert_list | False | cert_meta['expiry'] < now | expire_window is None | 0.6477249264717102 |
419 | 417 | def classify_cert(cert_meta, now, time_remaining, expire_window, cert_list):
"""Given metadata about a certificate under examination, classify it
into one of three categories, 'ok', 'warning', and 'expired'.
Params:
- `cert_meta` dict - A dict with certificate metadata. Required fields
include: 'cert_cn', 'path', 'expiry', 'days_remaining', 'health'.
- `now` (datetime) - a datetime object of the time to calculate the certificate 'time_remaining' against
- `time_remaining` (datetime.timedelta) - a timedelta for how long until the cert expires
- `expire_window` (datetime.timedelta) - a timedelta for how long the warning window is
- `cert_list` list - A list to shove the classified cert into
Return:
- `cert_list` - The updated list of classified certificates
"""
expiry_str = str(cert_meta['expiry'])
if cert_meta['expiry'] < now:
cert_meta['health'] = 'expired'
elif<mask>:
cert_meta['health'] = 'warning'
else:
cert_meta['health'] = 'ok'
cert_meta['expiry'] = expiry_str
cert_meta['serial_hex'] = hex(int(cert_meta['serial']))
cert_list.append(cert_meta)
return cert_list | False | time_remaining < expire_window | cert_meta['expiry'] > now | 0.6497220993041992 |
420 | 418 | def bytes_to_human_readable(memory_amount: int) -> str:
""" Utility to convert a number of bytes (int) in a human readable string (with units)
"""
for unit in ['B', 'KB', 'MB', 'GB']:
if<mask>:
return '{:.3f}{}'.format(memory_amount, unit)
memory_amount /= 1024.0
return '{:.3f}TB'.format(memory_amount) | False | memory_amount > -1024.0 and memory_amount < 1024.0 | memory_amount >= 1024.0 | 0.6476356983184814 |
421 | 419 | def multiply_grads(self, c):
"""Multiplies grads by a constant ``c``."""
if<mask>:
self._sync_fp16_grads_to_fp32(c)
elif self.has_flat_params:
self.fp32_params.grad.data.mul_(c)
else:
for p32 in self.fp32_params:
p32.grad.data.mul_(c) | False | self._needs_sync | self.use_fp16 | 0.6519771814346313 |
422 | 420 | def multiply_grads(self, c):
"""Multiplies grads by a constant ``c``."""
if self._needs_sync:
self._sync_fp16_grads_to_fp32(c)
elif<mask>:
self.fp32_params.grad.data.mul_(c)
else:
for p32 in self.fp32_params:
p32.grad.data.mul_(c) | False | self.has_flat_params | self.use_fp16_grads | 0.6455222964286804 |
423 | 421 | def get_priority(priority: Union[int, str, Priority]) -> int:
"""Get priority value.
Args:
priority (int or str or :obj:`Priority`): Priority.
Returns:
int: The priority value.
"""
if<mask>:
if priority < 0 or priority > 100:
raise ValueError('priority must be between 0 and 100')
return priority
elif isinstance(priority, Priority):
return priority.value
elif isinstance(priority, str):
return Priority[priority.upper()].value
else:
raise TypeError('priority must be an integer or Priority enum value') | True | isinstance(priority, int) | isinstance(priority, int) | 0.6532579064369202 |
424 | 422 | def get_priority(priority: Union[int, str, Priority]) -> int:
"""Get priority value.
Args:
priority (int or str or :obj:`Priority`): Priority.
Returns:
int: The priority value.
"""
if isinstance(priority, int):
if<mask>:
raise ValueError('priority must be between 0 and 100')
return priority
elif isinstance(priority, Priority):
return priority.value
elif isinstance(priority, str):
return Priority[priority.upper()].value
else:
raise TypeError('priority must be an integer or Priority enum value') | False | priority < 0 or priority > 100 | not 0 <= priority <= 100 | 0.6681342124938965 |
425 | 423 | def get_priority(priority: Union[int, str, Priority]) -> int:
"""Get priority value.
Args:
priority (int or str or :obj:`Priority`): Priority.
Returns:
int: The priority value.
"""
if isinstance(priority, int):
if priority < 0 or priority > 100:
raise ValueError('priority must be between 0 and 100')
return priority
elif<mask>:
return priority.value
elif isinstance(priority, str):
return Priority[priority.upper()].value
else:
raise TypeError('priority must be an integer or Priority enum value') | True | isinstance(priority, Priority) | isinstance(priority, Priority) | 0.657296895980835 |
426 | 424 | def get_priority(priority: Union[int, str, Priority]) -> int:
"""Get priority value.
Args:
priority (int or str or :obj:`Priority`): Priority.
Returns:
int: The priority value.
"""
if isinstance(priority, int):
if priority < 0 or priority > 100:
raise ValueError('priority must be between 0 and 100')
return priority
elif isinstance(priority, Priority):
return priority.value
elif<mask>:
return Priority[priority.upper()].value
else:
raise TypeError('priority must be an integer or Priority enum value') | True | isinstance(priority, str) | isinstance(priority, str) | 0.6517472267150879 |
427 | 425 | def main():
args = parser.parse_args()
if<mask>:
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
cudnn.deterministic = True
main_worker(args) | True | args.seed is not None | args.seed is not None | 0.6485235691070557 |
428 | 426 | def _eq_verbose(self, acl):
"""Returns same as __eq__ but print explanation if not equal.
TEST: This function is used solely as part of the test suite."""
if<mask>:
print('ACL entries for {rp} compare differently'.format(rp=self))
return 0
if not self.cmp_entry_list(self.default_entry_list, acl.default_entry_list):
print('Default ACL entries for {rp} do not compare'.format(rp=self))
return 0
return 1 | False | not self.cmp_entry_list(self.entry_list, acl.entry_list) | not self.cmp_entry_list(acl.entry_list) | 0.6439564228057861 |
429 | 427 | def _eq_verbose(self, acl):
"""Returns same as __eq__ but print explanation if not equal.
TEST: This function is used solely as part of the test suite."""
if not self.cmp_entry_list(self.entry_list, acl.entry_list):
print('ACL entries for {rp} compare differently'.format(rp=self))
return 0
if<mask>:
print('Default ACL entries for {rp} do not compare'.format(rp=self))
return 0
return 1 | False | not self.cmp_entry_list(self.default_entry_list, acl.default_entry_list) | not self.cmp_default_list(acl.default_list) | 0.6422747373580933 |
430 | 428 | def attr_value(self, target, index=0):
"""
The attribute value for the given target node (e.g. 'PROJCS'). The index
keyword specifies an index of the child node to return.
"""
if<mask>:
raise TypeError
return get_attr_value(self._ptr, target, index) | False | not isinstance(target, str) or not isinstance(index, int) | not isinstance(target, type) | 0.6442840099334717 |
431 | 429 | def compute_drmsd_np(structure_1, structure_2, mask=None):
structure_1 = torch.tensor(structure_1)
structure_2 = torch.tensor(structure_2)
if<mask>:
mask = torch.tensor(mask)
return compute_drmsd(structure_1, structure_2, mask) | True | mask is not None | mask is not None | 0.6515742540359497 |
432 | 430 | def plug_float_update_callback(self, context):
if<mask>:
active_module = self.get_active_module()
if active_module != None:
if 'parameter_dictionary' in dir(active_module):
active_module.parameter_dictionary[self.key_name]['val'] = self.float_val_shadow | False | len(self.key_name) > 0 | self.float_val_shadow != None | 0.6486524343490601 |
433 | 431 | def plug_float_update_callback(self, context):
if len(self.key_name) > 0:
active_module = self.get_active_module()
if<mask>:
if 'parameter_dictionary' in dir(active_module):
active_module.parameter_dictionary[self.key_name]['val'] = self.float_val_shadow | False | active_module != None | active_module is not None | 0.6539262533187866 |
434 | 432 | def plug_float_update_callback(self, context):
if len(self.key_name) > 0:
active_module = self.get_active_module()
if active_module != None:
if<mask>:
active_module.parameter_dictionary[self.key_name]['val'] = self.float_val_shadow | False | 'parameter_dictionary' in dir(active_module) | active_module.parameter_dictionary.get(self.key_name) is not None | 0.6464701294898987 |
435 | 433 | def _md5check(fullname, md5sum=None):
if<mask>:
return True
md5 = hashlib.md5()
with open(fullname, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b''):
md5.update(chunk)
calc_md5sum = md5.hexdigest()
if calc_md5sum != md5sum:
return False
return True | True | md5sum is None | md5sum is None | 0.6490707993507385 |
436 | 434 | def _md5check(fullname, md5sum=None):
if md5sum is None:
return True
md5 = hashlib.md5()
with open(fullname, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b''):
md5.update(chunk)
calc_md5sum = md5.hexdigest()
if<mask>:
return False
return True | True | calc_md5sum != md5sum | calc_md5sum != md5sum | 0.6423662900924683 |
437 | 435 | def _cache_group_by_group_id(self, group_id):
if<mask>:
return None
group = storage.groups[group_id]
if group.type != storage.Group.TYPE_CACHE:
return None
return group | True | group_id not in storage.groups | group_id not in storage.groups | 0.649545431137085 |
438 | 436 | def _cache_group_by_group_id(self, group_id):
if group_id not in storage.groups:
return None
group = storage.groups[group_id]
if<mask>:
return None
return group | False | group.type != storage.Group.TYPE_CACHE | group is None | 0.6449147462844849 |
439 | 437 | def _index(self):
self._property_lookup = dict(((prop.name, prop) for prop in self._properties))
if<mask>:
raise ValueError('two properties with same name') | True | len(self._property_lookup) != len(self._properties) | len(self._property_lookup) != len(self._properties) | 0.6455563306808472 |
440 | 438 | def wait_for_glusterd_to_start(servers, glusterd_start_wait_timeout=80):
"""Checks glusterd is running on nodes with timeout.
Args:
servers (str|list): A server|List of server hosts on which glusterd
status has to be checked.
glusterd_start_wait_timeout: timeout to retry glusterd running
check in node.
Returns:
bool : True if glusterd is running on servers.
False otherwise.
"""
if<mask>:
servers = [servers]
count = 0
while count <= glusterd_start_wait_timeout:
ret = is_glusterd_running(servers)
if not ret:
g.log.info('glusterd is running on %s', servers)
return True
sleep(1)
count += 1
g.log.error('glusterd is not running on %s', servers)
return False | True | not isinstance(servers, list) | not isinstance(servers, list) | 0.6427684426307678 |
441 | 439 | def wait_for_glusterd_to_start(servers, glusterd_start_wait_timeout=80):
"""Checks glusterd is running on nodes with timeout.
Args:
servers (str|list): A server|List of server hosts on which glusterd
status has to be checked.
glusterd_start_wait_timeout: timeout to retry glusterd running
check in node.
Returns:
bool : True if glusterd is running on servers.
False otherwise.
"""
if not isinstance(servers, list):
servers = [servers]
count = 0
while count <= glusterd_start_wait_timeout:
ret = is_glusterd_running(servers)
if<mask>:
g.log.info('glusterd is running on %s', servers)
return True
sleep(1)
count += 1
g.log.error('glusterd is not running on %s', servers)
return False | False | not ret | ret | 0.654487669467926 |
442 | 440 | def onAccountBecomePlayer(self):
if<mask>:
self.__webController.invalidate()
self.__onServerSettingsChange(self.__lobbyCtx.getServerSettings().getSettings()) | False | self.__webController.getStateID() == WebControllerStates.STATE_NOT_DEFINED | not self.__lobbyCtx.getServerSettings().getSettings()['enabled'] | 0.6495983004570007 |
443 | 441 | def __setstate__(self, state):
self.__dict__.update(state)
for ffname in self.ffdata:
if<mask>:
temp = etree.ElementTree(etree.fromstring(self.ffdata[ffname]))
self.ffdata[ffname] = temp | True | self.ffdata_isxml[ffname] | self.ffdata_isxml[ffname] | 0.6492067575454712 |
444 | 442 | def __init__(self, x, y):
if<mask>:
raise ValueError('Invalid shape')
check_space_types(x, y)
super().__init__(x, [x, y], nl_deps=[], ic=False, adj_ic=False) | False | var_local_size(x) != var_local_size(y) | x.dim() != 2 or y.dim() != 3 | 0.6450034379959106 |
445 | 443 | def decode(self, data, items):
"""Decodes the data to return the tensors specified by the list of
items.
Args:
data: The scalar data to decode.
items: A list of strings, each of which is the name of the resulting
tensors to retrieve.
Returns:
A list of tensors, each of which corresponds to each item.
"""
data = tf.reshape(data, shape=[])
if<mask>:
decoded_data = tf.string_to_number(data, out_type=self._dtype)
else:
decoded_data = (tf.cast(data, self._dtype),)
outputs = {self._data_name: decoded_data}
return [outputs[item] for item in items] | False | data.dtype is tf.string | isinstance(data, tf.string_types) | 0.6459290981292725 |
446 | 444 | def __eq__(self, other):
if<mask>:
return False
return self.hashcmp == other.hashcmp | True | not isinstance(other, self.__class__) | not isinstance(other, self.__class__) | 0.6471760272979736 |
447 | 445 | def load_mask(self, idx):
if<mask>:
idx = np.random.randint(0, len(self.mask_path_list))
elif self.mask_choice == 'inorder':
idx = idx % len(self.mask_path_list)
mask = cv2.imdecode(np.fromfile(self.mask_path_list[idx], dtype=np.uint8), cv2.IMREAD_GRAYSCALE)
return mask.astype(np.float32) | True | self.mask_choice == 'random' | self.mask_choice == 'random' | 0.650625467300415 |
448 | 446 | def load_mask(self, idx):
if self.mask_choice == 'random':
idx = np.random.randint(0, len(self.mask_path_list))
elif<mask>:
idx = idx % len(self.mask_path_list)
mask = cv2.imdecode(np.fromfile(self.mask_path_list[idx], dtype=np.uint8), cv2.IMREAD_GRAYSCALE)
return mask.astype(np.float32) | True | self.mask_choice == 'inorder' | self.mask_choice == 'inorder' | 0.6518155932426453 |
449 | 447 | def parse_body(body: bytes) -> None:
res_json = parse_json(body)
if<mask>:
raise TiebaServerError(code, res_json['error_msg']) | True | code := int(res_json['error_code']) | code := int(res_json['error_code']) | 0.6448076963424683 |
450 | 448 | def load(self, require=True, *args, **kwargs):
"""
Require packages for this EntryPoint, then resolve it.
"""
if<mask>:
warnings.warn('Parameters to load are deprecated. Call .resolve and .require separately.', PkgResourcesDeprecationWarning, stacklevel=2)
if require:
self.require(*args, **kwargs)
return self.resolve() | True | not require or args or kwargs | not require or args or kwargs | 0.6531832218170166 |
451 | 449 | def load(self, require=True, *args, **kwargs):
"""
Require packages for this EntryPoint, then resolve it.
"""
if not require or args or kwargs:
warnings.warn('Parameters to load are deprecated. Call .resolve and .require separately.', PkgResourcesDeprecationWarning, stacklevel=2)
if<mask>:
self.require(*args, **kwargs)
return self.resolve() | True | require | require | 0.679688572883606 |
452 | 450 | @certfile.setter
def certfile(self, certfile):
if<mask>:
raise ValueError('certfile is needed for server-side')
if certfile and (not os.access(certfile, os.R_OK)):
raise IOError('No such certfile found: %s' % certfile)
self._certfile = certfile | False | self._server_side and (not certfile) | not certfile | 0.6473574638366699 |
453 | 451 | @certfile.setter
def certfile(self, certfile):
if self._server_side and (not certfile):
raise ValueError('certfile is needed for server-side')
if<mask>:
raise IOError('No such certfile found: %s' % certfile)
self._certfile = certfile | False | certfile and (not os.access(certfile, os.R_OK)) | certfile is None | 0.6480023860931396 |
454 | 452 | def score(self, rigid_0: ru.Rigid, rigid_t: ru.Rigid, t: float):
tran_0, rot_0 = _extract_trans_rots(rigid_0)
tran_t, rot_t = _extract_trans_rots(rigid_t)
if<mask>:
rot_score = np.zeros_like(rot_0)
else:
rot_score = self._so3_diffuser.score(rot_t, t)
if not self._diffuse_trans:
trans_score = np.zeros_like(tran_0)
else:
trans_score = self._r3_diffuser.score(tran_t, tran_0, t)
return (trans_score, rot_score) | False | not self._diffuse_rot | not self._diffuse_trans | 0.6493487358093262 |
455 | 453 | def score(self, rigid_0: ru.Rigid, rigid_t: ru.Rigid, t: float):
tran_0, rot_0 = _extract_trans_rots(rigid_0)
tran_t, rot_t = _extract_trans_rots(rigid_t)
if not self._diffuse_rot:
rot_score = np.zeros_like(rot_0)
else:
rot_score = self._so3_diffuser.score(rot_t, t)
if<mask>:
trans_score = np.zeros_like(tran_0)
else:
trans_score = self._r3_diffuser.score(tran_t, tran_0, t)
return (trans_score, rot_score) | False | not self._diffuse_trans | not self._diffuse_rot | 0.6473543643951416 |
456 | 454 | def __init__(self, file_pattern: Union[Text, List[Text]], raw_record_column_name: Text, telemetry_descriptors: List[Text]):
"""Initializer.
Args:
file_pattern: One or a list of glob patterns. If a list, must not be
empty.
raw_record_column_name: Name of the raw record column.
telemetry_descriptors: A set of descriptors that identify the component
that is instantiating this TFXIO. These will be used to construct the
namespace to contain metrics for profiling and are therefore expected to
be identifiers of the component itself and not individual instances of
source use.
"""
super().__init__(telemetry_descriptors=telemetry_descriptors, physical_format='tfrecords_gzip', raw_record_column_name=raw_record_column_name)
if<mask>:
file_pattern = [file_pattern]
assert file_pattern, 'Must provide at least one file pattern.'
self._file_pattern = file_pattern | True | not isinstance(file_pattern, list) | not isinstance(file_pattern, list) | 0.6433738470077515 |
457 | 455 | @property
def paths(self) -> list[str]:
"""Resolve SFTP file paths with prefix"""
url = urlparse(self.path)
uri = self.get_uri()
full_paths = []
prefixes = self.hook.get_tree_map(url.netloc, prefix=url.netloc + url.path)
for keys in prefixes:
if<mask>:
full_paths.extend(keys)
paths = [uri + '/' + path for path in full_paths]
return paths | True | len(keys) > 0 | len(keys) > 0 | 0.6496731638908386 |
458 | 456 | def is_valid_size(self, target_size_product):
if<mask>:
return False
if target_size_product < self.min_input_sizesquare:
return False
return True | True | target_size_product > self.max_input_sizesquare | target_size_product > self.max_input_sizesquare | 0.6462470293045044 |
459 | 457 | def is_valid_size(self, target_size_product):
if target_size_product > self.max_input_sizesquare:
return False
if<mask>:
return False
return True | True | target_size_product < self.min_input_sizesquare | target_size_product < self.min_input_sizesquare | 0.6456623077392578 |
460 | 458 | def __call__(self):
obj = self.ref()
if<mask>:
raise exceptions.InvalidRequestError('stale association proxy, parent object has gone out of scope')
return getattr(obj, self.target) | False | obj is None | obj.scope != self.scope | 0.6637649536132812 |
461 | 459 | def iterate_models(self, **kwargs):
"""
Iterate Scraper models.
:kwargs: FFProbe results and index
:returns: Metadata model
"""
for md_class in self._supported_metadata:
if<mask>:
md_object = md_class(**kwargs)
if md_object.av_format_supported() is not None:
yield md_object | False | md_class.is_supported(self._predefined_mimetype, self._predefined_version, self._params) | md_class.allow_convert_all_versions | 0.6452722549438477 |
462 | 460 | def iterate_models(self, **kwargs):
"""
Iterate Scraper models.
:kwargs: FFProbe results and index
:returns: Metadata model
"""
for md_class in self._supported_metadata:
if md_class.is_supported(self._predefined_mimetype, self._predefined_version, self._params):
md_object = md_class(**kwargs)
if<mask>:
yield md_object | False | md_object.av_format_supported() is not None | md_object.has_model(self._predefined_mimetype) | 0.6457136869430542 |
463 | 461 | def read_line(self):
""" Read a line from a nmea port
return Line from the nmea port
"""
line = ''
while True:
c = self.read()
if<mask>:
break
while c == '\r':
c = self.read()
if c == '\n':
break
line += c
return line | False | not c | c == '\r' | 0.6661070585250854 |
464 | 462 | def read_line(self):
""" Read a line from a nmea port
return Line from the nmea port
"""
line = ''
while True:
c = self.read()
if not c:
break
while c == '\r':
c = self.read()
if<mask>:
break
line += c
return line | False | c == '\n' | not c | 0.6589483618736267 |
465 | 463 | def warn(self, *message, tag: Optional[str]=None, end: str='\n', split: str=' ', flush: bool=True, stack_trace: Optional[FrameType]=None) -> None:
if<mask>:
return
self.make_log(messages=list(message), tag=tag, end=end, split=split, flush=flush, level=LogLevel.warn, stack_trace=stack_trace) | False | not self.log_for(LogLevel.warn) | self.is_null() | 0.6436254978179932 |
466 | 464 | def _break_cont_exps(self, g):
if<mask>:
return flatten_list([self._break_cont_exps(g.inputs[i]) for i in g.inputs if is_pos(i)])
else:
return [g] | False | g.typename() == 'cont_turn' | isinstance(g, Union) | 0.6467052698135376 |
467 | 465 | def forward(self, x):
x = self.relu(x)
if<mask>:
x = nn.ZeroPad2d((1, 0, 1, 0))(x)
x = self.separable_1(x)
if self.name == 'specific':
x = x[:, :, 1:, 1:].contiguous()
x = self.bn_sep_1(x)
x = self.relu1(x)
x = self.separable_2(x)
x = self.bn_sep_2(x)
return x | True | self.name == 'specific' | self.name == 'specific' | 0.6466684341430664 |
468 | 466 | def forward(self, x):
x = self.relu(x)
if self.name == 'specific':
x = nn.ZeroPad2d((1, 0, 1, 0))(x)
x = self.separable_1(x)
if<mask>:
x = x[:, :, 1:, 1:].contiguous()
x = self.bn_sep_1(x)
x = self.relu1(x)
x = self.separable_2(x)
x = self.bn_sep_2(x)
return x | True | self.name == 'specific' | self.name == 'specific' | 0.6458703279495239 |
469 | 467 | def asformat(self, format, copy=False):
"""Return this matrix in the passed sparse format.
Parameters
----------
format : {str, None}
The desired sparse matrix format ("csr", "csc", "lil", "dok", ...)
or None for no conversion.
copy : bool, optional
If True, the result is guaranteed to not share data with self.
Returns
-------
A : This matrix in the passed sparse format.
"""
if<mask>:
if copy:
return self.copy()
else:
return self
else:
try:
convert_method = getattr(self, 'to' + format)
except AttributeError:
raise ValueError('Format {} is unknown.'.format(format))
else:
return convert_method(copy=copy) | False | format is None or format == self.format | format is None | 0.6479859352111816 |
470 | 468 | def asformat(self, format, copy=False):
"""Return this matrix in the passed sparse format.
Parameters
----------
format : {str, None}
The desired sparse matrix format ("csr", "csc", "lil", "dok", ...)
or None for no conversion.
copy : bool, optional
If True, the result is guaranteed to not share data with self.
Returns
-------
A : This matrix in the passed sparse format.
"""
if format is None or format == self.format:
if<mask>:
return self.copy()
else:
return self
else:
try:
convert_method = getattr(self, 'to' + format)
except AttributeError:
raise ValueError('Format {} is unknown.'.format(format))
else:
return convert_method(copy=copy) | True | copy | copy | 0.672120213508606 |
471 | 469 | def cancels_job_with_name(self, job_name: Text, sender_id: Text) -> bool:
"""Determines if this `ReminderCancelled` event should cancel the job with the given name.
Args:
job_name: Name of the job to be tested.
sender_id: The `sender_id` of the tracker.
Returns:
`True`, if this `ReminderCancelled` event should cancel the job with the given name,
and `False` otherwise.
"""
match = re.match(f'^\\[([\\d\\-]*),([\\d\\-]*),([\\d\\-]*)\\]({re.escape(ACTION_NAME_SENDER_ID_CONNECTOR_STR)}{re.escape(sender_id)})', job_name)
if<mask>:
return False
name_hash, intent_hash, entities_hash = match.group(1, 2, 3)
return (not self.name or self._matches_name_hash(name_hash)) and (not self.intent or self._matches_intent_hash(intent_hash)) and (not self.entities or self._matches_entities_hash(entities_hash)) | True | not match | not match | 0.6534326076507568 |
472 | 470 | def set_value(self, tag, value):
if<mask>:
return
family = tag.split('.')[0]
if family == 'Exif':
self.set_exif_value(tag, value)
elif family == 'Iptc':
self.set_iptc_value(tag, value)
else:
self.set_xmp_value(tag, value) | True | not tag | not tag | 0.6641639471054077 |
473 | 471 | def set_value(self, tag, value):
if not tag:
return
family = tag.split('.')[0]
if<mask>:
self.set_exif_value(tag, value)
elif family == 'Iptc':
self.set_iptc_value(tag, value)
else:
self.set_xmp_value(tag, value) | True | family == 'Exif' | family == 'Exif' | 0.6504823565483093 |
474 | 472 | def set_value(self, tag, value):
if not tag:
return
family = tag.split('.')[0]
if family == 'Exif':
self.set_exif_value(tag, value)
elif<mask>:
self.set_iptc_value(tag, value)
else:
self.set_xmp_value(tag, value) | True | family == 'Iptc' | family == 'Iptc' | 0.6538915634155273 |
475 | 473 | def pytest_generate_tests(metafunc):
"""
Function called by pytest when collecting a test_XXX function
define the dispatch_rules fixtures in the test environment with the collected
value _dispatch_rules if it exists or with an empty dispatch_rules
:param metafunc: the test context given by pytest
"""
if<mask>:
dispatch_rules = getattr(metafunc.function, '_dispatch_rules', None)
if isinstance(dispatch_rules, list):
metafunc.parametrize('dispatch_rules', [dispatch_rules])
else:
metafunc.parametrize('dispatch_rules', [[(Report1, DispatchRule1AB(primary=True))]])
if 'formula_class' in metafunc.fixturenames:
formula_class = getattr(metafunc.function, '_formula_class', DummyFormulaActor)
metafunc.parametrize('formula_class', [formula_class]) | True | 'dispatch_rules' in metafunc.fixturenames | 'dispatch_rules' in metafunc.fixturenames | 0.6457846760749817 |
476 | 474 | def pytest_generate_tests(metafunc):
"""
Function called by pytest when collecting a test_XXX function
define the dispatch_rules fixtures in the test environment with the collected
value _dispatch_rules if it exists or with an empty dispatch_rules
:param metafunc: the test context given by pytest
"""
if 'dispatch_rules' in metafunc.fixturenames:
dispatch_rules = getattr(metafunc.function, '_dispatch_rules', None)
if isinstance(dispatch_rules, list):
metafunc.parametrize('dispatch_rules', [dispatch_rules])
else:
metafunc.parametrize('dispatch_rules', [[(Report1, DispatchRule1AB(primary=True))]])
if<mask>:
formula_class = getattr(metafunc.function, '_formula_class', DummyFormulaActor)
metafunc.parametrize('formula_class', [formula_class]) | True | 'formula_class' in metafunc.fixturenames | 'formula_class' in metafunc.fixturenames | 0.6451785564422607 |
477 | 475 | def pytest_generate_tests(metafunc):
"""
Function called by pytest when collecting a test_XXX function
define the dispatch_rules fixtures in the test environment with the collected
value _dispatch_rules if it exists or with an empty dispatch_rules
:param metafunc: the test context given by pytest
"""
if 'dispatch_rules' in metafunc.fixturenames:
dispatch_rules = getattr(metafunc.function, '_dispatch_rules', None)
if<mask>:
metafunc.parametrize('dispatch_rules', [dispatch_rules])
else:
metafunc.parametrize('dispatch_rules', [[(Report1, DispatchRule1AB(primary=True))]])
if 'formula_class' in metafunc.fixturenames:
formula_class = getattr(metafunc.function, '_formula_class', DummyFormulaActor)
metafunc.parametrize('formula_class', [formula_class]) | False | isinstance(dispatch_rules, list) | isinstance(dispatch_rules, Mapping) | 0.6479055881500244 |
478 | 476 | def test_find_first_zero_bit(self):
sym = gdb.lookup_symbol('cpu_online_mask', None)[0]
if<mask>:
sym = gdb.lookup_symbol('__cpu_online_mask', None)[0]
self.assertTrue(sym is not None)
bitmap = sym.value()['bits']
count = 0
bit = bitmaps.find_first_zero_bit(bitmap)
self.assertTrue(type(bit) is int) | True | sym is None | sym is None | 0.6599867343902588 |
479 | 477 | def _do_evaluate(self, runner):
"""perform evaluation and save ckpt."""
if<mask>:
return
from mmdet.apis import single_gpu_test
results = single_gpu_test(runner.model, self.dataloader, show=False)
runner.log_buffer.output['eval_iter_num'] = len(self.dataloader)
key_score = self.evaluate(runner, results)
if self.save_best:
self._save_ckpt(runner, key_score) | False | not self._should_evaluate(runner) | self.save_best is False and runner.model is None | 0.644819974899292 |
480 | 478 | def _do_evaluate(self, runner):
"""perform evaluation and save ckpt."""
if not self._should_evaluate(runner):
return
from mmdet.apis import single_gpu_test
results = single_gpu_test(runner.model, self.dataloader, show=False)
runner.log_buffer.output['eval_iter_num'] = len(self.dataloader)
key_score = self.evaluate(runner, results)
if<mask>:
self._save_ckpt(runner, key_score) | True | self.save_best | self.save_best | 0.6514607071876526 |
481 | 479 | def try_load(name: Text) -> Optional[Image]:
if<mask>:
return None
try:
return load(name)
except Exception as ex:
app.log.text('can not load: %s: %s' % (name, ex), level=app.DEBUG)
_NOT_EXISTED_NAMES.add(name)
return None | True | name in _NOT_EXISTED_NAMES | name in _NOT_EXISTED_NAMES | 0.651135265827179 |
482 | 480 | @staticmethod
def set_incr_scan(proj_conf, total_scan):
job_context = proj_conf['job_context']
task_list = proj_conf['tasks']
if<mask>:
job_context['incr_scan'] = False
for task_request in task_list:
task_params = task_request['task_params']
if total_scan:
task_params['incr_scan'] = False
task_params['scm_last_revision'] = ''
elif 'incr_scan' in task_params:
if not task_params['incr_scan']:
total_scan = True
else:
task_params['incr_scan'] = True | False | total_scan | 'incr_scan' not in job_context | 0.660485565662384 |
483 | 481 | @staticmethod
def set_incr_scan(proj_conf, total_scan):
job_context = proj_conf['job_context']
task_list = proj_conf['tasks']
if total_scan:
job_context['incr_scan'] = False
for task_request in task_list:
task_params = task_request['task_params']
if<mask>:
task_params['incr_scan'] = False
task_params['scm_last_revision'] = ''
elif 'incr_scan' in task_params:
if not task_params['incr_scan']:
total_scan = True
else:
task_params['incr_scan'] = True | False | total_scan | task_params['scm_last_revision'] | 0.6604939699172974 |
484 | 482 | @staticmethod
def set_incr_scan(proj_conf, total_scan):
job_context = proj_conf['job_context']
task_list = proj_conf['tasks']
if total_scan:
job_context['incr_scan'] = False
for task_request in task_list:
task_params = task_request['task_params']
if total_scan:
task_params['incr_scan'] = False
task_params['scm_last_revision'] = ''
elif<mask>:
if not task_params['incr_scan']:
total_scan = True
else:
task_params['incr_scan'] = True | False | 'incr_scan' in task_params | job_context['incr_scan'] | 0.651297926902771 |
485 | 483 | @staticmethod
def set_incr_scan(proj_conf, total_scan):
job_context = proj_conf['job_context']
task_list = proj_conf['tasks']
if total_scan:
job_context['incr_scan'] = False
for task_request in task_list:
task_params = task_request['task_params']
if total_scan:
task_params['incr_scan'] = False
task_params['scm_last_revision'] = ''
elif 'incr_scan' in task_params:
if<mask>:
total_scan = True
else:
task_params['incr_scan'] = True | False | not task_params['incr_scan'] | task_params['incr_scan'] | 0.6464079022407532 |
486 | 484 | def safe_location(self, location_name, geom, max_distance=200):
"""
Returns a location (geometry) to use, given a location_name and
geometry. This is used for data sources that publish both a geometry
and a location_name -- we double-check that the geometry is within
a certain `max_distance` from the geocoded location_name.
If there's a discrepancy or if the location_name can't be geocoded,
this returns None.
"""
location = self.geocode(location_name)
if<mask>:
return None
location_point = location['point']
if not location_point:
return None
location_point.srid = 4326
is_close, distance = locations_are_close(location_point, geom, max_distance)
if not is_close:
return None
return geom | False | location is None | 'point' not in location | 0.6538652777671814 |
487 | 485 | def safe_location(self, location_name, geom, max_distance=200):
"""
Returns a location (geometry) to use, given a location_name and
geometry. This is used for data sources that publish both a geometry
and a location_name -- we double-check that the geometry is within
a certain `max_distance` from the geocoded location_name.
If there's a discrepancy or if the location_name can't be geocoded,
this returns None.
"""
location = self.geocode(location_name)
if location is None:
return None
location_point = location['point']
if<mask>:
return None
location_point.srid = 4326
is_close, distance = locations_are_close(location_point, geom, max_distance)
if not is_close:
return None
return geom | False | not location_point | location_point.srid in self.discrepancy | 0.6511427164077759 |
488 | 486 | def safe_location(self, location_name, geom, max_distance=200):
"""
Returns a location (geometry) to use, given a location_name and
geometry. This is used for data sources that publish both a geometry
and a location_name -- we double-check that the geometry is within
a certain `max_distance` from the geocoded location_name.
If there's a discrepancy or if the location_name can't be geocoded,
this returns None.
"""
location = self.geocode(location_name)
if location is None:
return None
location_point = location['point']
if not location_point:
return None
location_point.srid = 4326
is_close, distance = locations_are_close(location_point, geom, max_distance)
if<mask>:
return None
return geom | True | not is_close | not is_close | 0.6524591445922852 |
489 | 487 | def __str__(self):
if<mask>:
return self.name
if self.strRepr is None:
self.strRepr = '[' + _ustr(self.expr) + ']...'
return self.strRepr | True | hasattr(self, 'name') | hasattr(self, 'name') | 0.6489574313163757 |
490 | 488 | def __str__(self):
if hasattr(self, 'name'):
return self.name
if<mask>:
self.strRepr = '[' + _ustr(self.expr) + ']...'
return self.strRepr | True | self.strRepr is None | self.strRepr is None | 0.648362398147583 |
491 | 489 | def get_user(self, email):
user = [x for x in MOCK_USERS if x.get('email') == email]
if<mask>:
return user[0]
return None | True | user | user | 0.6739429831504822 |
492 | 490 | def __ge__(self, other):
if<mask>:
return NotImplemented
return other <= self | True | not isinstance(other, Set) | not isinstance(other, Set) | 0.6492550373077393 |
493 | 491 | def CreateCMakeTargetName(self, qualified_target):
base_name = CreateCMakeTargetBaseName(qualified_target)
if<mask>:
return CreateCMakeTargetFullName(qualified_target)
return base_name | False | base_name in self.cmake_target_base_names_conficting | not base_name | 0.6448224782943726 |
494 | 492 | def __call__(self, id, name=None):
"""
Return mapped id from id and, if available, name
"""
if<mask>:
return id
newid = self.name2id(name)
if newid is None:
return id
else:
return newid | True | not name | not name | 0.6647347211837769 |
495 | 493 | def __call__(self, id, name=None):
"""
Return mapped id from id and, if available, name
"""
if not name:
return id
newid = self.name2id(name)
if<mask>:
return id
else:
return newid | True | newid is None | newid is None | 0.6542881727218628 |
496 | 494 | @property
def Type(self):
if<mask>:
return self._entity_data.get('Type')
return '0' | True | 'Type' in self._entity_data | 'Type' in self._entity_data | 0.6537728309631348 |
497 | 495 | def handle(self, handler_context):
url = ''
if<mask>:
url = handler_context.flow['request']['url']
headers = {'Content-Type': 'text/html; charset=utf-8'}
code = lb_http_status.STATUS_CODE_CAN_NOT_HANDLE_REQUEST
resp_data = f'Lyrebird cannot handle this request: {url}\n'
handler_context.flow['response']['headers'] = headers
handler_context.flow['response']['code'] = code
handler_context.flow['response']['data'] = resp_data
handler_context.response = Response(resp_data, status=code, headers=headers)
logger.info(f'<Proxy> ERROR::CAN_NOT_HANDLE_REQUEST {url}') | False | 'url' in handler_context.flow.get('request') | 'request' in handler_context.flow | 0.6497178077697754 |
498 | 496 | def myprint(self, message):
assert self.mylogfile != None, 'The LogFile is not initialized yet!'
print(message)
sys.stdout.flush()
if<mask>:
print(message, file=self.mylogfile)
self.mylogfile.flush() | True | self.mylogfile != None | self.mylogfile != None | 0.6499958038330078 |
499 | 497 | def add_pattern(text, pattern_str, replace_str, before, after):
if<mask>:
text = re.sub('%s' % pattern_str, '%s\\1' % replace_str, text)
elif before == 0 and after == 1:
text = re.sub('%s' % pattern_str, '\\1%s' % replace_str, text)
elif before == 1 and after == 1:
text = re.sub('%s' % pattern_str, '%s\\1%s' % (replace_str, replace_str), text)
return text | True | before == 1 and after == 0 | before == 1 and after == 0 | 0.6535935401916504 |
500 | 498 | def add_pattern(text, pattern_str, replace_str, before, after):
if before == 1 and after == 0:
text = re.sub('%s' % pattern_str, '%s\\1' % replace_str, text)
elif<mask>:
text = re.sub('%s' % pattern_str, '\\1%s' % replace_str, text)
elif before == 1 and after == 1:
text = re.sub('%s' % pattern_str, '%s\\1%s' % (replace_str, replace_str), text)
return text | True | before == 0 and after == 1 | before == 0 and after == 1 | 0.6534395813941956 |
501 | 499 | def add_pattern(text, pattern_str, replace_str, before, after):
if before == 1 and after == 0:
text = re.sub('%s' % pattern_str, '%s\\1' % replace_str, text)
elif before == 0 and after == 1:
text = re.sub('%s' % pattern_str, '\\1%s' % replace_str, text)
elif<mask>:
text = re.sub('%s' % pattern_str, '%s\\1%s' % (replace_str, replace_str), text)
return text | True | before == 1 and after == 1 | before == 1 and after == 1 | 0.653862476348877 |
502 | 500 | def _scroll_shift_y(self, event: MouseEvent):
old_ylim = self._ax.get_ylim()
old_height = old_ylim[1] - old_ylim[0]
shift_y = old_height / self._MOUSE_WHEEL_TRANSLATE_SCALE
if<mask>:
shift_y *= -1
self._ax.set_ylim(old_ylim[0] + shift_y, old_ylim[1] + shift_y)
self._fig.canvas.draw() | False | event.button == 'up' | self._swipe_scale > 1 | 0.6539157629013062 |
503 | 501 | def propagate_faults(self, icomb: Tuple[int], error: Tuple[str]):
"""Insert a set of faults and propagate through a circuit.
icomb = integer tuple of failed operations' indices
error = tuple of pauli strings
Return: measurement outcome discrepancies.
"""
if<mask>:
raise Exception('no circuit loaded')
self.qubit_array = [0] * (2 * self.qreg_size)
self.clbit_array = [0] * self.creg_size
for j, enc_circ in enumerate(self.encoded_circ):
opcode, q_idx, c_idx, _ = enc_circ
self.gate_dispatch[opcode](j, q_idx, c_idx, icomb, error)
return self.clbit_array | False | self.encoded_circ is None | self.gate_dispatch is None | 0.6508350372314453 |
504 | 502 | def get_resource(self, request, filename):
"""Return a static resource from the shared folder."""
filename = join(dirname(__file__), 'shared', basename(filename))
if<mask>:
mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
f = file(filename, 'rb')
try:
return Response(f.read(), mimetype=mimetype)
finally:
f.close()
return Response('Not Found', status=404) | False | isfile(filename) | os.path.exists(filename) | 0.6466870903968811 |
505 | 503 | @classmethod
def register_runner(cls, name):
"""Register a model to registry with key 'name'
Args:
name: Key with which the task will be registered.
Usage:
from minigpt4_utils.common.registry import registry
"""
def wrap(runner_cls):
if<mask>:
raise KeyError("Name '{}' already registered for {}.".format(name, cls.mapping['runner_name_mapping'][name]))
cls.mapping['runner_name_mapping'][name] = runner_cls
return runner_cls
return wrap | True | name in cls.mapping['runner_name_mapping'] | name in cls.mapping['runner_name_mapping'] | 0.6477246284484863 |
506 | 504 | def check_spooler_service(self):
ok = False
service_config, service_status = self.get_service('Spooler', self.connection)
if<mask>:
ok = True
reasons = ['Spooler service disabled']
else:
reasons = ['Spooler service enabled']
if service_status == scmr.SERVICE_RUNNING:
reasons.append('Spooler service running')
elif service_status == scmr.SERVICE_STOPPED:
ok = True
reasons.append('Spooler service not running')
return (ok, reasons) | False | service_config['dwStartType'] == scmr.SERVICE_DISABLED | service_config == scmr.SERVICE_DISABLED | 0.6512548923492432 |
507 | 505 | def check_spooler_service(self):
ok = False
service_config, service_status = self.get_service('Spooler', self.connection)
if service_config['dwStartType'] == scmr.SERVICE_DISABLED:
ok = True
reasons = ['Spooler service disabled']
else:
reasons = ['Spooler service enabled']
if<mask>:
reasons.append('Spooler service running')
elif service_status == scmr.SERVICE_STOPPED:
ok = True
reasons.append('Spooler service not running')
return (ok, reasons) | True | service_status == scmr.SERVICE_RUNNING | service_status == scmr.SERVICE_RUNNING | 0.6494125127792358 |
508 | 506 | def check_spooler_service(self):
ok = False
service_config, service_status = self.get_service('Spooler', self.connection)
if service_config['dwStartType'] == scmr.SERVICE_DISABLED:
ok = True
reasons = ['Spooler service disabled']
else:
reasons = ['Spooler service enabled']
if service_status == scmr.SERVICE_RUNNING:
reasons.append('Spooler service running')
elif<mask>:
ok = True
reasons.append('Spooler service not running')
return (ok, reasons) | False | service_status == scmr.SERVICE_STOPPED | service_status == scmr.SERVICE_NOT_RUNNING | 0.6483436226844788 |
509 | 507 | def set_seed(args):
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if<mask>:
torch.cuda.manual_seed_all(args.seed) | True | args.n_gpu > 0 | args.n_gpu > 0 | 0.6485757827758789 |
510 | 508 | def can_handle(self, devinfo, debug=False):
if<mask>:
dev = USBDevice(devinfo)
main, carda, cardb = self.find_device_nodes(detected_device=dev)
if main is None and carda is None and (cardb is None):
if debug:
print('\tPRS-T1: Appears to be in non data mode or was ejected, ignoring')
return False
return True | False | islinux | self.is_data_mode() | 0.6676485538482666 |
511 | 509 | def can_handle(self, devinfo, debug=False):
if islinux:
dev = USBDevice(devinfo)
main, carda, cardb = self.find_device_nodes(detected_device=dev)
if<mask>:
if debug:
print('\tPRS-T1: Appears to be in non data mode or was ejected, ignoring')
return False
return True | False | main is None and carda is None and (cardb is None) | carda.status != 'ejected' | 0.6439000368118286 |
512 | 510 | def can_handle(self, devinfo, debug=False):
if islinux:
dev = USBDevice(devinfo)
main, carda, cardb = self.find_device_nodes(detected_device=dev)
if main is None and carda is None and (cardb is None):
if<mask>:
print('\tPRS-T1: Appears to be in non data mode or was ejected, ignoring')
return False
return True | True | debug | debug | 0.6651346683502197 |
513 | 511 | def fix_model_name(model: Type[BaseModel], name: str) -> None:
if<mask>:
setattr(model.__config__, 'title', name)
else:
setattr(model, '__name__', name) | False | isinstance(model, type(BaseModel)) | issubclass(model, BaseModel) | 0.6515050530433655 |
514 | 512 | def _is_punctuation(char):
"""Checks whether `chars` is a punctuation character."""
cp = ord(char)
if<mask>:
return True
cat = unicodedata.category(char)
if cat.startswith('P'):
return True
return False | True | cp >= 33 and cp <= 47 or (cp >= 58 and cp <= 64) or (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126) | cp >= 33 and cp <= 47 or (cp >= 58 and cp <= 64) or (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126) | 0.6487233638763428 |
515 | 513 | def _is_punctuation(char):
"""Checks whether `chars` is a punctuation character."""
cp = ord(char)
if cp >= 33 and cp <= 47 or (cp >= 58 and cp <= 64) or (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126):
return True
cat = unicodedata.category(char)
if<mask>:
return True
return False | True | cat.startswith('P') | cat.startswith('P') | 0.6423234939575195 |
516 | 514 | @dimension_labels.setter
def dimension_labels(self, val):
if<mask>:
self.geometry.dimension_labels = val
self._dimension_labels = val | False | hasattr(self, 'geometry') | isinstance(val, (int, float, complex, np.number)) | 0.6522078514099121 |
517 | 515 | def _concrete_constraint(self, e):
c = super()._concrete_value(e)
if<mask>:
return c
if self._replace_constraints:
er = self._replacement(e)
return super()._concrete_constraint(er)
else:
return super()._concrete_constraint(e) | False | c is not None | c is not e | 0.653294563293457 |
518 | 516 | def _concrete_constraint(self, e):
c = super()._concrete_value(e)
if c is not None:
return c
if<mask>:
er = self._replacement(e)
return super()._concrete_constraint(er)
else:
return super()._concrete_constraint(e) | False | self._replace_constraints | self._is_replacement(e) | 0.6504372358322144 |
519 | 517 | def get_module_name(group_name, model, key_word, exist_module_name, mpu=None, verbose=True):
"""
get the associated module name from the model based on the key_word provided by users
"""
return_module_name = []
for name, module in model.named_modules():
module_check = is_module_compressible(module, mpu)
if<mask>:
if name in exist_module_name and verbose:
raise ValueError(f'{name} is already added to compression, please check your config file for {group_name}.')
if name not in exist_module_name:
exist_module_name.add(name)
return_module_name.append(name)
return (return_module_name, exist_module_name) | False | re.search(key_word, name) is not None and module_check | module_check | 0.6428780555725098 |
520 | 518 | def get_module_name(group_name, model, key_word, exist_module_name, mpu=None, verbose=True):
"""
get the associated module name from the model based on the key_word provided by users
"""
return_module_name = []
for name, module in model.named_modules():
module_check = is_module_compressible(module, mpu)
if re.search(key_word, name) is not None and module_check:
if<mask>:
raise ValueError(f'{name} is already added to compression, please check your config file for {group_name}.')
if name not in exist_module_name:
exist_module_name.add(name)
return_module_name.append(name)
return (return_module_name, exist_module_name) | False | name in exist_module_name and verbose | verbose | 0.6487796306610107 |
521 | 519 | def get_module_name(group_name, model, key_word, exist_module_name, mpu=None, verbose=True):
"""
get the associated module name from the model based on the key_word provided by users
"""
return_module_name = []
for name, module in model.named_modules():
module_check = is_module_compressible(module, mpu)
if re.search(key_word, name) is not None and module_check:
if name in exist_module_name and verbose:
raise ValueError(f'{name} is already added to compression, please check your config file for {group_name}.')
if<mask>:
exist_module_name.add(name)
return_module_name.append(name)
return (return_module_name, exist_module_name) | False | name not in exist_module_name | not exist_module_name | 0.6524373292922974 |
522 | 520 | def __rsub__(self, other):
"""
Implementation of - operator when left operand is not a C{L{ParserElement}}
"""
if<mask>:
other = ParserElement._literalStringClass(other)
if not isinstance(other, ParserElement):
warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2)
return None
return other - self | True | isinstance(other, basestring) | isinstance(other, basestring) | 0.6507130265235901 |
523 | 521 | def __rsub__(self, other):
"""
Implementation of - operator when left operand is not a C{L{ParserElement}}
"""
if isinstance(other, basestring):
other = ParserElement._literalStringClass(other)
if<mask>:
warnings.warn('Cannot combine element of type %s with ParserElement' % type(other), SyntaxWarning, stacklevel=2)
return None
return other - self | True | not isinstance(other, ParserElement) | not isinstance(other, ParserElement) | 0.6486527919769287 |
524 | 522 | def cmd(cmd, name=None):
"""Run any command to client "name".
"""
if<mask>:
print('Please give a client name.')
else:
client = fabutils.select_client_cfg(name)
with cd(fabutils.wd(client, CFG)):
with prefix(VENV_ACTIVATE.format(client.name)):
run(cmd) | True | not name | not name | 0.6591615676879883 |
525 | 523 | def first_factorization(self, threshold: Optional[float]=None):
"""Factorize :math:`V = 1/2 \\sum_{ijkl, st}V_{ijkl} is^ jt^ kt ls` by
transforming to chemist notation.
Args:
threshold: threshold for factorization.
Returns:
Tuple of (eigenvalues of factors, one-body ops in factors, one
body correction).
"""
if<mask>:
threshold = self.icut
if self.spin_basis:
eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis)
else:
eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(0.5 * self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis)
return (eigenvalues, one_body_squares, one_body_correction) | True | threshold is None | threshold is None | 0.6589322090148926 |
526 | 524 | def first_factorization(self, threshold: Optional[float]=None):
"""Factorize :math:`V = 1/2 \\sum_{ijkl, st}V_{ijkl} is^ jt^ kt ls` by
transforming to chemist notation.
Args:
threshold: threshold for factorization.
Returns:
Tuple of (eigenvalues of factors, one-body ops in factors, one
body correction).
"""
if threshold is None:
threshold = self.icut
if<mask>:
eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis)
else:
eigenvalues, one_body_squares, one_body_correction, _ = low_rank_two_body_decomposition(0.5 * self.tei, truncation_threshold=threshold, final_rank=self.lmax, spin_basis=self.spin_basis)
return (eigenvalues, one_body_squares, one_body_correction) | False | self.spin_basis | isinstance(threshold, float) | 0.6463888883590698 |
527 | 525 | def _filter_imgs(self, min_size=32):
"""Filter images too small."""
valid_inds = []
for i, img_info in enumerate(self.data_infos):
if<mask>:
valid_inds.append(i)
return valid_inds | True | min(img_info['width'], img_info['height']) >= min_size | min(img_info['width'], img_info['height']) >= min_size | 0.6423009037971497 |
528 | 526 | def _maybe_add_bbox(obj: Dict[str, Any], ann_dict: Dict[str, Any]) -> None:
if<mask>:
return
obj['bbox'] = ann_dict['bbox']
obj['bbox_mode'] = BoxMode.XYWH_ABS | True | 'bbox' not in ann_dict | 'bbox' not in ann_dict | 0.657332181930542 |
529 | 527 | def SetupScript(self, target_arch):
script_data = self._SetupScriptInternal(target_arch)
script_path = script_data[0]
if<mask>:
raise Exception('%s is missing - make sure VC++ tools are installed.' % script_path)
return script_data | True | not os.path.exists(script_path) | not os.path.exists(script_path) | 0.6460453271865845 |
530 | 528 | def sendNegotiate(self, negotiateMessage):
negotiate = NTLMAuthNegotiate()
negotiate.fromString(negotiateMessage)
negotiate['flags'] ^= NTLMSSP_NEGOTIATE_ALWAYS_SIGN
challenge = NTLMAuthChallenge()
if<mask>:
challenge.fromString(self.sendNegotiatev1(negotiateMessage))
else:
challenge.fromString(self.sendNegotiatev2(negotiateMessage))
self.sessionData['CHALLENGE_MESSAGE'] = challenge
return challenge | False | self.session.getDialect() == SMB_DIALECT | self.version <= 2 | 0.6515882015228271 |
531 | 529 | def _apply_diagonal_coulomb(self, hamil: 'diagonal_coulomb.DiagonalCoulomb') -> 'Wavefunction':
"""Applies the diagonal coulomb operator to the wavefunction
Args:
hamil (DiagonalCoulomb): diagonal coulomb Hamiltonian to be applied
Returns:
(Wavefunction): resulting wave function
"""
out = copy.deepcopy(self)
for _, sector in out._civec.items():
diag, array = (hamil._tensor[1], hamil._tensor[2])
sector.apply_diagonal_coulomb(diag, array, inplace=True)
if<mask>:
out.ax_plus_y(hamil.e_0(), self)
return out | False | numpy.abs(hamil.e_0()) > 1e-15 | hamil.e_0 is not None | 0.6430132985115051 |
532 | 530 | def plot_candidates(candidates, config, ts_min=50, outdir='./'):
for candidate in candidates:
if<mask>:
continue
logger.info('Plotting %s (%.2f,%.2f)...' % (candidate['name'], candidate['glon'], candidate['glat']))
plotter = ugali.utils.plotting.ObjectPlotter(candidate, config)
fig, ax = plotter.plot4()
basename = '%s_plot.png' % candidate['name']
outfile = os.path.join(outdir, basename)
plt.savefig(outfile) | False | candidate['TS'] < ts_min | candidate['glon'] == '2f' or candidate['glat'] < ts_min | 0.6493653059005737 |
533 | 531 | def evaluate(self, runner, new_labels):
hist = np.bincount(new_labels, minlength=runner.model.module.memory_bank.num_classes)
empty_cls = (hist == 0).sum()
minimal_cls_size, maximal_cls_size = (hist.min(), hist.max())
if<mask>:
print_log('empty_num: {}\tmin_cluster: {}\tmax_cluster:{}'.format(empty_cls.item(), minimal_cls_size.item(), maximal_cls_size.item()), logger='root') | False | runner.rank == 0 | self.verbose | 0.6596526503562927 |
534 | 532 | def __eq__(self, other):
if<mask>:
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True | True | not isinstance(other, self.__class__) | not isinstance(other, self.__class__) | 0.64579176902771 |
535 | 533 | def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if<mask>:
return False
return True | True | my_val != other_val | my_val != other_val | 0.6494433879852295 |
536 | 534 | def call_init(obj, *args, **kwargs):
init(obj, *args, **kwargs)
if<mask>:
obj.set('interval', 'never') | True | obj.parameter('interval') is None | obj.parameter('interval') is None | 0.6427839994430542 |
537 | 535 | def custom_placeholder_format(value_dict, placeholder_match):
key = placeholder_match.group(1).lower()
value = value_dict.get(key, key) or '_'
if<mask>:
first_key = list(value.keys())[0]
value = str(value[first_key][0]) if isinstance(value[first_key], list) and value[first_key] else str(value[first_key])
return str(value)[:50] | False | isinstance(value, dict) and value | isinstance(value, dict) | 0.6469836235046387 |
538 | 536 | def OnDeserialized(self):
"""
Test deserialization success.
Raises:
Exception: if there are no inputs for the transaction.
"""
if<mask>:
raise Exception('No inputs for miner transaction') | False | len(self.inputs) is not 0 | self.TransactionType == TransactionType.TFTP | 0.6440246105194092 |
539 | 537 | def get_page_number(self) -> typing.Optional[Decimal]:
"""
This function returns the page number
"""
kids = self._page.get_parent().get_parent().get('Kids')
l = int(self._page.get_parent().get_parent().get('Count'))
for i in range(0, l):
if<mask>:
return Decimal(i)
return None | False | kids[i] == self._page | kids[i] in self._pages[i] | 0.6522336006164551 |
540 | 538 | def preprocess(i):
os_info = i['os_info']
if<mask>:
return {'return': 1, 'error': 'Windows is not supported in this script yet'}
env = i['env']
automation = i['automation']
recursion_spaces = i['recursion_spaces']
need_version = env.get('CM_VERSION', '')
if need_version == '':
return {'return': 1, 'error': 'internal problem - CM_VERSION is not defined in env'}
print(recursion_spaces + ' # Requested version: {}'.format(need_version))
return {'return': 0} | True | os_info['platform'] == 'windows' | os_info['platform'] == 'windows' | 0.6515749096870422 |
541 | 539 | def preprocess(i):
os_info = i['os_info']
if os_info['platform'] == 'windows':
return {'return': 1, 'error': 'Windows is not supported in this script yet'}
env = i['env']
automation = i['automation']
recursion_spaces = i['recursion_spaces']
need_version = env.get('CM_VERSION', '')
if<mask>:
return {'return': 1, 'error': 'internal problem - CM_VERSION is not defined in env'}
print(recursion_spaces + ' # Requested version: {}'.format(need_version))
return {'return': 0} | False | need_version == '' | need_version is None | 0.6547970771789551 |
542 | 540 | def format_results(self, results, jsonfile_prefix=None, **kwargs):
"""Format the results to json (standard format for COCO evaluation).
Args:
results (list): Testing results of the dataset.
jsonfile_prefix (str | None): The prefix of json files. It includes
the file path and the prefix of filename, e.g., "a/b/prefix".
If not specified, a temp file will be created. Default: None.
Returns:
tuple: (result_files, tmp_dir), result_files is a dict containing
the json filepaths, tmp_dir is the temporal directory created
for saving json files when jsonfile_prefix is not specified.
"""
assert isinstance(results, list), 'results must be a list'
assert len(results) == len(self), 'The length of results is not equal to the dataset len: {} != {}'.format(len(results), len(self))
if<mask>:
tmp_dir = tempfile.TemporaryDirectory()
jsonfile_prefix = osp.join(tmp_dir.name, 'results')
else:
tmp_dir = None
result_files = self.results2json(results, jsonfile_prefix)
return (result_files, tmp_dir) | True | jsonfile_prefix is None | jsonfile_prefix is None | 0.6473140716552734 |
543 | 541 | def set_welc_preference(chat_id, should_welcome):
with INSERTION_LOCK:
curr = SESSION.query(Welcome).get(str(chat_id))
if<mask>:
curr = Welcome(str(chat_id), should_welcome=should_welcome)
else:
curr.should_welcome = should_welcome
SESSION.add(curr)
SESSION.commit() | True | not curr | not curr | 0.6591516733169556 |
544 | 542 | def _load_adapters(self, model, resume_from_checkpoint):
adapter_loaded = False
for file_name in os.listdir(resume_from_checkpoint):
if<mask>:
if ',' not in file_name and 'adapter_config.json' in os.listdir(os.path.join(resume_from_checkpoint, file_name)):
model.load_adapter(os.path.join(os.path.join(resume_from_checkpoint, file_name)))
adapter_loaded = True
return adapter_loaded | False | os.path.isdir(os.path.join(resume_from_checkpoint, file_name)) | file_name.endswith('.json') | 0.6481344699859619 |
545 | 543 | def _load_adapters(self, model, resume_from_checkpoint):
adapter_loaded = False
for file_name in os.listdir(resume_from_checkpoint):
if os.path.isdir(os.path.join(resume_from_checkpoint, file_name)):
if<mask>:
model.load_adapter(os.path.join(os.path.join(resume_from_checkpoint, file_name)))
adapter_loaded = True
return adapter_loaded | False | ',' not in file_name and 'adapter_config.json' in os.listdir(os.path.join(resume_from_checkpoint, file_name)) | model.__class__.__name__ in ['OriA', 'OriA'] | 0.6490288376808167 |
546 | 544 | def tearDown(self):
ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts)
if<mask>:
raise ExecutionError('Failed to umount the vol & cleanup Volume')
g.log.info('Successful in umounting the volume and Cleanup')
self.get_super_method(self, 'tearDown')() | True | not ret | not ret | 0.6598210334777832 |
547 | 545 | def _prune(self):
if<mask>:
now = time()
for idx, (key, (expires, _)) in enumerate(self._cache.items()):
if expires <= now or idx % 3 == 0:
self._cache.pop(key, None) | False | len(self._cache) > self._threshold | self._cache | 0.6458609104156494 |
548 | 546 | def _prune(self):
if len(self._cache) > self._threshold:
now = time()
for idx, (key, (expires, _)) in enumerate(self._cache.items()):
if<mask>:
self._cache.pop(key, None) | False | expires <= now or idx % 3 == 0 | expires > self._threshold or idx > now | 0.6490975022315979 |
549 | 547 | @property
def full_address(self):
addr = self.mailing_address_line_1
if<mask>:
addr += f' {self.mailing_address_line_2}'
return f'{addr}, {self.city}, {self.state}, {self.country}' | True | self.mailing_address_line_2 | self.mailing_address_line_2 | 0.6500547528266907 |
550 | 548 | def _compare(self, other, method):
if<mask>:
return NotImplemented
return method(self._key, other._key) | True | not isinstance(other, _BaseVersion) | not isinstance(other, _BaseVersion) | 0.6475049257278442 |
551 | 549 | def upButtonPressed(self):
str_list = self.slm.stringList()
if<mask>:
str_list[self.hero_index], str_list[self.hero_index - 1] = (str_list[self.hero_index - 1], str_list[self.hero_index])
self.slm.setStringList(str_list) | False | 1 <= self.hero_index < len(str_list) | 0 <= self.hero_index < len(str_list) | 0.6467466950416565 |
552 | 550 | def _get_digest(self, info):
"""
Get a digest from a dictionary by looking at keys of the form
'algo_digest'.
Returns a 2-tuple (algo, digest) if found, else None. Currently
looks only for SHA256, then MD5.
"""
result = None
for algo in ('sha256', 'md5'):
key = '%s_digest' % algo
if<mask>:
result = (algo, info[key])
break
return result | True | key in info | key in info | 0.6571716070175171 |
553 | 551 | @staticmethod
def serialize_agreed_variation(agreed_variation, with_users=False):
if<mask>:
return agreed_variation
user = User.query.filter(User.id == agreed_variation['agreedUserId']).first()
if not user:
return agreed_variation
return dict(agreed_variation, **{'agreedUserName': user.name, 'agreedUserEmail': user.email_address}) | False | not (with_users and agreed_variation.get('agreedUserId')) | with_users | 0.6502817273139954 |
554 | 552 | @staticmethod
def serialize_agreed_variation(agreed_variation, with_users=False):
if not (with_users and agreed_variation.get('agreedUserId')):
return agreed_variation
user = User.query.filter(User.id == agreed_variation['agreedUserId']).first()
if<mask>:
return agreed_variation
return dict(agreed_variation, **{'agreedUserName': user.name, 'agreedUserEmail': user.email_address}) | True | not user | not user | 0.663364589214325 |
555 | 553 | def _set_momenta(module, momenta):
if<mask>:
module.momentum = momenta[module] | False | issubclass(module.__class__, torch.nn.modules.batchnorm._BatchNorm) | module in momenta | 0.6470762491226196 |
556 | 554 | def __init__(self, columns=None, label=None, alias=None, group=None, **kwargs):
"""
"""
if<mask>:
columns = []
super().__init__()
self.columns = columns
self.alias = alias
self.group = group
self._label = label
self.kwargs = kwargs | True | columns is None | columns is None | 0.6668776273727417 |
557 | 555 | def remove(self, filepath: Union[str, Path]) -> None:
"""Remove a file.
Args:
filepath (str or Path): Path to be removed.
Raises:
FileNotFoundError: If filepath does not exist, an FileNotFoundError
will be raised.
IsADirectoryError: If filepath is a directory, an IsADirectoryError
will be raised.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.remove(filepath)
"""
if<mask>:
raise NotImplementedError('Current version of Petrel Python SDK has not supported the `delete` method, please use a higher version or dev branch instead.')
if not self.exists(filepath):
raise FileNotFoundError(f'filepath {filepath} does not exist')
if self.isdir(filepath):
raise IsADirectoryError('filepath should be a file')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
self._client.delete(filepath) | False | not has_method(self._client, 'delete') | self._client is None | 0.6441586017608643 |
558 | 556 | def remove(self, filepath: Union[str, Path]) -> None:
"""Remove a file.
Args:
filepath (str or Path): Path to be removed.
Raises:
FileNotFoundError: If filepath does not exist, an FileNotFoundError
will be raised.
IsADirectoryError: If filepath is a directory, an IsADirectoryError
will be raised.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.remove(filepath)
"""
if not has_method(self._client, 'delete'):
raise NotImplementedError('Current version of Petrel Python SDK has not supported the `delete` method, please use a higher version or dev branch instead.')
if<mask>:
raise FileNotFoundError(f'filepath {filepath} does not exist')
if self.isdir(filepath):
raise IsADirectoryError('filepath should be a file')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
self._client.delete(filepath) | False | not self.exists(filepath) | not os.path.exists(filepath) | 0.6472853422164917 |
559 | 557 | def remove(self, filepath: Union[str, Path]) -> None:
"""Remove a file.
Args:
filepath (str or Path): Path to be removed.
Raises:
FileNotFoundError: If filepath does not exist, an FileNotFoundError
will be raised.
IsADirectoryError: If filepath is a directory, an IsADirectoryError
will be raised.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.remove(filepath)
"""
if not has_method(self._client, 'delete'):
raise NotImplementedError('Current version of Petrel Python SDK has not supported the `delete` method, please use a higher version or dev branch instead.')
if not self.exists(filepath):
raise FileNotFoundError(f'filepath {filepath} does not exist')
if<mask>:
raise IsADirectoryError('filepath should be a file')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
self._client.delete(filepath) | False | self.isdir(filepath) | not isinstance(filepath, Path) | 0.6468396186828613 |
560 | 558 | def check_plate(self, text_list):
plate_all = {'plate': []}
for text_pcar in text_list:
platelicense = ''
for text_info in text_pcar:
text = text_info[0][0][0]
if<mask>:
platelicense = self.replace_cn_code(text)
plate_all['plate'].append(platelicense)
return plate_all | False | len(text) > 2 and len(text) < 10 | text | 0.6466926336288452 |
561 | 559 | def just_fix_windows_console():
global fixed_windows_console
if<mask>:
return
if fixed_windows_console:
return
if wrapped_stdout is not None or wrapped_stderr is not None:
return
new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
if new_stdout.convert:
sys.stdout = new_stdout
new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
if new_stderr.convert:
sys.stderr = new_stderr
fixed_windows_console = True | False | sys.platform != 'win32' | hasattr(sys, 'ps1') | 0.6487252116203308 |
562 | 560 | def just_fix_windows_console():
global fixed_windows_console
if sys.platform != 'win32':
return
if<mask>:
return
if wrapped_stdout is not None or wrapped_stderr is not None:
return
new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
if new_stdout.convert:
sys.stdout = new_stdout
new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
if new_stderr.convert:
sys.stderr = new_stderr
fixed_windows_console = True | True | fixed_windows_console | fixed_windows_console | 0.6512162685394287 |
563 | 561 | def just_fix_windows_console():
global fixed_windows_console
if sys.platform != 'win32':
return
if fixed_windows_console:
return
if<mask>:
return
new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
if new_stdout.convert:
sys.stdout = new_stdout
new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
if new_stderr.convert:
sys.stderr = new_stderr
fixed_windows_console = True | False | wrapped_stdout is not None or wrapped_stderr is not None | hasattr(sys, 'stdout') and sys.stdout == 'get_stderr' | 0.6467798948287964 |
564 | 562 | def just_fix_windows_console():
global fixed_windows_console
if sys.platform != 'win32':
return
if fixed_windows_console:
return
if wrapped_stdout is not None or wrapped_stderr is not None:
return
new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
if<mask>:
sys.stdout = new_stdout
new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
if new_stderr.convert:
sys.stderr = new_stderr
fixed_windows_console = True | True | new_stdout.convert | new_stdout.convert | 0.6601179838180542 |
565 | 563 | def just_fix_windows_console():
global fixed_windows_console
if sys.platform != 'win32':
return
if fixed_windows_console:
return
if wrapped_stdout is not None or wrapped_stderr is not None:
return
new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
if new_stdout.convert:
sys.stdout = new_stdout
new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
if<mask>:
sys.stderr = new_stderr
fixed_windows_console = True | True | new_stderr.convert | new_stderr.convert | 0.6581211090087891 |
566 | 564 | def _open_in_browser(self):
url = get_templated_url(unicode(self.url_combo.currentText()).strip())
if<mask>:
open_url(QUrl(url)) | True | url | url | 0.6722559332847595 |
567 | 565 | def sanitize_token(self, token):
token_type = token['type']
if<mask>:
name = token['name']
namespace = token['namespace']
if (namespace, name) in self.allowed_elements or (namespace is None and (namespaces['html'], name) in self.allowed_elements):
return self.allowed_token(token)
else:
return self.disallowed_token(token)
elif token_type == 'Comment':
pass
else:
return token | False | token_type in ('StartTag', 'EndTag', 'EmptyTag') | token_type == 'Text' | 0.6465229392051697 |
568 | 566 | def sanitize_token(self, token):
token_type = token['type']
if token_type in ('StartTag', 'EndTag', 'EmptyTag'):
name = token['name']
namespace = token['namespace']
if<mask>:
return self.allowed_token(token)
else:
return self.disallowed_token(token)
elif token_type == 'Comment':
pass
else:
return token | False | (namespace, name) in self.allowed_elements or (namespace is None and (namespaces['html'], name) in self.allowed_elements) | name.lower() in namespace.lower() and namespace.lower() in self.allowed_tokens | 0.6470671892166138 |
569 | 567 | def sanitize_token(self, token):
token_type = token['type']
if token_type in ('StartTag', 'EndTag', 'EmptyTag'):
name = token['name']
namespace = token['namespace']
if (namespace, name) in self.allowed_elements or (namespace is None and (namespaces['html'], name) in self.allowed_elements):
return self.allowed_token(token)
else:
return self.disallowed_token(token)
elif<mask>:
pass
else:
return token | False | token_type == 'Comment' | token_type == 'EndTag' | 0.6508560180664062 |
570 | 568 | def forward(self, inputs):
if<mask>:
return (self.layers(inputs[0]), *inputs[1:])
else:
return self.layers(inputs) | False | isinstance(inputs, (list, tuple)) | self.training | 0.6414899230003357 |
571 | 569 | def applyZPNorm(metricValue, plotDict):
if<mask>:
if plotDict['zp'] is not None:
metricValue = metricValue - plotDict['zp']
if 'normVal' in plotDict:
if plotDict['normVal'] is not None:
metricValue = metricValue / plotDict['normVal']
return metricValue | True | 'zp' in plotDict | 'zp' in plotDict | 0.6547361016273499 |
572 | 570 | def applyZPNorm(metricValue, plotDict):
if 'zp' in plotDict:
if plotDict['zp'] is not None:
metricValue = metricValue - plotDict['zp']
if<mask>:
if plotDict['normVal'] is not None:
metricValue = metricValue / plotDict['normVal']
return metricValue | True | 'normVal' in plotDict | 'normVal' in plotDict | 0.6528257131576538 |
573 | 571 | def applyZPNorm(metricValue, plotDict):
if 'zp' in plotDict:
if<mask>:
metricValue = metricValue - plotDict['zp']
if 'normVal' in plotDict:
if plotDict['normVal'] is not None:
metricValue = metricValue / plotDict['normVal']
return metricValue | True | plotDict['zp'] is not None | plotDict['zp'] is not None | 0.6527303457260132 |
574 | 572 | def applyZPNorm(metricValue, plotDict):
if 'zp' in plotDict:
if plotDict['zp'] is not None:
metricValue = metricValue - plotDict['zp']
if 'normVal' in plotDict:
if<mask>:
metricValue = metricValue / plotDict['normVal']
return metricValue | True | plotDict['normVal'] is not None | plotDict['normVal'] is not None | 0.6515785455703735 |
575 | 573 | def _eval(self, part_keys: List[Dict[str, str]], part_input: Dict[str, Any]) -> Any:
for key, value in zip(part_keys, part_input['Values']):
if<mask>:
return _cast(key['Type'], value)
raise InvalidInputException('GetPartitions', f"Unknown column '{self.ident}'") | False | self.ident == key['Name'] | key['Name'] == self.ident | 0.6554266214370728 |
576 | 574 | def get_target_label(self, input, target_is_real):
"""Get target label.
Args:
input (Tensor): Input tensor.
target_is_real (bool): Whether the target is real or fake.
Returns:
(bool | Tensor): Target tensor. Return bool for wgan, otherwise,
return Tensor.
"""
if<mask>:
return target_is_real
target_val = self.real_label_val if target_is_real else self.fake_label_val
return input.new_ones(input.size()) * target_val | False | self.gan_type == 'wgan' | self.gan_type in ['wgan', 'wgan_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus_softplus] | 0.6473735570907593 |
577 | 575 | def wait_for(lock_name, value, wait=10, timeout=LOCK_EXPIRE):
"""Utility function to wait until the given lock has been released"""
old_lock = None
if<mask>:
logger.warn('A task is already running. Wait for it: {}/{}'.format(lock_name, acquire_lock.lock))
old_lock = acquire_lock.lock
while not acquire_lock(lock_name, value, timeout):
sleep(wait)
logger.debug('lock released by: {}'.format(old_lock))
return old_lock | False | not acquire_lock(lock_name, value, timeout) | acquire_lock is not None | 0.645682692527771 |
578 | 576 | @break_exec_regex.setter
def break_exec_regex(self, break_exec_regex):
"""
Setter for break_exec_regex
:param break_exec_regex: String with regex, compiled regex object or None
:return: None
"""
if<mask>:
break_exec_regex = re.compile(break_exec_regex)
self._break_exec_regex = break_exec_regex | False | isinstance(break_exec_regex, six.string_types) | break_exec_regex is not None | 0.6424634456634521 |
579 | 577 | def sys_x86_64_readlink(jitter, linux_env):
path = jitter.cpu.RDI
buf = jitter.cpu.RSI
bufsize = jitter.cpu.RDX
rpath = jitter.get_c_str(path)
log.debug('sys_readlink(%r, %x, %x)', rpath, buf, bufsize)
link = linux_env.filesystem.readlink(rpath)
if<mask>:
jitter.cpu.RAX = -1
else:
data = link[:bufsize - 1] + b'\x00'
jitter.vm.set_mem(buf, data)
jitter.cpu.RAX = len(data) - 1 | False | link is None | link.startswith(link) | 0.6551709175109863 |
580 | 578 | def get_test_cluster_template(context, **kw):
"""Return a ClusterTemplate object with appropriate attributes.
NOTE: The object leaves the attributes marked as changed, such
that a create() could be used to commit it to the DB.
"""
db_cluster_template = db_utils.get_test_cluster_template(**kw)
cluster_template = objects.ClusterTemplate(context)
if<mask>:
del db_cluster_template['id']
for key in db_cluster_template:
setattr(cluster_template, key, db_cluster_template[key])
return cluster_template | True | 'id' not in kw | 'id' not in kw | 0.6585248708724976 |
581 | 579 | def __missing__(self, key):
if<mask>:
raise KeyError(key)
self[key] = value = self.default_factory()
return value | True | self.default_factory is None | self.default_factory is None | 0.6501814126968384 |
582 | 580 | def main(config):
set_dirs(config)
with tf.device(config.device):
if<mask>:
_train(config)
elif config.mode == 'test' or config.mode == 'dev':
_test(config)
elif config.mode == 'forward':
_forward(config)
else:
raise ValueError("invalid value for'mode': {}".format(config.mode)) | True | config.mode == 'train' | config.mode == 'train' | 0.657152533531189 |
583 | 581 | def main(config):
set_dirs(config)
with tf.device(config.device):
if config.mode == 'train':
_train(config)
elif<mask>:
_test(config)
elif config.mode == 'forward':
_forward(config)
else:
raise ValueError("invalid value for'mode': {}".format(config.mode)) | False | config.mode == 'test' or config.mode == 'dev' | config.mode == 'test' | 0.6475286483764648 |
584 | 582 | def main(config):
set_dirs(config)
with tf.device(config.device):
if config.mode == 'train':
_train(config)
elif config.mode == 'test' or config.mode == 'dev':
_test(config)
elif<mask>:
_forward(config)
else:
raise ValueError("invalid value for'mode': {}".format(config.mode)) | True | config.mode == 'forward' | config.mode == 'forward' | 0.6519702076911926 |
585 | 583 | def __getattr__(self, name):
if<mask>:
return self.fileobj.fileno
else:
raise AttributeError(name) | True | name == 'fileno' | name == 'fileno' | 0.6563180088996887 |
586 | 584 | def _test_set_vlan_vid(self, vid, mask=None):
header = ofproto.OXM_OF_VLAN_VID
match = OFPMatch()
if<mask>:
match.set_vlan_vid(vid)
else:
header = ofproto.OXM_OF_VLAN_VID_W
match.set_vlan_vid_masked(vid, mask)
self._test_serialize_and_parser(match, header, vid, mask) | True | mask is None | mask is None | 0.6560935974121094 |
587 | 585 | def get_file_name_time(self):
a = str(datetime.now())
hour = a.split(' ')[-1].split(':')[0]
num = int(hour) / 3
num = int(num) * 3
if<mask>:
num = 24
a = str(datetime.now() - timedelta(days=1))
num = a.split(' ')[0] + ' ' + str(num)
return num | True | num == 0 | num == 0 | 0.6623152494430542 |
588 | 586 | def get_rank():
if<mask>:
return 0
if not dist.is_initialized():
return 0
return dist.get_rank() | True | not dist.is_available() | not dist.is_available() | 0.651805579662323 |
589 | 587 | def get_rank():
if not dist.is_available():
return 0
if<mask>:
return 0
return dist.get_rank() | True | not dist.is_initialized() | not dist.is_initialized() | 0.650885820388794 |
590 | 588 | def get_random_user_agent(agent_list=UA_CACHE):
if<mask>:
ua_file = file(UA_FILE)
for line in ua_file:
line = line.strip()
if line:
agent_list.append(line)
ua = random.choice(UA_CACHE)
return ua | False | not len(agent_list) | os.path.exists(UA_FILE) | 0.6439211368560791 |
591 | 589 | def get_random_user_agent(agent_list=UA_CACHE):
if not len(agent_list):
ua_file = file(UA_FILE)
for line in ua_file:
line = line.strip()
if<mask>:
agent_list.append(line)
ua = random.choice(UA_CACHE)
return ua | True | line | line | 0.6648772954940796 |
592 | 590 | def aliases_to_ordered_dict(_d):
"""
Unpacks a dict-with-lists to an ordered dict with keys sorted by length
"""
arr = []
for original, aliases in _d.items():
arr.append((original, original))
if<mask>:
aliases = [original]
elif isinstance(aliases, str):
aliases = [aliases]
for alias in aliases:
arr.append((alias, original))
return OrderedDict(sorted(arr, key=lambda _kv: 0 - len(_kv[0]))) | False | isinstance(aliases, bool) | not aliases | 0.6411546468734741 |
593 | 591 | def aliases_to_ordered_dict(_d):
"""
Unpacks a dict-with-lists to an ordered dict with keys sorted by length
"""
arr = []
for original, aliases in _d.items():
arr.append((original, original))
if isinstance(aliases, bool):
aliases = [original]
elif<mask>:
aliases = [aliases]
for alias in aliases:
arr.append((alias, original))
return OrderedDict(sorted(arr, key=lambda _kv: 0 - len(_kv[0]))) | False | isinstance(aliases, str) | isinstance(aliases, list) | 0.6433718800544739 |
594 | 592 | def stop(self) -> bool:
if<mask>:
return run([join(sep, 'usr', 'sbin', 'nginx'), '-s', 'stop'], stdin=DEVNULL, stderr=STDOUT, check=False).returncode == 0
return self.apiCaller.send_to_apis('POST', '/stop') | True | self._type == 'local' | self._type == 'local' | 0.6516494750976562 |
595 | 593 | def decode(self, label: bytes) -> str:
"""Decode *label*."""
if<mask>:
return super().decode(label)
if label == b'':
return ''
try:
return _escapify(encodings.idna.ToUnicode(label))
except Exception as e:
raise IDNAException(idna_exception=e) | False | not self.strict_decode | isinstance(label, bytes) | 0.6460101008415222 |
596 | 594 | def decode(self, label: bytes) -> str:
"""Decode *label*."""
if not self.strict_decode:
return super().decode(label)
if<mask>:
return ''
try:
return _escapify(encodings.idna.ToUnicode(label))
except Exception as e:
raise IDNAException(idna_exception=e) | False | label == b'' | len(label) == 0 | 0.6557474732398987 |
597 | 595 | @cached_property
def _lsb_release_info(self):
"""
Get the information items from the lsb_release command output.
Returns:
A dictionary containing all information items.
"""
if<mask>:
return {}
with open(os.devnull, 'w') as devnull:
try:
cmd = ('lsb_release', '-a')
stdout = subprocess.check_output(cmd, stderr=devnull)
except OSError:
return {}
content = stdout.decode(sys.getfilesystemencoding()).splitlines()
return self._parse_lsb_release_content(content) | True | not self.include_lsb | not self.include_lsb | 0.6448009014129639 |
598 | 596 | def extract_img_feat(self, img):
"""Directly extract features from the img backbone+neck."""
x = self.img_backbone(img)
if<mask>:
x = self.img_neck(x)
return x | True | self.with_img_neck | self.with_img_neck | 0.6567720770835876 |
599 | 597 | def find_by_tag(tag_str, to_output_list=True, only_explicit_tag=False) -> List[SiacNote]:
if<mask>:
return []
pinned = [] if not get_index() else get_index().pinned
tags = tag_str.split(' ')
query = _tag_query(tags)
conn = _get_connection()
res = conn.execute('select * from notes %s order by id desc' % query).fetchall()
conn.close()
if not to_output_list:
return res
return _to_notes(res, pinned) | False | len(tag_str.strip()) == 0 | only_explicit_tag | 0.648457407951355 |
600 | 598 | def find_by_tag(tag_str, to_output_list=True, only_explicit_tag=False) -> List[SiacNote]:
if len(tag_str.strip()) == 0:
return []
pinned = [] if not get_index() else get_index().pinned
tags = tag_str.split(' ')
query = _tag_query(tags)
conn = _get_connection()
res = conn.execute('select * from notes %s order by id desc' % query).fetchall()
conn.close()
if<mask>:
return res
return _to_notes(res, pinned) | False | not to_output_list | only_explicit_tag | 0.6490492820739746 |
601 | 599 | def _advance(self):
self._top = (self._top + 1) % self._max_buffer_size
if<mask>:
self._size += 1 | True | self._size < self._max_buffer_size | self._size < self._max_buffer_size | 0.6476047039031982 |
602 | 600 | @GlancesPluginModel._log_result_decorator
def update(self):
"""Update core stats.
Stats is a dict (with both physical and log cpu number) instead of a integer.
"""
stats = self.get_init_value()
if<mask>:
try:
stats['phys'] = psutil.cpu_count(logical=False)
stats['log'] = psutil.cpu_count()
except NameError:
self.reset()
elif self.input_method == 'snmp':
pass
self.stats = stats
return self.stats | False | self.input_method == 'local' | self.input_method == 'physical' | 0.6471355557441711 |
603 | 601 | @GlancesPluginModel._log_result_decorator
def update(self):
"""Update core stats.
Stats is a dict (with both physical and log cpu number) instead of a integer.
"""
stats = self.get_init_value()
if self.input_method == 'local':
try:
stats['phys'] = psutil.cpu_count(logical=False)
stats['log'] = psutil.cpu_count()
except NameError:
self.reset()
elif<mask>:
pass
self.stats = stats
return self.stats | True | self.input_method == 'snmp' | self.input_method == 'snmp' | 0.6467959880828857 |
604 | 602 | def arg_col_level(v_self):
columns = v_self.columns
if<mask>:
yield from Select(range(0, columns.nlevels)) | False | columns.nlevels > 1 | columns | 0.6525924205780029 |
605 | 603 | def pairwise(sents):
_ref, _hypo = ([], [])
for s in sents:
for i in range(len(s)):
for j in range(len(s)):
if<mask>:
_ref.append(s[i])
_hypo.append(s[j])
return corpus_bleu(_hypo, [_ref]) | True | i != j | i != j | 0.6714960336685181 |
606 | 604 | def _read(self, addr, length, pad=False):
offset = self.translate(addr)
if<mask>:
if pad:
return '\x00' * length
else:
return None
win32file.SetFilePointer(self.fhandle, offset, 0)
data = win32file.ReadFile(self.fhandle, length)[1]
return data | False | offset == None | not self.fhandle | 0.657290518283844 |
607 | 605 | def _read(self, addr, length, pad=False):
offset = self.translate(addr)
if offset == None:
if<mask>:
return '\x00' * length
else:
return None
win32file.SetFilePointer(self.fhandle, offset, 0)
data = win32file.ReadFile(self.fhandle, length)[1]
return data | True | pad | pad | 0.6718904376029968 |
608 | 606 | def bind_processor(self, dialect):
def process(value):
if<mask>:
return None
elif isinstance(value, basestring):
return value
elif dialect.datetimeformat == 'internal':
return value.strftime('%Y%m%d')
elif dialect.datetimeformat == 'iso':
return value.strftime('%Y-%m-%d')
else:
raise exc.InvalidRequestError("datetimeformat '%s' is not supported." % (dialect.datetimeformat,))
return process | True | value is None | value is None | 0.6555759906768799 |
609 | 607 | def bind_processor(self, dialect):
def process(value):
if value is None:
return None
elif<mask>:
return value
elif dialect.datetimeformat == 'internal':
return value.strftime('%Y%m%d')
elif dialect.datetimeformat == 'iso':
return value.strftime('%Y-%m-%d')
else:
raise exc.InvalidRequestError("datetimeformat '%s' is not supported." % (dialect.datetimeformat,))
return process | False | isinstance(value, basestring) | dialect.datetimeformat == 'internal' | 0.6456430554389954 |
610 | 608 | def bind_processor(self, dialect):
def process(value):
if value is None:
return None
elif isinstance(value, basestring):
return value
elif<mask>:
return value.strftime('%Y%m%d')
elif dialect.datetimeformat == 'iso':
return value.strftime('%Y-%m-%d')
else:
raise exc.InvalidRequestError("datetimeformat '%s' is not supported." % (dialect.datetimeformat,))
return process | True | dialect.datetimeformat == 'internal' | dialect.datetimeformat == 'internal' | 0.6472441554069519 |
611 | 609 | def bind_processor(self, dialect):
def process(value):
if value is None:
return None
elif isinstance(value, basestring):
return value
elif dialect.datetimeformat == 'internal':
return value.strftime('%Y%m%d')
elif<mask>:
return value.strftime('%Y-%m-%d')
else:
raise exc.InvalidRequestError("datetimeformat '%s' is not supported." % (dialect.datetimeformat,))
return process | True | dialect.datetimeformat == 'iso' | dialect.datetimeformat == 'iso' | 0.647171676158905 |
612 | 610 | def srr_to_srx(self, srr, **kwargs):
"""Get SRX for a SRR"""
if<mask>:
srr = [srr]
srr_df = self.sra_metadata(srr)
srr_df = srr_df.loc[srr_df['run_accession'].isin(srr)]
return _order_first(srr_df, ['run_accession', 'experiment_accession']) | False | isinstance(srr, str) | not isinstance(srr, str) | 0.6532561779022217 |
613 | 611 | def bar_update(count, block_size, total_size):
if<mask>:
pbar.total = total_size
progress_bytes = count * block_size
pbar.update(progress_bytes - pbar.n) | True | pbar.total is None and total_size | pbar.total is None and total_size | 0.6473742723464966 |
614 | 612 | def __init__(self, url, **kwargs):
mockread.reset_mock()
if<mask>:
self.a = url
else:
self.code = 200
self.msg = 'Ok'
self.a = BytesIO(url) | False | isinstance(url, Exception) | url.startswith('file://') | 0.6497751474380493 |
615 | 613 | def on_dir_btn(self):
fname = str(QFileDialog.getExistingDirectory(self, 'Select your markdown folder'))
if<mask>:
fname = fname.replace('\\', '/')
self.md_source_input.setText(fname) | False | fname is not None and len(fname) > 0 | fname is not None | 0.6459832787513733 |
616 | 614 | def get_breathing_rate(self, botengine):
"""
Retrieve the most recent breathing_rate value
:param botengine:
:return:
"""
if<mask>:
return self.measurements[HealthDevice.MEASUREMENT_NAME_BREATHING_RATE][0][0]
return None | True | HealthDevice.MEASUREMENT_NAME_BREATHING_RATE in self.measurements | HealthDevice.MEASUREMENT_NAME_BREATHING_RATE in self.measurements | 0.6496018171310425 |
617 | 615 | def _check_and_update(key, value):
assert value is not None
if<mask>:
if not _known_status[key] == value:
raise RuntimeError('Conflict status for {}, existing status {}, new status {}'.format(key, _known_status[key], value))
_known_status[key] = value | True | key in _known_status | key in _known_status | 0.6564457416534424 |
618 | 616 | def _check_and_update(key, value):
assert value is not None
if key in _known_status:
if<mask>:
raise RuntimeError('Conflict status for {}, existing status {}, new status {}'.format(key, _known_status[key], value))
_known_status[key] = value | False | not _known_status[key] == value | value != _known_status[key] | 0.6491991281509399 |
619 | 617 | def GetChangeAddress(self, from_addr=None):
"""
Get the address where change is sent to.
Args:
from_address (UInt160): (optional) from address script hash.
Raises:
Exception: if change address could not be found.
Returns:
UInt160: script hash.
"""
if<mask>:
for contract in self._contracts.values():
if contract.ScriptHash == from_addr:
return contract.ScriptHash
for contract in self._contracts.values():
if contract.IsStandard:
return contract.ScriptHash
if len(self._contracts.values()):
for k, v in self._contracts.items():
return v
raise Exception('Could not find change address') | False | from_addr is not None | from_addr | 0.6465224027633667 |
620 | 618 | def GetChangeAddress(self, from_addr=None):
"""
Get the address where change is sent to.
Args:
from_address (UInt160): (optional) from address script hash.
Raises:
Exception: if change address could not be found.
Returns:
UInt160: script hash.
"""
if from_addr is not None:
for contract in self._contracts.values():
if contract.ScriptHash == from_addr:
return contract.ScriptHash
for contract in self._contracts.values():
if contract.IsStandard:
return contract.ScriptHash
if<mask>:
for k, v in self._contracts.items():
return v
raise Exception('Could not find change address') | False | len(self._contracts.values()) | self.GetChangeAddress() | 0.6425373554229736 |
621 | 619 | def GetChangeAddress(self, from_addr=None):
"""
Get the address where change is sent to.
Args:
from_address (UInt160): (optional) from address script hash.
Raises:
Exception: if change address could not be found.
Returns:
UInt160: script hash.
"""
if from_addr is not None:
for contract in self._contracts.values():
if contract.ScriptHash == from_addr:
return contract.ScriptHash
for contract in self._contracts.values():
if<mask>:
return contract.ScriptHash
if len(self._contracts.values()):
for k, v in self._contracts.items():
return v
raise Exception('Could not find change address') | False | contract.IsStandard | contract.ScriptHash == from_addr | 0.6464129090309143 |
622 | 620 | def GetChangeAddress(self, from_addr=None):
"""
Get the address where change is sent to.
Args:
from_address (UInt160): (optional) from address script hash.
Raises:
Exception: if change address could not be found.
Returns:
UInt160: script hash.
"""
if from_addr is not None:
for contract in self._contracts.values():
if<mask>:
return contract.ScriptHash
for contract in self._contracts.values():
if contract.IsStandard:
return contract.ScriptHash
if len(self._contracts.values()):
for k, v in self._contracts.items():
return v
raise Exception('Could not find change address') | False | contract.ScriptHash == from_addr | contract.IsStandard | 0.6496742367744446 |
623 | 621 | def get_wave_name(waves, body):
wave_name = ''
for wave in waves:
if<mask>:
wave_name += extract_alnum(wave['wave_name'])
return wave_name | False | str(wave['wave_id']) == body['waveid'] | 'wave_name' in wave | 0.6482675075531006 |
624 | 622 | def power_connectable(other):
if<mask>:
return False
if cur_pole.global_position.x != other.global_position.x and cur_pole.global_position.y != other.global_position.y and only_axis:
return False
dist = distance(cur_pole.global_position.data, other.global_position.data)
min_dist = min(cur_pole.maximum_wire_distance, other.maximum_wire_distance)
return dist <= min_dist | False | other is cur_pole | not isinstance(other, Pole) | 0.6570698022842407 |
625 | 623 | def power_connectable(other):
if other is cur_pole:
return False
if<mask>:
return False
dist = distance(cur_pole.global_position.data, other.global_position.data)
min_dist = min(cur_pole.maximum_wire_distance, other.maximum_wire_distance)
return dist <= min_dist | False | cur_pole.global_position.x != other.global_position.x and cur_pole.global_position.y != other.global_position.y and only_axis | cur_pole.is_in_wire() | 0.6506286859512329 |
626 | 624 | def is_next_letter_hun(a: str, b: str) -> bool:
if<mask>:
return True
if len(a) == 1 and len(b) == 1 and (ord(a) + 1 == ord(b)):
return True
return False | False | (a, b) in SPECIAL_NEXT_LETTER_PAIRS | a == b | 0.6456416845321655 |
627 | 625 | def is_next_letter_hun(a: str, b: str) -> bool:
if (a, b) in SPECIAL_NEXT_LETTER_PAIRS:
return True
if<mask>:
return True
return False | False | len(a) == 1 and len(b) == 1 and (ord(a) + 1 == ord(b)) | (a, b) in SPECIAL_NEXT_LETTER_PAIRS | 0.6467128396034241 |
628 | 626 | def set_reward_shaping(self, reward_shaping: dict, agent_indices: Union[int, slice]):
if<mask>:
agent_indices = slice(agent_indices, agent_indices + 1)
for agent_idx in range(agent_indices.start, agent_indices.stop):
self.current_reward_shaping[agent_idx] = reward_shaping
self.set_env_attr(agent_idx, 'unwrapped.reward_shaping_interface.reward_shaping_scheme', reward_shaping) | True | isinstance(agent_indices, int) | isinstance(agent_indices, int) | 0.6505836248397827 |
629 | 627 | def GetValue(self, actor, timeStamp=None):
"""
Returns the value associated with a specified "actor" at a specified
"timeStamp". If no timeStamp is specified, then the function will
return all values associated with the specified actor at all time
stamps.
Parameters
----------
actor: str
timeStamp: float
Returns
-------
self.stream.loc[self.timeStamp, actor] or self.stream.loc[timeStamp,
actor]: list or float, respectively.
"""
assert actor in self.stream.columns
if<mask>:
return self.stream.loc[self.timeStamp, actor]
else:
assert timeStamp in self.stream.index
return self.stream.loc[timeStamp, actor] | True | timeStamp is None | timeStamp is None | 0.6613017320632935 |
630 | 628 | def bootstrap_on_err_cb(exc: Exception) -> None:
nonlocal retries
if<mask>:
retries += 1
self._logger.warning('Failed bootstrap phase; try=%s max_retries=%s', retries, max_retries)
else:
self._logger.error('Failed bootstrap phase after %s retries (%s)', retries, exc)
raise exc | False | not isinstance(exc, InvalidToken) and (max_retries < 0 or retries < max_retries) | isinstance(exc, Exception) and retries <= max_retries | 0.6490060091018677 |
631 | 629 | @cached
def relation_to_role_and_interface(relation_name):
"""
Given the name of a relation, return the role and the name of the interface
that relation uses (where role is one of ``provides``, ``requires``, or ``peers``).
:returns: A tuple containing ``(role, interface)``, or ``(None, None)``.
"""
_metadata = metadata()
for role in ('provides', 'requires', 'peers'):
interface = _metadata.get(role, {}).get(relation_name, {}).get('interface')
if<mask>:
return (role, interface)
return (None, None) | True | interface | interface | 0.6747817993164062 |
632 | 630 | def run_safety_checker(self, image, dtype):
if<mask>:
has_nsfw_concept = None
else:
if paddle.is_tensor(x=image):
feature_extractor_input = self.image_processor.postprocess(image, output_type='pil')
else:
feature_extractor_input = self.image_processor.numpy_to_pil(image)
safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors='pd')
image, has_nsfw_concept = self.safety_checker(images=image, clip_input=safety_checker_input.pixel_values.cast(dtype))
return (image, has_nsfw_concept) | True | self.safety_checker is None | self.safety_checker is None | 0.6455807685852051 |
633 | 631 | def run_safety_checker(self, image, dtype):
if self.safety_checker is None:
has_nsfw_concept = None
else:
if<mask>:
feature_extractor_input = self.image_processor.postprocess(image, output_type='pil')
else:
feature_extractor_input = self.image_processor.numpy_to_pil(image)
safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors='pd')
image, has_nsfw_concept = self.safety_checker(images=image, clip_input=safety_checker_input.pixel_values.cast(dtype))
return (image, has_nsfw_concept) | False | paddle.is_tensor(x=image) | paddle.is_tensor(image) | 0.6434617042541504 |
634 | 632 | def construct_search(field_name):
if<mask>:
return '%s__istartswith' % field_name[1:]
elif field_name.startswith('='):
return '%s__iexact' % field_name[1:]
elif field_name.startswith('@'):
return '%s__search' % field_name[1:]
else:
return '%s__icontains' % field_name | False | field_name.startswith('^') | field_name.startswith('istartswith=') | 0.6434298157691956 |
635 | 633 | def construct_search(field_name):
if field_name.startswith('^'):
return '%s__istartswith' % field_name[1:]
elif<mask>:
return '%s__iexact' % field_name[1:]
elif field_name.startswith('@'):
return '%s__search' % field_name[1:]
else:
return '%s__icontains' % field_name | False | field_name.startswith('=') | field_name.startswith('iexact') | 0.648019015789032 |
636 | 634 | def construct_search(field_name):
if field_name.startswith('^'):
return '%s__istartswith' % field_name[1:]
elif field_name.startswith('='):
return '%s__iexact' % field_name[1:]
elif<mask>:
return '%s__search' % field_name[1:]
else:
return '%s__icontains' % field_name | False | field_name.startswith('@') | field_name.startswith('.') | 0.6440175771713257 |
637 | 635 | def encrypt(self, value, precision=None, random_value=None):
"""Encode and Paillier encrypt a real number value.
"""
if<mask>:
value = value.decode()
encoding = FixedPointNumber.encode(value, self.n, self.max_int, precision)
obfuscator = random_value or 1
ciphertext = self.raw_encrypt(encoding.encoding, random_value=obfuscator)
encryptednumber = PaillierEncryptedNumber(self, ciphertext, encoding.exponent)
if random_value is None:
encryptednumber.apply_obfuscator()
return encryptednumber | False | isinstance(value, FixedPointNumber) | isinstance(value, bytes) | 0.6498298645019531 |
638 | 636 | def encrypt(self, value, precision=None, random_value=None):
"""Encode and Paillier encrypt a real number value.
"""
if isinstance(value, FixedPointNumber):
value = value.decode()
encoding = FixedPointNumber.encode(value, self.n, self.max_int, precision)
obfuscator = random_value or 1
ciphertext = self.raw_encrypt(encoding.encoding, random_value=obfuscator)
encryptednumber = PaillierEncryptedNumber(self, ciphertext, encoding.exponent)
if<mask>:
encryptednumber.apply_obfuscator()
return encryptednumber | False | random_value is None | encoding.encoding == 'gzip' | 0.6471483707427979 |
639 | 637 | def init_data(netsettings):
init_file()
if<mask>:
netrender.init_data = False
netsettings.active_slave_index = 0
while len(netsettings.slaves) > 0:
netsettings.slaves.remove(0)
netsettings.active_blacklisted_slave_index = 0
while len(netsettings.slaves_blacklist) > 0:
netsettings.slaves_blacklist.remove(0)
netsettings.active_job_index = 0
while len(netsettings.jobs) > 0:
netsettings.jobs.remove(0) | True | netrender.init_data | netrender.init_data | 0.6481001377105713 |
640 | 638 | def add_module(self, module_name):
node = self.__graph.find_node(module_name)
if<mask>:
return node
self.__updated = True
return self.__graph.add_module(module_name) | True | node is not None | node is not None | 0.6537237167358398 |
641 | 639 | def call_get_disk_usage(args):
"""Main method for getting disk usage."""
disk_usage = get_disk_usage(args.dir)
if<mask>:
return 1
print(disk_usage)
return 0 | True | disk_usage is None | disk_usage is None | 0.6504016518592834 |
642 | 640 | @_app.callback(Output('vp-upper-left', 'value'), [Input('vp-graph', 'figure')], [State('vp-bound-val', 'value')])
def update_upper_left_number(fig, bounds):
"""Update the number of data points in the upper left corner."""
l_lim = bounds[0]
number = 0
if<mask>:
x = np.array(fig['data'][0]['x'])
idx = x < float(l_lim)
number = len(x[idx])
return number | False | len(fig['data']) > 1 | l_lim is not None | 0.6525425910949707 |
643 | 641 | def process_batch(batch):
for message in batch.messages:
if<mask>:
return
_process(message)
reader.commit(batch) | False | not batch.alive | not message.alive | 0.6508684158325195 |
644 | 642 | def build_detector(cfg, train_cfg=None, test_cfg=None):
"""Build detector."""
if<mask>:
warnings.warn('train_cfg and test_cfg is deprecated, please specify them in model', UserWarning)
assert cfg.get('train_cfg') is None or train_cfg is None, 'train_cfg specified in both outer field and model field '
assert cfg.get('test_cfg') is None or test_cfg is None, 'test_cfg specified in both outer field and model field '
return build(cfg, DETECTORS, dict(train_cfg=train_cfg, test_cfg=test_cfg)) | True | train_cfg is not None or test_cfg is not None | train_cfg is not None or test_cfg is not None | 0.6514482498168945 |
645 | 643 | @classmethod
def to_stream(cls, instance, stream, context=None, arg=0, template=None):
if<mask>:
cls._storage.to_stream(instance.value, stream, context)
else:
logging.debug(f'instance {instance} is not a member of the {cls} class, writing int')
cls._storage.to_stream(int(instance), stream, context)
return instance | False | isinstance(instance, cls) | issubclass(instance, cls) | 0.649711549282074 |
646 | 644 | @property
def public_key(self):
"""
:return:
The PublicKey object for the public key this certificate contains
"""
if<mask>:
self._public_key = load_public_key(self.asn1['tbs_certificate']['subject_public_key_info'])
return self._public_key | True | self._public_key is None | self._public_key is None | 0.6504393815994263 |
647 | 645 | def __init__(self, data_schema: Dict[str, SizeData], fpath: str, mode: str):
self.fpath = fpath
assert 'b' in mode, f"Tensor storage should be opened in binary mode, got '{mode}'"
if<mask>:
file_h = PathManager.open(fpath, mode)
elif 'r' in mode:
local_fpath = PathManager.get_local_path(fpath)
file_h = open(local_fpath, mode)
else:
raise ValueError(f'Unsupported file mode {mode}, supported modes: rb, wb')
super().__init__(data_schema, file_h) | False | 'w' in mode | 'b' in mode | 0.6633249521255493 |
648 | 646 | def __init__(self, data_schema: Dict[str, SizeData], fpath: str, mode: str):
self.fpath = fpath
assert 'b' in mode, f"Tensor storage should be opened in binary mode, got '{mode}'"
if 'w' in mode:
file_h = PathManager.open(fpath, mode)
elif<mask>:
local_fpath = PathManager.get_local_path(fpath)
file_h = open(local_fpath, mode)
else:
raise ValueError(f'Unsupported file mode {mode}, supported modes: rb, wb')
super().__init__(data_schema, file_h) | False | 'r' in mode | 'rb' in mode | 0.6604881882667542 |
649 | 647 | def add_env_value(self, key, value):
""" add key, value pair to env array """
rval = False
env = self.get_env_vars()
if<mask>:
env.append({'name': key, 'value': value})
rval = True
else:
result = self.put(DeploymentConfig.env_path, {'name': key, 'value': value})
rval = result[0]
return rval | True | env | env | 0.6736887693405151 |
650 | 648 | def __setitem__(self, key, value):
"""Store item in sorted dict with `key` and corresponding `value`.
``sd.__setitem__(key, value)`` <==> ``sd[key] = value``
Runtime complexity: `O(log(n))` -- approximate.
>>> sd = SortedDict()
>>> sd['c'] = 3
>>> sd['a'] = 1
>>> sd['b'] = 2
>>> sd
SortedDict({'a': 1, 'b': 2, 'c': 3})
:param key: key for item
:param value: value for item
"""
if<mask>:
self._list_add(key)
dict.__setitem__(self, key, value) | True | key not in self | key not in self | 0.6586028337478638 |
651 | 649 | def encode(self):
if<mask>:
return False
v = int(self.expr)
v = self.encodeval(v & self.lmask)
self.value = v & self.lmask
return True | True | not isinstance(self.expr, ExprInt) | not isinstance(self.expr, ExprInt) | 0.6470715999603271 |
652 | 650 | def insertComment(self, token, parent=None):
if<mask>:
parent = self.openElements[-1]
parent.appendChild(self.commentClass(token['data'])) | True | parent is None | parent is None | 0.6628707647323608 |
653 | 651 | def _stripMimeTypeExtension(self, mime_type: MimeType, file_name: str) -> str:
suffixes = mime_type.suffixes[:]
if<mask>:
suffixes.append(mime_type.preferredSuffix)
for suffix in suffixes:
if file_name.endswith(suffix):
return file_name[:-len(suffix) - 1]
return file_name | True | mime_type.preferredSuffix | mime_type.preferredSuffix | 0.6443555355072021 |
654 | 652 | def _stripMimeTypeExtension(self, mime_type: MimeType, file_name: str) -> str:
suffixes = mime_type.suffixes[:]
if mime_type.preferredSuffix:
suffixes.append(mime_type.preferredSuffix)
for suffix in suffixes:
if<mask>:
return file_name[:-len(suffix) - 1]
return file_name | True | file_name.endswith(suffix) | file_name.endswith(suffix) | 0.6435981392860413 |
655 | 653 | def get_vtable_element_size(self):
code = self.flags & (SWI_V32 | SWI_VSIZE)
if<mask>:
return 2
elif code == SWI_V32:
return 4
elif code == SWI_VSIZE:
return 1
return 8 | True | code == 0 | code == 0 | 0.6649768352508545 |
656 | 654 | def get_vtable_element_size(self):
code = self.flags & (SWI_V32 | SWI_VSIZE)
if code == 0:
return 2
elif<mask>:
return 4
elif code == SWI_VSIZE:
return 1
return 8 | True | code == SWI_V32 | code == SWI_V32 | 0.6565409302711487 |
657 | 655 | def get_vtable_element_size(self):
code = self.flags & (SWI_V32 | SWI_VSIZE)
if code == 0:
return 2
elif code == SWI_V32:
return 4
elif<mask>:
return 1
return 8 | True | code == SWI_VSIZE | code == SWI_VSIZE | 0.6576449871063232 |
658 | 656 | def str_extract(arr, pat, flags=0, expand=True):
"""
Extract capture groups in the regex `pat` as columns in a DataFrame.
For each subject string in the Series, extract groups from the
first match of regular expression `pat`.
Parameters
----------
pat : str
Regular expression pattern with capturing groups.
flags : int, default 0 (no flags)
Flags from the ``re`` module, e.g. ``re.IGNORECASE``, that
modify regular expression matching for things like case,
spaces, etc. For more details, see :mod:`re`.
expand : bool, default True
If True, return DataFrame with one column per capture group.
If False, return a Series/Index if there is one capture group
or DataFrame if there are multiple capture groups.
Returns
-------
DataFrame or Series or Index
A DataFrame with one row for each subject string, and one
column for each group. Any capture group names in regular
expression pat will be used for column names; otherwise
capture group numbers will be used. The dtype of each result
column is always object, even when no match is found. If
``expand=False`` and pat has only one capture group, then
return a Series (if subject is a Series) or Index (if subject
is an Index).
See Also
--------
extractall : Returns all matches (not just the first match).
Examples
--------
A pattern with two groups will return a DataFrame with two columns.
Non-matches will be NaN.
>>> s = pd.Series(['a1', 'b2', 'c3'])
>>> s.str.extract(r'([ab])(\\d)')
0 1
0 a 1
1 b 2
2 NaN NaN
A pattern may contain optional groups.
>>> s.str.extract(r'([ab])?(\\d)')
0 1
0 a 1
1 b 2
2 NaN 3
Named groups will become column names in the result.
>>> s.str | False | not isinstance(expand, bool) | expand | 0.6443957090377808 |
659 | 657 | def str_extract(arr, pat, flags=0, expand=True):
"""
Extract capture groups in the regex `pat` as columns in a DataFrame.
For each subject string in the Series, extract groups from the
first match of regular expression `pat`.
Parameters
----------
pat : str
Regular expression pattern with capturing groups.
flags : int, default 0 (no flags)
Flags from the ``re`` module, e.g. ``re.IGNORECASE``, that
modify regular expression matching for things like case,
spaces, etc. For more details, see :mod:`re`.
expand : bool, default True
If True, return DataFrame with one column per capture group.
If False, return a Series/Index if there is one capture group
or DataFrame if there are multiple capture groups.
Returns
-------
DataFrame or Series or Index
A DataFrame with one row for each subject string, and one
column for each group. Any capture group names in regular
expression pat will be used for column names; otherwise
capture group numbers will be used. The dtype of each result
column is always object, even when no match is found. If
``expand=False`` and pat has only one capture group, then
return a Series (if subject is a Series) or Index (if subject
is an Index).
See Also
--------
extractall : Returns all matches (not just the first match).
Examples
--------
A pattern with two groups will return a DataFrame with two columns.
Non-matches will be NaN.
>>> s = pd.Series(['a1', 'b2', 'c3'])
>>> s.str.extract(r'([ab])(\\d)')
0 1
0 a 1
1 b 2
2 NaN NaN
A pattern may contain optional groups.
>>> s.str.extract(r'([ab])?(\\d)')
0 1
0 a 1
1 b 2
2 NaN 3
Named groups will become column names in the result.
>>> s.str | True | expand | expand | 0.6749656200408936 |
660 | 658 | def eval(self, body, domain):
jsPayload = template(body, domain)
if<mask>:
logging.warning('WARNING - Please upgrade your js2py https://github.com/PiotrDabkowski/Js2Py, applying work around for the meantime.')
jsPayload = jsunfuck(jsPayload)
def atob(s):
return base64.b64decode('{}'.format(s)).decode('utf-8')
js2py.disable_pyimport()
context = js2py.EvalJs({'atob': atob})
result = context.eval(jsPayload)
return result | False | js2py.eval_js('(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]') == '1' | 'Ubuntu' in jsPayload | 0.6551860570907593 |
661 | 659 | def index_pix_in_pixels(pix, pixels, sort=False, outside=-1):
"""
Find the indices of a set of pixels into another set of pixels.
!!! ASSUMES SORTED PIXELS!!!
Parameters:
-----------
pix : set of search pixels
pixels : set of reference pixels
Returns:
--------
index : index into the reference pixels
"""
if<mask>:
pixels = np.sort(pixels)
index = np.searchsorted(pixels, pix)
if np.isscalar(index):
if not np.in1d(pix, pixels).any():
index = outside
else:
index[~np.in1d(pix, pixels)] = outside
return index | True | sort | sort | 0.6685218811035156 |
662 | 660 | def index_pix_in_pixels(pix, pixels, sort=False, outside=-1):
"""
Find the indices of a set of pixels into another set of pixels.
!!! ASSUMES SORTED PIXELS!!!
Parameters:
-----------
pix : set of search pixels
pixels : set of reference pixels
Returns:
--------
index : index into the reference pixels
"""
if sort:
pixels = np.sort(pixels)
index = np.searchsorted(pixels, pix)
if<mask>:
if not np.in1d(pix, pixels).any():
index = outside
else:
index[~np.in1d(pix, pixels)] = outside
return index | False | np.isscalar(index) | index is None | 0.6454963684082031 |
663 | 661 | def index_pix_in_pixels(pix, pixels, sort=False, outside=-1):
"""
Find the indices of a set of pixels into another set of pixels.
!!! ASSUMES SORTED PIXELS!!!
Parameters:
-----------
pix : set of search pixels
pixels : set of reference pixels
Returns:
--------
index : index into the reference pixels
"""
if sort:
pixels = np.sort(pixels)
index = np.searchsorted(pixels, pix)
if np.isscalar(index):
if<mask>:
index = outside
else:
index[~np.in1d(pix, pixels)] = outside
return index | False | not np.in1d(pix, pixels).any() | invert | 0.6456348896026611 |
664 | 662 | @type.setter
def type(self, type):
"""Sets the type of this Assignment.
:param type: The type of this Assignment.
:type type: str
"""
allowed_values = ['user', 'group']
if<mask>:
raise ValueError('Invalid value for `type` ({0}), must be one of {1}'.format(type, allowed_values))
self._type = type | True | type not in allowed_values | type not in allowed_values | 0.6552023887634277 |
def _save_response_content(response: 'requests.models.Response', destination: str, chunk_size: int = 32768) -> None:
with open(destination, 'wb') as f:
pbar = tqdm(total=None)
progress = 0
for chunk in response.iter_content(chunk_size):
if<mask>:
f.write(chunk)
progress += len(chunk)
pbar.update(progress - pbar.n)
pbar.close() | True | chunk | chunk | 0.6597679853439331 |
666 | 664 | def is_nfs_ganesha_cluster_in_bad_state(mnode):
"""
Checks whether nfs ganesha cluster is in bad state.
Args:
mnode (str): Node in which cmd command will
be executed.
Returns:
bool : True if nfs ganesha cluster is in bad state.
False otherwise
Example:
is_nfs_ganesha_cluster_in_bad_state(mnode)
"""
cmd = '/usr/libexec/ganesha/ganesha-ha.sh --status ' + '/run/gluster/shared_storage/nfs-ganesha/ | grep' + " 'Cluster HA Status' | cut -d' ' -f 4 "
retcode, stdout, _ = g.run(mnode, cmd)
if<mask>:
g.log.error('Failed to execute nfs-ganesha status command to check if cluster is in bad state')
return False
if stdout.strip('\n') != 'BAD':
g.log.error('nfs-ganesha cluster is not in bad state. Current cluster state: %s' % stdout)
return False | True | retcode != 0 | retcode != 0 | 0.657815158367157 |
667 | 665 | def is_nfs_ganesha_cluster_in_bad_state(mnode):
"""
Checks whether nfs ganesha cluster is in bad state.
Args:
mnode (str): Node in which cmd command will
be executed.
Returns:
bool : True if nfs ganesha cluster is in bad state.
False otherwise
Example:
is_nfs_ganesha_cluster_in_bad_state(mnode)
"""
cmd = '/usr/libexec/ganesha/ganesha-ha.sh --status ' + '/run/gluster/shared_storage/nfs-ganesha/ | grep' + " 'Cluster HA Status' | cut -d' ' -f 4 "
retcode, stdout, _ = g.run(mnode, cmd)
if retcode != 0:
g.log.error('Failed to execute nfs-ganesha status command to check if cluster is in bad state')
return False
if<mask>:
g.log.error('nfs-ganesha cluster is not in bad state. Current cluster state: %s' % stdout)
return False | False | stdout.strip('\n') != 'BAD' | retcode == 0 and stdout | 0.6454600095748901 |
668 | 666 | def set_timeout(self):
"""Sets an alarm to time out the test"""
if<mask>:
self.vprint(2, 'setting plugin timeout to 1 second')
else:
self.vprint(2, 'setting plugin timeout to %s seconds' % self.timeout)
signal.signal(signal.SIGALRM, self.sighandler)
signal.alarm(self.timeout) | True | self.timeout == 1 | self.timeout == 1 | 0.6588247418403625 |
669 | 667 | def __init__(self, value, type_id, info_offset, data_offset=None, bit_offset=None, enum_string=None, hash_string=None):
self.value = value
self.type_id = type_id
self.info_offset = info_offset
if<mask>:
self.data_offset = info_offset
else:
self.data_offset = data_offset
self.bit_offset = bit_offset
self.enum_string = enum_string
self.hash_string = hash_string | True | data_offset is None | data_offset is None | 0.6573934555053711 |
668 | def __header_lang(self, line, state):
"""
@param line: The current line in GedLine format
@type line: GedLine
@param state: The current state
@type state: CurrentState
"""
if<mask>:
sattr = SrcAttribute()
sattr.set_type(_('Language of GEDCOM text'))
sattr.set_value(line.data)
self.def_src.add_attribute(sattr) | True | self.use_def_src | self.use_def_src | 0.651429295539856 |
669 | def __getattr__(self, name):
if<mask>:
return self
else:
raise AttributeError(name) | True | name == 'dict' | name == 'dict' | 0.6522465944290161 |
670 | def withGSParams(self, gsparams=None, **kwargs):
"""Create a version of the current interpolant with the given gsparams
"""
if<mask>:
return self
from copy import copy
ret = copy(self)
ret._gsparams = GSParams.check(gsparams, self.gsparams, **kwargs)
return ret | True | gsparams == self.gsparams | gsparams == self.gsparams | 0.6543525457382202 |
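Row 670 follows a guard-then-copy pattern: return `self` unchanged when the parameters already match, otherwise shallow-copy and mutate only the copy. A standalone sketch with assumed names:

```python
from copy import copy

class Interpolant:
    def __init__(self, gsparams):
        self._gsparams = gsparams

    def withGSParams(self, gsparams):
        if gsparams == self._gsparams:
            return self               # nothing changed; avoid the copy
        ret = copy(self)              # shallow copy, original untouched
        ret._gsparams = gsparams
        return ret
```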
671 | def epoch_start(self, epoch):
if<mask>:
self.dae_scheduler.step()
self.vae_scheduler.step() | False | epoch > self.cfg.sde.warmup_epochs | self.scheduler != None | 0.6449305415153503 |
672 | def release_conn(self):
if<mask>:
return
self._pool._put_conn(self._connection)
self._connection = None | True | not self._pool or not self._connection | not self._pool or not self._connection | 0.6462751626968384 |
673 | def __getattr__(self, attr):
if<mask>:
return self.rrset.name
elif attr == 'ttl':
return self.rrset.ttl
elif attr == 'covers':
return self.rrset.covers
elif attr == 'rdclass':
return self.rrset.rdclass
elif attr == 'rdtype':
return self.rrset.rdtype
else:
raise AttributeError(attr) | True | attr == 'name' | attr == 'name' | 0.6590777635574341 |
674 | def __getattr__(self, attr):
if attr == 'name':
return self.rrset.name
elif<mask>:
return self.rrset.ttl
elif attr == 'covers':
return self.rrset.covers
elif attr == 'rdclass':
return self.rrset.rdclass
elif attr == 'rdtype':
return self.rrset.rdtype
else:
raise AttributeError(attr) | True | attr == 'ttl' | attr == 'ttl' | 0.6633827686309814 |
675 | def __getattr__(self, attr):
if attr == 'name':
return self.rrset.name
elif attr == 'ttl':
return self.rrset.ttl
elif<mask>:
return self.rrset.covers
elif attr == 'rdclass':
return self.rrset.rdclass
elif attr == 'rdtype':
return self.rrset.rdtype
else:
raise AttributeError(attr) | True | attr == 'covers' | attr == 'covers' | 0.6541184782981873 |
676 | def __getattr__(self, attr):
if attr == 'name':
return self.rrset.name
elif attr == 'ttl':
return self.rrset.ttl
elif attr == 'covers':
return self.rrset.covers
elif<mask>:
return self.rrset.rdclass
elif attr == 'rdtype':
return self.rrset.rdtype
else:
raise AttributeError(attr) | True | attr == 'rdclass' | attr == 'rdclass' | 0.6521478891372681 |
677 | def __getattr__(self, attr):
if attr == 'name':
return self.rrset.name
elif attr == 'ttl':
return self.rrset.ttl
elif attr == 'covers':
return self.rrset.covers
elif attr == 'rdclass':
return self.rrset.rdclass
elif<mask>:
return self.rrset.rdtype
else:
raise AttributeError(attr) | True | attr == 'rdtype' | attr == 'rdtype' | 0.6532492637634277 |
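Rows 673-677 mask successive branches of one delegating `__getattr__` ladder. The same delegation can be written table-driven; a minimal sketch, with `RRsetView` and the wrapped `rrset` object as assumed names:

```python
_DELEGATED = ('name', 'ttl', 'covers', 'rdclass', 'rdtype')

class RRsetView:
    def __init__(self, rrset):
        self.rrset = rrset

    def __getattr__(self, attr):
        if attr in _DELEGATED:
            return getattr(self.rrset, attr)   # forward to the wrapped rrset
        raise AttributeError(attr)
```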
678 | def _get_new_profile_position(self) -> Optional[DeviceFacade.View]:
buttons = self.device.find(className=ResourceID.BUTTON)
for button in buttons:
if<mask>:
return button
return None | False | button.content_desc() == 'Profile' | button.content_desc == 'NEW' | 0.6502137184143066 |
679 | def get_out_channel(layer):
if<mask>:
return getattr(layer, 'out_channels')
return layer.weight.size(0) | True | hasattr(layer, 'out_channels') | hasattr(layer, 'out_channels') | 0.644025981426239 |
680 | @staticmethod
def normalize_path(path):
if<mask>:
return '/'
path = re.sub('/+', '/', path)
if not path.startswith('/'):
path = '/' + path
return path | False | not path | path is None | 0.6645264029502869 |
681 | @staticmethod
def normalize_path(path):
if not path:
return '/'
path = re.sub('/+', '/', path)
if<mask>:
path = '/' + path
return path | False | not path.startswith('/') | path[:1] != '/' | 0.6438450217247009 |
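Rows 680-681 mask the two guards of the same `normalize_path`; a self-contained version with a few expected outputs:

```python
import re

def normalize_path(path):
    if not path:
        return '/'
    path = re.sub('/+', '/', path)    # collapse runs of slashes
    if not path.startswith('/'):
        path = '/' + path
    return path

assert normalize_path('') == '/'
assert normalize_path('a//b///c') == '/a/b/c'
```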
682 | def tearDown(self):
"""
Cleanup and umount volume
"""
for mount_object in self.mounts:
self.delete_user(mount_object.client_system, 'qa')
for server in self.servers:
self.delete_user(server, 'qa')
g.log.info('Starting to Unmount Volume and Cleanup Volume')
ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts)
if<mask>:
raise ExecutionError('Failed to umount the vol & cleanup Volume')
g.log.info('Successful in umounting the volume and Cleanup')
self.get_super_method(self, 'tearDown')() | True | not ret | not ret | 0.6566653847694397 |
683 | def demap_params(self, params):
if<mask>:
print(f'param out of bounds: {params}')
params = np.clip(params, -1.0, 1.0)
if self.param_mapping == 'sin':
params = np.arcsin(params) / (0.5 * np.pi)
return params | False | not np.all((params <= 1.0) & (params >= -1.0)) | self.param_mapping is None | 0.6538748741149902 |
684 | def demap_params(self, params):
if not np.all((params <= 1.0) & (params >= -1.0)):
print(f'param out of bounds: {params}')
params = np.clip(params, -1.0, 1.0)
if<mask>:
params = np.arcsin(params) / (0.5 * np.pi)
return params | False | self.param_mapping == 'sin' | self.normalize_params | 0.6482543349266052 |
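In rows 683-684 the inverse mapping is `arcsin(p) / (pi/2)`, which is defined only on [-1, 1]; that is why out-of-range values are clipped first. A worked example:

```python
import numpy as np

params = np.array([0.5, 1.2, -1.0])
params = np.clip(params, -1.0, 1.0)           # -> [0.5, 1.0, -1.0]
demapped = np.arcsin(params) / (0.5 * np.pi)  # -> [0.333..., 1.0, -1.0]
```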
685 | def format(self, val, *args, **kwargs):
if<mask>:
val_ = ', '.join(val)
else:
val_ = ''
return {self.name: val_} | False | val and isinstance(val, list) | len(val) > 0 | 0.647120475769043 |
686 | def evaluate_book(book_id, db):
authors = db.authors(book_id, index_is_id=True)
if<mask>:
authors = [a.strip().replace('|', ',') for a in authors.split(',')]
for author in authors:
if ',' not in author:
return True
return False | True | authors | authors | 0.659276008605957 |
687 | def evaluate_book(book_id, db):
authors = db.authors(book_id, index_is_id=True)
if authors:
authors = [a.strip().replace('|', ',') for a in authors.split(',')]
for author in authors:
if<mask>:
return True
return False | False | ',' not in author | author == author | 0.6475291848182678 |
688 | def handleFreeLookPointing(self):
if<mask>:
msg = self.vehicle.message_factory.mount_control_encode(0, 1, self.camPitch * 100, 0.0, self.camYaw * 100, 0)
self.vehicle.send_mavlink(msg)
else:
msg = self.vehicle.message_factory.command_long_encode(0, 1, mavutil.mavlink.MAV_CMD_CONDITION_YAW, 0, self.camYaw, YAW_SPEED, self.camDir, 0.0, 0, 0, 0)
self.vehicle.send_mavlink(msg) | False | self.vehicle.mount_status[0] is not None | self.mount_status[0] | 0.6466354131698608 |
689 | def __iter__(self, slice_key=None):
if<mask>:
for i in range(len(self)):
yield self[i]
else:
for i in range(len(self))[slice_key]:
yield self[i] | True | slice_key is None | slice_key is None | 0.6507833003997803 |
690 | def __call__(self, expr, state, recurrences):
results = self._cache.get((expr, state, recurrences))
if<mask>:
return results
results = super().__call__(expr, state, recurrences)
self._cache[expr, state, recurrences] = results
return results | True | results is not None | results is not None | 0.6499881744384766 |
691 | def get_user_admin_fed_full(user_id):
user_feds = []
for f in FEDERATION_BYFEDID:
if<mask>:
user_feds.append({'fed_id': f, 'fed': FEDERATION_BYFEDID[f]})
return user_feds | False | int(user_id) in eval(eval(FEDERATION_BYFEDID[f]['fusers'])['members']) | user_id == FEDERATION_BYFEDID[f].get('id') | 0.651580810546875 |
692 | def _retries_gen(self, args, kwargs):
for i, is_last_try in zip(count(), _is_last_gen(self.max_retries)):
try:
yield i
except self._retry_exceptions:
if<mask>:
yield True
else:
logger.warning(type(self).__name__ + ' caught an error, retrying (%s/%s tries). Called with (*%r, **%r).', i, self.max_retries, args, kwargs, exc_info=True)
yield False
continue
else:
return | True | is_last_try | is_last_try | 0.6488479375839233 |
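Row 692's generator interleaves attempt counts with the caller and signals whether the failure happened on the last try. A plainer, self-contained sketch of the same retry shape (names are assumed):

```python
import logging

logger = logging.getLogger(__name__)

def call_with_retries(fn, max_retries, *args, **kwargs):
    for attempt in range(max_retries):
        try:
            return fn(*args, **kwargs)
        except Exception:
            if attempt == max_retries - 1:
                raise                       # last try: propagate the error
            logger.warning('retrying (%s/%s tries)',
                           attempt + 1, max_retries, exc_info=True)
```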
693 | def get_field(self, field):
"""Accesses a box collection and associated fields.
This function returns specified field with object; if no field is specified,
it returns the box coordinates.
Args:
field: this optional string parameter can be used to specify
a related field to be accessed.
Returns:
a tensor representing the box collection or an associated field.
Raises:
ValueError: if invalid field
"""
if<mask>:
raise ValueError('field ' + str(field) + ' does not exist')
return self.data[field] | True | not self.has_field(field) | not self.has_field(field) | 0.6458514928817749 |
694 | def resize_num_qa_labels(self, num_labels):
"""
Build a resized question answering linear layer Module from a provided new linear layer. Increasing the size will add newly
initialized weights. Reducing the size will remove weights from the end
Args:
cur_qa_logit_layer (:obj:`torch.nn.Linear`):
Old linear layer to be resized.
num_labels (:obj:`int`, `optional`):
New number of labels in the linear layer weight matrix.
Increasing the size will add newly initialized weights at the end. Reducing the size will remove
weights from the end. If not provided or :obj:`None`, just returns a pointer to the qa labels
:obj:`torch.nn.Linear` module of the model without doing anything.
Return:
:obj:`torch.nn.Linear`: Pointer to the resized Linear layer or the old Linear layer
"""
cur_qa_logit_layer = self.get_qa_logit_layer()
if<mask>:
return
new_qa_logit_layer = self._resize_qa_labels(num_labels)
self.config.num_qa_labels = num_labels
self.num_qa_labels = num_labels
return new_qa_logit_layer | False | num_labels is None or cur_qa_logit_layer is None | cur_qa_logit_layer is None | 0.6435016393661499 |
695 | def append(self, expectation):
if<mask>:
self.expectations.append(expectation)
if self._scheduler is not None:
self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation)
else:
self.expectations.extend(expectation.expectations)
if self._scheduler is not None:
self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation.expectations)
return self | False | not isinstance(expectation, self.__class__) | isinstance(expectation, Expectation) | 0.6442573070526123 |
696 | def append(self, expectation):
if not isinstance(expectation, self.__class__):
self.expectations.append(expectation)
if<mask>:
self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation)
else:
self.expectations.extend(expectation.expectations)
if self._scheduler is not None:
self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation.expectations)
return self | True | self._scheduler is not None | self._scheduler is not None | 0.652035117149353 |
697 | def append(self, expectation):
if not isinstance(expectation, self.__class__):
self.expectations.append(expectation)
if self._scheduler is not None:
self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation)
else:
self.expectations.extend(expectation.expectations)
if<mask>:
self._scheduler.expectation_loop.run_async(self._register_subexpectations, expectation.expectations)
return self | True | self._scheduler is not None | self._scheduler is not None | 0.6539571285247803 |
698 | def __eq__(self, other):
if<mask>:
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True | True | not isinstance(other, self.__class__) | not isinstance(other, self.__class__) | 0.64579176902771 |
699 | def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if<mask>:
return False
return True | True | my_val != other_val | my_val != other_val | 0.6494433879852295 |
700 | def sigencode_der_canonize(r, s, order):
"""
Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`.
Makes sure that the signature is encoded in the canonical format, where
the ``s`` parameter is always smaller than ``order / 2``.
Most commonly used in bitcoin.
Encodes the signature to the following :term:`ASN.1` structure::
Ecdsa-Sig-Value ::= SEQUENCE {
r INTEGER,
s INTEGER
}
It's expected that this function will be used as a ``sigencode=`` parameter
in :func:`ecdsa.keys.SigningKey.sign` method.
:param int r: first parameter of the signature
:param int s: second parameter of the signature
:param int order: the order of the curve over which the signature was
computed
:return: DER encoding of ECDSA signature
:rtype: bytes
"""
if<mask>:
s = order - s
return sigencode_der(r, s, order) | False | s > order / 2 | order != 0 | 0.6569547653198242 |
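Row 700's canonicalization keeps `s` in the lower half-range: `(r, s)` and `(r, order - s)` verify the same message, so the smaller `s` is chosen as canonical. With toy numbers:

```python
def canonize_s(s, order):
    return order - s if s > order / 2 else s

order = 97
assert canonize_s(80, order) == 17   # 80 > 48.5, so flip to order - s
assert canonize_s(17, order) == 17   # already in the lower half
```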
701 | def __eq__(self, other):
if<mask>:
return False
return self.source == other.source and self.identifier == other.identifier and (self.ntype == other.ntype) | False | not isinstance(other, GraphNode) | not isinstance(other, Token) | 0.6500142216682434 |
702 | def _translateType(self, t):
it = TypeToIntegerType(t)
if<mask>:
it: IntegerType
return Bits(it.getBitWidth())
else:
raise NotImplementedError(t) | False | it is not None | t == BALLEL_INT_TYPE | 0.6533830165863037 |
703 | def options(self, context, module_options):
"""
URL URL for the download cradle
PAYLOAD Payload architecture (choices: 64 or 32) Default: 64
"""
if<mask>:
context.log.fail('URL option is required!')
exit(1)
self.url = module_options['URL']
self.payload = '64'
if 'PAYLOAD' in module_options:
if module_options['PAYLOAD'] not in ['64', '32']:
context.log.fail('Invalid value for PAYLOAD option!')
exit(1)
self.payload = module_options['PAYLOAD'] | False | not 'URL' in module_options | 'URL' not in module_options | 0.6517795324325562 |
704 | def options(self, context, module_options):
"""
URL URL for the download cradle
PAYLOAD Payload architecture (choices: 64 or 32) Default: 64
"""
if not 'URL' in module_options:
context.log.fail('URL option is required!')
exit(1)
self.url = module_options['URL']
self.payload = '64'
if<mask>:
if module_options['PAYLOAD'] not in ['64', '32']:
context.log.fail('Invalid value for PAYLOAD option!')
exit(1)
self.payload = module_options['PAYLOAD'] | True | 'PAYLOAD' in module_options | 'PAYLOAD' in module_options | 0.6498085856437683 |
705 | def options(self, context, module_options):
"""
URL URL for the download cradle
PAYLOAD Payload architecture (choices: 64 or 32) Default: 64
"""
if not 'URL' in module_options:
context.log.fail('URL option is required!')
exit(1)
self.url = module_options['URL']
self.payload = '64'
if 'PAYLOAD' in module_options:
if<mask>:
context.log.fail('Invalid value for PAYLOAD option!')
exit(1)
self.payload = module_options['PAYLOAD'] | False | module_options['PAYLOAD'] not in ['64', '32'] | module_options['PAYLOAD'] not in _SUPPORTED_PAYLOAD | 0.645696222782135 |
706 | def get_adapter(self, url):
"""
Returns the appropriate connection adapter for the given URL.
:rtype: requests.adapters.BaseAdapter
"""
for prefix, adapter in self.adapters.items():
if<mask>:
return adapter
raise InvalidSchema("No connection adapters were found for '%s'" % url) | True | url.lower().startswith(prefix.lower()) | url.lower().startswith(prefix.lower()) | 0.6447865962982178 |
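Row 706 resolves an adapter by case-insensitive URL prefix, and the first match in insertion order wins. A sketch using strings as stand-ins for adapter objects:

```python
adapters = {
    'https://api.example.com': 'pinned-adapter',   # most specific first
    'https://': 'default-https',
    'http://': 'default-http',
}

def get_adapter(url):
    for prefix, adapter in adapters.items():
        if url.lower().startswith(prefix.lower()):
            return adapter
    raise ValueError('no adapter for %r' % url)

assert get_adapter('https://api.example.com/v1') == 'pinned-adapter'
assert get_adapter('https://other.example.org/') == 'default-https'
```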
707 | def proxy_headers(self, proxy):
"""Returns a dictionary of the headers to add to any request sent
through a proxy. This works with urllib3 magic to ensure that they are
correctly sent to the proxy, rather than in a tunnelled request if
CONNECT is being used.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param proxies: The url of the proxy being used for this request.
"""
headers = {}
username, password = get_auth_from_url(proxy)
if<mask>:
headers['Proxy-Authorization'] = _basic_auth_str(username, password)
return headers | True | username and password | username and password | 0.6595168709754944 |
708 | def known_extension(self, extension):
for pattern in self.extension_patterns:
if<mask>:
if extension.lower() == pattern.lower():
return 1
elif pattern.match(extension):
return 1
return 0 | False | isinstance(pattern, types.StringTypes) | isinstance(extension, str) | 0.6412390470504761 |
709 | def known_extension(self, extension):
for pattern in self.extension_patterns:
if isinstance(pattern, types.StringTypes):
if<mask>:
return 1
elif pattern.match(extension):
return 1
return 0 | False | extension.lower() == pattern.lower() | pattern.match(extension) | 0.6480761766433716 |
710 | def known_extension(self, extension):
for pattern in self.extension_patterns:
if isinstance(pattern, types.StringTypes):
if extension.lower() == pattern.lower():
return 1
elif<mask>:
return 1
return 0 | False | pattern.match(extension) | extension.lower() == pattern.lower() | 0.6445437073707581 |
711 | def set_params(self, **kwargs):
self.source.set_params(**kwargs)
if<mask>:
raise ValueError('Coordinate outside interior ROI.') | False | self.pixel not in self.roi.pixels_interior | self.source.intersects(self.source) | 0.6472979784011841 |
712 | def scan_line_break(self):
ch = self.peek()
if<mask>:
if self.prefix(2) == '\r\n':
self.forward(2)
else:
self.forward()
return '\n'
elif ch in '\u2028\u2029':
self.forward()
return ch
return '' | True | ch in '\r\n\x85' | ch in '\r\n\x85' | 0.6483626365661621 |
713 | def scan_line_break(self):
ch = self.peek()
if ch in '\r\n\x85':
if<mask>:
self.forward(2)
else:
self.forward()
return '\n'
elif ch in '\u2028\u2029':
self.forward()
return ch
return '' | True | self.prefix(2) == '\r\n' | self.prefix(2) == '\r\n' | 0.6466876268386841 |
714 | def scan_line_break(self):
ch = self.peek()
if ch in '\r\n\x85':
if self.prefix(2) == '\r\n':
self.forward(2)
else:
self.forward()
return '\n'
elif<mask>:
self.forward()
return ch
return '' | False | ch in '\u2028\u2029' | ch in '\u2028\u2029\u2029' | 0.649858832359314 |
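Rows 712-714 mask one scanner method that folds `\r\n`, `\r`, `\n` and `\x85` into `\n` while passing the Unicode separators `\u2028`/`\u2029` through unchanged. A re-implementation over a plain string instead of a reader object:

```python
def fold_line_break(text, pos=0):
    ch = text[pos] if pos < len(text) else ''
    if ch in '\r\n\x85':
        width = 2 if text[pos:pos + 2] == '\r\n' else 1   # consume CRLF as one break
        return ('\n', pos + width)
    if ch in '\u2028\u2029':
        return (ch, pos + 1)
    return ('', pos)

assert fold_line_break('\r\nabc') == ('\n', 2)
assert fold_line_break('\u2028x') == ('\u2028', 1)
```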
715 | def startTagTableOther(self, token):
if<mask>:
self.clearStackToTableBodyContext()
self.endTagTableRowGroup(impliedTagToken(self.tree.openElements[-1].name))
return token
else:
assert self.parser.innerHTML
self.parser.parseError() | False | self.tree.elementInScope('tbody', variant='table') or self.tree.elementInScope('thead', variant='table') or self.tree.elementInScope('tfoot', variant='table') | self.tree.elementInScope('tbody', variant='table') or self.tree.elementInScope('thead', variant='table') or self.tree.elementInScope('tfoot', variant='table') or self.tree.elementInScope('tfoot', variant='table') | 0.6490222215652466 |
716 | def validate_port(self):
"""Exits with an error if the port is not valid"""
if<mask>:
self.port = ''
else:
try:
self.port = int(self.port)
if not 1 <= self.port <= 65535:
raise ValueError
except ValueError:
end(UNKNOWN, 'port number must be a whole number between ' + '1 and 65535')
717 | def validate_port(self):
"""Exits with an error if the port is not valid"""
if self.port is None:
self.port = ''
else:
try:
self.port = int(self.port)
if<mask>:
raise ValueError
except ValueError:
end(UNKNOWN, 'port number must be a whole number between ' + '1 and 65535')
718 | def ajoutLigne(self, ligne):
if<mask>:
return
self.ajoutTraceComplete(ligne)
self.ajoutTraceSynthese(ligne) | False | not ligne | ligne.getlevel() == logging.INFO | 0.6535208225250244 |
719 | def cfg_dict(cfg: Config) -> AttrDict:
if<mask>:
return AttrDict(cfg)
else:
return AttrDict(vars(cfg)) | False | isinstance(cfg, dict) | isinstance(cfg, AttrDict) | 0.6484097242355347 |
720 | def __getitem__(self, package):
"""Get information about a package from apt and dpkg databases.
:param package: Name of package
:type package: str
:returns: Package object
:rtype: object
:raises: KeyError, subprocess.CalledProcessError
"""
apt_result = self._apt_cache_show([package])[package]
apt_result['name'] = apt_result.pop('package')
pkg = Package(apt_result)
dpkg_result = self.dpkg_list([package]).get(package, {})
current_ver = None
installed_version = dpkg_result.get('version')
if<mask>:
current_ver = Version({'ver_str': installed_version})
pkg.current_ver = current_ver
pkg.architecture = dpkg_result.get('architecture')
return pkg | False | installed_version | installed_version is not None | 0.6565266847610474 |
721 | def can_offerer_create_educational_offer(offerer_id: int) -> None:
import pcapi.core.educational.adage_backends as adage_client
if<mask>:
return
if offerers_repository.offerer_has_venue_with_adage_id(offerer_id):
return
siren = offerers_repository.find_siren_by_offerer_id(offerer_id)
try:
response = adage_client.get_adage_offerer(siren)
if len(response) == 0:
raise educational_exceptions.CulturalPartnerNotFoundException('No venue has been found for the selected siren')
except (educational_exceptions.CulturalPartnerNotFoundException, educational_exceptions.AdageException) as exception:
raise exception | False | settings.CAN_COLLECTIVE_OFFERER_IGNORE_ADAGE | not offerer_id | 0.648491382598877 |
722 | def can_offerer_create_educational_offer(offerer_id: int) -> None:
import pcapi.core.educational.adage_backends as adage_client
if settings.CAN_COLLECTIVE_OFFERER_IGNORE_ADAGE:
return
if<mask>:
return
siren = offerers_repository.find_siren_by_offerer_id(offerer_id)
try:
response = adage_client.get_adage_offerer(siren)
if len(response) == 0:
raise educational_exceptions.CulturalPartnerNotFoundException('No venue has been found for the selected siren')
except (educational_exceptions.CulturalPartnerNotFoundException, educational_exceptions.AdageException) as exception:
raise exception | False | offerers_repository.offerer_has_venue_with_adage_id(offerer_id) | offerer_id is None | 0.6461092233657837 |
723 | def can_offerer_create_educational_offer(offerer_id: int) -> None:
import pcapi.core.educational.adage_backends as adage_client
if settings.CAN_COLLECTIVE_OFFERER_IGNORE_ADAGE:
return
if offerers_repository.offerer_has_venue_with_adage_id(offerer_id):
return
siren = offerers_repository.find_siren_by_offerer_id(offerer_id)
try:
response = adage_client.get_adage_offerer(siren)
if<mask>:
raise educational_exceptions.CulturalPartnerNotFoundException('No venue has been found for the selected siren')
except (educational_exceptions.CulturalPartnerNotFoundException, educational_exceptions.AdageException) as exception:
raise exception | False | len(response) == 0 | not response or response.status_code != 0 | 0.6514031887054443 |
724 | def get_decoder(self, i=None):
if<mask>:
n = len(self.plotter.plot_data)
decoders = self.plotter.plot_data_decoder or [None] * n
return decoders[i] or self.plotter.plot_data[i].psy.decoder
else:
return self.decoder | False | i is not None and isinstance(self.plotter.plot_data, InteractiveList) | i is not None | 0.6468645334243774 |
725 | def colorize_codeblock_body(s: str) -> Iterator[Union[Tag, str]]:
idx = 0
for match in DOCTEST_RE.finditer(s):
start = match.start()
if<mask>:
yield s[idx:start]
yield from subfunc(match)
idx = match.end()
assert idx == len(s) | False | idx < start | start and idx < len(s) | 0.6634973287582397 |
726 | def activate(visualizer: Visualizer) -> None:
if<mask>:
raise core.UserError('Running a task', 'Please wait until the current task has been finished before switching to another window.')
global __active_visualizer
if __active_visualizer is not None:
__active_visualizer.detach()
__active_visualizer = visualizer
__active_visualizer.attach()
__active_visualizer.refresh_all()
__active_visualizer.update_status(__active_visualizer.get_default_status()) | False | visualizer.get_window().get_scheduler().has_active_tasks() | hasattr(visualizer, 'get_default_status') | 0.6484569311141968 |
727 | def activate(visualizer: Visualizer) -> None:
if visualizer.get_window().get_scheduler().has_active_tasks():
raise core.UserError('Running a task', 'Please wait until the current task has been finished before switching to another window.')
global __active_visualizer
if<mask>:
__active_visualizer.detach()
__active_visualizer = visualizer
__active_visualizer.attach()
__active_visualizer.refresh_all()
__active_visualizer.update_status(__active_visualizer.get_default_status()) | False | __active_visualizer is not None | __active_visualizer.is_visible() and __active_visualizer.get_active_window() is visualizer.get_window() | 0.6550665497779846 |
728 | def _get_label(self, graph_number, node):
assert graph_number in [1, 2]
if<mask>:
return self.graph1.nodes[node]['label']
else:
return self.graph2.nodes[node]['label'] | True | graph_number == 1 | graph_number == 1 | 0.6545867919921875 |
729 | def variance(self):
if<mask>:
return self.S / (self.n - 1.0)
else:
return 0 | False | self.n >= 2 | self.n > 1.0 | 0.653853178024292 |
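Row 729 is the tail of a running-statistics accumulator: with `S` the running sum of squared deviations, the unbiased sample variance `S / (n - 1)` needs at least two observations. A standalone check:

```python
def variance(S, n):
    return S / (n - 1.0) if n >= 2 else 0

assert variance(8.0, 5) == 2.0
assert variance(0.0, 1) == 0     # one sample gives no spread estimate
```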
730 | def mixing_rate_num_characters(task, temperature: float=1.0, char_count_name: str='text_chars') -> float:
"""Mixing rate based on the number of characters for the task's 'train' split.
Args:
task: the seqio.Task to compute a rate for.
temperature: a temperature (T) to scale rate (r) by as r^(1/T).
char_count_name: feature name of the character counts in the cached stats
file.
Returns:
The mixing rate for this task.
"""
if<mask>:
raise ValueError('`mixing_rate_num_characters` requires that each task is cached with the character count stats.')
ret = task.get_cached_stats('train')[char_count_name]
if temperature != 1.0:
ret = ret ** (1.0 / temperature)
return ret | False | task.cache_dir is None | char_count_name not in task.get_cached_stats('train') | 0.6491622924804688 |
731 | def mixing_rate_num_characters(task, temperature: float=1.0, char_count_name: str='text_chars') -> float:
"""Mixing rate based on the number of characters for the task's 'train' split.
Args:
task: the seqio.Task to compute a rate for.
temperature: a temperature (T) to scale rate (r) by as r^(1/T).
char_count_name: feature name of the character counts in the cached stats
file.
Returns:
The mixing rate for this task.
"""
if task.cache_dir is None:
raise ValueError('`mixing_rate_num_characters` requires that each task is cached with the character count stats.')
ret = task.get_cached_stats('train')[char_count_name]
if<mask>:
ret = ret ** (1.0 / temperature)
return ret | True | temperature != 1.0 | temperature != 1.0 | 0.6570450067520142 |
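Rows 730-731 scale per-task rates as r^(1/T); T > 1 flattens the mixture toward uniform. A toy computation with assumed character counts:

```python
char_counts = {'big': 1_000_000, 'small': 10_000}

def rates(counts, temperature=1.0):
    return {k: v ** (1.0 / temperature) for k, v in counts.items()}

print(rates(char_counts, temperature=2.0))
# {'big': 1000.0, 'small': 100.0} -- a 100:1 gap shrinks to 10:1
```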
732 | def log_error(experiment_name, model_name, specific_params, inp, err_msg, path_prefix):
err_info = {'input': inp, 'msg': err_msg}
logged_errors = {}
if<mask>:
logged_errors = read_json(path_prefix, 'errors.json')
if experiment_name not in logged_errors:
logged_errors[experiment_name] = {}
if model_name not in logged_errors[experiment_name]:
logged_errors[experiment_name][model_name] = []
logged_errors[experiment_name][model_name].append({'err_info': err_info, **specific_params})
write_json(path_prefix, 'errors.json', logged_errors) | False | check_file_exists(path_prefix, 'errors.json') | specific_params == 'no_specific_params' | 0.6440228223800659 |
733 | def log_error(experiment_name, model_name, specific_params, inp, err_msg, path_prefix):
err_info = {'input': inp, 'msg': err_msg}
logged_errors = {}
if check_file_exists(path_prefix, 'errors.json'):
logged_errors = read_json(path_prefix, 'errors.json')
if<mask>:
logged_errors[experiment_name] = {}
if model_name not in logged_errors[experiment_name]:
logged_errors[experiment_name][model_name] = []
logged_errors[experiment_name][model_name].append({'err_info': err_info, **specific_params})
write_json(path_prefix, 'errors.json', logged_errors) | True | experiment_name not in logged_errors | experiment_name not in logged_errors | 0.6507952213287354 |
734 | def log_error(experiment_name, model_name, specific_params, inp, err_msg, path_prefix):
err_info = {'input': inp, 'msg': err_msg}
logged_errors = {}
if check_file_exists(path_prefix, 'errors.json'):
logged_errors = read_json(path_prefix, 'errors.json')
if experiment_name not in logged_errors:
logged_errors[experiment_name] = {}
if<mask>:
logged_errors[experiment_name][model_name] = []
logged_errors[experiment_name][model_name].append({'err_info': err_info, **specific_params})
write_json(path_prefix, 'errors.json', logged_errors) | True | model_name not in logged_errors[experiment_name] | model_name not in logged_errors[experiment_name] | 0.6472816467285156 |
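The create-if-missing nesting in rows 732-734 collapses with `defaultdict`; a sketch assuming the same errors.json layout:

```python
import json
from collections import defaultdict

logged_errors = defaultdict(lambda: defaultdict(list))
logged_errors['exp1']['model_a'].append({'err_info': {'msg': 'boom'}})
print(json.dumps(logged_errors))   # nested defaultdicts serialize like plain dicts
```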
735 | def encode(self, x):
c = torch.ones(x.shape[0], 1) * self.sos_token
c = c.long().to(x.device)
if<mask>:
return (c, None, [None, None, c])
return c | False | self.quantize_interface | not isinstance(c, torch.Tensor) | 0.6497696042060852 |
736 | def _check_letter_case_collisions(eopatch_features: Features, filesystem_features: FilesystemDataInfo) -> None:
"""Check that features have no name clashes (ignoring case) with other EOPatch features and saved features."""
lowercase_features = {_to_lowercase(*feature) for feature in eopatch_features}
if<mask>:
raise OSError('Some features differ only in casing and cannot be saved in separate files.')
for feature, _ in filesystem_features.iterate_features():
if feature not in eopatch_features and _to_lowercase(*feature) in lowercase_features:
raise OSError(f'There already exists a feature {feature} in the filesystem that only differs in casing from a feature that should be saved.') | False | len(lowercase_features) != len(eopatch_features) | filesystem_features.is_multi_columns | 0.6443958282470703 |
737 | def _check_letter_case_collisions(eopatch_features: Features, filesystem_features: FilesystemDataInfo) -> None:
"""Check that features have no name clashes (ignoring case) with other EOPatch features and saved features."""
lowercase_features = {_to_lowercase(*feature) for feature in eopatch_features}
if len(lowercase_features) != len(eopatch_features):
raise OSError('Some features differ only in casing and cannot be saved in separate files.')
for feature, _ in filesystem_features.iterate_features():
if<mask>:
raise OSError(f'There already exists a feature {feature} in the filesystem that only differs in casing from a feature that should be saved.') | False | feature not in eopatch_features and _to_lowercase(*feature) in lowercase_features | feature in lowercase_features | 0.6462252140045166 |
738 | def recalculate_descendants_perms(self):
if<mask>:
return
children = list(self.children.only('pk', 'owner', 'parent'))
if not children:
return
effective_perms = self._get_effective_perms(include_calculated=False)
for child in children:
child.permissions.filter(inherited=True).delete()
child._recalculate_inherited_perms(parent_effective_perms=effective_perms, stale_already_deleted=True)
child.recalculate_descendants_perms() | False | self.asset_type not in ASSET_TYPES_WITH_CHILDREN | not self.children | 0.6441363096237183 |
739 | def recalculate_descendants_perms(self):
if self.asset_type not in ASSET_TYPES_WITH_CHILDREN:
return
children = list(self.children.only('pk', 'owner', 'parent'))
if<mask>:
return
effective_perms = self._get_effective_perms(include_calculated=False)
for child in children:
child.permissions.filter(inherited=True).delete()
child._recalculate_inherited_perms(parent_effective_perms=effective_perms, stale_already_deleted=True)
child.recalculate_descendants_perms() | True | not children | not children | 0.6563915014266968 |
740 | def _make_dir(filename):
folder = os.path.dirname(filename)
if<mask>:
os.makedirs(folder) | True | not os.path.exists(folder) | not os.path.exists(folder) | 0.6444495916366577 |
741 | def checkcol(amob):
if<mask>:
circle = Vector3(amob.pos.x, amob.pos.y, amob.scale)
if segvcircle(p1, p2, circle):
self.explosions.append(explosion(amob))
amob.kill()
return True
return False | False | amob.on | amob | 0.6553150415420532 |
742 | def checkcol(amob):
if amob.on:
circle = Vector3(amob.pos.x, amob.pos.y, amob.scale)
if<mask>:
self.explosions.append(explosion(amob))
amob.kill()
return True
return False | False | segvcircle(p1, p2, circle) | circle.x + circle.y >= amob.width and circle.y + circle.width | 0.6447510123252869 |
743 | def serialize(self):
if<mask>:
self.length = len(self.value) + self._OUI_AND_SUBTYPE_LEN
buf = struct.pack(self._PACK_STR, self._type, self.length, self.oui, self.subtype)
buf = bytearray(buf)
form = '%ds' % (self.length - self._OUI_AND_SUBTYPE_LEN)
buf.extend(struct.pack(form, self.value))
return buf | False | self.length == 0 | self.length == -1 | 0.6622902750968933 |
744 | def prepare(self, db, user):
self.place_type = self.list[1]
if<mask>:
self.place_type = PlaceType()
self.place_type.set_from_xml_str(self.list[1]) | False | self.place_type | self.list[1] == 'place' | 0.6560956239700317 |
745 | def _update_matched_xlinks(self, b_matched, prot, res, matched, non_matched):
if<mask>:
if prot in matched:
matched[prot].add(res)
else:
matched[prot] = set([res])
elif prot in non_matched:
non_matched[prot].add(res)
else:
non_matched[prot] = set([res])
return (matched, non_matched) | True | b_matched | b_matched | 0.6556805372238159 |
746 | def _update_matched_xlinks(self, b_matched, prot, res, matched, non_matched):
if b_matched:
if<mask>:
matched[prot].add(res)
else:
matched[prot] = set([res])
elif prot in non_matched:
non_matched[prot].add(res)
else:
non_matched[prot] = set([res])
return (matched, non_matched) | True | prot in matched | prot in matched | 0.6652586460113525 |
747 | def _update_matched_xlinks(self, b_matched, prot, res, matched, non_matched):
if b_matched:
if prot in matched:
matched[prot].add(res)
else:
matched[prot] = set([res])
elif<mask>:
non_matched[prot].add(res)
else:
non_matched[prot] = set([res])
return (matched, non_matched) | False | prot in non_matched | not prot in non_matched | 0.6521333456039429 |
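Rows 745-747 mask three branches of one add-or-create routine; `dict.setdefault` expresses the same logic in one line. A minimal sketch:

```python
def update_matched_xlinks(b_matched, prot, res, matched, non_matched):
    target = matched if b_matched else non_matched
    target.setdefault(prot, set()).add(res)   # create the set on first use
    return matched, non_matched
```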
748 | def __eq__(self, other):
if<mask>:
return self is other or self.__dict__ == other.__dict__
elif isinstance(other, basestring):
try:
self.parseString(_ustr(other), parseAll=True)
return True
except ParseBaseException:
return False
else:
return super(ParserElement, self) == other | True | isinstance(other, ParserElement) | isinstance(other, ParserElement) | 0.6479086875915527 |
749 | def __eq__(self, other):
if isinstance(other, ParserElement):
return self is other or self.__dict__ == other.__dict__
elif<mask>:
try:
self.parseString(_ustr(other), parseAll=True)
return True
except ParseBaseException:
return False
else:
return super(ParserElement, self) == other | True | isinstance(other, basestring) | isinstance(other, basestring) | 0.6456524133682251 |
750 | def __init__(self, backbone, neck=None, bbox_head=None, train_cfg=None, test_cfg=None, pretrained=None):
super(SingleStageDetector, self).__init__()
self.backbone = builder.build_backbone(backbone)
if<mask>:
self.neck = builder.build_neck(neck)
self.bbox_head = builder.build_head(bbox_head)
self.train_cfg = train_cfg
self.test_cfg = test_cfg
self.init_weights(pretrained=pretrained) | True | neck is not None | neck is not None | 0.6676803231239319 |
751 | def getfirst(self, key, default=None):
""" Return the first value received."""
if<mask>:
value = self[key]
if isinstance(value, list):
return value[0].value
else:
return value.value
else:
return default | True | key in self | key in self | 0.6615408658981323 |
752 | def getfirst(self, key, default=None):
""" Return the first value received."""
if key in self:
value = self[key]
if<mask>:
return value[0].value
else:
return value.value
else:
return default | False | isinstance(value, list) | isinstance(value, tuple) | 0.6437720656394958 |
753 | def getRegisteredMachines():
result = helper.execCommand('VBoxManage list vms')
lines = result.stdout.decode('utf-8').strip().split('\n')
machines = {}
for line in lines:
if<mask>:
name, vid = line.split(' ')
machines[vid] = name.strip('"')
return machines | False | len(line) > 0 | line.startswith('VM name:') | 0.6479718685150146 |
754 | def describe_model_package_group(self, model_package_group_name: str) -> ModelPackageGroup:
model_package_group = self.model_package_groups.get(model_package_group_name)
if<mask>:
raise ValidationError(f'Model package group {model_package_group_name} not found')
return model_package_group | True | model_package_group is None | model_package_group is None | 0.6486688852310181 |
755 | def build_tokenizer(vocab_file, merges_file, tokenizer_type='GPT2BPETokenizer'):
"""Select and instantiate the tokenizer."""
if<mask>:
tokenizer = _GPT2BPETokenizer(vocab_file, merges_file)
else:
raise NotImplementedError('{} tokenizer is not implemented.'.format(tokenizer_type))
return tokenizer | True | tokenizer_type == 'GPT2BPETokenizer' | tokenizer_type == 'GPT2BPETokenizer' | 0.647770881652832 |
756 | def write(self, oprot):
if<mask>:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('get_open_txns_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd() | True | oprot._fast_encode is not None and self.thrift_spec is not None | oprot._fast_encode is not None and self.thrift_spec is not None | 0.6459708213806152 |
757 | def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('get_open_txns_result')
if<mask>:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd() | True | self.success is not None | self.success is not None | 0.6479748487472534 |
758 | @property
def image_title(self) -> str:
if<mask>:
raise LabelRowError('Image title can only be retrieved for DataType.IMAGE or DataType.IMG_GROUP')
return self._frame_level_data().image_title | False | self._label_row.data_type not in [DataType.IMAGE, DataType.IMG_GROUP] | self._label_row_type not in [DataType.IMAGE, DataType.IMG_GROUP] | 0.6482896208763123 |
759 | def image_combo_index_changed(self, combo, row):
if<mask>:
self.display_add_new_image_dialog(select_in_combo=True, combo=combo)
title_item = self.item(row, 1)
title_item.setIcon(combo.itemIcon(combo.currentIndex()))
combo.setItemData(0, combo.currentIndex()) | False | combo.currentText() == self.COMBO_IMAGE_ADD | self.get_selected_image_widget() | 0.6496030688285828 |
760 | def mod_crop(img, scale):
"""Mod crop images, used during testing.
Args:
img (ndarray): Input image.
scale (int): Scale factor.
Returns:
ndarray: Result image.
"""
img = img.copy()
if<mask>:
h, w = (img.shape[0], img.shape[1])
h_remainder, w_remainder = (h % scale, w % scale)
img = img[:h - h_remainder, :w - w_remainder, ...]
else:
raise ValueError(f'Wrong img ndim: {img.ndim}.')
return img | False | img.ndim in (2, 3) | img.ndim == 3 | 0.6473227739334106 |
761 | def merge_counts(x, y):
for k, v in y.items():
if<mask>:
x[k] = 0
x[k] += v
return x | True | k not in x | k not in x | 0.6621514558792114 |
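Row 761's merge_counts is the plain-dict form of `Counter` addition; under that assumption the two agree:

```python
from collections import Counter

x, y = {'a': 2}, {'a': 1, 'b': 3}
assert dict(Counter(x) + Counter(y)) == {'a': 3, 'b': 3}
```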
762 | def Activated(self):
import FreeCADGui
sel = FreeCADGui.Selection.getSelection()
if<mask>:
nudge = self.getNudgeValue('right')
if nudge:
FreeCADGui.addModule('Draft')
FreeCADGui.doCommand('Draft.move(' + self.toStr(sel) + ',FreeCAD.' + str(nudge) + ')')
FreeCADGui.doCommand('FreeCAD.ActiveDocument.recompute()') | True | sel | sel | 0.6726675033569336 |
763 | def Activated(self):
import FreeCADGui
sel = FreeCADGui.Selection.getSelection()
if sel:
nudge = self.getNudgeValue('right')
if<mask>:
FreeCADGui.addModule('Draft')
FreeCADGui.doCommand('Draft.move(' + self.toStr(sel) + ',FreeCAD.' + str(nudge) + ')')
FreeCADGui.doCommand('FreeCAD.ActiveDocument.recompute()') | True | nudge | nudge | 0.6760420799255371 |
764 | def get_prod_folder():
folder = ''
system = platform.system()
if<mask>:
folder = '/home/xxx/'
elif system == 'Windows':
folder = 'C:\\xxx\\'
return folder | True | system == 'Linux' | system == 'Linux' | 0.6563327312469482 |
765 | def get_prod_folder():
folder = ''
system = platform.system()
if system == 'Linux':
folder = '/home/xxx/'
elif<mask>:
folder = 'C:\\xxx\\'
return folder | True | system == 'Windows' | system == 'Windows' | 0.6597287654876709 |
766 | def iter_messages(self):
for msg_data in self.messages:
data = msg_data.copy()
if<mask>:
guild = self._bot.get_guild(guild_id)
channel = guild and guild.get_channel(msg_data['channel'])
else:
channel = self._bot.get_channel(msg_data['channel'])
data['partial_message'] = channel.get_partial_message(data['message']) if channel is not None else None
yield data | False | guild_id := msg_data.get('guild') | self.guild_id | 0.646680474281311 |
767 | def _solve(linear_op, rhs):
from ..operators import CholLinearOperator, TriangularLinearOperator
if<mask>:
return linear_op.solve(rhs)
if settings.fast_computations.solves.off() or linear_op.size(-1) <= settings.max_cholesky_size.value():
return linear_op.cholesky()._cholesky_solve(rhs)
else:
with torch.no_grad():
preconditioner = linear_op.detach()._solve_preconditioner()
return linear_op._solve(rhs, preconditioner) | False | isinstance(linear_op, (CholLinearOperator, TriangularLinearOperator)) | not settings.enable_cholesky_size.value() or linear_op.size(-1) <= settings.max_cholesky_size.value() | 0.6458022594451904 |
768 | def _solve(linear_op, rhs):
from ..operators import CholLinearOperator, TriangularLinearOperator
if isinstance(linear_op, (CholLinearOperator, TriangularLinearOperator)):
return linear_op.solve(rhs)
if<mask>:
return linear_op.cholesky()._cholesky_solve(rhs)
else:
with torch.no_grad():
preconditioner = linear_op.detach()._solve_preconditioner()
return linear_op._solve(rhs, preconditioner) | False | settings.fast_computations.solves.off() or linear_op.size(-1) <= settings.max_cholesky_size.value() | isinstance(linear_op, CholLinearOperator) | 0.6458474397659302 |
769 | def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if<mask>:
order = self.get_order(aBuf)
else:
order = -1
if order >= 0:
self._mTotalChars += 1
if order < self._mTableSize:
if 512 > self._mCharToFreqOrder[order]:
self._mFreqChars += 1 | True | aCharLen == 2 | aCharLen == 2 | 0.6565761566162109 |
770 | def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if aCharLen == 2:
order = self.get_order(aBuf)
else:
order = -1
if<mask>:
self._mTotalChars += 1
if order < self._mTableSize:
if 512 > self._mCharToFreqOrder[order]:
self._mFreqChars += 1 | True | order >= 0 | order >= 0 | 0.6584807634353638 |
771 | def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if aCharLen == 2:
order = self.get_order(aBuf)
else:
order = -1
if order >= 0:
self._mTotalChars += 1
if<mask>:
if 512 > self._mCharToFreqOrder[order]:
self._mFreqChars += 1 | True | order < self._mTableSize | order < self._mTableSize | 0.6491011381149292 |
772 | def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if aCharLen == 2:
order = self.get_order(aBuf)
else:
order = -1
if order >= 0:
self._mTotalChars += 1
if order < self._mTableSize:
if<mask>:
self._mFreqChars += 1 | True | 512 > self._mCharToFreqOrder[order] | 512 > self._mCharToFreqOrder[order] | 0.6435670852661133 |
773 | def GetActivePage(self):
""" Returns the current selected tab or ``wx.NOT_FOUND`` if none is selected. """
for indx, page in enumerate(self._pages):
if<mask>:
return indx
return wx.NOT_FOUND | False | page.active | page['name'] == 'hx.meituan.com' | 0.6590600609779358 |
774 | @pytest.mark.script_launch_mode('subprocess')
def test_duration_formatting(script_runner):
ret = script_runner.run(['cat-numbers', '-b', '--formatting', 'tests/data/duration_112.numbers'], print_result=False)
assert ret.success
rows = ret.stdout.strip().splitlines()
csv_reader = csv.reader(rows)
for row in csv_reader:
if<mask>:
check.equal(row[6], row[13]) | False | row[13] != 'Check' and row[13] is not None | row[0] == 'duration' | 0.6467792987823486 |
775 | @hpipm_mode.setter
def hpipm_mode(self, hpipm_mode):
hpipm_modes = ('BALANCE', 'SPEED_ABS', 'SPEED', 'ROBUST')
if<mask>:
self.__hpipm_mode = hpipm_mode
else:
raise Exception('Invalid hpipm_mode value. Possible values are:\n\n' + ',\n'.join(hpipm_modes) + '.\n\nYou have: ' + hpipm_mode + '.\n\nExiting.')
776 | def atari_env_by_name(name):
for cfg in ENVPOOL_ATARI_ENVS:
if<mask>:
return cfg
raise Exception('Unknown Atari env') | False | cfg.name == name | name == cfg.get('atari') | 0.6565744876861572 |
777 | def __eq__(self, other):
"""
Specie is equal to other only if element and oxidation states are
exactly the same.
"""
if<mask>:
return False
return self.symbol == other.symbol and self._oxi_state == other._oxi_state | False | not isinstance(other, DummySpecie) | not isinstance(other, Element) | 0.6492488980293274 |
778 | def makedir(data_url):
if<mask>:
shutil.rmtree(data_url)
os.makedirs(data_url) | True | os.path.exists(data_url) | os.path.exists(data_url) | 0.6426948308944702 |
779 | def set_dropout_rate(self, p=None) -> int:
if<mask>:
self.head_module[-2].p = p
return 1
return 0 | True | p is not None | p is not None | 0.6538280248641968 |
780 | @staticmethod
def identify_baud_rate(dxl_id, usb):
"""Identify the baud rate a Dynamixel servo is communicating at.
Parameters
----------
dxl_id : int
Dynamixel ID on chain. Must be [0, 25]
usb : str
the USB port, typically "/dev/something"
Returns
-------
int
the baud rate the Dynamixel is communicating at
"""
try:
for b in BAUD_MAP.keys():
port_h = prh.PortHandler(usb)
port_h.openPort()
port_h.setBaudRate(b)
packet_h = pch.PacketHandler(2.0)
_, dxl_comm_result, _ = packet_h.ping(port_h, dxl_id)
port_h.closePort()
if<mask>:
return b
except:
pass
return -1 | False | dxl_comm_result == COMM_SUCCESS | b == dxl_comm_result | 0.6449222564697266 |
781 | def remove_plugin(self, label: str) -> None:
"""Remove a plugin from the encoder.
Parameters
----------
label : str
The label of the plugin to remove.
"""
if<mask>:
del self._available[label]
elif label in self._unavailable:
del self._unavailable[label]
else:
raise ValueError(f"Unable to remove '{label}', no such plugin'") | True | label in self._available | label in self._available | 0.6543689966201782 |
782 | def remove_plugin(self, label: str) -> None:
"""Remove a plugin from the encoder.
Parameters
----------
label : str
The label of the plugin to remove.
"""
if label in self._available:
del self._available[label]
elif<mask>:
del self._unavailable[label]
else:
raise ValueError(f"Unable to remove '{label}', no such plugin'") | True | label in self._unavailable | label in self._unavailable | 0.6527466773986816 |
783 | def select(self, panel: Any):
for index, item in enumerate(self.items):
if<mask>:
if self.selected_index != index:
self.selected_index = index
self.dirty()
return | False | item == panel | item is panel | 0.6561578512191772 |
784 | def select(self, panel: Any):
for index, item in enumerate(self.items):
if item == panel:
if<mask>:
self.selected_index = index
self.dirty()
return | False | self.selected_index != index | index + 1 == len(self.items) | 0.6484863758087158 |
785 | def __call__(self, results):
"""Call function to convert image in results to :obj:`torch.Tensor` and
transpose the channel order.
Args:
results (dict): Result dict contains the image data to convert.
Returns:
dict: The result dict contains the image converted
to :obj:`torch.Tensor` and transposed to (C, H, W) order.
"""
for key in self.keys:
img = results[key]
if<mask>:
img = np.expand_dims(img, -1)
results[key] = to_tensor(img.transpose(2, 0, 1))
return results | True | len(img.shape) < 3 | len(img.shape) < 3 | 0.6471738815307617 |
786 | def multi_gpu_extract(model, teacher_model, data_loader, logger, args):
func = lambda **x: model(mode='extract', **x)
func_teacher = lambda **x: model(mode='extract', **x)
rank, world_size = get_dist_info()
results = dist_forward_collect_with_teacher(func, func_teacher, data_loader, rank, len(data_loader.dataset))
if<mask>:
value = results['ressl_loss'].mean()
logger.info(f'mean ressl loss: {value}')
torch.distributed.barrier() | False | rank == 0 | 'ressl_loss' in results | 0.6710919141769409 |
787 | def user_role_updated(self, botengine, location_id, user_id, role, category, location_access, previous_category, previous_location_access):
"""
A user changed roles
:param botengine: BotEngine environment
:param location_id: Location ID
:param user_id: User ID that changed roles
:param role: Application-layer agreed upon role integer which may auto-configure location_access and alert category
:param category: User's current alert/communications category (1=resident; 2=supporter)
:param location_access: User's current access to the location
:param previous_category: User's previous category, if any
:param previous_location_access: User's previous access to the location, if any
:return:
"""
if<mask>:
botengine.get_logger().info('\t=> Now tracking location ' + str(location_id))
self.locations[location_id] = Location(botengine, location_id)
self.locations[location_id].user_role_updated(botengine, user_id, role, category, location_access, previous_category, previous_location_access) | True | location_id not in self.locations | location_id not in self.locations | 0.6452451944351196 |
788 | def forward(self, input):
x_gather_list = all_gather_differentiable(input)
input_size_list = all_gather_obj(input.size(0))
cur_gpu = get_rank()
if<mask>:
self.start_list = [sum(input_size_list[:t]) for t in range(len(input_size_list) + 1)]
dist.barrier()
return torch.cat(x_gather_list, 0) | False | cur_gpu == 0 | cur_gpu == 1 | 0.6569294929504395 |
789 | def param_to_unit(self, name):
"""Return the unit associated with a parameter
This is a wrapper function over the property ``_param_unit_map``. It
also handles aliases and indexed parameters (e.g., `pint.models.parameter.prefixParameter`
and `pint.models.parameter.maskParameter`) with an index beyond those currently
initialized.
This can be used without an existing :class:`~pint.models.TimingModel`.
Parameters
----------
name : str
Name of PINT parameter or alias
Returns
-------
astropy.u.Unit
"""
pintname, firstname = self.alias_to_pint_param(name)
if<mask>:
return self._param_unit_map[pintname]
prefix, idx_str, idx = split_prefixed_name(pintname)
component = self.param_component_map[firstname][0]
if getattr(self.components[component], firstname).unit_template is None:
return self._param_unit_map[firstname]
return u.Unit(getattr(self.components[component], firstname).unit_template(idx)) | False | pintname == firstname | pintname in self._param_unit_map | 0.6536151170730591 |
790 | def param_to_unit(self, name):
"""Return the unit associated with a parameter
This is a wrapper function over the property ``_param_unit_map``. It
also handles aliases and indexed parameters (e.g., `pint.models.parameter.prefixParameter`
and `pint.models.parameter.maskParameter`) with an index beyond those currently
initialized.
This can be used without an existing :class:`~pint.models.TimingModel`.
Parameters
----------
name : str
Name of PINT parameter or alias
Returns
-------
astropy.u.Unit
"""
pintname, firstname = self.alias_to_pint_param(name)
if pintname == firstname:
return self._param_unit_map[pintname]
prefix, idx_str, idx = split_prefixed_name(pintname)
component = self.param_component_map[firstname][0]
if<mask>:
return self._param_unit_map[firstname]
return u.Unit(getattr(self.components[component], firstname).unit_template(idx)) | False | getattr(self.components[component], firstname).unit_template is None | component == 'root' | 0.6446189880371094 |
791 | def __bool__(self):
for n, r in zip(self._outputFileNames, self._renameFileNames):
if<mask>:
return True
return False | False | n != r | n == r | 0.6677680611610413 |
792 | def wait_for_mitm_start(self, config, logger):
timeout = 30
wait_time_count = 0
ip = config.get('ip')
mock_port = config.get('mock.port')
proxy_port = config.get('proxy.port')
while True:
if<mask>:
return False
time.sleep(1)
wait_time_count += 1
try:
resp = requests.get(f'http://{ip}:{mock_port}/api/status', proxies={'http': f'http://{ip}:{proxy_port}'})
if resp.status_code != 200:
continue
else:
return True
except Exception:
continue | False | wait_time_count >= timeout | wait_time_count > timeout | 0.6486297845840454 |
793 | def wait_for_mitm_start(self, config, logger):
timeout = 30
wait_time_count = 0
ip = config.get('ip')
mock_port = config.get('mock.port')
proxy_port = config.get('proxy.port')
while True:
if wait_time_count >= timeout:
return False
time.sleep(1)
wait_time_count += 1
try:
resp = requests.get(f'http://{ip}:{mock_port}/api/status', proxies={'http': f'http://{ip}:{proxy_port}'})
if<mask>:
continue
else:
return True
except Exception:
continue | False | resp.status_code != 200 | resp.status_code == 200 or resp.status_code == 404 | 0.6518865823745728 |
794 | def runProjectManager(self):
if<mask>:
return
self.defineCurrentProject()
dlg = QGISRedProjectManagerDialog()
dlg.config(self.iface, self.ProjectDirectory, self.NetworkName, self)
self.opendedLayers = False
self.especificComplementaryLayers = []
self.selectedFids = {}
dlg.exec_()
result = dlg.ProcessDone
if result:
self.NetworkName = dlg.NetworkName
self.ProjectDirectory = dlg.ProjectDirectory | True | not self.checkDependencies() | not self.checkDependencies() | 0.6507564783096313 |
795 | def runProjectManager(self):
if not self.checkDependencies():
return
self.defineCurrentProject()
dlg = QGISRedProjectManagerDialog()
dlg.config(self.iface, self.ProjectDirectory, self.NetworkName, self)
self.opendedLayers = False
self.especificComplementaryLayers = []
self.selectedFids = {}
dlg.exec_()
result = dlg.ProcessDone
if<mask>:
self.NetworkName = dlg.NetworkName
self.ProjectDirectory = dlg.ProjectDirectory | True | result | result | 0.6687904596328735 |
796 | def strip_module(filename):
if<mask>:
filename = os.path.splitext(filename)[0]
if filename.endswith('module'):
filename = filename[:-6]
return filename | False | '.' in filename | isinstance(filename, str) | 0.6636002659797668 |
797 | def strip_module(filename):
if '.' in filename:
filename = os.path.splitext(filename)[0]
if<mask>:
filename = filename[:-6]
return filename | False | filename.endswith('module') | filename.endswith('__') | 0.6420181393623352 |
798 | def is_active_from(self, conf):
""" used in try-restart/other commands to check if needed. """
if<mask>:
return False
return self.get_active_from(conf) == 'active' | False | not conf | conf.getlist(Service, 'ExecReload', []) | 0.6576880216598511 |
799 | def get_unicode_from_response(r):
"""Returns the requested content back in unicode.
:param r: Response object to get unicode content from.
Tried:
1. charset from content-type
2. fall back and replace all unicode characters
:rtype: str
"""
warnings.warn('In requests 3.0, get_unicode_from_response will be removed. For more information, please see the discussion on issue #2266. (This warning should only appear once.)', DeprecationWarning)
tried_encodings = []
encoding = get_encoding_from_headers(r.headers)
if<mask>:
try:
return str(r.content, encoding)
except UnicodeError:
tried_encodings.append(encoding)
try:
return str(r.content, encoding, errors='replace')
except TypeError:
return r.content | True | encoding | encoding | 0.6653707027435303 |
800 | def _bbox_forward(self, x, rois):
"""Box head forward function used in both training and testing time."""
bbox_cls_feats = self.bbox_roi_extractor(x[:self.bbox_roi_extractor.num_inputs], rois)
bbox_reg_feats = self.bbox_roi_extractor(x[:self.bbox_roi_extractor.num_inputs], rois, roi_scale_factor=self.reg_roi_scale_factor)
if<mask>:
bbox_cls_feats = self.shared_head(bbox_cls_feats)
bbox_reg_feats = self.shared_head(bbox_reg_feats)
cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats)
bbox_results = dict(cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_cls_feats)
return bbox_results | True | self.with_shared_head | self.with_shared_head | 0.6471490859985352 |
801 | def __init__(self, dataset_name, output_dir=None, distributed=True):
super().__init__(dataset_name, output_dir=output_dir, distributed=distributed)
maybe_filter_categories_cocoapi(dataset_name, self._coco_api)
_maybe_add_iscrowd_annotations(self._coco_api)
if<mask>:
self._maybe_substitute_metadata() | False | hasattr(self._metadata, 'thing_dataset_id_to_contiguous_id') | self._coco_api.get_substitute_metadata() | 0.6417214274406433 |
804 | 802 | def is_secure_transport(uri):
"""Check if the uri is over ssl."""
if<mask>:
return True
return uri.lower().startswith('https://') | False | os.environ.get('DEBUG') | uri.lower().startswith('http://') or uri.lower().startswith('https://') | 0.6447168588638306 |
805 | 803 | def __set_ks(self, kf_kb_pair):
"""Utility for setting kf and kb from packed vectors.
Parameters
----------
kf_kb_pair: tuple(numpy.ndarray, numpy.ndarray)
If any element of the tuple is None, the corresponding data is not updated.
Returns
-------
"""
assert isinstance(kf_kb_pair, tuple)
assert len(kf_kb_pair) == 2
if<mask>:
self.__set_kf(kf_kb_pair[0])
if kf_kb_pair[1] is not None:
self.__set_kb(kf_kb_pair[1]) | True | kf_kb_pair[0] is not None | kf_kb_pair[0] is not None | 0.6511337757110596 |
806 | 804 | def __set_ks(self, kf_kb_pair):
"""Utility for setting kf and kb from packed vectors.
Parameters
----------
kf_kb_pair: tuple(numpy.ndarray, numpy.ndarray)
If any element of the tuple is None, the corresponding data is not updated.
Returns
-------
"""
assert isinstance(kf_kb_pair, tuple)
assert len(kf_kb_pair) == 2
if kf_kb_pair[0] is not None:
self.__set_kf(kf_kb_pair[0])
if<mask>:
self.__set_kb(kf_kb_pair[1]) | True | kf_kb_pair[1] is not None | kf_kb_pair[1] is not None | 0.6503876447677612 |
807 | 805 | @property
def spawnflags(self):
flags = []
if<mask>:
value = self._entity_data.get('spawnflags', None)
for name, (key, _) in {'Start inactive': (1, 0)}.items():
if value & key > 0:
flags.append(name)
return flags | True | 'spawnflags' in self._entity_data | 'spawnflags' in self._entity_data | 0.6500391960144043 |
808 | 806 | @property
def spawnflags(self):
flags = []
if 'spawnflags' in self._entity_data:
value = self._entity_data.get('spawnflags', None)
for name, (key, _) in {'Start inactive': (1, 0)}.items():
if<mask>:
flags.append(name)
return flags | True | value & key > 0 | value & key > 0 | 0.658052921295166 |
809 | 807 | def update_lock(chat_id, lock_type, locked):
curr_perm = SESSION.query(Locks).get(str(chat_id))
if<mask>:
curr_perm = init_locks(chat_id)
if lock_type == 'bots':
curr_perm.bots = locked
elif lock_type == 'commands':
curr_perm.commands = locked
elif lock_type == 'email':
curr_perm.email = locked
elif lock_type == 'forward':
curr_perm.forward = locked
elif lock_type == 'url':
curr_perm.url = locked
SESSION.add(curr_perm)
SESSION.commit() | True | not curr_perm | not curr_perm | 0.6544647216796875 |
810 | 808 | def update_lock(chat_id, lock_type, locked):
curr_perm = SESSION.query(Locks).get(str(chat_id))
if not curr_perm:
curr_perm = init_locks(chat_id)
if<mask>:
curr_perm.bots = locked
elif lock_type == 'commands':
curr_perm.commands = locked
elif lock_type == 'email':
curr_perm.email = locked
elif lock_type == 'forward':
curr_perm.forward = locked
elif lock_type == 'url':
curr_perm.url = locked
SESSION.add(curr_perm)
SESSION.commit() | True | lock_type == 'bots' | lock_type == 'bots' | 0.6549631357192993 |
811 | 809 | def update_lock(chat_id, lock_type, locked):
curr_perm = SESSION.query(Locks).get(str(chat_id))
if not curr_perm:
curr_perm = init_locks(chat_id)
if lock_type == 'bots':
curr_perm.bots = locked
elif<mask>:
curr_perm.commands = locked
elif lock_type == 'email':
curr_perm.email = locked
elif lock_type == 'forward':
curr_perm.forward = locked
elif lock_type == 'url':
curr_perm.url = locked
SESSION.add(curr_perm)
SESSION.commit() | True | lock_type == 'commands' | lock_type == 'commands' | 0.652057409286499 |
812 | 810 | def update_lock(chat_id, lock_type, locked):
curr_perm = SESSION.query(Locks).get(str(chat_id))
if not curr_perm:
curr_perm = init_locks(chat_id)
if lock_type == 'bots':
curr_perm.bots = locked
elif lock_type == 'commands':
curr_perm.commands = locked
elif<mask>:
curr_perm.email = locked
elif lock_type == 'forward':
curr_perm.forward = locked
elif lock_type == 'url':
curr_perm.url = locked
SESSION.add(curr_perm)
SESSION.commit() | True | lock_type == 'email' | lock_type == 'email' | 0.6539292931556702 |
813 | 811 | def update_lock(chat_id, lock_type, locked):
curr_perm = SESSION.query(Locks).get(str(chat_id))
if not curr_perm:
curr_perm = init_locks(chat_id)
if lock_type == 'bots':
curr_perm.bots = locked
elif lock_type == 'commands':
curr_perm.commands = locked
elif lock_type == 'email':
curr_perm.email = locked
elif<mask>:
curr_perm.forward = locked
elif lock_type == 'url':
curr_perm.url = locked
SESSION.add(curr_perm)
SESSION.commit() | True | lock_type == 'forward' | lock_type == 'forward' | 0.6520143151283264 |
814 | 812 | def update_lock(chat_id, lock_type, locked):
curr_perm = SESSION.query(Locks).get(str(chat_id))
if not curr_perm:
curr_perm = init_locks(chat_id)
if lock_type == 'bots':
curr_perm.bots = locked
elif lock_type == 'commands':
curr_perm.commands = locked
elif lock_type == 'email':
curr_perm.email = locked
elif lock_type == 'forward':
curr_perm.forward = locked
elif<mask>:
curr_perm.url = locked
SESSION.add(curr_perm)
SESSION.commit() | True | lock_type == 'url' | lock_type == 'url' | 0.6523473262786865 |
815 | 813 | def arch_specific(self):
"""Return arch specific information for the current architecture"""
arch = self.ir_arch.arch
has_delayslot = False
if<mask>:
from miasm.arch.mips32.jit import mipsCGen
cgen_class = mipsCGen
has_delayslot = True
elif arch.name == 'arm':
from miasm.arch.arm.jit import arm_CGen
cgen_class = arm_CGen
else:
from miasm.jitter.codegen import CGen
cgen_class = CGen
return (cgen_class(self.ir_arch), has_delayslot) | True | arch.name == 'mips32' | arch.name == 'mips32' | 0.6543385982513428 |
816 | 814 | def arch_specific(self):
"""Return arch specific information for the current architecture"""
arch = self.ir_arch.arch
has_delayslot = False
if arch.name == 'mips32':
from miasm.arch.mips32.jit import mipsCGen
cgen_class = mipsCGen
has_delayslot = True
elif<mask>:
from miasm.arch.arm.jit import arm_CGen
cgen_class = arm_CGen
else:
from miasm.jitter.codegen import CGen
cgen_class = CGen
return (cgen_class(self.ir_arch), has_delayslot) | True | arch.name == 'arm' | arch.name == 'arm' | 0.6517907381057739 |
817 | 815 | def init(self, node, job_finder, group_history_finder):
self.node = node
self.job_finder = job_finder
self.group_history_finder = group_history_finder
self.meta_session = self.node.meta_session
if<mask>:
self._sync_state()
self.cache = cache
cache.init(self.meta_session, self.__tq)
self.ns_settings_idx = indexes.TagSecondaryIndex(keys.MM_NAMESPACE_SETTINGS_IDX, None, keys.MM_NAMESPACE_SETTINGS_KEY_TPL, self.meta_session, logger=logger, namespace='namespaces')
self.ns_settings = {}
self._sync_ns_settings() | False | self.group_history_finder | self.meta_session.sync_state | 0.651206374168396 |
818 | 816 | def add(self, xyz, atom_type, residue_type, residue_index, all_indexes, radius):
if<mask>:
self.spheres.append((xyz, residue_type, residue_index, all_indexes, radius))
else:
self.atoms.append((xyz, atom_type, residue_type, residue_index, all_indexes, radius)) | False | atom_type is None | atom_type == 'spheres' | 0.6512689590454102 |
819 | 817 | @property
def maxMosaicImageCount(self):
if<mask>:
self.__init()
return self._maxMosaicImageCount | True | self._maxMosaicImageCount is None | self._maxMosaicImageCount is None | 0.6581517457962036 |
820 | 818 | @classmethod
def eval(cls, x, y):
zero = core.Constant(0, 1)
one = core.Constant(1, 1)
if<mask>:
return one if x.val >= y.val else zero | True | isinstance(x, core.Constant) and isinstance(y, core.Constant) | isinstance(x, core.Constant) and isinstance(y, core.Constant) | 0.643170177936554 |
821 | 819 | def train(cfg=DEFAULT_CFG, use_python=False):
"""Train a YOLO segmentation model based on passed arguments."""
model = cfg.model or 'yolov8n-seg.pt'
data = cfg.data or 'coco128-seg.yaml'
device = cfg.device if cfg.device is not None else ''
args = dict(model=model, data=data, device=device)
if<mask>:
from ultralytics import YOLO
YOLO(model).train(**args)
else:
trainer = SegmentationTrainer(overrides=args)
trainer.train() | True | use_python | use_python | 0.6624095439910889 |
822 | 820 | @util.allow_redefinition_iter
def record_iter_sample(dataset, cand, shuf, random, inf=False):
first = True
while first or inf:
first = False
if<mask>:
cand = cand.sample(frac=1.0, random_state=random)
for _, sample in cand.iterrows():
yield (sample['qid'], sample['did']) | False | shuf | cand.shuf_id == shuf | 0.6643292903900146 |
823 | 821 | def __hash__(self):
"""
Possibly dangerous:
i,j,k assumed to be between -500, 499, so each taking 3 digits (i+500)
l assumed to be between 0 and 9999, so 4 digits
"""
code = 0
n = self.ijkl[3]
if<mask>:
raise ValueError('l cannot be hashed', self.l)
code = code * 10000 + n
for x in self.ijkl[:3]:
n = 500 + x
if n < 0 or n > 999:
raise ValueError('ijk cannot be hashed', self.ijk)
code = code * 1000 + n
return code | False | n < 0 or n > 9999 | n < -500 or n > 999 | 0.6533547639846802 |
824 | 822 | def __hash__(self):
"""
Possibly dangerous:
i,j,k assumed to be between -500, 499, so each taking 3 digits (i+500)
l assumed to be between 0 and 9999, so 4 digits
"""
code = 0
n = self.ijkl[3]
if n < 0 or n > 9999:
raise ValueError('l cannot be hashed', self.l)
code = code * 10000 + n
for x in self.ijkl[:3]:
n = 500 + x
if<mask>:
raise ValueError('ijk cannot be hashed', self.ijk)
code = code * 1000 + n
return code | False | n < 0 or n > 999 | n > 9999 | 0.654901385307312 |
825 | 823 | def pick(self):
with self._lock:
try:
priority, session = self._active_queue.get_nowait()
except queue.Empty:
return None
till_expire = priority - time.time()
if<mask>:
return session
self._active_queue.put((priority, session))
return None | False | till_expire < self._keep_alive_threshold | till_expire <= 5 | 0.6450203061103821 |
826 | 824 | @theta.setter
def theta(self, value):
self.__theta = value
if<mask>:
self.callBacksDictionary['theta'](self.__theta) | True | self.callBacksDictionary.has_key('theta') | self.callBacksDictionary.has_key('theta') | 0.6504338979721069 |
827 | 825 | def unset_cached_properties(obj: Any) -> None:
"""
Reset all cached properties of an object.
Successive calls to the property will recompute the value.
:param obj: the object with cached properties.
"""
cls = obj.__class__
for a in dir(obj):
attr_a = getattr(cls, a, cls)
if<mask>:
obj.__dict__.pop(attr_a.attrname, None) | False | isinstance(attr_a, cached_property) | attr_a and attr_a.attrname is not None | 0.6463683843612671 |
828 | 826 | def _siftdown(self, pos):
"""Restore invariant by repeatedly replacing out-of-place element with
its parent."""
h, d = (self.h, self.d)
elt = h[pos]
while pos > 0:
parent_pos = pos - 1 >> 1
parent = h[parent_pos]
if<mask>:
h[parent_pos], h[pos] = (elt, parent)
parent_pos, pos = (pos, parent_pos)
d[elt] = pos
d[parent] = parent_pos
else:
break
return pos | False | parent > elt | parent in d | 0.6604626178741455 |
829 | 827 | @property
def lower_inc(self):
"""`!True` if the lower bound is included in the range."""
if<mask>:
return False
if self._lower is None:
return False
return self._bounds[0] == '[' | True | self._bounds is None | self._bounds is None | 0.6520695686340332 |
830 | 828 | @property
def lower_inc(self):
"""`!True` if the lower bound is included in the range."""
if self._bounds is None:
return False
if<mask>:
return False
return self._bounds[0] == '[' | False | self._lower is None | len(self._bounds) < 1 | 0.6540442109107971 |
831 | 829 | def andExpression_sempred(self, localctx: AndExpressionContext, predIndex: int):
if<mask>:
return self.precpred(self._ctx, 1) | False | predIndex == 8 | predIndex == 0 | 0.6692686676979065 |
832 | 830 | def get_atoms(self):
index_dict = {}
for parser_index, parser in enumerate(self.get_sub_parsers()):
fragment_id = FragmentSymmetryParser.get_fragment_id(parser_index)
if<mask>:
fragment_id = ''
for symmetry_class, atom_index, fragment_index in parser.get_atoms():
try:
atom_index = index_dict[symmetry_class]
index_dict[symmetry_class] += 1
except:
atom_index = 1
index_dict[symmetry_class] = 2
yield (symmetry_class, atom_index, fragment_id + fragment_index) | False | not self.has_sub_fragments | fragment_id is None | 0.6451714038848877 |
833 | 831 | def __bytes__(self):
if<mask>:
self.sum = dpkt.in_cksum(dpkt.Packet.__bytes__(self))
return dpkt.Packet.__bytes__(self) | False | not self.sum | self.sum is None | 0.6590626835823059 |
834 | 832 | def figure_timestamp(fig, x=0.97, y=0.02, iso=True, ha='right', va='bottom', fontsize=5, color='k', alpha=1.0):
"""
Add a timestamp to a figure output
Parameters
----------
fig : `matplotlib` Figure
Figure object
x, y : float
Label position in `fig.transFigure` coordinates (i.e., 0 < x,y < 1)
iso : bool
Use ISO-formatted time from `~grizli.utils.ctime_to_iso`, otherwise use
`time.ctime()`
ha, va : str
Horizontal and vertical alignment
fontsize, color, alpha: int, str, float
Label properties (in `matplotlib.Figure.text`)
Returns
-------
Adds a timestamp to the `fig` object
"""
import time
time_str = time.ctime()
if<mask>:
time_str = ctime_to_iso(time_str, verbose=False)
fig.text(x, y, time_str, ha=ha, va=va, fontsize=fontsize, transform=fig.transFigure, color=color, alpha=alpha) | True | iso | iso | 0.6705964803695679 |
835 | 833 | def _check_input_dim(self, input):
if<mask>:
raise ValueError('expected 5D input (got {}D input)'.format(input.dim()))
super(SynchronizedBatchNorm3d, self)._check_input_dim(input) | True | input.dim() != 5 | input.dim() != 5 | 0.6531423330307007 |
836 | 834 | @property
def udl_family_from_lang(self):
if<mask>:
self._udl_family_from_lang_cache = dict(((uf, L) for uf, L in [(self.m_lang, 'M'), (self.css_lang, 'CSS'), (self.csl_lang, 'CSL'), (self.ssl_lang, 'SSL'), (self.tpl_lang, 'TPL')] if L is not None))
return self._udl_family_from_lang_cache | True | self._udl_family_from_lang_cache is None | self._udl_family_from_lang_cache is None | 0.6463906168937683 |
837 | 835 | def keyReleaseEvent(self, event):
if<mask>:
return False
if event.text() == shortcut:
global lastPosition
lastPosition = ''
if not self.triggerMode:
self.closeHotbox(hotkey=True)
return True | False | event.isAutoRepeat() | event.key() != Qt.Key_Escape | 0.6479076743125916 |
838 | 836 | def keyReleaseEvent(self, event):
if event.isAutoRepeat():
return False
if<mask>:
global lastPosition
lastPosition = ''
if not self.triggerMode:
self.closeHotbox(hotkey=True)
return True | False | event.text() == shortcut | event.key() == QtCore.Qt.Key_Escape | 0.6519359946250916 |
839 | 837 | def keyReleaseEvent(self, event):
if event.isAutoRepeat():
return False
if event.text() == shortcut:
global lastPosition
lastPosition = ''
if<mask>:
self.closeHotbox(hotkey=True)
return True | False | not self.triggerMode | self.hotbox is not None and self.hotbox.hasFocus(event.text()) | 0.6468883156776428 |
840 | 838 | def _evaluate_predictions_on_coco(coco_gt, coco_results, img_ids=None):
"""
Evaluate the coco results using COCOEval API.
"""
assert len(coco_results) > 0
coco_results = copy.deepcopy(coco_results)
for c in coco_results:
c.pop('bbox', None)
coco_dt = coco_gt.loadRes(coco_results)
coco_eval = YTVOSeval(coco_gt, coco_dt)
max_dets_per_image = [1, 10, 100]
coco_eval.params.maxDets = max_dets_per_image
if<mask>:
coco_eval.params.imgIds = img_ids
coco_eval.evaluate()
coco_eval.accumulate()
coco_eval.summarize()
return coco_eval | True | img_ids is not None | img_ids is not None | 0.651093065738678 |
841 | 839 | def load_module(self, fullname):
try:
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if<mask>:
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod | True | isinstance(mod, MovedModule) | isinstance(mod, MovedModule) | 0.646613359451294 |
842 | 840 | @staticmethod
def _is_target_domain(fuzzable_request):
"""
:param fuzzable_request: The api call as a fuzzable request
:return: True if the target domain matches
"""
targets = cf.cf.get('targets')
if<mask>:
return False
target_domain = targets[0].get_domain()
api_call_domain = fuzzable_request.get_url().get_domain()
if target_domain == api_call_domain:
return True
om.out.debug('The OpenAPI specification has operations which point to a domain (%s) outside the defined target (%s). Ignoring the operation to prevent scanning out of scope targets.' % (api_call_domain, target_domain))
return False | True | not targets | not targets | 0.6599678993225098 |
843 | 841 | @staticmethod
def _is_target_domain(fuzzable_request):
"""
:param fuzzable_request: The api call as a fuzzable request
:return: True if the target domain matches
"""
targets = cf.cf.get('targets')
if not targets:
return False
target_domain = targets[0].get_domain()
api_call_domain = fuzzable_request.get_url().get_domain()
if<mask>:
return True
om.out.debug('The OpenAPI specification has operations which point to a domain (%s) outside the defined target (%s). Ignoring the operation to prevent scanning out of scope targets.' % (api_call_domain, target_domain))
return False | False | target_domain == api_call_domain | api_call_domain == target_domain | 0.6478955745697021 |
844 | 842 | @staticmethod
def upgrade_data_model(dm):
print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model')
if<mask>:
dm['data_model_version'] = 'DM_2014_10_24_1638'
if dm['data_model_version'] != 'DM_2014_10_24_1638':
data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.')
return None
if 'surface_class_list' in dm:
for item in dm['surface_class_list']:
if MCellSurfaceClassesProperty.upgrade_data_model(item) == None:
return None
return dm | True | not 'data_model_version' in dm | not 'data_model_version' in dm | 0.6521627902984619 |
845 | 843 | @staticmethod
def upgrade_data_model(dm):
print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model')
if not 'data_model_version' in dm:
dm['data_model_version'] = 'DM_2014_10_24_1638'
if<mask>:
data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.')
return None
if 'surface_class_list' in dm:
for item in dm['surface_class_list']:
if MCellSurfaceClassesProperty.upgrade_data_model(item) == None:
return None
return dm | True | dm['data_model_version'] != 'DM_2014_10_24_1638' | dm['data_model_version'] != 'DM_2014_10_24_1638' | 0.6485673189163208 |
846 | 844 | @staticmethod
def upgrade_data_model(dm):
print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model')
if not 'data_model_version' in dm:
dm['data_model_version'] = 'DM_2014_10_24_1638'
if dm['data_model_version'] != 'DM_2014_10_24_1638':
data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.')
return None
if<mask>:
for item in dm['surface_class_list']:
if MCellSurfaceClassesProperty.upgrade_data_model(item) == None:
return None
return dm | True | 'surface_class_list' in dm | 'surface_class_list' in dm | 0.6497397422790527 |
847 | 845 | @staticmethod
def upgrade_data_model(dm):
print('------------------------->>> Upgrading MCellSurfaceClassesPropertyGroup Data Model')
if not 'data_model_version' in dm:
dm['data_model_version'] = 'DM_2014_10_24_1638'
if dm['data_model_version'] != 'DM_2014_10_24_1638':
data_model.flag_incompatible_data_model('Error: Unable to upgrade MCellSurfaceClassesPropertyGroup data model to current version.')
return None
if 'surface_class_list' in dm:
for item in dm['surface_class_list']:
if<mask>:
return None
return dm | False | MCellSurfaceClassesProperty.upgrade_data_model(item) == None | MCellSurfaceClassGroup.upgrade_data_model(item) == None | 0.648385763168335 |
848 | 846 | @staticmethod
def _validate_ssl_context_for_tls_in_tls(ssl_context):
"""
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS.
The only requirement is that the ssl_context provides the 'wrap_bio'
methods.
"""
if<mask>:
if six.PY2:
raise ProxySchemeUnsupported("TLS in TLS requires SSLContext.wrap_bio() which isn't supported on Python 2")
else:
raise ProxySchemeUnsupported("TLS in TLS requires SSLContext.wrap_bio() which isn't available on non-native SSLContext") | False | not hasattr(ssl_context, 'wrap_bio') | ssl_context is not None | 0.6423006057739258 |
849 | 847 | @staticmethod
def _validate_ssl_context_for_tls_in_tls(ssl_context):
"""
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS.
The only requirement is that the ssl_context provides the 'wrap_bio'
methods.
"""
if not hasattr(ssl_context, 'wrap_bio'):
if<mask>:
raise ProxySchemeUnsupported("TLS in TLS requires SSLContext.wrap_bio() which isn't supported on Python 2")
else:
raise ProxySchemeUnsupported("TLS in TLS requires SSLContext.wrap_bio() which isn't available on non-native SSLContext") | False | six.PY2 | sys.version_info >= (2, 3) | 0.6519508361816406 |
850 | 848 | def __get_canonical_additional_signed_headers(self, additional_headers):
if<mask>:
return ''
return ';'.join(sorted(additional_headers)) | False | additional_headers is None | not additional_headers | 0.6484804153442383 |
851 | 849 | def dict_to_sequence(d):
"""Returns an internal sequence dictionary update."""
if<mask>:
d = d.items()
return d | True | hasattr(d, 'items') | hasattr(d, 'items') | 0.6437700986862183 |
852 | 850 | def cbresponse(response):
if<mask>:
out.warn('{} to {} returned code {}'.format(request.method, request.url, response.code))
if self.max_retries is None or self.retries < self.max_retries:
reactor.callLater(self.retryDelay, self.send, report)
self.retries += 1
self.increaseDelay()
nexus.core.jwt_valid = False
else:
nexus.core.jwt_valid = True | False | not response.success | response.code != 200 | 0.6519026160240173 |
853 | 851 | def cbresponse(response):
if not response.success:
out.warn('{} to {} returned code {}'.format(request.method, request.url, response.code))
if<mask>:
reactor.callLater(self.retryDelay, self.send, report)
self.retries += 1
self.increaseDelay()
nexus.core.jwt_valid = False
else:
nexus.core.jwt_valid = True | False | self.max_retries is None or self.retries < self.max_retries | self.retryDelay > 0 | 0.6466606855392456 |
854 | 852 | @Slot(int)
def restoreWindowSizeStateChanged(self, value):
self.restoreWindowSize = bool(value)
if<mask>:
self.__changedData = True | True | not self.__changedData | not self.__changedData | 0.65090012550354 |
855 | 853 | def reset_classifier(self, num_classes, global_pool=''):
self.num_classes = num_classes
self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()
if<mask>:
self.head_dist = nn.Linear(self.embed_dim, self.num_classes) if num_classes > 0 else nn.Identity() | False | self.num_tokens == 2 | self.num_classes > 0 | 0.6489435434341431 |
856 | 854 | def is_supported(self, t):
unsupported = ['test_action_set_field_ip_proto', 'test_action_set_field_dl_type', 'test_action_set_field_icmp', 'test_action_set_field_icmpv6_code', 'test_action_set_field_icmpv6_type', 'test_action_set_field_ipv6_flabel', 'test_action_set_field_ipv6_nd_sll', 'test_action_set_field_ipv6_nd_target', 'test_action_set_field_ipv6_nd_tll', 'test_action_copy_ttl_in', 'test_action_copy_ttl_out']
for u in unsupported:
if<mask>:
return False
return True | False | t.find(u) != -1 | u not in t | 0.6474747061729431 |
857 | 855 | def shouldSave(self):
if<mask>:
btn = QMessageBox.warning(self, 'Confirm?', 'Unsaved data will be lost. Save?', QMessageBox.Yes | QMessageBox.No | QMessageBox.Discard)
if btn == QMessageBox.No:
return QMessageBox.No
else:
return btn
return QMessageBox.No | False | self.modified | self.data is not None | 0.6542555093765259 |
858 | 856 | def shouldSave(self):
if self.modified:
btn = QMessageBox.warning(self, 'Confirm?', 'Unsaved data will be lost. Save?', QMessageBox.Yes | QMessageBox.No | QMessageBox.Discard)
if<mask>:
return QMessageBox.No
else:
return btn
return QMessageBox.No | False | btn == QMessageBox.No | btn == QMessageBox.Yes | 0.6572132110595703 |
859 | 857 | def execute(self):
fqdn = ''
v = self.cli('show running-config')
match = self.rx_hostname.search(v)
if<mask>:
fqdn = match.group('hostname')
match = self.rx_domain_name.search(v)
if match:
fqdn = fqdn + '.' + match.group('domain')
return fqdn | True | match | match | 0.6649458408355713 |
860 | 858 | def execute(self):
fqdn = ''
v = self.cli('show running-config')
match = self.rx_hostname.search(v)
if match:
fqdn = match.group('hostname')
match = self.rx_domain_name.search(v)
if<mask>:
fqdn = fqdn + '.' + match.group('domain')
return fqdn | True | match | match | 0.6644583940505981 |
861 | 859 | def validate(self):
if<mask>:
raise TProtocolException(message='Required field component is unset!')
if self.user is None:
raise TProtocolException(message='Required field user is unset!')
if self.hostname is None:
raise TProtocolException(message='Required field hostname is unset!')
return | True | self.component is None | self.component is None | 0.6490193009376526 |
862 | 860 | def validate(self):
if self.component is None:
raise TProtocolException(message='Required field component is unset!')
if<mask>:
raise TProtocolException(message='Required field user is unset!')
if self.hostname is None:
raise TProtocolException(message='Required field hostname is unset!')
return | True | self.user is None | self.user is None | 0.6485776305198669 |
863 | 861 | def validate(self):
if self.component is None:
raise TProtocolException(message='Required field component is unset!')
if self.user is None:
raise TProtocolException(message='Required field user is unset!')
if<mask>:
raise TProtocolException(message='Required field hostname is unset!')
return | True | self.hostname is None | self.hostname is None | 0.6486556529998779 |
864 | 862 | def __init__(self, uri=None, signature=None, *args, **kwargs):
"""Constructs a new EmailSettingsSignature object with the given arguments.
Args:
uri: string (optional) The uri of this object for HTTP requests.
signature: string (optional) The signature to be appended to outgoing
messages.
args: The other parameters to pass to gdata.entry.GDEntry constructor.
kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
"""
super(EmailSettingsSignature, self).__init__(*args, **kwargs)
if<mask>:
self.uri = uri
if signature is not None:
self.signature_value = signature | True | uri | uri | 0.6846779584884644 |
865 | 863 | def __init__(self, uri=None, signature=None, *args, **kwargs):
"""Constructs a new EmailSettingsSignature object with the given arguments.
Args:
uri: string (optional) The uri of this object for HTTP requests.
signature: string (optional) The signature to be appended to outgoing
messages.
args: The other parameters to pass to gdata.entry.GDEntry constructor.
kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
"""
super(EmailSettingsSignature, self).__init__(*args, **kwargs)
if uri:
self.uri = uri
if<mask>:
self.signature_value = signature | True | signature is not None | signature is not None | 0.6552187204360962 |
866 | 864 | def GetMailContactapi(mailname):
try:
exapivalue = GetMailContacthight(mailname)
if<mask>:
return {'isSuccess': exapivalue['isSuccess'], 'message': exapivalue['message'][0]}
else:
return {'isSuccess': exapivalue['isSuccess'], 'message': exapivalue['msg']}
except Exception as e:
return {'isSuccess': False, 'message': str(e)} | True | exapivalue['isSuccess'] | exapivalue['isSuccess'] | 0.6505104899406433 |
867 | 865 | def __init__(self, str=None):
gtk.Label.__init__(self)
self.__wrap_width = 0
self.layout = self.get_layout()
self.layout.set_wrap(pango.WRAP_WORD_CHAR)
if<mask>:
self.set_text(str)
self.set_alignment(0.0, 0.0) | False | str != None | str | 0.6613825559616089 |
868 | 866 | def item_title(self, item):
"""render the item title"""
if<mask>:
return item.pure_name
title_template = get_template('snippets/status/header_content.html')
title = title_template.render({'status': item})
template = get_template('rss/title.html')
return template.render({'user': item.user, 'item_title': title}).strip() | False | hasattr(item, 'pure_name') and item.pure_name | item.pure_name | 0.6489530205726624 |
869 | 867 | def add_word_information(self, word: str) -> None:
if<mask>:
self.words_matching_root_token.append(word) | True | word not in self.words_matching_root_token | word not in self.words_matching_root_token | 0.6470382213592529 |
870 | 868 | def setAxis(self):
self.ax.legend()
self.ax.relim()
self.ax.autoscale_view()
if<mask>:
self.fig.canvas.draw()
self.fig.canvas.flush_events() | False | not kUseFigCanvasDrawIdle | self.fig.canvas.draw | 0.6457640528678894 |
871 | 869 | def _get_next_update(self):
"""MUTEX: updateLock
Returns the size of the local update queue.
"""
self.updateLock.acquire()
if<mask>:
a = self.updateQueue.pop(0)
else:
a = None
self.updateLock.release()
return a | True | len(self.updateQueue) > 0 | len(self.updateQueue) > 0 | 0.6489444971084595 |
872 | 870 | def get_hypo():
if<mask>:
hypo_attn = attn_clone[i]
else:
hypo_attn = None
return {'tokens': tokens_clone[i], 'score': score, 'attention': hypo_attn, 'alignment': None, 'positional_scores': pos_scores[i]} | True | attn_clone is not None | attn_clone is not None | 0.6628034710884094 |
873 | 871 | def set_field(self, field, value):
"""Sets the value of a field.
Updates the field of a box_list with a given value.
Args:
field: (string) name of the field to set value.
value: the value to assign to the field.
Raises:
ValueError: if the box_list does not have specified field.
"""
if<mask>:
raise ValueError('field %s does not exist' % field)
self.data[field] = value | True | not self.has_field(field) | not self.has_field(field) | 0.6464101672172546 |
874 | 872 | def write(data):
if<mask>:
data = str(data)
if isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None):
errors = getattr(fp, 'errors', None)
if errors is None:
errors = 'strict'
data = data.encode(fp.encoding, errors)
fp.write(data) | True | not isinstance(data, basestring) | not isinstance(data, basestring) | 0.6452983617782593 |
875 | 873 | def write(data):
if not isinstance(data, basestring):
data = str(data)
if<mask>:
errors = getattr(fp, 'errors', None)
if errors is None:
errors = 'strict'
data = data.encode(fp.encoding, errors)
fp.write(data) | True | isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None) | isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None) | 0.644314169883728 |
876 | 874 | def write(data):
if not isinstance(data, basestring):
data = str(data)
if isinstance(fp, file) and isinstance(data, unicode) and (fp.encoding is not None):
errors = getattr(fp, 'errors', None)
if<mask>:
errors = 'strict'
data = data.encode(fp.encoding, errors)
fp.write(data) | True | errors is None | errors is None | 0.6531771421432495 |
877 | 875 | def lineto(self, points, relative=False):
if<mask>:
ox, oy = self.current
else:
ox, oy = (0, 0)
vertices = self.vertices[-1]
for i in range(0, len(points), 2):
x, y = (points[i], points[i + 1])
vertices.append((x + ox, y + oy))
self.current = vertices[-1]
self.last_control3 = None
self.last_control4 = None | True | relative | relative | 0.6565186977386475 |
878 | 876 | def get_kexs(self, allow_weak_kex):
if<mask>:
weak_kex = 'weak'
else:
weak_kex = 'default'
default = 'diffie-hellman-group-exchange-sha256'
weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1'
kex = {'default': default, 'weak': weak}
default = 'curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256'
weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1'
kex_66 = {'default': default, 'weak': weak}
_release = lsb_release()['DISTRIB_CODENAME'].lower()
if CompareHostReleases(_release) >= 'trusty':
log('Detected Ubuntu 14.04 or newer, using new key exchange algorithms', level=DEBUG)
kex = kex_66
return kex[weak_kex] | True | allow_weak_kex | allow_weak_kex | 0.6570942401885986 |
879 | 877 | def get_kexs(self, allow_weak_kex):
if allow_weak_kex:
weak_kex = 'weak'
else:
weak_kex = 'default'
default = 'diffie-hellman-group-exchange-sha256'
weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1'
kex = {'default': default, 'weak': weak}
default = 'curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256'
weak = default + ',diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1'
kex_66 = {'default': default, 'weak': weak}
_release = lsb_release()['DISTRIB_CODENAME'].lower()
if<mask>:
log('Detected Ubuntu 14.04 or newer, using new key exchange algorithms', level=DEBUG)
kex = kex_66
return kex[weak_kex] | False | CompareHostReleases(_release) >= 'trusty' | _release in kex_66 | 0.6475317478179932 |
880 | 878 | def __setitem__(self, feature_name: str, value: T | FeatureIO[T]) -> None:
"""Before setting value to the dictionary it checks that value is of correct type and dimension and tries to
transform value in correct form.
"""
if<mask>:
value = self._parse_feature_value(value, feature_name)
self._check_feature_name(feature_name)
self._content[feature_name] = value | False | not isinstance(value, FeatureIO) | feature_name.isdigit() | 0.6505963802337646 |
881 | 879 | def is_cold(self, dc, threshold):
for l in dc:
for dt in dc[l]:
for i in dc[l][dt]:
if<mask>:
return True
return False | False | dc[l][dt][i][0] < threshold | i >= threshold | 0.6462461948394775 |
882 | 880 | def WriteFillContainer(part, file, rank, status, regions):
if<mask>:
return
if InstanceName.Find('P_' + part.name + rank + status + '_REG_' + '_'.join(regions)):
return
container = InstanceName.Get('P_' + part.name + rank + status + '_REG_' + '_'.join(regions))
id = 'isP_' + InstanceName.Get(part.name + rank + status)
file.write(' if (' + id + '((&(event.mc()->particles()[i]))))' + container + '.push_back(&(event.mc()->particles()[i]));\n') | False | part.PTrank != 0 | part.Find('P_' + part.name + rank + status) | 0.6539376378059387 |
883 | 881 | def WriteFillContainer(part, file, rank, status, regions):
if part.PTrank != 0:
return
if<mask>:
return
container = InstanceName.Get('P_' + part.name + rank + status + '_REG_' + '_'.join(regions))
id = 'isP_' + InstanceName.Get(part.name + rank + status)
file.write(' if (' + id + '((&(event.mc()->particles()[i]))))' + container + '.push_back(&(event.mc()->particles()[i]));\n') | False | InstanceName.Find('P_' + part.name + rank + status + '_REG_' + '_'.join(regions)) | InstanceName.Get('P_' + part.name + rank + status) | 0.6505239009857178 |
884 | 882 | def exitRule(self, listener: ParseTreeListener):
if<mask>:
listener.exitColumnTypeExprEnum(self) | True | hasattr(listener, 'exitColumnTypeExprEnum') | hasattr(listener, 'exitColumnTypeExprEnum') | 0.6516823172569275 |
885 | 883 | def renderMarkedModifiers(self):
if<mask>:
return ''
txt = 'TAGS'.center(60) + '\n\n'
for term in self.__markedModifiers:
txt += term.__str__() + '\n'
return txt | True | not self.__markedModifiers | not self.__markedModifiers | 0.6519392132759094 |
886 | 884 | def _get(xi, span):
if<mask>:
return ['']
if len(xi[span[0][0]]) <= span[1][1]:
return ['']
return xi[span[0][0]][span[0][1]:span[1][1]] | True | len(xi) <= span[0][0] | len(xi) <= span[0][0] | 0.6505478024482727 |
887 | 885 | def _get(xi, span):
if len(xi) <= span[0][0]:
return ['']
if<mask>:
return ['']
return xi[span[0][0]][span[0][1]:span[1][1]] | True | len(xi[span[0][0]]) <= span[1][1] | len(xi[span[0][0]]) <= span[1][1] | 0.6481741070747375 |
888 | 886 | def current_reactor_klass():
"""
Return class name of currently installed Twisted reactor or None.
"""
if<mask>:
current_reactor = reflect.qual(sys.modules['twisted.internet.reactor'].__class__).split('.')[-1]
else:
current_reactor = None
return current_reactor | True | 'twisted.internet.reactor' in sys.modules | 'twisted.internet.reactor' in sys.modules | 0.648668646812439 |
889 | 887 | def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for i, line in enumerate(lines):
if<mask>:
continue
guid = '%s-%s' % (set_type, i)
text_a = line[3]
text_b = line[4]
label = line[0]
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples | True | i == 0 | i == 0 | 0.6704572439193726 |
890 | 888 | @classmethod
def get_attrs(cls):
if<mask>:
all_attrs = dir(cls.Meta)
attrs = [(attr, getattr(cls.Meta, attr)) for attr in all_attrs if isinstance(getattr(cls.Meta, attr), AttrDecl)]
cls._attrs = attrs
return cls._attrs | False | not hasattr(cls, '_attrs') | cls._attrs is None | 0.647627592086792 |
891 | 889 | def flatten_byteorder(self, obj, data):
byteorder = obj.dtype.byteorder
if<mask>:
data['byteorder'] = get_byteorder(obj) | False | byteorder != '|' | byteorder is not None | 0.6662157773971558 |
892 | 890 | def create_dir(dir_str):
url = 'https://pan.baidu.com/api/create?a=commit&channel=chunlei&app_id=250528&channel=chunlei&web=1&app_id=250528&clienttype=0&'
data = {'path': dir_str, 'isdir': 1, 'size': '', 'block_list': '[]', 'method': 'post', 'dataType': 'json'}
res = requests.post(url, data=data, headers=api.get_randsk_headers(), timeout=30)
res = util.dict_to_object(json.loads(res.text))
if<mask>:
return res.path
return '' | False | res.errno == 0 | res.status_code == 200 | 0.6559996008872986 |
893 | 891 | def auto_shutdown():
global singleton
if<mask>:
singleton.process.terminate()
singleton.process.join()
singleton = None | False | singleton and singleton.process | singleton.process | 0.6513727903366089 |
894 | 892 | def _get_used_palette_colors(im):
used_palette_colors = []
i = 0
for count in im.histogram():
if<mask>:
used_palette_colors.append(i)
i += 1
return used_palette_colors | False | count | count.max() > 0 | 0.66873699426651 |
895 | 893 | def is_scalar(f):
"""Determine if the input argument is a scalar.
The function **is_scalar** returns *True* if the input is an integer,
float or complex number. The function returns *False* otherwise.
Parameters
----------
f : object
Any input quantity
Returns
-------
bool
- *True* if the input argument is an integer, float or complex number
- *False* otherwise
"""
if<mask>:
return True
elif isinstance(f, np.ndarray) and f.size == 1 and isinstance(f[0], SCALARTYPES):
return True
return False | False | isinstance(f, SCALARTYPES) | isinstance(f, np.int64) | 0.6462920904159546 |
896 | 894 | def is_scalar(f):
"""Determine if the input argument is a scalar.
The function **is_scalar** returns *True* if the input is an integer,
float or complex number. The function returns *False* otherwise.
Parameters
----------
f : object
Any input quantity
Returns
-------
bool
- *True* if the input argument is an integer, float or complex number
- *False* otherwise
"""
if isinstance(f, SCALARTYPES):
return True
elif<mask>:
return True
return False | False | isinstance(f, np.ndarray) and f.size == 1 and isinstance(f[0], SCALARTYPES) | isinstance(f, float) or isinstance(f, np.ndarray) | 0.645554780960083 |
897 | 895 | @property
def password(self):
if<mask>:
return None
else:
return util.text_type(self.password_original) | True | self.password_original is None | self.password_original is None | 0.6454421281814575 |
898 | 896 | def density_L1(self):
total = 0
for idx in range(len(self.density_plane_space)):
if<mask>:
continue
total = total + torch.mean(torch.abs(self.density_plane_space[idx])) + torch.mean(torch.abs(self.density_plane_time[idx])) + torch.mean(torch.abs(self.density_line[idx]))
return total | False | self.density_plane_space[idx].shape[1] == 0 | self.density_line[idx].shape[1] == 0 | 0.6463340520858765 |
899 | 897 | def forward(self, input_seq, offset=0):
"""
Args:
input_seq (torch.Tensor): input sequence, shape [batch_size, sequence_length].
Returns:
torch.Tensor: position embedding, shape [batch_size, sequence_length, embedding_size].
"""
batch_size, seq_len = input_seq.size()
max_position = seq_len + offset
if<mask>:
self.weights = self.get_embedding(max_position, self.embedding_size)
positions = offset + torch.arange(seq_len)
pos_embeddings = self.weights.index_select(0, positions).unsqueeze(0).expand(batch_size, -1, -1).detach()
return pos_embeddings | False | self.weights is None or max_position > self.weights.size(0) | self.embedding_size > 0 | 0.6445240378379822 |
900 | 898 | def toggle_pihole_status(self, widget):
if<mask>:
try:
req = None
if self._pihole_status:
req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret)
else:
req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret)
if req is not None:
if req.status_code == 200:
status = req.json()['status']
self._pihole_status = False if status == 'disabled' else True
except:
pass | False | self._pihole_status is not None | self._pihole_secret | 0.6440352201461792 |
901 | 899 | def toggle_pihole_status(self, widget):
if self._pihole_status is not None:
try:
req = None
if<mask>:
req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret)
else:
req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret)
if req is not None:
if req.status_code == 200:
status = req.json()['status']
self._pihole_status = False if status == 'disabled' else True
except:
pass | False | self._pihole_status | self._disabled | 0.6450003385543823 |
902 | 900 | def toggle_pihole_status(self, widget):
if self._pihole_status is not None:
try:
req = None
if self._pihole_status:
req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret)
else:
req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret)
if<mask>:
if req.status_code == 200:
status = req.json()['status']
self._pihole_status = False if status == 'disabled' else True
except:
pass | False | req is not None | req | 0.6521580219268799 |
903 | 901 | def toggle_pihole_status(self, widget):
if self._pihole_status is not None:
try:
req = None
if self._pihole_status:
req = requests.get(self._pihole_address + '/admin/api.php?disable&auth=' + self._pihole_secret)
else:
req = requests.get(self._pihole_address + '/admin/api.php?enable&auth=' + self._pihole_secret)
if req is not None:
if<mask>:
status = req.json()['status']
self._pihole_status = False if status == 'disabled' else True
except:
pass | False | req.status_code == 200 | req.json()['status'] == 'enabled' | 0.6560711860656738 |
904 | 902 | def html_page_context(app, pagename, templatename, context, doctree):
if<mask>:
return
if not app.config.edit_on_github_project:
warnings.warn('edit_on_github_project not specified')
return
if not doctree:
warnings.warn('doctree is None')
return
path = os.path.relpath(doctree.get('source'), app.builder.srcdir)
show_url = get_github_url(app, 'blob', path)
edit_url = get_github_url(app, 'edit', path)
context['show_on_github_url'] = show_url
context['edit_on_github_url'] = edit_url | False | templatename != 'page.html' | pagename is None | 0.6452323794364929 |
905 | 903 | def html_page_context(app, pagename, templatename, context, doctree):
if templatename != 'page.html':
return
if<mask>:
warnings.warn('edit_on_github_project not specified')
return
if not doctree:
warnings.warn('doctree is None')
return
path = os.path.relpath(doctree.get('source'), app.builder.srcdir)
show_url = get_github_url(app, 'blob', path)
edit_url = get_github_url(app, 'edit', path)
context['show_on_github_url'] = show_url
context['edit_on_github_url'] = edit_url | False | not app.config.edit_on_github_project | not pagename | 0.6498720645904541 |
906 | 904 | def html_page_context(app, pagename, templatename, context, doctree):
if templatename != 'page.html':
return
if not app.config.edit_on_github_project:
warnings.warn('edit_on_github_project not specified')
return
if<mask>:
warnings.warn('doctree is None')
return
path = os.path.relpath(doctree.get('source'), app.builder.srcdir)
show_url = get_github_url(app, 'blob', path)
edit_url = get_github_url(app, 'edit', path)
context['show_on_github_url'] = show_url
context['edit_on_github_url'] = edit_url | False | not doctree | doctree is None | 0.6573389768600464 |
907 | 905 | def get_object(self):
if<mask>:
return self.request.user.membership
else:
raise Http404() | False | self.request.user.has_membership | hasattr(self.request, 'user') | 0.6449790000915527 |
908 | 906 | def bytes_to_float(value, _domain, _range, _error=None):
"""Convert the fixed point value self.value to a floating point value."""
src_value = int().from_bytes(value, byteorder='big', signed=min(_domain) < 0)
if<mask>:
return None
return linear_map(src_value, _domain, _range) | False | src_value == _error | src_value == 0 | 0.6528149843215942 |
909 | 907 | def pick_peaks(arr):
prev_dex = prev_val = None
result = {'pos': [], 'peaks': []}
upwards = False
for i, a in enumerate(arr):
if<mask>:
continue
elif prev_val is None or prev_val < a:
upwards = True
else:
if prev_dex and upwards:
result['pos'].append(prev_dex)
result['peaks'].append(prev_val)
upwards = False
prev_dex = i
prev_val = a
return result | False | prev_val == a | i == 0 | 0.6536059379577637 |
910 | 908 | def pick_peaks(arr):
prev_dex = prev_val = None
result = {'pos': [], 'peaks': []}
upwards = False
for i, a in enumerate(arr):
if prev_val == a:
continue
elif<mask>:
upwards = True
else:
if prev_dex and upwards:
result['pos'].append(prev_dex)
result['peaks'].append(prev_val)
upwards = False
prev_dex = i
prev_val = a
return result | False | prev_val is None or prev_val < a | a == prev_val | 0.6455966234207153 |
911 | 909 | def pick_peaks(arr):
prev_dex = prev_val = None
result = {'pos': [], 'peaks': []}
upwards = False
for i, a in enumerate(arr):
if prev_val == a:
continue
elif prev_val is None or prev_val < a:
upwards = True
else:
if<mask>:
result['pos'].append(prev_dex)
result['peaks'].append(prev_val)
upwards = False
prev_dex = i
prev_val = a
return result | False | prev_dex and upwards | upwards and prev_dex is not None | 0.6480530500411987 |
912 | 910 | def format_address_spaces(addr, left=True):
"""Format the address according to its size, but with spaces instead of zeroes."""
width = get_memory_alignment() * 2 + 2
addr = align_address(addr)
if<mask>:
return '0x{:x}'.format(addr).rjust(width)
return '0x{:x}'.format(addr).ljust(width) | False | not left | left | 0.6637938022613525 |
913 | 911 | def hasContent_(self):
if<mask>:
return True
else:
return False | False | super(SpikeGenerator, self).hasContent_() | self.valueOf_ is not None | 0.6443113684654236 |
914 | 912 | @property
def matched(self):
for trigger in self.triggers:
if<mask>:
return True
return False | False | trigger.triggered | trigger.match(self) | 0.6532926559448242 |
915 | 913 | def post(self, request, *args, **kwargs):
self.object = None
forms = self.get_form()
if<mask>:
return self.form_valid(forms)
return self.form_invalid(forms) | False | all((form.is_valid() for form in forms)) | forms.is_valid() | 0.6451195478439331 |
916 | 914 | def __init__(self, optimizer, multiplier, total_epoch, after_scheduler=None, **kwargs):
self.multiplier = multiplier
if<mask>:
raise ValueError('multiplier should be greater than 1.')
self.total_epoch = total_epoch
self.after_scheduler = after_scheduler
self.finished = False
super().__init__(optimizer) | False | self.multiplier <= 1.0 | multiplier > 1 | 0.659283459186554 |
917 | 915 | def ret(self):
for device in Device._buffers:
if<mask>:
continue
if not CI:
print(device)
if device in exclude_devices:
if not CI:
print(f'WARNING: {device} test is excluded')
continue
with self.subTest(device=device):
try:
Device[device]
except Exception:
if not CI:
print(f"WARNING: {device} test isn't running")
continue
fxn(self, device) | False | device in ['DISK', 'SHM', 'FAKE'] | device not in include_devices | 0.6416229009628296 |
918 | 916 | def ret(self):
for device in Device._buffers:
if device in ['DISK', 'SHM', 'FAKE']:
continue
if<mask>:
print(device)
if device in exclude_devices:
if not CI:
print(f'WARNING: {device} test is excluded')
continue
with self.subTest(device=device):
try:
Device[device]
except Exception:
if not CI:
print(f"WARNING: {device} test isn't running")
continue
fxn(self, device) | False | not CI | self.verbose | 0.6551085710525513 |
919 | 917 | def ret(self):
for device in Device._buffers:
if device in ['DISK', 'SHM', 'FAKE']:
continue
if not CI:
print(device)
if<mask>:
if not CI:
print(f'WARNING: {device} test is excluded')
continue
with self.subTest(device=device):
try:
Device[device]
except Exception:
if not CI:
print(f"WARNING: {device} test isn't running")
continue
fxn(self, device) | False | device in exclude_devices | device not in Device.objects | 0.6479327082633972 |
920 | 918 | def ret(self):
for device in Device._buffers:
if device in ['DISK', 'SHM', 'FAKE']:
continue
if not CI:
print(device)
if device in exclude_devices:
if<mask>:
print(f'WARNING: {device} test is excluded')
continue
with self.subTest(device=device):
try:
Device[device]
except Exception:
if not CI:
print(f"WARNING: {device} test isn't running")
continue
fxn(self, device) | False | not CI | device not in include_devices | 0.6545816659927368 |
921 | 919 | def ret(self):
for device in Device._buffers:
if device in ['DISK', 'SHM', 'FAKE']:
continue
if not CI:
print(device)
if device in exclude_devices:
if not CI:
print(f'WARNING: {device} test is excluded')
continue
with self.subTest(device=device):
try:
Device[device]
except Exception:
if<mask>:
print(f"WARNING: {device} test isn't running")
continue
fxn(self, device) | False | not CI | not self.is_running | 0.6549131870269775 |
922 | 920 | def _create_disk_usage_uri(self, cluster_name: Optional[str], org_name: Optional[str]) -> URL:
if<mask>:
uri = self._normalize_uri(URL(f'storage://{cluster_name}/{org_name}/{self._config.project_name_or_raise}'))
else:
uri = self._normalize_uri(URL(f'storage://{cluster_name}/{self._config.project_name_or_raise}'))
assert uri.host is not None
return uri | False | org_name | cluster_name and org_name | 0.6632174253463745 |
923 | 921 | def DECIMAL_LITERAL(self, i: int=None):
if<mask>:
return self.getTokens(HiveParser.DECIMAL_LITERAL)
else:
return self.getToken(HiveParser.DECIMAL_LITERAL, i) | True | i is None | i is None | 0.6608383655548096 |
924 | 922 | def test_structured_conf(self, input_: Any) -> None:
if<mask>:
cfg = OmegaConf.structured(ListOfAny(input_))
assert isinstance(cfg.list, ListConfig)
else:
cfg = OmegaConf.structured(DictOfAny(input_))
assert isinstance(cfg.dict, DictConfig) | False | isinstance(input_, Sequence) | isinstance(input_, (list, DictConfig)) | 0.6484379768371582 |
925 | 923 | def setNameOverride(self, name):
if<mask>:
return
self.name = name
if self.isDemo is None:
if re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s*$', self.name, re.I) or re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s+.*$', self.name, re.I):
self.isDemo = True
else:
self.isDemo = False | False | not name | not name or self.name | 0.6594038009643555 |
926 | 924 | def setNameOverride(self, name):
if not name:
return
self.name = name
if<mask>:
if re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s*$', self.name, re.I) or re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s+.*$', self.name, re.I):
self.isDemo = True
else:
self.isDemo = False | True | self.isDemo is None | self.isDemo is None | 0.650297224521637 |
927 | 925 | def setNameOverride(self, name):
if not name:
return
self.name = name
if self.isDemo is None:
if<mask>:
self.isDemo = True
else:
self.isDemo = False | False | re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s*$', self.name, re.I) or re.match('.*\\s[\\(\\[]?Demo[\\)\\]]?\\s+.*$', self.name, re.I) | name == 'Demo' or name == 'Demo' | 0.6504395008087158 |
928 | 926 | def __init__(self, search_context: Optional[SearchContext]=None) -> None:
if<mask>:
search_context = get_search_context()
self.ctx = search_context
self._module_cache: Dict[ModulePath, Module] = {} | True | search_context is None | search_context is None | 0.6626418828964233 |
929 | 927 | def parse_devices(input_devices):
"""Parse user's devices input str to standard format.
e.g. [gpu0, gpu1,...]
"""
ret = []
for d in input_devices.split(','):
for regex, func in REGEX:
m = regex.match(d.lower().strip())
if<mask>:
tmp = func(m.groups())
for x in tmp:
if x not in ret:
ret.append(x)
break
else:
raise NotSupportedCliException('Can not recognize device: "%s"' % d)
return ret | True | m | m | 0.6782421469688416 |
930 | 928 | def parse_devices(input_devices):
"""Parse user's devices input str to standard format.
e.g. [gpu0, gpu1,...]
"""
ret = []
for d in input_devices.split(','):
for regex, func in REGEX:
m = regex.match(d.lower().strip())
if m:
tmp = func(m.groups())
for x in tmp:
if<mask>:
ret.append(x)
break
else:
raise NotSupportedCliException('Can not recognize device: "%s"' % d)
return ret | False | x not in ret | x | 0.6563632488250732 |
931 | 929 | def __init__(self, offset, name=None):
"""
:param offset:
A timedelta with this timezone's offset from UTC
:param name:
Name of the timezone; if None, generate one.
"""
if<mask>:
raise ValueError('Offset must be in [-23:59, 23:59]')
if offset.seconds % 60 or offset.microseconds:
raise ValueError('Offset must be full minutes')
self._offset = offset
if name is not None:
self._name = name
elif not offset:
self._name = 'UTC'
else:
self._name = 'UTC' + _format_offset(offset) | False | not timedelta(hours=-24) < offset < timedelta(hours=24) | offset.hour < 1 | 0.6472988724708557 |
932 | 930 | def __init__(self, offset, name=None):
"""
:param offset:
A timedelta with this timezone's offset from UTC
:param name:
Name of the timezone; if None, generate one.
"""
if not timedelta(hours=-24) < offset < timedelta(hours=24):
raise ValueError('Offset must be in [-23:59, 23:59]')
if<mask>:
raise ValueError('Offset must be full minutes')
self._offset = offset
if name is not None:
self._name = name
elif not offset:
self._name = 'UTC'
else:
self._name = 'UTC' + _format_offset(offset) | False | offset.seconds % 60 or offset.microseconds | offset % timedelta(minutes=1) or offset > timedelta(minutes=1) | 0.6484977006912231 |
933 | 931 | def __init__(self, offset, name=None):
"""
:param offset:
A timedelta with this timezone's offset from UTC
:param name:
Name of the timezone; if None, generate one.
"""
if not timedelta(hours=-24) < offset < timedelta(hours=24):
raise ValueError('Offset must be in [-23:59, 23:59]')
if offset.seconds % 60 or offset.microseconds:
raise ValueError('Offset must be full minutes')
self._offset = offset
if<mask>:
self._name = name
elif not offset:
self._name = 'UTC'
else:
self._name = 'UTC' + _format_offset(offset) | False | name is not None | name | 0.6517388820648193 |
934 | 932 | def __init__(self, offset, name=None):
"""
:param offset:
A timedelta with this timezone's offset from UTC
:param name:
Name of the timezone; if None, generate one.
"""
if not timedelta(hours=-24) < offset < timedelta(hours=24):
raise ValueError('Offset must be in [-23:59, 23:59]')
if offset.seconds % 60 or offset.microseconds:
raise ValueError('Offset must be full minutes')
self._offset = offset
if name is not None:
self._name = name
elif<mask>:
self._name = 'UTC'
else:
self._name = 'UTC' + _format_offset(offset) | False | not offset | offset is None | 0.6555944681167603 |
935 | 933 | def dump(self, indent=0):
print(' ' * indent + self.name, self.op, 'start')
if<mask>:
self.lft.dump(indent + 1)
print(' ' * (indent + 1) + 'Operator', self.op)
if self.rgt is not None:
self.rgt.dump(indent + 1)
print(' ' * indent + self.name, self.op, 'end.') | False | self.lft is not None | self.lgt is not None | 0.6590958833694458 |
936 | 934 | def dump(self, indent=0):
print(' ' * indent + self.name, self.op, 'start')
if self.lft is not None:
self.lft.dump(indent + 1)
print(' ' * (indent + 1) + 'Operator', self.op)
if<mask>:
self.rgt.dump(indent + 1)
print(' ' * indent + self.name, self.op, 'end.') | True | self.rgt is not None | self.rgt is not None | 0.6516817808151245 |
937 | 935 | def check_cardinality_1_Fix_Empty(self, fixed, unfixed, doFix):
for propname in self.propertyCardinality_1_Fix_Empty:
if<mask>:
logProblem = '[%s] Too many required property: %s' % (self.getType(), propname)
unfixed.append(logProblem)
elif self.countProperty(propname) == 0:
logProblem = '[%s] Missing required property: %s' % (self.getType(), propname)
if doFix:
self.addProperty(self.sPropertyType(propname, ''))
fixed.append(logProblem)
else:
unfixed.append(logProblem) | False | self.countProperty(propname) > 1 | self.countProperty(propname) > 0 | 0.6466086506843567 |
938 | 936 | def check_cardinality_1_Fix_Empty(self, fixed, unfixed, doFix):
for propname in self.propertyCardinality_1_Fix_Empty:
if self.countProperty(propname) > 1:
logProblem = '[%s] Too many required property: %s' % (self.getType(), propname)
unfixed.append(logProblem)
elif<mask>:
logProblem = '[%s] Missing required property: %s' % (self.getType(), propname)
if doFix:
self.addProperty(self.sPropertyType(propname, ''))
fixed.append(logProblem)
else:
unfixed.append(logProblem) | False | self.countProperty(propname) == 0 | self.countProperty(propname) < 0 | 0.6478222608566284 |
939 | 937 | def check_cardinality_1_Fix_Empty(self, fixed, unfixed, doFix):
for propname in self.propertyCardinality_1_Fix_Empty:
if self.countProperty(propname) > 1:
logProblem = '[%s] Too many required property: %s' % (self.getType(), propname)
unfixed.append(logProblem)
elif self.countProperty(propname) == 0:
logProblem = '[%s] Missing required property: %s' % (self.getType(), propname)
if<mask>:
self.addProperty(self.sPropertyType(propname, ''))
fixed.append(logProblem)
else:
unfixed.append(logProblem) | True | doFix | doFix | 0.6615027189254761 |
940 | 938 | def __iter__(self):
it = iter(self.loader)
storage = get_event_storage()
while True:
try:
batch = next(it)
num_inst_per_dataset = {}
for data in batch:
dataset_name = data['dataset']
if<mask>:
num_inst_per_dataset[dataset_name] = 0
num_inst = len(data['instances'])
num_inst_per_dataset[dataset_name] += num_inst
for dataset_name in num_inst_per_dataset:
storage.put_scalar(f'batch/{dataset_name}', num_inst_per_dataset[dataset_name])
yield batch
except StopIteration:
break | True | dataset_name not in num_inst_per_dataset | dataset_name not in num_inst_per_dataset | 0.6437749862670898 |
941 | 939 | def get_home():
if<mask>:
explicit = os.environ.get('HOME', '')
if explicit:
return explicit
uid = os.geteuid()
return pwd.getpwuid(uid).pw_name
return os.path.expanduser('~') | False | False | platform.system() == 'Linux' | 0.6616235971450806 |
942 | 940 | def get_home():
if False:
explicit = os.environ.get('HOME', '')
if<mask>:
return explicit
uid = os.geteuid()
return pwd.getpwuid(uid).pw_name
return os.path.expanduser('~') | True | explicit | explicit | 0.6693891882896423 |
943 | 941 | def maybe_rotate(self):
if<mask>:
self.rotate()
self.rotate_when = self.next_backup(self.freq)
elif self.maxsize:
try:
if os.stat(self.filename)[stat.ST_SIZE] > self.maxsize:
self.rotate()
except os.error:
self.rotate() | False | self.freq and time.time() > self.rotate_when | self.freq | 0.6458740234375 |
944 | 942 | def maybe_rotate(self):
if self.freq and time.time() > self.rotate_when:
self.rotate()
self.rotate_when = self.next_backup(self.freq)
elif<mask>:
try:
if os.stat(self.filename)[stat.ST_SIZE] > self.maxsize:
self.rotate()
except os.error:
self.rotate() | False | self.maxsize | self.filename | 0.6506963968276978 |
945 | 943 | def maybe_rotate(self):
if self.freq and time.time() > self.rotate_when:
self.rotate()
self.rotate_when = self.next_backup(self.freq)
elif self.maxsize:
try:
if<mask>:
self.rotate()
except os.error:
self.rotate() | False | os.stat(self.filename)[stat.ST_SIZE] > self.maxsize | self.rotate_when and time.time() < self.next_backup_max | 0.6449729204177856 |
946 | 944 | def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
out = torch.cat((x0, x1), 1)
out = self.conv2d(out)
out = out * self.scale + x
if<mask>:
out = self.relu(out)
return out | True | not self.noReLU | not self.noReLU | 0.6446675062179565 |
947 | 945 | def compute_average_flops_cost(model):
"""
A method that will be available after add_flops_counting_methods() is called on a desired net object.
Returns current mean flops consumption per image.
"""
batches_count = model.__batch_counter__
flops_sum = 0
for module in model.modules():
if<mask>:
flops_sum += module.__flops__
return flops_sum / batches_count | False | isinstance(module, torch.nn.Conv2d) or isinstance(module, torch.nn.Linear) or isinstance(module, torch.nn.Conv1d) or hasattr(module, 'calculate_flop_self') | is_supported_instance(module) | 0.6470948457717896 |
948 | 946 | def __init__(self, num_sync_devices, **args):
"""
Naive version of Synchronized 3D BatchNorm.
Args:
num_sync_devices (int): number of device to sync.
args (list): other arguments.
"""
self.num_sync_devices = num_sync_devices
if<mask>:
assert du.get_local_size() % self.num_sync_devices == 0, (du.get_local_size(), self.num_sync_devices)
self.num_groups = du.get_local_size() // self.num_sync_devices
else:
self.num_sync_devices = du.get_local_size()
self.num_groups = 1
super(NaiveSyncBatchNorm3d, self).__init__(**args) | True | self.num_sync_devices > 0 | self.num_sync_devices > 0 | 0.6444109082221985 |
949 | 947 | def nextf(self, counter=None):
"""Retrieves the numeric value for the given counter, then
increments it by one. New counters start at one."""
if<mask>:
counter = self._defaultCounter
return self._getCounter(counter).nextf() | False | not counter | counter is None | 0.6655290126800537 |
950 | 948 | @profile.setter
def profile(self, new_profile):
if<mask>:
if new_profile.min() < 0 and (not EnergyFlow.allow_negative_flows):
new_profile[new_profile < 0] = 0.0
self._profile = new_profile
else:
raise ValueError(f'The energy flow profile does not have the correct format, i.e. numerical series of {self.time_frame} time steps or single numerical value.') | False | isinstance(new_profile, pd.Series) and len(new_profile) in [1, self.time_frame] | isinstance(new_profile, np.ndarray) | 0.6473793983459473 |
951 | 949 | @profile.setter
def profile(self, new_profile):
if isinstance(new_profile, pd.Series) and len(new_profile) in [1, self.time_frame]:
if<mask>:
new_profile[new_profile < 0] = 0.0
self._profile = new_profile
else:
raise ValueError(f'The energy flow profile does not have the correct format, i.e. numerical series of {self.time_frame} time steps or single numerical value.') | False | new_profile.min() < 0 and (not EnergyFlow.allow_negative_flows) | new_profile.numel() == 0 | 0.6461544036865234 |
952 | 950 | def get_theme_base_dir(theme_dir_name, suppress_error=False):
"""
Returns absolute path to the directory that contains the given theme.
Args:
theme_dir_name (str): theme directory name to get base path for
suppress_error (bool): if True function will return None if theme is not found instead of raising an error
Returns:
(str): Base directory that contains the given theme
"""
for themes_dir in get_theme_base_dirs():
if theme_dir_name in get_theme_dirs(themes_dir):
return themes_dir
if<mask>:
return None
raise ValueError("Theme '{theme}' not found in any of the following themes dirs, \nTheme dirs: \n{dir}".format(theme=theme_dir_name, dir=get_theme_base_dirs())) | False | suppress_error | suppress_error and theme_dir_name in get_theme_dirs(get_theme_dirs()) | 0.655534029006958 |
953 | 951 | def get_theme_base_dir(theme_dir_name, suppress_error=False):
"""
Returns absolute path to the directory that contains the given theme.
Args:
theme_dir_name (str): theme directory name to get base path for
suppress_error (bool): if True function will return None if theme is not found instead of raising an error
Returns:
(str): Base directory that contains the given theme
"""
for themes_dir in get_theme_base_dirs():
if<mask>:
return themes_dir
if suppress_error:
return None
raise ValueError("Theme '{theme}' not found in any of the following themes dirs, \nTheme dirs: \n{dir}".format(theme=theme_dir_name, dir=get_theme_base_dirs())) | False | theme_dir_name in get_theme_dirs(themes_dir) | theme_dir_name == themes_dir | 0.6445627808570862 |
954 | 952 | def format_framework_integrity_error_message(error, json_framework):
if<mask>:
error_message = 'At least one of `hasDirectAward` or `hasFurtherCompetition` must be True'
elif 'duplicate key value violates unique constraint "ix_frameworks_slug"' in str(error):
error_message = "Slug '{}' already in use".format(json_framework.get('slug', '<unknown slug>'))
elif re.search('Not a [a-z]+? value:', str(error)):
error_message = 'Invalid framework'
else:
error_message = format(error)
return error_message | False | 'violates check constraint "ck_framework_has_direct_award_or_further_competition"' in str(error) | error is None | 0.6478252410888672 |
955 | 953 | def format_framework_integrity_error_message(error, json_framework):
if 'violates check constraint "ck_framework_has_direct_award_or_further_competition"' in str(error):
error_message = 'At least one of `hasDirectAward` or `hasFurtherCompetition` must be True'
elif<mask>:
error_message = "Slug '{}' already in use".format(json_framework.get('slug', '<unknown slug>'))
elif re.search('Not a [a-z]+? value:', str(error)):
error_message = 'Invalid framework'
else:
error_message = format(error)
return error_message | False | 'duplicate key value violates unique constraint "ix_frameworks_slug"' in str(error) | not re.search('^[A-Z]{0,1}$', str(error)) | 0.6502788066864014 |
956 | 954 | def format_framework_integrity_error_message(error, json_framework):
if 'violates check constraint "ck_framework_has_direct_award_or_further_competition"' in str(error):
error_message = 'At least one of `hasDirectAward` or `hasFurtherCompetition` must be True'
elif 'duplicate key value violates unique constraint "ix_frameworks_slug"' in str(error):
error_message = "Slug '{}' already in use".format(json_framework.get('slug', '<unknown slug>'))
elif<mask>:
error_message = 'Invalid framework'
else:
error_message = format(error)
return error_message | False | re.search('Not a [a-z]+? value:', str(error)) | error is None | 0.6435253620147705 |
957 | 955 | def run_test(domain_event) -> None:
if<mask>:
dumped = domain_event.model_dump()
_ = domain_event.__class__(**dumped)
if format_dump:
formatted = domain_event.format()
_ = domain_event.__class__.from_format(formatted) | True | model_dump | model_dump | 0.6605995893478394 |
958 | 956 | def run_test(domain_event) -> None:
if model_dump:
dumped = domain_event.model_dump()
_ = domain_event.__class__(**dumped)
if<mask>:
formatted = domain_event.format()
_ = domain_event.__class__.from_format(formatted) | False | format_dump | format | 0.6591200828552246 |
959 | 957 | def _convert_weights_to_fp16(l):
if<mask>:
l.weight.data = l.weight.data.half()
if l.bias is not None:
l.bias.data = l.bias.data.half() | False | isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)) | l.weight is not None | 0.6517105102539062 |
960 | 958 | def _convert_weights_to_fp16(l):
if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)):
l.weight.data = l.weight.data.half()
if<mask>:
l.bias.data = l.bias.data.half() | True | l.bias is not None | l.bias is not None | 0.650610089302063 |
961 | 959 | @property
def spawnflags(self):
flags = []
if<mask>:
value = self._entity_data.get('spawnflags', None)
for name, (key, _) in {'Use Hitboxes for Renderbox': (64, 0), 'Start with collision disabled': (256, 0), 'Set to NAVIgnore': (512, 0)}.items():
if value & key > 0:
flags.append(name)
return flags | True | 'spawnflags' in self._entity_data | 'spawnflags' in self._entity_data | 0.6513683795928955 |
962 | 960 | @property
def spawnflags(self):
flags = []
if 'spawnflags' in self._entity_data:
value = self._entity_data.get('spawnflags', None)
for name, (key, _) in {'Use Hitboxes for Renderbox': (64, 0), 'Start with collision disabled': (256, 0), 'Set to NAVIgnore': (512, 0)}.items():
if<mask>:
flags.append(name)
return flags | True | value & key > 0 | value & key > 0 | 0.6617642045021057 |
963 | 961 | def interpreter_version(**kwargs):
"""
Returns the version of the running interpreter.
"""
warn = _warn_keyword_parameter('interpreter_version', kwargs)
version = _get_config_var('py_version_nodot', warn=warn)
if<mask>:
version = str(version)
else:
version = _version_nodot(sys.version_info[:2])
return version | False | version | warn | 0.6695005893707275 |
964 | 962 | def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
def const(obj):
if<mask>:
return None
return obj.as_const(eval_ctx)
return slice(const(self.start), const(self.stop), const(self.step)) | True | obj is None | obj is None | 0.6584059000015259 |
965 | 963 | def list_paths(self):
"""Utility method to list all the paths in the jar."""
paths = []
for cookie in iter(self):
if<mask>:
paths.append(cookie.path)
return paths | True | cookie.path not in paths | cookie.path not in paths | 0.6541555523872375 |
966 | 964 | def remove_section(self, section):
if<mask>:
return
for option in self.config.options(section):
self.config.remove_option(section, option)
self.config.remove_section(section)
self.dirty = True | False | not self.config.has_section(section) | self.dirty | 0.6431008577346802 |
967 | 965 | @register.filter('startswith')
def startswith(text, starts):
if<mask>:
return text.startswith(starts)
return False | False | isinstance(text, str) | starts | 0.6442351937294006 |
968 | 966 | def run(self):
self.running = True
while not self.shutdown_flag.is_set():
self.stats.wait_until_ready_to_run()
if<mask>:
self.step()
self.robot.logger.debug('Shutting down SystemMonitorThread') | False | not self.shutdown_flag.is_set() | self.running | 0.6480913162231445 |
969 | 967 | @property
def physdamagescale(self):
if<mask>:
return float(self._entity_data.get('physdamagescale'))
return float(1.0) | True | 'physdamagescale' in self._entity_data | 'physdamagescale' in self._entity_data | 0.6524853706359863 |
970 | 968 | def set_minimum_column_width(self, col, minimum):
if<mask>:
self.setColumnWidth(col, minimum) | True | self.columnWidth(col) < minimum | self.columnWidth(col) < minimum | 0.6505662202835083 |
971 | 969 | def downgrade(migrate_engine):
meta = sql.MetaData()
meta.bind = migrate_engine
if<mask>:
downgrade_with_copy(meta, migrate_engine)
else:
downgrade_with_rename(meta, migrate_engine) | False | migrate_engine.name == 'sqlite' | migrate_engine.dialect.version == '1.6' | 0.6499318480491638 |
972 | 970 | def __init__(self, path: str | None=None) -> None:
self._total_packages = 0
self._rule_match_counts = defaultdict(int)
if<mask>:
self.load(path) | False | path is not None | path | 0.6568262577056885 |
973 | 971 | def _set_slot(self, key: Text, value: Any) -> None:
"""Sets the value of a slot if that slot exists."""
if<mask>:
slot = self.slots[key]
slot.value = value
else:
logger.error(f"Tried to set non existent slot '{key}'. Make sure you added all your slots to your domain file.") | True | key in self.slots | key in self.slots | 0.6535583734512329 |
974 | 972 | def scan(image_path: str) -> Any:
image = numpy.asarray(Image.open(image_path).convert('RGB'))
if<mask>:
image = zbar.misc.rgb2gray(image)
results = scanner.scan(image)
return [(result.type, result.data, result.quality, result.position) for result in results] | False | len(image.shape) == 3 | gray | 0.6498275399208069 |
975 | 973 | def write_wrapped(self, s, extra_room=0):
"""Add a soft line break if needed, then write s."""
if<mask>:
self.write_soft_break()
self.write_str(s) | False | self.room < len(s) + extra_room | extra_room | 0.6464666724205017 |
976 | 974 | @property
def behind_mineral_position_center(self) -> Point2:
if<mask>:
return self.behind_mineral_positions[1]
return self.center_location | True | self.behind_mineral_positions | self.behind_mineral_positions | 0.6496765613555908 |
977 | 975 | def setup_basic_filtering(self, instance, network_info):
"""Set up provider rules and basic NWFilter."""
self.nwfilter.setup_basic_filtering(instance, network_info)
if<mask>:
LOG.debug(_('iptables firewall: Setup Basic Filtering'), instance=instance)
self.refresh_provider_fw_rules()
self.basically_filtered = True | False | not self.basically_filtered | self.iptables_enabled | 0.6487493515014648 |
978 | 976 | def check_ts_counter(self, _uri):
"""
Providers sometime add the same stream section back into the list.
This methods catches this and informs the caller that it should be ignored.
"""
if<mask>:
self.logger.notice('TC Counter Same section being transmitted, ignoring uri: {} m3u8pid:{} proxypid:{}'.format(_uri, self.t_m3u8_pid, os.getpid()))
return False
self.last_ts_filename = _uri
return True | False | _uri == self.last_ts_filename | self.t_m3u8_pid != os.getpid() | 0.6474699378013611 |
979 | 977 | def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Union[HTTPValidationError, str]]:
if<mask>:
response_200 = cast(str, response.json())
return response_200
if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
response_422 = HTTPValidationError.from_dict(response.json())
return response_422
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None | True | response.status_code == HTTPStatus.OK | response.status_code == HTTPStatus.OK | 0.655895471572876 |
980 | 978 | def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Union[HTTPValidationError, str]]:
if response.status_code == HTTPStatus.OK:
response_200 = cast(str, response.json())
return response_200
if<mask>:
response_422 = HTTPValidationError.from_dict(response.json())
return response_422
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None | True | response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY | response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY | 0.6491738557815552 |
981 | 979 | def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Union[HTTPValidationError, str]]:
if response.status_code == HTTPStatus.OK:
response_200 = cast(str, response.json())
return response_200
if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
response_422 = HTTPValidationError.from_dict(response.json())
return response_422
if<mask>:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None | True | client.raise_on_unexpected_status | client.raise_on_unexpected_status | 0.6513196229934692 |
982 | 980 | def get_loss(self, pred, target, mean=True):
if<mask>:
loss = (target - pred).abs()
if mean:
loss = loss.mean()
elif self.loss_type == 'l2':
if mean:
loss = torch.nn.functional.mse_loss(target, pred)
else:
loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
else:
raise NotImplementedError("unknown loss type '{loss_type}'")
return loss | True | self.loss_type == 'l1' | self.loss_type == 'l1' | 0.6491593718528748 |
983 | 981 | def get_loss(self, pred, target, mean=True):
if self.loss_type == 'l1':
loss = (target - pred).abs()
if<mask>:
loss = loss.mean()
elif self.loss_type == 'l2':
if mean:
loss = torch.nn.functional.mse_loss(target, pred)
else:
loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
else:
raise NotImplementedError("unknown loss type '{loss_type}'")
return loss | True | mean | mean | 0.6699435710906982 |
984 | 982 | def get_loss(self, pred, target, mean=True):
if self.loss_type == 'l1':
loss = (target - pred).abs()
if mean:
loss = loss.mean()
elif<mask>:
if mean:
loss = torch.nn.functional.mse_loss(target, pred)
else:
loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
else:
raise NotImplementedError("unknown loss type '{loss_type}'")
return loss | True | self.loss_type == 'l2' | self.loss_type == 'l2' | 0.6494088768959045 |
985 | 983 | def get_loss(self, pred, target, mean=True):
if self.loss_type == 'l1':
loss = (target - pred).abs()
if mean:
loss = loss.mean()
elif self.loss_type == 'l2':
if<mask>:
loss = torch.nn.functional.mse_loss(target, pred)
else:
loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
else:
raise NotImplementedError("unknown loss type '{loss_type}'")
return loss | True | mean | mean | 0.6698806285858154 |
986 | 984 | def get_results(callee):
res_generator = retry_operation_impl(callee, retry_settings=retry_once_settings)
results = []
exc = None
try:
for res in res_generator:
results.append(res)
if<mask>:
break
except Exception as e:
exc = e
return (results, exc) | False | isinstance(res, YdbRetryOperationFinalResult) | 'last_iteration' in res.get_iteration_index | 0.6464452743530273 |
987 | 985 | def createSnapshotOfVolumeSet(self, name, copyOfName, optional=None):
"""Create a snapshot of an existing Volume Set.
:param name: Name of the Snapshot. The vvname pattern is described in
"VV Name Patterns" in the HPE 3PAR Command Line Interface
Reference, which is available at the following
website: http://www.hp.com/go/storage/docs
:type name: str
:param copyOfName: The volume set you want to snapshot
:type copyOfName: str
:param optional: Dictionary of optional params
:type optional: dict
.. code-block:: python
optional = {
'id': 12, # Specifies ID of the volume set
# set, next by default
'comment': "some comment",
'readOnly': True, # Read Only
'expirationHours': 36, # time from now to expire
'retentionHours': 12 # time from now to expire
}
:raises: :class:`~hpe3parclient.exceptions.HTTPBadRequest`
- INVALID_INPUT_VV_PATTERN - Invalid volume pattern specified
:raises: :class:`~hpe3parclient.exceptions.HTTPNotFound`
- NON_EXISTENT_SET - The set does not exist
:raises: :class:`~hpe3parclient.exceptions.HTTPNotFound`
- EMPTY_SET - The set is empty
:raises: :class:`~hpe3parclient.exceptions.HTTPServiceUnavailable`
- VV_LIMIT_REACHED - Maximum number of volumes reached
:raises: :class:`~hpe3parclient.exceptions.HTTPNotFound`
- NON_EXISTENT_VOL - The storage volume does not exist
:raises: :class:`~hpe3parclient.exceptions.HTTPForbidden`
- VV_IS_BEING_REMOVED - The volume is being removed
:raises: :class:`~hpe3parclient.exceptions.HTTPForbidden`
- INV_OPERATION_VV_READONLY_TO_READONLY_SNAP - Creating a
read-only copy from a read- | True | optional | optional | 0.6585502028465271 |
988 | 986 | def path(self, subpath=''):
ret = '%s/%s' % (self.root, self.proj)
if<mask>:
ret = '%s/%s' % (ret, subpath)
return ret | True | subpath | subpath | 0.6598453521728516 |
989 | 987 | def AlgorithmTypeChanged(self, value):
"""Locks and unlocks widgets for cont and ind contCubes"""
value = int(value)
self.AlgorithmWidget.setCurrentIndex(value)
if<mask>:
self.SGSWidget.SeedGB.show()
self.SGSWidget.MaskGB.show()
else:
self.SGSWidget.SeedGB.hide()
self.SGSWidget.MaskGB.hide() | False | value == 3 | value == self.AlgorithmWidget.Seed | 0.6597493886947632 |
990 | 988 | def _do_json_post(self, endpoint, expected_status=200, **kwargs):
url = urljoin(self._host, endpoint)
try:
res = requests.post(url, headers=self._make_headers(), json=kwargs)
except requests.exceptions.ConnectionError as e:
raise ClientConnectionError(f'Failed to connect to API endpoint {self._host}. {e}')
except requests.exceptions.RequestException as e:
raise ClientError(f'API request failed: {e}')
if<mask>:
_raise_for_status(_response_ctx(res), endpoint, expected_status)
return res.json() | True | res.status_code != expected_status | res.status_code != expected_status | 0.6474182605743408 |
991 | 989 | def get_warmup_momentum(self, cur_iters):
if<mask>:
warmup_m = self.warmup_ratio * self.momentum
elif self.warmup == 'linear':
k = (1 - cur_iters / self.warmup_iters) * (1 - self.warmup_ratio)
warmup_m = (1 - k) * self.momentum
elif self.warmup == 'exp':
k = self.warmup_ratio ** (1 - cur_iters / self.warmup_iters)
warmup_m = k * self.momentum
return warmup_m | True | self.warmup == 'constant' | self.warmup == 'constant' | 0.6548739075660706 |
992 | 990 | def get_warmup_momentum(self, cur_iters):
if self.warmup == 'constant':
warmup_m = self.warmup_ratio * self.momentum
elif<mask>:
k = (1 - cur_iters / self.warmup_iters) * (1 - self.warmup_ratio)
warmup_m = (1 - k) * self.momentum
elif self.warmup == 'exp':
k = self.warmup_ratio ** (1 - cur_iters / self.warmup_iters)
warmup_m = k * self.momentum
return warmup_m | True | self.warmup == 'linear' | self.warmup == 'linear' | 0.6550660133361816 |
993 | 991 | def get_warmup_momentum(self, cur_iters):
if self.warmup == 'constant':
warmup_m = self.warmup_ratio * self.momentum
elif self.warmup == 'linear':
k = (1 - cur_iters / self.warmup_iters) * (1 - self.warmup_ratio)
warmup_m = (1 - k) * self.momentum
elif<mask>:
k = self.warmup_ratio ** (1 - cur_iters / self.warmup_iters)
warmup_m = k * self.momentum
return warmup_m | True | self.warmup == 'exp' | self.warmup == 'exp' | 0.6567461490631104 |
994 | 992 | def squeezenet1_0(num_classes, loss='softmax', pretrained=True, **kwargs):
model = SqueezeNet(num_classes, loss, version=1.0, fc_dims=None, dropout_p=None, **kwargs)
if<mask>:
init_pretrained_weights(model, model_urls['squeezenet1_0'])
return model | True | pretrained | pretrained | 0.6663972735404968 |
995 | 993 | def _forward(self, inputs, return_tensors=False):
"""
Internal framework specific forward dispatching.
Args:
inputs: dict holding all the keyworded arguments for required by the model forward method.
return_tensors: Whether to return native framework (pt/tf) tensors rather than numpy array.
Returns:
Numpy array
"""
with self.device_placement():
if self.framework == 'tf':
predictions = self.model(inputs.data, training=False)[0]
else:
with torch.no_grad():
inputs = self.ensure_tensor_on_device(**inputs)
predictions = self.model(**inputs)[0].cpu()
if<mask>:
return predictions
else:
return predictions.numpy() | True | return_tensors | return_tensors | 0.6540572643280029 |
996 | 994 | def _forward(self, inputs, return_tensors=False):
"""
Internal framework specific forward dispatching.
Args:
inputs: dict holding all the keyworded arguments for required by the model forward method.
return_tensors: Whether to return native framework (pt/tf) tensors rather than numpy array.
Returns:
Numpy array
"""
with self.device_placement():
if<mask>:
predictions = self.model(inputs.data, training=False)[0]
else:
with torch.no_grad():
inputs = self.ensure_tensor_on_device(**inputs)
predictions = self.model(**inputs)[0].cpu()
if return_tensors:
return predictions
else:
return predictions.numpy() | True | self.framework == 'tf' | self.framework == 'tf' | 0.6478662490844727 |
997 | 995 | def generate_dict_getter_function(self, scope, code):
dict_attr = scope.lookup_here('__dict__')
if<mask>:
return
func_name = scope.mangle_internal('__dict__getter')
dict_name = dict_attr.cname
code.putln('')
code.putln('static PyObject *%s(PyObject *o, CYTHON_UNUSED void *x) {' % func_name)
self.generate_self_cast(scope, code)
code.putln('if (unlikely(!p->%s)){' % dict_name)
code.putln('p->%s = PyDict_New();' % dict_name)
code.putln('}')
code.putln('Py_XINCREF(p->%s);' % dict_name)
code.putln('return p->%s;' % dict_name)
code.putln('}') | False | not dict_attr or not dict_attr.is_variable | dict_attr is None | 0.6467263698577881 |
998 | 996 | def _append_pseudo_questions(self, survey):
_survey = []
for item in survey:
_survey.append(item)
if<mask>:
_survey.append({'type': 'text', 'name': f"{item['name']}_other", 'label': [None] * len(self.translations)})
return _survey | False | item.get('_or_other', False) | item['type'] == 'pseudo' and item['name'] in self.translations | 0.6456488370895386 |
999 | 997 | def step(self):
dt = self.gravSys.dt
self.setpos(self.pos() + dt * self.v)
if<mask>:
self.setheading(self.towards(self.gravSys.planets[0]))
self.a = self.acc()
self.v = self.v + dt * self.a | False | self.gravSys.planets.index(self) != 0 | self.heading is not None | 0.6483408212661743 |
1000 | 998 | def goto(path):
eepath = get_external_editor_path()
if<mask>:
print('Going to:')
print(path)
open_in_external_editor(path) | True | eepath is not None | eepath is not None | 0.6480452418327332 |
1001 | 999 | def _called_with_cfg(*args, **kwargs):
"""
Returns:
bool: whether the arguments contain CfgNode and should be considered
forwarded to from_config.
"""
from omegaconf import DictConfig
if<mask>:
return True
if isinstance(kwargs.pop('cfg', None), (_CfgNode, DictConfig)):
return True
return False | False | len(args) and isinstance(args[0], (_CfgNode, DictConfig)) | len(args) and isinstance(args[0], _CfgNode) | 0.6490945816040039 |
1002 | 1000 | def _called_with_cfg(*args, **kwargs):
"""
Returns:
bool: whether the arguments contain CfgNode and should be considered
forwarded to from_config.
"""
from omegaconf import DictConfig
if len(args) and isinstance(args[0], (_CfgNode, DictConfig)):
return True
if<mask>:
return True
return False | False | isinstance(kwargs.pop('cfg', None), (_CfgNode, DictConfig)) | isinstance(kwargs.pop('cfg', None), _CfgNode) | 0.647269606590271 |
1003 | 1001 | def matches_blob_above(self, i, j):
"""Returns true if the current point matches the point above.
Args:
i (int): the x-coordinate in self.matrix
j (int): the y-coordinate in self.matrix
Returns:
bool specifying whether the current point matches the point above.
"""
if<mask>:
return False
matches_above = self.matrix.at(i, j - 1).value == self.color_to_find
return matches_above | False | j == 0 | i == j | 0.6693016886711121 |
1004 | 1002 | def publish_traceback(debug_server_urls, graph, feed_dict, fetches, old_graph_version):
"""Publish traceback and source code if graph version is new.
`graph.version` is compared with `old_graph_version`. If the former is higher
(i.e., newer), the graph traceback and the associated source code is sent to
the debug server at the specified gRPC URLs.
Args:
debug_server_urls: A single gRPC debug server URL as a `str` or a `list` of
debug server URLs.
graph: A Python `tf.Graph` object.
feed_dict: Feed dictionary given to the `Session.run()` call.
fetches: Fetches from the `Session.run()` call.
old_graph_version: Old graph version to compare to.
Returns:
If `graph.version > old_graph_version`, the new graph version as an `int`.
Else, the `old_graph_version` is returned.
"""
from tensorflow.python.debug.lib import source_remote
if<mask>:
run_key = common.get_run_key(feed_dict, fetches)
source_remote.send_graph_tracebacks(debug_server_urls, run_key, traceback.extract_stack(), graph, send_source=True)
return graph.version
else:
return old_graph_version | False | graph.version > old_graph_version | feed_dict.get('feed_dict') is not None | 0.648255467414856 |
1005 | 1003 | def load_certificate(source):
"""
Loads an x509 certificate into a Certificate object
:param source:
A byte string of file contents, a unicode string filename or an
asn1crypto.x509.Certificate object
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A Certificate object
"""
if<mask>:
certificate = source
elif isinstance(source, byte_cls):
certificate = parse_certificate(source)
elif isinstance(source, str_cls):
with open(source, 'rb') as f:
certificate = parse_certificate(f.read())
else:
raise TypeError(pretty_message('\n source must be a byte string, unicode string or\n asn1crypto.x509.Certificate object, not %s\n ', type_name(source)))
return _load_x509(certificate) | True | isinstance(source, Asn1Certificate) | isinstance(source, Asn1Certificate) | 0.6471686363220215 |
1006 | 1004 | def load_certificate(source):
"""
Loads an x509 certificate into a Certificate object
:param source:
A byte string of file contents, a unicode string filename or an
asn1crypto.x509.Certificate object
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A Certificate object
"""
if isinstance(source, Asn1Certificate):
certificate = source
elif<mask>:
certificate = parse_certificate(source)
elif isinstance(source, str_cls):
with open(source, 'rb') as f:
certificate = parse_certificate(f.read())
else:
raise TypeError(pretty_message('\n source must be a byte string, unicode string or\n asn1crypto.x509.Certificate object, not %s\n ', type_name(source)))
return _load_x509(certificate) | True | isinstance(source, byte_cls) | isinstance(source, byte_cls) | 0.6460551023483276 |
1007 | 1005 | def load_certificate(source):
"""
Loads an x509 certificate into a Certificate object
:param source:
A byte string of file contents, a unicode string filename or an
asn1crypto.x509.Certificate object
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A Certificate object
"""
if isinstance(source, Asn1Certificate):
certificate = source
elif isinstance(source, byte_cls):
certificate = parse_certificate(source)
elif<mask>:
with open(source, 'rb') as f:
certificate = parse_certificate(f.read())
else:
raise TypeError(pretty_message('\n source must be a byte string, unicode string or\n asn1crypto.x509.Certificate object, not %s\n ', type_name(source)))
return _load_x509(certificate) | True | isinstance(source, str_cls) | isinstance(source, str_cls) | 0.6460889577865601 |
1008 | 1006 | def get_confidence(self):
"""return confidence based on existing data"""
if<mask>:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio)
if r < SURE_YES:
return r
return SURE_YES | True | self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD | self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD | 0.6502353549003601 |
1009 | 1007 | def get_confidence(self):
"""return confidence based on existing data"""
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if<mask>:
r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio)
if r < SURE_YES:
return r
return SURE_YES | True | self._mTotalChars != self._mFreqChars | self._mTotalChars != self._mFreqChars | 0.6556333303451538 |
1010 | 1008 | def get_confidence(self):
"""return confidence based on existing data"""
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = self._mFreqChars / ((self._mTotalChars - self._mFreqChars) * self._mTypicalDistributionRatio)
if<mask>:
return r
return SURE_YES | True | r < SURE_YES | r < SURE_YES | 0.6624365448951721 |
1011 | 1009 | def do_activate(self):
win = self.props.active_window
if<mask>:
win = ExampleWindow(application=self)
win.present() | True | not win | not win | 0.6659681797027588 |
1012 | 1010 | def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a."""
if<mask>:
return '.'
return '+' | True | '+' in pieces.get('closest-tag', '') | '+' in pieces.get('closest-tag', '') | 0.6440763473510742 |
1013 | 1011 | def register_vfe(cls, name=None):
global REGISTERED_VFE_CLASSES
if<mask>:
name = cls.__name__
assert name not in REGISTERED_VFE_CLASSES, f'exist class: {REGISTERED_VFE_CLASSES}'
REGISTERED_VFE_CLASSES[name] = cls
return cls | True | name is None | name is None | 0.670485258102417 |
1014 | 1012 | def set_state(self, state_obj):
"""
Set the value of the state object for this parser
@param state_obj The object to set the state to.
@throws DatasetParserException if there is a bad state structure
"""
if<mask>:
raise DatasetParserException('Invalid state structure')
if not Vel3dKWfpStcStateKey.FIRST_RECORD in state_obj or not Vel3dKWfpStcStateKey.POSITION in state_obj or (not Vel3dKWfpStcStateKey.VELOCITY_END in state_obj):
raise DatasetParserException('Invalid state keys')
self._timestamp = 0.0
self._record_buffer = []
self._state = state_obj
self._read_state = state_obj
self.input_file.seek(self._read_state[Vel3dKWfpStcStateKey.POSITION], 0) | True | not isinstance(state_obj, dict) | not isinstance(state_obj, dict) | 0.6490800380706787 |
1015 | 1013 | def set_state(self, state_obj):
"""
Set the value of the state object for this parser
@param state_obj The object to set the state to.
@throws DatasetParserException if there is a bad state structure
"""
if not isinstance(state_obj, dict):
raise DatasetParserException('Invalid state structure')
if<mask>:
raise DatasetParserException('Invalid state keys')
self._timestamp = 0.0
self._record_buffer = []
self._state = state_obj
self._read_state = state_obj
self.input_file.seek(self._read_state[Vel3dKWfpStcStateKey.POSITION], 0) | False | not Vel3dKWfpStcStateKey.FIRST_RECORD in state_obj or not Vel3dKWfpStcStateKey.POSITION in state_obj or (not Vel3dKWfpStcStateKey.VELOCITY_END in state_obj) | not (Vel3dKWfpStateKey.POSITION in state_obj and Vel3dKWfpStateKey.POSITION not in state_obj) | 0.6588072776794434 |
1016 | 1014 | def focusInEvent(self, event):
if<mask>:
self.tooltip_label.move(self.mapToGlobal(QPoint(0, self.height() - 70)))
self.tooltip_label.setText(self.tooltip_text)
self.tooltip_label.show()
super(AdjustableTextEdit, self).focusInEvent(event) | False | self.label_enabled and self.tooltip_text | self.tooltip_text | 0.6472121477127075 |
1017 | 1015 | def check_document_start(self):
if<mask>:
if self.prefix(3) == '---' and self.peek(3) in '\x00 \t\r\n\x85\u2028\u2029':
return True | False | self.column == 0 | self.version >= 3 | 0.6550612449645996 |
1018 | 1016 | def check_document_start(self):
if self.column == 0:
if<mask>:
return True | False | self.prefix(3) == '---' and self.peek(3) in '\x00 \t\r\n\x85\u2028\u2029' | self.document_start_column is None or self.document_start_column is None | 0.6489856243133545 |
1019 | 1017 | def handle_action(self, action: Action) -> None:
"""Modify state in reaction to an action."""
if<mask>:
self._handle_command(action.command) | False | isinstance(action, UpdateCommandAction) | isinstance(action, Command) | 0.6490628719329834 |
1020 | 1018 | def __del__(self):
for i in range(4):
if<mask>:
del self.dockableContainer[i] | False | not self.dockableContainer[i] | i in self.dockableContainer | 0.6510080695152283 |
1021 | 1019 | def extract_feature_map(input_map, kmeans, centroids):
feature_map = []
for item in input_map:
temp_dict = {}
temp_dict['label'] = item['label']
print('Extracting features for', item['image'])
img = cv2.imread(item['image'])
img = resize_to_size(img, 150)
temp_dict['feature_vector'] = FeatureExtractor().get_feature_vector(img, kmeans, centroids)
if<mask>:
feature_map.append(temp_dict)
return feature_map | False | temp_dict['feature_vector'] is not None | temp_dict | 0.6462053060531616 |
1022 | 1020 | def filter_data(self, min_len, max_len):
logging.info(f'Filtering data, min len: {min_len}, max len: {max_len}')
initial_len = len(self.src)
filtered_src = []
for src in self.src:
if<mask>:
filtered_src.append(src)
self.src = filtered_src
filtered_len = len(self.src)
logging.info(f'Pairs before: {initial_len}, after: {filtered_len}') | False | min_len <= len(src) <= max_len | min_len <= len(src) <= max_len and min_len <= src <= max_len | 0.649456262588501 |
1023 | 1021 | def tearDown(self):
if<mask>:
self.shell.cs.clear_callstack()
super(ShellFixture, self).tearDown() | False | hasattr(self.shell, 'cs') | self.shell.cs | 0.6455273032188416 |
1024 | 1022 | def getattr(self, obj, attribute):
"""Subscribe an object from sandboxed code and prefer the
attribute. The attribute passed *must* be a bytestring.
"""
try:
value = getattr(obj, attribute)
except AttributeError:
try:
return obj[attribute]
except (TypeError, LookupError):
pass
else:
if<mask>:
return value
return self.unsafe_undefined(obj, attribute)
return self.undefined(obj=obj, name=attribute) | True | self.is_safe_attribute(obj, attribute, value) | self.is_safe_attribute(obj, attribute, value) | 0.6423095464706421 |
1025 | 1023 | def _find_ucc_global_config_json(app_root, ucc_config_filename):
"""Find UCC config file from all possible directories"""
candidates = ['local', 'default', 'bin', op.join('appserver', 'static', 'js', 'build')]
for candidate in candidates:
file_path = op.join(app_root, candidate, ucc_config_filename)
if<mask>:
return file_path
raise RuntimeError('Unable to load %s from [%s]' % (ucc_config_filename, ','.join(candidates))) | False | op.isfile(file_path) | os.path.exists(file_path) | 0.6454447507858276 |
1026 | 1024 | def get_vluns_for_host(host_name):
ret = []
for vlun in vluns['members']:
if<mask>:
ret.append(vlun)
return ret | False | vlun['hostname'] == host_name | vlun['host'] == host_name | 0.6494061350822449 |
1027 | 1025 | def run(self):
self.filelist = FileList()
if<mask>:
self.write_manifest()
self.add_defaults()
if os.path.exists(self.template):
self.read_template()
self.add_license_files()
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
self.write_manifest() | False | not os.path.exists(self.manifest) | os.path.exists(self.manifest) | 0.6469367742538452 |
1028 | 1026 | def run(self):
self.filelist = FileList()
if not os.path.exists(self.manifest):
self.write_manifest()
self.add_defaults()
if<mask>:
self.read_template()
self.add_license_files()
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
self.write_manifest() | True | os.path.exists(self.template) | os.path.exists(self.template) | 0.6487430334091187 |
1029 | 1027 | def _init_learning_rate(self):
self.eta0_ = self.eta0
if<mask>:
typw = numpy.sqrt(1.0 / numpy.sqrt(self.alpha))
self.eta0_ = typw / max(1.0, (1 + typw) * 2)
self.optimal_init_ = 1.0 / (self.eta0_ * self.alpha)
else:
self.eta0_ = self.eta0
return self.eta0_ | False | self.learning_rate == 'optimal' | self.eta0_ is None or self.eta0_ is None | 0.646294355392456 |
1030 | 1028 | def get_outputs_filtered(self, owner, spent=None):
"""
Get a list of output links filtered on some criteria
Args:
owner (str): base58 encoded public_key.
spent (bool): If ``True`` return only the spent outputs. If
``False`` return only unspent outputs. If spent is
not specified (``None``) return all outputs.
Returns:
:obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s
pointing to another transaction's condition
"""
outputs = self.fastquery.get_outputs_by_public_key(owner)
if<mask>:
return outputs
elif spent is True:
return self.fastquery.filter_unspent_outputs(outputs)
elif spent is False:
return self.fastquery.filter_spent_outputs(outputs) | False | spent is None | not spent | 0.6583720445632935 |
1031 | 1029 | def get_outputs_filtered(self, owner, spent=None):
"""
Get a list of output links filtered on some criteria
Args:
owner (str): base58 encoded public_key.
spent (bool): If ``True`` return only the spent outputs. If
``False`` return only unspent outputs. If spent is
not specified (``None``) return all outputs.
Returns:
:obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s
pointing to another transaction's condition
"""
outputs = self.fastquery.get_outputs_by_public_key(owner)
if spent is None:
return outputs
elif<mask>:
return self.fastquery.filter_unspent_outputs(outputs)
elif spent is False:
return self.fastquery.filter_spent_outputs(outputs) | True | spent is True | spent is True | 0.6569360494613647 |
1032 | 1030 | def get_outputs_filtered(self, owner, spent=None):
"""
Get a list of output links filtered on some criteria
Args:
owner (str): base58 encoded public_key.
spent (bool): If ``True`` return only the spent outputs. If
``False`` return only unspent outputs. If spent is
not specified (``None``) return all outputs.
Returns:
:obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s
pointing to another transaction's condition
"""
outputs = self.fastquery.get_outputs_by_public_key(owner)
if spent is None:
return outputs
elif spent is True:
return self.fastquery.filter_unspent_outputs(outputs)
elif<mask>:
return self.fastquery.filter_spent_outputs(outputs) | True | spent is False | spent is False | 0.656681478023529 |
1033 | 1031 | def add_pth(self, pth_file: str, entry: str) -> None:
pth_file = normalize_path(pth_file)
if<mask>:
if pth_file not in self._pth:
self._pth[pth_file] = UninstallPthEntries(pth_file)
self._pth[pth_file].add(entry)
else:
self._refuse.add(pth_file) | True | self._permitted(pth_file) | self._permitted(pth_file) | 0.6468223333358765 |
1034 | 1032 | def add_pth(self, pth_file: str, entry: str) -> None:
pth_file = normalize_path(pth_file)
if self._permitted(pth_file):
if<mask>:
self._pth[pth_file] = UninstallPthEntries(pth_file)
self._pth[pth_file].add(entry)
else:
self._refuse.add(pth_file) | True | pth_file not in self._pth | pth_file not in self._pth | 0.6554079055786133 |
1035 | 1033 | def deprecated(message=None):
"""A decorator for deprecated functions"""
def _decorator(func, message=message):
if<mask>:
message = '%s is deprecated' % func.__name__
def newfunc(*args, **kwds):
warnings.warn(message, DeprecationWarning, stacklevel=2)
return func(*args, **kwds)
return newfunc
return _decorator | True | message is None | message is None | 0.6576377749443054 |
1036 | 1034 | def get_image_object(self, ccd, **kwargs):
if<mask>:
return self.north.get_image_object(ccd, **kwargs)
return self.south.get_image_object(ccd, **kwargs) | False | ccd.is_north | self.orth.get_distribution_type() == 'north' | 0.6492307186126709 |
1037 | 1035 | def login(self, name=user_name):
"""Logs in."""
uf = self.portal.acl_users
user = uf.getUserById(name)
if<mask>:
user = user.__of__(uf)
newSecurityManager(None, user) | False | not hasattr(user, 'aq_base') | user.__of__ | 0.6524370908737183 |
1038 | 1036 | def pdeque(iterable=(), maxlen=None):
"""
Return deque containing the elements of iterable. If maxlen is specified then
len(iterable) - maxlen elements are discarded from the left to if len(iterable) > maxlen.
>>> pdeque([1, 2, 3])
pdeque([1, 2, 3])
>>> pdeque([1, 2, 3, 4], maxlen=2)
pdeque([3, 4], maxlen=2)
"""
t = tuple(iterable)
if<mask>:
t = t[-maxlen:]
length = len(t)
pivot = int(length / 2)
left = plist(t[:pivot])
right = plist(t[pivot:], reverse=True)
return PDeque(left, right, length, maxlen) | True | maxlen is not None | maxlen is not None | 0.6523933410644531 |
1039 | 1037 | def build_data_filter(cfg: CfgNode):
if<mask>:
min_score = cfg.MIN_VALUE
return ScoreBasedFilter(min_score=min_score)
raise ValueError(f'Unknown data filter type {cfg.TYPE}') | False | cfg.TYPE == 'detection_score' | cfg.TYPE == 'ScoreBasedFilter' | 0.6534725427627563 |
1040 | 1038 | def find_root(self, bones):
for b in bones:
if<mask>:
return b
return bones[0] | False | b.parent not in bones | b['isthing'] | 0.6545785665512085 |
1041 | 1039 | def _on_account_acquired(self, account):
with self.unlock_cond:
if<mask>:
msg = 'attempt to acquire unknown account %s' % account
raise Exception(msg)
if account not in self.unlocked_accounts:
raise Exception('account %s is already locked' % account)
self.unlocked_accounts.remove(account)
self.unlock_cond.notify_all()
return account | False | account not in self.accounts | account not in self.available_accounts | 0.6565709710121155 |
1042 | 1040 | def _on_account_acquired(self, account):
with self.unlock_cond:
if account not in self.accounts:
msg = 'attempt to acquire unknown account %s' % account
raise Exception(msg)
if<mask>:
raise Exception('account %s is already locked' % account)
self.unlocked_accounts.remove(account)
self.unlock_cond.notify_all()
return account | False | account not in self.unlocked_accounts | account in self.locked_accounts | 0.6507906913757324 |
1043 | 1041 | @instrument_w_nvtx
def backward(self, loss, retain_graph=False):
"""
:attr:`backward` performs the following steps:
1. fp32_loss = loss.float()
2. scaled_loss = fp32_loss*loss_scale
3. scaled_loss.backward(), which accumulates scaled gradients into the ``.grad`` attributes of the model's fp16 leaves
"""
if<mask>:
self.optimizer_swapper.pre_backward()
see_memory_usage(f'Before backward', force=False)
if self.custom_loss_scaler:
scaled_loss = self.external_loss_scale * loss
scaled_loss.backward()
else:
self.loss_scaler.backward(loss.float(), retain_graph=retain_graph)
self._get_param_coordinator(training=True).reset_step()
if self.swap_optimizer:
self.optimizer_swapper.post_backward() | True | self.swap_optimizer | self.swap_optimizer | 0.6530719995498657 |
1044 | 1042 | @instrument_w_nvtx
def backward(self, loss, retain_graph=False):
"""
:attr:`backward` performs the following steps:
1. fp32_loss = loss.float()
2. scaled_loss = fp32_loss*loss_scale
3. scaled_loss.backward(), which accumulates scaled gradients into the ``.grad`` attributes of the model's fp16 leaves
"""
if self.swap_optimizer:
self.optimizer_swapper.pre_backward()
see_memory_usage(f'Before backward', force=False)
if<mask>:
scaled_loss = self.external_loss_scale * loss
scaled_loss.backward()
else:
self.loss_scaler.backward(loss.float(), retain_graph=retain_graph)
self._get_param_coordinator(training=True).reset_step()
if self.swap_optimizer:
self.optimizer_swapper.post_backward() | False | self.custom_loss_scaler | self.external_loss_scale is not None | 0.6482833623886108 |
1045 | 1043 | @instrument_w_nvtx
def backward(self, loss, retain_graph=False):
"""
:attr:`backward` performs the following steps:
1. fp32_loss = loss.float()
2. scaled_loss = fp32_loss*loss_scale
3. scaled_loss.backward(), which accumulates scaled gradients into the ``.grad`` attributes of the model's fp16 leaves
"""
if self.swap_optimizer:
self.optimizer_swapper.pre_backward()
see_memory_usage(f'Before backward', force=False)
if self.custom_loss_scaler:
scaled_loss = self.external_loss_scale * loss
scaled_loss.backward()
else:
self.loss_scaler.backward(loss.float(), retain_graph=retain_graph)
self._get_param_coordinator(training=True).reset_step()
if<mask>:
self.optimizer_swapper.post_backward() | True | self.swap_optimizer | self.swap_optimizer | 0.6535240411758423 |
1046 | 1044 | def _step_alpha(self, action):
used_edge_cpus = collections.defaultdict(float)
action = action.flatten()[:-1].reshape(1, -1)
for client_id, alpha in list(zip(self.clients.keys(), action)):
used_edge_cpus[client_id] = self.clients[client_id].do_tasks(alpha)
state = self._get_obs(scale=GHZ)
if<mask>:
print('alpha', 1 - sum(sum(action)))
return (used_edge_cpus, state) | False | self.timestamp % 1000 == 0 | self._verbose | 0.6513635516166687 |
1047 | 1045 | def get_gitdir(self, rpc: str):
"""Determine the git repository for this request"""
gitdir = self.gitlookup(rpc)
if<mask>:
raise HTTPError(404, 'unable to find repository')
self.log.info('Accessing git at: %s', gitdir)
return gitdir | False | gitdir is None | not gitdir | 0.6544747352600098 |
1048 | 1046 | def handle_dictelement(self, node, i):
if<mask>:
key = None
value = self.handle_expr(node.children[i + 1])
i += 2
else:
key = self.handle_expr(node.children[i])
value = self.handle_expr(node.children[i + 2])
i += 3
return (i, key, value) | False | node.children[i].type == tokens.DOUBLESTAR | node.type == Node.ELEMENT_NODE | 0.644324541091919 |
1049 | 1047 | def _cb_panel_3(self, button):
if<mask>:
movie_info = self._movie_db.get_data(self._current_url)
tmdb = TMDBv3(lang=ini.get('movies', 'info_lang'))
tmdb.get_posters(movie_info['tmdb_id'], self._cb_posters_list_complete) | False | self._movie_db.id_exists(self._current_url) | button.button() == Qt.LeftButton | 0.6484020948410034 |
1050 | 1048 | def draw_post(self, surface: 'pygame.Surface') -> 'Decorator':
"""
Draw post.
:param surface: Pygame surface
:return: Self reference
"""
if<mask>:
self._draw(self._decor[DECOR_TYPE_POST], surface)
else:
self._draw_assemble_cache(DECOR_TYPE_POST, self._decor[DECOR_TYPE_POST], surface)
return self | False | not self.cache | self._is_cached | 0.6534846425056458 |
1051 | 1049 | def digit_version(version_str):
digit_version = []
for x in version_str.split('.'):
if<mask>:
digit_version.append(int(x))
elif x.find('rc') != -1:
patch_version = x.split('rc')
digit_version.append(int(patch_version[0]) - 1)
digit_version.append(int(patch_version[1]))
return digit_version | True | x.isdigit() | x.isdigit() | 0.6501805782318115 |
1052 | 1050 | def digit_version(version_str):
digit_version = []
for x in version_str.split('.'):
if x.isdigit():
digit_version.append(int(x))
elif<mask>:
patch_version = x.split('rc')
digit_version.append(int(patch_version[0]) - 1)
digit_version.append(int(patch_version[1]))
return digit_version | True | x.find('rc') != -1 | x.find('rc') != -1 | 0.6460107564926147 |
1053 | 1051 | def parse(input):
fullpath = dsz.ui.GetString('Please enter the full path to the file you want to parse: ', '')
if<mask>:
dsz.ui.Echo('No string entered', dsz.ERROR)
return False
success = parsefile(fullpath)
if not success:
return False
return True | True | fullpath == '' | fullpath == '' | 0.655876874923706 |
1054 | 1052 | def parse(input):
fullpath = dsz.ui.GetString('Please enter the full path to the file you want to parse: ', '')
if fullpath == '':
dsz.ui.Echo('No string entered', dsz.ERROR)
return False
success = parsefile(fullpath)
if<mask>:
return False
return True | True | not success | not success | 0.6560096740722656 |
1055 | 1053 | def get_ovs_use_veth(self):
"""Return correct ovs_use_veth setting for use in dhcp_agent.ini.
Get the right value from config or existing dhcp_agent.ini file.
Existing has precedence. Attempt to default to "False" without
disrupting existing deployments. Handle existing deployments and
upgrades safely. See LP Bug#1831935
:returns: Value to use for ovs_use_veth setting
:rtype: Bool
"""
_existing = self.get_existing_ovs_use_veth()
if<mask>:
return _existing
_config = self.parse_ovs_use_veth()
if _config is None:
return False
else:
return _config | True | _existing is not None | _existing is not None | 0.651789665222168 |
1056 | 1054 | def get_ovs_use_veth(self):
"""Return correct ovs_use_veth setting for use in dhcp_agent.ini.
Get the right value from config or existing dhcp_agent.ini file.
Existing has precedence. Attempt to default to "False" without
disrupting existing deployments. Handle existing deployments and
upgrades safely. See LP Bug#1831935
:returns: Value to use for ovs_use_veth setting
:rtype: Bool
"""
_existing = self.get_existing_ovs_use_veth()
if _existing is not None:
return _existing
_config = self.parse_ovs_use_veth()
if<mask>:
return False
else:
return _config | True | _config is None | _config is None | 0.6502765417098999 |
1057 | 1055 | def split_sections(s):
"""Split a string or iterable thereof into (section, content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
and each ``content`` is a list of stripped lines excluding blank lines and
comment-only lines. If there are any such lines before the first section
header, they're returned in a first ``section`` of ``None``.
"""
section = None
content = []
for line in yield_lines(s):
if<mask>:
if line.endswith(']'):
if section or content:
yield (section, content)
section = line[1:-1].strip()
content = []
else:
raise ValueError('Invalid section heading', line)
else:
content.append(line)
yield (section, content) | True | line.startswith('[') | line.startswith('[') | 0.6467174291610718 |
1058 | 1056 | def split_sections(s):
"""Split a string or iterable thereof into (section, content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
and each ``content`` is a list of stripped lines excluding blank lines and
comment-only lines. If there are any such lines before the first section
header, they're returned in a first ``section`` of ``None``.
"""
section = None
content = []
for line in yield_lines(s):
if line.startswith('['):
if<mask>:
if section or content:
yield (section, content)
section = line[1:-1].strip()
content = []
else:
raise ValueError('Invalid section heading', line)
else:
content.append(line)
yield (section, content) | True | line.endswith(']') | line.endswith(']') | 0.6429246664047241 |
1059 | 1057 | def split_sections(s):
"""Split a string or iterable thereof into (section, content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
and each ``content`` is a list of stripped lines excluding blank lines and
comment-only lines. If there are any such lines before the first section
header, they're returned in a first ``section`` of ``None``.
"""
section = None
content = []
for line in yield_lines(s):
if line.startswith('['):
if line.endswith(']'):
if<mask>:
yield (section, content)
section = line[1:-1].strip()
content = []
else:
raise ValueError('Invalid section heading', line)
else:
content.append(line)
yield (section, content) | True | section or content | section or content | 0.6555565595626831 |
1060 | 1058 | def _prepare_stub_instance(self, stub):
if<mask>:
self._stub_instances[stub] = stub(self._channel) | False | stub not in self._stub_instances | stub is not None | 0.6551868915557861 |
1061 | 1059 | def _execute(self, e):
if<mask>:
self.exec_func(self.x, self.y, self.v)
self.anim.event_source.start()
self.executing = True | True | not self.executing | not self.executing | 0.6540613770484924 |
1062 | 1060 | def __call__(self, bboxes1, bboxes2, mode='iou', is_aligned=False):
"""Calculate IoU between 2D bboxes.
Args:
bboxes1 (Tensor): bboxes have shape (m, 4) in <x1, y1, x2, y2>
format, or shape (m, 5) in <x1, y1, x2, y2, score> format.
bboxes2 (Tensor): bboxes have shape (m, 4) in <x1, y1, x2, y2>
format, shape (m, 5) in <x1, y1, x2, y2, score> format, or be
empty. If ``is_aligned`` is ``True``, then m and n must be
equal.
mode (str): "iou" (intersection over union), "iof" (intersection
over foreground), or "giou" (generalized intersection over
union).
is_aligned (bool, optional): If True, then m and n must be equal.
Default False.
Returns:
Tensor: shape (m, n) if ``is_aligned`` is False else shape (m,)
"""
assert bboxes1.size(-1) in [0, 4, 5]
assert bboxes2.size(-1) in [0, 4, 5]
if<mask>:
bboxes2 = bboxes2[..., :4]
if bboxes1.size(-1) == 5:
bboxes1 = bboxes1[..., :4]
return bbox_overlaps(bboxes1, bboxes2, mode, is_aligned) | True | bboxes2.size(-1) == 5 | bboxes2.size(-1) == 5 | 0.6455575227737427 |
1061 | def __call__(self, bboxes1, bboxes2, mode='iou', is_aligned=False):
"""Calculate IoU between 2D bboxes.
Args:
bboxes1 (Tensor): bboxes have shape (m, 4) in <x1, y1, x2, y2>
format, or shape (m, 5) in <x1, y1, x2, y2, score> format.
bboxes2 (Tensor): bboxes have shape (m, 4) in <x1, y1, x2, y2>
format, shape (m, 5) in <x1, y1, x2, y2, score> format, or be
empty. If ``is_aligned`` is ``True``, then m and n must be
equal.
mode (str): "iou" (intersection over union), "iof" (intersection
over foreground), or "giou" (generalized intersection over
union).
is_aligned (bool, optional): If True, then m and n must be equal.
Default False.
Returns:
Tensor: shape (m, n) if ``is_aligned`` is False else shape (m,)
"""
assert bboxes1.size(-1) in [0, 4, 5]
assert bboxes2.size(-1) in [0, 4, 5]
if bboxes2.size(-1) == 5:
bboxes2 = bboxes2[..., :4]
if<mask>:
bboxes1 = bboxes1[..., :4]
return bbox_overlaps(bboxes1, bboxes2, mode, is_aligned) | True | bboxes1.size(-1) == 5 | bboxes1.size(-1) == 5 | 0.6457552909851074 |
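
Both masked conditions in the two `__call__` rows above strip an optional trailing score column before computing overlaps. A hedged sketch of that normalization step (PyTorch assumed, as in the original mmdet-style code):

```python
import torch

def strip_scores(bboxes: torch.Tensor) -> torch.Tensor:
    # (..., 5) boxes carry <x1, y1, x2, y2, score>; drop the score so
    # only the four coordinates reach the overlap computation.
    return bboxes[..., :4] if bboxes.size(-1) == 5 else bboxes

boxes = torch.tensor([[0.0, 0.0, 10.0, 10.0, 0.9]])
assert strip_scores(boxes).shape == (1, 4)
```
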
1062 | @environmentfilter
def do_attr(environment, obj, name):
"""Get an attribute of an object. ``foo|attr("bar")`` works like
``foo.bar`` just that always an attribute is returned and items are not
looked up.
See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
"""
try:
name = str(name)
except UnicodeError:
pass
else:
try:
value = getattr(obj, name)
except AttributeError:
pass
else:
if<mask>:
return environment.unsafe_undefined(obj, name)
return value
return environment.undefined(obj=obj, name=name) | False | environment.sandboxed and (not environment.is_safe_attribute(obj, name, value)) | isinstance(value, unicode) | 0.644638180732727 |
1063 | def reparameterize(self, latent_distribution_params):
if<mask>:
return self.rsample(latent_distribution_params)
else:
return latent_distribution_params[0] | False | self.training | self.use_rsample | 0.6568121910095215 |
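
The expected condition `self.training` selects the VAE reparameterization trick while training and the deterministic mean at eval time. A minimal sketch, assuming the distribution parameters are the mean and log-variance of a diagonal Gaussian:

```python
import torch

def reparameterize(mu, logvar, training=True):
    if training:
        std = torch.exp(0.5 * logvar)             # sigma from log-variance
        return mu + std * torch.randn_like(std)   # differentiable sample
    return mu                                     # deterministic at eval time
```
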
1064 | def _get_unique_endpoints(trainer_endpoints):
trainer_endpoints.sort()
ips = set()
unique_endpoints = set()
for endpoint in trainer_endpoints:
ip = endpoint.split(':')[0]
if<mask>:
continue
ips.add(ip)
unique_endpoints.add(endpoint)
logger.info('unique_endpoints {}'.format(unique_endpoints))
return unique_endpoints | True | ip in ips | ip in ips | 0.6804461479187012 |
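
The masked condition in `_get_unique_endpoints` keeps only one `ip:port` endpoint per IP; sorting first makes the survivor deterministic. A standalone check of that rule:

```python
def unique_by_ip(endpoints):
    seen_ips, unique = set(), set()
    for endpoint in sorted(endpoints):
        ip = endpoint.split(':')[0]
        if ip in seen_ips:
            continue  # another port on an already-seen IP is skipped
        seen_ips.add(ip)
        unique.add(endpoint)
    return unique

print(unique_by_ip(['10.0.0.1:6171', '10.0.0.1:6172', '10.0.0.2:6170']))
# {'10.0.0.1:6171', '10.0.0.2:6170'}
```
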
1065 | def contains(self, item, prereleases=None):
if<mask>:
prereleases = self.prereleases
item = self._coerce_version(item)
if item.is_prerelease and (not prereleases):
return False
return self._get_operator(self.operator)(item, self.version) | True | prereleases is None | prereleases is None | 0.6580885648727417 |
1066 | def contains(self, item, prereleases=None):
if prereleases is None:
prereleases = self.prereleases
item = self._coerce_version(item)
if<mask>:
return False
return self._get_operator(self.operator)(item, self.version) | True | item.is_prerelease and (not prereleases) | item.is_prerelease and (not prereleases) | 0.6483731865882874 |
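
The two `contains` rows implement PEP 440 prerelease gating: prereleases only satisfy a specifier when explicitly allowed. The released `packaging` library exposes the same behaviour, which is a convenient sanity check of the expected conditions:

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet('>=1.0')
print(spec.contains('1.1a1'))                    # False: prerelease excluded
print(spec.contains('1.1a1', prereleases=True))  # True: explicitly allowed
```
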
1067 | def advapi32_CryptHashData(jitter):
ret_ad, args = jitter.func_args_stdcall(['hhash', 'pbdata', 'dwdatalen', 'dwflags'])
if<mask>:
raise ValueError('unknown crypt context')
data = jitter.vm.get_mem(args.pbdata, args.dwdatalen)
log.debug('will hash %X', args.dwdatalen)
log.debug(repr(data[:16]) + '...')
winobjs.cryptcontext[args.hhash].h.update(data)
jitter.func_ret_stdcall(ret_ad, 1) | False | not args.hhash in winobjs.cryptcontext | args.hhash not in winobjs.cryptcontext | 0.6507587432861328 |
1068 | def layer_url_template(layer):
if<mask>:
return 'http://maps.yimg.com/hw/tile?&v=9&imgtype=png&s=256&x=%i&y=%i&z=%i'
elif layer == LAYER_SAT:
return 'http://maps.yimg.com/ae/ximg?v=9&t=s&imgtype=png&s=256&x=%i&y=%i&z=%i'
elif layer == LAYER_HYB:
return 'http://maps.yimg.com/hx/tl?v=9&t=h&imgtype=png&s=256&x=%i&y=%i&z=%i' | False | layer == LAYER_MAP | layer == LAYER_HALF | 0.6570522785186768 |
1069 | def layer_url_template(layer):
if layer == LAYER_MAP:
return 'http://maps.yimg.com/hw/tile?&v=9&imgtype=png&s=256&x=%i&y=%i&z=%i'
elif<mask>:
return 'http://maps.yimg.com/ae/ximg?v=9&t=s&imgtype=png&s=256&x=%i&y=%i&z=%i'
elif layer == LAYER_HYB:
return 'http://maps.yimg.com/hx/tl?v=9&t=h&imgtype=png&s=256&x=%i&y=%i&z=%i' | False | layer == LAYER_SAT | layer == LAYER_ADEPRECATE | 0.6540009379386902 |
1070 | def layer_url_template(layer):
if layer == LAYER_MAP:
return 'http://maps.yimg.com/hw/tile?&v=9&imgtype=png&s=256&x=%i&y=%i&z=%i'
elif layer == LAYER_SAT:
return 'http://maps.yimg.com/ae/ximg?v=9&t=s&imgtype=png&s=256&x=%i&y=%i&z=%i'
elif<mask>:
return 'http://maps.yimg.com/hx/tl?v=9&t=h&imgtype=png&s=256&x=%i&y=%i&z=%i' | False | layer == LAYER_HYB | layer == LAYER_TL | 0.6489716172218323 |
1071 | def set_node_certificate(self, pkey_settings):
"""Activates the current node certificate
Grabs chain.pem and pkey.pem from the <data folder>/inbox/ directory and
applies them to the node. chain.pem contains the chain encoded certificates
starting from the node certificate and ending with the last intermediate
certificate before cluster CA. pkey.pem contains the pem encoded private
key for node certificates. Both files should exist on the server before
this API is called."""
params = {}
if<mask>:
params['privateKeyPassphrase'] = pkey_settings
return self._post_json(f'{self.hostname}/node/controller/reloadCertificate', params) | False | pkey_settings | pKey_settings | 0.6525788903236389 |
1072 | def _get_destroyed_at_step(self, step_id: int):
destroyed = []
locations = []
for participant in self.locations.keys():
state_values = self.locations[participant]
is_destruction = state_values[step_id - 1] != NO_LOCATION and state_values[step_id] == NO_LOCATION
if<mask>:
destroyed.append(_summarize_participants(participant))
locations.append(state_values[step_id - 1])
return (destroyed, locations) | True | is_destruction | is_destruction | 0.6489428281784058 |
1073 | def wait_for_file_on_target(testcase, file_path, max_attempts=6):
for i in range(max_attempts):
err, retcode, msg = testcase.run_platform_command('ls %s' % file_path)
if<mask>:
break
if i < max_attempts:
import time
time.sleep(pow(2, i) * 0.25)
else:
testcase.fail('File %s not found even after %d attempts.' % (file_path, max_attempts))
return read_file_on_target(testcase, file_path) | False | err.Success() and retcode == 0 | retcode == 0 | 0.6474069356918335 |
1074 | def wait_for_file_on_target(testcase, file_path, max_attempts=6):
for i in range(max_attempts):
err, retcode, msg = testcase.run_platform_command('ls %s' % file_path)
if err.Success() and retcode == 0:
break
if<mask>:
import time
time.sleep(pow(2, i) * 0.25)
else:
testcase.fail('File %s not found even after %d attempts.' % (file_path, max_attempts))
return read_file_on_target(testcase, file_path) | False | i < max_attempts | msg == 'File found' | 0.6521081328392029 |
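
The two `wait_for_file_on_target` rows mask an exponential-backoff loop that sleeps 0.25 * 2**i between attempts. Note that the ground-truth guard `i < max_attempts` is always true inside `range(max_attempts)`, so the sample's failure branch is unreachable through it; a hedged sketch of the presumably intended pattern:

```python
import time

def retry_with_backoff(check, max_attempts=6, base_delay=0.25):
    for i in range(max_attempts):
        if check():
            return True
        if i < max_attempts - 1:            # don't sleep after the last try
            time.sleep(base_delay * (2 ** i))
    return False                            # caller decides how to fail
```
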
1075 | def get_file_type(filename):
extension = filename.split('.')[-1].lower()
if<mask>:
return 'image'
elif extension in VIDEO_FORMATS:
return 'video' | True | extension in IMAGE_FORMATS | extension in IMAGE_FORMATS | 0.654323399066925 |
1076 | def get_file_type(filename):
extension = filename.split('.')[-1].lower()
if extension in IMAGE_FORMATS:
return 'image'
elif<mask>:
return 'video' | True | extension in VIDEO_FORMATS | extension in VIDEO_FORMATS | 0.6530576944351196 |
1077 | def reparentChildren(self, newParent):
if<mask>:
newParent.childNodes[-1]._element.tail += self._element.text
else:
if not newParent._element.text:
newParent._element.text = ''
if self._element.text is not None:
newParent._element.text += self._element.text
self._element.text = ''
base.Node.reparentChildren(self, newParent) | True | newParent.childNodes | newParent.childNodes | 0.6500186920166016 |
1078 | def reparentChildren(self, newParent):
if newParent.childNodes:
newParent.childNodes[-1]._element.tail += self._element.text
else:
if<mask>:
newParent._element.text = ''
if self._element.text is not None:
newParent._element.text += self._element.text
self._element.text = ''
base.Node.reparentChildren(self, newParent) | True | not newParent._element.text | not newParent._element.text | 0.6457720398902893 |
1079 | def reparentChildren(self, newParent):
if newParent.childNodes:
newParent.childNodes[-1]._element.tail += self._element.text
else:
if not newParent._element.text:
newParent._element.text = ''
if<mask>:
newParent._element.text += self._element.text
self._element.text = ''
base.Node.reparentChildren(self, newParent) | True | self._element.text is not None | self._element.text is not None | 0.6460127830505371 |
1080 | def add(self, *items):
if<mask>:
return
p = self.conn.pipeline(transaction=False)
buckets = set()
for item in items:
bucket = self._get_bucket(item)
buckets.add(bucket)
p.sadd(bucket, item)
added = sum(p.execute())
if added:
self.conn.incr(self.counterkey, added)
self.conn.sadd(self.bucketskey, *list(buckets)) | False | len(items) == 0 | not items | 0.6493656039237976 |
1081 | def add(self, *items):
if len(items) == 0:
return
p = self.conn.pipeline(transaction=False)
buckets = set()
for item in items:
bucket = self._get_bucket(item)
buckets.add(bucket)
p.sadd(bucket, item)
added = sum(p.execute())
if<mask>:
self.conn.incr(self.counterkey, added)
self.conn.sadd(self.bucketskey, *list(buckets)) | True | added | added | 0.6615814566612244 |
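
The two `add` rows batch SADDs through a non-transactional pipeline; redis-py's `sadd` returns 1 per newly added member, so summing `p.execute()` counts genuinely new items before bumping the counter. A usage sketch (assumes redis-py and a reachable local Redis):

```python
import redis

conn = redis.Redis()
p = conn.pipeline(transaction=False)
p.sadd('bucket:0', 'a')
p.sadd('bucket:0', 'a')    # duplicate member: this SADD returns 0
added = sum(p.execute())   # 1 -> only genuinely new items are counted
```
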
1082 | def loss(self, batch, preds=None):
"""
Compute loss
Args:
batch (dict): Batch to compute loss on
preds (torch.Tensor | List[torch.Tensor]): Predictions.
"""
if<mask>:
self.criterion = self.init_criterion()
preds = self.forward(batch['img']) if preds is None else preds
return self.criterion(preds, batch) | False | not hasattr(self, 'criterion') | self.criterion is None | 0.6457927227020264 |
1083 | def OnSize(self, evt=None):
size = self.Size
if<mask>:
width, height = size
if KEEP_ASPECT_RATIO:
total_size = width * height
height = int(math.sqrt(total_size / self.aspect_ratio))
width = int(total_size / height)
self.SetSize((width, height))
self.canvas.SetSize((width, height)) | False | size[0] > 0 and size[1] > 0 | size is not None | 0.6486570835113525 |
1084 | def OnSize(self, evt=None):
size = self.Size
if size[0] > 0 and size[1] > 0:
width, height = size
if<mask>:
total_size = width * height
height = int(math.sqrt(total_size / self.aspect_ratio))
width = int(total_size / height)
self.SetSize((width, height))
self.canvas.SetSize((width, height)) | False | KEEP_ASPECT_RATIO | self.aspect_ratio != 1.0 | 0.6438031792640686 |
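
The two `OnSize` rows preserve the total pixel count while forcing a target aspect ratio: from w*h = A and w/h = r it follows that h = sqrt(A/r) and w = A/h. A standalone check of that arithmetic:

```python
import math

def fit_aspect(width, height, ratio):
    area = width * height                 # total pixel count is preserved
    h = int(math.sqrt(area / ratio))      # h = sqrt(A/r)
    w = int(area / h)                     # w = A/h, so w/h ~= r
    return w, h

print(fit_aspect(800, 200, 4 / 3))        # (462, 346); 462/346 ~ 4/3
```
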
1085 | def check(self):
if<mask>:
self.done()
return False
return self.info.running | False | self.info.create_time + self.timeout < time.time() and (not self.timeout == 0) | self.info.done | 0.6471694707870483 |
1086 | def __init__(self, config_map=None, downward_api=None, secret=None, service_account_token=None):
"""V1VolumeProjection - a model defined in Swagger"""
self._config_map = None
self._downward_api = None
self._secret = None
self._service_account_token = None
self.discriminator = None
if<mask>:
self.config_map = config_map
if downward_api is not None:
self.downward_api = downward_api
if secret is not None:
self.secret = secret
if service_account_token is not None:
self.service_account_token = service_account_token | True | config_map is not None | config_map is not None | 0.6526575088500977 |
1087 | def __init__(self, config_map=None, downward_api=None, secret=None, service_account_token=None):
"""V1VolumeProjection - a model defined in Swagger"""
self._config_map = None
self._downward_api = None
self._secret = None
self._service_account_token = None
self.discriminator = None
if config_map is not None:
self.config_map = config_map
if<mask>:
self.downward_api = downward_api
if secret is not None:
self.secret = secret
if service_account_token is not None:
self.service_account_token = service_account_token | True | downward_api is not None | downward_api is not None | 0.6535540819168091 |
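
The two `V1VolumeProjection` rows mask the None-guard that Swagger-generated models use so only explicitly supplied keyword arguments are assigned. A compact, hypothetical equivalent of that pattern:

```python
def apply_non_none(obj, **kwargs):
    # Assign only the kwargs the caller actually provided (non-None),
    # mirroring the generated constructor's per-field guards.
    for name, value in kwargs.items():
        if value is not None:
            setattr(obj, name, value)

class Projection:  # hypothetical stand-in for the generated model
    pass

p = Projection()
apply_non_none(p, config_map=None, secret='tls-secret')
assert not hasattr(p, 'config_map') and p.secret == 'tls-secret'
```
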