def delta(flag, ones_before, zeros_after):
    if flag == 0:
        return -ones_before
    else:
        return +zeros_after
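
# A mechanical check of the two branches above (the argument values are
# arbitrary): a zero contributes -ones_before, a one contributes +zeros_after.
assert delta(0, 3, 5) == -3
assert delta(1, 3, 5) == 5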
|
def chmod(path, mode, recursive=True, use_sudo=False):
    flag = '-R ' if recursive else ''
    # Options go before the operands so the command is portable beyond
    # GNU chmod (which also accepts a trailing -R).
    cmd = 'chmod %(flag)s%(mode)s %(path)s' % locals()
    _conditional_sudo(cmd, use_sudo)
|
def test_no_client_ip(self):
    out, err = self.run_netcmd(cmd_testparm, ["--client-name=foo"],
                               retcode=-1)
    self.assertEqual("", out)
    self.assertEqual(
        "ERROR: Both a DNS name and an IP address are "
        "required for the host access check\n", err)
|
def addError(self, test, err, details=None):
    self.failed_tests += 1
|
def test_all_negative_card_type_indicators(self):
    customer = Customer.create().customer
    result = CreditCard.create({
        "customer_id": customer.id,
        "number": CreditCardNumbers.CardTypeIndicators.No,
        "expiration_date": "05/2014",
        "options": {"verify_card": True}
    })
    credit_card = result.credit_card
    self.assertEqual(CreditCard.Debit.No, credit_card.debit)
    self.assertEqual(CreditCard.DurbinRegulated.No, credit_card.durbin_regulated)
    self.assertEqual(CreditCard.Prepaid.No, credit_card.prepaid)
    self.assertEqual(CreditCard.Payroll.No, credit_card.payroll)
    self.assertEqual(CreditCard.Commercial.No, credit_card.commercial)
    self.assertEqual(CreditCard.Healthcare.No, credit_card.healthcare)
|
def test_missing_test_with_plan_adds_error(self):
    # A file
    #   1..3
    #   ok first test
    #   not ok 3 third test
    # results in three tests, with the second being created
    self.tap.write('1..3\n')
    self.tap.write('ok first test\n')
    self.tap.write('not ok 3 third test\n')
    self.tap.seek(0)
    result = subunit.TAP2SubUnit(self.tap, self.subunit)
    self.assertEqual(0, result)
    self.assertEqual([
        'test test 1 first test',
        'success test 1 first test',
        'test test 2',
        'error test 2 [',
        'test missing from TAP output',
        ']',
        'test test 3 third test',
        'failure test 3 third test',
        ],
        self.subunit.getvalue().splitlines())
|
def set_up_done(exception_caught):
    """Set up is done, either clean up or run the test."""
    if self.exception_caught == exception_caught:
        fails.append(None)
        return clean_up()
    else:
        d = self._run_user(self.case._run_test_method, self.result)
        d.addCallback(fail_if_exception_caught)
        d.addBoth(tear_down)
        return d
|
def setUp(self):
    self.snapshots = []
    for x in range(50):
        cb = CanonicalBuilding()
        cb.save()
        b = SEEDFactory.building_snapshot(canonical_building=cb)
        b.extra_data = {
            'my new field': 'something extra'
        }
        b.save()
        self.snapshots.append(b)
|
def add_argument(self, *args, **kwargs):
    """
    This method supports the same args as ArgumentParser.add_argument(..)
    as well as the additional args below.
    Additional Args:
        env_var: If set, the value of this environment variable will override
            any config file or default values for this arg (but can itself
            be overridden on the commandline). Also, if auto_env_var_prefix is
            set in the constructor, this env var name will be used instead of
            the automatic name.
        is_config_file_arg: If True, this arg is treated as a config file path.
            This provides an alternative way to specify config files in place of
            the ArgumentParser(fromfile_prefix_chars=..) mechanism.
            Default: False
        is_write_out_config_file_arg: If True, this arg will be treated as a
            config file path, and, when it is specified, will cause
            configargparse to write all current commandline args to this file
            as config options and then exit.
            Default: False
    """
    env_var = kwargs.pop("env_var", None)
    is_config_file_arg = kwargs.pop(
        "is_config_file_arg", None) or kwargs.pop(
        "is_config_file", None)  # for backward compat.
    is_write_out_config_file_arg = kwargs.pop(
        "is_write_out_config_file_arg", None)
    action = self.original_add_argument_method(*args, **kwargs)
    action.is_positional_arg = not action.option_strings
    action.env_var = env_var
    action.is_config_file_arg = is_config_file_arg
    action.is_write_out_config_file_arg = is_write_out_config_file_arg
    if action.is_positional_arg and env_var:
        raise ValueError("env_var can't be set for a positional arg.")
    if action.is_config_file_arg and type(action) != argparse._StoreAction:
        raise ValueError("arg with is_config_file_arg=True must have "
                         "action='store'")
    if action.is_write_out_config_file_arg:
        error_prefix = "arg with is_write_out_config_file_arg=True "
        if type(action) != argparse._StoreAction:
            raise ValueError(error_prefix + "must have action='store'")
        if is_config_file_arg:
            raise ValueError(error_prefix + "can't also have "
                             "is_config_file_arg=True")
    return action
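
# A minimal usage sketch for the extensions documented above, assuming the
# configargparse package is installed; the option names and the MYAPP_PORT
# env var are invented for illustration.
import configargparse

parser = configargparse.ArgumentParser()
parser.add_argument('--port', type=int, default=8080,
                    env_var='MYAPP_PORT')  # env var beats config file/default
parser.add_argument('-c', '--my-config', is_config_file_arg=True,
                    help='path to a config file')
options = parser.parse_args([])
print(options.port)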
|
def test_calls_setUp_test_tearDown_in_sequence(self):
    # setUp, the test method and tearDown can all return
    # Deferreds. AsynchronousDeferredRunTest will make sure that each of
    # these are run in turn, only going on to the next stage once the
    # Deferred from the previous stage has fired.
    call_log = []
    a = defer.Deferred()
    a.addCallback(lambda x: call_log.append('a'))
    b = defer.Deferred()
    b.addCallback(lambda x: call_log.append('b'))
    c = defer.Deferred()
    c.addCallback(lambda x: call_log.append('c'))

    class SomeCase(TestCase):
        def setUp(self):
            super(SomeCase, self).setUp()
            call_log.append('setUp')
            return a

        def test_success(self):
            call_log.append('test')
            return b

        def tearDown(self):
            super(SomeCase, self).tearDown()
            call_log.append('tearDown')
            return c

    test = SomeCase('test_success')
    timeout = self.make_timeout()
    runner = self.make_runner(test, timeout)
    result = self.make_result()
    reactor = self.make_reactor()

    def fire_a():
        self.assertThat(call_log, Equals(['setUp']))
        a.callback(None)

    def fire_b():
        self.assertThat(call_log, Equals(['setUp', 'a', 'test']))
        b.callback(None)

    def fire_c():
        self.assertThat(
            call_log, Equals(['setUp', 'a', 'test', 'b', 'tearDown']))
        c.callback(None)

    reactor.callLater(timeout * 0.25, fire_a)
    reactor.callLater(timeout * 0.5, fire_b)
    reactor.callLater(timeout * 0.75, fire_c)
    runner.run(result)
    self.assertThat(
        call_log, Equals(['setUp', 'a', 'test', 'b', 'tearDown', 'c']))
|
def __init__(self, authToken=None, type=None):
    self.authToken = authToken
    self.type = type
|
def test_default_template_renders_image_alt(self):
    html = render_uploads('<<<an-image:alt=the alt text>>>')
    self.assertTrue('alt="the alt text"' in html)
|
def handle(self, request, data):
    self.cache_data(request, data)
    if constants.ENFORCE_SECURE and not request.is_secure():
        return self.render_to_response({'error': 'access_denied',
                                        'error_description': _("A secure connection is required."),
                                        'next': None},
                                       status=400)
    return HttpResponseRedirect(self.get_redirect_url(request))
|
def __init__(self, locale, urlnode):
    self.locale = locale
    self.urlnode = urlnode
|
def get_fieldsets(bases, attrs):
    """Get the fieldsets definition from the inner Meta class."""
    fieldsets = _get_meta_attr(attrs, 'fieldsets', None)
    if fieldsets is None:
        # grab the fieldsets from the first base class that has them
        for base in bases:
            fieldsets = getattr(base, 'base_fieldsets', None)
            if fieldsets is not None:
                break
    fieldsets = fieldsets or []
    return fieldsets
|
def _test(self, mock_foo):
    test.assertIsNot(Foo, original)
    test.assertIs(Foo, mock_foo)
    test.assertIsInstance(Foo, SomeClass)
|
def clean_password(self):
    password = self.cleaned_data.get('password')
    if not password:
        raise OAuthValidationError({'error': 'invalid_request'})
    return password
|
def __setitem__(self, name, value):
    self.values[name] = value
|
def to_python(self, value):
    if not value:
        return []
    # New in Django 1.6: value may come in as a string.
    # Instead of raising an `OAuthValidationError`, try to parse it and
    # ultimately return an empty list if nothing remains -- this will
    # eventually raise an `OAuthValidationError` in `validate` where
    # it should be anyway.
    if not isinstance(value, (list, tuple)):
        value = value.split(' ')
    # Split values into a list.
    return u' '.join([smart_unicode(val) for val in value]).split(u' ')
|
def testUnsavedOptions(self):
    c = makeConfig()
    s_option = "%s%s" % ('section1', 'foo2')
    c.set('section1', 'foo2', 'bar2')
    self.assertFalse(s_option in c._unsaved)
    c.remove_option('section1', 'foo2')
    self.assertFalse(s_option in c._unsaved)
    c.set_secure('section1', 'foo2', 'bar2')
    self.assertTrue(s_option in c._unsaved)
    self.assertEqual(c._unsaved[s_option][0], 'set')
    self.assertEqual(c._unsaved[s_option][1], 'bar2')
    c.remove_option('section1', 'foo2')
    self.assertTrue(s_option in c._unsaved)
    self.assertEqual(c._unsaved[s_option][0], 'delete')
    self.assertIsNone(c._unsaved[s_option][1])
|
def dns_type_flag(rec_type):
    rtype = rec_type.upper()
    if rtype == 'A':
        record_type = dnsp.DNS_TYPE_A
    elif rtype == 'AAAA':
        record_type = dnsp.DNS_TYPE_AAAA
    elif rtype == 'PTR':
        record_type = dnsp.DNS_TYPE_PTR
    elif rtype == 'NS':
        record_type = dnsp.DNS_TYPE_NS
    elif rtype == 'CNAME':
        record_type = dnsp.DNS_TYPE_CNAME
    elif rtype == 'SOA':
        record_type = dnsp.DNS_TYPE_SOA
    elif rtype == 'MX':
        record_type = dnsp.DNS_TYPE_MX
    elif rtype == 'SRV':
        record_type = dnsp.DNS_TYPE_SRV
    elif rtype == 'TXT':
        record_type = dnsp.DNS_TYPE_TXT
    elif rtype == 'ALL':
        record_type = dnsp.DNS_TYPE_ALL
    else:
        raise CommandError('Unknown type of DNS record %s' % rec_type)
    return record_type
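
# The same dispatch can be written table-driven; a sketch, assuming dnsp and
# CommandError are the samba modules imported by the surrounding file.
_DNS_TYPE_BY_NAME = {
    'A': dnsp.DNS_TYPE_A,
    'AAAA': dnsp.DNS_TYPE_AAAA,
    'PTR': dnsp.DNS_TYPE_PTR,
    'NS': dnsp.DNS_TYPE_NS,
    'CNAME': dnsp.DNS_TYPE_CNAME,
    'SOA': dnsp.DNS_TYPE_SOA,
    'MX': dnsp.DNS_TYPE_MX,
    'SRV': dnsp.DNS_TYPE_SRV,
    'TXT': dnsp.DNS_TYPE_TXT,
    'ALL': dnsp.DNS_TYPE_ALL,
}

def dns_type_flag_table(rec_type):
    try:
        return _DNS_TYPE_BY_NAME[rec_type.upper()]
    except KeyError:
        raise CommandError('Unknown type of DNS record %s' % rec_type)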
|
def __init__(self, parent=None):
    super(AddAccountWizard, self).__init__(
        parent,
        windowTitle="Sign In")
    # TODO - remove magic numbers
    self.setPage(0, AccountTypeWizardPage())
    self.setPage(1, GithubCredentialsWizardPage())
    self.setPage(2, Github2FAWizardPage())
    self.setPage(3, UserSummaryWizardPage())
|
def test_custom_cluster_name_bad(self, capsys):
    with pytest.raises(SystemExit):
        self.parser.parse_args('--cluster=/evil-this-should-not-be-created'.split())
    out, err = capsys.readouterr()
    assert ('--cluster: argument must start with a letter and contain only '
            'letters and numbers') in err
|
def test_repo_url_default_is_none(self):
    args = self.parser.parse_args('repo ceph host1'.split())
    assert args.repo_url is None
|
def test_get_repos_is_empty(self):
    cfg = conf.cephdeploy.Conf()
    cfg.sections = lambda: ['ceph-deploy-install']
    assert cfg.get_repos() == []
|
def print_new_acl(self, samdb, object_dn):
    desc = self.read_descriptor(samdb, object_dn)
    desc_sddl = desc.as_sddl(self.get_domain_sid(samdb))
    self.outf.write("new descriptor for %s:\n" % object_dn)
    self.outf.write(desc_sddl + "\n")
|
def test_defaults(newcfg):
    cfg = newcfg('host1')
    assert cfg.get('global', 'auth cluster required') == 'cephx'
    assert cfg.get('global', 'auth service required') == 'cephx'
    assert cfg.get('global', 'auth client required') == 'cephx'
|
def register_connection(alias='default', host='localhost', port=6379, **kwargs):
    global _connections
    kwargs.setdefault('parser_class', PythonParser)
    kwargs.setdefault('db', 0)
    pool = ConnectionPool(host=host, port=port, **kwargs)
    conn = redis.StrictRedis(connection_pool=pool)
    _connections[alias] = conn
    return conn
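
# Usage sketch (assumes redis-py is installed and that _connections is the
# module-level registry dict the function mutates); no socket is opened
# until a command is actually issued on the returned client.
cache = register_connection(alias='cache', host='localhost', db=1)
assert _connections['cache'] is cache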
|
def _juliacode(expr):
    code = sympy.printing.lambdarepr.lambdarepr(expr)
    return code.replace('**', '^')
|
def write_keyring(path, key, uid=-1, gid=-1):
    """ create a keyring file """
    # Note that we *must* keep the temp file around (hence `delete=False`):
    # otherwise we risk not being able to copy the contents from
    # one file system to the other.
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.write(key)
    tmp_file.close()
    keyring_dir = os.path.dirname(path)
    if not path_exists(keyring_dir):
        makedir(keyring_dir, uid, gid)
    shutil.move(tmp_file.name, path)
|
def setSegmentStartTangent(segment, tangent):
    if len(segment.points) == 2:
        # Convert a straight segment to a 4-point cubic bezier.
        p0, p3 = segment.points
        p2 = p0.midpoint(p3)
        p1 = p0.plus(tangent.scale(p0.distanceTo(p3) * 0.5))
        result = TFSSegment(p0, p1, p2, p3)
    elif len(segment.points) == 3:
        # Update a 3-point (quadratic) bezier.
        p0, p1, p2 = segment.points
        p1 = TFSIntersection.getIntersectPoint(p0,
                                               p0.plus(tangent),
                                               p1,
                                               p2)
        result = TFSSegment(p0, p1, p2)
    elif len(segment.points) == 4:
        # Update a 4-point cubic bezier.
        p0, p1, p2, p3 = segment.points
        p1 = p0.plus(tangent.scale(p0.distanceTo(p1)))
        result = TFSSegment(p0, p1, p2, p3)
    else:
        raise Exception('Invalid segment: ' + segment.description())
    # print 'updated segment:', segment.description(), 'to:', result.description()
    return result
|
@property
def urls(self):
    urls_or_func = self.settings.get('source_urls') or getattr(self.rule, 'source_urls', None)
    rval = urls_or_func
    # callable() behaves like the Python 2-only operator.isCallable(),
    # but also works on Python 3.
    if callable(urls_or_func):
        rval = urls_or_func(self)
    return rval or []
|
def parse(dataset):
    shapes = {}
    with codecs.open(dataset, 'r', encoding="utf8") as dataset:
        current_char = ''
        current_shape = []
        remaining_strokes = 1
        for l in dataset.readlines():
            letter = letter_re.search(l)
            if letter:
                current_char = letter.group('char')
                continue
            strokes = strokes_re.search(l)
            if strokes:
                remaining_strokes = int(strokes.group('nb'))
                continue
            points = points_re.search(l)
            if points:
                if remaining_strokes == 0:
                    raise RuntimeError("Found points, but no stroke is expected")
                remaining_strokes -= 1
                current_shape.append(list(map(int, points.group("coords").split())))
                if remaining_strokes == 0:
                    shapes.setdefault(current_char, []).append(current_shape)
                    current_shape = []
    return shapes
|
def concatenate_keyrings(args):
    """
    A helper to collect all keyrings into a single blob that will be
    used to inject it to mons with ``--mkfs`` on remote nodes.
    All keyring files to be concatenated must live in the given directory
    and end with ``.keyring``.
    """
    keyring_path = os.path.abspath(args.keyrings)
    LOG.info('concatenating keyrings from %s' % keyring_path)
    LOG.info('to seed remote monitors')
    keyrings = [
        os.path.join(keyring_path, f) for f in os.listdir(keyring_path)
        if os.path.isfile(os.path.join(keyring_path, f)) and f.endswith('.keyring')
    ]
    contents = []
    seen_sections = {}
    if not keyrings:
        path_from_arg = os.path.abspath(args.keyrings)
        raise RuntimeError('could not find any keyrings in %s' % path_from_arg)
    for keyring in keyrings:
        path = os.path.abspath(keyring)
        for section in keyring_parser(path):
            if not seen_sections.get(section):
                seen_sections[section] = path
                LOG.info('adding entity "%s" from keyring %s' % (section, path))
                with open(path) as k:
                    contents.append(k.read())
            else:
                LOG.warning('will not add keyring: %s' % path)
                LOG.warning('entity "%s" from keyring %s is a duplicate' % (section, path))
                LOG.warning('already present in keyring: %s' % seen_sections[section])
    return ''.join(contents)
|
def test_hierarchy_isa(self):
    """ Test hierarchical lookup.
    """
    cpt = SNOMEDConcept('315004001')  # Metastasis from malignant tumor of breast
    child = SNOMEDConcept('128462008')  # Metastatic neoplasm (disease)
    self.assertTrue(cpt.isa(child.code))
    child = SNOMEDConcept('363346000')  # Malignant neoplastic disease (disorder)
    self.assertTrue(cpt.isa(child))
    child = SNOMEDConcept('55342001')  # Neoplasia
    self.assertTrue(cpt.isa(child.code))
    child = SNOMEDConcept('408643008')  # Infiltrating duct carcinoma of breast
    self.assertFalse(cpt.isa(child.code))
|
def __init__(self, robotdef, geom, ifunc=None):
    if not ifunc:
        ifunc = _id
    self.rbtdef = robotdef
    self.geom = geom
    self.dof = self.rbtdef.dof

    def sym_skew(v):
        return sympy.Matrix([[0, -v[2], v[1]],
                             [v[2], 0, -v[0]],
                             [-v[1], v[0], 0]])

    if self.rbtdef._dh_convention == 'standard':
        # extend z and p so that z[-1] and p[-1] return values from base
        # frame
        z_ext = geom.z + [sympy.Matrix([0, 0, 1])]
        p_ext = geom.p + [sympy.zeros(3, 1)]
        self.Jp = list(range(self.rbtdef.dof))
        for l in range(self.rbtdef.dof):
            self.Jp[l] = sympy.zeros(3, self.rbtdef.dof)
            for j in range(l + 1):
                if self.rbtdef._links_sigma[j]:
                    self.Jp[l][0:3, j] = ifunc(z_ext[j - 1])
                else:
                    self.Jp[l][0:3, j] = ifunc(z_ext[j - 1].cross(
                        (p_ext[l] - p_ext[j - 1])).reshape(3, 1))
        self.Jo = list(range(self.rbtdef.dof))
        for l in range(self.rbtdef.dof):
            self.Jo[l] = sympy.zeros(3, self.rbtdef.dof)
            for j in range(l + 1):
                if self.rbtdef._links_sigma[j]:
                    self.Jo[l][0:3, j] = sympy.zeros(3, 1)
                else:
                    self.Jo[l][0:3, j] = ifunc(z_ext[j - 1])
    elif self.rbtdef._dh_convention == 'modified':
        self.Jp = list(range(self.rbtdef.dof))
        for l in range(self.rbtdef.dof):
            self.Jp[l] = sympy.zeros(3, self.rbtdef.dof)
            for j in range(l + 1):
                if self.rbtdef._links_sigma[j]:
                    self.Jp[l][0:3, j] = ifunc(geom.z[j])
                else:
                    self.Jp[l][0:3, j] = ifunc(geom.z[j].cross(
                        (geom.p[l] - geom.p[j])).reshape(3, 1))
        self.Jo = list(range(self.rbtdef.dof))
        for l in range(self.rbtdef.dof):
            self.Jo[l] = sympy.zeros(3, self.rbtdef.dof)
            for j in range(l + 1):
                if self.rbtdef._links_sigma[j]:
                    self.Jo[l][0:3, j] = sympy.zeros(3, 1)
                else:
                    self.Jo[l][0:3, j] = ifunc(geom.z[j])
    self.J = list(range(self.rbtdef.dof))
    for l in range(self.rbtdef.dof):
        self.J[l] = self.Jp[l].col_join(self.Jo[l])
    self.Jcp = list(range(self.rbtdef.dof))
    self.Jco = self.Jo
    for l in range(self.rbtdef.dof):
        self.Jcp[l] = ifunc(self.Jp[l] - sym_skew(
            geom.R[l] * sympy.Matrix(self.rbtdef.l[l])) * self.Jo[l])
    self.Jc = list(range(self.rbtdef.dof))
    for l in range(self.rbtdef.dof):
        self.Jc[l] = self.Jcp[l].col_join(self.Jco[l])
|
def __unicode__(self):
    if self.event_id:
        text = '{0} => {1}'.format(self.event, self.state)
    else:
        text = unicode(self.state)
    if self.duration:
        text = '{0} ({1})'.format(text, self.natural_duration)
    elif self.in_transition():
        text = '{0} (in transition)'.format(text)
    return text
|
@skipIf(django.VERSION < (1, 8,), "This test needs Django >=1.8")
def test_polymorphic__complex_aggregate(self):
    """ test (complex expression on) aggregate (should work for annotate either) """
    Model2A.objects.create(field1='A1')
    Model2B.objects.create(field1='A1', field2='B2')
    Model2B.objects.create(field1='A1', field2='B2')
    # aggregate using **kwargs
    result = Model2A.objects.aggregate(
        cnt_a1=Count(Case(When(field1='A1', then=1))),
        cnt_b2=Count(Case(When(Model2B___field2='B2', then=1))),
    )
    self.assertEqual(result, {'cnt_b2': 2, 'cnt_a1': 3})
    # aggregate using *args
    # we have to set the default alias or django won't accept a complex
    # expression on aggregate/annotate
    def ComplexAgg(expression):
        complexagg = Count(expression) * 10
        complexagg.default_alias = 'complexagg'
        return complexagg
    with self.assertRaisesMessage(AssertionError, 'PolymorphicModel: annotate()/aggregate(): ___ model lookup supported for keyword arguments only'):
        Model2A.objects.aggregate(ComplexAgg('Model2B___field2'))
|
def _log_error(self, bundle, url, e):
    if self.logger:
        self.logger.error('Error when handling bundle: %s, url: %s' % (
            str(bundle), str(url)))
        self.logger.exception(e)
    if url == getattr(bundle, 'error_url', None):
        bundle.error_times = getattr(bundle, 'error_times', 0) + 1
    else:
        bundle.error_times = 0
        bundle.error_url = url
    self.counter_client.local_inc(self.ip, self.id_,
                                  'error_urls', 1)
    self.counter_client.global_inc('error_urls', 1)
|
def tokenize(self, string):
    tokens = string.split(' ')
    wrapper = self._fmt_wrapper()
    _tokens, count = '', len(tokens) - 1
    for k, token in enumerate(tokens):
        _tokens += self._fmt(token, k, count)
    return wrapper.format(_tokens)
|
def is_successor(self, prev, next):
    return prev < next
|
@app.route('/user/<user_id>')
def user_shipments(user_id):
    print('Getting shipments for user: {}'.format(user_id))
    response = jsonify({
        'data': {
            'shipments': [
                {'name': 'teddy bear 123', 'arrival_date': '12/25/2015'},
                {'name': 'chocolate cookies', 'arrival_date': '12/23/2015'},
            ]
        }
    })
    return response
|
def interpret(self, code):
    def _print(arg):
        print(arg)

    def cls(*args):
        print('\n\n\n\n\n')

    def sleep(arg):
        return time.sleep(int(arg))

    control_table = dict(
        _print=_print,
        sleep=sleep,
        cls=cls,
    )
    for token in code:
        if len(token) > 2:
            func, args, signature = token
            print('Interpreter token signature: {}'.format(signature))
            if func == 'print':
                print(args)
            else:
                if func in control_table:
                    yield control_table[func](args)
|
def __init__(self, content):
    self.content = content
|
def is_unauthorized(self, request, *args, **kwargs):
    if request.method != 'POST':
        return super(Root, self).is_unauthorized(request, *args, **kwargs)
|
def message_action(self, message):
    return '[Email Message]: {}'.format(message)
|
def _check_valid(self, node, data):
    total = sum(data['edges'].values())
    # Edge probabilities must sum to 1 (e.g. 0.4, 0.5, 0.1).
    if total != 1:
        raise InvalidProbabilityValue
|
def test_delete(self):
    ctx1 = DataContext(user=self.user, name='Context 1')
    ctx1.save()
    ctx2 = DataContext(user=self.user, name='Context 2')
    ctx2.save()
    ctx3 = DataContext(user=self.user, name='Context 3', session=True)
    ctx3.save()
    response = self.client.get('/api/contexts/',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, codes.ok)
    self.assertEqual(len(json.loads(response.content)), 3)
    response = self.client.delete('/api/contexts/{0}/'.format(ctx1.pk))
    self.assertEqual(response.status_code, codes.no_content)
    response = self.client.get('/api/contexts/',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, codes.ok)
    self.assertEqual(len(json.loads(response.content)), 2)
    # Cannot delete the session context
    response = self.client.delete('/api/contexts/{0}/'.format(ctx3.pk))
    self.assertEqual(response.status_code, codes.bad_request)
    response = self.client.get('/api/contexts/',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, codes.ok)
    self.assertEqual(len(json.loads(response.content)), 2)
|
def get_coins_for_address(self, address_rec):
    """Given an address <address_rec>, return the list of coins
    straight from the DB. Note this specifically does NOT return
    COIN objects.
    """
    color_set = self.color_set
    addr_color_set = address_rec.get_color_set()
    all_coins = filter(
        self.coin_matches_filter,
        self.coin_manager.get_coins_for_address(address_rec.get_address()))
    cdata = self.model.ccc.colordata
    address_is_uncolored = addr_color_set.color_id_set == set([0])
    if address_is_uncolored:
        for coin in all_coins:
            coin.address_rec = address_rec
            coin.colorvalues = [SimpleColorValue(colordef=UNCOLORED_MARKER,
                                                 value=coin.value)]
        return all_coins
    for coin in all_coins:
        coin.address_rec = address_rec
        coin.colorvalues = None
        try:
            coin.colorvalues = cdata.get_colorvalues(
                addr_color_set.color_id_set, coin.txhash, coin.outindex)
        except Exception:
            raise

    def relevant(coin):
        cvl = coin.colorvalues
        if cvl is None:
            return False  # None indicates failure
        if cvl == []:
            return color_set.has_color_id(0)
        for cv in cvl:
            if color_set.has_color_id(cv.get_color_id()):
                return True
        return False

    return filter(relevant, all_coins)
|
def dec_to_bcd_8421(num):
    """Convert a decimal to binary, and decompress into Binary Coded Decimal.
    Pads each digit on the left to enforce a 4-bit "nibble" on all digits.
    Uses 8421 notation [see wikipedia.org/wiki/Binary-coded_decimal]"""
    bcd, binary, decimals = '', '', ''
    for digit in str(num):
        binval = encoders.dec_to_bin(int(digit))
        binary += '{}{}'.format(binval, ' ' * (4 - len(binval) + 1))
        if len(binval) < 4:
            binval = binval.zfill(4)
        bcd += '{} '.format(binval)
        decimals += digit + (' ' * 4)
    _show_bcd(num, decimals, binary, bcd)
    return bcd
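
# A self-contained cross-check of the 8421 encoding above, using only the
# standard library: each decimal digit becomes a zero-padded 4-bit nibble.
def dec_to_bcd_simple(num):
    return ' '.join(format(int(d), '04b') for d in str(num))

assert dec_to_bcd_simple(409) == '0100 0000 1001'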
|
def get_data(self):
    """Get this object as a JSON/Storage compatible dict.
    Useful for storage and persistence.
    """
    raw = self.prefix + to_bytes_32(self.rawPrivKey)
    return {"color_set": self.color_set.get_data(),
            "address_data": b2a_hashed_base58(raw)}
|
def add(self, color_id, txhash, outindex, value, label):
    self.execute(
        self.queries['add'], (color_id, txhash, outindex, value, label))
|
def clearBackground(event, wname=wname):
    widget = getattr(self, wname)
    widget.setStyleSheet('')
    widget.__class__.focusInEvent(widget, event)
|
def __init__(self, ewctrl, orig_offer, my_offer):
    super(MyEProposal, self).__init__(make_random_id(),
                                      ewctrl, orig_offer)
    self.my_offer = my_offer
    if not orig_offer.matches(my_offer):
        raise Exception("Offers are incongruent!")
    self.etx_spec = ewctrl.make_etx_spec(self.offer.B, self.offer.A)
    self.etx_data = None
|
def intersects(self, other):
    """Given another color set <other>, returns whether
    they share a color in common.
    """
    return len(self.color_id_set & other.color_id_set) > 0
|
def generateInt(self, minimum=0, maximum=100):
    """ Generates random integers """
    return random.randint(minimum, maximum)
|
def __call__(self, *args, **kwargs):
    """Compile-time decorator (turn on the tool in config).
    For example::
        @tools.proxy()
        def whats_my_base(self):
            return cherrypy.request.base
        whats_my_base.exposed = True
    """
    if args:
        raise TypeError("The %r Tool does not accept positional "
                        "arguments; you must use keyword arguments."
                        % self._name)

    def tool_decorator(f):
        if not hasattr(f, "_cp_config"):
            f._cp_config = {}
        subspace = self.namespace + "." + self._name + "."
        f._cp_config[subspace + "on"] = True
        for k, v in kwargs.items():
            f._cp_config[subspace + k] = v
        return f
    return tool_decorator
|
def dfs(term, recursive=False, visited={}, **kwargs):
    # Note: the mutable default for `visited` doubles as the cycle guard
    # shared across the recursive calls.
    if term in visited:  # Break on cyclic relations.
        return []
    visited[term], a = True, []
    if dict.__contains__(self, term):
        a = self[term][0].keys()
    for classifier in self.classifiers:
        a.extend(classifier.parents(term, **kwargs) or [])
    if recursive:
        for w in a:
            a += dfs(w, recursive, visited, **kwargs)
    return a
|
def f(self, other):
    other = as_value_expr(other)
    if not isinstance(other, BooleanValue):
        raise TypeError(other)
    op = klass(self, other)
    return op.to_expr()
|
def _get_option(pat, silent=False):
    key = _get_single_key(pat, silent)
    # walk the nested dict
    root, k = _get_root(key)
    return root[k]
|
def update(self, top=0.5, mutation=0.5):
    """ Updates the population by selecting the top fittest candidates,
    and recombining them into a new generation.
    """
    # 1) Selection.
    # Choose the top fittest candidates.
    # Including weaker candidates can be beneficial (diversity).
    p = sorted(self.population, key=self.fitness, reverse=True)
    p = p[:max(2, int(round(len(p) * top)))]
    # 2) Reproduction.
    # Choose random parents for crossover.
    # Mutation avoids local optima by maintaining genetic diversity.
    g = []
    n = len(p)
    for candidate in self.population:
        i = randint(0, n - 1)
        j = choice([x for x in xrange(n) if x != i]) if n > 1 else 0
        g.append(self.combine(p[i], p[j]))
        if random() <= mutation:
            g[-1] = self.mutate(g[-1])
    self.population = g
    self.generation += 1
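
# A self-contained toy host for the update() loop above: a hypothetical
# class evolving bit strings toward all ones. Assumes the same Python 2-era
# environment as update() (it relies on xrange) and that update() is reused
# as the method providing the selection/crossover/mutation cycle.
from random import choice, randint, random

class OneMaxGA(object):
    def __init__(self, size=20, length=12):
        self.population = [[randint(0, 1) for _ in range(length)]
                           for _ in range(size)]
        self.generation = 0

    update = update  # reuse the function defined above as a method

    def fitness(self, candidate):
        return sum(candidate)            # count of 1-bits

    def combine(self, a, b):
        cut = randint(1, len(a) - 1)     # one-point crossover
        return a[:cut] + b[cut:]

    def mutate(self, candidate):
        i = randint(0, len(candidate) - 1)
        candidate[i] = 1 - candidate[i]  # flip a single bit
        return candidate

ga = OneMaxGA()
for _ in range(25):
    ga.update(top=0.5, mutation=0.3)
print(max(ga.fitness(c) for c in ga.population))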
|
def __init__(self, cases, results, default):
    assert len(cases) == len(results)
    ValueOp.__init__(self, cases, results, default)
|
def find_base_table(expr):
    if isinstance(expr, TableExpr):
        return expr
    for arg in expr.op().flat_args():
        if isinstance(arg, Expr):
            r = find_base_table(arg)
            if isinstance(r, TableExpr):
                return r
|
def _get_root(key):
    path = key.split('.')
    cursor = _global_config
    for p in path[:-1]:
        cursor = cursor[p]
    return cursor, path[-1]
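
# Self-contained illustration of the dotted-key walk above (the config
# contents here are invented):
_global_config = {'display': {'max_rows': 60}}
root, k = _get_root('display.max_rows')
assert root[k] == 60
root[k] = 100  # _get_option-style callers read/mutate via root[k]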
|
def test_zeroifnull(self):
    dresult = self.alltypes.double_col.zeroifnull()
    iresult = self.alltypes.int_col.zeroifnull()
    assert type(dresult.op()) == ops.ZeroIfNull
    assert type(dresult) == ir.DoubleArray
    # Impala upconverts all ints to bigint. Hmm.
    assert type(iresult) == ir.Int64Array
|
def reflect(object, quantify=True, replace=readable_types):
    """ Returns the type of each object in the given object.
    - For modules, this means classes and functions etc.
    - For list and tuples, means the type of each item in it.
    - For other objects, means the type of the object itself.
    """
    _type = lambda object: type(object).__name__
    types = []
    # Classes and modules with a __dict__ attribute listing methods, functions etc.
    if hasattr(object, "__dict__"):
        # Function and method objects.
        if _type(object) in ("function", "instancemethod"):
            types.append(_type(object))
        # Classes and modules.
        else:
            for v in object.__dict__.values():
                try:
                    types.append(str(v.__classname__))
                except Exception:
                    # Not a class after all (some stuff like ufunc in Numeric).
                    types.append(_type(v))
    # Lists and tuples can consist of several types of objects.
    elif isinstance(object, (list, tuple, set)):
        types += [_type(x) for x in object]
    # Dictionaries have keys pointing to objects.
    elif isinstance(object, dict):
        types += [_type(k) for k in object]
        types += [_type(v) for v in object.values()]
    else:
        types.append(_type(object))
    # Clean up type strings.
    m = {}
    for i in range(len(types)):
        k = types[i]
        # Execute the regular expressions once only,
        # next time we'll have the conversion cached.
        if k not in m:
            for a, b in replace:
                types[i] = re.sub(a, b, types[i])
            m[k] = types[i]
        types[i] = m[k]
    if not quantify:
        if not isinstance(object, (list, tuple, set, dict)) and not hasattr(object, "__dict__"):
            return types[0]
        return types
    return count(types, plural={"built-in function": "built-in functions"})
|
def find_lemmata(self, tokens, **kwargs):
    return find_lemmata(tokens)
|
def test_dtype_datetime64(self):
    df = pd.DataFrame({
        'col': [pd.Timestamp('2010-11-01 00:01:00'),
                pd.Timestamp('2010-11-01 00:02:00.1000'),
                pd.Timestamp('2010-11-01 00:03:00.300000')]})
    inferred = pandas_to_ibis_schema(df)
    expected = ibis.schema([('col', 'timestamp')])
    assert inferred == expected
|
def _timestamp_delta(translator, expr):
    op = expr.op()
    arg, offset = op.args
    formatted_arg = translator.translate(arg)
    return _timestamp_format_offset(offset, formatted_arg)
|
def import_model(path):
    """
    Passed a string "app.Model", will return Model registered inside app.
    """
    split = path.split('.', 1)
    return get_model(split[0], split[1])
|
def _case_projection_fuse_filter(self):
    # Probably test this during the evaluation phase. In SQL, "fusable"
    # table operations will be combined together into a single select
    # statement
    #
    # see ibis #71 for more on this
    t = ibis.table([
        ('a', 'int8'),
        ('b', 'int16'),
        ('c', 'int32'),
        ('d', 'int64'),
        ('e', 'float'),
        ('f', 'double'),
        ('g', 'string'),
        ('h', 'boolean')
    ], 'foo')
    proj = t['a', 'b', 'c']
    # Rewrite a little more aggressively here
    expr1 = proj[t.a > 0]
    # at one point these yielded different results
    filtered = t[t.a > 0]
    expr2 = filtered[t.a, t.b, t.c]
    expr3 = filtered.projection(['a', 'b', 'c'])
    return expr1, expr2, expr3
|
def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
        fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
        return
    iprot.readStructBegin()
    while True:
        (fname, ftype, fid) = iprot.readFieldBegin()
        if ftype == TType.STOP:
            break
        if fid == 0:
            if ftype == TType.STRUCT:
                self.success = impala._thrift_gen.Status.ttypes.TStatus()
                self.success.read(iprot)
            else:
                iprot.skip(ftype)
        elif fid == 1:
            if ftype == TType.STRUCT:
                self.error = impala._thrift_gen.beeswax.ttypes.BeeswaxException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
        else:
            iprot.skip(ftype)
        iprot.readFieldEnd()
    iprot.readStructEnd()
|
def uint8(self, val):
    write_uint8(self.buf, val)
    return self
|
def get_container(self, container_name):
    """
    >>> driver = DummyStorageDriver('key', 'secret')
    >>> driver.get_container('unknown') #doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ContainerDoesNotExistError:
    >>> container = driver.create_container(container_name='test container 1')
    >>> container
    <Container: name=test container 1, provider=Dummy Storage Provider>
    >>> container.name
    'test container 1'
    >>> driver.get_container('test container 1')
    <Container: name=test container 1, provider=Dummy Storage Provider>
    """
    if container_name not in self._containers:
        raise ContainerDoesNotExistError(driver=self, value=None,
                                         container_name=container_name)
    return self._containers[container_name]['container']
|
def test_where_with_between(self):
    t = self.con.table('alltypes')
    what = t.filter([t.a > 0, t.f.between(0, 1)])
    result = to_sql(what)
    expected = """SELECT *
FROM alltypes
WHERE `a` > 0 AND
  `f` BETWEEN 0 AND 1"""
    assert result == expected
|
def recv_Cancel(self):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = Cancel_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.error is not None:
        raise result.error
    raise TApplicationException(TApplicationException.MISSING_RESULT, "Cancel failed: unknown result")
|
def _get_id(self, element):
    return element.get('id')
|
def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
        fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
        return
    iprot.readStructBegin()
    while True:
        (fname, ftype, fid) = iprot.readFieldBegin()
        if ftype == TType.STOP:
            break
        if fid == 1:
            if ftype == TType.STRING:
                self.hostname = iprot.readString()
            else:
                iprot.skip(ftype)
        elif fid == 2:
            if ftype == TType.I32:
                self.port = iprot.readI32()
            else:
                iprot.skip(ftype)
        else:
            iprot.skip(ftype)
        iprot.readFieldEnd()
    iprot.readStructEnd()
|
def find_link_by_text(self, text):
    return self.find_by_xpath(
        '//a[text()="%s"]' % text, original_find="link by text", original_query=text)
|
def find_by_value(self, value):
    return self.find_by_xpath('//*[@value="%s"]' % value, original_find='value', original_query=value)
|
def refresh(self, *args, **kwargs):
    """
    Fetch the result SYNCHRONOUSLY and populate the cache
    """
    result = self.fetch(*args, **kwargs)
    self.cache_set(self.key(*args, **kwargs),
                   self.expiry(*args, **kwargs),
                   result)
    return result
|
@mod.route('/threads/vote/', methods=['POST'])
@requires_login
def vote_thread():
    """
    Submit votes via ajax
    """
    thread_id = int(request.form['thread_id'])
    user_id = g.user.id
    if not thread_id:
        abort(404)
    thread = Thread.query.get_or_404(int(thread_id))
    vote_status = thread.vote(user_id=user_id)
    return jsonify(new_votes=thread.votes, vote_status=vote_status)
|
def get_denominator_csv(self):
    output = io.StringIO()
    writer = csv.writer(output, quoting=csv.QUOTE_NONNUMERIC)
    writer.writerow(["year", "month", "officers out on service"])
    values = sorted(self.denominator_values,
                    key=lambda x: (x.year, x.month))
    for value in values:
        row = [
            value.year,
            value.month,
            value.officers_out_on_service
        ]
        writer.writerow(row)
    return output.getvalue()
|
def testIncreasingCTime(self):
    # This test will check 200 different years, every month, every day,
    # every hour, every minute, every second, and every weekday, using
    # a delta of more or less 1 year, 1 month, 1 day, 1 hour, 1 minute
    # and 1 second.
    delta = timedelta(days=365 + 31 + 1, seconds=1 + 60 + 60 * 60)
    dt = datetime(1900, 1, 1, 0, 0, 0, 0)
    for i in range(200):
        self.assertEqual(parse(dt.ctime()), dt)
        dt += delta
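
# The round-trip property exercised above, in isolation (assumes the
# python-dateutil package, whose parser understands ctime()-style strings):
from datetime import datetime
from dateutil.parser import parse

dt = datetime(1900, 1, 1)
assert parse(dt.ctime()) == dt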
|
def _NextButtonActivated(self, event):
    year, month, day, id = self._GetEntryFormKeys()
    nextid = self.entries.get_next_id(year, month, day, id)
    self._SetEntryFormDate(year, month, day, nextid)
|
def __delitem__(self, key):
    """removes item with given key"""
    n = self.d[key]
    n.next.prev = n.prev
    n.prev.next = n.next
    del self.d[key]
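
# A self-contained illustration of the unlink step above: the node's
# neighbours are re-pointed at each other before the mapping entry goes
# away (minimal stand-in node class; the real container is not shown here).
class _Node(object):
    def __init__(self, value):
        self.value = value
        self.prev = self.next = None

a, b, c = _Node(1), _Node(2), _Node(3)
a.next, b.prev = b, a
b.next, c.prev = c, b
# unlink b, exactly as __delitem__ does:
b.next.prev = b.prev
b.prev.next = b.next
assert a.next is c and c.prev is a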
|
def __init__(self, *args, **kwargs):
    webcamPreviewDialog.__init__(self, *args, **kwargs)
    self.parent = self.GetParent().GetParent()
    self.timer = Timer(self.callback)
    self.timer.Start(250)
    self.temppath = self.GetParent().temppath
    self.previewokbutton.Bind(wx.EVT_BUTTON, self.close)
|
def _exec_dockerinspect_slow(long_id):
    try:
        proc = subprocess.Popen('docker inspect %s' % long_id,
                                shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        inspect_data = proc.stdout.read().strip()
        (out, err) = proc.communicate()
        if proc.returncode != 0:
            # There is no docker command (or it just failed).
            raise RuntimeError('Could not run docker command')
        inspect = json.loads(inspect_data)[0]
        _reformat_inspect(inspect)
        return inspect
    except Exception:
        logger.error('Error executing dockerinspect', exc_info=True)
        raise
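
# A sketch of the same call with an argument list instead of a shell string,
# which sidesteps quoting issues (assumes Python 3.7+ and a docker CLI on
# PATH; the _reformat_inspect post-processing from above is omitted here).
import json
import subprocess

def _exec_dockerinspect(long_id):
    proc = subprocess.run(['docker', 'inspect', long_id],
                          capture_output=True, text=True)
    if proc.returncode != 0:
        raise RuntimeError('Could not run docker command')
    return json.loads(proc.stdout)[0]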
|
def EntryChangedListener(self, tag, entry, add=True):
    """Callback for TKEntries.store_entry()."""
    year, month, day = entry.get_date()
    id = entry.get_id()
    wx.BeginBusyCursor()
    try:
        stack = self.GetTagStack(tag, year, month, day, id)
        tag_path = map(unicode, tag.split('/'))
        expected_stack_len = len(tag_path) + 2  # root + tag pieces + entry
        if add == False:
            if len(stack) == expected_stack_len:
                self.Prune(stack[-1])
        else:
            newtag = None
            for i in range(len(tag_path)):
                if i == 0:
                    newtag = tag_path[i]
                else:
                    newtag = newtag + '/' + tag_path[i]
                if len(stack) == i + 1:
                    data = wx.TreeItemData(TKEntryKey(None, None,
                                                      None, None,
                                                      newtag))
                    stack.append(self.AppendItem(stack[i], tag_path[i],
                                                 -1, -1, data))
                    self.SortChildren(stack[i])
            subject = entry.get_subject()
            if len(stack) == i + 2:
                data = wx.TreeItemData(TKEntryKey(year, month, day,
                                                  id, newtag))
                stack.append(self.AppendItem(stack[i + 1],
                                             self._ItemLabel(day, month,
                                                             year,
                                                             subject),
                                             -1, -1, data))
                self.SortChildren(stack[i + 1])
            else:
                self.SetItemText(stack[i + 2],
                                 self._ItemLabel(day, month, year,
                                                 subject))
    finally:
        wx.EndBusyCursor()
|
def __init__(self, parent, MainFrame, id, title, workingdir):
    wx.Frame.__init__(self, parent, -1, title, size=(1, 1),
                      style=wx.FRAME_NO_TASKBAR | wx.NO_FULL_REPAINT_ON_RESIZE)
    self.tbicon = TaskBarIcon(self, MainFrame, workingdir)
    self.Show(True)
    self.MainFrame = MainFrame
|
def serve(self, endpoint):
    """Serves the application at the given *endpoint*. The *endpoint* must be a tuple (<host>, <port>)."""
    return Server.serve(endpoint, self.handle_connection)
|
def test_post_ois_data_near_match_does_not_update(self, testapp):
    ''' OIS data with the same ID but different details creates a new record.
    '''
    # Set up the extractor
    department = Department.create(name="Good Police Department", short_name="GPD", load_defaults=False)
    extractor, envs = Extractor.from_department_and_password(department=department, password="password")
    # Set the correct authorization
    testapp.authorization = ('Basic', (extractor.username, 'password'))
    # Get a generated list of OIS descriptions from the JSON test client
    test_client = JSONTestClient()
    ois_data = test_client.get_prebaked_ois(last=1)
    # post the json to the OIS URL
    response = testapp.post_json("/data/OIS", params={'month': 0, 'year': 0, 'data': ois_data})
    # assert that we got the expected response
    assert response.status_code == 200
    assert response.json_body['updated'] == 0
    assert response.json_body['added'] == 1
    # Get the second pre-baked ois incident
    updated_ois_data = test_client.get_prebaked_ois(first=1, last=2)
    # Swap in the opaque ID from the first ois incident
    updated_ois_data[0]["opaqueId"] = ois_data[0]["opaqueId"]
    # post the json to the ois URL
    response = testapp.post_json("/data/OIS", params={'month': 0, 'year': 0, 'data': updated_ois_data})
    # assert that we got the expected response
    assert response.status_code == 200
    assert response.json_body['updated'] == 0
    assert response.json_body['added'] == 1
    # Both OIS records are now in the database.
    all_ois = OfficerInvolvedShooting.query.all()
    assert len(all_ois) == 2
|
def test_mode_to_str(self):
    m = meta.Metadata()
    modes = [
        stat.S_ISUID,
        stat.S_ISGID,
        stat.S_ISVTX,
        stat.S_IRUSR,
        stat.S_IWUSR,
        stat.S_IXUSR,
        stat.S_IRGRP,
        stat.S_IWGRP,
        stat.S_IXGRP,
        stat.S_IROTH,
        stat.S_IWOTH,
        stat.S_IXOTH,
    ]
    for i in modes:
        s = m.mode_to_str(i)
        assert m.str_to_mode(s) == i
    for i in range(100):
        n = random.randint(0, len(modes))
        mode = 0
        for j in range(n):
            mode |= random.choice(modes)
        s = m.mode_to_str(mode)
        assert m.str_to_mode(s) == mode
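
# For reference, the standard library provides the string direction of this
# conversion (Python 3.3+): stat.filemode() renders a raw mode, e.g. a
# regular file with 0o755 permissions:
import stat
assert stat.filemode(0o100755) == '-rwxr-xr-x'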
|
def _crawl_config_files(
    self,
    root_dir='/',
    exclude_dirs=['proc', 'mnt', 'dev', 'tmp'],
    root_dir_alias=None,
    known_config_files=[],
    discover_config_files=False,
):
    assert(self.crawl_mode is not Modes.OUTCONTAINER)
    saved_args = locals()
    logger.debug('Crawling config files: %s' % (saved_args))
    accessed_since = self.feature_epoch
    try:
        assert os.path.isdir(root_dir)
        if root_dir_alias is None:
            root_dir_alias = root_dir
        exclude_dirs = [os.path.join(root_dir, d) for d in
                        exclude_dirs]
        exclude_regex = r'|'.join([fnmatch.translate(d) for d in
                                   exclude_dirs]) or r'$.'
        known_config_files[:] = [os.path.join(root_dir, f) for f in
                                 known_config_files]
        known_config_files[:] = [f for f in known_config_files
                                 if not re.match(exclude_regex, f)]
        config_file_set = set()
        for fpath in known_config_files:
            if os.path.exists(fpath):
                lstat = os.lstat(fpath)
                if (lstat.st_atime > accessed_since or
                        lstat.st_ctime > accessed_since):
                    config_file_set.add(fpath)
    except Exception as e:
        logger.error('Error examining %s' % root_dir, exc_info=True)
        raise CrawlError(e)
    try:
        if discover_config_files:
            # Walk the directory hierarchy starting at 'root_dir' in BFS
            # order looking for config files.
            for (root_dirpath, dirs, files) in os.walk(root_dir):
                dirs[:] = [os.path.join(root_dirpath, d) for d in
                           dirs]
                dirs[:] = [d for d in dirs
                           if not re.match(exclude_regex, d)]
                files = [os.path.join(root_dirpath, f) for f in
                         files]
                files = [f for f in files
                         if not re.match(exclude_regex, f)]
                for fpath in files:
                    if os.path.exists(fpath) \
                            and self.is_config_file(fpath):
                        lstat = os.lstat(fpath)
                        if lstat.st_atime > accessed_since \
                                or lstat.st_ctime > accessed_since:
                            config_file_set.add(fpath)
    except Exception as e:
        logger.error('Error examining %s' % root_dir, exc_info=True)
        raise CrawlError(e)
    try:
        for fpath in config_file_set:
            try:
                (_, fname) = os.path.split(fpath)
                frelpath = fpath.replace(root_dir, root_dir_alias,
                                         1)  # root_dir relative path
                # Copy this config_file into / before reading it, so we
                # don't change its atime attribute.
                (th, temppath) = tempfile.mkstemp(prefix='config.',
                                                  dir='/')
                os.close(th)
                shutil.copyfile(fpath, temppath)
                # Read the copy, not the original, so the original file's
                # atime really is left untouched.
                with codecs.open(filename=temppath, mode='r',
                                 encoding='utf-8', errors='ignore') as \
                        config_file:
                    # Encode the contents of config_file as utf-8.
                    yield (frelpath, ConfigFeature(fname,
                                                   config_file.read(),
                                                   frelpath))
                os.remove(temppath)
            except IOError as e:
                raise CrawlError(e)
            except Exception as e:
                logger.error('Error crawling config file %s'
                             % fpath, exc_info=True)
                raise CrawlError(e)
    except Exception as e:
        logger.error('Error examining %s' % root_dir, exc_info=True)
        raise CrawlError(e)
|
def testTwoDates(self):
    input = "From March 13 at 12:30pm to September 2 at 11:15am"
    targets = [datetime.datetime(2014, 3, 13, 12, 30),
               datetime.datetime(2014, 9, 2, 11, 15)]
    self.compareDates(input, targets)
    self.compareTimes(input, targets)
|
@staticmethod
@raises(IOError)
def test_cannot_find_config_file_raises():
    """Tests that an exception is raised if it cannot find any yaml files
    """
    with TempDirectory() as d:
        with TempDirectory() as d2:
            load_yaml([join(d.path, 'test.cfg'),
                       join(d2.path, 'test.cfg')])
|
def ip_addr(data):
    return ".".join([str(a) for a in data])
|
def get_context_data(self, **kwargs):
    ctx = super(ListDetail, self).get_context_data(**kwargs)
    ctx.update({
        "packages": self.object.packages.all().extra(select={"lower_name": "lower(name)"}).order_by("lower_name"),
    })
    return ctx
|
def wrapped_function(*args, **kwargs):
    # Handle setting of Flask-Cors parameters
    options = get_cors_options(current_app, _options)
    if options.get('automatic_options') and request.method == 'OPTIONS':
        resp = current_app.make_default_options_response()
    else:
        resp = make_response(f(*args, **kwargs))
    set_cors_headers(resp, options)
    setattr(resp, FLASK_CORS_EVALUATED, True)
    return resp
|
def test_measmodestmt_dump():
    """ Test MeasuringModeStmt to_excellon()
    """
    lines = ['M71', 'M72', ]
    for line in lines:
        stmt = MeasuringModeStmt.from_excellon(line)
        assert_equal(stmt.to_excellon(), line)
|