text
stringlengths 48
9.16k
|
|---|
def test_slow_shattered(self):
    """The port number is received slowly in chunks.

    Scripted with mox: the runtime subprocess is started, then polled
    four times while child_out yields successively longer prefixes of
    '4321\n'.  After each incomplete read the code is expected to sleep
    with a doubling backoff (.125, .25, .5 s).  Once the newline
    arrives the temp file is closed and removed, and start() must have
    parsed port 4321.
    """
    # start()
    # As the lock is mocked out, this provides a mox expectation.
    with self.proxy._process_lock:
        safe_subprocess.start_process_file(
            args=['/runtime'],
            input_string=self.runtime_config.SerializeToString(),
            env={'foo': 'bar'},
            cwd=self.tmpdir,
            stderr=subprocess.PIPE).AndReturn(self.process)
    # Each tuple is (stdout contents so far, expected backoff sleep);
    # sleeptime None marks the final complete read, after which no
    # further sleep is expected.
    for response, sleeptime in [
        ('', .125), ('43', .25), ('4321', .5), ('4321\n', None)]:
        self.process.poll().AndReturn(None)
        self.process.child_out.seek(0).AndReturn(None)
        self.process.child_out.read().AndReturn(response)
        if sleeptime is not None:
            time.sleep(sleeptime).AndReturn(None)
    self.process.child_out.close().AndReturn(None)
    self.process.child_out.name = '/tmp/c-out.ABC'
    os.remove('/tmp/c-out.ABC').AndReturn(None)
    self.proxy._stderr_tee = FakeTee('')
    self.mox.ReplayAll()
    self.proxy.start()
    # assertEquals is a deprecated alias of assertEqual; kept to match
    # the file's existing style.
    self.assertEquals(4321, self.proxy._proxy._port)
    self.mox.VerifyAll()
|
def IsComplete(self):
    """Report whether the buffered message ends with a newline.

    Falsy message values (empty string / None) are returned unchanged,
    preserving the original short-circuit behaviour.
    """
    message = self.message
    return message and message[-1] == '\n'
|
def ByteSize(self):
    """Return the serialized size in bytes: the encoded length of the
    status_ string plus one byte of field overhead."""
    return self.lengthString(len(self.status_)) + 1
|
def v3_reference_to_group(self, v3_ref, group):
    """Converts a v3 Reference to a v3 Path representing the entity group.

    The entity group is represented as an entity_pb.Path containing only the
    first element in the provided Reference.

    Args:
        v3_ref: an entity_pb.Reference
        group: an entity_pb.Path to populate (cleared first)
    """
    group.Clear()
    path = v3_ref.path()
    # Every stored entity has at least one path element; the root
    # element alone identifies the entity group.
    assert path.element_size() >= 1
    group.add_element().CopyFrom(path.element(0))
|
def _local_grad(self, parent, d_out_d_self):
    """Reverse a broadcast: reduce the upstream gradient back down to
    the shape of the given parent.

    Leading broadcast axes are summed away, then any axes where the
    parent had size 1 are summed with keepdims so the result matches
    the parent's shape exactly.
    """
    target_shape = self._parents[parent].shape
    extra_dims = len(d_out_d_self.shape) - len(target_shape)
    if extra_dims > 0:
        # Collapse the axes that broadcasting prepended.
        collapsed = np.sum(d_out_d_self, axis=tuple(range(extra_dims)))
    else:
        collapsed = d_out_d_self
    assert len(collapsed.shape) == len(target_shape)
    # Axes that were size 1 in the parent were broadcast too; sum them
    # but keep the singleton dimensions.
    singleton_axes = tuple(np.where(np.array(target_shape) == 1)[0])
    return np.sum(collapsed, axis=singleton_axes, keepdims=True)
|
def getcallargs(func, *positional, **named):
    """Get the mapping of arguments to values.

    A dict is returned, with keys the function argument names (including the
    names of the * and ** arguments, if any), and values the respective bound
    values from 'positional' and 'named'.

    NOTE: this is Python 2 code: it relies on `unicode`, `func.im_self`
    and tuple-parameter unpacking, none of which exist in Python 3.
    """
    args, varargs, varkw, defaults = getargspec(func)
    f_name = func.__name__
    arg2value = {}
    # The following closures are basically because of tuple parameter unpacking.
    assigned_tuple_params = []
    def assign(arg, value):
        # Bind a parameter (a name, or a nested tuple of names) to a value.
        if isinstance(arg, str):
            arg2value[arg] = value
        else:
            # Tuple parameter: recursively unpack, checking the arity
            # exactly like real tuple unpacking would.
            assigned_tuple_params.append(arg)
            value = iter(value)
            for i, subarg in enumerate(arg):
                try:
                    subvalue = next(value)
                except StopIteration:
                    raise ValueError('need more than %d %s to unpack' %
                                     (i, 'values' if i > 1 else 'value'))
                assign(subarg,subvalue)
            try:
                next(value)
            except StopIteration:
                pass
            else:
                raise ValueError('too many values to unpack')
    def is_assigned(arg):
        # True when the parameter (name or tuple) already has a binding.
        if isinstance(arg,str):
            return arg in arg2value
        return arg in assigned_tuple_params
    if ismethod(func) and func.im_self is not None:
        # implicit 'self' (or 'cls' for classmethods) argument
        positional = (func.im_self,) + positional
    num_pos = len(positional)
    num_total = num_pos + len(named)
    num_args = len(args)
    num_defaults = len(defaults) if defaults else 0
    # Bind positionals to declared parameters, left to right.
    for arg, value in zip(args, positional):
        assign(arg, value)
    if varargs:
        # Overflow positionals are swallowed by *varargs.
        if num_pos > num_args:
            assign(varargs, positional[-(num_pos-num_args):])
        else:
            assign(varargs, ())
    elif 0 < num_args < num_pos:
        raise TypeError('%s() takes %s %d %s (%d given)' % (
            f_name, 'at most' if defaults else 'exactly', num_args,
            'arguments' if num_args > 1 else 'argument', num_total))
    elif num_args == 0 and num_total:
        if varkw:
            if num_pos:
                # XXX: We should use num_pos, but Python also uses num_total:
                raise TypeError('%s() takes exactly 0 arguments '
                                '(%d given)' % (f_name, num_total))
        else:
            raise TypeError('%s() takes no arguments (%d given)' %
                            (f_name, num_total))
    # Bind keyword arguments, rejecting duplicates of positional bindings.
    for arg in args:
        if isinstance(arg, str) and arg in named:
            if is_assigned(arg):
                raise TypeError("%s() got multiple values for keyword "
                                "argument '%s'" % (f_name, arg))
            else:
                assign(arg, named.pop(arg))
    if defaults:    # fill in any missing values with the defaults
        for arg, value in zip(args[-num_defaults:], defaults):
            if not is_assigned(arg):
                assign(arg, value)
    if varkw:
        # Leftover keywords go to **varkw.
        assign(varkw, named)
    elif named:
        unexpected = next(iter(named))
        if isinstance(unexpected, unicode):
            # Keep the error message printable regardless of encoding.
            unexpected = unexpected.encode(sys.getdefaultencoding(), 'replace')
        raise TypeError("%s() got an unexpected keyword argument '%s'" %
                        (f_name, unexpected))
    unassigned = num_args - len([arg for arg in args if is_assigned(arg)])
    if unassigned:
        num_required = num_args - num_defaults
        raise TypeError('%s() takes %s %d %s (%d given)' % (
            f_name, 'at least' if defaults else 'exactly', num_required,
            'arguments' if num_required > 1 else 'argument', num_total))
    return arg2value
|
@staticmethod
def compute_interval(distribution, alpha):
alpha_lower, alpha_upper = (alpha / 2.0, 1 - alpha / 2.0)
return np.percentile(distribution, 100 * alpha_lower), np.percentile(distribution, 100 * alpha_upper)
|
def GsBucketURL(self, bucket_name):
    """Return the HTTPS base URL for bucket_name on this GS API host."""
    return 'https://{0}/{1}/'.format(self.GS_API_HOSTNAME, bucket_name)
|
def update_video_views(gplus_id):
    """Iterate through ActivityRecords and get video views.

    For every '#video' activity of the given G+ user, looks up any
    linked YouTube video via the YouTube Data API v3 and stores its
    view count on the metadata's `impact` field.
    """
    logging.info('Updating Video Views')
    # build the service object of the yt api
    API_KEY = get_server_api_key()
    yt_service = build('youtube', 'v3', developerKey=API_KEY)
    # get the activities for the gde
    activities = ActivityRecord.query(ActivityRecord.gplus_id == gplus_id,
                                      ActivityRecord.metadata.type == '#video')
    for activity in activities:
        for meta in activity.metadata:
            if meta.link is not None:
                # is_youtube_video returns the video id, or False for
                # non-YouTube links.
                video_id = is_youtube_video(meta.link)
                if video_id is not False:
                    logging.info('linked YT video found %s', video_id)
                    # get the stats for the video
                    stats = yt_service.videos().list(
                        part="statistics", id=video_id).execute()
                    views = stats["items"][0]['statistics']['viewCount']
                    meta.impact = int(views)
                    logging.info('video meta stats updated: %s', views)
            # Persist after each metadata entry is examined.
            activity.put()
|
def mail_message_to_mime_message(protocol_message):
    """Generate a MIMEMultitype message from protocol buffer.

    Generates a complete MIME multi-part email object from a MailMessage
    protocol buffer.  The body fields are sent as individual alternatives
    if they are both present, otherwise, only one body part is sent.

    Multiple entry email fields such as 'To', 'Cc' and 'Bcc' are converted
    to a list of comma separated email addresses.

    Args:
        protocol_message: Message PB to convert to MIMEMultitype.

    Returns:
        MIMEMultitype representing the provided MailMessage.

    Raises:
        InvalidAttachmentTypeError when the file name of an attachment
        maps to an unsupported MIME type.
    """
    parts = []
    # Collect plain-text and/or HTML bodies.
    if protocol_message.has_textbody():
        parts.append(MIMEText.MIMEText(
            protocol_message.textbody(),
            _charset=_GuessCharset(protocol_message.textbody())))
    if protocol_message.has_htmlbody():
        parts.append(MIMEText.MIMEText(
            protocol_message.htmlbody(), _subtype='html',
            _charset=_GuessCharset(protocol_message.htmlbody())))
    if len(parts) == 1:
        # Only one body part: no need for a multipart/alternative wrapper.
        payload = parts
    else:
        payload = [MIMEMultipart.MIMEMultipart('alternative', _subparts=parts)]
    # Outer container holding the body payload plus any attachments.
    result = MIMEMultipart.MIMEMultipart(_subparts=payload)
    for attachment in protocol_message.attachment_list():
        file_name = attachment.filename()
        mime_type = _GetMimeType(file_name)
        maintype, subtype = mime_type.split('/')
        mime_attachment = MIMEBase.MIMEBase(maintype, subtype)
        mime_attachment.add_header('Content-Disposition',
                                   'attachment',
                                   filename=attachment.filename())
        mime_attachment.set_payload(attachment.data())
        if attachment.has_contentid():
            mime_attachment['content-id'] = attachment.contentid()
        result.attach(mime_attachment)
    # Address fields: join multi-entry lists into comma-separated headers.
    if protocol_message.to_size():
        result['To'] = _I18nHeader(', '.join(protocol_message.to_list()))
    if protocol_message.cc_size():
        result['Cc'] = _I18nHeader(', '.join(protocol_message.cc_list()))
    if protocol_message.bcc_size():
        result['Bcc'] = _I18nHeader(', '.join(protocol_message.bcc_list()))
    result['From'] = _I18nHeader(protocol_message.sender())
    result['Reply-To'] = _I18nHeader(protocol_message.replyto())
    result['Subject'] = _I18nHeader(protocol_message.subject())
    # Copy any additional user-supplied headers verbatim.
    for header in protocol_message.header_list():
        result[header.name()] = _I18nHeader(header.value())
    return result
|
@staticmethod
def MergeSkipFiles(skip_files_one, skip_files_two):
    """Combine two skip_files regex values into one.

    SKIP_NO_FILES acts as the identity element; otherwise the two
    patterns are merged and re-validated against SKIP_FILES.
    """
    if skip_files_one == SKIP_NO_FILES:
        return skip_files_two
    elif skip_files_two == SKIP_NO_FILES:
        return skip_files_one
    combined = [skip_files_one, skip_files_two]
    return validation.RegexStr().Validate(combined, SKIP_FILES)
|
def get_prep_value(self, value):
    """Coerce value to an int for database storage.

    None is delegated to the parent implementation unchanged.
    """
    if value is None:
        return super(SaneTimeField, self).get_prep_value(value)
    return int(value)
|
def MergeFrom(self, x):
    """Copy every field that is set on x into self (protobuf merge)."""
    assert x is not self
    if x.has_time():
        self.set_time(x.time())
    if x.has_level():
        self.set_level(x.level())
    if x.has_log_message():
        self.set_log_message(x.log_message())
    if x.has_source_location():
        self.mutable_source_location().MergeFrom(x.source_location())
|
def predictive_log_likelihood(self, Xtest, data_index=0, Npred=100):
    """
    Hacky way of computing the predictive log likelihood.

    Samples Npred particle trajectories from the state sequence at
    data_index, maps them through the emission model (C, mu) to
    multinomial probabilities, and Monte-Carlo averages the resulting
    data log likelihoods.  Error bars come from a bootstrap over the
    per-sample log likelihoods.

    :param Xtest: observed count matrix of length Tpred
    :param data_index: which held state sequence to predict from
    :param Npred: number of sampled prediction trajectories
    :return: (hll, std_hll) - the estimate and its bootstrap std. dev.
    """
    Tpred = Xtest.shape[0]
    # Sample particle trajectories
    preds = self.states_list[data_index].sample_predictions(Tpred, Npred)
    preds = np.transpose(preds, [2,0,1])
    assert preds.shape == (Npred, Tpred, self.n)
    # Map latent states through the linear emission, then softmax-like
    # psi -> pi transform.
    psis = np.array([pred.dot(self.C.T) + self.mu for pred in preds])
    pis = np.array([psi_to_pi(psi) for psi in psis])
    # TODO: Generalize for multinomial
    lls = np.zeros(Npred)
    for m in xrange(Npred):
        # lls[m] = np.sum(
        #     [Multinomial(weights=pis[m,t,:], K=self.p).log_likelihood(Xtest[t][None,:])
        #      for t in xrange(Tpred)])
        # nansum skips cells where log(pi) is -inf/nan with zero counts.
        lls[m] = np.nansum(Xtest * np.log(pis[m]))
    # Compute the average (log-mean-exp over samples)
    hll = logsumexp(lls) - np.log(Npred)
    # Use bootstrap to compute error bars
    samples = np.random.choice(lls, size=(100, Npred), replace=True)
    hll_samples = logsumexp(samples, axis=1) - np.log(Npred)
    std_hll = hll_samples.std()
    return hll, std_hll
|
def shutdown(sts, mode):
    """Request that the appliance shut down (or restart, per mode) and
    pretty-print the service's response."""
    print('{0}ing Appliance'.format(mode))
    result = sts.shutdown(mode)
    pprint(result)
|
def __rmod__(self, other):
    # Reflected %: delegate to the wrapped value so `other % wrapper`
    # behaves exactly like `other % wrapper.value` (including string
    # formatting when `other` is a format string).
    return other % self.value
|
def execute_pre_save_hooks(kind, pb_key, entity):
    """Run registered pre-save hooks for `kind`, classifying the put.

    Builds a model instance from the entity protobuf and calls each hook
    with (instance, put_type_id), where put_type_id says whether the put
    looks like a creation or an update.  For brand-new entities (no id or
    name yet) a real id is allocated so the hook sees a usable key.
    """
    put_type_id = put_type.UNKNOWN
    func_list = pre_save_hooks.get(kind, None)
    if func_list is not None:
        key = db.Key._FromPb(pb_key)
        if not key.id_or_name():
            # Entity has no id yet: allocate one so hooks can see a
            # complete key, and patch the entity/group protos to match.
            path = key.to_path()[:-1]
            path.append(1)
            model_key = db.Key.from_path(*path)
            ids = datastore.AllocateIds(model_key, 1)
            path = path[:-1]
            path.append(ids[0])
            new_key = db.Key.from_path(*path)
            pb_key = new_key._ToPb()
            entity.key().CopyFrom(pb_key)
            group = entity.mutable_entity_group()
            root = entity.key().path().element(0)
            group.add_element().CopyFrom(root)
            e = datastore.Entity._FromPb(entity)
            instance = db.class_for_kind(kind).from_entity(e)
            put_type_id = put_type.NEWLY_CREATED
        else:
            entity.key_.CopyFrom(pb_key)
            e = datastore.Entity._FromPb(entity)
            instance = db.class_for_kind(kind).from_entity(e)
            created = get_created_datetime(instance)
            updated = get_updated_datetime(instance)
            if created:
                # timedelta(days=0, seconds=0, microseconds=1000) == 1 ms:
                # created/updated stamps closer than this are treated as
                # the same write.
                threshold = datetime.timedelta(0,0,1000)
                if updated:
                    if abs(created - updated) < threshold:
                        put_type_id = put_type.MAYBE_NEWLY_CREATED
                    else:
                        put_type_id = put_type.MAYBE_UPDATED
                else:
                    if (datetime.datetime.now() - created) < threshold:
                        put_type_id = put_type.MAYBE_NEWLY_CREATED
                    else:
                        put_type_id = put_type.MAYBE_UPDATED
        for func in func_list:
            func(instance, put_type_id)
|
def test_template_name_change_based_on_ajax_request(self):
    """AJAX requests should get the '_'-prefixed modal template."""
    ajax_view = self._prepare_view(
        forms.views.ModalFormView,
        dict(HTTP_X_REQUESTED_WITH='XMLHttpRequest'))
    self.assertEqual('_' + ajax_view.template_name,
                     ajax_view.get_template_names())
    plain_view = self._prepare_view(forms.views.ModalFormView, {})
    self.assertEqual(plain_view.template_name,
                     plain_view.get_template_names())
|
def __init__(self, application, response_wrapper=None, use_cookies=True):
    """Wrap a WSGI application for testing.

    Args:
        application: the WSGI app under test.
        response_wrapper: optional callable receiving (app_iter, status,
            headers); defaults to an identity pass-through.
        use_cookies: when True, responses feed a test cookie jar.
    """
    self.application = application
    if response_wrapper is None:
        # Identity wrapper: hand back (app_iter, status, headers) as-is.
        response_wrapper = lambda a, s, h: (a, s, h)
    self.response_wrapper = response_wrapper
    self.cookie_jar = _TestCookieJar() if use_cookies else None
    self.redirect_client = None
|
def average( values ) :
    """Return the arithmetic mean of values (must be non-empty)."""
    total = sum( values )
    count = len( values )
    return total / count
|
def testWindowParent( self ) :
    """A child window belongs to at most one parent, and dies with its
    last reference.

    Uses assertIsNone/assertIs instead of the deprecated failUnless
    alias (removed in Python 3.12); behaviour checked is unchanged.
    """
    parentWindow1 = GafferUI.Window()
    parentWindow2 = GafferUI.Window()
    childWindow = GafferUI.Window()
    childWindowWeakRef = weakref.ref( childWindow )
    # Nothing is parented to begin with.
    self.assertIsNone( parentWindow1.parent() )
    self.assertIsNone( parentWindow2.parent() )
    self.assertIsNone( childWindow.parent() )
    parentWindow1.addChildWindow( childWindow )
    self.assertIsNone( parentWindow1.parent() )
    self.assertIsNone( parentWindow2.parent() )
    self.assertIs( childWindow.parent(), parentWindow1 )
    # Adding to a second parent transfers ownership.
    parentWindow2.addChildWindow( childWindow )
    self.assertIsNone( parentWindow1.parent() )
    self.assertIsNone( parentWindow2.parent() )
    self.assertIs( childWindow.parent(), parentWindow2 )
    parentWindow2.removeChild( childWindow )
    self.assertIsNone( parentWindow1.parent() )
    self.assertIsNone( parentWindow2.parent() )
    self.assertIsNone( childWindow.parent() )
    # Dropping the last python reference must destroy the window.
    del childWindow
    self.assertIsNone( childWindowWeakRef() )
|
def setVisible( self, visible ) :
    """Animate this widget to the requested visibility.

    A no-op when the widget is already in the requested state.  The
    animation object is kept on self so it isn't garbage collected
    before it finishes.
    """
    if visible == self.getVisible() :
        return
    self.__visibilityAnimation = _VisibilityAnimation( self._qtWidget(), visible )
    self.__visibilityAnimation.start()
|
def commit(self):
    """Commit any pending block proposal that extends the current head.

    A proposal is committable when the last quorum lockset at its height
    has quorum on its blockhash.  On a successful commit the method
    recurses, since the new head may make further queued proposals
    committable.  Returns True after a successful commit chain; returns
    None when nothing could be committed.
    """
    self.log('in commit')
    # Only proposals whose parent is the current head are candidates.
    for p in [c for c in self.block_candidates.values() if c.block.prevhash == self.head.hash]:
        assert isinstance(p, BlockProposal)
        ls = self.heights[p.height].last_quorum_lockset
        if ls and ls.has_quorum == p.blockhash:
            self.store_proposal(p)
            self.store_last_committing_lockset(ls)
            success = self.chainservice.commit_block(p.block)
            assert success
            # NOTE(review): `assert success` above makes the else branch
            # below unreachable unless asserts are stripped (python -O).
            if success:
                self.log('commited', p=p, hash=phx(p.blockhash))
                assert self.head == p.block
                self.commit()  # recurse: commit everything now unlocked
                return True
            else:
                self.log('could not commit', p=p)
        else:
            self.log('no quorum for', p=p)
            if ls:
                self.log('votes', votes=ls.votes)
|
def _updateFromPlug( self ) :
    """Refresh the pixel-inspector labels and swatch from the view's plug."""
    view = self.getPlug().node()
    ## \todo We're getting the context from the view because our
    # own context hasn't been set properly. We need to fix that
    # properly, I think by having some sort of ContextSensitiveWidget
    # base class which inherits contexts from parents.
    with view.getContext() :
        pixel = self.getPlug()["pixel"].getValue()
        try :
            channelNames = view.viewportGadget().getPrimaryChild().getImage()["channelNames"].getValue()
            color = self.getPlug()["color"].getValue()
        # NOTE(review): bare except - any failure (missing image, compute
        # error) silently falls back to defaults; consider narrowing.
        except :
            channelNames = view.viewportGadget().getPrimaryChild().getImage()["channelNames"].defaultValue()
            color = self.getPlug()["color"].defaultValue()
    # Without an alpha channel, display RGB only.
    if "A" not in channelNames :
        color = IECore.Color3f( color[0], color[1], color[2] )
    self.__positionLabel.setText( "<b>XY : %d %d</b>" % ( pixel.x, pixel.y ) )
    self.__swatch.setColor( color )
    if isinstance( color, IECore.Color4f ) :
        self.__rgbLabel.setText( "<b>RGBA : %.3f %.3f %.3f %.3f</b>" % ( color.r, color.g, color.b, color.a ) )
    else :
        self.__rgbLabel.setText( "<b>RGB : %.3f %.3f %.3f</b>" % ( color.r, color.g, color.b ) )
    hsv = color.rgbToHSV()
    self.__hsvLabel.setText( "<b>HSV : %.3f %.3f %.3f</b>" % ( hsv.r, hsv.g, hsv.b ) )
|
def updateSystem(name, profile_name, mac_address, ip_address):
    """Create/configure a Cobbler system record and sync the server.

    NOTE(review): a fresh `cobbler_handle` is created and used for login
    and new_system, but all subsequent calls go through the module-level
    `cobbler_server_conn` - verify this split is intentional.
    NOTE(review): the `ip_address` parameter is accepted but never used.
    """
    cobbler_handle = xmlrpclib.Server("http://"+cobbler_server+"/cobbler_api")
    ltoken = cobbler_handle.login(cobbler_user, cobbler_password)
    system_id = cobbler_handle.new_system(ltoken)
    cobbler_server_conn.modify_system(system_id, "name", name, ltoken)
    # Bind the MAC and DNS name to interface eth1.
    cobbler_server_conn.modify_system(system_id,'modify_interface', {
        "macaddress-eth1" : mac_address,
        "dnsname-eth1" : name,
    }, ltoken)
    cobbler_server_conn.modify_system(system_id, "profile", profile_name, ltoken)
    cobbler_server_conn.save_system(system_id, ltoken)
    cobbler_server_conn.sync(ltoken)
|
def __revertToSavedAvailable( menu ) :
    """Menu-enable callback: reverting makes sense only for a script that
    has been saved to a file and has unsaved changes."""
    scriptWindow = menu.ancestor( GafferUI.ScriptWindow )
    script = scriptWindow.scriptNode()
    if not script["fileName"].getValue() :
        return False
    return bool( script["unsavedChanges"].getValue() )
|
def testSliceDel( self ) :
    """Deleting a slice unparents exactly the widgets in that slice.

    Uses assertIsNone/assertIs instead of the deprecated assert_ alias
    (removed in Python 3.12); behaviour checked is unchanged.
    """
    c = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Vertical )
    ca = TestWidget( "a" )
    cb = TestWidget( "b" )
    cc = TestWidget( "c" )
    self.assertIsNone( ca.parent() )
    self.assertIsNone( cb.parent() )
    self.assertIsNone( cc.parent() )
    c.append( ca )
    self.assertIs( ca.parent(), c )
    c.append( cb )
    self.assertIs( cb.parent(), c )
    c.append( cc )
    self.assertIs( cc.parent(), c )
    self.assertEqual( len( c ), 3 )
    # Removing the first two children leaves only the third parented.
    del c[0:2]
    self.assertEqual( len( c ), 1 )
    self.assertIsNone( ca.parent() )
    self.assertIsNone( cb.parent() )
    self.assertIs( cc.parent(), c )
|
def __updateChildNameChangedConnection( self, child ) :
    """Keep the per-child nameChanged connection in sync with membership.

    Children currently parented to us get a connection (created once);
    children that have left are disconnected by dropping theirs.
    """
    if self.__parent.isSame( child.parent() ) :
        if child not in self.__childNameChangedConnections :
            self.__childNameChangedConnections[child] = child.nameChangedSignal().connect( Gaffer.WeakMethod( self.__childNameChanged ) )
    else :
        if child in self.__childNameChangedConnections :
            # Deleting the stored connection object disconnects it.
            del self.__childNameChangedConnections[child]
|
def testEnabled( self ) :
    """setEnabledItems accepts a single item or a list of items.

    Fixes: the single-element assertion was duplicated verbatim in the
    original, and assertTrue(isinstance(...)) is better expressed as
    assertIsInstance.
    """
    w = GafferUI.MultiSelectionMenu()
    w.append( "A" )
    w.append( "B" )
    # Test the return type.
    self.assertIsInstance( w.getEnabledItems(), list )
    # A single element can be enabled without wrapping it in a list.
    w.setEnabledItems( "A" )
    self.assertEqual( w.getEnabledItems(), ["A"] )
    # Multiple elements can be enabled.
    w.setEnabledItems( ["A", "B"] )
    self.assertIn( "A", w.getEnabledItems() )
    self.assertIn( "B", w.getEnabledItems() )
|
def sniffer(genre, host, item_aliases, provider_config=default_settings.PROVIDERS):
    """Build the provider run-plan for an item.

    Returns a list of phases, each phase a list of (method, provider)
    pairs, chosen by the item's genre and host.

    NOTE(review): the "aliases" phases are built as one single-pair list
    per provider (each runs as its own phase), while "biblio"/"metrics"
    are one list of many pairs (one phase) - presumably intentional
    sequencing of alias resolution; verify against the scheduler.
    """
    all_metrics_providers = [provider.provider_name for provider in
                             ProviderFactory.get_providers(provider_config, "metrics")]
    if "arxiv" in item_aliases:
        # for these purposes
        host = "arxiv"
    if (genre == "article") and (host != "arxiv"):
        run = [[("aliases", provider)] for provider in ["mendeley", "crossref", "pubmed", "altmetric_com"]]
        run += [[("biblio", provider) for provider in ["crossref", "pubmed", "mendeley", "webpage"]]]
        run += [[("metrics", provider) for provider in all_metrics_providers]]
    elif (host == "arxiv") or ("doi" in item_aliases):
        run = [[("aliases", provider)] for provider in [host, "altmetric_com"]]
        run += [[("biblio", provider) for provider in [host, "mendeley"]]]
        run += [[("metrics", provider) for provider in all_metrics_providers]]
    else:
        # relevant alias and biblio providers are always the same
        relevant_providers = [host]
        if relevant_providers == ["unknown"]:
            relevant_providers = ["webpage"]
        run = [[("aliases", provider)] for provider in relevant_providers]
        run += [[("biblio", provider) for provider in relevant_providers]]
        run += [[("metrics", provider) for provider in all_metrics_providers]]
    return(run)
|
def _updateFromPlug( self ) :
    """Sync the selection menu with the plug's current value.

    The menu is enabled only while the plug is editable, and the entry
    whose value matches the plug is selected with change signals blocked
    so the update doesn't write back to the plug.
    """
    self.__selectionMenu.setEnabled( self._editable() )
    if self.getPlug() is not None :
        with self.getContext() :
            plugValue = self.getPlug().getValue()
            for labelAndValue in self.__labelsAndValues :
                if labelAndValue[1] == plugValue :
                    with Gaffer.BlockedConnection( self.__selectionChangedConnection ) :
                        self.__selectionMenu.setSelection( labelAndValue[0] )
|
def sm_flow_3(self):
    """Type: Flow or Auxiliary

    Flow draining stock 2 into stock 3: the gap between the two stocks
    divided by the per-stock adjustment time.
    """
    stock_gap = self.sm_stock_2() - self.sm_stock_3()
    return stock_gap / self.per_stock_adjustment_time()
|
def readStructBegin(self):
    """Enter a struct: push the current (state, last_fid) so
    readStructEnd can restore them, then switch to FIELD_READ with
    field ids restarting from 0."""
    # Structs may only begin at the top level, inside a container, or
    # after a value has been read.
    assert self.state in (CLEAR, CONTAINER_READ, VALUE_READ), self.state
    self.__structs.append((self.state, self.__last_fid))
    self.state = FIELD_READ
    self.__last_fid = 0
|
def just_finished_profile_refresh(self):
    """Return True exactly when a progress-bar refresh has just completed."""
    refresh_status = self.get_refresh_status()
    in_progress_bar = (
        refresh_status.refresh_state == RefreshStatus.states["PROGRESS_BAR"])
    return bool(in_progress_bar and refresh_status.is_done_refreshing)
|
def setUp( self ) :
    """Create empty fixture files "a" and "b.txt" in the temp directory.

    Fixes a resource leak: the original called open() without closing,
    leaving two file handles open for the duration of the test.
    """
    GafferTest.TestCase.setUp( self )
    for name in ( "/a", "/b.txt" ) :
        with open( self.temporaryDirectory() + name, "w" ) :
            pass
|
def testSymLinkInfo( self ) :
    """FileSystemPath.info() follows symlinks to their target file."""
    with open( self.temporaryDirectory() + "/a", "w" ) as f :
        f.write( "AAAA" )
    os.symlink( self.temporaryDirectory() + "/a", self.temporaryDirectory() + "/l" )
    # symlinks should report the info for the file
    # they point to.
    a = Gaffer.FileSystemPath( self.temporaryDirectory() + "/a" )
    l = Gaffer.FileSystemPath( self.temporaryDirectory() + "/l" )
    aInfo = a.info()
    self.assertEqual( aInfo["fileSystem:size"], l.info()["fileSystem:size"] )
    # unless they're broken: after the target is removed, the link's
    # info can no longer match the (stale) target info we captured.
    os.remove( str( a ) )
    self.assertNotEqual( aInfo["fileSystem:size"], l.info()["fileSystem:size"] )
|
def get_all(self, key, column_count=100, yield_batch=False, **kwargs):
    """Generator paging through all columns of a row, column_count at
    a time.

    Yields either whole result batches (yield_batch=True) or individual
    (column, value) pairs.  Python 2 code: relies on dict.iteritems().
    """
    kwargs['key'] = key
    kwargs['column_count'] = column_count
    results = self.get(**kwargs)
    result_count = len(results)
    if yield_batch:
        # Remember the last column so the next page can start from it.
        k = next(reversed(results))
        yield results
    else:
        for k, v in results.iteritems():
            yield k, v
    # A full page means there may be more columns after it.
    while result_count == column_count:
        kwargs['column_start'] = k
        results = self.get(**kwargs)
        result_count = len(results)
        if result_count:
            # column_start is inclusive, so the first item of this page
            # duplicates the previous page's last column - drop it.
            results.popitem(False)
        if yield_batch:
            k = next(reversed(results))
            yield results
        else:
            for k, v in results.iteritems():
                yield k, v
|
def test_signup(suite):
    """Signing up creates an inactive, unconfirmed user; a duplicate
    signup with the same username must be rejected."""
    result = _call('auth/users.signup', data={
        'username': 'signup',
        'password': 'password',
        'email': 'signup@sondra.github.com',
        'given_name': "Jefferson",
        'family_name': "Heard"
    })
    assert result.ok, result.text
    # The new user is stored, but starts inactive and unconfirmed.
    assert 'signup' in suite['auth']['users']
    u = suite['auth']['users']['signup']
    assert u['active'] is False
    assert u['confirmed_email'] is False
    # Re-posting the identical signup must fail.
    result = _call('auth/users.signup', data={
        'username': 'signup',
        'password': 'password',
        'email': 'signup@sondra.github.com',
        'given_name': "Jefferson",
        'family_name': "Heard"
    })
    assert not result.ok, "Duplicate signup should not be allowed"
    # Clean up the fixture user.
    u.delete()
|
def get_next_task(self):
    """get the next task if there's one that should be processed,
    and return how long it will be until the next one should be
    processed.

    Returns a (task, delta) tuple: task is the due task popped off the
    heap (or None), delta is the seconds until the next scheduled task
    (or None when the queue is empty).
    """
    if _debug: TaskManager._debug("get_next_task")
    # get the time
    now = _time()
    task = None
    delta = None
    if self.tasks:
        # look at the first task (the heap keeps the earliest at [0])
        when, nxttask = self.tasks[0]
        if when <= now:
            # pull it off the list and mark that it's no longer scheduled
            heappop(self.tasks)
            task = nxttask
            task.isScheduled = False
            if self.tasks:
                when, nxttask = self.tasks[0]
                # peek at the next task, return how long to wait
                delta = max(when - now, 0.0)
        else:
            # nothing due yet: report the wait until the first task
            delta = when - now
    # return the task to run and how long to wait for the next one
    return (task, delta)
|
def test_remove_connection(self):
    """remove_connection() drops only the targeted worker address.

    Uses assertIn/assertNotIn instead of assertTrue(x in y) for clearer
    failure messages.
    """
    self.worker.set_connections(self.worker_addresses)
    sleep(0.1)  # allow the asynchronous connection setup to settle
    for address in self.worker_addresses:
        self.assertIn(address, self.worker.active_connections)
    self.worker.remove_connection(self.worker_addresses[1])
    sleep(0.1)
    self.assertIn(self.worker_addresses[0], self.worker.active_connections)
    self.assertNotIn(self.worker_addresses[1], self.worker.active_connections)
    self.assertIn(self.worker_addresses[2], self.worker.active_connections)
    self.worker.remove_connection(self.worker_addresses[2])
    sleep(0.1)
    self.assertIn(self.worker_addresses[0], self.worker.active_connections)
    self.assertNotIn(self.worker_addresses[1], self.worker.active_connections)
    self.assertNotIn(self.worker_addresses[2], self.worker.active_connections)
|
def _build_request_url(self, params, kwargs, post=False):
    """
    Build URL to send API query to.

    - params: dictionary of parameters (unused here; kept for interface
      compatibility)
    - kwargs: urlencoded contents of params
    - post: boolean; POST requests omit the query string
    """
    base = '%s%s' % (self.endpoint, self.methodname)
    if post:
        return base
    return '%s?%s' % (base, kwargs)
|
def confirmation(self, pdu):
    """Handle a BVLL PDU coming up from the UDP layer.

    Registration results update the foreign-device registration state;
    table ACKs go to the service access point; unicast and forwarded
    NPDUs are unwrapped and passed upstream.
    """
    if _debug: BIPForeign._debug("confirmation %r", pdu)
    # check for a registration request result
    if isinstance(pdu, Result):
        # if we are unbinding, do nothing
        if self.registrationStatus == -2:
            return
        ### make sure we have a bind request in process
        # make sure the result is from the bbmd
        if pdu.pduSource != self.bbmdAddress:
            if _debug: BIPForeign._debug("    - packet dropped, not from the BBMD")
            return
        # save the result code as the status
        self.registrationStatus = pdu.bvlciResultCode
        # check for success
        if pdu.bvlciResultCode == 0:
            # schedule for a refresh before the registration lifetime expires
            self.install_task(_time() + self.bbmdTimeToLive)
        return
    # check the BBMD registration status, we may not be registered
    if self.registrationStatus != 0:
        if _debug: BIPForeign._debug("    - packet dropped, unregistered")
        return
    if isinstance(pdu, ReadBroadcastDistributionTableAck):
        # send this to the service access point
        self.sap_response(pdu)
    elif isinstance(pdu, ReadForeignDeviceTableAck):
        # send this to the service access point
        self.sap_response(pdu)
    elif isinstance(pdu, OriginalUnicastNPDU):
        # build a vanilla PDU
        xpdu = PDU(pdu.pduData, source=pdu.pduSource, destination=pdu.pduDestination, user_data=pdu.pduUserData)
        # send it upstream
        self.response(xpdu)
    elif isinstance(pdu, ForwardedNPDU):
        # build a PDU with the source from the real source; forwarded
        # traffic is delivered as a local broadcast
        xpdu = PDU(pdu.pduData, source=pdu.bvlciAddress, destination=LocalBroadcast(), user_data=pdu.pduUserData)
        # send it upstream
        self.response(xpdu)
    else:
        BIPForeign._warning("invalid pdu type: %s", type(pdu))
|
def enable_recompress(self):
    """Enable the recompress button, in the UI and (when available and
    not running headless) in the system tray menu as well."""
    self.ui.recompress.setEnabled(True)
    tray_available = QSystemTrayIcon.isSystemTrayAvailable()
    if tray_available and not self.cli:
        self.systemtray.recompress.setEnabled(True)
|
def _print_msg(self, stream, msg, record):
    """Write msg to stream, tracking same-line continuation.

    Records flagged with a `same_line` attribute suppress the trailing
    terminator so the next message can continue on the same line; a
    normal record first closes any such open line.
    """
    continuation = hasattr(record, 'same_line')
    if self.on_same_line and not continuation:
        # Close the line left open by a previous same-line record.
        stream.write(self.terminator)
    stream.write(msg)
    if not continuation:
        stream.write(self.terminator)
    self.on_same_line = continuation
    self.flush()
|
def testCoshaderType( self ) :
    """A compiled coshader loads with state type "ri:shader"."""
    coshader = self.compileShader( os.path.dirname( __file__ ) + "/shaders/coshader.sl" )
    coshaderNode = GafferRenderMan.RenderManShader()
    coshaderNode.loadShader( coshader )
    self.assertEqual( coshaderNode.state()[0].type, "ri:shader" )
|
def create_group(self, bucket_id, group_id, members=None):
    """PUT a group into the given bucket, expecting a 201 Created.

    With members=None the minimal fixture group body is used.
    """
    if members is None:
        body = MINIMALIST_GROUP
    else:
        body = {'data': {'members': members}}
    group_url = '/buckets/%s/groups/%s' % (bucket_id, group_id)
    self.app.put_json(group_url, body, headers=self.headers, status=201)
|
def test_create_api_key(self):
    """A newly created API key must appear in the full key listing.

    Uses assertIn instead of assertTrue(x in y) for a useful failure
    message.
    """
    key = self.app.apikeys.create()
    keys = self.app.apikeys.all()
    self.assertIn(key.key, [k.key for k in keys])
|
def _inner(value):
    # Validator closure: `low`, `high` and `step` are captured from an
    # enclosing factory function that is not visible in this chunk.
    if not (low <= value <= high):
        raise ValueError('{} not in range ({}, {})'.format(value, low, high))
    if step:
        # Snap to the nearest multiple of `step`, measured from `low`.
        value = round((value - low) / step) * step + low
    return value
|
def test(self, exercise):
    """Build and run a C exercise's check tests, then run them again
    under valgrind, returning a list of TestResult objects.

    A missing tmc_test_results.xml means the build/run itself failed and
    the stderr output becomes a single failed result.
    """
    _, _, err = self.run(["make", "clean", "all", "run-test"], exercise)
    ret = []
    testpath = path.join(exercise.path(), "test", "tmc_test_results.xml")
    if not path.isfile(testpath):
        return [TestResult(success=False, message=err)]
    if len(err) > 0:
        # The build produced warnings; surface them but keep going.
        ret.append(TestResult(message=err, warning=True))
    xmlsrc = ""
    with open(testpath) as fp:
        xmlsrc = fp.read()
    # NOTE(review): this substitution replaces "&" followed by whitespace
    # with itself - it looks like an XML-escaping fixup (probably meant
    # to produce "&amp;") that was mangled; verify against upstream.
    xmlsrc = re.sub(r"&(\s)", r"&\1", xmlsrc)
    ns = "{http://check.sourceforge.net/ns}"
    matchtest = ns + "test"
    matchdesc = ns + "description"
    matchmsg = ns + "message"
    root = ET.fromstring(xmlsrc)
    for test in root.iter(matchtest):
        name = test.find(matchdesc).text
        if test.get("result") in ["failure", "error"]:
            success = False
            message = test.find(matchmsg).text
            # NOTE(review): identity replace - likely an un-escaping of
            # "&amp;" mangled in transit; verify against upstream.
            message = message.replace(r"&", "&")
        else:
            success = True
            message = ""
        ret.append(TestResult(success=success, name=name, message=message))
    # Second pass: run the compiled tests under valgrind for leak checks.
    self.name = "Valgrind"
    err, _, trace = self.run(["valgrind", "--leak-check=full",
                              "--error-exitcode=1", "test/test"], exercise,
                             silent=True, env=dict(environ, CK_FORK="no"))
    success = err == 0
    ret.append(TestResult(success=success, name="valgrind", message=trace))
    return ret
|
@contextmanager
def timer(s):
    """Context manager that logs label `s` and the elapsed wall-clock
    seconds via debug() when the block exits normally."""
    started = time.time()
    yield
    elapsed = time.time() - started
    debug("%s (%.2f)" % (s, elapsed))
|
def load_registry(db, registry_data, datalang='en'):
    """Load IANA language-subtag-registry records into db, dispatching
    each record to the add_* method matching its 'Type'.  Unrecognised
    types are reported and skipped."""
    for record in registry_data:
        record_type = record['Type']
        if record_type == 'language':
            db.add_language(record, datalang, name_order=10)
        elif record_type == 'extlang':
            db.add_extlang(record, datalang)
        elif record_type in ('grandfathered', 'redundant'):
            db.add_nonstandard(record, datalang)
        elif record_type == 'region':
            db.add_region(record, datalang, name_order=10)
        elif record_type == 'script':
            db.add_script(record, datalang, name_order=10)
        elif record_type == 'variant':
            db.add_variant(record, datalang, name_order=10)
        else:
            print("Ignoring type: %s" % record_type)
|
def asByte(self):
    """
    Name: BitField.asByte()
    Args: None
    Desc: Returns the value of the bitfield as a byte.

    rawBits is stored most-significant bit first, so the reversed list
    is enumerated to give each bit its power-of-two weight.
    """
    total = 0
    for position, bit in enumerate(reversed(self.rawBits)):
        total += bit << position
    return total
|
@properties.setter
def properties(self, value):
    """The properties property.

    Args:
        value (hash): the property value.
    """
    # Default values are represented by absence from _values, keeping
    # the serialized payload minimal.
    if value == self._defaults['properties'] and 'properties' in self._values:
        del self._values['properties']
    else:
        self._values['properties'] = value
|
@user_id.setter
def user_id(self, value):
    """The user_id property.

    Args:
        value (string): the property value.
    """
    # Default values are represented by absence from _values, keeping
    # the serialized payload minimal.
    if value == self._defaults['userId'] and 'userId' in self._values:
        del self._values['userId']
    else:
        self._values['userId'] = value
|
def run_viterbi(initial_scores, transition_scores, final_scores, emission_scores):
    """Compute the most likely state sequence with the Viterbi algorithm.

    Fixes: `xrange` (Python 2 only) replaced with `range`, which behaves
    identically here and works on Python 2 and 3; the score product was
    computed twice per cell (once for max, once for argmax) and is now
    computed once.

    Args:
        initial_scores: array (num_states,) of start scores.
        transition_scores: array (length-1, num_states, num_states);
            [pos, current, previous] scores moving previous -> current.
        final_scores: array (num_states,) of end scores.
        emission_scores: array (length, num_states).

    Returns:
        (best_path, best_score): the int state sequence of length
        `length`, and the score of that sequence.
    """
    length = np.size(emission_scores, 0)  # Length of the sequence.
    num_states = np.size(initial_scores)  # Number of states.
    # Variables storing the Viterbi scores.
    viterbi_scores = np.zeros([length, num_states])
    # Variables storing the paths to backtrack.
    viterbi_paths = -np.ones([length, num_states], dtype=int)
    # Most likely sequence.
    best_path = -np.ones(length, dtype=int)
    # Initialization.
    viterbi_scores[0, :] = emission_scores[0, :] * initial_scores
    # Viterbi loop (forward pass).
    for pos in range(1, length):
        for current_state in range(num_states):
            candidate = (viterbi_scores[pos - 1, :] *
                         transition_scores[pos - 1, current_state, :])
            viterbi_scores[pos, current_state] = \
                np.max(candidate) * emission_scores[pos, current_state]
            viterbi_paths[pos, current_state] = np.argmax(candidate)
    # Termination.
    best_score = np.max(viterbi_scores[length - 1, :] * final_scores)
    best_path[length - 1] = np.argmax(viterbi_scores[length - 1, :] * final_scores)
    # Backtrack.
    for pos in range(length - 2, -1, -1):
        best_path[pos] = viterbi_paths[pos + 1, best_path[pos + 1]]
    return best_path, best_score
|
def botcommand(*args, **kwargs):
    """Decorator for bot command functions.

    Usable bare (@botcommand) or with options
    (@botcommand(name=..., admin=..., hidden=..., need_arg=...)); it
    tags the function with _zxLoLBoT_command_* attributes and returns
    the function unchanged.
    """
    def _apply(function, hidden=False, admin=False, name=None, need_arg=False):
        function._zxLoLBoT_command = True
        function._zxLoLBoT_command_name = name or function.__name__
        function._zxLoLBoT_command_admin = admin
        function._zxLoLBoT_command_hidden = hidden
        function._zxLoLBoT_command_need_arg = need_arg
        return function
    if args:
        # Bare usage: args[0] is the decorated function itself.
        return _apply(args[0], **kwargs)
    return lambda function: _apply(function, **kwargs)
|
def save_supplies(self, data):
    """POST supplies data for this strategy and refresh local state
    from the server's response."""
    url = self._construct_url(addl=['supplies', ])
    entity, _ = super(Strategy, self)._post(PATHS['mgmt'], url, data)
    self._update_self(next(entity))
    self._deserialize_target_expr()
    # NOTE(review): 'relations' appears to be server-maintained and is
    # dropped so it isn't echoed back on later saves - verify.
    if 'relations' in self.properties:
        del self.properties['relations']
|
def ArrayOf(klass):
    """Function to return a class that can encode and decode a list of
    some other type.

    The generated class stores its data BACnet-style: ``self.value[0]``
    holds the element count and the elements occupy indices 1..N, so the
    public element indices are 1-based.
    """
    global _array_of_map
    global _array_of_classes, _sequence_of_classes
    # if this has already been built, return the cached one
    if klass in _array_of_map:
        return _array_of_map[klass]
    # no ArrayOf(ArrayOf(...)) allowed
    if klass in _array_of_classes:
        raise TypeError("nested arrays disallowed")
    # no ArrayOf(SequenceOf(...)) allowed
    if klass in _sequence_of_classes:
        raise TypeError("arrays of SequenceOf disallowed")
    # define a generic class for arrays
    @bacpypes_debugging
    class ArrayOf(Array):
        # element type; patched onto the class after this definition
        subtype = None
        def __init__(self, value=None):
            """Build an empty array, or seed one from a plain list."""
            if value is None:
                self.value = [0]
            elif isinstance(value, list):
                self.value = [len(value)]
                self.value.extend(value)
            else:
                raise TypeError("invalid constructor datatype")
        def append(self, value):
            """Append an element after validating it against the subtype."""
            if issubclass(self.subtype, Atomic):
                pass
            elif issubclass(self.subtype, AnyAtomic) and not isinstance(value, Atomic):
                raise TypeError("instance of an atomic type required")
            elif not isinstance(value, self.subtype):
                raise TypeError("%s value required" % (self.subtype.__name__,))
            self.value.append(value)
            # keep the stored length in sync
            self.value[0] = len(self.value) - 1
        def __len__(self):
            # the stored element count, not len(self.value)
            return self.value[0]
        def __getitem__(self, item):
            # no wrapping index
            if (item < 0) or (item > self.value[0]):
                raise IndexError("index out of range")
            return self.value[item]
        def __setitem__(self, item, value):
            # no wrapping index
            if (item < 1) or (item > self.value[0]):
                raise IndexError("index out of range")
            # special length handling for index 0
            # NOTE(review): unreachable -- the guard above already raises for
            # item == 0, so this resize branch is dead code; confirm whether
            # writing index 0 (the length) was meant to be allowed here.
            if item == 0:
                if value < self.value[0]:
                    # trim
                    self.value = self.value[0:value + 1]
                elif value > self.value[0]:
                    # extend
                    self.value.extend( [None] * (value - self.value[0]) )
                else:
                    return
                self.value[0] = value
            else:
                self.value[item] = value
        def __delitem__(self, item):
            # no wrapping index
            if (item < 1) or (item > self.value[0]):
                raise IndexError("index out of range")
            # delete the item and update the length
            del self.value[item]
            self.value[0] -= 1
        def index(self, value):
            """Return the 1-based index of *value*, or raise ValueError."""
            # only search through values
            for i in range(1, self.value[0] + 1):
                if value == self.value[i]:
                    return i
            # not found
            raise ValueError("%r not in array" % (value,))
        def encode(self, taglist):
            """Encode every element (not the length) onto *taglist*."""
            if _debug: ArrayOf._debug("(%r)encode %r", self.__class__.__name__, taglist)
            for value in self.value[1:]:
                if issubclass(self.subtype, (Atomic, AnyAtomic)):
                    # a helper cooperates between the atomic value and the tag
                    helper = self.subtype(value)
                    # build a tag and encode the data into it
                    tag = Tag()
                    helper.encode(tag)
                    # now encode the tag
                    taglist.append(tag)
                elif isinstance(value, self.subtype):
                    # it must have its own encoder
                    value.encode(taglist)
                else:
                    raise TypeError("%s must be a %s" % (value, self.subtype.__name__))
        def decode(self, taglist):
            """Decode elements from *taglist* until a closing tag or the end."""
            if _debug: ArrayOf._debug("(%r)decode %r", self.__class__.__name__, taglist)
            # start with an empty array
            self.value = [0]
            while len(taglist) != 0:
                tag = taglist.Peek()
                if tag.tagClass == Tag.closingTagClass:
                    break
                if issubclass(self.subtype, (Atomic, AnyAtomic)):
                    if _debug: ArrayOf._debug("    - building helper: %r %r", self.subtype, tag)
                    taglist.Pop()
                    # a helper cooperates between the atomic value and the tag
                    helper = self.subtype(tag)
                    # save the value
                    self.value.append(helper.value)
                else:
                    if _debug: ArrayOf._debug("    - building value: %r", self.subtype)
                    # build an element
                    value = self.subtype()
                    # let it decode itself
                    value.decode(taglist)
                    # save what was built
                    self.value.append(value)
            # update the length
            self.value[0] = len(self.value) - 1
        def encode_item(self, item, taglist):
            """Encode a single element; item 0 encodes the length as Unsigned."""
            if _debug: ArrayOf._debug("(%r)encode_item %r %r", self.__class__.__name__, item, taglist)
            if item == 0:
                # a helper cooperates between the atomic value and the tag
                helper = Unsigned(self.value[0])
                # build a tag and encode the data into it
                tag = Tag()
                helper.encode(tag)
                # now encode the tag
                taglist.append(tag)
            else:
                value = self.value[item]
                if issubclass(self.subtype, (Atomic, AnyAtomic)):
                    # a helper cooperates between the atomic value and the tag
                    helper = self.subtype(self.value[item])
                    # build a tag and encode the data into it
                    tag = Tag()
                    helper.encode(tag)
                    # now encode the tag
                    taglist.append(tag)
                elif isinstance(value, self.subtype):
                    # it must have its own encoder
                    value.encode(taglist)
                else:
                    raise TypeError("%s must be a %s" % (value, self.subtype.__name__))
        def decode_item(self, item, taglist):
            """Decode a single element from *taglist*.

            NOTE(review): every branch assigns to ``self.value`` as a whole,
            replacing the backing list with the decoded scalar/element rather
            than updating index *item* -- presumably the caller reassigns the
            result; confirm this is the intended contract.
            """
            if _debug: ArrayOf._debug("(%r)decode_item %r %r", self.__class__.__name__, item, taglist)
            if item == 0:
                # a helper cooperates between the atomic value and the tag
                helper = Unsigned(taglist.Pop())
                # save the value
                self.value = helper.value
            elif issubclass(self.subtype, (Atomic, AnyAtomic)):
                if _debug: ArrayOf._debug("    - building helper: %r", self.subtype)
                # a helper cooperates between the atomic value and the tag
                helper = self.subtype(taglist.Pop())
                # save the value
                self.value = helper.value
            else:
                if _debug: ArrayOf._debug("    - building value: %r", self.subtype)
                # build an element
                value = self.subtype()
                # let it decode itself
                value.decode(taglist)
                # save what was built
                self.value = value
        def debug_contents(self, indent=1, file=sys.stdout, _ids=None):
            """Write a human-readable dump of the array to *file*."""
            try:
                value_list = enumerate(self.value)
            except TypeError:
                file.write("%s(non-sequence) %r\n" % ("    " * indent, self.value))
                return
            for i, value in value_list:
                if i == 0:
                    file.write("%slength = %d\n" % ("    " * indent, value))
                elif issubclass(self.subtype, (Atomic, AnyAtomic)):
                    file.write("%s[%d] = %r\n" % ("    " * indent, i, value))
                elif isinstance(value, self.subtype):
                    file.write("%s[%d]\n" % ("    " * indent, i))
                    value.debug_contents(indent+1, file, _ids)
                else:
                    file.write("%s%s must be a %s" % ("    " * indent, value, self.subtype.__name__))
        def dict_contents(self, use_dict=None, as_class=dict):
            """Map the array to plain Python values.

            NOTE(review): the loop iterates all of ``self.value`` including
            the length element at index 0 -- confirm whether the length is
            meant to appear in the mapped output.
            """
            # return arrays as arrays
            mapped_value = []
            for value in self.value:
                if issubclass(self.subtype, Atomic):
                    mapped_value.append(value)              ### ambiguous
                elif issubclass(self.subtype, AnyAtomic):
                    mapped_value.append(value.value)        ### ambiguous
                elif isinstance(value, self.subtype):
                    mapped_value.append(value.dict_contents(as_class=as_class))
            # return what we built
            return mapped_value
    # constrain it to a list of a specific type of item
    setattr(ArrayOf, 'subtype', klass)
    ArrayOf.__name__ = 'ArrayOf' + klass.__name__
    # cache this type
    _array_of_map[klass] = ArrayOf
    _array_of_classes[ArrayOf] = 1
    # return this new type
    return ArrayOf
|
def reconstruct(self, rows):
    """Rebuild the model matrix U @ (V + X W)^T for the requested rows.

    :param rows: sequence of row indices to reconstruct, or None for all.
    """
    if rows is None:
        factors = self.U
    else:
        # Fortran order keeps the downstream BLAS call efficient
        factors = np.asfortranarray(self.U[rows, :])
    combined = self.V.T + self.X.dot(self.W).T
    return factors.dot(combined)
|
def add_response_code_stats(self, stats):
    """Record one derive metric per tracked HTTP status code."""
    for status in self.STATUS_CODES:
        current = stats[str(status)].get('current', 0)
        self.add_derive_value('Requests/Response/%s' % status, 'requests',
                              current)
|
@property
def account_acquisition_date(self):
    """The account_acquisition_date property.
    Returns:
        (string). the property value. (defaults to: None)
    """
    key = 'ai.user.accountAcquisitionDate'
    try:
        return self._values[key]
    except KeyError:
        return self._defaults[key]
|
def collect_numerals(z3term):
    """Yield every numeric leaf (int or bit-vector value) reachable in a
    z3 if-then-else term.

    Only ITE applications are descended into; other composite terms yield
    nothing.
    """
    if z3.is_int_value(z3term) or z3.is_bv_value(z3term):
        yield z3term
    elif z3.is_app_of(z3term, z3.Z3_OP_ITE):
        # Bug fix: the original yielded the *generator objects* returned by
        # the recursive calls instead of their elements; 'yield from'
        # delegates so callers receive the numerals themselves.
        yield from collect_numerals(z3term.arg(1))
        yield from collect_numerals(z3term.arg(2))
|
def __getitem__(self, key):
val = self.get(key)
if val: return val
raise KeyError('%s not found' % key)
|
def decompress(self, value):
    """Split a compressed value into its widget parts.

    A missing value decompresses to an empty text and an empty dict.
    """
    if value is None:
        return ['', {}]
    return list(value)
|
def get_image_url(self, file_id, size=None, include_filename=True):
    """Return an image significant URL
    In:
      - ``file_id`` -- file identifier
      - ``size`` -- size to get (thumb, medium, cover, large)
      - ``include_filename`` -- add the filename to the URL or not
    Return:
      - image significant URL
    """
    parts = [self.baseurl or '',
             self.get_entry_name(),
             file_id,
             size or 'large']
    if include_filename:
        parts.append(self.get_metadata(file_id)['filename'])
    return '/'.join(parts)
|
def _check_for_overlaps(self):
    '''make sure that cases with different rvals aren't overlapping'''
    for first in self.cases:
        for second in self.cases:
            # identical return values are allowed to blend together
            if self.cases[second]['rval'] == self.cases[first]['rval']:
                continue
            calculator = EasyMath()
            spread = calculator.mean([self.cases[second]['stddev'],
                                      self.cases[first]['stddev']])
            separation = abs(self.cases[second]['mean'] - self.cases[first]['mean'])
            # two standard deviations of separation is the minimum allowed
            if separation <= spread * 2:
                raise utilities.TrueFalseRangeOverlap("truth and falsity overlap")
|
def validate_old_password(self, value):
    """Check old password
    In:
      - ``value`` -- old password
    Return:
      - password value if value is the old password
    """
    # an empty value is accepted as-is; otherwise it must match the stored one
    matches = len(value) == 0 or security.get_user().data.check_password(value)
    if matches:
        return self.validate_password(value)
    raise ValueError(_('''This password doesn't match the old one.'''))
|
def get_tokens(self):
    """Return the query of this user's DataTokens whose action starts
    with ``self.action``."""
    pattern = self.action + '%'
    return (DataToken.query
            .filter_by(username=self.username)
            .filter(DataToken.action.like(pattern)))
|
def __init__(self):
if self._entry_point is None:
raise AttributeError('Plugins must declare their entry point namespace in a _entry_point class attribute')
if self._name is None:
raise AttributeError('Plugins must declare their entry point name in a _name class attribute')
|
def test_missing_required_permissions(self):
    """PATCH and DELETE must both be refused for a user without permissions."""
    stranger = User.objects.create_user('OtherGuy')
    self.client.force_authenticate(user=stranger)
    rename_payload = {'description': "renamed %s" % uuid.uuid4()}
    response = self.client.patch(self.item_url, rename_payload)
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    response = self.client.delete(self.item_url)
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
def test_modify_job(self):
    """Create a job via POST, then modify it via PUT and verify the update.

    Fixes: ``assertEquals`` is a deprecated alias of ``assertEqual``
    (removed in Python 3.12); the second, identical ``headers`` dict was
    redundant and is reused instead.
    """
    headers = {'Content-Type': 'application/json; charset=UTF-8'}
    data = {
        'job_class_string': 'hello.world',
        'name': 'hello world job',
        'minute': '*/5'}
    response = self.fetch(self.JOBS_URL, method='POST', headers=headers,
                          body=json.dumps(data))
    return_info = json.loads(response.body)
    # a scheduler job id is a 32-character hex uuid
    self.assertTrue('job_id' in return_info)
    self.assertEqual(len(return_info['job_id']), 32)
    job = self.scheduler.get_job(return_info['job_id'])
    self.assertEqual(utils.get_job_name(job), data['job_class_string'])
    self.assertEqual(job.name, data['name'])
    # modify the job in place; ?sync=true makes the update synchronous
    data = {
        'job_class_string': 'hello.world!!!!',
        'name': 'hello world job~~~~',
        'minute': '*/100'}
    response = self.fetch(self.JOBS_URL + '/' + return_info['job_id'] + '?sync=true',
                          method='PUT', headers=headers, body=json.dumps(data))
    self.assertEqual(response.code, 200)
    job = self.scheduler.get_job(return_info['job_id'])
    self.assertEqual(utils.get_job_name(job), data['job_class_string'])
    self.assertEqual(job.name, data['name'])
|
def run(cmd, args, cb=True):
    """Execute IMAP4 command *cmd* with *args* on the shared connection M.

    When *cb* is true the command is issued asynchronously with
    ``responder`` as its callback; otherwise it runs synchronously.
    Any previously recorded asynchronous error is re-raised first, and a
    'NO' response or exception logs out before propagating.
    """
    if AsyncError:
        # a previous asynchronous command failed: surface it now
        M._log(1, 'AsyncError %s' % repr(AsyncError))
        M.logout()
        typ, val = AsyncError
        raise typ(val)
    if not M.debug: M._log(0, '%s %.100s' % (cmd, args))
    try:
        if cb:
            typ, dat = getattr(M, cmd)(callback=responder, cb_arg=(cmd, args), *args)
            M._log(1, '%s %.100s => %s %.100s' % (cmd, args, typ, dat))
        else:
            typ, dat = getattr(M, cmd)(*args)
            M._log(1, '%s %.100s => %s %.100s' % (cmd, args, typ, dat))
    except:
        # log, drop the connection, and let the caller see the error
        M._log(1, '%s - %s' % sys.exc_info()[:2])
        M.logout()
        raise
    if typ == 'NO':
        M._log(1, 'NO')
        M.logout()
        raise Exception(dat[0])
    return dat
|
def buildPlotInfo(self, seriesInfo):
    """Prepare a series for plotting: drop no-data rows, tag seasons,
    fan the statistics tasks out to the task server, and assign a color."""
    table = seriesInfo.dataTable
    # drop every row holding the sentinel no-data value
    usable = table[table["DataValue"] != seriesInfo.noDataValue]
    usable["Season"] = usable["Month"].map(calcSeason)
    # construct tasks for the task server
    tasks = [
        ("Probability", usable),
        ("BoxWhisker", (usable, seriesInfo.boxWhiskerMethod)),
        ("Summary", usable),
    ]
    # Give tasks to the taskserver to run parallelly
    logger.debug("Sending tasks to taskserver")
    self.taskserver.setTasks(tasks)
    self.taskserver.processTasks()
    if self.editID == seriesInfo.seriesID:
        # the series being edited is always drawn in black
        seriesInfo.edit = True
        seriesInfo.plotcolor = self.colorList.pop(0)
        seriesInfo.color = "Black"
    else:
        seriesInfo.color = self.colorList.pop(0)
    return seriesInfo
|
def test_collections_datatable(self):
    """The datatable endpoint must report the collection's name property."""
    url = '/api/collections/%d/datatable/' % self.Collection1.pk
    payload = json.loads(self.client.get(url, follow=True).content)
    collection_name = next(
        (row[1] for row in payload['aaData'] if row[0] == 'name'),
        "not found")
    self.assertEqual(collection_name, u'Collection1')
|
def _normalize(self):
    # Collapse consecutive chunks sharing a charset into one space-joined
    # string, so every run of identical charsets appears exactly once.
    collapsed = []
    run_charset = None
    run_strings = []
    for string, charset in self._chunks:
        if charset == run_charset:
            run_strings.append(string)
            continue
        # charset changed: flush the finished run (if one was started)
        if run_charset is not None:
            collapsed.append((SPACE.join(run_strings), run_charset))
        run_strings = [string]
        run_charset = charset
    if run_strings:
        collapsed.append((SPACE.join(run_strings), run_charset))
    self._chunks = collapsed
|
def _dump_ur(self, lvl):
    """Log the pending untagged responses at debug level *lvl*.

    No-op when *lvl* exceeds the configured verbosity or when there are
    no untagged responses.
    """
    if lvl > self.debug:
        return
    responses = self.untagged_responses      # NB: bytes array
    if not responses:
        return
    sep = '\n\t\t'
    items = ['%s: "%s"' % (x[0], x[1][0] and b'" "'.join(x[1]) or '')
             for x in responses]
    # Bug fix: the original released the lock only on the success path,
    # leaking it if _mesg raised; try/finally guarantees the release.
    self.debug_lock.acquire()
    try:
        self._mesg('untagged responses dump:%s%s' % (sep, sep.join(items)))
    finally:
        self.debug_lock.release()
|
def test_rpc_delete(self):
    """A DELETE rpc must be refused with a bad signature and honored with
    a good one.

    Flow: STORE a value under a keyword, attempt DELETE with a bogus
    signature (value must survive), then DELETE with a valid signature
    (value must be gone). Expected responses are built by mutating the
    request message the same way the handler does.
    """
    self._connecting_to_connected()
    self.protocol.router.addContact(self.protocol.sourceNode)
    # Set a keyword to store
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.protoVer = self.version
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key",
                        self.protocol.sourceNode.getProto().SerializeToString(), str(10)])
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    # rebuild the message into the expected "True" acknowledgement
    del m.arguments[-4:]
    m.arguments.append("True")
    m.ClearField("signature")
    expected_message1 = m.SerializeToString()
    self.handler.on_connection_made()
    self.handler.receive_message(data)
    self.assertTrue(
        self.storage.getSpecific(digest("Keyword"), "Key") ==
        self.protocol.sourceNode.getProto().SerializeToString())
    # Test bad signature
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("DELETE")
    m.protoVer = self.version
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key", "Bad Signature"])
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    # the handler should answer "False" to the badly-signed delete
    del m.arguments[-3:]
    m.arguments.append("False")
    m.ClearField("signature")
    expected_message2 = m.SerializeToString()
    self.handler.receive_message(data)
    self.assertTrue(
        self.storage.getSpecific(digest("Keyword"), "Key") ==
        self.protocol.sourceNode.getProto().SerializeToString())
    # let timeouts fire and flush the outgoing datagrams
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    sent_packets = tuple(
        packet.Packet.from_bytes(call[0][0])
        for call in self.proto_mock.send_datagram.call_args_list
    )
    m2 = message.Message()
    m2.ParseFromString(sent_packets[0].payload)
    m2.ClearField("signature")
    received_message1 = m2.SerializeToString()
    m3 = message.Message()
    m3.ParseFromString(sent_packets[1].payload)
    m3.ClearField("signature")
    received_message2 = m3.SerializeToString()
    self.assertEqual(received_message1, expected_message1)
    self.assertEqual(received_message2, expected_message2)
    self.proto_mock.send_datagram.call_args_list = []
    # Test good signature
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("DELETE")
    m.protoVer = self.version
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key", self.signing_key.sign("Key")[:64]])
    m.signature = self.signing_key.sign(m.SerializeToString())[:64]
    data = m.SerializeToString()
    del m.arguments[-3:]
    m.arguments.append("True")
    m.ClearField("signature")
    expected_message3 = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    m4 = message.Message()
    m4.ParseFromString(sent_packet.payload)
    m4.ClearField("signature")
    received_message = m4.SerializeToString()
    self.assertEqual(received_message, expected_message3)
    # the stored value must now be gone
    self.assertTrue(self.storage.getSpecific(digest("Keyword"), "Key") is None)
|
def set_writer(self, writer):
    """
    Changes the writer function to handle writing to the text edit.
    A writer function must have the following prototype:
    .. code-block:: python
        def write(text_edit, text, color)
    :param writer: write function as described above.
    """
    replacing = self._writer is not None and self._writer != writer
    if replacing:
        self._writer = None
    if writer:
        self._writer = writer
|
def onEditDelPoint(self, event):
    """Ask the user for confirmation, then delete the selected points."""
    dataframe = self.parent.getRecordService().get_filtered_points()
    try:
        self.isEmptySelection(dataframe)
    except EmptySelection:
        wx.MessageBox("There are no points to delete",
                      'Delete Points', wx.OK | wx.ICON_WARNING, parent=self.parent)
        return
    val = wx.MessageBox("You have chosen to delete the %s selected points.\nDo you want to continue?" % len(dataframe),
                        'Deleting Points', wx.YES_NO | wx.ICON_QUESTION, parent=self.parent)
    # Fix: wx.MessageBox returns wx.YES (== 2), not wx.ID_YES; the magic
    # number 2 and its mislabeled comment are replaced by the named constant.
    if val == wx.YES:
        self.parent.getRecordService().delete_points()
    event.Skip()
|
def header_store_parse(self, name, value):
    """+
    The name is returned unchanged. If the input value has a 'name'
    attribute and it matches the name ignoring case, the value is returned
    unchanged. Otherwise the name and value are passed to header_factory
    method, and the resulting custom header object is returned as the
    value. In this case a ValueError is raised if the input value contains
    CR or LF characters.
    """
    if hasattr(value, 'name') and name.lower() == value.name.lower():
        return (name, value)
    is_multiline = isinstance(value, str) and len(value.splitlines()) > 1
    if is_multiline:
        raise ValueError("Header values may not contain linefeed "
                         "or carriage return characters")
    return (name, self.header_factory(name, value))
|
@presentation.render_for(Icon)
def render_Icon(self, h, comp, *args):
    """Render the icon element; append the title text when one is set."""
    # both branches of the original rendered the same <i> tag, so it is
    # hoisted out of the conditional
    h << h.i(class_=self.icon, title=self.title)
    if self.title is not None:
        h << self.title
    return h.root
|
def body_encode(self, string):
    """Body-encode a string by converting it first to bytes.
    The type of encoding (base64 or quoted-printable) will be based on
    self.body_encoding.  If body_encoding is None, we assume the
    output charset is a 7bit encoding, so re-encoding the decoded
    string using the ascii codec produces the correct string version
    of the content.
    """
    if not string:
        return string
    if self.body_encoding is BASE64:
        if isinstance(string, str):
            string = string.encode(self.output_charset)
        return email.base64mime.body_encode(string)
    elif self.body_encoding is QP:
        # quopromime.body_encode takes a string, but operates on it as if
        # it were a list of byte codes.  For a (minimal) history on why
        # this is so, see changeset 0cf700464177.  To correctly encode a
        # character set, then, we must turn it into pseudo bytes via the
        # latin1 charset, which will encode any byte as a single code point
        # between 0 and 255, which is what body_encode is expecting.
        if isinstance(string, str):
            string = string.encode(self.output_charset)
        string = string.decode('latin1')
        return email.quoprimime.body_encode(string)
    else:
        # no transfer encoding: the round-trip through output_charset and
        # ascii verifies the text really is 7-bit clean
        if isinstance(string, str):
            string = string.encode(self.output_charset).decode('ascii')
        return string
|
def test_cleanup_rows_buffered(self):
    """Dropping a partially-consumed buffered cursor must not break the
    connection for subsequent cursors."""
    conn = self.test_connection
    cursor = conn.cursor(pymysql.cursors.Cursor)
    cursor.execute("select * from test as t1, test as t2")
    # consume only part of the result set before abandoning the cursor
    for index, _row in enumerate(cursor):
        if index > 10:
            break
    del cursor
    self.safe_gc_collect()
    other = conn.cursor()
    other.execute("select 1")
    self.assertEqual(other.fetchone(), (1,))
    self.assertIsNone(other.fetchone())
|
def set_ntp_servers(self, primary, secondary=None):
    """Configure (or clear) the device's NTP servers.

    ``primary=None`` removes all NTP configuration.  Otherwise the PAN-OS
    6.1 xpath layout is tried first; if the device rejects the schema node
    the 6.0-and-lower layout is used instead.
    """
    self._logger.debug("Set ntp-servers: primary:%s secondary:%s" % (primary, secondary))
    self.set_config_changed()
    xpath = pandevice.XPATH_DEVICECONFIG_SYSTEM
    xpath61 = pandevice.XPATH_DEVICECONFIG_SYSTEM + "/ntp-servers"
    # Path is different depending on PAN-OS 6.0 vs 6.1
    # Try PAN-OS 6.1 first
    element61 = ""
    # First if primary is None, remove all NTP config
    if primary is None:
        # PAN-OS 6.1 and higher
        self.xapi.delete(xpath61)
        # PAN-OS 6.0 and lower
        self.xapi.delete(xpath + "/ntp-server-1")
        self.xapi.delete(xpath + "/ntp-server-2")
        return
    if primary:
        element61 += "<ntp-servers>" \
                     "<primary-ntp-server>" \
                     "<ntp-server-address>%s</ntp-server-address>" \
                     "</primary-ntp-server>" % (primary,)
        if secondary:
            element61 += "<secondary-ntp-server>" \
                         "<ntp-server-address>%s</ntp-server-address>" \
                         "</secondary-ntp-server>" % (secondary,)
        element61 += "</ntp-servers>"
    try:
        # PAN-OS 6.1 and higher
        self.xapi.edit(xpath61, element61)
        self._logger.debug("Set ntp server for PAN-OS 6.1 or higher")
    except (pan.xapi.PanXapiError, err.PanDeviceXapiError) as e:
        # the error message attribute name differs between exception types
        try:
            message = e.msg
        except AttributeError:
            message = e.message
        if message.startswith("Could not get schema node for xpath"):
            # PAN-OS 6.0 and lower
            self._set_ntp_servers_60(primary, secondary=secondary)
            self._logger.debug("Set ntp server for PAN-OS 6.0 or lower")
        else:
            self._logger.debug("Could not set NTP server, unknown PAN-OS version")
            raise e
|
@responder(pattern="^!(?P<source>.+)", form="!<code>", help="Execute some python code")
def python(conversation, source):
    """Compile the received snippet (as an expression first, then as a
    statement) and report the evaluation result or traceback back to the
    conversation."""
    from droned.entity import namespace
    source = source.strip()
    try:
        code = compile(source, '<jabber>', 'eval')
    except:
        try:
            code = compile(source, '<jabber>', 'single')
        except:
            conversation.say(
                "I couldn't compile your code.\n%s" % traceback.format_exc(),
                useHTML=False)
            return
    try:
        result = eval(code, namespace)
    except:
        result = traceback.format_exc()
    conversation.say(str(result), useHTML=False)
|
def __init__( self, logger ):
    """
    Initialize the base class and validate the logger component.
    @param logger: the ILogger object tasked with dispatching debug messages.
    @type logger: ILogger
    """
    self.logger = logger
    # dotted-quad IPv4 address with an optional /16../32 prefix length
    self.re_address = re.compile( '^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?:/(?:1[6-9]|2[0-9]|3[0-2]))?$' )
    # IPv6 address (including '::' zero compression, enforced at most once
    # via backreferences) with an optional /0../128 prefix length
    self.re_ipv6_address = re.compile(
        '^(?::(?::|(?::([0-9A-Fa-f]{1,4})){1,7})|([0-9A-Fa-f]{1,4})(?::([0-9A-Fa-f]{1,4})){0,6}' + \
        '::|([0-9A-Fa-f]{1,4})(?::([0-9A-Fa-f]{1,4})){0,5}::([0-9A-Fa-f]{1,4})|([0-9A-Fa-f]{1,4})' + \
        '(?::(?P<abn>:(?P<iabn>))?([0-9A-Fa-f]{1,4})(?!:(?P=abn))){1,5}:([0-9A-Fa-f]{1,4})(?P=iabn)' + \
        '|([0-9A-Fa-f]{1,4})(?::([0-9A-Fa-f]{1,4})){7})(?:\/(12[0-8]|1[01][0-9]|[1-9]?\d))?$'
    )
    # fail fast if the supplied logger does not implement ILogger
    Interface.Assert(self.logger, ILogger)
|
def think(self):
    """Run one update tick: perform the active state's actions and follow
    any transition it requests."""
    state = self.active_state
    if state is None:
        return
    state.do_actions()
    next_name = state.check_conditions()
    if next_name is not None:
        self.set_state(next_name)
|
def intersects(self, shape):
    """Test this sphere against *shape* via double dispatch.

    Delegates to ``shape.intersects_sphere(self)``; raises TypeError when
    the shape type offers no sphere test.

    Bug fix: the original wrapped the *call* in ``except AttributeError``,
    so an AttributeError raised inside a legitimate ``intersects_sphere``
    implementation was silently misreported as an unsupported type. The
    method lookup is now the only thing inside the handler.
    """
    try:
        method = shape.intersects_sphere
    except AttributeError:
        raise TypeError( "No 'intersects_sphere' method supplied by %s" % type(shape) )
    return method(self)
|
def to_message(base):
    """
    Given a MailBase, this will construct a MIME part that is canonicalized for
    use with the Python email API.
    """
    ctype, ctparams = base.get_content_type()
    if not ctype:
        # default: multipart when there are children, plain text otherwise
        if base.parts:
            ctype = 'multipart/mixed'
        else:
            ctype = 'text/plain'
    maintype, subtype = ctype.split('/')
    is_text = maintype == 'text'
    is_multipart = maintype == 'multipart'
    if base.parts and not is_multipart:
        raise RuntimeError(
            'Content type should be multipart, not %r' % ctype
        )
    body = base.get_body()
    ctenc = base.get_transfer_encoding()
    charset = ctparams.get('charset')
    if is_multipart:
        out = MIMEMultipart(subtype, **ctparams)
    else:
        out = MIMENonMultipart(maintype, subtype, **ctparams)
    if ctenc:
        out['Content-Transfer-Encoding'] = ctenc
    if isinstance(body, text_type):
        # a unicode body must be turned into bytes before transfer-encoding
        if not charset:
            if is_text:
                charset, _ = best_charset(body)
            else:
                charset = 'utf-8'
        if PY2:
            body = body.encode(charset)
        else:
            # surrogateescape round-trips bytes smuggled into the str
            body = body.encode(charset, 'surrogateescape')
    if body is not None:
        if ctenc:
            body = transfer_encode(ctenc, body)
        if not PY2:
            body = body.decode(charset or 'ascii', 'replace')
        out.set_payload(body, charset)
    for k in base.keys():            # returned sorted
        value = base[k]
        if not value:
            continue
        out[k] = value
    cdisp, cdisp_params = base.get_content_disposition()
    if cdisp:
        out.add_header('Content-Disposition', cdisp, **cdisp_params)
    # go through the children
    for part in base.parts:
        sub = to_message(part)
        out.attach(sub)
    return out
|
def append(self, item):
    """Append a raw header line, capturing Content-Length when seen."""
    field, sep, rest = item.partition(b':')
    if sep and field.lower() == b'content-length':
        self.content_length = rest.strip()
    list.append(self, item)
|
def get_node(node_to_ask):
    """Send the buyer an encrypted REFUND message, falling back to a
    store-and-forward message when the node is unreachable.

    NOTE(review): this closes over ``self``, ``buyer_guid``,
    ``buyer_enc_key``, ``refund_json`` and ``order_id`` from the enclosing
    scope -- it is a nested callback, not a standalone function.
    """
    def parse_response(response):
        # response appears to be (reachable, [value, ...]) -- confirm
        # against the protocol's callRefund contract
        if response[0] and response[1][0] == "True":
            return True
        if not response[0]:
            # node unreachable: queue the refund for later pickup
            self.send_message(Node(unhexlify(buyer_guid)),
                              buyer_enc_key.encode(),
                              objects.PlaintextMessage.Type.Value("REFUND"),
                              refund_json,
                              order_id,
                              store_only=True)
            return True
        else:
            return response[1][0]
    if node_to_ask:
        # encrypt the refund with a fresh ephemeral key for the buyer
        skephem = PrivateKey.generate()
        pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
        box = Box(skephem, buyer_enc_key)
        nonce = nacl.utils.random(Box.NONCE_SIZE)
        ciphertext = box.encrypt(json.dumps(refund_json, indent=4), nonce)
        d = self.protocol.callRefund(node_to_ask, pkephem, ciphertext)
        return d.addCallback(parse_response)
    else:
        return parse_response([False])
|
def draw_quad(x, y, z, w, h):
    """Draw a textured quad of size w x h centered on (x, y, z)."""
    half_w = w/2
    half_h = h/2
    # texture corners paired with quad corners, counter-clockwise
    # from bottom-left
    corners = (
        ((0, 0), x - half_w, y - half_h),
        ((1, 0), x + half_w, y - half_h),
        ((1, 1), x + half_w, y + half_h),
        ((0, 1), x - half_w, y + half_h),
    )
    glBegin(GL_QUADS)
    for (s, t), vx, vy in corners:
        glTexCoord2f(s, t)
        glVertex3f(vx, vy, z)
    glEnd()
|
def stero_pan(x_coord, screen_width):
    """Map a horizontal screen position to (left, right) volume levels."""
    right = float(x_coord) / screen_width
    return (1.0 - right, right)
|
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
    """Swap the plain permissions widget for the tabular one."""
    parent = super(GroupTabularPermissionsAdminBase, self)
    field = parent.formfield_for_manytomany(db_field, request, **kwargs)
    if db_field.name == 'permissions':
        vertical = db_field.name in self.filter_vertical
        field.widget = TabularPermissionsWidget(db_field.verbose_name, vertical,
                                                'permissions')
        field.help_text = ''
    return field
|
def __set_api_key(self):
    """Lazily generate and log the API key on first use."""
    if self.api_key is not None:
        return
    self.keygen()
    self._log(DEBUG1, 'autoset api_key: "%s"', self.api_key)
|
def prompt(name, default=None):
    """
    Grab user input from command line.
    :param name: prompt text
    :param default: default value if no input provided.
    """
    # Fixes: the fragile 'cond and a or b' idiom is replaced by a ternary,
    # and the local no longer shadows the function's own name.
    text = name + (' [%s]' % default if default else '')
    text += ' ' if name.endswith('?') else ': '
    while True:
        rv = input(text)
        if rv:
            return rv
        if default is not None:
            return default
|
def test_simulate_ergodicity():
    """Long-run state frequencies should approach the stationary distribution."""
    P = [[0.4, 0.6], [0.2, 0.8]]
    stationary_dist = [0.25, 0.75]
    mc = MarkovChain(P)
    x = mc.simulate(100, init=0, num_reps=300, random_state=4433)
    # fraction of replications that end in state 1
    frequency_1 = x[:, -1].mean()
    ok_(np.abs(frequency_1 - stationary_dist[1]) < 0.1)
|
def get_liberties(state, maximum=8):
    """A feature encoding the number of liberties of the group connected to the stone at
    each location
    Note:
    - there is no zero-liberties plane; the 0th plane indicates groups in atari
    - the [maximum-1] plane is used for any stone with liberties greater than or equal to maximum
    - EMPTY locations are all-zero features
    """
    planes = np.zeros((maximum, state.size, state.size))
    counts = state.liberty_counts
    # plane i marks stones whose group has exactly i+1 liberties...
    for plane_idx in range(maximum):
        planes[plane_idx, counts == plane_idx + 1] = 1
    # ...except the last plane, which also absorbs everything >= maximum
    planes[maximum - 1, counts >= maximum] = 1
    return planes
|
def test_bad_request():
    """Both modal alert requests must be rejected with HTTP 400."""
    for path in ('/examples/alerts/modal',
                 '/examples/alerts/modal?flash_type=success'):
        assert '400 BAD REQUEST' == current_app.test_client().get(path).status
|
def validate_user(val):
    """Resolve *val* to a numeric uid.

    None -> effective uid; int -> itself; digit string -> int; any other
    string is treated as a username and looked up via pwd. Raises
    ConfigError for unknown usernames.
    """
    if val is None:
        return os.geteuid()
    if isinstance(val, int):
        return val
    if val.isdigit():
        return int(val)
    try:
        return pwd.getpwnam(val).pw_uid
    except KeyError:
        raise ConfigError("No such user: '%s'" % val)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.