def get_tag_revs(self, svn_tag_url):
    stdout = self.run_command(['ls', '-v', svn_tag_url], show_stdout=False)
    results = []
    for line in stdout.splitlines():
        parts = line.split()
        rev = int(parts[0])
        tag = parts[-1].strip('/')
        results.append((tag, rev))
    return results

def _has_marker(keys, markers):
    for marker in markers:
        if marker in keys:
            return True
    return False

def test_contains_unary_metric(self):
    ret = parser._contains_unary_metrics(self.unary_conf['metrics'])
    self.assertTrue(ret)

def _FetchHelper(self, opt, project, lock, fetched, pm, sem):
    if not project.Sync_NetworkHalf(quiet=opt.quiet):
        print >>sys.stderr, 'error: Cannot fetch %s' % project.name
        if opt.force_broken:
            print >>sys.stderr, 'warn: --force-broken, continuing to sync'
        else:
            sem.release()
            sys.exit(1)
    lock.acquire()
    fetched.add(project.gitdir)
    pm.update()
    lock.release()
    sem.release()

def addInfothisDict(thisNode, cloudElementArea, criteriaB):
    '''
    Purpose:: Update original dictionary node with information

    Input:: thisNode: a string representing the unique ID of a node
            cloudElementArea: a floating-point number representing the area
                of the cloud element
            criteriaB: a masked array of floating-point numbers representing
                the lat,lons meeting the criteria

    Output:: None
    '''
    for eachdict in CLOUD_ELEMENT_GRAPH.nodes(thisNode):
        if eachdict[1]['uniqueID'] == thisNode:
            eachdict[1]['CriteriaBArea'] = cloudElementArea
            eachdict[1]['CriteriaBLatLon'] = criteriaB
    return

@user_entities.setter
def user_entities(self, user_entities):
    self._user_entities = user_entities

def generate_evaluation_from_config(config_data):
    """ Generate an Evaluation object from configuration data.

    :param config_data: Dictionary of the data parsed from the supplied
        YAML configuration file.
    :type config_data: :func:`dict`

    :returns: An Evaluation object containing the data specified in the
        supplied configuration data.
    """
    # Load datasets
    reference = None
    targets = []
    if config_data['datasets']:
        if 'reference' in config_data['datasets']:
            reference = _load_dataset(config_data['datasets']['reference'])
        if 'targets' in config_data['datasets']:
            targets = [_load_dataset(t) for t in config_data['datasets']['targets']]
        reference, targets = _prepare_datasets_for_evaluation(reference, targets, config_data)

    # Load metrics
    eval_metrics = []
    if config_data['metrics']:
        eval_metrics = [_load_metric(m)() for m in config_data['metrics']]

    # Load Subregions (if present)
    subregions = None
    if 'subregions' in config_data:
        subregions = [_load_subregion(s) for s in config_data['subregions']]

    return Evaluation(reference, targets, eval_metrics, subregions=subregions)

def check_message_2(self, sr, color='GREEN'):
    self.log.write(sr)
    self.log.write('\n')
    Utils.pprint(color, sr)

def __new__(self, actions=None):
    return tuple.__new__(self, actions or [])

def __init__(self, name, id, classes, attribs, pseudos):
    self.name = name
    self.id = id
    self.classes = classes
    self.attribs = attribs
    self.pseudos = pseudos

def set_cone_inner_angle(self, cone_inner_angle):
    context.lock()
    al.alSourcef(self._al_source, al.AL_CONE_INNER_ANGLE, cone_inner_angle)
    context.unlock()

def start(self):
    # Create timer
    timer = self._timer
    carbon.InstallEventLoopTimer(self._event_loop,
                                 ctypes.c_double(0.1),  #?
                                 ctypes.c_double(kEventDurationForever),
                                 self._timer_func_proc,
                                 None,
                                 ctypes.byref(timer))

def deco(func):
    setattr(task_gen, func.__name__, func)
    try:
        for x in Utils.to_list(var):
            task_gen.mappings[x] = func
    except:
        raise Utils.WafError('extension takes either a list or a string %r' % var)
    task_gen.mapped[func.__name__] = func
    return func

def is_media(self):
    t = self._peek()
    return isinstance(t, AtKeyword) and t == '@media'

def is_charset(self):
    t = self._peek()
    return isinstance(t, AtKeyword) and t == '@charset'

def get_scheme_pkgfile(self, scheme):
    return '-'.join(self.make_fullplatcomponents(
        self.get_name(),
        scheme,
        self.get_version(),
    )) + '.pkg'

def p_define_function_params(self, p):
    '''define_function_params : identifier_list_opt
                              | ELLIPSIS
                              | identifier_list ',' ELLIPSIS
    '''
    if len(p) == 2:
        if p[1] == 'ELLIPSIS':
            p[0] = ('...',)
        else:
            p[0] = p[1]
    else:
        p[0] = p[1] + ('...',)

def connect(self, other):
    return other._connect_line2(self)

def get_set_bits(bytes):
    bits = set()
    j = 0
    for byte in bytes:
        for i in range(8):
            if byte & 1:
                bits.add(j + i)
            byte >>= 1
        j += 8
    return bits

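# A quick sanity check for get_set_bits above (assuming it is called with an
# iterable of integer byte values): 0b00000101 has bits 0 and 2 set, and bits
# in the second byte are offset by 8.
assert get_set_bits([0b00000101]) == {0, 2}
assert get_set_bits([0, 0b10000000]) == {15}
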
def unindent_docstring(docstring):
    # [xx] copied from inspect.getdoc(); we can't use inspect.getdoc()
    # itself, since it expects an object, not a string.
    if not docstring:
        return ''
    lines = docstring.expandtabs().split('\n')

    # Find minimum indentation of any non-blank lines after first line.
    margin = sys.maxint
    for line in lines[1:]:
        content = len(line.lstrip())
        if content:
            indent = len(line) - content
            margin = min(margin, indent)
    # Remove indentation.
    if lines:
        lines[0] = lines[0].lstrip()
    if margin < sys.maxint:
        for i in range(1, len(lines)):
            lines[i] = lines[i][margin:]
    # Remove any trailing (but not leading!) blank lines.
    while lines and not lines[-1]:
        lines.pop()
    #while lines and not lines[0]:
    #    lines.pop(0)
    return '\n'.join(lines)

@event.default('button')
def on_mouse_press(self, x, y, button, modifiers):
    self.is_pressed = True
    return event.EVENT_UNHANDLED

def _update_funcid_to_doc(self, profile_stats):
    """
    Update the dictionary mapping from C{pstat.Stat} function ids to
    C{RoutineDoc}s.  C{pstat.Stat} function ids are tuples of
    C{(filename, lineno, funcname)}.
    """
    # Maps (filename, lineno, funcname) -> RoutineDoc
    for val_doc in self.reachable_valdocs():
        # We only care about routines.
        if not isinstance(val_doc, RoutineDoc):
            continue
        # Get the filename from the defining module.
        module = val_doc.defining_module
        if module is UNKNOWN or module.filename is UNKNOWN:
            continue
        # Normalize the filename.
        filename = os.path.abspath(module.filename)
        try:
            filename = py_src_filename(filename)
        except:
            pass
        # Look up the stat_func_id
        funcid = (filename, val_doc.lineno, val_doc.canonical_name[-1])
        if funcid in profile_stats.stats:
            self._funcid_to_doc[funcid] = val_doc

def report_errors(api_doc, docindex, parse_errors, field_warnings):
    """A helper function for L{parse_docstring()} that reports any
    markup warnings and field warnings that we encountered while
    processing C{api_doc}'s docstring."""
    if not parse_errors and not field_warnings:
        return

    # Get the name of the item containing the error, and the
    # filename of its containing module.
    name = api_doc.canonical_name
    module = api_doc.defining_module
    if module is not UNKNOWN and module.filename not in (None, UNKNOWN):
        try:
            filename = py_src_filename(module.filename)
        except:
            filename = module.filename
    else:
        filename = '??'

    # [xx] Don't report markup errors for standard builtins.
    if (isinstance(api_doc, ValueDoc) and api_doc != module and
        (api_doc.pyval in __builtin__.__dict__.values() or
         (module not in (None, UNKNOWN) and
          module.pyval in (__builtin__, exceptions)))):
        return

    # Get the start line of the docstring containing the error.
    startline = api_doc.docstring_lineno
    if startline in (None, UNKNOWN):
        startline = introspect_docstring_lineno(api_doc)
        if startline in (None, UNKNOWN):
            startline = None

    # Display a block header.
    header = 'File %s, ' % filename
    if startline is not None:
        header += 'line %d, ' % startline
    header += 'in %s' % name
    log.start_block(header)

    # Display all parse errors.  But first, combine any errors
    # with duplicate description messages.
    if startline is None:
        # remove dups, but keep original order:
        dups = {}
        for error in parse_errors:
            message = error.descr()
            if message not in dups:
                log.docstring_warning(message)
                dups[message] = 1
    else:
        # Combine line number fields for dup messages:
        messages = {}  # maps message -> list of linenum
        for error in parse_errors:
            error.set_linenum_offset(startline)
            message = error.descr()
            messages.setdefault(message, []).append(error.linenum())
        message_items = messages.items()
        message_items.sort(lambda a, b: cmp(min(a[1]), min(b[1])))
        for message, linenums in message_items:
            linenums = [n for n in linenums if n is not None]
            if len(linenums) == 0:
                log.docstring_warning(message)
            elif len(linenums) == 1:
                log.docstring_warning("Line %s: %s" % (linenums[0], message))
            else:
                linenums = ', '.join(['%s' % l for l in linenums])
                log.docstring_warning("Lines %s: %s" % (linenums, message))

    # Display all field warnings.
    for warning in field_warnings:
        log.docstring_warning(warning)

    # End the message block.
    log.end_block()

def create_construction(self, x, y):
    x, y = (x // hw)*hw, (y // hh)*hh
    cx, cy = x//hw, y//hh

    cells = (cx, cy), (cx+1, cy), (cx, cy+1), (cx+1, cy+1)
    for cell in cells:
        if self.play_field[cell]:
            return

    # check we're not going to block the only path for any enemy
    if not self.path.test_mod(cells):
        return

    # all ok
    Turret(x, y, self)
    for cell in cells:
        self.play_field[cell] = path.Blocker
    self.path = path.Path.determine_path(self.play_field, map_width*2,
                                         map_height*2)
    #self.path.dump()
    self.show_highlight = False

def __init__(self, factory=None, elements_per_list=0):
    super(ListFactory, self).__init__()
    self._factory = factory
    self._elements_per_list = elements_per_list

def test_dispatch_meta(self):
    p, t, d, m = self.build_proto()
    d.inject(3, 0, const.RTMP_DATA, 1, encode_amf('onStatus', None))
    self.assertEquals(self.messages, [('meta', 0, 1, ['onStatus', None])])

def blit_to_texture(self, target, level, x, y, z):
    glReadBuffer(self.gl_buffer)
    glCopyTexSubImage2D(target, level,
                        x - self.anchor_x, y - self.anchor_y,
                        self.x, self.y, self.width, self.height)

def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
    print >> self.file, 'on_mouse_drag(x=%d, y=%d, dx=%d, dy=%d, ' \
        'buttons=%s, modifiers=%s)' % (
            x, y, dx, dy,
            mouse.buttons_string(buttons), key.modifiers_string(modifiers))

def get_style(self, attribute):
    '''Get the document's named style at the caret's current position.

    If there is a text selection and the style varies over the selection,
    `pyglet.text.document.STYLE_INDETERMINATE` is returned.

    :Parameters:
        `attribute` : str
            Name of style attribute to retrieve.  See
            `pyglet.text.document` for a list of recognised attribute
            names.

    :rtype: object
    '''
    if self._mark is None or self._mark == self._position:
        try:
            return self._next_attributes[attribute]
        except KeyError:
            return self._layout.document.get_style(attribute, self._position)

    start = min(self._position, self._mark)
    end = max(self._position, self._mark)
    return self._layout.document.get_style_range(attribute, start, end)

def __init__(self, *args, **kwargs):
    super(Window, self).__init__(400, 140, caption='Text entry')
    self.batch = pyglet.graphics.Batch()
    self.labels = [
        pyglet.text.Label('Name', x=10, y=100, anchor_y='bottom',
                          color=(0, 0, 0, 255), batch=self.batch),
        pyglet.text.Label('Species', x=10, y=60, anchor_y='bottom',
                          color=(0, 0, 0, 255), batch=self.batch),
        pyglet.text.Label('Special abilities', x=10, y=20, anchor_y='bottom',
                          color=(0, 0, 0, 255), batch=self.batch)
    ]
    self.widgets = [
        TextWidget('', 200, 100, self.width - 210, self.batch),
        TextWidget('', 200, 60, self.width - 210, self.batch),
        TextWidget('', 200, 20, self.width - 210, self.batch)
    ]
    self.text_cursor = self.get_system_mouse_cursor('text')
    self.focus = None
    self.set_focus(self.widgets[0])

def test_waitStatus(self):
    p, t, dmx, mux = self.build_proto()

    # wait for event with specific code
    d = p.waitStatus(1, info1.code)
    d.addCallback(self.assertEquals, info1)
    # then wait for any event on message stream 1
    d.addCallback(lambda _: p.waitStatus(1, None))
    d.addCallback(self.assertEquals, info2)

    dmx.inject(3, 0, const.RTMP_COMMAND, 1,
               encode_amf('onStatus', 0, None, info1))
    dmx.inject(3, 0, const.RTMP_COMMAND, 1,
               encode_amf('onStatus', 0, None, info2))

    return d

def read(self, callback, grpos_range, frames=None):
    if grpos_range:
        # here we only handle the case of data shrinking from the
        # left / earlier side...
        end_grpos = self._grpos + grpos_range
        pos = self._pos
        grpos = self._grpos
        while 1:
            idx = pos - self._s.data_offset
            if idx < 0:
                pos -= idx
                idx = 0
            try:
                f = self._s.data[idx]
            except IndexError:
                # we're behind the last frame -> no read
                break
            grpos = f[0]
            if grpos >= end_grpos:
                grpos = end_grpos
                break
            callback(*f)
            pos += 1
        self._pos = pos
        self._grpos = grpos
    elif frames:
        pos = self._pos
        grpos = self._grpos
        while 1:
            idx = pos - self._s.data_offset
            if idx < 0:
                pos -= idx
                frames += idx
            if frames < 1:
                break
            try:
                f = self._s.data[idx]
            except IndexError:
                # we're behind the last frame -> no more read
                break
            grpos = f[0]
            callback(*f)
            pos += 1
            frames -= 1
        self._pos = pos
        self._grpos = grpos

    # if HAVE_TASKS:
    #     t = tasks.CompTask()
    #     return t, t.run()

    return None, defer.succeed(None)

def update(self, dt):
    self.player.dispatch_events()

    if self.control is None:
        # the player update may have resulted in this element being culled
        return
    if not self.control.isVisible():
        return

    t = self.player.time

    # time display
    s = int(t)
    m = t // 60
    h = m // 60
    m %= 60
    s = s % 60
    if h:
        text = '%d:%02d:%02d' % (h, m, s)
    else:
        text = '%02d:%02d' % (m, s)
    if text != self.control.time.text:
        self.control.time.text = text

    # slider position
    p = (t / self.player.source.duration)
    self.control.position.x = int(p * self.control.range.width)

def get_mipmapped_texture(self):
    if self._current_mipmap_texture:
        return self._current_mipmap_texture

    if not self._have_extension():
        # TODO mip-mapped software decoded compressed textures.  For now,
        # just return a non-mipmapped texture.
        return self.get_texture()

    texture = Texture.create_for_size(
        GL_TEXTURE_2D, self.width, self.height)
    if self.anchor_x or self.anchor_y:
        texture.anchor_x = self.anchor_x
        texture.anchor_y = self.anchor_y

    glBindTexture(texture.target, texture.id)
    glTexParameteri(texture.target, GL_TEXTURE_MIN_FILTER,
                    GL_LINEAR_MIPMAP_LINEAR)

    if not self.mipmap_data:
        if not gl_info.have_version(1, 4):
            raise ImageException(
                'Require GL 1.4 to generate mipmaps for compressed textures')
        glTexParameteri(texture.target, GL_GENERATE_MIPMAP, GL_TRUE)

    glCompressedTexImage2DARB(texture.target, texture.level,
                              self.gl_format,
                              self.width, self.height, 0,
                              len(self.data), self.data)

    width, height = self.width, self.height
    level = 0
    for data in self.mipmap_data:
        width >>= 1
        height >>= 1
        level += 1
        glCompressedTexImage2DARB(texture.target, level,
                                  self.gl_format,
                                  width, height, 0,
                                  len(data), data)

    glFlush()

    self._current_mipmap_texture = texture
    return texture

def test_send_notrendered(self):
    message = self._initMessage()
    message.context = CONTEXT2
    message.send()
    self._assertIsRendered(message, True, SUBJECT2, BODY2)

def test_watching_with_page(self):
    repos = self.client.repos.watching('tekkub', page=2)
    eq_(len(repos), 39)
    eq_(repos[0].name, 'Buffoon')

def quote_string(s):
    chars = []
    for c in s:
        if c == "\\":
            c = "\\\\"
        elif c == "\"":
            c = "\\\""
        chars.append(c)
    return "\"" + "".join(chars) + "\""

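# Example round-trip for quote_string above: backslashes and double quotes
# are escaped, and the whole result is wrapped in double quotes.
assert quote_string('say "hi"') == '"say \\"hi\\""'
assert quote_string("a\\b") == '"a\\\\b"'
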
def get_now():
    """
    Allows access to the global request and reads a timestamp from the query
    string.
    """
    if not get_current_request:
        return datetime.datetime.now()
    request = get_current_request()
    if request:
        openinghours_now = request.GET.get('openinghours-now')
        if openinghours_now:
            return datetime.datetime.strptime(openinghours_now, '%Y%m%d%H%M%S')
    return datetime.datetime.now()

def rotatey(self, angle):
    self *= Matrix4.new_rotatey(angle)
    return self

def to_plaintext(self, docstring_linker, **options):
    # [XX] don't cache -- different options might be used!!
    #if self._plaintext is not None: return self._plaintext
    if self._tree is None:
        return ''
    if 'indent' in options:
        self._plaintext = to_plaintext(self._tree, indent=options['indent'])
    else:
        self._plaintext = to_plaintext(self._tree)
    return self._plaintext

def get_preprocessor_define(node, buffer):
    """get_preprocessor_define(File, string) -> string"""
    # Regex the #define. Group 1 is the name, Group 2 is the value
    define = re.search('#define[\\s]+([^\\s]+)[\\s]+(.+)', buffer)
    if define:
        # The whole line is consumed, return an empty string to indicate that
        buffer = ''
        name = define.group(1)
        value = define.group(2).strip()
        node.funcs.add((name + ' (constant: ' + value + ')', name))
    return buffer

def get(self):
    parent_entity_id = self.get_argument('parent_entity_id', default=None, strip=True)
    entity_definition_keyname = self.get_argument('entity_definition_keyname', default=None, strip=True)
    public = True if self.get_argument('public', default=None, strip=True) == 'true' else False

    user = self.get_user_by_session_key(self.get_argument('session_key', default=None, strip=True))
    user_id = None
    if user:
        if not user.get('id'):
            raise web.HTTPError(401, "Unauthorized")
    else:
        raise web.HTTPError(401, "Unauthorized")

    if entity_definition_keyname != None:
        entity_id = entity.create_entity(entity_definition_keyname=entity_definition_keyname, parent_entity_id=parent_entity_id)
        if public:
            entity.set_public(entity_id, is_public=public)
        self.write({
            'entity_id': entity_id
        })
    else:
        raise web.HTTPError(400, 'To create a new Entity entity_definition_keyname is required.')

def __init__(self, f):
    super(CheckedCall, self).__setattr__("f", f)

def _generate_ascii(self, matrix, foreground, background):
    """
    Generates an identicon "image" in the ASCII format. The image will just
    output the matrix used to generate the identicon.

    Arguments:

        matrix - Matrix describing which blocks in the identicon should be
            painted with foreground (background if inverted) colour.

        foreground - Character which should be used for representing
            foreground.

        background - Character which should be used for representing
            background.

    Returns:

        ASCII representation of an identicon image, where one block is one
        character.
    """
    return "\n".join(["".join([foreground if cell else background
                               for cell in row]) for row in matrix])

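# A minimal sketch of what _generate_ascii produces, using a hypothetical
# 3x3 boolean matrix (the method only depends on the matrix and the two
# characters passed in):
#
#   matrix = [[True, False, True],
#             [False, True, False],
#             [True, False, True]]
#
# with "#" as foreground and "-" as background, the returned string is:
#
#   #-#
#   -#-
#   #-#
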
def expand(tests):
    if isinstance(tests, unittest.TestCase):
        method_name = tests._testMethodName
        meth = getattr(tests, method_name)
        if hasattr(meth, 'iterations'):
            tests = unittest.TestSuite(type(tests)(method_name)
                                       for i in xrange(meth.iterations))
    else:
        tests = unittest.TestSuite(expand(t) for t in tests)
    return tests

@classmethod
def setUpClass(self):
    with open("test/snippets/line_01.txt", "r") as myfile:
        self.string1 = myfile.read()
    with open("test/snippets/line_02.txt", "r") as myfile:
        self.string2 = myfile.read()

def for_address(self, address):
    if not address:
        return None

    func = getattr(settings, 'EASY_MAPS_GEOCODE', None)
    if func is not None:
        if not isinstance(func, collections.Callable):
            func = importpath(func)

        try:
            return func(address)
        except geocode.Error as e:
            try:
                logger.error(e)
            except Exception:
                logger.error(
                    "Geocoding error for address '%s'", address)

    return None

def injectmex86(pid, shellcode):
    from sys import platform

    if platform.startswith('win'):
        print("\nPtrace not working on windows machines ..\n")
        return False
    else:
        try:
            from ptrace.debugger.debugger import PtraceDebugger
            from ptrace.debugger.debugger import PtraceProcess
        except ImportError:
            print("\nYou must install the ptrace library before using this script.\n")
            return False
        else:
            try:
                dbg = PtraceDebugger()
                process = dbg.addProcess(int(pid), False)
                eip = process.getInstrPointer()
                bytes = process.writeBytes(eip, shellcode.replace("\\x", "").decode("hex"))
                process.setreg("ebx", 0)
                process.cont()
            except Exception as error:
                print(error)
                print("\nPlease do not forget to report!\n")
            else:
                print("\nInject complete!\n")

def _debugdirtyFn(self, x, y):
    if self._screendirty[x][y]:
        return 'D'
    else:
        return '.'

def bz2_pack(source):
    """
    Returns 'source' as a bzip2-compressed, self-extracting python script.

    .. note::

        This method uses up more space than the zip_pack method but it has
        the advantage in that the resulting .py file can still be imported
        into a python program.
    """
    import bz2, base64
    out = ""
    # Preserve shebangs (don't care about encodings for this)
    first_line = source.split('\n')[0]
    if analyze.shebang.match(first_line):
        if py3:
            if first_line.rstrip().endswith('python'):  # Make it python3
                first_line = first_line.rstrip()
                first_line += '3'  #!/usr/bin/env python3
        out = first_line + '\n'
    compressed_source = bz2.compress(source.encode('utf-8'))
    out += 'import bz2, base64\n'
    out += "exec(bz2.decompress(base64.b64decode('"
    out += base64.b64encode(compressed_source).decode('utf-8')
    out += "')))\n"
    return out

def test_create_contacts_from_message_field_successfully_creates_contact_object(self):
    contacts = Contact.create_contacts_from_message_field('to', self.message)
    self.assertEqual(contacts[0].email, 'ben@npmjs.com')

def get_response(self):
    """
    gets the message type and message from rexster

    :returns: RexProMessage
    """
    msg_version = self.recv(1)
    if not msg_version:
        raise exceptions.RexProConnectionException('socket connection has been closed')
    if bytearray([msg_version])[0] != 1:
        raise exceptions.RexProConnectionException(
            'unsupported protocol version: {}'.format(msg_version))

    serializer_type = self.recv(1)
    if bytearray(serializer_type)[0] != 0:
        raise exceptions.RexProConnectionException(
            'unsupported serializer version: {}'.format(serializer_type))

    # get padding
    self.recv(4)

    msg_type = self.recv(1)
    msg_type = bytearray(msg_type)[0]

    msg_len = struct.unpack('!I', self.recv(4))[0]

    response = ''
    while len(response) < msg_len:
        response += self.recv(msg_len)

    MessageTypes = messages.MessageTypes

    type_map = {
        MessageTypes.ERROR: messages.ErrorResponse,
        MessageTypes.SESSION_RESPONSE: messages.SessionResponse,
        MessageTypes.SCRIPT_RESPONSE: messages.MsgPackScriptResponse
    }

    if msg_type not in type_map:
        raise exceptions.RexProConnectionException(
            "can't deserialize message type {}".format(msg_type))
    return type_map[msg_type].deserialize(response)

def _generate_regions(R, L):
    n_ini = sum(not parent for parent in R.values())
    n_all = len(R)

    regions = list()
    for label in R.keys():
        i = min(n_all - n_ini + 1, n_all - label)
        vi = numpy.random.rand() * i
        regions.append((vi, L[i]))

    return sorted(regions)

def setup(target, inputFile, N):
    """Sets up the sort.
    """
    tempOutputFile = getTempFile(rootDir=target.getGlobalTempDir())
    target.addChildTargetFn(down, (inputFile, 0, os.path.getsize(inputFile), N, tempOutputFile))
    target.setFollowOnFn(cleanup, (tempOutputFile, inputFile))

def main():
    num_samples = len(cu.get_dataframe("public_leaderboard.csv"))
    predictions = np.kron(np.ones((num_samples, 5)), np.array(0.2))
    cu.write_submission("uniform_benchmark.csv", predictions)

@pytest.yield_fixture()
def to_be_deleted(workon_home):
    envname = 'to_be_deleted'
    invoke('new', envname, '-d')
    yield envname
    assert not (workon_home / envname).exists()

def lost_connection(self, p):
    """Called by the rpc protocol whenever it loses a connection."""
    self.protocols.remove(p)

def get_fieldsets(self, request, obj=None):
    """
    Check `add_fieldsets` and only display those when action is add
    """
    if not obj and hasattr(self, 'add_fieldsets'):
        return self.add_fieldsets
    return super(EnhancedModelAdmin, self).get_fieldsets(request, obj)

def __init__(self, loop, factory):
    self.loop = loop
    self.factory = factory
    self.timer = pyev.Timer(2.0, 2.0, loop, self._print_stats)

def get_report(self, config, client, options):
    username = config.get_server_username()
    if username is None:
        username = getpass.getuser()

    return ReportToDoListMine(client,
                              username=username,
                              projects=self.get_projects(config, options),
                              branches=options.branch,
                              files=options.file,
                              topics=options.topic,
                              usecolor=options.color,
                              deps=options.deps)

def NotifySearch(self):
    """ Send notification of the new Sitemap(s) to the search engines. """
    if self._suppress:
        output.Log('Search engine notification is suppressed.', 1)
        return

    output.Log('Notifying search engines.', 1)

    # Override the urllib's opener class with one that doesn't ignore 404s
    class ExceptionURLopener(urllib.FancyURLopener):
        def http_error_default(self, url, fp, errcode, errmsg, headers):
            output.Log('HTTP error %d: %s' % (errcode, errmsg), 2)
            raise IOError
        #end def http_error_default
    #end class ExceptionURLOpener

    old_opener = urllib._urlopener
    urllib._urlopener = ExceptionURLopener()

    # Build the URL we want to send in
    if self._sitemaps > 1:
        url = self._filegen.GenerateURL(SITEINDEX_SUFFIX, self._base_url)
    else:
        url = self._filegen.GenerateURL(0, self._base_url)

    # Test if we can hit it ourselves
    try:
        u = urllib.urlopen(url)
        u.close()
    except IOError:
        output.Error('When attempting to access our generated Sitemap at the '
                     'following URL:\n %s\n we failed to read it. Please '
                     'verify the store_into path you specified in\n'
                     ' your configuration file is web-accessible. Consult '
                     'the FAQ for more\n information.' % url)
        output.Warn('Proceeding to notify with an unverifiable URL.')

    # Cycle through notifications
    # To understand this, see the comment near the NOTIFICATION_SITES comment
    for ping in NOTIFICATION_SITES:
        query_map = ping[3]
        query_attr = ping[5]
        query_map[query_attr] = url
        query = urllib.urlencode(query_map)
        notify = urlparse.urlunsplit((ping[0], ping[1], ping[2], query, ping[4]))

        # Send the notification
        output.Log('Notifying: %s' % ping[1], 0)
        output.Log('Notification URL: %s' % notify, 2)
        try:
            u = urllib.urlopen(notify)
            u.read()
            u.close()
        except IOError:
            output.Warn('Cannot contact: %s' % ping[1])

    if old_opener:
        urllib._urlopener = old_opener

@web.removeslash
def get(self):
    menu = self.get_menu()

    self.json({
        'result': menu,
        'time': round(self.request.request_time(), 3),
    })

def register_publish(username, block_version, async_process=True):
    """Used in background to know if a user has been reused"""
    generic_enqueue('biicode.background.worker.worker.register_publish',
                    [username, block_version],
                    async_process=async_process)

def get_user_info(self, access_token):
    params = {"alt": "json", "access_token": access_token}
    encoded_params = urllib.urlencode(params)
    url = 'https://www.googleapis.com/oauth2/v1/userinfo?%s' % encoded_params
    res = requests.get(url)
    json = res.json()
    if not json["email"]:
        return None
    login = json["email"].split("@")[0].replace(".", "_")
    return login, json["email"]

def error(self, *args, **kwargs):
    predictions = self.predictions(*args, **kwargs)
    error = T.mean((predictions - self.target_var) ** 2)
    return error

def parse_field(self, field_types):
    attrs = self.element_start('FIELD')
    id = int(attrs['ID'])
    type = field_types[id]
    value = self.character_data()
    if type == 'Integer':
        value = int(value)
    elif type == 'Float':
        value = float(value)
    elif type == 'Address':
        value = int(value)
    elif type == 'String':
        pass
    else:
        assert False
    self.element_end('FIELD')
    return id, value

def parse_cg_entry(self, lines):
    if lines[0].startswith("["):
        self.parse_cycle_entry(lines)
    else:
        self.parse_function_entry(lines)

def ratio(self, outevent, inevent):
    assert outevent not in self
    assert inevent in self
    for function in self.functions.itervalues():
        assert outevent not in function
        assert inevent in function
        function[outevent] = ratio(function[inevent], self[inevent])
        for call in function.calls.itervalues():
            assert outevent not in call
            if inevent in call:
                call[outevent] = ratio(call[inevent], self[inevent])
    self[outevent] = 1.0

def user_popular_links(self, **kwargs):
    data = self._call_oauth2_metrics("v3/user/popular_links", dict(), **kwargs)
    return data["popular_links"]

def demultiplex_records(n, records):
    demux = [[] for _ in xrange(n)]
    for i, r in records:
        demux[i].append(r)
    return demux

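# Quick illustration of demultiplex_records above: records are (index,
# payload) pairs that get grouped into n buckets by index, e.g.
#
#   demultiplex_records(3, [(0, 'a'), (2, 'b'), (0, 'c')])
#   -> [['a', 'c'], [], ['b']]
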
def get_colors(self):
    colors = ''
    for sym in self.symbols:
        if self.symbols[sym] > 0:
            symcolors = re.sub(r'2|P|S|X', '', sym)
            for symcolor in symcolors:
                if symcolor not in colors:
                    colors += symcolor
    # sort so the order is always consistent
    return ''.join(sorted(colors))

def fetch_destination(self, address):
    recipient = unicode(address).strip()

    # alias
    match1 = re.match("^(|([\w\-\.]+)@)((\w[\w\-]+\.)+[\w\-]+)$", recipient)
    # label or alias, with address in brackets
    match2 = re.match("(.*?)\s*\<([1-9A-HJ-NP-Za-km-z]{26,})\>", recipient)

    if match1:
        dest_address = \
            self.g.wallet.get_alias(recipient, True,
                                    self.show_message, self.question)
        return dest_address
    elif match2:
        return match2.group(2)
    else:
        return recipient

def loglkl(self, cosmo, data):
    # reduced Hubble parameter
    h = cosmo.h()

    # WiggleZ specific
    if self.use_scaling:
        # angular diameter distance at this redshift, in Mpc
        d_angular = cosmo.angular_distance(self.redshift)

        # radial distance at this redshift, in Mpc, is simply 1/H (itself
        # in Mpc^-1). Hz is an array, with only one element.
        r, Hz = cosmo.z_of_r([self.redshift])
        d_radial = 1/Hz[0]

        # scaling factor = (d_angular**2 * d_radial)^(1/3) for the
        # fiducial cosmology used in the data files of the observations
        # divided by the same quantity for the cosmology we are comparing with.
        # The fiducial values are stored in the .data files for
        # each experiment, and are truly in Mpc. Beware for a potential
        # difference with CAMB conventions here.
        scaling = pow(
            (self.d_angular_fid/d_angular)**2 *
            (self.d_radial_fid/d_radial), 1./3.)
    else:
        scaling = 1

    # get rescaled values of k in 1/Mpc
    self.k = self.kh*h*scaling

    # get P(k) at right values of k, convert it to (Mpc/h)^3 and rescale it
    P_lin = np.zeros((self.k_size), 'float64')

    # If the flag use_giggleZ is set to True, the power spectrum retrieved
    # from Class will get rescaled by the fiducial power spectrum given by
    # the GiggleZ N-body simulations CITE
    if self.use_giggleZ:
        P = np.zeros((self.k_fid_size), 'float64')
        for i in range(self.k_fid_size):
            P[i] = cosmo.pk(self.k_fid[i]*h, self.redshift)
            power = 0
            # The following create a polynome in k, which coefficients are
            # stored in the .data files of the experiments.
            for j in range(6):
                power += self.giggleZ_fidpoly[j]*self.k_fid[i]**j
            # rescale P by fiducial model and get it in (Mpc/h)**3
            P[i] *= pow(10, power)*(h/scaling)**3/self.P_fid[i]

        if self.use_giggleZPP0:
            # Shot noise parameter addition to GiggleZ model. It should
            # recover the proper nuisance parameter, depending on the name.
            # I.e., Wigglez_A should recover P0_a, etc...
            tag = self.name[-2:]  # circle over "_a", "_b", etc...
            P0_value = data.mcmc_parameters['P0'+tag]['current'] *\
                data.mcmc_parameters['P0'+tag]['scale']
            P_lin = np.interp(self.kh, self.k_fid, P+P0_value)
        else:
            # get P_lin by interpolation. It is still in (Mpc/h)**3
            P_lin = np.interp(self.kh, self.k_fid, P)
    else:
        # get rescaled values of k in 1/Mpc
        self.k = self.kh*h*scaling
        # get values of P(k) in Mpc**3
        for i in range(self.k_size):
            P_lin[i] = cosmo.pk(self.k[i], self.redshift)
        # get rescaled values of P(k) in (Mpc/h)**3
        P_lin *= (h/scaling)**3

    W_P_th = np.zeros((self.n_size), 'float64')

    # starting analytic marginalisation over bias

    # Define quantities living in all the regions possible. If only a few
    # regions are selected in the .data file, many elements from these
    # arrays will stay at 0.
    P_data_large = np.zeros(
        (self.n_size*self.num_regions_used), 'float64')
    W_P_th_large = np.zeros(
        (self.n_size*self.num_regions_used), 'float64')
    cov_dat_large = np.zeros(
        (self.n_size*self.num_regions_used), 'float64')
    cov_th_large = np.zeros(
        (self.n_size*self.num_regions_used), 'float64')

    normV = 0

    # infer P_th from P_lin. It is still in (Mpc/h)**3. TODO why was it
    # called P_lin in the first place ? Couldn't we use now P_th all the
    # way ?
    P_th = P_lin

    # Loop over all the available regions
    for i_region in range(self.num_regions):
        # In each region that was selected with the array of flags
        # self.used_region, define boundaries indices, and fill in the
        # corresponding windowed power spectrum. All the unused regions
        # will still be set to zero as from the initialization, which will
        # not contribute anything in the final sum.
        if self.used_region[i_region]:
            imin = i_region*self.n_size
            imax = (i_region+1)*self.n_size-1

            W_P_th = np.dot(self.window[i_region, :], P_th)
            for i in range(self.n_size):
                P_data_large[imin+i] = self.P_obs[i_region, i]
                W_P_th_large[imin+i] = W_P_th[i]
                cov_dat_large[imin+i] = np.dot(
                    self.invcov[i_region, i, :],
                    self.P_obs[i_region, :])
                cov_th_large[imin+i] = np.dot(
                    self.invcov[i_region, i, :],
                    W_P_th[:])

    # Explain what it is TODO
    normV += np.dot(W_P_th_large, cov_th_large)
    # Sort of bias TODO ?
    b_out = np.sum(W_P_th_large*cov_dat_large) / \
        np.sum(W_P_th_large*cov_th_large)

    # Explain this formula better, link to article ?
    chisq = np.dot(P_data_large, cov_dat_large) - \
        np.dot(W_P_th_large, cov_dat_large)**2/normV

    return -chisq/2

def set_parameters(self, host, port, protocol, proxy, auto_connect):
    self.config.set_key('auto_cycle', auto_connect, True)
    self.config.set_key("proxy", proxy, True)
    self.config.set_key("protocol", protocol, True)
    server = ':'.join([host, port, protocol])
    self.config.set_key("server", server, True)

    if self.proxy != proxy or self.protocol != protocol:
        self.proxy = proxy
        self.protocol = protocol
        for i in self.interfaces.values():
            i.stop()
        if auto_connect:
            #self.interface = None
            return

    if auto_connect:
        if not self.interface.is_connected:
            self.switch_to_random_interface()
        else:
            if self.server_lag > 0:
                self.stop_interface()
    else:
        self.set_server(server)

def check_result(val, func, args):
    if val == 0:
        raise ValueError
    else:
        return ctypes.c_void_p(val)

def __str__(self):
    '''String representation of this collection.'''
    result = []
    for key, value in self:
        result.append('%s = %s' % (key, repr(value)))
    result.sort()
    return '\n'.join(result)

def __init__(self, owner=None):
    self.owner = owner
    self.editing = False

    QTreeWidget.__init__(self, owner)
    self.setColumnCount(3)
    self.setHeaderLabels([_("Address"), _("Label"), _("Used")])
    self.setIndentation(0)

    self.hide_used = True
    self.setColumnHidden(2, True)

def get_feature_size(self):
    size = 0
    for vect_rule in self.vect_rules:
        vect = vect_rule.get('vectorizer')
        size += len(vect.vocabulary_)
    return size

def to_str(self):
    for (path, fp) in self.fingerprints.fingerprints.iteritems():
        if not fp or not fp.md5:
            raise AssertionError(
                "missing Fingerprint or MD5 when serializing FingerprintList: %s: %s" % (path, fp))
    return values_to_str([("checkouts", self.checkouts.to_str()),
                          ("fingerprints", self.fingerprints.to_str())],
                         comment="Checkout state for this working directory, for all currently checked out scopes.")

def build_supervised_model(self, n_features, n_classes):
    """ Creates the computational graph.
    This graph is intended to be created for finetuning,
    i.e. after unsupervised pretraining.
    :param n_features: Number of features.
    :param n_classes: number of classes.
    :return: self
    """
    self._create_placeholders(n_features, n_classes)
    self._create_variables(n_features)

    next_train = self._create_encoding_layers()
    self._create_softmax_layer(next_train, n_classes)

    self.cost = self._create_cost_function_node(self.finetune_loss_func,
                                                self.softmax_out,
                                                self.input_labels)
    self.train_step = self._create_train_step_node(self.finetune_opt,
                                                   self.finetune_learning_rate,
                                                   self.cost,
                                                   self.momentum)

    self._create_supervised_test_node()

def method_is_explictly_overwritten(self):
    view_func = current_app.view_functions[request.endpoint]
    return (hasattr(view_func, '_explict_rule_set')
            and view_func._explict_rule_set is True)

def test_country_preferences(self):
    '''It should save a school's country preferences.'''
    c1 = TestCountries.new_country().id
    c2 = TestCountries.new_country().id
    params = self.get_params(countrypreferences=[0, c1, c2, 0, c1])
    response = self.get_response(params=params)

    self.assertEqual(response.data['countrypreferences'], [c1, c2])

    school_id = response.data['id']
    school = School.objects.get(id=school_id)
    self.assertEqual([c1, c2], school.country_preference_ids)

def matches_subject_class(self, subject):
    """
    subject can be either Classes or instances of classes
    self.subjects can either be string or Classes
    """
    for sub in self.subjects:
        if inspect.isclass(sub):
            if inspect.isclass(subject):
                return issubclass(subject, sub)
            else:
                return isinstance(subject, sub)
        elif isinstance(sub, string_types):
            if inspect.isclass(subject):
                return subject.__name__ == sub
            else:
                return subject.__class__.__name__ == sub
    return False

def run_master(self):
    """
    Runs the master service if it is not running
    :return:
    """
    self._logger.info('Running master on {}'.format(self._master_url))
    if self.is_master_up():
        return
    cmd = [self._main_executable, 'master', '--port', self._port(self._master_url)]
    self._run_service(cmd, self._master_url)

def test_start_subjob_raises_if_slave_is_shutdown(self):
    slave = self._create_slave()
    slave._is_in_shutdown_mode = True

    self.assertRaises(SlaveMarkedForShutdownError, slave.start_subjob, Mock())

def test_send_template_without_from_field(self):
    msg = mail.EmailMessage('Subject', 'Text Body',
                            'from@example.com', ['to@example.com'])
    msg.template_name = "PERSONALIZED_SPECIALS"
    msg.use_template_from = True
    msg.send()
    self.assert_mandrill_called("/messages/send-template.json")
    data = self.get_api_call_data()
    self.assertEqual(data['template_name'], "PERSONALIZED_SPECIALS")
    self.assertFalse('from_email' in data['message'])
    self.assertFalse('from_name' in data['message'])

def test_func(self, user):
    raise NotImplementedError(
        '{0} is missing implementation of the '
        'test_func method. You should write one.'.format(
            self.__class__.__name__))

def test_save_blank_object(self):
    """Test that JSON model can save a blank object as none"""
    model = JsonModel()
    self.assertEqual(model.empty_default, {})

    model.save()
    self.assertEqual(model.empty_default, {})

    model1 = JsonModel(empty_default={"hey": "now"})
    self.assertEqual(model1.empty_default, {"hey": "now"})

    model1.save()
    self.assertEqual(model1.empty_default, {"hey": "now"})

def __init__(self, attributes):
    AttributeGetter.__init__(self, attributes)

    if self.settlement_amount is not None:
        self.settlement_amount = Decimal(self.settlement_amount)
    if self.settlement_currency_exchange_rate is not None:
        self.settlement_currency_exchange_rate = Decimal(
            self.settlement_currency_exchange_rate)

@override_settings(DJRILL_WEBHOOK_SECRET='abc123')
def test_default_secret_name(self):
    response = self.client.head('/webhook/?secret=abc123')
    self.assertEqual(response.status_code, 200)

def starts_with(self, value):
    return Search.Node(self.name, {"starts_with": value})

def test_gauge(self):
    """Tests the result of the gauge template tag."""
    with patch("redis_metrics.templatetags.redis_metric_tags.get_r") as mock_r:
        inst = mock_r.return_value
        inst.get_gauge.return_value = 100

        size = 50
        maximum = 200
        result = taglib.gauge("test-slug", maximum, size)
        expected_result = {
            'slug': "test-slug",
            'current_value': 100,
            'max_value': maximum,
            'size': size,
            'diff': maximum - 100
        }
        self.assertEqual(result, expected_result)
        mock_r.assert_called_once_with()
        inst.get_gauge.assert_called_once_with("test-slug")

@xform
def inline(ast, M):
    return Front.procedure_prune(Front.inline(ast, M), M.entry_points)

def delete(self, *args, **kwargs):
    """
    Delete object and redirect user to configured success URL.

    :return:
        Redirect to URL returned by
        :py:meth:`~.DeletionMixin.get_success_url`.
    """
    self.object.delete()
    return redirect(self.get_success_url())

def test_user_gets_role_and_id(self):
    role = Role(name='administrator')
    self.assertTrue(role.id is None)
    user = User(email='b2@gmail.com', password='1234', role=role)
    self.assertTrue(user.id is None)

    db.session.add(user)
    db.session.commit()

    self.assertFalse(role.id is None)
    self.assertFalse(user.id is None)
    self.assertTrue(user.role_id == role.id)
    self.assertTrue(user.is_admin())

def test_text_node_is(self):
    credit_card = Customer.create({
        "credit_card": {
            "number": "4111111111111111",
            "expiration_date": "05/2010",
        }
    }).customer.credit_cards[0]

    trial_subscription = Subscription.create({
        "payment_method_token": credit_card.token,
        "plan_id": TestHelper.trial_plan["id"]
    }).subscription

    trialless_subscription = Subscription.create({
        "payment_method_token": credit_card.token,
        "plan_id": TestHelper.trialless_plan["id"]
    }).subscription

    collection = Subscription.search([
        SubscriptionSearch.plan_id == "integration_trial_plan"
    ])

    self.assertTrue(TestHelper.includes(collection, trial_subscription))
    self.assertFalse(TestHelper.includes(collection, trialless_subscription))

def __repr__(self, d=2):
    """ return a string representation; 'd' determines amount
    of significant digits to display
    """
    return "[%.*f, %.*f]" % (d, self.x, d, self.y)

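# For example, with the default d=2, a point at x=1.2345, y=6.789 is
# rendered as "[1.23, 6.79]".
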
def list_available_solution_stacks(self):
    """
    Returns a list of available solution stacks
    """
    stacks = self.ebs.list_available_solution_stacks()
    return stacks['ListAvailableSolutionStacksResponse']['ListAvailableSolutionStacksResult']['SolutionStacks']

def grep_word_from_files():
    word = sys.argv[1]
    for filename in sys.argv[2:]:
        with open(filename) as file:
            for lino, line in enumerate(file, start=1):
                if word in line:
                    print("{0}:{1}:{2:.40}".format(filename, lino, line.rstrip()))

def __init__(self, gpg):
    self.gpg = gpg
    self.type = None
    self.fingerprint = None