def test_network_urls():
    """Test various non-socket URLs."""
    actual = parse_url('redis://user:pass@localhost:6379/1')
    assert dict(password='pass', host='localhost', port=6379, db=1) == actual

    actual = parse_url('redis://192.168.1.100:8000/1')
    assert dict(host='192.168.1.100', port=8000, db=1) == actual

    actual = parse_url('redis://redis.myserver.com:8000')
    assert dict(host='redis.myserver.com', port=8000) == actual

    actual = parse_url('redis://user:pass@localhost')
    assert dict(password='pass', host='localhost') == actual

    actual = parse_url('redis://localhost/2')
    assert dict(host='localhost', db=2) == actual

    actual = parse_url('redis://localhost/')
    assert dict(host='localhost') == actual

    actual = parse_url('redis://localhost')
    assert dict(host='localhost') == actual
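# For context, a minimal parse_url sketch that would satisfy the asserts
# above -- a hypothetical stand-in, not the library's actual implementation.
from urlparse import urlparse

def _parse_url_sketch(url):
    parsed = urlparse(url)
    options = {}
    if parsed.hostname:
        options['host'] = parsed.hostname
    if parsed.port:
        options['port'] = parsed.port
    if parsed.password:
        options['password'] = parsed.password
    db = parsed.path.lstrip('/')
    if db:
        options['db'] = int(db)
    return options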
def __repr__(self):
    kls = self.__class__
    return '<%s.%s object at 0x%x type=%s>' % (
        kls.__module__, kls.__name__, id(self), self.type)
def create_connect_args(self, url):
    opts = url.translate_connect_args(username='user')
    if 'port' in opts:
        opts['port'] = int(opts['port'])
    else:
        opts['port'] = 5432
    opts.update(url.query)
    return ([], opts)
def __setitem__(self, index, entity):
    if isinstance(index, slice):
        step = index.step or 1
        start = index.start or 0
        if start < 0:
            start += len(self)
        stop = index.stop or len(self)
        if stop < 0:
            stop += len(self)
        for i in range(start, stop, step):
            self.__setitem__(i, entity[i])
    else:
        self._order_entity(index, entity, True)
        super(OrderingList, self).__setitem__(index, entity)
def _bindparam_requires_quotes(self, value):
    """Return True if the given identifier requires quoting."""
    lc_value = value.lower()
    return (lc_value in self.reserved_words
            or value[0] in self.illegal_initial_characters
            or not self.legal_characters.match(util.text_type(value))
            )
def _get_server_version_info(self, connection):
    vers = connection.scalar("select @@version_number")
    # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
    # (12, 5, 0, 0)
    return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)
def note_session_activity(req):
    """
    Add a session 'timeout' whenever a user performs an action. This is
    required to keep dormant (not yet expired, but inactive) sessions
    from maxing out the concurrent session limit.

    Parameters:
    req - An HttpRequest, with a session attribute
    """
    # The timeout in this timedelta specifies the number of minutes.
    window = timedelta(0, 0, 0, 0, settings.SESSION_TIMEOUT)
    req.session['activity_time'] = datetime.now() + window
def append_prefix(self, clause):
    """append the given columns clause prefix expression to this select()
    construct.

    This is an **in-place** mutation method; the
    :meth:`~.Select.prefix_with` method is preferred, as it provides
    standard :term:`method chaining`.

    """
    clause = _literal_as_text(clause)
    self._prefixes = self._prefixes + (clause,)
@util.memoized_property
def should_autocommit(self):
    autocommit = self.execution_options.get(
        'autocommit',
        not self.compiled and self.statement
        and expression.PARSE_AUTOCOMMIT
        or False)

    if autocommit is expression.PARSE_AUTOCOMMIT:
        return self.should_autocommit_text(self.unicode_statement)
    else:
        return autocommit
def init_class_attribute(self, mapper):
    self.is_class_level = True
    coltype = self.columns[0].type
    # TODO: check all columns ? check for foreign key as well?
    active_history = self.parent_property.active_history or \
        self.columns[0].primary_key or \
        mapper.version_id_col in set(self.columns)

    _register_attribute(
        self, mapper, useobject=False,
        compare_function=coltype.compare_values,
        active_history=active_history
    )
def __init__(self, Malware_Instance=None):
    if Malware_Instance is None:
        self.Malware_Instance = []
    else:
        self.Malware_Instance = Malware_Instance
def exportChildren(self, lwrite, level, nsmap, namespace_=XML_NS,
                   name_='TermsOfUseMarkingStructureType',
                   fromsubclass_=False, pretty_print=True):
    super(TermsOfUseMarkingStructureType, self).exportChildren(
        lwrite, level, nsmap, namespace_, name_, True,
        pretty_print=pretty_print)
    if pretty_print:
        eol_ = '\n'
    else:
        eol_ = ''
    if self.Terms_Of_Use is not None:
        showIndent(lwrite, level, pretty_print)
        lwrite('<%s:Terms_Of_Use>%s</%s:Terms_Of_Use>%s'
               % (nsmap[namespace_], quote_xml(self.Terms_Of_Use),
                  nsmap[namespace_], eol_))
def __add_keys_to(self, result):
    try:
        keyed_result = self.service.get_storage_account_keys(
            result.service_name
        )
    except Exception as e:
        raise AzureStorageAccountShowError(
            '%s: %s' % (type(e).__name__, format(e))
        )
    result.storage_service_keys = keyed_result.storage_service_keys
    return result
def create_expected_endpoint_output(self):
    return {
        'idle-timeout': '%d minutes' % self.idle_timeout,
        'instance-port': self.port,
        'name': self.endpoint_name,
        'port': self.port,
        'protocol': self.protocol
    }
def _check_spatial_resource(self, url, name, dictionary, update=False):
    """
    This method will check geoserver for the existence of an object.
    It will create the object if it doesn't exist.

    @param url: The URL of the resource.
    @param name: The name of the resource.
    @param dictionary: A dictionary of settings to the resource.
    @keyword type_name: Optional. Name of the type, if different from the name.
    @keyword update: Optional. Update the featuretype if it exists?
    @returns: A flag indicating if the configuration call completed successfully.
    """
    if self._rest_check('%s/%s.json' % (url, name)):
        if update:
            if not self._rest_config(
                    'PUT', url, data=json.dumps(dictionary)):
                return False
    else:
        if not self._rest_config(
                'POST', url, data=json.dumps(dictionary)):
            return False

    return True
def _parse_collected_classes(self):
    collected = self._collected_classes
    entity_klasses = (stix.Entity, Entity)

    # Generator which yields all stix.Entity and mixbox.Entity subclasses
    # that were collected.
    entity_subclasses = (
        klass for klass in collected if issubclass(klass, entity_klasses)
    )

    # Local function for adding namespaces that have no defined prefix
    # mapping at the class-level. These will be resolved in the
    # self._finalize_namespaces() function.
    no_alias = self._collected_namespaces[None].add

    for klass in entity_subclasses:
        # Prevents exception being raised if/when
        # collections.MutableSequence or another base class appears in the
        # MRO.
        ns = getattr(klass, "_namespace", None)
        if not ns:
            continue

        # cybox.objects.* ObjectProperties derivations have an _XSI_NS
        # class-level attribute which holds the namespace alias to be
        # used for its namespace.
        alias = getattr(klass, "_XSI_NS", None)
        if alias:
            self._collected_namespaces[alias] = ns
            continue

        # Many stix/cybox entity classes have an _XSI_TYPE attribute that
        # contains a `prefix:namespace` formatted QNAME for the
        # associated xsi:type.
        xsi_type = getattr(klass, "_XSI_TYPE", None)
        if not xsi_type:
            no_alias(ns)
            continue

        # Attempt to split the xsi:type attribute value into the ns alias
        # and the typename.
        typeinfo = xsi_type.split(":")
        if len(typeinfo) == 2:
            self._collected_namespaces[typeinfo[0]] = ns
        else:
            no_alias(ns)
def get_xmlns_str(self, ns_dict):
    pairs = sorted(ns_dict.iteritems())
    return "\n\t".join(
        'xmlns:%s="%s"' % (alias, ns) for alias, ns in pairs
    )
def get_field(self, name):
    names = name.split('.', 1)
    if not self._digit_re.match(names[0]):
        # XXX is this needed?
        return None
    field = self.field(name=names[0])
    if len(names) > 1:
        return field.get_field(names[1])
    return field
def multiply(traj):
    """Example of a sophisticated simulation that involves multiplying two values.

    :param traj:
        Trajectory containing the parameters in a particular combination,
        it also serves as a container for results.
    """
    z = traj.mylink1 * traj.mylink2
    # And again we now can also use the different names
    # due to the creation of links
    traj.f_add_result('runs.$.z', z, comment='Result of our simulation!')
def select_blogs_by_id_sql(newsid):
    c = g.db.cursor()
    sql = "select * from blog where newsid =%s" % (newsid)
    c.execute(sql)
    results = c.fetchone()
    index = results[0]
    sql = "select * from blog where id< %d and id>%d" % (index, index - 9)
    c.execute(sql)
    results = c.fetchall()
    blogs = []
    blogjsonlist = {}
    for index in range(8):
        blogs.append(json.loads(results[7 - index][2]))
    blogjsonlist['nextId'] = results[0][1]
    blogjsonlist['newslist'] = blogs
    blogs = json.dumps(blogjsonlist).decode("unicode-escape")
    return blogs
def get_dict(self):
    n = len(self)
    keys = (c_void_p * n)()
    values = (c_void_p * n)()
    coref.CFDictionaryGetKeysAndValues(self, keys, values)
    d = dict()
    for i in xrange(n):
        d[CFType.from_handle(keys[i])] = CFType.from_handle(values[i])
    return d
def test_clean_value(self):
    class AssertConv(convs.Int):
        def to_python(conv, value):
            value = convs.Int.to_python(conv, value)
            if value is not None:
                field = conv.field.form.get_field('num')
                self.assertEqual(field.clean_value, value)
            return value

    class F(Form):
        fields = [FieldBlock('', fields=[
            Field('num', conv=convs.Int()),
            Field('f2', conv=AssertConv())
        ])]

    env = AppEnvironment.create()
    form = F(env)
    self.assertEqual(form.get_field('num').clean_value, None)

    form = F(env, initial={'num': 2})
    self.assertEqual(form.get_field('num').clean_value, 2)

    form = F(env)
    form.accept({'num': '4', 'f2': '4'})
    self.assertEqual(form.get_field('num').clean_value, 4)
    self.assertEqual(form.get_field('f2').clean_value, 4)
def wrapper(cls):
    cls_dict = cls.__dict__.copy()
    slots = cls_dict.get('__slots__', None)
    if slots is not None:
        if isinstance(slots, compat.base_type):
            slots = (slots,)
        for slot in slots:
            cls_dict.pop(slot)
    cls_dict.pop('__dict__', None)
    cls_dict.pop('__weakref__', None)
    return metaclass(cls.__name__, cls.__bases__, cls_dict)
def run(self, title, body, topic=None, status=0):
    if topic:
        topic = self._convert_slug(topic)
        path = '/topics/%s/articles' % topic
    else:
        path = '/articles'
    payload = self._create_article(title=title, body=body, status=status)
    response = self._api_post(path, json=payload)
    return response
def multiply(traj):
    """Sophisticated simulation of multiplication"""
    z = traj.x * traj.y
    traj.f_add_result('z', z=z, comment='I am the product of two reals!')
def __init__(self, file_handle):
    for line in file_handle:
        line = line.strip()
        if len(line) == 0 or line[0] == '#':
            continue
        key, value = line.split('=')
        self[key] = value
def _dispatch_application_normal(self, payload, attempt_no=0):
    '''
    Dispatches WEB_APP_NORMAL_TRIGGER_REF if the application health_status
    is 'green'.
    '''
    # basic guard to avoid queuing up forever.
    if attempt_no == 10:
        self._log.warning('Abandoning WEB_APP_NORMAL_TRIGGER_REF dispatch. '
                          'Payload %s', payload)
        return
    try:
        application = self._get_application(payload['alert']['application_name'])
        if application['health_status'] in ['green']:
            self._dispatch_trigger(WEB_APP_NORMAL_TRIGGER_REF, payload)
        else:
            self._log.info('Application %s has state %s. Rescheduling normal check.',
                           application['name'], application['health_status'])
            eventlet.spawn_after(self._normal_report_delay,
                                 self._dispatch_application_normal,
                                 payload, attempt_no + 1)
    except Exception:
        self._log.exception('Failed delay dispatch. Payload %s.', payload)
def tearDown(self):
    if hasattr(self, 'env') and hasattr(self.env, 'f_disable_logging'):
        self.env.f_disable_logging()
    self.clear_handlers()
    remove_data()
def initTree(self, notepath, parent):
    if parent == "":
        # site wide index page
        htmlfile = os.path.join(self.sitepath, "index.html")
    else:
        # append subpages to page
        htmlfile = os.path.join(self.htmlpath, parent + ".html")
    html = QFile(htmlfile)
    html.open(QIODevice.Append)
    savestream = QTextStream(html)

    noteDir = QDir(notepath)
    notesList = noteDir.entryInfoList(['*.md', '*.mkd', '*.markdown'],
                                      QDir.NoFilter,
                                      QDir.Name | QDir.IgnoreCase)
    nl = [note.completeBaseName() for note in notesList]
    noduplicate = list(set(nl))
    noduplicate.sort(key=str.lower)
    htmlDir = os.path.join(self.htmlpath, parent)
    if len(noduplicate) > 0 and not QDir(htmlDir).exists():
        QDir().mkdir(htmlDir)

    for name in noduplicate:
        path = notepath + '/' + name
        filename = os.path.join(parent, name)
        for ext in self.extName:
            notefile = os.path.join(self.notepath, filename + ext)
            if QFile.exists(notefile):
                break
        htmlfile = os.path.join(self.htmlpath, filename + ".html")
        #print(notefile, htmlfile)
        self.convert(notefile, htmlfile, os.path.join(parent, name))
        self.initTree(path, os.path.join(parent, name))
        # append subpages to page
        savestream << '<li><a href="/notes/' + filename + '.html">' + name + '</a></li>'
    html.close()
def _get_average_contract_interval(self):
    with self.contracts_lock:
        if (len(self.contracts) > 0):
            total = sum([c.estimated_interval.total_seconds()
                         for c in self.contracts.values()])
            return float(total) / float(len(self.contracts))
        else:
            return self.estimated_contract_interval
def passed(self, msg, *args, **kwargs):
    self.info('PASS: ' + msg, *args, **kwargs)

    if self.isEnabledFor(logging.DEBUG) and len(getLastFoundImages()) != 0:
        # source image
        self.html_img("Source Image",
                      common.cfgImageLibrary + '/' + getLastFoundImage())
        # matched image
        last_match = SCREEN.getLastMatch()
        region = (last_match.getX(), last_match.getY(),
                  last_match.getW(), last_match.getH())
        self.screenshot(msg="Best Matches", folder='results/matches/',
                        region=region)
        # score of match
        self.info("Matched with score: %s" % last_match.getScore())
def test_documentation_element_reference_time(self):
    xml = '<documentation type="Reference Time">' \
          '2015-05-28T12:00:00Z</documentation>'
    element = self._make_element(xml)
    md = TDSCatalogMetadata(element).metadata
    assert "documentation" in md
    assert "Reference Time" in md["documentation"]
    assert len(md["documentation"]["Reference Time"]) > 0
    for entry in md["documentation"]["Reference Time"]:
        assert entry != []
def decorator(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        cache_key = 'c:' + key_func(func, args, kwargs, extra)
        try:
            result = self.get(cache_key)
        except CacheMiss:
            result = func(*args, **kwargs)
            self.set(cache_key, result, timeout)
        return result

    def invalidate(*args, **kwargs):
        cache_key = 'c:' + key_func(func, args, kwargs, extra)
        self.delete(cache_key)
    wrapper.invalidate = invalidate

    def key(*args, **kwargs):
        cache_key = 'c:' + key_func(func, args, kwargs, extra)
        return CacheKey.make(cache_key, cache=self, timeout=timeout)
    wrapper.key = key

    return wrapper
def interfaceVersion(self):
    """
    Must return 1.
    """
    try:
        return 1
    except Exception as e:
        self.logToConsole("interfaceVersion: %s" % str(e))
def setUp(self):
    if not self.udt_support:
        self.skipTest("testing with Cassandra < 2.2, can't test with UDT's")

    super(UDTTest, self).setUp()
    self.session.execute('TRUNCATE %s' % self.table)
def build_command(self, cmd):
    return self.bin + self.opts + [c for c in cmd if c]
def on_tag(self, repo, tag):
    exit, stdout, stderr = self.git(['checkout', 'tags/%s' % tag], cwd=repo)
    if exit == 0:
        panel = self.window.get_output_panel('git-checkout')
        panel.run_command('git_panel_write', {'content': stderr})
        self.window.run_command('show_panel',
                                {'panel': 'output.git-checkout'})
    else:
        sublime.error_message(self.format_error_message(stderr))
    self.window.run_command('git_status', {'refresh_only': True})
def setupPrivateRun(self, privateRunDir):
    """Sets up a private /run (& /var/run) directory for the node
       privateRunDir: None/True for default source, else path for source"""

    # Handle the input provided (either a bool or a path)
    if isinstance(privateRunDir, bool):
        if privateRunDir is False:
            return
        privateRunDir = '/run/mininext/%s' % (self.name)
    elif not isinstance(privateRunDir, basestring):
        raise Exception("Invalid parameter for privateRunDir\n")

    # Create the PathProperties and MountProperties objects
    logPathProperties = PathProperties(path=privateRunDir,
                                       perms=getObjectPerms('/run'),
                                       create=True,
                                       createRecursive=True,
                                       setPerms=False)
    logMount = MountProperties(target='/run', source=logPathProperties)

    # Pass the created mountPoint off...
    self.setupMountPoint(logMount)

    # Mark the node as having private run space
    self.hasPrivateRun = True
def toStrNormal(self):
    res = "#EXTM3U\n"
    res += "#EXT-X-VERSION:" + str(self.version) + "\n"
    res += "#EXT-X-TARGETDURATION:" + str(self.targetDuration) + "\n"
    res += "#EXT-X-MEDIA-SEQUENCE:" + str(self.mediaSequence) + "\n"
    if self.encryption != None:
        res += "#EXT-X-KEY:METHOD=" + self.encryption.method + \
               ",URI=" + self.encryption.uri + '\n'
    for item in self.items:
        res += "#EXTINF:" + str(item.dur) + ",\n"
        res += item.relativeUrl + "\n"
    return res
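# For reference, with version=3, targetDuration=10, mediaSequence=0, no
# encryption and two 9.0-second segments (illustrative values), the method
# above would emit:
#
#   #EXTM3U
#   #EXT-X-VERSION:3
#   #EXT-X-TARGETDURATION:10
#   #EXT-X-MEDIA-SEQUENCE:0
#   #EXTINF:9.0,
#   seg0.ts
#   #EXTINF:9.0,
#   seg1.ts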
def __exit__(self, *exc_info):
    """Exit the context, pop this async from the executing context stack.
    """
    local_context = _local.get_local_context()
    last = local_context._executing_async.pop()
    if last is not self._async:
        local_context._executing_async.append(last)
        raise errors.CorruptContextError(*exc_info)

    return False
def testIncludeWithPath(self):
    cellVariables = getid1(
        '/Cell:%(cellName)s/VariableMap:/' % topology
    )
    variableCount = len(cellVariables.entries)
    importConfigurationManifest(
        'wdrtest/manifests/imports/include_with_path.wdrc', topology
    )
    self.assertEquals(len(cellVariables.entries), variableCount + 3)
def adminTaskAsDict(adminTaskList):
    result = {}
    for (key, valueQuoted, valueNotQuoted) in _itemPattern.findall(
            adminTaskList
    ):
        result[key] = valueQuoted or valueNotQuoted
    return result
def test_tasks_with_small_batch_size(self):
    """Ensure that when a batch_size parameter is smaller than 100,
    that the correct number of batches are created with the tasks in
    them.
    """
    from furious.context.context import _task_batcher

    tasks = 'a' * 101
    batch_size = 30

    result = list(_task_batcher(tasks, batch_size=batch_size))

    self.assertEqual(4, len(result))
    self.assertEqual(30, len(result[0]))
    self.assertEqual(30, len(result[1]))
    self.assertEqual(30, len(result[2]))
    self.assertEqual(11, len(result[3]))
def generate_operation_metric_for_regionserver(regionserver):
    task = regionserver.task
    metric = []
    endpoint = form_perf_counter_endpoint_name(regionserver.task)
    group = 'HBase'
    for operationName in metric_view_config.REGION_SERVER_OPERATION_VIEW_CONFIG:
        counter = []
        # first append operationName
        counter.append(operationName)
        # then, append counter for NumOps
        num_ops_counter = {}
        num_ops_counter['title'] = operationName + '_histogram_num_ops'
        num_ops_counter['query'] = []
        num_ops_counter['query'].append(
            make_ops_metric_query(endpoint, group, num_ops_counter['title']))
        counter.append(num_ops_counter)

        # lastly, append counters for percentile
        percentile_counter = {}
        percentile_counter['title'] = 'Percentile-Comparision'
        percentile_counter['query'] = form_percentile_counter_name(
            endpoint, group, operationName)
        counter.append(percentile_counter)

        metric.append(counter)
    return metric
def reset(self):
    value = self.value
    if value is not None:
        if value is True or value is False:
            self.value = False
        else:
            self.value = 0

    if self.ref is not None:
        self.ref.reset()
def test_create_schema(tmpdir):
    db_path = os.path.join(tmpdir.strpath, 'db.db')
    with sqlite3.connect(db_path) as db:
        create_schema(db)

        results = db.execute(
            "SELECT name FROM sqlite_master WHERE type = 'table'"
        ).fetchall()
        table_names = [table_name for table_name, in results]

        assert 'metric_names' in table_names
        assert 'metric_data' in table_names
def authorize(self, auth_info):
    username, password = auth_info
    if self.dict.has_key(username):
        stored_password = self.dict[username]
        if stored_password.startswith('{SHA}'):
            password_hash = sha1(password).hexdigest()
            return stored_password[5:] == password_hash
        else:
            return stored_password == password
    else:
        return False
def run_subcommand(self, arguments):
    """Load the configuration, update it with the arguments and options
    specified on the command-line and then call the run method implemented
    by each sub-command.
    """
    # Expand path of the command-line specified config file, if any
    if arguments.config_file is not None:
        arguments.config_file = expand_path(arguments.config_file)

    # Load configurations into a Configuration named tuple
    try:
        configs = load_configuration(arguments.config_file)
    except PyleusError as e:
        self.error(e)

    configs = _ensure_storm_path_in_configs(configs)

    # Update configuration with command line values
    configs = update_configuration(configs, vars(arguments))

    try:
        self.run(configs)
    except PyleusError as e:
        self.error(e)
def receive(self, sig, frame):
    if sig not in self._signals_recvd:
        self._signals_recvd.append(sig)
def _parse_hosts_list(self, job_dict, job_name):
    '''
    Parse the hosts list for job
    '''
    self.hosts = {}
    self.hostnames = {}
    for name, value in job_dict.iteritems():
        reg_expr = HOST_RULE_REGEX.match(name)
        if not reg_expr:
            continue
        host_id = int(reg_expr.group("id"))
        self.hosts[host_id] = ServiceConfig.Jobs.Hosts(value)

        ip = self.hosts[host_id].ip
        try:
            self.hostnames[host_id] = socket.gethostbyaddr(ip)[0]
        except:
            self.hostnames[host_id] = ip

        instance_num = self.hosts[host_id].instance_num
        if instance_num > 1 and job_name not in MULTIPLE_INSTANCES_JOBS:
            Log.print_critical(
                "The job %s doesn't support for multiple instances"
                " on the same host. Please check your config." % job_name)
def __repr__(self):
    return '<%s.%s instance at %s: %s>' % (
        self.__class__.__module__,
        self.__class__.__name__,
        hex(id(self)),
        self.command
    )
def parse_pattern(self, pattern, size, center):
    with open(pattern) as handle:
        rows = [row.rstrip() for row in handle]

    # Check that the pattern fits in the grid
    height = len(rows)
    width = max(len(row) for row in rows)
    if height > size:
        raise CommandError("Too many rows in pattern. Increase size?")
    if width > size:
        raise CommandError("Too many columns in pattern. Increase size?")

    # Center pattern vertically and horizontally
    if center:
        top = (size - height) // 2
        rows = [''] * top + rows
        left = (size - width) // 2
        prefix = ' ' * left
        rows = [prefix + row for row in rows]

    # Add padding to match the grid size
    rows += [''] * (size - len(rows))
    rows = [row.ljust(size) for row in rows]

    # Convert to booleans
    return [[x not in '. ' for x in row] for row in rows]
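# Hypothetical usage of parse_pattern: a 3x3 glider centered in a 5x5 grid
# (assumes `cmd` is an instance of the command class that defines it).
with open('glider.txt', 'w') as fh:
    fh.write('.O.\n..O\nOOO\n')
grid = cmd.parse_pattern('glider.txt', size=5, center=True)
assert grid[1][2] and grid[2][3] and all(grid[3][1:4])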
def process_config(self):
    super(RedisCollector, self).process_config()
    instance_list = self.config['instances']
    # configobj make str of single-element list, let's convert
    if isinstance(instance_list, basestring):
        instance_list = [instance_list]

    # process original single redis instance
    if len(instance_list) == 0:
        host = self.config['host']
        port = int(self.config['port'])
        auth = self.config['auth']
        if auth is not None:
            instance_list.append('%s:%d/%s' % (host, port, auth))
        else:
            instance_list.append('%s:%d' % (host, port))

    self.instances = {}
    for instance in instance_list:
        if '@' in instance:
            (nickname, hostport) = instance.split('@', 1)
        else:
            nickname = None
            hostport = instance

        if hostport.startswith(SOCKET_PREFIX):
            unix_socket, __, port_auth = hostport[
                SOCKET_PREFIX_LEN:].partition(':')
            auth = port_auth.partition('/')[2] or None

            if nickname is None:
                nickname = os.path.splitext(
                    os.path.basename(unix_socket))[0]
            self.instances[nickname] = (self._DEFAULT_HOST,
                                        self._DEFAULT_PORT,
                                        unix_socket,
                                        auth)
        else:
            if '/' in hostport:
                parts = hostport.split('/')
                hostport = parts[0]
                auth = parts[1]
            else:
                auth = None

            if ':' in hostport:
                if hostport[0] == ':':
                    host = self._DEFAULT_HOST
                    port = int(hostport[1:])
                else:
                    parts = hostport.split(':')
                    host = parts[0]
                    port = int(parts[1])
            else:
                host = hostport
                port = self._DEFAULT_PORT

            if nickname is None:
                nickname = str(port)
            self.instances[nickname] = (host, port, None, auth)

    self.log.debug("Configured instances: %s" % self.instances.items())
def test_enqueue_user_xmpp_with_string(self):
    fake_domain = "fakedomain.com"
    fake_user = "fakeuser"
    self.MockedSettings['xmpp']['default_domain'] = fake_domain
    with mock.patch.dict(pushmanager.core.xmppclient.Settings,
                         self.MockedSettings):
        with mock.patch.object(pushmanager.core.xmppclient.XMPPQueue,
                               "enqueue_xmpp") as mock_enqueue_xmpp:
            pushmanager.core.xmppclient.XMPPQueue.enqueue_user_xmpp(
                fake_user, 'message')
            mock_enqueue_xmpp.assert_called_with(
                "%s@%s" % (fake_user, fake_domain), 'message')
def autoNorm(dataSet):
    minVals = dataSet.min(0)
    maxVals = dataSet.max(0)
    ranges = maxVals - minVals
    normDataSet = zeros(shape(dataSet))
    m = dataSet.shape[0]
    normDataSet = dataSet - tile(minVals, (m, 1))
    normDataSet = normDataSet / tile(ranges, (m, 1))   # element wise divide
    return normDataSet, ranges, minVals
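# A quick worked example of the min-max scaling above (autoNorm assumes the
# numpy helpers zeros/shape/tile are in scope, e.g. via `from numpy import *`).
from numpy import array
demo = array([[1.0, 10.0], [2.0, 20.0], [3.0, 30.0]])
norm_demo, demo_ranges, demo_min = autoNorm(demo)
# rows map to 0.0, 0.5, 1.0; demo_ranges == [2., 20.]; demo_min == [1., 10.]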
def readS16(self, register, little_endian=True):
    """Read a signed 16-bit value from the specified register, with the
    specified endianness (default little endian, or least significant byte
    first)."""
    result = self.readU16(register, little_endian)
    if result > 32767:
        result -= 65536
    return result
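# The sign conversion in isolation: an unsigned 16-bit read of 0xFF9C
# (65436) maps to the signed value -100 via two's complement.
raw = 0xFF9C
assert (raw - 65536 if raw > 32767 else raw) == -100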
def has_buffer(self):
    return self.buffer_size() > 0
@override_mysql_variables(SQL_MODE="ANSI")
def test_appendleft_works_in_ansi_mode(self):
    CharListModel.objects.create()
    CharListModel.objects.update(field=ListF('field').appendleft('big'))
    CharListModel.objects.update(field=ListF('field').appendleft('bad'))
    model = CharListModel.objects.get()
    assert model.field == ["bad", "big"]
def hasChild(self, childPath):
    '''Return True if child exists in data.'''
    if type(self.data) == type(dict()):
        nodes = childPath.split('/')
        child = self.data
        for node in nodes:
            if node:
                # Membership test on the last path component. (The original
                # compared against nodes[-1:], a one-element list, which is
                # never equal to a string, so this branch could never fire.)
                if node == nodes[-1]:
                    return node in child
                else:
                    child = child[node]
    else:
        return False
def _access_log(self):
    host, port = self.server.addr if self.server else (None, None)
    if self.request.method == b"CONNECT":
        logger.info("%s:%s - %s %s:%s" % (
            self.client.addr[0], self.client.addr[1],
            self.request.method, host, port))
    elif self.request.method:
        logger.info("%s:%s - %s %s:%s%s - %s %s - %s bytes" % (
            self.client.addr[0], self.client.addr[1],
            self.request.method, host, port,
            self.request.build_url(),
            self.response.code, self.response.reason,
            len(self.response.raw)))
def test_works_with_two_fields(self):
    CharSetModel.objects.create(field={"snickers", "lion"},
                                field2={"apple", "orange"})

    # Concurrent add
    CharSetModel.objects.update(field=SetF('field').add("mars"),
                                field2=SetF('field2').add("banana"))
    model = CharSetModel.objects.get()
    assert model.field == {"snickers", "lion", "mars"}
    assert model.field2 == {"apple", "orange", "banana"}

    # Concurrent add and remove
    CharSetModel.objects.update(field=SetF('field').add("reeses"),
                                field2=SetF('field2').remove("banana"))
    model = CharSetModel.objects.get()
    assert model.field == {"snickers", "lion", "mars", "reeses"}
    assert model.field2 == {"apple", "orange"}

    # Swap
    CharSetModel.objects.update(field=SetF('field').remove("lion"),
                                field2=SetF('field2').remove("apple"))
    model = CharSetModel.objects.get()
    assert model.field == {"snickers", "mars", "reeses"}
    assert model.field2 == {"orange"}
def read(conn):
    data = conn.recv(1024)
    if data == b'switch':
        with self.window_list_lock:
            tree = self.i3.get_tree()
            windows = set(w.id for w in tree.leaves())
            for window_id in self.window_list[1:]:
                if window_id not in windows:
                    self.window_list.remove(window_id)
                else:
                    self.i3.command('[con_id=%s] focus' % window_id)
                    break
    elif not data:
        selector.unregister(conn)
        conn.close()
def patch_CursorWrapper_execute():

    # Be idempotent
    if getattr(CursorWrapper, '_has_django_mysql_execute', False):
        return

    orig_execute = CursorWrapper.execute

    @functools.wraps(orig_execute)
    def execute(self, sql, args=None):
        if (
            getattr(settings, 'DJANGO_MYSQL_REWRITE_QUERIES', False)
            and REWRITE_MARKER in sql
        ):
            sql = rewrite_query(sql)
        return orig_execute(self, sql, args)

    CursorWrapper.execute = execute
    CursorWrapper._has_django_mysql_execute = True
def tempdir(self, path=None):
    """Creates a temporary directory.
    A unique directory name is generated if no path name is specified.
    The directory is created, and will be removed when the TestCmd
    object is destroyed.
    """
    if path is None:
        try:
            path = tempfile.mktemp(prefix=tempfile.template)
        except TypeError:
            path = tempfile.mktemp()
    os.mkdir(path)

    # Symlinks in the path will report things
    # differently from os.getcwd(), so chdir there
    # and back to fetch the canonical path.
    cwd = os.getcwd()
    try:
        os.chdir(path)
        path = os.getcwd()
    finally:
        os.chdir(cwd)

    # Uppercase the drive letter since the case of drive
    # letters is pretty much random on win32:
    drive, rest = os.path.splitdrive(path)
    if drive:
        path = string.upper(drive) + rest

    #
    self._dirlist.append(path)
    global _Cleanup
    try:
        _Cleanup.index(self)
    except ValueError:
        _Cleanup.append(self)

    return path
def handle(self, *args, **kwargs):
    path = kwargs['path']

    # With DEBUG on this will DIE.
    settings.DEBUG = False

    print("Begin: %s" % datetime.datetime.now())

    call_command('load_zipcodes', path=path)
    call_command('load_states', path=path)
    call_command('load_counties', path=path)

    print("All Finished: %s" % datetime.datetime.now())
def inversion(self, start, end):
    '''
    inverts sequence between start and end, bases at start and end
    positions are not affected
    '''
    start = int(start)
    end = int(end)
    assert start < end
    invseq = rc(self.seq[start:end])
    self.seq = self.seq[:start] + invseq + self.seq[end:]
def re_filter(self, conf):
    return conf or self.default_pattern, None, None
def up_to_date(self, gyp_file, target=None, **kw):
    """
    Verifies that a build of the specified Make target is up to date.
    """
    if target in (None, self.DEFAULT):
        message_target = 'all'
    else:
        message_target = target
    kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target
    return self.build(gyp_file, target, **kw)
def get_source(self, line_offset):
    """Return source of line at absolute line offset `line_offset`."""
    return self.input_lines.source(line_offset - self.input_offset)
def _create_win(self):
    try:
        key = _winreg.OpenKey(
            _winreg.HKEY_LOCAL_MACHINE,
            r'Software\Microsoft\Windows NT\CurrentVersion\Fonts')
    except EnvironmentError:
        try:
            key = _winreg.OpenKey(
                _winreg.HKEY_LOCAL_MACHINE,
                r'Software\Microsoft\Windows\CurrentVersion\Fonts')
        except EnvironmentError:
            raise FontNotFound('Can\'t open Windows font registry key')
    try:
        path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
        self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
        for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
            path = self._lookup_win(key, self.font_name, STYLES[style])
            if path:
                self.fonts[style] = ImageFont.truetype(path, self.font_size)
            else:
                if style == 'BOLDITALIC':
                    self.fonts[style] = self.fonts['BOLD']
                else:
                    self.fonts[style] = self.fonts['NORMAL']
    finally:
        _winreg.CloseKey(key)
def visit_field_list(self, node):
    if 'rfc2822' in node['classes']:
        raise nodes.SkipNode
def callback(lexer, match, context):
    text = match.group()
    context.block_scalar_indent = None
    if not text:
        return
    increment = match.group(1)
    if increment:
        current_indent = max(context.indent, 0)
        increment = int(increment)
        context.block_scalar_indent = current_indent + increment
    if text:
        yield match.start(), token_class, text
    context.pos = match.end()
def validate_models():
    """Since BaseRunserverCommand is only run once, we need to call
    model validation here to ensure it is run every time the code
    changes.
    """
    import logging
    from django.core.management.validation import get_validation_errors
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    logging.info("Validating models...")

    s = StringIO()
    num_errors = get_validation_errors(s, None)

    if num_errors:
        s.seek(0)
        error_text = s.read()
        logging.critical("One or more models did not validate:\n%s"
                         % error_text)
    else:
        logging.info("All models validated.")
def GetToolchainEnv(self, additional_settings=None):
    """Returns the variables toolchain would set for build steps."""
    env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
    if self.flavor == 'win':
        env = self.GetMsvsToolchainEnv(
            additional_settings=additional_settings)
    return env
def is_expired(self):
    """Check whether the credit card is expired or not"""
    return datetime.date.today() > datetime.date(
        self.year, self.month,
        calendar.monthrange(self.year, self.month)[1])
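# calendar.monthrange(year, month)[1] is the last day of that month, so a
# card dated 2024/02 is treated as valid through the leap day:
import calendar
import datetime
assert datetime.date(2024, 2, calendar.monthrange(2024, 2)[1]) == \
    datetime.date(2024, 2, 29)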
def get_next_fire_time(self, previous_fire_time, now):
    return self.run_date if previous_fire_time is None else None
def _IsUniqueSymrootForTarget(self, symroot):
    # This method returns True if all configurations in target contain a
    # 'SYMROOT' attribute that is unique for the given target. A value is
    # unique, if the Xcode macro '$SRCROOT' appears in it in any form.
    uniquifier = ['$SRCROOT', '$(SRCROOT)']
    if any(x in symroot for x in uniquifier):
        return True
    return False
def test_send_frame_when_frame_overflow(self):
    frame = mock()
    self.connection._frame_max = 100
    expect(frame.write_frame).side_effect(
        lambda buf: buf.extend('a' * 200))
    expect(self.connection.close).args(
        reply_code=501, reply_text=var('reply'),
        class_id=0, method_id=0, disconnect=True)
    stub(self.connection._transport.write)

    self.connection._connected = True
    with assert_raises(ConnectionClosed):
        self.connection.send_frame(frame)
def get_image_exif_cases(self):
    cases = image_test.get_image_exif_cases()
    m = dict(preserve_exif="exif")
    for i, case in enumerate(cases):
        path = "/test/data/%s" % os.path.basename(case["source_path"])
        cases[i]["source_query_params"] = dict(
            url=self.get_url(path),
            w=case["width"] or "",
            h=case["height"] or "")
        for k in m.keys():
            if k in case:
                cases[i]["source_query_params"][m.get(k)] = case[k]
        cases[i]["content_type"] = self._format_to_content_type(
            case.get("format"))

    return cases
def __array_finalize__(self, obj):
    if obj is None:
        return
    self.ctypesArray = getattr(obj, 'ctypesArray', None)
def purchase(self, money, credit_card, options=None):
    """Create a plan that bills every decade or so
    and charge the plan immediately"""
    options = options or {}
    resp = self.store(credit_card, options=options)
    subscription_id = resp["response"]["subscription"]["id"]
    resp = self._chargebee_request(
        "post", "/invoices/charge",
        data={"subscription_id": subscription_id,
              "amount": money,
              "description": options.get("description")})
    if 200 <= resp.status_code < 300:
        transaction_was_successful.send(sender=self,
                                        type="purchase",
                                        response=resp.json())
        return {'status': 'SUCCESS', 'response': resp.json()}
    transaction_was_unsuccessful.send(sender=self,
                                      type="purchase",
                                      response=resp.json())
    return {'status': 'FAILURE', 'response': resp.json()}
@register.simple_tag
def render_banner_ad(type, fallback='True'):
    """
    Renders a BannerAd instance of the desired size. If fallback is 'True',
    the site will display an AdSense ad if there is no current BannerAd of
    the specified type.
    """
    site = Site.objects.get_current()

    # If we ask for a vertical ad, this means we'll have room for either a
    # vertical ad or a shortvert. Let's mix things up a bit.
    if type == 'vertical':
        type = random.choice(['vertical', 'shortvert'])

    if type in ['vertical', 'shortvert', 'banner']:
        try:
            ads = BannerAd.current.filter(site=site).filter(ad_type=type)
            # .filter(special_section__isnull=True) see above
            if not ads:
                ad = None
            else:
                ad = random.choice(ads)
        except Advertisement.DoesNotExist:
            ad = None

    if not ad:
        if fallback == 'True':
            return render_adsense(type)
        else:
            return ''

    code = ''
    if not ad.code:
        code = '<img src="%s" alt="%s" />' % (ad.image.url, ad.name)
        if ad.url:
            code = ''.join(['<a href="%s">' % ad.url, code, '</a>'])
    else:
        code = ad.code
    code = ''.join(['<div class="ad ad_%s">' % type, code, '</div>'])

    return code
def parseBytecode(dex, insns_start_pos, shorts, catch_addrs):
    ops = []
    pos = 0
    while pos < len(shorts):
        pos, op = parseInstruction(dex, insns_start_pos, shorts, pos)
        ops.append(op)

    # Fill in data for move-result
    for instr, instr2 in zip(ops, ops[1:]):
        if not instr2.type == MoveResult:
            continue
        if instr.type in INVOKE_TYPES:
            called_id = dex.method_id(instr.args[0])
            if called_id.return_type != b'V':
                instr2.prev_result = called_id.return_type
        elif instr.type == FilledNewArray:
            instr2.prev_result = dex.type(instr.args[0])
        elif instr2.pos in catch_addrs:
            instr2.prev_result = b'Ljava/lang/Throwable;'
    assert(0 not in catch_addrs)

    # Fill in implicit cast data
    for i, instr in enumerate(ops):
        if instr.opcode in (0x38, 0x39):  # if-eqz, if-nez
            if i > 0 and ops[i-1].type == InstanceOf:
                prev = ops[i-1]
                desc_ind = prev.args[2]
                regs = {prev.args[1]}

                if i > 1 and ops[i-2].type == Move:
                    prev2 = ops[i-2]
                    if prev2.args[0] == prev.args[1]:
                        regs.add(prev2.args[1])

                # Don't cast result of instanceof if it overwrites the input
                regs.discard(prev.args[0])
                if regs:
                    instr.implicit_casts = desc_ind, sorted(regs)
    return ops
def get_pids(procname, plugin="psscan"):
    pids = []
    if procname == "":
        return pids
    f = open_full_plugin(plugin, 2)
    for line in f:
        if re.search(' ' + procname + ' ', line, re.IGNORECASE):
            pids.append(re.sub(' +', ' ', line).split(' ')[2])
    pids = sorted(set(pids))
    f.close()
    return pids
def data_cksum(data, cksum=0):
    for b in data:
        if isinstance(b, bytes):
            b = ord(b)
        # right-shift one with wrap-around
        lowbit = (cksum & 0x0001)
        cksum = (cksum >> 1)
        if lowbit:
            cksum = (cksum | 0x8000)

        # then add in the data and clear any carried bit past 16
        cksum = (cksum + b) & 0xffff

    return cksum
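# Worked example of the rotate-right-then-add checksum above, starting at 0:
#   'A' (65): rotate 0 -> 0, add 65 -> 0x0041
#   'B' (66): rotate 0x0041 (low bit set) -> 0x8020, add 66 -> 0x8062
assert data_cksum(b'AB') == 0x8062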
@user_passes_test(lambda u: u.is_staff or u.is_superuser)
def staff_object_list(*args, **kwargs):
    return object_list(*args, **kwargs)
def safe_single_except_nl(self, node, indent):
    if '\\n' in node.value:
        return '"%s"' % node.value.replace('"', '\\"')
    else:
        return self.safe_single(node, indent)
def is_valid(self):
    """
    Checks if the newsletter is valid. A newsletter needs to have a
    subject to be valid.
    """
    if self.subject == '':
        return False
    # todo: check if email is valid
    return True
@classmethod
def authenticate(self, email, password):
    """
    Returns user and authentication status.
    """
    user = User.query.filter_by(email=email).first()
    if user is not None:
        if user.check_password(password):
            return user, True

    return user, False
def solve(self, cipher):
    """
    :param cipher: the cipher
    """
    m, A, n, B = cipher

    result = set()  # {} is for dictionary
    hm = {}
    for a in A:
        if a not in hm:
            hm[a] = 1
        else:
            hm[a] += 1

    for b in B:
        if b not in hm or hm[b] <= 0:
            result.add(b)
        else:
            hm[b] -= 1
    result = sorted(list(result))
    return " ".join(map(str, result))
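# Hedged usage sketch (assumes `solver` is an instance of the class that
# defines solve): B carries an extra 4 and a 5 that A lacks, so those are
# the elements reported missing.
demo_cipher = (4, [1, 2, 4, 3], 5, [1, 2, 4, 4, 5])
assert solver.solve(demo_cipher) == "4 5"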
def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated \
            and self.thrift_spec is not None and fastbinary is not None:
        oprot.trans.write(fastbinary.encode_binary(
            self, (self.__class__, self.thrift_spec)))
        return
    oprot.writeStructBegin('account_result')
    if self.success is not None:
        oprot.writeFieldBegin('success', TType.STRUCT, 0)
        self.success.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
def __eq__(self, other):
    if not isinstance(other, self.__class__):
        return False

    for attr in self.__slots__:
        my_val = getattr(self, attr)
        other_val = getattr(other, attr)
        if my_val != other_val:
            return False

    return True
@classmethod
def prepare_constraints(cls, labels, num_constraints):
    C = np.empty((num_constraints, 4), dtype=int)
    a, c = np.random.randint(len(labels), size=(2, num_constraints))
    for i, (al, cl) in enumerate(zip(labels[a], labels[c])):
        C[i, 1] = choice(np.nonzero(labels == al)[0])
        C[i, 3] = choice(np.nonzero(labels != cl)[0])
    C[:, 0] = a
    C[:, 2] = c
    return C
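# Each returned row is (i, j, k, l) with labels[i] == labels[j] and
# labels[k] != labels[l] -- similarity/dissimilarity constraints for metric
# learning. A sketch assuming the classmethod is reachable unqualified:
import numpy as np
demo_labels = np.array([0, 0, 1, 1])
C_demo = prepare_constraints(demo_labels, num_constraints=5)
assert C_demo.shape == (5, 4)
assert all(demo_labels[i] == demo_labels[j] and demo_labels[k] != demo_labels[l]
           for i, j, k, l in C_demo)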
def send_notice(self, extra_context=None):
    if extra_context is None:
        extra_context = {}
    extra_context.update({"observed": self.observed_object})
    send([self.user], self.notice_type.label, extra_context)
def function_walk(self, node):
    for j, child in enumerate(node.definitions):
        if child.type == 'function_definition':
            node.definitions[j] = self.transform_special_f(child)
        elif child.type == 'class_definition':
            self.current_class = child
            if child.constructor:
                child.constructor = self.transform_special_f(child.constructor)
            for k, definition in enumerate(child.methods):
                child.methods[k] = self.transform_special_f(definition)
    return node
def __init__(self):
    temper_devices = self._get_temper_devices()
    if len(temper_devices) == 0:
        msg = 'No TEMPer devices discovered'
        raise DeviceError(msg)
    # Use first device if multiple devices discovered
    self._temper_device = temper_devices[0]
def run(self, **kwargs):
    """Run function with params on external service.

    Basically this method calls the external method with the params
    which it accepts. You must know about the params accepted by the
    external function and provide them.

    Args:
        kwargs: A dictionary with params.

    Returns:
        Returns the external function result - the type of the result
        depends on the external method.

    Raises:
        QueuedServiceError: If something goes wrong on the queue.
    """
    job = self.queue.enqueue(self.method, **kwargs)
    if not job.is_queued:
        raise QueuedServiceError
    while job and not any([job.is_finished, job.is_failed]):
        time.sleep(0.1)
    return job.result
@staticmethod
def gaussian_kernel(sigma, size=None):
    if size is None:
        size = int(np.ceil(sigma * 2.))
        if size % 2 == 0:
            size += 1
    x = np.linspace(-size / 2., size / 2., size)
    kernel = 1 / (np.sqrt(2 * np.pi)) * np.exp(-x ** 2 / (2 * sigma ** 2)) / sigma
    return kernel / np.sum(kernel)
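# Quick sanity check of the kernel above (assuming the staticmethod is
# reachable as a plain gaussian_kernel function): sigma=1 defaults to a
# size-3 kernel (2 rounded up to the next odd integer) that sums to 1.
import numpy as np
k = gaussian_kernel(1.0)
assert k.size == 3 and np.isclose(k.sum(), 1.0)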
def do_login(self, request, extra_message=None):
    if request.method == 'GET':
        return self.show_login(request, extra_message)

    user_url = request.POST.get('openid_url', None)
    if not user_url:
        return self.show_login(request, self.openid_required_message)

    return self.start_openid_process(request, user_url)
def read_to_condition(self, condition_func):
    start = self._l
    for line in self[start:]:
        if condition_func(line):
            return self[start:self._l]
        self._l += 1
        if self.eof():
            return self[start:self._l + 1]
    return []