code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
def system_generate_batch_inputs(input_params={}, always_retry=True, **kwargs): return DXHTTPRequest(, input_params, always_retry=always_retry, **kwargs)
Invokes the /system/generateBatchInputs API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Search#API-method:-/system/generateBatchInputs
def create_dagrun(self, run_id, state, execution_date, start_date=None, external_trigger=False, conf=None, session=None): return self.get_dag().crea...
Creates a dag run from this dag including the tasks associated with this dag. Returns the dag run. :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date of this dag run :type execution_date: datetime.datetime :...
def resolved_packages(self): if (self.status != SolverStatus.solved): return None final_phase = self.phase_stack[-1] return final_phase._get_solved_variants()
Return a list of PackageVariant objects, or None if the resolve did not complete or was unsuccessful.
def _advapi32_encrypt(certificate_or_public_key, data, rsa_oaep_padding=False): flags = 0 if rsa_oaep_padding: flags = Advapi32Const.CRYPT_OAEP out_len = new(advapi32, , len(data)) res = advapi32.CryptEncrypt( certificate_or_public_key.ex_key_handle, null(), True, ...
Encrypts a value using an RSA public key via CryptoAPI :param certificate_or_public_key: A Certificate or PublicKey instance to encrypt with :param data: A byte string of the data to encrypt :param rsa_oaep_padding: If OAEP padding should be used instead of PKCS#1 v1.5 :raise...
def add(request, kind, method, *args): request.session.setdefault(_key_name(kind), []).append({ "method": method, "args": args })
add(request, "mixpanel", "track", "purchase", {order: "1234", amount: "100"}) add(request, "google", "push", ["_addTrans", "1234", "Gondor", "100"])
def main(bot): greenlet = spawn(bot.run) try: greenlet.join() except KeyboardInterrupt: print log.info("Killed by user, disconnecting...") bot.disconnect() finally: greenlet.kill()
Entry point for the command line launcher. :param bot: the IRC bot to run :type bot: :class:`fatbotslim.irc.bot.IRC`
def text(what="sentence", *args, **kwargs): if what == "character": return character(*args, **kwargs) elif what == "characters": return characters(*args, **kwargs) elif what == "word": return word(*args, **kwargs) elif what == "words": return words(*args, **kwargs) ...
An aggregator for all above defined public methods.
def repair_broken_bonds(self, slab, bonds): for pair in bonds.keys(): blength = bonds[pair] cn_dict = {} for i, el in enumerate(pair): cnlist = [] for site in self.oriented_unit_cel...
This method will find undercoordinated atoms due to slab cleaving specified by the bonds parameter and move them to the other surface to make sure the bond is kept intact. In a future release of surface.py, the ghost_sites will be used to tell us how the repair bonds should look like. ...
def get_account(self, account): try: return self.get_account_by_cookie(account.account_cookie) except: QA_util_log_info( % account.account_cookie ) return None
check the account whether in the protfolio dict or not :param account: QA_Account :return: QA_Account if in dict None not in list
def _await_descriptor_upload(tor_protocol, onion, progress, await_all_uploads): if progress: if await_all_uploads: msg = "Completed descriptor uploads" else: msg = "At least one descriptor uploaded" try: progress(100.0, "wait_descriptor", ms...
Internal helper. :param tor_protocol: ITorControlProtocol instance :param onion: IOnionService instance :param progress: a progess callback, or None :returns: a Deferred that fires once we've detected at least one descriptor upload for the service (as detected by listening for HS_DES...
def eval_nonagg_call(self, exp): "helper for eval_callx; evaluator for CallX that consume a single value" args=self.eval(exp.args) if exp.f==: a,b=args return b if a is None else a elif exp.f==: return self.eval(exp.args)[0] elif exp.f in (,): return set(self.eval(exp.args.childre...
helper for eval_callx; evaluator for CallX that consume a single value
def _add_spin_magnitudes(self, structure): for idx, site in enumerate(structure): if getattr(site.specie, , None): spin = site.specie._properties.get(, None) sign = int(spin) if spin else 0 if spin: new_properties = site.sp...
Replaces Spin.up/Spin.down with spin magnitudes specified by mag_species_spin. :param structure: :return:
def _migrate_subresource(subresource, parent, migrations): for key, doc in getattr(parent, subresource.parent_key, {}).items(): for migration in migrations[]: instance = migration(subresource(id=key, **doc)) parent._resource[] = unicode(migration.version) instance =...
Migrate a resource's subresource :param subresource: the perch.SubResource instance :param parent: the parent perch.Document instance :param migrations: the migrations for a resource
def gen_age(output, ascii_props=False, append=False, prefix=""): obj = {} all_chars = ALL_ASCII if ascii_props else ALL_CHARS with codecs.open(os.path.join(HOME, , UNIVERSION, ), , ) as uf: for line in uf: if not line.startswith(): data = line.split()[0].split() ...
Generate `age` property.
def make_success_redirect(self): new_authorization_code = AuthorizationCode.objects.create( user=self.user, client=self.client, redirect_uri=(self.redirect_uri if self.request_redirect_uri else None) ) new_authorization_code.scopes = self.valid_scope_objects new_authorizatio...
Return a Django ``HttpResponseRedirect`` describing the request success. The custom authorization endpoint should return the result of this method when the user grants the Client's authorization request. The request is assumed to have successfully been vetted by the :py:meth:`validate` method.
def short_dask_repr(array, show_dtype=True): chunksize = tuple(c[0] for c in array.chunks) if show_dtype: return .format( array.shape, array.dtype, chunksize) else: return .format( array.shape, chunksize)
Similar to dask.array.DataArray.__repr__, but without redundant information that's already printed by the repr function of the xarray wrapper.
def add(repo, args, targetdir, execute=False, generator=False, includes=[], script=False, source=None): if not execute: files = add_files(args=args, targetdir=targetdir, source=source, script=scr...
Add files to the repository by explicitly specifying them or by specifying a pattern over files accessed during execution of an executable. Parameters ---------- repo: Repository args: files or command line (a) If simply adding files, then the list of files that must be adde...
def build_ast_schema( document_ast: DocumentNode, assume_valid: bool = False, assume_valid_sdl: bool = False, ) -> GraphQLSchema: if not isinstance(document_ast, DocumentNode): raise TypeError("Must provide a Document AST.") if not (assume_valid or assume_valid_sdl): from ..val...
Build a GraphQL Schema from a given AST. This takes the ast of a schema document produced by the parse function in src/language/parser.py. If no schema definition is provided, then it will look for types named Query and Mutation. Given that AST it constructs a GraphQLSchema. The resulting schema ...
def variable_declaration(self): self._process(Nature.LET) node = VariableDeclaration(assignment=self.assignment()) self._process(Nature.SEMI) return node
variable_declaration: 'let' assignment ';'
def DEBUG(msg, *args, **kwargs): logger = getLogger("DEBUG") if len(logger.handlers) == 0: logger.addHandler(StreamHandler()) logger.propagate = False logger.setLevel(logging.DEBUG) logger.DEV(msg, *args, **kwargs)
temporary logger during development that is always on
def AddProperty(self, interface, name, value): s main interface (as specified on construction). name: Property name. value: Property value. property %s already exists.PropertyExistst do if not (isinstance(value, dbus.Dictionary) or isinstance(value, db...
Add property to this object interface: D-Bus interface to add this to. For convenience you can specify '' here to add the property to the object's main interface (as specified on construction). name: Property name. value: Property value.
def ensure_table_strings(table): for row in range(len(table)): for column in range(len(table[row])): table[row][column] = str(table[row][column]) return table
Force each cell in the table to be a string Parameters ---------- table : list of lists Returns ------- table : list of lists of str
def verbose(self): log = copy.copy(self) log._is_verbose = True return log
Make it the verbose log. A verbose log can be only shown when user want to see more logs. It works as:: log.verbose.warn('this is a verbose warn') log.verbose.info('this is a verbose info')
def GET_AUTH(self, courseid, taskid): if not id_checker(taskid): raise Exception("Invalid task id") self.get_course_and_check_rights(courseid, allow_all_staff=False) request = web.input() if request.get("action") == "download" and request.get() is not None: ...
Edit a task
def latitude(self, dms: bool = False) -> Union[str, float]: return self._get_fs(, dms)
Generate a random value of latitude. :param dms: DMS format. :return: Value of longitude.
def parse_package_string(path): parts = path.split() if parts[-1][0].isupper(): return ".".join(parts[:-1]), parts[-1] return path, ""
Parse the effect package string. Can contain the package python path or path to effect class in an effect package. Examples:: # Path to effect pacakge examples.cubes # Path to effect class examples.cubes.Cubes Args: path: python path to effect package. May also in...
def visit_pass(self, node, parent): return nodes.Pass(node.lineno, node.col_offset, parent)
visit a Pass node by returning a fresh instance of it
def remove_overlaps(self, ufos, glyph_filter=lambda g: len(g)): from booleanOperations import union, BooleanOperationsError for ufo in ufos: font_name = self._font_name(ufo) logger.info("Removing overlaps for " + font_name) for glyph in ufo: ...
Remove overlaps in UFOs' glyphs' contours.
def run(arguments: List[str], execution_directory: str=None, execution_environment: Dict=None) -> str: process = subprocess.Popen( arguments, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, cwd=execution_directory, env=execution_environment) out, error = process.commu...
Runs the given arguments from the given directory (if given, else resorts to the (undefined) current directory). :param arguments: the CLI arguments to run :param execution_directory: the directory to execute the arguments in :param execution_environment: the environment to execute in :return: what is w...
def get_pmap_from_nrml(oqparam, fname): hcurves_by_imt = {} oqparam.hazard_imtls = imtls = {} for hcurves in nrml.read(fname): imt = hcurves[] oqparam.investigation_time = hcurves[] if imt == : imt += % hcurves[] imtls[imt] = ~hcurves.IMLs data = sor...
:param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fname: an XML file containing hazard curves :returns: site mesh, curve array
def tell(self): pos = ctypes.c_size_t() check_call(_LIB.MXRecordIOReaderTell(self.handle, ctypes.byref(pos))) return pos.value
Returns the current position of read head.
def _add_post_data(self, request: Request): if self._item_session.url_record.post_data: data = wpull.string.to_bytes(self._item_session.url_record.post_data) else: data = wpull.string.to_bytes( self._processor.fetch_params.post_data ) ...
Add data to the payload.
def scan_processes_fast(self): new_pids = set( win32.EnumProcesses() ) old_pids = set( compat.iterkeys(self.__processDict) ) our_pid = win32.GetCurrentProcessId() if our_pid in new_pids: new_pids.remove(our_pid) if our_pid in old_pids: ...
Populates the snapshot with running processes. Only the PID is retrieved for each process. Dead processes are removed. Threads and modules of living processes are ignored. Tipically you don't need to call this method directly, if unsure use L{scan} instead. @note: This...
def get_uvec(vec): l = np.linalg.norm(vec) if l < 1e-8: return vec return vec / l
Gets a unit vector parallel to input vector
def add_ne(self, ne): ne_id = self.get_element_id(ne) ne_label = +ne.attrib[] self.add_node(ne_id, layers={self.ns, self.ns+}, attr_dict=self.element_attribs_to_dict(ne), label=ne_label) for child in ne.iterchildren(): ...
Parameters ---------- ne : etree.Element etree representation of a <ne> element (marks a text span -- (one or more <node> or <word> elements) as a named entity) Example ------- <ne xml:id="ne_23" type="PER"> <word xml:id="s3_2" form="Ute"...
def delete(self, id, **kwargs): kwargs[] = True if kwargs.get(): return self.delete_with_http_info(id, **kwargs) else: (data) = self.delete_with_http_info(id, **kwargs) return data
Deletes an existing License This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) ...
def _in_version(self, *versions): "Returns true if this frame is in any of the specified versions of ID3." for version in versions: if (self._version == version or (isinstance(self._version, collections.Container) and version in self._version)): ...
Returns true if this frame is in any of the specified versions of ID3.
def generous_parse_uri(uri): parse_result = urlparse(uri) if parse_result.scheme == : abspath = os.path.abspath(parse_result.path) if IS_WINDOWS: abspath = windows_to_unix_path(abspath) fixed_uri = "file://{}".format(abspath) parse_result = urlparse(fixed_uri) ...
Return a urlparse.ParseResult object with the results of parsing the given URI. This has the same properties as the result of parse_uri. When passed a relative path, it determines the absolute path, sets the scheme to file, the netloc to localhost and returns a parse of the result.
def reconstruct_interval(experiment_id): start, end = map(lambda x: udatetime.utcfromtimestamp(x / 1000.0), map(float, experiment_id.split("-"))) from ..time_interval import TimeInterval return TimeInterval(start, end)
Reverse the construct_experiment_id operation :param experiment_id: The experiment id :return: time interval
def html(theme_name=): os.environ[] = theme(theme_name) api() man() clean() local("cd docs; make html") local("fab security.check") local("touch docs/build/html/.nojekyll")
build the doc locally and view
def bulk_call(self, call_params): path = + self.api_version + method = return self.request(path, method, call_params)
REST BulkCalls Helper
def predict(self, X): val = numpy.dot(X, self.coef_) if hasattr(self, "intercept_"): val += self.intercept_ if self.rank_ratio == 1: val *= -1 else: val = numpy.exp(val) return val
Rank samples according to survival times Lower ranks indicate shorter survival, higher ranks longer survival. Parameters ---------- X : array-like, shape = (n_samples, n_features) The input samples. Returns ------- y : ndarray, shape = (n_samples,) ...
def items(self): pipe = self.conn.pipeline() pipe.lrange(Q_STORAGE_ITEMS, 0, -1) pipe.ltrim(Q_STORAGE_ITEMS, 1, 0) items = pipe.execute()[0] for item in items: item = pickle.loads(item) yield item
Get the items fetched by the jobs.
async def dump_blob(elem, elem_type=None): elem_is_blob = isinstance(elem, x.BlobType) data = getattr(elem, x.BlobType.DATA_ATTR) if elem_is_blob else elem if data is None or len(data) == 0: return b if isinstance(data, (bytes, bytearray, list)): return base64.b16encode(bytes(data))...
Dumps blob message. Supports both blob and raw value. :param writer: :param elem: :param elem_type: :param params: :return:
def from_shapefile(cls, shapefile, *args, **kwargs): reader = Reader(shapefile) return cls.from_records(reader.records(), *args, **kwargs)
Loads a shapefile from disk and optionally merges it with a dataset. See ``from_records`` for full signature. Parameters ---------- records: list of cartopy.io.shapereader.Record Iterator containing Records. dataset: holoviews.Dataset Any HoloViews ...
def similar(self, **kwargs): path = self._get_id_path() response = self._GET(path, kwargs) self._set_attrs_to_values(response) return response
Get the similar TV series for a specific TV series id. Args: page: (optional) Minimum value of 1. Expected value is an integer. language: (optional) ISO 639-1 code. append_to_response: (optional) Comma separated, any TV method. Returns: A dict respresen...
def _apply_dvs_capability(capability_spec, capability_dict): if in capability_dict: capability_spec.dvsOperationSupported = \ capability_dict[] if in capability_dict: capability_spec.dvPortOperationSupported = \ capability_dict[] if in capability_dict:...
Applies the values of the capability_dict dictionary to a DVS capability object (vim.vim.DVSCapability)
def parse_manifest(path_to_manifest): bam_re = r"^(?P<uuid>\S+)\s(?P<url>\S+[bsc][r]?am)" fq_re = r"^(?P<uuid>\S+)\s(?P<url>\S+)\s(?P<paired_url>\S+)?\s?(?P<rg_line>@RG\S+)" samples = [] with open(path_to_manifest, ) as f: for line in f.readlines(): line = line.strip() ...
Parses manifest file for Toil Germline Pipeline :param str path_to_manifest: Path to sample manifest file :return: List of GermlineSample namedtuples :rtype: list[GermlineSample]
def rnd_date(start=date(1970, 1, 1), end=None, **kwargs): if end is None: end = date.today() start = parser.parse_date(start) end = parser.parse_date(end) _assert_correct_start_end(start, end) return _rnd_date(start, end)
Generate a random date between ``start`` to ``end``. :param start: Left bound :type start: string or datetime.date, (default date(1970, 1, 1)) :param end: Right bound :type end: string or datetime.date, (default date.today()) :return: a datetime.date object **中文文档** 随机生成一个位于 ``start`` 和 `...
def close(self): self._execute_plugin_hooks_sync(hook=) if not self.session.closed: ensure_future(self.session.close(), loop=self.loop)
Close service client and its plugins.
def xcom_pull( self, task_ids=None, dag_id=None, key=XCOM_RETURN_KEY, include_prior_dates=False): if dag_id is None: dag_id = self.dag_id pull_fn = functools.partial( XCom.get_one, execution_date=s...
Pull XComs that optionally meet certain criteria. The default value for `key` limits the search to XComs that were returned by other tasks (as opposed to those that were pushed manually). To remove this filter, pass key=None (or any desired value). If a single task_id string is provide...
def getAnalystName(self): mtool = getToolByName(self, ) analyst = self.getAnalyst().strip() analyst_member = mtool.getMemberById(analyst) if analyst_member is not None: return analyst_member.getProperty() return analyst
Returns the name of the currently assigned analyst
def seek(self, pos): if (pos > self.file_size) or (pos < 0): raise Exception("Unable to seek - position out of file!") self.file.seek(pos)
Move to new input file position. If position is negative or out of file, raise Exception.
def to_list(self): src = self._source or coll = self._collection or desc = self._description or l = [self._id, src, coll, self._name, .join(sorted(self._genes)), desc] return l
Converts the GeneSet object to a flat list of strings. Note: see also :meth:`from_list`. Parameters ---------- Returns ------- list of str The data from the GeneSet object as a flat list.
def spectral_registration(data, target, initial_guess=(0.0, 0.0), frequency_range=None): data = data.squeeze() target = target.squeeze() if type(frequency_range) is tuple: spectral_weights = frequency_range[0] < data.frequency_axis() & data.frequency_axis() < fre...
Performs the spectral registration method to calculate the frequency and phase shifts between the input data and the reference spectrum target. The frequency range over which the two spectra are compared can be specified to exclude regions where the spectra differ. :param data: :param target: :...
def callback(self, request, **kwargs): access_token = request.session[] + " access_token += str(request.session[]) kwargs = {: access_token} return super(ServiceGithub, self).callback(request, **kwargs)
Called from the Service when the user accept to activate it :param request: request object :return: callback url :rtype: string , path to the template
def custom_object_prefix_lax(instance): if (instance[] not in enums.TYPES and instance[] not in enums.RESERVED_OBJECTS and not CUSTOM_TYPE_LAX_PREFIX_RE.match(instance[])): yield JSONError("Custom object type should start with in " "order to be compatib...
Ensure custom objects follow lenient naming style conventions for forward-compatibility.
def enable_key(self): print("This command will enable a disabled key.") apiKeyID = input("API Key ID: ") try: key = self._curl_bitmex("/apiKey/enable", postdict={"apiKeyID": apiKeyID}) print("Key with ID %s enabled." % key["id"...
Enable an existing API Key.
def split_path(path) : "convenience routine for splitting a path into a list of components." if isinstance(path, (tuple, list)) : result = path elif path == "/" : result = [] else : if not path.startswith("/") or path.endswith("/") : raise DBusError(DBUS.ERROR_INVALI...
convenience routine for splitting a path into a list of components.
def replyToComment(self, repo_user, repo_name, pull_number, body, in_reply_to): return self.api.makeRequest( ["repos", repo_user, repo_name, "pulls", str(pull_number), "comments"], method="POST", data=dict(body=body, ...
POST /repos/:owner/:repo/pulls/:number/comments Like create, but reply to an existing comment. :param body: The text of the comment. :param in_reply_to: The comment ID to reply to.
def _produce_return(self, cursor): self.callback(self._row_generator(cursor), *self.cb_args) return None
Calls callback once with generator. :rtype: None
def init(banner, hidden, backup): manage_file = HIDDEN_MANAGE_FILE if hidden else MANAGE_FILE if os.path.exists(manage_file): if not click.confirm(.format(manage_file)): return if backup: bck = .format(manage_file) with open(manage_file, ) as source, ope...
Initialize a manage shell in current directory $ manage init --banner="My awesome app shell" initializing manage... creating manage.yml
def rc_params(usetex=None): rcp = GWPY_RCPARAMS.copy() if usetex: rcp.update(GWPY_TEX_RCPARAMS) return rcp
Returns a new `matplotlib.RcParams` with updated GWpy parameters The updated parameters are globally stored as `gwpy.plot.rc.GWPY_RCPARAMS`, with the updated TeX parameters as `gwpy.plot.rc.GWPY_TEX_RCPARAMS`. .. note:: This function doesn't apply the new `RcParams` in any way, just cre...
def calc_allowedremoterelieve_v1(self): flu = self.sequences.fluxes.fastaccess log = self.sequences.logs.fastaccess flu.allowedremoterelieve = log.loggedallowedremoterelieve[0]
Get the allowed remote relieve of the last simulation step. Required log sequence: |LoggedAllowedRemoteRelieve| Calculated flux sequence: |AllowedRemoteRelieve| Basic equation: :math:`AllowedRemoteRelieve = LoggedAllowedRemoteRelieve` Example: >>> from hydpy.models.dam imp...
def validate(self, generator, axesToMove=None, **kwargs): iterations = 10 for k, default in self._block.configure.defaults.items(): if k not in kwargs: kwargs[k] = default params = ConfigureParams(generator, axesToMove, **kwargs) ...
Validate configuration parameters and return validated parameters. Doesn't take device state into account so can be run in any state
def get_current_cmus(): result = subprocess.run(.split(), check=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) info = {} for line in result.stdout.decode().split(): line = line.split() if line[0] != : continue key = line[1] ...
Get the current song from cmus.
def validate_unique_items(value, **kwargs): counter = collections.Counter(( json.dumps(v, sort_keys=True) for v in value )) dupes = [json.loads(v) for v, count in counter.items() if count > 1] if dupes: raise ValidationError( MESSAGES[][].format( re...
Validator for ARRAY types to enforce that all array items must be unique.
def _extract_inner_match(self, candidate, offset): for possible_inner_match in _INNER_MATCHES: group_match = possible_inner_match.search(candidate) is_first_match = True while group_match and self._max_tries > 0: if is_first_match: ...
Attempts to extract a match from candidate if the whole candidate does not qualify as a match. Arguments: candidate -- The candidate text that might contain a phone number offset -- The current offset of candidate within text Returns the match found, None if none can be found
def _send_rpc_response(self, *packets): if len(packets) == 0: return handle, payload = packets[0] try: self._send_notification(handle, payload) except bable_interface.BaBLEException as err: if err.packet.status == : time.s...
Send an RPC response. It is executed in the baBLE working thread: should not be blocking. The RPC response is notified in one or two packets depending on whether or not response data is included. If there is a temporary error sending one of the packets it is retried automatically. If th...
def stat(path, user=None): host, port, path_ = split(path, user) fs = hdfs_fs.hdfs(host, port, user) retval = StatResult(fs.get_path_info(path_)) if not host: _update_stat(retval, path_) fs.close() return retval
Performs the equivalent of :func:`os.stat` on ``path``, returning a :class:`StatResult` object.
def _merge_keys(kwargs): TypeConfig log_driver = kwargs.pop(, helpers.NOTSET) log_opt = kwargs.pop(, helpers.NOTSET) if not in kwargs: if log_driver is not helpers.NOTSET \ or log_opt is not helpers.NOTSET: kwargs[] = { : log_driver ...
The log_config is a mixture of the CLI options --log-driver and --log-opt (which we support in Salt as log_driver and log_opt, respectively), but it must be submitted to the host config in the format {'Type': log_driver, 'Config': log_opt}. So, we need to construct this argument to be passed to the API ...
def _priority(s): if type(s) in (list, tuple, set, frozenset): return ITERABLE if type(s) is dict: return DICT if issubclass(type(s), type): return TYPE if hasattr(s, "validate"): return VALIDATOR if callable(s): return CALLABLE else: return C...
Return priority for a given object.
def permuted_copy(self, partition=None): def take(n, iterable): return [next(iterable) for _ in range(n)] if partition is None: partition = Partition([1] * len(self)) index_tuples = partition.get_membership() alignments = [] for ix in index_tup...
Return a copy of the collection with all alignment columns permuted
def bounds(self, thr=0): min_lat = float("inf") min_lon = float("inf") max_lat = -float("inf") max_lon = -float("inf") for segment in self.segments: milat, milon, malat, malon = segment.bounds(thr=thr) min_lat = min(milat, min_lat) min...
Gets the bounds of this segment Returns: (float, float, float, float): Bounds, with min latitude, min longitude, max latitude and max longitude
def getRoutes(self): routes = [] try: out = subprocess.Popen([routeCmd, "-n"], stdout=subprocess.PIPE).communicate()[0] except: raise Exception( % ipCmd) lines = out.splitlines() if len(lines) > 1: h...
Get routing table. @return: List of routes.
def verify_multi(self, otp_list, max_time_window=DEFAULT_MAX_TIME_WINDOW, sl=None, timeout=None): otps = [] for otp in otp_list: otps.append(OTP(otp, self.translate_otp)) if len(otp_list) < 2: raise ValueError() device_ids...
Verify a provided list of OTPs. :param max_time_window: Maximum number of seconds which can pass between the first and last OTP generation for the OTP to still be considered valid. :type max_time_window: ``int``
def mount_share_at_path(share_path, mount_path): sh_url = CFURLCreateWithString(None, share_path, None) mo_url = CFURLCreateWithString(None, mount_path, None) open_options = {NetFS.kNAUIOptionKey: NetFS.kNAUIOptionNoUI} mount_options = {NetFS.kNetFSAllowSubMountsKey: True, ...
Mounts a share at the specified path Args: share_path: String URL with all auth info to connect to file share. mount_path: Path to mount share on. Returns: The mount point or raises an error
def get_syllable_count(self, syllables: List[str]) -> int: tmp_syllables = copy.deepcopy(syllables) return len(string_utils.remove_blank_spaces( string_utils.move_consonant_right(tmp_syllables, self._find_solo_consonant(tmp_syllables))))
Counts the number of syllable groups that would occur after ellision. Often we will want preserve the position and separation of syllables so that they can be used to reconstitute a line, and apply stresses to the original word positions. However, we also want to be able to count the number of ...
def signFix(val, width): if val > 0: msb = 1 << (width - 1) if val & msb: val -= mask(width) + 1 return val
Convert negative int to positive int which has same bits set
def update(self, modelID, modelParams, modelParamsHash, metricResult, completed, completionReason, matured, numRecords): assert (modelParamsHash is not None) if completed: matured = True if metricResult is not None and matured and \ ...
Insert a new entry or update an existing one. If this is an update of an existing entry, then modelParams will be None Parameters: -------------------------------------------------------------------- modelID: globally unique modelID of this model modelParams: params dict for this model, or...
def check_email_status(mx_resolver, recipient_address, sender_address, smtp_timeout=10, helo_hostname=None): domain = recipient_address[recipient_address.find() + 1:] if helo_hostname is None: helo_hostname = domain ret = {: 101, : None, : "The server is unable to connect."} records = [] ...
Checks if an email might be valid by getting the status from the SMTP server. :param mx_resolver: MXResolver :param recipient_address: string :param sender_address: string :param smtp_timeout: integer :param helo_hostname: string :return: dict
def run_breiman2(): x, y = build_sample_ace_problem_breiman2(500) ace_solver = ace.ACESolver() ace_solver.specify_data_set(x, y) ace_solver.solve() try: plt = ace.plot_transforms(ace_solver, None) except ImportError: pass plt.subplot(1, 2, 1) phi = numpy.sin(2.0 * n...
Run Breiman's other sample problem.
def lstm_seq2seq_internal_attention_bid_encoder(inputs, targets, hparams, train): with tf.variable_scope("lstm_seq2seq_attention_bid_encoder"): inputs_length = common_layers.length_from_embedding(inputs) inputs = common_layers.flatten4d3d(inputs) ...
LSTM seq2seq model with attention, main step used for training.
def get_keyboard_mapping_unchecked(conn): mn, mx = get_min_max_keycode() return conn.core.GetKeyboardMappingUnchecked(mn, mx - mn + 1)
Return an unchecked keyboard mapping cookie that can be used to fetch the table of keysyms in the current X environment. :rtype: xcb.xproto.GetKeyboardMappingCookie
def tempo_account_get_customers(self, query=None, count_accounts=None): params = {} if query is not None: params[] = query if count_accounts is not None: params[] = count_accounts url = return self.get(url, params=params)
Gets all or some Attribute whose key or name contain a specific substring. Attributes can be a Category or Customer. :param query: OPTIONAL: query for search :param count_accounts: bool OPTIONAL: provide how many associated Accounts with Customer :return: list of customers
def line_intersection_2D(abarg, cdarg): ((x1,y1),(x2,y2)) = abarg ((x3,y3),(x4,y4)) = cdarg dx12 = (x1 - x2) dx34 = (x3 - x4) dy12 = (y1 - y2) dy34 = (y3 - y4) denom = dx12*dy34 - dy12*dx34 unit = np.isclose(denom, 0) if unit is True: return (np.nan, np.nan) denom = unit + d...
line_intersection((a, b), (c, d)) yields the intersection point between the lines that pass through the given pairs of points. If any lines are parallel, (numpy.nan, numpy.nan) is returned; note that a, b, c, and d can all be 2 x n matrices of x and y coordinate row-vectors.
def pexpect_monkeypatch(): if pexpect.__version__[:3] >= : return def __del__(self): if not self.closed: try: self.close() except AttributeError: pass pexpect.spawn.__del__ = __del__
Patch pexpect to prevent unhandled exceptions at VM teardown. Calling this function will monkeypatch the pexpect.spawn class and modify its __del__ method to make it more robust in the face of failures that can occur if it is called when the Python VM is shutting down. Since Python may fire __del__ me...
def get(self):
    """Fetch one task from the queue.

    Returns the payload of the first available task, or ``None`` when no
    task is pending.  The claimed entry is deleted from the backing
    key/value store before the payload is returned.
    """
    pending = self._get_avaliable_tasks()
    if not pending:
        return None
    name, payload = pending[0]
    # Remove the claimed entry so no other consumer picks it up.
    self._client.kv.delete(name)
    return payload
Get a task from the queue.
def do_alias(self, arg): args = arg.split() if len(args) == 0: keys = sorted(self.aliases.keys()) for alias in keys: self.message("%s = %s" % (alias, self.aliases[alias])) return if args[0] in self.aliases and len(args) == 1: ...
alias [name [command [parameter parameter ...] ]] Create an alias called 'name' that executes 'command'. The command must *not* be enclosed in quotes. Replaceable parameters can be indicated by %1, %2, and so on, while %* is replaced by all the parameters. If no command is given, the ...
def _set_datapath(self, datapath):
    """Set a datapath.

    Stores the path (sans trailing separator) and records whether it points
    at a FIFO.  A falsy ``datapath`` clears both attributes.

    :param datapath: filesystem path to the data, or a falsy value to unset
    """
    if not datapath:
        self._datapath = None
        self._fifo = False
        return
    # Drop a trailing separator so later path handling is predictable.
    self._datapath = datapath.rstrip(os.sep)
    # NOTE(review): ``self.datapath`` is presumably a property exposing
    # ``_datapath``; it is defined outside this view — confirm.
    self._fifo = int(stat.S_ISFIFO(os.stat(self.datapath).st_mode))
Set a datapath.
def count_snps(mat): snps = np.zeros(4, dtype=np.uint32) snps[0] = np.uint32(\ mat[0, 5] + mat[0, 10] + mat[0, 15] + \ mat[5, 0] + mat[5, 10] + mat[5, 15] + \ mat[10, 0] + mat[10, 5] + mat[10, 15] + \ mat[15, 0] + mat[15, 5] + mat[15, 10]) f...
get dstats from the count array and return as a float tuple
def lhistogram (inlist,numbins=10,defaultreallimits=None,printextras=0): if (defaultreallimits != None): if type(defaultreallimits) not in [ListType,TupleType] or len(defaultreallimits)==1: estbinwidth=(max(inlist)-min(inlist))/float(numbins) + 1 binsize = ((max(inlist)-min(inlist)+es...
Returns (i) a list of histogram bin counts, (ii) the smallest value of the histogram binning, and (iii) the bin width (the last 2 are not necessarily integers). Default number of bins is 10. If no sequence object is given for defaultreallimits, the routine picks (usually non-pretty) bins spanning all the numbers in t...
def get_pip(mov=None, api=None, name=None): if mov is None and api is None: logger.error("need at least one of those") raise ValueError() elif mov is not None and api is not None: logger.error("mov and api are exclusive") raise ValueError() if api is not None: ...
get value of pip
def _dict_contents(self, use_dict=None, as_class=dict): if _debug: Object._debug("dict_contents use_dict=%r as_class=%r", use_dict, as_class) if use_dict is None: use_dict = as_class() klasses = list(self.__class__.__mro__) klasses.reverse() ...
Return the contents of an object as a dict.
def strip_docstrings(tokens): stack = [] state = for t in tokens: typ = t[0] if state == : if typ in (tokenize.NL, tokenize.COMMENT): yield t elif typ in (tokenize.DEDENT, tokenize.INDENT, tokenize.STRING): stack.append(t) ...
Replace docstring tokens with NL tokens in a `tokenize` stream. Any STRING token not part of an expression is deemed a docstring. Indented docstrings are not yet recognised.
def get_brandings(self):
    """Get all account brandings.

    @return: list of brandings
    """
    request = Connection(self.token)
    request.set_url(self.production, self.BRANDINGS_URL)
    response = request.get_request()
    return response
Get all account brandings @return List of brandings
def CopyVcardFields(new_vcard, auth_vcard, field_names):
    """Copy vCard field values from an authoritative vCard into a new one.

    For each name in ``field_names``, the value list (or ``None`` when the
    field is absent) is read from ``auth_vcard`` and written into
    ``new_vcard`` via ``SetVcardField``.

    Returns the updated ``new_vcard``.
    """
    for field_name in field_names:
        values = auth_vcard.contents.get(field_name)
        new_vcard = SetVcardField(new_vcard, field_name, values)
    return new_vcard
Copy vCard field values from an authoritative vCard into a new one.
def F_(self, X): if self._interpol: if not hasattr(self, ): if self._lookup: x = self._x_lookup F_x = self._f_lookup else: x = np.linspace(0, self._max_interp_X, self._num_interp_X) ...
Computes F(X) (interpolated when ``self._interpol`` is set). :param X: input value(s) :return: F(X)
def write_info (self, url_data):
    """Write url_data.info as one HTML table row, entries joined by <br/>."""
    sep = u"<br/>"+os.linesep
    text = sep.join(cgi.escape(x) for x in url_data.info)
    # NOTE(review): the bare name ``u`` below is almost certainly a
    # u"<tr><td ...>" opening-cell string literal that was lost in
    # extraction — as written this raises NameError; restore the literal
    # from the original source before use.
    self.writeln(u + self.part("info")+ u"</td><td>"+text+u"</td></tr>")
Write url_data.info.
def status(deps=DEPENDENCIES, linesep=os.linesep): maxwidth = 0 col1 = [] col2 = [] for dependency in deps: title1 = dependency.modname title1 += + dependency.required_version col1.append(title1) maxwidth = max([maxwidth, len(title1)]) col2.append(...
Return a status of dependencies