code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
def stream(self, opNames=None, *args, **kwargs):
    """Yield specific operations (e.g. comments) only.

    :param list opNames: operation type names to filter for; empty or
        None yields all operations
    :param int start: start at this block (forwarded to ``self.ops``)
    :param int stop: stop at this block (forwarded to ``self.ops``)
    :param str mode: "head" (last block) or "irreversible" (confirmed
        by 2/3 of block producers) — forwarded to ``self.ops``

    Each yielded dict carries the operation ``type``, the ``timestamp``
    and ``block_num`` taken from the block the operation was stored in,
    plus the operation's own key/value payload.
    """
    # FIX(review): original used a mutable default ``opNames=[]``;
    # replaced with a None sentinel. Behavior is unchanged — the list
    # was never mutated and both [] and None are falsy in the filter.
    for op in self.ops(**kwargs):
        if not opNames or op["op"][0] in opNames:
            entry = {
                "type": op["op"][0],
                "timestamp": op.get("timestamp"),
                "block_num": op.get("block_num"),
            }
            entry.update(op["op"][1])
            yield entry
Yield specific operations (e.g. comments) only :param array opNames: List of operations to filter for :param int start: Start at this block :param int stop: Stop at this block :param str mode: We here have the choice between * "head": the last block * "irreversible": the block that is confirmed by 2/3 of all block producers and is thus irreversible! The dict output is formatted such that ``type`` carries the operation type, timestamp and block_num are taken from the block the operation was stored in and the other keys depend on the actual operation.
def apparent_dip_correction(axes):
    """Produce a two-dimensional rotation matrix that rotates a
    projected dataset to correct for apparent dip."""
    # Project the first axis onto the horizontal plane.
    horizontal = axes[0].copy()
    horizontal[-1] = 0
    cosa = angle(axes[0], horizontal, cos=True)
    sin_sq = 1 - cosa ** 2
    if sin_sq > 1e-12:
        sina = N.sqrt(sin_sq)
        if cosa < 0:
            sina = -sina
        return N.array([[cosa, sina], [-sina, cosa]])
    # Degenerate case: axis already horizontal — no rotation needed.
    return N.identity(2)
Produces a two-dimensional rotation matrix that rotates a projected dataset to correct for apparent dip
def keybd_event(bVk: int, bScan: int, dwFlags: int, dwExtraInfo: int) -> None:
    """Thin wrapper around the Win32 ``keybd_event`` call in user32."""
    user32 = ctypes.windll.user32
    user32.keybd_event(bVk, bScan, dwFlags, dwExtraInfo)
keybd_event from Win32.
# NOTE(review): this line is a corrupted extraction — the string
# literals (dict keys like 'millis'/'ts'/'op'/'ns', format strings) were
# stripped, leaving invalid syntax (e.g. ``doc[u]``, ``doc[]``,
# ``self._reformat_timestamp(, force=True)``, ``if in doc:``).
# Intent per the paired docstring: parse a MongoDB system.profile doc
# and copy all values into member variables, then build a log-style
# line string. Restore from the upstream project before use.
def _parse_document(self): self._reset() doc = self._profile_doc self._split_tokens_calculated = True self._split_tokens = None self._duration_calculated = True self._duration = doc[u] self._datetime_calculated = True self._datetime = doc[u] if self._datetime.tzinfo is None: self._datetime = self._datetime.replace(tzinfo=tzutc()) self._datetime_format = None self._reformat_timestamp(, force=True) self._thread_calculated = True self._thread = doc[] self._operation_calculated = True self._operation = doc[u] self._namespace = doc[u] self._command_calculated = True if self.operation == : self._command = doc[u].keys()[0] if in doc: if in doc[] and isinstance(doc[][], dict): self._pattern = str(doc[][]).replace(""$queryqueryquery$query", ) else: self._pattern = str(doc[]).replace(""orderbyqueryqueryorderbyqueryorderby", ) elif in doc[]: self._sort_pattern = str(doc[] []).replace(""nscannednscannedntoreturnntoreturnnupdatednupdatednreturnednreturnedninsertedninsertedndeletedndeletednumYieldnumYieldlockStatslockStatstimeLockedMicrosrlockStatstimeLockedMicroswlockStatstimeAcquiringMicrosrlockStatstimeAcquiringMicrosww:%ir:%ilockslocksqueryquery: %squery", "", )) if in doc: payload += ( % str(doc[u]) .replace("u").replace(""updateobj update: %supdateobj", "", )) scanned = % self._nscanned if in doc else yields = % self._numYields if in doc else duration = % self.duration if self.duration is not None else self._line_str = ("[{thread}] {operation} {namespace} {payload} " "{scanned} {yields} locks(micros) {locks} " "{duration}".format(datetime=self.datetime, thread=self.thread, operation=self.operation, namespace=self.namespace, payload=payload, scanned=scanned, yields=yields, locks=locks, duration=duration))
Parse system.profile doc, copy all values to member variables.
def buildhtmlheader(self):
    """Generate the HTML header content.

    Appends every registered CSS fragment, then every JS fragment,
    to ``self.htmlheader`` in registration order.
    """
    for fragment in list(self.header_css) + list(self.header_js):
        self.htmlheader += fragment
generate HTML header content
async def open_session(self, request: BaseRequestWebsocket) -> Session:
    """Open and return a Session using the request.

    Delegates to the configured session interface, coercing its
    ``open_session`` into a coroutine before awaiting it.
    """
    opener = ensure_coroutine(self.session_interface.open_session)
    return await opener(self, request)
Open and return a Session using the request.
def clean_time(time_string):
    """Return a datetime parsed from the Amazon-provided datetime string.

    When ``settings.USE_TZ`` is false, the result is converted to UTC
    and made naive (tzinfo stripped) to match Django's naive-time mode.
    """
    parsed = dateutil.parser.parse(time_string)
    if settings.USE_TZ:
        return parsed
    return parsed.astimezone(timezone.utc).replace(tzinfo=None)
Return a datetime from the Amazon-provided datetime string
# NOTE(review): corrupted extraction — the values of ``hidden=`` and
# ``read_only=`` (presumably the Win32 attribute constant names) were
# stripped, leaving invalid syntax. Also note ``defaultdict`` is being
# passed ``lambda key: key`` as default_factory, but default_factory is
# called with NO arguments — looks like a latent bug to confirm against
# upstream jaraco.windows.
def SetFileAttributes(filepath, *attrs): nice_names = collections.defaultdict( lambda key: key, hidden=, read_only=, ) flags = (getattr(api, nice_names[attr], attr) for attr in attrs) flags = functools.reduce(operator.or_, flags) handle_nonzero_success(api.SetFileAttributes(filepath, flags))
Set file attributes. e.g.: SetFileAttributes('C:\\foo', 'hidden') Each attr must be either a numeric value, a constant defined in jaraco.windows.filesystem.api, or one of the nice names defined in this function.
# NOTE(review): corrupted extraction — the subscript keys were stripped
# (``info[][]``), leaving invalid syntax. Per the paired docstring this
# returned just the port number for the web container (via docker
# inspect data, presumably NetworkSettings/Ports keys — confirm against
# upstream), or None when not running.
def _current_web_port(self): info = inspect_container(self._get_container_name()) if info is None: return None try: if not info[][]: return None return info[][][][0][] except TypeError: return None
return just the port number for the web container, or None if not running
# NOTE(review): corrupted extraction — the KeyError message template was
# stripped (``KeyError(.format(key=key))``), leaving invalid syntax.
# Intent: return the storer object for a key, raising KeyError when the
# node is not in the file (this is pandas HDFStore.get_storer — restore
# the message from upstream).
def get_storer(self, key): group = self.get_node(key) if group is None: raise KeyError(.format(key=key)) s = self._create_storer(group) s.infer_axes() return s
return the storer object for a key, raise if not in the file
def main():
    """Run the *args/**kwargs compatibility checks.

    Returns 0 when every helper runs without raising.
    """
    print("Python version %s" % sys.version)

    print("Testing compatibility for function defined with *args")
    for candidate in (func_old_args, func_new):
        test_func_args(candidate)

    print("Testing compatibility for function defined with **kwargs")
    for candidate in (func_old_kwargs, func_new):
        test_func_kwargs(candidate)

    print("All tests successful - we can change *args and **kwargs to named args.")
    return 0
Main function calls the test functions
# NOTE(review): corrupted extraction — the ``creator=`` format string
# and the ``self.params_out.get()`` key were stripped
# (``creator= % __version__``), leaving invalid syntax. Intent per the
# (Spanish) docstring: begin creation of the PDF file by building a
# fpdf Template with AFIP grain-settlement metadata.
def CrearPlantillaPDF(self, papel="A4", orientacion="portrait"): "Iniciar la creación del archivo PDF" t = Template( format=papel, orientation=orientacion, title="F 1116 B/C %s" % (self.NroOrden), author="CUIT %s" % self.Cuit, subject="COE %s" % self.params_out.get(), keywords="AFIP Liquidacion Electronica Primaria de Granos", creator= % __version__,) self.template = t return True
Iniciar la creación del archivo PDF
def safestr(value):
    """Ensure type-to-string serialization.

    Falsy values and plain numerics/bools pass through unchanged;
    date/datetime values become ISO-8601 strings; everything else is
    coerced to text. (Python 2 API: relies on ``long``/``unicode``.)
    """
    if not value:
        return value
    if isinstance(value, (int, float, bool, long)):
        return value
    if isinstance(value, (date, datetime)):
        return value.isoformat()
    return unicode(value)
Ensure type to string serialization
def get_comment_form_for_create(self, reference_id, comment_record_types):
    """Gets the comment form for creating new comments.

    A new form should be requested for each create transaction.

    arg:    reference_id (osid.id.Id): the ``Id`` for the reference
            object
    arg:    comment_record_types (osid.type.Type[]): array of comment
            record types
    return: (osid.commenting.CommentForm) - the comment form
    raise:  NullArgument - ``reference_id`` or ``comment_record_types``
            is ``null``
    raise:  Unsupported - unable to get form for requested record types
    *compliance: mandatory -- This method must be implemented.*
    """
    if not isinstance(reference_id, ABCId):
        raise errors.InvalidArgument()
    for record_type in comment_record_types:
        if not isinstance(record_type, ABCType):
            raise errors.InvalidArgument()
    # Shared constructor arguments for both branches.
    form_kwargs = {
        'book_id': self._catalog_id,
        'reference_id': reference_id,
        'catalog_id': self._catalog_id,
        'runtime': self._runtime,
        'proxy': self._proxy,
    }
    if comment_record_types == []:
        # No record types: agent id is stringified in this branch.
        form_kwargs['effective_agent_id'] = str(self.get_effective_agent_id())
    else:
        form_kwargs['record_types'] = comment_record_types
        form_kwargs['effective_agent_id'] = self.get_effective_agent_id()
    obj_form = objects.CommentForm(**form_kwargs)
    obj_form._for_update = False
    self._forms[obj_form.get_id().get_identifier()] = not CREATED
    return obj_form
Gets the comment form for creating new comments. A new form should be requested for each create transaction. arg: reference_id (osid.id.Id): the ``Id`` for the reference object arg: comment_record_types (osid.type.Type[]): array of comment record types return: (osid.commenting.CommentForm) - the comment form raise: NullArgument - ``reference_id or comment_record_types`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - unable to get form for requested record types *compliance: mandatory -- This method must be implemented.*
# NOTE(review): corrupted extraction — default string values, drop/index
# column names, and replace/rename arguments were all stripped
# (``path_csv_in=``, ``drop([, ], axis=1)``, ``.replace(, )``), and a
# bare path fragment sits where the docstring was. Intent per the paired
# docstring: build a description DataFrame of SuPy forcing variables
# from a CSV under the repo input URL. Restore from upstream supy.
def gen_df_forcing( path_csv_in=, url_base=url_repo_input,)->pd.DataFrame: /input_files/SSss_YYYY_data_tt.csv try: urlpath_table = url_base/path_csv_in df_var_info = pd.read_csv(urlpath_table) except: print(f) else: df_var_forcing = df_var_info.drop([, ], axis=1) df_var_forcing = df_var_forcing.set_index() df_var_forcing.index = df_var_forcing.index\ .map(lambda x: x.replace(, ))\ .rename() df_var_forcing.loc[] = return df_var_forcing
Generate description info of supy forcing data into a dataframe Parameters ---------- path_csv_in : str, optional path to the input csv file relative to url_base (the default is '/input_files/SSss_YYYY_data_tt.csv']) url_base : urlpath.URL, optional URL to the input files of repo base (the default is url_repo_input, which is defined at the top of this file) Returns ------- pd.DataFrame Description info of supy forcing data
# NOTE(review): corrupted extraction — the HTTP endpoint path, the
# value keys, and the namedtuple name/field strings were stripped
# (``self.http.get().value``, ``namedtuple(, [, ])``), leaving invalid
# syntax. Intent per the paired docstring: return a Size(width, height)
# namedtuple for the window. Restore literals from upstream.
def window_size(self): value = self.http.get().value w = roundint(value[]) h = roundint(value[]) return namedtuple(, [, ])(w, h)
Returns: namedtuple: eg Size(width=320, height=568)
def p_const_vector_elem_list(p):
    """const_number_list : expr

    Reduces a single expression into a one-element constant list.
    Non-static unary expressions are folded into constexprs; any other
    non-constant expression raises a syntax error and yields None.
    """
    expr = p[1]
    if expr is None:
        return
    if is_static(expr):
        p[0] = [expr]
        return
    if isinstance(expr, symbols.UNARY):
        p[0] = [make_constexpr(p.lineno(1), expr)]
        return
    api.errmsg.syntax_error_not_constant(p.lexer.lineno)
    p[0] = None
const_number_list : expr
def get_loss_func(self, C=1.0, k=1):
    """Get the loss function of the VAE (negative ELBO).

    Args:
        C (float): weight on the KL-divergence regularization term
            (usually 1.0).
        k (int): number of Monte Carlo samples used for the
            reconstruction term.
    """
    def lf(x):
        mu, ln_var = self.encode(x)
        batchsize = len(mu.data)
        # Monte Carlo estimate of the reconstruction term.
        rec_loss = 0
        for _ in six.moves.range(k):
            z = F.gaussian(mu, ln_var)
            rec_loss += F.bernoulli_nll(x, self.decode(z, sigmoid=False)) \
                / (k * batchsize)
        self.rec_loss = rec_loss
        kl_term = gaussian_kl_divergence(mu, ln_var) / batchsize
        self.loss = self.rec_loss + C * kl_term
        return self.loss
    return lf
Get loss function of VAE. The loss value is equal to ELBO (Evidence Lower Bound) multiplied by -1. Args: C (int): Usually this is 1.0. Can be changed to control the second term of ELBO bound, which works as regularization. k (int): Number of Monte Carlo samples used in encoded vector.
def send_request(self, kind, url_components, **kwargs):
    """Send a request for this resource to the API.

    Parameters
    ----------
    kind: str, {'get', 'delete', 'put', 'post', 'head'}
    """
    dispatch = self.api.send_request
    return dispatch(kind, self.resource_path, url_components, **kwargs)
Send a request for this resource to the API Parameters ---------- kind: str, {'get', 'delete', 'put', 'post', 'head'}
# NOTE(review): corrupted extraction — the initial xpath string and the
# attribute-name arguments to ``_xpath_attrib`` were stripped
# (``xpath =``, ``_xpath_attrib(, well_x)``), leaving invalid syntax.
# Intent per the paired docstring: find the ScanWellData element for a
# specific (well_x, well_y). Restore literals from upstream.
def well(self, well_x=1, well_y=1): xpath = xpath += _xpath_attrib(, well_x) xpath += _xpath_attrib(, well_y) return self.well_array.find(xpath)
ScanWellData of specific well. Parameters ---------- well_x : int well_y : int Returns ------- lxml.objectify.ObjectifiedElement
def _low_level_dispatch(pcapdev, devname, pktqueue):
    """Thread entrypoint for low-level receive and dispatch on a single
    pcap device.

    Polls the device until ``LLNetReal.running`` goes false, enqueuing
    (devname, dlt, pktinfo) tuples; logs final device stats on exit.
    """
    while LLNetReal.running:
        pktinfo = pcapdev.recv_packet(timeout=0.2)
        if pktinfo is not None:
            pktqueue.put((devname, pcapdev.dlt, pktinfo))
    log_debug("Receiver thread for {} exiting".format(devname))
    stats = pcapdev.stats()
    log_debug(
        "Final device statistics {}: {} received, {} dropped, {} dropped/if".format(
            devname, stats.ps_recv, stats.ps_drop, stats.ps_ifdrop))
Thread entrypoint for doing low-level receive and dispatch for a single pcap device.
def fix_encoding_and_explain(text):
    """Re-decode text that has been decoded incorrectly, and also return
    a "plan" of the steps required to fix it.

    The plan can be replayed with :func:`ftfy.fixes.apply_plan` on other
    strings broken the same way. Iterates single fix steps until a fixed
    point, tracking the lowest-cost version seen.
    """
    best_version = text
    best_cost = text_cost(text)
    best_plan = []
    plan_so_far = []
    while True:
        previous = text
        text, steps = fix_one_step_and_explain(text)
        plan_so_far.extend(steps)
        # Total cost = cost of the current text plus the cost of every
        # step taken so far to reach it.
        cost = text_cost(text) + sum(step_cost for _, _, step_cost in plan_so_far)
        if cost < best_cost:
            best_cost = cost
            best_version = text
            best_plan = list(plan_so_far)
        if text == previous:
            return best_version, best_plan
Re-decodes text that has been decoded incorrectly, and also return a "plan" indicating all the steps required to fix it. The resulting plan could be used with :func:`ftfy.fixes.apply_plan` to fix additional strings that are broken in the same way.
# NOTE(review): corrupted extraction — the default dictionary-key
# strings for surface_tension/contact_angle/diameter were stripped
# (``surface_tension=,``), the raw docstring collapsed to a bare ``r``,
# and the ``diameter.split()[0] == :`` comparison lost its literal.
# Intent per the paired docstring: compute Washburn capillary entry
# pressure P_c = -2*sigma*cos(theta)/r for throats (or pores) of an
# OpenPNM network. Restore from upstream OpenPNM.
def washburn(target, surface_tension=, contact_angle=, diameter=): r network = target.project.network phase = target.project.find_phase(target) element, sigma, theta = _get_key_props(phase=phase, diameter=diameter, surface_tension=surface_tension, contact_angle=contact_angle) r = network[diameter]/2 value = -2*sigma*_sp.cos(_sp.radians(theta))/r if diameter.split()[0] == : value = value[phase.throats(target.name)] else: value = value[phase.pores(target.name)] value[_sp.absolute(value) == _sp.inf] = 0 return value
r""" Computes the capillary entry pressure assuming the throat in a cylindrical tube. Parameters ---------- target : OpenPNM Object The object for which these values are being calculated. This controls the length of the calculated array, and also provides access to other necessary thermofluid properties. surface_tension : string The dictionary key containing the surface tension values to be used. If a pore property is given, it is interpolated to a throat list. contact_angle : string The dictionary key containing the contact angle values to be used. If a pore property is given, it is interpolated to a throat list. diameter : string The dictionary key containing the throat diameter values to be used. Notes ----- The Washburn equation is: .. math:: P_c = -\frac{2\sigma(cos(\theta))}{r} This is the most basic approach to calculating entry pressure and is suitable for highly non-wetting invading phases in most materials.
# NOTE(review): corrupted extraction — the section-name arguments and
# entry keys were stripped (``self._get_section(group_name, )``,
# ``hosts_section[]``, ``entry[]``), leaving invalid syntax. Intent per
# the paired docstring: recursively collect each unique hostname in or
# under the group, including children groups. Restore literals
# (presumably 'hosts'/'children' and entry-name keys — confirm
# upstream).
def _group_get_hostnames(self, group_name): hostnames = [] hosts_section = self._get_section(group_name, ) if hosts_section: for entry in hosts_section[]: hostnames.extend(self.expand_hostdef(entry[])) children_section = self._get_section(group_name, ) if children_section: for entry in children_section[]: hostnames.extend(self._group_get_hostnames(entry[])) return hostnames
Recursively fetch a list of each unique hostname that belongs in or under the group. This includes hosts in children groups.
# NOTE(review): corrupted extraction — the join separators inside the
# f-strings were stripped (``f"{.join(set_line)}\n"`` is invalid
# syntax). Intent per the paired docstring: merge any multi-line SET
# statements in a BEL script into single-line SET statements. Restore
# the separator (likely " ") from upstream.
def preprocess_belscript(lines): set_flag = False for line in lines: if set_flag is False and re.match("SET", line): set_flag = True set_line = [line.rstrip()] elif set_flag and re.match("SET", line): yield f"{.join(set_line)}\n" set_line = [line.rstrip()] elif set_flag and re.match("\s+$", line): yield f"{.join(set_line)}\n" yield line set_flag = False elif set_flag: set_line.append(line.rstrip()) else: yield line
Convert any multi-line SET statements into single line SET statements
def get_jamo_class(jamo):
    """Determine whether a jamo character is a lead, vowel, or tail.

    Integers and U+11xx characters are valid arguments; HCJ consonants
    are not. Jamo class follows the Unicode 7.0 specification, so filler
    characters (U+115F, U+1160) are classified too.

    Raises InvalidJamoError for classless input.
    """
    if jamo in JAMO_LEADS or jamo == chr(0x115F):
        return "lead"
    if (jamo in JAMO_VOWELS or jamo == chr(0x1160)
            or 0x314F <= ord(jamo) <= 0x3163):
        return "vowel"
    if jamo in JAMO_TAILS:
        return "tail"
    raise InvalidJamoError("Invalid or classless jamo argument.", jamo)
Determine if a jamo character is a lead, vowel, or tail. Integers and U+11xx characters are valid arguments. HCJ consonants are not valid here. get_jamo_class should return the class ["lead" | "vowel" | "tail"] of a given character or integer. Note: jamo class directly corresponds to the Unicode 7.0 specification, thus includes filler characters as having a class.
# NOTE(review): corrupted extraction — file mode, form-field names,
# Content-Range format string, header name, URL suffixes and dict keys
# were all stripped (``open(file_obj, )``, ``files={: ...}``,
# ``content_range = % (...)``, ``chunked_upload_url.endswith():``),
# leaving invalid syntax. Intent per the paired docstring: upload a
# file to AmigoCloud, using the simple endpoint below
# MAX_SIZE_SIMPLE_UPLOAD and the chunked endpoint (with md5 completion
# call) otherwise. Restore from upstream amigocloud client.
def upload_file(self, simple_upload_url, chunked_upload_url, file_obj, chunk_size=CHUNK_SIZE, force_chunked=False, extra_data=None): if isinstance(file_obj, string_types): file_obj = open(file_obj, ) close_file = True else: close_file = False file_obj.seek(0, os.SEEK_END) file_size = file_obj.tell() file_obj.seek(0) try: if (simple_upload_url and not force_chunked and file_size < MAX_SIZE_SIMPLE_UPLOAD): return self.post(simple_upload_url, data=extra_data, files={: file_obj}) data = {} md5_hash = hashlib.md5() start_byte = 0 while True: chunk = file_obj.read(chunk_size) md5_hash.update(chunk) end_byte = start_byte + len(chunk) - 1 content_range = % (start_byte, end_byte, file_size) ret = self.post(chunked_upload_url, data=data, files={: chunk}, headers={: content_range}) data.setdefault(, ret[]) start_byte = end_byte + 1 if start_byte == file_size: break if chunked_upload_url.endswith(): chunked_upload_complete_url = chunked_upload_url + else: chunked_upload_complete_url = chunked_upload_url + data[] = md5_hash.hexdigest() if extra_data: data.update(extra_data) return self.post(chunked_upload_complete_url, data=data) finally: if close_file: file_obj.close()
Generic method to upload files to AmigoCloud. Can be used for different API endpoints. `file_obj` could be a file-like object or a filepath. If the size of the file is greater than MAX_SIZE_SIMPLE_UPLOAD (8MB) `chunked_upload_url` will be used, otherwise `simple_upload_url` will be. If `simple_upload_url` evaluates to False, or `force_chunked` is True, the `chunked_upload_url` will always be used.
# NOTE(review): corrupted extraction — assert messages, params dict
# keys ('lon'/'lat'/'lang' presumably — confirm upstream pyowm), and
# the parser key were stripped (``params = {: lon, : lat, ...}``),
# leaving invalid syntax. Intent per the paired docstring: query the
# OWM API for a daily forecast at (lat, lon) and wrap the parsed
# Forecast in a Forecaster, or return None when unavailable.
def daily_forecast_at_coords(self, lat, lon, limit=None): geo.assert_is_lon(lon) geo.assert_is_lat(lat) if limit is not None: assert isinstance(limit, int), " must be an int or None" if limit < 1: raise ValueError(" must be None or greater than zero") params = {: lon, : lat, : self._language} if limit is not None: params[] = limit uri = http_client.HttpClient.to_url(DAILY_FORECAST_URL, self._API_key, self._subscription_type, self._use_ssl) _, json_data = self._wapi.cacheable_get_json(uri, params=params) forecast = self._parsers[].parse_JSON(json_data) if forecast is not None: forecast.set_interval("daily") return forecaster.Forecaster(forecast) else: return None
Queries the OWM Weather API for daily weather forecast for the specified geographic coordinate (eg: latitude: 51.5073509, longitude: -0.1277583). A *Forecaster* object is returned, containing a *Forecast* instance covering a global streak of fourteen days by default: this instance encapsulates *Weather* objects, with a time interval of one day one from each other :param lat: location's latitude, must be between -90.0 and 90.0 :type lat: int/float :param lon: location's longitude, must be between -180.0 and 180.0 :type lon: int/float :param limit: the maximum number of daily *Weather* items to be retrieved (default is ``None``, which stands for any number of items) :type limit: int or ``None`` :returns: a *Forecaster* instance or ``None`` if forecast data is not available for the specified location :raises: *ParseResponseException* when OWM Weather API responses' data cannot be parsed, *APICallException* when OWM Weather API can not be reached, *ValueError* if negative values are supplied for limit
def perform_extended_selection(self, event=None):
    """Performs extended word selection.

    :param event: QMouseEvent; accepted when provided
    """
    helper = TextHelper(self.editor)
    helper.select_extended_word(
        continuation_chars=self.continuation_characters)
    if event:
        event.accept()
Performs extended word selection. :param event: QMouseEvent
def p_delays_floatnumber(self, p):
    """delays : DELAY floatnumber"""
    lineno = p.lineno(1)
    p[0] = DelayStatement(FloatConst(p[2], lineno=lineno), lineno=lineno)
    p.set_lineno(0, lineno)
delays : DELAY floatnumber
# NOTE(review): corrupted extraction — the attribute name in
# ``hasattr(module, )``, the substring tests (``if in amp:``,
# ``if not in module.__name__:``) and the split separator (likely '.')
# were stripped, leaving invalid syntax. Intent per the paired
# docstring: report whether a module is a builtin or part of the
# standard library. Restore literals from upstream.
def _is_builtin_module(module): if (not hasattr(module, )) or module.__name__ in sys.builtin_module_names: return True if module.__name__ in _stdlib._STD_LIB_MODULES: return True amp = os.path.abspath(module.__file__) if in amp: return False if amp.startswith(_STD_MODULE_DIR): return True if not in module.__name__: return False mn_top = module.__name__.split()[0] return mn_top in _stdlib._STD_LIB_MODULES
Is builtin or part of standard library
# NOTE(review): corrupted extraction — the dict keys for doc.get /
# service.get / doc[...] / service[...] were stripped, leaving invalid
# syntax. Intent per the paired docstring: a (CouchDB-style) view that
# yields each service under several (type, org) key combinations.
# Restore key literals from upstream.
def services(doc): for service_id, service in doc.get(, {}).items(): service_type = service.get() org = doc[] service[] = service_id service[] = org yield service_id, service yield [service_type, org], service yield [service_type, None], service yield [None, org], service yield [None, None], service
View for getting services
# NOTE(review): corrupted extraction — the tuple of hazard parameter
# names, the "%s=%s"-style format string, and the join separator were
# stripped (``if key in (, , ...)``), leaving invalid syntax. Intent
# per the paired docstring: build an unsigned 32-bit adler32 checksum
# from the calculation's input files (and, when hazard=True, from the
# hazard parameters too). Restore from upstream openquake.
def get_checksum32(oqparam, hazard=False): checksum = 0 for fname in get_input_files(oqparam, hazard): checksum = _checksum(fname, checksum) if hazard: hazard_params = [] for key, val in vars(oqparam).items(): if key in (, , , , , , , , , , , , , , , , ): hazard_params.append( % (key, val)) data = .join(hazard_params).encode() checksum = zlib.adler32(data, checksum) & 0xffffffff return checksum
Build an unsigned 32 bit integer from the input files of a calculation. :param oqparam: an OqParam instance :param hazard: if True, consider only the hazard files :returns: the checksum
# NOTE(review): corrupted extraction — the section attribute key was
# stripped (``if in node: self.current_level = node[]``), leaving
# invalid syntax (likely the 'level' attribute — confirm upstream).
# Intent per the paired docstring: traverse the docutils tree rooted at
# ``node``, replacing children via find_replace and recursing into the
# rest, restoring the heading level afterwards.
def traverse(self, node): old_level = self.current_level if isinstance(node, nodes.section): if in node: self.current_level = node[] to_visit = [] to_replace = [] for c in node.children[:]: newnode = self.find_replace(c) if newnode is not None: to_replace.append((c, newnode)) else: to_visit.append(c) for oldnode, newnodes in to_replace: node.replace(oldnode, newnodes) for child in to_visit: self.traverse(child) self.current_level = old_level
Traverse the document tree rooted at node. node : docutil node current root node to traverse
def get_closest(cls, *locale_codes: str) -> "Locale":
    """Returns the closest match for the given locale code."""
    for raw_code in locale_codes:
        if not raw_code:
            continue
        normalized = raw_code.replace("-", "_")
        parts = normalized.split("_")
        if len(parts) > 2:
            # Malformed code (too many components) — try the next one.
            continue
        if len(parts) == 2:
            # Canonicalize as language_REGION (e.g. en_US).
            normalized = parts[0].lower() + "_" + parts[1].upper()
        if normalized in _supported_locales:
            return cls.get(normalized)
        # Fall back to the bare language component.
        if parts[0].lower() in _supported_locales:
            return cls.get(parts[0].lower())
    return cls.get(_default_locale)
Returns the closest match for the given locale code.
def ParseFileSystemsStruct(struct_class, fs_count, data):
    """Parse a packed buffer into a list of ``fs_count`` struct
    instances of ``struct_class``.

    Each struct's f_fstypename/f_mntonname/f_mntfromname byte fields are
    truncated at their first NUL byte (C-string semantics).
    """
    def cstr(raw):
        return raw.split(b"\x00", 1)[0]

    results = []
    for index in range(0, fs_count):
        struct_size = struct_class.GetSize()
        chunk = data[index * struct_size:(index + 1) * struct_size]
        entry = struct_class(chunk)
        entry.f_fstypename = cstr(entry.f_fstypename)
        entry.f_mntonname = cstr(entry.f_mntonname)
        entry.f_mntfromname = cstr(entry.f_mntfromname)
        results.append(entry)
    return results
Take the struct type and parse it into a list of structs.
# NOTE(review): corrupted extraction — the initial string, the column
# separator (likely '&'), and the LaTeX row terminator (raw string
# after ``+= r``) were stripped, leaving invalid syntax. Intent per the
# paired docstring: render ``self.matrix`` as LaTeX matrix body text.
# Restore literals from upstream (PyLaTeX Matrix.dumps_content).
def dumps_content(self): import numpy as np string = shape = self.matrix.shape for (y, x), value in np.ndenumerate(self.matrix): if x: string += string += str(value) if x == shape[1] - 1 and y != shape[0] - 1: string += r + super().dumps_content() return string
Return a string representing the matrix in LaTeX syntax. Returns ------- str
# NOTE(review): corrupted and truncated extraction — the body of
# processComponentPair breaks off mid-logic and the outer function ends
# with a dangling ``return vevents + vtodos`` referencing names never
# defined here; getSortKey/getBehavior/newFromBehavior are also
# referenced but not defined in this view. Intent per the paired
# docstring: compare VEVENTs/VTODOs of two VCALENDARs and return pairs
# of objects holding UID plus mismatched parts (None for absent sides),
# preserving ContentLine order. Restore from upstream vobject.
def diff(left, right): def processComponentLists(leftList, rightList): output = [] rightIndex = 0 rightListSize = len(rightList) for comp in leftList: if rightIndex >= rightListSize: output.append((comp, None)) else: leftKey = getSortKey(comp) rightComp = rightList[rightIndex] rightKey = getSortKey(rightComp) while leftKey > rightKey: output.append((None, rightComp)) rightIndex += 1 if rightIndex >= rightListSize: output.append((comp, None)) break else: rightComp = rightList[rightIndex] rightKey = getSortKey(rightComp) if leftKey < rightKey: output.append((comp, None)) elif leftKey == rightKey: rightIndex += 1 matchResult = processComponentPair(comp, rightComp) if matchResult is not None: output.append(matchResult) return output def newComponent(name, body): if body is None: return None else: c = Component(name) c.behavior = getBehavior(name) c.isNative = True return c def processComponentPair(leftComp, rightComp): leftChildKeys = leftComp.contents.keys() rightChildKeys = rightComp.contents.keys() differentContentLines = [] differentComponents = {} for key in leftChildKeys: rightList = rightComp.contents.get(key, []) if isinstance(leftComp.contents[key][0], Component): compDifference = processComponentLists(leftComp.contents[key], rightList) if len(compDifference) > 0: differentComponents[key] = compDifference elif leftComp.contents[key] != rightList: differentContentLines.append((leftComp.contents[key], rightList)) for key in rightChildKeys: if key not in leftChildKeys: if isinstance(rightComp.contents[key][0], Component): differentComponents[key] = ([], rightComp.contents[key]) else: differentContentLines.append(([], rightComp.contents[key])) if len(differentContentLines) == 0 and len(differentComponents) == 0: return None else: left = newFromBehavior(leftComp.name) right = newFromBehavior(leftComp.name) return vevents + vtodos
Take two VCALENDAR components, compare VEVENTs and VTODOs in them, return a list of object pairs containing just UID and the bits that didn't match, using None for objects that weren't present in one version or the other. When there are multiple ContentLines in one VEVENT, for instance many DESCRIPTION lines, such lines' original order is assumed to be meaningful. Order is also preserved when comparing (the unlikely case of) multiple parameters of the same type in a ContentLine
# NOTE(review): corrupted extraction — the dict keys were stripped
# (``{ : system_name, : release_name, }``), leaving invalid syntax
# (presumably 'system'/'release' keys — confirm upstream). Also a bare
# ``except:`` swallows everything, including KeyboardInterrupt.
def get_platform_info(): try: system_name = platform.system() release_name = platform.release() except: system_name = "Unknown" release_name = "Unknown" return { : system_name, : release_name, }
Gets platform info :return: platform info
def merge(self, other_rel):
    """Ingest another DistributedReliability and add its contents to the
    current object.

    Frequencies are accumulated only when the two threshold grids match
    exactly; otherwise a warning is printed and self is left unchanged.

    Args:
        other_rel: a DistributedReliability object.
    """
    same_grid = (other_rel.thresholds.size == self.thresholds.size
                 and np.all(other_rel.thresholds == self.thresholds))
    if same_grid:
        self.frequencies += other_rel.frequencies
    else:
        print("Input table thresholds do not match.")
Ingest another DistributedReliability and add its contents to the current object. Args: other_rel: a Distributed reliability object.
# NOTE(review): corrupted extraction — template name, context dict
# keys, the prefs key, and the subject format string were stripped
# (``render_to_string(, {...})``, ``{: comment.user, ...}``,
# ``subject = .format(...)``), leaving invalid syntax. Intent per the
# paired docstring: email an item's author about a new comment, unless
# the pref is off or the commenter is the author. Restore from
# upstream frog.
def emailComment(comment, obj, request): if not obj.author.frog_prefs.get().json()[]: return if obj.author == request.user: return html = render_to_string(, { : comment.user, : comment.comment, : obj, : , : isinstance(obj, Image), : FROG_SITE_URL, }) subject = .format(getSiteConfig()[], comment.user_name) fromemail = comment.user_email to = obj.author.email text_content = html_content = html send_mail(subject, text_content, fromemail, [to], html_message=html_content)
Send an email to the author about a new comment
def _hole_end(self, position, ignore=None):
    """Retrieve the end-of-hole index from ``position``.

    Scans forward from ``position`` and returns the first index that
    has any starting entry not rejected by ``ignore``; falls back to
    ``self.max_end`` when none is found.

    :param position: index to start scanning from
    :param ignore: optional predicate; entries it matches are skipped
    """
    for index in range(position, self.max_end):
        occupied = any(not ignore or not ignore(candidate)
                       for candidate in self.starting(index))
        if occupied:
            return index
    return self.max_end
Retrieves the end of hole index from position. :param position: :type position: :param ignore: :type ignore: :return: :rtype:
def run(self, input):
    """Run :attr:`executable` with ``input`` as stdin.

    Raises :class:`AssetHandlerError` when the process exits nonzero;
    otherwise returns the decoded stdout.
    """
    process = self.get_process()
    stdout, stderr = process.communicate(input=input.encode())
    if process.returncode != 0:
        raise AssetHandlerError(stderr)
    return stdout.decode()
Runs :attr:`executable` with ``input`` as stdin. :class:`AssetHandlerError` exception is raised, if execution is failed, otherwise stdout is returned.
# NOTE(review): corrupted and truncated extraction — nameop keys and
# format strings were stripped (``nameop[]``, ``if not in nameop:``),
# a balance-check branch is missing (token_address/account_balance/
# token_type are referenced but never assigned, and an ``else:``
# appears without its ``if``), leaving invalid syntax. Intent per the
# paired docstring: validate a NAMESPACE_PREORDER nameop (uniqueness,
# consensus hash, fee, burn address, token payment) and record the
# payment info. Restore from upstream Blockstack.
def check( state_engine, nameop, block_id, checked_ops ): namespace_id_hash = nameop[] consensus_hash = nameop[] token_fee = nameop[] if not state_engine.is_new_namespace_preorder( namespace_id_hash ): log.warning("Namespace preorder already in use" % namespace_id_hash) return False if not state_engine.is_consensus_hash_valid( block_id, consensus_hash ): valid_consensus_hashes = state_engine.get_valid_consensus_hashes( block_id ) log.warning("Invalid consensus hash : expected any of %s" % (consensus_hash, ",".join( valid_consensus_hashes )) ) return False if not in nameop: log.warning("Missing namespace preorder fee") return False if nameop[] != BLOCKSTACK_BURN_ADDRESS: log.warning("Invalid burn address: expected {}, got {}".format(BLOCKSTACK_BURN_ADDRESS, nameop[])) return False log.warning("Account {} has balance {} {}, but needs to pay {} {}".format(token_address, account_balance, token_type, token_fee, token_type)) return False state_preorder_put_account_payment_info(nameop, token_address, token_type, token_fee) nameop[] = .format(token_fee) nameop[] = TOKEN_TYPE_STACKS else: state_preorder_put_account_payment_info(nameop, None, None, None) nameop[] = nameop[] = return True
Given a NAMESPACE_PREORDER nameop, see if we can preorder it. It must be unique. Return True if accepted. Return False if not.
def opt_pagesize(self, pagesize):
    """Set the page size of the query output.

    The literal "auto" is stored verbatim; any other value is coerced
    to int before being stored in ``self.conf["pagesize"]``.
    """
    if pagesize == "auto":
        self.conf["pagesize"] = pagesize
    else:
        self.conf["pagesize"] = int(pagesize)
Get or set the page size of the query output
def swarm(self, predictedField=None, swarmParams=None):
    """Run a swarm on data within the working directory.

    If no predictedField is provided, the first stream listed in the
    streamIds given to the constructor is assumed to be the predicted
    field.

    :param predictedField: (string)
    :param swarmParams: (dict) overrides any swarm params
    """
    options = dict(predictedField=predictedField, swarmParams=swarmParams)
    self.prepareSwarm(**options)
    self.runSwarm(self._workingDir)
Runs a swarm on data and swarm description found within the given working directory. If no predictedField is provided, it is assumed that the first stream listed in the streamIds provided to the Menorah constructor is the predicted field. :param predictedField: (string) :param swarmParams: (dict) overrides any swarm params :return:
# NOTE(review): corrupted extraction — the markup strings surrounding
# the icon were stripped (``set_label_markup(label, + icon + , ...)``),
# leaving invalid syntax. Intent per the paired docstring: build a Gtk
# button label showing the given icon at the given font size. Restore
# the markup literals from upstream.
def create_button_label(icon, font_size=constants.FONT_SIZE_NORMAL): label = Gtk.Label() set_label_markup(label, + icon + , constants.ICON_FONT, font_size) label.show() return label
Create a button label with a chosen icon. :param icon: The icon :param font_size: The size of the icon :return: The created label
# NOTE(review): corrupted extraction — message format strings and
# keyword-argument keys were stripped (``self.message(.format(...))``,
# ``kwargs = {: self.client, : ...}``), leaving invalid syntax. Intent
# per the paired docstring: a context manager that draws a serial
# number from a Redis-backed dispenser, keeps it alive via a Keeper
# while the body runs, then advances the indicator and announces.
# Restore literals from upstream.
def draw(self, label, expire): with self.client.pipeline() as pipe: pipe.msetnx({self.keys.dispenser: 0, self.keys.indicator: 1}) pipe.incr(self.keys.dispenser) number = pipe.execute()[-1] self.message(.format(number, label)) kwargs = {: self.client, : self.keys.key(number)} keeper = Keeper(label=label, expire=expire, **kwargs) try: yield number except: self.message(.format(number)) raise finally: keeper.close() self.message(.format(number, label)) number += 1 self.client.set(self.keys.indicator, number) self.announce(number)
Return a Serial number for this resource queue, after bootstrapping.
def highlight(self, rect, color="red", seconds=None):
    """Simulate a transparent rectangle over ``rect`` on the screen.

    Actually takes a screenshot of the region and displays it with a
    colored rectangle border in a borderless window (a Tkinter
    limitation workaround). Reuses an existing Tkinter root when one
    exists; otherwise creates a temporary hidden one. When ``seconds``
    is 0 the window runs on a background thread and the window object
    is returned so the caller can dismiss it.
    """
    if tk._default_root is None:
        Debug.log(3, "Creating new temporary Tkinter root")
        temporary_root = True
        root = tk.Tk()
        root.withdraw()
    else:
        Debug.log(3, "Borrowing existing Tkinter root")
        temporary_root = False
        root = tk._default_root
    region_image = self.getBitmapFromRect(*rect)
    app = highlightWindow(root, rect, color, region_image)
    if seconds == 0:
        worker = threading.Thread(target=app.do_until_timeout)
        worker.start()
        return app
    app.do_until_timeout(seconds)
Simulates a transparent rectangle over the specified ``rect`` on the screen. Actually takes a screenshot of the region and displays with a rectangle border in a borderless window (due to Tkinter limitations) If a Tkinter root window has already been created somewhere else, uses that instead of creating a new one.
def img(self):
    """Return a cv image for the icon, rotated by ``self.rotation``
    degrees about the thumbnail centre when a rotation is set."""
    SlipThumbnail.img(self)
    if not self.rotation:
        self._rotated = self._img
    else:
        centre = (self.height // 2, self.width // 2)
        mat = cv2.getRotationMatrix2D(centre, -self.rotation, 1.0)
        self._rotated = cv2.warpAffine(self._img, mat,
                                       (self.height, self.width))
    return self._rotated
return a cv image for the icon
# NOTE(review): corrupted extraction — the solution dict keys were
# stripped (``trunk_sid=self._solution[], sid=self._solution[],``),
# leaving invalid syntax (presumably 'trunk_sid' and 'sid' — confirm
# against the Twilio helper library). Intent per the paired docstring:
# lazily build and cache the TerminatingSipDomainContext that instance
# actions are proxied to.
def _proxy(self): if self._context is None: self._context = TerminatingSipDomainContext( self._version, trunk_sid=self._solution[], sid=self._solution[], ) return self._context
Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: TerminatingSipDomainContext for this TerminatingSipDomainInstance :rtype: twilio.rest.trunking.v1.trunk.terminating_sip_domain.TerminatingSipDomainContext
def fft(a, n=None, axis=-1, norm=None):
    """Compute the one-dimensional discrete Fourier Transform via
    mkl_fft.

    Applies 1/sqrt(m) scaling along ``axis`` when ``norm`` requests
    unitary ("ortho") normalization; see numpy.fft for conventions.
    """
    result = mkl_fft.fft(a, n, axis)
    if _unitary(norm):
        result *= 1 / sqrt(result.shape[axis])
    return result
Compute the one-dimensional discrete Fourier Transform. This function computes the one-dimensional *n*-point discrete Fourier Transform (DFT) with the efficient Fast Fourier Transform (FFT) algorithm [CT]. Parameters ---------- a : array_like Input array, can be complex. n : int, optional Length of the transformed axis of the output. If `n` is smaller than the length of the input, the input is cropped. If it is larger, the input is padded with zeros. If `n` is not given, the length of the input along the axis specified by `axis` is used. axis : int, optional Axis over which to compute the FFT. If not given, the last axis is used. norm : {None, "ortho"}, optional .. versionadded:: 1.10.0 Normalization mode (see `numpy.fft`). Default is None. Returns ------- out : complex ndarray The truncated or zero-padded input, transformed along the axis indicated by `axis`, or the last one if `axis` is not specified. Raises ------ IndexError if `axes` is larger than the last axis of `a`. See Also -------- numpy.fft : for definition of the DFT and conventions used. ifft : The inverse of `fft`. fft2 : The two-dimensional FFT. fftn : The *n*-dimensional FFT. rfftn : The *n*-dimensional FFT of real input. fftfreq : Frequency bins for given FFT parameters. Notes ----- FFT (Fast Fourier Transform) refers to a way the discrete Fourier Transform (DFT) can be calculated efficiently, by using symmetries in the calculated terms. The symmetry is highest when `n` is a power of 2, and the transform is therefore most efficient for these sizes. The DFT is defined, with the conventions used in this implementation, in the documentation for the `numpy.fft` module. References ---------- .. [CT] Cooley, James W., and John W. Tukey, 1965, "An algorithm for the machine calculation of complex Fourier series," *Math. Comput.* 19: 297-301. 
Examples -------- >>> np.fft.fft(np.exp(2j * np.pi * np.arange(8) / 8)) array([ -3.44505240e-16 +1.14383329e-17j, 8.00000000e+00 -5.71092652e-15j, 2.33482938e-16 +1.22460635e-16j, 1.64863782e-15 +1.77635684e-15j, 9.95839695e-17 +2.33482938e-16j, 0.00000000e+00 +1.66837030e-15j, 1.14383329e-17 +1.22460635e-16j, -1.64863782e-15 +1.77635684e-15j]) >>> import matplotlib.pyplot as plt >>> t = np.arange(256) >>> sp = np.fft.fft(np.sin(t)) >>> freq = np.fft.fftfreq(t.shape[-1]) >>> plt.plot(freq, sp.real, freq, sp.imag) [<matplotlib.lines.Line2D object at 0x...>, <matplotlib.lines.Line2D object at 0x...>] >>> plt.show() In this example, real input has an FFT which is Hermitian, i.e., symmetric in the real part and anti-symmetric in the imaginary part, as described in the `numpy.fft` documentation.
def render_cheetah_tmpl(tmplstr, context, tmplpath=None):
    """Render a Cheetah template string against the given context.

    :param tmplstr: template source text.
    :param context: mapping placed on the Cheetah search list.
    :param tmplpath: accepted for interface parity with sibling renderers
        but not used here -- TODO confirm this is intentional.
    :return: rendered output, decoded via ``salt.utils.data.decode``.
    """
    # Imported lazily so the module loads even when Cheetah is absent.
    from Cheetah.Template import Template
    return salt.utils.data.decode(Template(tmplstr, searchList=[context]))
Render a Cheetah template.
def appendData(self, content):
    """Append character data to this element's pcdata, starting it if unset."""
    if self.pcdata is None:
        self.pcdata = content
    else:
        self.pcdata += content
Add characters to the element's pcdata.
def _zforce(self,R,z,phi=0.,t=0.):
    """Evaluate the vertical force of this moving-object potential.

    :param R: Galactocentric cylindrical radius of the evaluation point.
    :param z: vertical height of the evaluation point.
    :param phi: azimuth (default 0).
    :param t: time (default 0); the object's position is taken from its
        orbit at this time.
    :return: vertical force at (R, z, phi, t).
    """
    # In-plane distance between the evaluation point and the orbiting
    # object's current position.
    Rdist = _cylR(R,phi,self._orb.R(t),self._orb.phi(t))
    # Offsets from the object to the evaluation point (xd/yd unused here;
    # only the vertical offset zd feeds the force evaluation).
    (xd,yd,zd) = _cyldiff(self._orb.R(t), self._orb.phi(t), self._orb.z(t), R, phi, z)
    # NOTE(review): the leading minus presumably converts the wrapped
    # potential's force convention to this class's convention -- confirm
    # against the other _*force methods of this class.
    return -evaluatezforces(self._pot,Rdist,zd, use_physical=False)
NAME: _zforce PURPOSE: evaluate the vertical force for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t - time OUTPUT: the vertical force HISTORY: 2011-04-10 - Written - Bovy (NYU) 2018-10-18 - Updated for general object potential - James Lane (UofT)
def header_body_from_content(content):
    """Split cable content into (header, message).

    The header is the leading classification / SUBJECT / REF section; the
    message usually begins with a numbered summary paragraph.

    `content`
        The "content" part of a cable.

    Returns (header, msg), or (None, None) if no boundary can be found.
    """
    m = _CLASSIFIED_BY_PATTERN.search(content)
    # Bug fix: the previous `m and m.end() or 0` / `m and m.start() or None`
    # idiom dropped a match whose offset was 0, because 0 is falsy.
    idx = m.end() if m else 0
    m = _SUMMARY_PATTERN.search(content)
    summary_idx = m.start() if m else None
    m = _FIRST_PARAGRAPH_PATTERN.search(content)
    para_idx = m.start() if m else None
    # Prefer whichever message marker comes first, but never move the
    # boundary before the end of the classification line.
    if summary_idx is not None and para_idx is not None:
        idx = max(idx, min(summary_idx, para_idx))
    elif summary_idx is not None:
        idx = max(summary_idx, idx)
    elif para_idx is not None:
        idx = max(para_idx, idx)
    if idx > 0:
        return content[:idx], content[idx:]
    return None, None
\ Tries to extract the header and the message from the cable content. The header is something like UNCLASSIFIED ... SUBJECT ... REF ... while the message begins usually with a summary 1. SUMMARY ... ... 10. ... Returns (header, msg) or (None, None) if the header/message cannot be detected. `content` The "content" part of a cable.
def parallel_apply(func, arg_iterable, **kwargs): max_workers = kwargs.pop(, None) parallel = kwargs.pop(, True) parallel_warning = kwargs.pop(, True) func_args = kwargs.pop(, ()) func_pre_args = kwargs.pop(, ()) func_kwargs = kwargs.pop(, {}) tqdm_kwargs = kwargs.pop(, {}) if kwargs: raise TypeError(.format(kwargs)) if not in tqdm_kwargs: tqdm_kwargs[] = False assert isinstance(func_args, tuple), ( str(func_args) + + str(type(func_args))) assert isinstance(func_pre_args, tuple), ( str(func_pre_args) + + str(type(func_pre_args))) progress = select_tqdm() if not parallel: if parallel_warning: warnings.warn(( ), UserWarning) return [func(*(func_pre_args + (x,) + func_args), **func_kwargs) for x in progress(arg_iterable, **tqdm_kwargs)] else: pool = concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) futures = [] for element in arg_iterable: futures.append(pool.submit( func, *(func_pre_args + (element,) + func_args), **func_kwargs)) results = [] for fut in progress(concurrent.futures.as_completed(futures), total=len(arg_iterable), **tqdm_kwargs): results.append(fut.result()) return results
Apply function to iterable with parallelisation and a tqdm progress bar. Roughly equivalent to >>> [func(*func_pre_args, x, *func_args, **func_kwargs) for x in arg_iterable] but will **not** necessarily return results in input order. Parameters ---------- func: function Function to apply to list of args. arg_iterable: iterable argument to iterate over. func_args: tuple, optional Additional positional arguments for func. func_pre_args: tuple, optional Positional arguments to place before the iterable argument in func. func_kwargs: dict, optional Additional keyword arguments for func. parallel: bool, optional To turn off parallelisation if needed. parallel_warning: bool, optional To turn off warning for no parallelisation if needed. max_workers: int or None, optional Number of processes. If max_workers is None then concurrent.futures.ProcessPoolExecutor defaults to using the number of processors of the machine. N.B. If max_workers=None and running on supercomputer clusters with multiple nodes, this may default to the number of processors on a single node. Returns ------- results_list: list of function outputs
def remove_masquerade(zone=None, permanent=True): ** if zone: cmd = .format(zone) else: cmd = if permanent: cmd += return __firewall_cmd(cmd)
Remove masquerade on a zone. If zone is omitted, default zone will be used. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' firewalld.remove_masquerade To remove masquerade on a specific zone .. code-block:: bash salt '*' firewalld.remove_masquerade dmz
def get_page_full_export(self, page_id): try: result = self._request(, {: page_id}) return TildaPage(**result) except NetworkError: return []
Get full page info for export and body html code
def elemc(item, inset):
    """Determine whether an item is an element of a character set.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/elemc_c.html

    :param item: Item to be tested.
    :type item: str
    :param inset: Set to be tested.
    :type inset: spiceypy.utils.support_types.SpiceCell
    :return: True if item is an element of the set.
    :rtype: bool
    """
    assert isinstance(inset, stypes.SpiceCell)
    item_p = stypes.stringToCharP(item)
    result = libspice.elemc_c(item_p, ctypes.byref(inset))
    return bool(result)
Determine whether an item is an element of a character set. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/elemc_c.html :param item: Item to be tested. :type item: str :param inset: Set to be tested. :type inset: spiceypy.utils.support_types.SpiceCell :return: True if item is an element of set. :rtype: bool
def override_root_main_ref(config, remotes, banner): log = logging.getLogger(__name__) greatest_tag = config.banner_greatest_tag if banner else config.greatest_tag recent_tag = config.banner_recent_tag if banner else config.recent_tag if greatest_tag or recent_tag: candidates = [r for r in remotes if r[] == ] if candidates: multi_sort(candidates, [ if greatest_tag else ]) config.update({ if banner else : candidates[0][]}, overwrite=True) else: flag = if banner else log.warning(, flag) ref = config.banner_main_ref if banner else config.root_ref return ref in [r[] for r in remotes]
Override root_ref or banner_main_ref with tags in config if user requested. :param sphinxcontrib.versioning.lib.Config config: Runtime configuration. :param iter remotes: List of dicts from Versions.remotes. :param bool banner: Evaluate banner main ref instead of root ref. :return: If root/main ref exists. :rtype: bool
def get_object(model, meteor_id, *args, **kwargs):
    """Return the model instance identified by a Meteor-style id.

    Resolution order:
    1. If the model's primary key is an AleaIdField, look up by pk directly.
    2. If the model has exactly one unique, non-null AleaIdField, look up
       via that field.
    3. Otherwise translate meteor_id via get_object_id() and look up by pk.

    Extra ``args``/``kwargs`` are applied as queryset filters before get().
    """
    meta = model._meta
    if isinstance(meta.pk, AleaIdField):
        # Fast path: the primary key IS the Meteor id.
        return model.objects.filter(*args, **kwargs).get(pk=meteor_id)
    alea_unique_fields = [
        field
        for field in meta.local_fields
        if isinstance(field, AleaIdField) and field.unique and not field.null
    ]
    if len(alea_unique_fields) == 1:
        # Exactly one candidate field -- unambiguous lookup.
        return model.objects.filter(*args, **kwargs).get(**{
            alea_unique_fields[0].name: meteor_id,
        })
    return model.objects.filter(*args, **kwargs).get(
        pk=get_object_id(model, meteor_id),
    )
Return an object for the given meteor_id.
def GetTransactionResults(self):
    """Get the execution results of the transaction.

    Returns:
        None: if the transaction has no references.
        list: of TransactionResult objects, one per asset whose referenced
        inputs and outputs do not sum to zero.
    """
    if self.References is None:
        return None
    results = []
    realresults = []
    # Referenced (input) values count positively ...
    for ref_output in self.References.values():
        results.append(TransactionResult(ref_output.AssetId, ref_output.Value))
    # ... and outputs negatively, so a balanced asset nets to zero.
    for output in self.outputs:
        results.append(TransactionResult(output.AssetId, output.Value * Fixed8(-1)))
    # NOTE(review): itertools-style groupby only merges *consecutive* equal
    # keys and `results` is not sorted by AssetId here, so interleaved
    # assets would yield multiple groups for the same asset -- confirm the
    # input ordering guarantees this, or whether a sort is needed first.
    for key, group in groupby(results, lambda x: x.AssetId):
        sum = Fixed8(0)  # shadows the builtin `sum`; kept as-is
        for item in group:
            sum = sum + item.Amount
        if sum != Fixed8.Zero():
            realresults.append(TransactionResult(key, sum))
    return realresults
Get the execution results of the transaction. Returns: None: if the transaction has no references. list: of TransactionResult objects.
def to_pydatetime(self):
    """Convert this datetime2 object into Python's datetime.datetime.

    @return: naive datetime.datetime combining the date and time parts
    """
    date_part = self._date.to_pydate()
    time_part = self._time.to_pytime()
    return datetime.datetime.combine(date_part, time_part)
Converts datetime2 object into Python's datetime.datetime object @return: naive datetime.datetime
def schemaValidateDoc(self, ctxt):
    """Validate this document tree in memory against a schema context."""
    # Unwrap the libxml2 handle, tolerating a missing context.
    ctxt__o = None if ctxt is None else ctxt._o
    return libxml2mod.xmlSchemaValidateDoc(ctxt__o, self._o)
Validate a document tree in memory.
def _apply_axis_properties(self, axis, rot=None, fontsize=None): if rot is not None or fontsize is not None: labels = axis.get_majorticklabels() + axis.get_minorticklabels() for label in labels: if rot is not None: label.set_rotation(rot) if fontsize is not None: label.set_fontsize(fontsize)
Tick creation within matplotlib is reasonably expensive and is internally deferred until accessed as Ticks are created/destroyed multiple times per draw. It's therefore beneficial for us to avoid accessing unless we will act on the Tick.
def convolutional_layer_series(initial_size, layer_sequence):
    """Apply a series of convolutional layer size transformations.

    :param initial_size: input spatial size before the first layer.
    :param layer_sequence: iterable of (filter_size, padding, stride)
        triples, applied in order.
    :return: the resulting size after all layers.
    """
    current = initial_size
    for spec in layer_sequence:
        kernel, pad, stride = spec
        current = convolution_size_equation(current, kernel, pad, stride)
    return current
Execute a series of convolutional layer transformations to the size number
def error(self, message):
    """Print the error message and usage help, then exit with status 2."""
    # NOTE(review): the format-string literal has been lost here (extraction
    # artifact) -- `write( % message)` is not valid; originally something
    # like 'error: %s\n' % message. Restore before use.
    sys.stderr.write( % message)
    self.print_help()
    sys.exit(2)
Prints error message, then help.
def mb_handler(self, args): if len(args) == 1: raise InvalidArgument() self.validate(, args) self.s3handler().create_bucket(args[1])
Handler for mb command
def _periodicfeatures_worker(task): pfpickle, lcbasedir, outdir, starfeatures, kwargs = task try: return get_periodicfeatures(pfpickle, lcbasedir, outdir, starfeatures=starfeatures, **kwargs) except Exception as e: LOGEXCEPTION( % pfpickle)
This is a parallel worker for the drivers below.
def from_object(self, instance: Union[object, str]) -> None: if isinstance(instance, str): try: path, config = instance.rsplit(, 1) except ValueError: path = instance instance = importlib.import_module(path) else: module = importlib.import_module(path) instance = getattr(module, config) for key in dir(instance): if key.isupper(): self[key] = getattr(instance, key)
Load the configuration from a Python object. This can be used to reference modules or objects within modules for example, .. code-block:: python app.config.from_object('module') app.config.from_object('module.instance') from module import instance app.config.from_object(instance) are valid. Arguments: instance: Either a str referencing a python object or the object itself.
def registry_adapter(obj, request): return { : obj.query_uri, : obj.success, : obj.has_references, : obj.count, : [{ : a.title, : a.uri, : a.service_url, : a.success, : a.has_references, : a.count, : [{ : i.uri, : i.title } for i in a.items] if a.items is not None else None } for a in obj.applications] if obj.applications is not None else None }
Adapter for rendering a :class:`pyramid_urireferencer.models.RegistryResponse` to json. :param pyramid_urireferencer.models.RegistryResponse obj: The response to be rendered. :rtype: :class:`dict`
def IPID_count(lst, funcID=lambda x: x[1].id, funcpres=lambda x: x[1].summary()):
    """Identify IP id value classes in a list of packets and print a summary.

    lst: a list of packets (default accessors expect (something, packet)
        pairs where packet has .id and .summary())
    funcID: a function that returns IP id values
    funcpres: a function used to summarize packets for display
    """
    idlst = [funcID(e) for e in lst]
    idlst.sort()
    # A new "class" starts wherever consecutive sorted ids jump by more
    # than 50.
    classes = [idlst[0]]
    classes += [t[1] for t in zip(idlst[:-1], idlst[1:]) if abs(t[0] - t[1]) > 50]
    lst = [(funcID(x), funcpres(x)) for x in lst]
    lst.sort()
    print("Probably %i classes:" % len(classes), classes)
    for id, pr in lst:  # `id` shadows the builtin; kept as-is
        print("%5i" % id, pr)
Identify IP id values classes in a list of packets lst: a list of packets funcID: a function that returns IP id values funcpres: a function used to summarize packets
def filter(self, extractions, case_sensitive=False) -> List[Extraction]: filtered_extractions = [] if not isinstance(extractions, list): extractions = [extractions] for extraction in extractions: if case_sensitive: try: if extraction.value.lower() not in self.black_list: filtered_extractions.append(extraction) except Exception as e: print(.format(e, extraction.value)) filtered_extractions.append(extraction) else: if extraction.value not in self.black_list: filtered_extractions.append(extraction) return filtered_extractions
filters out the extraction if extracted value is in the blacklist
def mixed_list_file(cls, filename, values, bits): fd = open(filename, ) for original in values: try: parsed = cls.integer(original, bits) except TypeError: parsed = repr(original) print >> fd, parsed fd.close()
Write a list of mixed values to a file. If a file of the same name exists, it's contents are replaced. See L{HexInput.mixed_list_file} for a description of the file format. @type filename: str @param filename: Name of the file to write. @type values: list( int ) @param values: List of mixed values to write to the file. @type bits: int @param bits: (Optional) Number of bits of the target architecture. The default is platform dependent. See: L{HexOutput.integer_size}
def setup(self): super(CleanCSSFilter, self).setup() self.root = current_app.config.get()
Initialize filter just before it will be used.
def get_default_home_dir(): ding0_dir = str(cfg_ding0.get(, )) return os.path.join(os.path.expanduser(), ding0_dir)
Return default home directory of Ding0 Returns ------- :any:`str` Default home directory including its path
def _checkCanIndex(self): if not self.chainedFields: return (False, False) for chainedField in self.chainedFields: if chainedField.CAN_INDEX is False: return (False, False) return (True, self.chainedFields[-1].hashIndex)
_checkCanIndex - Check if we CAN index (if all fields are indexable). Also checks the right-most field for "hashIndex" - if it needs to hash we will hash.
def Beta(alpha, beta, low=0, high=1, tag=None): assert ( alpha > 0 and beta > 0 ), assert low < high, return uv(ss.beta(alpha, beta, loc=low, scale=high - low), tag=tag)
A Beta random variate Parameters ---------- alpha : scalar The first shape parameter beta : scalar The second shape parameter Optional -------- low : scalar Lower bound of the distribution support (default=0) high : scalar Upper bound of the distribution support (default=1)
def __balance(self, account_id, **kwargs): params = { : account_id } return self.make_call(self.__balance, params, kwargs)
Call documentation: `/account/balance <https://www.wepay.com/developer/reference/account-2011-01-15#balance>`_, plus extra keyword parameters: :keyword str access_token: will be used instead of instance's ``access_token``, with ``batch_mode=True`` will set `authorization` param to it's value. :keyword bool batch_mode: turn on/off the batch_mode, see :class:`wepay.api.WePay` :keyword str batch_reference_id: `reference_id` param for batch call, see :class:`wepay.api.WePay` :keyword str api_version: WePay API version, see :class:`wepay.api.WePay` .. warning :: This call is depricated as of API version '2014-01-08'.
def get_checklists( self ): checklists = self.getChecklistsJson( self.base_uri ) checklists_list = [] for checklist_json in checklists: checklists_list.append( self.createChecklist( checklist_json ) ) return checklists_list
Get the checklists for this board. Returns a list of Checklist objects.
def sort_data(x_vals, y_vals):
    """Sort paired data so x is strictly increasing with no duplicates.

    Parameters
    ----------
    x_vals, y_vals : numpy.ndarray
        Paired data arrays of equal length.

    Returns
    -------
    (numpy.ndarray, numpy.ndarray)
        x sorted ascending with duplicate x entries dropped (keeping the
        first occurrence after sorting), and y reordered/filtered to match.
    """
    order = np.argsort(x_vals)
    x_vals = x_vals[order]
    y_vals = y_vals[order]
    # Keep the first element of any run of equal x values.
    keep = np.r_[True, np.diff(x_vals) > 0]
    if not keep.all():
        # (dead duplicate-counting code removed: the original computed and
        # immediately deleted numof_duplicates without using it)
        x_vals = x_vals[keep]
        y_vals = y_vals[keep]
    return x_vals, y_vals
Sort the data so that x is monotonically increasing and contains no duplicates.
def resolve_object_property(obj, path: str):
    """Resolve the value of a (possibly nested) dotted attribute path.

    For example, a path can be specified: 'other.beer.name', which
    resolves obj.other.beer.name.

    Raises:
        AttributeError: In case the property could not be resolved.

    Returns:
        The value of the specified property.
    """
    value = obj
    # Bug fix: split on '.' -- a bare split() splits on whitespace, so a
    # dotted path like 'other.beer.name' would never descend into the
    # nested attributes the docstring promises.
    for path_part in path.split('.'):
        value = getattr(value, path_part)
    return value
Resolves the value of a property on an object. Is able to resolve nested properties. For example, a path can be specified: 'other.beer.name' Raises: AttributeError: In case the property could not be resolved. Returns: The value of the specified property.
def computeEntropyAndEnthalpy(self, uncertainty_method=None, verbose=False, warning_cutoff=1.0e-10): if verbose: print("Computing average energy and entropy by MBAR.") N = self.N K = self.K Log_W_nk = np.zeros([N, K * 2], dtype=np.float64) N_k = np.zeros([K * 2], dtype=np.int32) f_k = np.zeros(K, dtype=np.float64) Log_W_nk[:, 0:K] = self.Log_W_nk N_k[0:K] = self.N_k u_min = self.u_kn.min() u_i = np.zeros([K], dtype=np.float64) for l in range(0, K): u_kn = self.u_kn[l, :] - (u_min-1) Log_W_nk[:, K + l] = np.log(u_kn) + self.Log_W_nk[:, l] f_k[l] = -_logsum(Log_W_nk[:, K + l]) Log_W_nk[:, K + l] += f_k[l] u_i[l] = np.exp(-f_k[l]) W_nk = np.exp(Log_W_nk) Theta_ij = self._computeAsymptoticCovarianceMatrix( W_nk, N_k, method=uncertainty_method) dDelta_f_ij = np.zeros([K, K], dtype=np.float64) dDelta_u_ij = np.zeros([K, K], dtype=np.float64) dDelta_s_ij = np.zeros([K, K], dtype=np.float64) f_k = np.matrix(self.f_k) Delta_f_ij = f_k - f_k.transpose() u_k = np.matrix(u_i) Delta_u_ij = u_k - u_k.transpose() s_k = u_k - f_k Delta_s_ij = s_k - s_k.transpose() diag = Theta_ij.diagonal() dii = diag[0:K, 0:K] d2DeltaF = dii + dii.transpose() - 2 * Theta_ij[0:K, 0:K] if (np.any(d2DeltaF < 0.0)): if(np.any(d2DeltaF) < warning_cutoff): print("A squared uncertainty is negative. 
d2DeltaF = %e" % d2DeltaF[(np.any(d2DeltaF) < warning_cutoff)]) else: d2DeltaF[(np.any(d2DeltaF) < warning_cutoff)] = 0.0 dDelta_f_ij = np.sqrt(d2DeltaF) for i in range(0, K): for j in range(0, K): try: dDelta_u_ij[i, j] = math.sqrt( + u_i[i] * Theta_ij[i, i] * u_i[i] - u_i[i] * Theta_ij[i, j] * u_i[j] - u_i[ i] * Theta_ij[i, K + i] * u_i[i] + u_i[i] * Theta_ij[i, K + j] * u_i[j] - u_i[j] * Theta_ij[j, i] * u_i[i] + u_i[j] * Theta_ij[j, j] * u_i[j] + u_i[ j] * Theta_ij[j, K + i] * u_i[i] - u_i[j] * Theta_ij[j, K + j] * u_i[j] - u_i[i] * Theta_ij[K + i, i] * u_i[i] + u_i[i] * Theta_ij[K + i, j] * u_i[ j] + u_i[i] * Theta_ij[K + i, K + i] * u_i[i] - u_i[i] * Theta_ij[K + i, K + j] * u_i[j] + u_i[j] * Theta_ij[K + j, i] * u_i[i] - u_i[j] * Theta_ij[K + j, j] * u_i[ j] - u_i[j] * Theta_ij[K + j, K + i] * u_i[i] + u_i[j] * Theta_ij[K + j, K + j] * u_i[j] ) except: dDelta_u_ij[i, j] = 0.0 try: dDelta_s_ij[i, j] = math.sqrt( + (u_i[i] - 1) * Theta_ij[i, i] * (u_i[i] - 1) + (u_i[i] - 1) * Theta_ij[i, j] * (-u_i[j] + 1) + ( u_i[i] - 1) * Theta_ij[i, K + i] * (-u_i[i]) + (u_i[i] - 1) * Theta_ij[i, K + j] * u_i[j] + (-u_i[j] + 1) * Theta_ij[j, i] * (u_i[i] - 1) + (-u_i[j] + 1) * Theta_ij[j, j] * (-u_i[j] + 1) + (-u_i[j] + 1) * Theta_ij[j, K + i] * (-u_i[i]) + (-u_i[j] + 1) * Theta_ij[j, K + j] * u_i[j] + (-u_i[i]) * Theta_ij[K + i, i] * (u_i[i] - 1) + (-u_i[i]) * Theta_ij[K + i, j] * (-u_i[j] + 1) + (-u_i[i]) * Theta_ij[K + i, K + i] * (-u_i[i]) + (-u_i[i]) * Theta_ij[K + i, K + j] * u_i[j] + u_i[j] * Theta_ij[K + j, i] * (u_i[i] - 1) + u_i[j] * Theta_ij[K + j, j] * (-u_i[j] + 1) + u_i[ j] * Theta_ij[K + j, K + i] * (-u_i[i]) + u_i[j] * Theta_ij[K + j, K + j] * u_i[j] ) except: dDelta_s_ij[i, j] = 0.0 return (Delta_f_ij, dDelta_f_ij, Delta_u_ij, dDelta_u_ij, Delta_s_ij, dDelta_s_ij)
Decompose free energy differences into enthalpy and entropy differences. Compute the decomposition of the free energy difference between states 1 and N into reduced free energy differences, reduced potential (enthalpy) differences, and reduced entropy (S/k) differences. Parameters ---------- uncertainty_method : string , optional Choice of method used to compute asymptotic covariance method, or None to use default See help for computeAsymptoticCovarianceMatrix() for more information on various methods. (default: None) warning_cutoff : float, optional Warn if squared-uncertainty is negative and larger in magnitude than this number (default: 1.0e-10) Returns ------- Delta_f_ij : np.ndarray, float, shape=(K, K) Delta_f_ij[i,j] is the dimensionless free energy difference f_j - f_i dDelta_f_ij : np.ndarray, float, shape=(K, K) uncertainty in Delta_f_ij Delta_u_ij : np.ndarray, float, shape=(K, K) Delta_u_ij[i,j] is the reduced potential energy difference u_j - u_i dDelta_u_ij : np.ndarray, float, shape=(K, K) uncertainty in Delta_f_ij Delta_s_ij : np.ndarray, float, shape=(K, K) Delta_s_ij[i,j] is the reduced entropy difference S/k between states i and j (s_j - s_i) dDelta_s_ij : np.ndarray, float, shape=(K, K) uncertainty in Delta_s_ij Notes ----- This method is EXPERIMENTAL and should be used at your own risk. Examples -------- >>> from pymbar import testsystems >>> (x_n, u_kn, N_k, s_n) = testsystems.HarmonicOscillatorsTestCase().sample(mode='u_kn') >>> mbar = MBAR(u_kn, N_k) >>> [Delta_f_ij, dDelta_f_ij, Delta_u_ij, dDelta_u_ij, Delta_s_ij, dDelta_s_ij] = mbar.computeEntropyAndEnthalpy()
def savefits(cube, fitsname, **kwargs): dropdeg = kwargs.pop(, False) ndim = len(cube.dims) FITSINFO = get_data(, ) hdrdata = yaml.load(FITSINFO, dc.utils.OrderedLoader) if ndim == 2: header = fits.Header(hdrdata[]) data = cube.values.T elif ndim == 3: if dropdeg: header = fits.Header(hdrdata[]) data = cube.values[:, :, 0].T else: header = fits.Header(hdrdata[]) kidfq = cube.kidfq.values freqrange = ~np.isnan(kidfq) orderedfq = np.argsort(kidfq[freqrange]) newcube = cube[:, :, orderedfq] data = newcube.values.T else: raise TypeError(ndim) if cube.coordsys == : header.update({: , : }) elif cube.coordsys == : header.update({: float(cube.xref), : float(cube.yref)}) else: pass header.update({: float(cube.x[0]), : float(cube.x[1] - cube.x[0]), : float(cube.y[0]), : float(cube.y[1] - cube.y[0]), : datetime.now(timezone()).isoformat()}) if (ndim == 3) and (not dropdeg): header.update({: float(newcube.kidfq[0]), : float(newcube.kidfq[1] - newcube.kidfq[0])}) fitsname = str(Path(fitsname).expanduser()) fits.writeto(fitsname, data, header, **kwargs) logger.info(.format(fitsname))
Save a cube to a 3D-cube FITS file. Args: cube (xarray.DataArray): Cube to be saved. fitsname (str): Name of output FITS file. kwargs (optional): Other arguments common with astropy.io.fits.writeto().
def kibana_install(self): with cd(): if not exists(): sudo(.format( bigdata_conf.kibana_download_url )) sudo() sudo()
kibana install :return:
def feed_data(self, data: bytes) -> None:
    """Forward incoming bytes to the wrapped parser, if one is attached."""
    parser = self._parser
    if parser is not None:
        parser.feed_data(data)
Proxy for ``feed_data``: forwards the received bytes to the wrapped parser when one is attached.
def make_pose(translation, rotation):
    """Build a 4x4 homogeneous pose matrix.

    Args:
        translation: a 3-dim iterable
        rotation: a 3x3 matrix

    Returns:
        pose: a 4x4 homogeneous matrix with the given rotation block and
        translation column, bottom row [0, 0, 0, 1]
    """
    # Start from the identity so the bottom row is already [0, 0, 0, 1].
    pose = np.eye(4)
    pose[:3, :3] = rotation
    pose[:3, 3] = translation
    return pose
Makes a homogenous pose matrix from a translation vector and a rotation matrix. Args: translation: a 3-dim iterable rotation: a 3x3 matrix Returns: pose: a 4x4 homogenous matrix
def _solve(self, A=None, b=None): r if self.settings[] == : self.settings[] = if self.settings[] == : self.settings[] = rtol = self.settings[] min_A = np.abs(A.data).min() min_b = np.abs(b).min() or 1e100 atol = min(min_A, min_b) * rtol if self.settings[] == : if importlib.util.find_spec(): A.indices = A.indices.astype(np.int64) A.indptr = A.indptr.astype(np.int64) iterative = [, , , , , , , , ] solver = getattr(sprs.linalg, self.settings[]) if self.settings[] in iterative: x, exit_code = solver(A=A, b=b, atol=atol, tol=rtol, maxiter=self.settings[]) if exit_code > 0: raise Exception( + + str(exit_code)) else: x = solver(A=A, b=b) return x if self.settings[] == : if importlib.util.find_spec(): from openpnm.utils.petsc import PETScSparseLinearSolver as SLS else: raise Exception() ls = SLS(A=A, b=b) sets = self.settings sets = {k: v for k, v in sets.items() if k.startswith()} sets = {k.split()[1]: v for k, v in sets.items()} ls.settings.update(sets) x = SLS.solve(ls) del(ls) return x if self.settings[] == : if importlib.util.find_spec(): import pyamg else: raise Exception() ml = pyamg.ruge_stuben_solver(A) x = ml.solve(b=b, tol=1e-6) return x
r""" Sends the A and b matrices to the specified solver, and solves for *x* given the boundary conditions, and source terms based on the present value of *x*. This method does NOT iterate to solve for non-linear source terms or march time steps. Parameters ---------- A : sparse matrix The coefficient matrix in sparse format. If not specified, then it uses the ``A`` matrix attached to the object. b : ND-array The RHS matrix in any format. If not specified, then it uses the ``b`` matrix attached to the object. Notes ----- The solver used here is specified in the ``settings`` attribute of the algorithm.
def _get_base_command(self): cd_command = .join([, str(self.WorkingDir), ]) jvm_command = "java" jvm_args = self._commandline_join( [self.Parameters[k] for k in self._jvm_parameters]) cp_args = % (self._get_jar_fp(), self.TrainingClass) command_parts = [cd_command, jvm_command, jvm_args, cp_args] return self._commandline_join(command_parts).strip()
Returns the base command plus command-line options. Handles everything up to and including the classpath. The positional training parameters are added by the _input_handler_decorator method.
def exhandler(function, parser): args = vars(bparser.parse_known_args()[0]) if args["examples"]: function() exit(0) if args["verbose"]: from msg import set_verbosity set_verbosity(args["verbose"]) args.update(vars(parser.parse_known_args()[0])) return args
If -examples was specified in 'args', the specified function is called and the application exits. :arg function: the function that prints the examples. :arg parser: the initialized instance of the parser that has the additional, script-specific parameters.
def check(self, return_code=0):
    """Run the command, wait for completion, and verify the exit code.

    :param return_code: int, expected return code
    :raises EasyProcessError: if the actual return code differs
    :rtype: self
    """
    ret = self.call().return_code
    ok = ret == return_code
    if not ok:
        # NOTE(review): the message format-string literal has been lost
        # here (extraction artifact) -- `.format(return_code)` has no
        # receiver string. Restore before use.
        raise EasyProcessError(
            self, .format(return_code))
    return self
Run command with arguments. Wait for command to complete. If the exit code was as expected and there is no exception then return, otherwise raise EasyProcessError. :param return_code: int, expected return code :rtype: self
def weld_str_lower(array): obj_id, weld_obj = create_weld_object(array) weld_template = weld_obj.weld_code = weld_template.format(array=obj_id) return weld_obj
Convert values to lowercase. Parameters ---------- array : numpy.ndarray or WeldObject Input data. Returns ------- WeldObject Representation of this computation.
def groupby(xs, key_fn):
    """Group elements of `xs` by keys produced by `key_fn`.

    Returns a defaultdict(list) mapping each key to the sub-list of `xs`
    whose elements produced that key, in input order.
    """
    grouped = defaultdict(list)
    for item in xs:
        grouped[key_fn(item)].append(item)
    return grouped
Group elements of the list `xs` by keys generated from calling `key_fn`. Returns a dictionary which maps keys to sub-lists of `xs`.
def get_artist_by_mbid(self, mbid):
    """Look up an artist by its MusicBrainz ID.

    :param mbid: MusicBrainz identifier string.
    :return: an Artist built from the artist.getInfo response's name field.
    """
    params = {"mbid": mbid}
    # execute(True) -- presumably the cacheable/signed variant of the
    # request; confirm against _Request.execute's signature.
    doc = _Request(self, "artist.getInfo", params).execute(True)
    return Artist(_extract(doc, "name"), self)
Looks up an artist by its MusicBrainz ID
def _handle_upsert(self, parts, unwritten_lobs=()):
    """Handle the reply parts of an INSERT or UPDATE statement.

    :param parts: iterable of reply message parts from the server.
    :param unwritten_lobs: LOB buffers still awaiting upload; they receive
        their server-assigned locator ids here before being written.
    :raises InterfaceError: on an unexpected part kind.
    """
    self.description = None
    self._received_last_resultset_part = True
    for part in parts:
        if part.kind == part_kinds.ROWSAFFECTED:
            # Number of rows touched by the statement.
            self.rowcount = part.values[0]
        elif part.kind in (part_kinds.TRANSACTIONFLAGS, part_kinds.STATEMENTCONTEXT, part_kinds.PARAMETERMETADATA):
            # Informational parts; nothing to do.
            pass
        elif part.kind == part_kinds.WRITELOBREPLY:
            # Pair each pending LOB buffer with its server-assigned
            # locator id, then push the remaining LOB data.
            for lob_buffer, lob_locator_id in izip(unwritten_lobs, part.locator_ids):
                lob_buffer.locator_id = lob_locator_id
            self._perform_lob_write_requests(unwritten_lobs)
        else:
            raise InterfaceError("Prepared insert statement response, unexpected part kind %d." % part.kind)
    self._executed = True
Handle reply messages from INSERT or UPDATE statements
def validate_metadata(self, handler):
    """Validate that kind=category does not change the stored categories.

    Raises ValueError when appending a categorical whose categories differ
    from those already stored.
    """
    # NOTE(review): the comparison literal has been lost here (extraction
    # artifact) -- `self.meta == :` originally compared against the string
    # 'category'. Restore before use.
    if self.meta == :
        new_metadata = self.metadata
        cur_metadata = handler.read_metadata(self.cname)
        if (new_metadata is not None and cur_metadata is not None
                and not array_equivalent(new_metadata, cur_metadata)):
            raise ValueError("cannot append a categorical with "
                             "different categories to the existing")
validate that kind=category does not change the categories
def placeholder(type_):
    """Return the cached EmptyVal instance for the given type.

    Accepts a single type or a tuple of types; results are memoized in the
    module-level EMPTY_VALS cache.
    """
    typetuple = type_ if isinstance(type_, tuple) else (type_,)
    # NOTE(review): `any` here appears to be the *builtin* used as a
    # wildcard sentinel -- a tuple containing it collapses to the catch-all
    # cache key. Confirm this matches how callers register the wildcard.
    if any in typetuple:
        typetuple = any
    if typetuple not in EMPTY_VALS:
        EMPTY_VALS[typetuple] = EmptyVal(typetuple)
    return EMPTY_VALS[typetuple]
Returns the EmptyVal instance for the given type
def join(self, joiner, formatter=lambda s, t: t.format(s), template="{}"):
    """Join values into a string, formatting each element first.

    :param joiner: separator placed between formatted elements.
    :param formatter: callable (element, template) -> str applied to each
        element; defaults to template.format(element).
    :param template: format template handed to the formatter.
    :return: the joined string (a ww string wrapper).
    """
    # Delegates to the ww string wrapper's join, which applies the
    # formatter/template to every element of this list.
    return ww.s(joiner).join(self, formatter, template)
Join values and convert to string Example: >>> from ww import l >>> lst = l('012') >>> lst.join(',') u'0,1,2' >>> lst.join(',', template="{}#") u'0#,1#,2#' >>> string = lst.join(',',\ formatter = lambda x, y: str(int(x) ** 2)) >>> string u'0,1,4'
def filter(self, model=None, context=None):
    """Perform filtering on the model. Will change model in place.

    :param model: object or dict
    :param context: object, dict or None
    :return: None
    """
    # NOTE(review): this branch fires when model IS None, which then calls
    # filter_properties with model=None and skips the entity/collection
    # passes -- the condition looks inverted (expected `is not None`?).
    # Confirm the intended behavior before relying on it.
    if model is None:
        return self.filter_properties(model, context=context)
    self.filter_entities(model, context=context)
    self.filter_collections(model, context=context)
Perform filtering on the model. Will change model in place. :param model: object or dict :param context: object, dict or None :return: None