code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
def has_property(elem_to_parse, xpath): xroot, attr = get_xpath_tuple(xpath) if not xroot and not attr: return False elif not attr: return bool(get_elements_text(elem_to_parse, xroot)) else: return bool(get_elements_attributes(elem_to_parse, xroot, attr))
Parse xpath for any attribute reference "path/@attr" and check for root and presence of attribute. :return: True if xpath is present in the element along with any attribute referenced, otherwise False
def sci(x, digs): if type(x) != type(): x = repr(x) sign, intpart, fraction, expo = extract(x) if not intpart: while fraction and fraction[0] == : fraction = fraction[1:] expo = expo - 1 if fraction: intpart, fraction = fraction[0], fraction[1:] ...
Format x as [-]d.dddE[+-]ddd with 'digs' digits after the point and exactly one digit before. If digs is <= 0, one digit is kept and the point is suppressed.
def set_iter_mesh(self, mesh, shift=None, is_time_reversal=True, is_mesh_symmetry=True, is_eigenvectors=False, is_gamma_center=False): warnings.warn("Phonopy.set_iter_mes...
Create an IterMesh instancer Attributes ---------- See set_mesh method.
def find_rt_jar(javahome=None): if not javahome: if in os.environ: javahome = os.environ[] elif sys.platform == : javahome = _find_osx_javahome() else: javahome = _get_javahome_from_java(_find_java_binary()) ...
Find the path to the Java standard library jar. The jar is expected to exist at the path 'jre/lib/rt.jar' inside a standard Java installation directory. The directory is found using the following procedure: 1. If the javehome argument is provided, use the value as the directory. 2. If the J...
def show_raslog_output_show_all_raslog_raslog_entries_log_type(self, **kwargs): config = ET.Element("config") show_raslog = ET.Element("show_raslog") config = show_raslog output = ET.SubElement(show_raslog, "output") show_all_raslog = ET.SubElement(output, "show-all-rasl...
Auto Generated Code
def deep_align(objects, join=, copy=True, indexes=None, exclude=frozenset(), raise_on_invalid=True): from .dataarray import DataArray from .dataset import Dataset if indexes is None: indexes = {} def is_alignable(obj): return isinstance(obj, (DataArray, Dataset)) ...
Align objects for merging, recursing into dictionary values. This function is not public API.
def _apply_user_port_channel_config(self, nexus_host, vpc_nbr): cli_cmds = self._get_user_port_channel_config(nexus_host, vpc_nbr) if cli_cmds: self._send_cli_conf_string(nexus_host, cli_cmds) else: vpc_str = str(vpc_nbr) path_snip = snipp.PATH_ALL ...
Adds STP and no lacp suspend config to port channel.
def ajModeles(self): sl = [] lines = [line for line in lignesFichier(self.path("modeles.la"))] max = len(lines) - 1 for i, l in enumerate(lines): if l.startswith(): varname, value = tuple(l.split("=")) self.lemmatiseur._variables[varna...
Lecture des modèles, et enregistrement de leurs désinences
def from_header(self, binary): if binary is None: return span_context_module.SpanContext(from_header=False) try: data = Header._make(struct.unpack(BINARY_FORMAT, binary)) except struct.error: logging.warning( ...
Generate a SpanContext object using the trace context header. The value of enabled parsed from header is int. Need to convert to bool. :type binary: bytes :param binary: Trace context header which was extracted from the request headers. :rtype: :class:`~o...
def mk_class_name(*parts): cap = lambda s: s and (s[0].capitalize() + s[1:]) return "".join(["".join([cap(i) for i in re.split("[\ \-\_\.]", str(p))]) for p in parts])
Create a valid class name from a list of strings.
def send_command_return(self, obj, command, *arguments): return self._perform_command(.format(self.session_url, obj.ref), command, OperReturnType.line_output, *arguments).json()
Send command with single line output. :param obj: requested object. :param command: command to send. :param arguments: list of command arguments. :return: command output.
def _get_clumpp_table(self, kpop, max_var_multiple, quiet): reps, excluded = _concat_reps(self, kpop, max_var_multiple, quiet) if reps: ninds = reps[0].inds nreps = len(reps) else: ninds = nreps = 0 if not reps: return "no result files found" clumphandle =...
private function to clumpp results
def to_element(self, include_namespaces=False): elt_attrib = {} if include_namespaces: elt_attrib.update({ : "urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/", : "http://purl.org/dc/elements/1.1/", : "urn:schemas-upnp-org:metadata-1-0/upn...
Return an ElementTree Element representing this instance. Args: include_namespaces (bool, optional): If True, include xml namespace attributes on the root element Return: ~xml.etree.ElementTree.Element: an Element.
def log_variable_sizes(var_list=None, tag=None, verbose=False): if var_list is None: var_list = tf.trainable_variables() if tag is None: tag = "Trainable Variables" if not var_list: return name_to_var = {v.name: v for v in var_list} total_size = 0 for v_name in sorted(list(name_to_var)): ...
Log the sizes and shapes of variables, and the total size. Args: var_list: a list of variables; defaults to trainable_variables tag: a string; defaults to "Trainable Variables" verbose: bool, if True, log every weight; otherwise, log total size only.
def logpdf_link(self, inv_link_f, y, Y_metadata=None): e = y - inv_link_f objective = (+ gammaln((self.v + 1) * 0.5) - gammaln(self.v * 0.5) - 0.5*np.log(self.sigma2 * self.v * np.pi) - 0.5*(self.v +...
Log Likelihood Function given link(f) .. math:: \\ln p(y_{i}|\lambda(f_{i})) = \\ln \\Gamma\\left(\\frac{v+1}{2}\\right) - \\ln \\Gamma\\left(\\frac{v}{2}\\right) - \\ln \\sqrt{v \\pi\\sigma^{2}} - \\frac{v+1}{2}\\ln \\left(1 + \\frac{1}{v}\\left(\\frac{(y_{i} - \lambda(f_{i}))^{2}}{\\sigma^{2}}\\r...
def template_delete(call=None, kwargs=None): if call != : raise SaltCloudSystemExit( ) if kwargs is None: kwargs = {} name = kwargs.get(, None) template_id = kwargs.get(, None) if template_id: if name: log.warning( temp...
Deletes the given template from OpenNebula. Either a name or a template_id must be supplied. .. versionadded:: 2016.3.0 name The name of the template to delete. Can be used instead of ``template_id``. template_id The ID of the template to delete. Can be used instead of ``name``. ...
def instance_attr_ancestors(self, name, context=None): for astroid in self.ancestors(context=context): if name in astroid.instance_attrs: yield astroid
Iterate over the parents that define the given name as an attribute. :param name: The name to find definitions for. :type name: str :returns: The parents that define the given name as an instance attribute. :rtype: iterable(NodeNG)
def OnUpdate(self, event): undo_toolid = self.label2id["Undo"] redo_toolid = self.label2id["Redo"] self.EnableTool(undo_toolid, undo.stack().canundo()) self.EnableTool(redo_toolid, undo.stack().canredo()) undotext = undo.stack().undotext() u...
Updates the toolbar states
def _item_to_document_ref(iterator, item): document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1] return iterator.collection.document(document_id)
Convert Document resource to document ref. Args: iterator (google.api_core.page_iterator.GRPCIterator): iterator response item (dict): document resource
def deserialize_duration(attr): if isinstance(attr, ET.Element): attr = attr.text try: duration = isodate.parse_duration(attr) except(ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise_with...
Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. :rtype: TimeDelta :raises: DeserializationError if string format invalid.
def create_model_schema(target_model): from nautilus.database import db schema = graphene.Schema(auto_camelcase=False) primary_key = target_model.primary_key() primary_key_type = convert_peewee_field(primary_key) class ModelObjectType(PeeweeObjectType): class Meta: ...
This function creates a graphql schema that provides a single model
def is_valid_cidr(string_network): if string_network.count() == 1: try: mask = int(string_network.split()[1]) except ValueError: return False if mask < 1 or mask > 32: return False try: socket.inet_aton(string_network.split()[0])...
Very simple check of the cidr format in no_proxy variable. :rtype: bool
def vlan_dot1q_tag_native(self, **kwargs): config = ET.Element("config") vlan = ET.SubElement(config, "vlan", xmlns="urn:brocade.com:mgmt:brocade-vlan") dot1q = ET.SubElement(vlan, "dot1q") tag = ET.SubElement(dot1q, "tag") native = ET.SubElement(tag, "native") ...
Auto Generated Code
def append(self, data_frame): if len(data_frame) == 0: return data_frame_index = data_frame.index combined_index = self._index + data_frame_index if len(set(combined_index)) != len(combined_index): raise ValueError() for c, column in enumerate(...
Append another DataFrame to this DataFrame. If the new data_frame has columns that are not in the current DataFrame then new columns will be created. All of the indexes in the data_frame must be different from the current indexes or will raise an error. :param data_frame: DataFrame to append ...
def geoms(self, scale=None, bounds=None, as_element=True): feature = self.data if scale is not None: feature = feature.with_scale(scale) if bounds: extent = (bounds[0], bounds[2], bounds[1], bounds[3]) else: extent = None geoms = [g f...
Returns the geometries held by the Feature. Parameters ---------- scale: str Scale of the geometry to return expressed as string. Available scales depends on the Feature type. NaturalEarthFeature: '10m', '50m', '110m' GSHHSFeature: ...
def setter(self, func): if not callable(func): raise TypeError() if hasattr(func, ) and func.__code__.co_argcount != 2: raise TypeError() if func.__name__ != self.name: raise TypeError() self._set_func = func return self
Register a set function for the DynamicProperty This function must take two arguments, self and the new value. Input value to the function is validated with prop validation prior to execution.
def send_cmd(cmd, args, ret): from dvc.daemon import daemon if not Analytics._is_enabled(cmd): return analytics = Analytics() analytics.collect_cmd(args, ret) daemon(["analytics", analytics.dump()])
Collect and send analytics for CLI command. Args: args (list): parsed args for the CLI command. ret (int): return value of the CLI command.
async def stop(self): await self.node.stop(self.channel.guild.id) self.queue = [] self.current = None self.position = 0 self._paused = False
Stops playback from lavalink. .. important:: This method will clear the queue.
def zipWithUniqueId(self): n = self.getNumPartitions() def func(k, it): for i, v in enumerate(it): yield v, i * n + k return self.mapPartitionsWithIndex(func)
Zips this RDD with generated unique Long ids. Items in the kth partition will get ids k, n+k, 2*n+k, ..., where n is the number of partitions. So there may exist gaps, but this method won't trigger a spark job, which is different from L{zipWithIndex} >>> sc.parallelize(["a", "b...
def name_targets(func): def wrap(*a, **kw): ret = func(*a, **kw) return dict(zip(ret[:-1], ret[-1])) return wrap
Wrap a function such that returning ``'a', 'b', 'c', [1, 2, 3]`` transforms the value into ``dict(a=1, b=2, c=3)``. This is useful in the case where the last parameter is an SCons command.
def bind(self, ticket, device_id, user_id): return self._post( , data={ : ticket, : device_id, : user_id } )
绑定设备 详情请参考 https://iot.weixin.qq.com/wiki/new/index.html?page=3-4-7 :param ticket: 绑定操作合法性的凭证(由微信后台生成,第三方H5通过客户端jsapi获得) :param device_id: 设备id :param user_id: 用户对应的openid :return: 返回的 JSON 数据包
def _serialize_parameters(parameters): for key, value in parameters.items(): if isinstance(value, bool): parameters[key] = "true" if value else "false" elif isinstance(value, dict): parameters[key] = "|".join( ("%s:%s" % (k, v...
Serialize some parameters to match python native types with formats specified in google api docs like: * True/False -> "true"/"false", * {"a": 1, "b":2} -> "a:1|b:2" :type parameters: dict oif query parameters
def parse_play(boxscore_id, details, is_hm): return p
Parse play details from a play-by-play string describing a play. Assuming valid input, this function returns structured data in a dictionary describing the play. If the play detail string was invalid, this function returns None. :param boxscore_id: the boxscore ID of the play :param details: detai...
def hiddenColumns( self ): output = [] columns = self.columns() for c, column in enumerate(columns): if ( not self.isColumnHidden(c) ): continue output.append(column) return output
Returns a list of the hidden columns for this tree. :return [<str>, ..]
def hashed(field_name, percent, fields=None, count=0): if field_name is None: raise Exception() def _hashed_sampling(sql): projection = Sampling._create_projection(fields) sql = % \ (projection, sql, field_name, percent) if count != 0: sql = % (sql, count) ...
Provides a sampling strategy based on hashing and selecting a percentage of data. Args: field_name: the name of the field to hash. percent: the percentage of the resulting hashes to select. fields: an optional list of field names to retrieve. count: optional maximum count of rows to pick. ...
def is_fw_complete(self): LOG.info("In fw_complete needed %(fw_created)s " "%(active_policy_id)s %(is_fw_drvr_created)s " "%(pol_present)s %(fw_type)s", {: self.fw_created, : self.active_pol_id, : self.is_fw_drvr_cre...
This API returns the completion status of FW. This returns True if a FW is created with a active policy that has more than one rule associated with it and if a driver init is done successfully.
def fetchmany(self, size=None): if self._state == self._STATE_NONE: raise Exception("No query yet") if size is None: size = 1 if not self._data: return [] else: if len(self._data) > size: result, self._data = self...
Fetch the next set of rows of a query result, returning a sequence of sequences (e.g. a list of tuples). An empty sequence is returned when no more rows are available. The number of rows to fetch per call is specified by the parameter. If it is not given, the cursor's arraysize determines the n...
def get_pixel_distance(self, x1, y1, x2, y2): dx = abs(x2 - x1) dy = abs(y2 - y1) dist = np.sqrt(dx * dx + dy * dy) dist = np.round(dist) return dist
Calculate distance between the given pixel positions. Parameters ---------- x1, y1, x2, y2 : number Pixel coordinates. Returns ------- dist : float Rounded distance.
def to_task(self): from google.appengine.api.taskqueue import Task from google.appengine.api.taskqueue import TaskRetryOptions self._increment_recursion_level() self.check_recursion_depth() url = "%s/%s" % (ASYNC_ENDPOINT, self.function_path) kwargs = { ...
Return a task object representing this async job.
def inflate_plugins(self, plugins_definition, inflate_method): if isinstance(plugins_definition, list): return self.inflate_plugin_list(plugins_definition, inflate_method) elif isinstance(plugins_definition, dict): return self.inflate_plugin_dict(plugins_definition, infl...
Inflate multiple plugins based on a list/dict definition. Args: plugins_definition (list/dict): the plugins definitions. inflate_method (method): the method to indlate each plugin. Returns: list: a list of plugin instances. Raises: ValueError: w...
def mmi_ramp_roman(raster_layer): items = [] sorted_mmi_scale = sorted( earthquake_mmi_scale[], key=itemgetter()) for class_max in sorted_mmi_scale: colour = class_max[] label = % class_max[] ramp_item = QgsColorRampShader.ColorRampItem( class_max[], colour...
Generate an mmi ramp using range of 1-10 on roman. A standarised range is used so that two shakemaps of different intensities can be properly compared visually with colours stretched accross the same range. The colours used are the 'standard' colours commonly shown for the mercalli scale e.g. on w...
def chown(self, tarinfo, targetpath): if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: try: g = grp.getgrnam(tarinfo.gname)[2] except KeyError: g = tarinfo.gid try: u = pwd.getpwnam(tarinfo.uname)[2...
Set owner of targetpath according to tarinfo.
def run_gradle(path=kernel_path, cmd=, skip_tests=False): class Gradle(BaseCommand): description = def skip_test_option(self, skip): if skip: return else: return def run(self): run([( if sys.platform == else ) + ...
Return a Command for running gradle scripts. Parameters ---------- path: str, optional The base path of the node package. Defaults to the repo root. cmd: str, optional The command to run with gradlew.
def email_url_config(cls, url, backend=None): config = {} url = urlparse(url) if not isinstance(url, cls.URL_CLASS) else url path = url.path[1:] path = unquote_plus(path.split(, 2)[0]) config.update({ : path, : _cast_urlstr(u...
Parses an email URL.
def get_for_nearest_ancestor(self, cls, attribute_name): for family_cls in family(cls): if self.has(family_cls.__module__, family_cls.__name__, attribute_name): return self.get(family_cls.__module__, family_cls.__name__, attribute_name) ini_filename = cls.__module__...
Find a prior with the attribute analysis_path from the config for this class or one of its ancestors Parameters ---------- cls: class The class of interest attribute_name: String The analysis_path of the attribute Returns ------- prior_arr...
def __regkey_value(self, path, name=, start_key=None): r if sys.version < : import _winreg as reg else: import winreg as reg def _fn(path, name=, start_key=None): if isinstance(path, str): path = path.split() if s...
r'''Return the data of value mecabrc at MeCab HKEY node. On Windows, the path to the mecabrc as set in the Windows Registry is used to deduce the path to libmecab.dll. Returns: The full path to the mecabrc on Windows. Raises: WindowsError: A problem wa...
def get_questions(self, answered=None, honor_sequential=True, update=True): def update_question_list(): latest_question_response = question_map[][0] question_answered = False if not in latest_question_response: question_ans...
gets all available questions for this section if answered == False: only return next unanswered question if answered == True: only return next answered question if answered in None: return next question whether answered or not if honor_sequential == True: only return questions if sectio...
def add_local(self, field_name, field): self._dlog("adding local ".format(field_name)) field._pfp__name = field_name self._curr_scope["vars"][field_name] = field
Add a local variable in the current scope :field_name: The field's name :field: The field :returns: None
def apply(self, df): if hasattr(self.definition, ): r = self.definition(df) elif self.definition in df.columns: r = df[self.definition] elif not isinstance(self.definition, string_types): r = pd.Series(self.definition, index=df.index) else: ...
Takes a pd.DataFrame and returns the newly defined column, i.e. a pd.Series that has the same index as `df`.
def check_config_xml(self, contents): self.log(u"Checking contents XML config file") self.result = ValidatorResult() if self._are_safety_checks_disabled(u"check_config_xml"): return self.result contents = gf.safe_bytes(contents) self.log(u"Checking that conte...
Check whether the given XML config file contents is well-formed and it has all the required parameters. :param string contents: the XML config file contents or XML config string :param bool is_config_string: if ``True``, contents is a config string :rtype: :class:`~aeneas.validator.Vali...
def create_project(self, key, name=None, assignee=None, type="Software", template_name=None): if assignee is None: assignee = self.current_user() if name is None: name = key possible_templates = [, , , ] if template_name is not None: possibl...
Create a project with the specified parameters. :param key: Mandatory. Must match JIRA project key requirements, usually only 2-10 uppercase characters. :type: str :param name: If not specified it will use the key value. :type name: Optional[str] :param assignee: If not specifie...
def get_default_config(self): config = super(OneWireCollector, self).get_default_config() config.update({ : , : , }) return config
Returns the default collector settings
def fantope(x, rho, dim, tol=1e-4): U, V = np.linalg.eigh(x) minval, maxval = np.maximum(U.min(), 0), np.maximum(U.max(), 20 * dim) while True: theta = 0.5 * (maxval + minval) thr_eigvals = np.minimum(np.maximum((U - theta), 0), 1) constraint = np.sum(thr_eigvals) i...
Projection onto the fantope [1]_ .. [1] Vu, Vincent Q., et al. "Fantope projection and selection: A near-optimal convex relaxation of sparse PCA." Advances in neural information processing systems. 2013.
def convertDirMP3ToWav(dirName, Fs, nC, useMp3TagsAsName = False): types = (dirName+os.sep+,) filesToProcess = [] for files in types: filesToProcess.extend(glob.glob(files)) for f in filesToProcess: audioFile = eyed3.load(f) if useMp3TagsAsN...
This function converts the MP3 files stored in a folder to WAV. If required, the output names of the WAV files are based on MP3 tags, otherwise the same names are used. ARGUMENTS: - dirName: the path of the folder where the MP3s are stored - Fs: the sampling rate of the generated WAV files ...
def DeleteGroup(r, group, dry_run=False): query = { "dry-run": dry_run, } return r.request("delete", "/2/groups/%s" % group, query=query)
Deletes a node group. @type group: str @param group: the node group to delete @type dry_run: bool @param dry_run: whether to peform a dry run @rtype: int @return: job id
def prepare_renderable(request, test_case_result, is_admin): test_case = test_case_result.test_case file_directory = request.registry.settings[] sha1 = test_case_result.diff.sha1 if test_case_result.diff else None kwargs = {: test_case.id, : test_case.testable.name, : test_case.name, ...
Return a completed Renderable.
def git_url_ssh_to_https(url): path = url.split(, 1)[1][1:].strip() new = % path print( % new) return new.format(GITHUB_TOKEN=os.getenv())
Convert a git url url will look like https://github.com/ARMmbed/mbed-cloud-sdk-python.git or git@github.com:ARMmbed/mbed-cloud-sdk-python.git we want: https://${GITHUB_TOKEN}@github.com/ARMmbed/mbed-cloud-sdk-python-private.git
def get_composite_reflectivity(self, tower_id, background=, include_legend=True, include_counties=True, include_warnings=True, include_highways=True, include_cities=True, include_rivers=True, include_topography=True): return self._bu...
Get the composite reflectivity for a noaa radar site. :param tower_id: The noaa tower id. Ex Huntsville, Al -> 'HTX'. :type tower_id: str :param background: The hex background color. :type background: str :param include_legend: True - include legend. :type include_legend...
def get_key_from_envs(envs, key): if hasattr(envs, ): envs = [envs] for env in envs: if key in env: return env[key] return NO_VALUE
Return the value of a key from the given dict respecting namespaces. Data can also be a list of data dicts.
def any2utf8(text, errors=, encoding=): if isinstance(text, unicode): return text.encode() return unicode(text, encoding, errors=errors).encode()
Convert a string (unicode or bytestring in `encoding`), to bytestring in utf8.
def list_stateful_set_for_all_namespaces(self, **kwargs): kwargs[] = True if kwargs.get(): return self.list_stateful_set_for_all_namespaces_with_http_info(**kwargs) else: (data) = self.list_stateful_set_for_all_namespaces_with_http_info(**kwargs) retu...
list or watch objects of kind StatefulSet This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_stateful_set_for_all_namespaces(async_req=True) >>> result = thread.get() :param async_req bo...
async def evaluate_trained_model(state): return await evaluate_model( state.train_model_path, state.best_model_path, os.path.join(fsdb.eval_dir(), state.train_model_name), state.seed)
Evaluate the most recently trained model against the current best model. Args: state: the RL loop State instance.
def excerpts(n_samples, n_excerpts=None, excerpt_size=None): assert n_excerpts >= 2 step = _excerpt_step(n_samples, n_excerpts=n_excerpts, excerpt_size=excerpt_size) for i in range(n_excerpts): start = i * step if start >= n_samples: ...
Yield (start, end) where start is included and end is excluded.
def get_fields(model_class): return [ attr for attr, value in model_class.__dict__.items() if issubclass(type(value), (mongo.base.BaseField, mongo.EmbeddedDocumentField)) ]
Pass in a mongo model class and extract all the attributes which are mongoengine fields Returns: list of strings of field attributes
def serve(request, path, document_root=None, show_indexes=False, default=): path = posixpath.normpath(unquote(path)) path = path.lstrip() newpath = for part in path.split(): if not part: continue drive, part = os.path.splitdrive(part) head, pa...
Serve static files below a given point in the directory structure. To use, put a URL pattern such as:: (r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root' : '/path/to/my/files/'}) in your URLconf. You must provide the ``document_root`` param. You may also set ``show_inde...
def isValidFeatureWriter(klass): if not isclass(klass): logger.error("%r is not a class", klass) return False if not hasattr(klass, "tableTag"): logger.error("%r does not have required attribute", klass) return False if not hasattr(klass, "write"): logger.error(...
Return True if 'klass' is a valid feature writer class. A valid feature writer class is a class (of type 'type'), that has two required attributes: 1) 'tableTag' (str), which can be "GSUB", "GPOS", or other similar tags. 2) 'write' (bound method), with the signature matching the same method from ...
def disp(self, idx=100): filenameprefix = self.name_prefix def printdatarow(dat, iteration): i = np.where(dat.f[:, 0] == iteration)[0][0] j = np.where(dat.std[:, 0] == iteration)[0][0] print( % (int(dat.f[i, 0])) + % (int(dat.f[i, 1])) + % ...
displays selected data from (files written by) the class `CMADataLogger`. Arguments --------- `idx` indices corresponding to rows in the data file; if idx is a scalar (int), the first two, then every idx-th, and the last three rows are displayed. ...
def get_dates_range(self, scale=, start=None, end=None, date_max=): automaximumdailyweeklymonthlyquarterlyyearlyauto if scale not in [, , , , , , ]: raise ValueError( % scale) start = Timestamp(start or self._start.min() or date_max) ...
Returns a list of dates sampled according to the specified parameters. :param scale: {'auto', 'maximum', 'daily', 'weekly', 'monthly', 'quarterly', 'yearly'} Scale specifies the sampling intervals. 'auto' will heuristically choose a scale for quick processing :param ...
def acts_as_state_machine(cls): assert not hasattr(cls, ), .format(cls) assert not hasattr(cls, ), .format(cls) def get_states(obj): return StateInfo.get_states(obj.__class__) def is_transition_failure_handler(obj): return all([ any([ inspect.ismethod(o...
a decorator which sets two properties on a class: * the 'current_state' property: a read-only property, returning the state machine's current state, as 'State' object * the 'states' property: a tuple of all valid state machine states, as 'State' objects class objects may use current_state and states...
def power_cycle_vm(virtual_machine, action=): s name. virtual_machine vim.VirtualMachine object to power on/off virtual machine action Operation option to power on/off the machine onpower onNot enough permissions. Required privilege: {}offpower offNot enough permissions. Required privi...
Powers on/off a virtual machine specified by it's name. virtual_machine vim.VirtualMachine object to power on/off virtual machine action Operation option to power on/off the machine
def __fix_field_date(self, item, attribute): field_date = str_to_datetime(item[attribute]) try: _ = int(field_date.strftime("%z")[0:3]) except ValueError: logger.warning("%s in commit %s has a wrong format", attribute, item[]) item[attribute] = fiel...
Fix possible errors in the field date
def date_from_number(self, value): if not isinstance(value, numbers.Real): return None delta = datetime.timedelta(days=value) return self._null_date + delta
Converts a float value to corresponding datetime instance.
def computePhase2(self, doLearn=False): self.confidence[][c,i] = maxConfidence
This is the phase 2 of learning, inference and multistep prediction. During this phase, all the cell with lateral support have their predictedState turned on and the firing segments are queued up for updates. Parameters: -------------------------------------------- doLearn: Boolean flag to que...
def row_factory(cursor, row): d = {} for idx, col in enumerate(cursor.description): d[col[0]] = row[idx] return d
Returns a sqlite row factory that returns a dictionary
def _interpolate_missing_data(data, mask, method=): from scipy import interpolate data_interp = np.array(data, copy=True) if len(data_interp.shape) != 2: raise ValueError() if mask.shape != data.shape: raise ValueError() y, x = np.indices(data_interp.shape) xy = np.dsta...
Interpolate missing data as identified by the ``mask`` keyword. Parameters ---------- data : 2D `~numpy.ndarray` An array containing the 2D image. mask : 2D bool `~numpy.ndarray` A 2D booleen mask array with the same shape as the input ``data``, where a `True` value indicates t...
def copy_framebuffer(self, dst, src) -> None: s content) - downsample a framebuffer directly to a texture. Args: dst (Framebuffer or Texture): Destination framebuffer or texture. src (Framebuffer): Source framebuffer. ' self.mglo.copy...
Copy framebuffer content. Use this method to: - blit framebuffers. - copy framebuffer content into a texture. - downsample framebuffers. (it will allow to read the framebuffer's content) - downsample a framebuffer directly to a texture. ...
def swipe_bottom(self, steps=10, *args, **selectors): self.device(**selectors).swipe.down(steps=steps)
Swipe the UI object with *selectors* from center to bottom See `Swipe Left` for more details.
def normalize(arg=None): res = t_arg = type(arg) if t_arg in (list, tuple): for i in arg: res += normalize(i) elif t_arg is dict: keys = arg.keys() keys.sort() for key in keys: res += % (normalize(key), normalize(arg[key])) elif t_arg i...
Normalizes an argument for signing purpose. This is used for normalizing the arguments of RPC method calls. :param arg: The argument to normalize :return: A string representating the normalized argument. .. doctest:: >>> from cloud.rpc import normalize >>> normalize(['foo', 42, 'bar']) ...
def set_mtime(self, name, mtime, size): self.check_write(name) os.utime(os.path.join(self.cur_dir, name), (-1, mtime))
Set modification time on file.
def to_json(self): json_dict = self.to_json_basic() json_dict[] = self.closed json_dict[] = self.opened json_dict[] = self.closed_long return json.dumps(json_dict)
:return: str
def _reload(self, module=None): if self.module is None: raise RuntimeError elif module is None: import importlib module = ModuleSource(importlib.reload(module)) elif module.name != self.module: raise RuntimeError if self.name in ...
Reload the source function from the source module. **Internal use only** Update the source function of the formula. This method is used to updated the underlying formula when the source code of the module in which the source function is read from is modified. If the for...
def lastOfferedMonth(self): lastOfferedSeries = self.event_set.order_by().first() return (lastOfferedSeries.year,lastOfferedSeries.month)
Sometimes a Series is associated with a month other than the one in which the first class begins, so this returns a (year,month) tuple that can be used in admin instead.
def clean_username(self, username): username_case = settings.CAS_FORCE_CHANGE_USERNAME_CASE if username_case == : username = username.lower() elif username_case == : username = username.upper() elif username_case is not None: raise ImproperlyC...
Performs any cleaning on the "username" prior to using it to get or create the user object. Returns the cleaned username. By default, changes the username case according to `settings.CAS_FORCE_CHANGE_USERNAME_CASE`.
def _retrieve(self, namespace, stream, start_id, end_time, order, limit, configuration): stream = self.get_stream(namespace, stream, configuration) events = stream.iterator(start_id, uuid_from_kronos_time(end_time, ...
Retrieve events for `stream` between `start_id` and `end_time`. `stream` : The stream to return events for. `start_id` : Return events with id > `start_id`. `end_time` : Return events ending <= `end_time`. `order` : Whether to return the results in ResultOrder.ASCENDING or ResultOrder.DESC...
def build_struct_type(s_sdt): s_dt = nav_one(s_sdt).S_DT[17]() struct = ET.Element(, name=s_dt.name) first_filter = lambda selected: not nav_one(selected).S_MBR[46, ]() s_mbr = nav_any(s_sdt).S_MBR[44](first_filter) while s_mbr: s_dt = nav_one(s_mbr).S_DT[45]() type_na...
Build an xsd complexType out of a S_SDT.
def add_node(self, kind, image_id, image_user, flavor, security_group, image_userdata=, name=None, **extra): if not self._NODE_KIND_RE.match(kind): raise ValueError( "Invalid name `{kind}`. The `kind` argument may only contain" " alphanumeric...
Adds a new node to the cluster. This factory method provides an easy way to add a new node to the cluster by specifying all relevant parameters. The node does not get started nor setup automatically, this has to be done manually afterwards. :param str kind: kind of node to start. this r...
def refresh(self, accept=MEDIA_TYPE_TAXII_V20):
    """Update the API Root's information and list of Collections.

    :param accept: media type sent in the Accept header for both
        refresh requests.
    """
    # Same order as before: information first, then collections.
    for refresh_step in (self.refresh_information, self.refresh_collections):
        refresh_step(accept)
Update the API Root's information and list of Collections
def _summarize_in_roi(self, label_mask, num_clusters_per_roi=1, metric=): this_label = self.carpet[label_mask.flatten(), :] if num_clusters_per_roi == 1: out_matrix = self._summary_func(this_label, axis=0) else: out_matrix = self._make_clusters(this_label, num_c...
returns a single row summarizing (typically via mean) all rows in an ROI.
def _mem(self):
    """Record memory usage.

    Reads the system-wide virtual-memory usage percentage and records
    it both as a categorized metric and as a gauge.
    """
    mem_stats = psutil.virtual_memory()
    percent_used = int(mem_stats.percent)
    set_metric("memory", percent_used, category=self.category)
    gauge("memory", percent_used)
Record Memory usage.
def _parse_processor_embedded_health(self, data): processor = self.get_value_as_list((data[] []), ) if processor is None: msg = "Unable to get cpu data. Error: Data missing" raise exception.IloError(msg) cpus = 0 ...
Parse the get_host_health_data() for essential properties :param data: the output returned by get_host_health_data() :returns: processor details like cpu arch and number of cpus.
def noise_set_type(n: tcod.noise.Noise, typ: int) -> None:
    """Set a Noise object's default noise algorithm.

    Args:
        n (tcod.noise.Noise): the Noise instance to update.
        typ (int): Any NOISE_* constant.
    """
    setattr(n, "algorithm", typ)
Set a Noise object's default noise algorithm. Args: typ (int): Any NOISE_* constant.
def update_project(config, task_presenter, results, long_description, tutorial, watch): if watch: res = _update_project_watch(config, task_presenter, results, long_description, tutorial) else: res = _update_project(config, task_present...
Update project templates and information.
def send_alert_to_configured_integration(integration_alert): try: alert = integration_alert.alert configured_integration = integration_alert.configured_integration integration = configured_integration.integration integration_actions_instance = configured_integration.integration....
Send IntegrationAlert to configured integration.
def _fix_key(key):
    """Normalize keys to Unicode strings.

    Unicode keys pass through unchanged; byte strings (Python 2 ``str``)
    raise ``TypeError``; any other type falls through and implicitly
    returns ``None``.
    """
    if isinstance(key, unicode):
        return key
    if isinstance(key, str):
        # Byte strings are rejected rather than silently decoded.
        raise TypeError(key)
Normalize keys to Unicode strings.
def select_with_correspondence( self, selector, result_selector=KeyedElement): if self.closed(): raise ValueError("Attempt to call select_with_correspondence() on a " "closed Queryable.") if not is_callable(selector):...
Apply a callable to each element in an input sequence, generating a new sequence of 2-tuples where the first element is the input value and the second is the transformed input value. The generated sequence is lazily evaluated. Note: This method uses deferred execution. Args: ...
def delete(self, run_id):
    """Delete all metrics belonging to the given run.

    :param run_id: ID of the Run that the metric belongs to.
    """
    query = {"run_id": self._parse_run_id(run_id)}
    self.generic_dao.delete_record(self.metrics_collection_name, query)
Delete all metrics belonging to the given run. :param run_id: ID of the Run that the metric belongs to.
def acquire(self, signal=True):
    """Lock the account.

    Method has no effect if the constructor argument `needs_lock` was
    set to False.

    :type signal: bool
    :param signal: Whether to emit the acquired_event signal.
    """
    if not self.needs_lock:
        return
    with self.synclock:
        # Poll the underlying lock without blocking; while it is held
        # elsewhere, sleep on the condition until a releaser wakes us.
        while not self.lock.acquire(False):
            self.synclock.wait()
        if signal:
            self.acquired_event(self)
        # Wake any other threads waiting on the condition so they can
        # re-check the lock state.
        self.synclock.notify_all()
Locks the account. Method has no effect if the constructor argument `needs_lock` was set to False. :type signal: bool :param signal: Whether to emit the acquired_event signal.
def _combine_indexers(old_key, shape, new_key): if not isinstance(old_key, VectorizedIndexer): old_key = _outer_to_vectorized_indexer(old_key, shape) if len(old_key.tuple) == 0: return new_key new_shape = np.broadcast(*old_key.tuple).shape if isinstance(new_key, VectorizedIndexer):...
Combine two indexers. Parameters ---------- old_key: ExplicitIndexer The first indexer for the original array shape: tuple of ints Shape of the original array to be indexed by old_key new_key: The second indexer for indexing original[old_key]
def stop(self, timeout=5):
    """Stop the container. The container must have been created.

    :param timeout:
        Timeout in seconds to wait for the container to stop before
        sending a ``SIGKILL``. Default: 5 (half the Docker default).
    """
    self.inner().stop(timeout=timeout)
    # Refresh the wrapped container's cached state after stopping.
    # NOTE(review): inner() is invoked twice; assumed to return the
    # same underlying object each time — confirm.
    self.inner().reload()
Stop the container. The container must have been created. :param timeout: Timeout in seconds to wait for the container to stop before sending a ``SIGKILL``. Default: 5 (half the Docker default)
def doigrf(lon, lat, alt, date, **kwargs): from . import coefficients as cf gh, sv = [], [] colat = 90. - lat if lon < 0: lon = lon + 360. itype = 1 models, igrf12coeffs = cf.get_igrf12() if in list(kwargs.keys()): if kwargs[] == : psvmodels, psvcoeffs = c...
Calculates the interpolated (<2015) or extrapolated (>2015) main field and secular variation coefficients and passes them to the Malin and Barraclough routine (function pmag.magsyn) to calculate the field from the coefficients. Parameters: ----------- lon : east longitude in degrees (0 to 360 or -...