code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
def save_model(self, request, obj, form, change): super(TweetableAdminMixin, self).save_model(request, obj, form, change) if Api and request.POST.get("send_tweet", False): auth_settings = get_auth_settings() obj.set_short_url() message = truncatechars(obj, 14...
Sends a tweet with the title/short_url if applicable.
def add_context_action(self, action): self.main_tab_widget.context_actions.append(action) for child_splitter in self.child_splitters: child_splitter.add_context_action(action)
Adds a custom context menu action :param action: action to add.
def capture_update_from_model(cls, table_name, record_id, *, update_fields=()): include_cols = () if update_fields: model_cls = get_connected_model_for_table_name(table_name) include_cols = cls._fieldnames_to_colnames(model_cls, update_fields) raw_query = sql.SQL...
Create a fresh update record from the current model state in the database. For read-write connected models, this will lead to the attempted update of the values of a corresponding object in Salesforce. Args: table_name (str): The name of the table backing the connected model (witho...
def defvalkey(js, key, default=None, take_none=True): if js is None: return default if key not in js: return default if js[key] is None and not take_none: return default return js[key]
Returns js[key] if set, otherwise default. Note js[key] can be None. :param js: :param key: :param default: :param take_none: :return:
def xsl_elements(self): def append_xsl_elements(xsl_elements, r, xsl): if r is not None: r.xpath(, namespaces=self.namespaces)[0].text = xsl xe = XslElement(r, logger=self.logger) xsl_elements.append(xe) return None, if...
Find all "XSL" styled runs, normalize related paragraph and returns list of XslElements
def set_updated(self): output = [] for method in self.methods.values(): data = method["last_output"] if isinstance(data, list): if self.testing and data: data[0]["cached_until"] = method.get("cached_until") out...
Mark the module as updated. We check if the actual content has changed and if so we trigger an update in py3status.
def projScatter(lon, lat, **kwargs): hp.projscatter(lon, lat, lonlat=True, **kwargs)
Create a scatter plot on HEALPix projected axes. Inputs: lon (deg), lat (deg)
async def set_custom_eq(self, target: str, value: str) -> None: params = {"settings": [{"target": target, "value": value}]} return await self.services["audio"]["setCustomEqualizerSettings"](params)
Set custom EQ settings.
def create_hit(MaxAssignments=None, AutoApprovalDelayInSeconds=None, LifetimeInSeconds=None, AssignmentDurationInSeconds=None, Reward=None, Title=None, Keywords=None, Description=None, Question=None, RequesterAnnotation=None, QualificationRequirements=None, UniqueRequestToken=None, AssignmentReviewPolicy=None, HITRevie...
The CreateHIT operation creates a new Human Intelligence Task (HIT). The new HIT is made available for Workers to find and accept on the Amazon Mechanical Turk website. This operation allows you to specify a new HIT by passing in values for the properties of the HIT, such as its title, reward amount and number of a...
def register_service(cls, primary_key_type): view_func = cls.as_view(cls.__name__.lower()) methods = set(cls.__model__.__methods__) if in methods: current_app.add_url_rule( cls.__model__.__url__ + , defaults={: None}, view_func=view_func, methods=[]) ...
Register an API service endpoint. :param cls: The class to register :param str primary_key_type: The type (as a string) of the primary_key field
def cat_extract(tar, member, targetpath): assert member.isreg() targetpath = targetpath.rstrip("/") targetpath = targetpath.replace("/", os.sep) upperdirs = os.path.dirname(targetpath) if upperdirs and not os.path.exists(upperdirs): try: ...
Extract a regular file member using cat for async-like I/O Mostly adapted from tarfile.py.
def _merge_constraints(constraints, overrides): for o in overrides: i = 0 while i < len(constraints): c = constraints[i] if _same(o, c): constraints[i].update(o) break i = i + 1
Merge the constraints avoiding duplicates Change constraints in place.
def densenet121(num_classes=1000, pretrained=): r model = models.densenet121(pretrained=False) if pretrained is not None: settings = pretrained_settings[][pretrained] model = load_pretrained(model, num_classes, settings) model = modify_densenets(model) return model
r"""Densenet-121 model from `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
def get_preferred_partition(self, broker, sibling_distance): eligible_partitions = self.partitions - broker.partitions if eligible_partitions: pref_partition = min( eligible_partitions, key=lambda source_partition: ...
The preferred partition belongs to the topic with the minimum (also negative) distance between destination and source. :param broker: Destination broker :param sibling_distance: dict {topic: distance} negative distance should mean that destination broker has got less partition of a...
def audit(**kwargs): def wrap(fn): @functools.wraps(fn) def advice(parent_object, *args, **kw): request = parent_object.request wijziging = request.audit_manager.create_revision() result = fn(parent_object, *args, **kw) if hasattr(request, ) a...
use this decorator to audit an operation
def reset_all_to_coefficients(self): self.reset_to_coefficients() [ee.reset_to_coefficients() for ee in self.get_extensions(data=True)] self.meta._add_modify("Reset full system to coefficients") return self
Resets the IOSystem and all extensions to coefficients. This method calls reset_to_coefficients for the IOSystem and for all Extensions in the system Note ----- The system can not be reconstructed after this steps because all absolute data is removed. Save the Y data i...
def angle(self, center1_x, center1_y, center2_x, center2_y): phi_G = np.arctan2(center2_y - center1_y, center2_x - center1_x) return phi_G
compute the rotation angle of the dipole :return:
def stream_via(self, reactor, host, port, socks_endpoint, use_tls=False): from .endpoints import TorClientEndpoint ep = TorClientEndpoint( host, port, socks_endpoint, tls=use_tls, reactor=reactor, ) return...
This returns an `IStreamClientEndpoint`_ that will connect to the given ``host``, ``port`` via Tor -- and via this parciular circuit. We match the streams up using their source-ports, so even if there are many streams in-flight to the same destination they will align correctly. ...
def _get_logger_for_instance(self, instance: typing.Any) -> logging.Logger: if self.logger is not None: return self.logger elif hasattr(instance, "logger") and isinstance(instance.logger, logging.Logger): return instance.logger elif hasattr(instance, "log") and...
Get logger for log calls. :param instance: Owner class instance. Filled only if instance created, else None. :type instance: typing.Optional[owner] :return: logger instance :rtype: logging.Logger
def find_by_extension(extension): for format in FORMATS: if extension in format.extensions: return format raise UnknownFormat( % extension)
Find and return a format by extension. :param extension: A string describing the extension of the format.
def tags(self): self.create() return dict((r.tag, r) for r in self.find_tags())
A dictionary that maps tag names to :class:`Revision` objects. Here's an example based on a mirror of the git project's repository: >>> from pprint import pprint >>> from vcs_repo_mgr.backends.git import GitRepo >>> repository = GitRepo(remote='https://github.com/git/git.git') ...
def print_block_num_row(block_num, cliques, next_cliques): n_cliques = len(cliques) if n_cliques == 0: print(.format(block_num)) return def mapper(clique): block_id, _ = clique if block_id not in next_cliques: return return format_str = + str...
Print out a row of padding and a row with the block number. Includes the branches prior to this block number.
def _ast_op_concat_to_code(self, opr, *, ignore_whitespace, **kwargs): hoist_target = OP_CONCAT if ignore_whitespace else OP_WS_CONCAT operands = self._hoist_operands(opr.operands, lambda t: isinstance(t, OptreeNode) and t.opnode.operator is hoist_target) lines = ["concatenation(["] for op in oper...
Convert an AST concatenate op to python source code.
def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE): from scipy.interpolate.interpnd import (LinearNDInterpolator, _ndim_coords_from_arrays) if isinstance(chunks, (list, tuple)): vchunks, hchunks = chunks else: v...
Interpolate linearly, generating a dask array.
def run(arguments): parser = argparse.ArgumentParser( description="Exports font icons as PNG images." ) parser.add_argument( , action=, help="list all available icon names and exit" ) parser.add_argument( , choices=[x for x in AVAILABLE_ICON_FONTS...
Main function for command line usage
def confd_state_rest_listen_tcp_port(self, **kwargs): config = ET.Element("config") confd_state = ET.SubElement(config, "confd-state", xmlns="http://tail-f.com/yang/confd-monitoring") rest = ET.SubElement(confd_state, "rest") listen = ET.SubElement(rest, "listen") tcp = ...
Auto Generated Code
def moments(self): moment1 = statstools.calc_mean_time(self.delays, self.coefs) moment2 = statstools.calc_mean_time_deviation( self.delays, self.coefs, moment1) return numpy.array([moment1, moment2])
The first two time delay weighted statistical moments of the MA coefficients.
def interm_range_type(self) -> Sequence[str]: fluents = self.domain.intermediate_fluents ordering = self.domain.interm_fluent_ordering return self._fluent_range_type(fluents, ordering)
The range type of each intermediate fluent in canonical order. Returns: Sequence[str]: A tuple of range types representing the range of each fluent.
def _init_tag_params(self, tag, params): self._element = tag self.params = params self._parseTagName() self._istag = True self._isendtag = False self._isnonpairtag = False self._element = self.tagToString()
Alternative constructor used when the tag parameters are added to the HTMLElement (HTMLElement(tag, params)). This method just creates string and then pass it to the :meth:`_init_tag`. Args: tag (str): HTML tag as string. params (dict): HTML tag parameters as di...
def checkPortIsOpen(remoteServerHost=ServerHost, port = Port): remoteServerIP = socket.gethostbyname(remoteServerHost) try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) result = sock.connect_ex((remoteServerIP, int(port))) if result == 0: return True ...
Checks if the specified port is open :param remoteServerHost: the host address :param port: port which needs to be checked :return: ``True`` if port is open, ``False`` otherwise
def check_cmake_exists(cmake_command): from subprocess import Popen, PIPE p = Popen( .format(cmake_command), shell=True, stdin=PIPE, stdout=PIPE) if not ( in p.communicate()[0].decode()): sys.stderr.write() sys.stderr.write() sys.stderr.write() ...
Check whether CMake is installed. If not, print informative error message and quits.
def occurrence(self, file_name=None, path=None, date=None): if self._indicator_data.get() != : return None occurrence_obj = FileOccurrence(file_name, path, date) self._occurrences.append(occurrence_obj) return occurrence_obj
Add a file Occurrence. Args: file_name (str, optional): The file name for this occurrence. path (str, optional): The file path for this occurrence. date (str, optional): The datetime expression for this occurrence. Returns: obj: An instance of Occurrence...
def _addPort(n: LNode, lp: LPort, intf: Interface, reverseDirection=False): origin = originObjOfPort(intf) d = intf._direction d = PortTypeFromDir(d) if reverseDirection: d = PortType.opposite(d) new_lp = LPort(lp, d, lp.side, name=intf._name) new_lp.originObj = origi...
add port to LPort for interface
def periodic_callback(self): if self.stopped: return if not self.scanning and len(self.connections.get_connections()) == 0: self._logger.info("Restarting scan for devices") self.start_scan(self._active_scan) self._logger.info("Finished ...
Periodic cleanup tasks to maintain this adapter, should be called every second.
def atlas_find_missing_zonefile_availability( peer_table=None, con=None, path=None, missing_zonefile_info=None ): bit_offset = 0 bit_count = 10000 missing = [] ret = {} if missing_zonefile_info is None: while True: zfinfo = atlasdb_zonefile_find_missing( bit_offset, b...
Find the set of missing zonefiles, as well as their popularity amongst our neighbors. Only consider zonefiles that are known by at least one peer; otherwise they're missing from our clique (and we'll re-sync our neighborss' inventories every so often to make sure we detect when zonefiles becom...
def _getCharFont(self, font_files, code_point): return_font_files = [] for font_file in font_files: face = ft.Face(font_file) if face.get_char_index(code_point): return_font_files.append(font_file) return return_font_files
Returns font files containing given code point.
def Zabransky_quasi_polynomial_integral(T, Tc, a1, a2, a3, a4, a5, a6): r Tc2 = Tc*Tc Tc3 = Tc2*Tc term = T - Tc return R*(T*(T*(T*(T*a6/(4.*Tc3) + a5/(3.*Tc2)) + a4/(2.*Tc)) - a1 + a3) + T*a1*log(1. - T/Tc) - 0.5*Tc*(a1 + a2)*log(term*term))
r'''Calculates the integral of liquid heat capacity using the quasi-polynomial model developed in [1]_. Parameters ---------- T : float Temperature [K] a1-a6 : float Coefficients Returns ------- H : float Difference in enthalpy from 0 K, [J/mol] Notes ...
def _open_fp(self, fp): if hasattr(fp, ) and not in fp.mode: raise pycdlibexception.PyCdlibInvalidInput("The file to open must be in binary mode (add to the open flags)") self._cdfp = fp self._parse_volume_descriptors() ...
An internal method to open an existing ISO for inspection and modification. Note that the file object passed in here must stay open for the lifetime of this object, as the PyCdlib class uses it internally to do writing and reading operations. Parameters: fp - The file object c...
def SetDayOfWeekHasService(self, dow, has_service=True): assert(dow >= 0 and dow < 7) self.day_of_week[dow] = has_service
Set service as running (or not) on a day of the week. By default the service does not run on any days. Args: dow: 0 for Monday through 6 for Sunday has_service: True if this service operates on dow, False if it does not. Returns: None
def future_check_request(self, name, update=None): exist = False yield self.until_data_synced() if name in self._requests_index: exist = True else: if update or (update is None and self._update_on_lookup): yield self.inspect_requests(name)...
Check if the request exists. Used internally by future_get_request. This method is aware of synchronisation in progress and if inspection of the server is allowed. Parameters ---------- name : str Name of the request to verify. update : bool or None, optiona...
def client_has_user_consent(self): value = False try: uc = UserConsent.objects.get(user=self.request.user, client=self.client) if (set(self.params[]).issubset(uc.scope)) and not (uc.has_expired()): value = True except UserConsent.DoesNotExist: ...
Check if already exists user consent for some client. Return bool.
def note_on(self, channel, note, velocity): return self.midi_event(NOTE_ON, channel, note, velocity)
Return bytes for a 'note_on' event.
def joint(node): node, _, _ = _fix(node) body = node.body[0].body[:-1] + node.body[1].body func = gast.Module(body=[gast.FunctionDef( name=node.body[0].name, args=node.body[1].args, body=body, decorator_list=[], returns=None)]) anno.clearanno(func) return func
Merge the bodies of primal and adjoint into a single function. Args: node: A module with the primal and adjoint function definitions as returned by `reverse_ad`. Returns: func: A `Module` node with a single function definition containing the combined primal and adjoint.
def load_spitzer_catalog(show_progress=False): path = get_path(, location=, show_progress=show_progress) table = Table.read(path) return table
Load a 4.5 micron Spitzer catalog. The image from which this catalog was derived is returned by :func:`load_spitzer_image`. Parameters ---------- show_progress : bool, optional Whether to display a progress bar during the download (default is `False`). Returns ------- ...
def _num_samples(x): if not hasattr(x, ) and not hasattr(x, ): if hasattr(x, ): x = np.asarray(x) else: raise TypeError("Expected sequence or array-like, got %r" % x) return x.shape[0] if hasattr(x, ) else len(x)
Return number of samples in array-like x.
def handle_data(self, data): if not self.active_url: return if data.strip() == self.active_url: self.entries.append(self.active_url)
Callback when the data of a tag has been collected.
def gamma_centered(cls, kpts=(1, 1, 1), use_symmetries=True, use_time_reversal=True): return cls(kpts=[kpts], kpt_shifts=(0.0, 0.0, 0.0), use_symmetries=use_symmetries, use_time_reversal=use_time_reversal, comment="gamma-centered mode")
Convenient static constructor for an automatic Gamma centered Kpoint grid. Args: kpts: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors. use_symmetries: False if spatial symmetries should not be used to reduce the number of independent k-points. ...
def remove_origin(self, account_id, origin_id): return self.account.deleteOriginPullRule(origin_id, id=account_id)
Removes an origin pull mapping with the given origin pull ID. :param int account_id: the CDN account ID from which the mapping should be deleted. :param int origin_id: the origin pull mapping ID to delete.
def groups(self): groups = set() for item in self._items: groups |= item.groups return groups
Set of groups defined in the roster. :Return: the groups :ReturnType: `set` of `unicode`
def setupTable_cmap(self): if "cmap" not in self.tables: return from fontTools.ttLib.tables._c_m_a_p import cmap_format_4 nonBMP = dict((k,v) for k,v in self.unicodeToGlyphNameMapping.items() if k > 65535) if nonBMP: mapping = dict((k,v) for k,v in self...
Make the cmap table. **This should not be called externally.** Subclasses may override or supplement this method to handle the table creation in a different way if desired.
def _cb_created(self, payload, duplicated): if payload[P_RESOURCE] in _POINT_TYPE_TO_CLASS: store = self.__new_feeds if payload[P_RESOURCE] == R_FEED else self.__new_controls cls = _POINT_TYPE_TO_CLASS[payload[P_RESOURCE]] with store: store[payload[P_...
Indirect callback (via Client) for point & subscription creation responses
def translate_addresstype(f): @wraps(f) def wr(r, pc): at = r["addressType"] try: r.update({"addressType": POSTCODE_API_TYPEDEFS_ADDRESS_TYPES[at]}) except: logger.warning("Warning: {}: " "unknown : {}".format(pc, at)) retu...
decorator to translate the addressType field. translate the value of the addressType field of the API response into a translated type.
def update_task_redundancy(config, task_id, redundancy): if task_id is None: msg = ("Are you sure you want to update all the tasks redundancy?") if click.confirm(msg): res = _update_tasks_redundancy(config, task_id, redundancy) click.echo(res) else: ...
Update task redudancy for a project.
def update_col(self, column_name, series): logger.debug(.format( column_name, self.name)) self.local[column_name] = series
Add or replace a column in the underlying DataFrame. Parameters ---------- column_name : str Column to add or replace. series : pandas.Series or sequence Column data.
def appendpickle(table, source=None, protocol=-1, write_header=False): _writepickle(table, source=source, mode=, protocol=protocol, write_header=write_header)
Append data to an existing pickle file. I.e., as :func:`petl.io.pickle.topickle` but the file is opened in append mode. Note that no attempt is made to check that the fields or row lengths are consistent with the existing data, the data rows from the table are simply appended to the file.
def show_feature_destibution(self, data = None): visualizer = cluster_visualizer(); print("amount of nodes: ", self.__amount_nodes); if (data is not None): visualizer.append_cluster(data, marker = ); for level in range(0, self.heig...
! @brief Shows feature distribution. @details Only features in 1D, 2D, 3D space can be visualized. @param[in] data (list): List of points that will be used for visualization, if it not specified than feature will be displayed only.
def lookup_endpoint(cli): url = .format(APPNAME) environ = cli.user.get(url).item port = environ[] host = socket.gethostbyname(environ[]) return "tcp://{0}:{1}".format(host, port)
Looks up the application endpoint from dotcloud
def Decompress(self, compressed_data): try: uncompressed_data = self._zlib_decompressor.decompress(compressed_data) remaining_compressed_data = getattr( self._zlib_decompressor, , b) except zlib.error as exception: raise errors.BackEndError(( ).format(excep...
Decompresses the compressed data. Args: compressed_data (bytes): compressed data. Returns: tuple(bytes, bytes): uncompressed data and remaining compressed data. Raises: BackEndError: if the zlib compressed stream cannot be decompressed.
def add_book_series(self, title, volume=None): book_series = {} if title is not None: book_series[] = title if volume is not None: book_series[] = volume self._append_to(, book_series)
:param volume: the volume of the book :type volume: string :param title: the title of the book :type title: string
def parse_complex(tree_to_parse, xpath_root, xpath_map, complex_key): complex_struct = {} for prop in _complex_definitions.get(complex_key, xpath_map): parsed = parse_property(tree_to_parse, xpath_root, xpath_map, prop) parsed = reduce_value(flatten_items(v.split(_COMPLEX_DELIM) ...
Creates and returns a Dictionary data structure parsed from the metadata. :param tree_to_parse: the XML tree compatible with element_utils to be parsed :param xpath_root: the XPATH location of the structure inside the parent element :param xpath_map: a dict of XPATHs corresponding to a complex definition ...
def _process_panel_configuration(self, config): try: dashboard = config.get() if not dashboard: LOG.warning("Skipping %s because it doesnPANELPANEL_GROUPDEFAULT_PANELREMOVE_PANELADD_PANELADD_PANELmoduleexccan_registercan_registerpanelCould not process panel %(pan...
Add, remove and set default panels on the dashboard.
def updateData(self, data): fig = pylab.figure(1) n_agent = len(data) idx = 1 for i, adata in enumerate(data): saxis = fig.add_subplot(3, n_agent, i + 1) saxis.plot(adata[0]) idx += 1 aaxis = fig.add_subplot(3, n_agent, i + 1 +...
Updates the data used by the renderer.
def is_prefix(cls, path): lagofile = paths.Paths(path).prefix_lagofile() return os.path.isfile(lagofile)
Check if a path is a valid prefix Args: path(str): path to be checked Returns: bool: True if the given path is a prefix
def predict(self, h=5): if self.latent_variables.estimated is False: raise Exception("No latent variables estimated!") else: predictions, _, _, _ = self._construct_predict(self.latent_variables.get_z_values(),h) predictions = predictions*self._norm_...
Makes forecast with the estimated model Parameters ---------- h : int (default : 5) How many steps ahead would you like to forecast? Returns ---------- - pd.DataFrame with predicted values
def callback(self, request, **kwargs): try: client = self.get_evernote_client() us = UserService.objects.get(user=request.user, name=ServicesActivated.objects.get(name=)) us.token = client....
Called from the Service when the user accept to activate it
def download_data(self, configuration, output_file): params = configuration response = self.__app.native_api_call(, , params, self.__options, False, None, True, http_path="/api/v1/meta/") with open(output_file, ) as out_file: shutil.copyfileobj(response.raw, out_file) ...
Выполняет указанный в конфигурации запрос и отдает файл на скачивание :param configuration: Конфгурация запроса :param output_file: Место, куда надо скачать файл :return:
def make_app(): from plnt import Plnt database_uri = os.environ.get("PLNT_DATABASE_URI") app = Plnt(database_uri or "sqlite:////tmp/plnt.db") app.bind_to_context() return app
Helper function that creates a plnt app.
def with_env(self, **environment_variables): new_env_vars = { str(var): str(val) for var, val in environment_variables.items() } new_command = copy.deepcopy(self) new_command._env.update(new_env_vars) return new_command
Return new Command object that will be run with additional environment variables. Specify environment variables as follows: new_cmd = old_cmd.with_env(PYTHON_PATH=".", ENV_PORT="2022")
def post_fork_child(self, fingerprint, jvm_options, classpath, stdout, stderr): java = SubprocessExecutor(self._distribution) subproc = java.spawn(classpath=classpath, main=, jvm_options=jvm_options, args=[], ...
Post-fork() child callback for ProcessManager.daemon_spawn().
def insert(self, key, item): return lib.zhashx_insert(self._as_parameter_, key, item)
Insert item into hash table with specified key and item. If key is already present returns -1 and leaves existing item unchanged Returns 0 on success.
def check_path(self, path): path = os.path.abspath(path) if os.path.exists(path): return path else: utils.die("input file does not exists:\n {}".format(path))
turns path into an absolute path and checks that it exists, then returns it as a string.
def generate(self, more_content=None, all_members=False): directive = getattr(self, , self.objtype) (file, _, namepath) = self.name.rpartition() (contract_name, _, fullname) = namepath.partition() (name, _, paramtypes) = fullname.partition() ...
Generate reST for the object given by *self.name*, and possibly for its members. If *more_content* is given, include that content. If *all_members* is True, document all members.
def rm_docs(self): for filename in self.created: if os.path.exists(filename): os.unlink(filename)
Remove converted docs.
def from_string(cls, cl_function, dependencies=()): return_type, function_name, parameter_list, body = split_cl_function(cl_function) return SimpleCLFunction(return_type, function_name, parameter_list, body, dependencies=dependencies)
Parse the given CL function into a SimpleCLFunction object. Args: cl_function (str): the function we wish to turn into an object dependencies (list or tuple of CLLibrary): The list of CL libraries this function depends on Returns: SimpleCLFunction: the CL data type ...
def copy_result(self, selection): bbox = selection.get_bbox() if not bbox: bb_top, bb_left = self.grid.actions.cursor[:2] bb_bottom, bb_right = bb_top, bb_left else: (bb_top, bb_left), (bb_bottom, bb_right) = bbox ...
Returns result If selection consists of one cell only and result is a bitmap then the bitmap is returned. Otherwise the method returns string representations of the result for the given selection in a tab separated string.
def _set_openflow_interface_cfg(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=openflow_interface_cfg.openflow_interface_cfg, is_container=, presence=False, yang_name="openflow-interface-cfg", rest_name="openflow", parent=self, path_helper=self._path...
Setter method for openflow_interface_cfg, mapped from YANG variable /interface/hundredgigabitethernet/openflow_interface_cfg (container) If this variable is read-only (config: false) in the source YANG file, then _set_openflow_interface_cfg is considered as a private method. Backends looking to populate thi...
def register_method(func, name=None, deprecated=False): if deprecated: func = deprecated_function( func, "the {0!r} PSD methods is deprecated, and will be removed " "in a future release, please consider using {1!r} instead".format( name, name.spl...
Register a method of calculating an average spectrogram. Parameters ---------- func : `callable` function to execute name : `str`, optional name of the method, defaults to ``func.__name__`` deprecated : `bool`, optional whether this method is deprecated (`True`) or not (`F...
def close(self): if not self.closed: self.flush() err = _snd.sf_close(self._file) self._file = None _error_check(err)
Close the file. Can be called multiple times.
def get_and_update_setting(self, name, default=None, user=True): t set, return default (default is None) So the user of the function can assume a return of None equates to not set anywhere, and take the appropriate action. ' setting = self._get_setting(name, user=user) if set...
Look for a setting in the environment (first priority) and then the settings file (second). If something is found, the settings file is updated. The order of operations works as follows: 1. The user config file is used as a cache for the variable 2. the environment variable always takes pri...
def compute_neighbors( self, n_neighbors: int = 30, knn: bool = True, n_pcs: Optional[int] = None, use_rep: Optional[str] = None, method: str = , random_state: Optional[Union[RandomState, int]] = 0, write_knn_indices: bool = False, metric: str = , ...
\ Compute distances and connectivities of neighbors. Parameters ---------- n_neighbors Use this number of nearest neighbors. knn Restrict result to `n_neighbors` nearest neighbors. {n_pcs} {use_rep} Returns ------- ...
def plot_grouped_gos(self, fout_img=None, exclude_hdrs=None, **kws_usr): kws_plt, kws_dag = self._get_kws_plt(self.grprobj.usrgos, **kws_usr) pltgosusr = self.grprobj.usrgos if exclude_hdrs is not None: pltgosusr = pltgosusr.difference(self.grprobj.get_usrgos_g_hdrg...
One Plot containing all user GOs (yellow or green) and header GO IDs(green or purple).
def gauss_hermite_nodes(orders, sigma, mu=None): if isinstance(orders, int): orders = [orders] import numpy if mu is None: mu = numpy.array( [0]*sigma.shape[0] ) herms = [hermgauss(i) for i in orders] points = [ h[0]*numpy.sqrt(2) for h in herms] weights = [ h[1]/numpy...
Computes the weights and nodes for Gauss Hermite quadrature. Parameters ---------- orders : int, list, array The order of integration used in the quadrature routine sigma : array-like If one dimensional, the variance of the normal distribution being approximated. If multidimensi...
def _ReverseHostname(self, hostname): if not hostname: return if len(hostname) <= 1: return hostname if hostname[-1] == : return hostname[::-1][1:] return hostname[::-1][0:]
Reverses the hostname and strips the leading dot. The hostname entry is reversed: moc.elgoog.www. Should be: www.google.com Args: hostname (str): reversed hostname. Returns: str: hostname without a leading dot.
def spawn_container(addr, env_cls=Environment, mgr_cls=EnvManager, set_seed=True, *args, **kwargs): try: import setproctitle as spt title = .format(env_cls.__class__.__name__, _get_base_url(addr)) spt.setproctitle(ti...
Spawn a new environment in a given address as a coroutine. Arguments and keyword arguments are passed down to the created environment at initialization time. If `setproctitle <https://pypi.python.org/pypi/setproctitle>`_ is installed, this function renames the title of the process to start with 'c...
def log(self, timer_name, node): timestamp = time.time() if hasattr(self, timer_name): getattr(self, timer_name).append({ "node":node, "time":timestamp}) else: setattr(self, timer_name, [{"node":node, "time":timestamp}])
logs a event in the timer
def validate_state_locations(self): names = map(lambda loc: loc["name"], self.locations) assert len(names) == len(set(names)), "Names of state locations must be unique"
Names of all state locations must be unique.
def lineReceived(self, line): if line and line.isdigit(): self._expectedLength = int(line) self._rawBuffer = [] self._rawBufferLength = 0 self.setRawMode() else: self.keepAliveReceived()
Called when a line is received. We expect a length in bytes or an empty line for keep-alive. If we got a length, switch to raw mode to receive that amount of bytes.
async def send(self, parameters: RTCRtpSendParameters): if not self.__started: self.__cname = parameters.rtcp.cname self.__mid = parameters.muxId self.__transport._register_rtp_sender(self, parameters) self.__rtp_header_extensions_map.config...
Attempt to set the parameters controlling the sending of media. :param: parameters: The :class:`RTCRtpParameters` for the sender.
def sum_in_date(x=, y=, filter_dict=None, model=, app=DEFAULT_APP, sort=True, limit=100000): sort = sort_prefix(sort) model = get_model(model, app) filter_dict = filter_dict or {} objects = model.objects.filter(**filter_dict) objects = objects.values(x) objects = objects.annotate(y=djm...
Count the number of records for each discrete (categorical) value of a field and return a dict of two lists, the field values and the counts. FIXME: Tests need models with a date field: Examples: >> x, y = sum_in_date(y='net_sales', filter_dict={'model__startswith': 'LC60'}, model='Permission', limit=5, ...
def view_dupl_sources(token, dstore): fields = [, , , , ] dic = group_array(dstore[].value[fields], ) sameid = [] dupl = [] for source_id, group in dic.items(): if len(group) > 1: sources = [] for rec in group: geom = dstore[][rec[]:rec[]] ...
Show the sources with the same ID and the truly duplicated sources
def _defaults(self, keys=None): d = {} keys = self._keys if keys is None else keys for key in keys: d[key] = None return d
create an empty record
def config_maker(project_name, path):
    """Create a config file at *path* from the skeleton config template.

    Reads the ``config.py`` skeleton, substitutes every occurrence of the
    ``__PROJECT_NAME__`` placeholder with *project_name*, and writes the
    result to *path*.
    """
    with open(skeleton_path("config.py"), "r") as template:
        rendered = template.read().replace("__PROJECT_NAME__", project_name)
    with open(path, "w") as destination:
        destination.write(rendered)
Creates a config file based on the project name
def tail_of_file(filename, n, ansi2html=False): avg_line_length = 74 to_read = n with open(filename) as f: while 1: try: f.seek(-(avg_line_length * to_read), 2) except IOError: f.seek(0) pos =...
Reads the last n lines of the given file, seeking backwards from the end in estimated-line-length chunks.
def zone_absent(name, resource_group, connection_auth=None): ret = { : name, : False, : , : {} } if not isinstance(connection_auth, dict): ret[] = return ret zone = __salt__[]( name, resource_group, azurearm_log_level=, ...
.. versionadded:: Fluorine Ensure a DNS zone does not exist in the resource group. :param name: Name of the DNS zone. :param resource_group: The resource group assigned to the DNS zone. :param connection_auth: A dict with subscription and authentication parameters to be used ...
def _add_notification_config_to_xml(node, element_name, configs): for config in configs: config_node = s3_xml.SubElement(node, element_name) if in config: id_node = s3_xml.SubElement(config_node, ) id_node.text = config[] arn_node = s3_xml.SubElement( ...
Internal function that builds the XML sub-structure for a given kind of notification configuration.
def giant_text_sqltype(dialect: Dialect) -> str:
    """Returns the SQL column type used to make very large text columns for a
    given dialect.

    Args:
        dialect: a SQLAlchemy :class:`Dialect`

    Returns:
        the SQL data type of "giant text", typically 'LONGTEXT' for MySQL
        and 'NVARCHAR(MAX)' for SQL Server.

    Raises:
        ValueError: if the dialect is not supported.
    """
    # The return literals were lost in extraction; restored per the
    # documented contract above.
    if dialect.name == SqlaDialectName.SQLSERVER:
        return 'NVARCHAR(MAX)'
    elif dialect.name == SqlaDialectName.MYSQL:
        return 'LONGTEXT'
    else:
        raise ValueError("Unknown dialect: {}".format(dialect.name))
Returns the SQL column type used to make very large text columns for a given dialect. Args: dialect: a SQLAlchemy :class:`Dialect` Returns: the SQL data type of "giant text", typically 'LONGTEXT' for MySQL and 'NVARCHAR(MAX)' for SQL Server.
def window(self, window_name):
    """Switches focus to the specified window.

    :Args:
     - window_name: The name or window handle of the window to switch to.

    :Usage:
        ::

            driver.switch_to.window('main')
    """
    if self._driver.w3c:
        self._w3c_window(window_name)
        return
    # Legacy (JSON Wire) protocol: the payload key was stripped during
    # extraction; the command expects {'name': <name or handle>}.
    data = {'name': window_name}
    self._driver.execute(Command.SWITCH_TO_WINDOW, data)
Switches focus to the specified window. :Args: - window_name: The name or window handle of the window to switch to. :Usage: :: driver.switch_to.window('main')
def _FetchMostRecentGraphSeriesFromTheLegacyDB( label, report_type, token = None ): try: stats_for_label = aff4.FACTORY.Open( GetAFF4ClientReportsURN().Add(label), aff4_type=aff4_stats.ClientFleetStats, mode="r", token=token) except aff4.InstantiationError: ...
Fetches the latest graph-series for a client label from the legacy DB. Args: label: Client label to fetch data for. report_type: rdf_stats.ClientGraphSeries.ReportType to fetch data for. token: ACL token to use for reading from the DB. Raises: AFF4AttributeTypeError: If an unexpected report-data t...
def suppress_output(reverse=False):
    """Suppress (or restore) process stdout/stderr.

    Args:
        reverse: if True, restore the original ``sys.__stdout__`` /
            ``sys.__stderr__`` streams; otherwise redirect both streams to
            the null device.
    """
    if reverse:
        # Close the null-device files we opened so they do not leak.
        for stream in (sys.stdout, sys.stderr):
            if stream not in (sys.__stdout__, sys.__stderr__):
                try:
                    stream.close()
                except (AttributeError, OSError):
                    pass
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
    else:
        # BUG FIX: the original assigned the *path string* os.devnull to
        # sys.stdout/sys.stderr, so any subsequent write/print raised
        # AttributeError. Open real writable file objects instead.
        sys.stdout = open(os.devnull, "w")
        sys.stderr = open(os.devnull, "w")
Suppress output
def install_python(name, version=None, install_args=None, override_args=False): s easy_install. name The name of the package to be installed. Only accepts a single argument. version Install a specific version of the package. Defaults to latest version available. install_args ...
Instructs Chocolatey to install a package via Python's easy_install. name The name of the package to be installed. Only accepts a single argument. version Install a specific version of the package. Defaults to latest version available. install_args A list of install argume...