Dataset preview — 389k rows. Columns: Unnamed: 0 (int64), code (string, lengths 26 to 79.6k), docstring (string, lengths 1 to 46.9k).
200
def find_slot(self, wanted, slots=None):
    for slot in self.find_slots(wanted, slots):
        return slot
    return None
Searches the given slots or, if not given, active hotbar slot, hotbar, inventory, open window in this order. Args: wanted: function(Slot) or Slot or itemID or (itemID, metadata) Returns: Optional[Slot]: The first slot containing the item or None if not found.
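A minimal sketch of how the flexible wanted argument can be normalized into a single predicate; the Slot type and its field names below are hypothetical stand-ins, not taken from the source.

from collections import namedtuple

Slot = namedtuple("Slot", ["item_id", "metadata"])  # hypothetical stand-in

def make_matcher(wanted):
    # Normalize the four accepted forms into one predicate.
    if callable(wanted):
        return wanted
    if isinstance(wanted, Slot):
        return lambda s: s == wanted
    if isinstance(wanted, tuple):
        item_id, meta = wanted
        return lambda s: s.item_id == item_id and s.metadata == meta
    return lambda s: s.item_id == wanted  # bare item ID

slots = [Slot(1, 0), Slot(5, 2)]
print(next((s for s in slots if make_matcher((5, 2))(s)), None))
# Slot(item_id=5, metadata=2)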
201
def on_open(self):
    filename, filter = QtWidgets.QFileDialog.getOpenFileName(self, _('...'))
    if filename:
        self.open_file(filename)
Shows an open file dialog and opens the file if the dialog was accepted.
202
def setColumnCount(self, count):
    super(XColorTreeWidget, self).setColumnCount(count)
    header = self.header()
    header.setResizeMode(0, header.Stretch)
    for i in range(1, count):
        header.setResizeMode(i, header.Fixed)
Sets the number of columns used for this tree widget, updating the column resizing modes to stretch the first column. :param count | <int>
203
def add_unique_rule(self, rule, opname, arg_count, customize):
    if rule not in self.new_rules:
        self.new_rules.add(rule)
        self.addRule(rule, nop_func)
        customize[opname] = arg_count
    return
Add rule to grammar, but only if it hasn't been added previously. opname and arg_count are used in the customize() semantic actions to add the semantic action rule. arg_count is used in custom opcodes like MAKE_FUNCTION to indicate how many arguments it has. Often it is not used.
204
def ParseFromUnicode(self, value):
    precondition.AssertType(value, Text)
    value = value.strip()
    super(ClientURN, self).ParseFromUnicode(value)
    match = self.CLIENT_ID_RE.match(self._string_urn)
    if not match:
        raise type_info.TypeValueError("Client urn malformed: %s" % value)
    clientid = match.group("clientid")
    clientid_correctcase = "".join((clientid[0].upper(), clientid[1:].lower()))
    self._string_urn = self._string_urn.replace(clientid, clientid_correctcase, 1)
Parse a string into a client URN. Convert case so that all URNs are of the form C.[0-9a-f]. Args: value: string value to parse
205
def get_matrix(self, x1, x2=None):
    x1 = self.parse_samples(x1)
    if x2 is None:
        return self.kernel.get_value(x1)
    x2 = self.parse_samples(x2)
    return self.kernel.get_value(x1, x2)
Get the covariance matrix at a given set or two of independent coordinates. :param x1: ``(nsamples,)`` or ``(nsamples, ndim)`` A list of samples. :param x2: ``(nsamples,)`` or ``(nsamples, ndim)`` (optional) A second list of samples. If this is given, the cross covariance matrix is computed. Otherwise, the auto-covariance is evaluated.
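This looks like the george Gaussian-process API; assuming that, a usage sketch:

import numpy as np
import george
from george import kernels

gp = george.GP(kernels.ExpSquaredKernel(1.0))
x1 = np.linspace(0, 1, 5)
x2 = np.linspace(0, 1, 3)
K_auto = gp.get_matrix(x1)       # (5, 5) auto-covariance
K_cross = gp.get_matrix(x1, x2)  # (5, 3) cross-covariance
print(K_auto.shape, K_cross.shape)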
206
def update(self, model_alias, code='', name=None, order=None, display_filter=None):
    model_alias = self.get_model_alias(model_alias)
    for item in self.tabs[model_alias]:
        if item.code != code:
            continue
        if name:
            item.name = name
        if order:
            item.order = order
        if display_filter:
            item.display_filter = display_filter
        break
    self.tabs[model_alias] = sorted(self.tabs[model_alias],
                                    key=lambda item: item.code if item.code else 999)
Update given tab :param model_alias: :param code: :param name: :param order: :param display_filter: :return:
207
def _warn_deprecated_outside_JSONField(self):
    if not isinstance(self, JSONField) and not self.warned:
        warnings.warn(
            "Deprecated. JSONifiable fields should derive from JSONField ({name})".format(name=self.name),
            DeprecationWarning,
            stacklevel=3
        )
        self.warned = True
Certain methods will be moved to JSONField. This warning marks calls when the object is not derived from that class.
208
def delete(self, dict_name):
    conn = redis.Redis(connection_pool=self.pool)
    script = conn.register_script('...')  # Lua script body did not survive in this copy
    res = script(keys=[self._lock_name,
                       self._namespace(dict_name),
                       self._namespace(dict_name) + '...'],
                 args=[self._session_lock_identifier])
    if res == -1:
        raise LockError()
Delete an entire dictionary. This operation on its own is atomic and does not require a session lock, but a session lock is honored. :param str dict_name: name of the dictionary to delete :raises rejester.exceptions.LockError: if called with a session lock, but the system does not currently have that lock; or if called without a session lock but something else holds it
209
async def sign_url(self, url, method=HASH):
    token = await self.get_token()
    if method == self.QUERY:
        return patch_qs(url, {
            settings.WEBVIEW_TOKEN_KEY: token,
        })
    elif method == self.HASH:
        hash_id = 5
        p = list(urlparse(url))
        p[hash_id] = quote(token)
        return urlunparse(p)
    else:
        raise ValueError(f'...')  # error message lost in this copy
Sign a URL with this request's auth token.
210
def startProducing(self, consumer):
    self._task = self._cooperate(self._writeloop(consumer))
    d = self._task.whenDone()

    def maybeStopped(reason):
        reason.trap(task.TaskStopped)
        return defer.Deferred()

    d.addCallbacks(lambda ignored: None, maybeStopped)
    return d
Start a cooperative task which will read bytes from the input file and write them to C{consumer}. Return a L{Deferred} which fires after all bytes have been written. @param consumer: Any L{IConsumer} provider
211
def _load_features_from_array(self, features):
    self.feature_images = np.load(features)
    self.feature_names = range(self.feature_images.shape[1])
Load feature data from a 2D ndarray on disk.
212
def register_seo_admin(admin_site, metadata_class):
    if metadata_class._meta.use_sites:
        path_admin = SitePathMetadataAdmin
        model_instance_admin = SiteModelInstanceMetadataAdmin
        model_admin = SiteModelMetadataAdmin
        view_admin = SiteViewMetadataAdmin
    else:
        path_admin = PathMetadataAdmin
        model_instance_admin = ModelInstanceMetadataAdmin
        model_admin = ModelMetadataAdmin
        view_admin = ViewMetadataAdmin

    def get_list_display():
        return tuple(name for name, obj in metadata_class._meta.elements.items()
                     if obj.editable)

    backends = metadata_class._meta.backends

    if 'model' in backends:
        class ModelAdmin(model_admin):
            form = get_model_form(metadata_class)
            list_display = model_admin.list_display + get_list_display()

        _register_admin(admin_site, metadata_class._meta.get_model('model'), ModelAdmin)

    if 'view' in backends:
        class ViewAdmin(view_admin):
            form = get_view_form(metadata_class)
            list_display = view_admin.list_display + get_list_display()

        _register_admin(admin_site, metadata_class._meta.get_model('view'), ViewAdmin)

    if 'path' in backends:
        class PathAdmin(path_admin):
            form = get_path_form(metadata_class)
            list_display = path_admin.list_display + get_list_display()

        _register_admin(admin_site, metadata_class._meta.get_model('path'), PathAdmin)

    if 'modelinstance' in backends:
        class ModelInstanceAdmin(model_instance_admin):
            form = get_modelinstance_form(metadata_class)
            list_display = (model_instance_admin.list_display + get_list_display())

        _register_admin(admin_site, metadata_class._meta.get_model('modelinstance'),
                        ModelInstanceAdmin)
Register the backends specified in Meta.backends with the admin.
213
def draw_to_notebook(layers, **kwargs):
    from IPython.display import Image
    layers = (layers.get_all_layers()
              if hasattr(layers, 'get_all_layers') else layers)
    dot = make_pydot_graph(layers, **kwargs)
    return Image(dot.create_png())
Draws a network diagram in an IPython notebook :parameters: - layers : list or NeuralNet instance List of layers or the neural net to draw. - **kwargs : see the docstring of make_pydot_graph for other options
214
def is_integer(self, value, strict=False):
    if value is not None:
        if isinstance(value, numbers.Number):
            return
        value = stringify(value)
        if value is not None and value.isnumeric():
            return
    self.shout('...', strict, value)  # error message lost in this copy
Check if value is an integer.
215
def post(method, hmc, uri, uri_parms, body, logon_required, wait_for_completion):
    assert wait_for_completion is True
    cpc_oid = uri_parms[0]
    try:
        cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
    except KeyError:
        raise InvalidResourceError(method, uri)
    if not cpc.dpm_enabled:
        raise CpcNotInDpmError(method, uri, cpc)
    check_required_fields(method, uri, body, ['name'])
    body2 = body.copy()
    body2['adapter-family'] = 'hipersockets'
    try:
        new_adapter = cpc.adapters.add(body2)
    except InputError as exc:
        raise BadRequestError(method, uri, reason=5, message=str(exc))
    return {'object-uri': new_adapter.uri}
Operation: Create Hipersocket (requires DPM mode).
216
def sqlalch_datetime(dt):
    if isinstance(dt, str):
        return datetime.strptime(dt, "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=UTC)
    if dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None:
        return dt.astimezone(UTC)
    return dt.replace(tzinfo=UTC)
Convert a SQLAlchemy datetime string or datetime object to a timezone-aware UTC datetime object.
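A quick check of the three input shapes, assuming the function above is in scope and UTC is a standard UTC tzinfo such as datetime.timezone.utc:

from datetime import datetime, timedelta, timezone

UTC = timezone.utc

print(sqlalch_datetime("2021-03-01 12:00:00.000001"))
# 2021-03-01 12:00:00.000001+00:00 (string parsed, stamped as UTC)

cet = timezone(timedelta(hours=1))
print(sqlalch_datetime(datetime(2021, 3, 1, 13, tzinfo=cet)))
# 2021-03-01 12:00:00+00:00 (aware input converted to UTC)

print(sqlalch_datetime(datetime(2021, 3, 1, 12)))
# 2021-03-01 12:00:00+00:00 (naive input stamped as UTC)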
217
def is_parent_of_gradebook(self, id_, gradebook_id):
    if self._catalog_session is not None:
        return self._catalog_session.is_parent_of_catalog(id_=id_, catalog_id=gradebook_id)
    return self._hierarchy_session.is_parent(id_=gradebook_id, parent_id=id_)
Tests if an ``Id`` is a direct parent of a gradebook. arg: id (osid.id.Id): an ``Id`` arg: gradebook_id (osid.id.Id): the ``Id`` of a gradebook return: (boolean) - ``true`` if this ``id`` is a parent of ``gradebook_id,`` ``false`` otherwise raise: NotFound - ``gradebook_id`` is not found raise: NullArgument - ``id`` or ``gradebook_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* *implementation notes*: If ``id`` not found return ``false``.
218
def _normal_model(self, beta):
    Y = np.array(self.data[self.max_lag:])
    z = np.array([self.latent_variables.z_list[k].prior.transform(beta[k])
                  for k in range(beta.shape[0])])
    if self.ar != 0:
        mu = np.matmul(np.transpose(self.X), z[:-self.family_z_no - self.ma])
    else:
        mu = np.ones(Y.shape[0]) * z[0]
    if self.ma != 0:
        mu = arima_recursion_normal(z, mu, Y, self.max_lag, Y.shape[0],
                                    self.ar, self.ma)
    return mu, Y
Creates the structure of the model (model matrices etc) for a Normal family ARIMA model. Parameters ---------- beta : np.ndarray Contains untransformed starting values for the latent variables Returns ---------- mu : np.ndarray Contains the predicted values (location) for the time series Y : np.ndarray Contains the length-adjusted time series (accounting for lags)
219
def find_period(data, min_period=0.2, max_period=32.0,
                coarse_precision=1e-5, fine_precision=1e-9,
                periodogram=Lomb_Scargle, period_jobs=1):
    if min_period >= max_period:
        return min_period
    coarse_period = periodogram(data, coarse_precision, min_period, max_period,
                                period_jobs=period_jobs)
    return coarse_period if coarse_precision <= fine_precision else \
        periodogram(data, fine_precision,
                    coarse_period - coarse_precision,
                    coarse_period + coarse_precision,
                    period_jobs=period_jobs)
find_period(data, min_period=0.2, max_period=32.0, coarse_precision=1e-5, fine_precision=1e-9, periodogram=Lomb_Scargle, period_jobs=1) Returns the period of *data* according to the given *periodogram*, searching first with a coarse precision, and then a fine precision. **Parameters** data : array-like, shape = [n_samples, 2] or [n_samples, 3] Array containing columns *time*, *mag*, and (optional) *error*. min_period : number Minimum period in search-space. max_period : number Maximum period in search-space. coarse_precision : number Distance between contiguous frequencies in search-space during first sweep. fine_precision : number Distance between contiguous frequencies in search-space during second sweep. periodogram : function A function with arguments *data*, *precision*, *min_period*, *max_period*, and *period_jobs*, and return value *period*. period_jobs : int, optional Number of simultaneous processes to use while searching (default 1). **Returns** period : number The period of *data*.
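The two-pass coarse-to-fine refinement is easiest to see with a toy periodogram; the scorer below is purely illustrative and is not the Lomb_Scargle implementation the source uses:

import numpy as np

def toy_periodogram(data, precision, min_period, max_period, period_jobs=1):
    # Pick the candidate period closest to a known signal at ~3.14159.
    periods = np.arange(min_period, max_period, precision)
    return periods[np.argmin(np.abs(periods - 3.14159))]

data = np.zeros((10, 2))  # unused by the toy scorer
period = find_period(data, min_period=1.0, max_period=10.0,
                     coarse_precision=0.1, fine_precision=1e-4,
                     periodogram=toy_periodogram)
print(round(period, 4))  # ~3.1416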
220
def resolve_compound_variable_fields(dbg, thread_id, frame_id, scope, attrs):
    var = getVariable(dbg, thread_id, frame_id, scope, attrs)
    try:
        _type, _typeName, resolver = get_type(var)
        return _typeName, resolver.get_dictionary(var)
    except:
        pydev_log.exception('Error resolving: thread_id: %s, frame_id: %s, scope: %s, attrs: %s',
                            thread_id, frame_id, scope, attrs)  # message wording reconstructed; original literal lost
Resolve compound variable in debugger scopes by its name and attributes :param thread_id: id of the variable's thread :param frame_id: id of the variable's frame :param scope: can be BY_ID, EXPRESSION, GLOBAL, LOCAL, FRAME :param attrs: after reaching the proper scope, we have to get the attributes until we find the proper location (i.e.: obj\tattr1\tattr2) :return: a dictionary of variables's fields
221
def neural_gpu_body(inputs, hparams, name=None):
    with tf.variable_scope(name, "neural_gpu"):

        def step(state, inp):
            x = tf.nn.dropout(state, 1.0 - hparams.dropout)
            for layer in range(hparams.num_hidden_layers):
                x = common_layers.conv_gru(
                    x, (hparams.kernel_height, hparams.kernel_width),
                    hparams.hidden_size, name="cgru_%d" % layer)
            padding_inp = tf.less(tf.reduce_sum(tf.abs(inp), axis=[1, 2]), 0.00001)
            new_state = tf.where(padding_inp, state, x)
            return new_state

        return tf.foldl(
            step, tf.transpose(inputs, [1, 0, 2, 3]),
            initializer=inputs, parallel_iterations=1, swap_memory=True)
The core Neural GPU.
222
def create_asset_browser(self):
    assetbrws = ListBrowser(4, headers=['...', '...', '...', '...'])  # header labels lost in this copy
    self.asset_browser_vbox.insertWidget(0, assetbrws)
    return assetbrws
Create the asset browser. This creates a list browser for assets and adds it to the ui. :returns: the created browser :rtype: :class:`jukeboxcore.gui.widgets.browser.ListBrowser` :raises: None
223
def drop_namespace_by_url(self, url: str) -> None:
    namespace = self.get_namespace_by_url(url)
    self.session.query(NamespaceEntry).filter(NamespaceEntry.namespace == namespace).delete()
    self.session.delete(namespace)
    self.session.commit()
Drop the namespace at the given URL. Won't work if the edge store is in use. :param url: The URL of the namespace to drop
224
def bayesfactor_pearson(r, n):
    from scipy.special import gamma
    from scipy.integrate import quad

    def fun(g, r, n):
        return np.exp(((n - 2) / 2) * np.log(1 + g)
                      + (-(n - 1) / 2) * np.log(1 + (1 - r**2) * g)
                      + (-3 / 2) * np.log(g)
                      - n / (2 * g))

    integr = quad(fun, 0, np.inf, args=(r, n))[0]
    bf10 = np.sqrt(n / 2) / gamma(1 / 2) * integr
    return _format_bf(bf10)
Bayes Factor of a Pearson correlation. Parameters ---------- r : float Pearson correlation coefficient n : int Sample size Returns ------- bf : str Bayes Factor (BF10). The Bayes Factor quantifies the evidence in favour of the alternative hypothesis. Notes ----- Adapted from a Matlab code found at https://github.com/anne-urai/Tools/blob/master/stats/BayesFactors/corrbf.m If you would like to compute the Bayes Factor directly from the raw data instead of from the correlation coefficient, use the :py:func:`pingouin.corr` function. The JZS Bayes Factor is approximated using the formula described in ref [1]_: .. math:: BF_{10} = \\frac{\\sqrt{n/2}}{\\gamma(1/2)}* \\int_{0}^{\\infty}e((n-2)/2)* log(1+g)+(-(n-1)/2)log(1+(1-r^2)*g)+(-3/2)log(g)-n/2g where **n** is the sample size and **r** is the Pearson correlation coefficient. References ---------- .. [1] Wetzels, R., Wagenmakers, E.-J., 2012. A default Bayesian hypothesis test for correlations and partial correlations. Psychon. Bull. Rev. 19, 1057–1064. https://doi.org/10.3758/s13423-012-0295-x Examples -------- Bayes Factor of a Pearson correlation >>> from pingouin import bayesfactor_pearson >>> bf = bayesfactor_pearson(0.6, 20) >>> print("Bayes Factor: %s" % bf) Bayes Factor: 8.221
225
def find_by_any(self, identifier, how):
    if "i" in how:
        match = self.find_by_id(identifier)
        if match:
            return match
    if "l" in how:
        match = self.find_by_localpath(identifier)
        if match:
            return match
    if "c" in how:
        match = self.find_by_canonical(identifier)
        if match:
            return match
how should be a string with any or all of the characters "ilc"
226
def initialize_repository(path, spor_dir='.spor'):
    path = pathlib.Path(path)
    spor_path = path / spor_dir
    if spor_path.exists():
        raise ValueError('spor directory already exists: {}'.format(spor_path))
    spor_path.mkdir()
    return Repository(path, spor_dir)
Initialize a spor repository in `path` if one doesn't already exist. Args: path: Path to any file or directory within the repository. spor_dir: The name of the directory containing spor data. Returns: A `Repository` instance. Raises: ValueError: A repository already exists at `path`.
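Hypothetical usage, assuming the default data directory is named .spor (the default literal was lost in this copy):

repo = initialize_repository('/tmp/project')  # creates /tmp/project/.spor
initialize_repository('/tmp/project')         # raises ValueError: already exists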
227
def close(self):
    if self.ipmi_session:
        self.ipmi_session.unregister_keepalive(self.keepaliveid)
    if self.activated:
        try:
            self.ipmi_session.raw_command(netfn=6, command=0x49,
                                          data=(1, 1, 0, 0, 0, 0))
        except exc.IpmiException:
            pass
Shut down an SOL session.
228
def func_str(func, args=[], kwargs={}, type_aliases=[], packed=False,
             packkw=None, truncate=False):
    import utool as ut
    truncatekw = {}
    argrepr_list = ([] if args is None else
                    ut.get_itemstr_list(args, nl=False, truncate=truncate,
                                        truncatekw=truncatekw))
    kwrepr_list = ([] if kwargs is None else
                   ut.dict_itemstr_list(kwargs, explicit=True, nl=False,
                                        truncate=truncate, truncatekw=truncatekw))
    repr_list = argrepr_list + kwrepr_list
    argskwargs_str = ', '.join(repr_list)
    _str = '%s(%s)' % (meta_util_six.get_funcname(func), argskwargs_str)
    if packed:
        packkw_ = dict(textwidth=80, nlprefix='...', break_words=False)  # nlprefix literal lost in this copy
        if packkw is not None:
            packkw_.update(packkw)
        _str = packstr(_str, **packkw_)
    return _str
string representation of function definition Returns: str: a representation of func with args, kwargs, and type_aliases Args: func (function): args (list): argument values (default = []) kwargs (dict): kwargs values (default = {}) type_aliases (list): (default = []) packed (bool): (default = False) packkw (None): (default = None) Returns: str: func_str CommandLine: python -m utool.util_str --exec-func_str Example: >>> # ENABLE_DOCTEST >>> from utool.util_str import * # NOQA >>> func = byte_str >>> args = [1024, 'MB'] >>> kwargs = dict(precision=2) >>> type_aliases = [] >>> packed = False >>> packkw = None >>> _str = func_str(func, args, kwargs, type_aliases, packed, packkw) >>> result = _str >>> print(result) byte_str(1024, 'MB', precision=2)
229
def valid_flows_array(catchment):
    return np.array([record.flow for record in catchment.amax_records
                     if record.flag == 0])
Return array of valid flows (i.e. excluding rejected years etc) :param catchment: gauged catchment with amax_records set :type catchment: :class:`floodestimation.entities.Catchment` :return: 1D array of flow values :rtype: :class:`numpy.ndarray`
230
def vars(self):
    return (self.independent_vars + self.dependent_vars
            + [self.sigmas[var] for var in self.dependent_vars])
:return: Returns a list of independent, dependent and sigma variables, in that order.
231
def add_directories(self, directories, except_blacklisted=True):
    directories = util.to_absolute_paths(directories)
    if except_blacklisted:
        directories = self._remove_blacklisted(directories)
    self.plugin_directories.update(directories)
Adds `directories` to the set of plugin directories. `directories` may be either a single object or an iterable. `directories` can be relative paths, but will be converted into absolute paths based on the current working directory. If `except_blacklisted` is `True`, all `directories` in `self.blacklisted_directories` will be removed.
232
def stream(self, model, position):
    validate_not_abstract(model)
    if not model.Meta.stream or not model.Meta.stream.get("arn"):
        raise InvalidStream("{!r} does not have a stream arn".format(model))
    stream = Stream(model=model, engine=self)
    stream.move_to(position=position)
    return stream
Create a :class:`~bloop.stream.Stream` that provides approximate chronological ordering. .. code-block:: pycon # Create a user so we have a record >>> engine = Engine() >>> user = User(id=3, email="user@domain.com") >>> engine.save(user) >>> user.email = "admin@domain.com" >>> engine.save(user) # First record lacks an "old" value since it's an insert >>> stream = engine.stream(User, "trim_horizon") >>> next(stream) {'key': None, 'old': None, 'new': User(email='user@domain.com', id=3, verified=None), 'meta': { 'created_at': datetime.datetime(2016, 10, 23, ...), 'event': { 'id': '3fe6d339b7cb19a1474b3d853972c12a', 'type': 'insert', 'version': '1.1'}, 'sequence_number': '700000000007366876916'} } :param model: The model to stream records from. :param position: "trim_horizon", "latest", a stream token, or a :class:`datetime.datetime`. :return: An iterator for records in all shards. :rtype: :class:`~bloop.stream.Stream` :raises bloop.exceptions.InvalidStream: if the model does not have a stream.
233
def keypoint_scale(keypoint, scale_x, scale_y, **params):
    x, y, a, s = keypoint
    return [x * scale_x, y * scale_y, a, s * max(scale_x, scale_y)]
Scales a keypoint by scale_x and scale_y.
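With the function above in scope, the (x, y, angle, scale) keypoint layout behaves like this:

kp = [10.0, 20.0, 0.5, 2.0]          # x, y, angle, scale
print(keypoint_scale(kp, 2.0, 0.5))  # [20.0, 10.0, 0.5, 4.0]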
234
def get_backend(alias):
    if alias not in settings.BACKENDS:
        raise VCSError("Given alias %s is not recognized! Allowed aliases:\n"
                       "%s" % (alias, pformat(settings.BACKENDS.keys())))
    backend_path = settings.BACKENDS[alias]
    klass = import_class(backend_path)
    return klass
Returns ``Repository`` class identified by the given alias or raises VCSError if alias is not recognized or backend class cannot be imported.
235
def is_fnmatch_regex(string):
    is_regex = False
    regex_chars = ['*', '?', '[']
    for c in regex_chars:
        if string.find(c) > -1:
            return True
    return is_regex
Returns True if the given string is considered a fnmatch regular expression, False otherwise. It will look for the fnmatch special characters *, ? and [. :param string: str
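With the function above in scope (and assuming the restored *, ?, [ wildcard set):

print(is_fnmatch_regex('*.py'))      # True
print(is_fnmatch_regex('setup.py'))  # False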
236
def get_instance(self, payload):
    return SessionInstance(self._version, payload,
                           service_sid=self._solution['service_sid'])
Build an instance of SessionInstance :param dict payload: Payload response from the API :returns: twilio.rest.proxy.v1.service.session.SessionInstance :rtype: twilio.rest.proxy.v1.service.session.SessionInstance
237
async def main():
    redis = await create_pool(RedisSettings())
    job = await redis.enqueue_job('some_task')  # hypothetical task name; original literal lost
    print(await job.status())
    print(await job.result(timeout=5))
> 68362958a244465b9be909db4b7b5ab4 (or whatever)
238
def _endCodeIfNeeded(line, inCodeBlock):
    assert isinstance(line, str)
    if inCodeBlock:
        line = '...{0}{1}'.format(linesep, line.rstrip())  # '...' stands in for the lost end-code marker
        inCodeBlock = False
    return line, inCodeBlock
Simple routine to append end code marker if needed.
239
def configure(self, cfg, handler, path=""):
    for name, attr in handler.attributes():
        if cfg.get(name) is not None:
            continue
        if attr.expected_type not in [list, dict]:
            cfg[name] = self.set(handler, attr, name, path, cfg)
        elif attr.default is None and not hasattr(handler, "configure_%s" % name):
            self.action_required.append(
                ("%s.%s: %s" % (path, name, attr.help_text)).strip("."))

    for name, attr in handler.attributes():
        if cfg.get(name) is not None:
            continue
        if hasattr(handler, "configure_%s" % name):
            fn = getattr(handler, "configure_%s" % name)
            fn(self, cfg, "%s.%s" % (path, name))
        if attr.expected_type in [list, dict] and not cfg.get(name):
            try:
                del cfg[name]
            except KeyError:
                pass
Start configuration process for the provided handler Args: cfg (dict): config container handler (config.Handler class): config handler to use path (str): current path in the configuration progress
240
def keep_entry_range(entry, lows, highs, converter, regex):
    return any(
        low <= converter(num) <= high
        for num in regex.findall(entry)
        for low, high in zip(lows, highs)
    )
Check if an entry falls into a desired range. Every number in the entry will be extracted using *regex*, if any are within a given low to high range the entry will be kept. Parameters ---------- entry : str lows : iterable Collection of low values against which to compare the entry. highs : iterable Collection of high values against which to compare the entry. converter : callable Function to convert a string to a number. regex : regex object Regular expression to locate numbers in a string. Returns ------- True if the entry should be kept, False otherwise.
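With the function above in scope, a quick demonstration (the regex and converter here are just the obvious integer versions):

import re

num_re = re.compile(r'\d+')
print(keep_entry_range('file-17.txt', [10], [20], int, num_re))  # True
print(keep_entry_range('file-17.txt', [1], [9], int, num_re))    # False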
241
def card_names_and_ids(self):
    b = Board(self.client, self.board_id)
    cards = b.getCards()
    card_names_and_ids = [(unidecode(c.name), c.id) for c in cards]
    return card_names_and_ids
Returns [(name, id), ...] pairs of cards from current board
242
def close(self):
    if self._buffer:
        self.flush()
    self._handle.write(_bgzf_eof)
    self._handle.flush()
    self._handle.close()
Flush data, write 28 bytes BGZF EOF marker, and close BGZF file. samtools will look for a magic EOF marker, just a 28 byte empty BGZF block, and if it is missing warns the BAM file may be truncated. In addition to samtools writing this block, so too does bgzip - so this implementation does too.
243
def get_sds_by_ip(self, ip):
    if self.conn.is_ip_addr(ip):
        for sds in self.sds:
            for sdsIp in sds.ipList:
                if sdsIp == ip:
                    return sds
        raise KeyError("SDS of that name not found")
    else:
        raise ValueError("Malformed IP address - get_sds_by_ip()")
Get ScaleIO SDS object by its ip address :param ip: IP address of SDS :return: ScaleIO SDS object :raise KeyError: No SDS with specified ip found :rtype: SDS object
244
def mass_fraction_within_radius(self, kwargs_lens, center_x, center_y, theta_E, numPix=100):
    x_grid, y_grid = util.make_grid(numPix=numPix, deltapix=2. * theta_E / numPix)
    x_grid += center_x
    y_grid += center_y
    mask = mask_util.mask_sphere(x_grid, y_grid, center_x, center_y, theta_E)
    kappa_list = []
    for i in range(len(kwargs_lens)):
        kappa = self.LensModel.kappa(x_grid, y_grid, kwargs_lens, k=i)
        kappa_mean = np.sum(kappa * mask) / np.sum(mask)
        kappa_list.append(kappa_mean)
    return kappa_list
computes the mean convergence of all the different lens model components within a spherical aperture :param kwargs_lens: lens model keyword argument list :param center_x: center of the aperture :param center_y: center of the aperture :param theta_E: radius of aperture :return: list of average convergences for all the model components
245
def isometric_load(script, AbsName="TEMP3D.abs"):
    # The filter XML literals were lost in this copy; only the AbsName
    # substitution and the overall structure survive.
    filter_xml = ''.join([
        '...',
        '%s' % AbsName,
        '...',
    ])
    util.write_filter(script, filter_xml)
    return None
Isometric parameterization: Load Abstract Domain
246
def patches(self, dwn, install, comp_sum, uncomp_sum):
    dwnp, installp, comp_sump, uncomp_sump = ([] for i in range(4))
    for d, i, c, u in zip(dwn, install, comp_sum, uncomp_sum):
        if "_slack" + slack_ver() in i:
            dwnp.append(d)
            dwn.remove(d)
            installp.append(i)
            install.remove(i)
            comp_sump.append(c)
            comp_sum.remove(c)
            uncomp_sump.append(u)
            uncomp_sum.remove(u)
    if "--patches" in self.flag:
        return dwnp, installp, comp_sump, uncomp_sump
    return dwn, install, comp_sum, uncomp_sum
Separates packages from the patches/ directory.
247
def inject_url_defaults(self, endpoint, values):
    funcs = self.url_default_functions.get(None, ())
    if '.' in endpoint:
        bp = endpoint.rsplit('.', 1)[0]
        funcs = chain(funcs, self.url_default_functions.get(bp, ()))
    for func in funcs:
        func(endpoint, values)
Injects the URL defaults for the given endpoint directly into the values dictionary passed. This is used internally and automatically called on URL building. .. versionadded:: 0.7
248
async def forget(request, response):
    identity_policy = request.config_dict.get(IDENTITY_KEY)
    if identity_policy is None:
        text = ("Security subsystem is not initialized, "
                "call aiohttp_security.setup(...) first")
        raise web.HTTPInternalServerError(reason=text, text=text)
    await identity_policy.forget(request, response)
Forget previously remembered identity. Usually it clears cookie or server-side storage to forget user session.
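This matches aiohttp_security's public forget() helper; assuming that library, a logout handler looks like:

from aiohttp import web
from aiohttp_security import forget

async def handler_logout(request):
    response = web.HTTPFound('/')
    await forget(request, response)
    return response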
249
def _parse_array(stream):
    logger.debug("parsing array")
    arr = []
    while True:
        c = stream.read(1)
        if c in _GDB_MI_VALUE_START_CHARS:
            stream.seek(-1)
            val = _parse_val(stream)
            arr.append(val)
        elif c in _WHITESPACE:
            pass
        elif c == ",":
            pass
        elif c == "]":
            break
    logger.debug("parsed array:")
    logger.debug("%s", fmt_green(arr))
    return arr
Parse an array, stream should be passed the initial [ returns: Parsed array
250
def _classify_move_register(self, regs_init, regs_fini, mem_fini, written_regs, read_regs):
    matches = []
    regs_init_inv = self._invert_dictionary(regs_init)
    for dst_reg, dst_val in regs_fini.items():
        if dst_reg not in written_regs:
            continue
        for src_reg in regs_init_inv.get(dst_val, []):
            if src_reg not in read_regs:
                continue
            if self._arch_regs_size[src_reg] != self._arch_regs_size[dst_reg]:
                continue
            if src_reg == dst_reg:
                continue
            if regs_init[dst_reg] == regs_init[src_reg]:
                continue
            src_reg_ir = ReilRegisterOperand(src_reg, self._arch_regs_size[src_reg])
            dst_reg_ir = ReilRegisterOperand(dst_reg, self._arch_regs_size[dst_reg])
            matches.append({"src": [src_reg_ir], "dst": [dst_reg_ir]})
    return matches
Classify move-register gadgets.
251
def parse(cls, args):
    try:
        (options, args) = cls.optparser.parse_args(args)
        if options.db_tap_id is None:
            raise ParseError("db_tap_id is required", cls.optparser.format_help())
        if options.query is None and options.script_location is None:
            raise ParseError("query or script location is required",
                             cls.optparser.format_help())
        if options.script_location is not None:
            if options.query is not None:
                raise ParseError("Both query and script_location cannot be specified",
                                 cls.optparser.format_help())
            if ((options.script_location.find("s3://") != 0) and
                    (options.script_location.find("s3n://") != 0)):
                try:
                    q = open(options.script_location).read()
                except IOError as e:
                    raise ParseError("Unable to open script location: %s" % str(e),
                                     cls.optparser.format_help())
                options.script_location = None
                options.query = q
    except OptionParsingError as e:
        raise ParseError(e.msg, cls.optparser.format_help())
    except OptionParsingExit as e:
        return None
    if options.macros is not None:
        options.macros = json.loads(options.macros)
    v = vars(options)
    v["command_type"] = "DbTapQueryCommand"
    return v
Parse command line arguments to construct a dictionary of command parameters that can be used to create a command Args: `args`: sequence of arguments Returns: Dictionary that can be used in create method Raises: ParseError: when the arguments are not correct
252
def dump_artifact(obj, path, filename=None):
    p_sha1 = None
    if not os.path.exists(path):
        os.makedirs(path, mode=0o700)
    else:
        p_sha1 = hashlib.sha1()
        p_sha1.update(obj.encode(encoding='utf-8'))
    if filename is None:
        fd, fn = tempfile.mkstemp(dir=path)
    else:
        fn = os.path.join(path, filename)
    if os.path.exists(fn):
        c_sha1 = hashlib.sha1()
        with open(fn) as f:
            contents = f.read()
        c_sha1.update(contents.encode(encoding='utf-8'))
    if not os.path.exists(fn) or p_sha1.hexdigest() != c_sha1.hexdigest():
        lock_fp = os.path.join(path, '.lock')  # lock-file name reconstructed; original literal lost
        lock_fd = os.open(lock_fp, os.O_RDWR | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR)
        fcntl.lockf(lock_fd, fcntl.LOCK_EX)
        try:
            with open(fn, 'w') as f:
                os.chmod(fn, stat.S_IRUSR)
                f.write(str(obj))
        finally:
            fcntl.lockf(lock_fd, fcntl.LOCK_UN)
            os.close(lock_fd)
            os.remove(lock_fp)
    return fn
Write the artifact to disk at the specified path Args: obj (string): The string object to be dumped to disk in the specified path. The artifact filename will be automatically created path (string): The full path to the artifacts data directory. filename (string, optional): The name of file to write the artifact to. If the filename is not provided, then one will be generated. Returns: string: The full path filename for the artifact that was generated
253
def parse_args(self, argv=None):
    arg_input = shlex.split(argv) if argv is not None else None
    self.get_or_create_session()
    return self.argparser.parse_args(arg_input)
Return an argparse.Namespace of the argv string or sys.argv if argv is None.
254
def _get_base_url(request):
    if request.is_secure():
        base_url = 'https://%s'
    else:
        base_url = 'http://%s'
    base_url %= request.META['HTTP_HOST']
    return base_url
Construct a base URL, given a request object. This comprises the protocol prefix (http:// or https://) and the host, which can include the port number. For example: http://www.openquake.org or https://www.openquake.org:8000.
255
def split_certificate(certificate_path, destination_folder, password=None):
    try:
        p = subprocess.Popen(
            ["openssl", "version"],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        sout, serr = p.communicate()
        openssl_executable_version = sout.decode().lower()
        if not (openssl_executable_version.startswith("openssl") or
                openssl_executable_version.startswith("libressl")):
            raise BankIDError(
                "OpenSSL executable could not be found. "
                "Splitting cannot be performed."
            )
        openssl_executable = "openssl"
    except Exception:
        p = subprocess.Popen(
            ["C:\\Program Files\\Git\\mingw64\\bin\\openssl.exe", "version"],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        )
        sout, serr = p.communicate()
        if not sout.decode().lower().startswith("openssl"):
            raise BankIDError(
                "OpenSSL executable could not be found. "
                "Splitting cannot be performed."
            )
        openssl_executable = "C:\\Program Files\\Git\\mingw64\\bin\\openssl.exe"

    if not os.path.exists(os.path.abspath(os.path.expanduser(destination_folder))):
        os.makedirs(os.path.abspath(os.path.expanduser(destination_folder)))

    out_cert_path = os.path.join(
        os.path.abspath(os.path.expanduser(destination_folder)), "certificate.pem"
    )
    out_key_path = os.path.join(
        os.path.abspath(os.path.expanduser(destination_folder)), "key.pem"
    )

    pipeline_1 = [
        openssl_executable, "pkcs12",
        "-in", "{0}".format(certificate_path),
        "-passin" if password is not None else "",
        "pass:{0}".format(password) if password is not None else "",
        "-out", "{0}".format(out_cert_path),
        "-clcerts", "-nokeys",
    ]
    p = subprocess.Popen(
        list(filter(None, pipeline_1)),
        stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    p.communicate()

    pipeline_2 = [
        openssl_executable, "pkcs12",
        "-in", "{0}".format(certificate_path),
        "-passin" if password is not None else "",
        "pass:{0}".format(password) if password is not None else "",
        "-out", "{0}".format(out_key_path),
        "-nocerts", "-nodes",
    ]
    p = subprocess.Popen(
        list(filter(None, pipeline_2)),
        stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    p.communicate()

    return out_cert_path, out_key_path
Splits a PKCS12 certificate into Base64-encoded DER certificate and key. This method splits a potentially password-protected `PKCS12 <https://en.wikipedia.org/wiki/PKCS_12>`_ certificate (format ``.p12`` or ``.pfx``) into one certificate and one key part, both in `pem <https://en.wikipedia.org/wiki/X.509#Certificate_filename_extensions>`_ format. :returns: Tuple of certificate and key string data. :rtype: tuple
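With the function above in scope (it shells out to openssl pkcs12), splitting a test certificate might look like:

cert_path, key_path = split_certificate(
    'bankid_cert.p12', 'split_output', password='secret')
print(cert_path)  # .../split_output/certificate.pem
print(key_path)   # .../split_output/key.pem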
256
def map_sprinkler(self, sx, sy, watered_crop='C', watered_field='W',
                  dry_field='.', dry_crop='c'):
    # The original cell-character literals were lost in this copy; the
    # single-character defaults above are illustrative placeholders.
    maplist = [list(s) for s in self.maplist]
    for y, row in enumerate(maplist):
        for x, cell in enumerate(row):
            if sprinkler_reaches_cell(x, y, sx, sy, self.r):
                cell = watered_crop if cell == 'c' else watered_field
            else:
                cell = dry_crop if cell == 'c' else dry_field
            maplist[y][x] = cell
    maplist[sy][sx] = 'S'
    return '\n'.join([''.join(row) for row in maplist])
Return a version of the ASCII map showing reached crop cells.
257
def search(self, cond):
    if cond in self._query_cache:
        return self._query_cache.get(cond, [])[:]
    docs = [doc for doc in self.all() if cond(doc)]
    self._query_cache[cond] = docs
    return docs[:]
Search for all documents matching a 'where' cond. :param cond: the condition to check against :type cond: Query :returns: list of matching documents :rtype: list[Element]
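The Query/Element vocabulary suggests this is TinyDB; assuming that, cached search usage looks like:

from tinydb import TinyDB, Query

db = TinyDB('db.json')
db.insert({'name': 'john', 'age': 22})
User = Query()
print(db.search(User.age > 21))  # [{'name': 'john', 'age': 22}]
print(db.search(User.age > 21))  # second call is served from the query cache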
258
def diff_archives(archive1, archive2, verbosity=0, interactive=True):
    util.check_existing_filename(archive1)
    util.check_existing_filename(archive2)
    if verbosity >= 0:
        util.log_info("Comparing %s with %s ..." % (archive1, archive2))
    res = _diff_archives(archive1, archive2, verbosity=verbosity,
                         interactive=interactive)
    if res == 0 and verbosity >= 0:
        util.log_info("... no differences found.")
Print differences between two archives.
259
def get_question_mdata():
    # Every mapping key below was a string literal lost in this copy; '...'
    # preserves the shape of the default mdata map without asserting the keys.
    return {
        '...': {
            '...': {
                '...': '...',
                '...': str(DEFAULT_LANGUAGE_TYPE),
                '...': str(DEFAULT_SCRIPT_TYPE),
                '...': str(DEFAULT_FORMAT_TYPE),
            },
            '...': False,
            '...': [],
            '...': '...',
        },
    }
Return default mdata map for Question
260
def _remove_call(self, real_time, call):
    try:
        (delayed_call, calls) = self._buckets[real_time]
    except KeyError:
        return
    calls.remove(call)
    if not calls:
        del self._buckets[real_time]
        delayed_call.cancel()
Internal helper. Removes a (possibly still pending) call from a bucket. It is *not* an error if the bucket is gone (e.g. the call has already happened).
261
def install_python_package(self, arch, name=None, env=None, is_dir=True):
    if name is None:
        name = self.name
    if env is None:
        env = self.get_recipe_env(arch)
    info('Installing {} into site-packages'.format(self.name))
    with current_directory(self.get_build_dir(arch.arch)):
        hostpython = sh.Command(self.hostpython_location)
        if self.ctx.python_recipe.name != '...':  # recipe-name literal lost in this copy
            hpenv = env.copy()
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    '--root={}'.format(self.ctx.get_python_install_dir()),
                    '--install-lib=.', _env=hpenv, *self.setup_extra_args)
        elif self.call_hostpython_via_targetpython:
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    _env=env, *self.setup_extra_args)
        else:
            hppath = join(dirname(self.hostpython_location), 'Lib', 'site-packages')
            hpenv = env.copy()
            if 'PYTHONPATH' in hpenv:
                hpenv['PYTHONPATH'] = ':'.join([hppath] + hpenv['PYTHONPATH'].split(':'))
            else:
                hpenv['PYTHONPATH'] = hppath
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    '--root={}'.format(self.ctx.get_python_install_dir()),
                    '--install-lib=.', _env=hpenv, *self.setup_extra_args)
    if self.install_in_hostpython:
        self.install_hostpython_package(arch)
Automate the installation of a Python package (or a cython package where the cython components are pre-built).
262
def walk_links(directory, prefix='', linkbase=None):
    links = {}
    try:
        for child in os.listdir(directory):
            fullname = os.path.join(directory, child)
            if os.path.islink(fullname):
                link_path = os.path.normpath(
                    os.path.join(directory, os.readlink(fullname)))
                if linkbase:
                    link_path = os.path.relpath(link_path, linkbase)
                links[os.path.join(prefix, child)] = link_path
            elif os.path.isdir(fullname):
                links.update(walk_links(fullname,
                                        prefix=os.path.join(prefix, child),
                                        linkbase=linkbase))
    except OSError as err:
        if err.errno != 2:
            raise
    return links
Return all links contained in directory (or any sub directory).
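With the function above in scope, on a POSIX system:

import os

os.makedirs('demo/sub', exist_ok=True)
if not os.path.islink('demo/sub/hosts-link'):
    os.symlink('/etc/hosts', 'demo/sub/hosts-link')
print(walk_links('demo'))  # {'sub/hosts-link': '/etc/hosts'}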
263
def _add_cytomine_cli_args(argparse):
    argparse.add_argument(*_cytomine_parameter_name_synonyms("host"),
                          dest="host", help="The Cytomine host (without protocol).",
                          required=True)
    argparse.add_argument(*_cytomine_parameter_name_synonyms("public_key"),
                          dest="public_key", help="The Cytomine public key.",
                          required=True)
    argparse.add_argument(*_cytomine_parameter_name_synonyms("private_key"),
                          dest="private_key", help="The Cytomine private key.",
                          required=True)
    argparse.add_argument("--verbose", "--cytomine_verbose",
                          dest="verbose", type=int, default=logging.INFO,
                          help="The verbosity level of the client (as an integer value).")
    argparse.add_argument("-l", "--log_level", "--cytomine_log_level",
                          dest="log_level",
                          choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                          help="The logging level of the client (as a string value)")
    return argparse
Add cytomine CLI args to the ArgumentParser object: cytomine_host, cytomine_public_key, cytomine_private_key and cytomine_verbose. Parameters ---------- argparse: ArgumentParser The argument parser Return ------ argparse: ArgumentParser The argument parser (same object as parameter)
264
def clean_conf_folder(self, locale):
    dirname = self.configuration.get_messages_dir(locale)
    dirname.removedirs_p()
Remove the configuration directory for `locale`
265
def create_user(username):
    "Create a new user."
    password = prompt_pass("Enter password")
    user = User(username=username, password=password)
    db.session.add(user)
    db.session.commit()
Create a new user.
266
def admin_tools_render_menu_css(context, menu=None):
    if menu is None:
        menu = get_admin_menu(context)
    context.update({
        '...': '...',  # context keys lost in this copy
        '...': menu.Media.css,
    })
    return context
Template tag that renders the menu css files. It takes an optional ``Menu`` instance as unique argument; if not given, the menu will be retrieved with the ``get_admin_menu`` function.
267
def isConnected(self, signal, slot):
    sig_calls = self._callbacks.get(signal, [])
    for callback in sig_calls:
        if callback == slot:
            return True
    return False
Returns whether the given signal is connected to the inputted slot. :param signal | <variant> slot | <callable> :return <bool> | is connected
268
def community_post_comment_down_create(self, post_id, id, data, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/help_center/votes#create-vote"
    api_path = "/api/v2/community/posts/{post_id}/comments/{id}/down.json"
    api_path = api_path.format(post_id=post_id, id=id)
    return self.call(api_path, method="POST", data=data, **kwargs)
https://developer.zendesk.com/rest_api/docs/help_center/votes#create-vote
269
def put_multi(entities):
    if not entities:
        return []
    adapter, requests = None, []
    for entity in entities:
        if adapter is None:
            adapter = entity._adapter
        entity.pre_put_hook()
        requests.append(PutRequest(entity.key, entity.unindexed_properties, entity))
    keys = adapter.put_multi(requests)
    for key, entity in zip(keys, entities):
        entity.key = key
        entity.post_put_hook()
    return entities
Persist a set of entities to Datastore. Note: This uses the adapter that is tied to the first Entity in the list. If the entities have disparate adapters this function may behave in unexpected ways. Warning: You must pass a **list** and not a generator or some other kind of iterable to this function as it has to iterate over the list of entities multiple times. Parameters: entities(list[Model]): The list of entities to persist. Raises: RuntimeError: If the given set of models use a disparate set of adapters. Returns: list[Model]: The list of persisted entities.
270
def get_bel_resource_hash(location, hash_function=None):
    resource = get_bel_resource(location)
    return hash_names(
        resource['Values'],
        hash_function=hash_function
    )
Get a BEL resource file and returns its semantic hash. :param str location: URL of a resource :param hash_function: A hash function or list of hash functions, like :func:`hashlib.md5` or :code:`hashlib.sha512` :return: The hexadecimal digest of the hash of the values in the resource :rtype: str :raises: pybel.resources.exc.ResourceError
271
def installed(name, pkgs=None, pip_bin=None, requirements=None, bin_env=None,
              use_wheel=False, no_use_wheel=False, log=None, proxy=None,
              timeout=None, repo=None, editable=None, find_links=None,
              index_url=None, extra_index_url=None, no_index=False,
              mirrors=None, build=None, target=None, download=None,
              download_cache=None, source=None, upgrade=False,
              force_reinstall=False, ignore_installed=False,
              exists_action=None, no_deps=False, no_install=False,
              no_download=False, install_options=None, global_options=None,
              user=None, cwd=None, pre_releases=False, cert=None,
              allow_all_external=False, allow_external=None,
              allow_unverified=None, process_dependency_links=False,
              env_vars=None, use_vt=False, trusted_host=None,
              no_cache_dir=False, cache_dir=None, no_binary=None,
              extra_args=None, **kwargs):
    # Body unrecoverable from this copy (string literals stripped and code
    # interleaved with the docstring); elided. The full docstring follows.
    ...
Make sure the package is installed name The name of the python package to install. You can also specify version numbers here using the standard operators ``==, >=, <=``. If ``requirements`` is given, this parameter will be ignored. Example: .. code-block:: yaml django: pip.installed: - name: django >= 1.6, <= 1.7 - require: - pkg: python-pip This will install the latest Django version greater than 1.6 but less than 1.7. requirements Path to a pip requirements file. If the path begins with salt:// the file will be transferred from the master file server. user The user under which to run pip use_wheel : False Prefer wheel archives (requires pip>=1.4) no_use_wheel : False Force to not use wheel archives (requires pip>=1.4) no_binary Force to not use binary packages (requires pip >= 7.0.0) Accepts either :all: to disable all binary packages, :none: to empty the set, or a list of one or more packages Example: .. code-block:: yaml django: pip.installed: - no_binary: ':all:' flask: pip.installed: - no_binary: - itsdangerous - click log Log file where a complete (maximum verbosity) record will be kept proxy Specify a proxy in the form user:passwd@proxy.server:port. Note that the user:password@ is optional and required only if you are behind an authenticated proxy. If you provide user@proxy.server:port then you will be prompted for a password. timeout Set the socket timeout (default 15 seconds) editable install something editable (i.e. git+https://github.com/worldcompany/djangoembed.git#egg=djangoembed) find_links URL to look for packages at index_url Base URL of Python Package Index extra_index_url Extra URLs of package indexes to use in addition to ``index_url`` no_index Ignore package index mirrors Specific mirror URL(s) to query (automatically adds --use-mirrors) build Unpack packages into ``build`` dir target Install packages into ``target`` dir download Download packages into ``download`` instead of installing them download_cache Cache downloaded packages in ``download_cache`` dir source Check out ``editable`` packages into ``source`` dir upgrade Upgrade all packages to the newest available version force_reinstall When upgrading, reinstall all packages even if they are already up-to-date. ignore_installed Ignore the installed packages (reinstalling instead) exists_action Default action when a path already exists: (s)witch, (i)gnore, (w)ipe, (b)ackup no_deps Ignore package dependencies no_install Download and unpack all packages, but don't actually install them no_cache_dir: Disable the cache. cwd Current working directory to run pip from pre_releases Include pre-releases in the available versions cert Provide a path to an alternate CA bundle allow_all_external Allow the installation of all externally hosted files allow_external Allow the installation of externally hosted files (comma separated list) allow_unverified Allow the installation of insecure and unverifiable files (comma separated list) process_dependency_links Enable the processing of dependency links bin_env : None Absolute path to a virtual environment directory or absolute path to a pip executable. The example below assumes a virtual environment has been created at ``/foo/.virtualenvs/bar``. env_vars Add or modify environment variables. Useful for tweaking build steps, such as specifying INCLUDE or LIBRARY paths in Makefiles, build scripts or compiler calls. This must be in the form of a dictionary or a mapping. Example: ..
code-block:: yaml django: pip.installed: - name: django_app - env_vars: CUSTOM_PATH: /opt/django_app VERBOSE: True use_vt Use VT terminal emulation (see output while installing) trusted_host Mark this host as trusted, even though it does not have valid or any HTTPS. Example: .. code-block:: yaml django: pip.installed: - name: django >= 1.6, <= 1.7 - bin_env: /foo/.virtualenvs/bar - require: - pkg: python-pip Or Example: .. code-block:: yaml django: pip.installed: - name: django >= 1.6, <= 1.7 - bin_env: /foo/.virtualenvs/bar/bin/pip - require: - pkg: python-pip .. admonition:: Attention The following arguments are deprecated, do not use. pip_bin : None Deprecated, use ``bin_env`` .. versionchanged:: 0.17.0 ``use_wheel`` option added. install_options Extra arguments to be supplied to the setup.py install command. If you are using an option with a directory path, be sure to use absolute path. Example: .. code-block:: yaml django: pip.installed: - name: django - install_options: - --prefix=/blah - require: - pkg: python-pip global_options Extra global options to be supplied to the setup.py call before the install command. .. versionadded:: 2014.1.3 .. admonition:: Attention As of Salt 0.17.0 the pip state **needs** an importable pip module. This usually means having the system's pip package installed or running Salt from an active `virtualenv`_. The reason for this requirement is because ``pip`` already does a pretty good job parsing its own requirements. It makes no sense for Salt to do ``pip`` requirements parsing and validation before passing them to the ``pip`` library. It's functionality duplication and it's more error prone. .. admonition:: Attention Please set ``reload_modules: True`` to have the salt minion import this module after installation. Example: .. code-block:: yaml pyopenssl: pip.installed: - name: pyOpenSSL - reload_modules: True - exists_action: i extra_args pip keyword and positional arguments not yet implemented in salt .. code-block:: yaml pandas: pip.installed: - name: pandas - extra_args: - --latest-pip-kwarg: param - --latest-pip-arg .. warning:: If unsupported options are passed here that are not supported in a minion's version of pip, a `No such option error` will be thrown. .. _`virtualenv`: http://www.virtualenv.org/en/latest/
272
def _create_bv_circuit(self, bit_map: Dict[str, str]) -> Program:
    unitary, _ = self._compute_unitary_oracle_matrix(bit_map)
    full_bv_circuit = Program()
    full_bv_circuit.defgate("BV-ORACLE", unitary)
    full_bv_circuit.inst(X(self.ancilla), H(self.ancilla))
    full_bv_circuit.inst([H(i) for i in self.computational_qubits])
    full_bv_circuit.inst(
        tuple(["BV-ORACLE"] + sorted(self.computational_qubits + [self.ancilla],
                                     reverse=True)))
    full_bv_circuit.inst([H(i) for i in self.computational_qubits])
    return full_bv_circuit
Implementation of the Bernstein-Vazirani Algorithm. Given a list of input qubits and an ancilla bit, all initially in the :math:`\\vert 0\\rangle` state, create a program that can find :math:`\\vec{a}` with one query to the given oracle. :param Dict[String, String] bit_map: truth-table of a function for Bernstein-Vazirani with the keys being all possible bit vectors strings and the values being the function values :rtype: Program
273
def register(self, model_cls):
    assert issubclass(model_cls, peewee.Model)
    assert not hasattr(model_cls._meta, '...')  # attribute name lost in this copy
    if model_cls in self:
        raise RuntimeError("Model already registered")
    self.append(model_cls)
    model_cls._meta.database = self.dbm
    return model_cls
Register model(s) with app
274
def DisplayGetter(accessor, *args, **kwargs):
    short_description = get_pretty_name(accessor)
    accessor = 'get_%s_display' % accessor
    getter = Getter(accessor, *args, **kwargs)
    getter.short_description = short_description
    return getter
Returns a Getter that gets the display name for a model field with choices.
275
def start_state_manager_watches(self):
    Log.info("Start state manager watches")

    statemgr_config = StateMgrConfig()
    statemgr_config.set_state_locations(configloader.load_state_manager_locations(
        self.cluster, state_manager_config_file=self.state_manager_config_file,
        overrides={"heron.statemgr.connection.string": self.state_manager_connection}))
    try:
        self.state_managers = statemanagerfactory.get_all_state_managers(statemgr_config)
        for state_manager in self.state_managers:
            state_manager.start()
    except Exception as ex:
        Log.error("Found exception while initializing state managers: %s. Bailing out..." % ex)
        traceback.print_exc()
        sys.exit(1)

    def on_packing_plan_watch(state_manager, new_packing_plan):
        Log.debug("State watch triggered for PackingPlan update on shard %s. Existing: %s, New: %s"
                  % (self.shard, str(self.packing_plan), str(new_packing_plan)))
        if self.packing_plan != new_packing_plan:
            Log.info("PackingPlan change detected on shard %s, relaunching effected processes."
                     % self.shard)
            self.update_packing_plan(new_packing_plan)
            Log.info("Updating executor processes")
            self.launch()
        else:
            Log.info("State watch triggered for PackingPlan update but plan not changed so not relaunching.")

    for state_manager in self.state_managers:
        onPackingPlanWatch = functools.partial(on_packing_plan_watch, state_manager)
        state_manager.get_packing_plan(self.topology_name, onPackingPlanWatch)
        Log.info("Registered state watch for packing plan changes with state manager %s."
                 % str(state_manager))
Receive updates to the packing plan from the statemgrs and update processes as needed.
276
def merge_arena(self, mujoco_arena):
    self.arena = mujoco_arena
    self.bin_offset = mujoco_arena.bin_abs
    self.bin_size = mujoco_arena.table_full_size
    self.bin2_body = mujoco_arena.bin2_body
    self.merge(mujoco_arena)
Adds arena model to the MJCF model.
277
def ParseTable(table):
    precondition.AssertIterableType(table, dict)
    result = rdf_osquery.OsqueryTable()
    result.header = ParseHeader(table)
    for row in table:
        result.rows.append(ParseRow(result.header, row))
    return result
Parses table of osquery output. Args: table: A table in a "parsed JSON" representation. Returns: A parsed `rdf_osquery.OsqueryTable` instance.
278
def bestfit(self):
    try:
        import statsmodels.api as sm
    except ImportError:
        raise Exception("statsmodels is required: "
                        "please run "
                        "pip install statsmodels")
    x = pd.Series(list(range(1, len(self) + 1)), index=self.index)
    x = sm.add_constant(x)
    model = sm.OLS(self, x)
    fit = model.fit()
    vals = fit.params.values
    best_fit = fit.fittedvalues
    best_fit.formula = '%s + %s * x' % (vals[0], vals[1])  # intercept + slope; exact format string lost in this copy
    return best_fit
Returns a series with the bestfit values. Example: Series.bestfit() Returns: series The returned series contains a parameter called 'formula' which includes the string representation of the bestfit line.
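A hedged usage sketch, assuming bestfit() has been attached to pd.Series (cufflinks-style monkey-patching) and statsmodels is installed:

import pandas as pd
s = pd.Series([1.1, 2.0, 2.9, 4.2], index=pd.date_range("2020-01-01", periods=4))
line = s.bestfit()    # fitted values, same index as s
print(line.formula)   # e.g. '0.15 + 1.01 * x' (intercept, then slope)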
279
def rebuild_system(self, override=False, **kwargs):
    supercell_333 = create_supercell(self.system, **kwargs)
    discrete = discrete_molecules(self.system, rebuild=supercell_333)
    coordinates = np.array([], dtype=np.float64).reshape(0, 3)
    atom_ids = np.array([])
    elements = np.array([])
    for i in discrete:
        coordinates = np.concatenate([coordinates, i['coordinates']], axis=0)
        atom_ids = np.concatenate([atom_ids, i['atom_ids']], axis=0)
        elements = np.concatenate([elements, i['elements']], axis=0)
    rebuild_system = {
        'coordinates': coordinates,
        'atom_ids': atom_ids,
        'elements': elements,
    }
    if override is True:
        self.system.update(rebuild_system)
        return None
    else:
        return self.load_system(rebuild_system)
Rebuild molecules in molecular system. Parameters ---------- override : :class:`bool`, optional (default=False) If False the rebuild molecular system is returned as a new :class:`MolecularSystem`, if True, the current :class:`MolecularSystem` is modified.
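A hedged usage sketch of the two modes (molsys is a hypothetical MolecularSystem instance):

rebuilt = molsys.rebuild_system()      # returns a new MolecularSystem
molsys.rebuild_system(override=True)   # mutates molsys in place, returns None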
280
def render_exception_js(self, exception):
    from .http import JsonResponse
    response = {}
    response["error"] = exception.error
    response["error_description"] = exception.reason
    # The attribute name was stripped in the source; 'status_code' is an assumption.
    return JsonResponse(response, status=getattr(exception, 'status_code', 400))
Return a response with the body containing a JSON-formatted version of the exception.
281
def set_temperature(self, temp):
    # The stripped literals are reconstructed from the Vera thermostat service
    # convention; the exact variable names are assumptions.
    self.set_service_value(
        self.thermostat_setpoint,
        'CurrentSetpoint',
        'NewCurrentSetpoint',
        temp)
    self.set_cache_value('setpoint', temp)
Set current goal temperature / setpoint
282
def _update_records(self, records, data):
    data = {k: v for k, v in data.items() if v}
    records = [dict(record, **data) for record in records]
    # The stripped literals are reconstructed from the netcup API convention
    # ('updateDnsRecords' / 'dnsrecords'); treat them as assumptions.
    return self._apicall(
        'updateDnsRecords',
        domainname=self.domain,
        dnsrecordset={'dnsrecords': records},
    ).get('dnsrecords', [])
Insert or update a list of DNS records, specified in the netcup API convention. The fields ``hostname``, ``type``, and ``destination`` are mandatory and must be provided either in the record dict or through ``data``!
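For illustration, a record in the netcup convention with the three mandatory fields (values are hypothetical):

record = {
    "hostname": "www",
    "type": "A",
    "destination": "203.0.113.10",
}
# provider._update_records([record], {}) inserts or updates it for the domain.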
283
def get_input_shape(sym, proto_obj):
    arg_params = proto_obj.arg_dict
    aux_params = proto_obj.aux_dict
    # 'input_tensor_data' reconstructs a stripped literal; an assumption based
    # on the metadata fields used in the ONNX import path.
    model_input_shape = [data[1] for data in proto_obj.model_metadata.get('input_tensor_data')]
    data_names = [data[0] for data in proto_obj.model_metadata.get('input_tensor_data')]
    inputs = []
    for in_shape in model_input_shape:
        inputs.append(nd.ones(shape=in_shape))
    data_shapes = []
    for idx, input_name in enumerate(data_names):
        data_shapes.append((input_name, inputs[idx].shape))
    ctx = context.cpu()
    mod = module.Module(symbol=sym, data_names=data_names, context=ctx, label_names=None)
    mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None)
    mod.set_params(arg_params=arg_params, aux_params=aux_params)
    data_forward = []
    for idx, input_name in enumerate(data_names):
        val = inputs[idx]
        data_forward.append(val)
    mod.forward(io.DataBatch(data_forward))
    result = mod.get_outputs()[0].asnumpy()
    return result.shape
Helper function to obtain the output shape of the model by binding the symbol and running a forward pass on dummy inputs.
284
def get_program(self, program_resource_name: str) -> Dict:
    return self.service.projects().programs().get(
        name=program_resource_name).execute()
Returns the previously created quantum program. Params: program_resource_name: A string of the form `projects/project_id/programs/program_id`. Returns: A dictionary containing the metadata and the program.
285
def _execute_callback_async(self, callback, data):
    if asyncio.iscoroutine(callback):
        asyncio.ensure_future(
            callback(rtm_client=self, web_client=self._web_client, data=data)
        )
    else:
        asyncio.ensure_future(
            asyncio.coroutine(callback)(
                rtm_client=self, web_client=self._web_client, data=data
            )
        )
Execute the callback asynchronously. If the callback is not a coroutine, convert it. Note: The WebClient passed into the callback is running in "async" mode. This means all responses will be futures.
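A sketch of a callback compatible with the dispatcher above; whether written as a plain function or a coroutine, it must accept the rtm_client, web_client, and data keyword arguments:

def on_event(rtm_client, web_client, data):
    # Runs on the event loop; responses from web_client are futures in async mode.
    print(data)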
286
def remove_image_info_cb(self, viewer, channel, image_info):
    return self.remove_image_cb(viewer, channel.name,
                                image_info.name, image_info.path)
Almost the same as remove_image_cb().
287
def rect(self, x, y, width, height, roundness=0.0, draw=True, **kwargs):
    path = self.BezierPath(**kwargs)
    path.rect(x, y, width, height, roundness, self.rectmode)
    if draw:
        path.draw()
    return path
Draw a rectangle from x, y of width, height. :param x: top left x-coordinate :param y: top left y-coordinate :param width: width of the rectangle :param height: height of the rectangle :param roundness: corner roundness, defaults to 0.0 (a right angle) :param draw: if True, draws immediately :param fill: optionally pass a fill color :return: path representing the rectangle.
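A hedged usage sketch in a NodeBox/Shoebot-style context (bot is a hypothetical canvas object exposing rect()):

path = bot.rect(10, 10, 100, 50, roundness=0.3, draw=False)  # build, defer drawing
path.draw()  # draw later; fill colors pass through **kwargs to BezierPath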
288
def infer(self, **options):
    descriptor = deepcopy(self.__current_descriptor)
    # String literals below were stripped in the source; the key names are
    # reconstructed from Data Package descriptor conventions.
    if self.__source_inspection.get('blank'):
        return descriptor
    if not descriptor.get('name'):
        descriptor['name'] = self.__source_inspection['name']
    if not self.inline and not self.__storage:
        if not descriptor.get('format'):
            descriptor['format'] = self.__source_inspection['format']
        if not descriptor.get('mediatype'):
            descriptor['mediatype'] = 'text/%s' % descriptor['format']
        if not descriptor.get('encoding'):
            contents = b''
            with self.raw_iter(stream=True) as stream:
                for chunk in stream:
                    contents += chunk
                    if len(contents) > 1000:
                        break
            encoding = cchardet.detect(contents)['encoding']
            if encoding is not None:
                encoding = encoding.lower()
                descriptor['encoding'] = 'utf-8' if encoding == 'ascii' else encoding
    if not descriptor.get('schema'):
        if self.tabular:
            descriptor['schema'] = self.__get_table().infer(**options)
    if descriptor.get('profile') == config.DEFAULT_RESOURCE_PROFILE:
        if self.tabular:
            descriptor['profile'] = 'tabular-data-resource'
    self.__current_descriptor = descriptor
    self.__build()
    return descriptor
https://github.com/frictionlessdata/datapackage-py#resource
289
def get(self):
    if not HAS_SQL:
        return requests.session()
    try:
        conn, c = self.connect()
    except:
        log.traceback(logging.DEBUG)
        return requests.session()
    session = None
    try:
        # Both SQL literals were stripped in the source; these statements are
        # hedged reconstructions (a harmless pragma, then fetching the pickled
        # session blob), not the original statements.
        c.execute('PRAGMA busy_timeout = 30000')
        c.execute('SELECT session FROM {} LIMIT 1'.format(self.table_name))
        row = c.fetchone()
        if row is not None:
            session = pickle.loads(row[0])
    except:
        log.traceback(logging.DEBUG)
    try:
        conn.close()
    except:
        log.traceback(logging.DEBUG)
    return session if session is not None else requests.session()
Returns a requests.Session object. Gets Session from sqlite3 cache or creates a new Session.
290
def update(self, instance, condition):
    item = self.dbi.get(condition)
    if item is None:
        return None
    item.update(instance.as_dict())
    self.dbi.update(item, condition)
    return item.eid
Update the instance to the database :param instance: an instance of modeled data object :param condition: condition evaluated to determine record(s) to update :returns: record id updated or None :rtype: int
291
def get_instance(self, payload):
    return TranscriptionInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
        recording_sid=self._solution['recording_sid'],
    )
Build an instance of TranscriptionInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.recording.transcription.TranscriptionInstance :rtype: twilio.rest.api.v2010.account.recording.transcription.TranscriptionInstance
292
def content(self):
    if self.cache_content and self.cached_content:
        return self.cached_content
    try:
        with self._open_dockerfile() as dockerfile:
            content = b2u(dockerfile.read())
        if self.cache_content:
            self.cached_content = content
        return content
    except (IOError, OSError) as ex:
        logger.error("Couldn't retrieve content of dockerfile: %r", ex)
        raise
:return: string (unicode) with Dockerfile content
293
def decode_offset_commit_response(cls, data):
    # The struct format strings were stripped in the source; '>i' (int32) and
    # '>ih' (int32 + int16) are reconstructed from the Kafka wire protocol.
    ((correlation_id,), cur) = relative_unpack('>i', data, 0)
    ((num_topics,), cur) = relative_unpack('>i', data, cur)
    for _ in xrange(num_topics):
        (topic, cur) = read_short_string(data, cur)
        ((num_partitions,), cur) = relative_unpack('>i', data, cur)
        for _ in xrange(num_partitions):
            ((partition, error), cur) = relative_unpack('>ih', data, cur)
            yield OffsetCommitResponse(topic, partition, error)
Decode bytes to an OffsetCommitResponse Arguments: data: bytes to decode
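The '>i' and '>ih' codes used above are big-endian struct formats (int32, and int32 followed by int16); a runnable illustration without relative_unpack:

import struct
data = struct.pack('>ih', 7, 0)  # partition=7, error=0
partition, error = struct.unpack_from('>ih', data, 0)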
294
def create_from_fitsfile(cls, fitsfile):
    from fermipy.skymap import Map
    index_map = Map.create_from_fits(fitsfile)
    mult_map = Map.create_from_fits(fitsfile, hdu=1)
    ff = fits.open(fitsfile)
    hpx = HPX.create_from_hdu(ff[0])
    mapping_data = dict(ipixs=index_map.counts,
                        mult_val=mult_map.counts,
                        npix=mult_map.counts.shape)
    return cls(hpx, index_map.wcs, mapping_data)
Read a FITS file and use it to make a mapping
295
def add_raw_code(self, string_or_list):
    if _is_string(string_or_list):
        self._GMSH_CODE.append(string_or_list)
    else:
        assert isinstance(string_or_list, list)
        for string in string_or_list:
            self._GMSH_CODE.append(string)
    return
Add raw Gmsh code.
296
def dump_dict_to_file(dictionary, filepath):
    create_dirs(os.path.dirname(filepath))
    # Mode and newline literals were stripped in the source; append mode and a
    # trailing newline match the "dump as a line" behavior in the docstring.
    with open(filepath, 'a') as outfile:
        json.dump(dictionary, outfile)
        outfile.write('\n')
Dump @dictionary as a line to @filepath.
297
def get_globals(self):
    if self.shell:
        globals_ = dict(_initial_globals)
    else:
        globals_ = dict(self.current_frame.f_globals)
    # The stripped key literals below are reconstructed; '_' (last result),
    # 'cut', and '_wdb' are assumptions consistent with the assigned values.
    globals_['_'] = self.db.last_obj
    if cut is not None:
        globals_.setdefault('cut', cut)
    globals_['_wdb'] = self.db
    globals_.update(self.current_locals)
    for var, val in self.db.extra_vars.items():
        globals_[var] = val
    self.db.extra_items = {}
    return globals_
Get enriched globals
298
def truncate(self, before=None, after=None):
    if after and before and after < before:
        raise ValueError('after < before')
    i, j = self.levels[0].slice_locs(before, after)
    left, right = self.slice_locs(before, after)
    new_levels = list(self.levels)
    new_levels[0] = new_levels[0][i:j]
    new_codes = [level_codes[left:right] for level_codes in self.codes]
    new_codes[0] = new_codes[0] - i
    return MultiIndex(levels=new_levels, codes=new_codes,
                      verify_integrity=False)
Slice index between two labels / tuples, return new MultiIndex Parameters ---------- before : label or tuple, can be partial. Default None None defaults to start after : label or tuple, can be partial. Default None None defaults to end Returns ------- truncated : MultiIndex
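A runnable illustration of truncating on the first level:

import pandas as pd
mi = pd.MultiIndex.from_product([['a', 'b', 'c'], [1, 2]])
mi.truncate(before='b')             # drops all 'a' entries
mi.truncate(before='a', after='b')  # keeps only levels 'a' and 'b'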
299
def launch_frozen(in_name, out_name, script_path, frozen_tar_path=None,
                  temp_path='_hadoopy_temp', cache=True, check_script=False, **kw):
    # Stripped string literals are reconstructed below ('jobconfs', 'cmdenvs',
    # 'files', the jobconf cache-archive keys, and '_frozen'); treat them as
    # assumptions consistent with the docstring.
    if (('jobconfs' in kw and isinstance(kw['jobconfs'], (str, unicode))) or
            ('cmdenvs' in kw and isinstance(kw['cmdenvs'], (str, unicode))) or
            ('files' in kw and isinstance(kw['files'], (str, unicode)))):
        raise TypeError('files, jobconfs, and cmdenvs must be iterators of strings, not strings')
    if 'jobconfs' in kw:
        kw['jobconfs'] = _listeq_to_dict(kw['jobconfs'])
    if 'cmdenvs' in kw:
        kw['cmdenvs'] = _listeq_to_dict(kw['cmdenvs'])
    cmds = []
    if not frozen_tar_path:
        freeze_out = hadoopy.freeze_script(script_path, temp_path=temp_path, cache=cache)
        frozen_tar_path = freeze_out['frozen_tar_path']
        cmds += freeze_out['cmds']
    jobconfs = kw.get('jobconfs', {})
    jobconfs['mapred.cache.archives'] = '%s#_frozen' % frozen_tar_path
    jobconfs['mapreduce.job.cache.archives'] = '%s#_frozen' % frozen_tar_path
    kw['copy_script'] = False
    kw['add_python'] = False
    kw['jobconfs'] = jobconfs
    out = launch(in_name, out_name, script_path, script_dir='_frozen',
                 remove_ext=True, check_script=check_script,
                 make_executable=False, **kw)
    out['freeze_cmds'] = cmds
    out['frozen_tar_path'] = frozen_tar_path
    return out
Freezes a script and then launches it. This function will freeze your python program, and place it on HDFS in 'temp_path'. It will not remove it afterwards as they are typically small, you can easily reuse/debug them, and to avoid any risks involved with removing the file. :param in_name: Input path (string or list) :param out_name: Output path :param script_path: Path to the script (e.g., script.py) :param frozen_tar_path: If not None, use this path to a previously frozen archive. You can get such a path from the return value of this function, it is particularly helpful in iterative programs. :param cache: If True (default) then use previously frozen scripts. Cache is stored in memory (not persistent). :param temp_path: HDFS path that we can use to store temporary files (default to _hadoopy_temp) :param partitioner: If True, the partitioner is the value. :param wait: If True, wait till the process is completed (default True) this is useful if you want to run multiple jobs concurrently by using the 'process' entry in the output. :param files: Extra files (other than the script) (iterator). NOTE: Hadoop copies the files into working directory :param jobconfs: Extra jobconf parameters (iterator) :param cmdenvs: Extra cmdenv parameters (iterator) :param hstreaming: The full hadoop streaming path to call. :param name: Set the job name to this (default None, job name is the script name) :param use_typedbytes: If True (default), use typedbytes IO. :param use_seqoutput: True (default), output sequence file. If False, output is text. :param use_autoinput: If True (default), sets the input format to auto. :param config: If a string, set the hadoop config path :param pipe: If true (default) then call user code through a pipe to isolate it and stop bugs when printing to stdout. See project docs. :param python_cmd: The python command to use. The default is "python". Can be used to override the system default python, e.g. python_cmd = "python2.6" :param num_mappers: The number of mappers to use, i.e. the argument given to 'numMapTasks'. If None, then do not specify this argument to hadoop streaming. :param num_reducers: The number of reducers to use, i.e. the argument given to 'numReduceTasks'. If None, then do not specify this argument to hadoop streaming. :param check_script: If True, then copy script and .py(c) files to a temporary directory and verify that it can be executed. This catches the majority of errors related to not included locally imported files. The default is False when using launch_frozen as the freeze process packages local files. :rtype: Dictionary with some of the following entries (depending on options) :returns: freeze_cmds: Freeze command(s) ran :returns: frozen_tar_path: HDFS path to frozen file :returns: hadoop_cmds: Hadoopy command(s) ran :returns: process: subprocess.Popen object :returns: output: Iterator of (key, value) pairs :raises: subprocess.CalledProcessError: Hadoop error. :raises: OSError: Hadoop streaming not found. :raises: TypeError: Input types are not correct. :raises: ValueError: Script not found
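A hedged usage sketch (HDFS paths and script name are hypothetical):

out = launch_frozen('in_data/', 'out_data/', 'wordcount.py', num_reducers=4)
for key, value in out['output']:  # present when wait=True (the default)
    print(key, value)
# Reuse out['frozen_tar_path'] on later runs to skip re-freezing.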