def _get_pid_file_variable(self, db):
    pid_file = None
    try:
        with closing(db.cursor()) as cursor:
            cursor.execute("SHOW VARIABLES LIKE 'pid_file'")
            pid_file = cursor.fetchone()[1]
    except Exception:
        self.warning("Error while fetching pid_file variable of MySQL.")
    return pid_file
Get the `pid_file` variable
def mirror(self: BaseBoardT) -> BaseBoardT:
    board = self.transform(flip_vertical)
    board.occupied_co[WHITE], board.occupied_co[BLACK] = board.occupied_co[BLACK], board.occupied_co[WHITE]
    return board
Returns a mirrored copy of the board. The board is mirrored vertically and piece colors are swapped, so that the position is equivalent modulo color.
def count_characters(root, out):
    if os.path.isfile(root):
        with open(root, 'rb') as in_f:
            for line in in_f:
                for char in line:
                    if char not in out:
                        out[char] = 0
                    out[char] = out[char] + 1
    elif os.path.isdir(root):
        for filename in os.listdir(root):
            count_characters(os.path.join(root, filename), out)

Count the occurrences of the different characters in the files
def reload(self, encoding):
    assert os.path.exists(self.path)
    self.open(self.path, encoding=encoding, use_cached_encoding=False)

Reload the file with another encoding.

:param encoding: the new encoding to use to reload the file.
def wgan(cls, data:DataBunch, generator:nn.Module, critic:nn.Module,
         switcher:Callback=None, clip:float=0.01, **learn_kwargs):
    "Create a WGAN from `data`, `generator` and `critic`."
    return cls(data, generator, critic, NoopLoss(), WassersteinLoss(),
               switcher=switcher, clip=clip, **learn_kwargs)
Create a WGAN from `data`, `generator` and `critic`.
def currency_format(cents):
    try:
        cents = int(cents)
    except ValueError:
        return cents
    negative = (cents < 0)
    if negative:
        cents = -1 * cents
    if cents < 100:
        dollars = 0
    else:
        dollars = cents / 100
        cents = cents % 100
    centstr = str(cents)
    if len(centstr) < 2:
        centstr = '0' + centstr
    if negative:
        return "- $%s.%s" % (intcomma(dollars), centstr)
    return "$%s.%s" % (intcomma(dollars), centstr)

Format currency with symbol and decimal points.

>> currency_format(-600)
- $6.00

TODO: Add localization support.
def shell(args):
    " A helper command to be used for shell integration "
    print
    print "export MAKESITE_HOME=%s" % args.path
    print "source %s" % op.join(settings.BASEDIR, 'shell.sh')
    print
A helper command to be used for shell integration
def add_tags(self, tags):
    if not isinstance(tags, list):
        tags = [tags]
    self._bugsy.request('bug/comment/%s/tags' % self._comment['id'],
                        method='PUT', json={"add": tags})
Add tags to the comments
def _twoByteStringToNum(bytestring, numberOfDecimals=0, signed=False):
    _checkString(bytestring, minlength=2, maxlength=2, description='bytestring')
    _checkInt(numberOfDecimals, minvalue=0, description='number of decimals')
    _checkBool(signed, description='signed parameter')
    formatcode = '>'
    if signed:
        formatcode += 'h'
    else:
        formatcode += 'H'
    fullregister = _unpack(formatcode, bytestring)
    if numberOfDecimals == 0:
        return fullregister
    divisor = 10 ** numberOfDecimals
    return fullregister / float(divisor)

Convert a two-byte string to a numerical value, possibly scaling it.

Args:
    * bytestring (str): A string of length 2.
    * numberOfDecimals (int): The number of decimals. Defaults to 0.
    * signed (bool): Whether large positive values should be interpreted as negative values.

Returns:
    The numerical value (int or float) calculated from the ``bytestring``.

Raises:
    TypeError, ValueError

Use the parameter ``signed=True`` if converting a bytestring that can hold negative values. Then upper range data will be automatically converted into negative return values (two's complement).

Use ``numberOfDecimals=1`` to divide the received data by 10 before returning the value. Similarly ``numberOfDecimals=2`` will divide the received data by 100 before returning the value.

The byte order is big-endian, meaning that the most significant byte is sent first.

For example: A string ``\\x03\\x02`` (which has the length 2) corresponds to 0302 (hex) = 770 (dec). If ``numberOfDecimals = 1``, then this is converted to 77.0 (float).
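As a sanity check, the same big-endian conversion can be done with the standard struct module directly (the _check* and _unpack helpers above are internal to the library):

import struct

raw = struct.unpack('>H', b'\x03\x02')[0]   # big-endian unsigned short
print(raw)          # 770
print(raw / 10.0)   # 77.0, i.e. the result with numberOfDecimals=1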
def get_privileges(self):
    url = self.url('GET_USER_PRIVILEGES')
    return self.dispatch('GET', url, auth=self.auth)

Get privileges for this user.
def get_header(self, name, default=None):
    return self._handler.headers.get(name, default)
Retrieves the value of a header
def random_choice(self, actions=None, random_state=None):
    random_state = check_random_state(random_state)
    if actions is not None:
        n = len(actions)
    else:
        n = self.num_actions
    if n == 1:
        idx = 0
    else:
        idx = random_state.randint(n)
    if actions is not None:
        return actions[idx]
    else:
        return idx

Return a pure action chosen randomly from `actions`.

Parameters
----------
actions : array_like(int), optional(default=None)
    An array of integers representing pure actions.
random_state : int or np.random.RandomState, optional
    Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used.

Returns
-------
scalar(int)
    If `actions` is given, returns an integer representing a pure action chosen randomly from `actions`; if not, an action is chosen randomly from all of the player's actions.
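A brief illustration of the selection logic, using numpy directly; the surrounding player class and check_random_state come from the host library, so this sketch only mirrors the core behaviour:

import numpy as np

rng = np.random.RandomState(0)
actions = [2, 5, 7]
idx = rng.randint(len(actions))  # uniform index into the action list
print(actions[idx])              # reproducible because the seed is fixed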
def _subprocess_method(self, command):
    p = subprocess.Popen([self._ipmitool_path] + self.args + command,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    self.output, self.error = p.communicate()
    self.status = p.returncode

Use the subprocess module to execute ipmitool commands and set status
def _call(self, x, out=None):
    if out is None:
        return self.prod_op(x)[0]
    else:
        wrapped_out = self.prod_op.range.element([out], cast=False)
        pspace_result = self.prod_op(x, out=wrapped_out)
        return pspace_result[0]
Apply operators to ``x`` and sum.
def ValidateStopTimesForTrip(self, problems, trip, stop_times):
    prev_departure_secs = -1
    consecutive_stop_times_with_potentially_same_time = 0
    consecutive_stop_times_with_fully_specified_same_time = 0

    def CheckSameTimeCount():
        if (prev_departure_secs != -1 and
                consecutive_stop_times_with_fully_specified_same_time > 5):
            problems.TooManyConsecutiveStopTimesWithSameTime(
                trip.trip_id,
                consecutive_stop_times_with_fully_specified_same_time,
                prev_departure_secs)

    for index, st in enumerate(stop_times):
        if st.arrival_secs is None or st.departure_secs is None:
            consecutive_stop_times_with_potentially_same_time += 1
            continue
        if (prev_departure_secs == st.arrival_secs and
                st.arrival_secs == st.departure_secs):
            consecutive_stop_times_with_potentially_same_time += 1
            consecutive_stop_times_with_fully_specified_same_time = (
                consecutive_stop_times_with_potentially_same_time)
        else:
            CheckSameTimeCount()
            consecutive_stop_times_with_potentially_same_time = 1
            consecutive_stop_times_with_fully_specified_same_time = 1
        prev_departure_secs = st.departure_secs
    CheckSameTimeCount()

Checks the stop times of a trip. Ensures that a trip does not have too many consecutive stop times with the same departure/arrival time.
def get_raw_data(self, mac, response_format='json'):
    data = {
        self._FORMAT_F: response_format,
        self._SEARCH_F: mac
    }
    response = self.__decode_str(self.__call_api(self.__url, data), 'utf-8')
    if len(response) > 0:
        return response
    raise EmptyResponseException()

Get data from API as string.

Keyword arguments:
mac -- MAC address or OUI for searching
response_format -- response format; supported types are listed at https://macaddress.io
def _load_file(self, filename):
    filename = os.path.abspath(os.path.expanduser(filename))
    if not os.path.isfile(filename):
        raise Exception('File %s does not exist' % filename)
    ext = vtki.get_ext(filename)
    if ext == '.ply':
        reader = vtk.vtkPLYReader()
    elif ext == '.stl':
        reader = vtk.vtkSTLReader()
    elif ext == '.vtk':
        reader = vtk.vtkPolyDataReader()
    elif ext == '.vtp':
        reader = vtk.vtkXMLPolyDataReader()
    elif ext == '.obj':
        reader = vtk.vtkOBJReader()
    else:
        raise TypeError('Filetype must be either "ply", "stl", "vtk", "vtp", or "obj".')
    reader.SetFileName(filename)
    reader.Update()
    self.ShallowCopy(reader.GetOutput())
    if not np.any(self.points):
        raise AssertionError('Empty or invalid file')

Load a surface mesh from a mesh file.

Mesh file may be an ASCII or binary ply, stl, or vtk mesh file.

Parameters
----------
filename : str
    Filename of mesh to be loaded. File type is inferred from the extension of the filename.

Notes
-----
Binary files load much faster than ASCII.
def get_display_panel_by_id(self, identifier: str) -> DisplayPanel:
    display_panel = next(
        (display_panel for display_panel in self.__document_controller.workspace_controller.display_panels
         if display_panel.identifier.lower() == identifier.lower()), None)
    return DisplayPanel(display_panel) if display_panel else None

Return display panel with the identifier.

.. versionadded:: 1.0

Status: Provisional
Scriptable: Yes
def _initialize_client_from_environment():
    global _client, project_id, write_key, read_key, master_key, base_url
    if _client is None:
        project_id = project_id or os.environ.get("KEEN_PROJECT_ID")
        write_key = write_key or os.environ.get("KEEN_WRITE_KEY")
        read_key = read_key or os.environ.get("KEEN_READ_KEY")
        master_key = master_key or os.environ.get("KEEN_MASTER_KEY")
        base_url = base_url or os.environ.get("KEEN_BASE_URL")
        if not project_id:
            raise InvalidEnvironmentError("Please set the KEEN_PROJECT_ID environment variable or set keen.project_id!")
        _client = KeenClient(project_id, write_key=write_key, read_key=read_key,
                             master_key=master_key, base_url=base_url)
Initialize a KeenClient instance using environment variables.
def string_to_enum(value, enumeration, strict=True, default_value=None):
    if not isinstance(enumeration, Enum):
        raise ValueError('The specified enumeration is not an instance of Enum')
    if is_undefined(value):
        if strict:
            raise ValueError('The value cannot be null')
        if default_value is not None and default_value not in enumeration:
            raise ValueError('The default value must be an item of the specified enumeration')
        return default_value
    item = [item for item in enumeration if str(item) == value]
    if len(item) == 0:
        raise ValueError('The specified string "%s" does not represent any item of the enumeration' % value)
    return item[0]

Return the item of an enumeration that corresponds to the specified string representation.

@param value: string representation of an item of a Python enumeration.
@param enumeration: a Python enumeration.
@param strict: indicate whether the value must correspond to an item of the specified Python enumeration or if a ``None`` value is accepted.

@return: the item of the Python enumeration the specified string representation corresponds to.

@raise ValueError: if the enumeration is not an instance of ``Enum``, or if the string representation doesn't correspond to any item of the given Python enumeration, or if the default value is not an item of the given Python enumeration.
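A usage sketch under the assumption (suggested by the isinstance check above) that this library's Enum is instantiated with its item names and that str(item) yields the name; the ContactName enumeration here is illustrative:

ContactName = Enum('EMAIL', 'PHONE', 'WEBSITE')  # hypothetical enumeration instance

item = string_to_enum('PHONE', ContactName)
assert item == ContactName.PHONE

# Non-strict mode falls back to the default when the value is undefined.
item = string_to_enum(None, ContactName, strict=False, default_value=ContactName.EMAIL)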
def __call(self, uri, params=None, method="get"):
    try:
        resp = self.__get_response(uri, params, method, False)
        rjson = resp.json(**self.json_options)
        assert resp.ok
    except AssertionError:
        msg = "OCode-{}: {}".format(resp.status_code, rjson["message"])
        raise BadRequest(msg)
    except Exception as e:
        msg = "Bad response: {}".format(e)
        log.error(msg, exc_info=True)
        raise BadRequest(msg)
    else:
        return rjson

Only returns the response, not the status_code
def _getTrafficClassAndFlowLabel(self):
    if self.tf == 0x0:
        return (self.tc_ecn << 6) + self.tc_dscp, self.flowlabel
    elif self.tf == 0x1:
        return (self.tc_ecn << 6), self.flowlabel
    elif self.tf == 0x2:
        return (self.tc_ecn << 6) + self.tc_dscp, 0
    else:
        return 0, 0
Page 6, draft feb 2011
def parse_qs(qs, keep_blank_values=0, strict_parsing=0, keep_attr_order=True):
    od = DefaultOrderedDict(list) if keep_attr_order else defaultdict(list)
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
        od[name].append(value)
    return od
Kind of like urlparse.parse_qs, except returns an ordered dict. Also avoids replicating that function's bad habit of overriding the built-in 'dict' type. Taken from below with modification: <https://bitbucket.org/btubbs/thumpy/raw/8cdece404f15/thumpy.py>
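The behaviour it preserves, sketched with the standard library (an OrderedDict of lists stands in for the module's own DefaultOrderedDict):

from urllib.parse import parse_qsl   # urlparse.parse_qsl on Python 2
from collections import OrderedDict

od = OrderedDict()
for name, value in parse_qsl('b=2&a=1&a=3'):
    od.setdefault(name, []).append(value)
print(od)   # OrderedDict([('b', ['2']), ('a', ['1', '3'])]) -- attribute order kept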
def call_or_cast(self, method, args={}, nowait=False, **kwargs):
    return (nowait and self.cast or self.call)(method, args, **kwargs)

Apply remote `method` asynchronously or synchronously depending on the value of `nowait`.

:param method: The name of the remote method to perform.
:param args: Dictionary of arguments for the method.
:keyword nowait: If false the call will block until the result is available and return it (default), if true the call will be non-blocking and no result will be returned.
:keyword retry: If set to true then message sending will be retried in the event of connection failures. Default is decided by the :attr:`retry` attribute.
:keyword retry_policy: Override retry policies. See :attr:`retry_policy`. This must be a dictionary, and keys will be merged with the default retry policy.
:keyword timeout: Timeout to wait for replies in seconds as a float (**only relevant in blocking mode**).
:keyword limit: Limit number of replies to wait for (**only relevant in blocking mode**).
:keyword callback: If provided, this callback will be called for every reply received (**only relevant in blocking mode**).
:keyword \*\*props: Additional message properties. See :meth:`kombu.Producer.publish`.
def send_cons3rt_agent_logs(self):
    log = logging.getLogger(self.cls_logger + '.send_cons3rt_agent_logs')
    if self.cons3rt_agent_log_dir is None:
        log.warn('There is no CONS3RT agent log directory on this system')
        return
    log.debug('Searching for log files in directory: {d}'.format(d=self.cons3rt_agent_log_dir))
    for item in os.listdir(self.cons3rt_agent_log_dir):
        item_path = os.path.join(self.cons3rt_agent_log_dir, item)
        if os.path.isfile(item_path):
            log.info('Sending email with cons3rt agent log file: {f}'.format(f=item_path))
            try:
                self.send_text_file(text_file=item_path)
            except (TypeError, OSError, AssetMailerError):
                _, ex, trace = sys.exc_info()
                msg = '{n}: There was a problem sending CONS3RT agent log file: {f}\n{e}'.format(
                    n=ex.__class__.__name__, f=item_path, e=str(ex))
                raise AssetMailerError, msg, trace
            else:
                log.info('Successfully sent email with file: {f}'.format(f=item_path))

Send the cons3rt agent log file.

:return: None
def get_field_mro(cls, field_name):
    res = set()
    if hasattr(cls, '__mro__'):
        for _class in inspect.getmro(cls):
            values_ = getattr(_class, field_name, None)
            if values_ is not None:
                res = res.union(set(make_list(values_)))
    return res
Goes up the mro and looks for the specified field.
def get_market_history(self, market):
    return self._api_query(path_dict={
        API_V1_1: '/public/getmarkethistory',
    }, options={'market': market, 'marketname': market},
        protection=PROTECTION_PUB)

Used to retrieve the latest trades that have occurred for a specific market.

Endpoint:
1.1 /market/getmarkethistory
2.0 NO Equivalent

Example ::

    {'success': True,
     'message': '',
     'result': [{'Id': 5625015,
                 'TimeStamp': '2017-08-31T01:29:50.427',
                 'Quantity': 7.31008193,
                 'Price': 0.00177639,
                 'Total': 0.01298555,
                 'FillType': 'FILL',
                 'OrderType': 'BUY'},
                ...
               ]
    }

:param market: String literal for the market (ex: BTC-LTC)
:type market: str
:return: Market history in JSON
:rtype : dict
def _get_current_names(current, dsn, pc):
    _table_name = ""
    _variable_name = ""
    try:
        _table_name = current['{}_tableName'.format(pc)]
        _variable_name = current['{}_variableName'.format(pc)]
    except Exception as e:
        print("Error: Unable to collapse time series: {}, {}".format(dsn, e))
        logger_ts.error("get_current: {}, {}".format(dsn, e))
    return _table_name, _variable_name

Get the table name and variable name from the given time series entry.

:param dict current: Time series entry
:param str pc: paleoData or chronData
:return str _table_name:
:return str _variable_name:
def rpc_call(self, request, method=None, params=None, **kwargs):
    args = []
    kwargs = dict()
    if isinstance(params, dict):
        kwargs.update(params)
    else:
        args = list(as_tuple(params))
    method_key = "{0}.{1}".format(self.scheme_name, method)
    if method_key not in self.methods:
        raise AssertionError("Unknown method: {0}".format(method))
    method = self.methods[method_key]
    if hasattr(method, 'request'):
        args.insert(0, request)
    return method(*args, **kwargs)

Call an RPC method.

return object: a result
def sphlat(r, colat, lons):
    r = ctypes.c_double(r)
    colat = ctypes.c_double(colat)
    lons = ctypes.c_double(lons)
    radius = ctypes.c_double()
    lon = ctypes.c_double()
    lat = ctypes.c_double()
    # The docstring and outputs correspond to CSPICE's sphlat_c.
    libspice.sphlat_c(r, colat, lons, ctypes.byref(radius),
                      ctypes.byref(lon), ctypes.byref(lat))
    return radius.value, lon.value, lat.value

Convert from spherical coordinates to latitudinal coordinates.

http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/sphlat_c.html

:param r: Distance of the point from the origin.
:type r: float
:param colat: Angle of the point from positive z axis (radians).
:type colat: float
:param lons: Angle of the point from the XZ plane (radians).
:type lons: float
:return: Distance of a point from the origin, Angle of the point from the XZ plane in radians, Angle of the point from the XY plane in radians.
:rtype: tuple
def _set_load_action(self, mem_addr, rec_count, retries, read_complete=False):
    if self._have_all_records():
        mem_addr = None
        rec_count = 0
        retries = 0
    elif read_complete:
        retries = 0
        if rec_count:
            mem_addr = self._next_address(mem_addr)
        else:
            mem_addr = self._next_address(mem_addr)
            rec_count = 1
            retries = 0
    elif rec_count and retries < ALDB_RECORD_RETRIES:
        retries = retries + 1
    elif not rec_count and retries < ALDB_ALL_RECORD_RETRIES:
        retries = retries + 1
    elif not rec_count and retries >= ALDB_ALL_RECORD_RETRIES:
        mem_addr = self._next_address(mem_addr)
        rec_count = 1
        retries = 0
    else:
        mem_addr = None
        rec_count = 0
        retries = 0
    self._load_action = LoadAction(mem_addr, rec_count, retries)
    if mem_addr is not None:
        _LOGGER.debug('Load action: addr: %04x rec_count: %d retries: %d',
                      self._load_action.mem_addr,
                      self._load_action.rec_count,
                      self._load_action.retries)

Calculate the next record to read.

If the last record was successful and one record was being read, then look for the next record until we get to the high water mark.

If the last read was successful and all records were being read, then look for the first record.

If the last read was unsuccessful and one record was being read, then repeat the last read until max retries.

If the last read was unsuccessful and all records were being read, then repeat the last read until max retries or look for the first record.
def get_ctype(rtype, cfunc, *args):
    val_p = backend.ffi.new(rtype)
    args = args + (val_p,)
    cfunc(*args)
    return val_p[0]

Call a C function that takes a pointer as its last argument and return the C object that it contains after the function has finished.

:param rtype: C data type that is filled by the function
:param cfunc: C function to call
:param args: Arguments to call function with
:return: A pointer to the specified data type
async def set_config(self, config):
    app_facade = client.ApplicationFacade.from_connection(self.connection)
    log.debug('Setting config for %s: %s', self.name, config)
    return await app_facade.Set(self.name, config)

Set configuration options for this application.

:param config: Dict of configuration to set
def _fit_and_score_ensemble(self, X, y, cv, **fit_params):
    fit_params_steps = self._split_fit_params(fit_params)
    folds = list(cv.split(X, y))
    base_estimators, kernel_cache = self._get_base_estimators(X)
    out = Parallel(
        n_jobs=self.n_jobs, verbose=self.verbose
    )(
        delayed(_fit_and_score_fold)(clone(estimator),
                                     X if i not in kernel_cache else kernel_cache[i],
                                     y, self.scorer,
                                     train_index, test_index,
                                     fit_params_steps[name], i, fold)
        for i, (name, estimator) in enumerate(base_estimators)
        for fold, (train_index, test_index) in enumerate(folds))
    if len(kernel_cache) > 0:
        out = self._restore_base_estimators(kernel_cache, out, X, folds)
    return self._create_base_ensemble(out, len(base_estimators), len(folds))
Create a cross-validated model by training a model for each fold with the same model parameters
def content(self, path=None, overwrite=True, encoding='utf-8'):
    if path:
        self.download(wait=True, path=path, overwrite=overwrite)
        with io.open(path, 'r', encoding=encoding) as fp:
            return fp.read()
    with tempfile.NamedTemporaryFile() as tmpfile:
        self.download(wait=True, path=tmpfile.name, overwrite=overwrite)
        with io.open(tmpfile.name, 'r', encoding=encoding) as fp:
            return fp.read()

Downloads file to the specified path or as a temporary file and reads the file content in memory. Should not be used on very large files.

:param path: Path for file download. If omitted a tmp file will be used.
:param overwrite: Overwrite file if it exists locally
:param encoding: File encoding, by default it is UTF-8
:return: File content.
def _get_shade_hdrgos(**kws):
    if 'shade_hdrgos' in kws:
        return kws['shade_hdrgos']
    if 'hdrgo_prt' in kws:
        return kws['hdrgo_prt']
    if 'section_sortby' in kws and kws['section_sortby']:
        return False
    if 'top_n' in kws and isinstance(kws['top_n'], int):
        return False
    return True
If no hdrgo_prt specified, and these conditions are present -> hdrgo_prt=F.
def top_sections(self):
    top_line = self.text.split('\n')[0]
    sections = len(top_line.split('+')) - 2
    return sections

The number of sections that touch the top side.

Returns
-------
sections : int
    The number of sections on the top
def countok(self):
    ok = np.ones(len(self.stars)).astype(bool)
    for name in self.constraints:
        c = self.constraints[name]
        if c.name not in self.selectfrac_skip:
            ok &= c.ok
    return ok
Boolean array showing which stars pass all count constraints. A "count constraint" is a constraint that affects the number of stars.
def do_commit_amends(self):
    commit_cumulative_count = 0
    for days in MARKED_DAYS:
        amend_date = (self.end_date - datetime.timedelta(days)).strftime("%Y-%m-%d %H:%M:%S")
        for commit_number_in_a_day in xrange(0, self.max_commits):
            commit_cumulative_count += 1
            subprocess.check_call(
                ['git', 'commit', '--amend',
                 "--date='<" + amend_date + " + 0530 >' ",
                 '-C', 'HEAD~{commit_number}'.format(commit_number=commit_cumulative_count)],
                cwd=self.repository_name)
    subprocess.check_call(['git', 'pull', '--no-edit'], cwd=self.repository_name)
    subprocess.check_call(['git', 'push', 'origin', 'master'], cwd=self.repository_name)

Amends the commits to form the heart
def disallow(nodes):
    def disallowed(cls):
        cls.unsupported_nodes = ()
        for node in nodes:
            new_method = _node_not_implemented(node, cls)
            name = 'visit_{node}'.format(node=node)
            cls.unsupported_nodes += (name,)
            setattr(cls, name, new_method)
        return cls
    return disallowed

Decorator to disallow certain nodes from parsing. Raises a NotImplementedError instead.

Returns
-------
disallowed : callable
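A minimal sketch of how such a class decorator is applied; the visitor class and node names are illustrative, and _node_not_implemented is assumed to build a method that raises:

@disallow(['Assign', 'Lambda'])
class MyVisitor(object):
    pass

# MyVisitor now has visit_Assign and visit_Lambda methods that raise
# NotImplementedError, and lists them in MyVisitor.unsupported_nodes.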
def get_sh_ids(self, identity, backend_name):
    identity_tuple = tuple(identity.items())
    sh_ids = self.__get_sh_ids_cache(identity_tuple, backend_name)
    return sh_ids
Return the Sorting Hat id and uuid for an identity
def _convert_to_degress(self, value):
    d0 = value[0][0]
    d1 = value[0][1]
    d = float(d0) / float(d1)
    m0 = value[1][0]
    m1 = value[1][1]
    m = float(m0) / float(m1)
    s0 = value[2][0]
    s1 = value[2][1]
    s = float(s0) / float(s1)
    return d + (m / 60.0) + (s / 3600.0)

Helper function to convert the GPS coordinates stored in the EXIF to degrees in float format
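A worked example of the degrees/minutes/seconds arithmetic this helper performs; EXIF stores each component as a (numerator, denominator) rational pair, and the triple below is illustrative:

value = ((52, 1), (30, 1), (36, 1))   # 52 deg 30' 36"
d = 52.0 / 1.0
m = 30.0 / 1.0
s = 36.0 / 1.0
print(d + m / 60.0 + s / 3600.0)      # 52.51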
def time_window(self, window_width_ms):
    op = Operator(
        _generate_uuid(),
        OpType.TimeWindow,
        "TimeWindow",
        num_instances=self.env.config.parallelism,
        other=window_width_ms)
    return self.__register(op)

Applies a system time window to the stream.

Attributes:
    window_width_ms (int): The length of the window in ms.
def _geodetic_to_cartesian(cls, lat, lon, alt):
    C = Earth.r / np.sqrt(1 - (Earth.e * np.sin(lat)) ** 2)
    S = Earth.r * (1 - Earth.e ** 2) / np.sqrt(1 - (Earth.e * np.sin(lat)) ** 2)
    r_d = (C + alt) * np.cos(lat)
    r_k = (S + alt) * np.sin(lat)
    norm = np.sqrt(r_d ** 2 + r_k ** 2)
    return norm * np.array([
        np.cos(lat) * np.cos(lon),
        np.cos(lat) * np.sin(lon),
        np.sin(lat)
    ])

Conversion from latitude, longitude and altitude coordinates to cartesian with respect to an ellipsoid.

Args:
    lat (float): Latitude in radians
    lon (float): Longitude in radians
    alt (float): Altitude to sea level in meters

Return:
    numpy.array: 3D element (in meters)
def get_interpolated(self, target, extent):
    result = self.copy()
    result.interpolate(target, extent)
    return result
Return a new vector that has been moved towards the given target by the given extent. The extent should be between 0 and 1.
def decode(self) -> Iterable:
    if self.data[0:1] not in (b'd', b'l'):
        return self.__wrap_with_tuple()
    return self.__parse()
Start of decode process. Returns final results.
def _apply_credentials(auto_refresh=True, credentials=None, headers=None):
    token = credentials.get_credentials().access_token
    if auto_refresh is True:
        if token is None:
            token = credentials.refresh(access_token=None, timeout=10)
        elif credentials.jwt_is_expired():
            token = credentials.refresh(timeout=10)
    headers.update({'Authorization': "Bearer {}".format(token)})

Update Authorization header.

Update request headers with latest `access_token`. Perform token `refresh` if token is ``None``.

Args:
    auto_refresh (bool): Perform token refresh if access_token is ``None`` or expired. Defaults to ``True``.
    credentials (class): Read-only credentials.
    headers (class): Requests `CaseInsensitiveDict`.
def do_random(context, seq):
    try:
        return random.choice(seq)
    except IndexError:
        return context.environment.undefined('No random item, sequence was empty.')
Return a random item from the sequence.
async def _sonar_data(self, data):
    data = data[1:-1]
    pin_number = data[0]
    val = int((data[PrivateConstants.MSB] << 7) + data[PrivateConstants.LSB])
    reply_data = []
    sonar_pin_entry = self.active_sonar_map[pin_number]
    if sonar_pin_entry[0] is not None:
        if sonar_pin_entry[2] != val:
            sonar_pin_entry[2] = val
            self.active_sonar_map[pin_number] = sonar_pin_entry
            if sonar_pin_entry[0]:
                reply_data.append(pin_number)
                reply_data.append(val)
                if sonar_pin_entry[1]:
                    await sonar_pin_entry[0](reply_data)
                else:
                    loop = self.loop
                    loop.call_soon(sonar_pin_entry[0], reply_data)
    else:
        sonar_pin_entry[1] = val
        self.active_sonar_map[pin_number] = sonar_pin_entry
    await asyncio.sleep(self.sleep_tune)

This method handles the incoming sonar data message and stores the data in the response table.

:param data: Message data from Firmata
:returns: No return value.
def query_names(self, pat):
    for item in self.chnames.items():
        if fnmatch.fnmatchcase(item[1], pat):
            print item

pat is a shell pattern; see fnmatch.fnmatchcase. Print the results to stdout.
def write_json(self):
    with open(self.results_folder_name + '/results-' +
              str(self.get_global_count()) + '.json', 'a') as f:
        json.dump(self.result, f)
Dump data into json file.
def ref(self):
    x = RefTrace(self.filehandle,
                 self.dtype,
                 len(self),
                 self.shape,
                 self.readonly,
                 )
    yield x
    x.flush()

A write-back version of Trace.

Returns
-------
ref : RefTrace
    `ref` is returned in a context manager, and must be in a ``with`` statement

Notes
-----
.. versionadded:: 1.6

Examples
--------
>>> with trace.ref as ref:
...     ref[10] += 1.617
def translate_state(self, s):
    if not isinstance(s, basestring):
        return s
    s = s.capitalize().replace("_", " ")
    return t(_(s))
Translate the given state string
def require_bool(self, key: str) -> bool:
    v = self.get_bool(key)
    if v is None:
        raise ConfigMissingError(self.full_key(key))
    return v

Returns a configuration value, as a bool, by its given key. If it doesn't exist, or the configuration value is not a legal bool, an error is thrown.

:param str key: The requested configuration key.
:return: The configuration key's value.
:rtype: bool
:raises ConfigMissingError: The configuration value did not exist.
:raises ConfigTypeError: The configuration value existed but couldn't be coerced to bool.
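A usage sketch; how the config object is constructed and how keys are spelled depend on the host library, so both are assumptions here:

try:
    enabled = config.require_bool('feature_enabled')
except ConfigMissingError:
    enabled = False   # choose an explicit fallback when the key is absent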
def tilequeue_rawr_seed_all(cfg, peripherals):
    rawr_yaml = cfg.yml.get('rawr')
    assert rawr_yaml is not None, 'Missing rawr configuration in yaml'
    group_by_zoom = rawr_yaml.get('group-zoom')
    assert group_by_zoom is not None, 'Missing group-zoom rawr config'
    max_coord = 2 ** group_by_zoom
    coords = []
    for x in xrange(0, max_coord):
        for y in xrange(0, max_coord):
            coords.append(Coordinate(zoom=group_by_zoom, column=x, row=y))
    _tilequeue_rawr_seed(cfg, peripherals, coords)
command to enqueue all the tiles at the group-by zoom
def _get_model(self, appname, modelname):
    app = self._get_app(appname)
    models = app.get_models()
    model = None
    for mod in models:
        if mod.__name__ == modelname:
            model = mod
            return model
    msg = "Model " + modelname + " not found"
return model or None
def stored_bind(self, instance):
    if self.id is None:
        return self.bind(instance)
    store = self._bound_pangler_store.setdefault(instance, {})
    p = store.get(self.id)
    if p is None:
        p = store[self.id] = self.bind(instance)
    return p
Bind an instance to this Pangler, using the bound Pangler store. This method functions identically to `bind`, except that it might return a Pangler which was previously bound to the provided instance.
def path_distance(points):
    vecs = np.diff(points, axis=0)[:, :3]
    d2 = [np.dot(p, p) for p in vecs]
    return np.sum(np.sqrt(d2))
Compute the path distance from given set of points
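A quick check of the computation on a simple two-segment path (rows are x, y, z coordinates):

import numpy as np

points = np.array([[0.0, 0.0, 0.0],
                   [3.0, 4.0, 0.0],    # first segment has length 5
                   [3.0, 4.0, 2.0]])   # second segment has length 2
print(path_distance(points))           # 7.0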
def connectivity(measure_names, b, c=None, nfft=512):
    con = Connectivity(b, c, nfft)
    try:
        return getattr(con, measure_names)()
    except TypeError:
        return dict((m, getattr(con, m)()) for m in measure_names)

Calculate connectivity measures.

Parameters
----------
measure_names : str or list of str
    Name(s) of the connectivity measure(s) to calculate. See :class:`Connectivity` for supported measures.
b : array, shape (n_channels, n_channels * model_order)
    VAR model coefficients. See :ref:`var-model-coefficients` for details about the arrangement of coefficients.
c : array, shape (n_channels, n_channels), optional
    Covariance matrix of the driving noise process. Identity matrix is used if set to None (default).
nfft : int, optional
    Number of frequency bins to calculate. Note that these points cover the range between 0 and half the sampling rate.

Returns
-------
result : array, shape (n_channels, n_channels, `nfft`)
    An array of shape (m, m, nfft) is returned if measures is a string. If measures is a list of strings, a dictionary is returned, where each key is the name of the measure, and the corresponding values are arrays of shape (m, m, nfft).

Notes
-----
When using this function, it is more efficient to get several measures at once than calling the function multiple times.

Examples
--------
>>> c = connectivity(['DTF', 'PDC'], [[0.3, 0.6], [0.0, 0.9]])
def count_async(self, limit=None, **q_options):
    qry = self._fix_namespace()
    return qry._count_async(limit=limit, **q_options)
Count the number of query results, up to a limit. This is the asynchronous version of Query.count().
def _uri(self, url):
    if url and not url.startswith('/'):
        return url
    uri = "{0}://{1}{2}{3}".format(
        self._protocol,
        self.real_connection.host,
        self._port_postfix(),
        url,
    )
    return uri
Returns request absolute URI
def from_object(obj):
    return obj if isinstance(obj, Contact) \
        else Contact(cast.string_to_enum(obj.name, Contact.ContactName),
                     obj.value,
                     is_primary=obj.is_primary and cast.string_to_boolean(obj.is_primary, strict=True),
                     is_verified=obj.is_verified and cast.string_to_boolean(obj.is_verified, strict=True))

Convert an object representing a contact information to an instance `Contact`.

@param obj: an object containing the following attributes:

* `name`: an item of the enumeration `ContactName` representing the type of this contact information.

* `value`: value of this contact information represented by a string, such as ``+84.01272170781``, the formatted value for a telephone number property.

* `is_primary`: indicate whether this contact property is the first to be used to contact the entity that this contact information corresponds to. There is only one primary contact property for a given property name (e.g., `EMAIL`, `PHONE`, `WEBSITE`).

* `is_verified`: indicate whether this contact information has been verified, whether it has been grabbed from a trusted Social Networking Service (SNS), or whether through a challenge/response process.

@raise ValueError: if the value of this contact information is null.
def on_network_adapter_change(self, network_adapter, change_adapter):
    if not isinstance(network_adapter, INetworkAdapter):
        raise TypeError("network_adapter can only be an instance of type INetworkAdapter")
    if not isinstance(change_adapter, bool):
        raise TypeError("change_adapter can only be an instance of type bool")
    self._call("onNetworkAdapterChange", in_p=[network_adapter, change_adapter])

Triggered when settings of a network adapter of the associated virtual machine have changed.

in network_adapter of type :class:`INetworkAdapter`

in change_adapter of type bool

raises :class:`VBoxErrorInvalidVmState`
    Session state prevents operation.

raises :class:`VBoxErrorInvalidObjectState`
    Session type prevents operation.
def minutes_in_range(self, start_minute, end_minute):
    start_idx = searchsorted(self._trading_minutes_nanos, start_minute.value)
    end_idx = searchsorted(self._trading_minutes_nanos, end_minute.value)
    if end_minute.value == self._trading_minutes_nanos[end_idx]:
        # the end minute is a market minute, so make the slice inclusive
        end_idx += 1
    return self.all_minutes[start_idx:end_idx]

Given start and end minutes, return all the calendar minutes in that range, inclusive.

Given minutes don't need to be calendar minutes.

Parameters
----------
start_minute: pd.Timestamp
    The minute representing the start of the desired range.
end_minute: pd.Timestamp
    The minute representing the end of the desired range.

Returns
-------
pd.DatetimeIndex
    The minutes in the desired range.
def diffuse_template(self, **kwargs):
    kwargs_copy = self.base_dict.copy()
    kwargs_copy.update(**kwargs)
    self._replace_none(kwargs_copy)
    localpath = NameFactory.diffuse_template_format.format(**kwargs_copy)
    if kwargs.get('fullpath', False):
        return self.fullpath(localpath=localpath)
    return localpath
return the file name for other diffuse map templates
def _process(self, data):
    try:
        packet = json.loads(data)
    except ValueError:
        logger.warning('Received invalid JSON from client. Ignoring.')
        return
    if packet['cmd'] == 'run-command':
        self._run_command(packet)
    elif packet['cmd'] == 'in':
        self._pipeinput.send_text(packet['data'])
    elif packet['cmd'] == 'size':
        data = packet['data']
        self.size = Size(rows=data[0], columns=data[1])
        self.pymux.invalidate()
    elif packet['cmd'] == 'start-gui':
        detach_other_clients = bool(packet['detach-others'])
        color_depth = packet['color-depth']
        term = packet['term']
        if detach_other_clients:
            for c in self.pymux.connections:
                c.detach_and_close()
        print('Create app...')
        self._create_app(color_depth=color_depth, term=term)
Process packet received from client.
def currency_context(context):
    request = context['request']
    currency_code = memoize_nullary(lambda: get_currency_code(request))
    context['CURRENCIES'] = Currency.active.all()
    context['CURRENCY_CODE'] = currency_code
    context['CURRENCY'] = memoize_nullary(lambda: get_currency(currency_code))
    return ''

Use instead of a context processor. Context variables are only valid within the block scope.
def show_tabulated(self, begin, middle, end):
    internal_assert(len(begin) < info_tabulation, "info message too long", begin)
    self.show(begin + " " * (info_tabulation - len(begin)) + middle + " " + end)
Shows a tabulated message.
def create_user(self, claims):
    email = claims.get('email')
    username = self.get_username(claims)
    return self.UserModel.objects.create_user(username, email)
Return object for a newly created user account.
def unregister(self, command):
    if command not in self._commands.keys():
        self.log.warning("Can not unregister command %s" % command)
    else:
        del(self._click_root_command.commands[command])
        del(self._commands[command])
        self.log.debug("Command %s got unregistered" % command)

Unregisters an existing command, so that this command is no longer available on the command line interface.

This function is mainly used during plugin deactivation.

:param command: Name of the command
def purge(datasets, reuses, organizations):
    purge_all = not any((datasets, reuses, organizations))
    if purge_all or datasets:
        log.info('Purging datasets')
        purge_datasets()
    if purge_all or reuses:
        log.info('Purging reuses')
        purge_reuses()
    if purge_all or organizations:
        log.info('Purging organizations')
        purge_organizations()
    success('Done')
Permanently remove data flagged as deleted. If no model flag is given, all models are purged.
def datapt_to_wcspt(self, datapt, coords='data', naxispath=None):
    if naxispath:
        raise NotImplementedError
    return np.asarray([self.pixtoradec((pt[0], pt[1]), coords=coords)
                       for pt in datapt])

Convert multiple data points to WCS.

Parameters
----------
datapt : array-like
    Pixel coordinates in the format of ``[[x0, y0, ...], [x1, y1, ...], ..., [xn, yn, ...]]``.
coords : 'data' or None, optional, default to 'data'
    Expresses whether the data coordinate is indexed from zero.
naxispath : list-like or None, optional, defaults to None
    A sequence defining the pixel indexes > 2D, if any.

Returns
-------
wcspt : array-like
    WCS coordinates in the format of ``[[ra0, dec0], [ra1, dec1], ..., [ran, decn]]``.
def move_to_clipboard(self, request, files_queryset, folders_queryset):
    if not self.has_change_permission(request):
        raise PermissionDenied
    if request.method != 'POST':
        return None
    clipboard = tools.get_user_clipboard(request.user)
    check_files_edit_permissions(request, files_queryset)
    check_folder_edit_permissions(request, folders_queryset)
    files_count = [0]

    def move_files(files):
        files_count[0] += tools.move_file_to_clipboard(files, clipboard)

    def move_folders(folders):
        for f in folders:
            move_files(f.files)
            move_folders(f.children.all())

    move_files(files_queryset)
    move_folders(folders_queryset)
    self.message_user(request, _("Successfully moved %(count)d files to "
                                 "clipboard.") % {"count": files_count[0]})
    return None
Action which moves the selected files and files in selected folders to clipboard.
def jwt_required(realm=None):
    def wrapper(fn):
        @wraps(fn)
        def decorator(*args, **kwargs):
            _jwt_required(realm or current_app.config['JWT_DEFAULT_REALM'])
            return fn(*args, **kwargs)
        return decorator
    return wrapper

View decorator that requires a valid JWT token to be present in the request.

:param realm: an optional realm
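Typical Flask usage of this decorator; the route and view body are illustrative:

@app.route('/protected')
@jwt_required()
def protected():
    return 'only visible with a valid JWT'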
def remove_alert(thing_name, key, session=None):
    return _request('get', '/remove/alert/for/{0}'.format(thing_name),
                    params={'key': key}, session=session)
Remove an alert for the given thing
def dbmin20years(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `dbmin20years`'.format(value))
    self._dbmin20years = value

Corresponds to IDD Field `dbmin20years`

20-year return period values for minimum extreme dry-bulb temperature

Args:
    value (float): value for IDD Field `dbmin20years`
        Unit: C
        if `value` is None it will not be checked against the specification and is assumed to be a missing value

Raises:
    ValueError: if `value` is not a valid value
def export(self, node):
    dictexporter = self.dictexporter or DictExporter()
    data = dictexporter.export(node)
    return json.dumps(data, **self.kwargs)
Return JSON for tree starting at `node`.
def parse_mailto(mailto_str):
    if mailto_str.startswith('mailto:'):
        import urllib.parse
        to_str, parms_str = mailto_str[7:].partition('?')[::2]
        headers = {}
        body = u''
        to = urllib.parse.unquote(to_str)
        if to:
            headers['To'] = [to]
        for s in parms_str.split('&'):
            key, value = s.partition('=')[::2]
            key = key.capitalize()
            if key == 'Body':
                body = urllib.parse.unquote(value)
            elif value:
                headers[key] = [urllib.parse.unquote(value)]
        return (headers, body)
    else:
        return (None, None)

Interpret mailto-string.

:param mailto_str: the string to interpret. Must conform to :rfc:`2368`.
:type mailto_str: str
:return: the header fields and the body found in the mailto link as a tuple of length two
:rtype: tuple(dict(str->list(str)), str)
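What the parser produces for a typical mailto link:

headers, body = parse_mailto('mailto:alice@example.com?subject=Hi%20there&body=Hello')
print(headers)   # {'To': ['alice@example.com'], 'Subject': ['Hi there']}
print(body)      # 'Hello'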
def getCodecList(self):
    if self.checkVersion('1.4'):
        cmd = "core show codecs"
    else:
        cmd = "show codecs"
    cmdresp = self.executeCommand(cmd)
    info_dict = {}
    for line in cmdresp.splitlines():
        mobj = re.match(r'\s*(\d+)\s+\((.+)\)\s+\((.+)\)\s+(\w+)\s+(\w+)\s+\((.+)\)$', line)
        if mobj:
            info_dict[mobj.group(5)] = (mobj.group(4), mobj.group(6))
    return info_dict

Query Asterisk Manager Interface for defined codecs.

CLI Command - core show codecs

@return: Dictionary - Short Name -> (Type, Long Name)
def create_toolbutton(parent, text=None, shortcut=None, icon=None, tip=None,
                      toggled=None, triggered=None, autoraise=True,
                      text_beside_icon=False):
    button = QToolButton(parent)
    if text is not None:
        button.setText(text)
    if icon is not None:
        if is_text_string(icon):
            icon = get_icon(icon)
        button.setIcon(icon)
    if text is not None or tip is not None:
        button.setToolTip(text if tip is None else tip)
    if text_beside_icon:
        button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
    button.setAutoRaise(autoraise)
    if triggered is not None:
        button.clicked.connect(triggered)
    if toggled is not None:
        button.toggled.connect(toggled)
        button.setCheckable(True)
    if shortcut is not None:
        button.setShortcut(shortcut)
    return button
Create a QToolButton
def _ctypes_code_parameter(lines, parameter, position):
    mdict = {
        'indices': _ctypes_indices,
        'variable': _ctypes_variables,
        'out': _ctypes_out,
        'regular': _ctypes_regular,
        'saved': _ctypes_saved,
        'assign': _ctypes_assign,
        'clean': _ctypes_clean
    }
    line = mdict[position](parameter)
    if line is not None:
        value, blank = line
        lines.append(value)
        if blank:
            lines.append("")

Returns the code for the specified parameter being written into a subroutine wrapper.

:arg position: one of ['indices', 'variable', 'out', 'regular', 'saved', 'assign', 'clean']
def set_close_callback(self, callback: Optional[Callable[[], None]]) -> None:
    self._close_callback = callback
    self._maybe_add_error_listener()

Call the given callback when the stream is closed.

This mostly is not necessary for applications that use the `.Future` interface; all outstanding ``Futures`` will resolve with a `StreamClosedError` when the stream is closed. However, it is still useful as a way to signal that the stream has been closed while no other read or write is in progress.

Unlike other callback-based interfaces, ``set_close_callback`` was not removed in Tornado 6.0.
def read(parts):
    cur_dir = os.path.abspath(os.path.dirname(__file__))
    with codecs.open(os.path.join(cur_dir, *parts), "rb", "utf-8") as f:
        return f.read()

Build an absolute path from parts array and return the contents of the resulting file. Assume UTF-8 encoding.
def bootstrap_buttons(parser, token):
    kwargs = parse_token_contents(parser, token)
    kwargs["nodelist"] = parser.parse(("endbuttons",))
    parser.delete_first_token()
    return ButtonsNode(**kwargs)

Render buttons for form

**Tag name**::

    buttons

**Parameters**:

    submit
        Text for a submit button

    reset
        Text for a reset button

**Usage**::

    {% buttons %}{% endbuttons %}

**Example**::

    {% buttons submit='OK' reset="Cancel" %}{% endbuttons %}
def get_acls_recursive(self, path, depth, include_ephemerals):
    yield path, self.get_acls(path)[0]
    if depth == -1:
        return
    for tpath, _ in self.tree(path, depth, full_path=True):
        try:
            acls, stat = self.get_acls(tpath)
        except NoNodeError:
            continue
        if not include_ephemerals and stat.ephemeralOwner != 0:
            continue
        yield tpath, acls

A recursive generator wrapper for get_acls.

:param path: path from which to start
:param depth: depth of the recursion (-1 no recursion, 0 means no limit)
:param include_ephemerals: get ACLs for ephemerals too
def fit_for_distance(self):
    for prop in self.properties.keys():
        if prop in self.ic.bands:
            return True
    return False
``True`` if any of the properties are apparent magnitudes.
def assign(self, dst, req, src):
    if req == 'null':
        return
    elif req in ('write', 'inplace'):
        dst[:] = src
    elif req == 'add':
        dst[:] += src
Helper function for assigning into dst depending on requirements.
def container_instance_from_string(id):
    try:
        service, instance = id.rsplit('_', 1)
        instance = int(instance)
    except (TypeError, ValueError):
        raise context.ValueError("Invalid container id %r" % id)
    return _proto.ContainerInstance(service_name=service, instance=instance)
Create a ContainerInstance from an id string
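The id format this parser expects is the service name and instance index joined by a final underscore, so the split reduces to an rsplit:

service, instance = 'myservice_3'.rsplit('_', 1)
print(service, int(instance))   # myservice 3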
def push_primary_analyses_for_removal(self, analysis_request, analyses):
    to_remove = self.analyses_to_remove.get(analysis_request, [])
    to_remove.extend(analyses)
    self.analyses_to_remove[analysis_request] = list(set(to_remove))
Stores the analyses to be removed after partitions creation
def set_focus(self, pos):
    "Set the focus in the underlying body widget."
    logging.debug('setting focus to %s ', pos)
    self.body.set_focus(pos)
Set the focus in the underlying body widget.
def delete_info(ctx, info):
    head = ctx.parent.head
    vcf_handle = ctx.parent.handle
    outfile = ctx.parent.outfile
    silent = ctx.parent.silent
    if not info:
        logger.error("No info provided")
        sys.exit("Please provide an info string")
    if info not in head.info_dict:
        logger.error("Info '{0}' is not specified in vcf header".format(info))
        sys.exit("Please provide a valid info field")
    head.remove_header(info)
    print_headers(head, outfile=outfile, silent=silent)
    for line in vcf_handle:
        line = line.rstrip()
        new_line = remove_vcf_info(keyword=info, variant_line=line)
        print_variant(variant_line=new_line, outfile=outfile, silent=silent)

Delete an info field from all variants in a vcf
def resize2fs(device):
    cmd = 'resize2fs {0}'.format(device)
    try:
        out = __salt__['cmd.run_all'](cmd, python_shell=False)
    except subprocess.CalledProcessError as err:
        return False
    if out['retcode'] == 0:
        return True

Resizes the filesystem.

CLI Example:

.. code-block:: bash

    salt '*' disk.resize2fs /dev/sda1
def get_object(self):
    if self._object:
        return self._object
    self._object = super().get_object()
    if not self.AUDITOR_EVENT_TYPES:
        return self._object
    method = self.request.method
    event_type = self.AUDITOR_EVENT_TYPES.get(method)
    if method == 'GET' and event_type and is_user(self.request.user):
        auditor.record(event_type=event_type,
                       instance=self._object,
                       actor_id=self.request.user.id,
                       actor_name=self.request.user.username)
    elif method == 'DELETE' and event_type and is_user(self.request.user):
        auditor.record(event_type=event_type,
                       instance=self._object,
                       actor_id=self.request.user.id,
                       actor_name=self.request.user.username)
    return self._object
We memoize the access to this function in case a second call is made.
def db_exists(cls, impl, working_dir):
    path = config.get_snapshots_filename(impl, working_dir)
    return os.path.exists(path)
Does the chainstate db exist?
def run_bottleneck_on_image(sess, image_data, image_data_tensor,
                            decoded_image_tensor, resized_input_tensor,
                            bottleneck_tensor):
    # First decode the JPEG image, resize it, and rescale the pixel values.
    resized_input_values = sess.run(decoded_image_tensor,
                                    {image_data_tensor: image_data})
    # Then run it through the recognition network.
    bottleneck_values = sess.run(bottleneck_tensor,
                                 {resized_input_tensor: resized_input_values})
    bottleneck_values = np.squeeze(bottleneck_values)
    return bottleneck_values

Runs inference on an image to extract the 'bottleneck' summary layer.

Args:
    sess: Current active TensorFlow Session.
    image_data: String of raw JPEG data.
    image_data_tensor: Input data layer in the graph.
    decoded_image_tensor: Output of initial image resizing and preprocessing.
    resized_input_tensor: The input node of the recognition graph.
    bottleneck_tensor: Layer before the final softmax.

Returns:
    Numpy array of bottleneck values.
def getElementByWdomId(self, id: Union[str]) -> Optional[WebEventTarget]:
    elm = getElementByWdomId(id)
    if elm and elm.ownerDocument is self:
        return elm
    return None
Get an element node with ``wdom_id``. If this document does not have the element with the id, return None.
def apply_gates_to_fd(stilde_dict, gates):
    outdict = dict(stilde_dict.items())
    strain_dict = dict([[ifo, outdict[ifo].to_timeseries()] for ifo in gates])
    for ifo, d in apply_gates_to_td(strain_dict, gates).items():
        outdict[ifo] = d.to_frequencyseries()
    return outdict

Applies the given dictionary of gates to the given dictionary of strain in the frequency domain.

Gates are applied by IFFT-ing the strain data to the time domain, applying the gate, then FFT-ing back to the frequency domain.

Parameters
----------
stilde_dict : dict
    Dictionary of frequency-domain strain, keyed by the ifos.
gates : dict
    Dictionary of gates. Keys should be the ifo to apply the data to, values are a tuple giving the central time of the gate, the half duration, and the taper duration.

Returns
-------
dict
    Dictionary of frequency-domain strain with the gates applied.
def search_info(self, search_index):
    ddoc_search_info = self.r_session.get(
        '/'.join([self.document_url, '_search_info', search_index]))
    ddoc_search_info.raise_for_status()
    return response_to_json_dict(ddoc_search_info)

Retrieves information about a specified search index within the design document, returns dictionary.

GET databasename/_design/{ddoc}/_search_info/{search_index}
def setup(
    *,
    verbose: bool = False,
    quiet: bool = False,
    color: str = "auto",
    title: str = "auto",
    timestamp: bool = False
) -> None:
    _setup(verbose=verbose, quiet=quiet, color=color, title=title,
           timestamp=timestamp)

Configure behavior of message functions.

:param verbose: Whether :func:`debug` messages should get printed
:param quiet: Hide every message except :func:`warning`, :func:`error`, and :func:`fatal`
:param color: Choices: 'auto', 'always', or 'never'. Whether to color output. By default ('auto'), only use color when output is a terminal.
:param title: Ditto for setting terminal title
:param timestamp: Whether to prefix every message with a time stamp
def write(self, text):
    if text:
        if text[0] in '(w':
            self.log.debug(text[:-1])
            return
        if self.access_log:
            self.access_log.write(text)
        self.log.info(text[:-1])
Write to appropriate target.