Dataset schema: text (string, lengths 78 to 104k), code_tokens (sequence), avg_line_len (float64, 7.91 to 980), score (float64, 0 to 487).
def setup(self):
    """When subclassing remember to call SubtitleChangeCommand::setup() to perform generic checks."""
    if not isinstance(self.filePath, str):
        raise TypeError("File path is not a string!")
    if self.controller is None:
        raise ValueError("Command controller hasn't been specified!")
[ "def", "setup", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "filePath", ",", "str", ")", ":", "raise", "TypeError", "(", "\"File path is not a string!\"", ")", "if", "self", ".", "controller", "is", "None", ":", "raise", "ValueError", "(", "\"Command controller hasn't been specified!\"", ")" ]
avg_line_len: 48.428571, score: 12.142857
def listfolder(p):
    """Yield the names of the directories in path `p` (directories only)."""
    for entry in scandir.scandir(p):
        if entry.is_dir():
            yield entry.name
[ "def", "listfolder", "(", "p", ")", ":", "for", "entry", "in", "scandir", ".", "scandir", "(", "p", ")", ":", "if", "entry", ".", "is_dir", "(", ")", ":", "yield", "entry", ".", "name" ]
avg_line_len: 22.375, score: 9.625
def drop_matching_records(self, check):
    """Remove all records matching `check` from the DB."""
    matches = self._match(check)
    for m in matches:
        del self._records[m['msg_id']]
[ "def", "drop_matching_records", "(", "self", ",", "check", ")", ":", "matches", "=", "self", ".", "_match", "(", "check", ")", "for", "m", "in", "matches", ":", "del", "self", ".", "_records", "[", "m", "[", "'msg_id'", "]", "]" ]
avg_line_len: 36.8, score: 4.4
def __get_user_env_vars(self):
    """Return the user defined environment variables"""
    return (os.environ.get(self.GP_URL_ENV_VAR),
            os.environ.get(self.GP_INSTANCE_ID_ENV_VAR),
            os.environ.get(self.GP_USER_ID_ENV_VAR),
            os.environ.get(self.GP_PASSWORD_ENV_VAR),
            os.environ.get(self.GP_IAM_API_KEY_ENV_VAR))
[ "def", "__get_user_env_vars", "(", "self", ")", ":", "return", "(", "os", ".", "environ", ".", "get", "(", "self", ".", "GP_URL_ENV_VAR", ")", ",", "os", ".", "environ", ".", "get", "(", "self", ".", "GP_INSTANCE_ID_ENV_VAR", ")", ",", "os", ".", "environ", ".", "get", "(", "self", ".", "GP_USER_ID_ENV_VAR", ")", ",", "os", ".", "environ", ".", "get", "(", "self", ".", "GP_PASSWORD_ENV_VAR", ")", ",", "os", ".", "environ", ".", "get", "(", "self", ".", "GP_IAM_API_KEY_ENV_VAR", ")", ")" ]
avg_line_len: 52, score: 11.714286
def reset_all(self, suppress_logging=False):
    """Iterates through the list of established connections and resets them
    by disconnecting and reconnecting."""
    pool_names = list(self.pools)
    for name in pool_names:
        self.reset(name, suppress_logging)
[ "def", "reset_all", "(", "self", ",", "suppress_logging", "=", "False", ")", ":", "pool_names", "=", "list", "(", "self", ".", "pools", ")", "for", "name", "in", "pool_names", ":", "self", ".", "reset", "(", "name", ",", "suppress_logging", ")" ]
avg_line_len: 54.6, score: 4.4
def get_file_info(self, relativePath):
    """
    Get file information dict from the repository given its relative path.

    :Parameters:
        #. relativePath (string): The path of the file, relative to the repository.

    :Returns:
        #. info (None, dictionary): The file information dictionary.
           If None, an error has occurred.
        #. errorMessage (string): The error message if any error occurred.
    """
    relativePath = self.to_repo_relative_path(path=relativePath, split=False)
    fileName = os.path.basename(relativePath)
    isRepoFile, fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
    if not isRepoFile:
        return None, "file is not a registered repository file."
    if not infoOnDisk:
        return None, "file is a registered repository file but info file missing"
    fileInfoPath = os.path.join(self.__path, os.path.dirname(relativePath), self.__fileInfo % fileName)
    try:
        with open(fileInfoPath, 'rb') as fd:
            info = pickle.load(fd)
    except Exception as err:
        return None, "Unable to read file info from disk (%s)" % str(err)
    return info, ''
[ "def", "get_file_info", "(", "self", ",", "relativePath", ")", ":", "relativePath", "=", "self", ".", "to_repo_relative_path", "(", "path", "=", "relativePath", ",", "split", "=", "False", ")", "fileName", "=", "os", ".", "path", ".", "basename", "(", "relativePath", ")", "isRepoFile", ",", "fileOnDisk", ",", "infoOnDisk", ",", "classOnDisk", "=", "self", ".", "is_repository_file", "(", "relativePath", ")", "if", "not", "isRepoFile", ":", "return", "None", ",", "\"file is not a registered repository file.\"", "if", "not", "infoOnDisk", ":", "return", "None", ",", "\"file is a registered repository file but info file missing\"", "fileInfoPath", "=", "os", ".", "path", ".", "join", "(", "self", ".", "__path", ",", "os", ".", "path", ".", "dirname", "(", "relativePath", ")", ",", "self", ".", "__fileInfo", "%", "fileName", ")", "try", ":", "with", "open", "(", "fileInfoPath", ",", "'rb'", ")", "as", "fd", ":", "info", "=", "pickle", ".", "load", "(", "fd", ")", "except", "Exception", "as", "err", ":", "return", "None", ",", "\"Unable to read file info from disk (%s)\"", "%", "str", "(", "err", ")", "return", "info", ",", "''" ]
avg_line_len: 46.074074, score: 24.814815
def setup(self, pin, value):
    """Set the input or output mode for a specified pin.  Mode should be
    either GPIO.OUT or GPIO.IN.
    """
    self._validate_pin(pin)
    # Set bit to 1 for input or 0 for output.
    if value == GPIO.IN:
        self.iodir[int(pin/8)] |= 1 << (int(pin%8))
    elif value == GPIO.OUT:
        self.iodir[int(pin/8)] &= ~(1 << (int(pin%8)))
    else:
        raise ValueError('Unexpected value.  Must be GPIO.IN or GPIO.OUT.')
    self.write_iodir()
[ "def", "setup", "(", "self", ",", "pin", ",", "value", ")", ":", "self", ".", "_validate_pin", "(", "pin", ")", "# Set bit to 1 for input or 0 for output.", "if", "value", "==", "GPIO", ".", "IN", ":", "self", ".", "iodir", "[", "int", "(", "pin", "/", "8", ")", "]", "|=", "1", "<<", "(", "int", "(", "pin", "%", "8", ")", ")", "elif", "value", "==", "GPIO", ".", "OUT", ":", "self", ".", "iodir", "[", "int", "(", "pin", "/", "8", ")", "]", "&=", "~", "(", "1", "<<", "(", "int", "(", "pin", "%", "8", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "'Unexpected value. Must be GPIO.IN or GPIO.OUT.'", ")", "self", ".", "write_iodir", "(", ")" ]
avg_line_len: 40, score: 13
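The iodir bookkeeping above packs one direction bit per pin into 8-bit registers. A standalone sketch of that bit arithmetic, with illustrative pin values and no GPIO hardware assumed:

# Pin 10 lives in register 10 // 8 == 1, at bit 10 % 8 == 2.
iodir = [0x00, 0x00]                         # two 8-bit direction registers, all outputs
pin = 10
iodir[int(pin / 8)] |= 1 << int(pin % 8)     # mark pin 10 as input
assert iodir == [0x00, 0x04]
iodir[int(pin / 8)] &= ~(1 << int(pin % 8))  # back to output
assert iodir == [0x00, 0x00]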
def sample(self, num_rows=1):
    """Create synthetic values statistically similar to the original dataset.

    Args:
        num_rows: `int` amount of samples to generate.

    Returns:
        np.ndarray: Sampled data.
    """
    self.check_fit()
    res = {}
    means = np.zeros(self.covariance.shape[0])
    size = (num_rows,)
    clean_cov = np.nan_to_num(self.covariance)
    samples = np.random.multivariate_normal(means, clean_cov, size=size)
    for i, (label, distrib) in enumerate(self.distribs.items()):
        cdf = stats.norm.cdf(samples[:, i])
        res[label] = distrib.percent_point(cdf)
    return pd.DataFrame(data=res)
[ "def", "sample", "(", "self", ",", "num_rows", "=", "1", ")", ":", "self", ".", "check_fit", "(", ")", "res", "=", "{", "}", "means", "=", "np", ".", "zeros", "(", "self", ".", "covariance", ".", "shape", "[", "0", "]", ")", "size", "=", "(", "num_rows", ",", ")", "clean_cov", "=", "np", ".", "nan_to_num", "(", "self", ".", "covariance", ")", "samples", "=", "np", ".", "random", ".", "multivariate_normal", "(", "means", ",", "clean_cov", ",", "size", "=", "size", ")", "for", "i", ",", "(", "label", ",", "distrib", ")", "in", "enumerate", "(", "self", ".", "distribs", ".", "items", "(", ")", ")", ":", "cdf", "=", "stats", ".", "norm", ".", "cdf", "(", "samples", "[", ":", ",", "i", "]", ")", "res", "[", "label", "]", "=", "distrib", ".", "percent_point", "(", "cdf", ")", "return", "pd", ".", "DataFrame", "(", "data", "=", "res", ")" ]
avg_line_len: 28.791667, score: 21.75
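The sampler above is a Gaussian copula: draw correlated standard normals, map them to uniforms through the normal CDF, then through each marginal's inverse CDF. A self-contained sketch of that pipeline, with scipy frozen distributions standing in for the model's `distribs` and made-up covariance values:

import numpy as np
from scipy import stats

covariance = np.array([[1.0, 0.8], [0.8, 1.0]])
marginals = {'a': stats.expon(), 'b': stats.norm(loc=5, scale=2)}

samples = np.random.multivariate_normal(np.zeros(2), covariance, size=1000)
uniforms = stats.norm.cdf(samples)           # map each column to U(0, 1)
columns = {label: dist.ppf(uniforms[:, i])   # inverse CDF of each marginal
           for i, (label, dist) in enumerate(marginals.items())}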
def can_read(self):
    """Check if the field is readable
    """
    sm = getSecurityManager()
    if not sm.checkPermission(permissions.View, self.context):
        return False
    return True
[ "def", "can_read", "(", "self", ")", ":", "sm", "=", "getSecurityManager", "(", ")", "if", "not", "sm", ".", "checkPermission", "(", "permissions", ".", "View", ",", "self", ".", "context", ")", ":", "return", "False", "return", "True" ]
avg_line_len: 30.428571, score: 13
def save(self, path):
    """
    Saves all tensors of the root set to a file defined by *path*.
    """
    path = os.path.expandvars(os.path.expanduser(path))
    with open(path, "wb") as f:
        pickle.dump(self.roots, f)
[ "def", "save", "(", "self", ",", "path", ")", ":", "path", "=", "os", ".", "path", ".", "expandvars", "(", "os", ".", "path", ".", "expanduser", "(", "path", ")", ")", "with", "open", "(", "path", ",", "\"wb\"", ")", "as", "f", ":", "pickle", ".", "dump", "(", "self", ".", "roots", ",", "f", ")" ]
avg_line_len: 35, score: 10.714286
def update_history_log(history_log, clear=False, description=None, test_log=None):
    """
    Update the history log file with the given test log entry.

    If the clear flag is provided the log file is deleted.
    """
    if not test_log and not clear:
        raise IpaUtilsException(
            'A test log or clear flag must be provided.'
        )

    if clear:
        with ignored(OSError):
            os.remove(history_log)
    else:
        history_dir = os.path.dirname(history_log)
        if not os.path.isdir(history_dir):
            try:
                os.makedirs(history_dir)
            except OSError as error:
                raise IpaUtilsException(
                    'Unable to create directory: %s' % error
                )
        with open(history_log, 'a+') as f:
            # Using append mode creates the file if it does not exist.
            if description:
                description = '"%s"' % description
            out = '{} {}'.format(
                test_log,
                description or ''
            )
            f.write(out.strip() + '\n')
[ "def", "update_history_log", "(", "history_log", ",", "clear", "=", "False", ",", "description", "=", "None", ",", "test_log", "=", "None", ")", ":", "if", "not", "test_log", "and", "not", "clear", ":", "raise", "IpaUtilsException", "(", "'A test log or clear flag must be provided.'", ")", "if", "clear", ":", "with", "ignored", "(", "OSError", ")", ":", "os", ".", "remove", "(", "history_log", ")", "else", ":", "history_dir", "=", "os", ".", "path", ".", "dirname", "(", "history_log", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "history_dir", ")", ":", "try", ":", "os", ".", "makedirs", "(", "history_dir", ")", "except", "OSError", "as", "error", ":", "raise", "IpaUtilsException", "(", "'Unable to create directory: %s'", "%", "error", ")", "with", "open", "(", "history_log", ",", "'a+'", ")", "as", "f", ":", "# Using append mode creates file if it does not exist", "if", "description", ":", "description", "=", "'\"%s\"'", "%", "description", "out", "=", "'{} {}'", ".", "format", "(", "test_log", ",", "description", "or", "''", ")", "f", ".", "write", "(", "out", ".", "strip", "(", ")", "+", "'\\n'", ")" ]
avg_line_len: 29.810811, score: 13.864865
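A hypothetical usage sketch for the function above; the paths and description are made up, and `IpaUtilsException`/`ignored` come from the surrounding module:

# Append an entry for a finished test run, then wipe the history.
update_history_log('/tmp/ipa/history.log',
                   test_log='/tmp/ipa/results/test.log',
                   description='nightly run')
update_history_log('/tmp/ipa/history.log', clear=True)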
def _ExtractRequestSummaryFields(document):
    """Extract logging fields from the request's suds.sax.element.Element.

    Args:
        document: A suds.sax.element.Element instance containing the API request.

    Returns:
        A dict mapping logging field names to their corresponding value.
    """
    headers = document.childAtPath('Header/RequestHeader')
    body = document.childAtPath('Body')
    summary_fields = {
        'methodName': body.getChildren()[0].name
    }

    # Extract AdWords-specific fields if they exist.
    # Note: compare against None explicitly, because the element itself
    # always evaluates to False in a boolean context.
    client_customer_id = headers.getChild('clientCustomerId')
    if client_customer_id is not None:
        summary_fields['clientCustomerId'] = client_customer_id.text

    # Extract Ad Manager-specific fields if they exist.
    # Note: compare against None explicitly, because the element itself
    # always evaluates to False in a boolean context.
    network_code = headers.getChild('networkCode')
    if network_code is not None:
        summary_fields['networkCode'] = network_code.text

    return summary_fields
[ "def", "_ExtractRequestSummaryFields", "(", "document", ")", ":", "headers", "=", "document", ".", "childAtPath", "(", "'Header/RequestHeader'", ")", "body", "=", "document", ".", "childAtPath", "(", "'Body'", ")", "summary_fields", "=", "{", "'methodName'", ":", "body", ".", "getChildren", "(", ")", "[", "0", "]", ".", "name", "}", "# Extract AdWords-specific fields if they exist.", "# Note: We need to check if None because this will always evaluate False.", "client_customer_id", "=", "headers", ".", "getChild", "(", "'clientCustomerId'", ")", "if", "client_customer_id", "is", "not", "None", ":", "summary_fields", "[", "'clientCustomerId'", "]", "=", "client_customer_id", ".", "text", "# Extract Ad Manager-specific fields if they exist.", "# Note: We need to check if None because this will always evaluate False.", "network_code", "=", "headers", ".", "getChild", "(", "'networkCode'", ")", "if", "network_code", "is", "not", "None", ":", "summary_fields", "[", "'networkCode'", "]", "=", "network_code", ".", "text", "return", "summary_fields" ]
avg_line_len: 34.827586, score: 22.103448
def k_nearest_approx(self, vec, k):
    """Get the k nearest neighbors of a vector (in terms of cosine similarity).

    :param (np.array) vec: query vector
    :param (int) k: number of top neighbors to return

    :return (list[tuple[str, float]]): a list of (word, cosine similarity) pairs,
        in descending order
    """
    if not hasattr(self, 'lshf'):
        self.lshf = self._init_lsh_forest()
    # TODO(kelvin): make this inner product score, to be consistent with k_nearest
    distances, neighbors = self.lshf.kneighbors([vec], n_neighbors=k, return_distance=True)
    scores = np.subtract(1, distances)
    nbr_score_pairs = self._word_to_score(np.squeeze(neighbors), np.squeeze(scores))
    return sorted(nbr_score_pairs.items(), key=lambda x: x[1], reverse=True)
[ "def", "k_nearest_approx", "(", "self", ",", "vec", ",", "k", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'lshf'", ")", ":", "self", ".", "lshf", "=", "self", ".", "_init_lsh_forest", "(", ")", "# TODO(kelvin): make this inner product score, to be consistent with k_nearest", "distances", ",", "neighbors", "=", "self", ".", "lshf", ".", "kneighbors", "(", "[", "vec", "]", ",", "n_neighbors", "=", "k", ",", "return_distance", "=", "True", ")", "scores", "=", "np", ".", "subtract", "(", "1", ",", "distances", ")", "nbr_score_pairs", "=", "self", ".", "_word_to_score", "(", "np", ".", "squeeze", "(", "neighbors", ")", ",", "np", ".", "squeeze", "(", "scores", ")", ")", "return", "sorted", "(", "nbr_score_pairs", ".", "items", "(", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ",", "reverse", "=", "True", ")" ]
avg_line_len: 47.588235, score: 26.529412
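The `np.subtract(1, distances)` step relies on the LSH forest returning cosine distances, so similarity = 1 - distance. A tiny self-contained check of that identity:

import numpy as np
from scipy.spatial.distance import cosine

a, b = np.array([1.0, 0.0]), np.array([1.0, 1.0])
similarity = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))
assert np.isclose(1 - cosine(a, b), similarity)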
def esinw(b, orbit, solve_for=None, **kwargs):
    """
    Create a constraint for esinw in an orbit.

    If 'esinw' does not exist in the orbit, it will be created.

    :parameter b: the :class:`phoebe.frontend.bundle.Bundle`
    :parameter str orbit: the label of the orbit in which this constraint
        should be built
    :parameter str solve_for: if 'esinw' should not be the derived/constrained
        parameter, provide which other parameter should be derived
        (ie 'ecc', 'per0')
    :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of
        arguments that were passed to this function)
    """
    orbit_ps = _get_system_ps(b, orbit)

    metawargs = orbit_ps.meta
    metawargs.pop('qualifier')

    esinw_def = FloatParameter(qualifier='esinw', value=0.0,
                               default_unit=u.dimensionless_unscaled,
                               limits=(-1.0, 1.0),
                               description='Eccentricity times sin of argument of periastron')
    esinw, created = b.get_or_create('esinw', esinw_def, **metawargs)

    ecc = b.get_parameter(qualifier='ecc', **metawargs)
    per0 = b.get_parameter(qualifier='per0', **metawargs)

    if solve_for in [None, esinw]:
        lhs = esinw
        rhs = ecc * sin(per0)
    elif solve_for == ecc:
        lhs = ecc
        rhs = esinw / sin(per0)
    elif solve_for == per0:
        lhs = per0
        # rhs = arcsin(esinw/ecc)
        rhs = esinw2per0(ecc, esinw)
    else:
        raise NotImplementedError

    return lhs, rhs, {'orbit': orbit}
[ "def", "esinw", "(", "b", ",", "orbit", ",", "solve_for", "=", "None", ",", "*", "*", "kwargs", ")", ":", "orbit_ps", "=", "_get_system_ps", "(", "b", ",", "orbit", ")", "metawargs", "=", "orbit_ps", ".", "meta", "metawargs", ".", "pop", "(", "'qualifier'", ")", "esinw_def", "=", "FloatParameter", "(", "qualifier", "=", "'esinw'", ",", "value", "=", "0.0", ",", "default_unit", "=", "u", ".", "dimensionless_unscaled", ",", "limits", "=", "(", "-", "1.0", ",", "1.0", ")", ",", "description", "=", "'Eccentricity times sin of argument of periastron'", ")", "esinw", ",", "created", "=", "b", ".", "get_or_create", "(", "'esinw'", ",", "esinw_def", ",", "*", "*", "metawargs", ")", "ecc", "=", "b", ".", "get_parameter", "(", "qualifier", "=", "'ecc'", ",", "*", "*", "metawargs", ")", "per0", "=", "b", ".", "get_parameter", "(", "qualifier", "=", "'per0'", ",", "*", "*", "metawargs", ")", "if", "solve_for", "in", "[", "None", ",", "esinw", "]", ":", "lhs", "=", "esinw", "rhs", "=", "ecc", "*", "sin", "(", "per0", ")", "elif", "solve_for", "==", "ecc", ":", "lhs", "=", "ecc", "rhs", "=", "esinw", "/", "sin", "(", "per0", ")", "elif", "solve_for", "==", "per0", ":", "lhs", "=", "per0", "#rhs = arcsin(esinw/ecc)", "rhs", "=", "esinw2per0", "(", "ecc", ",", "esinw", ")", "else", ":", "raise", "NotImplementedError", "return", "lhs", ",", "rhs", ",", "{", "'orbit'", ":", "orbit", "}" ]
avg_line_len: 34.804878, score: 22.463415
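The constraint itself is just esinw = ecc * sin(per0). A numeric sketch with illustrative values, independent of phoebe:

import math

ecc, per0 = 0.3, math.radians(60.0)
esinw = ecc * math.sin(per0)                       # forward: derive esinw
assert math.isclose(ecc, esinw / math.sin(per0))   # inverse: recover ecc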
def get(name, defval=None):
    '''
    Return an object from the embedded synapse data folder.

    Example:

        for tld in synapse.data.get('iana.tlds'):
            dostuff(tld)

    NOTE: Files are named synapse/data/<name>.mpk
    '''
    with s_datfile.openDatFile('synapse.data/%s.mpk' % name) as fd:
        return s_msgpack.un(fd.read())
[ "def", "get", "(", "name", ",", "defval", "=", "None", ")", ":", "with", "s_datfile", ".", "openDatFile", "(", "'synapse.data/%s.mpk'", "%", "name", ")", "as", "fd", ":", "return", "s_msgpack", ".", "un", "(", "fd", ".", "read", "(", ")", ")" ]
avg_line_len: 26.076923, score: 23.769231
def get_train_eval_files(input_dir):
    """Get preprocessed training and eval files."""
    data_dir = _get_latest_data_dir(input_dir)
    train_pattern = os.path.join(data_dir, 'train*.tfrecord.gz')
    eval_pattern = os.path.join(data_dir, 'eval*.tfrecord.gz')
    train_files = file_io.get_matching_files(train_pattern)
    eval_files = file_io.get_matching_files(eval_pattern)
    return train_files, eval_files
[ "def", "get_train_eval_files", "(", "input_dir", ")", ":", "data_dir", "=", "_get_latest_data_dir", "(", "input_dir", ")", "train_pattern", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "'train*.tfrecord.gz'", ")", "eval_pattern", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "'eval*.tfrecord.gz'", ")", "train_files", "=", "file_io", ".", "get_matching_files", "(", "train_pattern", ")", "eval_files", "=", "file_io", ".", "get_matching_files", "(", "eval_pattern", ")", "return", "train_files", ",", "eval_files" ]
avg_line_len: 49.375, score: 11.25
def gallery_images(self):
    """Instance depends on the API version:

    * 2018-06-01: :class:`GalleryImagesOperations<azure.mgmt.compute.v2018_06_01.operations.GalleryImagesOperations>`
    * 2019-03-01: :class:`GalleryImagesOperations<azure.mgmt.compute.v2019_03_01.operations.GalleryImagesOperations>`
    """
    api_version = self._get_api_version('gallery_images')
    if api_version == '2018-06-01':
        from .v2018_06_01.operations import GalleryImagesOperations as OperationClass
    elif api_version == '2019-03-01':
        from .v2019_03_01.operations import GalleryImagesOperations as OperationClass
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return OperationClass(self._client, self.config,
                          Serializer(self._models_dict(api_version)),
                          Deserializer(self._models_dict(api_version)))
[ "def", "gallery_images", "(", "self", ")", ":", "api_version", "=", "self", ".", "_get_api_version", "(", "'gallery_images'", ")", "if", "api_version", "==", "'2018-06-01'", ":", "from", ".", "v2018_06_01", ".", "operations", "import", "GalleryImagesOperations", "as", "OperationClass", "elif", "api_version", "==", "'2019-03-01'", ":", "from", ".", "v2019_03_01", ".", "operations", "import", "GalleryImagesOperations", "as", "OperationClass", "else", ":", "raise", "NotImplementedError", "(", "\"APIVersion {} is not available\"", ".", "format", "(", "api_version", ")", ")", "return", "OperationClass", "(", "self", ".", "_client", ",", "self", ".", "config", ",", "Serializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ",", "Deserializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ")" ]
avg_line_len: 64.285714, score: 37.714286
def get_path(self, name, default=None):
    """Retrieves an environment variable as a filesystem path.

    Requires the `pathlib`_ library if using Python <= 3.4.

    Args:
        name (str): The case-insensitive, unprefixed variable name.
        default: If provided, a default value will be returned
            instead of throwing ``EnvironmentError``.

    Returns:
        pathlib.Path: The environment variable as a ``pathlib.Path`` object.

    Raises:
        EnvironmentError: If the environment variable does not
            exist, and ``default`` was not provided.

    .. _pathlib: https://pypi.python.org/pypi/pathlib/
    """
    if name not in self:
        if default is not None:
            return default
        raise EnvironmentError.not_found(self._prefix, name)
    return pathlib.Path(self[name])
[ "def", "get_path", "(", "self", ",", "name", ",", "default", "=", "None", ")", ":", "if", "name", "not", "in", "self", ":", "if", "default", "is", "not", "None", ":", "return", "default", "raise", "EnvironmentError", ".", "not_found", "(", "self", ".", "_prefix", ",", "name", ")", "return", "pathlib", ".", "Path", "(", "self", "[", "name", "]", ")" ]
avg_line_len: 33.259259, score: 21.666667
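A minimal standalone sketch of the same lookup pattern against os.environ; the real method indexes `self`, a case-insensitive prefixed mapping, and `env_path` here is a hypothetical stand-in:

import os
import pathlib

def env_path(name, default=None):
    # Fall back to the default when the variable is missing, else raise.
    if name not in os.environ:
        if default is not None:
            return default
        raise KeyError(name)
    return pathlib.Path(os.environ[name])

os.environ['DATA_DIR'] = '/tmp/data'
assert env_path('DATA_DIR') == pathlib.Path('/tmp/data')
assert env_path('MISSING', default=pathlib.Path('.')) == pathlib.Path('.')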
def ADR(cpu, dest, src):
    """
    Address to Register adds an immediate value to the PC value, and writes the
    result to the destination register.

    :param ARMv7Operand dest: Specifies the destination register.
    :param ARMv7Operand src: Specifies the label of an instruction or literal
        data item whose address is to be loaded into <Rd>.  The assembler
        calculates the required value of the offset from the Align(PC, 4)
        value of the ADR instruction to this label.
    """
    aligned_pc = (cpu.instruction.address + 4) & 0xfffffffc
    dest.write(aligned_pc + src.read())
[ "def", "ADR", "(", "cpu", ",", "dest", ",", "src", ")", ":", "aligned_pc", "=", "(", "cpu", ".", "instruction", ".", "address", "+", "4", ")", "&", "0xfffffffc", "dest", ".", "write", "(", "aligned_pc", "+", "src", ".", "read", "(", ")", ")" ]
avg_line_len: 52.333333, score: 27.833333
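A sketch of the Align(PC, 4) arithmetic: as in the snippet, the PC value is taken as the instruction address + 4, then word-aligned by masking off the low two bits (addresses and offset are illustrative):

address, offset = 0x0800_0146, 0x28
aligned_pc = (address + 4) & 0xFFFFFFFC   # 0x0800014A masked down to a word boundary
assert aligned_pc == 0x0800_0148
assert aligned_pc + offset == 0x0800_0170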
def gen_opf(self, book_idx):
    """
    Generate the OPF project file.
    :return:
    :rtype:
    """
    if self._chapterization:
        title = '{title}-{book_idx}'.format(title=self._title, book_idx=book_idx)
        book = 'book-{book_idx}'.format(book_idx=book_idx)
        toc = 'toc-{book_idx}'.format(book_idx=book_idx)
    else:
        title = '{title}'.format(title=self._title)
        book = 'book'
        toc = 'toc'
    opf_file = """<?xml version="1.0" encoding="utf-8"?>
<package unique-identifier="uid" xmlns:opf="http://www.idpf.org/2007/opf" xmlns:asd="http://www.idpf.org/asdfaf">
    <metadata>
        <dc-metadata xmlns:dc="http://purl.org/metadata/dublin_core" xmlns:oebpackage="http://openebook.org/namespaces/oeb-package/1.0/">
            <dc:Title>{title}</dc:Title>
            <dc:Language>zh-cn</dc:Language>
            <dc:Creator>{author}</dc:Creator>
            <dc:Copyrights>{author}</dc:Copyrights>
            <dc:Publisher>Alexander.Li</dc:Publisher>
            <x-metadata>
                <EmbeddedCover>{cover}</EmbeddedCover>
            </x-metadata>
        </dc-metadata>
    </metadata>
    <manifest>
        <item id="toc" properties="nav" href="{book}.html" media-type="application/xhtml+xml"/>
        <item id="content" media-type="application/xhtml+xml" href="{book}.html"></item>
        <item id="cover-image" media-type="image/png" href="{cover}"/>
        <item id="ncx" media-type="application/x-dtbncx+xml" href="{toc}.ncx"/>
    </manifest>
    <spine toc="ncx">
        <itemref idref="cover-image"/>
        <itemref idref="toc"/>
        <itemref idref="content"/>
    </spine>
    <guide>
        <reference type="toc" title="{title_name}" href="{book}.html#toc"/>
        <reference type="content" title="Book" href="{book}.html"/>
    </guide>
</package>
    """.format(
        title_name='目录',  # "Table of Contents"
        author=self._author,
        title=title,
        cover=self._cover_img,
        book=book,
        toc=toc
    )
    return opf_file
[ "def", "gen_opf", "(", "self", ",", "book_idx", ")", ":", "if", "self", ".", "_chapterization", ":", "title", "=", "'{title}-{book_idx}'", ".", "format", "(", "title", "=", "self", ".", "_title", ",", "book_idx", "=", "book_idx", ")", "book", "=", "'book-{book_idx}'", ".", "format", "(", "book_idx", "=", "book_idx", ")", "toc", "=", "'toc-{book_idx}'", ".", "format", "(", "book_idx", "=", "book_idx", ")", "else", ":", "title", "=", "'{title}'", ".", "format", "(", "title", "=", "self", ".", "_title", ")", "book", "=", "'book'", "toc", "=", "'toc'", "opf_file", "=", "\"\"\"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<package unique-identifier=\"uid\" xmlns:opf=\"http://www.idpf.org/2007/opf\" xmlns:asd=\"http://www.idpf.org/asdfaf\">\n <metadata>\n <dc-metadata xmlns:dc=\"http://purl.org/metadata/dublin_core\" xmlns:oebpackage=\"http://openebook.org/namespaces/oeb-package/1.0/\">\n <dc:Title>{title}</dc:Title>\n <dc:Language>zh-cn</dc:Language>\n <dc:Creator>{author}</dc:Creator>\n <dc:Copyrights>{author}</dc:Copyrights>\n <dc:Publisher>Alexander.Li</dc:Publisher>\n <x-metadata>\n <EmbeddedCover>{cover}</EmbeddedCover>\n </x-metadata>\n </dc-metadata>\n </metadata>\n <manifest>\n <item id=\"toc\" properties=\"nav\" href=\"{book}.html\" media-type=\"application/xhtml+xml\"/>\n <item id=\"content\" media-type=\"application/xhtml+xml\" href=\"{book}.html\"></item>\n <item id=\"cover-image\" media-type=\"image/png\" href=\"{cover}\"/>\n <item id=\"ncx\" media-type=\"application/x-dtbncx+xml\" href=\"{toc}.ncx\"/>\n </manifest>\n <spine toc=\"ncx\">\n <itemref idref=\"cover-image\"/>\n <itemref idref=\"toc\"/>\n <itemref idref=\"content\"/>\n </spine>\n <guide>\n <reference type=\"toc\" title=\"{title_name}\" href=\"{book}.html#toc\"/>\n <reference type=\"content\" title=\"Book\" href=\"{book}.html\"/>\n </guide>\n</package>\n \"\"\"", ".", "format", "(", "title_name", "=", "'目录',", "", "author", "=", "self", ".", "_author", ",", "title", "=", "title", ",", "cover", "=", "self", ".", "_cover_img", ",", "book", "=", "book", ",", "toc", "=", "toc", ")", "return", "opf_file" ]
avg_line_len: 37.574074, score: 21.851852
def get_mouse_location2(self):
    """
    Get all mouse location-related data.

    :return: a namedtuple with ``x``, ``y``, ``screen_num`` and ``window`` fields
    """
    x = ctypes.c_int(0)
    y = ctypes.c_int(0)
    screen_num_ret = ctypes.c_ulong(0)
    window_ret = ctypes.c_ulong(0)
    _libxdo.xdo_get_mouse_location2(
        self._xdo, ctypes.byref(x), ctypes.byref(y),
        ctypes.byref(screen_num_ret), ctypes.byref(window_ret))
    return mouse_location2(x.value, y.value,
                           screen_num_ret.value, window_ret.value)
[ "def", "get_mouse_location2", "(", "self", ")", ":", "x", "=", "ctypes", ".", "c_int", "(", "0", ")", "y", "=", "ctypes", ".", "c_int", "(", "0", ")", "screen_num_ret", "=", "ctypes", ".", "c_ulong", "(", "0", ")", "window_ret", "=", "ctypes", ".", "c_ulong", "(", "0", ")", "_libxdo", ".", "xdo_get_mouse_location2", "(", "self", ".", "_xdo", ",", "ctypes", ".", "byref", "(", "x", ")", ",", "ctypes", ".", "byref", "(", "y", ")", ",", "ctypes", ".", "byref", "(", "screen_num_ret", ")", ",", "ctypes", ".", "byref", "(", "window_ret", ")", ")", "return", "mouse_location2", "(", "x", ".", "value", ",", "y", ".", "value", ",", "screen_num_ret", ".", "value", ",", "window_ret", ".", "value", ")" ]
avg_line_len: 37.9375, score: 12.1875
def get_optional(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
    """
    Simply get an argument with the given key.

    Deprecated. Use `session.state.get()` instead.
    """
    return self.state.get(key, default)
[ "def", "get_optional", "(", "self", ",", "key", ":", "str", ",", "default", ":", "Optional", "[", "Any", "]", "=", "None", ")", "->", "Optional", "[", "Any", "]", ":", "return", "self", ".", "state", ".", "get", "(", "key", ",", "default", ")" ]
avg_line_len: 33.125, score: 12.375
def human_readable_size(num, suffix='B'):
    """
    FROM http://stackoverflow.com/a/1094933/1958900
    """
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)
[ "def", "human_readable_size", "(", "num", ",", "suffix", "=", "'B'", ")", ":", "for", "unit", "in", "[", "''", ",", "'Ki'", ",", "'Mi'", ",", "'Gi'", ",", "'Ti'", ",", "'Pi'", ",", "'Ei'", ",", "'Zi'", "]", ":", "if", "abs", "(", "num", ")", "<", "1024.0", ":", "return", "\"%3.1f%s%s\"", "%", "(", "num", ",", "unit", ",", "suffix", ")", "num", "/=", "1024.0", "return", "\"%.1f%s%s\"", "%", "(", "num", ",", "'Yi'", ",", "suffix", ")" ]
avg_line_len: 38, score: 7.75
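Example outputs of the function above (binary, 1024-based units):

assert human_readable_size(0) == '0.0B'
assert human_readable_size(1536) == '1.5KiB'
assert human_readable_size(10**9) == '953.7MiB'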
def get_predicates(self, class_):
    """Proxy ``get_predicates`` to client with stored request controller.
    """
    return self.client.get_predicates(
        class_=class_, controller=self.controller)
[ "def", "get_predicates", "(", "self", ",", "class_", ")", ":", "return", "self", ".", "client", ".", "get_predicates", "(", "class_", "=", "class_", ",", "controller", "=", "self", ".", "controller", ")" ]
avg_line_len: 37.333333, score: 7.333333
def init_log_file(self):
    """
    Redirects stdout to a log file to prevent printing to a hanging terminal
    when dealing with the compiled binary.
    """
    # redirect terminal output
    self.old_stdout = sys.stdout
    sys.stdout = open(os.path.join(self.WD, "demag_gui.log"), 'w+')
[ "def", "init_log_file", "(", "self", ")", ":", "# redirect terminal output", "self", ".", "old_stdout", "=", "sys", ".", "stdout", "sys", ".", "stdout", "=", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "WD", ",", "\"demag_gui.log\"", ")", ",", "'w+'", ")" ]
avg_line_len: 39.125, score: 12.875
def make_sentence_with_start(self, beginning, strict=True, **kwargs):
    """
    Tries making a sentence that begins with `beginning` string,
    which should be a string of one to `self.state` words known
    to exist in the corpus.

    If strict == True, then markovify will draw its initial inspiration
    only from sentences that start with the specified word/phrase.

    If strict == False, then markovify will draw its initial inspiration
    from any sentence containing the specified word/phrase.

    **kwargs are passed to `self.make_sentence`
    """
    split = tuple(self.word_split(beginning))
    word_count = len(split)

    if word_count == self.state_size:
        init_states = [ split ]
    elif word_count > 0 and word_count < self.state_size:
        if strict:
            init_states = [ (BEGIN,) * (self.state_size - word_count) + split ]
        else:
            init_states = [ key for key in self.chain.model.keys()
                # also match states that still carry leading BEGIN padding
                if tuple(filter(lambda x: x != BEGIN, key))[:word_count] == split ]
            random.shuffle(init_states)
    else:
        err_msg = "`make_sentence_with_start` for this model requires a string containing 1 to {0} words. Yours has {1}: {2}".format(self.state_size, word_count, str(split))
        raise ParamError(err_msg)

    for init_state in init_states:
        output = self.make_sentence(init_state, **kwargs)
        if output is not None:
            return output
    return None
[ "def", "make_sentence_with_start", "(", "self", ",", "beginning", ",", "strict", "=", "True", ",", "*", "*", "kwargs", ")", ":", "split", "=", "tuple", "(", "self", ".", "word_split", "(", "beginning", ")", ")", "word_count", "=", "len", "(", "split", ")", "if", "word_count", "==", "self", ".", "state_size", ":", "init_states", "=", "[", "split", "]", "elif", "word_count", ">", "0", "and", "word_count", "<", "self", ".", "state_size", ":", "if", "strict", ":", "init_states", "=", "[", "(", "BEGIN", ",", ")", "*", "(", "self", ".", "state_size", "-", "word_count", ")", "+", "split", "]", "else", ":", "init_states", "=", "[", "key", "for", "key", "in", "self", ".", "chain", ".", "model", ".", "keys", "(", ")", "# check for starting with begin as well ordered lists", "if", "tuple", "(", "filter", "(", "lambda", "x", ":", "x", "!=", "BEGIN", ",", "key", ")", ")", "[", ":", "word_count", "]", "==", "split", "]", "random", ".", "shuffle", "(", "init_states", ")", "else", ":", "err_msg", "=", "\"`make_sentence_with_start` for this model requires a string containing 1 to {0} words. Yours has {1}: {2}\"", ".", "format", "(", "self", ".", "state_size", ",", "word_count", ",", "str", "(", "split", ")", ")", "raise", "ParamError", "(", "err_msg", ")", "for", "init_state", "in", "init_states", ":", "output", "=", "self", ".", "make_sentence", "(", "init_state", ",", "*", "*", "kwargs", ")", "if", "output", "is", "not", "None", ":", "return", "output", "return", "None" ]
avg_line_len: 40.5, score: 26.25
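A hedged usage sketch, assuming the markovify package (this method matches its public API); the corpus is made up and generation may legitimately return None:

import markovify

corpus = ("The cat sat on the mat. The cat chased the dog. "
          "The dog ran into the park. The park was green and quiet.")
model = markovify.Text(corpus, state_size=1)
sentence = model.make_sentence_with_start('The', strict=True, tries=100)
print(sentence)  # may be None if no acceptable sentence was found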
def is_valid_url(url):
    """ Check if url is valid """
    regex = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        # r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    return bool(regex.match(url))
[ "def", "is_valid_url", "(", "url", ")", ":", "regex", "=", "re", ".", "compile", "(", "r'^(?:http|ftp)s?://'", "# http:// or https://", "r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?)|'", "#domain...", "#r'localhost|' #localhost...", "r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})'", "# ...or ip", "r'(?::\\d+)?'", "# optional port", "r'(?:/?|[/?]\\S+)$'", ",", "re", ".", "IGNORECASE", ")", "return", "bool", "(", "regex", ".", "match", "(", "url", ")", ")" ]
avg_line_len: 38.833333, score: 13.833333
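Quick checks against the pattern above (example URLs are illustrative):

assert is_valid_url('https://example.com/path?q=1')   # domain branch
assert is_valid_url('http://192.168.0.1:8080/')        # ip branch with port
assert not is_valid_url('not a url')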
def joinpath(cls, first, *others):
    """
    Join first to zero or more :class:`Path` components, adding a separator
    character (:samp:`{first}.module.sep`) if needed.  Returns a new instance
    of :samp:`{first}._next_class`.

    .. seealso:: :func:`os.path.join`
    """
    if not isinstance(first, cls):
        first = cls(first)
    return first._next_class(first.module.join(first, *others))
[ "def", "joinpath", "(", "cls", ",", "first", ",", "*", "others", ")", ":", "if", "not", "isinstance", "(", "first", ",", "cls", ")", ":", "first", "=", "cls", "(", "first", ")", "return", "first", ".", "_next_class", "(", "first", ".", "module", ".", "join", "(", "first", ",", "*", "others", ")", ")" ]
avg_line_len: 36.5, score: 11.666667
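A hedged usage sketch, assuming a path.py-style Path class; for plain strings the behavior mirrors os.path.join (POSIX separators shown):

# p = Path.joinpath(Path('/usr'), 'local', 'bin')   # -> Path('/usr/local/bin')
import os.path
assert os.path.join('/usr', 'local', 'bin') == '/usr/local/bin'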
def guessoffset(args):
    """
    %prog guessoffset fastqfile

    Guess the quality offset of the fastqfile, whether 33 or 64.
    See encoding schemes: <http://en.wikipedia.org/wiki/FASTQ_format>

      SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS...............................
      ..........................XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
      ...............................IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
      .................................JJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJ
      LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL...............................
      !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefgh
      |                         |    |        |                              |
     33                        59   64       73                            104

     S - Sanger        Phred+33,  raw reads typically (0, 40)
     X - Solexa        Solexa+64, raw reads typically (-5, 40)
     I - Illumina 1.3+ Phred+64,  raw reads typically (0, 40)
     J - Illumina 1.5+ Phred+64,  raw reads typically (3, 40)
     L - Illumina 1.8+ Phred+33,  raw reads typically (0, 40)
        with 0=unused, 1=unused, 2=Read Segment Quality Control Indicator (bold)
    """
    p = OptionParser(guessoffset.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    fastqfile, = args
    ai = iter_fastq(fastqfile)
    rec = next(ai)
    offset = 64
    while rec:
        quality = rec.quality
        lowcounts = len([x for x in quality if x < 59])
        highcounts = len([x for x in quality if x > 74])
        diff = highcounts - lowcounts
        if diff > 10:
            break
        elif diff < -10:
            offset = 33
            break
        rec = next(ai)

    if offset == 33:
        print("Sanger encoding (offset=33)", file=sys.stderr)
    elif offset == 64:
        print("Illumina encoding (offset=64)", file=sys.stderr)

    return offset
[ "def", "guessoffset", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "guessoffset", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "1", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "fastqfile", ",", "=", "args", "ai", "=", "iter_fastq", "(", "fastqfile", ")", "rec", "=", "next", "(", "ai", ")", "offset", "=", "64", "while", "rec", ":", "quality", "=", "rec", ".", "quality", "lowcounts", "=", "len", "(", "[", "x", "for", "x", "in", "quality", "if", "x", "<", "59", "]", ")", "highcounts", "=", "len", "(", "[", "x", "for", "x", "in", "quality", "if", "x", ">", "74", "]", ")", "diff", "=", "highcounts", "-", "lowcounts", "if", "diff", ">", "10", ":", "break", "elif", "diff", "<", "-", "10", ":", "offset", "=", "33", "break", "rec", "=", "next", "(", "ai", ")", "if", "offset", "==", "33", ":", "print", "(", "\"Sanger encoding (offset=33)\"", ",", "file", "=", "sys", ".", "stderr", ")", "elif", "offset", "==", "64", ":", "print", "(", "\"Illumina encoding (offset=64)\"", ",", "file", "=", "sys", ".", "stderr", ")", "return", "offset" ]
avg_line_len: 37.764706, score: 23.647059
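The offset math in one line: a quality character maps to a score via ord(char) - offset, which is why characters below chr(59) == ';' only occur in Phred+33 data and characters above chr(74) == 'J' only in Phred+64 data, the thresholds the low/high counts above test against:

assert ord('I') - 33 == 40   # 'I' is Q40 under Phred+33 ...
assert ord('I') - 64 == 9    # ... but Q9 under Phred+64
assert chr(59) == ';' and chr(74) == 'J'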
def set_file_license_comment(self, doc, text):
    """
    Raises OrderError if no package or file defined.
    Raises CardinalityError if more than one per file.
    """
    if self.has_package(doc) and self.has_file(doc):
        if not self.file_license_comment_set:
            self.file_license_comment_set = True
            self.file(doc).license_comment = text
            return True
        else:
            raise CardinalityError('File::LicenseComment')
    else:
        raise OrderError('File::LicenseComment')
[ "def", "set_file_license_comment", "(", "self", ",", "doc", ",", "text", ")", ":", "if", "self", ".", "has_package", "(", "doc", ")", "and", "self", ".", "has_file", "(", "doc", ")", ":", "if", "not", "self", ".", "file_license_comment_set", ":", "self", ".", "file_license_comment_set", "=", "True", "self", ".", "file", "(", "doc", ")", ".", "license_comment", "=", "text", "return", "True", "else", ":", "raise", "CardinalityError", "(", "'File::LicenseComment'", ")", "else", ":", "raise", "OrderError", "(", "'File::LicenseComment'", ")" ]
avg_line_len: 40.214286, score: 13.357143
def p_pragma(self, p):
    'pragma : LPAREN TIMES ID TIMES RPAREN'
    p[0] = Pragma(PragmaEntry(p[3], lineno=p.lineno(1)), lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
[ "def", "p_pragma", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "Pragma", "(", "PragmaEntry", "(", "p", "[", "3", "]", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ")", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ")", "p", ".", "set_lineno", "(", "0", ",", "p", ".", "lineno", "(", "1", ")", ")" ]
avg_line_len: 41.2, score: 10
def _CanProcessKeyWithPlugin(self, registry_key, plugin):
    """Determines if a plugin can process a Windows Registry key or its values.

    Args:
        registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
        plugin (WindowsRegistryPlugin): Windows Registry plugin.

    Returns:
        bool: True if the Registry key can be processed with the plugin.
    """
    for registry_key_filter in plugin.FILTERS:
        # Skip filters that define key paths since they are already
        # checked by the path filter.
        if getattr(registry_key_filter, 'key_paths', []):
            continue

        if registry_key_filter.Match(registry_key):
            return True

    return False
[ "def", "_CanProcessKeyWithPlugin", "(", "self", ",", "registry_key", ",", "plugin", ")", ":", "for", "registry_key_filter", "in", "plugin", ".", "FILTERS", ":", "# Skip filters that define key paths since they are already", "# checked by the path filter.", "if", "getattr", "(", "registry_key_filter", ",", "'key_paths'", ",", "[", "]", ")", ":", "continue", "if", "registry_key_filter", ".", "Match", "(", "registry_key", ")", ":", "return", "True", "return", "False" ]
avg_line_len: 33.2, score: 22.2
def _prfx_setattr_(obj, item, value):
    """Replacement of __setattr__"""
    if item.startswith('v_'):
        return setattr(obj, item[2:], value)
    else:
        return super(obj.__class__, obj).__setattr__(item, value)
[ "def", "_prfx_setattr_", "(", "obj", ",", "item", ",", "value", ")", ":", "if", "item", ".", "startswith", "(", "'v_'", ")", ":", "return", "setattr", "(", "obj", ",", "item", "[", "2", ":", "]", ",", "value", ")", "else", ":", "return", "super", "(", "obj", ".", "__class__", ",", "obj", ")", ".", "__setattr__", "(", "item", ",", "value", ")" ]
avg_line_len: 36.666667, score: 12.333333
def get(key, default=-1):
    """Backport support for original codes."""
    if isinstance(key, int):
        return ProtectionAuthority(key)
    if key not in ProtectionAuthority._member_map_:
        extend_enum(ProtectionAuthority, key, default)
    return ProtectionAuthority[key]
[ "def", "get", "(", "key", ",", "default", "=", "-", "1", ")", ":", "if", "isinstance", "(", "key", ",", "int", ")", ":", "return", "ProtectionAuthority", "(", "key", ")", "if", "key", "not", "in", "ProtectionAuthority", ".", "_member_map_", ":", "extend_enum", "(", "ProtectionAuthority", ",", "key", ",", "default", ")", "return", "ProtectionAuthority", "[", "key", "]" ]
avg_line_len: 43.142857, score: 8.571429
def pull(i):
    """
    Input:  {
              (path)          - repo UOA (where to create entry)
              (type)          - type
              (url)           - URL
                 or
              (data_uoa)      - repo UOA

              (clone)         - if 'yes', clone repo instead of update

              (current_repos) - if resolving dependencies on other repos,
                                list of repos being updated
                                (to avoid infinite recursion)

              (git)           - if 'yes', use git protocol instead of https

              (ignore_pull)   - useful just for switching to another branch

              (stable)        - take stable version (highly experimental)
              (version)       - checkout version (default - stable)
              (branch)        - git branch
              (checkout)      - git checkout
            }

    Output: {
              return  - return code = 0, if successful
                                    > 0, if error
              (error) - error text if return > 0
            }
    """

    o = i.get('out', '')

    xrecache = False

    pp = []
    px = i.get('path', '')
    t = i.get('type', '')
    url = i.get('url', '')

    stable = i.get('stable', '')
    version = i.get('version', '')
    if stable == 'yes':
        version = 'stable'
    branch = i.get('branch', '')
    checkout = i.get('checkout', '')
    ip = i.get('ignore_pull', '')

    cr = i.get('current_repos', [])

    tt = 'pull'
    if i.get('clone', '') == 'yes':
        tt = 'clone'

    if px != '':
        pp.append({'path': px, 'type': t, 'url': url})

    uoa = i.get('data_uoa', '')
    cids = i.get('cids', [])
    if len(cids) > 0 and uoa == '':
        uoa = cids[0]

    # If url is not empty and uoa is empty, get name from URL:
    if url != '' and uoa == '' and px == '':
        ix = url.rfind('/')
        if ix > 0:
            uoa = url[ix + 1:]
            if uoa.endswith('.git'):
                uoa = uoa[:-4]

            i['data_uoa'] = uoa

    if uoa == '' and len(pp) == 0 and url == '':
        uoa = '*'

    if uoa != '':
        if uoa.find('*') >= 0 or uoa.find('?') >= 0:
            r = ck.list_data({'module_uoa': work['self_module_uoa'], 'data_uoa': uoa})
            if r['return'] > 0:
                return r

            lst = r['lst']
            for q in lst:
                # Loading repo
                r = ck.access({'action': 'load',
                               'module_uoa': work['self_module_uoa'],
                               'data_uoa': q['data_uoa'],
                               'common': 'yes'})
                if r['return'] > 0:
                    return r
                d = r['dict']
                t = d.get('shared', '')
                duoa = r['data_uoa']

                if d.get('recache', '') == 'yes':
                    xrecache = True

                if t != '':
                    p = d.get('path', '')
                    url = d.get('url', '')
                    checkouts = d.get('checkouts', {})
                    pp.append({'path': p, 'type': t, 'url': url,
                               'data_uoa': duoa, 'checkouts': checkouts})
        else:
            # Loading repo
            r = ck.access({'action': 'load',
                           'module_uoa': work['self_module_uoa'],
                           'data_uoa': uoa,
                           'common': 'yes'})
            if r['return'] > 0:
                if r['return'] == 16:
                    # If not found, try to add from GIT
                    i['action'] = 'add'
                    i['shared'] = 'yes'
                    x = i.get('quiet', '')
                    if x == '':
                        x = 'yes'
                    i['quiet'] = x
                    i['current_repos'] = cr
                    return add(i)
                else:
                    return r
            d = r['dict']
            duoa = r['data_uoa']

            if d.get('recache', '') == 'yes':
                xrecache = True

            p = d['path']
            t = d.get('shared', '')
            url = d.get('url', '')
            checkouts = d.get('checkouts', {})

            pp.append({'path': p, 'type': t, 'url': url,
                       'data_uoa': duoa, 'checkouts': checkouts})

    # Updating ...
    for q in pp:
        p = q.get('path', '')
        duoa = q.get('data_uoa', '')
        t = q.get('type', '')
        url = q.get('url', '')

        # Semi hack (useful for Anton)
        if i.get('git', '') == 'yes':
            url = url.replace('https://', 'git@')

            j = url.find('/')
            if j > 0:
                url = url[:j] + ':' + url[j + 1:]

            url += '.git'

        if o == 'con' and tt != 'clone':
            ck.out('******************************************************************')
            ck.out('Updating repo "' + duoa + '" ...')
            ck.out('')
            ck.out(' Local path: ' + p)
            ck.out(' URL:        ' + url)

        if t == 'git':
            # Check if git is installed
            rq = ck.gen_tmp_file({})
            if rq['return'] > 0:
                return rq
            xfn = rq['file_name']

            os.system('git --version > ' + xfn)

            rq = ck.load_text_file({'text_file': xfn, 'delete_after_read': 'yes'})
            xs = ''
            if rq['return'] == 0:
                xs = rq['string'].strip()

            if xs.find(' version ') < 0:
                return {'return': 1, 'error': 'git command line client is not found - please, install it or download repo as zip'}

            # Continue
            try:
                px = os.getcwd()
            except OSError:
                from os.path import expanduser
                px = expanduser("~")

            if not os.path.isdir(p):
                os.makedirs(p)

            if o == 'con':
                ck.out('')
                ck.out('  cd ' + p)

            os.chdir(p)

            r = 0
            if ip != 'yes':
                s = ck.cfg['repo_types'][t][tt].replace('$#url#$', url).replace('$#path#$', p)

                if o == 'con':
                    ck.out('  ' + s)
                    ck.out('')

                r = os.system(s)

            if o == 'con':
                ck.out('')

            os.chdir(px)  # Restore path

            if r > 0:
                if o == 'con':
                    ck.out('')
                    ck.out(' WARNING: repository update likely failed OR IN A DIFFERENT BRANCH/CHECKOUT (git exit code: ' + str(r) + ')')
                    ck.out('')
                    rx = ck.inp({'text': 'Would you like to continue (Y/n)?: '})
                    x = rx['string'].lower()
                    if x == 'n' or x == 'no':
                        return {'return': 1, 'error': 'repository update likely failed - exit code ' + str(r)}
                else:
                    return {'return': 1, 'error': 'repository update likely failed - exit code ' + str(r)}
        else:
            if o == 'con':
                ck.out('CK warning: this repository is not shared!')

        # Check deps
        if tt != 'clone':  # clone is done in add ...
            if o == 'con':
                ck.out('  ========================================')
                ck.out('  Checking dependencies on other repos ...')
                ck.out('')

            r = deps({'path': p,
                      'current_path': cr,
                      'how': 'pull',
                      'version': version,
                      'branch': branch,
                      'checkout': checkout,
                      'out': o})
            if r['return'] > 0:
                return r

    # Re-caching ...
    if xrecache:
        if o == 'con':
            ck.out('  ==============================================')
            ck.out('  At least one repository requires recaching ...')
            ck.out('')

        r = recache({'out': o})
        if r['return'] > 0:
            return r

    return {'return': 0}
[ "def", "pull", "(", "i", ")", ":", "o", "=", "i", ".", "get", "(", "'out'", ",", "''", ")", "xrecache", "=", "False", "pp", "=", "[", "]", "px", "=", "i", ".", "get", "(", "'path'", ",", "''", ")", "t", "=", "i", ".", "get", "(", "'type'", ",", "''", ")", "url", "=", "i", ".", "get", "(", "'url'", ",", "''", ")", "stable", "=", "i", ".", "get", "(", "'stable'", ",", "''", ")", "version", "=", "i", ".", "get", "(", "'version'", ",", "''", ")", "if", "stable", "==", "'yes'", ":", "version", "=", "'stable'", "branch", "=", "i", ".", "get", "(", "'branch'", ",", "''", ")", "checkout", "=", "i", ".", "get", "(", "'checkout'", ",", "''", ")", "ip", "=", "i", ".", "get", "(", "'ignore_pull'", ",", "''", ")", "cr", "=", "i", ".", "get", "(", "'current_repos'", ",", "[", "]", ")", "tt", "=", "'pull'", "if", "i", ".", "get", "(", "'clone'", ",", "''", ")", "==", "'yes'", ":", "tt", "=", "'clone'", "if", "px", "!=", "''", ":", "pp", ".", "append", "(", "{", "'path'", ":", "px", ",", "'type'", ":", "t", ",", "'url'", ":", "url", "}", ")", "uoa", "=", "i", ".", "get", "(", "'data_uoa'", ",", "''", ")", "cids", "=", "i", ".", "get", "(", "'cids'", ",", "[", "]", ")", "if", "len", "(", "cids", ")", ">", "0", "and", "uoa", "==", "''", ":", "uoa", "=", "cids", "[", "0", "]", "# If url is not empty and uoa is empty, get name from URL:", "if", "url", "!=", "''", "and", "uoa", "==", "''", "and", "px", "==", "''", ":", "ix", "=", "url", ".", "rfind", "(", "'/'", ")", "if", "ix", ">", "0", ":", "uoa", "=", "url", "[", "ix", "+", "1", ":", "]", "if", "uoa", ".", "endswith", "(", "'.git'", ")", ":", "uoa", "=", "uoa", "[", ":", "-", "4", "]", "i", "[", "'data_uoa'", "]", "=", "uoa", "if", "uoa", "==", "''", "and", "len", "(", "pp", ")", "==", "0", "and", "url", "==", "''", ":", "uoa", "=", "'*'", "if", "uoa", "!=", "''", ":", "if", "uoa", ".", "find", "(", "'*'", ")", ">=", "0", "or", "uoa", ".", "find", "(", "'?'", ")", ">=", "0", ":", "r", "=", "ck", ".", "list_data", "(", "{", "'module_uoa'", ":", "work", "[", "'self_module_uoa'", "]", ",", "'data_uoa'", ":", "uoa", "}", ")", "if", "r", "[", "'return'", "]", ">", "0", ":", "return", "r", "lst", "=", "r", "[", "'lst'", "]", "for", "q", "in", "lst", ":", "# Loading repo", "r", "=", "ck", ".", "access", "(", "{", "'action'", ":", "'load'", ",", "'module_uoa'", ":", "work", "[", "'self_module_uoa'", "]", ",", "'data_uoa'", ":", "q", "[", "'data_uoa'", "]", ",", "'common'", ":", "'yes'", "}", ")", "if", "r", "[", "'return'", "]", ">", "0", ":", "return", "r", "d", "=", "r", "[", "'dict'", "]", "t", "=", "d", ".", "get", "(", "'shared'", ",", "''", ")", "duoa", "=", "r", "[", "'data_uoa'", "]", "if", "d", ".", "get", "(", "'recache'", ",", "''", ")", "==", "'yes'", ":", "xrecache", "=", "True", "if", "t", "!=", "''", ":", "p", "=", "d", ".", "get", "(", "'path'", ",", "''", ")", "url", "=", "d", ".", "get", "(", "'url'", ",", "''", ")", "checkouts", "=", "d", ".", "get", "(", "'checkouts'", ",", "{", "}", ")", "pp", ".", "append", "(", "{", "'path'", ":", "p", ",", "'type'", ":", "t", ",", "'url'", ":", "url", ",", "'data_uoa'", ":", "duoa", ",", "'checkouts'", ":", "checkouts", "}", ")", "else", ":", "# Loading repo", "r", "=", "ck", ".", "access", "(", "{", "'action'", ":", "'load'", ",", "'module_uoa'", ":", "work", "[", "'self_module_uoa'", "]", ",", "'data_uoa'", ":", "uoa", ",", "'common'", ":", "'yes'", "}", ")", "if", "r", "[", "'return'", "]", ">", "0", ":", "if", "r", "[", "'return'", "]", "==", "16", ":", "# If not found, try to add from GIT", "i", "[", 
"'action'", "]", "=", "'add'", "i", "[", "'shared'", "]", "=", "'yes'", "x", "=", "i", ".", "get", "(", "'quiet'", ",", "''", ")", "if", "x", "==", "''", ":", "x", "=", "'yes'", "i", "[", "'quiet'", "]", "=", "x", "i", "[", "'current_repos'", "]", "=", "cr", "return", "add", "(", "i", ")", "else", ":", "return", "r", "d", "=", "r", "[", "'dict'", "]", "duoa", "=", "r", "[", "'data_uoa'", "]", "if", "d", ".", "get", "(", "'recache'", ",", "''", ")", "==", "'yes'", ":", "xrecache", "=", "True", "p", "=", "d", "[", "'path'", "]", "t", "=", "d", ".", "get", "(", "'shared'", ",", "''", ")", "url", "=", "d", ".", "get", "(", "'url'", ",", "''", ")", "checkouts", "=", "d", ".", "get", "(", "'checkouts'", ",", "{", "}", ")", "pp", ".", "append", "(", "{", "'path'", ":", "p", ",", "'type'", ":", "t", ",", "'url'", ":", "url", ",", "'data_uoa'", ":", "duoa", ",", "'checkouts'", ":", "checkouts", "}", ")", "# Updating ...", "for", "q", "in", "pp", ":", "p", "=", "q", ".", "get", "(", "'path'", ",", "''", ")", "duoa", "=", "q", ".", "get", "(", "'data_uoa'", ",", "''", ")", "t", "=", "q", ".", "get", "(", "'type'", ",", "''", ")", "url", "=", "q", ".", "get", "(", "'url'", ",", "''", ")", "# Semi hack (useful for Anton)", "if", "i", ".", "get", "(", "'git'", ",", "''", ")", "==", "'yes'", ":", "url", "=", "url", ".", "replace", "(", "'https://'", ",", "'git@'", ")", "j", "=", "url", ".", "find", "(", "'/'", ")", "if", "j", ">", "0", ":", "url", "=", "url", "[", ":", "j", "]", "+", "':'", "+", "url", "[", "j", "+", "1", ":", "]", "url", "+=", "'.git'", "if", "o", "==", "'con'", "and", "tt", "!=", "'clone'", ":", "ck", ".", "out", "(", "'******************************************************************'", ")", "ck", ".", "out", "(", "'Updating repo \"'", "+", "duoa", "+", "'\" ...'", ")", "ck", ".", "out", "(", "''", ")", "ck", ".", "out", "(", "' Local path: '", "+", "p", ")", "ck", ".", "out", "(", "' URL: '", "+", "url", ")", "if", "t", "==", "'git'", ":", "# Check if git is installed", "rq", "=", "ck", ".", "gen_tmp_file", "(", "{", "}", ")", "if", "rq", "[", "'return'", "]", ">", "0", ":", "return", "rq", "xfn", "=", "rq", "[", "'file_name'", "]", "os", ".", "system", "(", "'git --version > '", "+", "xfn", ")", "rq", "=", "ck", ".", "load_text_file", "(", "{", "'text_file'", ":", "xfn", ",", "'delete_after_read'", ":", "'yes'", "}", ")", "xs", "=", "''", "if", "rq", "[", "'return'", "]", "==", "0", ":", "xs", "=", "rq", "[", "'string'", "]", ".", "strip", "(", ")", "if", "xs", ".", "find", "(", "' version '", ")", "<", "0", ":", "return", "{", "'return'", ":", "1", ",", "'error'", ":", "'git command line client is not found - please, install it or download repo as zip'", "}", "# Continue", "try", ":", "px", "=", "os", ".", "getcwd", "(", ")", "except", "OSError", ":", "from", "os", ".", "path", "import", "expanduser", "px", "=", "expanduser", "(", "\"~\"", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "p", ")", ":", "os", ".", "makedirs", "(", "p", ")", "if", "o", "==", "'con'", ":", "ck", ".", "out", "(", "''", ")", "ck", ".", "out", "(", "' cd '", "+", "p", ")", "os", ".", "chdir", "(", "p", ")", "r", "=", "0", "if", "ip", "!=", "'yes'", ":", "s", "=", "ck", ".", "cfg", "[", "'repo_types'", "]", "[", "t", "]", "[", "tt", "]", ".", "replace", "(", "'$#url#$'", ",", "url", ")", ".", "replace", "(", "'$#path#$'", ",", "p", ")", "if", "o", "==", "'con'", ":", "ck", ".", "out", "(", "' '", "+", "s", ")", "ck", ".", "out", "(", "''", ")", "r", "=", "os", ".", "system", "(", "s", ")", "if", 
"o", "==", "'con'", ":", "ck", ".", "out", "(", "''", ")", "os", ".", "chdir", "(", "px", ")", "# Restore path", "if", "r", ">", "0", ":", "if", "o", "==", "'con'", ":", "ck", ".", "out", "(", "''", ")", "ck", ".", "out", "(", "' WARNING: repository update likely failed OR IN A DIFFERENT BRANCH/CHECKOUT (git exit code: '", "+", "str", "(", "r", ")", "+", "')'", ")", "ck", ".", "out", "(", "''", ")", "rx", "=", "ck", ".", "inp", "(", "{", "'text'", ":", "'Would you like to continue (Y/n)?: '", "}", ")", "x", "=", "rx", "[", "'string'", "]", ".", "lower", "(", ")", "if", "x", "==", "'n'", "or", "x", "==", "'no'", ":", "return", "{", "'return'", ":", "1", ",", "'error'", ":", "'repository update likely failed - exit code '", "+", "str", "(", "r", ")", "}", "else", ":", "return", "{", "'return'", ":", "1", ",", "'error'", ":", "'repository update likely failed - exit code '", "+", "str", "(", "r", ")", "}", "else", ":", "if", "o", "==", "'con'", ":", "ck", ".", "out", "(", "'CK warning: this repository is not shared!'", ")", "# Check deps", "if", "tt", "!=", "'clone'", ":", "# clone is done in add ...", "if", "o", "==", "'con'", ":", "ck", ".", "out", "(", "' ========================================'", ")", "ck", ".", "out", "(", "' Checking dependencies on other repos ...'", ")", "ck", ".", "out", "(", "''", ")", "r", "=", "deps", "(", "{", "'path'", ":", "p", ",", "'current_path'", ":", "cr", ",", "'how'", ":", "'pull'", ",", "'version'", ":", "version", ",", "'branch'", ":", "branch", ",", "'checkout'", ":", "checkout", ",", "'out'", ":", "o", "}", ")", "if", "r", "[", "'return'", "]", ">", "0", ":", "return", "r", "# Re-caching ...", "if", "xrecache", ":", "if", "o", "==", "'con'", ":", "ck", ".", "out", "(", "' =============================================='", ")", "ck", ".", "out", "(", "' At least one repository requires recaching ...'", ")", "ck", ".", "out", "(", "''", ")", "r", "=", "recache", "(", "{", "'out'", ":", "o", "}", ")", "if", "r", "[", "'return'", "]", ">", "0", ":", "return", "r", "return", "{", "'return'", ":", "0", "}" ]
avg_line_len: 28.333333, score: 21.257028
def unnest_children(data, parent_name='', pk_name=None, force_pk=False):
    """
    For each ``key`` in each row of ``data`` (which must be a list of dicts),
    unnest any dict values into ``parent``, and remove list values into
    separate lists.

    Return (``data``, ``pk_name``, ``children``, ``child_fk_names``) where

    ``data``            the transformed input list
    ``pk_name``         field name of ``data``'s (possibly new) primary key
    ``children``        a defaultdict(list) of data extracted from child lists
    ``child_fk_names``  dict of the foreign key field name in each child
    """
    possible_fk_names = ['%s_id' % parent_name, '_%s_id' % parent_name, 'parent_id', ]
    if pk_name:
        possible_fk_names.insert(0, '%s_%s' % (parent_name, pk_name.strip('_')))
    children = defaultdict(list)
    field_names_used_by_children = defaultdict(set)
    child_fk_names = {}
    parent = ParentTable(data, parent_name, pk_name=pk_name, force_pk=force_pk)
    for row in parent:
        try:
            for (key, val) in row.items():
                if hasattr(val, 'items'):
                    unnest_child_dict(parent=row, key=key, parent_name=parent_name)
                elif isinstance(val, list) or isinstance(val, tuple):
                    # force listed items to be dicts, not scalars
                    row[key] = [v if hasattr(v, 'items') else {key: v} for v in val]
        except AttributeError:
            raise TypeError('Each row should be a dictionary, got %s: %s' % (type(row), row))
        for (key, val) in row.items():
            if isinstance(val, list) or isinstance(val, tuple):
                for child in val:
                    field_names_used_by_children[key].update(set(child.keys()))
    for (child_name, names_in_use) in field_names_used_by_children.items():
        if not parent.pk:
            parent.assign_pk()
        for fk_name in possible_fk_names:
            if fk_name not in names_in_use:
                break
        else:
            raise Exception("Cannot find unused field name in %s.%s to use as foreign key"
                            % (parent_name, child_name))
        child_fk_names[child_name] = fk_name
        for row in parent:
            if child_name in row:
                for child in row[child_name]:
                    child[fk_name] = row[parent.pk.name]
                    children[child_name].append(child)
                row.pop(child_name)
    # TODO: What if rows have a mix of scalar / list / dict types?
    return (parent, parent.pk.name if parent.pk else None, children, child_fk_names)
[ "def", "unnest_children", "(", "data", ",", "parent_name", "=", "''", ",", "pk_name", "=", "None", ",", "force_pk", "=", "False", ")", ":", "possible_fk_names", "=", "[", "'%s_id'", "%", "parent_name", ",", "'_%s_id'", "%", "parent_name", ",", "'parent_id'", ",", "]", "if", "pk_name", ":", "possible_fk_names", ".", "insert", "(", "0", ",", "'%s_%s'", "%", "(", "parent_name", ",", "pk_name", ".", "strip", "(", "'_'", ")", ")", ")", "children", "=", "defaultdict", "(", "list", ")", "field_names_used_by_children", "=", "defaultdict", "(", "set", ")", "child_fk_names", "=", "{", "}", "parent", "=", "ParentTable", "(", "data", ",", "parent_name", ",", "pk_name", "=", "pk_name", ",", "force_pk", "=", "force_pk", ")", "for", "row", "in", "parent", ":", "try", ":", "for", "(", "key", ",", "val", ")", "in", "row", ".", "items", "(", ")", ":", "if", "hasattr", "(", "val", ",", "'items'", ")", ":", "unnest_child_dict", "(", "parent", "=", "row", ",", "key", "=", "key", ",", "parent_name", "=", "parent_name", ")", "elif", "isinstance", "(", "val", ",", "list", ")", "or", "isinstance", "(", "val", ",", "tuple", ")", ":", "# force listed items to be dicts, not scalars", "row", "[", "key", "]", "=", "[", "v", "if", "hasattr", "(", "v", ",", "'items'", ")", "else", "{", "key", ":", "v", "}", "for", "v", "in", "val", "]", "except", "AttributeError", ":", "raise", "TypeError", "(", "'Each row should be a dictionary, got %s: %s'", "%", "(", "type", "(", "row", ")", ",", "row", ")", ")", "for", "(", "key", ",", "val", ")", "in", "row", ".", "items", "(", ")", ":", "if", "isinstance", "(", "val", ",", "list", ")", "or", "isinstance", "(", "val", ",", "tuple", ")", ":", "for", "child", "in", "val", ":", "field_names_used_by_children", "[", "key", "]", ".", "update", "(", "set", "(", "child", ".", "keys", "(", ")", ")", ")", "for", "(", "child_name", ",", "names_in_use", ")", "in", "field_names_used_by_children", ".", "items", "(", ")", ":", "if", "not", "parent", ".", "pk", ":", "parent", ".", "assign_pk", "(", ")", "for", "fk_name", "in", "possible_fk_names", ":", "if", "fk_name", "not", "in", "names_in_use", ":", "break", "else", ":", "raise", "Exception", "(", "\"Cannot find unused field name in %s.%s to use as foreign key\"", "%", "(", "parent_name", ",", "child_name", ")", ")", "child_fk_names", "[", "child_name", "]", "=", "fk_name", "for", "row", "in", "parent", ":", "if", "child_name", "in", "row", ":", "for", "child", "in", "row", "[", "child_name", "]", ":", "child", "[", "fk_name", "]", "=", "row", "[", "parent", ".", "pk", ".", "name", "]", "children", "[", "child_name", "]", ".", "append", "(", "child", ")", "row", ".", "pop", "(", "child_name", ")", "# TODO: What if rows have a mix of scalar / list / dict types?", "return", "(", "parent", ",", "parent", ".", "pk", ".", "name", "if", "parent", ".", "pk", "else", "None", ",", "children", ",", "child_fk_names", ")" ]
45.517857
22.232143
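The unnesting behavior documented above is easier to see on concrete data. The sketch below is a minimal, standalone illustration of the same idea; it skips the ParentTable machinery and the foreign-key name negotiation, and all row data is made up.

# Minimal standalone sketch of the unnesting idea (hypothetical data):
# dict values are flattened into the parent row, list values are split off
# into a child list that carries a foreign key back to the parent.
rows = [
    {'id': 1, 'name': 'a', 'meta': {'x': 9}, 'tags': ['red', 'blue']},
    {'id': 2, 'name': 'b', 'meta': {'x': 7}, 'tags': ['green']},
]
children = []
for row in rows:
    for (k, v) in row.pop('meta').items():
        row['meta_' + k] = v                 # unnest the dict value
    for tag in row.pop('tags'):              # extract the list value
        children.append({'parent_id': row['id'], 'tags': tag})
print(rows)      # [{'id': 1, 'name': 'a', 'meta_x': 9}, {'id': 2, 'name': 'b', 'meta_x': 7}]
print(children)  # [{'parent_id': 1, 'tags': 'red'}, {'parent_id': 1, 'tags': 'blue'}, {'parent_id': 2, 'tags': 'green'}]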
def add_element(self, element): """ Add an element to this ComplexType and also append it to element dict of parent type graph. """ self.elements.append(element) self.type_graph.add_element(element)
[ "def", "add_element", "(", "self", ",", "element", ")", ":", "self", ".", "elements", ".", "append", "(", "element", ")", "self", ".", "type_graph", ".", "add_element", "(", "element", ")" ]
34.285714
9.142857
def main(tex_file, output, verbose):
    """
    FLaP merges your LaTeX projects into a single LaTeX file that refers to
    images in the same directory.

    It reads the given root TEX_FILE and generates a flattened version in the
    given OUTPUT directory. It inlines the content of any TeX files referred
    to by \\input or \\include but also copies resources such as images (JPG,
    EPS, PDF, etc.) as well as other resources (class and package
    definitions, BibTeX files, etc.).
    """
    Controller(OSFileSystem(), Display(sys.stdout, verbose)).run(tex_file, output)
[ "def", "main", "(", "tex_file", ",", "output", ",", "verbose", ")", ":", "Controller", "(", "OSFileSystem", "(", ")", ",", "Display", "(", "sys", ".", "stdout", ",", "verbose", ")", ")", ".", "run", "(", "tex_file", ",", "output", ")" ]
47.166667
22.666667
def _apply_options(self, token):
    """Applies various filtering and processing options on token.

    Returns:
        The processed token. None if filtered.
    """
    # Apply word token filtering.
    if token.is_punct and self.remove_punct:
        return None
    if token.is_stop and self.remove_stop_words:
        return None
    if token.is_digit and self.remove_digits:
        return None
    if token.is_oov and self.exclude_oov:
        return None
    if token.pos_ in self.exclude_pos_tags:
        return None
    if token.ent_type_ in self.exclude_entities:
        return None

    # Lemmatized ones are already lowered.
    if self.lemmatize:
        return token.lemma_
    if self.lower:
        return token.lower_
    return token.orth_
[ "def", "_apply_options", "(", "self", ",", "token", ")", ":", "# Apply work token filtering.", "if", "token", ".", "is_punct", "and", "self", ".", "remove_punct", ":", "return", "None", "if", "token", ".", "is_stop", "and", "self", ".", "remove_stop_words", ":", "return", "None", "if", "token", ".", "is_digit", "and", "self", ".", "remove_digits", ":", "return", "None", "if", "token", ".", "is_oov", "and", "self", ".", "exclude_oov", ":", "return", "None", "if", "token", ".", "pos_", "in", "self", ".", "exclude_pos_tags", ":", "return", "None", "if", "token", ".", "ent_type_", "in", "self", ".", "exclude_entities", ":", "return", "None", "# Lemmatized ones are already lowered.", "if", "self", ".", "lemmatize", ":", "return", "token", ".", "lemma_", "if", "self", ".", "lower", ":", "return", "token", ".", "lower_", "return", "token", ".", "orth_" ]
31.846154
13.461538
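For illustration, the filtering above can be exercised without a full spaCy pipeline. The stand-in tokens below are hypothetical SimpleNamespace objects carrying only the attributes the method reads; a real pipeline would produce spaCy Token objects with the same fields.

from types import SimpleNamespace

# Hypothetical stand-ins for spaCy tokens, exposing only the attributes
# that the filtering reads.
def tok(text, **overrides):
    attrs = dict(is_punct=False, is_stop=False, is_digit=False, is_oov=False,
                 pos_='', ent_type_='', lemma_=text, lower_=text.lower(), orth_=text)
    attrs.update(overrides)
    return SimpleNamespace(**attrs)

tokens = [tok('Cats'), tok('!', is_punct=True), tok('the', is_stop=True)]
remove_punct, remove_stop_words, lower = True, True, True
kept = []
for t in tokens:
    if t.is_punct and remove_punct:
        continue
    if t.is_stop and remove_stop_words:
        continue
    kept.append(t.lower_ if lower else t.orth_)
print(kept)  # ['cats']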
def create_stoichiometric_matrix(model, array_type='dense', dtype=None):
    """Return a stoichiometric array representation of the given model.

    The columns represent the reactions and rows represent metabolites.
    S[i,j] therefore contains the quantity of metabolite `i` produced
    (negative for consumed) by reaction `j`.

    Parameters
    ----------
    model : cobra.Model
        The cobra model to construct the matrix for.
    array_type : string
        The type of array to construct. If 'dense', return a standard
        numpy.array, 'dok', or 'lil' will construct a sparse array using
        scipy of the corresponding type and 'DataFrame' will give a
        pandas `DataFrame` with metabolite indices and reaction columns.
    dtype : data-type
        The desired data-type for the array. If not given, defaults to float.

    Returns
    -------
    matrix of class `dtype`
        The stoichiometric matrix for the given model.
    """
    if array_type not in ('DataFrame', 'dense') and not dok_matrix:
        raise ValueError('Sparse matrices require scipy')

    if dtype is None:
        dtype = np.float64

    array_constructor = {
        'dense': np.zeros, 'dok': dok_matrix, 'lil': lil_matrix,
        'DataFrame': np.zeros,
    }

    n_metabolites = len(model.metabolites)
    n_reactions = len(model.reactions)
    array = array_constructor[array_type]((n_metabolites, n_reactions),
                                          dtype=dtype)

    m_ind = model.metabolites.index
    r_ind = model.reactions.index

    for reaction in model.reactions:
        for metabolite, stoich in iteritems(reaction.metabolites):
            array[m_ind(metabolite), r_ind(reaction)] = stoich

    if array_type == 'DataFrame':
        metabolite_ids = [met.id for met in model.metabolites]
        reaction_ids = [rxn.id for rxn in model.reactions]
        return pd.DataFrame(array, index=metabolite_ids, columns=reaction_ids)
    else:
        return array
[ "def", "create_stoichiometric_matrix", "(", "model", ",", "array_type", "=", "'dense'", ",", "dtype", "=", "None", ")", ":", "if", "array_type", "not", "in", "(", "'DataFrame'", ",", "'dense'", ")", "and", "not", "dok_matrix", ":", "raise", "ValueError", "(", "'Sparse matrices require scipy'", ")", "if", "dtype", "is", "None", ":", "dtype", "=", "np", ".", "float64", "array_constructor", "=", "{", "'dense'", ":", "np", ".", "zeros", ",", "'dok'", ":", "dok_matrix", ",", "'lil'", ":", "lil_matrix", ",", "'DataFrame'", ":", "np", ".", "zeros", ",", "}", "n_metabolites", "=", "len", "(", "model", ".", "metabolites", ")", "n_reactions", "=", "len", "(", "model", ".", "reactions", ")", "array", "=", "array_constructor", "[", "array_type", "]", "(", "(", "n_metabolites", ",", "n_reactions", ")", ",", "dtype", "=", "dtype", ")", "m_ind", "=", "model", ".", "metabolites", ".", "index", "r_ind", "=", "model", ".", "reactions", ".", "index", "for", "reaction", "in", "model", ".", "reactions", ":", "for", "metabolite", ",", "stoich", "in", "iteritems", "(", "reaction", ".", "metabolites", ")", ":", "array", "[", "m_ind", "(", "metabolite", ")", ",", "r_ind", "(", "reaction", ")", "]", "=", "stoich", "if", "array_type", "==", "'DataFrame'", ":", "metabolite_ids", "=", "[", "met", ".", "id", "for", "met", "in", "model", ".", "metabolites", "]", "reaction_ids", "=", "[", "rxn", ".", "id", "for", "rxn", "in", "model", ".", "reactions", "]", "return", "pd", ".", "DataFrame", "(", "array", ",", "index", "=", "metabolite_ids", ",", "columns", "=", "reaction_ids", ")", "else", ":", "return", "array" ]
35.851852
23.481481
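The dense path above reduces to a simple fill loop. Below is a self-contained toy version using plain dicts and numpy in place of a cobra model; the metabolite and reaction data are made up.

import numpy as np

# Rows are metabolites, columns are reactions; entries are stoichiometric
# coefficients (negative = consumed, positive = produced).
metabolites = ['A', 'B', 'C']
reactions = {'R1': {'A': -1, 'B': 1}, 'R2': {'B': -2, 'C': 1}}

S = np.zeros((len(metabolites), len(reactions)))
m_ind = {m: i for i, m in enumerate(metabolites)}
for j, stoich in enumerate(reactions.values()):
    for met, coeff in stoich.items():
        S[m_ind[met], j] = coeff
print(S)
# [[-1.  0.]
#  [ 1. -2.]
#  [ 0.  1.]]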
def image_list(self, name=None): ''' List server images ''' nt_ks = self.compute_conn ret = {} for image in nt_ks.images.list(): links = {} for link in image.links: links[link['rel']] = link['href'] ret[image.name] = { 'name': image.name, 'id': image.id, 'status': image.status, 'progress': image.progress, 'created': image.created, 'updated': image.updated, 'metadata': image.metadata, 'links': links, } if hasattr(image, 'minDisk'): ret[image.name]['minDisk'] = image.minDisk if hasattr(image, 'minRam'): ret[image.name]['minRam'] = image.minRam if name: return {name: ret[name]} return ret
[ "def", "image_list", "(", "self", ",", "name", "=", "None", ")", ":", "nt_ks", "=", "self", ".", "compute_conn", "ret", "=", "{", "}", "for", "image", "in", "nt_ks", ".", "images", ".", "list", "(", ")", ":", "links", "=", "{", "}", "for", "link", "in", "image", ".", "links", ":", "links", "[", "link", "[", "'rel'", "]", "]", "=", "link", "[", "'href'", "]", "ret", "[", "image", ".", "name", "]", "=", "{", "'name'", ":", "image", ".", "name", ",", "'id'", ":", "image", ".", "id", ",", "'status'", ":", "image", ".", "status", ",", "'progress'", ":", "image", ".", "progress", ",", "'created'", ":", "image", ".", "created", ",", "'updated'", ":", "image", ".", "updated", ",", "'metadata'", ":", "image", ".", "metadata", ",", "'links'", ":", "links", ",", "}", "if", "hasattr", "(", "image", ",", "'minDisk'", ")", ":", "ret", "[", "image", ".", "name", "]", "[", "'minDisk'", "]", "=", "image", ".", "minDisk", "if", "hasattr", "(", "image", ",", "'minRam'", ")", ":", "ret", "[", "image", ".", "name", "]", "[", "'minRam'", "]", "=", "image", ".", "minRam", "if", "name", ":", "return", "{", "name", ":", "ret", "[", "name", "]", "}", "return", "ret" ]
32.962963
10.962963
def _get_rows(self):
    """ Return all rows on page """
    html = requests.get(self.url.build()).text
    if re.search('did not match any documents', html):
        return []
    pq = PyQuery(html)
    rows = pq("table.data").find("tr")
    return list(map(rows.eq, range(rows.size())))[1:]
[ "def", "_get_rows", "(", "self", ")", ":", "html", "=", "requests", ".", "get", "(", "self", ".", "url", ".", "build", "(", ")", ")", ".", "text", "if", "re", ".", "search", "(", "'did not match any documents'", ",", "html", ")", ":", "return", "[", "]", "pq", "=", "PyQuery", "(", "html", ")", "rows", "=", "pq", "(", "\"table.data\"", ")", ".", "find", "(", "\"tr\"", ")", "return", "map", "(", "rows", ".", "eq", ",", "range", "(", "rows", ".", "size", "(", ")", ")", ")", "[", "1", ":", "]" ]
32.1
10.3
def attention_lm_translation(): """Version to use for seq2seq.""" hparams = attention_lm_base() hparams.layer_preprocess_sequence = "n" hparams.layer_postprocess_sequence = "da" hparams.learning_rate = 0.4 hparams.prepend_mode = "prepend_inputs_masked_attention" hparams.max_length = 512 hparams.label_smoothing = 0.1 hparams.shared_embedding_and_softmax_weights = True return hparams
[ "def", "attention_lm_translation", "(", ")", ":", "hparams", "=", "attention_lm_base", "(", ")", "hparams", ".", "layer_preprocess_sequence", "=", "\"n\"", "hparams", ".", "layer_postprocess_sequence", "=", "\"da\"", "hparams", ".", "learning_rate", "=", "0.4", "hparams", ".", "prepend_mode", "=", "\"prepend_inputs_masked_attention\"", "hparams", ".", "max_length", "=", "512", "hparams", ".", "label_smoothing", "=", "0.1", "hparams", ".", "shared_embedding_and_softmax_weights", "=", "True", "return", "hparams" ]
35.818182
10.090909
def move_item_up(self, item): """Move an item up in the list. Essentially swap it with the item above it. :param item: The item to be moved. """ prev_iter = self._prev_iter_for(item) if prev_iter is not None: self.model.swap(prev_iter, self._iter_for(item))
[ "def", "move_item_up", "(", "self", ",", "item", ")", ":", "prev_iter", "=", "self", ".", "_prev_iter_for", "(", "item", ")", "if", "prev_iter", "is", "not", "None", ":", "self", ".", "model", ".", "swap", "(", "prev_iter", ",", "self", ".", "_iter_for", "(", "item", ")", ")" ]
31
13.6
def set_autosession(self, value=None): """ Turn autosession (automatic committing after each modification call) on/off. If value is None, only query the current value (don't change anything). """ if value is not None: self.rollback() self.autosession = value return self.autosession
[ "def", "set_autosession", "(", "self", ",", "value", "=", "None", ")", ":", "if", "value", "is", "not", "None", ":", "self", ".", "rollback", "(", ")", "self", ".", "autosession", "=", "value", "return", "self", ".", "autosession" ]
38.444444
13.555556
def _process_sample (self, ap1, ap2, ap3, triple, tflags): """We have computed one independent phase closure triple in one timeslot. """ # Frequency-resolved: np.divide (triple, np.abs (triple), triple) phase = np.angle (triple) self.ap_spec_stats_by_ddid[self.cur_ddid].accum (ap1, phase, tflags + 0.) self.ap_spec_stats_by_ddid[self.cur_ddid].accum (ap2, phase, tflags + 0.) self.ap_spec_stats_by_ddid[self.cur_ddid].accum (ap3, phase, tflags + 0.) # Frequency-averaged: triple = np.dot (triple, tflags) / tflags.sum () phase = np.angle (triple) self.global_stats_by_time.accum (self.cur_time, phase) self.ap_stats_by_ddid[self.cur_ddid].accum (ap1, phase) self.ap_stats_by_ddid[self.cur_ddid].accum (ap2, phase) self.ap_stats_by_ddid[self.cur_ddid].accum (ap3, phase) self.bp_stats_by_ddid[self.cur_ddid].accum ((ap1, ap2), phase) self.bp_stats_by_ddid[self.cur_ddid].accum ((ap1, ap3), phase) self.bp_stats_by_ddid[self.cur_ddid].accum ((ap2, ap3), phase) self.ap_time_stats_by_ddid[self.cur_ddid].accum (self.cur_time, ap1, phase) self.ap_time_stats_by_ddid[self.cur_ddid].accum (self.cur_time, ap2, phase) self.ap_time_stats_by_ddid[self.cur_ddid].accum (self.cur_time, ap3, phase)
[ "def", "_process_sample", "(", "self", ",", "ap1", ",", "ap2", ",", "ap3", ",", "triple", ",", "tflags", ")", ":", "# Frequency-resolved:", "np", ".", "divide", "(", "triple", ",", "np", ".", "abs", "(", "triple", ")", ",", "triple", ")", "phase", "=", "np", ".", "angle", "(", "triple", ")", "self", ".", "ap_spec_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "ap1", ",", "phase", ",", "tflags", "+", "0.", ")", "self", ".", "ap_spec_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "ap2", ",", "phase", ",", "tflags", "+", "0.", ")", "self", ".", "ap_spec_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "ap3", ",", "phase", ",", "tflags", "+", "0.", ")", "# Frequency-averaged:", "triple", "=", "np", ".", "dot", "(", "triple", ",", "tflags", ")", "/", "tflags", ".", "sum", "(", ")", "phase", "=", "np", ".", "angle", "(", "triple", ")", "self", ".", "global_stats_by_time", ".", "accum", "(", "self", ".", "cur_time", ",", "phase", ")", "self", ".", "ap_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "ap1", ",", "phase", ")", "self", ".", "ap_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "ap2", ",", "phase", ")", "self", ".", "ap_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "ap3", ",", "phase", ")", "self", ".", "bp_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "(", "ap1", ",", "ap2", ")", ",", "phase", ")", "self", ".", "bp_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "(", "ap1", ",", "ap3", ")", ",", "phase", ")", "self", ".", "bp_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "(", "ap2", ",", "ap3", ")", ",", "phase", ")", "self", ".", "ap_time_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "self", ".", "cur_time", ",", "ap1", ",", "phase", ")", "self", ".", "ap_time_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "self", ".", "cur_time", ",", "ap2", ",", "phase", ")", "self", ".", "ap_time_stats_by_ddid", "[", "self", ".", "cur_ddid", "]", ".", "accum", "(", "self", ".", "cur_time", ",", "ap3", ",", "phase", ")" ]
47.642857
26.928571
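The closure-phase step above (normalize the triple product to unit magnitude, then take its angle) can be checked with made-up visibilities; for pure phasors the closure phase is just the sum of the three phases.

import numpy as np

# Made-up visibility values for demonstration.
v12 = 2.0 * np.exp(1j * 0.3)
v23 = 1.5 * np.exp(1j * 0.5)
v31 = 0.8 * np.exp(1j * -0.7)
triple = np.array([v12 * v23 * v31])
np.divide(triple, np.abs(triple), triple)  # in-place normalization, as above
print(np.angle(triple))  # [0.1] == 0.3 + 0.5 - 0.7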
def _time_string(self, value): """ Get a suitable time string ("ss", "mm:ss", "hh:mm:ss"), according to the maximum time. :param int value: the time value :rtype: string """ if self.max_time < 60: return "%02d" % (value) elif self.max_time < 3600: mm = value // 60 ss = value - mm * 60 return "%02d:%02d" % (mm, ss) hh = value // 3600 mm = (value - hh * 3600) // 60 ss = (value - hh * 3600 - mm * 60) return "%02d:%02d:%02d" % (hh, mm, ss)
[ "def", "_time_string", "(", "self", ",", "value", ")", ":", "if", "self", ".", "max_time", "<", "60", ":", "return", "\"%02d\"", "%", "(", "value", ")", "elif", "self", ".", "max_time", "<", "3600", ":", "mm", "=", "value", "//", "60", "ss", "=", "value", "-", "mm", "*", "60", "return", "\"%02d:%02d\"", "%", "(", "mm", ",", "ss", ")", "hh", "=", "value", "//", "3600", "mm", "=", "(", "value", "-", "hh", "*", "3600", ")", "//", "60", "ss", "=", "(", "value", "-", "hh", "*", "3600", "-", "mm", "*", "60", ")", "return", "\"%02d:%02d:%02d\"", "%", "(", "hh", ",", "mm", ",", "ss", ")" ]
30.210526
7.684211
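A standalone sketch of the same formatting rule, with max_time passed explicitly rather than read from the instance:

def time_string(value, max_time):
    if max_time < 60:
        return "%02d" % value
    if max_time < 3600:
        return "%02d:%02d" % divmod(value, 60)
    hh, rest = divmod(value, 3600)
    return "%02d:%02d:%02d" % ((hh,) + divmod(rest, 60))

print(time_string(42, 50))      # '42'
print(time_string(125, 600))    # '02:05'
print(time_string(3725, 7200))  # '01:02:05'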
def check_response(self, resp):
    """
    Checks the response after a request was made; mainly validates the
    status field of the response.
    :param resp: response object of the performed request
    :return: the parsed response wrapped in a ResponseHolder
    """
    # For successful API call, response code will be 200 (OK)
    if resp.ok:
        json = resp.json()
        self.response = ResponseHolder()
        self.response.response = json

        # Check the code
        if 'status' not in json:
            raise InvalidResponse('No status field')

        self.response.status = self.field_to_long(json['status'])
        if self.response.status != EBConsts.STATUS_OK:
            txt_status = self.get_text_status(json)
            raise InvalidStatus('Status is %s (%04X)'
                                % (txt_status if txt_status is not None else "", self.response.status))

        if self.response_checker is not None:
            self.response_checker(self.response)

        return self.response

    else:
        # If response code is not ok (200), print the resulting http error code with description
        resp.raise_for_status()
        pass
[ "def", "check_response", "(", "self", ",", "resp", ")", ":", "# For successful API call, response code will be 200 (OK)", "if", "resp", ".", "ok", ":", "json", "=", "resp", ".", "json", "(", ")", "self", ".", "response", "=", "ResponseHolder", "(", ")", "self", ".", "response", ".", "response", "=", "json", "# Check the code", "if", "'status'", "not", "in", "json", ":", "raise", "InvalidResponse", "(", "'No status field'", ")", "self", ".", "response", ".", "status", "=", "self", ".", "field_to_long", "(", "json", "[", "'status'", "]", ")", "if", "self", ".", "response", ".", "status", "!=", "EBConsts", ".", "STATUS_OK", ":", "txt_status", "=", "self", ".", "get_text_status", "(", "json", ")", "raise", "InvalidStatus", "(", "'Status is %s (%04X)'", "%", "(", "txt_status", "if", "txt_status", "is", "not", "None", "else", "\"\"", ",", "self", ".", "response", ".", "status", ")", ")", "if", "self", ".", "response_checker", "is", "not", "None", ":", "self", ".", "response_checker", "(", "self", ".", "response", ")", "return", "self", ".", "response", "else", ":", "# If response code is not ok (200), print the resulting http error code with description", "resp", ".", "raise_for_status", "(", ")", "pass" ]
33.5
21.558824
def blk_nd_short(blk, shape):
    """Iterate through the blocks that strictly cover an array.

    Iterate through the blocks that cover the part of the array
    given by max_blk_coverage.

    :param blk: the N-dimensional shape of the block
    :param shape: the N-dimensional shape of the array
    :return: a generator that yields the blocks

    Example:

        >>> result = list(blk_nd_short(blk=(5,3), shape=(11, 11)))
        >>> result[0]
        (slice(0, 5, None), slice(0, 3, None))
        >>> result[1]
        (slice(0, 5, None), slice(3, 6, None))
        >>> result[-1]
        (slice(5, 10, None), slice(6, 9, None))

    In this case, the output of max_blk_coverage
    is (10, 9), so only this part of the array is covered

    .. seealso:: :py:func:`blk_nd`
        Yields blocks of blk size until the remaining part is smaller
        than `blk` and then yields smaller blocks.
    """
    internals = (blk_1d_short(b, s) for b, s in zip(blk, shape))
    return product(*internals)
[ "def", "blk_nd_short", "(", "blk", ",", "shape", ")", ":", "internals", "=", "(", "blk_1d_short", "(", "b", ",", "s", ")", "for", "b", ",", "s", "in", "zip", "(", "blk", ",", "shape", ")", ")", "return", "product", "(", "*", "internals", ")" ]
30.181818
21.666667
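The 1-D helper blk_1d_short is not part of the snippet above; the sketch below is one implementation consistent with its doctest (full-size slices only, stopping at the max_blk_coverage boundary, i.e. floor(s / b) * b).

from itertools import product

def blk_1d_short(b, s):
    # Yield slices of exactly b elements; stop once fewer than b remain.
    for start in range(0, (s // b) * b, b):
        yield slice(start, start + b)

result = list(product(blk_1d_short(5, 11), blk_1d_short(3, 11)))
print(result[0])   # (slice(0, 5, None), slice(0, 3, None))
print(result[-1])  # (slice(5, 10, None), slice(6, 9, None))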
def function_begin(self): """Inserts function name label and function frame initialization""" self.newline_label(self.shared.function_name, False, True) self.push("%14") self.move("%15", "%14")
[ "def", "function_begin", "(", "self", ")", ":", "self", ".", "newline_label", "(", "self", ".", "shared", ".", "function_name", ",", "False", ",", "True", ")", "self", ".", "push", "(", "\"%14\"", ")", "self", ".", "move", "(", "\"%15\"", ",", "\"%14\"", ")" ]
45
13
def toggle(s): """ Toggle back and forth between a name and a tuple representation. :param str s: a string which is either a text name, or a tuple-string: a string with three numbers separated by commas :returns: if the string was a text name, return a tuple. If it's a tuple-string and it corresponds to a text name, return the text name, else return the original tuple-string. """ is_numeric = ',' in s or s.startswith('0x') or s.startswith('#') c = name_to_color(s) return color_to_name(c) if is_numeric else str(c)
[ "def", "toggle", "(", "s", ")", ":", "is_numeric", "=", "','", "in", "s", "or", "s", ".", "startswith", "(", "'0x'", ")", "or", "s", ".", "startswith", "(", "'#'", ")", "c", "=", "name_to_color", "(", "s", ")", "return", "color_to_name", "(", "c", ")", "if", "is_numeric", "else", "str", "(", "c", ")" ]
41.857143
24
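toggle relies on name_to_color and color_to_name lookups that are not shown here; the two-entry table below is a hypothetical stand-in just to demonstrate the round trip.

# Hypothetical stand-ins for the color lookup helpers used by toggle().
TABLE = {'red': (255, 0, 0), 'green': (0, 128, 0)}

def name_to_color(s):
    # Parse 'r,g,b' strings, otherwise look the name up in the table.
    if ',' in s:
        return tuple(int(p) for p in s.split(','))
    return TABLE[s]

def color_to_name(c):
    return next((n for n, v in TABLE.items() if v == c), str(c))

print(toggle('red'))      # '(255, 0, 0)' -- a name becomes a tuple-string
print(toggle('255,0,0'))  # 'red'         -- a tuple-string becomes a name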
def route_absent(name, route_table, resource_group, connection_auth=None): ''' .. versionadded:: 2019.2.0 Ensure a route table does not exist in the resource group. :param name: Name of the route table. :param route_table: The name of the existing route table containing the route. :param resource_group: The resource group assigned to the route table. :param connection_auth: A dict with subscription and authentication parameters to be used in connecting to the Azure Resource Manager API. ''' ret = { 'name': name, 'result': False, 'comment': '', 'changes': {} } if not isinstance(connection_auth, dict): ret['comment'] = 'Connection information must be specified via connection_auth dictionary!' return ret route = __salt__['azurearm_network.route_get']( name, route_table, resource_group, azurearm_log_level='info', **connection_auth ) if 'error' in route: ret['result'] = True ret['comment'] = 'Route {0} was not found.'.format(name) return ret elif __opts__['test']: ret['comment'] = 'Route {0} would be deleted.'.format(name) ret['result'] = None ret['changes'] = { 'old': route, 'new': {}, } return ret deleted = __salt__['azurearm_network.route_delete'](name, route_table, resource_group, **connection_auth) if deleted: ret['result'] = True ret['comment'] = 'Route {0} has been deleted.'.format(name) ret['changes'] = { 'old': route, 'new': {} } return ret ret['comment'] = 'Failed to delete route {0}!'.format(name) return ret
[ "def", "route_absent", "(", "name", ",", "route_table", ",", "resource_group", ",", "connection_auth", "=", "None", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "if", "not", "isinstance", "(", "connection_auth", ",", "dict", ")", ":", "ret", "[", "'comment'", "]", "=", "'Connection information must be specified via connection_auth dictionary!'", "return", "ret", "route", "=", "__salt__", "[", "'azurearm_network.route_get'", "]", "(", "name", ",", "route_table", ",", "resource_group", ",", "azurearm_log_level", "=", "'info'", ",", "*", "*", "connection_auth", ")", "if", "'error'", "in", "route", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Route {0} was not found.'", ".", "format", "(", "name", ")", "return", "ret", "elif", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'Route {0} would be deleted.'", ".", "format", "(", "name", ")", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "route", ",", "'new'", ":", "{", "}", ",", "}", "return", "ret", "deleted", "=", "__salt__", "[", "'azurearm_network.route_delete'", "]", "(", "name", ",", "route_table", ",", "resource_group", ",", "*", "*", "connection_auth", ")", "if", "deleted", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Route {0} has been deleted.'", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "route", ",", "'new'", ":", "{", "}", "}", "return", "ret", "ret", "[", "'comment'", "]", "=", "'Failed to delete route {0}!'", ".", "format", "(", "name", ")", "return", "ret" ]
26.784615
25.4
def expand_branch_name(self, name): """ Expand branch names to their unambiguous form. :param name: The name of a local or remote branch (a string). :returns: The unambiguous form of the branch name (a string). This internal method is used by methods like :func:`find_revision_id()` and :func:`find_revision_number()` to detect and expand remote branch names into their unambiguous form which is accepted by commands like ``git rev-parse`` and ``git rev-list --count``. """ # If no name is given we pick the default revision. if not name: return self.default_revision # Run `git for-each-ref' once and remember the results. branches = list(self.find_branches_raw()) # Check for an exact match against a local branch. for prefix, other_name, revision_id in branches: if prefix == 'refs/heads/' and name == other_name: # If we find a local branch whose name exactly matches the name # given by the caller then we consider the argument given by # the caller unambiguous. logger.debug("Branch name %r matches local branch.", name) return name # Check for an exact match against a remote branch. for prefix, other_name, revision_id in branches: if prefix.startswith('refs/remotes/') and name == other_name: # If we find a remote branch whose name exactly matches the # name given by the caller then we expand the name given by the # caller into the full %(refname) emitted by `git for-each-ref'. unambiguous_name = prefix + name logger.debug("Branch name %r matches remote branch %r.", name, unambiguous_name) return unambiguous_name # As a fall back we return the given name without expanding it. # This code path might not be necessary but was added out of # conservativeness, with the goal of trying to guarantee # backwards compatibility. logger.debug("Failed to expand branch name %r.", name) return name
[ "def", "expand_branch_name", "(", "self", ",", "name", ")", ":", "# If no name is given we pick the default revision.", "if", "not", "name", ":", "return", "self", ".", "default_revision", "# Run `git for-each-ref' once and remember the results.", "branches", "=", "list", "(", "self", ".", "find_branches_raw", "(", ")", ")", "# Check for an exact match against a local branch.", "for", "prefix", ",", "other_name", ",", "revision_id", "in", "branches", ":", "if", "prefix", "==", "'refs/heads/'", "and", "name", "==", "other_name", ":", "# If we find a local branch whose name exactly matches the name", "# given by the caller then we consider the argument given by", "# the caller unambiguous.", "logger", ".", "debug", "(", "\"Branch name %r matches local branch.\"", ",", "name", ")", "return", "name", "# Check for an exact match against a remote branch.", "for", "prefix", ",", "other_name", ",", "revision_id", "in", "branches", ":", "if", "prefix", ".", "startswith", "(", "'refs/remotes/'", ")", "and", "name", "==", "other_name", ":", "# If we find a remote branch whose name exactly matches the", "# name given by the caller then we expand the name given by the", "# caller into the full %(refname) emitted by `git for-each-ref'.", "unambiguous_name", "=", "prefix", "+", "name", "logger", ".", "debug", "(", "\"Branch name %r matches remote branch %r.\"", ",", "name", ",", "unambiguous_name", ")", "return", "unambiguous_name", "# As a fall back we return the given name without expanding it.", "# This code path might not be necessary but was added out of", "# conservativeness, with the goal of trying to guarantee", "# backwards compatibility.", "logger", ".", "debug", "(", "\"Failed to expand branch name %r.\"", ",", "name", ")", "return", "name" ]
54.075
22.825
async def _handle_container_timeout(self, container_id, timeout): """ Check timeout with docker stats :param container_id: :param timeout: in seconds (cpu time) """ try: docker_stats = await self._docker_interface.get_stats(container_id) source = AsyncIteratorWrapper(docker_stats) nano_timeout = timeout * (10 ** 9) async for upd in source: if upd is None: await self._kill_it_with_fire(container_id) self._logger.debug("%i", upd['cpu_stats']['cpu_usage']['total_usage']) if upd['cpu_stats']['cpu_usage']['total_usage'] > nano_timeout: self._logger.info("Killing container %s as it used %i CPU seconds (max was %i)", container_id, int(upd['cpu_stats']['cpu_usage']['total_usage'] / (10 ** 9)), timeout) await self._kill_it_with_fire(container_id) return except asyncio.CancelledError: pass except: self._logger.exception("Exception in _handle_container_timeout")
[ "async", "def", "_handle_container_timeout", "(", "self", ",", "container_id", ",", "timeout", ")", ":", "try", ":", "docker_stats", "=", "await", "self", ".", "_docker_interface", ".", "get_stats", "(", "container_id", ")", "source", "=", "AsyncIteratorWrapper", "(", "docker_stats", ")", "nano_timeout", "=", "timeout", "*", "(", "10", "**", "9", ")", "async", "for", "upd", "in", "source", ":", "if", "upd", "is", "None", ":", "await", "self", ".", "_kill_it_with_fire", "(", "container_id", ")", "self", ".", "_logger", ".", "debug", "(", "\"%i\"", ",", "upd", "[", "'cpu_stats'", "]", "[", "'cpu_usage'", "]", "[", "'total_usage'", "]", ")", "if", "upd", "[", "'cpu_stats'", "]", "[", "'cpu_usage'", "]", "[", "'total_usage'", "]", ">", "nano_timeout", ":", "self", ".", "_logger", ".", "info", "(", "\"Killing container %s as it used %i CPU seconds (max was %i)\"", ",", "container_id", ",", "int", "(", "upd", "[", "'cpu_stats'", "]", "[", "'cpu_usage'", "]", "[", "'total_usage'", "]", "/", "(", "10", "**", "9", ")", ")", ",", "timeout", ")", "await", "self", ".", "_kill_it_with_fire", "(", "container_id", ")", "return", "except", "asyncio", ".", "CancelledError", ":", "pass", "except", ":", "self", ".", "_logger", ".", "exception", "(", "\"Exception in _handle_container_timeout\"", ")" ]
49.695652
22.565217
def trim_wav_pydub(in_path: Path, out_path: Path, start_time: int, end_time: int) -> None:
    """ Crops the wav file. """

    logger.info(
        "Using pydub/ffmpeg to create {} from {}".format(out_path, in_path) +
        " using a start_time of {} and an end_time of {}".format(start_time, end_time))

    if out_path.is_file():
        return

    # TODO add logging here
    #print("in_fn: {}".format(in_fn))
    #print("out_fn: {}".format(out_fn))
    in_ext = in_path.suffix[1:]
    out_ext = out_path.suffix[1:]
    audio = AudioSegment.from_file(str(in_path), in_ext)
    trimmed = audio[start_time:end_time]
    # pydub evidently doesn't actually use the parameters when outputting wavs,
    # since it doesn't use FFMPEG to deal with outputting WAVs. This is a bit
    # of a leaky abstraction. No warning is given, so normalization to 16Khz
    # mono wavs has to happen later. Leaving the parameters here in case it
    # changes
    trimmed.export(str(out_path), format=out_ext,
                   parameters=["-ac", "1", "-ar", "16000"])
[ "def", "trim_wav_pydub", "(", "in_path", ":", "Path", ",", "out_path", ":", "Path", ",", "start_time", ":", "int", ",", "end_time", ":", "int", ")", "->", "None", ":", "logger", ".", "info", "(", "\"Using pydub/ffmpeg to create {} from {}\"", ".", "format", "(", "out_path", ",", "in_path", ")", "+", "\" using a start_time of {} and an end_time of {}\"", ".", "format", "(", "start_time", ",", "end_time", ")", ")", "if", "out_path", ".", "is_file", "(", ")", ":", "return", "# TODO add logging here", "#print(\"in_fn: {}\".format(in_fn))", "#print(\"out_fn: {}\".format(out_fn))", "in_ext", "=", "in_path", ".", "suffix", "[", "1", ":", "]", "out_ext", "=", "out_path", ".", "suffix", "[", "1", ":", "]", "audio", "=", "AudioSegment", ".", "from_file", "(", "str", "(", "in_path", ")", ",", "in_ext", ")", "trimmed", "=", "audio", "[", "start_time", ":", "end_time", "]", "# pydub evidently doesn't actually use the parameters when outputting wavs,", "# since it doesn't use FFMPEG to deal with outputtting WAVs. This is a bit", "# of a leaky abstraction. No warning is given, so normalization to 16Khz", "# mono wavs has to happen later. Leaving the parameters here in case it", "# changes", "trimmed", ".", "export", "(", "str", "(", "out_path", ")", ",", "format", "=", "out_ext", ",", "parameters", "=", "[", "\"-ac\"", ",", "\"1\"", ",", "\"-ar\"", ",", "\"16000\"", "]", ")" ]
42.769231
21.884615
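A hypothetical invocation (file names made up); note that pydub slices audio in milliseconds, so start_time and end_time are ms offsets into the recording.

from pathlib import Path

# Keep the segment from 1.5 s to 4.2 s of the input recording.
trim_wav_pydub(Path("session.wav"), Path("utterance.wav"),
               start_time=1500, end_time=4200)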
def check_cli_version(): """ Check if the current cli version satisfies the server requirements """ should_exit = False server_version = VersionClient().get_cli_version() current_version = get_cli_version() if LooseVersion(current_version) < LooseVersion(server_version.min_version): print("\nYour version of CLI (%s) is no longer compatible with server." % current_version) should_exit = True elif LooseVersion(current_version) < LooseVersion(server_version.latest_version): print("\nNew version of CLI (%s) is now available." % server_version.latest_version) else: return # new version is ready if should_exit and click.confirm('\nDo you want to upgrade to version %s now?' % server_version.latest_version): auto_upgrade() sys.exit(0) else: msg_parts = [] msg_parts.append("\nTo manually upgrade run:") msg_parts.append(" pip install -U floyd-cli") if is_conda_env(): msg_parts.append("Or if you prefer to use conda:") msg_parts.append(" conda install -y -c conda-forge -c floydhub floyd-cli") print("\n".join(msg_parts)) print("") if should_exit: sys.exit(0)
[ "def", "check_cli_version", "(", ")", ":", "should_exit", "=", "False", "server_version", "=", "VersionClient", "(", ")", ".", "get_cli_version", "(", ")", "current_version", "=", "get_cli_version", "(", ")", "if", "LooseVersion", "(", "current_version", ")", "<", "LooseVersion", "(", "server_version", ".", "min_version", ")", ":", "print", "(", "\"\\nYour version of CLI (%s) is no longer compatible with server.\"", "%", "current_version", ")", "should_exit", "=", "True", "elif", "LooseVersion", "(", "current_version", ")", "<", "LooseVersion", "(", "server_version", ".", "latest_version", ")", ":", "print", "(", "\"\\nNew version of CLI (%s) is now available.\"", "%", "server_version", ".", "latest_version", ")", "else", ":", "return", "# new version is ready", "if", "should_exit", "and", "click", ".", "confirm", "(", "'\\nDo you want to upgrade to version %s now?'", "%", "server_version", ".", "latest_version", ")", ":", "auto_upgrade", "(", ")", "sys", ".", "exit", "(", "0", ")", "else", ":", "msg_parts", "=", "[", "]", "msg_parts", ".", "append", "(", "\"\\nTo manually upgrade run:\"", ")", "msg_parts", ".", "append", "(", "\" pip install -U floyd-cli\"", ")", "if", "is_conda_env", "(", ")", ":", "msg_parts", ".", "append", "(", "\"Or if you prefer to use conda:\"", ")", "msg_parts", ".", "append", "(", "\" conda install -y -c conda-forge -c floydhub floyd-cli\"", ")", "print", "(", "\"\\n\"", ".", "join", "(", "msg_parts", ")", ")", "print", "(", "\"\"", ")", "if", "should_exit", ":", "sys", ".", "exit", "(", "0", ")" ]
38.09375
25.84375
def enum_sigma_ort(cutoff, r_axis, c2_b2_a2_ratio):
    """
    Find all possible sigma values and corresponding rotation angles
    within a sigma value cutoff with known rotation axis in orthorhombic system.
    The algorithm for this code is from reference, Scripta Metallurgica 27, 291(1992)

    Args:
        cutoff (integer): the cutoff of sigma values.
        r_axis (list of three integers, e.g. u, v, w):
                the rotation axis of the grain boundary, with the format of [u,v,w].
        c2_b2_a2_ratio (list of three integers, e.g. mu, lamda, mv):
                mu:lam:mv is the square of the orthorhombic axial ratio with rational
                numbers. If irrational for one axis, set it to None.
                e.g. mu:lam:mv = c2,None,a2, means b2 is irrational.

    Returns:
        sigmas (dict):
                dictionary with keys as the possible integer sigma values
                and values as list of the possible rotation angles to the
                corresponding sigma values.
                e.g. the format as
                {sigma1: [angle11,angle12,...], sigma2: [angle21, angle22,...],...}
                Note: the angles are the rotation angle of one grain with respect
                to the other grain.
                When generating the microstructure of the grain boundary using these
                angles, you need to analyze the symmetry of the structure. Different
                angles may result in equivalent microstructures.
    """
    sigmas = {}
    # make sure gcd(r_axis)==1
    if reduce(gcd, r_axis) != 1:
        r_axis = [int(round(x / reduce(gcd, r_axis))) for x in r_axis]
    u, v, w = r_axis
    # make sure mu, lambda, mv are coprime integers.
    if None in c2_b2_a2_ratio:
        mu, lam, mv = c2_b2_a2_ratio
        non_none = [i for i in c2_b2_a2_ratio if i is not None]
        if len(non_none) < 2:
            raise RuntimeError('No CSL exist for two irrational numbers')
        non1, non2 = non_none
        if reduce(gcd, non_none) != 1:
            temp = reduce(gcd, non_none)
            non1 = int(round(non1 / temp))
            non2 = int(round(non2 / temp))
        if mu is None:
            lam = non1
            mv = non2
            mu = 1
            if w != 0:
                if u != 0 or (v != 0):
                    raise RuntimeError('For irrational c2, CSL only exist for [0,0,1] '
                                       'or [u,v,0] and m = 0')
        elif lam is None:
            mu = non1
            mv = non2
            lam = 1
            if v != 0:
                if u != 0 or (w != 0):
                    raise RuntimeError('For irrational b2, CSL only exist for [0,1,0] '
                                       'or [u,0,w] and m = 0')
        elif mv is None:
            mu = non1
            lam = non2
            mv = 1
            if u != 0:
                if w != 0 or (v != 0):
                    raise RuntimeError('For irrational a2, CSL only exist for [1,0,0] '
                                       'or [0,v,w] and m = 0')
    else:
        mu, lam, mv = c2_b2_a2_ratio
        if reduce(gcd, c2_b2_a2_ratio) != 1:
            temp = reduce(gcd, c2_b2_a2_ratio)
            mu = int(round(mu / temp))
            mv = int(round(mv / temp))
            lam = int(round(lam / temp))
        if u == 0 and v == 0:
            mu = 1
        if u == 0 and w == 0:
            lam = 1
        if v == 0 and w == 0:
            mv = 1
    # refer to the meaning of d in reference
    d = (mv * u ** 2 + lam * v ** 2) * mv + w ** 2 * mu * mv

    # Compute the max n we need to enumerate.
    n_max = int(np.sqrt((cutoff * 4 * mu * mv * mv * lam) / d))

    # Enumerate all possible n, m to give possible sigmas within the cutoff.
for n in range(1, n_max + 1): mu_temp, lam_temp, mv_temp = c2_b2_a2_ratio if (mu_temp is None and w == 0) or (lam_temp is None and v == 0) \ or (mv_temp is None and u == 0): m_max = 0 else: m_max = int(np.sqrt((cutoff * 4 * mu * mv * lam * mv - n ** 2 * d) / mu / lam)) for m in range(0, m_max + 1): if gcd(m, n) == 1 or m == 0: # construct the rotation matrix, refer to the reference R_list = [(u ** 2 * mv * mv - lam * v ** 2 * mv - w ** 2 * mu * mv) * n ** 2 + lam * mu * m ** 2, 2 * lam * (v * u * mv * n ** 2 - w * mu * m * n), 2 * mu * (u * w * mv * n ** 2 + v * lam * m * n), 2 * mv * (u * v * mv * n ** 2 + w * mu * m * n), (v ** 2 * mv * lam - u ** 2 * mv * mv - w ** 2 * mu * mv) * n ** 2 + lam * mu * m ** 2, 2 * mv * mu * (v * w * n ** 2 - u * m * n), 2 * mv * (u * w * mv * n ** 2 - v * lam * m * n), 2 * lam * mv * (v * w * n ** 2 + u * m * n), (w ** 2 * mu * mv - u ** 2 * mv * mv - v ** 2 * mv * lam) * n ** 2 + lam * mu * m ** 2] m = -1 * m # inverse of rotation matrix R_list_inv = [(u ** 2 * mv * mv - lam * v ** 2 * mv - w ** 2 * mu * mv) * n ** 2 + lam * mu * m ** 2, 2 * lam * (v * u * mv * n ** 2 - w * mu * m * n), 2 * mu * (u * w * mv * n ** 2 + v * lam * m * n), 2 * mv * (u * v * mv * n ** 2 + w * mu * m * n), (v ** 2 * mv * lam - u ** 2 * mv * mv - w ** 2 * mu * mv) * n ** 2 + lam * mu * m ** 2, 2 * mv * mu * (v * w * n ** 2 - u * m * n), 2 * mv * (u * w * mv * n ** 2 - v * lam * m * n), 2 * lam * mv * (v * w * n ** 2 + u * m * n), (w ** 2 * mu * mv - u ** 2 * mv * mv - v ** 2 * mv * lam) * n ** 2 + lam * mu * m ** 2] m = -1 * m F = mu * lam * m ** 2 + d * n ** 2 all_list = R_list + R_list_inv + [F] # Compute the max common factors for the elements of the rotation matrix # and its inverse. com_fac = reduce(gcd, all_list) sigma = int(round((mu * lam * m ** 2 + d * n ** 2) / com_fac)) if (sigma <= cutoff) and (sigma > 1): if sigma not in list(sigmas.keys()): if m == 0: angle = 180.0 else: angle = 2 * np.arctan(n / m * np.sqrt(d / mu / lam)) \ / np.pi * 180 sigmas[sigma] = [angle] else: if m == 0: angle = 180.0 else: angle = 2 * np.arctan(n / m * np.sqrt(d / mu / lam)) \ / np.pi * 180 if angle not in sigmas[sigma]: sigmas[sigma].append(angle) if m_max == 0: break return sigmas
[ "def", "enum_sigma_ort", "(", "cutoff", ",", "r_axis", ",", "c2_b2_a2_ratio", ")", ":", "sigmas", "=", "{", "}", "# make sure gcd(r_axis)==1", "if", "reduce", "(", "gcd", ",", "r_axis", ")", "!=", "1", ":", "r_axis", "=", "[", "int", "(", "round", "(", "x", "/", "reduce", "(", "gcd", ",", "r_axis", ")", ")", ")", "for", "x", "in", "r_axis", "]", "u", ",", "v", ",", "w", "=", "r_axis", "# make sure mu, lambda, mv are coprime integers.", "if", "None", "in", "c2_b2_a2_ratio", ":", "mu", ",", "lam", ",", "mv", "=", "c2_b2_a2_ratio", "non_none", "=", "[", "i", "for", "i", "in", "c2_b2_a2_ratio", "if", "i", "is", "not", "None", "]", "if", "len", "(", "non_none", ")", "<", "2", ":", "raise", "RuntimeError", "(", "'No CSL exist for two irrational numbers'", ")", "non1", ",", "non2", "=", "non_none", "if", "reduce", "(", "gcd", ",", "non_none", ")", "!=", "1", ":", "temp", "=", "reduce", "(", "gcd", ",", "non_none", ")", "non1", "=", "int", "(", "round", "(", "non1", "/", "temp", ")", ")", "non2", "=", "int", "(", "round", "(", "non2", "/", "temp", ")", ")", "if", "mu", "is", "None", ":", "lam", "=", "non1", "mv", "=", "non2", "mu", "=", "1", "if", "w", "!=", "0", ":", "if", "u", "!=", "0", "or", "(", "v", "!=", "0", ")", ":", "raise", "RuntimeError", "(", "'For irrational c2, CSL only exist for [0,0,1] '", "'or [u,v,0] and m = 0'", ")", "elif", "lam", "is", "None", ":", "mu", "=", "non1", "mv", "=", "non2", "lam", "=", "1", "if", "v", "!=", "0", ":", "if", "u", "!=", "0", "or", "(", "w", "!=", "0", ")", ":", "raise", "RuntimeError", "(", "'For irrational b2, CSL only exist for [0,1,0] '", "'or [u,0,w] and m = 0'", ")", "elif", "mv", "is", "None", ":", "mu", "=", "non1", "lam", "=", "non2", "mv", "=", "1", "if", "u", "!=", "0", ":", "if", "w", "!=", "0", "or", "(", "v", "!=", "0", ")", ":", "raise", "RuntimeError", "(", "'For irrational a2, CSL only exist for [1,0,0] '", "'or [0,v,w] and m = 0'", ")", "else", ":", "mu", ",", "lam", ",", "mv", "=", "c2_b2_a2_ratio", "if", "reduce", "(", "gcd", ",", "c2_b2_a2_ratio", ")", "!=", "1", ":", "temp", "=", "reduce", "(", "gcd", ",", "c2_b2_a2_ratio", ")", "mu", "=", "int", "(", "round", "(", "mu", "/", "temp", ")", ")", "mv", "=", "int", "(", "round", "(", "mv", "/", "temp", ")", ")", "lam", "=", "int", "(", "round", "(", "lam", "/", "temp", ")", ")", "if", "u", "==", "0", "and", "v", "==", "0", ":", "mu", "=", "1", "if", "u", "==", "0", "and", "w", "==", "0", ":", "lam", "=", "1", "if", "v", "==", "0", "and", "w", "==", "0", ":", "mv", "=", "1", "# refer to the meaning of d in reference", "d", "=", "(", "mv", "*", "u", "**", "2", "+", "lam", "*", "v", "**", "2", ")", "*", "mv", "+", "w", "**", "2", "*", "mu", "*", "mv", "# Compute the max n we need to enumerate.", "n_max", "=", "int", "(", "np", ".", "sqrt", "(", "(", "cutoff", "*", "4", "*", "mu", "*", "mv", "*", "mv", "*", "lam", ")", "/", "d", ")", ")", "# Enumerate all possible n, m to give possible sigmas within the cutoff.", "for", "n", "in", "range", "(", "1", ",", "n_max", "+", "1", ")", ":", "mu_temp", ",", "lam_temp", ",", "mv_temp", "=", "c2_b2_a2_ratio", "if", "(", "mu_temp", "is", "None", "and", "w", "==", "0", ")", "or", "(", "lam_temp", "is", "None", "and", "v", "==", "0", ")", "or", "(", "mv_temp", "is", "None", "and", "u", "==", "0", ")", ":", "m_max", "=", "0", "else", ":", "m_max", "=", "int", "(", "np", ".", "sqrt", "(", "(", "cutoff", "*", "4", "*", "mu", "*", "mv", "*", "lam", "*", "mv", "-", "n", "**", "2", "*", "d", ")", "/", "mu", "/", "lam", ")", ")", "for", "m", "in", "range", 
"(", "0", ",", "m_max", "+", "1", ")", ":", "if", "gcd", "(", "m", ",", "n", ")", "==", "1", "or", "m", "==", "0", ":", "# construct the rotation matrix, refer to the reference", "R_list", "=", "[", "(", "u", "**", "2", "*", "mv", "*", "mv", "-", "lam", "*", "v", "**", "2", "*", "mv", "-", "w", "**", "2", "*", "mu", "*", "mv", ")", "*", "n", "**", "2", "+", "lam", "*", "mu", "*", "m", "**", "2", ",", "2", "*", "lam", "*", "(", "v", "*", "u", "*", "mv", "*", "n", "**", "2", "-", "w", "*", "mu", "*", "m", "*", "n", ")", ",", "2", "*", "mu", "*", "(", "u", "*", "w", "*", "mv", "*", "n", "**", "2", "+", "v", "*", "lam", "*", "m", "*", "n", ")", ",", "2", "*", "mv", "*", "(", "u", "*", "v", "*", "mv", "*", "n", "**", "2", "+", "w", "*", "mu", "*", "m", "*", "n", ")", ",", "(", "v", "**", "2", "*", "mv", "*", "lam", "-", "u", "**", "2", "*", "mv", "*", "mv", "-", "w", "**", "2", "*", "mu", "*", "mv", ")", "*", "n", "**", "2", "+", "lam", "*", "mu", "*", "m", "**", "2", ",", "2", "*", "mv", "*", "mu", "*", "(", "v", "*", "w", "*", "n", "**", "2", "-", "u", "*", "m", "*", "n", ")", ",", "2", "*", "mv", "*", "(", "u", "*", "w", "*", "mv", "*", "n", "**", "2", "-", "v", "*", "lam", "*", "m", "*", "n", ")", ",", "2", "*", "lam", "*", "mv", "*", "(", "v", "*", "w", "*", "n", "**", "2", "+", "u", "*", "m", "*", "n", ")", ",", "(", "w", "**", "2", "*", "mu", "*", "mv", "-", "u", "**", "2", "*", "mv", "*", "mv", "-", "v", "**", "2", "*", "mv", "*", "lam", ")", "*", "n", "**", "2", "+", "lam", "*", "mu", "*", "m", "**", "2", "]", "m", "=", "-", "1", "*", "m", "# inverse of rotation matrix", "R_list_inv", "=", "[", "(", "u", "**", "2", "*", "mv", "*", "mv", "-", "lam", "*", "v", "**", "2", "*", "mv", "-", "w", "**", "2", "*", "mu", "*", "mv", ")", "*", "n", "**", "2", "+", "lam", "*", "mu", "*", "m", "**", "2", ",", "2", "*", "lam", "*", "(", "v", "*", "u", "*", "mv", "*", "n", "**", "2", "-", "w", "*", "mu", "*", "m", "*", "n", ")", ",", "2", "*", "mu", "*", "(", "u", "*", "w", "*", "mv", "*", "n", "**", "2", "+", "v", "*", "lam", "*", "m", "*", "n", ")", ",", "2", "*", "mv", "*", "(", "u", "*", "v", "*", "mv", "*", "n", "**", "2", "+", "w", "*", "mu", "*", "m", "*", "n", ")", ",", "(", "v", "**", "2", "*", "mv", "*", "lam", "-", "u", "**", "2", "*", "mv", "*", "mv", "-", "w", "**", "2", "*", "mu", "*", "mv", ")", "*", "n", "**", "2", "+", "lam", "*", "mu", "*", "m", "**", "2", ",", "2", "*", "mv", "*", "mu", "*", "(", "v", "*", "w", "*", "n", "**", "2", "-", "u", "*", "m", "*", "n", ")", ",", "2", "*", "mv", "*", "(", "u", "*", "w", "*", "mv", "*", "n", "**", "2", "-", "v", "*", "lam", "*", "m", "*", "n", ")", ",", "2", "*", "lam", "*", "mv", "*", "(", "v", "*", "w", "*", "n", "**", "2", "+", "u", "*", "m", "*", "n", ")", ",", "(", "w", "**", "2", "*", "mu", "*", "mv", "-", "u", "**", "2", "*", "mv", "*", "mv", "-", "v", "**", "2", "*", "mv", "*", "lam", ")", "*", "n", "**", "2", "+", "lam", "*", "mu", "*", "m", "**", "2", "]", "m", "=", "-", "1", "*", "m", "F", "=", "mu", "*", "lam", "*", "m", "**", "2", "+", "d", "*", "n", "**", "2", "all_list", "=", "R_list", "+", "R_list_inv", "+", "[", "F", "]", "# Compute the max common factors for the elements of the rotation matrix", "# and its inverse.", "com_fac", "=", "reduce", "(", "gcd", ",", "all_list", ")", "sigma", "=", "int", "(", "round", "(", "(", "mu", "*", "lam", "*", "m", "**", "2", "+", "d", "*", "n", "**", "2", ")", "/", "com_fac", ")", ")", "if", "(", "sigma", "<=", "cutoff", ")", "and", "(", "sigma", ">", "1", ")", ":", "if", "sigma", "not", 
"in", "list", "(", "sigmas", ".", "keys", "(", ")", ")", ":", "if", "m", "==", "0", ":", "angle", "=", "180.0", "else", ":", "angle", "=", "2", "*", "np", ".", "arctan", "(", "n", "/", "m", "*", "np", ".", "sqrt", "(", "d", "/", "mu", "/", "lam", ")", ")", "/", "np", ".", "pi", "*", "180", "sigmas", "[", "sigma", "]", "=", "[", "angle", "]", "else", ":", "if", "m", "==", "0", ":", "angle", "=", "180.0", "else", ":", "angle", "=", "2", "*", "np", ".", "arctan", "(", "n", "/", "m", "*", "np", ".", "sqrt", "(", "d", "/", "mu", "/", "lam", ")", ")", "/", "np", ".", "pi", "*", "180", "if", "angle", "not", "in", "sigmas", "[", "sigma", "]", ":", "sigmas", "[", "sigma", "]", ".", "append", "(", "angle", ")", "if", "m_max", "==", "0", ":", "break", "return", "sigmas" ]
50.875817
22.313725
def base (self): """ Returns properties that are neither incidental nor free. """ result = [p for p in self.lazy_properties if not(p.feature.incidental or p.feature.free)] result.extend(self.base_) return result
[ "def", "base", "(", "self", ")", ":", "result", "=", "[", "p", "for", "p", "in", "self", ".", "lazy_properties", "if", "not", "(", "p", ".", "feature", ".", "incidental", "or", "p", ".", "feature", ".", "free", ")", "]", "result", ".", "extend", "(", "self", ".", "base_", ")", "return", "result" ]
37.571429
12
def set_one_var_from_string(name, param_type, checks):
    """Construct code for the auto config file for one parameter value.

    Parameters
    ----------
    name : string
        Name of the parameter.
    param_type : string
        Type of the parameter.
    checks : list
        Constraints of the parameter.

    Returns
    -------
    ret : string
        Lines of the auto config file that read and check one parameter value.
    """
    ret = ""
    univar_mapper = {"int": "GetInt", "double": "GetDouble", "bool": "GetBool", "std::string": "GetString"}
    if "vector" not in param_type:
        ret += "  %s(params, \"%s\", &%s);\n" % (univar_mapper[param_type], name, name)
        if len(checks) > 0:
            for check in checks:
                ret += "  CHECK(%s %s);\n" % (name, check)
        ret += "\n"
    else:
        ret += "  if (GetString(params, \"%s\", &tmp_str)) {\n" % (name)
        type2 = param_type.split("<")[1][:-1]
        if type2 == "std::string":
            ret += "    %s = Common::Split(tmp_str.c_str(), ',');\n" % (name)
        else:
            ret += "    %s = Common::StringToArray<%s>(tmp_str, ',');\n" % (name, type2)
        ret += "  }\n\n"
    return ret
[ "def", "set_one_var_from_string", "(", "name", ",", "param_type", ",", "checks", ")", ":", "ret", "=", "\"\"", "univar_mapper", "=", "{", "\"int\"", ":", "\"GetInt\"", ",", "\"double\"", ":", "\"GetDouble\"", ",", "\"bool\"", ":", "\"GetBool\"", ",", "\"std::string\"", ":", "\"GetString\"", "}", "if", "\"vector\"", "not", "in", "param_type", ":", "ret", "+=", "\" %s(params, \\\"%s\\\", &%s);\\n\"", "%", "(", "univar_mapper", "[", "param_type", "]", ",", "name", ",", "name", ")", "if", "len", "(", "checks", ")", ">", "0", ":", "for", "check", "in", "checks", ":", "ret", "+=", "\" CHECK(%s %s);\\n\"", "%", "(", "name", ",", "check", ")", "ret", "+=", "\"\\n\"", "else", ":", "ret", "+=", "\" if (GetString(params, \\\"%s\\\", &tmp_str)) {\\n\"", "%", "(", "name", ")", "type2", "=", "param_type", ".", "split", "(", "\"<\"", ")", "[", "1", "]", "[", ":", "-", "1", "]", "if", "type2", "==", "\"std::string\"", ":", "ret", "+=", "\" %s = Common::Split(tmp_str.c_str(), ',');\\n\"", "%", "(", "name", ")", "else", ":", "ret", "+=", "\" %s = Common::StringToArray<%s>(tmp_str, ',');\\n\"", "%", "(", "name", ",", "type2", ")", "ret", "+=", "\" }\\n\\n\"", "return", "ret" ]
34.617647
23.264706
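For a scalar and a vector parameter (names made up), the generator above emits the following C++ fragments:

print(set_one_var_from_string("num_leaves", "int", [">0"]), end="")
#   GetInt(params, "num_leaves", &num_leaves);
#   CHECK(num_leaves >0);

print(set_one_var_from_string("valid", "std::vector<std::string>", []), end="")
#   if (GetString(params, "valid", &tmp_str)) {
#     valid = Common::Split(tmp_str.c_str(), ',');
#   }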
def Q_weir_rectangular_SIA(h1, h2, b, b1): r'''Calculates the flow rate across rectangular weir from the height of the liquid above the crest of the notch, the liquid depth beneath it, and the width of the notch. Model from [1]_ as reproduced in [2]_. Flow rate is given by: .. math:: Q = 0.544\left[1 + 0.064\left(\frac{b}{b_1}\right)^2 + \frac{0.00626 - 0.00519(b/b_1)^2}{h_1 + 0.0016}\right] \left[1 + 0.5\left(\frac{b}{b_1}\right)^4\left(\frac{h_1}{h_1+h_2} \right)^2\right]b\sqrt{g}h^{1.5} Parameters ---------- h1 : float Height of the fluid above the crest of the weir [m] h2 : float Height of the fluid below the crest of the weir [m] b : float Width of the rectangular flow section of the weir [m] b1 : float Width of the full section of the channel [m] Returns ------- Q : float Volumetric flow rate across the weir [m^3/s] Notes ----- The following limits apply to the use of this equation: b/b1 ≤ 0.2 h1/h2 < 2 b > 0.15 m h1 > 0.03 m h2 > 0.1 m Examples -------- >>> Q_weir_rectangular_SIA(0.2, 0.5, 1, 2) 1.0408858453811165 References ---------- .. [1] Normen für Wassermessungen: bei Durchführung von Abnahmeversuchen an Wasserkraftmaschinen. SIA, 1924. .. [2] Blevins, Robert D. Applied Fluid Dynamics Handbook. New York, N.Y.: Van Nostrand Reinhold Co., 1984. ''' h = h1 + h2 Q = 0.544*(1 + 0.064*(b/b1)**2 + (0.00626 - 0.00519*(b/b1)**2)/(h1 + 0.0016))\ *(1 + 0.5*(b/b1)**4*(h1/(h1 + h2))**2)*b*g**0.5*h**1.5 return Q
[ "def", "Q_weir_rectangular_SIA", "(", "h1", ",", "h2", ",", "b", ",", "b1", ")", ":", "h", "=", "h1", "+", "h2", "Q", "=", "0.544", "*", "(", "1", "+", "0.064", "*", "(", "b", "/", "b1", ")", "**", "2", "+", "(", "0.00626", "-", "0.00519", "*", "(", "b", "/", "b1", ")", "**", "2", ")", "/", "(", "h1", "+", "0.0016", ")", ")", "*", "(", "1", "+", "0.5", "*", "(", "b", "/", "b1", ")", "**", "4", "*", "(", "h1", "/", "(", "h1", "+", "h2", ")", ")", "**", "2", ")", "*", "b", "*", "g", "**", "0.5", "*", "h", "**", "1.5", "return", "Q" ]
28.875
25.982143
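The doctest value can be reproduced by evaluating the correlation directly. The gravitational constant g used above is not part of this snippet; the standard g = 9.80665 m/s^2 is assumed here.

from math import sqrt

g = 9.80665  # assumed value of the module-level g
h1, h2, b, b1 = 0.2, 0.5, 1, 2
h = h1 + h2
Q = 0.544*(1 + 0.064*(b/b1)**2 + (0.00626 - 0.00519*(b/b1)**2)/(h1 + 0.0016))\
    *(1 + 0.5*(b/b1)**4*(h1/(h1 + h2))**2)*b*sqrt(g)*h**1.5
print(Q)  # 1.0408858453811165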
def get_extension_by_id(self, extension_id, version=None, flags=None): """GetExtensionById. [Preview API] :param str extension_id: :param str version: :param str flags: :rtype: :class:`<PublishedExtension> <azure.devops.v5_0.gallery.models.PublishedExtension>` """ route_values = {} if extension_id is not None: route_values['extensionId'] = self._serialize.url('extension_id', extension_id, 'str') query_parameters = {} if version is not None: query_parameters['version'] = self._serialize.query('version', version, 'str') if flags is not None: query_parameters['flags'] = self._serialize.query('flags', flags, 'str') response = self._send(http_method='GET', location_id='a41192c8-9525-4b58-bc86-179fa549d80d', version='5.0-preview.2', route_values=route_values, query_parameters=query_parameters) return self._deserialize('PublishedExtension', response)
[ "def", "get_extension_by_id", "(", "self", ",", "extension_id", ",", "version", "=", "None", ",", "flags", "=", "None", ")", ":", "route_values", "=", "{", "}", "if", "extension_id", "is", "not", "None", ":", "route_values", "[", "'extensionId'", "]", "=", "self", ".", "_serialize", ".", "url", "(", "'extension_id'", ",", "extension_id", ",", "'str'", ")", "query_parameters", "=", "{", "}", "if", "version", "is", "not", "None", ":", "query_parameters", "[", "'version'", "]", "=", "self", ".", "_serialize", ".", "query", "(", "'version'", ",", "version", ",", "'str'", ")", "if", "flags", "is", "not", "None", ":", "query_parameters", "[", "'flags'", "]", "=", "self", ".", "_serialize", ".", "query", "(", "'flags'", ",", "flags", ",", "'str'", ")", "response", "=", "self", ".", "_send", "(", "http_method", "=", "'GET'", ",", "location_id", "=", "'a41192c8-9525-4b58-bc86-179fa549d80d'", ",", "version", "=", "'5.0-preview.2'", ",", "route_values", "=", "route_values", ",", "query_parameters", "=", "query_parameters", ")", "return", "self", ".", "_deserialize", "(", "'PublishedExtension'", ",", "response", ")" ]
50.090909
21.5
def _check_exception_inherit_from_stopiteration(exc):
    """Return True if the exception node given as argument inherits from StopIteration"""
    stopiteration_qname = "{}.StopIteration".format(utils.EXCEPTIONS_MODULE)
    return any(_class.qname() == stopiteration_qname for _class in exc.mro())
[ "def", "_check_exception_inherit_from_stopiteration", "(", "exc", ")", ":", "stopiteration_qname", "=", "\"{}.StopIteration\"", ".", "format", "(", "utils", ".", "EXCEPTIONS_MODULE", ")", "return", "any", "(", "_class", ".", "qname", "(", ")", "==", "stopiteration_qname", "for", "_class", "in", "exc", ".", "mro", "(", ")", ")" ]
75
23.5
def srem(self, name, *values):
    """
    Send raw (source) values here. Correct behavior with other
    (already dumped) values is not guaranteed, and may be actively wrong.
    """
    return self.storage.srem(name, *self.dump(values, False))
[ "def", "srem", "(", "self", ",", "name", ",", "*", "values", ")", ":", "return", "self", ".", "storage", ".", "srem", "(", "name", ",", "*", "self", ".", "dump", "(", "values", ",", "False", ")", ")" ]
38.5
11.5
def from_json(cls, json_str): """Deserialize BERTVocab object from json string. Parameters ---------- json_str : str Serialized json string of a BERTVocab object. Returns ------- BERTVocab """ vocab_dict = json.loads(json_str) unknown_token = vocab_dict.get('unknown_token') bert_vocab = cls(unknown_token=unknown_token) bert_vocab._idx_to_token = vocab_dict.get('idx_to_token') bert_vocab._token_to_idx = vocab_dict.get('token_to_idx') if unknown_token: bert_vocab._token_to_idx = DefaultLookupDict(bert_vocab._token_to_idx[unknown_token], bert_vocab._token_to_idx) bert_vocab._reserved_tokens = vocab_dict.get('reserved_tokens') bert_vocab._padding_token = vocab_dict.get('padding_token') bert_vocab._bos_token = vocab_dict.get('bos_token') bert_vocab._eos_token = vocab_dict.get('eos_token') bert_vocab._mask_token = vocab_dict.get('mask_token') bert_vocab._sep_token = vocab_dict.get('sep_token') bert_vocab._cls_token = vocab_dict.get('cls_token') return bert_vocab
[ "def", "from_json", "(", "cls", ",", "json_str", ")", ":", "vocab_dict", "=", "json", ".", "loads", "(", "json_str", ")", "unknown_token", "=", "vocab_dict", ".", "get", "(", "'unknown_token'", ")", "bert_vocab", "=", "cls", "(", "unknown_token", "=", "unknown_token", ")", "bert_vocab", ".", "_idx_to_token", "=", "vocab_dict", ".", "get", "(", "'idx_to_token'", ")", "bert_vocab", ".", "_token_to_idx", "=", "vocab_dict", ".", "get", "(", "'token_to_idx'", ")", "if", "unknown_token", ":", "bert_vocab", ".", "_token_to_idx", "=", "DefaultLookupDict", "(", "bert_vocab", ".", "_token_to_idx", "[", "unknown_token", "]", ",", "bert_vocab", ".", "_token_to_idx", ")", "bert_vocab", ".", "_reserved_tokens", "=", "vocab_dict", ".", "get", "(", "'reserved_tokens'", ")", "bert_vocab", ".", "_padding_token", "=", "vocab_dict", ".", "get", "(", "'padding_token'", ")", "bert_vocab", ".", "_bos_token", "=", "vocab_dict", ".", "get", "(", "'bos_token'", ")", "bert_vocab", ".", "_eos_token", "=", "vocab_dict", ".", "get", "(", "'eos_token'", ")", "bert_vocab", ".", "_mask_token", "=", "vocab_dict", ".", "get", "(", "'mask_token'", ")", "bert_vocab", ".", "_sep_token", "=", "vocab_dict", ".", "get", "(", "'sep_token'", ")", "bert_vocab", ".", "_cls_token", "=", "vocab_dict", ".", "get", "(", "'cls_token'", ")", "return", "bert_vocab" ]
40.066667
22.866667
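Assuming an existing BERTVocab instance and the usual to_json() counterpart (gluonnlp vocabularies provide one), deserialization is a straight round trip:

# `vocab` is assumed to be an existing BERTVocab instance.
json_str = vocab.to_json()
restored = BERTVocab.from_json(json_str)
assert restored.unknown_token == vocab.unknown_token
assert restored.cls_token == vocab.cls_token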
def _resample_data(self, gssha_var): """ This function resamples the data to match the GSSHA grid IN TESTING MODE """ self.data = self.data.lsm.resample(gssha_var, self.gssha_grid)
[ "def", "_resample_data", "(", "self", ",", "gssha_var", ")", ":", "self", ".", "data", "=", "self", ".", "data", ".", "lsm", ".", "resample", "(", "gssha_var", ",", "self", ".", "gssha_grid", ")" ]
35.833333
12.5
def set_background(self, fname=None, genome=None, length=200, nseq=10000): """Set the background to use for FPR and z-score calculations. Background can be specified either as a genome name or as the name of a FASTA file. Parameters ---------- fname : str, optional Name of FASTA file to use as background. genome : str, optional Name of genome to use to retrieve random sequences. length : int, optional Length of genomic sequences to retrieve. The default is 200. nseq : int, optional Number of genomic sequences to retrieve. """ length = int(length) if genome and fname: raise ValueError("Need either genome or filename for background.") if fname: if not os.path.exists(fname): raise IOError("Background file {} does not exist!".format(fname)) self.background = Fasta(fname) self.background_hash = file_checksum(fname) return if not genome: if self.genome: genome = self.genome logger.info("Using default background: genome {} with length {}".format( genome, length)) else: raise ValueError("Need either genome or filename for background.") logger.info("Using background: genome {} with length {}".format(genome, length)) with Cache(CACHE_DIR) as cache: self.background_hash = "{}\{}".format(genome, int(length)) fa = cache.get(self.background_hash) if not fa: fa = RandomGenomicFasta(genome, length, nseq) cache.set(self.background_hash, fa) self.background = fa
[ "def", "set_background", "(", "self", ",", "fname", "=", "None", ",", "genome", "=", "None", ",", "length", "=", "200", ",", "nseq", "=", "10000", ")", ":", "length", "=", "int", "(", "length", ")", "if", "genome", "and", "fname", ":", "raise", "ValueError", "(", "\"Need either genome or filename for background.\"", ")", "if", "fname", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "fname", ")", ":", "raise", "IOError", "(", "\"Background file {} does not exist!\"", ".", "format", "(", "fname", ")", ")", "self", ".", "background", "=", "Fasta", "(", "fname", ")", "self", ".", "background_hash", "=", "file_checksum", "(", "fname", ")", "return", "if", "not", "genome", ":", "if", "self", ".", "genome", ":", "genome", "=", "self", ".", "genome", "logger", ".", "info", "(", "\"Using default background: genome {} with length {}\"", ".", "format", "(", "genome", ",", "length", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Need either genome or filename for background.\"", ")", "logger", ".", "info", "(", "\"Using background: genome {} with length {}\"", ".", "format", "(", "genome", ",", "length", ")", ")", "with", "Cache", "(", "CACHE_DIR", ")", "as", "cache", ":", "self", ".", "background_hash", "=", "\"{}\\{}\"", ".", "format", "(", "genome", ",", "int", "(", "length", ")", ")", "fa", "=", "cache", ".", "get", "(", "self", ".", "background_hash", ")", "if", "not", "fa", ":", "fa", "=", "RandomGenomicFasta", "(", "genome", ",", "length", ",", "nseq", ")", "cache", ".", "set", "(", "self", ".", "background_hash", ",", "fa", ")", "self", ".", "background", "=", "fa" ]
35.137255
22.137255
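A hedged usage sketch of the two mutually exclusive paths above; scanner is a hypothetical object exposing set_background (e.g. a gimmemotifs scanner), and the file and genome names are illustrative:

# scanner.set_background(fname="custom_background.fa")           # FASTA-file background
# scanner.set_background(genome="hg38", length=200, nseq=10000)  # random genomic background
# Passing both raises ValueError("Need either genome or filename for background.")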
def draw_char_screen(self): """ Draws the output buffered in the char_buffer. """ self.screen = Image.new("RGB", (self.height, self.width)) self.drawer = ImageDraw.Draw(self.screen) for sy, line in enumerate(self.char_buffer): for sx, tinfo in enumerate(line): self.drawer.text((sx * 6, sy * 9), tinfo[0], fill=tinfo[1:]) self.output_device.interrupt()
[ "def", "draw_char_screen", "(", "self", ")", ":", "self", ".", "screen", "=", "Image", ".", "new", "(", "\"RGB\"", ",", "(", "self", ".", "height", ",", "self", ".", "width", ")", ")", "self", ".", "drawer", "=", "ImageDraw", ".", "Draw", "(", "self", ".", "screen", ")", "for", "sy", ",", "line", "in", "enumerate", "(", "self", ".", "char_buffer", ")", ":", "for", "sx", ",", "tinfo", "in", "enumerate", "(", "line", ")", ":", "self", ".", "drawer", ".", "text", "(", "(", "sx", "*", "6", ",", "sy", "*", "9", ")", ",", "tinfo", "[", "0", "]", ",", "fill", "=", "tinfo", "[", "1", ":", "]", ")", "self", ".", "output_device", ".", "interrupt", "(", ")" ]
33.090909
11.272727
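The record above leans on two Pillow calls, Image.new and ImageDraw.Draw(...).text; a minimal standalone sketch of that pattern, with the 6x9-pixel character cells used in the record. Note that Pillow's size tuple is (width, height), which is worth double-checking against the (self.height, self.width) order passed above:

from PIL import Image, ImageDraw

screen = Image.new("RGB", (120, 90))                # (width, height) in Pillow
drawer = ImageDraw.Draw(screen)
drawer.text((0 * 6, 0 * 9), "A", fill=(255, 0, 0))  # one character cell, in red
screen.save("char_screen.png")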
def _refresh_state(self): """ Refresh the job info. """ # DataFlow's DataflowPipelineResult does not refresh state, so we have to do it ourselves # as a workaround. self._runner_results._job = ( self._runner_results._runner.dataflow_client.get_job(self._runner_results.job_id())) self._is_complete = self._runner_results.state in ['STOPPED', 'DONE', 'FAILED', 'CANCELLED'] self._fatal_error = getattr(self._runner_results._runner, 'last_error_msg', None)
[ "def", "_refresh_state", "(", "self", ")", ":", "# DataFlow's DataflowPipelineResult does not refresh state, so we have to do it ourselves", "# as a workaround.", "self", ".", "_runner_results", ".", "_job", "=", "(", "self", ".", "_runner_results", ".", "_runner", ".", "dataflow_client", ".", "get_job", "(", "self", ".", "_runner_results", ".", "job_id", "(", ")", ")", ")", "self", ".", "_is_complete", "=", "self", ".", "_runner_results", ".", "state", "in", "[", "'STOPPED'", ",", "'DONE'", ",", "'FAILED'", ",", "'CANCELLED'", "]", "self", ".", "_fator_error", "=", "getattr", "(", "self", ".", "_runner_results", ".", "_runner", ",", "'last_error_msg'", ",", "None", ")" ]
53.222222
31.777778
def collapse_addresses(addresses): """Collapse a list of IP objects. Example: collapse_addresses([IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/25')]) -> [IPv4Network('192.0.2.0/24')] Args: addresses: An iterator of IPv4Network or IPv6Network objects. Returns: An iterator of the collapsed IPv(4|6)Network objects. Raises: TypeError: If passed a list of mixed version objects. """ addrs = [] ips = [] nets = [] # split IP addresses and networks for ip in addresses: if isinstance(ip, _BaseAddress): if ips and ips[-1]._version != ip._version: raise TypeError("%s and %s are not of the same version" % ( ip, ips[-1])) ips.append(ip) elif ip._prefixlen == ip._max_prefixlen: if ips and ips[-1]._version != ip._version: raise TypeError("%s and %s are not of the same version" % ( ip, ips[-1])) try: ips.append(ip.ip) except AttributeError: ips.append(ip.network_address) else: if nets and nets[-1]._version != ip._version: raise TypeError("%s and %s are not of the same version" % ( ip, nets[-1])) nets.append(ip) # sort and dedup ips = sorted(set(ips)) # find consecutive address ranges in the sorted sequence and summarize them if ips: for first, last in _find_address_range(ips): addrs.extend(summarize_address_range(first, last)) return _collapse_addresses_internal(addrs + nets)
[ "def", "collapse_addresses", "(", "addresses", ")", ":", "addrs", "=", "[", "]", "ips", "=", "[", "]", "nets", "=", "[", "]", "# split IP addresses and networks", "for", "ip", "in", "addresses", ":", "if", "isinstance", "(", "ip", ",", "_BaseAddress", ")", ":", "if", "ips", "and", "ips", "[", "-", "1", "]", ".", "_version", "!=", "ip", ".", "_version", ":", "raise", "TypeError", "(", "\"%s and %s are not of the same version\"", "%", "(", "ip", ",", "ips", "[", "-", "1", "]", ")", ")", "ips", ".", "append", "(", "ip", ")", "elif", "ip", ".", "_prefixlen", "==", "ip", ".", "_max_prefixlen", ":", "if", "ips", "and", "ips", "[", "-", "1", "]", ".", "_version", "!=", "ip", ".", "_version", ":", "raise", "TypeError", "(", "\"%s and %s are not of the same version\"", "%", "(", "ip", ",", "ips", "[", "-", "1", "]", ")", ")", "try", ":", "ips", ".", "append", "(", "ip", ".", "ip", ")", "except", "AttributeError", ":", "ips", ".", "append", "(", "ip", ".", "network_address", ")", "else", ":", "if", "nets", "and", "nets", "[", "-", "1", "]", ".", "_version", "!=", "ip", ".", "_version", ":", "raise", "TypeError", "(", "\"%s and %s are not of the same version\"", "%", "(", "ip", ",", "nets", "[", "-", "1", "]", ")", ")", "nets", ".", "append", "(", "ip", ")", "# sort and dedup", "ips", "=", "sorted", "(", "set", "(", "ips", ")", ")", "# find consecutive address ranges in the sorted sequence and summarize them", "if", "ips", ":", "for", "first", ",", "last", "in", "_find_address_range", "(", "ips", ")", ":", "addrs", ".", "extend", "(", "summarize_address_range", "(", "first", ",", "last", ")", ")", "return", "_collapse_addresses_internal", "(", "addrs", "+", "nets", ")" ]
32.711538
21.711538
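The record mirrors the standard library's ipaddress.collapse_addresses, so the docstring example runs directly against the stdlib version:

import ipaddress

nets = [ipaddress.IPv4Network('192.0.2.0/25'),
        ipaddress.IPv4Network('192.0.2.128/25')]
print(list(ipaddress.collapse_addresses(nets)))  # [IPv4Network('192.0.2.0/24')]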
def kms_encrypt(kms_client, service, env, secret): """ Encrypt string for use by a given service/environment Args: kms_client (boto3 kms client object): Instantiated kms client object. Usually created through create_aws_clients. service (string): name of the service that the secret is being encrypted for. env (string): environment that the secret is being encrypted for. secret (string): value to be encrypted Returns: the base64-encoded ciphertext of the secret Raises: SystemExit(1): If there is an error with the boto3 encryption call (ex. missing kms key) """ # Converting all periods to underscores because they are invalid in KMS alias names key_alias = '{}-{}'.format(env, service.replace('.', '_')) try: response = kms_client.encrypt( KeyId='alias/{}'.format(key_alias), Plaintext=secret.encode() ) except ClientError as error: if error.response['Error']['Code'] == "NotFoundException": fail("Key '{}' not found. You may need to run ef-generate for this environment.".format(key_alias), error) else: fail("boto3 exception occurred while performing kms encrypt operation.", error) encrypted_secret = base64.b64encode(response['CiphertextBlob']) return encrypted_secret
[ "def", "kms_encrypt", "(", "kms_client", ",", "service", ",", "env", ",", "secret", ")", ":", "# Converting all periods to underscores because they are invalid in KMS alias names", "key_alias", "=", "'{}-{}'", ".", "format", "(", "env", ",", "service", ".", "replace", "(", "'.'", ",", "'_'", ")", ")", "try", ":", "response", "=", "kms_client", ".", "encrypt", "(", "KeyId", "=", "'alias/{}'", ".", "format", "(", "key_alias", ")", ",", "Plaintext", "=", "secret", ".", "encode", "(", ")", ")", "except", "ClientError", "as", "error", ":", "if", "error", ".", "response", "[", "'Error'", "]", "[", "'Code'", "]", "==", "\"NotFoundException\"", ":", "fail", "(", "\"Key '{}' not found. You may need to run ef-generate for this environment.\"", ".", "format", "(", "key_alias", ")", ",", "error", ")", "else", ":", "fail", "(", "\"boto3 exception occurred while performing kms encrypt operation.\"", ",", "error", ")", "encrypted_secret", "=", "base64", ".", "b64encode", "(", "response", "[", "'CiphertextBlob'", "]", ")", "return", "encrypted_secret" ]
43.785714
26.285714
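A hedged call sketch for the function above: boto3.client('kms') and decrypt(CiphertextBlob=...) are standard boto3 APIs, while the service, environment, and secret values are illustrative; running it assumes configured AWS credentials plus a matching 'staging-my_service' key alias.

import base64
import boto3

kms_client = boto3.client('kms')
ciphertext = kms_encrypt(kms_client, 'my.service', 'staging', 's3cret-value')
# Reverse direction, using the standard KMS decrypt call:
# plaintext = kms_client.decrypt(
#     CiphertextBlob=base64.b64decode(ciphertext))['Plaintext']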
def _set_keepalive(self, v, load=False): """ Setter method for keepalive, mapped from YANG variable /interface/tunnel/keepalive (container) If this variable is read-only (config: false) in the source YANG file, then _set_keepalive is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_keepalive() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=keepalive.keepalive, is_container='container', presence=False, yang_name="keepalive", rest_name="keepalive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Tunnel keepalive', u'cli-sequence-commands': None, u'cli-full-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """keepalive must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=keepalive.keepalive, is_container='container', presence=False, yang_name="keepalive", rest_name="keepalive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Tunnel keepalive', u'cli-sequence-commands': None, u'cli-full-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='container', is_config=True)""", }) self.__keepalive = t if hasattr(self, '_set'): self._set()
[ "def", "_set_keepalive", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "keepalive", ".", "keepalive", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"keepalive\"", ",", "rest_name", "=", "\"keepalive\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Tunnel keepalive'", ",", "u'cli-sequence-commands'", ":", "None", ",", "u'cli-full-no'", ":", "None", ",", "u'cli-incomplete-command'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-gre-vxlan'", ",", "defining_module", "=", "'brocade-gre-vxlan'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"keepalive must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=keepalive.keepalive, is_container='container', presence=False, yang_name=\"keepalive\", rest_name=\"keepalive\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Tunnel keepalive', u'cli-sequence-commands': None, u'cli-full-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='container', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__keepalive", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
79.727273
37
def plugin_info(self): """ Property for accessing :class:`PluginInfoManager` instance, which is used to manage plugin information. :rtype: yagocd.resources.plugin_info.PluginInfoManager """ if self._plugin_info_manager is None: self._plugin_info_manager = PluginInfoManager(session=self._session) return self._plugin_info_manager
[ "def", "plugin_info", "(", "self", ")", ":", "if", "self", ".", "_plugin_info_manager", "is", "None", ":", "self", ".", "_plugin_info_manager", "=", "PluginInfoManager", "(", "session", "=", "self", ".", "_session", ")", "return", "self", ".", "_plugin_info_manager" ]
43
22.333333
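The property above is the standard lazy-initialization pattern: construct the manager on first access, then reuse the cached instance. A generic, runnable sketch with hypothetical names:

class Manager:
    def __init__(self, session):
        self.session = session

class Client:
    def __init__(self, session):
        self._session = session
        self._manager = None

    @property
    def manager(self):
        # Built once on first access, then cached on the instance
        if self._manager is None:
            self._manager = Manager(session=self._session)
        return self._manager

client = Client(session=object())
assert client.manager is client.manager  # same cached instance every time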
def highlight_text(needles, haystack, cls_name='highlighted', words=False, case=False): """ Applies cls_name to all needles found in haystack. """ if not needles: return haystack if not haystack: return '' if words: pattern = r"(%s)" % "|".join(['\\b{}\\b'.format(re.escape(n)) for n in needles]) else: pattern = r"(%s)" % "|".join([re.escape(n) for n in needles]) if case: regex = re.compile(pattern) else: regex = re.compile(pattern, re.I) i, out = 0, "" for m in regex.finditer(haystack): out += "".join([haystack[i:m.start()], '<span class="%s">' % cls_name, haystack[m.start():m.end()], "</span>"]) i = m.end() return mark_safe(out + haystack[i:])
[ "def", "highlight_text", "(", "needles", ",", "haystack", ",", "cls_name", "=", "'highlighted'", ",", "words", "=", "False", ",", "case", "=", "False", ")", ":", "if", "not", "needles", ":", "return", "haystack", "if", "not", "haystack", ":", "return", "''", "if", "words", ":", "pattern", "=", "r\"(%s)\"", "%", "\"|\"", ".", "join", "(", "[", "'\\\\b{}\\\\b'", ".", "format", "(", "re", ".", "escape", "(", "n", ")", ")", "for", "n", "in", "needles", "]", ")", "else", ":", "pattern", "=", "r\"(%s)\"", "%", "\"|\"", ".", "join", "(", "[", "re", ".", "escape", "(", "n", ")", "for", "n", "in", "needles", "]", ")", "if", "case", ":", "regex", "=", "re", ".", "compile", "(", "pattern", ")", "else", ":", "regex", "=", "re", ".", "compile", "(", "pattern", ",", "re", ".", "I", ")", "i", ",", "out", "=", "0", ",", "\"\"", "for", "m", "in", "regex", ".", "finditer", "(", "haystack", ")", ":", "out", "+=", "\"\"", ".", "join", "(", "[", "haystack", "[", "i", ":", "m", ".", "start", "(", ")", "]", ",", "'<span class=\"%s\">'", "%", "cls_name", ",", "haystack", "[", "m", ".", "start", "(", ")", ":", "m", ".", "end", "(", ")", "]", ",", "\"</span>\"", "]", ")", "i", "=", "m", ".", "end", "(", ")", "return", "mark_safe", "(", "out", "+", "haystack", "[", "i", ":", "]", ")" ]
31.208333
24.291667
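A runnable regex-only trace of the matching behavior above (re.sub is used here for brevity; the record assembles the spans manually via finditer). With words=True the \b anchors skip embedded substrings, and the default case=False matches case-insensitively:

import re

pattern = r"(%s)" % r'\bcat\b'
print(re.sub(pattern, r'<span class="highlighted">\1</span>',
             'cat scatter Cat', flags=re.I))
# <span class="highlighted">cat</span> scatter <span class="highlighted">Cat</span>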
def split_leading_dim(tensor, inputs, n_dims=2): """Split the first dimension of a tensor. Args: tensor: Tensor to have its first dimension split. inputs: Original reference input to take the leading dimensions from. n_dims: Number of dimensions to split. Returns: The input tensor, with its first dimension split. """ input_shape_static = inputs.get_shape() input_shape_list = input_shape_static.as_list() tensor_shape_static = tensor.get_shape() tensor_shape_list = tensor_shape_static.as_list() if (input_shape_static.is_fully_defined() and tensor_shape_static.is_fully_defined()): new_shape = input_shape_list[:n_dims] + tensor_shape_list[1:] return tf.reshape(tensor, new_shape) # Shape can't be inferred statically. dims_after_first = tf.shape(tensor)[1:] split_sizes = tf.shape(inputs)[:n_dims] known_split_sizes = input_shape_list[:n_dims] known_dims_after_first = tensor_shape_list[1:] output_size = tf.concat([split_sizes, dims_after_first], 0) result = tf.reshape(tensor, output_size) result.set_shape(known_split_sizes + known_dims_after_first) return result
[ "def", "split_leading_dim", "(", "tensor", ",", "inputs", ",", "n_dims", "=", "2", ")", ":", "input_shape_static", "=", "inputs", ".", "get_shape", "(", ")", "input_shape_list", "=", "input_shape_static", ".", "as_list", "(", ")", "tensor_shape_static", "=", "tensor", ".", "get_shape", "(", ")", "tensor_shape_list", "=", "tensor_shape_static", ".", "as_list", "(", ")", "if", "(", "input_shape_static", ".", "is_fully_defined", "(", ")", "and", "tensor_shape_static", ".", "is_fully_defined", "(", ")", ")", ":", "new_shape", "=", "input_shape_list", "[", ":", "n_dims", "]", "+", "tensor_shape_list", "[", "1", ":", "]", "return", "tf", ".", "reshape", "(", "tensor", ",", "new_shape", ")", "# Shape can't be inferred statically.", "dims_after_first", "=", "tf", ".", "shape", "(", "tensor", ")", "[", "1", ":", "]", "split_sizes", "=", "tf", ".", "shape", "(", "inputs", ")", "[", ":", "n_dims", "]", "known_split_sizes", "=", "input_shape_list", "[", ":", "n_dims", "]", "known_dims_after_first", "=", "tensor_shape_list", "[", "1", ":", "]", "output_size", "=", "tf", ".", "concat", "(", "[", "split_sizes", ",", "dims_after_first", "]", ",", "0", ")", "result", "=", "tf", ".", "reshape", "(", "tensor", ",", "output_size", ")", "result", ".", "set_shape", "(", "known_split_sizes", "+", "known_dims_after_first", ")", "return", "result" ]
37.965517
13.482759
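A usage sketch of the round trip this helper is built for, assuming TensorFlow 2 eager execution and that split_leading_dim from the record is importable (the merge step here is a plain tf.reshape):

import tensorflow as tf

inputs = tf.zeros([4, 5, 8])            # e.g. [batch, time, features]
merged = tf.reshape(inputs, [-1, 8])    # leading pair collapsed -> [20, 8]
restored = split_leading_dim(merged, inputs, n_dims=2)
print(restored.shape)                   # (4, 5, 8)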
def get_surface_boundaries(self): """ :returns: (min_max lons, min_max lats) """ min_lon, min_lat, max_lon, max_lat = self.get_bounding_box() return [[min_lon, max_lon]], [[min_lat, max_lat]]
[ "def", "get_surface_boundaries", "(", "self", ")", ":", "min_lon", ",", "min_lat", ",", "max_lon", ",", "max_lat", "=", "self", ".", "get_bounding_box", "(", ")", "return", "[", "[", "min_lon", ",", "max_lon", "]", "]", ",", "[", "[", "min_lat", ",", "max_lat", "]", "]" ]
37.666667
9.666667
def _assert_safe_casting(cls, data, subarr): """ Ensure incoming data can be represented as ints. """ if not issubclass(data.dtype.type, np.signedinteger): if not np.array_equal(data, subarr): raise TypeError('Unsafe NumPy casting, you must ' 'explicitly cast')
[ "def", "_assert_safe_casting", "(", "cls", ",", "data", ",", "subarr", ")", ":", "if", "not", "issubclass", "(", "data", ".", "dtype", ".", "type", ",", "np", ".", "signedinteger", ")", ":", "if", "not", "np", ".", "array_equal", "(", "data", ",", "subarr", ")", ":", "raise", "TypeError", "(", "'Unsafe NumPy casting, you must '", "'explicitly cast'", ")" ]
43.25
10.5
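The check above boils down to one NumPy comparison: if casting changed any value, np.array_equal returns False and the cast is rejected. A standalone demonstration:

import numpy as np

data = np.array([1.0, 2.5])             # not losslessly representable as ints
subarr = data.astype(np.int64)          # truncates to [1, 2]
print(np.array_equal(data, subarr))     # False -> the check above would raise

safe = np.array([1.0, 2.0])
print(np.array_equal(safe, safe.astype(np.int64)))  # True -> cast allowed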
def get_future_days(self): """Return only future Day objects.""" today = timezone.now().date() return Day.objects.filter(date__gte=today)
[ "def", "get_future_days", "(", "self", ")", ":", "today", "=", "timezone", ".", "now", "(", ")", ".", "date", "(", ")", "return", "Day", ".", "objects", ".", "filter", "(", "date__gte", "=", "today", ")" ]
31.6
13.4
def __truncate(self, line_arr, max_width): ''' Cut a tuple of line chunks according to its visible length ''' def is_space(chunk): return all([True if i == ' ' else False for i in chunk]) def is_empty(chunks, markups): result = [] for chunk in chunks: if chunk in markups: result.append(True) elif is_space(chunk): result.append(True) else: result.append(False) return all(result) left = max_width result = '' markups = self.markup.get_markup_vars() for num, chunk in enumerate(line_arr): if chunk in markups: result += chunk else: if left > 0: if len(chunk) <= left: result += chunk left -= len(chunk) else: leftover = (chunk[left:],) + line_arr[num + 1:] was_cut = not is_empty(leftover, markups) if was_cut: result += chunk[:left - 1] + self.markup.RESET + u'\u2026' else: result += chunk[:left] left = 0 return result
[ "def", "__truncate", "(", "self", ",", "line_arr", ",", "max_width", ")", ":", "def", "is_space", "(", "chunk", ")", ":", "return", "all", "(", "[", "True", "if", "i", "==", "' '", "else", "False", "for", "i", "in", "chunk", "]", ")", "def", "is_empty", "(", "chunks", ",", "markups", ")", ":", "result", "=", "[", "]", "for", "chunk", "in", "chunks", ":", "if", "chunk", "in", "markups", ":", "result", ".", "append", "(", "True", ")", "elif", "is_space", "(", "chunk", ")", ":", "result", ".", "append", "(", "True", ")", "else", ":", "result", ".", "append", "(", "False", ")", "return", "all", "(", "result", ")", "left", "=", "max_width", "result", "=", "''", "markups", "=", "self", ".", "markup", ".", "get_markup_vars", "(", ")", "for", "num", ",", "chunk", "in", "enumerate", "(", "line_arr", ")", ":", "if", "chunk", "in", "markups", ":", "result", "+=", "chunk", "else", ":", "if", "left", ">", "0", ":", "if", "len", "(", "chunk", ")", "<=", "left", ":", "result", "+=", "chunk", "left", "-=", "len", "(", "chunk", ")", "else", ":", "leftover", "=", "(", "chunk", "[", "left", ":", "]", ",", ")", "+", "line_arr", "[", "num", "+", "1", ":", "]", "was_cut", "=", "not", "is_empty", "(", "leftover", ",", "markups", ")", "if", "was_cut", ":", "result", "+=", "chunk", "[", ":", "left", "-", "1", "]", "+", "self", ".", "markup", ".", "RESET", "+", "u'\\u2026'", "else", ":", "result", "+=", "chunk", "[", ":", "left", "]", "left", "=", "0", "return", "result" ]
37.971429
13.114286
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the Get response payload to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the object type, unique identifier, or secret attributes are missing from the payload struct. """ local_stream = utils.BytearrayStream() if self.object_type: self._object_type.write(local_stream, kmip_version=kmip_version) else: raise ValueError("Payload is missing the object type field.") if self.unique_identifier: self._unique_identifier.write( local_stream, kmip_version=kmip_version ) else: raise ValueError( "Payload is missing the unique identifier field." ) if self.secret: self._secret.write(local_stream, kmip_version=kmip_version) else: raise ValueError("Payload is missing the secret field.") self.length = local_stream.length() super(GetResponsePayload, self).write( output_stream, kmip_version=kmip_version ) output_stream.write(local_stream.buffer)
[ "def", "write", "(", "self", ",", "output_stream", ",", "kmip_version", "=", "enums", ".", "KMIPVersion", ".", "KMIP_1_0", ")", ":", "local_stream", "=", "utils", ".", "BytearrayStream", "(", ")", "if", "self", ".", "object_type", ":", "self", ".", "_object_type", ".", "write", "(", "local_stream", ",", "kmip_version", "=", "kmip_version", ")", "else", ":", "raise", "ValueError", "(", "\"Payload is missing the object type field.\"", ")", "if", "self", ".", "unique_identifier", ":", "self", ".", "_unique_identifier", ".", "write", "(", "local_stream", ",", "kmip_version", "=", "kmip_version", ")", "else", ":", "raise", "ValueError", "(", "\"Payload is missing the unique identifier field.\"", ")", "if", "self", ".", "secret", ":", "self", ".", "_secret", ".", "write", "(", "local_stream", ",", "kmip_version", "=", "kmip_version", ")", "else", ":", "raise", "ValueError", "(", "\"Payload is missing the secret field.\"", ")", "self", ".", "length", "=", "local_stream", ".", "length", "(", ")", "super", "(", "GetResponsePayload", ",", "self", ")", ".", "write", "(", "output_stream", ",", "kmip_version", "=", "kmip_version", ")", "output_stream", ".", "write", "(", "local_stream", ".", "buffer", ")" ]
36.022727
22.431818
def subscriber_has_active_subscription(subscriber, plan=None): """ Helper function to check if a subscriber has an active subscription. Raises ImproperlyConfigured if the subscriber is an AnonymousUser instance. Active subscription rules (or): * customer has active subscription If the subscriber is an instance of AUTH_USER_MODEL, active subscription rules (or): * customer has active subscription * user.is_superuser * user.is_staff :param subscriber: The subscriber for which to check for an active subscription. :type subscriber: dj-stripe subscriber :param plan: The plan for which to check for an active subscription. If plan is None and there exists only one subscription, this method will check if that subscription is active. Calling this method with no plan and multiple subscriptions will throw an exception. :type plan: Plan or string (plan ID) """ if isinstance(subscriber, AnonymousUser): raise ImproperlyConfigured(ANONYMOUS_USER_ERROR_MSG) if isinstance(subscriber, get_user_model()): if subscriber.is_superuser or subscriber.is_staff: return True from .models import Customer customer, created = Customer.get_or_create(subscriber) if created or not customer.has_active_subscription(plan): return False return True
[ "def", "subscriber_has_active_subscription", "(", "subscriber", ",", "plan", "=", "None", ")", ":", "if", "isinstance", "(", "subscriber", ",", "AnonymousUser", ")", ":", "raise", "ImproperlyConfigured", "(", "ANONYMOUS_USER_ERROR_MSG", ")", "if", "isinstance", "(", "subscriber", ",", "get_user_model", "(", ")", ")", ":", "if", "subscriber", ".", "is_superuser", "or", "subscriber", ".", "is_staff", ":", "return", "True", "from", ".", "models", "import", "Customer", "customer", ",", "created", "=", "Customer", ".", "get_or_create", "(", "subscriber", ")", "if", "created", "or", "not", "customer", ".", "has_active_subscription", "(", "plan", ")", ":", "return", "False", "return", "True" ]
36.027778
23.25
def firmware_download_input_rbridge_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") firmware_download = ET.Element("firmware_download") config = firmware_download input = ET.SubElement(firmware_download, "input") rbridge_id = ET.SubElement(input, "rbridge-id") rbridge_id.text = kwargs.pop('rbridge_id') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "firmware_download_input_rbridge_id", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "firmware_download", "=", "ET", ".", "Element", "(", "\"firmware_download\"", ")", "config", "=", "firmware_download", "input", "=", "ET", ".", "SubElement", "(", "firmware_download", ",", "\"input\"", ")", "rbridge_id", "=", "ET", ".", "SubElement", "(", "input", ",", "\"rbridge-id\"", ")", "rbridge_id", ".", "text", "=", "kwargs", ".", "pop", "(", "'rbridge_id'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
39.666667
12.583333
def save_file(self, path=None, force_overwrite=False, just_settings=False, **kwargs): """ Saves the data in the databox to a file. Parameters ---------- path=None Path for output. If set to None, use a save dialog. force_overwrite=False Do not question the overwrite if the file already exists. just_settings=False Set to True to save only the state of the DataboxPlot controls **kwargs are sent to the normal databox save_file() function. """ # Update the binary mode if not 'binary' in kwargs: kwargs['binary'] = self.combo_binary.get_text() # if it's just the settings file, make a new databox if just_settings: d = _d.databox() # otherwise use the internal databox else: d = self # add all the controls settings to the header for x in self._autosettings_controls: self._store_gui_setting(d, x) # save the file using the skeleton function, so as not to recursively # call this one again! _d.databox.save_file(d, path, self.file_type, self.file_type, force_overwrite, **kwargs)
[ "def", "save_file", "(", "self", ",", "path", "=", "None", ",", "force_overwrite", "=", "False", ",", "just_settings", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# Update the binary mode", "if", "not", "'binary'", "in", "kwargs", ":", "kwargs", "[", "'binary'", "]", "=", "self", ".", "combo_binary", ".", "get_text", "(", ")", "# if it's just the settings file, make a new databox", "if", "just_settings", ":", "d", "=", "_d", ".", "databox", "(", ")", "# otherwise use the internal databox", "else", ":", "d", "=", "self", "# add all the controls settings to the header", "for", "x", "in", "self", ".", "_autosettings_controls", ":", "self", ".", "_store_gui_setting", "(", "d", ",", "x", ")", "# save the file using the skeleton function, so as not to recursively ", "# call this one again!", "_d", ".", "databox", ".", "save_file", "(", "d", ",", "path", ",", "self", ".", "file_type", ",", "self", ".", "file_type", ",", "force_overwrite", ",", "*", "*", "kwargs", ")" ]
39
24.266667
def pymatgen_mol(self): """ Returns pymatgen Molecule object. """ sp = [] coords = [] for atom in ob.OBMolAtomIter(self._obmol): sp.append(atom.GetAtomicNum()) coords.append([atom.GetX(), atom.GetY(), atom.GetZ()]) return Molecule(sp, coords)
[ "def", "pymatgen_mol", "(", "self", ")", ":", "sp", "=", "[", "]", "coords", "=", "[", "]", "for", "atom", "in", "ob", ".", "OBMolAtomIter", "(", "self", ".", "_obmol", ")", ":", "sp", ".", "append", "(", "atom", ".", "GetAtomicNum", "(", ")", ")", "coords", ".", "append", "(", "[", "atom", ".", "GetX", "(", ")", ",", "atom", ".", "GetY", "(", ")", ",", "atom", ".", "GetZ", "(", ")", "]", ")", "return", "Molecule", "(", "sp", ",", "coords", ")" ]
31.3
10.7
def viscosity_converter(val, old_scale, new_scale, extrapolate=False): r'''Converts kinematic viscosity values from different scales which have historically been used. Though they may not be in use much, some standards still specify values in these scales. Parameters ---------- val : float Viscosity value in the specified scale; [m^2/s] if 'kinematic viscosity'; [degrees] if Engler or Barbey; [s] for the other scales. old_scale : str String representing the scale that `val` is in originally. new_scale : str String representing the scale that `val` should be converted to. extrapolate : bool If True, a conversion will be performed even if outside the limits of either scale; if False, and either value is outside a limit, an exception will be raised. Returns ------- result : float Viscosity value in the specified scale; [m^2/s] if 'kinematic viscosity'; [degrees] if Engler or Barbey; [s] for the other scales Notes ----- The valid scales for this function are any of the following: ['a&w b', 'a&w crucible', 'american can', 'astm 0.07', 'astm 0.10', 'astm 0.15', 'astm 0.20', 'astm 0.25', 'barbey', 'caspers tin plate', 'continental can', 'crown cork and seal', 'demmier #1', 'demmier #10', 'engler', 'ford cup #3', 'ford cup #4', 'kinematic viscosity', 'mac michael', 'murphy varnish', 'parlin cup #10', 'parlin cup #15', 'parlin cup #20', 'parlin cup #25', 'parlin cup #30', 'parlin cup #7', 'pratt lambert a', 'pratt lambert b', 'pratt lambert c', 'pratt lambert d', 'pratt lambert e', 'pratt lambert f', 'pratt lambert g', 'pratt lambert h', 'pratt lambert i', 'redwood admiralty', 'redwood standard', 'saybolt furol', 'saybolt universal', 'scott', 'stormer 100g load', 'westinghouse', 'zahn cup #1', 'zahn cup #2', 'zahn cup #3', 'zahn cup #4', 'zahn cup #5'] Some of those scales are converted linearly; the rest use tabulated data and splines. Because the conversion is performed by spline functions, a re-conversion of a value will not yield exactly the original value. However, it is quite close. The method 'Saybolt universal' has a special formula implemented for its conversion, from [4]_. It is designed for maximum backwards compatibility with prior experimental data. It is solved by Newton's method when kinematic viscosity is desired as an output. .. math:: SUS_{eq} = 4.6324\nu_t + \frac{[1.0 + 0.03264\nu_t]} {[(3930.2 + 262.7\nu_t + 23.97\nu_t^2 + 1.646\nu_t^3)\times10^{-5}]} Examples -------- >>> viscosity_converter(8.79, 'engler', 'parlin cup #7') 52.5 >>> viscosity_converter(700, 'Saybolt Universal Seconds', 'kinematic viscosity') 0.00015108914751515542 References ---------- .. [1] Hydraulic Institute. Hydraulic Institute Engineering Data Book. Cleveland, Ohio: Hydraulic Institute, 1990. .. [2] Gardner/Sward. Paint Testing Manual. Physical and Chemical Examination of Paints, Varnishes, Lacquers, and Colors. 13th Edition. ASTM, 1972. .. [3] Euverard, M. R., The Efflux Type Viscosity Cup. National Paint, Varnish, and Lacquer Association, 1948. .. [4] API Technical Data Book: General Properties & Characterization. American Petroleum Institute, 7E, 2005. .. [5] ASTM. Standard Practice for Conversion of Kinematic Viscosity to Saybolt Universal Viscosity or to Saybolt Furol Viscosity. D 2161 - 93.
''' def range_check(visc, scale): scale_min, scale_max, nu_min, nu_max = viscosity_converter_limits[scale] if visc < scale_min*(1.-1E-7) or visc > scale_max*(1.+1E-7): raise Exception('Viscosity conversion is outside the limits of the ' '%s scale; given value is %s, but the range of the ' 'scale is from %s to %s. Set `extrapolate` to True ' 'to perform the conversion anyway.' %(scale, visc, scale_min, scale_max)) def range_check_linear(val, c, tmin, scale): if val < tmin: raise Exception('Viscosity conversion is outside the limits of the ' '%s scale; given value is %s, but the minimum time ' 'for this scale is %s s. Set `extrapolate` to True ' 'to perform the conversion anyway.' %(scale, val, tmin)) old_scale = old_scale.lower().replace('degrees', '').replace('seconds', '').strip() new_scale = new_scale.lower().replace('degrees', '').replace('seconds', '').strip() def Saybolt_universal_eq(nu): return (4.6324*nu + (1E5 + 3264.*nu)/(nu*(nu*(1.646*nu + 23.97) + 262.7) + 3930.2)) # Convert to kinematic viscosity if old_scale == 'kinematic viscosity': val = 1E6*val # convert to centistokes, the basis of the functions elif old_scale == 'saybolt universal': if not extrapolate: range_check(val, old_scale) to_solve = lambda nu: Saybolt_universal_eq(nu) - val val = newton(to_solve, 1) elif old_scale in viscosity_converters_to_nu: if not extrapolate: range_check(val, old_scale) val = exp(viscosity_converters_to_nu[old_scale](log(val))) elif old_scale in viscosity_scales_linear: c, tmin = viscosity_scales_linear[old_scale] if not extrapolate: range_check_linear(val, c, tmin, old_scale) val = c*val # convert from seconds to centistokes else: keys = sorted(set(list(viscosity_scales.keys()) + list(viscosity_scales_linear.keys()))) raise Exception('Scale "%s" not recognized - allowable values are any of %s.' %(old_scale, keys)) # Convert to desired scale if new_scale == 'kinematic viscosity': val = 1E-6*val # convert to m^2/s elif new_scale == 'saybolt universal': val = Saybolt_universal_eq(val) elif new_scale in viscosity_converters_from_nu: val = exp(viscosity_converters_from_nu[new_scale](log(val))) if not extrapolate: range_check(val, new_scale) elif new_scale in viscosity_scales_linear: c, tmin = viscosity_scales_linear[new_scale] val = val/c # convert from centistokes to seconds if not extrapolate: range_check_linear(val, c, tmin, new_scale) else: keys = sorted(set(list(viscosity_scales.keys()) + list(viscosity_scales_linear.keys()))) raise Exception('Scale "%s" not recognized - allowable values are any of %s.' %(new_scale, keys)) return float(val)
[ "def", "viscosity_converter", "(", "val", ",", "old_scale", ",", "new_scale", ",", "extrapolate", "=", "False", ")", ":", "def", "range_check", "(", "visc", ",", "scale", ")", ":", "scale_min", ",", "scale_max", ",", "nu_min", ",", "nu_max", "=", "viscosity_converter_limits", "[", "scale", "]", "if", "visc", "<", "scale_min", "*", "(", "1.", "-", "1E-7", ")", "or", "visc", ">", "scale_max", "*", "(", "1.", "+", "1E-7", ")", ":", "raise", "Exception", "(", "'Viscosity conversion is outside the limits of the '", "'%s scale; given value is %s, but the range of the '", "'scale is from %s to %s. Set `extrapolate` to True '", "'to perform the conversion anyway.'", "%", "(", "scale", ",", "visc", ",", "scale_min", ",", "scale_max", ")", ")", "def", "range_check_linear", "(", "val", ",", "c", ",", "tmin", ",", "scale", ")", ":", "if", "val", "<", "tmin", ":", "raise", "Exception", "(", "'Viscosity conversion is outside the limits of the '", "'%s scale; given value is %s, but the minimum time '", "'for this scale is %s s. Set `extrapolate` to True '", "'to perform the conversion anyway.'", "%", "(", "scale", ",", "val", ",", "tmin", ")", ")", "old_scale", "=", "old_scale", ".", "lower", "(", ")", ".", "replace", "(", "'degrees'", ",", "''", ")", ".", "replace", "(", "'seconds'", ",", "''", ")", ".", "strip", "(", ")", "new_scale", "=", "new_scale", ".", "lower", "(", ")", ".", "replace", "(", "'degrees'", ",", "''", ")", ".", "replace", "(", "'seconds'", ",", "''", ")", ".", "strip", "(", ")", "def", "Saybolt_universal_eq", "(", "nu", ")", ":", "return", "(", "4.6324", "*", "nu", "+", "(", "1E5", "+", "3264.", "*", "nu", ")", "/", "(", "nu", "*", "(", "nu", "*", "(", "1.646", "*", "nu", "+", "23.97", ")", "+", "262.7", ")", "+", "3930.2", ")", ")", "# Convert to kinematic viscosity", "if", "old_scale", "==", "'kinematic viscosity'", ":", "val", "=", "1E6", "*", "val", "# convert to centistokes, the basis of the functions", "elif", "old_scale", "==", "'saybolt universal'", ":", "if", "not", "extrapolate", ":", "range_check", "(", "val", ",", "old_scale", ")", "to_solve", "=", "lambda", "nu", ":", "Saybolt_universal_eq", "(", "nu", ")", "-", "val", "val", "=", "newton", "(", "to_solve", ",", "1", ")", "elif", "old_scale", "in", "viscosity_converters_to_nu", ":", "if", "not", "extrapolate", ":", "range_check", "(", "val", ",", "old_scale", ")", "val", "=", "exp", "(", "viscosity_converters_to_nu", "[", "old_scale", "]", "(", "log", "(", "val", ")", ")", ")", "elif", "old_scale", "in", "viscosity_scales_linear", ":", "c", ",", "tmin", "=", "viscosity_scales_linear", "[", "old_scale", "]", "if", "not", "extrapolate", ":", "range_check_linear", "(", "val", ",", "c", ",", "tmin", ",", "old_scale", ")", "val", "=", "c", "*", "val", "# convert from seconds to centistokes", "else", ":", "keys", "=", "sorted", "(", "set", "(", "list", "(", "viscosity_scales", ".", "keys", "(", ")", ")", "+", "list", "(", "viscosity_scales_linear", ".", "keys", "(", ")", ")", ")", ")", "raise", "Exception", "(", "'Scale \"%s\" not recognized - allowable values are any of %s.'", "%", "(", "old_scale", ",", "keys", ")", ")", "# Convert to desired scale", "if", "new_scale", "==", "'kinematic viscosity'", ":", "val", "=", "1E-6", "*", "val", "# convert to m^2/s", "elif", "new_scale", "==", "'saybolt universal'", ":", "val", "=", "Saybolt_universal_eq", "(", "val", ")", "elif", "new_scale", "in", "viscosity_converters_from_nu", ":", "val", "=", "exp", "(", "viscosity_converters_from_nu", "[", "new_scale", "]", "(", "log", "(", 
"val", ")", ")", ")", "if", "not", "extrapolate", ":", "range_check", "(", "val", ",", "new_scale", ")", "elif", "new_scale", "in", "viscosity_scales_linear", ":", "c", ",", "tmin", "=", "viscosity_scales_linear", "[", "new_scale", "]", "val", "=", "val", "/", "c", "# convert from centistokes to seconds", "if", "not", "extrapolate", ":", "range_check_linear", "(", "val", ",", "c", ",", "tmin", ",", "new_scale", ")", "else", ":", "keys", "=", "sorted", "(", "set", "(", "list", "(", "viscosity_scales", ".", "keys", "(", ")", ")", "+", "list", "(", "viscosity_scales_linear", ".", "keys", "(", ")", ")", ")", ")", "raise", "Exception", "(", "'Scale \"%s\" not recognized - allowable values are any of %s.'", "%", "(", "new_scale", ",", "keys", ")", ")", "return", "float", "(", "val", ")" ]
46.319444
27.041667
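The code's Horner-form Saybolt_universal_eq and the docstring's formula are algebraically identical (multiply the numerator and denominator by 1e5); a quick standalone check:

def saybolt_docstring(nu):
    # Formula as written in the docstring (nu in centistokes)
    return 4.6324*nu + (1.0 + 0.03264*nu)/(
        (3930.2 + 262.7*nu + 23.97*nu**2 + 1.646*nu**3)*1e-5)

def saybolt_horner(nu):
    # Horner form used in the code
    return 4.6324*nu + (1E5 + 3264.*nu)/(nu*(nu*(1.646*nu + 23.97) + 262.7) + 3930.2)

for nu in (1.0, 20.0, 500.0):
    assert abs(saybolt_docstring(nu) - saybolt_horner(nu)) < 1e-9*saybolt_docstring(nu)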
def get_nets_jpnic(self, response): """ The function for parsing network blocks from jpnic whois data. Args: response (:obj:`str`): The response from the jpnic server. Returns: list of dict: Mapping of networks with start and end positions. :: [{ 'cidr' (str) - The network routing block 'start' (int) - The starting point of the network 'end' (int) - The endpoint point of the network }] """ nets = [] # Iterate through all of the networks found, storing the CIDR value # and the start and end positions. for match in re.finditer( r'^.*?(\[Network Number\])[^\S\n]+.+?>(?P<val>.+?)</A>$', response, re.MULTILINE ): try: net = copy.deepcopy(BASE_NET) tmp = ip_network(match.group(2)) try: # pragma: no cover network_address = tmp.network_address except AttributeError: # pragma: no cover network_address = tmp.ip pass try: # pragma: no cover broadcast_address = tmp.broadcast_address except AttributeError: # pragma: no cover broadcast_address = tmp.broadcast pass net['range'] = '{0} - {1}'.format( network_address + 1, broadcast_address ) cidr = ip_network(match.group(2).strip()).__str__() net['cidr'] = cidr net['start'] = match.start() net['end'] = match.end() nets.append(net) except (ValueError, TypeError): pass return nets
[ "def", "get_nets_jpnic", "(", "self", ",", "response", ")", ":", "nets", "=", "[", "]", "# Iterate through all of the networks found, storing the CIDR value", "# and the start and end positions.", "for", "match", "in", "re", ".", "finditer", "(", "r'^.*?(\\[Network Number\\])[^\\S\\n]+.+?>(?P<val>.+?)</A>$'", ",", "response", ",", "re", ".", "MULTILINE", ")", ":", "try", ":", "net", "=", "copy", ".", "deepcopy", "(", "BASE_NET", ")", "tmp", "=", "ip_network", "(", "match", ".", "group", "(", "2", ")", ")", "try", ":", "# pragma: no cover", "network_address", "=", "tmp", ".", "network_address", "except", "AttributeError", ":", "# pragma: no cover", "network_address", "=", "tmp", ".", "ip", "pass", "try", ":", "# pragma: no cover", "broadcast_address", "=", "tmp", ".", "broadcast_address", "except", "AttributeError", ":", "# pragma: no cover", "broadcast_address", "=", "tmp", ".", "broadcast", "pass", "net", "[", "'range'", "]", "=", "'{0} - {1}'", ".", "format", "(", "network_address", "+", "1", ",", "broadcast_address", ")", "cidr", "=", "ip_network", "(", "match", ".", "group", "(", "2", ")", ".", "strip", "(", ")", ")", ".", "__str__", "(", ")", "net", "[", "'cidr'", "]", "=", "cidr", "net", "[", "'start'", "]", "=", "match", ".", "start", "(", ")", "net", "[", "'end'", "]", "=", "match", ".", "end", "(", ")", "nets", ".", "append", "(", "net", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "pass", "return", "nets" ]
29.709677
22.483871
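A self-contained trace of the regex above against a fabricated response line (real JPNIC HTML differs; this only illustrates the capture group):

import re

response = 'a. [Network Number]  <A HREF="#">192.0.2.0/24</A>'  # fabricated
pattern = r'^.*?(\[Network Number\])[^\S\n]+.+?>(?P<val>.+?)</A>$'
match = re.search(pattern, response, re.MULTILINE)
print(match.group('val'))  # 192.0.2.0/24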
def delete(ctx, archive_name): ''' Delete an archive ''' _generate_api(ctx) var = ctx.obj.api.get_archive(archive_name) var.delete() click.echo('deleted archive {}'.format(var))
[ "def", "delete", "(", "ctx", ",", "archive_name", ")", ":", "_generate_api", "(", "ctx", ")", "var", "=", "ctx", ".", "obj", ".", "api", ".", "get_archive", "(", "archive_name", ")", "var", ".", "delete", "(", ")", "click", ".", "echo", "(", "'deleted archive {}'", ".", "format", "(", "var", ")", ")" ]
19.8
23.2
def set_stable_spot_instance_settings(self, maximum_bid_price_percentage=None, timeout_for_request=None, allow_fallback=True): """ Purchase options for stable spot instances. `maximum_bid_price_percentage`: Maximum value to bid for stable node spot instances, expressed as a percentage of the base price (applies to both master and slave nodes). `timeout_for_request`: Timeout for a stable node spot instance request (Unit: minutes) `allow_fallback`: Whether to fallback to on-demand instances for stable nodes if spot instances are not available """ self.hadoop_settings['stable_spot_instance_settings'] = { 'maximum_bid_price_percentage': maximum_bid_price_percentage, 'timeout_for_request': timeout_for_request, 'allow_fallback': allow_fallback}
[ "def", "set_stable_spot_instance_settings", "(", "self", ",", "maximum_bid_price_percentage", "=", "None", ",", "timeout_for_request", "=", "None", ",", "allow_fallback", "=", "True", ")", ":", "self", ".", "hadoop_settings", "[", "'stable_spot_instance_settings'", "]", "=", "{", "'maximum_bid_price_percentage'", ":", "maximum_bid_price_percentage", ",", "'timeout_for_request'", ":", "timeout_for_request", ",", "'allow_fallback'", ":", "allow_fallback", "}" ]
48.25
25.15
def harmonics_1d(harmonic_out, x, freqs, h_range, kind='linear', fill_value=0, axis=0): '''Populate a harmonic tensor from a time-frequency representation. Parameters ---------- harmonic_out : np.ndarray, shape=(len(h_range), X.shape) The output array to store harmonics X : np.ndarray The input energy freqs : np.ndarray, shape=(x.shape[axis]) The frequency values corresponding to x's elements along the chosen axis. h_range : list-like, non-negative Harmonics to compute. The first harmonic (1) corresponds to `x` itself. Values less than one (e.g., 1/2) correspond to sub-harmonics. kind : str Interpolation type. See `scipy.interpolate.interp1d`. fill_value : float The value to fill when extrapolating beyond the observed frequency range. axis : int The axis along which to compute harmonics See Also -------- harmonics scipy.interpolate.interp1d Examples -------- Estimate the harmonics of a time-averaged tempogram >>> y, sr = librosa.load(librosa.util.example_audio_file(), ... duration=15, offset=30) >>> # Compute the time-varying tempogram and average over time >>> tempi = np.mean(librosa.feature.tempogram(y=y, sr=sr), axis=1) >>> # We'll measure the first five harmonics >>> h_range = [1, 2, 3, 4, 5] >>> f_tempo = librosa.tempo_frequencies(len(tempi), sr=sr) >>> # Build the harmonic tensor >>> t_harmonics = librosa.interp_harmonics(tempi, f_tempo, h_range) >>> print(t_harmonics.shape) (5, 384) >>> # And plot the results >>> import matplotlib.pyplot as plt >>> plt.figure() >>> librosa.display.specshow(t_harmonics, x_axis='tempo', sr=sr) >>> plt.yticks(0.5 + np.arange(len(h_range)), ... ['{:.3g}'.format(_) for _ in h_range]) >>> plt.ylabel('Harmonic') >>> plt.xlabel('Tempo (BPM)') >>> plt.tight_layout() We can also compute frequency harmonics for spectrograms. To calculate subharmonic energy, use values < 1. >>> h_range = [1./3, 1./2, 1, 2, 3, 4] >>> S = np.abs(librosa.stft(y)) >>> fft_freqs = librosa.fft_frequencies(sr=sr) >>> S_harm = librosa.interp_harmonics(S, fft_freqs, h_range, axis=0) >>> print(S_harm.shape) (6, 1025, 646) >>> plt.figure() >>> for i, _sh in enumerate(S_harm, 1): ... plt.subplot(3,2,i) ... librosa.display.specshow(librosa.amplitude_to_db(_sh, ... ref=S.max()), ... sr=sr, y_axis='log') ... plt.title('h={:.3g}'.format(h_range[i-1])) ... plt.yticks([]) >>> plt.tight_layout() ''' # Note: this only works for fixed-grid, 1d interpolation f_interp = scipy.interpolate.interp1d(freqs, x, kind=kind, axis=axis, copy=False, bounds_error=False, fill_value=fill_value) idx_out = [slice(None)] * harmonic_out.ndim # Compute the output index of the interpolated values interp_axis = 1 + (axis % x.ndim) # Iterate over the harmonics range for h_index, harmonic in enumerate(h_range): idx_out[0] = h_index # Iterate over frequencies for f_index, frequency in enumerate(freqs): # Offset the output axis by 1 to account for the harmonic index idx_out[interp_axis] = f_index # Estimate the harmonic energy at this frequency across time harmonic_out[tuple(idx_out)] = f_interp(harmonic * frequency)
[ "def", "harmonics_1d", "(", "harmonic_out", ",", "x", ",", "freqs", ",", "h_range", ",", "kind", "=", "'linear'", ",", "fill_value", "=", "0", ",", "axis", "=", "0", ")", ":", "# Note: this only works for fixed-grid, 1d interpolation", "f_interp", "=", "scipy", ".", "interpolate", ".", "interp1d", "(", "freqs", ",", "x", ",", "kind", "=", "kind", ",", "axis", "=", "axis", ",", "copy", "=", "False", ",", "bounds_error", "=", "False", ",", "fill_value", "=", "fill_value", ")", "idx_out", "=", "[", "slice", "(", "None", ")", "]", "*", "harmonic_out", ".", "ndim", "# Compute the output index of the interpolated values", "interp_axis", "=", "1", "+", "(", "axis", "%", "x", ".", "ndim", ")", "# Iterate over the harmonics range", "for", "h_index", ",", "harmonic", "in", "enumerate", "(", "h_range", ")", ":", "idx_out", "[", "0", "]", "=", "h_index", "# Iterate over frequencies", "for", "f_index", ",", "frequency", "in", "enumerate", "(", "freqs", ")", ":", "# Offset the output axis by 1 to account for the harmonic index", "idx_out", "[", "interp_axis", "]", "=", "f_index", "# Estimate the harmonic energy at this frequency across time", "harmonic_out", "[", "tuple", "(", "idx_out", ")", "]", "=", "f_interp", "(", "harmonic", "*", "frequency", ")" ]
34.425926
21.796296
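The inner loop above reduces to one interp1d evaluation per (harmonic, frequency) pair; a self-contained sketch of that core step with synthetic data:

import numpy as np
import scipy.interpolate

freqs = np.linspace(0, 1000, 101)                # 10 Hz grid
energy = np.exp(-((freqs - 440.0)/50.0)**2)      # synthetic bump at 440 Hz
f_interp = scipy.interpolate.interp1d(freqs, energy,
                                      bounds_error=False, fill_value=0)
# Energy at the 2nd harmonic of 220 Hz lands right on the bump:
print(float(f_interp(2 * 220.0)))                # 1.0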
def Sanjari(T, Tc, Pc, omega): r'''Calculates vapor pressure of a fluid at arbitrary temperatures using a CSP relationship by [1]_. Requires a chemical's critical temperature, pressure, and acentric factor. Although developed for refrigerants, this model should have some general predictive ability. The vapor pressure of a chemical at `T` is given by: .. math:: P^{sat} = P_c\exp(f^{(0)} + \omega f^{(1)} + \omega^2 f^{(2)}) f^{(0)} = a_1 + \frac{a_2}{T_r} + a_3\ln T_r + a_4 T_r^{1.9} f^{(1)} = a_5 + \frac{a_6}{T_r} + a_7\ln T_r + a_8 T_r^{1.9} f^{(2)} = a_9 + \frac{a_{10}}{T_r} + a_{11}\ln T_r + a_{12} T_r^{1.9} Parameters ---------- T : float Temperature of fluid [K] Tc : float Critical temperature of fluid [K] Pc : float Critical pressure of fluid [Pa] omega : float Acentric factor [-] Returns ------- Psat : float Vapor pressure, [Pa] Notes ----- a[1-12] are as follows: 6.83377, -5.76051, 0.90654, -1.16906, 5.32034, -28.1460, -58.0352, 23.57466, 18.19967, 16.33839, 65.6995, -35.9739. For a claimed fluid not included in the regression, R128, the claimed AARD was 0.428%. A re-calculation using 200 data points from 125.45 K to 343.90225 K evenly spaced by 1.09775 K as generated by NIST Webbook April 2016 produced an AARD of 0.644%. It is likely that the author's regression used more precision in its coefficients than was shown here. Nevertheless, the function is reproduced as shown in [1]_. For Tc=808 K, Pc=1100000 Pa, omega=1.1571, this function actually declines after 770 K. Examples -------- >>> Sanjari(347.2, 617.1, 36E5, 0.299) 13651.916109552498 References ---------- .. [1] Sanjari, Ehsan, Mehrdad Honarmand, Hamidreza Badihi, and Ali Ghaheri. "An Accurate Generalized Model for Predict Vapor Pressure of Refrigerants." International Journal of Refrigeration 36, no. 4 (June 2013): 1327-32. doi:10.1016/j.ijrefrig.2013.01.007. ''' Tr = T/Tc f0 = 6.83377 + -5.76051/Tr + 0.90654*log(Tr) + -1.16906*Tr**1.9 f1 = 5.32034 + -28.1460/Tr + -58.0352*log(Tr) + 23.57466*Tr**1.9 f2 = 18.19967 + 16.33839/Tr + 65.6995*log(Tr) + -35.9739*Tr**1.9 return Pc*exp(f0 + omega*f1 + omega**2*f2)
[ "def", "Sanjari", "(", "T", ",", "Tc", ",", "Pc", ",", "omega", ")", ":", "Tr", "=", "T", "/", "Tc", "f0", "=", "6.83377", "+", "-", "5.76051", "/", "Tr", "+", "0.90654", "*", "log", "(", "Tr", ")", "+", "-", "1.16906", "*", "Tr", "**", "1.9", "f1", "=", "5.32034", "+", "-", "28.1460", "/", "Tr", "+", "-", "58.0352", "*", "log", "(", "Tr", ")", "+", "23.57466", "*", "Tr", "**", "1.9", "f2", "=", "18.19967", "+", "16.33839", "/", "Tr", "+", "65.6995", "*", "log", "(", "Tr", ")", "+", "-", "35.9739", "*", "Tr", "**", "1.9", "return", "Pc", "*", "exp", "(", "f0", "+", "omega", "*", "f1", "+", "omega", "**", "2", "*", "f2", ")" ]
34.537313
26.537313
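A standalone re-derivation of the docstring example, spelling out the three f-terms exactly as the code does:

from math import exp, log

T, Tc, Pc, omega = 347.2, 617.1, 36E5, 0.299
Tr = T/Tc
f0 = 6.83377 - 5.76051/Tr + 0.90654*log(Tr) - 1.16906*Tr**1.9
f1 = 5.32034 - 28.1460/Tr - 58.0352*log(Tr) + 23.57466*Tr**1.9
f2 = 18.19967 + 16.33839/Tr + 65.6995*log(Tr) - 35.9739*Tr**1.9
print(Pc*exp(f0 + omega*f1 + omega**2*f2))  # ~13651.92 Pa, matching the docstring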
def linked(base_dir: str, rr_id: str) -> str: """ Get, from the specified directory, the path to the tails file associated with the input revocation registry identifier, or None for no such file. :param base_dir: base directory for tails files, thereafter split by cred def id :param rr_id: rev reg id :return: (stringified) path to tails file of interest, or None for no such file. """ LOGGER.debug('Tails.linked >>> base_dir: %s, rr_id: %s', base_dir, rr_id) if not ok_rev_reg_id(rr_id): LOGGER.debug('Tails.linked <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) cd_id = rev_reg_id2cred_def_id(rr_id) link = join(base_dir, cd_id, rr_id) rv = join(base_dir, cd_id, readlink(link)) if islink(link) else None LOGGER.debug('Tails.linked <<< %s', rv) return rv
[ "def", "linked", "(", "base_dir", ":", "str", ",", "rr_id", ":", "str", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Tails.linked >>> base_dir: %s, rr_id: %s'", ",", "base_dir", ",", "rr_id", ")", "if", "not", "ok_rev_reg_id", "(", "rr_id", ")", ":", "LOGGER", ".", "debug", "(", "'Tails.linked <!< Bad rev reg id %s'", ",", "rr_id", ")", "raise", "BadIdentifier", "(", "'Bad rev reg id {}'", ".", "format", "(", "rr_id", ")", ")", "cd_id", "=", "rev_reg_id2cred_def_id", "(", "rr_id", ")", "link", "=", "join", "(", "base_dir", ",", "cd_id", ",", "rr_id", ")", "rv", "=", "join", "(", "base_dir", ",", "cd_id", ",", "readlink", "(", "link", ")", ")", "if", "islink", "(", "link", ")", "else", "None", "LOGGER", ".", "debug", "(", "'Tails.linked <<< %s'", ",", "rv", ")", "return", "rv" ]
41.590909
25.590909
def on_btn_unpack(self, event): """ Create dialog to choose a file to unpack with download magic. Then run download_magic and create self.contribution. """ dlg = wx.FileDialog( None, message = "choose txt file to unpack", defaultDir=self.WD, defaultFile="", style=wx.FD_OPEN #| wx.FD_CHANGE_DIR ) if dlg.ShowModal() == wx.ID_OK: FILE = dlg.GetPath() input_dir, f = os.path.split(FILE) else: return False outstring="download_magic.py -f {} -WD {} -ID {} -DM {}".format(f, self.WD, input_dir, self.data_model_num) # run as module: print("-I- running python script:\n %s"%(outstring)) wait = wx.BusyInfo("Please wait, working...") wx.SafeYield() ex = None try: if ipmag.download_magic(f, self.WD, input_dir, overwrite=True, data_model=self.data_model): text = "Successfully ran download_magic.py program.\nMagIC files were saved in your working directory.\nSee Terminal/message window for details." else: text = "Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again." except Exception as ex: text = "Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again." del wait dlg = wx.MessageDialog(self, caption="Saved", message=text, style=wx.OK) result = dlg.ShowModal() if result == wx.ID_OK: dlg.Destroy() if ex: raise(ex) self.contribution = cb.Contribution(self.WD)
[ "def", "on_btn_unpack", "(", "self", ",", "event", ")", ":", "dlg", "=", "wx", ".", "FileDialog", "(", "None", ",", "message", "=", "\"choose txt file to unpack\"", ",", "defaultDir", "=", "self", ".", "WD", ",", "defaultFile", "=", "\"\"", ",", "style", "=", "wx", ".", "FD_OPEN", "#| wx.FD_CHANGE_DIR", ")", "if", "dlg", ".", "ShowModal", "(", ")", "==", "wx", ".", "ID_OK", ":", "FILE", "=", "dlg", ".", "GetPath", "(", ")", "input_dir", ",", "f", "=", "os", ".", "path", ".", "split", "(", "FILE", ")", "else", ":", "return", "False", "outstring", "=", "\"download_magic.py -f {} -WD {} -ID {} -DM {}\"", ".", "format", "(", "f", ",", "self", ".", "WD", ",", "input_dir", ",", "self", ".", "data_model_num", ")", "# run as module:", "print", "(", "\"-I- running python script:\\n %s\"", "%", "(", "outstring", ")", ")", "wait", "=", "wx", ".", "BusyInfo", "(", "\"Please wait, working...\"", ")", "wx", ".", "SafeYield", "(", ")", "ex", "=", "None", "try", ":", "if", "ipmag", ".", "download_magic", "(", "f", ",", "self", ".", "WD", ",", "input_dir", ",", "overwrite", "=", "True", ",", "data_model", "=", "self", ".", "data_model", ")", ":", "text", "=", "\"Successfully ran download_magic.py program.\\nMagIC files were saved in your working directory.\\nSee Terminal/message window for details.\"", "else", ":", "text", "=", "\"Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again.\"", "except", "Exception", "as", "ex", ":", "text", "=", "\"Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again.\"", "del", "wait", "dlg", "=", "wx", ".", "MessageDialog", "(", "self", ",", "caption", "=", "\"Saved\"", ",", "message", "=", "text", ",", "style", "=", "wx", ".", "OK", ")", "result", "=", "dlg", ".", "ShowModal", "(", ")", "if", "result", "==", "wx", ".", "ID_OK", ":", "dlg", ".", "Destroy", "(", ")", "if", "ex", ":", "raise", "(", "ex", ")", "self", ".", "contribution", "=", "cb", ".", "Contribution", "(", "self", ".", "WD", ")" ]
41.853659
25.170732
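A side note on the exception handling fixed above: Python 3 unbinds the `except ... as` target when the handler exits, so the exception must be copied to an outer name before it can be re-raised later. A minimal standalone check, standard library only:

    err_seen = None
    try:
        raise ValueError('boom')
    except ValueError as exc:
        err_seen = exc        # copy out: 'exc' itself is unbound after the handler
    print(repr(err_seen))     # ValueError('boom')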
def filter(self, request, queryset, view): """ Filter each resource separately using its own filter """ summary_queryset = queryset filtered_querysets = [] for queryset in summary_queryset.querysets: filter_class = self._get_filter(queryset) queryset = filter_class(request.query_params, queryset=queryset).qs filtered_querysets.append(queryset) summary_queryset.querysets = filtered_querysets return summary_queryset
[ "def", "filter", "(", "self", ",", "request", ",", "queryset", ",", "view", ")", ":", "summary_queryset", "=", "queryset", "filtered_querysets", "=", "[", "]", "for", "queryset", "in", "summary_queryset", ".", "querysets", ":", "filter_class", "=", "self", ".", "_get_filter", "(", "queryset", ")", "queryset", "=", "filter_class", "(", "request", ".", "query_params", ",", "queryset", "=", "queryset", ")", ".", "qs", "filtered_querysets", ".", "append", "(", "queryset", ")", "summary_queryset", ".", "querysets", "=", "filtered_querysets", "return", "summary_queryset" ]
44.727273
13.636364
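The same per-collection filtering idea, sketched without the REST-framework machinery; the container and predicates below are made up for illustration:

    # Each sub-collection is filtered by its own predicate, then the filtered
    # results are reattached to the summary container, mirroring the method above.
    summary = {'querysets': [[1, 2, 3], ['a', 'bb', 'ccc']]}
    predicates = [lambda n: n > 1, lambda s: len(s) > 1]
    summary['querysets'] = [[item for item in qs if pred(item)]
                            for qs, pred in zip(summary['querysets'], predicates)]
    print(summary['querysets'])   # [[2, 3], ['bb', 'ccc']]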
def read_short(self): """Read an unsigned 16-bit integer""" self.bitcount = self.bits = 0 return unpack('>H', self.input.read(2))[0]
[ "def", "read_short", "(", "self", ")", ":", "self", ".", "bitcount", "=", "self", ".", "bits", "=", "0", "return", "unpack", "(", "'>H'", ",", "self", ".", "input", ".", "read", "(", "2", ")", ")", "[", "0", "]" ]
38.25
8
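For context, '>H' tells struct.unpack to decode two bytes as a big-endian unsigned short; a minimal sketch with a made-up two-byte payload standing in for self.input:

    from io import BytesIO
    from struct import unpack

    stream = BytesIO(b'\x01\x02')             # stand-in for self.input
    value = unpack('>H', stream.read(2))[0]   # '>H' = big-endian unsigned 16-bit
    print(value)                              # 258, i.e. 0x0102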
def buildcontent(self): """build HTML content only, no header or body tags""" self.buildcontainer() self.option = json.dumps(self.options, cls = HighchartsEncoder) self.setoption = json.dumps(self.setOptions, cls = HighchartsEncoder) self.data = json.dumps(self.data_temp, cls = HighchartsEncoder) # DEM 2017/04/25: Make 'data' available as an array # ... this permits jinja2 array access to each data definition # ... which is useful for looping over multiple data sources self.data_list = [json.dumps(x, cls = HighchartsEncoder) for x in self.data_temp] if self.navi_seri_flag: self.navi_seri = json.dumps(self.navi_seri_temp, cls = HighchartsEncoder) self._htmlcontent = self.template_content_highcharts.render(chart=self).encode('utf-8')
[ "def", "buildcontent", "(", "self", ")", ":", "self", ".", "buildcontainer", "(", ")", "self", ".", "option", "=", "json", ".", "dumps", "(", "self", ".", "options", ",", "cls", "=", "HighchartsEncoder", ")", "self", ".", "setoption", "=", "json", ".", "dumps", "(", "self", ".", "setOptions", ",", "cls", "=", "HighchartsEncoder", ")", "self", ".", "data", "=", "json", ".", "dumps", "(", "self", ".", "data_temp", ",", "cls", "=", "HighchartsEncoder", ")", "# DEM 2017/04/25: Make 'data' available as an array", "# ... this permits jinja2 array access to each data definition", "# ... which is useful for looping over multiple data sources", "self", ".", "data_list", "=", "[", "json", ".", "dumps", "(", "x", ",", "cls", "=", "HighchartsEncoder", ")", "for", "x", "in", "self", ".", "data_temp", "]", "if", "self", ".", "navi_seri_flag", ":", "self", ".", "navi_seri", "=", "json", ".", "dumps", "(", "self", ".", "navi_seri_temp", ",", "cls", "=", "HighchartsEncoder", ")", "self", ".", "_htmlcontent", "=", "self", ".", "template_content_highcharts", ".", "render", "(", "chart", "=", "self", ")", ".", "encode", "(", "'utf-8'", ")" ]
49.764706
29.823529
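The `cls=` arguments above use json's pluggable-encoder hook; a hedged sketch of that pattern, with DateEncoder as a hypothetical stand-in for HighchartsEncoder:

    import datetime
    import json

    class DateEncoder(json.JSONEncoder):
        def default(self, o):
            if isinstance(o, datetime.date):
                return o.isoformat()      # render dates as ISO strings
            return super().default(o)     # defer anything else to the base class

    print(json.dumps({'day': datetime.date(2017, 4, 25)}, cls=DateEncoder))
    # {"day": "2017-04-25"}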
def GetMessages(self, formatter_mediator, event): """Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. """ if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() priority_level = event_values.get('level', None) if isinstance(priority_level, py2to3.INTEGER_TYPES): event_values['level'] = '{0:s} ({1:d})'.format( self._PRIORITY_LEVELS.get(priority_level, 'UNKNOWN'), priority_level) # If no rights are assigned the value is 0xffffffff (-1). read_uid = event_values.get('read_uid', None) if read_uid == -1: event_values['read_uid'] = 'ALL' # If no rights are assigned the value is 0xffffffff (-1). read_gid = event_values.get('read_gid', None) if read_gid == -1: event_values['read_gid'] = 'ALL' # TODO: get the real name for the user of the group having the uid or gid. return self._ConditionalFormatMessages(event_values)
[ "def", "GetMessages", "(", "self", ",", "formatter_mediator", ",", "event", ")", ":", "if", "self", ".", "DATA_TYPE", "!=", "event", ".", "data_type", ":", "raise", "errors", ".", "WrongFormatter", "(", "'Unsupported data type: {0:s}.'", ".", "format", "(", "event", ".", "data_type", ")", ")", "event_values", "=", "event", ".", "CopyToDict", "(", ")", "priority_level", "=", "event_values", ".", "get", "(", "'level'", ",", "None", ")", "if", "isinstance", "(", "priority_level", ",", "py2to3", ".", "INTEGER_TYPES", ")", ":", "event_values", "[", "'level'", "]", "=", "'{0:s} ({1:d})'", ".", "format", "(", "self", ".", "_PRIORITY_LEVELS", ".", "get", "(", "priority_level", ",", "'UNKNOWN'", ")", ",", "priority_level", ")", "# If no rights are assigned the value is 0xffffffff (-1).", "read_uid", "=", "event_values", ".", "get", "(", "'read_uid'", ",", "None", ")", "if", "read_uid", "==", "-", "1", ":", "event_values", "[", "'read_uid'", "]", "=", "'ALL'", "# If no rights are assigned the value is 0xffffffff (-1).", "read_gid", "=", "event_values", ".", "get", "(", "'read_gid'", ",", "None", ")", "if", "read_gid", "==", "-", "1", ":", "event_values", "[", "'read_gid'", "]", "=", "'ALL'", "# TODO: get the real name for the user of the group having the uid or gid.", "return", "self", ".", "_ConditionalFormatMessages", "(", "event_values", ")" ]
37.131579
22.631579
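Why 0xffffffff reads as -1 in the checks above: the rights field is an all-ones 32-bit pattern, which comes out as -1 when interpreted as signed. A quick two's-complement check:

    import struct

    raw = b'\xff\xff\xff\xff'
    unsigned, = struct.unpack('<I', raw)   # 4294967295 == 0xffffffff
    signed, = struct.unpack('<i', raw)     # -1, the value the formatter tests for
    print(unsigned, signed)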
def create_relationship(self, relationship_form): """Creates a new ``Relationship``. arg: relationship_form (osid.relationship.RelationshipForm): the form for this ``Relationship`` return: (osid.relationship.Relationship) - the new ``Relationship`` raise: IllegalState - ``relationship_form`` already used in a create transaction raise: InvalidArgument - one or more of the form elements is invalid raise: NullArgument - ``relationship_form`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``relationship_form`` did not originate from ``get_relationship_form_for_create()`` *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceAdminSession.create_resource_template collection = JSONClientValidated('relationship', collection='Relationship', runtime=self._runtime) if not isinstance(relationship_form, ABCRelationshipForm): raise errors.InvalidArgument('argument type is not an RelationshipForm') if relationship_form.is_for_update(): raise errors.InvalidArgument('the RelationshipForm is for update only, not create') try: if self._forms[relationship_form.get_id().get_identifier()] == CREATED: raise errors.IllegalState('relationship_form already used in a create transaction') except KeyError: raise errors.Unsupported('relationship_form did not originate from this session') if not relationship_form.is_valid(): raise errors.InvalidArgument('one or more of the form elements is invalid') insert_result = collection.insert_one(relationship_form._my_map) self._forms[relationship_form.get_id().get_identifier()] = CREATED result = objects.Relationship( osid_object_map=collection.find_one({'_id': insert_result.inserted_id}), runtime=self._runtime, proxy=self._proxy) return result
[ "def", "create_relationship", "(", "self", ",", "relationship_form", ")", ":", "# Implemented from template for", "# osid.resource.ResourceAdminSession.create_resource_template", "collection", "=", "JSONClientValidated", "(", "'relationship'", ",", "collection", "=", "'Relationship'", ",", "runtime", "=", "self", ".", "_runtime", ")", "if", "not", "isinstance", "(", "relationship_form", ",", "ABCRelationshipForm", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", "'argument type is not an RelationshipForm'", ")", "if", "relationship_form", ".", "is_for_update", "(", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", "'the RelationshipForm is for update only, not create'", ")", "try", ":", "if", "self", ".", "_forms", "[", "relationship_form", ".", "get_id", "(", ")", ".", "get_identifier", "(", ")", "]", "==", "CREATED", ":", "raise", "errors", ".", "IllegalState", "(", "'relationship_form already used in a create transaction'", ")", "except", "KeyError", ":", "raise", "errors", ".", "Unsupported", "(", "'relationship_form did not originate from this session'", ")", "if", "not", "relationship_form", ".", "is_valid", "(", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", "'one or more of the form elements is invalid'", ")", "insert_result", "=", "collection", ".", "insert_one", "(", "relationship_form", ".", "_my_map", ")", "self", ".", "_forms", "[", "relationship_form", ".", "get_id", "(", ")", ".", "get_identifier", "(", ")", "]", "=", "CREATED", "result", "=", "objects", ".", "Relationship", "(", "osid_object_map", "=", "collection", ".", "find_one", "(", "{", "'_id'", ":", "insert_result", ".", "inserted_id", "}", ")", ",", "runtime", "=", "self", ".", "_runtime", ",", "proxy", "=", "self", ".", "_proxy", ")", "return", "result" ]
51.431818
24.454545
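The core storage step above is an insert-then-reload against MongoDB; a minimal sketch of that pattern, assuming a local MongoDB and pymongo (the database name and document here are made up):

    from pymongo import MongoClient

    coll = MongoClient()['demo']['Relationship']
    result = coll.insert_one({'displayName': {'text': 'example'}})
    doc = coll.find_one({'_id': result.inserted_id})   # reload the stored map
    print(doc['_id'])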
def get_trigger_data(value, mode=0): '''Returns 31bit trigger counter (mode=0), 31bit timestamp (mode=1), 15bit timestamp and 16bit trigger counter (mode=2) ''' if mode == 2: return np.right_shift(np.bitwise_and(value, 0x7FFF0000), 16), np.bitwise_and(value, 0x0000FFFF) else: return np.bitwise_and(value, 0x7FFFFFFF)
[ "def", "get_trigger_data", "(", "value", ",", "mode", "=", "0", ")", ":", "if", "mode", "==", "2", ":", "return", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x7FFF0000", ")", ",", "16", ")", ",", "np", ".", "bitwise_and", "(", "value", ",", "0x0000FFFF", ")", "else", ":", "return", "np", ".", "bitwise_and", "(", "value", ",", "0x7FFFFFFF", ")" ]
49.857143
34.714286
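A quick check of the mode=2 split on a made-up 32-bit word: bits 30-16 hold the timestamp, bits 15-0 the trigger counter:

    import numpy as np

    word = np.uint32((0x1234 << 16) | 0xABCD)   # pack timestamp and counter
    ts = np.right_shift(np.bitwise_and(word, 0x7FFF0000), 16)
    cnt = np.bitwise_and(word, 0x0000FFFF)
    print(hex(int(ts)), hex(int(cnt)))          # 0x1234 0xabcd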
def migrate(connection, dsn): """ Collects all migrations and applies any that were missed. Args: connection (sqlalchemy connection): """ all_migrations = _get_all_migrations() logger.debug('Collected migrations: {}'.format(all_migrations)) for version, modname in all_migrations: if _is_missed(connection, version) and version <= SCHEMA_VERSION: logger.info('Missed migration: {}. Migrating...'.format(version)) module = __import__(modname, fromlist='dummy') # run each migration under its own transaction. This allows us to apply valid migrations # and break on invalid. trans = connection.begin() try: module.Migration().migrate(connection) _update_version(connection, version) trans.commit() except Exception: trans.rollback() logger.error("Failed to migrate '{}' on {} ".format(version, dsn)) raise
[ "def", "migrate", "(", "connection", ",", "dsn", ")", ":", "all_migrations", "=", "_get_all_migrations", "(", ")", "logger", ".", "debug", "(", "'Collected migrations: {}'", ".", "format", "(", "all_migrations", ")", ")", "for", "version", ",", "modname", "in", "all_migrations", ":", "if", "_is_missed", "(", "connection", ",", "version", ")", "and", "version", "<=", "SCHEMA_VERSION", ":", "logger", ".", "info", "(", "'Missed migration: {} migration is missed. Migrating...'", ".", "format", "(", "version", ")", ")", "module", "=", "__import__", "(", "modname", ",", "fromlist", "=", "'dummy'", ")", "# run each migration under its own transaction. This allows us to apply valid migrations", "# and break on invalid.", "trans", "=", "connection", ".", "begin", "(", ")", "try", ":", "module", ".", "Migration", "(", ")", ".", "migrate", "(", "connection", ")", "_update_version", "(", "connection", ",", "version", ")", "trans", ".", "commit", "(", ")", "except", ":", "trans", ".", "rollback", "(", ")", "logger", ".", "error", "(", "\"Failed to migrate '{}' on {} \"", ".", "format", "(", "version", ",", "dsn", ")", ")", "raise" ]
38.423077
21.653846
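A minimal sketch of the per-step transaction pattern used above, assuming a SQLAlchemy engine; run_step and apply_step are hypothetical names:

    from sqlalchemy import create_engine, text

    def run_step(connection, apply_step):
        trans = connection.begin()
        try:
            apply_step(connection)
            trans.commit()     # persist only if the whole step succeeded
        except Exception:
            trans.rollback()   # leave the schema untouched on failure
            raise

    engine = create_engine('sqlite://')
    with engine.connect() as conn:
        run_step(conn, lambda c: c.execute(text('CREATE TABLE t (id INTEGER)')))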
def UNIFAC(T, xs, chemgroups, cached=None, subgroup_data=None, interaction_data=None, modified=False): r'''Calculates activity coefficients using the UNIFAC model (optionally modified), given a mixture's temperature, liquid mole fractions, and optionally the subgroup data and interaction parameter data of your choice. The default is to use the original UNIFAC model, with the latest parameters published by DDBST. The model supports modified forms (Dortmund, NIST) when the `modified` parameter is True. Parameters ---------- T : float Temperature of the system, [K] xs : list[float] Mole fractions of all species in the system in the liquid phase, [-] chemgroups : list[dict] List of dictionaries of subgroup IDs and their counts for all species in the mixture, [-] cached : tuple, optional (rs, qs, group_counts) as computed by a previous call with the same `chemgroups`; skips recomputing the group-contribution sums, [-] subgroup_data : dict[UNIFAC_subgroup] UNIFAC subgroup data; available dictionaries in this module are UFSG (original), DOUFSG (Dortmund), or NISTUFSG ([4]_). interaction_data : dict[dict[tuple(a_mn, b_mn, c_mn)]] UNIFAC interaction parameter data; available dictionaries in this module are UFIP (original), DOUFIP2006 (Dortmund parameters as published by 2006), DOUFIP2016 (Dortmund parameters as published by 2016), and NISTUFIP ([4]_). modified : bool True if using the modified form and temperature dependence, otherwise False. Returns ------- gammas : list[float] Activity coefficients of all species in the mixture, [-] Notes ----- The actual implementation of UNIFAC is formulated slightly differently than the formulas above for computational efficiency. DDBST switched to using the more efficient forms in their publication, but the numerical results are identical. The model is as follows: .. math:: \ln \gamma_i = \ln \gamma_i^c + \ln \gamma_i^r **Combinatorial component** .. math:: \ln \gamma_i^c = \ln \frac{\phi_i}{x_i} + \frac{z}{2} q_i \ln\frac{\theta_i}{\phi_i} + L_i - \frac{\phi_i}{x_i} \sum_{j=1}^{n} x_j L_j \theta_i = \frac{x_i q_i}{\sum_{j=1}^{n} x_j q_j} \phi_i = \frac{x_i r_i}{\sum_{j=1}^{n} x_j r_j} L_i = 5(r_i - q_i)-(r_i-1) **Residual component** .. math:: \ln \gamma_i^r = \sum_{k}^n \nu_k^{(i)} \left[ \ln \Gamma_k - \ln \Gamma_k^{(i)} \right] \ln \Gamma_k = Q_k \left[1 - \ln \sum_m \Theta_m \Psi_{mk} - \sum_m \frac{\Theta_m \Psi_{km}}{\sum_n \Theta_n \Psi_{nm}}\right] \Theta_m = \frac{Q_m X_m}{\sum_{n} Q_n X_n} X_m = \frac{ \sum_j \nu^j_m x_j}{\sum_j \sum_n \nu_n^j x_j} **R and Q** .. math:: r_i = \sum_{k=1}^{n} \nu_k R_k q_i = \sum_{k=1}^{n}\nu_k Q_k The newer forms of UNIFAC (Dortmund, NIST) calculate the combinatorial part slightly differently: .. math:: \ln \gamma_i^c = 1 - {V'}_i + \ln({V'}_i) - 5q_i \left(1 - \frac{V_i}{F_i}+ \ln\left(\frac{V_i}{F_i}\right)\right) V'_i = \frac{r_i^{3/4}}{\sum_j r_j^{3/4}x_j} This is clearer when looking at the full rearranged form as in [3]_. Examples -------- >>> UNIFAC(T=333.15, xs=[0.5, 0.5], chemgroups=[{1:2, 2:4}, {1:1, 2:1, 18:1}]) [1.4276025835624173, 1.3646545010104225] >>> UNIFAC(373.15, [0.2, 0.3, 0.2, 0.2], ... [{9:6}, {78:6}, {1:1, 18:1}, {1:1, 2:1, 14:1}], ... subgroup_data=DOUFSG, interaction_data=DOUFIP2006, modified=True) [1.186431113706829, 1.440280133911197, 1.204479833499608, 1.9720706090299824] References ---------- .. [1] Gmehling, Jurgen. Chemical Thermodynamics: For Process Simulation. Weinheim, Germany: Wiley-VCH, 2012. .. [2] Fredenslund, Aage, Russell L. Jones, and John M. Prausnitz. "Group Contribution Estimation of Activity Coefficients in Nonideal Liquid Mixtures." AIChE Journal 21, no. 6 (November 1, 1975): 1086-99. doi:10.1002/aic.690210607. .. [3] Jakob, Antje, Hans Grensemann, Jürgen Lohmann, and Jürgen Gmehling. "Further Development of Modified UNIFAC (Dortmund): Revision and Extension 5." Industrial & Engineering Chemistry Research 45, no. 23 (November 1, 2006): 7924-33. doi:10.1021/ie060355c. .. [4] Kang, Jeong Won, Vladimir Diky, and Michael Frenkel. "New Modified UNIFAC Parameters Using Critically Evaluated Phase Equilibrium Data." Fluid Phase Equilibria 388 (February 25, 2015): 128-41. doi:10.1016/j.fluid.2014.12.042. ''' cmps = range(len(xs)) if subgroup_data is None: subgroups = UFSG else: subgroups = subgroup_data if interaction_data is None: interactions = UFIP else: interactions = interaction_data # Obtain r and q values using the subgroup values if not cached: rs = [] qs = [] for groups in chemgroups: ri = 0. qi = 0. for group, count in groups.items(): ri += subgroups[group].R*count qi += subgroups[group].Q*count rs.append(ri) qs.append(qi) group_counts = {} for groups in chemgroups: for group, count in groups.items(): if group in group_counts: group_counts[group] += count else: group_counts[group] = count else: rs, qs, group_counts = cached # Sum the denominator for calculating Xs group_sum = sum(count*xs[i] for i in cmps for count in chemgroups[i].values()) # Calculate each numerator for calculating Xs group_count_xs = {} for group in group_counts: tot_numerator = sum(chemgroups[i][group]*xs[i] for i in cmps if group in chemgroups[i]) group_count_xs[group] = tot_numerator/group_sum rsxs = sum([rs[i]*xs[i] for i in cmps]) Vis = [rs[i]/rsxs for i in cmps] qsxs = sum([qs[i]*xs[i] for i in cmps]) Fis = [qs[i]/qsxs for i in cmps] if modified: rsxs2 = sum([rs[i]**0.75*xs[i] for i in cmps]) Vis2 = [rs[i]**0.75/rsxs2 for i in cmps] loggammacs = [1. - Vis2[i] + log(Vis2[i]) - 5.*qs[i]*(1. - Vis[i]/Fis[i] + log(Vis[i]/Fis[i])) for i in cmps] else: loggammacs = [1. - Vis[i] + log(Vis[i]) - 5.*qs[i]*(1. - Vis[i]/Fis[i] + log(Vis[i]/Fis[i])) for i in cmps] Q_sum_term = sum([subgroups[group].Q*group_count_xs[group] for group in group_counts]) area_fractions = {group: subgroups[group].Q*group_count_xs[group]/Q_sum_term for group in group_counts.keys()} UNIFAC_psis = {k: {m:(UNIFAC_psi(T, m, k, subgroups, interactions, modified=modified)) for m in group_counts} for k in group_counts} loggamma_groups = {} for k in group_counts: sum1, sum2 = 0., 0. for m in group_counts: sum1 += area_fractions[m]*UNIFAC_psis[k][m] sum3 = sum(area_fractions[n]*UNIFAC_psis[m][n] for n in group_counts) sum2 -= area_fractions[m]*UNIFAC_psis[m][k]/sum3 loggamma_groups[k] = subgroups[k].Q*(1. - log(sum1) + sum2) loggammars = [] for groups in chemgroups: chem_loggamma_groups = {} chem_group_sum = sum(groups.values()) chem_group_count_xs = {group: count/chem_group_sum for group, count in groups.items()} Q_sum_term = sum([subgroups[group].Q*chem_group_count_xs[group] for group in groups]) chem_area_fractions = {group: subgroups[group].Q*chem_group_count_xs[group]/Q_sum_term for group in groups.keys()} for k in groups: sum1, sum2 = 0., 0. for m in groups: sum1 += chem_area_fractions[m]*UNIFAC_psis[k][m] sum3 = sum(chem_area_fractions[n]*UNIFAC_psis[m][n] for n in groups) sum2 -= chem_area_fractions[m]*UNIFAC_psis[m][k]/sum3 chem_loggamma_groups[k] = subgroups[k].Q*(1. - log(sum1) + sum2) tot = sum([count*(loggamma_groups[group] - chem_loggamma_groups[group]) for group, count in groups.items()]) loggammars.append(tot) return [exp(loggammacs[i]+loggammars[i]) for i in cmps]
[ "def", "UNIFAC", "(", "T", ",", "xs", ",", "chemgroups", ",", "cached", "=", "None", ",", "subgroup_data", "=", "None", ",", "interaction_data", "=", "None", ",", "modified", "=", "False", ")", ":", "cmps", "=", "range", "(", "len", "(", "xs", ")", ")", "if", "subgroup_data", "is", "None", ":", "subgroups", "=", "UFSG", "else", ":", "subgroups", "=", "subgroup_data", "if", "interaction_data", "is", "None", ":", "interactions", "=", "UFIP", "else", ":", "interactions", "=", "interaction_data", "# Obtain r and q values using the subgroup values", "if", "not", "cached", ":", "rs", "=", "[", "]", "qs", "=", "[", "]", "for", "groups", "in", "chemgroups", ":", "ri", "=", "0.", "qi", "=", "0.", "for", "group", ",", "count", "in", "groups", ".", "items", "(", ")", ":", "ri", "+=", "subgroups", "[", "group", "]", ".", "R", "*", "count", "qi", "+=", "subgroups", "[", "group", "]", ".", "Q", "*", "count", "rs", ".", "append", "(", "ri", ")", "qs", ".", "append", "(", "qi", ")", "group_counts", "=", "{", "}", "for", "groups", "in", "chemgroups", ":", "for", "group", ",", "count", "in", "groups", ".", "items", "(", ")", ":", "if", "group", "in", "group_counts", ":", "group_counts", "[", "group", "]", "+=", "count", "else", ":", "group_counts", "[", "group", "]", "=", "count", "else", ":", "rs", ",", "qs", ",", "group_counts", "=", "cached", "# Sum the denominator for calculating Xs", "group_sum", "=", "sum", "(", "count", "*", "xs", "[", "i", "]", "for", "i", "in", "cmps", "for", "count", "in", "chemgroups", "[", "i", "]", ".", "values", "(", ")", ")", "# Caclulate each numerator for calculating Xs", "group_count_xs", "=", "{", "}", "for", "group", "in", "group_counts", ":", "tot_numerator", "=", "sum", "(", "chemgroups", "[", "i", "]", "[", "group", "]", "*", "xs", "[", "i", "]", "for", "i", "in", "cmps", "if", "group", "in", "chemgroups", "[", "i", "]", ")", "group_count_xs", "[", "group", "]", "=", "tot_numerator", "/", "group_sum", "rsxs", "=", "sum", "(", "[", "rs", "[", "i", "]", "*", "xs", "[", "i", "]", "for", "i", "in", "cmps", "]", ")", "Vis", "=", "[", "rs", "[", "i", "]", "/", "rsxs", "for", "i", "in", "cmps", "]", "qsxs", "=", "sum", "(", "[", "qs", "[", "i", "]", "*", "xs", "[", "i", "]", "for", "i", "in", "cmps", "]", ")", "Fis", "=", "[", "qs", "[", "i", "]", "/", "qsxs", "for", "i", "in", "cmps", "]", "if", "modified", ":", "rsxs2", "=", "sum", "(", "[", "rs", "[", "i", "]", "**", "0.75", "*", "xs", "[", "i", "]", "for", "i", "in", "cmps", "]", ")", "Vis2", "=", "[", "rs", "[", "i", "]", "**", "0.75", "/", "rsxs2", "for", "i", "in", "cmps", "]", "loggammacs", "=", "[", "1.", "-", "Vis2", "[", "i", "]", "+", "log", "(", "Vis2", "[", "i", "]", ")", "-", "5.", "*", "qs", "[", "i", "]", "*", "(", "1.", "-", "Vis", "[", "i", "]", "/", "Fis", "[", "i", "]", "+", "log", "(", "Vis", "[", "i", "]", "/", "Fis", "[", "i", "]", ")", ")", "for", "i", "in", "cmps", "]", "else", ":", "loggammacs", "=", "[", "1.", "-", "Vis", "[", "i", "]", "+", "log", "(", "Vis", "[", "i", "]", ")", "-", "5.", "*", "qs", "[", "i", "]", "*", "(", "1.", "-", "Vis", "[", "i", "]", "/", "Fis", "[", "i", "]", "+", "log", "(", "Vis", "[", "i", "]", "/", "Fis", "[", "i", "]", ")", ")", "for", "i", "in", "cmps", "]", "Q_sum_term", "=", "sum", "(", "[", "subgroups", "[", "group", "]", ".", "Q", "*", "group_count_xs", "[", "group", "]", "for", "group", "in", "group_counts", "]", ")", "area_fractions", "=", "{", "group", ":", "subgroups", "[", "group", "]", ".", "Q", "*", "group_count_xs", "[", "group", "]", "/", 
"Q_sum_term", "for", "group", "in", "group_counts", ".", "keys", "(", ")", "}", "UNIFAC_psis", "=", "{", "k", ":", "{", "m", ":", "(", "UNIFAC_psi", "(", "T", ",", "m", ",", "k", ",", "subgroups", ",", "interactions", ",", "modified", "=", "modified", ")", ")", "for", "m", "in", "group_counts", "}", "for", "k", "in", "group_counts", "}", "loggamma_groups", "=", "{", "}", "for", "k", "in", "group_counts", ":", "sum1", ",", "sum2", "=", "0.", ",", "0.", "for", "m", "in", "group_counts", ":", "sum1", "+=", "area_fractions", "[", "m", "]", "*", "UNIFAC_psis", "[", "k", "]", "[", "m", "]", "sum3", "=", "sum", "(", "area_fractions", "[", "n", "]", "*", "UNIFAC_psis", "[", "m", "]", "[", "n", "]", "for", "n", "in", "group_counts", ")", "sum2", "-=", "area_fractions", "[", "m", "]", "*", "UNIFAC_psis", "[", "m", "]", "[", "k", "]", "/", "sum3", "loggamma_groups", "[", "k", "]", "=", "subgroups", "[", "k", "]", ".", "Q", "*", "(", "1.", "-", "log", "(", "sum1", ")", "+", "sum2", ")", "loggammars", "=", "[", "]", "for", "groups", "in", "chemgroups", ":", "chem_loggamma_groups", "=", "{", "}", "chem_group_sum", "=", "sum", "(", "groups", ".", "values", "(", ")", ")", "chem_group_count_xs", "=", "{", "group", ":", "count", "/", "chem_group_sum", "for", "group", ",", "count", "in", "groups", ".", "items", "(", ")", "}", "Q_sum_term", "=", "sum", "(", "[", "subgroups", "[", "group", "]", ".", "Q", "*", "chem_group_count_xs", "[", "group", "]", "for", "group", "in", "groups", "]", ")", "chem_area_fractions", "=", "{", "group", ":", "subgroups", "[", "group", "]", ".", "Q", "*", "chem_group_count_xs", "[", "group", "]", "/", "Q_sum_term", "for", "group", "in", "groups", ".", "keys", "(", ")", "}", "for", "k", "in", "groups", ":", "sum1", ",", "sum2", "=", "0.", ",", "0.", "for", "m", "in", "groups", ":", "sum1", "+=", "chem_area_fractions", "[", "m", "]", "*", "UNIFAC_psis", "[", "k", "]", "[", "m", "]", "sum3", "=", "sum", "(", "chem_area_fractions", "[", "n", "]", "*", "UNIFAC_psis", "[", "m", "]", "[", "n", "]", "for", "n", "in", "groups", ")", "sum2", "-=", "chem_area_fractions", "[", "m", "]", "*", "UNIFAC_psis", "[", "m", "]", "[", "k", "]", "/", "sum3", "chem_loggamma_groups", "[", "k", "]", "=", "subgroups", "[", "k", "]", ".", "Q", "*", "(", "1.", "-", "log", "(", "sum1", ")", "+", "sum2", ")", "tot", "=", "sum", "(", "[", "count", "*", "(", "loggamma_groups", "[", "group", "]", "-", "chem_loggamma_groups", "[", "group", "]", ")", "for", "group", ",", "count", "in", "groups", ".", "items", "(", ")", "]", ")", "loggammars", ".", "append", "(", "tot", ")", "return", "[", "exp", "(", "loggammacs", "[", "i", "]", "+", "loggammars", "[", "i", "]", ")", "for", "i", "in", "cmps", "]" ]
38.638889
24.509259
def read_pdb(self, path): """Read PDB files (unzipped or gzipped) from local drive. Parameters ---------- path : str Path to the PDB file in .pdb format or gzipped format (.pdb.gz). Returns --------- self """ self.pdb_path, self.pdb_text = self._read_pdb(path=path) self._df = self._construct_df(pdb_lines=self.pdb_text.splitlines(True)) self.header, self.code = self._parse_header_code() return self
[ "def", "read_pdb", "(", "self", ",", "path", ")", ":", "self", ".", "pdb_path", ",", "self", ".", "pdb_text", "=", "self", ".", "_read_pdb", "(", "path", "=", "path", ")", "self", ".", "_df", "=", "self", ".", "_construct_df", "(", "pdb_lines", "=", "self", ".", "pdb_text", ".", "splitlines", "(", "True", ")", ")", "self", ".", "header", ",", "self", ".", "code", "=", "self", ".", "_parse_header_code", "(", ")", "return", "self" ]
29.058824
24.411765
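This method matches the biopandas-style PandasPdb API; a usage sketch under that assumption, with '1abc.pdb' as a hypothetical local file:

    from biopandas.pdb import PandasPdb

    ppdb = PandasPdb().read_pdb('1abc.pdb')   # returns self, so calls chain
    print(ppdb.code)                          # PDB code parsed from the header
    print(ppdb.df['ATOM'].head())             # coordinate records as a DataFrame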