Dataset schema (the `text` field of each row, a single Python function, is reproduced below):

| text | code_tokens | avg_line_len | score |
|---|---|---|---|
| stringlengths 94 to 87.1k | sequence | float64 7.91 to 668 | sequence |
```python
def _validate_no_null_values(self):
    """
    Loads the tables from the gtfs object and counts the number of rows that have null values in
    fields that should not be null. Stores the number of null rows in warnings_container.
    """
    for table in DB_TABLE_NAMES:
        null_not_ok_warning = "Null values in must-have columns in table {table}".format(table=table)
        null_warn_warning = "Null values in good-to-have columns in table {table}".format(table=table)
        null_not_ok_fields = DB_TABLE_NAME_TO_FIELDS_WHERE_NULL_NOT_OK[table]
        null_warn_fields = DB_TABLE_NAME_TO_FIELDS_WHERE_NULL_OK_BUT_WARN[table]
        # CW, TODO: make this validation source by source
        df = self.gtfs.get_table(table)
        for warning, fields in zip([null_not_ok_warning, null_warn_warning], [null_not_ok_fields, null_warn_fields]):
            null_unwanted_df = df[fields]
            rows_having_null = null_unwanted_df.isnull().any(axis=1)
            if sum(rows_having_null) > 0:
                rows_having_unwanted_null = df[rows_having_null.values]
                self.warnings_container.add_warning(warning, rows_having_unwanted_null, len(rows_having_unwanted_null))
```
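The core idiom here, selecting the rows where any of a set of required columns is null, works on any pandas DataFrame. A minimal standalone sketch with toy data (not real GTFS tables):

```python
import pandas as pd

# Toy frame standing in for a GTFS table (hypothetical data).
df = pd.DataFrame({
    "stop_id": ["s1", "s2", None],
    "stop_name": ["Alpha", None, "Gamma"],
    "extra": [1, 2, 3],
})
required = ["stop_id", "stop_name"]

# Boolean mask: True for any row with a null in a required column.
mask = df[required].isnull().any(axis=1)
print(df[mask.values])  # the two offending rows
```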
"def",
"_validate_no_null_values",
"(",
"self",
")",
":",
"for",
"table",
"in",
"DB_TABLE_NAMES",
":",
"null_not_ok_warning",
"=",
"\"Null values in must-have columns in table {table}\"",
".",
"format",
"(",
"table",
"=",
"table",
")",
"null_warn_warning",
"=",
"\"Null values in good-to-have columns in table {table}\"",
".",
"format",
"(",
"table",
"=",
"table",
")",
"null_not_ok_fields",
"=",
"DB_TABLE_NAME_TO_FIELDS_WHERE_NULL_NOT_OK",
"[",
"table",
"]",
"null_warn_fields",
"=",
"DB_TABLE_NAME_TO_FIELDS_WHERE_NULL_OK_BUT_WARN",
"[",
"table",
"]",
"# CW, TODO: make this validation source by source",
"df",
"=",
"self",
".",
"gtfs",
".",
"get_table",
"(",
"table",
")",
"for",
"warning",
",",
"fields",
"in",
"zip",
"(",
"[",
"null_not_ok_warning",
",",
"null_warn_warning",
"]",
",",
"[",
"null_not_ok_fields",
",",
"null_warn_fields",
"]",
")",
":",
"null_unwanted_df",
"=",
"df",
"[",
"fields",
"]",
"rows_having_null",
"=",
"null_unwanted_df",
".",
"isnull",
"(",
")",
".",
"any",
"(",
"1",
")",
"if",
"sum",
"(",
"rows_having_null",
")",
">",
"0",
":",
"rows_having_unwanted_null",
"=",
"df",
"[",
"rows_having_null",
".",
"values",
"]",
"self",
".",
"warnings_container",
".",
"add_warning",
"(",
"warning",
",",
"rows_having_unwanted_null",
",",
"len",
"(",
"rows_having_unwanted_null",
")",
")"
] | 62.05 | [
0.02857142857142857,
0.18181818181818182,
0.03,
0.03260869565217391,
0.18181818181818182,
0.05555555555555555,
0.02857142857142857,
0.02830188679245283,
0.037037037037037035,
0.03571428571428571,
0,
0.03278688524590164,
0.046511627906976744,
0,
0.024793388429752067,
0.044444444444444446,
0.029850746268656716,
0.044444444444444446,
0.02666666666666667,
0.024390243902439025
] |
```python
def dict2row(d, model, rel=None, exclude=None, exclude_pk=None,
             exclude_underscore=None, only=None, fk_suffix=None):
    """
    Recursively walk dict attributes to serialize ones into a row.

    :param d: dict that represents a serialized row
    :param model: class nested from the declarative base class
    :param rel: dict of key (relationship name) - value (class) pairs
    :param exclude: set of attribute names to exclude
    :param exclude_pk: are foreign keys (e.g. fk_name_id) excluded
    :param exclude_underscore: are private and protected attributes excluded
    :param only: set of attribute names to include
    :param fk_suffix: str that represents a foreign key suffix
    :return: instance of the declarative base class
    """
    if not isinstance(d, dict):
        raise TypeError('Source must be instance of dict, got %s instead' %
                        type(d).__name__)
    row = model()
    mapper = get_mapper(row)
    if rel is None:
        rel = getattr(row, ATTR_REL, DEFAULT_REL)
    if exclude is None:
        exclude = getattr(row, ATTR_EXCLUDE, DEFAULT_EXCLUDE)
    if exclude_pk is None:
        exclude_pk = getattr(row, ATTR_EXCLUDE_PK, DEFAULT_EXCLUDE_PK)
    if exclude_underscore is None:
        exclude_underscore = getattr(row, ATTR_EXCLUDE_UNDERSCORE,
                                     DEFAULT_EXCLUDE_UNDERSCORE)
    if only is None:
        only = getattr(row, ATTR_ONLY, DEFAULT_ONLY)
    if fk_suffix is None:
        fk_suffix = getattr(row, ATTR_FK_SUFFIX, DEFAULT_FK_SUFFIX)
    for c in list(mapper.columns.keys()) + list(mapper.synonyms.keys()):
        if c not in d or c in exclude or \
                check_exclude_pk(c, exclude_pk, fk_suffix=fk_suffix) or \
                check_exclude_underscore(c, exclude_underscore) or \
                check_only(c, only):
            continue
        setattr(row, c, d[c])
    for r in mapper.relationships.keys():
        if r not in d or r not in rel or check_only(r, only):
            continue
        kwargs = dict(rel=rel, exclude=exclude, exclude_pk=exclude_pk,
                      exclude_underscore=exclude_underscore, only=only,
                      fk_suffix=fk_suffix)
        if isinstance(d[r], list):
            setattr(row, r, collections.InstrumentedList())
            for i in d[r]:
                getattr(row, r).append(dict2row(i, rel[r], **kwargs))
        else:
            if not exclude_pk:
                rpk = d[r].get('id') if isinstance(d[r], dict) else None
                setattr(row, r + fk_suffix, rpk)
            setattr(row, r, dict2row(d[r], rel[r], **kwargs))
    return row
```
"def",
"dict2row",
"(",
"d",
",",
"model",
",",
"rel",
"=",
"None",
",",
"exclude",
"=",
"None",
",",
"exclude_pk",
"=",
"None",
",",
"exclude_underscore",
"=",
"None",
",",
"only",
"=",
"None",
",",
"fk_suffix",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"d",
",",
"dict",
")",
":",
"raise",
"TypeError",
"(",
"'Source must be instance of dict, got %s instead'",
"%",
"type",
"(",
"d",
")",
".",
"__name__",
")",
"row",
"=",
"model",
"(",
")",
"mapper",
"=",
"get_mapper",
"(",
"row",
")",
"if",
"rel",
"is",
"None",
":",
"rel",
"=",
"getattr",
"(",
"row",
",",
"ATTR_REL",
",",
"DEFAULT_REL",
")",
"if",
"exclude",
"is",
"None",
":",
"exclude",
"=",
"getattr",
"(",
"row",
",",
"ATTR_EXCLUDE",
",",
"DEFAULT_EXCLUDE",
")",
"if",
"exclude_pk",
"is",
"None",
":",
"exclude_pk",
"=",
"getattr",
"(",
"row",
",",
"ATTR_EXCLUDE_PK",
",",
"DEFAULT_EXCLUDE_PK",
")",
"if",
"exclude_underscore",
"is",
"None",
":",
"exclude_underscore",
"=",
"getattr",
"(",
"row",
",",
"ATTR_EXCLUDE_UNDERSCORE",
",",
"DEFAULT_EXCLUDE_UNDERSCORE",
")",
"if",
"only",
"is",
"None",
":",
"only",
"=",
"getattr",
"(",
"row",
",",
"ATTR_ONLY",
",",
"DEFAULT_ONLY",
")",
"if",
"fk_suffix",
"is",
"None",
":",
"fk_suffix",
"=",
"getattr",
"(",
"row",
",",
"ATTR_FK_SUFFIX",
",",
"DEFAULT_FK_SUFFIX",
")",
"for",
"c",
"in",
"mapper",
".",
"columns",
".",
"keys",
"(",
")",
"+",
"mapper",
".",
"synonyms",
".",
"keys",
"(",
")",
":",
"if",
"c",
"not",
"in",
"d",
"or",
"c",
"in",
"exclude",
"or",
"check_exclude_pk",
"(",
"c",
",",
"exclude_pk",
",",
"fk_suffix",
"=",
"fk_suffix",
")",
"or",
"check_exclude_underscore",
"(",
"c",
",",
"exclude_underscore",
")",
"or",
"check_only",
"(",
"c",
",",
"only",
")",
":",
"continue",
"setattr",
"(",
"row",
",",
"c",
",",
"d",
"[",
"c",
"]",
")",
"for",
"r",
"in",
"mapper",
".",
"relationships",
".",
"keys",
"(",
")",
":",
"if",
"r",
"not",
"in",
"d",
"or",
"r",
"not",
"in",
"rel",
"or",
"check_only",
"(",
"r",
",",
"only",
")",
":",
"continue",
"kwargs",
"=",
"dict",
"(",
"rel",
"=",
"rel",
",",
"exclude",
"=",
"exclude",
",",
"exclude_pk",
"=",
"exclude_pk",
",",
"exclude_underscore",
"=",
"exclude_underscore",
",",
"only",
"=",
"only",
",",
"fk_suffix",
"=",
"fk_suffix",
")",
"if",
"isinstance",
"(",
"d",
"[",
"r",
"]",
",",
"list",
")",
":",
"setattr",
"(",
"row",
",",
"r",
",",
"collections",
".",
"InstrumentedList",
"(",
")",
")",
"for",
"i",
"in",
"d",
"[",
"r",
"]",
":",
"getattr",
"(",
"row",
",",
"r",
")",
".",
"append",
"(",
"dict2row",
"(",
"i",
",",
"rel",
"[",
"r",
"]",
",",
"*",
"*",
"kwargs",
")",
")",
"else",
":",
"if",
"not",
"exclude_pk",
":",
"rpk",
"=",
"d",
"[",
"r",
"]",
".",
"get",
"(",
"'id'",
")",
"if",
"isinstance",
"(",
"d",
"[",
"r",
"]",
",",
"dict",
")",
"else",
"None",
"setattr",
"(",
"row",
",",
"r",
"+",
"fk_suffix",
",",
"rpk",
")",
"setattr",
"(",
"row",
",",
"r",
",",
"dict2row",
"(",
"d",
"[",
"r",
"]",
",",
"rel",
"[",
"r",
"]",
",",
"*",
"*",
"kwargs",
")",
")",
"return",
"row"
] | 44.37931 | [
0.031746031746031744,
0.1076923076923077,
0.2857142857142857,
0.030303030303030304,
0,
0.06,
0.04838709677419355,
0.08823529411764706,
0.05555555555555555,
0.06060606060606061,
0.039473684210526314,
0.058823529411764705,
0.04918032786885246,
0,
0.058823529411764705,
0.2857142857142857,
0.06451612903225806,
0.04,
0.07317073170731707,
0.11764705882352941,
0.07142857142857142,
0.10526315789473684,
0.04081632653061224,
0.08695652173913043,
0.03278688524590164,
0.07692307692307693,
0.02857142857142857,
0.058823529411764705,
0.045454545454545456,
0.0625,
0.1,
0.038461538461538464,
0.08,
0.029850746268656716,
0.03333333333333333,
0.047619047619047616,
0.0273972602739726,
0.029411764705882353,
0.05555555555555555,
0.1,
0.06896551724137931,
0.04878048780487805,
0.03278688524590164,
0.1,
0.04285714285714286,
0.07042253521126761,
0.11904761904761904,
0.058823529411764705,
0.03389830508474576,
0.07692307692307693,
0.028985507246376812,
0.15384615384615385,
0.06666666666666667,
0.027777777777777776,
0.041666666666666664,
0.03278688524590164,
0,
0.14285714285714285
] |
```python
def initialized(func):
    """ decorator for testing if a class has been initialized
    prior to calling any attribute """
    def wrapper(self, *args, **kwargs):
        """ internal wrapper function """
        if not self.__is_initialized__:
            return EmptyDot()
        return func(self, *args, **kwargs)
    return wrapper
```
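A quick way to see the guard in action. `EmptyDot` is defined elsewhere in the source, so a trivial stand-in is used here:

```python
class EmptyDot:
    # Stand-in for the real EmptyDot defined elsewhere in the source.
    pass

class Config:
    __is_initialized__ = False

    @initialized
    def lookup(self, key):
        return key.upper()

c = Config()
print(type(c.lookup("x")).__name__)  # EmptyDot: guard short-circuits
c.__is_initialized__ = True
print(c.lookup("x"))                 # X
```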
"def",
"initialized",
"(",
"func",
")",
":",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\" internal wrapper function \"\"\"",
"if",
"not",
"self",
".",
"__is_initialized__",
":",
"return",
"EmptyDot",
"(",
")",
"return",
"func",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper"
] | 33.3 | [
0.045454545454545456,
0.03278688524590164,
0.07142857142857142,
0,
0.05128205128205128,
0.04878048780487805,
0.05128205128205128,
0.06896551724137931,
0.047619047619047616,
0.1111111111111111
] |
```python
def get_lists(self, **query_params):
    '''
    Get the lists attached to this board. Returns a list of List objects.

    Returns:
        list(List): The lists attached to this board
    '''
    lists = self.get_lists_json(self.base_uri, query_params=query_params)
    lists_list = []
    for list_json in lists:
        lists_list.append(self.create_list(list_json))
    return lists_list
```
"def",
"get_lists",
"(",
"self",
",",
"*",
"*",
"query_params",
")",
":",
"lists",
"=",
"self",
".",
"get_lists_json",
"(",
"self",
".",
"base_uri",
",",
"query_params",
"=",
"query_params",
")",
"lists_list",
"=",
"[",
"]",
"for",
"list_json",
"in",
"lists",
":",
"lists_list",
".",
"append",
"(",
"self",
".",
"create_list",
"(",
"list_json",
")",
")",
"return",
"lists_list"
] | 30.071429 | [
0.027777777777777776,
0.18181818181818182,
0.025974025974025976,
0,
0.125,
0.03571428571428571,
0.18181818181818182,
0.025974025974025976,
0,
0.08695652173913043,
0.06451612903225806,
0.034482758620689655,
0,
0.08
] |
```python
def font(self):
    """
    The |Font| object that provides access to the text properties for
    these data labels, such as bold, italic, etc.
    """
    defRPr = self._element.defRPr
    font = Font(defRPr)
    return font
```
"def",
"font",
"(",
"self",
")",
":",
"defRPr",
"=",
"self",
".",
"_element",
".",
"defRPr",
"font",
"=",
"Font",
"(",
"defRPr",
")",
"return",
"font"
] | 30.75 | [
0.06666666666666667,
0.18181818181818182,
0.0547945205479452,
0.03773584905660377,
0.18181818181818182,
0.05405405405405406,
0.07407407407407407,
0.10526315789473684
] |
```python
def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer):
    """
    Split actions into chunks by number or size, serialize them into strings in
    the process.
    """
    bulk_actions, bulk_data = [], []
    size, action_count = 0, 0
    for action, data in actions:
        raw_data, raw_action = data, action
        action = serializer.dumps(action)
        # +1 to account for the trailing new line character
        cur_size = len(action.encode("utf-8")) + 1
        if data is not None:
            data = serializer.dumps(data)
            cur_size += len(data.encode("utf-8")) + 1
        # full chunk, send it and start a new one
        if bulk_actions and (
            size + cur_size > max_chunk_bytes or action_count == chunk_size
        ):
            yield bulk_data, bulk_actions
            bulk_actions, bulk_data = [], []
            size, action_count = 0, 0
        bulk_actions.append(action)
        if data is not None:
            bulk_actions.append(data)
            bulk_data.append((raw_action, raw_data))
        else:
            bulk_data.append((raw_action,))
        size += cur_size
        action_count += 1
    if bulk_actions:
        yield bulk_data, bulk_actions
```
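The generator only requires a serializer with a `.dumps()` method returning a string, so it can be exercised standalone with a small JSON stand-in:

```python
import json

class JSONSerializer:
    # Minimal stand-in: the helper only calls serializer.dumps(obj).
    def dumps(self, obj):
        return json.dumps(obj)

actions = [({"index": {"_id": i}}, {"value": i}) for i in range(5)]
for bulk_data, bulk_actions in _chunk_actions(
        actions, chunk_size=2, max_chunk_bytes=10 * 2 ** 20,
        serializer=JSONSerializer()):
    print(len(bulk_data), "actions,", len(bulk_actions), "serialized lines")
# 2 actions, 4 serialized lines   (twice)
# 1 actions, 2 serialized lines   (final partial chunk)
```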
"def",
"_chunk_actions",
"(",
"actions",
",",
"chunk_size",
",",
"max_chunk_bytes",
",",
"serializer",
")",
":",
"bulk_actions",
",",
"bulk_data",
"=",
"[",
"]",
",",
"[",
"]",
"size",
",",
"action_count",
"=",
"0",
",",
"0",
"for",
"action",
",",
"data",
"in",
"actions",
":",
"raw_data",
",",
"raw_action",
"=",
"data",
",",
"action",
"action",
"=",
"serializer",
".",
"dumps",
"(",
"action",
")",
"# +1 to account for the trailing new line character",
"cur_size",
"=",
"len",
"(",
"action",
".",
"encode",
"(",
"\"utf-8\"",
")",
")",
"+",
"1",
"if",
"data",
"is",
"not",
"None",
":",
"data",
"=",
"serializer",
".",
"dumps",
"(",
"data",
")",
"cur_size",
"+=",
"len",
"(",
"data",
".",
"encode",
"(",
"\"utf-8\"",
")",
")",
"+",
"1",
"# full chunk, send it and start a new one",
"if",
"bulk_actions",
"and",
"(",
"size",
"+",
"cur_size",
">",
"max_chunk_bytes",
"or",
"action_count",
"==",
"chunk_size",
")",
":",
"yield",
"bulk_data",
",",
"bulk_actions",
"bulk_actions",
",",
"bulk_data",
"=",
"[",
"]",
",",
"[",
"]",
"size",
",",
"action_count",
"=",
"0",
",",
"0",
"bulk_actions",
".",
"append",
"(",
"action",
")",
"if",
"data",
"is",
"not",
"None",
":",
"bulk_actions",
".",
"append",
"(",
"data",
")",
"bulk_data",
".",
"append",
"(",
"(",
"raw_action",
",",
"raw_data",
")",
")",
"else",
":",
"bulk_data",
".",
"append",
"(",
"(",
"raw_action",
",",
")",
")",
"size",
"+=",
"cur_size",
"action_count",
"+=",
"1",
"if",
"bulk_actions",
":",
"yield",
"bulk_data",
",",
"bulk_actions"
] | 32.135135 | [
0.014492753623188406,
0.2857142857142857,
0.02531645569620253,
0.125,
0.2857142857142857,
0.05555555555555555,
0.06896551724137931,
0.0625,
0.046511627906976744,
0.04878048780487805,
0.03389830508474576,
0.04,
0,
0.07142857142857142,
0.04878048780487805,
0.03773584905660377,
0,
0.04081632653061224,
0.10344827586206896,
0.02666666666666667,
0.3,
0.04878048780487805,
0.045454545454545456,
0.05405405405405406,
0,
0.05714285714285714,
0.07142857142857142,
0.05405405405405406,
0.038461538461538464,
0.15384615384615385,
0.046511627906976744,
0,
0.08333333333333333,
0.08,
0,
0.1,
0.05405405405405406
] |
```python
def changed(self, code_changed=False, value_changed=False):
    """Inform dependents that this shaderobject has changed.
    """
    for d in self._dependents:
        d._dep_changed(self, code_changed=code_changed,
                       value_changed=value_changed)
```
"def",
"changed",
"(",
"self",
",",
"code_changed",
"=",
"False",
",",
"value_changed",
"=",
"False",
")",
":",
"for",
"d",
"in",
"self",
".",
"_dependents",
":",
"d",
".",
"_dep_changed",
"(",
"self",
",",
"code_changed",
"=",
"code_changed",
",",
"value_changed",
"=",
"value_changed",
")"
] | 47 | [
0.01694915254237288,
0.03125,
0.18181818181818182,
0.058823529411764705,
0.05084745762711865,
0.09090909090909091
] |
```python
def draw(self):
    '''
    Draws samples from the `true` distribution.

    Returns:
        `np.ndarray` of samples.
    '''
    observed_arr = None
    for result_tuple in self.__feature_generator.generate():
        observed_arr = result_tuple[0]
        break
    observed_arr = observed_arr.astype(float)
    if self.__norm_mode == "z_score":
        if observed_arr.std() != 0:
            observed_arr = (observed_arr - observed_arr.mean()) / observed_arr.std()
    elif self.__norm_mode == "min_max":
        if (observed_arr.max() - observed_arr.min()) != 0:
            observed_arr = (observed_arr - observed_arr.min()) / (observed_arr.max() - observed_arr.min())
    elif self.__norm_mode == "tanh":
        observed_arr = np.tanh(observed_arr)
    return observed_arr
```
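The three `__norm_mode` branches are standard rescalings; in isolation:

```python
import numpy as np

arr = np.array([1.0, 2.0, 3.0, 4.0])

z_score = (arr - arr.mean()) / arr.std()               # zero mean, unit variance
min_max = (arr - arr.min()) / (arr.max() - arr.min())  # rescaled to [0, 1]
squashed = np.tanh(arr)                                # squashed into (-1, 1)
```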
"def",
"draw",
"(",
"self",
")",
":",
"observed_arr",
"=",
"None",
"for",
"result_tuple",
"in",
"self",
".",
"__feature_generator",
".",
"generate",
"(",
")",
":",
"observed_arr",
"=",
"result_tuple",
"[",
"0",
"]",
"break",
"observed_arr",
"=",
"observed_arr",
".",
"astype",
"(",
"float",
")",
"if",
"self",
".",
"__norm_mode",
"==",
"\"z_score\"",
":",
"if",
"observed_arr",
".",
"std",
"(",
")",
"!=",
"0",
":",
"observed_arr",
"=",
"(",
"observed_arr",
"-",
"observed_arr",
".",
"mean",
"(",
")",
")",
"/",
"observed_arr",
".",
"std",
"(",
")",
"elif",
"self",
".",
"__norm_mode",
"==",
"\"min_max\"",
":",
"if",
"(",
"observed_arr",
".",
"max",
"(",
")",
"-",
"observed_arr",
".",
"min",
"(",
")",
")",
"!=",
"0",
":",
"observed_arr",
"=",
"(",
"observed_arr",
"-",
"observed_arr",
".",
"min",
"(",
")",
")",
"/",
"(",
"observed_arr",
".",
"max",
"(",
")",
"-",
"observed_arr",
".",
"min",
"(",
")",
")",
"elif",
"self",
".",
"__norm_mode",
"==",
"\"tanh\"",
":",
"observed_arr",
"=",
"np",
".",
"tanh",
"(",
"observed_arr",
")",
"return",
"observed_arr"
] | 37.695652 | [
0.0625,
0.08333333333333333,
0.038461538461538464,
0.1111111111111111,
0.11764705882352941,
0.05405405405405406,
0.08333333333333333,
0.03571428571428571,
0.046153846153846156,
0.023255813953488372,
0.05555555555555555,
1,
0.02,
0.07142857142857142,
0.075,
0.02247191011235955,
0.06818181818181818,
0.047619047619047616,
0.018018018018018018,
0.07317073170731707,
0.02040816326530612,
1,
0.07407407407407407
] |
```python
def modify_cache_parameter_group(name, region=None, key=None, keyid=None, profile=None,
                                 **args):
    '''
    Update a cache parameter group in place.

    Note that due to a design limitation in AWS, this function is not atomic -- a maximum of 20
    params may be modified in one underlying boto call. This means that if more than 20 params
    need to be changed, the update is performed in blocks of 20, which in turn means that if a
    later sub-call fails after an earlier one has succeeded, the overall update will be left
    partially applied.

    CacheParameterGroupName
        The name of the cache parameter group to modify.

    ParameterNameValues
        A [list] of {dicts}, each composed of a parameter name and a value, for the parameter
        update. At least one parameter/value pair is required.

        .. code-block:: yaml

            ParameterNameValues:
              - ParameterName: timeout
                # Amazon requires ALL VALUES to be strings...
                ParameterValue: "30"
              - ParameterName: appendonly
                # The YAML parser will turn a bare `yes` into a bool, which Amazon will then throw on...
                ParameterValue: "yes"

    Example:

    .. code-block:: bash

        salt myminion boto3_elasticache.modify_cache_parameter_group \
                CacheParameterGroupName=myParamGroup \
                ParameterNameValues='[ { ParameterName: timeout, ParameterValue: "30" },
                                       { ParameterName: appendonly, ParameterValue: "yes" } ]'
    '''
    args = dict([(k, v) for k, v in args.items() if not k.startswith('_')])
    try:
        Params = args['ParameterNameValues']
    except KeyError:
        raise SaltInvocationError('Invalid `ParameterNameValues` structure passed.')
    while Params:
        args.update({'ParameterNameValues': Params[:20]})
        Params = Params[20:]
        if not _modify_resource(name, name_param='CacheParameterGroupName',
                                desc='cache parameter group', res_type='cache_parameter_group',
                                region=region, key=key, keyid=keyid, profile=profile, **args):
            return False
    return True
```
"def",
"modify_cache_parameter_group",
"(",
"name",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
",",
"*",
"*",
"args",
")",
":",
"args",
"=",
"dict",
"(",
"[",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"args",
".",
"items",
"(",
")",
"if",
"not",
"k",
".",
"startswith",
"(",
"'_'",
")",
"]",
")",
"try",
":",
"Params",
"=",
"args",
"[",
"'ParameterNameValues'",
"]",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"SaltInvocationError",
"(",
"'Invalid `ParameterNameValues` structure passed.'",
")",
"while",
"Params",
":",
"args",
".",
"update",
"(",
"{",
"'ParameterNameValues'",
":",
"Params",
"[",
":",
"20",
"]",
"}",
")",
"Params",
"=",
"Params",
"[",
"20",
":",
"]",
"if",
"not",
"_modify_resource",
"(",
"name",
",",
"name_param",
"=",
"'CacheParameterGroupName'",
",",
"desc",
"=",
"'cache parameter group'",
",",
"res_type",
"=",
"'cache_parameter_group'",
",",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
",",
"*",
"*",
"args",
")",
":",
"return",
"False",
"return",
"True"
] | 43.173077 | [
0.034482758620689655,
0.0975609756097561,
0.2857142857142857,
0.045454545454545456,
0,
0.042105263157894736,
0.031578947368421054,
0.031578947368421054,
0.03260869565217391,
0.09090909090909091,
0,
0.07407407407407407,
0.03571428571428571,
0,
0.08695652173913043,
0.043010752688172046,
0.031746031746031744,
0,
0.125,
0,
0.07142857142857142,
0.0625,
0.05454545454545454,
0.1,
0.05714285714285714,
0.04081632653061224,
0.0967741935483871,
0,
0.16666666666666666,
0,
0.125,
0,
0.02857142857142857,
0.05555555555555555,
0.09375,
0.078125,
0.07462686567164178,
0.08955223880597014,
0.2857142857142857,
0.02666666666666667,
0.25,
0.045454545454545456,
0.07407407407407407,
0.03571428571428571,
0.11764705882352941,
0.03508771929824561,
0.07142857142857142,
0.04,
0.05263157894736842,
0.0851063829787234,
0.08333333333333333,
0.13333333333333333
] |
```python
def neighbours(healpix_index, nside, order='ring'):
    """
    Find all the HEALPix pixels that are the neighbours of a HEALPix pixel

    Parameters
    ----------
    healpix_index : `~numpy.ndarray`
        Array of HEALPix pixels
    nside : int
        Number of pixels along the side of each of the 12 top-level HEALPix tiles
    order : { 'nested' | 'ring' }
        Order of HEALPix pixels

    Returns
    -------
    neigh : `~numpy.ndarray`
        Array giving the neighbours starting SW and rotating clockwise. This has
        one extra dimension compared to ``healpix_index`` - the first dimension -
        which is set to 8. For example if healpix_index has shape (2, 3),
        ``neigh`` has shape (8, 2, 3).
    """
    _validate_nside(nside)
    nside = np.asarray(nside, dtype=np.intc)
    if _validate_order(order) == 'ring':
        func = _core.neighbours_ring
    else:  # _validate_order(order) == 'nested'
        func = _core.neighbours_nested
    return np.stack(func(healpix_index, nside))
```
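This appears to be the `neighbours` function from the astropy-healpix package; assuming that package is installed, the same function can be exercised directly:

```python
import numpy as np
from astropy_healpix import neighbours  # assumption: this is astropy-healpix

neigh = neighbours(np.array([42, 100]), nside=16, order='ring')
print(neigh.shape)  # (8, 2): one leading dimension of 8 directions, SW first
```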
"def",
"neighbours",
"(",
"healpix_index",
",",
"nside",
",",
"order",
"=",
"'ring'",
")",
":",
"_validate_nside",
"(",
"nside",
")",
"nside",
"=",
"np",
".",
"asarray",
"(",
"nside",
",",
"dtype",
"=",
"np",
".",
"intc",
")",
"if",
"_validate_order",
"(",
"order",
")",
"==",
"'ring'",
":",
"func",
"=",
"_core",
".",
"neighbours_ring",
"else",
":",
"# _validate_order(order) == 'nested'",
"func",
"=",
"_core",
".",
"neighbours_nested",
"return",
"np",
".",
"stack",
"(",
"func",
"(",
"healpix_index",
",",
"nside",
")",
")"
] | 31.0625 | [
0.0196078431372549,
0.2857142857142857,
0.02702702702702703,
0,
0.14285714285714285,
0.14285714285714285,
0.1111111111111111,
0.06451612903225806,
0.2,
0.037037037037037035,
0.15151515151515152,
0.06451612903225806,
0,
0.18181818181818182,
0.18181818181818182,
0.14285714285714285,
0.0375,
0.04938271604938271,
0.0410958904109589,
0.10526315789473684,
0.2857142857142857,
0,
0.07692307692307693,
0,
0.045454545454545456,
0,
0.05,
0.05555555555555555,
0.0425531914893617,
0.05263157894736842,
0,
0.0425531914893617
] |
```python
def run_command(cmd, *args, **kwargs):
    """
    Take an input command and run it, handling exceptions and error codes and returning
    its stdout and stderr.

    :param cmd: The list of command and arguments.
    :type cmd: list
    :returns: A 2-tuple of the output and error from the command
    :rtype: Tuple[str, str]
    :raises: exceptions.PipenvCmdError
    """
    from pipenv.vendor import delegator
    from ._compat import decode_for_output
    from .cmdparse import Script
    catch_exceptions = kwargs.pop("catch_exceptions", True)
    if isinstance(cmd, (six.string_types, list, tuple)):
        cmd = Script.parse(cmd)
    if not isinstance(cmd, Script):
        raise TypeError("Command input must be a string, list or tuple")
    if "env" not in kwargs:
        kwargs["env"] = os.environ.copy()
    kwargs["env"]["PYTHONIOENCODING"] = "UTF-8"
    try:
        cmd_string = cmd.cmdify()
    except TypeError:
        click_echo("Error turning command into string: {0}".format(cmd), err=True)
        sys.exit(1)
    if environments.is_verbose():
        click_echo("Running command: $ {0}".format(cmd_string), err=True)
    c = delegator.run(cmd_string, *args, **kwargs)
    return_code = c.return_code
    if environments.is_verbose():
        click_echo("Command output: {0}".format(
            crayons.blue(decode_for_output(c.out))
        ), err=True)
    if not c.ok and catch_exceptions:
        raise PipenvCmdError(cmd_string, c.out, c.err, return_code)
    return c
```
"def",
"run_command",
"(",
"cmd",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"pipenv",
".",
"vendor",
"import",
"delegator",
"from",
".",
"_compat",
"import",
"decode_for_output",
"from",
".",
"cmdparse",
"import",
"Script",
"catch_exceptions",
"=",
"kwargs",
".",
"pop",
"(",
"\"catch_exceptions\"",
",",
"True",
")",
"if",
"isinstance",
"(",
"cmd",
",",
"(",
"six",
".",
"string_types",
",",
"list",
",",
"tuple",
")",
")",
":",
"cmd",
"=",
"Script",
".",
"parse",
"(",
"cmd",
")",
"if",
"not",
"isinstance",
"(",
"cmd",
",",
"Script",
")",
":",
"raise",
"TypeError",
"(",
"\"Command input must be a string, list or tuple\"",
")",
"if",
"\"env\"",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"\"env\"",
"]",
"=",
"os",
".",
"environ",
".",
"copy",
"(",
")",
"kwargs",
"[",
"\"env\"",
"]",
"[",
"\"PYTHONIOENCODING\"",
"]",
"=",
"\"UTF-8\"",
"try",
":",
"cmd_string",
"=",
"cmd",
".",
"cmdify",
"(",
")",
"except",
"TypeError",
":",
"click_echo",
"(",
"\"Error turning command into string: {0}\"",
".",
"format",
"(",
"cmd",
")",
",",
"err",
"=",
"True",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"environments",
".",
"is_verbose",
"(",
")",
":",
"click_echo",
"(",
"\"Running command: $ {0}\"",
".",
"format",
"(",
"cmd_string",
",",
"err",
"=",
"True",
")",
")",
"c",
"=",
"delegator",
".",
"run",
"(",
"cmd_string",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return_code",
"=",
"c",
".",
"return_code",
"if",
"environments",
".",
"is_verbose",
"(",
")",
":",
"click_echo",
"(",
"\"Command output: {0}\"",
".",
"format",
"(",
"crayons",
".",
"blue",
"(",
"decode_for_output",
"(",
"c",
".",
"out",
")",
")",
")",
",",
"err",
"=",
"True",
")",
"if",
"not",
"c",
".",
"ok",
"and",
"catch_exceptions",
":",
"raise",
"PipenvCmdError",
"(",
"cmd_string",
",",
"c",
".",
"out",
",",
"c",
".",
"err",
",",
"return_code",
")",
"return",
"c"
] | 37.461538 | [
0.02631578947368421,
0.2857142857142857,
0.034482758620689655,
0.07692307692307693,
0,
0.06,
0.15789473684210525,
0.046875,
0.1111111111111111,
0.07894736842105263,
0.2857142857142857,
0,
0.05128205128205128,
0.047619047619047616,
0.0625,
0.03389830508474576,
0.03571428571428571,
0.06451612903225806,
0.05714285714285714,
0.027777777777777776,
0.07407407407407407,
0.04878048780487805,
0.0425531914893617,
0.25,
0.06060606060606061,
0.09523809523809523,
0.036585365853658534,
0.10526315789473684,
0.06060606060606061,
0.0273972602739726,
0.04,
0.06451612903225806,
0.06060606060606061,
0.0625,
0.04,
0.15,
0.05405405405405406,
0.029850746268656716,
0.16666666666666666
] |
```python
def update(self, style_sheet=values.unset):
    """
    Update the StyleSheetInstance

    :param dict style_sheet: The JSON string that describes the style sheet object

    :returns: Updated StyleSheetInstance
    :rtype: twilio.rest.autopilot.v1.assistant.style_sheet.StyleSheetInstance
    """
    data = values.of({'StyleSheet': serialize.object(style_sheet), })
    payload = self._version.update(
        'POST',
        self._uri,
        data=data,
    )
    return StyleSheetInstance(self._version, payload, assistant_sid=self._solution['assistant_sid'], )
```
"def",
"update",
"(",
"self",
",",
"style_sheet",
"=",
"values",
".",
"unset",
")",
":",
"data",
"=",
"values",
".",
"of",
"(",
"{",
"'StyleSheet'",
":",
"serialize",
".",
"object",
"(",
"style_sheet",
")",
",",
"}",
")",
"payload",
"=",
"self",
".",
"_version",
".",
"update",
"(",
"'POST'",
",",
"self",
".",
"_uri",
",",
"data",
"=",
"data",
",",
")",
"return",
"StyleSheetInstance",
"(",
"self",
".",
"_version",
",",
"payload",
",",
"assistant_sid",
"=",
"self",
".",
"_solution",
"[",
"'assistant_sid'",
"]",
",",
")"
] | 33.5 | [
0.023255813953488372,
0.18181818181818182,
0.05405405405405406,
0,
0.046511627906976744,
0,
0.06818181818181818,
0.04938271604938271,
0.18181818181818182,
0.0273972602739726,
0,
0.07692307692307693,
0.10526315789473684,
0.09090909090909091,
0.13636363636363635,
0.3333333333333333,
0,
0.02830188679245283
] |
```python
def _get_pattern(self, pattern_id):
    """Get pattern item by id."""
    for key in ('PATTERNS1', 'PATTERNS2', 'PATTERNS3'):
        if key in self.tagged_blocks:
            data = self.tagged_blocks.get_data(key)
            for pattern in data:
                if pattern.pattern_id == pattern_id:
                    return pattern
    return None
```
"def",
"_get_pattern",
"(",
"self",
",",
"pattern_id",
")",
":",
"for",
"key",
"in",
"(",
"'PATTERNS1'",
",",
"'PATTERNS2'",
",",
"'PATTERNS3'",
")",
":",
"if",
"key",
"in",
"self",
".",
"tagged_blocks",
":",
"data",
"=",
"self",
".",
"tagged_blocks",
".",
"get_data",
"(",
"key",
")",
"for",
"pattern",
"in",
"data",
":",
"if",
"pattern",
".",
"pattern_id",
"==",
"pattern_id",
":",
"return",
"pattern",
"return",
"None"
] | 41.777778 | [
0.02857142857142857,
0.05405405405405406,
0.03389830508474576,
0.04878048780487805,
0.03636363636363636,
0.05555555555555555,
0.03571428571428571,
0.05263157894736842,
0.10526315789473684
] |
```python
def get_unpacked_response_body(self, requestId, mimetype="application/unknown"):
    '''
    Return an unpacked, decoded response body from Network_getResponseBody()
    '''
    content = self.Network_getResponseBody(requestId)
    assert 'result' in content
    result = content['result']
    assert 'base64Encoded' in result
    assert 'body' in result
    if result['base64Encoded']:
        content = base64.b64decode(result['body'])
    else:
        content = result['body']
    self.log.info("Navigate complete. Received %s byte response with type %s.", len(content), mimetype)
    return {'binary': result['base64Encoded'], 'mimetype': mimetype, 'content': content}
```
"def",
"get_unpacked_response_body",
"(",
"self",
",",
"requestId",
",",
"mimetype",
"=",
"\"application/unknown\"",
")",
":",
"content",
"=",
"self",
".",
"Network_getResponseBody",
"(",
"requestId",
")",
"assert",
"'result'",
"in",
"content",
"result",
"=",
"content",
"[",
"'result'",
"]",
"assert",
"'base64Encoded'",
"in",
"result",
"assert",
"'body'",
"in",
"result",
"if",
"result",
"[",
"'base64Encoded'",
"]",
":",
"content",
"=",
"base64",
".",
"b64decode",
"(",
"result",
"[",
"'body'",
"]",
")",
"else",
":",
"content",
"=",
"result",
"[",
"'body'",
"]",
"self",
".",
"log",
".",
"info",
"(",
"\"Navigate complete. Received %s byte response with type %s.\"",
",",
"len",
"(",
"content",
")",
",",
"mimetype",
")",
"return",
"{",
"'binary'",
":",
"result",
"[",
"'base64Encoded'",
"]",
",",
"'mimetype'",
":",
"mimetype",
",",
"'content'",
":",
"content",
"}"
] | 31.4 | [
0.025,
0.6,
0.0410958904109589,
0.6,
0.058823529411764705,
0,
0.10714285714285714,
0.10714285714285714,
0,
0.08823529411764706,
0.12,
0,
0.10344827586206896,
0.06666666666666667,
0.42857142857142855,
0.1111111111111111,
0,
0.039603960396039604,
0,
0.07777777777777778
] |
```python
def match_to_dict(match):
    """Convert a match object into a dict.

    Values are:
        indent: amount of indentation of this [sub]account
        parent: the parent dict (None)
        account_fragment: account name fragment
        balance: decimal.Decimal balance
        children: sub-accounts ([])
    """
    balance, indent, account_fragment = match.group(1, 2, 3)
    return {
        'balance': decimal.Decimal(balance),
        'indent': len(indent),
        'account_fragment': account_fragment,
        'parent': None,
        'children': [],
    }
```
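The expected match shape is implied by the `group(1, 2, 3)` call; a hypothetical pattern in that shape, purely for illustration:

```python
import decimal
import re

# Hypothetical pattern: balance, a two-space gap, optional extra indent, account name.
balance_re = re.compile(r'^\s*(-?[\d.]+)\s\s( *)(.+)$')

m = balance_re.match('  100.00    Assets')
print(match_to_dict(m))
# {'balance': Decimal('100.00'), 'indent': 2, 'account_fragment': 'Assets', ...}
```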
"def",
"match_to_dict",
"(",
"match",
")",
":",
"balance",
",",
"indent",
",",
"account_fragment",
"=",
"match",
".",
"group",
"(",
"1",
",",
"2",
",",
"3",
")",
"return",
"{",
"'balance'",
":",
"decimal",
".",
"Decimal",
"(",
"balance",
")",
",",
"'indent'",
":",
"len",
"(",
"indent",
")",
",",
"'account_fragment'",
":",
"account_fragment",
",",
"'parent'",
":",
"None",
",",
"'children'",
":",
"[",
"]",
",",
"}"
] | 30.5 | [
0.04,
0.047619047619047616,
0,
0.13333333333333333,
0.05172413793103448,
0.07894736842105263,
0.0425531914893617,
0.05,
0.08571428571428572,
0.2857142857142857,
0.03333333333333333,
0.25,
0.045454545454545456,
0.06666666666666667,
0.044444444444444446,
0.08695652173913043,
0.08695652173913043,
0.6
] |
```python
def collect_num(self):
    """Get the number of times the answer has been collected (favorited).

    :return: collection count of the answer
    :rtype: int
    """
    element = self.soup.find("a", {
        "data-za-a": "click_answer_collected_count"
    })
    if element is None:
        return 0
    else:
        return int(element.get_text())
```
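The lookup itself is plain BeautifulSoup; a sketch against a hypothetical HTML snippet:

```python
from bs4 import BeautifulSoup

html = '<a data-za-a="click_answer_collected_count">157</a>'  # hypothetical markup
soup = BeautifulSoup(html, "html.parser")
element = soup.find("a", {"data-za-a": "click_answer_collected_count"})
print(int(element.get_text()))  # 157
```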
"def",
"collect_num",
"(",
"self",
")",
":",
"element",
"=",
"self",
".",
"soup",
".",
"find",
"(",
"\"a\"",
",",
"{",
"\"data-za-a\"",
":",
"\"click_answer_collected_count\"",
"}",
")",
"if",
"element",
"is",
"None",
":",
"return",
"0",
"else",
":",
"return",
"int",
"(",
"element",
".",
"get_text",
"(",
")",
")"
] | 23.076923 | [
0.045454545454545456,
0.1111111111111111,
0,
0.125,
0.15789473684210525,
0.18181818181818182,
0.07692307692307693,
0.03636363636363636,
0.3,
0.07407407407407407,
0.1,
0.15384615384615385,
0.047619047619047616
] |
```python
def cd(i):
    """
    Input:  {
              (repo_uoa)   - repo UOA
              module_uoa   - module UOA
              data_uoa     - data UOA
                  or
              cid
            }

    Output: {
              Output of the 'load' function

              string       - prepared string 'cd {path to entry}'
            }
    """
    o=i.get('out','')
    i['out']=''
    r=find(i)
    i['out']=o
    if r['return']>0: return r
    noe=r.get('number_of_entries','')
    if noe=='': noe=0
    if noe>1 and o=='con':
        out('CK warning: '+str(noe)+' entries found! Selecting the first one ...')
        out('')
    p=r.get('path','')
    if p!='':
        rx=get_os_ck({})
        if rx['return']>0: return rx
        plat=rx['platform']
        s='cd '
        if plat=='win':
            s+='/D '
        if p.find(' ')>0:
            p='"'+p+'"'
        s+=p
        out(s)
        r['string']=s
        import platform
        import subprocess
        out('')
        out('Warning: you are in a new shell with a reused environment. Enter "exit" to return to the original one!')
        if platform.system().lower().startswith('win'):  # pragma: no cover
            p = subprocess.Popen(["cmd", "/k", s], shell = True, env=os.environ)
            p.wait()
        else:
            rx=gen_tmp_file({})
            if rx['return']>0: return rx
            fn=rx['file_name']
            rx=save_text_file({'text_file':fn, 'string':s})
            if rx['return']>0: return rx
            os.system("bash --rcfile "+fn)
    return r
```
"def",
"cd",
"(",
"i",
")",
":",
"o",
"=",
"i",
".",
"get",
"(",
"'out'",
",",
"''",
")",
"i",
"[",
"'out'",
"]",
"=",
"''",
"r",
"=",
"find",
"(",
"i",
")",
"i",
"[",
"'out'",
"]",
"=",
"o",
"if",
"r",
"[",
"'return'",
"]",
">",
"0",
":",
"return",
"r",
"noe",
"=",
"r",
".",
"get",
"(",
"'number_of_entries'",
",",
"''",
")",
"if",
"noe",
"==",
"''",
":",
"noe",
"=",
"0",
"if",
"noe",
">",
"1",
"and",
"o",
"==",
"'con'",
":",
"out",
"(",
"'CK warning: '",
"+",
"str",
"(",
"noe",
")",
"+",
"' entries found! Selecting the first one ...'",
")",
"out",
"(",
"''",
")",
"p",
"=",
"r",
".",
"get",
"(",
"'path'",
",",
"''",
")",
"if",
"p",
"!=",
"''",
":",
"rx",
"=",
"get_os_ck",
"(",
"{",
"}",
")",
"if",
"rx",
"[",
"'return'",
"]",
">",
"0",
":",
"return",
"rx",
"plat",
"=",
"rx",
"[",
"'platform'",
"]",
"s",
"=",
"'cd '",
"if",
"plat",
"==",
"'win'",
":",
"s",
"+=",
"'/D '",
"if",
"p",
".",
"find",
"(",
"' '",
")",
">",
"0",
":",
"p",
"=",
"'\"'",
"+",
"p",
"+",
"'\"'",
"s",
"+=",
"p",
"out",
"(",
"s",
")",
"r",
"[",
"'string'",
"]",
"=",
"s",
"import",
"platform",
"import",
"subprocess",
"out",
"(",
"''",
")",
"out",
"(",
"'Warning: you are in a new shell with a reused environment. Enter \"exit\" to return to the original one!'",
")",
"if",
"platform",
".",
"system",
"(",
")",
".",
"lower",
"(",
")",
".",
"startswith",
"(",
"'win'",
")",
":",
"# pragma: no cover",
"p",
"=",
"subprocess",
".",
"Popen",
"(",
"[",
"\"cmd\"",
",",
"\"/k\"",
",",
"s",
"]",
",",
"shell",
"=",
"True",
",",
"env",
"=",
"os",
".",
"environ",
")",
"p",
".",
"wait",
"(",
")",
"else",
":",
"rx",
"=",
"gen_tmp_file",
"(",
"{",
"}",
")",
"if",
"rx",
"[",
"'return'",
"]",
">",
"0",
":",
"return",
"rx",
"fn",
"=",
"rx",
"[",
"'file_name'",
"]",
"rx",
"=",
"save_text_file",
"(",
"{",
"'text_file'",
":",
"fn",
",",
"'string'",
":",
"s",
"}",
")",
"if",
"rx",
"[",
"'return'",
"]",
">",
"0",
":",
"return",
"rx",
"os",
".",
"system",
"(",
"\"bash --rcfile \"",
"+",
"fn",
")",
"return",
"r"
] | 19.986301 | [
0.1,
0.2857142857142857,
0.23076923076923078,
0.1111111111111111,
0.10526315789473684,
0.1111111111111111,
0.15789473684210525,
0.17647058823529413,
0,
0.23076923076923078,
0,
0.2857142857142857,
0.09090909090909091,
0,
0.05084745762711865,
0.23076923076923078,
0.2857142857142857,
0,
0.19047619047619047,
0,
0.2,
0.23076923076923078,
0.21428571428571427,
0,
0.13333333333333333,
0,
0.10810810810810811,
0.23809523809523808,
0,
0.15384615384615385,
0.04938271604938271,
0.21428571428571427,
0,
0.18181818181818182,
0.23076923076923078,
0.17391304347826086,
0.14285714285714285,
0,
0.15384615384615385,
0,
0.2857142857142857,
0.18181818181818182,
0.2222222222222222,
0,
0.16666666666666666,
0.19047619047619047,
0.36363636363636365,
0,
0.23076923076923078,
0,
0.2,
0,
0.13636363636363635,
0.125,
0,
0.21428571428571427,
0.034482758620689655,
0,
0.0547945205479452,
0.0641025641025641,
0.16666666666666666,
0,
0.25,
0.13793103448275862,
0.13157894736842105,
0.14285714285714285,
0,
0.10526315789473684,
0.13157894736842105,
0,
0.075,
0,
0.16666666666666666
] |
```python
def initialize_record_handler(test_uid, test_record, notify_update):
    """Initialize the record handler for a test.

    For each running test, we attach a record handler to the top-level OpenHTF
    logger. The handler will append OpenHTF logs to the test record, while
    filtering out logs that are specific to any other test run.
    """
    htf_logger = logging.getLogger(LOGGER_PREFIX)
    htf_logger.addHandler(RecordHandler(test_uid, test_record, notify_update))
```
"def",
"initialize_record_handler",
"(",
"test_uid",
",",
"test_record",
",",
"notify_update",
")",
":",
"htf_logger",
"=",
"logging",
".",
"getLogger",
"(",
"LOGGER_PREFIX",
")",
"htf_logger",
".",
"addHandler",
"(",
"RecordHandler",
"(",
"test_uid",
",",
"test_record",
",",
"notify_update",
")",
")"
] | 50.111111 | [
0.014705882352941176,
0.043478260869565216,
0,
0.039473684210526314,
0.041666666666666664,
0.04918032786885246,
0.4,
0.06382978723404255,
0.039473684210526314
] |
```python
def lines(input):
    """Remove comments and empty lines"""
    for raw_line in input:
        line = raw_line.strip()
        if line and not line.startswith('#'):
            yield strip_comments(line)
```
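`strip_comments` is defined elsewhere in this source; with a simplified stand-in, the generator behaves like this:

```python
def strip_comments(line):
    # Simplified stand-in: drop everything after the first '#'.
    return line.split('#', 1)[0].strip()

source = ["# header", "", "alpha  # trailing note", "beta"]
print(list(lines(source)))  # ['alpha', 'beta']
```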
"def",
"lines",
"(",
"input",
")",
":",
"for",
"raw_line",
"in",
"input",
":",
"line",
"=",
"raw_line",
".",
"strip",
"(",
")",
"if",
"line",
"and",
"not",
"line",
".",
"startswith",
"(",
"'#'",
")",
":",
"yield",
"strip_comments",
"(",
"line",
")"
] | 33 | [
0.058823529411764705,
0.04878048780487805,
0.07692307692307693,
0.06451612903225806,
0.044444444444444446,
0.05263157894736842
] |
```python
def instruction_ST16(self, opcode, ea, register):
    """
    Writes the contents of a 16-bit register into two consecutive memory
    locations.

    source code forms: STD P; STX P; STY P; STS P; STU P

    CC bits "HNZVC": -aa0-
    """
    value = register.value
    # log.debug("$%x ST16 store value $%x from %s at $%x \t| %s" % (
    #     self.program_counter,
    #     value, register.name, ea,
    #     self.cfg.mem_info.get_shortest(ea)
    # ))
    self.clear_NZV()
    self.update_NZ_16(value)
    return ea, value
```
"def",
"instruction_ST16",
"(",
"self",
",",
"opcode",
",",
"ea",
",",
"register",
")",
":",
"value",
"=",
"register",
".",
"value",
"# log.debug(\"$%x ST16 store value $%x from %s at $%x \\t| %s\" % (",
"# self.program_counter,",
"# value, register.name, ea,",
"# self.cfg.mem_info.get_shortest(ea)",
"# ))",
"self",
".",
"clear_NZV",
"(",
")",
"self",
".",
"update_NZ_16",
"(",
"value",
")",
"return",
"ea",
",",
"value"
] | 31.666667 | [
0.02040816326530612,
0.18181818181818182,
0.02631578947368421,
0.1111111111111111,
0,
0.1,
0,
0.06666666666666667,
0.18181818181818182,
0.06666666666666667,
0.014084507042253521,
0.02857142857142857,
0.02564102564102564,
0.020833333333333332,
0.08333333333333333,
0.08333333333333333,
0.0625,
0.08333333333333333
] |
```python
def join_room(room, sid=None, namespace=None):
    """Join a room.

    This function puts the user in a room, under the current namespace. The
    user and the namespace are obtained from the event context. This is a
    function that can only be called from a SocketIO event handler. Example::

        @socketio.on('join')
        def on_join(data):
            username = session['username']
            room = data['room']
            join_room(room)
            send(username + ' has entered the room.', room=room)

    :param room: The name of the room to join.
    :param sid: The session id of the client. If not provided, the client is
                obtained from the request context.
    :param namespace: The namespace for the room. If not provided, the
                      namespace is obtained from the request context.
    """
    socketio = flask.current_app.extensions['socketio']
    sid = sid or flask.request.sid
    namespace = namespace or flask.request.namespace
    socketio.server.enter_room(sid, room, namespace=namespace)
```
"def",
"join_room",
"(",
"room",
",",
"sid",
"=",
"None",
",",
"namespace",
"=",
"None",
")",
":",
"socketio",
"=",
"flask",
".",
"current_app",
".",
"extensions",
"[",
"'socketio'",
"]",
"sid",
"=",
"sid",
"or",
"flask",
".",
"request",
".",
"sid",
"namespace",
"=",
"namespace",
"or",
"flask",
".",
"request",
".",
"namespace",
"socketio",
".",
"server",
".",
"enter_room",
"(",
"sid",
",",
"room",
",",
"namespace",
"=",
"namespace",
")"
] | 42.875 | [
0.021739130434782608,
0.10526315789473684,
0,
0.02666666666666667,
0.0273972602739726,
0.03896103896103896,
0,
0.07142857142857142,
0.07692307692307693,
0.047619047619047616,
0.06451612903225806,
0.07407407407407407,
0.03125,
0,
0.06521739130434782,
0.039473684210526314,
0.04,
0.04285714285714286,
0.043478260869565216,
0.2857142857142857,
0.03636363636363636,
0.058823529411764705,
0.038461538461538464,
0.03225806451612903
] |
```python
def unpack_ambiguous(s):
    """
    List sequences with ambiguous characters in all possibilities.
    """
    sd = [ambiguous_dna_values[x] for x in s]
    return ["".join(x) for x in list(product(*sd))]
```
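`ambiguous_dna_values` and `product` are imported elsewhere in the source; assuming the standard Biopython mapping, the expansion looks like:

```python
from itertools import product
from Bio.Data.IUPACData import ambiguous_dna_values  # assumed source of the mapping

print(unpack_ambiguous("AR"))  # ['AA', 'AG'], since R stands for A or G
```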
"def",
"unpack_ambiguous",
"(",
"s",
")",
":",
"sd",
"=",
"[",
"ambiguous_dna_values",
"[",
"x",
"]",
"for",
"x",
"in",
"s",
"]",
"return",
"[",
"\"\"",
".",
"join",
"(",
"x",
")",
"for",
"x",
"in",
"list",
"(",
"product",
"(",
"*",
"sd",
")",
")",
"]"
] | 33.333333 | [
0.041666666666666664,
0.2857142857142857,
0.030303030303030304,
0.2857142857142857,
0.044444444444444446,
0.0392156862745098
] |
```python
def generate_sample(self, initial_pos, num_samples, trajectory_length, stepsize=None):
    """
    Method returns a generator type object whose each iteration yields a sample
    using Hamiltonian Monte Carlo

    Parameters
    ----------
    initial_pos: A 1d array like object
        Vector representing values of parameter position, the starting
        state in markov chain.

    num_samples: int
        Number of samples to be generated

    trajectory_length: int or float
        Target trajectory length, stepsize * number of steps(L),
        where L is the number of steps taken per HMC iteration,
        and stepsize is step size for splitting time method.

    stepsize: float, defaults to None
        The stepsize for proposing new values of position and momentum in simulate_dynamics.
        If None, then will be chosen suitably

    Returns
    -------
    generator: yielding a 1d numpy.array type object for a sample

    Examples
    --------
    >>> from pgmpy.sampling import HamiltonianMC as HMC, GradLogPDFGaussian as GLPG
    >>> from pgmpy.factors import GaussianDistribution as JGD
    >>> import numpy as np
    >>> mean = np.array([4, -1])
    >>> covariance = np.array([[3, 0.4], [0.4, 3]])
    >>> model = JGD(['x', 'y'], mean, covariance)
    >>> sampler = HMC(model=model, grad_log_pdf=GLPG)
    >>> gen_samples = sampler.generate_sample(np.array([-1, 1]), num_samples=10000,
    ...                                       trajectory_length=2, stepsize=0.25)
    >>> samples_array = np.array([sample for sample in gen_samples])
    >>> samples_array
    array([[ 0.1467264 ,  0.27143857],
           [ 4.0371448 ,  0.15871274],
           [ 3.24656208, -1.03742621],
           ...,
           [ 6.45975905,  1.97941306],
           [ 4.89007171,  0.15413156],
           [ 5.9528083 ,  1.92983158]])
    >>> np.cov(samples_array.T)
    array([[ 2.95692642,  0.4379419 ],
           [ 0.4379419 ,  3.00939434]])
    >>> sampler.acceptance_rate
    0.9969
    """
    self.accepted_proposals = 0
    initial_pos = _check_1d_array_object(initial_pos, 'initial_pos')
    _check_length_equal(initial_pos, self.model.variables, 'initial_pos', 'model.variables')
    if stepsize is None:
        stepsize = self._find_reasonable_stepsize(initial_pos)
    lsteps = int(max(1, round(trajectory_length / stepsize, 0)))
    position_m = initial_pos.copy()
    for i in range(0, num_samples):
        position_m, _ = self._sample(position_m, trajectory_length, stepsize, lsteps)
        yield position_m
    self.acceptance_rate = self.accepted_proposals / num_samples
```
"def",
"generate_sample",
"(",
"self",
",",
"initial_pos",
",",
"num_samples",
",",
"trajectory_length",
",",
"stepsize",
"=",
"None",
")",
":",
"self",
".",
"accepted_proposals",
"=",
"0",
"initial_pos",
"=",
"_check_1d_array_object",
"(",
"initial_pos",
",",
"'initial_pos'",
")",
"_check_length_equal",
"(",
"initial_pos",
",",
"self",
".",
"model",
".",
"variables",
",",
"'initial_pos'",
",",
"'model.variables'",
")",
"if",
"stepsize",
"is",
"None",
":",
"stepsize",
"=",
"self",
".",
"_find_reasonable_stepsize",
"(",
"initial_pos",
")",
"lsteps",
"=",
"int",
"(",
"max",
"(",
"1",
",",
"round",
"(",
"trajectory_length",
"/",
"stepsize",
",",
"0",
")",
")",
")",
"position_m",
"=",
"initial_pos",
".",
"copy",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"num_samples",
")",
":",
"position_m",
",",
"_",
"=",
"self",
".",
"_sample",
"(",
"position_m",
",",
"trajectory_length",
",",
"stepsize",
",",
"lsteps",
")",
"yield",
"position_m",
"self",
".",
"acceptance_rate",
"=",
"self",
".",
"accepted_proposals",
"/",
"num_samples"
] | 38.901408 | [
0.023255813953488372,
0.18181818181818182,
0.03614457831325301,
0.05405405405405406,
0,
0.1111111111111111,
0.1111111111111111,
0.046511627906976744,
0.02702702702702703,
0.058823529411764705,
0,
0.08333333333333333,
0.044444444444444446,
0,
0.05128205128205128,
0.029411764705882353,
0.029850746268656716,
0.03125,
0,
0.07142857142857142,
0.031578947368421054,
0.04,
0,
0.13333333333333333,
0.13333333333333333,
0.029411764705882353,
0,
0.125,
0.125,
0.04597701149425287,
0.046153846153846156,
0.1,
0.08333333333333333,
0.05454545454545454,
0.05660377358490566,
0.05263157894736842,
0.07865168539325842,
0.07058823529411765,
0.041666666666666664,
0.12,
0.11904761904761904,
0.11904761904761904,
0.09523809523809523,
0.15789473684210525,
0.09523809523809523,
0.09523809523809523,
0.13953488372093023,
0.08571428571428572,
0.11904761904761904,
0.13953488372093023,
0.08571428571428572,
0.14285714285714285,
0.18181818181818182,
0,
0.05714285714285714,
0.027777777777777776,
0.03125,
0,
0.07142857142857142,
0.030303030303030304,
0,
0.029411764705882353,
0.05128205128205128,
0,
0.05128205128205128,
0,
0.033707865168539325,
0,
0.07142857142857142,
0,
0.029411764705882353
] |
```python
def download(self, filename, output, packethook=None, timeout=SOCK_TIMEOUT):
    """This method initiates a tftp download from the configured remote
    host, requesting the filename passed. It writes the file to output,
    which can be a file-like object or a path to a local file. If a
    packethook is provided, it must be a function that takes a single
    parameter, which will be a copy of each DAT packet received in the
    form of a TftpPacketDAT object. The timeout parameter may be used to
    override the default SOCK_TIMEOUT setting, which is the amount of time
    that the client will wait for a receive packet to arrive.

    Note: If output is a hyphen, stdout is used."""
    # We're downloading.
    log.debug("Creating download context with the following params:")
    log.debug("host = %s, port = %s, filename = %s" % (self.host, self.iport, filename))
    log.debug("options = %s, packethook = %s, timeout = %s" % (self.options, packethook, timeout))
    self.context = TftpContextClientDownload(self.host,
                                             self.iport,
                                             filename,
                                             output,
                                             self.options,
                                             packethook,
                                             timeout,
                                             localip=self.localip)
    self.context.start()
    # Download happens here
    self.context.end()
    metrics = self.context.metrics
    log.info('')
    log.info("Download complete.")
    if metrics.duration == 0:
        log.info("Duration too short, rate undetermined")
    else:
        log.info("Downloaded %.2f bytes in %.2f seconds" % (metrics.bytes, metrics.duration))
        log.info("Average rate: %.2f kbps" % metrics.kbps)
    log.info("%.2f bytes in resent data" % metrics.resent_bytes)
    log.info("Received %d duplicate packets" % metrics.dupcount)
```
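This reads like the `download` method of tftpy's `TftpClient`; assuming that package, a typical call would be:

```python
import tftpy  # assumption: this method belongs to tftpy's TftpClient

client = tftpy.TftpClient("192.0.2.10", 69)  # hypothetical server address
client.download("firmware.bin", "local-firmware.bin")
```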
"def",
"download",
"(",
"self",
",",
"filename",
",",
"output",
",",
"packethook",
"=",
"None",
",",
"timeout",
"=",
"SOCK_TIMEOUT",
")",
":",
"# We're downloading.",
"log",
".",
"debug",
"(",
"\"Creating download context with the following params:\"",
")",
"log",
".",
"debug",
"(",
"\"host = %s, port = %s, filename = %s\"",
"%",
"(",
"self",
".",
"host",
",",
"self",
".",
"iport",
",",
"filename",
")",
")",
"log",
".",
"debug",
"(",
"\"options = %s, packethook = %s, timeout = %s\"",
"%",
"(",
"self",
".",
"options",
",",
"packethook",
",",
"timeout",
")",
")",
"self",
".",
"context",
"=",
"TftpContextClientDownload",
"(",
"self",
".",
"host",
",",
"self",
".",
"iport",
",",
"filename",
",",
"output",
",",
"self",
".",
"options",
",",
"packethook",
",",
"timeout",
",",
"localip",
"=",
"self",
".",
"localip",
")",
"self",
".",
"context",
".",
"start",
"(",
")",
"# Download happens here",
"self",
".",
"context",
".",
"end",
"(",
")",
"metrics",
"=",
"self",
".",
"context",
".",
"metrics",
"log",
".",
"info",
"(",
"''",
")",
"log",
".",
"info",
"(",
"\"Download complete.\"",
")",
"if",
"metrics",
".",
"duration",
"==",
"0",
":",
"log",
".",
"info",
"(",
"\"Duration too short, rate undetermined\"",
")",
"else",
":",
"log",
".",
"info",
"(",
"\"Downloaded %.2f bytes in %.2f seconds\"",
"%",
"(",
"metrics",
".",
"bytes",
",",
"metrics",
".",
"duration",
")",
")",
"log",
".",
"info",
"(",
"\"Average rate: %.2f kbps\"",
"%",
"metrics",
".",
"kbps",
")",
"log",
".",
"info",
"(",
"\"%.2f bytes in resent data\"",
"%",
"metrics",
".",
"resent_bytes",
")",
"log",
".",
"info",
"(",
"\"Received %d duplicate packets\"",
"%",
"metrics",
".",
"dupcount",
")"
] | 54.736842 | [
0.013157894736842105,
0.02666666666666667,
0.02666666666666667,
0.028169014084507043,
0.0273972602739726,
0.02702702702702703,
0.02631578947368421,
0.02564102564102564,
0.03076923076923077,
0,
0.05454545454545454,
0.07142857142857142,
0.0273972602739726,
0.03260869565217391,
0.029411764705882353,
0.05084745762711865,
0.05,
0.05172413793103448,
0.05357142857142857,
0.04838709677419355,
0.05,
0.05263157894736842,
0.05555555555555555,
0.07142857142857142,
0.06451612903225806,
0.07692307692307693,
0,
0.05263157894736842,
0,
0.1,
0.05263157894736842,
0.06060606060606061,
0.03278688524590164,
0.15384615384615385,
0.030927835051546393,
0.03225806451612903,
0.029411764705882353,
0.029411764705882353
] |
```python
def get_article_by_history_json(text, article_json=None):
    """Extract the article list from the text of a WeChat history-message page.

    Parameters
    ----------
    text : str or unicode
        Text of the history-message page.
    article_json : dict
        Article JSON dict already extracted from that text.

    Returns
    -------
    list[dict]
        {
            'send_id': '',         # bulk-send id; note it is not unique, since one
                                   # bulk send of several messages shares one id
            'datetime': '',        # bulk-send datetime
            'type': '',            # message type, always 49, i.e. image-and-text
            'main': 0,             # whether this is the first message of the send
            'title': '',           # article title
            'abstract': '',        # abstract
            'fileid': '',          #
            'content_url': '',     # article link
            'source_url': '',      # "read the original" link
            'cover': '',           # cover image
            'author': '',          # author
            'copyright_stat': '',  # article type, e.g. original
        }
    """
    if article_json is None:
        article_json = find_article_json_re.findall(text)
        if not article_json:
            return []
        article_json = article_json[0] + '}}]}'
        article_json = json.loads(article_json)
    items = list()
    for listdic in article_json['list']:
        if str(listdic['comm_msg_info'].get('type', '')) != '49':
            continue
        comm_msg_info = listdic['comm_msg_info']
        app_msg_ext_info = listdic['app_msg_ext_info']
        send_id = comm_msg_info.get('id', '')
        msg_datetime = comm_msg_info.get('datetime', '')
        msg_type = str(comm_msg_info.get('type', ''))
        items.append({
            'send_id': send_id,
            'datetime': msg_datetime,
            'type': msg_type,
            'main': 1,
            'title': app_msg_ext_info.get('title', ''),
            'abstract': app_msg_ext_info.get('digest', ''),
            'fileid': app_msg_ext_info.get('fileid', ''),
            'content_url': WechatSogouStructuring.__handle_content_url(app_msg_ext_info.get('content_url')),
            'source_url': app_msg_ext_info.get('source_url', ''),
            'cover': app_msg_ext_info.get('cover', ''),
            'author': app_msg_ext_info.get('author', ''),
            'copyright_stat': app_msg_ext_info.get('copyright_stat', '')
        })
        if app_msg_ext_info.get('is_multi', 0) == 1:
            for multi_dict in app_msg_ext_info['multi_app_msg_item_list']:
                items.append({
                    'send_id': send_id,
                    'datetime': msg_datetime,
                    'type': msg_type,
                    'main': 0,
                    'title': multi_dict.get('title', ''),
                    'abstract': multi_dict.get('digest', ''),
                    'fileid': multi_dict.get('fileid', ''),
                    'content_url': WechatSogouStructuring.__handle_content_url(multi_dict.get('content_url')),
                    'source_url': multi_dict.get('source_url', ''),
                    'cover': multi_dict.get('cover', ''),
                    'author': multi_dict.get('author', ''),
                    'copyright_stat': multi_dict.get('copyright_stat', '')
                })
    return list(filter(lambda x: x['content_url'], items))
```
"def",
"get_article_by_history_json",
"(",
"text",
",",
"article_json",
"=",
"None",
")",
":",
"if",
"article_json",
"is",
"None",
":",
"article_json",
"=",
"find_article_json_re",
".",
"findall",
"(",
"text",
")",
"if",
"not",
"article_json",
":",
"return",
"[",
"]",
"article_json",
"=",
"article_json",
"[",
"0",
"]",
"+",
"'}}]}'",
"article_json",
"=",
"json",
".",
"loads",
"(",
"article_json",
")",
"items",
"=",
"list",
"(",
")",
"for",
"listdic",
"in",
"article_json",
"[",
"'list'",
"]",
":",
"if",
"str",
"(",
"listdic",
"[",
"'comm_msg_info'",
"]",
".",
"get",
"(",
"'type'",
",",
"''",
")",
")",
"!=",
"'49'",
":",
"continue",
"comm_msg_info",
"=",
"listdic",
"[",
"'comm_msg_info'",
"]",
"app_msg_ext_info",
"=",
"listdic",
"[",
"'app_msg_ext_info'",
"]",
"send_id",
"=",
"comm_msg_info",
".",
"get",
"(",
"'id'",
",",
"''",
")",
"msg_datetime",
"=",
"comm_msg_info",
".",
"get",
"(",
"'datetime'",
",",
"''",
")",
"msg_type",
"=",
"str",
"(",
"comm_msg_info",
".",
"get",
"(",
"'type'",
",",
"''",
")",
")",
"items",
".",
"append",
"(",
"{",
"'send_id'",
":",
"send_id",
",",
"'datetime'",
":",
"msg_datetime",
",",
"'type'",
":",
"msg_type",
",",
"'main'",
":",
"1",
",",
"'title'",
":",
"app_msg_ext_info",
".",
"get",
"(",
"'title'",
",",
"''",
")",
",",
"'abstract'",
":",
"app_msg_ext_info",
".",
"get",
"(",
"'digest'",
",",
"''",
")",
",",
"'fileid'",
":",
"app_msg_ext_info",
".",
"get",
"(",
"'fileid'",
",",
"''",
")",
",",
"'content_url'",
":",
"WechatSogouStructuring",
".",
"__handle_content_url",
"(",
"app_msg_ext_info",
".",
"get",
"(",
"'content_url'",
")",
")",
",",
"'source_url'",
":",
"app_msg_ext_info",
".",
"get",
"(",
"'source_url'",
",",
"''",
")",
",",
"'cover'",
":",
"app_msg_ext_info",
".",
"get",
"(",
"'cover'",
",",
"''",
")",
",",
"'author'",
":",
"app_msg_ext_info",
".",
"get",
"(",
"'author'",
",",
"''",
")",
",",
"'copyright_stat'",
":",
"app_msg_ext_info",
".",
"get",
"(",
"'copyright_stat'",
",",
"''",
")",
"}",
")",
"if",
"app_msg_ext_info",
".",
"get",
"(",
"'is_multi'",
",",
"0",
")",
"==",
"1",
":",
"for",
"multi_dict",
"in",
"app_msg_ext_info",
"[",
"'multi_app_msg_item_list'",
"]",
":",
"items",
".",
"append",
"(",
"{",
"'send_id'",
":",
"send_id",
",",
"'datetime'",
":",
"msg_datetime",
",",
"'type'",
":",
"msg_type",
",",
"'main'",
":",
"0",
",",
"'title'",
":",
"multi_dict",
".",
"get",
"(",
"'title'",
",",
"''",
")",
",",
"'abstract'",
":",
"multi_dict",
".",
"get",
"(",
"'digest'",
",",
"''",
")",
",",
"'fileid'",
":",
"multi_dict",
".",
"get",
"(",
"'fileid'",
",",
"''",
")",
",",
"'content_url'",
":",
"WechatSogouStructuring",
".",
"__handle_content_url",
"(",
"multi_dict",
".",
"get",
"(",
"'content_url'",
")",
")",
",",
"'source_url'",
":",
"multi_dict",
".",
"get",
"(",
"'source_url'",
",",
"''",
")",
",",
"'cover'",
":",
"multi_dict",
".",
"get",
"(",
"'cover'",
",",
"''",
")",
",",
"'author'",
":",
"multi_dict",
".",
"get",
"(",
"'author'",
",",
"''",
")",
",",
"'copyright_stat'",
":",
"multi_dict",
".",
"get",
"(",
"'copyright_stat'",
",",
"''",
")",
"}",
")",
"return",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"'content_url'",
"]",
",",
"items",
")",
")"
] | 40.632911 | [
0.017543859649122806,
0.06666666666666667,
0,
0.1111111111111111,
0.1111111111111111,
0.10344827586206896,
0.1,
0.1111111111111111,
0.05405405405405406,
0,
0.13333333333333333,
0.13333333333333333,
0.1111111111111111,
0.23076923076923078,
0.03125,
0.044444444444444446,
0.044444444444444446,
0.046511627906976744,
0.05555555555555555,
0.05405405405405406,
0.0625,
0.047619047619047616,
0.045454545454545456,
0.05714285714285714,
0.05714285714285714,
0.038461538461538464,
0.23076923076923078,
0,
0.18181818181818182,
0.0625,
0.03278688524590164,
0.0625,
0.08,
0.0392156862745098,
0.0392156862745098,
0,
0.09090909090909091,
0,
0.045454545454545456,
0.028985507246376812,
0.08333333333333333,
0,
0.038461538461538464,
0.034482758620689655,
0.04081632653061224,
0.03333333333333333,
0.03508771929824561,
0,
0.11538461538461539,
0.05714285714285714,
0.04878048780487805,
0.06060606060606061,
0.02857142857142857,
0.031746031746031744,
0.03278688524590164,
0.026785714285714284,
0.028985507246376812,
0.03389830508474576,
0.03278688524590164,
0.02631578947368421,
0.21428571428571427,
0,
0.03571428571428571,
0.02564102564102564,
0.08823529411764706,
0.046511627906976744,
0.04081632653061224,
0.04878048780487805,
0.027777777777777776,
0.03076923076923077,
0.031746031746031744,
0.02631578947368421,
0.028169014084507043,
0.03278688524590164,
0.031746031746031744,
0.02564102564102564,
0.13636363636363635,
0,
0.03225806451612903
] |
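The token stream above reconstructs WeChat article metadata dicts (`comm_msg_info`, `app_msg_ext_info`) and ends by dropping entries without a content URL. A minimal sketch of that final filtering step (the surrounding method is not shown in this row; the helper name is illustrative):

def keep_linkable(items):
    # Keep only parsed articles that carry a usable 'content_url' field,
    # mirroring the list(filter(...)) at the end of the token stream above.
    return list(filter(lambda x: x['content_url'], items))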
def upgrade_bcbio(args):
"""Perform upgrade of bcbio to latest release, or from GitHub development version.
Handles bcbio, third party tools and data.
"""
print("Upgrading bcbio")
args = add_install_defaults(args)
if args.upgrade in ["stable", "system", "deps", "development"]:
if args.upgrade == "development":
anaconda_dir = _update_conda_devel()
_check_for_conda_problems()
print("Upgrading bcbio-nextgen to latest development version")
pip_bin = os.path.join(os.path.dirname(os.path.realpath(sys.executable)), "pip")
git_tag = "@%s" % args.revision if args.revision != "master" else ""
_pip_safe_ssl([[pip_bin, "install", "--upgrade", "--no-deps",
"git+%s%s#egg=bcbio-nextgen" % (REMOTES["gitrepo"], git_tag)]], anaconda_dir)
print("Upgrade of bcbio-nextgen development code complete.")
else:
_update_conda_packages()
_check_for_conda_problems()
print("Upgrade of bcbio-nextgen code complete.")
if args.cwl and args.upgrade:
_update_bcbiovm()
try:
_set_matplotlib_default_backend()
except OSError:
pass
if args.tooldir:
with bcbio_tmpdir():
print("Upgrading third party tools to latest versions")
_symlink_bcbio(args, script="bcbio_nextgen.py")
_symlink_bcbio(args, script="bcbio_setup_genome.py")
_symlink_bcbio(args, script="bcbio_prepare_samples.py")
_symlink_bcbio(args, script="bcbio_fastq_umi_prep.py")
if args.cwl:
_symlink_bcbio(args, "bcbio_vm.py", "bcbiovm")
_symlink_bcbio(args, "python", "bcbiovm", "bcbiovm")
upgrade_thirdparty_tools(args, REMOTES)
print("Third party tools upgrade complete.")
if args.toolplus:
print("Installing additional tools")
_install_toolplus(args)
if args.install_data:
for default in DEFAULT_INDEXES:
if default not in args.aligners:
args.aligners.append(default)
if len(args.aligners) == 0:
print("Warning: no aligners provided with `--aligners` flag")
if len(args.genomes) == 0:
print("Data not installed, no genomes provided with `--genomes` flag")
else:
with bcbio_tmpdir():
print("Upgrading bcbio-nextgen data files")
upgrade_bcbio_data(args, REMOTES)
print("bcbio-nextgen data upgrade complete.")
if args.isolate and args.tooldir:
print("Isolated tool installation not automatically added to environmental variables")
print(" Add:\n {t}/bin to PATH".format(t=args.tooldir))
save_install_defaults(args)
args.datadir = _get_data_dir()
_install_container_bcbio_system(args.datadir)
print("Upgrade completed successfully.")
return args | [
"def",
"upgrade_bcbio",
"(",
"args",
")",
":",
"print",
"(",
"\"Upgrading bcbio\"",
")",
"args",
"=",
"add_install_defaults",
"(",
"args",
")",
"if",
"args",
".",
"upgrade",
"in",
"[",
"\"stable\"",
",",
"\"system\"",
",",
"\"deps\"",
",",
"\"development\"",
"]",
":",
"if",
"args",
".",
"upgrade",
"==",
"\"development\"",
":",
"anaconda_dir",
"=",
"_update_conda_devel",
"(",
")",
"_check_for_conda_problems",
"(",
")",
"print",
"(",
"\"Upgrading bcbio-nextgen to latest development version\"",
")",
"pip_bin",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"sys",
".",
"executable",
")",
")",
",",
"\"pip\"",
")",
"git_tag",
"=",
"\"@%s\"",
"%",
"args",
".",
"revision",
"if",
"args",
".",
"revision",
"!=",
"\"master\"",
"else",
"\"\"",
"_pip_safe_ssl",
"(",
"[",
"[",
"pip_bin",
",",
"\"install\"",
",",
"\"--upgrade\"",
",",
"\"--no-deps\"",
",",
"\"git+%s%s#egg=bcbio-nextgen\"",
"%",
"(",
"REMOTES",
"[",
"\"gitrepo\"",
"]",
",",
"git_tag",
")",
"]",
"]",
",",
"anaconda_dir",
")",
"print",
"(",
"\"Upgrade of bcbio-nextgen development code complete.\"",
")",
"else",
":",
"_update_conda_packages",
"(",
")",
"_check_for_conda_problems",
"(",
")",
"print",
"(",
"\"Upgrade of bcbio-nextgen code complete.\"",
")",
"if",
"args",
".",
"cwl",
"and",
"args",
".",
"upgrade",
":",
"_update_bcbiovm",
"(",
")",
"try",
":",
"_set_matplotlib_default_backend",
"(",
")",
"except",
"OSError",
":",
"pass",
"if",
"args",
".",
"tooldir",
":",
"with",
"bcbio_tmpdir",
"(",
")",
":",
"print",
"(",
"\"Upgrading third party tools to latest versions\"",
")",
"_symlink_bcbio",
"(",
"args",
",",
"script",
"=",
"\"bcbio_nextgen.py\"",
")",
"_symlink_bcbio",
"(",
"args",
",",
"script",
"=",
"\"bcbio_setup_genome.py\"",
")",
"_symlink_bcbio",
"(",
"args",
",",
"script",
"=",
"\"bcbio_prepare_samples.py\"",
")",
"_symlink_bcbio",
"(",
"args",
",",
"script",
"=",
"\"bcbio_fastq_umi_prep.py\"",
")",
"if",
"args",
".",
"cwl",
":",
"_symlink_bcbio",
"(",
"args",
",",
"\"bcbio_vm.py\"",
",",
"\"bcbiovm\"",
")",
"_symlink_bcbio",
"(",
"args",
",",
"\"python\"",
",",
"\"bcbiovm\"",
",",
"\"bcbiovm\"",
")",
"upgrade_thirdparty_tools",
"(",
"args",
",",
"REMOTES",
")",
"print",
"(",
"\"Third party tools upgrade complete.\"",
")",
"if",
"args",
".",
"toolplus",
":",
"print",
"(",
"\"Installing additional tools\"",
")",
"_install_toolplus",
"(",
"args",
")",
"if",
"args",
".",
"install_data",
":",
"for",
"default",
"in",
"DEFAULT_INDEXES",
":",
"if",
"default",
"not",
"in",
"args",
".",
"aligners",
":",
"args",
".",
"aligners",
".",
"append",
"(",
"default",
")",
"if",
"len",
"(",
"args",
".",
"aligners",
")",
"==",
"0",
":",
"print",
"(",
"\"Warning: no aligners provided with `--aligners` flag\"",
")",
"if",
"len",
"(",
"args",
".",
"genomes",
")",
"==",
"0",
":",
"print",
"(",
"\"Data not installed, no genomes provided with `--genomes` flag\"",
")",
"else",
":",
"with",
"bcbio_tmpdir",
"(",
")",
":",
"print",
"(",
"\"Upgrading bcbio-nextgen data files\"",
")",
"upgrade_bcbio_data",
"(",
"args",
",",
"REMOTES",
")",
"print",
"(",
"\"bcbio-nextgen data upgrade complete.\"",
")",
"if",
"args",
".",
"isolate",
"and",
"args",
".",
"tooldir",
":",
"print",
"(",
"\"Isolated tool installation not automatically added to environmental variables\"",
")",
"print",
"(",
"\" Add:\\n {t}/bin to PATH\"",
".",
"format",
"(",
"t",
"=",
"args",
".",
"tooldir",
")",
")",
"save_install_defaults",
"(",
"args",
")",
"args",
".",
"datadir",
"=",
"_get_data_dir",
"(",
")",
"_install_container_bcbio_system",
"(",
"args",
".",
"datadir",
")",
"print",
"(",
"\"Upgrade completed successfully.\"",
")",
"return",
"args"
] | 44.492308 | [
0.041666666666666664,
0.03488372093023256,
0,
0.043478260869565216,
0.2857142857142857,
0.07142857142857142,
0.05405405405405406,
0.029850746268656716,
0.04878048780487805,
0.041666666666666664,
0.05128205128205128,
0.02702702702702703,
0.03260869565217391,
0.0375,
0.0410958904109589,
0.0380952380952381,
0.027777777777777776,
0.15384615384615385,
0.05555555555555555,
0.05128205128205128,
0.03333333333333333,
0.06060606060606061,
0.08,
0,
0.25,
0.04878048780487805,
0.10526315789473684,
0.16666666666666666,
0,
0.1,
0.07142857142857142,
0.029850746268656716,
0.03389830508474576,
0.03125,
0.029850746268656716,
0.030303030303030304,
0.08333333333333333,
0.03225806451612903,
0.029411764705882353,
0.0392156862745098,
0.03571428571428571,
0.09523809523809523,
0.045454545454545456,
0.06451612903225806,
0.08,
0.05128205128205128,
0.045454545454545456,
0.044444444444444446,
0.05714285714285714,
0.0273972602739726,
0.058823529411764705,
0.036585365853658534,
0.15384615384615385,
0.0625,
0.03389830508474576,
0.04081632653061224,
0.03278688524590164,
0.05405405405405406,
0.031914893617021274,
0.03125,
0.06451612903225806,
0.058823529411764705,
0.04081632653061224,
0.045454545454545456,
0.13333333333333333
] |
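A standalone sketch of the development-upgrade step above (the repo URL argument is a placeholder; the real function resolves pip inside the conda environment and wraps the call in _pip_safe_ssl):

import subprocess
import sys

def pip_install_from_git(repo_url, revision="master"):
    # Pin a git revision unless it is "master", mirroring git_tag above.
    tag = "@%s" % revision if revision != "master" else ""
    subprocess.check_call([sys.executable, "-m", "pip", "install",
                           "--upgrade", "--no-deps",
                           "git+%s%s#egg=bcbio-nextgen" % (repo_url, tag)])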
def search(self, query: 're.Pattern') -> 'Iterable[_WorkTitles]':
"""Search titles using a compiled RE query."""
titles: 'Titles'
for titles in self._titles_list:
title: 'AnimeTitle'
for title in titles.titles:
if query.search(title.title):
yield WorkTitles(
aid=titles.aid,
main_title=_get_main_title(titles.titles),
titles=[t.title for t in titles.titles],
)
continue | [
"def",
"search",
"(",
"self",
",",
"query",
":",
"'re.Pattern'",
")",
"->",
"'Iterable[_WorkTitles]'",
":",
"titles",
":",
"'Titles'",
"for",
"titles",
"in",
"self",
".",
"_titles_list",
":",
"title",
":",
"'AnimeTitle'",
"for",
"title",
"in",
"titles",
".",
"titles",
":",
"if",
"query",
".",
"search",
"(",
"title",
".",
"title",
")",
":",
"yield",
"WorkTitles",
"(",
"aid",
"=",
"titles",
".",
"aid",
",",
"main_title",
"=",
"_get_main_title",
"(",
"titles",
".",
"titles",
")",
",",
"titles",
"=",
"[",
"t",
".",
"title",
"for",
"t",
"in",
"titles",
".",
"titles",
"]",
",",
")",
"continue"
] | 42.538462 | [
0.015384615384615385,
0.037037037037037035,
0.08333333333333333,
0.05,
0.06451612903225806,
0.05128205128205128,
0.044444444444444446,
0.08108108108108109,
0.07692307692307693,
0.045454545454545456,
0.046875,
0.14285714285714285,
0.07142857142857142
] |
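Hypothetical usage of the generator above, assuming a populated instance (here called searcher); any compiled regular expression works, including case-insensitive ones:

import re

pattern = re.compile(r'evangelion', re.IGNORECASE)
for work in searcher.search(pattern):  # searcher is an assumed instance
    # Each yielded WorkTitles carries the anime id, its main title, and
    # every known title variant.
    print(work.aid, work.main_title)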
def get_auto_allocated_topology(self, project_id, **_params):
"""Fetch information about a project's auto-allocated topology."""
return self.get(
self.auto_allocated_topology_path % project_id,
params=_params) | [
"def",
"get_auto_allocated_topology",
"(",
"self",
",",
"project_id",
",",
"*",
"*",
"_params",
")",
":",
"return",
"self",
".",
"get",
"(",
"self",
".",
"auto_allocated_topology_path",
"%",
"project_id",
",",
"params",
"=",
"_params",
")"
] | 49 | [
0.01639344262295082,
0.02702702702702703,
0.125,
0.03389830508474576,
0.14814814814814814
] |
def setup_menu(self, minmax):
"""Setup context menu"""
if self.minmax_action is not None:
self.minmax_action.setChecked(minmax)
return
resize_action = create_action(self, _("Resize rows to contents"),
triggered=self.resizeRowsToContents)
resize_columns_action = create_action(
self,
_("Resize columns to contents"),
triggered=self.resize_column_contents)
self.paste_action = create_action(self, _("Paste"),
icon=ima.icon('editpaste'),
triggered=self.paste)
self.copy_action = create_action(self, _("Copy"),
icon=ima.icon('editcopy'),
triggered=self.copy)
self.edit_action = create_action(self, _("Edit"),
icon=ima.icon('edit'),
triggered=self.edit_item)
self.plot_action = create_action(self, _("Plot"),
icon=ima.icon('plot'),
triggered=lambda: self.plot_item('plot'))
self.plot_action.setVisible(False)
self.hist_action = create_action(self, _("Histogram"),
icon=ima.icon('hist'),
triggered=lambda: self.plot_item('hist'))
self.hist_action.setVisible(False)
self.imshow_action = create_action(self, _("Show image"),
icon=ima.icon('imshow'),
triggered=self.imshow_item)
self.imshow_action.setVisible(False)
self.save_array_action = create_action(self, _("Save array"),
icon=ima.icon('filesave'),
triggered=self.save_array)
self.save_array_action.setVisible(False)
self.insert_action = create_action(self, _("Insert"),
icon=ima.icon('insert'),
triggered=self.insert_item)
self.remove_action = create_action(self, _("Remove"),
icon=ima.icon('editdelete'),
triggered=self.remove_item)
self.minmax_action = create_action(self, _("Show arrays min/max"),
toggled=self.toggle_minmax)
self.minmax_action.setChecked(minmax)
self.toggle_minmax(minmax)
self.rename_action = create_action(self, _("Rename"),
icon=ima.icon('rename'),
triggered=self.rename_item)
self.duplicate_action = create_action(self, _("Duplicate"),
icon=ima.icon('edit_add'),
triggered=self.duplicate_item)
menu = QMenu(self)
menu_actions = [self.edit_action, self.plot_action, self.hist_action,
self.imshow_action, self.save_array_action,
self.insert_action, self.remove_action,
self.copy_action, self.paste_action,
None, self.rename_action, self.duplicate_action,
None, resize_action, resize_columns_action]
if ndarray is not FakeObject:
menu_actions.append(self.minmax_action)
add_actions(menu, menu_actions)
self.empty_ws_menu = QMenu(self)
add_actions(self.empty_ws_menu,
[self.insert_action, self.paste_action,
None, resize_action, resize_columns_action])
return menu | [
"def",
"setup_menu",
"(",
"self",
",",
"minmax",
")",
":",
"if",
"self",
".",
"minmax_action",
"is",
"not",
"None",
":",
"self",
".",
"minmax_action",
".",
"setChecked",
"(",
"minmax",
")",
"return",
"resize_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Resize rows to contents\"",
")",
",",
"triggered",
"=",
"self",
".",
"resizeRowsToContents",
")",
"resize_columns_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Resize columns to contents\"",
")",
",",
"triggered",
"=",
"self",
".",
"resize_column_contents",
")",
"self",
".",
"paste_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Paste\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'editpaste'",
")",
",",
"triggered",
"=",
"self",
".",
"paste",
")",
"self",
".",
"copy_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Copy\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'editcopy'",
")",
",",
"triggered",
"=",
"self",
".",
"copy",
")",
"self",
".",
"edit_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Edit\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'edit'",
")",
",",
"triggered",
"=",
"self",
".",
"edit_item",
")",
"self",
".",
"plot_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Plot\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'plot'",
")",
",",
"triggered",
"=",
"lambda",
":",
"self",
".",
"plot_item",
"(",
"'plot'",
")",
")",
"self",
".",
"plot_action",
".",
"setVisible",
"(",
"False",
")",
"self",
".",
"hist_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Histogram\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'hist'",
")",
",",
"triggered",
"=",
"lambda",
":",
"self",
".",
"plot_item",
"(",
"'hist'",
")",
")",
"self",
".",
"hist_action",
".",
"setVisible",
"(",
"False",
")",
"self",
".",
"imshow_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Show image\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'imshow'",
")",
",",
"triggered",
"=",
"self",
".",
"imshow_item",
")",
"self",
".",
"imshow_action",
".",
"setVisible",
"(",
"False",
")",
"self",
".",
"save_array_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Save array\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'filesave'",
")",
",",
"triggered",
"=",
"self",
".",
"save_array",
")",
"self",
".",
"save_array_action",
".",
"setVisible",
"(",
"False",
")",
"self",
".",
"insert_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Insert\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'insert'",
")",
",",
"triggered",
"=",
"self",
".",
"insert_item",
")",
"self",
".",
"remove_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Remove\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'editdelete'",
")",
",",
"triggered",
"=",
"self",
".",
"remove_item",
")",
"self",
".",
"minmax_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Show arrays min/max\"",
")",
",",
"toggled",
"=",
"self",
".",
"toggle_minmax",
")",
"self",
".",
"minmax_action",
".",
"setChecked",
"(",
"minmax",
")",
"self",
".",
"toggle_minmax",
"(",
"minmax",
")",
"self",
".",
"rename_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Rename\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'rename'",
")",
",",
"triggered",
"=",
"self",
".",
"rename_item",
")",
"self",
".",
"duplicate_action",
"=",
"create_action",
"(",
"self",
",",
"_",
"(",
"\"Duplicate\"",
")",
",",
"icon",
"=",
"ima",
".",
"icon",
"(",
"'edit_add'",
")",
",",
"triggered",
"=",
"self",
".",
"duplicate_item",
")",
"menu",
"=",
"QMenu",
"(",
"self",
")",
"menu_actions",
"=",
"[",
"self",
".",
"edit_action",
",",
"self",
".",
"plot_action",
",",
"self",
".",
"hist_action",
",",
"self",
".",
"imshow_action",
",",
"self",
".",
"save_array_action",
",",
"self",
".",
"insert_action",
",",
"self",
".",
"remove_action",
",",
"self",
".",
"copy_action",
",",
"self",
".",
"paste_action",
",",
"None",
",",
"self",
".",
"rename_action",
",",
"self",
".",
"duplicate_action",
",",
"None",
",",
"resize_action",
",",
"resize_columns_action",
"]",
"if",
"ndarray",
"is",
"not",
"FakeObject",
":",
"menu_actions",
".",
"append",
"(",
"self",
".",
"minmax_action",
")",
"add_actions",
"(",
"menu",
",",
"menu_actions",
")",
"self",
".",
"empty_ws_menu",
"=",
"QMenu",
"(",
"self",
")",
"add_actions",
"(",
"self",
".",
"empty_ws_menu",
",",
"[",
"self",
".",
"insert_action",
",",
"self",
".",
"paste_action",
",",
"None",
",",
"resize_action",
",",
"resize_columns_action",
"]",
")",
"return",
"menu"
] | 57.5 | [
0.03333333333333333,
0.030303030303030304,
0.06976744186046512,
0.02,
0.05263157894736842,
0.1111111111111111,
0.04054054054054054,
0.05333333333333334,
0.0425531914893617,
0.1111111111111111,
0.044444444444444446,
0.058823529411764705,
0.05,
0.05714285714285714,
0.0625,
0.05172413793103448,
0.058823529411764705,
0.06451612903225806,
0.05172413793103448,
0.0625,
0.05970149253731343,
0.05172413793103448,
0.05084745762711865,
0.05128205128205128,
0.023255813953488372,
0.047619047619047616,
0.05084745762711865,
0.05128205128205128,
0.023255813953488372,
0.045454545454545456,
0.058823529411764705,
0.056338028169014086,
0.022222222222222223,
0.04285714285714286,
0.05405405405405406,
0.05405405405405406,
0.02040816326530612,
0.04838709677419355,
0.058823529411764705,
0.056338028169014086,
0.04838709677419355,
0.05555555555555555,
0.056338028169014086,
0.04,
0.056338028169014086,
0.021739130434782608,
0.02857142857142857,
0.04838709677419355,
0.058823529411764705,
0.056338028169014086,
0.04411764705882353,
0.0547945205479452,
0.05194805194805195,
0.037037037037037035,
0.038461538461538464,
0.029411764705882353,
0.03125,
0.03278688524590164,
0.0273972602739726,
0.029411764705882353,
0.07894736842105263,
0.019230769230769232,
0.025,
0.024390243902439025,
0.075,
0.05,
0.045454545454545456,
0.10526315789473684
] |
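The method leans on Spyder's create_action helper. A minimal stand-in built only from public Qt API (PyQt5 assumed here; in Qt 6 QAction lives in QtGui instead of QtWidgets):

from PyQt5.QtWidgets import QAction

def make_action(parent, text, slot, icon=None):
    # Build a QAction wired to a slot, roughly what create_action does above.
    action = QAction(text, parent)
    if icon is not None:
        action.setIcon(icon)
    action.triggered.connect(slot)
    return action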
def getLowerDetectionLimit(self):
"""Returns the Lower Detection Limit (LDL) that applies to this
    analysis in particular. If no value is set, or the analysis service
    doesn't allow manual input of detection limits, returns the default
    value set in the Analysis Service
"""
if self.isLowerDetectionLimit():
result = self.getResult()
try:
# in this case, the result itself is the LDL.
return float(result)
except (TypeError, ValueError):
logger.warn("The result for the analysis %s is a lower "
"detection limit, but not floatable: '%s'. "
"Returnig AS's default LDL." %
(self.id, result))
return AbstractBaseAnalysis.getLowerDetectionLimit(self) | [
"def",
"getLowerDetectionLimit",
"(",
"self",
")",
":",
"if",
"self",
".",
"isLowerDetectionLimit",
"(",
")",
":",
"result",
"=",
"self",
".",
"getResult",
"(",
")",
"try",
":",
"# in this case, the result itself is the LDL.",
"return",
"float",
"(",
"result",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"logger",
".",
"warn",
"(",
"\"The result for the analysis %s is a lower \"",
"\"detection limit, but not floatable: '%s'. \"",
"\"Returnig AS's default LDL.\"",
"%",
"(",
"self",
".",
"id",
",",
"result",
")",
")",
"return",
"AbstractBaseAnalysis",
".",
"getLowerDetectionLimit",
"(",
"self",
")"
] | 50 | [
0.030303030303030304,
0.028169014084507043,
0.028169014084507043,
0.025974025974025976,
0.047619047619047616,
0.18181818181818182,
0.05,
0.05405405405405406,
0.125,
0.03278688524590164,
0.05555555555555555,
0.046511627906976744,
0.041666666666666664,
0.027777777777777776,
0.034482758620689655,
0.06521739130434782,
0.03125
] |
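The core of the branch above is a floatable check with a fallback. As a standalone sketch (names are illustrative):

def float_or_default(result, default):
    # When the stored result is itself the detection limit, use it;
    # otherwise fall back to the service default, as the method above does.
    try:
        return float(result)
    except (TypeError, ValueError):
        return default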
def sighandler(signum, handler):
"""
Context manager to run code with UNIX signal `signum` bound to `handler`.
The existing handler is saved upon entering the context and restored upon
exit.
The `handler` argument may be anything that can be passed to Python's
`signal.signal <https://docs.python.org/2/library/signal.html#signal.signal>`_
standard library call.
"""
prev_handler = signal.getsignal(signum)
signal.signal(signum, handler)
yield
signal.signal(signum, prev_handler) | [
"def",
"sighandler",
"(",
"signum",
",",
"handler",
")",
":",
"prev_handler",
"=",
"signal",
".",
"getsignal",
"(",
"signum",
")",
"signal",
".",
"signal",
"(",
"signum",
",",
"handler",
")",
"yield",
"signal",
".",
"signal",
"(",
"signum",
",",
"prev_handler",
")"
] | 34.333333 | [
0.03125,
0.2857142857142857,
0.03896103896103896,
0,
0.025974025974025976,
0.2222222222222222,
0,
0.0410958904109589,
0.0975609756097561,
0.07692307692307693,
0.2857142857142857,
0.046511627906976744,
0.058823529411764705,
0.2222222222222222,
0.05128205128205128
] |
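Assuming the snippet is decorated with contextlib.contextmanager (the decorator itself is not shown in this row), usage looks like this; do_critical_work is a placeholder:

import signal

def run_uninterruptible():
    # Ignore Ctrl-C for the duration of the block; the previous handler
    # is restored automatically on exit.
    with sighandler(signal.SIGINT, signal.SIG_IGN):
        do_critical_work()  # placeholder for the protected operation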
def _init_map(self):
"""stub"""
super(MultiLanguageDragAndDropQuestionFormRecord, self)._init_map()
self.my_osid_object_form._my_map['droppables'] = \
self._droppables_metadata['default_object_values'][0]
self.my_osid_object_form._my_map['targets'] = \
self._targets_metadata['default_object_values'][0]
self.my_osid_object_form._my_map['zones'] = \
self._zones_metadata['default_object_values'][0]
self.my_osid_object_form._my_map['shuffleDroppables'] = \
bool(self._shuffle_droppables_metadata['default_boolean_values'][0])
self.my_osid_object_form._my_map['shuffleTargets'] = \
bool(self._shuffle_targets_metadata['default_boolean_values'][0])
self.my_osid_object_form._my_map['shuffleZones'] = \
bool(self._shuffle_zones_metadata['default_boolean_values'][0]) | [
"def",
"_init_map",
"(",
"self",
")",
":",
"super",
"(",
"MultiLanguageDragAndDropQuestionFormRecord",
",",
"self",
")",
".",
"_init_map",
"(",
")",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'droppables'",
"]",
"=",
"self",
".",
"_droppables_metadata",
"[",
"'default_object_values'",
"]",
"[",
"0",
"]",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'targets'",
"]",
"=",
"self",
".",
"_targets_metadata",
"[",
"'default_object_values'",
"]",
"[",
"0",
"]",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'zones'",
"]",
"=",
"self",
".",
"_zones_metadata",
"[",
"'default_object_values'",
"]",
"[",
"0",
"]",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'shuffleDroppables'",
"]",
"=",
"bool",
"(",
"self",
".",
"_shuffle_droppables_metadata",
"[",
"'default_boolean_values'",
"]",
"[",
"0",
"]",
")",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'shuffleTargets'",
"]",
"=",
"bool",
"(",
"self",
".",
"_shuffle_targets_metadata",
"[",
"'default_boolean_values'",
"]",
"[",
"0",
"]",
")",
"self",
".",
"my_osid_object_form",
".",
"_my_map",
"[",
"'shuffleZones'",
"]",
"=",
"bool",
"(",
"self",
".",
"_shuffle_zones_metadata",
"[",
"'default_boolean_values'",
"]",
"[",
"0",
"]",
")"
] | 59 | [
0.05,
0.1111111111111111,
0.02666666666666667,
0.034482758620689655,
0.03076923076923077,
0.03636363636363636,
0.03225806451612903,
0.03773584905660377,
0.03333333333333333,
0.03076923076923077,
0.0375,
0.03225806451612903,
0.025974025974025976,
0.03333333333333333,
0.02666666666666667
] |
def write(self, vm_address, value):
"""
Writes to VM addresses of Siemens Logo.
Example: write("VW10", 200) or write("V10.3", 1)
:param vm_address: write offset
:param value: integer
"""
area = snap7types.S7AreaDB
db_number = 1
start = 0
amount = 1
wordlen = 0
data = bytearray(0)
logger.debug("write, vm_address:%s, value:%s" %
(vm_address, value))
if re.match("^V[0-9]{1,4}\.[0-7]{1}$", vm_address):
## bit value
logger.info("read, Bit address: " + vm_address)
address = vm_address[1:].split(".")
# transform string to int
address_byte = int(address[0])
address_bit = int(address[1])
start = (address_byte*8)+address_bit
wordlen = snap7types.S7WLBit
if value > 0:
data = bytearray([1])
else:
data = bytearray([0])
elif re.match("^V[0-9]+$", vm_address):
## byte value
logger.info("Byte address: " + vm_address)
start = int(vm_address[1:])
wordlen = snap7types.S7WLByte
data = bytearray(struct.pack(">B", value))
elif re.match("^VW[0-9]+$", vm_address):
        ## word value
logger.info("Word address: " + vm_address)
start = int(vm_address[2:])
wordlen = snap7types.S7WLWord
data = bytearray(struct.pack(">h", value))
elif re.match("^VD[0-9]+$", vm_address):
        ## dword value
logger.info("DWord address: " + vm_address)
start = int(vm_address[2:])
wordlen = snap7types.S7WLDWord
data = bytearray(struct.pack(">l", value))
else:
logger.info("write, Unknown address format: " + vm_address)
return 1
if wordlen == snap7types.S7WLBit:
type_ = snap7.snap7types.wordlen_to_ctypes[snap7types.S7WLByte]
else:
type_ = snap7.snap7types.wordlen_to_ctypes[wordlen]
cdata = (type_ * amount).from_buffer_copy(data)
logger.debug("write, vm_address:%s value:%s" % (vm_address, value))
result = self.library.Cli_WriteArea(self.pointer, area, db_number, start,
amount, wordlen, byref(cdata))
check_error(result, context="client")
return result | [
"def",
"write",
"(",
"self",
",",
"vm_address",
",",
"value",
")",
":",
"area",
"=",
"snap7types",
".",
"S7AreaDB",
"db_number",
"=",
"1",
"start",
"=",
"0",
"amount",
"=",
"1",
"wordlen",
"=",
"0",
"data",
"=",
"bytearray",
"(",
"0",
")",
"logger",
".",
"debug",
"(",
"\"write, vm_address:%s, value:%s\"",
"%",
"(",
"vm_address",
",",
"value",
")",
")",
"if",
"re",
".",
"match",
"(",
"\"^V[0-9]{1,4}\\.[0-7]{1}$\"",
",",
"vm_address",
")",
":",
"## bit value",
"logger",
".",
"info",
"(",
"\"read, Bit address: \"",
"+",
"vm_address",
")",
"address",
"=",
"vm_address",
"[",
"1",
":",
"]",
".",
"split",
"(",
"\".\"",
")",
"# transform string to int",
"address_byte",
"=",
"int",
"(",
"address",
"[",
"0",
"]",
")",
"address_bit",
"=",
"int",
"(",
"address",
"[",
"1",
"]",
")",
"start",
"=",
"(",
"address_byte",
"*",
"8",
")",
"+",
"address_bit",
"wordlen",
"=",
"snap7types",
".",
"S7WLBit",
"if",
"value",
">",
"0",
":",
"data",
"=",
"bytearray",
"(",
"[",
"1",
"]",
")",
"else",
":",
"data",
"=",
"bytearray",
"(",
"[",
"0",
"]",
")",
"elif",
"re",
".",
"match",
"(",
"\"^V[0-9]+$\"",
",",
"vm_address",
")",
":",
"## byte value",
"logger",
".",
"info",
"(",
"\"Byte address: \"",
"+",
"vm_address",
")",
"start",
"=",
"int",
"(",
"vm_address",
"[",
"1",
":",
"]",
")",
"wordlen",
"=",
"snap7types",
".",
"S7WLByte",
"data",
"=",
"bytearray",
"(",
"struct",
".",
"pack",
"(",
"\">B\"",
",",
"value",
")",
")",
"elif",
"re",
".",
"match",
"(",
"\"^VW[0-9]+$\"",
",",
"vm_address",
")",
":",
"## byte value",
"logger",
".",
"info",
"(",
"\"Word address: \"",
"+",
"vm_address",
")",
"start",
"=",
"int",
"(",
"vm_address",
"[",
"2",
":",
"]",
")",
"wordlen",
"=",
"snap7types",
".",
"S7WLWord",
"data",
"=",
"bytearray",
"(",
"struct",
".",
"pack",
"(",
"\">h\"",
",",
"value",
")",
")",
"elif",
"re",
".",
"match",
"(",
"\"^VD[0-9]+$\"",
",",
"vm_address",
")",
":",
"## byte value",
"logger",
".",
"info",
"(",
"\"DWord address: \"",
"+",
"vm_address",
")",
"start",
"=",
"int",
"(",
"vm_address",
"[",
"2",
":",
"]",
")",
"wordlen",
"=",
"snap7types",
".",
"S7WLDWord",
"data",
"=",
"bytearray",
"(",
"struct",
".",
"pack",
"(",
"\">l\"",
",",
"value",
")",
")",
"else",
":",
"logger",
".",
"info",
"(",
"\"write, Unknown address format: \"",
"+",
"vm_address",
")",
"return",
"1",
"if",
"wordlen",
"==",
"snap7types",
".",
"S7WLBit",
":",
"type_",
"=",
"snap7",
".",
"snap7types",
".",
"wordlen_to_ctypes",
"[",
"snap7types",
".",
"S7WLByte",
"]",
"else",
":",
"type_",
"=",
"snap7",
".",
"snap7types",
".",
"wordlen_to_ctypes",
"[",
"wordlen",
"]",
"cdata",
"=",
"(",
"type_",
"*",
"amount",
")",
".",
"from_buffer_copy",
"(",
"data",
")",
"logger",
".",
"debug",
"(",
"\"write, vm_address:%s value:%s\"",
"%",
"(",
"vm_address",
",",
"value",
")",
")",
"result",
"=",
"self",
".",
"library",
".",
"Cli_WriteArea",
"(",
"self",
".",
"pointer",
",",
"area",
",",
"db_number",
",",
"start",
",",
"amount",
",",
"wordlen",
",",
"byref",
"(",
"cdata",
")",
")",
"check_error",
"(",
"result",
",",
"context",
"=",
"\"client\"",
")",
"return",
"result"
] | 37.859375 | [
0.05555555555555555,
0.18181818181818182,
0.0425531914893617,
0.03571428571428571,
0,
0.07692307692307693,
0.10344827586206896,
0.18181818181818182,
0.058823529411764705,
0.09523809523809523,
0.11764705882352941,
0.1111111111111111,
0.10526315789473684,
0.07407407407407407,
0.05454545454545454,
0.0975609756097561,
0.05084745762711865,
0.125,
0.03389830508474576,
0.0425531914893617,
0.05405405405405406,
0.047619047619047616,
0.04878048780487805,
0.041666666666666664,
0.05,
0.08,
0.07317073170731707,
0.11764705882352941,
0.05405405405405406,
0.0425531914893617,
0.12,
0.037037037037037035,
0.05128205128205128,
0.04878048780487805,
0.037037037037037035,
0.041666666666666664,
0.12,
0.037037037037037035,
0.05128205128205128,
0.04878048780487805,
0.037037037037037035,
0.041666666666666664,
0.12,
0.03636363636363636,
0.05128205128205128,
0.047619047619047616,
0.037037037037037035,
0.15384615384615385,
0.028169014084507043,
0.1,
0.25,
0.04878048780487805,
0.02666666666666667,
0.15384615384615385,
0.031746031746031744,
0.25,
0.03636363636363636,
0,
0.02666666666666667,
0,
0.04938271604938271,
0.05555555555555555,
0.044444444444444446,
0.09523809523809523
] |
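The four address branches above reduce to one struct format per width. A small illustration of the packing they perform (big-endian, matching the ">" prefix used in the code):

import struct

byte = bytearray(struct.pack(">B", 200))   # V addresses: 1-byte unsigned value
word = bytearray(struct.pack(">h", 200))   # VW addresses: 2-byte signed word
dword = bytearray(struct.pack(">l", 200))  # VD addresses: 4-byte signed dword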
def Rock(*args, **kwargs):
"""
Graceful deprecation for old class name.
"""
with warnings.catch_warnings():
warnings.simplefilter("always")
w = "The 'Rock' class was renamed 'Component'. "
w += "Please update your code."
warnings.warn(w, DeprecationWarning, stacklevel=2)
return Component(*args, **kwargs) | [
"def",
"Rock",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"warnings",
".",
"catch_warnings",
"(",
")",
":",
"warnings",
".",
"simplefilter",
"(",
"\"always\"",
")",
"w",
"=",
"\"The 'Rock' class was renamed 'Component'. \"",
"w",
"+=",
"\"Please update your code.\"",
"warnings",
".",
"warn",
"(",
"w",
",",
"DeprecationWarning",
",",
"stacklevel",
"=",
"2",
")",
"return",
"Component",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 29 | [
0.038461538461538464,
0.2857142857142857,
0.045454545454545456,
0.2857142857142857,
0,
0.05714285714285714,
0.05128205128205128,
0.03571428571428571,
0.05128205128205128,
0.034482758620689655,
0,
0.05405405405405406
] |
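The same graceful-deprecation pattern, generalized into a reusable factory (a sketch; names are illustrative, not part of the library):

import warnings

def deprecated_alias(new_func, old_name, new_name):
    # Forward calls to the renamed callable and warn on every use.
    def wrapper(*args, **kwargs):
        with warnings.catch_warnings():
            warnings.simplefilter("always")
            warnings.warn("The '%s' name was renamed '%s'. "
                          "Please update your code." % (old_name, new_name),
                          DeprecationWarning, stacklevel=2)
        return new_func(*args, **kwargs)
    return wrapper

# e.g. Rock = deprecated_alias(Component, 'Rock', 'Component')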
def error(self):
"""Returns the error for this barrier and all work items, if any."""
# Copy the error from any failed item to be the error for the whole
# barrier. The first error seen "wins". Also handles the case where
# the WorkItems passed into the barrier have already completed and
# been marked with errors.
for item in self:
if isinstance(item, WorkItem) and item.error:
return item.error
return None | [
"def",
"error",
"(",
"self",
")",
":",
"# Copy the error from any failed item to be the error for the whole",
"# barrier. The first error seen \"wins\". Also handles the case where",
"# the WorkItems passed into the barrier have already completed and",
"# been marked with errors.",
"for",
"item",
"in",
"self",
":",
"if",
"isinstance",
"(",
"item",
",",
"WorkItem",
")",
"and",
"item",
".",
"error",
":",
"return",
"item",
".",
"error",
"return",
"None"
] | 48.4 | [
0.0625,
0.02631578947368421,
0.02666666666666667,
0.02666666666666667,
0.02702702702702703,
0.058823529411764705,
0.08,
0.03508771929824561,
0.06060606060606061,
0.10526315789473684
] |
def _visit(self, L, marked, tempmarked):
"""
Sort features topologically.
This recursive function uses depth-first search to find an ordering of
the features in the feature graph that is sorted both topologically and
with respect to genome coordinates.
Implementation based on Wikipedia's description of the algorithm in
Cormen's *Introduction to Algorithms*.
http://en.wikipedia.org/wiki/Topological_sorting#Algorithms
There are potentially many valid topological sorts of a feature graph,
but only one that is also sorted with respect to genome coordinates
(excluding different orderings of, for example, exons and CDS features
with the same coordinates). Iterating through feature children in
    reversed order (in this function's innermost loop) seems to be the key
to sorting with respect to genome coordinates.
"""
assert not self.is_pseudo
if self in tempmarked:
raise Exception('feature graph is cyclic')
if self not in marked:
tempmarked[self] = True
features = list()
if self.siblings is not None and self.is_toplevel:
features.extend(reversed(self.siblings))
if self.children is not None:
features.extend(reversed(self.children))
if len(features) > 0:
for feature in features:
feature._visit(L, marked, tempmarked)
marked[self] = True
del tempmarked[self]
L.insert(0, self) | [
"def",
"_visit",
"(",
"self",
",",
"L",
",",
"marked",
",",
"tempmarked",
")",
":",
"assert",
"not",
"self",
".",
"is_pseudo",
"if",
"self",
"in",
"tempmarked",
":",
"raise",
"Exception",
"(",
"'feature graph is cyclic'",
")",
"if",
"self",
"not",
"in",
"marked",
":",
"tempmarked",
"[",
"self",
"]",
"=",
"True",
"features",
"=",
"list",
"(",
")",
"if",
"self",
".",
"siblings",
"is",
"not",
"None",
"and",
"self",
".",
"is_toplevel",
":",
"features",
".",
"extend",
"(",
"reversed",
"(",
"self",
".",
"siblings",
")",
")",
"if",
"self",
".",
"children",
"is",
"not",
"None",
":",
"features",
".",
"extend",
"(",
"reversed",
"(",
"self",
".",
"children",
")",
")",
"if",
"len",
"(",
"features",
")",
">",
"0",
":",
"for",
"feature",
"in",
"features",
":",
"feature",
".",
"_visit",
"(",
"L",
",",
"marked",
",",
"tempmarked",
")",
"marked",
"[",
"self",
"]",
"=",
"True",
"del",
"tempmarked",
"[",
"self",
"]",
"L",
".",
"insert",
"(",
"0",
",",
"self",
")"
] | 44.885714 | [
0.025,
0.18181818181818182,
0.05555555555555555,
0,
0.02564102564102564,
0.02531645569620253,
0.046511627906976744,
0,
0.02666666666666667,
0.06521739130434782,
0.07462686567164178,
0,
0.02564102564102564,
0.02666666666666667,
0.038461538461538464,
0.0410958904109589,
0.05063291139240506,
0.037037037037037035,
0.18181818181818182,
0.06060606060606061,
0.06666666666666667,
0.037037037037037035,
0.06666666666666667,
0.05714285714285714,
0.06896551724137931,
0.03225806451612903,
0.03571428571428571,
0.04878048780487805,
0.03571428571428571,
0.06060606060606061,
0.05,
0.03508771929824561,
0.06451612903225806,
0.0625,
0.06896551724137931
] |
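A self-contained version of the same depth-first topological sort, operating on a plain child mapping instead of feature objects (a sketch, not the library's API):

def toposort(nodes, children):
    # `children` maps each node to a list of its child nodes.
    order, marked, tempmarked = [], set(), set()

    def visit(node):
        if node in tempmarked:
            raise ValueError('graph is cyclic')
        if node in marked:
            return
        tempmarked.add(node)
        # Reversed child order preserves coordinate sorting, as noted above.
        for child in reversed(children.get(node, [])):
            visit(child)
        marked.add(node)
        tempmarked.discard(node)
        order.insert(0, node)

    for node in nodes:
        visit(node)
    return order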
def _determine_keys(dictionary):
"""Determine the different kinds of keys."""
optional = {}
defaults = {}
mandatory = {}
types = {}
for key, value in dictionary.items():
if isinstance(key, Optional):
optional[key.value] = parse_schema(value)
if isinstance(value, BaseSchema) and\
value.default is not UNSPECIFIED:
defaults[key.value] = (value.default, value.null_values)
continue # pragma: nocover
if type(key) is type:
types[key] = parse_schema(value)
continue
mandatory[key] = parse_schema(value)
return mandatory, optional, types, defaults | [
"def",
"_determine_keys",
"(",
"dictionary",
")",
":",
"optional",
"=",
"{",
"}",
"defaults",
"=",
"{",
"}",
"mandatory",
"=",
"{",
"}",
"types",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"dictionary",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"key",
",",
"Optional",
")",
":",
"optional",
"[",
"key",
".",
"value",
"]",
"=",
"parse_schema",
"(",
"value",
")",
"if",
"isinstance",
"(",
"value",
",",
"BaseSchema",
")",
"and",
"value",
".",
"default",
"is",
"not",
"UNSPECIFIED",
":",
"defaults",
"[",
"key",
".",
"value",
"]",
"=",
"(",
"value",
".",
"default",
",",
"value",
".",
"null_values",
")",
"continue",
"# pragma: nocover",
"if",
"type",
"(",
"key",
")",
"is",
"type",
":",
"types",
"[",
"key",
"]",
"=",
"parse_schema",
"(",
"value",
")",
"continue",
"mandatory",
"[",
"key",
"]",
"=",
"parse_schema",
"(",
"value",
")",
"return",
"mandatory",
",",
"optional",
",",
"types",
",",
"defaults"
] | 33.7 | [
0.03125,
0.041666666666666664,
0.11764705882352941,
0.11764705882352941,
0.1111111111111111,
0.14285714285714285,
0.04878048780487805,
0.05405405405405406,
0.03773584905660377,
0.04081632653061224,
0.03773584905660377,
0.027777777777777776,
0.05128205128205128,
0,
0.06896551724137931,
0.045454545454545456,
0.1,
0,
0.045454545454545456,
0.0425531914893617
] |
def set_debug(self, set_to=True):
"""
Sets the capture to debug mode (or turns it off if specified).
"""
if set_to:
StreamHandler(sys.stdout).push_application()
self._log.level = logbook.DEBUG
self.debug = set_to | [
"def",
"set_debug",
"(",
"self",
",",
"set_to",
"=",
"True",
")",
":",
"if",
"set_to",
":",
"StreamHandler",
"(",
"sys",
".",
"stdout",
")",
".",
"push_application",
"(",
")",
"self",
".",
"_log",
".",
"level",
"=",
"logbook",
".",
"DEBUG",
"self",
".",
"debug",
"=",
"set_to"
] | 33.625 | [
0.030303030303030304,
0.18181818181818182,
0.05714285714285714,
0.18181818181818182,
0.1111111111111111,
0.03571428571428571,
0.046511627906976744,
0.07407407407407407
] |
def find_city(self, city, state=None, best_match=True, min_similarity=70):
"""
    Fuzzy-search for the correct city.
    :param city: city name.
    :param state: search for the city in the specified state.
    :param best_match: bool; when True, only the best-matched city
        is returned; otherwise all matching cities are returned.
    If ``state`` is given, search only among the cities of that state;
    otherwise, search among all cities nationwide.
"""
    # find out which city the user is looking for
if state:
state_sort = self.find_state(state, best_match=True)[0]
city_pool = self.state_to_city_mapper[state_sort.upper()]
else:
city_pool = self.city_list
result_city_list = list()
if best_match:
city, confidence = extractOne(city, city_pool)
if confidence >= min_similarity:
result_city_list.append(city)
else:
for city, confidence in extract(city, city_pool):
if confidence >= min_similarity:
result_city_list.append(city)
if len(result_city_list) == 0:
raise ValueError("'%s' is not a valid city name" % city)
return result_city_list | [
"def",
"find_city",
"(",
"self",
",",
"city",
",",
"state",
"=",
"None",
",",
"best_match",
"=",
"True",
",",
"min_similarity",
"=",
"70",
")",
":",
"# find out what is the city that user looking for",
"if",
"state",
":",
"state_sort",
"=",
"self",
".",
"find_state",
"(",
"state",
",",
"best_match",
"=",
"True",
")",
"[",
"0",
"]",
"city_pool",
"=",
"self",
".",
"state_to_city_mapper",
"[",
"state_sort",
".",
"upper",
"(",
")",
"]",
"else",
":",
"city_pool",
"=",
"self",
".",
"city_list",
"result_city_list",
"=",
"list",
"(",
")",
"if",
"best_match",
":",
"city",
",",
"confidence",
"=",
"extractOne",
"(",
"city",
",",
"city_pool",
")",
"if",
"confidence",
">=",
"min_similarity",
":",
"result_city_list",
".",
"append",
"(",
"city",
")",
"else",
":",
"for",
"city",
",",
"confidence",
"in",
"extract",
"(",
"city",
",",
"city_pool",
")",
":",
"if",
"confidence",
">=",
"min_similarity",
":",
"result_city_list",
".",
"append",
"(",
"city",
")",
"if",
"len",
"(",
"result_city_list",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"'%s' is not a valid city name\"",
"%",
"city",
")",
"return",
"result_city_list"
] | 34.142857 | [
0.013513513513513514,
0.18181818181818182,
0.058823529411764705,
0,
0.0967741935483871,
0.05660377358490566,
0.04285714285714286,
0.029411764705882353,
0,
0.125,
0,
0.03571428571428571,
0.18181818181818182,
0.03508771929824561,
0.11764705882352941,
0.029850746268656716,
0.028985507246376812,
0.15384615384615385,
0.05263157894736842,
0,
0.06060606060606061,
0,
0.09090909090909091,
0.034482758620689655,
0.045454545454545456,
0.044444444444444446,
0.15384615384615385,
0.03278688524590164,
0.041666666666666664,
0.04081632653061224,
0,
0.05263157894736842,
0.029411764705882353,
0,
0.06451612903225806
] |
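The fuzzy matching above comes from the fuzzywuzzy package, whose process.extractOne returns a (choice, score) pair. A minimal standalone illustration with a made-up pool:

from fuzzywuzzy.process import extractOne

city_pool = ['Chicago', 'Houston', 'Phoenix']
match, confidence = extractOne('Hueston', city_pool)
if confidence >= 70:
    print(match)  # expected to print 'Houston'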
def find_doc(self, name=None, ns_uri=None, first_only=False):
"""
Find :class:`Element` node descendants of the document containing
this node, with optional constraints to limit the results.
Delegates to :meth:`find` applied to this node's owning document.
"""
return self.document.find(name=name, ns_uri=ns_uri,
first_only=first_only) | [
"def",
"find_doc",
"(",
"self",
",",
"name",
"=",
"None",
",",
"ns_uri",
"=",
"None",
",",
"first_only",
"=",
"False",
")",
":",
"return",
"self",
".",
"document",
".",
"find",
"(",
"name",
"=",
"name",
",",
"ns_uri",
"=",
"ns_uri",
",",
"first_only",
"=",
"first_only",
")"
] | 43.111111 | [
0.01639344262295082,
0.18181818181818182,
0.0821917808219178,
0.030303030303030304,
0,
0.0821917808219178,
0.18181818181818182,
0.05084745762711865,
0.11764705882352941
] |
def _fold_line(self, line):
"""Write string line as one or more folded lines."""
if len(line) <= self._cols:
self._output_file.write(line)
self._output_file.write(self._line_sep)
else:
pos = self._cols
self._output_file.write(line[0:self._cols])
self._output_file.write(self._line_sep)
while pos < len(line):
self._output_file.write(b' ')
end = min(len(line), pos + self._cols - 1)
self._output_file.write(line[pos:end])
self._output_file.write(self._line_sep)
pos = end | [
"def",
"_fold_line",
"(",
"self",
",",
"line",
")",
":",
"if",
"len",
"(",
"line",
")",
"<=",
"self",
".",
"_cols",
":",
"self",
".",
"_output_file",
".",
"write",
"(",
"line",
")",
"self",
".",
"_output_file",
".",
"write",
"(",
"self",
".",
"_line_sep",
")",
"else",
":",
"pos",
"=",
"self",
".",
"_cols",
"self",
".",
"_output_file",
".",
"write",
"(",
"line",
"[",
"0",
":",
"self",
".",
"_cols",
"]",
")",
"self",
".",
"_output_file",
".",
"write",
"(",
"self",
".",
"_line_sep",
")",
"while",
"pos",
"<",
"len",
"(",
"line",
")",
":",
"self",
".",
"_output_file",
".",
"write",
"(",
"b' '",
")",
"end",
"=",
"min",
"(",
"len",
"(",
"line",
")",
",",
"pos",
"+",
"self",
".",
"_cols",
"-",
"1",
")",
"self",
".",
"_output_file",
".",
"write",
"(",
"line",
"[",
"pos",
":",
"end",
"]",
")",
"self",
".",
"_output_file",
".",
"write",
"(",
"self",
".",
"_line_sep",
")",
"pos",
"=",
"end"
] | 42.133333 | [
0.037037037037037035,
0.03333333333333333,
0.05714285714285714,
0.04878048780487805,
0.0392156862745098,
0.15384615384615385,
0.07142857142857142,
0.03636363636363636,
0.0392156862745098,
0.058823529411764705,
0.044444444444444446,
0.034482758620689655,
0.037037037037037035,
0.03636363636363636,
0.08
] |
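The folding logic above as a pure function (a sketch; the real method writes directly to a file object instead of returning bytes):

def fold(line, cols=75, sep=b'\r\n'):
    # The first physical line holds `cols` bytes; each continuation line
    # starts with a single space and holds at most cols - 1 bytes.
    out = [line[:cols]]
    pos = cols
    while pos < len(line):
        end = min(len(line), pos + cols - 1)
        out.append(b' ' + line[pos:end])
        pos = end
    return sep.join(out) + sep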
async def _play(self, ctx, *, query: str):
""" Searches and plays a song from a given query. """
player = self.bot.lavalink.players.get(ctx.guild.id)
query = query.strip('<>')
if not url_rx.match(query):
query = f'ytsearch:{query}'
tracks = await self.bot.lavalink.get_tracks(query)
if not tracks:
return await ctx.send('Nothing found!')
embed = discord.Embed(color=discord.Color.blurple())
if 'list' in query and 'ytsearch:' not in query:
for track in tracks:
player.add(requester=ctx.author.id, track=track)
embed.title = 'Playlist enqueued!'
embed.description = f'Imported {len(tracks)} tracks from the playlist!'
await ctx.send(embed=embed)
else:
track_title = tracks[0]["info"]["title"]
track_uri = tracks[0]["info"]["uri"]
embed.title = "Track enqueued!"
embed.description = f'[{track_title}]({track_uri})'
player.add(requester=ctx.author.id, track=tracks[0])
if not player.is_playing:
await player.play() | [
"async",
"def",
"_play",
"(",
"self",
",",
"ctx",
",",
"*",
",",
"query",
":",
"str",
")",
":",
"player",
"=",
"self",
".",
"bot",
".",
"lavalink",
".",
"players",
".",
"get",
"(",
"ctx",
".",
"guild",
".",
"id",
")",
"query",
"=",
"query",
".",
"strip",
"(",
"'<>'",
")",
"if",
"not",
"url_rx",
".",
"match",
"(",
"query",
")",
":",
"query",
"=",
"f'ytsearch:{query}'",
"tracks",
"=",
"await",
"self",
".",
"bot",
".",
"lavalink",
".",
"get_tracks",
"(",
"query",
")",
"if",
"not",
"tracks",
":",
"return",
"await",
"ctx",
".",
"send",
"(",
"'Nothing found!'",
")",
"embed",
"=",
"discord",
".",
"Embed",
"(",
"color",
"=",
"discord",
".",
"Color",
".",
"blurple",
"(",
")",
")",
"if",
"'list'",
"in",
"query",
"and",
"'ytsearch:'",
"not",
"in",
"query",
":",
"for",
"track",
"in",
"tracks",
":",
"player",
".",
"add",
"(",
"requester",
"=",
"ctx",
".",
"author",
".",
"id",
",",
"track",
"=",
"track",
")",
"embed",
".",
"title",
"=",
"'Playlist enqueued!'",
"embed",
".",
"description",
"=",
"f'Imported {len(tracks)} tracks from the playlist!'",
"await",
"ctx",
".",
"send",
"(",
"embed",
"=",
"embed",
")",
"else",
":",
"track_title",
"=",
"tracks",
"[",
"0",
"]",
"[",
"\"info\"",
"]",
"[",
"\"title\"",
"]",
"track_uri",
"=",
"tracks",
"[",
"0",
"]",
"[",
"\"info\"",
"]",
"[",
"\"uri\"",
"]",
"embed",
".",
"title",
"=",
"\"Track enqueued!\"",
"embed",
".",
"description",
"=",
"f'[{track_title}]({track_uri})'",
"player",
".",
"add",
"(",
"requester",
"=",
"ctx",
".",
"author",
".",
"id",
",",
"track",
"=",
"tracks",
"[",
"0",
"]",
")",
"if",
"not",
"player",
".",
"is_playing",
":",
"await",
"player",
".",
"play",
"(",
")"
] | 35.151515 | [
0.023255813953488372,
0.016129032258064516,
0.01639344262295082,
1,
0.029411764705882353,
1,
0.08333333333333333,
0.025,
1,
0.01694915254237288,
1,
0.13043478260869565,
0.019230769230769232,
1,
0.01639344262295082,
1,
0.05263157894736842,
0.09090909090909091,
0.015384615384615385,
1,
0.02127659574468085,
0.023809523809523808,
0.025,
0.21428571428571427,
0.018867924528301886,
0.02040816326530612,
1,
0.022727272727272728,
0.015625,
0.015384615384615385,
1,
0.08823529411764706,
0.06451612903225806
] |
def _set_iroot_via_xroot(self, xroot):
"""Determine the index of the root cell.
Given an expression vector, find the observation index that is closest
to this vector.
Parameters
----------
xroot : np.ndarray
Vector that marks the root cell, the vector storing the initial
condition, only relevant for computing pseudotime.
"""
if self._adata.shape[1] != xroot.size:
raise ValueError(
'The root vector you provided does not have the '
'correct dimension.')
# this is the squared distance
dsqroot = 1e10
iroot = 0
for i in range(self._adata.shape[0]):
diff = self._adata.X[i, :] - xroot
dsq = diff.dot(diff)
if dsq < dsqroot:
dsqroot = dsq
iroot = i
if np.sqrt(dsqroot) < 1e-10: break
logg.msg('setting root index to', iroot, v=4)
if self.iroot is not None and iroot != self.iroot:
logg.warn('Changing index of iroot from {} to {}.'.format(self.iroot, iroot))
self.iroot = iroot | [
"def",
"_set_iroot_via_xroot",
"(",
"self",
",",
"xroot",
")",
":",
"if",
"self",
".",
"_adata",
".",
"shape",
"[",
"1",
"]",
"!=",
"xroot",
".",
"size",
":",
"raise",
"ValueError",
"(",
"'The root vector you provided does not have the '",
"'correct dimension.'",
")",
"# this is the squared distance",
"dsqroot",
"=",
"1e10",
"iroot",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"_adata",
".",
"shape",
"[",
"0",
"]",
")",
":",
"diff",
"=",
"self",
".",
"_adata",
".",
"X",
"[",
"i",
",",
":",
"]",
"-",
"xroot",
"dsq",
"=",
"diff",
".",
"dot",
"(",
"diff",
")",
"if",
"dsq",
"<",
"dsqroot",
":",
"dsqroot",
"=",
"dsq",
"iroot",
"=",
"i",
"if",
"np",
".",
"sqrt",
"(",
"dsqroot",
")",
"<",
"1e-10",
":",
"break",
"logg",
".",
"msg",
"(",
"'setting root index to'",
",",
"iroot",
",",
"v",
"=",
"4",
")",
"if",
"self",
".",
"iroot",
"is",
"not",
"None",
"and",
"iroot",
"!=",
"self",
".",
"iroot",
":",
"logg",
".",
"warn",
"(",
"'Changing index of iroot from {} to {}.'",
".",
"format",
"(",
"self",
".",
"iroot",
",",
"iroot",
")",
")",
"self",
".",
"iroot",
"=",
"iroot"
] | 37.766667 | [
0.02631578947368421,
0.041666666666666664,
0,
0.02564102564102564,
0.08695652173913043,
0,
0.1111111111111111,
0.1111111111111111,
0.11538461538461539,
0.02666666666666667,
0.03225806451612903,
0.18181818181818182,
0.043478260869565216,
0.10344827586206896,
0.03076923076923077,
0.08108108108108109,
0.05263157894736842,
0.09090909090909091,
0.11764705882352941,
0.044444444444444446,
0.043478260869565216,
0.0625,
0.06896551724137931,
0.06896551724137931,
0.08,
0.06,
0.03773584905660377,
0.034482758620689655,
0.033707865168539325,
0.07692307692307693
] |
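The explicit loop above (with its early exit) can be collapsed into one vectorized numpy call; a sketch under the assumption that X is a dense 2-D array:

import numpy as np

def nearest_row(X, xroot):
    # Index of the row of X closest to xroot in squared Euclidean distance.
    dsq = ((X - xroot) ** 2).sum(axis=1)
    return int(np.argmin(dsq))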
def _set_max_value(self, max_value):
"""Sets current maximum allowed value"""
self._external_max_value = max_value
# Check that the current value of the parameter is still within the boundaries. If not, issue a warning
if self._external_max_value is not None and self.value > self._external_max_value:
warnings.warn("The current value of the parameter %s (%s) "
"was above the new maximum %s." % (self.name, self.value, self._external_max_value),
exceptions.RuntimeWarning)
self.value = self._external_max_value | [
"def",
"_set_max_value",
"(",
"self",
",",
"max_value",
")",
":",
"self",
".",
"_external_max_value",
"=",
"max_value",
"# Check that the current value of the parameter is still within the boundaries. If not, issue a warning",
"if",
"self",
".",
"_external_max_value",
"is",
"not",
"None",
"and",
"self",
".",
"value",
">",
"self",
".",
"_external_max_value",
":",
"warnings",
".",
"warn",
"(",
"\"The current value of the parameter %s (%s) \"",
"\"was above the new maximum %s.\"",
"%",
"(",
"self",
".",
"name",
",",
"self",
".",
"value",
",",
"self",
".",
"_external_max_value",
")",
",",
"exceptions",
".",
"RuntimeWarning",
")",
"self",
".",
"value",
"=",
"self",
".",
"_external_max_value"
] | 47 | [
0.027777777777777776,
0.041666666666666664,
0,
0.045454545454545456,
0,
0.02702702702702703,
0,
0.03333333333333333,
0,
0.04225352112676056,
0.03636363636363636,
0.07692307692307693,
0.04081632653061224
] |
def rspr(self, times=1, **kwargs):
""" Random SPR, with prune and regraft edges chosen randomly, and
lengths drawn uniformly from the available edge lengths.
N1: disallow_sibling_sprs prevents sprs that don't alter the topology
of the tree """
spr = SPR(self.copy())
for _ in range(times):
spr.rspr(**kwargs)
return spr.tree | [
"def",
"rspr",
"(",
"self",
",",
"times",
"=",
"1",
",",
"*",
"*",
"kwargs",
")",
":",
"spr",
"=",
"SPR",
"(",
"self",
".",
"copy",
"(",
")",
")",
"for",
"_",
"in",
"range",
"(",
"times",
")",
":",
"spr",
".",
"rspr",
"(",
"*",
"*",
"kwargs",
")",
"return",
"spr",
".",
"tree"
] | 34.909091 | [
0.029411764705882353,
0.0273972602739726,
0.03125,
0,
0.025974025974025976,
0.13043478260869565,
0,
0.06666666666666667,
0.06666666666666667,
0.06666666666666667,
0.08695652173913043
] |
def get_search_names(name):
"""Return a list of values to search on when we are looking for a package
with the given name.
This is required to search on both pyramid_debugtoolbar and
pyramid-debugtoolbar.
"""
parts = re.split('[-_.]', name)
if len(parts) == 1:
return parts
result = set()
for i in range(len(parts) - 1, 0, -1):
for s1 in '-_.':
prefix = s1.join(parts[:i])
for s2 in '-_.':
suffix = s2.join(parts[i:])
for s3 in '-_.':
result.add(s3.join([prefix, suffix]))
return list(result) | [
"def",
"get_search_names",
"(",
"name",
")",
":",
"parts",
"=",
"re",
".",
"split",
"(",
"'[-_.]'",
",",
"name",
")",
"if",
"len",
"(",
"parts",
")",
"==",
"1",
":",
"return",
"parts",
"result",
"=",
"set",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"parts",
")",
"-",
"1",
",",
"0",
",",
"-",
"1",
")",
":",
"for",
"s1",
"in",
"'-_.'",
":",
"prefix",
"=",
"s1",
".",
"join",
"(",
"parts",
"[",
":",
"i",
"]",
")",
"for",
"s2",
"in",
"'-_.'",
":",
"suffix",
"=",
"s2",
".",
"join",
"(",
"parts",
"[",
"i",
":",
"]",
")",
"for",
"s3",
"in",
"'-_.'",
":",
"result",
".",
"add",
"(",
"s3",
".",
"join",
"(",
"[",
"prefix",
",",
"suffix",
"]",
")",
")",
"return",
"list",
"(",
"result",
")"
] | 28.904762 | [
0.037037037037037035,
0.025974025974025976,
0.08333333333333333,
0,
0.031746031746031744,
0.08,
0,
0.2857142857142857,
0.05714285714285714,
0.08695652173913043,
0.1,
0,
0.1111111111111111,
0.047619047619047616,
0.08333333333333333,
0.05128205128205128,
0.07142857142857142,
0.046511627906976744,
0.0625,
0.03508771929824561,
0.08695652173913043
] |
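For a two-part name the function yields every separator combination, so a search matches regardless of whether the package used -, _ or . in its name. For example:

names = get_search_names('pyramid_debugtoolbar')
assert 'pyramid-debugtoolbar' in names
assert 'pyramid.debugtoolbar' in names
assert 'pyramid_debugtoolbar' in names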
def MI_createInstance(self,
env,
instance):
# pylint: disable=invalid-name
"""Create a CIM instance, and return its instance name
Implements the WBEM operation CreateInstance in terms
of the set_instance method. A derived class will not normally
override this method.
"""
logger = env.get_logger()
logger.log_debug('CIMProvider2 MI_createInstance called...')
rval = None
'''
ch = env.get_cimom_handle()
cimClass = ch.GetClass(instance.classname,
instance.path.namespace,
LocalOnly=False,
IncludeQualifiers=True)
'''
# CIMOM has already filled in default property values for
# props with default values, if values not supplied by client.
rval = self.set_instance(env=env,
instance=instance,
modify_existing=False)
logger.log_debug('CIMProvider2 MI_createInstance returning')
return rval.path | [
"def",
"MI_createInstance",
"(",
"self",
",",
"env",
",",
"instance",
")",
":",
"# pylint: disable=invalid-name",
"logger",
"=",
"env",
".",
"get_logger",
"(",
")",
"logger",
".",
"log_debug",
"(",
"'CIMProvider2 MI_createInstance called...'",
")",
"rval",
"=",
"None",
"'''\n ch = env.get_cimom_handle()\n cimClass = ch.GetClass(instance.classname,\n instance.path.namespace,\n LocalOnly=False,\n IncludeQualifiers=True)\n '''",
"# CIMOM has already filled in default property values for",
"# props with default values, if values not supplied by client.",
"rval",
"=",
"self",
".",
"set_instance",
"(",
"env",
"=",
"env",
",",
"instance",
"=",
"instance",
",",
"modify_existing",
"=",
"False",
")",
"logger",
".",
"log_debug",
"(",
"'CIMProvider2 MI_createInstance returning'",
")",
"return",
"rval",
".",
"path"
] | 39.724138 | [
0.07407407407407407,
0.1,
0.1111111111111111,
0.05263157894736842,
0.03225806451612903,
0,
0.03278688524590164,
0.02857142857142857,
0.06896551724137931,
0,
0.18181818181818182,
0,
0.06060606060606061,
0.029411764705882353,
0.10526315789473684,
0.18181818181818182,
0.05714285714285714,
0.06,
0.05263157894736842,
0.08163265306122448,
0.08928571428571429,
0.05555555555555555,
0.03076923076923077,
0.02857142857142857,
0.07317073170731707,
0.0784313725490196,
0.09090909090909091,
0.029411764705882353,
0.08333333333333333
] |
def build_source_files(self):
"""Return acessors to the build files"""
from .files import BuildSourceFileAccessor
return BuildSourceFileAccessor(self, self.dataset, self.source_fs) | [
"def",
"build_source_files",
"(",
"self",
")",
":",
"from",
".",
"files",
"import",
"BuildSourceFileAccessor",
"return",
"BuildSourceFileAccessor",
"(",
"self",
",",
"self",
".",
"dataset",
",",
"self",
".",
"source_fs",
")"
] | 40.2 | [
0.034482758620689655,
0.041666666666666664,
0,
0.04,
0.02702702702702703
] |
def report_message(report):
"""Report message."""
body = 'Error: return code != 0\n\n'
body += 'Archive: {}\n\n'.format(report['archive'])
body += 'Docker image: {}\n\n'.format(report['image'])
body += 'Docker container: {}\n\n'.format(report['container_id'])
return body | [
"def",
"report_message",
"(",
"report",
")",
":",
"body",
"=",
"'Error: return code != 0\\n\\n'",
"body",
"+=",
"'Archive: {}\\n\\n'",
".",
"format",
"(",
"report",
"[",
"'archive'",
"]",
")",
"body",
"+=",
"'Docker image: {}\\n\\n'",
".",
"format",
"(",
"report",
"[",
"'image'",
"]",
")",
"body",
"+=",
"'Docker container: {}\\n\\n'",
".",
"format",
"(",
"report",
"[",
"'container_id'",
"]",
")",
"return",
"body"
] | 41.285714 | [
0.037037037037037035,
0.08,
0.05,
0.03636363636363636,
0.034482758620689655,
0.028985507246376812,
0.13333333333333333
] |
def rotation(self):
"""Rotation of device
Returns:
int (0-3)
"""
rs = dict(PORTRAIT=0, LANDSCAPE=1, UIA_DEVICE_ORIENTATION_LANDSCAPERIGHT=3)
return rs.get(self.session.orientation, 0) | [
"def",
"rotation",
"(",
"self",
")",
":",
"rs",
"=",
"dict",
"(",
"PORTRAIT",
"=",
"0",
",",
"LANDSCAPE",
"=",
"1",
",",
"UIA_DEVICE_ORIENTATION_LANDSCAPERIGHT",
"=",
"3",
")",
"return",
"rs",
".",
"get",
"(",
"self",
".",
"session",
".",
"orientation",
",",
"0",
")"
] | 32.714286 | [
0.05263157894736842,
0.06896551724137931,
0.125,
0.14285714285714285,
0.18181818181818182,
0.03614457831325301,
0.04
] |
def main():
"""Runs the test sender."""
stream_config = spead2.send.StreamConfig(
max_packet_size=16356, rate=1000e6, burst_size=10, max_heaps=1)
item_group = spead2.send.ItemGroup(flavour=spead2.Flavour(4, 64, 48, 0))
# Add item descriptors to the heap.
num_baselines = (512 * 513) // 2
dtype = [('TCI', 'i1'), ('FD', 'u1'), ('VIS', '<c8', 4)]
item_group.add_item(
id=0x6000, name='visibility_timestamp_count', description='',
shape=tuple(), format=None, dtype='<u4')
item_group.add_item(
id=0x6001, name='visibility_timestamp_fraction', description='',
shape=tuple(), format=None, dtype='<u4')
item_group.add_item(
id=0x6005, name='visibility_baseline_count', description='',
shape=tuple(), format=None, dtype='<u4')
item_group.add_item(
id=0x6008, name='scan_id', description='',
shape=tuple(), format=None, dtype='<u8')
item_group.add_item(
id=0x600A, name='correlator_output_data', description='',
shape=(num_baselines,), dtype=dtype)
# Create streams and send start-of-stream message.
streams = []
num_streams = 2
for i in range(num_streams):
stream = spead2.send.UdpStream(
thread_pool=spead2.ThreadPool(threads=1),
hostname='127.0.0.1', port=41000 + i, config=stream_config)
stream.send_heap(item_group.get_start())
streams.append(stream)
vis = numpy.zeros(shape=(num_baselines,), dtype=dtype)
num_heaps = 200
start_time = time.time()
for stream in streams:
# Update values in the heap.
item_group['visibility_timestamp_count'].value = 1
item_group['visibility_timestamp_fraction'].value = 0
item_group['visibility_baseline_count'].value = num_baselines
item_group['scan_id'].value = 100000000
item_group['correlator_output_data'].value = vis
# Iterate heaps.
for i in range(num_heaps):
# Send heap.
stream.send_heap(item_group.get_heap(descriptors='all', data='all'))
# Print time taken.
duration = time.time() - start_time
data_size = num_streams * num_heaps * (vis.nbytes / 1e6)
print("Sent %.3f MB in %.3f sec (%.3f MB/sec)" % (
data_size, duration, (data_size/duration)))
# Send end-of-stream message.
for stream in streams:
stream.send_heap(item_group.get_end()) | [
"def",
"main",
"(",
")",
":",
"stream_config",
"=",
"spead2",
".",
"send",
".",
"StreamConfig",
"(",
"max_packet_size",
"=",
"16356",
",",
"rate",
"=",
"1000e6",
",",
"burst_size",
"=",
"10",
",",
"max_heaps",
"=",
"1",
")",
"item_group",
"=",
"spead2",
".",
"send",
".",
"ItemGroup",
"(",
"flavour",
"=",
"spead2",
".",
"Flavour",
"(",
"4",
",",
"64",
",",
"48",
",",
"0",
")",
")",
"# Add item descriptors to the heap.",
"num_baselines",
"=",
"(",
"512",
"*",
"513",
")",
"//",
"2",
"dtype",
"=",
"[",
"(",
"'TCI'",
",",
"'i1'",
")",
",",
"(",
"'FD'",
",",
"'u1'",
")",
",",
"(",
"'VIS'",
",",
"'<c8'",
",",
"4",
")",
"]",
"item_group",
".",
"add_item",
"(",
"id",
"=",
"0x6000",
",",
"name",
"=",
"'visibility_timestamp_count'",
",",
"description",
"=",
"''",
",",
"shape",
"=",
"tuple",
"(",
")",
",",
"format",
"=",
"None",
",",
"dtype",
"=",
"'<u4'",
")",
"item_group",
".",
"add_item",
"(",
"id",
"=",
"0x6001",
",",
"name",
"=",
"'visibility_timestamp_fraction'",
",",
"description",
"=",
"''",
",",
"shape",
"=",
"tuple",
"(",
")",
",",
"format",
"=",
"None",
",",
"dtype",
"=",
"'<u4'",
")",
"item_group",
".",
"add_item",
"(",
"id",
"=",
"0x6005",
",",
"name",
"=",
"'visibility_baseline_count'",
",",
"description",
"=",
"''",
",",
"shape",
"=",
"tuple",
"(",
")",
",",
"format",
"=",
"None",
",",
"dtype",
"=",
"'<u4'",
")",
"item_group",
".",
"add_item",
"(",
"id",
"=",
"0x6008",
",",
"name",
"=",
"'scan_id'",
",",
"description",
"=",
"''",
",",
"shape",
"=",
"tuple",
"(",
")",
",",
"format",
"=",
"None",
",",
"dtype",
"=",
"'<u8'",
")",
"item_group",
".",
"add_item",
"(",
"id",
"=",
"0x600A",
",",
"name",
"=",
"'correlator_output_data'",
",",
"description",
"=",
"''",
",",
"shape",
"=",
"(",
"num_baselines",
",",
")",
",",
"dtype",
"=",
"dtype",
")",
"# Create streams and send start-of-stream message.",
"streams",
"=",
"[",
"]",
"num_streams",
"=",
"2",
"for",
"i",
"in",
"range",
"(",
"num_streams",
")",
":",
"stream",
"=",
"spead2",
".",
"send",
".",
"UdpStream",
"(",
"thread_pool",
"=",
"spead2",
".",
"ThreadPool",
"(",
"threads",
"=",
"1",
")",
",",
"hostname",
"=",
"'127.0.0.1'",
",",
"port",
"=",
"41000",
"+",
"i",
",",
"config",
"=",
"stream_config",
")",
"stream",
".",
"send_heap",
"(",
"item_group",
".",
"get_start",
"(",
")",
")",
"streams",
".",
"append",
"(",
"stream",
")",
"vis",
"=",
"numpy",
".",
"zeros",
"(",
"shape",
"=",
"(",
"num_baselines",
",",
")",
",",
"dtype",
"=",
"dtype",
")",
"num_heaps",
"=",
"200",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"for",
"stream",
"in",
"streams",
":",
"# Update values in the heap.",
"item_group",
"[",
"'visibility_timestamp_count'",
"]",
".",
"value",
"=",
"1",
"item_group",
"[",
"'visibility_timestamp_fraction'",
"]",
".",
"value",
"=",
"0",
"item_group",
"[",
"'visibility_baseline_count'",
"]",
".",
"value",
"=",
"num_baselines",
"item_group",
"[",
"'scan_id'",
"]",
".",
"value",
"=",
"100000000",
"item_group",
"[",
"'correlator_output_data'",
"]",
".",
"value",
"=",
"vis",
"# Iterate heaps.",
"for",
"i",
"in",
"range",
"(",
"num_heaps",
")",
":",
"# Send heap.",
"stream",
".",
"send_heap",
"(",
"item_group",
".",
"get_heap",
"(",
"descriptors",
"=",
"'all'",
",",
"data",
"=",
"'all'",
")",
")",
"# Print time taken.",
"duration",
"=",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
"data_size",
"=",
"num_streams",
"*",
"num_heaps",
"*",
"(",
"vis",
".",
"nbytes",
"/",
"1e6",
")",
"print",
"(",
"\"Sent %.3f MB in %.3f sec (%.3f MB/sec)\"",
"%",
"(",
"data_size",
",",
"duration",
",",
"(",
"data_size",
"/",
"duration",
")",
")",
")",
"# Send end-of-stream message.",
"for",
"stream",
"in",
"streams",
":",
"stream",
".",
"send_heap",
"(",
"item_group",
".",
"get_end",
"(",
")",
")"
] | 40.050847 | [
0.09090909090909091,
0.06451612903225806,
0.06666666666666667,
0.09859154929577464,
0.02631578947368421,
0,
0.05128205128205128,
0.05555555555555555,
0.03333333333333333,
0.125,
0.07246376811594203,
0.125,
0.125,
0.06944444444444445,
0.125,
0.125,
0.07352941176470588,
0.125,
0.125,
0.1,
0.125,
0.125,
0.07692307692307693,
0.11363636363636363,
0,
0.037037037037037035,
0.125,
0.10526315789473684,
0.0625,
0.07692307692307693,
0.05660377358490566,
0.08450704225352113,
0.041666666666666664,
0.06666666666666667,
0,
0.034482758620689655,
0.10526315789473684,
0.07142857142857142,
0.07692307692307693,
0.05555555555555555,
0.034482758620689655,
0.03278688524590164,
0.028985507246376812,
0.0425531914893617,
0.03571428571428571,
0.08333333333333333,
0.058823529411764705,
0.08333333333333333,
0.0375,
0,
0.08695652173913043,
0.05128205128205128,
0.03333333333333333,
0.05555555555555555,
0.058823529411764705,
0,
0.06060606060606061,
0.07692307692307693,
0.043478260869565216
] |
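A worked check of the throughput line in the sender above: each record of `dtype` occupies 1 byte (TCI) + 1 byte (FD) + 4 × 8 bytes (four complex64 visibilities) = 34 bytes, so one heap carries num_baselines × 34 = 131328 × 34 ≈ 4.47 MB, and the run moves num_streams × num_heaps × 4.47 ≈ 2 × 200 × 4.47 ≈ 1786 MB in total, which is what the final print reports (here "MB" means 1e6 bytes, per `vis.nbytes / 1e6`).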
def anno_parser(func):
"Look at params (annotated with `Param`) in func and return an `ArgumentParser`"
p = ArgumentParser(description=func.__doc__)
for k,v in inspect.signature(func).parameters.items():
param = func.__annotations__.get(k, Param())
kwargs = param.kwargs
if v.default != inspect.Parameter.empty: kwargs['default'] = v.default
p.add_argument(f"{param.pre}{k}", **kwargs)
return p | [
"def",
"anno_parser",
"(",
"func",
")",
":",
"p",
"=",
"ArgumentParser",
"(",
"description",
"=",
"func",
".",
"__doc__",
")",
"for",
"k",
",",
"v",
"in",
"inspect",
".",
"signature",
"(",
"func",
")",
".",
"parameters",
".",
"items",
"(",
")",
":",
"param",
"=",
"func",
".",
"__annotations__",
".",
"get",
"(",
"k",
",",
"Param",
"(",
")",
")",
"kwargs",
"=",
"param",
".",
"kwargs",
"if",
"v",
".",
"default",
"!=",
"inspect",
".",
"Parameter",
".",
"empty",
":",
"kwargs",
"[",
"'default'",
"]",
"=",
"v",
".",
"default",
"p",
".",
"add_argument",
"(",
"f\"{param.pre}{k}\"",
",",
"*",
"*",
"kwargs",
")",
"return",
"p"
] | 48.222222 | [
0.045454545454545456,
0.03571428571428571,
0.041666666666666664,
0.05172413793103448,
0.038461538461538464,
0.06896551724137931,
0.038461538461538464,
0.0392156862745098,
0.16666666666666666
] |
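A hedged usage sketch for `anno_parser` above. It assumes the companion `Param` helper (not shown here) stores argparse keyword arguments in `kwargs` and exposes a `pre` prefix such as '' for positionals or '--' for options — the exact `Param` constructor is an assumption:

    def greet(name: Param(help='who to greet')):
        "Print a greeting"                       # the docstring becomes the parser description
        print(f'hello {name}')

    parser = anno_parser(greet)                  # ArgumentParser built from the annotations
    args = parser.parse_args(['world'])          # Namespace(name='world')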
def _do_config_proposal_vote(args):
"""Executes the 'proposal vote' subcommand. Given a key file, a proposal
id and a vote value, it generates a batch of sawtooth_settings transactions
in a BatchList instance. The BatchList is either written to a file or submitted to a
validator.
"""
signer = _read_signer(args.key)
rest_client = RestClient(args.url)
proposals = _get_proposals(rest_client)
proposal = None
for candidate in proposals.candidates:
if candidate.proposal_id == args.proposal_id:
proposal = candidate
break
if proposal is None:
raise CliException('No proposal exists with the given id')
for vote_record in proposal.votes:
if vote_record.public_key == signer.get_public_key().as_hex():
raise CliException(
'A vote has already been recorded with this signing key')
txn = _create_vote_txn(
signer,
args.proposal_id,
proposal.proposal.setting,
args.vote_value)
batch = _create_batch(signer, [txn])
batch_list = BatchList(batches=[batch])
rest_client.send_batches(batch_list) | [
"def",
"_do_config_proposal_vote",
"(",
"args",
")",
":",
"signer",
"=",
"_read_signer",
"(",
"args",
".",
"key",
")",
"rest_client",
"=",
"RestClient",
"(",
"args",
".",
"url",
")",
"proposals",
"=",
"_get_proposals",
"(",
"rest_client",
")",
"proposal",
"=",
"None",
"for",
"candidate",
"in",
"proposals",
".",
"candidates",
":",
"if",
"candidate",
".",
"proposal_id",
"==",
"args",
".",
"proposal_id",
":",
"proposal",
"=",
"candidate",
"break",
"if",
"proposal",
"is",
"None",
":",
"raise",
"CliException",
"(",
"'No proposal exists with the given id'",
")",
"for",
"vote_record",
"in",
"proposal",
".",
"votes",
":",
"if",
"vote_record",
".",
"public_key",
"==",
"signer",
".",
"get_public_key",
"(",
")",
".",
"as_hex",
"(",
")",
":",
"raise",
"CliException",
"(",
"'A vote has already been recorded with this signing key'",
")",
"txn",
"=",
"_create_vote_txn",
"(",
"signer",
",",
"args",
".",
"proposal_id",
",",
"proposal",
".",
"proposal",
".",
"setting",
",",
"args",
".",
"vote_value",
")",
"batch",
"=",
"_create_batch",
"(",
"signer",
",",
"[",
"txn",
"]",
")",
"batch_list",
"=",
"BatchList",
"(",
"batches",
"=",
"[",
"batch",
"]",
")",
"rest_client",
".",
"send_batches",
"(",
"batch_list",
")"
] | 31.714286 | [
0.02857142857142857,
0.025974025974025976,
0.02531645569620253,
0.028985507246376812,
0.14285714285714285,
0.2857142857142857,
0.05714285714285714,
0.05263157894736842,
0,
0.046511627906976744,
0,
0.10526315789473684,
0.047619047619047616,
0.03773584905660377,
0.0625,
0.11764705882352941,
0,
0.08333333333333333,
0.030303030303030304,
0,
0.05263157894736842,
0.02857142857142857,
0.0967741935483871,
0.0410958904109589,
0,
0.1111111111111111,
0.13333333333333333,
0.08,
0.058823529411764705,
0.125,
0.05,
0,
0.046511627906976744,
0,
0.05
] |
def _conn(commit=False):
'''
Return a postgres cursor
'''
defaults = {'host': 'localhost',
'user': 'salt',
'password': 'salt',
'dbname': 'salt',
'port': 5432}
conn_kwargs = {}
for key, value in defaults.items():
conn_kwargs[key] = __opts__.get('queue.{0}.{1}'.format(__virtualname__, key), value)
try:
conn = psycopg2.connect(**conn_kwargs)
except psycopg2.OperationalError as exc:
raise SaltMasterError('pgjsonb returner could not connect to database: {exc}'.format(exc=exc))
cursor = conn.cursor()
try:
yield cursor
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(six.text_type(error))
cursor.execute("ROLLBACK")
raise err
else:
if commit:
cursor.execute("COMMIT")
else:
cursor.execute("ROLLBACK")
finally:
conn.close() | [
"def",
"_conn",
"(",
"commit",
"=",
"False",
")",
":",
"defaults",
"=",
"{",
"'host'",
":",
"'localhost'",
",",
"'user'",
":",
"'salt'",
",",
"'password'",
":",
"'salt'",
",",
"'dbname'",
":",
"'salt'",
",",
"'port'",
":",
"5432",
"}",
"conn_kwargs",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"defaults",
".",
"items",
"(",
")",
":",
"conn_kwargs",
"[",
"key",
"]",
"=",
"__opts__",
".",
"get",
"(",
"'queue.{0}.{1}'",
".",
"format",
"(",
"__virtualname__",
",",
"key",
")",
",",
"value",
")",
"try",
":",
"conn",
"=",
"psycopg2",
".",
"connect",
"(",
"*",
"*",
"conn_kwargs",
")",
"except",
"psycopg2",
".",
"OperationalError",
"as",
"exc",
":",
"raise",
"SaltMasterError",
"(",
"'pgjsonb returner could not connect to database: {exc}'",
".",
"format",
"(",
"exc",
"=",
"exc",
")",
")",
"cursor",
"=",
"conn",
".",
"cursor",
"(",
")",
"try",
":",
"yield",
"cursor",
"except",
"psycopg2",
".",
"DatabaseError",
"as",
"err",
":",
"error",
"=",
"err",
".",
"args",
"sys",
".",
"stderr",
".",
"write",
"(",
"six",
".",
"text_type",
"(",
"error",
")",
")",
"cursor",
".",
"execute",
"(",
"\"ROLLBACK\"",
")",
"raise",
"err",
"else",
":",
"if",
"commit",
":",
"cursor",
".",
"execute",
"(",
"\"COMMIT\"",
")",
"else",
":",
"cursor",
".",
"execute",
"(",
"\"ROLLBACK\"",
")",
"finally",
":",
"conn",
".",
"close",
"(",
")"
] | 27.764706 | [
0.041666666666666664,
0.2857142857142857,
0.06896551724137931,
0.2857142857142857,
0.08333333333333333,
0.06451612903225806,
0.05714285714285714,
0.06060606060606061,
0.10344827586206896,
0,
0.1,
0.05128205128205128,
0.03260869565217391,
0.25,
0.043478260869565216,
0.045454545454545456,
0.029411764705882353,
0,
0.07692307692307693,
0,
0.25,
0.1,
0.04878048780487805,
0.08333333333333333,
0.043478260869565216,
0.058823529411764705,
0.11764705882352941,
0.2222222222222222,
0.1111111111111111,
0.05555555555555555,
0.15384615384615385,
0.05263157894736842,
0.16666666666666666,
0.1
] |
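`_conn` above yields its cursor and commits or rolls back on the way out, so it is presumably wrapped with `contextlib.contextmanager` (the decorator is not visible in this snippet — an assumption). A usage sketch:

    with _conn(commit=True) as cur:
        cur.execute('SELECT 1')
    # normal exit issues COMMIT (or ROLLBACK when commit=False);
    # a DatabaseError triggers ROLLBACK and re-raises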
def from_wei(number: int, unit: str) -> Union[int, decimal.Decimal]:
"""
Takes a number of wei and converts it to any other ether unit.
"""
if unit.lower() not in units:
raise ValueError(
"Unknown unit. Must be one of {0}".format("/".join(units.keys()))
)
if number == 0:
return 0
if number < MIN_WEI or number > MAX_WEI:
raise ValueError("value must be between 1 and 2**256 - 1")
unit_value = units[unit.lower()]
with localcontext() as ctx:
ctx.prec = 999
d_number = decimal.Decimal(value=number, context=ctx)
result_value = d_number / unit_value
return result_value | [
"def",
"from_wei",
"(",
"number",
":",
"int",
",",
"unit",
":",
"str",
")",
"->",
"Union",
"[",
"int",
",",
"decimal",
".",
"Decimal",
"]",
":",
"if",
"unit",
".",
"lower",
"(",
")",
"not",
"in",
"units",
":",
"raise",
"ValueError",
"(",
"\"Unknown unit. Must be one of {0}\"",
".",
"format",
"(",
"\"/\"",
".",
"join",
"(",
"units",
".",
"keys",
"(",
")",
")",
")",
")",
"if",
"number",
"==",
"0",
":",
"return",
"0",
"if",
"number",
"<",
"MIN_WEI",
"or",
"number",
">",
"MAX_WEI",
":",
"raise",
"ValueError",
"(",
"\"value must be between 1 and 2**256 - 1\"",
")",
"unit_value",
"=",
"units",
"[",
"unit",
".",
"lower",
"(",
")",
"]",
"with",
"localcontext",
"(",
")",
"as",
"ctx",
":",
"ctx",
".",
"prec",
"=",
"999",
"d_number",
"=",
"decimal",
".",
"Decimal",
"(",
"value",
"=",
"number",
",",
"context",
"=",
"ctx",
")",
"result_value",
"=",
"d_number",
"/",
"unit_value",
"return",
"result_value"
] | 28.478261 | [
0.014705882352941176,
0.2857142857142857,
0.030303030303030304,
0.2857142857142857,
0.06060606060606061,
0.12,
0.02564102564102564,
0.3333333333333333,
0,
0.10526315789473684,
0.125,
0,
0.045454545454545456,
0.030303030303030304,
0,
0.05555555555555555,
0,
0.06451612903225806,
0.09090909090909091,
0.03278688524590164,
0.045454545454545456,
0,
0.08695652173913043
] |
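A quick worked example for `from_wei` above, assuming the module-level `units` table maps 'ether' to 10**18 and 'gwei' to 10**9 wei (the table itself is not shown):

    from_wei(10**18, 'ether')   # Decimal('1')
    from_wei(500, 'gwei')       # Decimal('5E-7')
    from_wei(0, 'ether')        # 0 (short-circuits before the Decimal division)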
def values(self):
'''Iterator over values of :class:`PairMixin`.'''
if self.cache.cache is None:
backend = self.read_backend
return backend.execute(backend.structure(self).values(),
self.load_values)
else:
return self.cache.cache.values() | [
"def",
"values",
"(",
"self",
")",
":",
"if",
"self",
".",
"cache",
".",
"cache",
"is",
"None",
":",
"backend",
"=",
"self",
".",
"read_backend",
"return",
"backend",
".",
"execute",
"(",
"backend",
".",
"structure",
"(",
"self",
")",
".",
"values",
"(",
")",
",",
"self",
".",
"load_values",
")",
"else",
":",
"return",
"self",
".",
"cache",
".",
"cache",
".",
"values",
"(",
")"
] | 41.625 | [
0.05555555555555555,
0.017241379310344827,
0.08108108108108109,
0.025,
0.043478260869565216,
0.05660377358490566,
0.21428571428571427,
0.045454545454545456
] |
def imdct(y, L):
"""Inverse Modified Discrete Cosine Transform (MDCT)
Returns the Inverse Modified Discrete Cosine Transform
with fixed window size L of the vector of coefficients y.
The window is based on a sine window.
Parameters
----------
y : ndarray, shape (L/2, 2 * N / L)
The MDCT coefficients
L : int
The window length
Returns
-------
x : ndarray, shape (N,)
The reconstructed signal
See also
--------
mdct
"""
# Signal length
N = y.size
# Number of frequency channels
K = L // 2
# Test length
if N % K != 0:
raise ValueError('Input length must be a multiple of the half of '
'the window size')
# Number of frames
P = N // K
if P < 2:
raise ValueError('Signal too short')
# Reshape
temp = y
y = np.zeros((L, P), dtype=np.float)
y[:K, :] = temp
del temp
# Pre-twiddle
aL = np.arange(L, dtype=np.float)
y = y * np.exp((1j * np.pi * (L / 2. + 1.) / L) * aL)[:, None]
# IFFT
x = ifft(y, axis=0)
# Post-twiddle
x *= np.exp((1j * np.pi / L) * (aL + (L / 2. + 1.) / 2.))[:, None]
# Windowing
w_long = np.sin((np.pi / L) * (aL + 0.5))
w_edge_L = w_long.copy()
w_edge_L[:L // 4] = 0.
w_edge_L[L // 4:L // 2] = 1.
w_edge_R = w_long.copy()
w_edge_R[L // 2:L // 2 + L // 4] = 1.
w_edge_R[L // 2 + L // 4:L] = 0.
x[:, 0] *= w_edge_L
x[:, 1:-1] *= w_long[:, None]
x[:, -1] *= w_edge_R
# Real part and scaling
x = math.sqrt(2. / K) * L * np.real(x)
# Overlap and add
def overlap_add(y, x):
z = np.concatenate((y, np.zeros((K,))))
z[-2 * K:] += x
return z
x = six.moves.reduce(overlap_add, [x[:, i] for i in range(x.shape[1])])
# Cut edges
x = x[K // 2:-K // 2].copy()
return x | [
"def",
"imdct",
"(",
"y",
",",
"L",
")",
":",
"# Signal length",
"N",
"=",
"y",
".",
"size",
"# Number of frequency channels",
"K",
"=",
"L",
"//",
"2",
"# Test length",
"if",
"N",
"%",
"K",
"!=",
"0",
":",
"raise",
"ValueError",
"(",
"'Input length must be a multiple of the half of '",
"'the window size'",
")",
"# Number of frames",
"P",
"=",
"N",
"//",
"K",
"if",
"P",
"<",
"2",
":",
"raise",
"ValueError",
"(",
"'Signal too short'",
")",
"# Reshape",
"temp",
"=",
"y",
"y",
"=",
"np",
".",
"zeros",
"(",
"(",
"L",
",",
"P",
")",
",",
"dtype",
"=",
"np",
".",
"float",
")",
"y",
"[",
":",
"K",
",",
":",
"]",
"=",
"temp",
"del",
"temp",
"# Pre-twiddle",
"aL",
"=",
"np",
".",
"arange",
"(",
"L",
",",
"dtype",
"=",
"np",
".",
"float",
")",
"y",
"=",
"y",
"*",
"np",
".",
"exp",
"(",
"(",
"1j",
"*",
"np",
".",
"pi",
"*",
"(",
"L",
"/",
"2.",
"+",
"1.",
")",
"/",
"L",
")",
"*",
"aL",
")",
"[",
":",
",",
"None",
"]",
"# IFFT",
"x",
"=",
"ifft",
"(",
"y",
",",
"axis",
"=",
"0",
")",
"# Post-twiddle",
"x",
"*=",
"np",
".",
"exp",
"(",
"(",
"1j",
"*",
"np",
".",
"pi",
"/",
"L",
")",
"*",
"(",
"aL",
"+",
"(",
"L",
"/",
"2.",
"+",
"1.",
")",
"/",
"2.",
")",
")",
"[",
":",
",",
"None",
"]",
"# Windowing",
"w_long",
"=",
"np",
".",
"sin",
"(",
"(",
"np",
".",
"pi",
"/",
"L",
")",
"*",
"(",
"aL",
"+",
"0.5",
")",
")",
"w_edge_L",
"=",
"w_long",
".",
"copy",
"(",
")",
"w_edge_L",
"[",
":",
"L",
"//",
"4",
"]",
"=",
"0.",
"w_edge_L",
"[",
"L",
"//",
"4",
":",
"L",
"//",
"2",
"]",
"=",
"1.",
"w_edge_R",
"=",
"w_long",
".",
"copy",
"(",
")",
"w_edge_R",
"[",
"L",
"//",
"2",
":",
"L",
"//",
"2",
"+",
"L",
"//",
"4",
"]",
"=",
"1.",
"w_edge_R",
"[",
"L",
"//",
"2",
"+",
"L",
"//",
"4",
":",
"L",
"]",
"=",
"0.",
"x",
"[",
":",
",",
"0",
"]",
"*=",
"w_edge_L",
"x",
"[",
":",
",",
"1",
":",
"-",
"1",
"]",
"*=",
"w_long",
"[",
":",
",",
"None",
"]",
"x",
"[",
":",
",",
"-",
"1",
"]",
"*=",
"w_edge_R",
"# Real part and scaling",
"x",
"=",
"math",
".",
"sqrt",
"(",
"2.",
"/",
"K",
")",
"*",
"L",
"*",
"np",
".",
"real",
"(",
"x",
")",
"# Overlap and add",
"def",
"overlap_add",
"(",
"y",
",",
"x",
")",
":",
"z",
"=",
"np",
".",
"concatenate",
"(",
"(",
"y",
",",
"np",
".",
"zeros",
"(",
"(",
"K",
",",
")",
")",
")",
")",
"z",
"[",
"-",
"2",
"*",
"K",
":",
"]",
"+=",
"x",
"return",
"z",
"x",
"=",
"six",
".",
"moves",
".",
"reduce",
"(",
"overlap_add",
",",
"[",
"x",
"[",
":",
",",
"i",
"]",
"for",
"i",
"in",
"range",
"(",
"x",
".",
"shape",
"[",
"1",
"]",
")",
"]",
")",
"# Cut edges",
"x",
"=",
"x",
"[",
"K",
"//",
"2",
":",
"-",
"K",
"//",
"2",
"]",
".",
"copy",
"(",
")",
"return",
"x"
] | 22.121951 | [
0.0625,
0.03571428571428571,
0,
0.034482758620689655,
0.03278688524590164,
0,
0.04878048780487805,
0,
0.14285714285714285,
0.14285714285714285,
0.10256410256410256,
0.06896551724137931,
0.2727272727272727,
0.08,
0,
0.18181818181818182,
0.18181818181818182,
0.14814814814814814,
0.0625,
0,
0.16666666666666666,
0.16666666666666666,
0.25,
0.2857142857142857,
0.10526315789473684,
0.14285714285714285,
0,
0.058823529411764705,
0.14285714285714285,
0,
0.11764705882352941,
0.1111111111111111,
0.04054054054054054,
0.09302325581395349,
0,
0.09090909090909091,
0.14285714285714285,
0.15384615384615385,
0.045454545454545456,
0,
0.15384615384615385,
0.16666666666666666,
0.05,
0.10526315789473684,
0.16666666666666666,
0,
0.11764705882352941,
0.05405405405405406,
0.030303030303030304,
0,
0.2,
0.08695652173913043,
0,
0.1111111111111111,
0.02857142857142857,
0,
0.13333333333333333,
0.044444444444444446,
0.07142857142857142,
0.07692307692307693,
0.0625,
0.07142857142857142,
0.04878048780487805,
0.05555555555555555,
0.08695652173913043,
0.06060606060606061,
0.08333333333333333,
0,
0.07407407407407407,
0.047619047619047616,
0,
0.09523809523809523,
0.07692307692307693,
0.0425531914893617,
0.08695652173913043,
0.125,
0,
0.02666666666666667,
0,
0.13333333333333333,
0.0625,
0.16666666666666666
] |
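A round-trip sketch for `imdct` above, assuming a matching forward `mdct(x, L)` (named in the docstring's "See also" but not shown) that returns coefficients of shape (L/2, 2N/L):

    import numpy as np
    L = 32
    x = np.random.randn(20 * (L // 2))   # N must be a multiple of L // 2
    y = mdct(x, L)                       # assumed forward transform, shape (16, 20)
    x_rec = imdct(y, L)                  # same length as x
    print(np.max(np.abs(x - x_rec)))     # ~1e-15 for a matched mdct/imdct pair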
def serialize_model(self, value):
"""
Serializes a model and all of its prefetched foreign keys
:param value:
:return:
"""
# Check if the context value is a model
if not isinstance(value, models.Model):
return value
# Serialize the model
serialized_model = model_to_dict(value)
# Check the model for cached foreign keys
for model_field, model_value in serialized_model.items():
model_state = value._state
# Django >= 2
if hasattr(model_state, 'fields_cache'): # pragma: no cover
if model_state.fields_cache.get(model_field):
serialized_model[model_field] = model_state.fields_cache.get(model_field)
else: # pragma: no cover
# Django < 2
cache_field = '_{0}_cache'.format(model_field)
if hasattr(value, cache_field):
serialized_model[model_field] = getattr(value, cache_field)
# Return the serialized model
return self.serialize_value(serialized_model) | [
"def",
"serialize_model",
"(",
"self",
",",
"value",
")",
":",
"# Check if the context value is a model",
"if",
"not",
"isinstance",
"(",
"value",
",",
"models",
".",
"Model",
")",
":",
"return",
"value",
"# Serialize the model",
"serialized_model",
"=",
"model_to_dict",
"(",
"value",
")",
"# Check the model for cached foreign keys",
"for",
"model_field",
",",
"model_value",
"in",
"serialized_model",
".",
"items",
"(",
")",
":",
"model_state",
"=",
"value",
".",
"_state",
"# Django >= 2",
"if",
"hasattr",
"(",
"model_state",
",",
"'fields_cache'",
")",
":",
"# pragma: no cover",
"if",
"model_state",
".",
"fields_cache",
".",
"get",
"(",
"model_field",
")",
":",
"serialized_model",
"[",
"model_field",
"]",
"=",
"model_state",
".",
"fields_cache",
".",
"get",
"(",
"model_field",
")",
"else",
":",
"# pragma: no cover",
"# Django < 2",
"cache_field",
"=",
"'_{0}_cache'",
".",
"format",
"(",
"model_field",
")",
"if",
"hasattr",
"(",
"value",
",",
"cache_field",
")",
":",
"serialized_model",
"[",
"model_field",
"]",
"=",
"getattr",
"(",
"value",
",",
"cache_field",
")",
"# Return the serialized model",
"return",
"self",
".",
"serialize_value",
"(",
"serialized_model",
")"
] | 36.566667 | [
0.030303030303030304,
0.18181818181818182,
0.03076923076923077,
0.14285714285714285,
0.1875,
0.18181818181818182,
0,
0.0425531914893617,
0.0425531914893617,
0.08333333333333333,
0,
0.06896551724137931,
0.0425531914893617,
0,
0.04081632653061224,
0.03076923076923077,
0.05263157894736842,
0,
0.08,
0.027777777777777776,
0.03278688524590164,
0.03225806451612903,
0.05405405405405406,
0.07142857142857142,
0.03225806451612903,
0.0425531914893617,
0.02531645569620253,
0,
0.05405405405405406,
0.03773584905660377
] |
def cached_classproperty(fun):
"""A memorization decorator for class properties.
It implements the above `classproperty` decorator, with
the difference that the function result is computed and attached
to the class as a direct attribute. (Lazy loading and caching.)
"""
@functools.wraps(fun)
def get(cls):
try:
return cls.__cache[fun]
except AttributeError:
cls.__cache = {}
except KeyError: # pragma: no cover
pass
ret = cls.__cache[fun] = fun(cls)
return ret
return classproperty(get) | [
"def",
"cached_classproperty",
"(",
"fun",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"fun",
")",
"def",
"get",
"(",
"cls",
")",
":",
"try",
":",
"return",
"cls",
".",
"__cache",
"[",
"fun",
"]",
"except",
"AttributeError",
":",
"cls",
".",
"__cache",
"=",
"{",
"}",
"except",
"KeyError",
":",
"# pragma: no cover",
"pass",
"ret",
"=",
"cls",
".",
"__cache",
"[",
"fun",
"]",
"=",
"fun",
"(",
"cls",
")",
"return",
"ret",
"return",
"classproperty",
"(",
"get",
")"
] | 31.888889 | [
0.03333333333333333,
0.037037037037037035,
0,
0.05084745762711865,
0.029411764705882353,
0.03278688524590164,
0.2857142857142857,
0.08,
0.11764705882352941,
0.16666666666666666,
0.05714285714285714,
0.06666666666666667,
0.07142857142857142,
0.045454545454545456,
0.125,
0.04878048780487805,
0.1111111111111111,
0.06896551724137931
] |
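A short usage sketch for `cached_classproperty` above (it relies on the companion `classproperty` decorator referenced in the docstring, which is defined elsewhere):

    class Config:
        @cached_classproperty
        def table(cls):
            print('computing once')      # runs only on first access
            return {'answer': 42}

    Config.table   # prints 'computing once' and returns the dict
    Config.table   # second access is served from cls.__cache, no recomputation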
def types(self) -> GraphQLTypeList:
"""Get provided types."""
try:
types = resolve_thunk(self._types)
except GraphQLError:
raise
except Exception as error:
raise TypeError(f"{self.name} types cannot be resolved: {error}")
if types is None:
types = []
if not isinstance(types, (list, tuple)):
raise TypeError(
f"{self.name} types must be a list/tuple"
" or a function which returns a list/tuple."
)
if not all(isinstance(value, GraphQLObjectType) for value in types):
raise TypeError(f"{self.name} types must be GraphQLObjectType objects.")
return types[:] | [
"def",
"types",
"(",
"self",
")",
"->",
"GraphQLTypeList",
":",
"try",
":",
"types",
"=",
"resolve_thunk",
"(",
"self",
".",
"_types",
")",
"except",
"GraphQLError",
":",
"raise",
"except",
"Exception",
"as",
"error",
":",
"raise",
"TypeError",
"(",
"f\"{self.name} types cannot be resolved: {error}\"",
")",
"if",
"types",
"is",
"None",
":",
"types",
"=",
"[",
"]",
"if",
"not",
"isinstance",
"(",
"types",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"raise",
"TypeError",
"(",
"f\"{self.name} types must be a list/tuple\"",
"\" or a function which returns a list/tuple.\"",
")",
"if",
"not",
"all",
"(",
"isinstance",
"(",
"value",
",",
"GraphQLObjectType",
")",
"for",
"value",
"in",
"types",
")",
":",
"raise",
"TypeError",
"(",
"f\"{self.name} types must be GraphQLObjectType objects.\"",
")",
"return",
"types",
"[",
":",
"]"
] | 39.888889 | [
0.02857142857142857,
0.06060606060606061,
0.16666666666666666,
0.043478260869565216,
0.07142857142857142,
0.11764705882352941,
0.058823529411764705,
0.025974025974025976,
0.08,
0.09090909090909091,
0.041666666666666664,
0.10714285714285714,
0.03508771929824561,
0.03333333333333333,
0.23076923076923078,
0.02631578947368421,
0.03571428571428571,
0.08695652173913043
] |
def _get_position_from_instance(self, instance, ordering):
"""
The position will be a tuple of values:
the QuerySet number within the QuerySetSequence, and
whatever value the normal ordering property gives.
"""
# Get the QuerySet number of the current instance.
qs_order = getattr(instance, '#')
# Strip the '#' and call the standard _get_position_from_instance.
result = super(SequenceCursorPagination, self)._get_position_from_instance(instance, ordering[1:])
# Return a tuple of these two elements.
return (qs_order, result) | [
"def",
"_get_position_from_instance",
"(",
"self",
",",
"instance",
",",
"ordering",
")",
":",
"# Get the QuerySet number of the current instance.",
"qs_order",
"=",
"getattr",
"(",
"instance",
",",
"'#'",
")",
"# Strip the '#' and call the standard _get_position_from_instance.",
"result",
"=",
"super",
"(",
"SequenceCursorPagination",
",",
"self",
")",
".",
"_get_position_from_instance",
"(",
"instance",
",",
"ordering",
"[",
"1",
":",
"]",
")",
"# Return a tuple of these two elements.",
"return",
"(",
"qs_order",
",",
"result",
")"
] | 39.125 | [
0.017241379310344827,
0.18181818181818182,
0.0425531914893617,
0,
0.031746031746031744,
0.025974025974025976,
0,
0.18181818181818182,
0.034482758620689655,
0.04878048780487805,
0,
0.02702702702702703,
0.02830188679245283,
0,
0.0425531914893617,
0.06060606060606061
] |
def val(self, name):
"""
retrieves a value, substituting actual
values for ConfigValue templates.
"""
v = getattr(self, name)
if hasattr(v, 'retrieve_value'):
v = v.retrieve_value(self.__dict__)
return v | [
"def",
"val",
"(",
"self",
",",
"name",
")",
":",
"v",
"=",
"getattr",
"(",
"self",
",",
"name",
")",
"if",
"hasattr",
"(",
"v",
",",
"'retrieve_value'",
")",
":",
"v",
"=",
"v",
".",
"retrieve_value",
"(",
"self",
".",
"__dict__",
")",
"return",
"v"
] | 29.222222 | [
0.05,
0.18181818181818182,
0.043478260869565216,
0.04878048780487805,
0.18181818181818182,
0.06451612903225806,
0.05,
0.0425531914893617,
0.125
] |
def value(self):
"""Returns the positive value to subtract from the total."""
originalPrice = self.lineItem.totalPrice
if self.flatRate == 0:
return originalPrice * self.percent
return self.flatRate | [
"def",
"value",
"(",
"self",
")",
":",
"originalPrice",
"=",
"self",
".",
"lineItem",
".",
"totalPrice",
"if",
"self",
".",
"flatRate",
"==",
"0",
":",
"return",
"originalPrice",
"*",
"self",
".",
"percent",
"return",
"self",
".",
"flatRate"
] | 37.5 | [
0.0625,
0.045454545454545456,
0.06521739130434782,
0.10714285714285714,
0.046511627906976744,
0.11538461538461539
] |
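A worked example for `value` above: with `lineItem.totalPrice = 100.0`, `percent = 0.15`, and `flatRate = 0`, the method returns 100.0 * 0.15 = 15.0; with `flatRate = 5.0` it returns 5.0 and `percent` is ignored.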
def run(self, resources):
"""Sets the RTC timestamp to UTC.
Args:
resources (dict): A dictionary containing the required resources that
we needed access to in order to perform this step.
"""
hwman = resources['connection']
con = hwman.hwman.controller()
test_interface = con.test_interface()
try:
test_interface.synchronize_clock()
print('Time currently set at %s' % test_interface.current_time_str())
except:
raise ArgumentError('Error setting RTC time, check if controller actually has RTC or if iotile-support-lib-controller-3 is updated') | [
"def",
"run",
"(",
"self",
",",
"resources",
")",
":",
"hwman",
"=",
"resources",
"[",
"'connection'",
"]",
"con",
"=",
"hwman",
".",
"hwman",
".",
"controller",
"(",
")",
"test_interface",
"=",
"con",
".",
"test_interface",
"(",
")",
"try",
":",
"test_interface",
".",
"synchronize_clock",
"(",
")",
"print",
"(",
"'Time currently set at %s'",
"%",
"test_interface",
".",
"current_time_str",
"(",
")",
")",
"except",
":",
"raise",
"ArgumentError",
"(",
"'Error setting RTC time, check if controller actually has RTC or if iotile-support-lib-controller-3 is updated'",
")"
] | 43.8 | [
0.04,
0.04878048780487805,
0,
0.15384615384615385,
0.04938271604938271,
0.030303030303030304,
0.18181818181818182,
0.05128205128205128,
0.05263157894736842,
0.044444444444444446,
0.16666666666666666,
0.043478260869565216,
0.037037037037037035,
0.2,
0.020833333333333332
] |
def put(self, url, data=None):
"""Send a HTTP PUT request to a URL and return the result.
"""
self.conn.request("PUT", url, data)
return self._process_response() | [
"def",
"put",
"(",
"self",
",",
"url",
",",
"data",
"=",
"None",
")",
":",
"self",
".",
"conn",
".",
"request",
"(",
"\"PUT\"",
",",
"url",
",",
"data",
")",
"return",
"self",
".",
"_process_response",
"(",
")"
] | 37.8 | [
0.03333333333333333,
0.030303030303030304,
0.18181818181818182,
0.046511627906976744,
0.05128205128205128
] |
def get_assessment_ids(self):
"""Gets the Ids of any assessments associated with this activity.
return: (osid.id.IdList) - list of assessment Ids
raise: IllegalState - is_assessment_based_activity() is false
compliance: mandatory - This method must be implemented.
"""
if not self.is_assessment_based_activity():
raise IllegalState()
else:
return [Id(a) for a in self._my_map['assessmentIds']] | [
"def",
"get_assessment_ids",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_assessment_based_activity",
"(",
")",
":",
"raise",
"IllegalState",
"(",
")",
"else",
":",
"return",
"[",
"Id",
"(",
"a",
")",
"for",
"a",
"in",
"self",
".",
"_my_map",
"[",
"'assessmentIds'",
"]",
"]"
] | 38.75 | [
0.034482758620689655,
0.0273972602739726,
0,
0.03508771929824561,
0.02857142857142857,
0.03125,
0,
0.18181818181818182,
0.0392156862745098,
0.0625,
0.15384615384615385,
0.03076923076923077
] |
def setup(self):
"""Get start end end date of logfile before starting to parse."""
if self.mlogfilter.is_stdin:
# assume this year (we have no other info)
now = datetime.now()
self.startDateTime = datetime(now.year, 1, 1, tzinfo=tzutc())
self.endDateTime = datetime(MAXYEAR, 12, 31, tzinfo=tzutc())
else:
logfiles = self.mlogfilter.args['logfile']
self.startDateTime = min([lf.start +
timedelta(hours=self
.mlogfilter
.args['timezone'][i])
for i, lf in enumerate(logfiles)])
self.endDateTime = max([lf.end +
timedelta(hours=self
.mlogfilter.args['timezone'][i])
for i, lf in enumerate(logfiles)])
# now parse for further changes to from and to datetimes
dtbound = DateTimeBoundaries(self.startDateTime, self.endDateTime)
self.fromDateTime, self.toDateTime = dtbound(self.mlogfilter
.args['from'] or None,
self.mlogfilter
.args['to'] or None)
# define start_limit for mlogfilter's fast_forward method
self.start_limit = self.fromDateTime
# for single logfile, get file seek position of `to` datetime
if (len(self.mlogfilter.args['logfile']) == 1 and not
self.mlogfilter.is_stdin):
if self.mlogfilter.args['to'] != "end":
# fast forward, get seek value, then reset file
logfile = self.mlogfilter.args['logfile'][0]
logfile.fast_forward(self.toDateTime)
self.seek_to = logfile.filehandle.tell()
logfile.filehandle.seek(0)
else:
self.seek_to = -1
else:
self.seek_to = False | [
"def",
"setup",
"(",
"self",
")",
":",
"if",
"self",
".",
"mlogfilter",
".",
"is_stdin",
":",
"# assume this year (we have no other info)",
"now",
"=",
"datetime",
".",
"now",
"(",
")",
"self",
".",
"startDateTime",
"=",
"datetime",
"(",
"now",
".",
"year",
",",
"1",
",",
"1",
",",
"tzinfo",
"=",
"tzutc",
"(",
")",
")",
"self",
".",
"endDateTime",
"=",
"datetime",
"(",
"MAXYEAR",
",",
"12",
",",
"31",
",",
"tzinfo",
"=",
"tzutc",
"(",
")",
")",
"else",
":",
"logfiles",
"=",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'logfile'",
"]",
"self",
".",
"startDateTime",
"=",
"min",
"(",
"[",
"lf",
".",
"start",
"+",
"timedelta",
"(",
"hours",
"=",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'timezone'",
"]",
"[",
"i",
"]",
")",
"for",
"i",
",",
"lf",
"in",
"enumerate",
"(",
"logfiles",
")",
"]",
")",
"self",
".",
"endDateTime",
"=",
"max",
"(",
"[",
"lf",
".",
"end",
"+",
"timedelta",
"(",
"hours",
"=",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'timezone'",
"]",
"[",
"i",
"]",
")",
"for",
"i",
",",
"lf",
"in",
"enumerate",
"(",
"logfiles",
")",
"]",
")",
"# now parse for further changes to from and to datetimes",
"dtbound",
"=",
"DateTimeBoundaries",
"(",
"self",
".",
"startDateTime",
",",
"self",
".",
"endDateTime",
")",
"self",
".",
"fromDateTime",
",",
"self",
".",
"toDateTime",
"=",
"dtbound",
"(",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'from'",
"]",
"or",
"None",
",",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'to'",
"]",
"or",
"None",
")",
"# define start_limit for mlogfilter's fast_forward method",
"self",
".",
"start_limit",
"=",
"self",
".",
"fromDateTime",
"# for single logfile, get file seek position of `to` datetime",
"if",
"(",
"len",
"(",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'logfile'",
"]",
")",
"==",
"1",
"and",
"not",
"self",
".",
"mlogfilter",
".",
"is_stdin",
")",
":",
"if",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'to'",
"]",
"!=",
"\"end\"",
":",
"# fast forward, get seek value, then reset file",
"logfile",
"=",
"self",
".",
"mlogfilter",
".",
"args",
"[",
"'logfile'",
"]",
"[",
"0",
"]",
"logfile",
".",
"fast_forward",
"(",
"self",
".",
"toDateTime",
")",
"self",
".",
"seek_to",
"=",
"logfile",
".",
"filehandle",
".",
"tell",
"(",
")",
"logfile",
".",
"filehandle",
".",
"seek",
"(",
"0",
")",
"else",
":",
"self",
".",
"seek_to",
"=",
"-",
"1",
"else",
":",
"self",
".",
"seek_to",
"=",
"False"
] | 47.727273 | [
0.0625,
0.0273972602739726,
0.05555555555555555,
0.037037037037037035,
0.0625,
0.0273972602739726,
0.027777777777777776,
0,
0.15384615384615385,
0.037037037037037035,
0.0625,
0.06896551724137931,
0.03389830508474576,
0.043478260869565216,
0.05555555555555555,
0.06818181818181818,
0.05357142857142857,
0.05128205128205128,
0.04285714285714286,
0,
0.03125,
0.02702702702702703,
0.04411764705882353,
0.04,
0.04411764705882353,
0.0547945205479452,
0,
0.03076923076923077,
0.045454545454545456,
0,
0.028985507246376812,
0.04918032786885246,
0.07142857142857142,
0,
0.0392156862745098,
0.031746031746031744,
0.03333333333333333,
0.03773584905660377,
0.03571428571428571,
0.047619047619047616,
0.11764705882352941,
0.06060606060606061,
0.15384615384615385,
0.0625
] |
def idle_task(self):
'''called on idle'''
if mp_util.has_wxpython and (not self.menu_added_console and self.module('console') is not None):
self.menu_added_console = True
# we don't dynamically update these yet due to a wx bug
self.menu_add.items = [ MPMenuItem(p, p, '# link add %s' % p) for p in self.complete_serial_ports('') ]
self.menu_rm.items = [ MPMenuItem(p, p, '# link remove %s' % p) for p in self.complete_links('') ]
self.module('console').add_menu(self.menu)
for m in self.mpstate.mav_master:
m.source_system = self.settings.source_system
m.mav.srcSystem = m.source_system
m.mav.srcComponent = self.settings.source_component | [
"def",
"idle_task",
"(",
"self",
")",
":",
"if",
"mp_util",
".",
"has_wxpython",
"and",
"(",
"not",
"self",
".",
"menu_added_console",
"and",
"self",
".",
"module",
"(",
"'console'",
")",
"is",
"not",
"None",
")",
":",
"self",
".",
"menu_added_console",
"=",
"True",
"# we don't dynamically update these yet due to a wx bug",
"self",
".",
"menu_add",
".",
"items",
"=",
"[",
"MPMenuItem",
"(",
"p",
",",
"p",
",",
"'# link add %s'",
"%",
"p",
")",
"for",
"p",
"in",
"self",
".",
"complete_serial_ports",
"(",
"''",
")",
"]",
"self",
".",
"menu_rm",
".",
"items",
"=",
"[",
"MPMenuItem",
"(",
"p",
",",
"p",
",",
"'# link remove %s'",
"%",
"p",
")",
"for",
"p",
"in",
"self",
".",
"complete_links",
"(",
"''",
")",
"]",
"self",
".",
"module",
"(",
"'console'",
")",
".",
"add_menu",
"(",
"self",
".",
"menu",
")",
"for",
"m",
"in",
"self",
".",
"mpstate",
".",
"mav_master",
":",
"m",
".",
"source_system",
"=",
"self",
".",
"settings",
".",
"source_system",
"m",
".",
"mav",
".",
"srcSystem",
"=",
"m",
".",
"source_system",
"m",
".",
"mav",
".",
"srcComponent",
"=",
"self",
".",
"settings",
".",
"source_component"
] | 62.25 | [
0.05,
0.07142857142857142,
0.02857142857142857,
0.047619047619047616,
0.029850746268656716,
0.043478260869565216,
0.045454545454545456,
0.037037037037037035,
0.04878048780487805,
0.03508771929824561,
0.044444444444444446,
0.031746031746031744
] |
def add(self, col: str, value):
"""
Add a column with default values
:param col: column name
:type col: str
:param value: column value
:type value: any
:example: ``ds.add("Col 4", 0)``
"""
try:
self.df[col] = value
except Exception as e:
self.err(e, self.add, "Can not add column") | [
"def",
"add",
"(",
"self",
",",
"col",
":",
"str",
",",
"value",
")",
":",
"try",
":",
"self",
".",
"df",
"[",
"col",
"]",
"=",
"value",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"err",
"(",
"e",
",",
"self",
".",
"add",
",",
"\"Can not add column\"",
")"
] | 24.866667 | [
0.03225806451612903,
0.18181818181818182,
0.05,
0,
0.0967741935483871,
0.13636363636363635,
0.08823529411764706,
0.125,
0,
0.1,
0.18181818181818182,
0.16666666666666666,
0.0625,
0.06666666666666667,
0.03636363636363636
] |
def add_forward_workflow(self, dag, sections, satisfies=None):
'''Add a forward-workflow; return the number of nodes added
'''
dag.new_forward_workflow()
if 'DAG' in env.config['SOS_DEBUG'] or 'ALL' in env.config['SOS_DEBUG']:
env.log_to_file(
'DAG', f'Adding mini-workflow with {len(sections)} sections')
default_input: sos_targets = sos_targets([])
for idx, section in enumerate(sections):
#
res = analyze_section(section, default_input=default_input)
environ_vars = res['environ_vars']
signature_vars = res['signature_vars']
changed_vars = res['changed_vars']
# parameters, if used in the step, should be considered environmental
environ_vars |= env.parameter_vars & signature_vars
# add shared to targets
if res['changed_vars']:
if 'provides' in section.options:
if isinstance(section.options['provides'], str):
section.options.set('provides',
[section.options['provides']])
else:
section.options.set('provides', [])
#
section.options.set(
'provides', section.options['provides'] +
[sos_variable(var) for var in changed_vars])
context = {
'__signature_vars__': signature_vars,
'__environ_vars__': environ_vars,
'__changed_vars__': changed_vars,
'__dynamic_depends__': res['dynamic_depends'],
'__dynamic_input__': res['dynamic_input']
}
# for nested workflow, the input is specified by sos_run, not None.
if idx == 0:
context['__step_output__'] = env.sos_dict['__step_output__']
# can be the only step
if idx == len(sections) - 1 and satisfies is not None:
res['step_output'].extend(satisfies)
dag.add_step(
section.uuid,
section.step_name(),
idx,
res['step_input'],
res['step_depends'],
res['step_output'],
context=context)
default_input = res['step_output']
return len(sections) | [
"def",
"add_forward_workflow",
"(",
"self",
",",
"dag",
",",
"sections",
",",
"satisfies",
"=",
"None",
")",
":",
"dag",
".",
"new_forward_workflow",
"(",
")",
"if",
"'DAG'",
"in",
"env",
".",
"config",
"[",
"'SOS_DEBUG'",
"]",
"or",
"'ALL'",
"in",
"env",
".",
"config",
"[",
"'SOS_DEBUG'",
"]",
":",
"env",
".",
"log_to_file",
"(",
"'DAG'",
",",
"f'Adding mini-workflow with {len(sections)} sections'",
")",
"default_input",
":",
"sos_targets",
"=",
"sos_targets",
"(",
"[",
"]",
")",
"for",
"idx",
",",
"section",
"in",
"enumerate",
"(",
"sections",
")",
":",
"#",
"res",
"=",
"analyze_section",
"(",
"section",
",",
"default_input",
"=",
"default_input",
")",
"environ_vars",
"=",
"res",
"[",
"'environ_vars'",
"]",
"signature_vars",
"=",
"res",
"[",
"'signature_vars'",
"]",
"changed_vars",
"=",
"res",
"[",
"'changed_vars'",
"]",
"# parameters, if used in the step, should be considered environmental",
"environ_vars",
"|=",
"env",
".",
"parameter_vars",
"&",
"signature_vars",
"# add shared to targets",
"if",
"res",
"[",
"'changed_vars'",
"]",
":",
"if",
"'provides'",
"in",
"section",
".",
"options",
":",
"if",
"isinstance",
"(",
"section",
".",
"options",
"[",
"'provides'",
"]",
",",
"str",
")",
":",
"section",
".",
"options",
".",
"set",
"(",
"'provides'",
",",
"[",
"section",
".",
"options",
"[",
"'provides'",
"]",
"]",
")",
"else",
":",
"section",
".",
"options",
".",
"set",
"(",
"'provides'",
",",
"[",
"]",
")",
"#",
"section",
".",
"options",
".",
"set",
"(",
"'provides'",
",",
"section",
".",
"options",
"[",
"'provides'",
"]",
"+",
"[",
"sos_variable",
"(",
"var",
")",
"for",
"var",
"in",
"changed_vars",
"]",
")",
"context",
"=",
"{",
"'__signature_vars__'",
":",
"signature_vars",
",",
"'__environ_vars__'",
":",
"environ_vars",
",",
"'__changed_vars__'",
":",
"changed_vars",
",",
"'__dynamic_depends__'",
":",
"res",
"[",
"'dynamic_depends'",
"]",
",",
"'__dynamic_input__'",
":",
"res",
"[",
"'dynamic_input'",
"]",
"}",
"# for nested workflow, the input is specified by sos_run, not None.",
"if",
"idx",
"==",
"0",
":",
"context",
"[",
"'__step_output__'",
"]",
"=",
"env",
".",
"sos_dict",
"[",
"'__step_output__'",
"]",
"# can be the only step",
"if",
"idx",
"==",
"len",
"(",
"sections",
")",
"-",
"1",
"and",
"satisfies",
"is",
"not",
"None",
":",
"res",
"[",
"'step_output'",
"]",
".",
"extend",
"(",
"satisfies",
")",
"dag",
".",
"add_step",
"(",
"section",
".",
"uuid",
",",
"section",
".",
"step_name",
"(",
")",
",",
"idx",
",",
"res",
"[",
"'step_input'",
"]",
",",
"res",
"[",
"'step_depends'",
"]",
",",
"res",
"[",
"'step_output'",
"]",
",",
"context",
"=",
"context",
")",
"default_input",
"=",
"res",
"[",
"'step_output'",
"]",
"return",
"len",
"(",
"sections",
")"
] | 41.280702 | [
0.016129032258064516,
0.031746031746031744,
0.18181818181818182,
0.058823529411764705,
0,
0.0375,
0.10714285714285714,
0.03896103896103896,
0.038461538461538464,
0.041666666666666664,
0.15384615384615385,
0.028169014084507043,
0,
0.043478260869565216,
0.04,
0.043478260869565216,
0.037037037037037035,
0.031746031746031744,
0,
0.05714285714285714,
0.05714285714285714,
0.04081632653061224,
0.029411764705882353,
0.05454545454545454,
0.04054054054054054,
0.09523809523809523,
0.03636363636363636,
0.11764705882352941,
0.08333333333333333,
0.03278688524590164,
0.046875,
0,
0.13043478260869565,
0.03773584905660377,
0.04081632653061224,
0.04081632653061224,
0.03225806451612903,
0.03508771929824561,
0.23076923076923078,
0,
0.02531645569620253,
0.08333333333333333,
0.02631578947368421,
0.058823529411764705,
0.030303030303030304,
0.038461538461538464,
0,
0.12,
0.06896551724137931,
0.05555555555555555,
0.1,
0.058823529411764705,
0.05555555555555555,
0.05714285714285714,
0.125,
0.043478260869565216,
0.07142857142857142
] |
def pbs(ac1, ac2, ac3, window_size, window_start=0, window_stop=None,
window_step=None, normed=True):
"""Compute the population branching statistic (PBS) which performs a comparison
of allele frequencies between three populations to detect genome regions that are
unusually differentiated in one population relative to the other two populations.
Parameters
----------
ac1 : array_like, int
Allele counts from the first population.
ac2 : array_like, int
Allele counts from the second population.
ac3 : array_like, int
Allele counts from the third population.
window_size : int
The window size (number of variants) within which to compute PBS values.
window_start : int, optional
The variant index at which to start windowed calculations.
window_stop : int, optional
The variant index at which to stop windowed calculations.
window_step : int, optional
The number of variants between start positions of windows. If not given, defaults
to the window size, i.e., non-overlapping windows.
normed : bool, optional
If True (default), use the normalised version of PBS, also known as PBSn1 [2]_.
Otherwise, use the PBS statistic as originally defined in [1]_.
Returns
-------
pbs : ndarray, float
Windowed PBS values.
Notes
-----
The F\ :sub:`ST` calculations use Hudson's estimator.
References
----------
.. [1] Yi et al., "Sequencing of Fifty Human Exomes Reveals Adaptation to High
Altitude", Science, 329(5987): 75–78, 2 July 2010.
.. [2] Malaspinas et al., "A genomic history of Aboriginal Australia", Nature. volume
538, pages 207–214, 13 October 2016.
"""
# normalise and check inputs
ac1 = AlleleCountsArray(ac1)
ac2 = AlleleCountsArray(ac2)
ac3 = AlleleCountsArray(ac3)
check_dim0_aligned(ac1, ac2, ac3)
# compute fst
fst12 = moving_hudson_fst(ac1, ac2, size=window_size, start=window_start,
stop=window_stop, step=window_step)
fst13 = moving_hudson_fst(ac1, ac3, size=window_size, start=window_start,
stop=window_stop, step=window_step)
fst23 = moving_hudson_fst(ac2, ac3, size=window_size, start=window_start,
stop=window_stop, step=window_step)
# clip fst values to avoid infinite if fst is 1
for x in fst12, fst13, fst23:
np.clip(x, a_min=0, a_max=0.99999, out=x)
# compute fst transform
t12 = -np.log(1 - fst12)
t13 = -np.log(1 - fst13)
t23 = -np.log(1 - fst23)
# compute pbs
ret = (t12 + t13 - t23) / 2
if normed:
# compute pbs normalising constant
norm = 1 + (t12 + t13 + t23) / 2
ret = ret / norm
return ret | [
"def",
"pbs",
"(",
"ac1",
",",
"ac2",
",",
"ac3",
",",
"window_size",
",",
"window_start",
"=",
"0",
",",
"window_stop",
"=",
"None",
",",
"window_step",
"=",
"None",
",",
"normed",
"=",
"True",
")",
":",
"# normalise and check inputs",
"ac1",
"=",
"AlleleCountsArray",
"(",
"ac1",
")",
"ac2",
"=",
"AlleleCountsArray",
"(",
"ac2",
")",
"ac3",
"=",
"AlleleCountsArray",
"(",
"ac3",
")",
"check_dim0_aligned",
"(",
"ac1",
",",
"ac2",
",",
"ac3",
")",
"# compute fst",
"fst12",
"=",
"moving_hudson_fst",
"(",
"ac1",
",",
"ac2",
",",
"size",
"=",
"window_size",
",",
"start",
"=",
"window_start",
",",
"stop",
"=",
"window_stop",
",",
"step",
"=",
"window_step",
")",
"fst13",
"=",
"moving_hudson_fst",
"(",
"ac1",
",",
"ac3",
",",
"size",
"=",
"window_size",
",",
"start",
"=",
"window_start",
",",
"stop",
"=",
"window_stop",
",",
"step",
"=",
"window_step",
")",
"fst23",
"=",
"moving_hudson_fst",
"(",
"ac2",
",",
"ac3",
",",
"size",
"=",
"window_size",
",",
"start",
"=",
"window_start",
",",
"stop",
"=",
"window_stop",
",",
"step",
"=",
"window_step",
")",
"# clip fst values to avoid infinite if fst is 1",
"for",
"x",
"in",
"fst12",
",",
"fst13",
",",
"fst23",
":",
"np",
".",
"clip",
"(",
"x",
",",
"a_min",
"=",
"0",
",",
"a_max",
"=",
"0.99999",
",",
"out",
"=",
"x",
")",
"# compute fst transform",
"t12",
"=",
"-",
"np",
".",
"log",
"(",
"1",
"-",
"fst12",
")",
"t13",
"=",
"-",
"np",
".",
"log",
"(",
"1",
"-",
"fst13",
")",
"t23",
"=",
"-",
"np",
".",
"log",
"(",
"1",
"-",
"fst23",
")",
"# compute pbs",
"ret",
"=",
"(",
"t12",
"+",
"t13",
"-",
"t23",
")",
"/",
"2",
"if",
"normed",
":",
"# compute pbs normalising constant",
"norm",
"=",
"1",
"+",
"(",
"t12",
"+",
"t13",
"+",
"t23",
")",
"/",
"2",
"ret",
"=",
"ret",
"/",
"norm",
"return",
"ret"
] | 35.766234 | [
0.028985507246376812,
0.1282051282051282,
0.03614457831325301,
0.03529411764705882,
0.03529411764705882,
0,
0.14285714285714285,
0.14285714285714285,
0.12,
0.041666666666666664,
0.12,
0.04081632653061224,
0.12,
0.041666666666666664,
0.14285714285714285,
0.05,
0.09375,
0.030303030303030304,
0.0967741935483871,
0.03076923076923077,
0.0967741935483871,
0.033707865168539325,
0.034482758620689655,
0.1111111111111111,
0.04597701149425287,
0.028169014084507043,
0,
0.18181818181818182,
0.18181818181818182,
0.125,
0.07142857142857142,
0,
0.2222222222222222,
0.2222222222222222,
0.09090909090909091,
0,
0.14285714285714285,
0.14285714285714285,
0.036585365853658534,
0.05263157894736842,
0.033707865168539325,
0.06976744186046512,
0,
0.2857142857142857,
0,
0.0625,
0.0625,
0.0625,
0.0625,
0.05405405405405406,
0,
0.11764705882352941,
0.03896103896103896,
0.09230769230769231,
0.03896103896103896,
0.09230769230769231,
0.03896103896103896,
0.09230769230769231,
0,
0.0392156862745098,
0.06060606060606061,
0.04081632653061224,
0,
0.07407407407407407,
0.07142857142857142,
0.07142857142857142,
0.07142857142857142,
0,
0.11764705882352941,
0.06451612903225806,
0,
0.14285714285714285,
0.047619047619047616,
0.05,
0.08333333333333333,
0,
0.14285714285714285
] |
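The branch-length algebra implemented in `pbs` above, written out (matching the code, with the PBSn1 normalisation of [2]_):

    T_{ij} = -\log\bigl(1 - F_{ST}^{(i,j)}\bigr), \qquad
    \mathrm{PBS}_1 = \frac{T_{12} + T_{13} - T_{23}}{2}, \qquad
    \mathrm{PBSn1} = \frac{\mathrm{PBS}_1}{1 + \tfrac{1}{2}\,(T_{12} + T_{13} + T_{23})}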
def run_plugins(plugins, url_data, stop_after_match=False, **kwargs):
"""Run the check(url_data) method of given plugins."""
for plugin in plugins:
log.debug(LOG_PLUGIN, "Run plugin %s", plugin.__class__.__name__)
if plugin.applies_to(url_data, **kwargs):
plugin.check(url_data)
if stop_after_match:
break | [
"def",
"run_plugins",
"(",
"plugins",
",",
"url_data",
",",
"stop_after_match",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"plugin",
"in",
"plugins",
":",
"log",
".",
"debug",
"(",
"LOG_PLUGIN",
",",
"\"Run plugin %s\"",
",",
"plugin",
".",
"__class__",
".",
"__name__",
")",
"if",
"plugin",
".",
"applies_to",
"(",
"url_data",
",",
"*",
"*",
"kwargs",
")",
":",
"plugin",
".",
"check",
"(",
"url_data",
")",
"if",
"stop_after_match",
":",
"break"
] | 45.25 | [
0.014492753623188406,
0.034482758620689655,
0.07692307692307693,
0.0273972602739726,
0.04081632653061224,
0.058823529411764705,
0.0625,
0.09523809523809523
] |
def assert_valid(self, instance, value=None):
"""Returns True if the Property is valid on a HasProperties instance
Raises a ValueError if the value is required and not set, not valid,
not correctly coerced, etc.
.. note::
Unlike :code:`validate`, this method requires instance to be
a HasProperties instance; it cannot be None.
"""
if value is None:
value = instance._get(self.name)
if value is None and self.required:
message = (
"The '{name}' property of a {cls} instance is required "
"and has not been set.".format(
name=self.name,
cls=instance.__class__.__name__
)
)
raise ValidationError(message, 'missing', self.name, instance)
valid = super(Property, self).assert_valid(instance, value)
return valid | [
"def",
"assert_valid",
"(",
"self",
",",
"instance",
",",
"value",
"=",
"None",
")",
":",
"if",
"value",
"is",
"None",
":",
"value",
"=",
"instance",
".",
"_get",
"(",
"self",
".",
"name",
")",
"if",
"value",
"is",
"None",
"and",
"self",
".",
"required",
":",
"message",
"=",
"(",
"\"The '{name}' property of a {cls} instance is required \"",
"\"and has not been set.\"",
".",
"format",
"(",
"name",
"=",
"self",
".",
"name",
",",
"cls",
"=",
"instance",
".",
"__class__",
".",
"__name__",
")",
")",
"raise",
"ValidationError",
"(",
"message",
",",
"'missing'",
",",
"self",
".",
"name",
",",
"instance",
")",
"valid",
"=",
"super",
"(",
"Property",
",",
"self",
")",
".",
"assert_valid",
"(",
"instance",
",",
"value",
")",
"return",
"valid"
] | 38.166667 | [
0.022222222222222223,
0.02631578947368421,
0,
0.0273972602739726,
0.05714285714285714,
0,
0.17647058823529413,
0,
0.08333333333333333,
0.05357142857142857,
0.18181818181818182,
0.08,
0.045454545454545456,
0.046511627906976744,
0.13043478260869565,
0.027777777777777776,
0.06382978723404255,
0.08571428571428572,
0.058823529411764705,
0.17647058823529413,
0.23076923076923078,
0.02702702702702703,
0.029850746268656716,
0.1
] |
def save_fast_format(self, filename):
"""
Save a reach instance in a fast format.
The reach fast format stores the words and vectors of a Reach instance
separately in a JSON and numpy format, respectively.
Parameters
----------
filename : str
The prefix to add to the saved filename. Note that this is not the
real filename under which these items are stored.
The words and unk_index are stored under "{filename}_items.json",
and the numpy matrix is saved under "{filename}_vectors.npy".
"""
items, _ = zip(*sorted(self.items.items(), key=lambda x: x[1]))
items = {"items": items,
"unk_index": self.unk_index,
"name": self.name}
json.dump(items, open("{}_items.json".format(filename), 'w'))
np.save(open("{}_vectors.npy".format(filename), 'wb'), self.vectors) | [
"def",
"save_fast_format",
"(",
"self",
",",
"filename",
")",
":",
"items",
",",
"_",
"=",
"zip",
"(",
"*",
"sorted",
"(",
"self",
".",
"items",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"1",
"]",
")",
")",
"items",
"=",
"{",
"\"items\"",
":",
"items",
",",
"\"unk_index\"",
":",
"self",
".",
"unk_index",
",",
"\"name\"",
":",
"self",
".",
"name",
"}",
"json",
".",
"dump",
"(",
"items",
",",
"open",
"(",
"\"{}_items.json\"",
".",
"format",
"(",
"filename",
")",
",",
"'w'",
")",
")",
"np",
".",
"save",
"(",
"open",
"(",
"\"{}_vectors.npy\"",
".",
"format",
"(",
"filename",
")",
",",
"'wb'",
")",
",",
"self",
".",
"vectors",
")"
] | 39.956522 | [
0.02702702702702703,
0.18181818181818182,
0.0425531914893617,
0,
0.02564102564102564,
0.03333333333333333,
0,
0.1111111111111111,
0.1111111111111111,
0.13636363636363635,
0.02564102564102564,
0.03278688524590164,
0.025974025974025976,
0.0273972602739726,
0,
0.18181818181818182,
0.028169014084507043,
0.09375,
0.06666666666666667,
0.11428571428571428,
0,
0.028985507246376812,
0.02631578947368421
] |
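A hedged sketch of the matching load step (the library presumably ships its own loader; this simply mirrors the two files `save_fast_format` writes above):

    import json
    import numpy as np

    def load_fast_format_sketch(filename):
        meta = json.load(open('{}_items.json'.format(filename)))  # keys: items, unk_index, name
        vectors = np.load('{}_vectors.npy'.format(filename))
        return meta['items'], meta['unk_index'], meta['name'], vectors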
def encode(cls, line):
"""Backslash escape line.value."""
if not line.encoded:
encoding = getattr(line, 'encoding_param', None)
if encoding and encoding.upper() == cls.base64string:
line.value = b64encode(line.value).decode('utf-8')
else:
line.value = backslashEscape(str_(line.value))
line.encoded = True | [
"def",
"encode",
"(",
"cls",
",",
"line",
")",
":",
"if",
"not",
"line",
".",
"encoded",
":",
"encoding",
"=",
"getattr",
"(",
"line",
",",
"'encoding_param'",
",",
"None",
")",
"if",
"encoding",
"and",
"encoding",
".",
"upper",
"(",
")",
"==",
"cls",
".",
"base64string",
":",
"line",
".",
"value",
"=",
"b64encode",
"(",
"line",
".",
"value",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"else",
":",
"line",
".",
"value",
"=",
"backslashEscape",
"(",
"str_",
"(",
"line",
".",
"value",
")",
")",
"line",
".",
"encoded",
"=",
"True"
] | 43.666667 | [
0.045454545454545456,
0.047619047619047616,
0.07142857142857142,
0.03333333333333333,
0.03076923076923077,
0.030303030303030304,
0.11764705882352941,
0.03225806451612903,
0.06451612903225806
] |
def depth_score_helper(self, d, dopt):
"""
Gets a weighted count of the number of Intervals deeper than dopt.
:param d: current depth, starting from 0
:param dopt: optimal maximum depth of a leaf Node
:rtype: real
"""
# di is how may levels deeper than optimal d is
di = d - dopt
if di > 0:
count = di * len(self.s_center)
else:
count = 0
if self.right_node:
count += self.right_node.depth_score_helper(d + 1, dopt)
if self.left_node:
count += self.left_node.depth_score_helper(d + 1, dopt)
return count | [
"def",
"depth_score_helper",
"(",
"self",
",",
"d",
",",
"dopt",
")",
":",
"# di is how may levels deeper than optimal d is",
"di",
"=",
"d",
"-",
"dopt",
"if",
"di",
">",
"0",
":",
"count",
"=",
"di",
"*",
"len",
"(",
"self",
".",
"s_center",
")",
"else",
":",
"count",
"=",
"0",
"if",
"self",
".",
"right_node",
":",
"count",
"+=",
"self",
".",
"right_node",
".",
"depth_score_helper",
"(",
"d",
"+",
"1",
",",
"dopt",
")",
"if",
"self",
".",
"left_node",
":",
"count",
"+=",
"self",
".",
"left_node",
".",
"depth_score_helper",
"(",
"d",
"+",
"1",
",",
"dopt",
")",
"return",
"count"
] | 35.444444 | [
0.02631578947368421,
0.18181818181818182,
0.02702702702702703,
0.0625,
0.05263157894736842,
0.15,
0.18181818181818182,
0.03636363636363636,
0.09523809523809523,
0.1111111111111111,
0.046511627906976744,
0.15384615384615385,
0.09523809523809523,
0.07407407407407407,
0.029411764705882353,
0.07692307692307693,
0.029850746268656716,
0.1
] |
def case(self, case):
"""Get a case from the database
Search the cases with the case id
Args:
case (dict): A case dictionary
Returns:
mongo_case (dict): A mongo case dictionary
"""
LOG.debug("Getting case {0} from database".format(case.get('case_id')))
case_id = case['case_id']
return self.db.case.find_one({'case_id': case_id}) | [
"def",
"case",
"(",
"self",
",",
"case",
")",
":",
"LOG",
".",
"debug",
"(",
"\"Getting case {0} from database\"",
".",
"format",
"(",
"case",
".",
"get",
"(",
"'case_id'",
")",
")",
")",
"case_id",
"=",
"case",
"[",
"'case_id'",
"]",
"return",
"self",
".",
"db",
".",
"case",
".",
"find_one",
"(",
"{",
"'case_id'",
":",
"case_id",
"}",
")"
] | 30.5 | [
0.047619047619047616,
0.05128205128205128,
0,
0.044444444444444446,
0,
0.11764705882352941,
0.06521739130434782,
0,
0.1,
0.05172413793103448,
0.18181818181818182,
0.02531645569620253,
0.06060606060606061,
0.034482758620689655
] |
def build(self):
"""
Builds and returns all combinations of parameters specified
by the param grid.
"""
keys = self._param_grid.keys()
grid_values = self._param_grid.values()
def to_key_value_pairs(keys, values):
return [(key, key.typeConverter(value)) for key, value in zip(keys, values)]
return [dict(to_key_value_pairs(keys, prod)) for prod in itertools.product(*grid_values)] | [
"def",
"build",
"(",
"self",
")",
":",
"keys",
"=",
"self",
".",
"_param_grid",
".",
"keys",
"(",
")",
"grid_values",
"=",
"self",
".",
"_param_grid",
".",
"values",
"(",
")",
"def",
"to_key_value_pairs",
"(",
"keys",
",",
"values",
")",
":",
"return",
"[",
"(",
"key",
",",
"key",
".",
"typeConverter",
"(",
"value",
")",
")",
"for",
"key",
",",
"value",
"in",
"zip",
"(",
"keys",
",",
"values",
")",
"]",
"return",
"[",
"dict",
"(",
"to_key_value_pairs",
"(",
"keys",
",",
"prod",
")",
")",
"for",
"prod",
"in",
"itertools",
".",
"product",
"(",
"*",
"grid_values",
")",
"]"
] | 37.166667 | [
0.0625,
0.18181818181818182,
0.029850746268656716,
0.07692307692307693,
0.18181818181818182,
0.05263157894736842,
0.0425531914893617,
0,
0.044444444444444446,
0.03409090909090909,
0,
0.030927835051546393
] |
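The expansion above is a plain cartesian product over the grid's value lists. The same shape can be reproduced with ordinary dicts; the parameter names here are hypothetical and no pyspark `Param` objects or `typeConverter` calls are involved:

```python
import itertools

grid = {'maxDepth': [2, 4], 'regParam': [0.0, 0.1]}
combos = [dict(zip(grid, prod)) for prod in itertools.product(*grid.values())]
print(len(combos))  # 4
print(combos[0])    # {'maxDepth': 2, 'regParam': 0.0}
```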
def update(self, new_keys: Index):
"""Adds the new keys to the mapping.
Parameters
----------
new_keys :
The new index to hash.
"""
if not self._map.index.intersection(new_keys).empty:
raise KeyError("Non-unique keys in index.")
mapping_update = self.hash_(new_keys)
if self._map.empty:
self._map = mapping_update.drop_duplicates()
else:
self._map = self._map.append(mapping_update).drop_duplicates()
collisions = mapping_update.index.difference(self._map.index)
salt = 1
while not collisions.empty:
mapping_update = self.hash_(collisions, salt)
self._map = self._map.append(mapping_update).drop_duplicates()
collisions = mapping_update.index.difference(self._map.index)
salt += 1 | [
"def",
"update",
"(",
"self",
",",
"new_keys",
":",
"Index",
")",
":",
"if",
"not",
"self",
".",
"_map",
".",
"index",
".",
"intersection",
"(",
"new_keys",
")",
".",
"empty",
":",
"raise",
"KeyError",
"(",
"\"Non-unique keys in index.\"",
")",
"mapping_update",
"=",
"self",
".",
"hash_",
"(",
"new_keys",
")",
"if",
"self",
".",
"_map",
".",
"empty",
":",
"self",
".",
"_map",
"=",
"mapping_update",
".",
"drop_duplicates",
"(",
")",
"else",
":",
"self",
".",
"_map",
"=",
"self",
".",
"_map",
".",
"append",
"(",
"mapping_update",
")",
".",
"drop_duplicates",
"(",
")",
"collisions",
"=",
"mapping_update",
".",
"index",
".",
"difference",
"(",
"self",
".",
"_map",
".",
"index",
")",
"salt",
"=",
"1",
"while",
"not",
"collisions",
".",
"empty",
":",
"mapping_update",
"=",
"self",
".",
"hash_",
"(",
"collisions",
",",
"salt",
")",
"self",
".",
"_map",
"=",
"self",
".",
"_map",
".",
"append",
"(",
"mapping_update",
")",
".",
"drop_duplicates",
"(",
")",
"collisions",
"=",
"mapping_update",
".",
"index",
".",
"difference",
"(",
"self",
".",
"_map",
".",
"index",
")",
"salt",
"+=",
"1"
] | 35.5 | [
0.029411764705882353,
0.045454545454545456,
0,
0.1111111111111111,
0.1111111111111111,
0.16666666666666666,
0.058823529411764705,
0.18181818181818182,
0.03333333333333333,
0.03636363636363636,
0,
0.044444444444444446,
0.07407407407407407,
0.03571428571428571,
0.15384615384615385,
0.02702702702702703,
0,
0.028985507246376812,
0.125,
0.05714285714285714,
0.03508771929824561,
0.02702702702702703,
0.0273972602739726,
0.09523809523809523
] |
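The collision loop above is a salted-rehash scheme: any key whose hash collides with an already-assigned value is rehashed with an incremented salt until the map is injective. A self-contained sketch with a deliberately tiny hash space so collisions actually happen; `salted_hash` is a toy stand-in for `self.hash_`, not the class's method:

```python
import hashlib

def salted_hash(key, salt=0):
    # Toy hash into 8 buckets so collisions are common.
    return int(hashlib.md5(f'{key}{salt}'.encode()).hexdigest(), 16) % 8

mapping, salt, pending = {}, 0, [f'k{i}' for i in range(6)]
while pending:
    retry = []
    for key in pending:
        h = salted_hash(key, salt)
        if h in mapping.values():
            retry.append(key)   # collision: try again with the next salt
        else:
            mapping[key] = h
    pending, salt = retry, salt + 1

print(sorted(mapping.values()))  # six distinct buckets out of eight
```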
def ok_hash(token: str) -> bool:
"""
Whether input token looks like a valid tails hash.
:param token: candidate string
:return: whether input token looks like a valid tails hash
"""
LOGGER.debug('Tails.ok_hash >>> token: %s', token)
rv = re.match('[{}]{{42,44}}$'.format(B58), token) is not None
LOGGER.debug('Tails.ok_hash <<< %s', rv)
return rv | [
"def",
"ok_hash",
"(",
"token",
":",
"str",
")",
"->",
"bool",
":",
"LOGGER",
".",
"debug",
"(",
"'Tails.ok_hash >>> token: %s'",
",",
"token",
")",
"rv",
"=",
"re",
".",
"match",
"(",
"'[{}]{{42,44}}$'",
".",
"format",
"(",
"B58",
")",
",",
"token",
")",
"is",
"not",
"None",
"LOGGER",
".",
"debug",
"(",
"'Tails.ok_hash <<< %s'",
",",
"rv",
")",
"return",
"rv"
] | 31.461538 | [
0.03125,
0.18181818181818182,
0.034482758620689655,
0,
0.07894736842105263,
0.045454545454545456,
0.18181818181818182,
0,
0.034482758620689655,
0,
0.02857142857142857,
0.041666666666666664,
0.11764705882352941
] |
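The test above is a whole-string match of 42-44 base58 characters (`re.match` anchors at the start, the trailing `$` at the end). A sketch with an assumed `B58` alphabet, the standard base58 set that drops 0, O, I and l; the module's actual constant may differ:

```python
import re

B58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
pattern = '[{}]{{42,44}}$'.format(B58)

print(re.match(pattern, '3' * 43) is not None)  # True: 43 valid chars
print(re.match(pattern, '0' * 43) is not None)  # False: '0' is not base58
print(re.match(pattern, '3' * 45) is not None)  # False: too long
```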
def DbImportEvent(self, argin):
""" Get event channel info from database
:param argin: name of event channel or factory
:type: tango.DevString
:return: export information e.g. IOR
:rtype: tango.DevVarLongStringArray """
self._log.debug("In DbImportEvent()")
argin = replace_wildcard(argin.lower())
return self.db.import_event(argin) | [
"def",
"DbImportEvent",
"(",
"self",
",",
"argin",
")",
":",
"self",
".",
"_log",
".",
"debug",
"(",
"\"In DbImportEvent()\"",
")",
"argin",
"=",
"replace_wildcard",
"(",
"argin",
".",
"lower",
"(",
")",
")",
"return",
"self",
".",
"db",
".",
"import_event",
"(",
"argin",
")"
] | 38.8 | [
0.03225806451612903,
0.041666666666666664,
0,
0.05555555555555555,
0.1,
0.06818181818181818,
0.0851063829787234,
0.044444444444444446,
0.0425531914893617,
0.047619047619047616
] |
def nfwAlpha(self, R, Rs, rho0, r_trunc, ax_x, ax_y):
"""
        deflection angle of the NFW profile along the projections onto the coordinate axes
:param R: radius of interest
:type R: float/numpy array
:param Rs: scale radius
:type Rs: float
:param rho0: density normalization (characteristic density)
:type rho0: float
        :param r_trunc: truncation radius of the (sub)halo
        :type r_trunc: float>0
        :param ax_x, ax_y: projections of the radius onto the x- and y-axes
        :type ax_x, ax_y: same as R
        :return: deflection angle components (alpha_x, alpha_y) at radius R
"""
if isinstance(R, int) or isinstance(R, float):
R = max(R, 0.00001)
else:
R[R <= 0.00001] = 0.00001
x = R / Rs
tau = float(r_trunc) / Rs
gx = self._g(x, tau)
a = 4 * rho0 * Rs * gx / x ** 2
return a * ax_x, a * ax_y | [
"def",
"nfwAlpha",
"(",
"self",
",",
"R",
",",
"Rs",
",",
"rho0",
",",
"r_trunc",
",",
"ax_x",
",",
"ax_y",
")",
":",
"if",
"isinstance",
"(",
"R",
",",
"int",
")",
"or",
"isinstance",
"(",
"R",
",",
"float",
")",
":",
"R",
"=",
"max",
"(",
"R",
",",
"0.00001",
")",
"else",
":",
"R",
"[",
"R",
"<=",
"0.00001",
"]",
"=",
"0.00001",
"x",
"=",
"R",
"/",
"Rs",
"tau",
"=",
"float",
"(",
"r_trunc",
")",
"/",
"Rs",
"gx",
"=",
"self",
".",
"_g",
"(",
"x",
",",
"tau",
")",
"a",
"=",
"4",
"*",
"rho0",
"*",
"Rs",
"*",
"gx",
"/",
"x",
"**",
"2",
"return",
"a",
"*",
"ax_x",
",",
"a",
"*",
"ax_y"
] | 33.192308 | [
0.018867924528301886,
0.18181818181818182,
0.02531645569620253,
0,
0.08333333333333333,
0.08823529411764706,
0.0967741935483871,
0.13043478260869565,
0.05970149253731343,
0.12,
0.1,
0.14814814814814814,
0.07407407407407407,
0.10344827586206896,
0.05263157894736842,
0.18181818181818182,
0.037037037037037035,
0.06451612903225806,
0.15384615384615385,
0.05405405405405406,
0,
0.1111111111111111,
0.06060606060606061,
0.07142857142857142,
0.05128205128205128,
0.06060606060606061
] |
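Read directly off the code, with x = R/Rs and tau = r_trunc/Rs, the returned pair is the radial deflection profile multiplied by the projected coordinates (g is the truncated-NFW lensing function `self._g`, not reproduced here):

```latex
a(R) = \frac{4\,\rho_0\,R_s\,g(x,\tau)}{x^{2}}, \qquad
\alpha_x = a(R)\,\mathrm{ax}_x, \quad \alpha_y = a(R)\,\mathrm{ax}_y
```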
def get_border(shape, thickness=1, mode='edges', return_indices=False):
r"""
Creates an array of specified size with corners, edges or faces labelled as
True. This can be used as mask to manipulate values laying on the
perimeter of an image.
Parameters
----------
shape : array_like
The shape of the array to return. Can be either 2D or 3D.
thickness : scalar (default is 1)
The number of pixels/voxels to place along perimeter.
mode : string
The type of border to create. Options are 'faces', 'edges' (default)
and 'corners'. In 2D 'faces' and 'edges' give the same result.
return_indices : boolean
If ``False`` (default) an image is returned with the border voxels set
to ``True``. If ``True``, then a tuple with the x, y, z (if ``im`` is
3D) indices is returned. This tuple can be used directly to index into
the image, such as ``im[tup] = 2``.
Returns
-------
image : ND-array
An ND-array of specified shape with ``True`` values at the perimeter
and ``False`` elsewhere
Notes
-----
TODO: This function uses brute force to create an image then fill the
edges using location-based logic, and if the user requests
``return_indices`` it finds them using ``np.where``. Since these arrays
are cubic it should be possible to use more elegant and efficient
index-based logic to find the indices, then use them to fill an empty
image with ``True`` using these indices.
Examples
--------
>>> import porespy as ps
>>> import scipy as sp
>>> mask = ps.tools.get_border(shape=[3, 3], mode='corners')
>>> print(mask)
[[ True False True]
[False False False]
[ True False True]]
>>> mask = ps.tools.get_border(shape=[3, 3], mode='edges')
>>> print(mask)
[[ True True True]
[ True False True]
[ True True True]]
"""
ndims = len(shape)
t = thickness
border = sp.ones(shape, dtype=bool)
if mode == 'faces':
if ndims == 2:
border[t:-t, t:-t] = False
if ndims == 3:
border[t:-t, t:-t, t:-t] = False
elif mode == 'edges':
if ndims == 2:
border[t:-t, t:-t] = False
if ndims == 3:
border[0::, t:-t, t:-t] = False
border[t:-t, 0::, t:-t] = False
border[t:-t, t:-t, 0::] = False
elif mode == 'corners':
if ndims == 2:
border[t:-t, 0::] = False
border[0::, t:-t] = False
if ndims == 3:
border[t:-t, 0::, 0::] = False
border[0::, t:-t, 0::] = False
border[0::, 0::, t:-t] = False
if return_indices:
border = sp.where(border)
return border | [
"def",
"get_border",
"(",
"shape",
",",
"thickness",
"=",
"1",
",",
"mode",
"=",
"'edges'",
",",
"return_indices",
"=",
"False",
")",
":",
"ndims",
"=",
"len",
"(",
"shape",
")",
"t",
"=",
"thickness",
"border",
"=",
"sp",
".",
"ones",
"(",
"shape",
",",
"dtype",
"=",
"bool",
")",
"if",
"mode",
"==",
"'faces'",
":",
"if",
"ndims",
"==",
"2",
":",
"border",
"[",
"t",
":",
"-",
"t",
",",
"t",
":",
"-",
"t",
"]",
"=",
"False",
"if",
"ndims",
"==",
"3",
":",
"border",
"[",
"t",
":",
"-",
"t",
",",
"t",
":",
"-",
"t",
",",
"t",
":",
"-",
"t",
"]",
"=",
"False",
"elif",
"mode",
"==",
"'edges'",
":",
"if",
"ndims",
"==",
"2",
":",
"border",
"[",
"t",
":",
"-",
"t",
",",
"t",
":",
"-",
"t",
"]",
"=",
"False",
"if",
"ndims",
"==",
"3",
":",
"border",
"[",
"0",
":",
":",
",",
"t",
":",
"-",
"t",
",",
"t",
":",
"-",
"t",
"]",
"=",
"False",
"border",
"[",
"t",
":",
"-",
"t",
",",
"0",
":",
":",
",",
"t",
":",
"-",
"t",
"]",
"=",
"False",
"border",
"[",
"t",
":",
"-",
"t",
",",
"t",
":",
"-",
"t",
",",
"0",
":",
":",
"]",
"=",
"False",
"elif",
"mode",
"==",
"'corners'",
":",
"if",
"ndims",
"==",
"2",
":",
"border",
"[",
"t",
":",
"-",
"t",
",",
"0",
":",
":",
"]",
"=",
"False",
"border",
"[",
"0",
":",
":",
",",
"t",
":",
"-",
"t",
"]",
"=",
"False",
"if",
"ndims",
"==",
"3",
":",
"border",
"[",
"t",
":",
"-",
"t",
",",
"0",
":",
":",
",",
"0",
":",
":",
"]",
"=",
"False",
"border",
"[",
"0",
":",
":",
",",
"t",
":",
"-",
"t",
",",
"0",
":",
":",
"]",
"=",
"False",
"border",
"[",
"0",
":",
":",
",",
"0",
":",
":",
",",
"t",
":",
"-",
"t",
"]",
"=",
"False",
"if",
"return_indices",
":",
"border",
"=",
"sp",
".",
"where",
"(",
"border",
")",
"return",
"border"
] | 35.168831 | [
0.014084507042253521,
0.25,
0.02531645569620253,
0.02857142857142857,
0.07692307692307693,
0,
0.14285714285714285,
0.14285714285714285,
0.13636363636363635,
0.030303030303030304,
0.10810810810810811,
0.03278688524590164,
0.17647058823529413,
0.025974025974025976,
0.028169014084507043,
0.10714285714285714,
0.038461538461538464,
0.0641025641025641,
0.0379746835443038,
0.06976744186046512,
0,
0.18181818181818182,
0.18181818181818182,
0.15,
0.039473684210526314,
0.0967741935483871,
0,
0.2222222222222222,
0.2222222222222222,
0.0273972602739726,
0.03225806451612903,
0.039473684210526314,
0.028985507246376812,
0.0273972602739726,
0.0625,
0,
0.16666666666666666,
0.16666666666666666,
0.10714285714285714,
0.11538461538461539,
0.046875,
0.15789473684210525,
0.16666666666666666,
0.125,
0.2,
0.04838709677419355,
0.15789473684210525,
0.16666666666666666,
0.16666666666666666,
0.2,
0.2857142857142857,
0.09090909090909091,
0.11764705882352941,
0.05128205128205128,
0.08695652173913043,
0.09090909090909091,
0.05263157894736842,
0.09090909090909091,
0.045454545454545456,
0.08,
0.09090909090909091,
0.05263157894736842,
0.09090909090909091,
0.046511627906976744,
0.046511627906976744,
0.046511627906976744,
0.07407407407407407,
0.09090909090909091,
0.05405405405405406,
0.05405405405405406,
0.09090909090909091,
0.047619047619047616,
0.047619047619047616,
0.047619047619047616,
0.09090909090909091,
0.06060606060606061,
0.11764705882352941
] |
def _mount(self):
"""Mount based on which OS is running."""
# mount_afp "afp://scraig:<password>@address/share" <mnt_point>
if is_osx():
if self.connection["jss"].verbose:
print self.connection["mount_url"]
if mount_share:
self.connection["mount_point"] = mount_share(
self.connection["mount_url"])
else:
# Non-Apple OS X python:
args = ["mount", "-t", self.protocol,
self.connection["mount_url"],
self.connection["mount_point"]]
if self.connection["jss"].verbose:
print " ".join(args)
subprocess.check_call(args)
elif is_linux():
args = ["mount_afp", "-t", self.protocol,
self.connection["mount_url"],
self.connection["mount_point"]]
if self.connection["jss"].verbose:
print " ".join(args)
subprocess.check_call(args)
else:
raise JSSError("Unsupported OS.") | [
"def",
"_mount",
"(",
"self",
")",
":",
"# mount_afp \"afp://scraig:<password>@address/share\" <mnt_point>",
"if",
"is_osx",
"(",
")",
":",
"if",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
".",
"verbose",
":",
"print",
"self",
".",
"connection",
"[",
"\"mount_url\"",
"]",
"if",
"mount_share",
":",
"self",
".",
"connection",
"[",
"\"mount_point\"",
"]",
"=",
"mount_share",
"(",
"self",
".",
"connection",
"[",
"\"mount_url\"",
"]",
")",
"else",
":",
"# Non-Apple OS X python:",
"args",
"=",
"[",
"\"mount\"",
",",
"\"-t\"",
",",
"self",
".",
"protocol",
",",
"self",
".",
"connection",
"[",
"\"mount_url\"",
"]",
",",
"self",
".",
"connection",
"[",
"\"mount_point\"",
"]",
"]",
"if",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
".",
"verbose",
":",
"print",
"\" \"",
".",
"join",
"(",
"args",
")",
"subprocess",
".",
"check_call",
"(",
"args",
")",
"elif",
"is_linux",
"(",
")",
":",
"args",
"=",
"[",
"\"mount_afp\"",
",",
"\"-t\"",
",",
"self",
".",
"protocol",
",",
"self",
".",
"connection",
"[",
"\"mount_url\"",
"]",
",",
"self",
".",
"connection",
"[",
"\"mount_point\"",
"]",
"]",
"if",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
".",
"verbose",
":",
"print",
"\" \"",
".",
"join",
"(",
"args",
")",
"subprocess",
".",
"check_call",
"(",
"args",
")",
"else",
":",
"raise",
"JSSError",
"(",
"\"Unsupported OS.\"",
")"
] | 42.192308 | [
0.058823529411764705,
0.04081632653061224,
0.028169014084507043,
0.1,
0.043478260869565216,
0.04,
0.07407407407407407,
0.04918032786885246,
0.061224489795918366,
0.11764705882352941,
0.05,
0.05660377358490566,
0.03773584905660377,
0.05454545454545454,
0.04,
0.05,
0.046511627906976744,
0.08333333333333333,
0.05660377358490566,
0.04081632653061224,
0.058823529411764705,
0.043478260869565216,
0.05555555555555555,
0.05128205128205128,
0.15384615384615385,
0.044444444444444446
] |
def set_led(self, led_number, led_value):
"""
Set front-panel controller LEDs. The DS3 controller has four, labelled, LEDs on the front panel that can
be either on or off.
:param led_number:
Integer between 1 and 4
:param led_value:
Value, set to 0 to turn the LED off, 1 to turn it on
"""
        if led_number < 1 or led_number > 4:
return
write_led_value(hw_id=self.device_unique_name, led_name='sony{}'.format(led_number), value=led_value) | [
"def",
"set_led",
"(",
"self",
",",
"led_number",
",",
"led_value",
")",
":",
"if",
"1",
">",
"led_number",
">",
"4",
":",
"return",
"write_led_value",
"(",
"hw_id",
"=",
"self",
".",
"device_unique_name",
",",
"led_name",
"=",
"'sony{}'",
".",
"format",
"(",
"led_number",
")",
",",
"value",
"=",
"led_value",
")"
] | 39.230769 | [
0.024390243902439025,
0.18181818181818182,
0.026785714285714284,
0.07142857142857142,
0,
0.11538461538461539,
0.05714285714285714,
0.12,
0.03125,
0.18181818181818182,
0.06666666666666667,
0.1111111111111111,
0.027522935779816515
] |
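The guard was rewritten above because Python chains comparisons: `1 > n > 4` means `(1 > n) and (n > 4)`, which no number satisfies, so the original out-of-range check could never fire. A short demonstration:

```python
for n in (0, 1, 4, 5):
    print(n, 1 > n > 4, n < 1 or n > 4)
# 0 False True
# 1 False False
# 4 False False
# 5 False True
```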
def parse_timespan(timedef):
"""
Convert a string timespan definition to seconds, for example converting
'1m30s' to 90. If *timedef* is already an int, the value will be returned
unmodified.
:param timedef: The timespan definition to convert to seconds.
:type timedef: int, str
:return: The converted value in seconds.
:rtype: int
"""
if isinstance(timedef, int):
return timedef
converter_order = ('w', 'd', 'h', 'm', 's')
converters = {
'w': 604800,
'd': 86400,
'h': 3600,
'm': 60,
's': 1
}
timedef = timedef.lower()
if timedef.isdigit():
return int(timedef)
elif len(timedef) == 0:
return 0
seconds = -1
for spec in converter_order:
timedef = timedef.split(spec)
if len(timedef) == 1:
timedef = timedef[0]
continue
elif len(timedef) > 2 or not timedef[0].isdigit():
seconds = -1
break
adjustment = converters[spec]
seconds = max(seconds, 0)
seconds += (int(timedef[0]) * adjustment)
timedef = timedef[1]
if not len(timedef):
break
if seconds < 0:
raise ValueError('invalid time format')
return seconds | [
"def",
"parse_timespan",
"(",
"timedef",
")",
":",
"if",
"isinstance",
"(",
"timedef",
",",
"int",
")",
":",
"return",
"timedef",
"converter_order",
"=",
"(",
"'w'",
",",
"'d'",
",",
"'h'",
",",
"'m'",
",",
"'s'",
")",
"converters",
"=",
"{",
"'w'",
":",
"604800",
",",
"'d'",
":",
"86400",
",",
"'h'",
":",
"3600",
",",
"'m'",
":",
"60",
",",
"'s'",
":",
"1",
"}",
"timedef",
"=",
"timedef",
".",
"lower",
"(",
")",
"if",
"timedef",
".",
"isdigit",
"(",
")",
":",
"return",
"int",
"(",
"timedef",
")",
"elif",
"len",
"(",
"timedef",
")",
"==",
"0",
":",
"return",
"0",
"seconds",
"=",
"-",
"1",
"for",
"spec",
"in",
"converter_order",
":",
"timedef",
"=",
"timedef",
".",
"split",
"(",
"spec",
")",
"if",
"len",
"(",
"timedef",
")",
"==",
"1",
":",
"timedef",
"=",
"timedef",
"[",
"0",
"]",
"continue",
"elif",
"len",
"(",
"timedef",
")",
">",
"2",
"or",
"not",
"timedef",
"[",
"0",
"]",
".",
"isdigit",
"(",
")",
":",
"seconds",
"=",
"-",
"1",
"break",
"adjustment",
"=",
"converters",
"[",
"spec",
"]",
"seconds",
"=",
"max",
"(",
"seconds",
",",
"0",
")",
"seconds",
"+=",
"(",
"int",
"(",
"timedef",
"[",
"0",
"]",
")",
"*",
"adjustment",
")",
"timedef",
"=",
"timedef",
"[",
"1",
"]",
"if",
"not",
"len",
"(",
"timedef",
")",
":",
"break",
"if",
"seconds",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"'invalid time format'",
")",
"return",
"seconds"
] | 23.409091 | [
0.03571428571428571,
0.75,
0.041666666666666664,
0.06756756756756757,
0.25,
0,
0.06349206349206349,
0.16666666666666666,
0.0975609756097561,
0.3333333333333333,
0.75,
0.10344827586206896,
0.1875,
0.06818181818181818,
0.26666666666666666,
0.21428571428571427,
0.23076923076923078,
0.25,
0.3,
0.375,
2,
0.11538461538461539,
0.13636363636363635,
0.14285714285714285,
0.125,
0.3,
0.23076923076923078,
0.10344827586206896,
0.0967741935483871,
0.13043478260869565,
0.13043478260869565,
0.2727272727272727,
0.057692307692307696,
0.2,
0.375,
0.0967741935483871,
0.1111111111111111,
0.06976744186046512,
0.13636363636363635,
0.13636363636363635,
0.375,
0.1875,
0.07317073170731707,
0.2
] |
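A worked check of the decomposition above: each unit token consumes the digits before it and adds count times the unit's weight in seconds, so '1m30s' is 1*60 + 30*1 and '1w1d' is 604800 + 86400:

```python
weights = {'w': 604800, 'd': 86400, 'h': 3600, 'm': 60, 's': 1}
assert 1 * weights['m'] + 30 * weights['s'] == 90     # parse_timespan('1m30s')
assert 2 * weights['h'] == 7200                       # parse_timespan('2h')
assert 1 * weights['w'] + 1 * weights['d'] == 691200  # parse_timespan('1w1d')
```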
def readline(self):
"""
Readline implementation.
:return: popped line from descriptor queue. None if nothing found
:raises: RuntimeError if errors happened while reading PIPE
"""
try:
return self._descriptor.read_queue.pop()
except IndexError:
# No lines in queue
if self.has_error():
raise RuntimeError("Errors reading PIPE")
return None | [
"def",
"readline",
"(",
"self",
")",
":",
"try",
":",
"return",
"self",
".",
"_descriptor",
".",
"read_queue",
".",
"pop",
"(",
")",
"except",
"IndexError",
":",
"# No lines in queue",
"if",
"self",
".",
"has_error",
"(",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Errors reading PIPE\"",
")",
"return",
"None"
] | 31.571429 | [
0.05263157894736842,
0.18181818181818182,
0.0625,
0,
0.0410958904109589,
0.04477611940298507,
0.18181818181818182,
0.16666666666666666,
0.038461538461538464,
0.07692307692307693,
0.06451612903225806,
0.0625,
0.03508771929824561,
0.10526315789473684
] |
def schedCoro(self, coro):
'''
Schedules a free-running coroutine to run on this base's event loop. Kills the coroutine if Base is fini'd.
It does not pend on coroutine completion.
Precondition:
This function is *not* threadsafe and must be run on the Base's event loop
Returns:
asyncio.Task: An asyncio.Task object.
'''
import synapse.lib.provenance as s_provenance # avoid import cycle
if __debug__:
assert s_coro.iscoro(coro)
import synapse.lib.threads as s_threads # avoid import cycle
assert s_threads.iden() == self.tid
task = self.loop.create_task(coro)
# In rare cases, (Like this function being triggered from call_soon_threadsafe), there's no task context
if asyncio.current_task():
s_provenance.dupstack(task)
def taskDone(task):
self._active_tasks.remove(task)
try:
task.result()
except asyncio.CancelledError:
pass
except Exception:
logger.exception('Task scheduled through Base.schedCoro raised exception')
self._active_tasks.add(task)
task.add_done_callback(taskDone)
return task | [
"def",
"schedCoro",
"(",
"self",
",",
"coro",
")",
":",
"import",
"synapse",
".",
"lib",
".",
"provenance",
"as",
"s_provenance",
"# avoid import cycle",
"if",
"__debug__",
":",
"assert",
"s_coro",
".",
"iscoro",
"(",
"coro",
")",
"import",
"synapse",
".",
"lib",
".",
"threads",
"as",
"s_threads",
"# avoid import cycle",
"assert",
"s_threads",
".",
"iden",
"(",
")",
"==",
"self",
".",
"tid",
"task",
"=",
"self",
".",
"loop",
".",
"create_task",
"(",
"coro",
")",
"# In rare cases, (Like this function being triggered from call_soon_threadsafe), there's no task context",
"if",
"asyncio",
".",
"current_task",
"(",
")",
":",
"s_provenance",
".",
"dupstack",
"(",
"task",
")",
"def",
"taskDone",
"(",
"task",
")",
":",
"self",
".",
"_active_tasks",
".",
"remove",
"(",
"task",
")",
"try",
":",
"task",
".",
"result",
"(",
")",
"except",
"asyncio",
".",
"CancelledError",
":",
"pass",
"except",
"Exception",
":",
"logger",
".",
"exception",
"(",
"'Task scheduled through Base.schedCoro raised exception'",
")",
"self",
".",
"_active_tasks",
".",
"add",
"(",
"task",
")",
"task",
".",
"add_done_callback",
"(",
"taskDone",
")",
"return",
"task"
] | 33.078947 | [
0.038461538461538464,
0.18181818181818182,
0.02586206896551724,
0.04081632653061224,
0,
0.09523809523809523,
0.03488372093023256,
0,
0.125,
0.04081632653061224,
0,
0.18181818181818182,
0.02666666666666667,
0,
0.09523809523809523,
0.05263157894736842,
0.0273972602739726,
0.0425531914893617,
0,
0.047619047619047616,
0,
0.026785714285714284,
0.058823529411764705,
0.05128205128205128,
0,
0.07407407407407407,
0.046511627906976744,
0.125,
0.06896551724137931,
0.047619047619047616,
0.1,
0.06896551724137931,
0.03333333333333333,
0,
0.05555555555555555,
0.05,
0,
0.10526315789473684
] |
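A minimal, self-contained sketch of the same bookkeeping pattern: keep a strong reference to every scheduled task and report failures from a done callback, without synapse's provenance or thread checks (all names here are hypothetical):

```python
import asyncio

active_tasks = set()

def sched_coro(coro):
    task = asyncio.get_running_loop().create_task(coro)

    def task_done(t):
        active_tasks.discard(t)
        if not t.cancelled() and t.exception() is not None:
            print('task raised:', t.exception())

    active_tasks.add(task)             # strong ref keeps the task alive
    task.add_done_callback(task_done)
    return task

async def main():
    print(await sched_coro(asyncio.sleep(0.01, result=42)))  # 42

asyncio.run(main())
```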
def parse_next(self, ptype, m):
"""
Parse the next packet.
:param ptype: The (string) type of the incoming packet
        :param `.Message` m: The packet content
"""
if ptype == MSG_KEXGSS_GROUPREQ:
return self._parse_kexgss_groupreq(m)
elif ptype == MSG_KEXGSS_GROUP:
return self._parse_kexgss_group(m)
elif ptype == MSG_KEXGSS_INIT:
return self._parse_kexgss_gex_init(m)
elif ptype == MSG_KEXGSS_HOSTKEY:
return self._parse_kexgss_hostkey(m)
elif ptype == MSG_KEXGSS_CONTINUE:
return self._parse_kexgss_continue(m)
elif ptype == MSG_KEXGSS_COMPLETE:
return self._parse_kexgss_complete(m)
elif ptype == MSG_KEXGSS_ERROR:
return self._parse_kexgss_error(m)
msg = "KexGex asked to handle packet type {:d}"
raise SSHException(msg.format(ptype)) | [
"def",
"parse_next",
"(",
"self",
",",
"ptype",
",",
"m",
")",
":",
"if",
"ptype",
"==",
"MSG_KEXGSS_GROUPREQ",
":",
"return",
"self",
".",
"_parse_kexgss_groupreq",
"(",
"m",
")",
"elif",
"ptype",
"==",
"MSG_KEXGSS_GROUP",
":",
"return",
"self",
".",
"_parse_kexgss_group",
"(",
"m",
")",
"elif",
"ptype",
"==",
"MSG_KEXGSS_INIT",
":",
"return",
"self",
".",
"_parse_kexgss_gex_init",
"(",
"m",
")",
"elif",
"ptype",
"==",
"MSG_KEXGSS_HOSTKEY",
":",
"return",
"self",
".",
"_parse_kexgss_hostkey",
"(",
"m",
")",
"elif",
"ptype",
"==",
"MSG_KEXGSS_CONTINUE",
":",
"return",
"self",
".",
"_parse_kexgss_continue",
"(",
"m",
")",
"elif",
"ptype",
"==",
"MSG_KEXGSS_COMPLETE",
":",
"return",
"self",
".",
"_parse_kexgss_complete",
"(",
"m",
")",
"elif",
"ptype",
"==",
"MSG_KEXGSS_ERROR",
":",
"return",
"self",
".",
"_parse_kexgss_error",
"(",
"m",
")",
"msg",
"=",
"\"KexGex asked to handle packet type {:d}\"",
"raise",
"SSHException",
"(",
"msg",
".",
"format",
"(",
"ptype",
")",
")"
] | 39.478261 | [
0.03225806451612903,
0.18181818181818182,
0.06666666666666667,
0,
0.06451612903225806,
0.08695652173913043,
0.18181818181818182,
0.05,
0.04081632653061224,
0.05128205128205128,
0.043478260869565216,
0.05263157894736842,
0.04081632653061224,
0.04878048780487805,
0.041666666666666664,
0.047619047619047616,
0.04081632653061224,
0.047619047619047616,
0.04081632653061224,
0.05128205128205128,
0.043478260869565216,
0.03636363636363636,
0.044444444444444446
] |
def reaction_weight(reaction):
"""Return the metabolite weight times its stoichiometric coefficient."""
if len(reaction.metabolites) != 1:
raise ValueError('Reaction weight is only defined for single '
'metabolite products or educts.')
met, coeff = next(iteritems(reaction.metabolites))
return [coeff * met.formula_weight] | [
"def",
"reaction_weight",
"(",
"reaction",
")",
":",
"if",
"len",
"(",
"reaction",
".",
"metabolites",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"'Reaction weight is only defined for single '",
"'metabolite products or educts.'",
")",
"met",
",",
"coeff",
"=",
"next",
"(",
"iteritems",
"(",
"reaction",
".",
"metabolites",
")",
")",
"return",
"[",
"coeff",
"*",
"met",
".",
"formula_weight",
"]"
] | 36.5 | [
0.03333333333333333,
0.02631578947368421,
0,
0.05263157894736842,
0.04285714285714286,
0.06896551724137931,
0,
0.037037037037037035,
0,
0.05128205128205128
] |
def _add_baseline_to_exclude_files(args):
"""
Modifies args.exclude_files in-place.
"""
baseline_name_regex = r'^{}$'.format(args.import_filename[0])
if not args.exclude_files:
args.exclude_files = baseline_name_regex
elif baseline_name_regex not in args.exclude_files:
args.exclude_files += r'|{}'.format(baseline_name_regex) | [
"def",
"_add_baseline_to_exclude_files",
"(",
"args",
")",
":",
"baseline_name_regex",
"=",
"r'^{}$'",
".",
"format",
"(",
"args",
".",
"import_filename",
"[",
"0",
"]",
")",
"if",
"not",
"args",
".",
"exclude_files",
":",
"args",
".",
"exclude_files",
"=",
"baseline_name_regex",
"elif",
"baseline_name_regex",
"not",
"in",
"args",
".",
"exclude_files",
":",
"args",
".",
"exclude_files",
"+=",
"r'|{}'",
".",
"format",
"(",
"baseline_name_regex",
")"
] | 35.8 | [
0.024390243902439025,
0.2857142857142857,
0.07317073170731707,
0.2857142857142857,
0.03076923076923077,
0,
0.06666666666666667,
0.041666666666666664,
0.03636363636363636,
0.03125
] |
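The exclusion pattern grows as a regex union, so the anchored baseline pattern and any pre-existing user pattern match independently. A quick check of that composition with hypothetical filenames and patterns:

```python
import re

exclude = r'node_modules'                           # user-supplied pattern
exclude += r'|{}'.format(r'^\.secrets\.baseline$')  # baseline regex joined in

print(bool(re.search(exclude, '.secrets.baseline')))   # True
print(bool(re.search(exclude, 'src/node_modules/a')))  # True
print(bool(re.search(exclude, 'app.py')))              # False
```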
def get_factors(self, node=None):
"""
Returns all the factors containing the node. If node is not specified
returns all the factors that have been added till now to the graph.
        Parameters
        ----------
node: any hashable python object (optional)
            The node whose factors we want. If node is not specified, all factors are returned.
Examples
--------
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> student = MarkovModel([('Alice', 'Bob'), ('Bob', 'Charles')])
>>> factor1 = DiscreteFactor(['Alice', 'Bob'], cardinality=[2, 2],
... values=np.random.rand(4))
>>> factor2 = DiscreteFactor(['Bob', 'Charles'], cardinality=[2, 3],
... values=np.ones(6))
>>> student.add_factors(factor1,factor2)
>>> student.get_factors()
[<DiscreteFactor representing phi(Alice:2, Bob:2) at 0x7f8a0e9bf630>,
<DiscreteFactor representing phi(Bob:2, Charles:3) at 0x7f8a0e9bf5f8>]
>>> student.get_factors('Alice')
[<DiscreteFactor representing phi(Alice:2, Bob:2) at 0x7f8a0e9bf630>]
"""
if node:
if node not in self.nodes():
raise ValueError('Node not present in the Undirected Graph')
node_factors = []
for factor in self.factors:
if node in factor.scope():
node_factors.append(factor)
return node_factors
else:
return self.factors | [
"def",
"get_factors",
"(",
"self",
",",
"node",
"=",
"None",
")",
":",
"if",
"node",
":",
"if",
"node",
"not",
"in",
"self",
".",
"nodes",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'Node not present in the Undirected Graph'",
")",
"node_factors",
"=",
"[",
"]",
"for",
"factor",
"in",
"self",
".",
"factors",
":",
"if",
"node",
"in",
"factor",
".",
"scope",
"(",
")",
":",
"node_factors",
".",
"append",
"(",
"factor",
")",
"return",
"node_factors",
"else",
":",
"return",
"self",
".",
"factors"
] | 42.444444 | [
0.030303030303030304,
0.18181818181818182,
0.025974025974025976,
0.02666666666666667,
0,
0.11764705882352941,
0.11764705882352941,
0.058823529411764705,
0.029850746268656716,
0,
0.125,
0.125,
0.0625,
0.04918032786885246,
0.0410958904109589,
0.05405405405405406,
0.07547169811320754,
0.05263157894736842,
0.11764705882352941,
0.08333333333333333,
0.09090909090909091,
0.06493506493506493,
0.08860759493670886,
0.075,
0.05194805194805195,
0.18181818181818182,
0.125,
0.05,
0.02631578947368421,
0.06896551724137931,
0.05128205128205128,
0.047619047619047616,
0.0425531914893617,
0.06451612903225806,
0.15384615384615385,
0.06451612903225806
] |
def SetupVars(self):
"""Set up some vars for the directories we use."""
# Python paths chosen to match appveyor:
# http://www.appveyor.com/docs/installed-software#python
self.python_dir_64 = args.python64_dir
self.python_dir_32 = args.python32_dir
self.virtualenv_bin64 = os.path.join(self.python_dir_64,
r"Scripts\virtualenv.exe")
self.virtualenv_bin32 = os.path.join(self.python_dir_32,
r"Scripts\virtualenv.exe")
if args.virtualenv_64bit:
self.virtualenv64 = args.virtualenv_64bit
else:
self.virtualenv64 = os.path.join(args.build_dir, "python_64")
if args.virtualenv_32bit:
self.virtualenv32 = args.virtualenv_32bit
else:
self.virtualenv32 = os.path.join(args.build_dir, "python_32")
self.grr_client_build64 = os.path.join(self.virtualenv64,
r"Scripts\grr_client_build.exe")
self.grr_client_build32 = os.path.join(self.virtualenv32,
r"Scripts\grr_client_build.exe")
self.pip64 = os.path.join(self.virtualenv64, r"Scripts\pip.exe")
self.pip32 = os.path.join(self.virtualenv32, r"Scripts\pip.exe")
self.virtualenv_python64 = os.path.join(self.virtualenv64,
r"Scripts\python.exe")
self.virtualenv_python32 = os.path.join(self.virtualenv32,
r"Scripts\python.exe")
self.git = r"C:\Program Files\Git\bin\git.exe"
self.install_path = r"C:\Windows\System32\GRR"
self.service_name = "GRR Monitor"
self.expect_service_running = args.expect_service_running | [
"def",
"SetupVars",
"(",
"self",
")",
":",
"# Python paths chosen to match appveyor:",
"# http://www.appveyor.com/docs/installed-software#python",
"self",
".",
"python_dir_64",
"=",
"args",
".",
"python64_dir",
"self",
".",
"python_dir_32",
"=",
"args",
".",
"python32_dir",
"self",
".",
"virtualenv_bin64",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"python_dir_64",
",",
"r\"Scripts\\virtualenv.exe\"",
")",
"self",
".",
"virtualenv_bin32",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"python_dir_32",
",",
"r\"Scripts\\virtualenv.exe\"",
")",
"if",
"args",
".",
"virtualenv_64bit",
":",
"self",
".",
"virtualenv64",
"=",
"args",
".",
"virtualenv_64bit",
"else",
":",
"self",
".",
"virtualenv64",
"=",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"build_dir",
",",
"\"python_64\"",
")",
"if",
"args",
".",
"virtualenv_32bit",
":",
"self",
".",
"virtualenv32",
"=",
"args",
".",
"virtualenv_32bit",
"else",
":",
"self",
".",
"virtualenv32",
"=",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"build_dir",
",",
"\"python_32\"",
")",
"self",
".",
"grr_client_build64",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"virtualenv64",
",",
"r\"Scripts\\grr_client_build.exe\"",
")",
"self",
".",
"grr_client_build32",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"virtualenv32",
",",
"r\"Scripts\\grr_client_build.exe\"",
")",
"self",
".",
"pip64",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"virtualenv64",
",",
"r\"Scripts\\pip.exe\"",
")",
"self",
".",
"pip32",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"virtualenv32",
",",
"r\"Scripts\\pip.exe\"",
")",
"self",
".",
"virtualenv_python64",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"virtualenv64",
",",
"r\"Scripts\\python.exe\"",
")",
"self",
".",
"virtualenv_python32",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"virtualenv32",
",",
"r\"Scripts\\python.exe\"",
")",
"self",
".",
"git",
"=",
"r\"C:\\Program Files\\Git\\bin\\git.exe\"",
"self",
".",
"install_path",
"=",
"r\"C:\\Windows\\System32\\GRR\"",
"self",
".",
"service_name",
"=",
"\"GRR Monitor\"",
"self",
".",
"expect_service_running",
"=",
"args",
".",
"expect_service_running"
] | 42.05 | [
0.05,
0.037037037037037035,
0.045454545454545456,
0.03333333333333333,
0,
0.047619047619047616,
0.047619047619047616,
0,
0.05,
0.05970149253731343,
0.05,
0.05970149253731343,
0,
0.06896551724137931,
0.06382978723404255,
0.2222222222222222,
0.04477611940298507,
0.06896551724137931,
0.06382978723404255,
0.2222222222222222,
0.04477611940298507,
0,
0.04918032786885246,
0.05333333333333334,
0.04918032786885246,
0.05333333333333334,
0.029411764705882353,
0.029411764705882353,
0,
0.04838709677419355,
0.045454545454545456,
0.04838709677419355,
0.045454545454545456,
0,
0.04,
0,
0.04,
0.05405405405405406,
0,
0.03278688524590164
] |
def gen_references(self) -> str:
""" Generate python type declarations for all identifiers (primary keys)
"""
rval = []
for cls in self.schema.classes.values():
pkeys = self.primary_keys_for(cls)
for pk in pkeys:
pk_slot = self.schema.slots[pk]
classname = camelcase(cls.name) + camelcase(pk)
if cls.is_a and getattr(self.schema.classes[cls.is_a], pk, None):
parent = self.range_type_name(pk_slot, cls.is_a)
else:
parent = self.python_name_for(pk_slot.range)
rval.append(f'class {classname}({parent}):\n\tpass')
return '\n\n\n'.join(rval) | [
"def",
"gen_references",
"(",
"self",
")",
"->",
"str",
":",
"rval",
"=",
"[",
"]",
"for",
"cls",
"in",
"self",
".",
"schema",
".",
"classes",
".",
"values",
"(",
")",
":",
"pkeys",
"=",
"self",
".",
"primary_keys_for",
"(",
"cls",
")",
"for",
"pk",
"in",
"pkeys",
":",
"pk_slot",
"=",
"self",
".",
"schema",
".",
"slots",
"[",
"pk",
"]",
"classname",
"=",
"camelcase",
"(",
"cls",
".",
"name",
")",
"+",
"camelcase",
"(",
"pk",
")",
"if",
"cls",
".",
"is_a",
"and",
"getattr",
"(",
"self",
".",
"schema",
".",
"classes",
"[",
"cls",
".",
"is_a",
"]",
",",
"pk",
",",
"None",
")",
":",
"parent",
"=",
"self",
".",
"range_type_name",
"(",
"pk_slot",
",",
"cls",
".",
"is_a",
")",
"else",
":",
"parent",
"=",
"self",
".",
"python_name_for",
"(",
"pk_slot",
".",
"range",
")",
"rval",
".",
"append",
"(",
"f'class {classname}({parent}):\\n\\tpass'",
")",
"return",
"'\\n\\n\\n'",
".",
"join",
"(",
"rval",
")"
] | 44.25 | [
0.03125,
0.0375,
0,
0.18181818181818182,
0.11764705882352941,
0.041666666666666664,
0.043478260869565216,
0.07142857142857142,
0.0425531914893617,
0.031746031746031744,
0.037037037037037035,
0.029411764705882353,
0.09523809523809523,
0.03125,
0.029411764705882353,
0.058823529411764705
] |
def compile_cpfs(self,
scope: Dict[str, TensorFluent],
batch_size: Optional[int] = None,
noise: Optional[Noise] = None) -> Tuple[List[CPFPair], List[CPFPair]]:
'''Compiles the intermediate and next state fluent CPFs given the current `state` and `action` scope.
Args:
scope (Dict[str, :obj:`rddl2tf.fluent.TensorFluent`]): The fluent scope for CPF evaluation.
batch_size (Optional[int]): The batch size.
Returns:
Tuple[List[CPFPair], List[CPFPair]]: A pair of lists of TensorFluent
representing the intermediate and state CPFs.
'''
interm_fluents = self.compile_intermediate_cpfs(scope, batch_size, noise)
scope.update(dict(interm_fluents))
next_state_fluents = self.compile_state_cpfs(scope, batch_size, noise)
return interm_fluents, next_state_fluents | [
"def",
"compile_cpfs",
"(",
"self",
",",
"scope",
":",
"Dict",
"[",
"str",
",",
"TensorFluent",
"]",
",",
"batch_size",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"noise",
":",
"Optional",
"[",
"Noise",
"]",
"=",
"None",
")",
"->",
"Tuple",
"[",
"List",
"[",
"CPFPair",
"]",
",",
"List",
"[",
"CPFPair",
"]",
"]",
":",
"interm_fluents",
"=",
"self",
".",
"compile_intermediate_cpfs",
"(",
"scope",
",",
"batch_size",
",",
"noise",
")",
"scope",
".",
"update",
"(",
"dict",
"(",
"interm_fluents",
")",
")",
"next_state_fluents",
"=",
"self",
".",
"compile_state_cpfs",
"(",
"scope",
",",
"batch_size",
",",
"noise",
")",
"return",
"interm_fluents",
",",
"next_state_fluents"
] | 50.722222 | [
0.09090909090909091,
0.057692307692307696,
0.05555555555555555,
0.054945054945054944,
0.027522935779816515,
0,
0.15384615384615385,
0.04854368932038835,
0.05454545454545454,
0,
0.125,
0.0375,
0.03508771929824561,
0.18181818181818182,
0.037037037037037035,
0.047619047619047616,
0.02564102564102564,
0.04081632653061224
] |
def prepare_image(tarpath, outfolder, **kwargs):
"""Unpack the OS image stored at tarpath to outfolder.
Prepare the unpacked image for use as a VR base image.
"""
outfolder = path.Path(outfolder)
untar(tarpath, outfolder, **kwargs)
# Some OSes have started making /etc/resolv.conf into a symlink to
# /run/resolv.conf. That prevents us from bind-mounting to that
# location. So delete that symlink, if it exists.
resolv_path = outfolder / 'etc' / 'resolv.conf'
if resolv_path.islink():
resolv_path.remove().write_text('', encoding='ascii') | [
"def",
"prepare_image",
"(",
"tarpath",
",",
"outfolder",
",",
"*",
"*",
"kwargs",
")",
":",
"outfolder",
"=",
"path",
".",
"Path",
"(",
"outfolder",
")",
"untar",
"(",
"tarpath",
",",
"outfolder",
",",
"*",
"*",
"kwargs",
")",
"# Some OSes have started making /etc/resolv.conf into a symlink to",
"# /run/resolv.conf. That prevents us from bind-mounting to that",
"# location. So delete that symlink, if it exists.",
"resolv_path",
"=",
"outfolder",
"/",
"'etc'",
"/",
"'resolv.conf'",
"if",
"resolv_path",
".",
"islink",
"(",
")",
":",
"resolv_path",
".",
"remove",
"(",
")",
".",
"write_text",
"(",
"''",
",",
"encoding",
"=",
"'ascii'",
")"
] | 38.533333 | [
0.020833333333333332,
0.034482758620689655,
0,
0.034482758620689655,
0,
0.2857142857142857,
0.05555555555555555,
0.05128205128205128,
0,
0.02857142857142857,
0.029411764705882353,
0.037037037037037035,
0.0392156862745098,
0.07142857142857142,
0.03278688524590164
] |
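The symlink fix-up at the end can be reproduced with the standard library alone; `remove()` in the path library above returns the path, so chaining `.write_text('')` leaves an empty regular file where the symlink was. A stdlib sketch with a hypothetical unpack location:

```python
import os

p = '/tmp/rootfs/etc/resolv.conf'   # hypothetical unpacked image path
if os.path.islink(p):
    os.remove(p)                    # drop the symlink itself
    with open(p, 'w', encoding='ascii') as f:
        f.write('')                 # empty regular file, safe to bind-mount over
```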
def get_layers_output(self, dataset):
"""Get output from each layer of the network.
:param dataset: input data
:return: list of np array, element i is the output of layer i
"""
layers_out = []
with self.tf_graph.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
for l in self.layer_nodes:
layers_out.append(l.eval({self.input_data: dataset,
self.keep_prob: 1}))
if layers_out == []:
raise Exception("This method is not implemented for this model")
else:
return layers_out | [
"def",
"get_layers_output",
"(",
"self",
",",
"dataset",
")",
":",
"layers_out",
"=",
"[",
"]",
"with",
"self",
".",
"tf_graph",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"self",
".",
"tf_session",
":",
"self",
".",
"tf_saver",
".",
"restore",
"(",
"self",
".",
"tf_session",
",",
"self",
".",
"model_path",
")",
"for",
"l",
"in",
"self",
".",
"layer_nodes",
":",
"layers_out",
".",
"append",
"(",
"l",
".",
"eval",
"(",
"{",
"self",
".",
"input_data",
":",
"dataset",
",",
"self",
".",
"keep_prob",
":",
"1",
"}",
")",
")",
"if",
"layers_out",
"==",
"[",
"]",
":",
"raise",
"Exception",
"(",
"\"This method is not implemented for this model\"",
")",
"else",
":",
"return",
"layers_out"
] | 37.473684 | [
0.02702702702702703,
0.03773584905660377,
0,
0.08823529411764706,
0.043478260869565216,
0.18181818181818182,
0.08695652173913043,
0,
0.05,
0.04081632653061224,
0.028169014084507043,
0.07142857142857142,
0.04225352112676056,
0.06060606060606061,
0,
0.07142857142857142,
0.02631578947368421,
0.15384615384615385,
0.06896551724137931
] |
def _unschedule_sending_init_updates(self):
"""Un-schedules sending of initial updates
Stops the timer if set for sending initial updates.
Returns:
- True if timer was stopped
- False if timer was already stopped and nothing was done
"""
LOG.debug('Un-scheduling sending of initial Non-RTC UPDATEs'
' (init. UPDATEs already sent: %s)',
self._sent_init_non_rtc_update)
if self._rtc_eor_timer:
self._rtc_eor_timer.stop()
self._rtc_eor_timer = None
return True
return False | [
"def",
"_unschedule_sending_init_updates",
"(",
"self",
")",
":",
"LOG",
".",
"debug",
"(",
"'Un-scheduling sending of initial Non-RTC UPDATEs'",
"' (init. UPDATEs already sent: %s)'",
",",
"self",
".",
"_sent_init_non_rtc_update",
")",
"if",
"self",
".",
"_rtc_eor_timer",
":",
"self",
".",
"_rtc_eor_timer",
".",
"stop",
"(",
")",
"self",
".",
"_rtc_eor_timer",
"=",
"None",
"return",
"True",
"return",
"False"
] | 38 | [
0.023255813953488372,
0.04,
0,
0.03389830508474576,
0.125,
0.05128205128205128,
0.028985507246376812,
0.18181818181818182,
0.04411764705882353,
0.05555555555555555,
0.08163265306122448,
0.06451612903225806,
0.05263157894736842,
0.05263157894736842,
0.08695652173913043,
0.1
] |
def _get_group_cached_perms(self):
"""
Set group cache.
"""
if not self.group:
return {}
perms = Permission.objects.filter(
group=self.group,
)
group_permissions = {}
for perm in perms:
group_permissions[(
perm.object_id,
perm.content_type_id,
perm.codename,
perm.approved,
)] = True
return group_permissions | [
"def",
"_get_group_cached_perms",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"group",
":",
"return",
"{",
"}",
"perms",
"=",
"Permission",
".",
"objects",
".",
"filter",
"(",
"group",
"=",
"self",
".",
"group",
",",
")",
"group_permissions",
"=",
"{",
"}",
"for",
"perm",
"in",
"perms",
":",
"group_permissions",
"[",
"(",
"perm",
".",
"object_id",
",",
"perm",
".",
"content_type_id",
",",
"perm",
".",
"codename",
",",
"perm",
".",
"approved",
",",
")",
"]",
"=",
"True",
"return",
"group_permissions"
] | 26.388889 | [
0.029411764705882353,
0.18181818181818182,
0.08333333333333333,
0.18181818181818182,
0.07692307692307693,
0.09523809523809523,
0.07142857142857142,
0.10344827586206896,
0.3333333333333333,
0.06666666666666667,
0.07692307692307693,
0.0967741935483871,
0.06451612903225806,
0.05405405405405406,
0.06666666666666667,
0.06666666666666667,
0.23809523809523808,
0.0625
] |
def create(observation_data,
user_id='user_id', item_id='item_id', target=None,
user_data=None, item_data=None,
random_seed=0,
verbose=True):
"""
Create a model that makes recommendations using item popularity. When no
target column is provided, the popularity is determined by the number of
observations involving each item. When a target is provided, popularity
is computed using the item's mean target value. When the target column
contains ratings, for example, the model computes the mean rating for
each item and uses this to rank items for recommendations.
Parameters
----------
observation_data : SFrame
The dataset to use for training the model. It must contain a column of
user ids and a column of item ids. Each row represents an observed
interaction between the user and the item. The (user, item) pairs
are stored with the model so that they can later be excluded from
recommendations if desired. It can optionally contain a target ratings
column. All other columns are interpreted by the underlying model as
side features for the observations.
The user id and item id columns must be of type 'int' or 'str'. The
target column must be of type 'int' or 'float'.
user_id : string, optional
The name of the column in `observation_data` that corresponds to the
user id.
item_id : string, optional
The name of the column in `observation_data` that corresponds to the
item id.
target : string, optional
The `observation_data` can optionally contain a column of scores
representing ratings given by the users. If present, the name of this
    column may be specified with the `target` argument.
user_data : SFrame, optional
Side information for the users. This SFrame must have a column with
the same name as what is specified by the `user_id` input parameter.
`user_data` can provide any amount of additional user-specific
information.
item_data : SFrame, optional
Side information for the items. This SFrame must have a column with
the same name as what is specified by the `item_id` input parameter.
`item_data` can provide any amount of additional item-specific
information.
verbose : bool, optional
Enables verbose output.
Examples
--------
>>> sf = turicreate.SFrame({'user_id': ["0", "0", "0", "1", "1", "2", "2", "2"],
... 'item_id': ["a", "b", "c", "a", "b", "b", "c", "d"],
... 'rating': [1, 3, 2, 5, 4, 1, 4, 3]})
>>> m = turicreate.popularity_recommender.create(sf, target='rating')
See Also
--------
PopularityRecommender
"""
from turicreate._cython.cy_server import QuietProgress
opts = {}
model_proxy = _turicreate.extensions.popularity()
model_proxy.init_options(opts)
if user_data is None:
user_data = _turicreate.SFrame()
if item_data is None:
item_data = _turicreate.SFrame()
nearest_items = _turicreate.SFrame()
opts = {'user_id': user_id,
'item_id': item_id,
'target': target,
'random_seed': 1}
extra_data = {"nearest_items" : _turicreate.SFrame()}
with QuietProgress(verbose):
model_proxy.train(observation_data, user_data, item_data, opts, extra_data)
return PopularityRecommender(model_proxy) | [
"def",
"create",
"(",
"observation_data",
",",
"user_id",
"=",
"'user_id'",
",",
"item_id",
"=",
"'item_id'",
",",
"target",
"=",
"None",
",",
"user_data",
"=",
"None",
",",
"item_data",
"=",
"None",
",",
"random_seed",
"=",
"0",
",",
"verbose",
"=",
"True",
")",
":",
"from",
"turicreate",
".",
"_cython",
".",
"cy_server",
"import",
"QuietProgress",
"opts",
"=",
"{",
"}",
"model_proxy",
"=",
"_turicreate",
".",
"extensions",
".",
"popularity",
"(",
")",
"model_proxy",
".",
"init_options",
"(",
"opts",
")",
"if",
"user_data",
"is",
"None",
":",
"user_data",
"=",
"_turicreate",
".",
"SFrame",
"(",
")",
"if",
"item_data",
"is",
"None",
":",
"item_data",
"=",
"_turicreate",
".",
"SFrame",
"(",
")",
"nearest_items",
"=",
"_turicreate",
".",
"SFrame",
"(",
")",
"opts",
"=",
"{",
"'user_id'",
":",
"user_id",
",",
"'item_id'",
":",
"item_id",
",",
"'target'",
":",
"target",
",",
"'random_seed'",
":",
"1",
"}",
"extra_data",
"=",
"{",
"\"nearest_items\"",
":",
"_turicreate",
".",
"SFrame",
"(",
")",
"}",
"with",
"QuietProgress",
"(",
"verbose",
")",
":",
"model_proxy",
".",
"train",
"(",
"observation_data",
",",
"user_data",
",",
"item_data",
",",
"opts",
",",
"extra_data",
")",
"return",
"PopularityRecommender",
"(",
"model_proxy",
")"
] | 39.056818 | [
0.07142857142857142,
0.09836065573770492,
0.11904761904761904,
0.16,
0.2,
0.2857142857142857,
0.02631578947368421,
0.02631578947368421,
0.02666666666666667,
0.02702702702702703,
0.0273972602739726,
0.03225806451612903,
0,
0.14285714285714285,
0.14285714285714285,
0.10344827586206896,
0.02564102564102564,
0.02702702702702703,
0.04054054054054054,
0.0273972602739726,
0.02564102564102564,
0.02631578947368421,
0.046511627906976744,
0,
0.02666666666666667,
0.03636363636363636,
0,
0.1,
0.039473684210526314,
0.125,
0,
0.1,
0.039473684210526314,
0.125,
0,
0.10344827586206896,
0.041666666666666664,
0.025974025974025976,
0.058823529411764705,
0,
0.09375,
0.02631578947368421,
0.039473684210526314,
0.04285714285714286,
0.1,
0,
0.09375,
0.02631578947368421,
0.039473684210526314,
0.04285714285714286,
0.1,
0,
0.10714285714285714,
0.06451612903225806,
0,
0.16666666666666666,
0.16666666666666666,
0.05952380952380952,
0.036585365853658534,
0.045454545454545456,
0.0410958904109589,
0,
0.16666666666666666,
0.16666666666666666,
0.08,
0.2857142857142857,
0.034482758620689655,
0,
0.15384615384615385,
0.03773584905660377,
0.058823529411764705,
0,
0.08,
0.05,
0.08,
0.05,
0.05,
0,
0.0967741935483871,
0.06451612903225806,
0.06896551724137931,
0.10344827586206896,
0,
0.05263157894736842,
0.0625,
0.03614457831325301,
0,
0.044444444444444446
] |