repo stringlengths 7 54 | path stringlengths 4 192 | url stringlengths 87 284 | code stringlengths 78 104k | code_tokens list | docstring stringlengths 1 46.9k | docstring_tokens list | language stringclasses 1
value | partition stringclasses 3
values |
|---|---|---|---|---|---|---|---|---|
Tanganelli/CoAPthon3 | coapthon/client/coap.py | https://github.com/Tanganelli/CoAPthon3/blob/985763bfe2eb9e00f49ec100c5b8877c2ed7d531/coapthon/client/coap.py#L69-L79 | def close(self):
"""
Stop the client.
"""
self.stopped.set()
for event in self.to_be_stopped:
event.set()
if self._receiver_thread is not None:
self._receiver_thread.join()
self._socket.close() | [
"def",
"close",
"(",
"self",
")",
":",
"self",
".",
"stopped",
".",
"set",
"(",
")",
"for",
"event",
"in",
"self",
".",
"to_be_stopped",
":",
"event",
".",
"set",
"(",
")",
"if",
"self",
".",
"_receiver_thread",
"is",
"not",
"None",
":",
"self",
".... | Stop the client. | [
"Stop",
"the",
"client",
"."
] | python | train |
bcicen/haproxy-stats | haproxystats/__init__.py | https://github.com/bcicen/haproxy-stats/blob/f9268244b84eb52095d07b577646fdea4135fe3b/haproxystats/__init__.py#L115-L124 | def _decode(value):
"""
decode byte strings and convert to int where needed
"""
if value.isdigit():
return int(value)
if isinstance(value, bytes):
return value.decode('utf-8')
else:
return value | [
"def",
"_decode",
"(",
"value",
")",
":",
"if",
"value",
".",
"isdigit",
"(",
")",
":",
"return",
"int",
"(",
"value",
")",
"if",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"return",
"value",
".",
"decode",
"(",
"'utf-8'",
")",
"else",
":"... | decode byte strings and convert to int where needed | [
"decode",
"byte",
"strings",
"and",
"convert",
"to",
"int",
"where",
"needed"
] | python | train |
mitsei/dlkit | dlkit/json_/grading/searches.py | https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/grading/searches.py#L97-L108 | def get_grade_systems(self):
"""Gets the grade system list resulting from the search.
return: (osid.grading.GradeSystemList) - the grade system list
raise: IllegalState - list already retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.ret... | [
"def",
"get_grade_systems",
"(",
"self",
")",
":",
"if",
"self",
".",
"retrieved",
":",
"raise",
"errors",
".",
"IllegalState",
"(",
"'List has already been retrieved.'",
")",
"self",
".",
"retrieved",
"=",
"True",
"return",
"objects",
".",
"GradeSystemList",
"(... | Gets the grade system list resulting from the search.
return: (osid.grading.GradeSystemList) - the grade system list
raise: IllegalState - list already retrieved
*compliance: mandatory -- This method must be implemented.* | [
"Gets",
"the",
"grade",
"system",
"list",
"resulting",
"from",
"the",
"search",
"."
] | python | train |
intel-analytics/BigDL | pyspark/bigdl/util/common.py | https://github.com/intel-analytics/BigDL/blob/e9c19788285986ab789a2e2998f9a85d7524779f/pyspark/bigdl/util/common.py#L478-L490 | def to_sample_rdd(x, y, numSlices=None):
"""
Conver x and y into RDD[Sample]
:param x: ndarray and the first dimension should be batch
:param y: ndarray and the first dimension should be batch
:param numSlices:
:return:
"""
sc = get_spark_context()
from bigdl.util.common import Sampl... | [
"def",
"to_sample_rdd",
"(",
"x",
",",
"y",
",",
"numSlices",
"=",
"None",
")",
":",
"sc",
"=",
"get_spark_context",
"(",
")",
"from",
"bigdl",
".",
"util",
".",
"common",
"import",
"Sample",
"x_rdd",
"=",
"sc",
".",
"parallelize",
"(",
"x",
",",
"nu... | Conver x and y into RDD[Sample]
:param x: ndarray and the first dimension should be batch
:param y: ndarray and the first dimension should be batch
:param numSlices:
:return: | [
"Conver",
"x",
"and",
"y",
"into",
"RDD",
"[",
"Sample",
"]",
":",
"param",
"x",
":",
"ndarray",
"and",
"the",
"first",
"dimension",
"should",
"be",
"batch",
":",
"param",
"y",
":",
"ndarray",
"and",
"the",
"first",
"dimension",
"should",
"be",
"batch"... | python | test |
sofiatolaosebikan/hopcroftkarp | hopcroftkarp/__init__.py | https://github.com/sofiatolaosebikan/hopcroftkarp/blob/5e6cf4f95702304847307a07d369f8041edff8c9/hopcroftkarp/__init__.py#L84-L109 | def __dfs(self, v, index, layers):
"""
we recursively run dfs on each vertices in free_vertex,
:param v: vertices in free_vertex
:return: True if P is not empty (i.e., the maximal set of vertex-disjoint alternating path of length k)
and false otherwise.
"""
if in... | [
"def",
"__dfs",
"(",
"self",
",",
"v",
",",
"index",
",",
"layers",
")",
":",
"if",
"index",
"==",
"0",
":",
"path",
"=",
"[",
"v",
"]",
"while",
"self",
".",
"_dfs_parent",
"[",
"v",
"]",
"!=",
"v",
":",
"path",
".",
"append",
"(",
"self",
"... | we recursively run dfs on each vertices in free_vertex,
:param v: vertices in free_vertex
:return: True if P is not empty (i.e., the maximal set of vertex-disjoint alternating path of length k)
and false otherwise. | [
"we",
"recursively",
"run",
"dfs",
"on",
"each",
"vertices",
"in",
"free_vertex"
] | python | train |
kislyuk/aegea | aegea/packages/github3/repos/repo.py | https://github.com/kislyuk/aegea/blob/94957e9dba036eae3052e2662c208b259c08399a/aegea/packages/github3/repos/repo.py#L1009-L1018 | def is_assignee(self, login):
"""Check if the user is a possible assignee for an issue on this
repository.
:returns: :class:`bool`
"""
if not login:
return False
url = self._build_url('assignees', login, base_url=self._api)
return self._boolean(self._... | [
"def",
"is_assignee",
"(",
"self",
",",
"login",
")",
":",
"if",
"not",
"login",
":",
"return",
"False",
"url",
"=",
"self",
".",
"_build_url",
"(",
"'assignees'",
",",
"login",
",",
"base_url",
"=",
"self",
".",
"_api",
")",
"return",
"self",
".",
"... | Check if the user is a possible assignee for an issue on this
repository.
:returns: :class:`bool` | [
"Check",
"if",
"the",
"user",
"is",
"a",
"possible",
"assignee",
"for",
"an",
"issue",
"on",
"this",
"repository",
"."
] | python | train |
jenisys/parse_type | parse_type/builder.py | https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/parse_type/builder.py#L128-L154 | def make_choice(cls, choices, transform=None, strict=None):
"""
Creates a type-converter function to select one from a list of strings.
The type-converter function returns the selected choice_text.
The :param:`transform()` function is applied in the type converter.
It can be used... | [
"def",
"make_choice",
"(",
"cls",
",",
"choices",
",",
"transform",
"=",
"None",
",",
"strict",
"=",
"None",
")",
":",
"# -- NOTE: Parser uses re.IGNORECASE flag",
"# => transform may enforce case.",
"choices",
"=",
"cls",
".",
"_normalize_choices",
"(",
"choices",... | Creates a type-converter function to select one from a list of strings.
The type-converter function returns the selected choice_text.
The :param:`transform()` function is applied in the type converter.
It can be used to enforce the case (because parser uses re.IGNORECASE).
:param choice... | [
"Creates",
"a",
"type",
"-",
"converter",
"function",
"to",
"select",
"one",
"from",
"a",
"list",
"of",
"strings",
".",
"The",
"type",
"-",
"converter",
"function",
"returns",
"the",
"selected",
"choice_text",
".",
"The",
":",
"param",
":",
"transform",
"(... | python | train |
standage/tag | tag/index.py | https://github.com/standage/tag/blob/94686adf57115cea1c5235e99299e691f80ba10b/tag/index.py#L55-L58 | def consume_file(self, infile):
"""Load the specified GFF3 file into memory."""
reader = tag.reader.GFF3Reader(infilename=infile)
self.consume(reader) | [
"def",
"consume_file",
"(",
"self",
",",
"infile",
")",
":",
"reader",
"=",
"tag",
".",
"reader",
".",
"GFF3Reader",
"(",
"infilename",
"=",
"infile",
")",
"self",
".",
"consume",
"(",
"reader",
")"
] | Load the specified GFF3 file into memory. | [
"Load",
"the",
"specified",
"GFF3",
"file",
"into",
"memory",
"."
] | python | train |
aljungberg/pyle | pyle.py | https://github.com/aljungberg/pyle/blob/e0f25f42f5f35f0cefd0f7f9afafb6c9f37cc499/pyle.py#L43-L47 | def truncate_ellipsis(line, length=30):
"""Truncate a line to the specified length followed by ``...`` unless its shorter than length already."""
l = len(line)
return line if l < length else line[:length - 3] + "..." | [
"def",
"truncate_ellipsis",
"(",
"line",
",",
"length",
"=",
"30",
")",
":",
"l",
"=",
"len",
"(",
"line",
")",
"return",
"line",
"if",
"l",
"<",
"length",
"else",
"line",
"[",
":",
"length",
"-",
"3",
"]",
"+",
"\"...\""
] | Truncate a line to the specified length followed by ``...`` unless its shorter than length already. | [
"Truncate",
"a",
"line",
"to",
"the",
"specified",
"length",
"followed",
"by",
"...",
"unless",
"its",
"shorter",
"than",
"length",
"already",
"."
] | python | train |
bovee/Aston | aston/tracefile/agilent_extra_cs.py | https://github.com/bovee/Aston/blob/007630fdf074690373d03398fe818260d3d3cf5a/aston/tracefile/agilent_extra_cs.py#L133-L156 | def read_multireg_file(f, title=None):
"""
Some REG files have multiple "sections" with different data.
This parses each chunk out of such a file (e.g. LCDIAG.REG)
"""
f.seek(0x26)
nparts = struct.unpack('<H', f.read(2))[0]
foff = 0x2D
if title is None:
data = []
for _ in... | [
"def",
"read_multireg_file",
"(",
"f",
",",
"title",
"=",
"None",
")",
":",
"f",
".",
"seek",
"(",
"0x26",
")",
"nparts",
"=",
"struct",
".",
"unpack",
"(",
"'<H'",
",",
"f",
".",
"read",
"(",
"2",
")",
")",
"[",
"0",
"]",
"foff",
"=",
"0x2D",
... | Some REG files have multiple "sections" with different data.
This parses each chunk out of such a file (e.g. LCDIAG.REG) | [
"Some",
"REG",
"files",
"have",
"multiple",
"sections",
"with",
"different",
"data",
".",
"This",
"parses",
"each",
"chunk",
"out",
"of",
"such",
"a",
"file",
"(",
"e",
".",
"g",
".",
"LCDIAG",
".",
"REG",
")"
] | python | train |
pantsbuild/pants | contrib/buildgen/src/python/pants/contrib/buildgen/build_file_manipulator.py | https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/buildgen/src/python/pants/contrib/buildgen/build_file_manipulator.py#L375-L387 | def dependency_lines(self):
"""The formatted dependencies=[...] lines for this target.
If there are no dependencies, this returns an empty list.
"""
deps = sorted(self._dependencies_by_address.values(), key=lambda d: d.spec)
def dep_lines():
yield ' dependencies = ['
for dep in deps:
... | [
"def",
"dependency_lines",
"(",
"self",
")",
":",
"deps",
"=",
"sorted",
"(",
"self",
".",
"_dependencies_by_address",
".",
"values",
"(",
")",
",",
"key",
"=",
"lambda",
"d",
":",
"d",
".",
"spec",
")",
"def",
"dep_lines",
"(",
")",
":",
"yield",
"'... | The formatted dependencies=[...] lines for this target.
If there are no dependencies, this returns an empty list. | [
"The",
"formatted",
"dependencies",
"=",
"[",
"...",
"]",
"lines",
"for",
"this",
"target",
"."
] | python | train |
virtuald/pyhcl | src/hcl/parser.py | https://github.com/virtuald/pyhcl/blob/e6e27742215692974f0ef503a91a81ec4adc171c/src/hcl/parser.py#L104-L108 | def p_top(self, p):
"top : objectlist"
if DEBUG:
self.print_p(p)
p[0] = self.objectlist_flat(p[1], True) | [
"def",
"p_top",
"(",
"self",
",",
"p",
")",
":",
"if",
"DEBUG",
":",
"self",
".",
"print_p",
"(",
"p",
")",
"p",
"[",
"0",
"]",
"=",
"self",
".",
"objectlist_flat",
"(",
"p",
"[",
"1",
"]",
",",
"True",
")"
] | top : objectlist | [
"top",
":",
"objectlist"
] | python | valid |
yyuu/botornado | boto/fps/connection.py | https://github.com/yyuu/botornado/blob/fffb056f5ff2324d1d5c1304014cfb1d899f602e/boto/fps/connection.py#L76-L100 | def install_caller_instruction(self, token_type="Unrestricted",
transaction_id=None):
"""
Set us up as a caller
This will install a new caller_token into the FPS section.
This should really only be called to regenerate the caller token.
"""
... | [
"def",
"install_caller_instruction",
"(",
"self",
",",
"token_type",
"=",
"\"Unrestricted\"",
",",
"transaction_id",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"install_payment_instruction",
"(",
"\"MyRole=='Caller';\"",
",",
"token_type",
"=",
"token_type",... | Set us up as a caller
This will install a new caller_token into the FPS section.
This should really only be called to regenerate the caller token. | [
"Set",
"us",
"up",
"as",
"a",
"caller",
"This",
"will",
"install",
"a",
"new",
"caller_token",
"into",
"the",
"FPS",
"section",
".",
"This",
"should",
"really",
"only",
"be",
"called",
"to",
"regenerate",
"the",
"caller",
"token",
"."
] | python | train |
azraq27/neural | neural/wrappers/common.py | https://github.com/azraq27/neural/blob/fe91bfeecbf73ad99708cf5dca66cb61fcd529f5/neural/wrappers/common.py#L69-L74 | def blur(dset,fwhm,prefix=None):
'''blurs ``dset`` with given ``fwhm`` runs 3dmerge to blur dataset to given ``fwhm``
default ``prefix`` is to suffix ``dset`` with ``_blur%.1fmm``'''
if prefix==None:
prefix = nl.suffix(dset,'_blur%.1fmm'%fwhm)
return available_method('blur')(dset,fwhm,prefix) | [
"def",
"blur",
"(",
"dset",
",",
"fwhm",
",",
"prefix",
"=",
"None",
")",
":",
"if",
"prefix",
"==",
"None",
":",
"prefix",
"=",
"nl",
".",
"suffix",
"(",
"dset",
",",
"'_blur%.1fmm'",
"%",
"fwhm",
")",
"return",
"available_method",
"(",
"'blur'",
")... | blurs ``dset`` with given ``fwhm`` runs 3dmerge to blur dataset to given ``fwhm``
default ``prefix`` is to suffix ``dset`` with ``_blur%.1fmm`` | [
"blurs",
"dset",
"with",
"given",
"fwhm",
"runs",
"3dmerge",
"to",
"blur",
"dataset",
"to",
"given",
"fwhm",
"default",
"prefix",
"is",
"to",
"suffix",
"dset",
"with",
"_blur%",
".",
"1fmm"
] | python | train |
dariosky/wfcli | wfcli/wfapi.py | https://github.com/dariosky/wfcli/blob/87a9ed30dbd456f801135a55099f0541b0614ccb/wfcli/wfapi.py#L67-L71 | def list_domains(self):
""" Return all domains. Domain is a key, so group by them """
self.connect()
results = self.server.list_domains(self.session_id)
return {i['domain']: i['subdomains'] for i in results} | [
"def",
"list_domains",
"(",
"self",
")",
":",
"self",
".",
"connect",
"(",
")",
"results",
"=",
"self",
".",
"server",
".",
"list_domains",
"(",
"self",
".",
"session_id",
")",
"return",
"{",
"i",
"[",
"'domain'",
"]",
":",
"i",
"[",
"'subdomains'",
... | Return all domains. Domain is a key, so group by them | [
"Return",
"all",
"domains",
".",
"Domain",
"is",
"a",
"key",
"so",
"group",
"by",
"them"
] | python | train |
kodethon/KoDrive | kodrive/cli.py | https://github.com/kodethon/KoDrive/blob/325fe5e5870b7d4eb121dcc7e93be64aa16e7988/kodrive/cli.py#L178-L200 | def mv(source, target):
''' Move synchronized directory. '''
if os.path.isfile(target) and len(source) == 1:
if click.confirm("Are you sure you want to overwrite %s?" % target):
err_msg = cli_syncthing_adapter.mv_edge_case(source, target)
# Edge case: to match Bash 'mv' behavior and overwrite file... | [
"def",
"mv",
"(",
"source",
",",
"target",
")",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"target",
")",
"and",
"len",
"(",
"source",
")",
"==",
"1",
":",
"if",
"click",
".",
"confirm",
"(",
"\"Are you sure you want to overwrite %s?\"",
"%",
"t... | Move synchronized directory. | [
"Move",
"synchronized",
"directory",
"."
] | python | train |
pycontribs/pyrax | pyrax/autoscale.py | https://github.com/pycontribs/pyrax/blob/9ddfd5064b3a292d7337906f3b2d5dce95b50b99/pyrax/autoscale.py#L921-L925 | def get_webhook(self, webhook):
"""
Gets the detail for the specified webhook.
"""
return self.manager.get_webhook(self.scaling_group, self, webhook) | [
"def",
"get_webhook",
"(",
"self",
",",
"webhook",
")",
":",
"return",
"self",
".",
"manager",
".",
"get_webhook",
"(",
"self",
".",
"scaling_group",
",",
"self",
",",
"webhook",
")"
] | Gets the detail for the specified webhook. | [
"Gets",
"the",
"detail",
"for",
"the",
"specified",
"webhook",
"."
] | python | train |
timothydmorton/VESPA | vespa/stars/populations.py | https://github.com/timothydmorton/VESPA/blob/0446b54d48009f3655cfd1a3957ceea21d3adcaa/vespa/stars/populations.py#L376-L436 | def prophist2d(self,propx,propy, mask=None,
logx=False,logy=False,
fig=None,selected=False,**kwargs):
"""Makes a 2d density histogram of two given properties
:param propx,propy:
Names of properties to histogram. Must be names of columns
in ... | [
"def",
"prophist2d",
"(",
"self",
",",
"propx",
",",
"propy",
",",
"mask",
"=",
"None",
",",
"logx",
"=",
"False",
",",
"logy",
"=",
"False",
",",
"fig",
"=",
"None",
",",
"selected",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"mask",... | Makes a 2d density histogram of two given properties
:param propx,propy:
Names of properties to histogram. Must be names of columns
in ``self.stars`` table.
:param mask: (optional)
Boolean mask (``True`` is good) to say which indices to plot.
Must be sa... | [
"Makes",
"a",
"2d",
"density",
"histogram",
"of",
"two",
"given",
"properties"
] | python | train |
PSPC-SPAC-buyandsell/von_agent | von_agent/agent/issuer.py | https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/agent/issuer.py#L309-L407 | async def create_cred(
self,
cred_offer_json,
cred_req_json: str,
cred_attrs: dict,
rr_size: int = None) -> (str, str, int):
"""
Create credential as Issuer out of credential request and dict of key:value (raw, unencoded)
entries for at... | [
"async",
"def",
"create_cred",
"(",
"self",
",",
"cred_offer_json",
",",
"cred_req_json",
":",
"str",
",",
"cred_attrs",
":",
"dict",
",",
"rr_size",
":",
"int",
"=",
"None",
")",
"->",
"(",
"str",
",",
"str",
",",
"int",
")",
":",
"LOGGER",
".",
"de... | Create credential as Issuer out of credential request and dict of key:value (raw, unencoded)
entries for attributes.
Return credential json, and if cred def supports revocation, credential revocation identifier
and revocation registry delta ledger timestamp (epoch seconds).
If the cred... | [
"Create",
"credential",
"as",
"Issuer",
"out",
"of",
"credential",
"request",
"and",
"dict",
"of",
"key",
":",
"value",
"(",
"raw",
"unencoded",
")",
"entries",
"for",
"attributes",
"."
] | python | train |
wonambi-python/wonambi | wonambi/widgets/notes.py | https://github.com/wonambi-python/wonambi/blob/1d8e3d7e53df8017c199f703bcab582914676e76/wonambi/widgets/notes.py#L282-L500 | def create_action(self):
"""Create actions associated with Annotations."""
actions = {}
act = QAction('New Annotations', self)
act.triggered.connect(self.new_annot)
actions['new_annot'] = act
act = QAction('Load Annotations', self)
act.triggered.connect(self.loa... | [
"def",
"create_action",
"(",
"self",
")",
":",
"actions",
"=",
"{",
"}",
"act",
"=",
"QAction",
"(",
"'New Annotations'",
",",
"self",
")",
"act",
".",
"triggered",
".",
"connect",
"(",
"self",
".",
"new_annot",
")",
"actions",
"[",
"'new_annot'",
"]",
... | Create actions associated with Annotations. | [
"Create",
"actions",
"associated",
"with",
"Annotations",
"."
] | python | train |
spotify/luigi | luigi/tools/range.py | https://github.com/spotify/luigi/blob/c5eca1c3c3ee2a7eb612486192a0da146710a1e9/luigi/tools/range.py#L117-L124 | def of_cls(self):
"""
DONT USE. Will be deleted soon. Use ``self.of``!
"""
if isinstance(self.of, six.string_types):
warnings.warn('When using Range programatically, dont pass "of" param as string!')
return Register.get_task_cls(self.of)
return self.of | [
"def",
"of_cls",
"(",
"self",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"of",
",",
"six",
".",
"string_types",
")",
":",
"warnings",
".",
"warn",
"(",
"'When using Range programatically, dont pass \"of\" param as string!'",
")",
"return",
"Register",
".",
... | DONT USE. Will be deleted soon. Use ``self.of``! | [
"DONT",
"USE",
".",
"Will",
"be",
"deleted",
"soon",
".",
"Use",
"self",
".",
"of",
"!"
] | python | train |
chibisov/cli-bdd | tasks.py | https://github.com/chibisov/cli-bdd/blob/579e2d9a07f9985b268aa9aaba42dee33021e163/tasks.py#L8-L27 | def deploy_docs():
"""
Based on https://gist.github.com/domenic/ec8b0fc8ab45f39403dd
"""
run('rm -rf ./site/')
build_docs()
with util.cd('./site/'):
run('git init')
run('echo ".*pyc" > .gitignore')
run('git config user.name "Travis CI"')
run('git config user.email... | [
"def",
"deploy_docs",
"(",
")",
":",
"run",
"(",
"'rm -rf ./site/'",
")",
"build_docs",
"(",
")",
"with",
"util",
".",
"cd",
"(",
"'./site/'",
")",
":",
"run",
"(",
"'git init'",
")",
"run",
"(",
"'echo \".*pyc\" > .gitignore'",
")",
"run",
"(",
"'git conf... | Based on https://gist.github.com/domenic/ec8b0fc8ab45f39403dd | [
"Based",
"on",
"https",
":",
"//",
"gist",
".",
"github",
".",
"com",
"/",
"domenic",
"/",
"ec8b0fc8ab45f39403dd"
] | python | train |
cuihantao/andes | andes/variables/dae.py | https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L437-L481 | def hard_limit_remote(self,
yidx,
ridx,
rtype='y',
rmin=None,
rmax=None,
min_yset=0,
max_yset=0):
"""Limit the output of yidx if t... | [
"def",
"hard_limit_remote",
"(",
"self",
",",
"yidx",
",",
"ridx",
",",
"rtype",
"=",
"'y'",
",",
"rmin",
"=",
"None",
",",
"rmax",
"=",
"None",
",",
"min_yset",
"=",
"0",
",",
"max_yset",
"=",
"0",
")",
":",
"ny",
"=",
"len",
"(",
"yidx",
")",
... | Limit the output of yidx if the remote y is not within the limits
This function needs to be modernized. | [
"Limit",
"the",
"output",
"of",
"yidx",
"if",
"the",
"remote",
"y",
"is",
"not",
"within",
"the",
"limits"
] | python | train |
weso/CWR-DataApi | cwr/grammar/field/basic.py | https://github.com/weso/CWR-DataApi/blob/f3b6ba8308c901b6ab87073c155c08e30692333c/cwr/grammar/field/basic.py#L340-L359 | def flag(name=None):
"""
Creates the grammar for a Flag (F) field, accepting only 'Y', 'N' or 'U'.
:param name: name for the field
:return: grammar for the flag field
"""
if name is None:
name = 'Flag Field'
# Basic field
field = pp.Regex('[YNU]')
# Name
field.setName... | [
"def",
"flag",
"(",
"name",
"=",
"None",
")",
":",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"'Flag Field'",
"# Basic field",
"field",
"=",
"pp",
".",
"Regex",
"(",
"'[YNU]'",
")",
"# Name",
"field",
".",
"setName",
"(",
"name",
")",
"field",
"."... | Creates the grammar for a Flag (F) field, accepting only 'Y', 'N' or 'U'.
:param name: name for the field
:return: grammar for the flag field | [
"Creates",
"the",
"grammar",
"for",
"a",
"Flag",
"(",
"F",
")",
"field",
"accepting",
"only",
"Y",
"N",
"or",
"U",
"."
] | python | train |
dereneaton/ipyrad | ipyrad/analysis/twiist.py | https://github.com/dereneaton/ipyrad/blob/5eeb8a178160f45faf71bf47cec4abe998a575d1/ipyrad/analysis/twiist.py#L95-L125 | def sample_loci(self):
""" finds loci with sufficient sampling for this test"""
## store idx of passing loci
idxs = np.random.choice(self.idxs, self.ntests)
## open handle, make a proper generator to reduce mem
with open(self.data) as indata:
liter = (indata.read().... | [
"def",
"sample_loci",
"(",
"self",
")",
":",
"## store idx of passing loci",
"idxs",
"=",
"np",
".",
"random",
".",
"choice",
"(",
"self",
".",
"idxs",
",",
"self",
".",
"ntests",
")",
"## open handle, make a proper generator to reduce mem",
"with",
"open",
"(",
... | finds loci with sufficient sampling for this test | [
"finds",
"loci",
"with",
"sufficient",
"sampling",
"for",
"this",
"test"
] | python | valid |
dusktreader/flask-praetorian | example/refresh.py | https://github.com/dusktreader/flask-praetorian/blob/d530cf3ffeffd61bfff1b8c79e8b45e9bfa0db0c/example/refresh.py#L141-L154 | def disable_user():
"""
Disables a user in the data store
.. example::
$ curl http://localhost:5000/disable_user -X POST \
-H "Authorization: Bearer <your_token>" \
-d '{"username":"Walter"}'
"""
req = flask.request.get_json(force=True)
usr = User.query.filter_by(use... | [
"def",
"disable_user",
"(",
")",
":",
"req",
"=",
"flask",
".",
"request",
".",
"get_json",
"(",
"force",
"=",
"True",
")",
"usr",
"=",
"User",
".",
"query",
".",
"filter_by",
"(",
"username",
"=",
"req",
".",
"get",
"(",
"'username'",
",",
"None",
... | Disables a user in the data store
.. example::
$ curl http://localhost:5000/disable_user -X POST \
-H "Authorization: Bearer <your_token>" \
-d '{"username":"Walter"}' | [
"Disables",
"a",
"user",
"in",
"the",
"data",
"store"
] | python | train |
python-wink/python-wink | src/pywink/devices/cloud_clock.py | https://github.com/python-wink/python-wink/blob/cf8bdce8c6518f30b91b23aa7aa32e89c2ce48da/src/pywink/devices/cloud_clock.py#L22-L40 | def set_dial(self, json_value, index, timezone=None):
"""
:param json_value: The value to set
:param index: The dials index
:param timezone: The time zone to use for a time dial
:return:
"""
values = self.json_state
values["nonce"] = str(random.r... | [
"def",
"set_dial",
"(",
"self",
",",
"json_value",
",",
"index",
",",
"timezone",
"=",
"None",
")",
":",
"values",
"=",
"self",
".",
"json_state",
"values",
"[",
"\"nonce\"",
"]",
"=",
"str",
"(",
"random",
".",
"randint",
"(",
"0",
",",
"1000000000",
... | :param json_value: The value to set
:param index: The dials index
:param timezone: The time zone to use for a time dial
:return: | [
":",
"param",
"json_value",
":",
"The",
"value",
"to",
"set",
":",
"param",
"index",
":",
"The",
"dials",
"index",
":",
"param",
"timezone",
":",
"The",
"time",
"zone",
"to",
"use",
"for",
"a",
"time",
"dial",
":",
"return",
":"
] | python | train |
rosenbrockc/fortpy | fortpy/interop/converter.py | https://github.com/rosenbrockc/fortpy/blob/1ed0757c52d549e41d9d44bdea68cb89529293a5/fortpy/interop/converter.py#L193-L201 | def _load(self, element, commentchar):
"""Loads all the child line elements from the XML group element."""
for child in element:
if "id" in child.attrib:
tline = TemplateLine(child, self, commentchar)
self.order.append(tline.identifier)
... | [
"def",
"_load",
"(",
"self",
",",
"element",
",",
"commentchar",
")",
":",
"for",
"child",
"in",
"element",
":",
"if",
"\"id\"",
"in",
"child",
".",
"attrib",
":",
"tline",
"=",
"TemplateLine",
"(",
"child",
",",
"self",
",",
"commentchar",
")",
"self"... | Loads all the child line elements from the XML group element. | [
"Loads",
"all",
"the",
"child",
"line",
"elements",
"from",
"the",
"XML",
"group",
"element",
"."
] | python | train |
squaresLab/BugZoo | bugzoo/client/container.py | https://github.com/squaresLab/BugZoo/blob/68664f1977e85b37a78604f7c570382ffae1fa3b/bugzoo/client/container.py#L110-L138 | def provision(self,
bug: Bug,
*,
plugins: Optional[List[Tool]] = None
) -> Container:
"""
Provisions a container for a given bug.
"""
if plugins is None:
plugins = []
logger.info("provisioning co... | [
"def",
"provision",
"(",
"self",
",",
"bug",
":",
"Bug",
",",
"*",
",",
"plugins",
":",
"Optional",
"[",
"List",
"[",
"Tool",
"]",
"]",
"=",
"None",
")",
"->",
"Container",
":",
"if",
"plugins",
"is",
"None",
":",
"plugins",
"=",
"[",
"]",
"logge... | Provisions a container for a given bug. | [
"Provisions",
"a",
"container",
"for",
"a",
"given",
"bug",
"."
] | python | train |
tanghaibao/goatools | goatools/grouper/wr_sections.py | https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/grouper/wr_sections.py#L36-L40 | def prt_ntgos(self, prt, ntgos):
"""Print the Grouper namedtuples."""
for ntgo in ntgos:
key2val = ntgo._asdict()
prt.write("{GO_LINE}\n".format(GO_LINE=self.prtfmt.format(**key2val))) | [
"def",
"prt_ntgos",
"(",
"self",
",",
"prt",
",",
"ntgos",
")",
":",
"for",
"ntgo",
"in",
"ntgos",
":",
"key2val",
"=",
"ntgo",
".",
"_asdict",
"(",
")",
"prt",
".",
"write",
"(",
"\"{GO_LINE}\\n\"",
".",
"format",
"(",
"GO_LINE",
"=",
"self",
".",
... | Print the Grouper namedtuples. | [
"Print",
"the",
"Grouper",
"namedtuples",
"."
] | python | train |
rogerhil/thegamesdb | thegamesdb/resources.py | https://github.com/rogerhil/thegamesdb/blob/795314215f9ee73697c7520dea4ddecfb23ca8e6/thegamesdb/resources.py#L81-L88 | def games(self, platform):
""" It returns a list of games given the platform *alias* (usually is
the game name separated by "-" instead of white spaces).
"""
platform = platform.lower()
data_list = self.db.get_data(self.games_path, platform=platform)
data_list = data_list... | [
"def",
"games",
"(",
"self",
",",
"platform",
")",
":",
"platform",
"=",
"platform",
".",
"lower",
"(",
")",
"data_list",
"=",
"self",
".",
"db",
".",
"get_data",
"(",
"self",
".",
"games_path",
",",
"platform",
"=",
"platform",
")",
"data_list",
"=",
... | It returns a list of games given the platform *alias* (usually is
the game name separated by "-" instead of white spaces). | [
"It",
"returns",
"a",
"list",
"of",
"games",
"given",
"the",
"platform",
"*",
"alias",
"*",
"(",
"usually",
"is",
"the",
"game",
"name",
"separated",
"by",
"-",
"instead",
"of",
"white",
"spaces",
")",
"."
] | python | train |
ANTsX/ANTsPy | ants/viz/surface.py | https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/viz/surface.py#L331-L485 | def _surf_smooth_single(image,outfile,dilation,smooth,threshold,inflation,alpha,
cut_idx,cut_side,overlay,overlay_mask,overlay_cmap,overlay_scale,
overlay_alpha,rotation,grayscale,bg_grayscale,verbose):
"""
Generate a surface of the smooth white matter of a brain image.
This is great for displayi... | [
"def",
"_surf_smooth_single",
"(",
"image",
",",
"outfile",
",",
"dilation",
",",
"smooth",
",",
"threshold",
",",
"inflation",
",",
"alpha",
",",
"cut_idx",
",",
"cut_side",
",",
"overlay",
",",
"overlay_mask",
",",
"overlay_cmap",
",",
"overlay_scale",
",",
... | Generate a surface of the smooth white matter of a brain image.
This is great for displaying functional activations as are typically seen
in the neuroimaging literature.
Arguments
---------
image : ANTsImage
A binary segmentation of the white matter surface.
If you don't have a wh... | [
"Generate",
"a",
"surface",
"of",
"the",
"smooth",
"white",
"matter",
"of",
"a",
"brain",
"image",
"."
] | python | train |
OpenGov/python_data_wrap | datawrap/external/xmlparse.py | https://github.com/OpenGov/python_data_wrap/blob/7de38bb30d7a500adc336a4a7999528d753e5600/datawrap/external/xmlparse.py#L521-L526 | def startElement (self, name, attrs):
'''if there's a start method for this element, call it
'''
func = getattr(self, 'start_' + name, None)
if func:
func(attrs) | [
"def",
"startElement",
"(",
"self",
",",
"name",
",",
"attrs",
")",
":",
"func",
"=",
"getattr",
"(",
"self",
",",
"'start_'",
"+",
"name",
",",
"None",
")",
"if",
"func",
":",
"func",
"(",
"attrs",
")"
] | if there's a start method for this element, call it | [
"if",
"there",
"s",
"a",
"start",
"method",
"for",
"this",
"element",
"call",
"it"
] | python | train |
apple/turicreate | deps/src/libxml2-2.9.1/python/libxml2.py | https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/deps/src/libxml2-2.9.1/python/libxml2.py#L7312-L7315 | def xpathRegisterVariable(self, name, ns_uri, value):
"""Register a variable with the XPath context """
ret = libxml2mod.xmlXPathRegisterVariable(self._o, name, ns_uri, value)
return ret | [
"def",
"xpathRegisterVariable",
"(",
"self",
",",
"name",
",",
"ns_uri",
",",
"value",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlXPathRegisterVariable",
"(",
"self",
".",
"_o",
",",
"name",
",",
"ns_uri",
",",
"value",
")",
"return",
"ret"
] | Register a variable with the XPath context | [
"Register",
"a",
"variable",
"with",
"the",
"XPath",
"context"
] | python | train |
Microsoft/azure-devops-python-api | azure-devops/azure/devops/v5_1/gallery/gallery_client.py | https://github.com/Microsoft/azure-devops-python-api/blob/4777ffda2f5052fabbaddb2abe9cb434e0cf1aa8/azure-devops/azure/devops/v5_1/gallery/gallery_client.py#L98-L122 | def get_acquisition_options(self, item_id, installation_target, test_commerce=None, is_free_or_trial_install=None):
"""GetAcquisitionOptions.
[Preview API]
:param str item_id:
:param str installation_target:
:param bool test_commerce:
:param bool is_free_or_trial_install:... | [
"def",
"get_acquisition_options",
"(",
"self",
",",
"item_id",
",",
"installation_target",
",",
"test_commerce",
"=",
"None",
",",
"is_free_or_trial_install",
"=",
"None",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"item_id",
"is",
"not",
"None",
":",
"rou... | GetAcquisitionOptions.
[Preview API]
:param str item_id:
:param str installation_target:
:param bool test_commerce:
:param bool is_free_or_trial_install:
:rtype: :class:`<AcquisitionOptions> <azure.devops.v5_1.gallery.models.AcquisitionOptions>` | [
"GetAcquisitionOptions",
".",
"[",
"Preview",
"API",
"]",
":",
"param",
"str",
"item_id",
":",
":",
"param",
"str",
"installation_target",
":",
":",
"param",
"bool",
"test_commerce",
":",
":",
"param",
"bool",
"is_free_or_trial_install",
":",
":",
"rtype",
":"... | python | train |
calve/prof | prof/session.py | https://github.com/calve/prof/blob/c6e034f45ab60908dea661e8271bc44758aeedcf/prof/session.py#L54-L95 | def get_session(session, baseurl, config):
"""
Try to get a valid session for this baseurl, using login found in config.
This function invoques Firefox if necessary
"""
# Read proxy for firefox
if environ.get("HTTP_PROXY"):
myProxy = environ.get("HTTP_PROXY")
proxy = Proxy({
... | [
"def",
"get_session",
"(",
"session",
",",
"baseurl",
",",
"config",
")",
":",
"# Read proxy for firefox",
"if",
"environ",
".",
"get",
"(",
"\"HTTP_PROXY\"",
")",
":",
"myProxy",
"=",
"environ",
".",
"get",
"(",
"\"HTTP_PROXY\"",
")",
"proxy",
"=",
"Proxy",... | Try to get a valid session for this baseurl, using login found in config.
This function invoques Firefox if necessary | [
"Try",
"to",
"get",
"a",
"valid",
"session",
"for",
"this",
"baseurl",
"using",
"login",
"found",
"in",
"config",
".",
"This",
"function",
"invoques",
"Firefox",
"if",
"necessary"
] | python | train |
robotools/fontParts | Lib/fontParts/base/normalizers.py | https://github.com/robotools/fontParts/blob/d2ff106fe95f9d566161d936a645157626568712/Lib/fontParts/base/normalizers.py#L1083-L1098 | def normalizeRounding(value):
"""
Normalizes rounding.
Python 2 and Python 3 handing the rounding of halves (0.5, 1.5, etc)
differently. This normalizes rounding to be the same (Python 3 style)
in both environments.
* **value** must be an :ref:`type-int-float`
* Returned value is a ``int``... | [
"def",
"normalizeRounding",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"Value to round must be an int or float, not %s.\"",
"%",
"type",
"(",
"value",
")",
".",
... | Normalizes rounding.
Python 2 and Python 3 handing the rounding of halves (0.5, 1.5, etc)
differently. This normalizes rounding to be the same (Python 3 style)
in both environments.
* **value** must be an :ref:`type-int-float`
* Returned value is a ``int`` | [
"Normalizes",
"rounding",
"."
] | python | train |
kensho-technologies/grift | grift/config.py | https://github.com/kensho-technologies/grift/blob/b8767d1604c1a0a25eace6cdd04b53b57afa9757/grift/config.py#L70-L74 | def _iter_config_props(cls):
"""Iterate over all ConfigProperty attributes, yielding (attr_name, config_property) """
props = inspect.getmembers(cls, lambda a: isinstance(a, ConfigProperty))
for attr_name, config_prop in props:
yield attr_name, config_prop | [
"def",
"_iter_config_props",
"(",
"cls",
")",
":",
"props",
"=",
"inspect",
".",
"getmembers",
"(",
"cls",
",",
"lambda",
"a",
":",
"isinstance",
"(",
"a",
",",
"ConfigProperty",
")",
")",
"for",
"attr_name",
",",
"config_prop",
"in",
"props",
":",
"yiel... | Iterate over all ConfigProperty attributes, yielding (attr_name, config_property) | [
"Iterate",
"over",
"all",
"ConfigProperty",
"attributes",
"yielding",
"(",
"attr_name",
"config_property",
")"
] | python | train |
rocky/python3-trepan | trepan/processor/parse/scanner.py | https://github.com/rocky/python3-trepan/blob/14e91bc0acce090d67be145b1ac040cab92ac5f3/trepan/processor/parse/scanner.py#L101-L105 | def t_comma(self, s):
r','
# Used in "list" to separate first from last
self.add_token('COMMA', s)
self.pos += len(s) | [
"def",
"t_comma",
"(",
"self",
",",
"s",
")",
":",
"# Used in \"list\" to separate first from last",
"self",
".",
"add_token",
"(",
"'COMMA'",
",",
"s",
")",
"self",
".",
"pos",
"+=",
"len",
"(",
"s",
")"
] | r', | [
"r"
] | python | test |
gsi-upm/soil | examples/custom_generator/mymodule.py | https://github.com/gsi-upm/soil/blob/a3ea434f237f039c3cadbc2e0a83ae626d77b818/examples/custom_generator/mymodule.py#L5-L21 | def mygenerator(n=5, n_edges=5):
'''
Just a simple generator that creates a network with n nodes and
n_edges edges. Edges are assigned randomly, only avoiding self loops.
'''
G = nx.Graph()
for i in range(n):
G.add_node(i)
for i in range(n_edges):
nodes = list(G.nodes)
... | [
"def",
"mygenerator",
"(",
"n",
"=",
"5",
",",
"n_edges",
"=",
"5",
")",
":",
"G",
"=",
"nx",
".",
"Graph",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"n",
")",
":",
"G",
".",
"add_node",
"(",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"n_edg... | Just a simple generator that creates a network with n nodes and
n_edges edges. Edges are assigned randomly, only avoiding self loops. | [
"Just",
"a",
"simple",
"generator",
"that",
"creates",
"a",
"network",
"with",
"n",
"nodes",
"and",
"n_edges",
"edges",
".",
"Edges",
"are",
"assigned",
"randomly",
"only",
"avoiding",
"self",
"loops",
"."
] | python | train |
quantumlib/Cirq | cirq/google/sim/xmon_simulator.py | https://github.com/quantumlib/Cirq/blob/0827da80dd7880e5b923eb69407e980ed9bc0bd2/cirq/google/sim/xmon_simulator.py#L211-L290 | def _base_iterator(
self,
circuit: circuits.Circuit,
qubit_order: ops.QubitOrderOrList,
initial_state: Union[int, np.ndarray],
perform_measurements: bool=True,
) -> Iterator['XmonStepResult']:
"""See definition in `cirq.SimulatesIntermediateState`.
If the ini... | [
"def",
"_base_iterator",
"(",
"self",
",",
"circuit",
":",
"circuits",
".",
"Circuit",
",",
"qubit_order",
":",
"ops",
".",
"QubitOrderOrList",
",",
"initial_state",
":",
"Union",
"[",
"int",
",",
"np",
".",
"ndarray",
"]",
",",
"perform_measurements",
":",
... | See definition in `cirq.SimulatesIntermediateState`.
If the initial state is an int, the state is set to the computational
basis state corresponding to this state. Otherwise if the initial
state is a np.ndarray it is the full initial state. In this case it
must be the correct size, be ... | [
"See",
"definition",
"in",
"cirq",
".",
"SimulatesIntermediateState",
"."
] | python | train |
fedora-infra/fmn.lib | fmn/lib/models.py | https://github.com/fedora-infra/fmn.lib/blob/3120725556153d07c1809530f0fadcf250439110/fmn/lib/models.py#L724-L726 | def hash_producer(*args, **kwargs):
""" Returns a random hash for a confirmation secret. """
return hashlib.md5(six.text_type(uuid.uuid4()).encode('utf-8')).hexdigest() | [
"def",
"hash_producer",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"hashlib",
".",
"md5",
"(",
"six",
".",
"text_type",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
... | Returns a random hash for a confirmation secret. | [
"Returns",
"a",
"random",
"hash",
"for",
"a",
"confirmation",
"secret",
"."
] | python | train |
aws/aws-xray-sdk-python | aws_xray_sdk/core/models/entity.py | https://github.com/aws/aws-xray-sdk-python/blob/707358cd3a516d51f2ebf71cf34f00e8d906a667/aws_xray_sdk/core/models/entity.py#L192-L208 | def apply_status_code(self, status_code):
"""
When a trace entity is generated under the http context,
the status code will affect this entity's fault/error/throttle flags.
Flip these flags based on status code.
"""
self._check_ended()
if not status_code:
... | [
"def",
"apply_status_code",
"(",
"self",
",",
"status_code",
")",
":",
"self",
".",
"_check_ended",
"(",
")",
"if",
"not",
"status_code",
":",
"return",
"if",
"status_code",
">=",
"500",
":",
"self",
".",
"add_fault_flag",
"(",
")",
"elif",
"status_code",
... | When a trace entity is generated under the http context,
the status code will affect this entity's fault/error/throttle flags.
Flip these flags based on status code. | [
"When",
"a",
"trace",
"entity",
"is",
"generated",
"under",
"the",
"http",
"context",
"the",
"status",
"code",
"will",
"affect",
"this",
"entity",
"s",
"fault",
"/",
"error",
"/",
"throttle",
"flags",
".",
"Flip",
"these",
"flags",
"based",
"on",
"status",... | python | train |
honzamach/pydgets | pydgets/widgets.py | https://github.com/honzamach/pydgets/blob/5ca4ce19fc2d9b5f41441fb9163810f8ca502e79/pydgets/widgets.py#L1060-L1083 | def _render_line(self, line, settings):
"""
Render single box line.
"""
s = self._es(settings, self.SETTING_WIDTH, self.SETTING_FLAG_BORDER, self.SETTING_MARGIN, self.SETTING_MARGIN_LEFT, self.SETTING_MARGIN_RIGHT)
width_content = self.calculate_width_widget_int(**s)
s =... | [
"def",
"_render_line",
"(",
"self",
",",
"line",
",",
"settings",
")",
":",
"s",
"=",
"self",
".",
"_es",
"(",
"settings",
",",
"self",
".",
"SETTING_WIDTH",
",",
"self",
".",
"SETTING_FLAG_BORDER",
",",
"self",
".",
"SETTING_MARGIN",
",",
"self",
".",
... | Render single box line. | [
"Render",
"single",
"box",
"line",
"."
] | python | train |
mezz64/pyEmby | pyemby/server.py | https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L148-L154 | def _do_update_callback(self, msg):
"""Call registered callback functions."""
for callback, device in self._update_callbacks:
if device == msg:
_LOGGER.debug('Update callback %s for device %s by %s',
callback, device, msg)
self._e... | [
"def",
"_do_update_callback",
"(",
"self",
",",
"msg",
")",
":",
"for",
"callback",
",",
"device",
"in",
"self",
".",
"_update_callbacks",
":",
"if",
"device",
"==",
"msg",
":",
"_LOGGER",
".",
"debug",
"(",
"'Update callback %s for device %s by %s'",
",",
"ca... | Call registered callback functions. | [
"Call",
"registered",
"callback",
"functions",
"."
] | python | train |
peeringdb/peeringdb-py | peeringdb/backend.py | https://github.com/peeringdb/peeringdb-py/blob/cf2060a1d5ef879a01cf849e54b7756909ab2661/peeringdb/backend.py#L8-L27 | def reftag_to_cls(fn):
"""
decorator that checks function arguments for `concrete` and `resource`
and will properly set them to class references if a string (reftag) is
passed as the value
"""
names, _, _, values = inspect.getargspec(fn)
@wraps(fn)
def wrapped(*args, **kwargs):
i... | [
"def",
"reftag_to_cls",
"(",
"fn",
")",
":",
"names",
",",
"_",
",",
"_",
",",
"values",
"=",
"inspect",
".",
"getargspec",
"(",
"fn",
")",
"@",
"wraps",
"(",
"fn",
")",
"def",
"wrapped",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"i"... | decorator that checks function arguments for `concrete` and `resource`
and will properly set them to class references if a string (reftag) is
passed as the value | [
"decorator",
"that",
"checks",
"function",
"arguments",
"for",
"concrete",
"and",
"resource",
"and",
"will",
"properly",
"set",
"them",
"to",
"class",
"references",
"if",
"a",
"string",
"(",
"reftag",
")",
"is",
"passed",
"as",
"the",
"value"
] | python | train |
niklasf/python-chess | chess/pgn.py | https://github.com/niklasf/python-chess/blob/d91f986ca3e046b300a0d7d9ee2a13b07610fe1a/chess/pgn.py#L834-L840 | def handle_error(self, error: Exception) -> None:
"""
Populates :data:`chess.pgn.Game.errors` with encountered errors and
logs them.
"""
LOGGER.exception("error during pgn parsing")
self.game.errors.append(error) | [
"def",
"handle_error",
"(",
"self",
",",
"error",
":",
"Exception",
")",
"->",
"None",
":",
"LOGGER",
".",
"exception",
"(",
"\"error during pgn parsing\"",
")",
"self",
".",
"game",
".",
"errors",
".",
"append",
"(",
"error",
")"
] | Populates :data:`chess.pgn.Game.errors` with encountered errors and
logs them. | [
"Populates",
":",
"data",
":",
"chess",
".",
"pgn",
".",
"Game",
".",
"errors",
"with",
"encountered",
"errors",
"and",
"logs",
"them",
"."
] | python | train |
JohnVinyard/featureflow | featureflow/extractor.py | https://github.com/JohnVinyard/featureflow/blob/7731487b00e38fa4f58c88b7881870fda2d69fdb/featureflow/extractor.py#L118-L126 | def _finalized(self):
"""
Return true if all dependencies have informed this node that they'll
be sending no more data (by calling _finalize()), and that they have
sent at least one batch of data (by calling enqueue())
"""
return \
len(self._finalized_dependen... | [
"def",
"_finalized",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"_finalized_dependencies",
")",
">=",
"self",
".",
"dependency_count",
"and",
"len",
"(",
"self",
".",
"_enqueued_dependencies",
")",
">=",
"self",
".",
"dependency_count"
] | Return true if all dependencies have informed this node that they'll
be sending no more data (by calling _finalize()), and that they have
sent at least one batch of data (by calling enqueue()) | [
"Return",
"true",
"if",
"all",
"dependencies",
"have",
"informed",
"this",
"node",
"that",
"they",
"ll",
"be",
"sending",
"no",
"more",
"data",
"(",
"by",
"calling",
"_finalize",
"()",
")",
"and",
"that",
"they",
"have",
"sent",
"at",
"least",
"one",
"ba... | python | train |
QuantEcon/QuantEcon.py | quantecon/game_theory/repeated_game.py | https://github.com/QuantEcon/QuantEcon.py/blob/26a66c552f2a73967d7efb6e1f4b4c4985a12643/quantecon/game_theory/repeated_game.py#L370-L431 | def _intersect(C, n, weights, IC, pt0, pt1, tol):
"""
Find the intersection points of a half-closed simplex
(pt0, pt1] and IC constraints.
Parameters
----------
C : ndarray(float, ndim=2)
The 4 by 2 array for storing the generated points of
one action profile. One action profile... | [
"def",
"_intersect",
"(",
"C",
",",
"n",
",",
"weights",
",",
"IC",
",",
"pt0",
",",
"pt1",
",",
"tol",
")",
":",
"for",
"i",
"in",
"range",
"(",
"2",
")",
":",
"if",
"(",
"abs",
"(",
"pt0",
"[",
"i",
"]",
"-",
"pt1",
"[",
"i",
"]",
")",
... | Find the intersection points of a half-closed simplex
(pt0, pt1] and IC constraints.
Parameters
----------
C : ndarray(float, ndim=2)
The 4 by 2 array for storing the generated points of
one action profile. One action profile can only
generate at most 4 points.
n : scalar(i... | [
"Find",
"the",
"intersection",
"points",
"of",
"a",
"half",
"-",
"closed",
"simplex",
"(",
"pt0",
"pt1",
"]",
"and",
"IC",
"constraints",
"."
] | python | train |
pyupio/pyup | pyup/bot.py | https://github.com/pyupio/pyup/blob/b20fa88e03cfdf5dc409a9f00d27629188171c31/pyup/bot.py#L115-L128 | def update(self, **kwargs):
"""
Main entrypoint to kick off an update run.
:param kwargs:
:return: RequirementsBundle
"""
self.configure(**kwargs)
self.get_all_requirements()
self.apply_updates(
initial=kwargs.get("initial", False),
... | [
"def",
"update",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"configure",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"get_all_requirements",
"(",
")",
"self",
".",
"apply_updates",
"(",
"initial",
"=",
"kwargs",
".",
"get",
"(",
"\"i... | Main entrypoint to kick off an update run.
:param kwargs:
:return: RequirementsBundle | [
"Main",
"entrypoint",
"to",
"kick",
"off",
"an",
"update",
"run",
".",
":",
"param",
"kwargs",
":",
":",
"return",
":",
"RequirementsBundle"
] | python | train |
IRC-SPHERE/HyperStream | hyperstream/stream/stream.py | https://github.com/IRC-SPHERE/HyperStream/blob/98478f4d31ed938f4aa7c958ed0d4c3ffcb2e780/hyperstream/stream/stream.py#L308-L317 | def calculated_intervals(self, intervals):
"""
Updates the calculated intervals in the database. Performs an upsert
:param intervals: The calculated intervals
:return: None
"""
if len(intervals) > 1:
raise ValueError("Only single calculated interval valid for... | [
"def",
"calculated_intervals",
"(",
"self",
",",
"intervals",
")",
":",
"if",
"len",
"(",
"intervals",
")",
">",
"1",
":",
"raise",
"ValueError",
"(",
"\"Only single calculated interval valid for AssetStream\"",
")",
"super",
"(",
"AssetStream",
",",
"self",
".",
... | Updates the calculated intervals in the database. Performs an upsert
:param intervals: The calculated intervals
:return: None | [
"Updates",
"the",
"calculated",
"intervals",
"in",
"the",
"database",
".",
"Performs",
"an",
"upsert"
] | python | train |
ppo/django-guitar | guitar/utils/__init__.py | https://github.com/ppo/django-guitar/blob/857282219c0c4ff5907c3ad04ef012281d245348/guitar/utils/__init__.py#L9-L19 | def get_perm_name(cls, action, full=True):
"""
Return the name of the permission for a given model and action.
By default it returns the full permission name `app_label.perm_codename`. If `full=False`, it returns only the
`perm_codename`.
"""
codename = "{}_{}".format(action, cls.__name__.lower... | [
"def",
"get_perm_name",
"(",
"cls",
",",
"action",
",",
"full",
"=",
"True",
")",
":",
"codename",
"=",
"\"{}_{}\"",
".",
"format",
"(",
"action",
",",
"cls",
".",
"__name__",
".",
"lower",
"(",
")",
")",
"if",
"full",
":",
"return",
"\"{}.{}\"",
"."... | Return the name of the permission for a given model and action.
By default it returns the full permission name `app_label.perm_codename`. If `full=False`, it returns only the
`perm_codename`. | [
"Return",
"the",
"name",
"of",
"the",
"permission",
"for",
"a",
"given",
"model",
"and",
"action",
"."
] | python | train |
sensu-plugins/sensu-plugin-python | sensu_plugin/plugin.py | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/plugin.py#L51-L55 | def output(self, args):
'''
Print the output message.
'''
print("SensuPlugin: {}".format(' '.join(str(a) for a in args))) | [
"def",
"output",
"(",
"self",
",",
"args",
")",
":",
"print",
"(",
"\"SensuPlugin: {}\"",
".",
"format",
"(",
"' '",
".",
"join",
"(",
"str",
"(",
"a",
")",
"for",
"a",
"in",
"args",
")",
")",
")"
] | Print the output message. | [
"Print",
"the",
"output",
"message",
"."
] | python | train |
timothyb0912/pylogit | pylogit/bootstrap_sampler.py | https://github.com/timothyb0912/pylogit/blob/f83b0fd6debaa7358d87c3828428f6d4ead71357/pylogit/bootstrap_sampler.py#L245-L273 | def check_column_existence(col_name, df, presence=True):
"""
Checks whether or not `col_name` is in `df` and raises a helpful error msg
if the desired condition is not met.
Parameters
----------
col_name : str.
Should represent a column whose presence in `df` is to be checked.
df : ... | [
"def",
"check_column_existence",
"(",
"col_name",
",",
"df",
",",
"presence",
"=",
"True",
")",
":",
"if",
"presence",
":",
"if",
"col_name",
"not",
"in",
"df",
".",
"columns",
":",
"msg",
"=",
"\"Ensure that `{}` is in `df.columns`.\"",
"raise",
"ValueError",
... | Checks whether or not `col_name` is in `df` and raises a helpful error msg
if the desired condition is not met.
Parameters
----------
col_name : str.
Should represent a column whose presence in `df` is to be checked.
df : pandas DataFrame.
The dataframe that will be checked for the ... | [
"Checks",
"whether",
"or",
"not",
"col_name",
"is",
"in",
"df",
"and",
"raises",
"a",
"helpful",
"error",
"msg",
"if",
"the",
"desired",
"condition",
"is",
"not",
"met",
"."
] | python | train |
benmontet/f3 | f3/photometry.py | https://github.com/benmontet/f3/blob/b2e1dc250e4e3e884a54c501cd35cf02d5b8719e/f3/photometry.py#L540-L583 | def define_spotsignal(self):
"""
Identify the "expected" flux value at the time of each observation based on the
Kepler long-cadence data, to ensure variations observed are not the effects of a single
large starspot. Only works if the target star was targeted for long or short cadence
... | [
"def",
"define_spotsignal",
"(",
"self",
")",
":",
"client",
"=",
"kplr",
".",
"API",
"(",
")",
"star",
"=",
"client",
".",
"star",
"(",
"self",
".",
"kic",
")",
"lcs",
"=",
"star",
".",
"get_light_curves",
"(",
"short_cadence",
"=",
"False",
")",
"t... | Identify the "expected" flux value at the time of each observation based on the
Kepler long-cadence data, to ensure variations observed are not the effects of a single
large starspot. Only works if the target star was targeted for long or short cadence
observations during the primary mission. | [
"Identify",
"the",
"expected",
"flux",
"value",
"at",
"the",
"time",
"of",
"each",
"observation",
"based",
"on",
"the",
"Kepler",
"long",
"-",
"cadence",
"data",
"to",
"ensure",
"variations",
"observed",
"are",
"not",
"the",
"effects",
"of",
"a",
"single",
... | python | valid |
metagriffin/fso | fso/filesystemoverlay.py | https://github.com/metagriffin/fso/blob/c37701fbfdfde359a2044eb9420abe569a7b35e4/fso/filesystemoverlay.py#L445-L462 | def fso_makedirs(self, path, mode=None):
'overlays os.makedirs()'
path = self.abs(path)
cur = '/'
segments = path.split('/')
for idx, seg in enumerate(segments):
cur = os.path.join(cur, seg)
try:
st = self.fso_stat(cur)
except OSError:
st = None
if st is None:... | [
"def",
"fso_makedirs",
"(",
"self",
",",
"path",
",",
"mode",
"=",
"None",
")",
":",
"path",
"=",
"self",
".",
"abs",
"(",
"path",
")",
"cur",
"=",
"'/'",
"segments",
"=",
"path",
".",
"split",
"(",
"'/'",
")",
"for",
"idx",
",",
"seg",
"in",
"... | overlays os.makedirs() | [
"overlays",
"os",
".",
"makedirs",
"()"
] | python | valid |
lingfeiwang/findr-python | findr/pij.py | https://github.com/lingfeiwang/findr-python/blob/417f163e658fee6ef311571f7048f96069a0cf1f/findr/pij.py#L456-L493 | def cassists(self,dc,dt,dt2,nodiag=False,memlimit=-1):
"""Calculates probability of gene i regulating gene j with continuous data assisted method,
with multiple tests, by converting log likelihoods into probabilities per A for all B.
Probabilities are converted from likelihood ratios separately for each A. This give... | [
"def",
"cassists",
"(",
"self",
",",
"dc",
",",
"dt",
",",
"dt2",
",",
"nodiag",
"=",
"False",
",",
"memlimit",
"=",
"-",
"1",
")",
":",
"return",
"_cassists_any",
"(",
"self",
",",
"dc",
",",
"dt",
",",
"dt2",
",",
"\"pijs_cassist\"",
",",
"nodiag... | Calculates probability of gene i regulating gene j with continuous data assisted method,
with multiple tests, by converting log likelihoods into probabilities per A for all B.
Probabilities are converted from likelihood ratios separately for each A. This gives better
predictions when the number of secondary targets ... | [
"Calculates",
"probability",
"of",
"gene",
"i",
"regulating",
"gene",
"j",
"with",
"continuous",
"data",
"assisted",
"method",
"with",
"multiple",
"tests",
"by",
"converting",
"log",
"likelihoods",
"into",
"probabilities",
"per",
"A",
"for",
"all",
"B",
".",
"... | python | train |
firecat53/urlscan | urlscan/urlscan.py | https://github.com/firecat53/urlscan/blob/2d10807d01167873733da3b478c784f8fa21bbc0/urlscan/urlscan.py#L446-L460 | def decode_bytes(byt, enc='utf-8'):
"""Given a string or bytes input, return a string.
Args: bytes - bytes or string
enc - encoding to use for decoding the byte string.
"""
try:
strg = byt.decode(enc)
except UnicodeDecodeError as err:
strg = "Unable to decode mess... | [
"def",
"decode_bytes",
"(",
"byt",
",",
"enc",
"=",
"'utf-8'",
")",
":",
"try",
":",
"strg",
"=",
"byt",
".",
"decode",
"(",
"enc",
")",
"except",
"UnicodeDecodeError",
"as",
"err",
":",
"strg",
"=",
"\"Unable to decode message:\\n{}\\n{}\"",
".",
"format",
... | Given a string or bytes input, return a string.
Args: bytes - bytes or string
enc - encoding to use for decoding the byte string. | [
"Given",
"a",
"string",
"or",
"bytes",
"input",
"return",
"a",
"string",
"."
] | python | train |
fermiPy/fermipy | fermipy/jobs/job_archive.py | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/job_archive.py#L123-L140 | def get_status(self):
"""Return an overall status based
on the number of jobs in various states.
"""
if self.n_total == 0:
return JobStatus.no_job
elif self.n_done == self.n_total:
return JobStatus.done
elif self.n_failed > 0:
# If more... | [
"def",
"get_status",
"(",
"self",
")",
":",
"if",
"self",
".",
"n_total",
"==",
"0",
":",
"return",
"JobStatus",
".",
"no_job",
"elif",
"self",
".",
"n_done",
"==",
"self",
".",
"n_total",
":",
"return",
"JobStatus",
".",
"done",
"elif",
"self",
".",
... | Return an overall status based
on the number of jobs in various states. | [
"Return",
"an",
"overall",
"status",
"based",
"on",
"the",
"number",
"of",
"jobs",
"in",
"various",
"states",
"."
] | python | train |
saltstack/salt | salt/key.py | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/key.py#L228-L284 | def run(self):
'''
Run the logic for saltkey
'''
self._update_opts()
cmd = self.opts['fun']
veri = None
ret = None
try:
if cmd in ('accept', 'reject', 'delete'):
ret = self._run_cmd('name_match')
if not isinstan... | [
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"_update_opts",
"(",
")",
"cmd",
"=",
"self",
".",
"opts",
"[",
"'fun'",
"]",
"veri",
"=",
"None",
"ret",
"=",
"None",
"try",
":",
"if",
"cmd",
"in",
"(",
"'accept'",
",",
"'reject'",
",",
"'delet... | Run the logic for saltkey | [
"Run",
"the",
"logic",
"for",
"saltkey"
] | python | train |
acutesoftware/AIKIF | aikif/web_app/page_data.py | https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/page_data.py#L28-L39 | def show_data_file(fname):
""" shows a data file in CSV format - all files live in CORE folder """
txt = '<H2>' + fname + '</H2>'
print (fname)
#try:
txt += web.read_csv_to_html_table(fname, 'Y') # it is ok to use a table for actual table data
#except:
# txt += '<H2>ERROR - cant read file</... | [
"def",
"show_data_file",
"(",
"fname",
")",
":",
"txt",
"=",
"'<H2>'",
"+",
"fname",
"+",
"'</H2>'",
"print",
"(",
"fname",
")",
"#try:",
"txt",
"+=",
"web",
".",
"read_csv_to_html_table",
"(",
"fname",
",",
"'Y'",
")",
"# it is ok to use a table for actual ta... | shows a data file in CSV format - all files live in CORE folder | [
"shows",
"a",
"data",
"file",
"in",
"CSV",
"format",
"-",
"all",
"files",
"live",
"in",
"CORE",
"folder"
] | python | train |
benhoff/pluginmanager | pluginmanager/plugin_interface.py | https://github.com/benhoff/pluginmanager/blob/a8a184f9ebfbb521703492cb88c1dbda4cd04c06/pluginmanager/plugin_interface.py#L153-L165 | def add_plugin_directories(self, paths, except_blacklisted=True):
"""
Adds `directories` to the set of plugin directories.
`directories` may be either a single object or a iterable.
`directories` can be relative paths, but will be converted into
absolute paths based on the curr... | [
"def",
"add_plugin_directories",
"(",
"self",
",",
"paths",
",",
"except_blacklisted",
"=",
"True",
")",
":",
"self",
".",
"directory_manager",
".",
"add_directories",
"(",
"paths",
",",
"except_blacklisted",
")"
] | Adds `directories` to the set of plugin directories.
`directories` may be either a single object or a iterable.
`directories` can be relative paths, but will be converted into
absolute paths based on the current working directory.
if `except_blacklisted` is `True` all `directories` in... | [
"Adds",
"directories",
"to",
"the",
"set",
"of",
"plugin",
"directories",
"."
] | python | train |
markperdue/pyvesync | src/pyvesync/helpers.py | https://github.com/markperdue/pyvesync/blob/7552dd1a6dd5ebc452acf78e33fd8f6e721e8cfc/src/pyvesync/helpers.py#L222-L256 | def resolve_updates(orig_list, updated_list):
"""Merges changes from one list of devices against another"""
if updated_list is not None and updated_list:
if orig_list is None:
orig_list = updated_list
else:
# Add new devices not in list but found ... | [
"def",
"resolve_updates",
"(",
"orig_list",
",",
"updated_list",
")",
":",
"if",
"updated_list",
"is",
"not",
"None",
"and",
"updated_list",
":",
"if",
"orig_list",
"is",
"None",
":",
"orig_list",
"=",
"updated_list",
"else",
":",
"# Add new devices not in list bu... | Merges changes from one list of devices against another | [
"Merges",
"changes",
"from",
"one",
"list",
"of",
"devices",
"against",
"another"
] | python | train |
OpenHumans/open-humans-api | ohapi/projects.py | https://github.com/OpenHumans/open-humans-api/blob/ca2a28cf5d55cfdae13dd222ba58c25565bdb86e/ohapi/projects.py#L139-L180 | def download_all(self, target_dir, source=None, project_data=False,
memberlist=None, excludelist=None,
max_size=MAX_SIZE_DEFAULT, id_filename=False):
"""
Download data for all users including shared data files.
:param target_dir: This field is the targe... | [
"def",
"download_all",
"(",
"self",
",",
"target_dir",
",",
"source",
"=",
"None",
",",
"project_data",
"=",
"False",
",",
"memberlist",
"=",
"None",
",",
"excludelist",
"=",
"None",
",",
"max_size",
"=",
"MAX_SIZE_DEFAULT",
",",
"id_filename",
"=",
"False",... | Download data for all users including shared data files.
:param target_dir: This field is the target directory to download data.
:param source: This field is the data source. It's default value is
None.
:param project_data: This field is data related to particular project.
... | [
"Download",
"data",
"for",
"all",
"users",
"including",
"shared",
"data",
"files",
"."
] | python | train |
fulfilio/python-magento | magento/sales.py | https://github.com/fulfilio/python-magento/blob/720ec136a6e438a9ee4ee92848a9820b91732750/magento/sales.py#L78-L92 | def addcomment(self, order_increment_id,
status, comment=None, notify=False):
"""
Add comment to order or change its state
:param order_increment_id: Order ID
TODO: Identify possible values for status
"""
if comment is None:
comment = ""
r... | [
"def",
"addcomment",
"(",
"self",
",",
"order_increment_id",
",",
"status",
",",
"comment",
"=",
"None",
",",
"notify",
"=",
"False",
")",
":",
"if",
"comment",
"is",
"None",
":",
"comment",
"=",
"\"\"",
"return",
"bool",
"(",
"self",
".",
"call",
"(",... | Add comment to order or change its state
:param order_increment_id: Order ID
TODO: Identify possible values for status | [
"Add",
"comment",
"to",
"order",
"or",
"change",
"its",
"state"
] | python | train |
AlecAivazis/graphql-over-kafka | nautilus/network/events/actionHandlers/createHandler.py | https://github.com/AlecAivazis/graphql-over-kafka/blob/70e2acef27a2f87355590be1a6ca60ce3ab4d09c/nautilus/network/events/actionHandlers/createHandler.py#L12-L83 | def create_handler(Model, name=None, **kwds):
"""
This factory returns an action handler that creates a new instance of
the specified model when a create action is recieved, assuming the
action follows nautilus convetions.
Args:
Model (nautilus.BaseModel): The model to c... | [
"def",
"create_handler",
"(",
"Model",
",",
"name",
"=",
"None",
",",
"*",
"*",
"kwds",
")",
":",
"async",
"def",
"action_handler",
"(",
"service",
",",
"action_type",
",",
"payload",
",",
"props",
",",
"notify",
"=",
"True",
",",
"*",
"*",
"kwds",
"... | This factory returns an action handler that creates a new instance of
the specified model when a create action is recieved, assuming the
action follows nautilus convetions.
Args:
Model (nautilus.BaseModel): The model to create when the action
received.
Retur... | [
"This",
"factory",
"returns",
"an",
"action",
"handler",
"that",
"creates",
"a",
"new",
"instance",
"of",
"the",
"specified",
"model",
"when",
"a",
"create",
"action",
"is",
"recieved",
"assuming",
"the",
"action",
"follows",
"nautilus",
"convetions",
"."
] | python | train |
econ-ark/HARK | HARK/ConsumptionSaving/TractableBufferStockModel.py | https://github.com/econ-ark/HARK/blob/3d184153a189e618a87c9540df1cd12044039cc5/HARK/ConsumptionSaving/TractableBufferStockModel.py#L420-L433 | def getStates(self):
'''
Calculate market resources for all agents this period.
Parameters
----------
None
Returns
-------
None
'''
self.bLvlNow = self.Rfree*self.aLvlNow
self.mLvlNow = self.bLvlNow + self.eStateNow | [
"def",
"getStates",
"(",
"self",
")",
":",
"self",
".",
"bLvlNow",
"=",
"self",
".",
"Rfree",
"*",
"self",
".",
"aLvlNow",
"self",
".",
"mLvlNow",
"=",
"self",
".",
"bLvlNow",
"+",
"self",
".",
"eStateNow"
] | Calculate market resources for all agents this period.
Parameters
----------
None
Returns
-------
None | [
"Calculate",
"market",
"resources",
"for",
"all",
"agents",
"this",
"period",
"."
] | python | train |
sosy-lab/benchexec | benchexec/tablegenerator/__init__.py | https://github.com/sosy-lab/benchexec/blob/44428f67f41384c03aea13e7e25f884764653617/benchexec/tablegenerator/__init__.py#L629-L718 | def create_from_xml(sourcefileTag, get_value_from_logfile, listOfColumns,
correct_only, log_zip_cache, columns_relevant_for_diff,
result_file_or_url):
'''
This function collects the values from one run.
Only columns that should be part of the table... | [
"def",
"create_from_xml",
"(",
"sourcefileTag",
",",
"get_value_from_logfile",
",",
"listOfColumns",
",",
"correct_only",
",",
"log_zip_cache",
",",
"columns_relevant_for_diff",
",",
"result_file_or_url",
")",
":",
"def",
"read_logfile_lines",
"(",
"log_file",
")",
":",... | This function collects the values from one run.
Only columns that should be part of the table are collected. | [
"This",
"function",
"collects",
"the",
"values",
"from",
"one",
"run",
".",
"Only",
"columns",
"that",
"should",
"be",
"part",
"of",
"the",
"table",
"are",
"collected",
"."
] | python | train |
spacetelescope/stsci.tools | lib/stsci/tools/validate.py | https://github.com/spacetelescope/stsci.tools/blob/9a022503ad24ca54ce83331482dfa3ff6de9f403/lib/stsci/tools/validate.py#L1110-L1131 | def is_int_list(value, min=None, max=None):
"""
Check that the value is a list of integers.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an integer.
>>> vtor = Validator()
>>> vtor.check('int_list', ())
[]
>>> vtor.check(... | [
"def",
"is_int_list",
"(",
"value",
",",
"min",
"=",
"None",
",",
"max",
"=",
"None",
")",
":",
"return",
"[",
"is_integer",
"(",
"mem",
")",
"for",
"mem",
"in",
"is_list",
"(",
"value",
",",
"min",
",",
"max",
")",
"]"
] | Check that the value is a list of integers.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an integer.
>>> vtor = Validator()
>>> vtor.check('int_list', ())
[]
>>> vtor.check('int_list', [])
[]
>>> vtor.check('int_list', (1... | [
"Check",
"that",
"the",
"value",
"is",
"a",
"list",
"of",
"integers",
"."
] | python | train |
cloud9ers/gurumate | environment/lib/python2.7/site-packages/IPython/core/prefilter.py | https://github.com/cloud9ers/gurumate/blob/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e/environment/lib/python2.7/site-packages/IPython/core/prefilter.py#L180-L183 | def unregister_transformer(self, transformer):
"""Unregister a transformer instance."""
if transformer in self._transformers:
self._transformers.remove(transformer) | [
"def",
"unregister_transformer",
"(",
"self",
",",
"transformer",
")",
":",
"if",
"transformer",
"in",
"self",
".",
"_transformers",
":",
"self",
".",
"_transformers",
".",
"remove",
"(",
"transformer",
")"
] | Unregister a transformer instance. | [
"Unregister",
"a",
"transformer",
"instance",
"."
] | python | test |
proycon/pynlpl | pynlpl/textprocessors.py | https://github.com/proycon/pynlpl/blob/7707f69a91caaa6cde037f0d0379f1d42500a68b/pynlpl/textprocessors.py#L403-L411 | def split_sentences(tokens):
"""Split sentences (based on tokenised data), returns sentences as a list of lists of tokens, each sentence is a list of tokens"""
begin = 0
for i, token in enumerate(tokens):
if is_end_of_sentence(tokens, i):
yield tokens[begin:i+1]
begin = i+1
... | [
"def",
"split_sentences",
"(",
"tokens",
")",
":",
"begin",
"=",
"0",
"for",
"i",
",",
"token",
"in",
"enumerate",
"(",
"tokens",
")",
":",
"if",
"is_end_of_sentence",
"(",
"tokens",
",",
"i",
")",
":",
"yield",
"tokens",
"[",
"begin",
":",
"i",
"+",... | Split sentences (based on tokenised data), returns sentences as a list of lists of tokens, each sentence is a list of tokens | [
"Split",
"sentences",
"(",
"based",
"on",
"tokenised",
"data",
")",
"returns",
"sentences",
"as",
"a",
"list",
"of",
"lists",
"of",
"tokens",
"each",
"sentence",
"is",
"a",
"list",
"of",
"tokens"
] | python | train |
ToFuProject/tofu | tofu/data/_core.py | https://github.com/ToFuProject/tofu/blob/39d6b2e7ced9e13666572dfd37e19403f1d6ff8d/tofu/data/_core.py#L1582-L1605 | def plot(self, key=None,
cmap=None, ms=4, vmin=None, vmax=None,
vmin_map=None, vmax_map=None, cmap_map=None, normt_map=False,
ntMax=None, nchMax=None, nlbdMax=3,
lls=None, lct=None, lcch=None, lclbd=None, cbck=None,
inct=[1,10], incX=[1,5], inclbd=[1,10],... | [
"def",
"plot",
"(",
"self",
",",
"key",
"=",
"None",
",",
"cmap",
"=",
"None",
",",
"ms",
"=",
"4",
",",
"vmin",
"=",
"None",
",",
"vmax",
"=",
"None",
",",
"vmin_map",
"=",
"None",
",",
"vmax_map",
"=",
"None",
",",
"cmap_map",
"=",
"None",
",... | Plot the data content in a generic interactive figure | [
"Plot",
"the",
"data",
"content",
"in",
"a",
"generic",
"interactive",
"figure"
] | python | train |
oisinmulvihill/stomper | lib/stomper/utils.py | https://github.com/oisinmulvihill/stomper/blob/842ed2353a4ddd638d35929ae5b7b70eb298305c/lib/stomper/utils.py#L11-L22 | def log_init(level):
"""Set up a logger that catches all channels and logs it to stdout.
This is used to set up logging when testing.
"""
log = logging.getLogger()
hdlr = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
hdlr.se... | [
"def",
"log_init",
"(",
"level",
")",
":",
"log",
"=",
"logging",
".",
"getLogger",
"(",
")",
"hdlr",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"'%(asctime)s %(name)s %(levelname)s %(message)s'",
")",
"... | Set up a logger that catches all channels and logs it to stdout.
This is used to set up logging when testing. | [
"Set",
"up",
"a",
"logger",
"that",
"catches",
"all",
"channels",
"and",
"logs",
"it",
"to",
"stdout",
".",
"This",
"is",
"used",
"to",
"set",
"up",
"logging",
"when",
"testing",
"."
] | python | train |
inveniosoftware/invenio-pidstore | invenio_pidstore/providers/datacite.py | https://github.com/inveniosoftware/invenio-pidstore/blob/8bf35f4e62d5dcaf1a2cfe5803245ba5220a9b78/invenio_pidstore/providers/datacite.py#L35-L49 | def create(cls, pid_value, **kwargs):
"""Create a new record identifier.
For more information about parameters,
see :meth:`invenio_pidstore.providers.BaseProvider.create`.
:param pid_value: Persistent identifier value.
:params **kwargs: See
:meth:`invenio_pidstore.p... | [
"def",
"create",
"(",
"cls",
",",
"pid_value",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"super",
"(",
"DataCiteProvider",
",",
"cls",
")",
".",
"create",
"(",
"pid_value",
"=",
"pid_value",
",",
"*",
"*",
"kwargs",
")"
] | Create a new record identifier.
For more information about parameters,
see :meth:`invenio_pidstore.providers.BaseProvider.create`.
:param pid_value: Persistent identifier value.
:params **kwargs: See
:meth:`invenio_pidstore.providers.base.BaseProvider.create` extra
... | [
"Create",
"a",
"new",
"record",
"identifier",
"."
] | python | train |
teaearlgraycold/puni | puni/base.py | https://github.com/teaearlgraycold/puni/blob/f6d0bfde99942b29a6f91273e48abcd2d7a94c93/puni/base.py#L87-L96 | def full_url(self):
"""Return the full reddit URL associated with the usernote.
Arguments:
subreddit: the subreddit name for the note (PRAW Subreddit object)
"""
if self.link == '':
return None
else:
return Note._expand_url(self.link, self.sub... | [
"def",
"full_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"link",
"==",
"''",
":",
"return",
"None",
"else",
":",
"return",
"Note",
".",
"_expand_url",
"(",
"self",
".",
"link",
",",
"self",
".",
"subreddit",
")"
] | Return the full reddit URL associated with the usernote.
Arguments:
subreddit: the subreddit name for the note (PRAW Subreddit object) | [
"Return",
"the",
"full",
"reddit",
"URL",
"associated",
"with",
"the",
"usernote",
"."
] | python | train |
ymoch/apyori | apyori.py | https://github.com/ymoch/apyori/blob/8cc20a19d01b18b83e18e54aabb416c8dedabfde/apyori.py#L376-L397 | def dump_as_json(record, output_file):
"""
Dump an relation record as a json value.
Arguments:
record -- A RelationRecord instance to dump.
output_file -- A file to output.
"""
def default_func(value):
"""
Default conversion for JSON value.
"""
if isi... | [
"def",
"dump_as_json",
"(",
"record",
",",
"output_file",
")",
":",
"def",
"default_func",
"(",
"value",
")",
":",
"\"\"\"\n Default conversion for JSON value.\n \"\"\"",
"if",
"isinstance",
"(",
"value",
",",
"frozenset",
")",
":",
"return",
"sorted",
... | Dump an relation record as a json value.
Arguments:
record -- A RelationRecord instance to dump.
output_file -- A file to output. | [
"Dump",
"an",
"relation",
"record",
"as",
"a",
"json",
"value",
"."
] | python | train |
AndrewAnnex/SpiceyPy | spiceypy/spiceypy.py | https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L1649-L1669 | def cylrec(r, lon, z):
"""
Convert from cylindrical to rectangular coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cylrec_c.html
:param r: Distance of a point from z axis.
:type r: float
:param lon: Angle (radians) of a point from xZ plane.
:type lon: float
:param ... | [
"def",
"cylrec",
"(",
"r",
",",
"lon",
",",
"z",
")",
":",
"r",
"=",
"ctypes",
".",
"c_double",
"(",
"r",
")",
"lon",
"=",
"ctypes",
".",
"c_double",
"(",
"lon",
")",
"z",
"=",
"ctypes",
".",
"c_double",
"(",
"z",
")",
"rectan",
"=",
"stypes",
... | Convert from cylindrical to rectangular coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cylrec_c.html
:param r: Distance of a point from z axis.
:type r: float
:param lon: Angle (radians) of a point from xZ plane.
:type lon: float
:param z: Height of a point above xY plane... | [
"Convert",
"from",
"cylindrical",
"to",
"rectangular",
"coordinates",
"."
] | python | train |
poppy-project/pypot | pypot/vrep/remoteApiBindings/vrep.py | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/remoteApiBindings/vrep.py#L208-L213 | def simxJointGetForce(clientID, jointHandle, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
force = ct.c_float()
return c_GetJointForce(clientID, jointHandle, ct.byref(force), operationMode), force.value | [
"def",
"simxJointGetForce",
"(",
"clientID",
",",
"jointHandle",
",",
"operationMode",
")",
":",
"force",
"=",
"ct",
".",
"c_float",
"(",
")",
"return",
"c_GetJointForce",
"(",
"clientID",
",",
"jointHandle",
",",
"ct",
".",
"byref",
"(",
"force",
")",
","... | Please have a look at the function description/documentation in the V-REP user manual | [
"Please",
"have",
"a",
"look",
"at",
"the",
"function",
"description",
"/",
"documentation",
"in",
"the",
"V",
"-",
"REP",
"user",
"manual"
] | python | train |
mental32/spotify.py | spotify/utils.py | https://github.com/mental32/spotify.py/blob/bb296cac7c3dd289908906b7069bd80f43950515/spotify/utils.py#L49-L64 | def assert_hasattr(attr: str, msg: str, tp: BaseException = SpotifyException) -> Callable:
"""decorator to assert an object has an attribute when run."""
def decorator(func: Callable) -> Callable:
@functools.wraps(func)
def decorated(self, *args, **kwargs):
if not hasattr(self, attr)... | [
"def",
"assert_hasattr",
"(",
"attr",
":",
"str",
",",
"msg",
":",
"str",
",",
"tp",
":",
"BaseException",
"=",
"SpotifyException",
")",
"->",
"Callable",
":",
"def",
"decorator",
"(",
"func",
":",
"Callable",
")",
"->",
"Callable",
":",
"@",
"functools"... | decorator to assert an object has an attribute when run. | [
"decorator",
"to",
"assert",
"an",
"object",
"has",
"an",
"attribute",
"when",
"run",
"."
] | python | test |
calocan/rescape-python-helpers | rescape_python_helpers/functional/ramda.py | https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L564-L574 | def merge_all(dcts):
"""
Shallow merge all the dcts
:param dcts:
:return:
"""
return reduce(
lambda accum, dct: merge(accum, dct),
dict(),
dcts
) | [
"def",
"merge_all",
"(",
"dcts",
")",
":",
"return",
"reduce",
"(",
"lambda",
"accum",
",",
"dct",
":",
"merge",
"(",
"accum",
",",
"dct",
")",
",",
"dict",
"(",
")",
",",
"dcts",
")"
] | Shallow merge all the dcts
:param dcts:
:return: | [
"Shallow",
"merge",
"all",
"the",
"dcts",
":",
"param",
"dcts",
":",
":",
"return",
":"
] | python | train |
doctorzeb8/django-era | era/utils/functools.py | https://github.com/doctorzeb8/django-era/blob/6fd433ef6081c5be295df22bcea70dc2642baaa4/era/utils/functools.py#L23-L43 | def unidec(fnx):
'''
@unidec
def render(view, request, flag=True):
pass
@render
def first_view(request):
pass
@render(flag=False)
def second_view(request):
pass
'''
return lambda *ax, **kx: (
wraps(ax[0])(lambda *ay, **ky: fnx(ax[0], *ay, **ky)) \
... | [
"def",
"unidec",
"(",
"fnx",
")",
":",
"return",
"lambda",
"*",
"ax",
",",
"*",
"*",
"kx",
":",
"(",
"wraps",
"(",
"ax",
"[",
"0",
"]",
")",
"(",
"lambda",
"*",
"ay",
",",
"*",
"*",
"ky",
":",
"fnx",
"(",
"ax",
"[",
"0",
"]",
",",
"*",
... | @unidec
def render(view, request, flag=True):
pass
@render
def first_view(request):
pass
@render(flag=False)
def second_view(request):
pass | [
"@unidec",
"def",
"render",
"(",
"view",
"request",
"flag",
"=",
"True",
")",
":",
"pass"
] | python | train |
openstax/cnx-archive | cnxarchive/views/robots.py | https://github.com/openstax/cnx-archive/blob/d31d34aa8bbc8a9fde6cd4227a0df92726e8daf4/cnxarchive/views/robots.py#L14-L24 | def robots(request):
"""Return a simple "don't index me" robots.txt file."""
resp = request.response
resp.status = '200 OK'
resp.content_type = 'text/plain'
resp.body = """
User-Agent: *
Disallow: /
"""
return resp | [
"def",
"robots",
"(",
"request",
")",
":",
"resp",
"=",
"request",
".",
"response",
"resp",
".",
"status",
"=",
"'200 OK'",
"resp",
".",
"content_type",
"=",
"'text/plain'",
"resp",
".",
"body",
"=",
"\"\"\"\nUser-Agent: *\nDisallow: /\n\"\"\"",
"return",
"resp"... | Return a simple "don't index me" robots.txt file. | [
"Return",
"a",
"simple",
"don",
"t",
"index",
"me",
"robots",
".",
"txt",
"file",
"."
] | python | train |
iotile/typedargs | typedargs/metadata.py | https://github.com/iotile/typedargs/blob/0a5091a664b9b4d836e091e9ba583e944f438fd8/typedargs/metadata.py#L131-L139 | def typed_returnvalue(self, type_name, formatter=None):
"""Add type information to the return value of this function.
Args:
type_name (str): The name of the type of the return value.
formatter (str): An optional name of a formatting function specified
for the typ... | [
"def",
"typed_returnvalue",
"(",
"self",
",",
"type_name",
",",
"formatter",
"=",
"None",
")",
":",
"self",
".",
"return_info",
"=",
"ReturnInfo",
"(",
"type_name",
",",
"formatter",
",",
"True",
",",
"None",
")"
] | Add type information to the return value of this function.
Args:
type_name (str): The name of the type of the return value.
formatter (str): An optional name of a formatting function specified
for the type given in type_name. | [
"Add",
"type",
"information",
"to",
"the",
"return",
"value",
"of",
"this",
"function",
"."
] | python | test |
sethmlarson/virtualbox-python | virtualbox/library.py | https://github.com/sethmlarson/virtualbox-python/blob/706c8e3f6e3aee17eb06458e73cbb4bc2d37878b/virtualbox/library.py#L13243-L13287 | def set_auto_discard_for_device(self, name, controller_port, device, discard):
"""Sets a flag in the device information which indicates that the medium
supports discarding unused blocks (called trimming for SATA or unmap
for SCSI devices) .This may or may not be supported by a particular drive,
... | [
"def",
"set_auto_discard_for_device",
"(",
"self",
",",
"name",
",",
"controller_port",
",",
"device",
",",
"discard",
")",
":",
"if",
"not",
"isinstance",
"(",
"name",
",",
"basestring",
")",
":",
"raise",
"TypeError",
"(",
"\"name can only be an instance of type... | Sets a flag in the device information which indicates that the medium
supports discarding unused blocks (called trimming for SATA or unmap
for SCSI devices) .This may or may not be supported by a particular drive,
and is silently ignored in the latter case. At the moment only hard disks
... | [
"Sets",
"a",
"flag",
"in",
"the",
"device",
"information",
"which",
"indicates",
"that",
"the",
"medium",
"supports",
"discarding",
"unused",
"blocks",
"(",
"called",
"trimming",
"for",
"SATA",
"or",
"unmap",
"for",
"SCSI",
"devices",
")",
".",
"This",
"may"... | python | train |
palantir/typedjsonrpc | typedjsonrpc/server.py | https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/server.py#L129-L139 | def _try_trigger_before_first_request_funcs(self): # pylint: disable=C0103
"""Runs each function from ``self.before_first_request_funcs`` once and only once."""
if self._after_first_request_handled:
return
else:
with self._before_first_request_lock:
if se... | [
"def",
"_try_trigger_before_first_request_funcs",
"(",
"self",
")",
":",
"# pylint: disable=C0103",
"if",
"self",
".",
"_after_first_request_handled",
":",
"return",
"else",
":",
"with",
"self",
".",
"_before_first_request_lock",
":",
"if",
"self",
".",
"_after_first_re... | Runs each function from ``self.before_first_request_funcs`` once and only once. | [
"Runs",
"each",
"function",
"from",
"self",
".",
"before_first_request_funcs",
"once",
"and",
"only",
"once",
"."
] | python | train |
cloud9ers/gurumate | environment/lib/python2.7/site-packages/IPython/extensions/storemagic.py | https://github.com/cloud9ers/gurumate/blob/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e/environment/lib/python2.7/site-packages/IPython/extensions/storemagic.py#L228-L234 | def load_ipython_extension(ip):
"""Load the extension in IPython."""
global _loaded
if not _loaded:
plugin = StoreMagic(shell=ip, config=ip.config)
ip.plugin_manager.register_plugin('storemagic', plugin)
_loaded = True | [
"def",
"load_ipython_extension",
"(",
"ip",
")",
":",
"global",
"_loaded",
"if",
"not",
"_loaded",
":",
"plugin",
"=",
"StoreMagic",
"(",
"shell",
"=",
"ip",
",",
"config",
"=",
"ip",
".",
"config",
")",
"ip",
".",
"plugin_manager",
".",
"register_plugin",... | Load the extension in IPython. | [
"Load",
"the",
"extension",
"in",
"IPython",
"."
] | python | test |
rameshg87/pyremotevbox | pyremotevbox/ZSI/generate/wsdl2dispatch.py | https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/generate/wsdl2dispatch.py#L330-L344 | def createMethodBody(msgInName, msgOutName, **kw):
'''return a tuple of strings containing the body of a method.
msgInName -- None or a str
msgOutName -- None or a str
'''
body = []
if msgInName is not None:
body.append('request = ps.Parse(%s.typecode)' %msgI... | [
"def",
"createMethodBody",
"(",
"msgInName",
",",
"msgOutName",
",",
"*",
"*",
"kw",
")",
":",
"body",
"=",
"[",
"]",
"if",
"msgInName",
"is",
"not",
"None",
":",
"body",
".",
"append",
"(",
"'request = ps.Parse(%s.typecode)'",
"%",
"msgInName",
")",
"if",... | return a tuple of strings containing the body of a method.
msgInName -- None or a str
msgOutName -- None or a str | [
"return",
"a",
"tuple",
"of",
"strings",
"containing",
"the",
"body",
"of",
"a",
"method",
".",
"msgInName",
"--",
"None",
"or",
"a",
"str",
"msgOutName",
"--",
"None",
"or",
"a",
"str"
] | python | train |
Parsl/parsl | parsl/providers/googlecloud/googlecloud.py | https://github.com/Parsl/parsl/blob/d7afb3bc37f50dcf224ae78637944172edb35dac/parsl/providers/googlecloud/googlecloud.py#L112-L137 | def submit(self, command, blocksize, tasks_per_node, job_name="parsl.auto"):
''' The submit method takes the command string to be executed upon
instantiation of a resource most often to start a pilot.
Args :
- command (str) : The bash command string to be executed.
- b... | [
"def",
"submit",
"(",
"self",
",",
"command",
",",
"blocksize",
",",
"tasks_per_node",
",",
"job_name",
"=",
"\"parsl.auto\"",
")",
":",
"wrapped_cmd",
"=",
"self",
".",
"launcher",
"(",
"command",
",",
"tasks_per_node",
",",
"1",
")",
"instance",
",",
"na... | The submit method takes the command string to be executed upon
instantiation of a resource most often to start a pilot.
Args :
- command (str) : The bash command string to be executed.
- blocksize (int) : Blocksize to be requested
- tasks_per_node (int) : command ... | [
"The",
"submit",
"method",
"takes",
"the",
"command",
"string",
"to",
"be",
"executed",
"upon",
"instantiation",
"of",
"a",
"resource",
"most",
"often",
"to",
"start",
"a",
"pilot",
"."
] | python | valid |
softlayer/softlayer-python | SoftLayer/CLI/hardware/create_options.py | https://github.com/softlayer/softlayer-python/blob/9f181be08cc3668353b05a6de0cb324f52cff6fa/SoftLayer/CLI/hardware/create_options.py#L13-L56 | def cli(env):
"""Server order options for a given chassis."""
hardware_manager = hardware.HardwareManager(env.client)
options = hardware_manager.get_create_options()
tables = []
# Datacenters
dc_table = formatting.Table(['datacenter', 'value'])
dc_table.sortby = 'value'
for location i... | [
"def",
"cli",
"(",
"env",
")",
":",
"hardware_manager",
"=",
"hardware",
".",
"HardwareManager",
"(",
"env",
".",
"client",
")",
"options",
"=",
"hardware_manager",
".",
"get_create_options",
"(",
")",
"tables",
"=",
"[",
"]",
"# Datacenters",
"dc_table",
"=... | Server order options for a given chassis. | [
"Server",
"order",
"options",
"for",
"a",
"given",
"chassis",
"."
] | python | train |
tensorflow/tensor2tensor | tensor2tensor/data_generators/image_utils.py | https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/data_generators/image_utils.py#L282-L311 | def image_generator(images, labels):
"""Generator for images that takes image and labels lists and creates pngs.
Args:
images: list of images given as [width x height x channels] numpy arrays.
labels: list of ints, same length as images.
Yields:
A dictionary representing the images with the followin... | [
"def",
"image_generator",
"(",
"images",
",",
"labels",
")",
":",
"if",
"not",
"images",
":",
"raise",
"ValueError",
"(",
"\"Must provide some images for the generator.\"",
")",
"width",
",",
"height",
",",
"_",
"=",
"images",
"[",
"0",
"]",
".",
"shape",
"f... | Generator for images that takes image and labels lists and creates pngs.
Args:
images: list of images given as [width x height x channels] numpy arrays.
labels: list of ints, same length as images.
Yields:
A dictionary representing the images with the following fields:
* image/encoded: the string ... | [
"Generator",
"for",
"images",
"that",
"takes",
"image",
"and",
"labels",
"lists",
"and",
"creates",
"pngs",
"."
] | python | train |
Parsl/parsl | parsl/executors/ipp_controller.py | https://github.com/Parsl/parsl/blob/d7afb3bc37f50dcf224ae78637944172edb35dac/parsl/executors/ipp_controller.py#L136-L166 | def close(self):
"""Terminate the controller process and its child processes.
Args:
- None
"""
if self.reuse:
logger.debug("Ipcontroller not shutting down: reuse enabled")
return
if self.mode == "manual":
logger.debug("Ipcontrol... | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"reuse",
":",
"logger",
".",
"debug",
"(",
"\"Ipcontroller not shutting down: reuse enabled\"",
")",
"return",
"if",
"self",
".",
"mode",
"==",
"\"manual\"",
":",
"logger",
".",
"debug",
"(",
"\"Ipcon... | Terminate the controller process and its child processes.
Args:
- None | [
"Terminate",
"the",
"controller",
"process",
"and",
"its",
"child",
"processes",
"."
] | python | valid |
saltstack/salt | salt/returners/postgres_local_cache.py | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/returners/postgres_local_cache.py#L200-L221 | def prep_jid(nocache=False, passed_jid=None):
'''
Return a job id and prepare the job id directory
This is the function responsible for making sure jids don't collide
(unless its passed a jid). So do what you have to do to make sure that
stays the case
'''
conn = _get_conn()
if conn is N... | [
"def",
"prep_jid",
"(",
"nocache",
"=",
"False",
",",
"passed_jid",
"=",
"None",
")",
":",
"conn",
"=",
"_get_conn",
"(",
")",
"if",
"conn",
"is",
"None",
":",
"return",
"None",
"cur",
"=",
"conn",
".",
"cursor",
"(",
")",
"if",
"passed_jid",
"is",
... | Return a job id and prepare the job id directory
This is the function responsible for making sure jids don't collide
(unless its passed a jid). So do what you have to do to make sure that
stays the case | [
"Return",
"a",
"job",
"id",
"and",
"prepare",
"the",
"job",
"id",
"directory",
"This",
"is",
"the",
"function",
"responsible",
"for",
"making",
"sure",
"jids",
"don",
"t",
"collide",
"(",
"unless",
"its",
"passed",
"a",
"jid",
")",
".",
"So",
"do",
"wh... | python | train |
zerotk/easyfs | zerotk/easyfs/_easyfs.py | https://github.com/zerotk/easyfs/blob/140923db51fb91d5a5847ad17412e8bce51ba3da/zerotk/easyfs/_easyfs.py#L70-L90 | def Cwd(directory):
'''
Context manager for current directory (uses with_statement)
e.g.:
# working on some directory
with Cwd('/home/new_dir'):
# working on new_dir
# working on some directory again
:param unicode directory:
Target directory to enter
'... | [
"def",
"Cwd",
"(",
"directory",
")",
":",
"old_directory",
"=",
"six",
".",
"moves",
".",
"getcwd",
"(",
")",
"if",
"directory",
"is",
"not",
"None",
":",
"os",
".",
"chdir",
"(",
"directory",
")",
"try",
":",
"yield",
"directory",
"finally",
":",
"o... | Context manager for current directory (uses with_statement)
e.g.:
# working on some directory
with Cwd('/home/new_dir'):
# working on new_dir
# working on some directory again
:param unicode directory:
Target directory to enter | [
"Context",
"manager",
"for",
"current",
"directory",
"(",
"uses",
"with_statement",
")"
] | python | valid |
def create(cls, expr, binds):
    """
    Helper for creating new NumExprFactors.

    This is just a wrapper around NumericalExpression.__new__ that always
    forwards `bool` as the dtype, since Filters can only be of boolean
    dtype.

    Parameters
    ----------
    expr : str
        Expression string, forwarded unchanged to ``cls``.
    binds : tuple
        Terms bound to the free variables of ``expr``, forwarded unchanged.

    Returns
    -------
    filter
        A new instance of ``cls`` constructed with boolean dtype.
    """
    # dtype is hard-wired to bool_dtype rather than taken as a parameter:
    # Filters are boolean by definition, so callers must not override it.
    return cls(expr=expr, binds=binds, dtype=bool_dtype)
"def",
"create",
"(",
"cls",
",",
"expr",
",",
"binds",
")",
":",
"return",
"cls",
"(",
"expr",
"=",
"expr",
",",
"binds",
"=",
"binds",
",",
"dtype",
"=",
"bool_dtype",
")"
] | Helper for creating new NumExprFactors.
This is just a wrapper around NumericalExpression.__new__ that always
forwards `bool` as the dtype, since Filters can only be of boolean
dtype. | [
"Helper",
"for",
"creating",
"new",
"NumExprFactors",
"."
] | python | train |
def close(self):
    """Close the cursor.

    Safe to call repeatedly: does nothing when this cursor or its
    parent connection has already been closed.
    """
    already_done = self.closed or self.connection.closed
    if not already_done:
        self._cursor.close()
        self.closed = True
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"closed",
"or",
"self",
".",
"connection",
".",
"closed",
":",
"return",
"self",
".",
"_cursor",
".",
"close",
"(",
")",
"self",
".",
"closed",
"=",
"True"
] | Close the cursor | [
"Close",
"the",
"cursor"
] | python | train |
rossengeorgiev/aprs-python | aprslib/inet.py | https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L265-L310 | def _send_login(self):
"""
Sends login string to server
"""
login_str = "user {0} pass {1} vers aprslib {3}{2}\r\n"
login_str = login_str.format(
self.callsign,
self.passwd,
(" filter " + self.filter) if self.filter != "" else "",
_... | [
"def",
"_send_login",
"(",
"self",
")",
":",
"login_str",
"=",
"\"user {0} pass {1} vers aprslib {3}{2}\\r\\n\"",
"login_str",
"=",
"login_str",
".",
"format",
"(",
"self",
".",
"callsign",
",",
"self",
".",
"passwd",
",",
"(",
"\" filter \"",
"+",
"self",
".",
... | Sends login string to server | [
"Sends",
"login",
"string",
"to",
"server"
] | python | valid |
hvac/hvac | hvac/utils.py | https://github.com/hvac/hvac/blob/cce5b86889193f622c2a72a4a1b7e1c9c8aff1ce/hvac/utils.py#L76-L102 | def generate_property_deprecation_message(to_be_removed_in_version, old_name, new_name, new_attribute,
module_name='Client'):
"""Generate a message to be used when warning about the use of deprecated properties.
:param to_be_removed_in_version: Version of this module t... | [
"def",
"generate_property_deprecation_message",
"(",
"to_be_removed_in_version",
",",
"old_name",
",",
"new_name",
",",
"new_attribute",
",",
"module_name",
"=",
"'Client'",
")",
":",
"message",
"=",
"\"Call to deprecated property '{name}'. This property will be removed in versio... | Generate a message to be used when warning about the use of deprecated properties.
:param to_be_removed_in_version: Version of this module the deprecated property will be removed in.
:type to_be_removed_in_version: str
:param old_name: Deprecated property name.
:type old_name: str
:param new_name: ... | [
"Generate",
"a",
"message",
"to",
"be",
"used",
"when",
"warning",
"about",
"the",
"use",
"of",
"deprecated",
"properties",
"."
] | python | train |
gwastro/pycbc | pycbc/population/rates_functions.py | https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/population/rates_functions.py#L13-L55 | def process_full_data(fname, rhomin, mass1, mass2, lo_mchirp, hi_mchirp):
"""Read the zero-lag and time-lag triggers identified by templates in
a specified range of chirp mass.
Parameters
----------
hdfile:
File that stores all the triggers
rhomin: float
... | [
"def",
"process_full_data",
"(",
"fname",
",",
"rhomin",
",",
"mass1",
",",
"mass2",
",",
"lo_mchirp",
",",
"hi_mchirp",
")",
":",
"with",
"h5py",
".",
"File",
"(",
"fname",
",",
"'r'",
")",
"as",
"bulk",
":",
"id_bkg",
"=",
"bulk",
"[",
"'background_e... | Read the zero-lag and time-lag triggers identified by templates in
a specified range of chirp mass.
Parameters
----------
hdfile:
File that stores all the triggers
rhomin: float
Minimum value of SNR threhold (will need including ifar)
mass1: array
... | [
"Read",
"the",
"zero",
"-",
"lag",
"and",
"time",
"-",
"lag",
"triggers",
"identified",
"by",
"templates",
"in",
"a",
"specified",
"range",
"of",
"chirp",
"mass",
"."
] | python | train |
numenta/nupic | src/nupic/swarming/exp_generator/experiment_generator.py | https://github.com/numenta/nupic/blob/5922fafffdccc8812e72b3324965ad2f7d4bbdad/src/nupic/swarming/exp_generator/experiment_generator.py#L1010-L1019 | def _getPropertyValue(schema, propertyName, options):
"""Checks to see if property is specified in 'options'. If not, reads the
default value from the schema"""
if propertyName not in options:
paramsSchema = schema['properties'][propertyName]
if 'default' in paramsSchema:
options[propertyName] = pa... | [
"def",
"_getPropertyValue",
"(",
"schema",
",",
"propertyName",
",",
"options",
")",
":",
"if",
"propertyName",
"not",
"in",
"options",
":",
"paramsSchema",
"=",
"schema",
"[",
"'properties'",
"]",
"[",
"propertyName",
"]",
"if",
"'default'",
"in",
"paramsSche... | Checks to see if property is specified in 'options'. If not, reads the
default value from the schema | [
"Checks",
"to",
"see",
"if",
"property",
"is",
"specified",
"in",
"options",
".",
"If",
"not",
"reads",
"the",
"default",
"value",
"from",
"the",
"schema"
] | python | valid |
def register_signals(self):
    """Register signals for all indexes.

    Indexes without a truthy ``object_type`` are skipped.
    """
    connectable = (idx for idx in self.indexes if idx.object_type)
    for idx in connectable:
        self._connect_signal(idx)
"def",
"register_signals",
"(",
"self",
")",
":",
"for",
"index",
"in",
"self",
".",
"indexes",
":",
"if",
"index",
".",
"object_type",
":",
"self",
".",
"_connect_signal",
"(",
"index",
")"
] | Register signals for all indexes. | [
"Register",
"signals",
"for",
"all",
"indexes",
"."
] | python | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.