repository_name (string, 7-55 chars) | func_path_in_repository (string, 4-223 chars) | func_name (string, 1-134 chars) | whole_func_string (string, 75-104k chars) | language (1 class) | func_code_string (string, 75-104k chars) | func_code_tokens (sequence, 19-28.4k tokens) | func_documentation_string (string, 1-46.9k chars) | func_documentation_tokens (sequence, 1-1.97k tokens) | split_name (1 class) | func_code_url (string, 87-315 chars) |
---|---|---|---|---|---|---|---|---|---|---|
ClericPy/torequests | torequests/utils.py | Regex.show_all | def show_all(self, as_string=True):
""", python2 will not show flags"""
result = []
for item in self.container:
pattern = str(item[0])[10:] if PY3 else item[0].pattern
instances = item[2] or []
value = (
'%s "%s"' % (item[1].__name__, (item[1].__doc__ or ""))
if callable(item[1])
else str(item[1])
)
value = "%s %s" % (type(item[1]), value)
result.append(" => ".join((pattern, ",".join(instances), value)))
return "\n".join(result) if as_string else result | python | def show_all(self, as_string=True):
""", python2 will not show flags"""
result = []
for item in self.container:
pattern = str(item[0])[10:] if PY3 else item[0].pattern
instances = item[2] or []
value = (
'%s "%s"' % (item[1].__name__, (item[1].__doc__ or ""))
if callable(item[1])
else str(item[1])
)
value = "%s %s" % (type(item[1]), value)
result.append(" => ".join((pattern, ",".join(instances), value)))
return "\n".join(result) if as_string else result | [
"def",
"show_all",
"(",
"self",
",",
"as_string",
"=",
"True",
")",
":",
"result",
"=",
"[",
"]",
"for",
"item",
"in",
"self",
".",
"container",
":",
"pattern",
"=",
"str",
"(",
"item",
"[",
"0",
"]",
")",
"[",
"10",
":",
"]",
"if",
"PY3",
"else",
"item",
"[",
"0",
"]",
".",
"pattern",
"instances",
"=",
"item",
"[",
"2",
"]",
"or",
"[",
"]",
"value",
"=",
"(",
"'%s \"%s\"'",
"%",
"(",
"item",
"[",
"1",
"]",
".",
"__name__",
",",
"(",
"item",
"[",
"1",
"]",
".",
"__doc__",
"or",
"\"\"",
")",
")",
"if",
"callable",
"(",
"item",
"[",
"1",
"]",
")",
"else",
"str",
"(",
"item",
"[",
"1",
"]",
")",
")",
"value",
"=",
"\"%s %s\"",
"%",
"(",
"type",
"(",
"item",
"[",
"1",
"]",
")",
",",
"value",
")",
"result",
".",
"append",
"(",
"\" => \"",
".",
"join",
"(",
"(",
"pattern",
",",
"\",\"",
".",
"join",
"(",
"instances",
")",
",",
"value",
")",
")",
")",
"return",
"\"\\n\"",
".",
"join",
"(",
"result",
")",
"if",
"as_string",
"else",
"result"
] | , python2 will not show flags | [
"python2",
"will",
"not",
"show",
"flags"
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L666-L679 |
ClericPy/torequests | torequests/utils.py | Timer.x | def x(self):
"""Call self.log_func(self) and return expect_string."""
self._log_after_del = False
passed_string = self.string
if self.log_func:
self.log_func(self)
else:
print_info(
"Timer [%(passed)s]: %(name)s, start at %(start)s."
% (
dict(
name=self.name, start=ttime(self.start_at), passed=passed_string
)
)
)
return passed_string | python | def x(self):
"""Call self.log_func(self) and return expect_string."""
self._log_after_del = False
passed_string = self.string
if self.log_func:
self.log_func(self)
else:
print_info(
"Timer [%(passed)s]: %(name)s, start at %(start)s."
% (
dict(
name=self.name, start=ttime(self.start_at), passed=passed_string
)
)
)
return passed_string | [
"def",
"x",
"(",
"self",
")",
":",
"self",
".",
"_log_after_del",
"=",
"False",
"passed_string",
"=",
"self",
".",
"string",
"if",
"self",
".",
"log_func",
":",
"self",
".",
"log_func",
"(",
"self",
")",
"else",
":",
"print_info",
"(",
"\"Timer [%(passed)s]: %(name)s, start at %(start)s.\"",
"%",
"(",
"dict",
"(",
"name",
"=",
"self",
".",
"name",
",",
"start",
"=",
"ttime",
"(",
"self",
".",
"start_at",
")",
",",
"passed",
"=",
"passed_string",
")",
")",
")",
"return",
"passed_string"
] | Call self.log_func(self) and return expect_string. | [
"Call",
"self",
".",
"log_func",
"(",
"self",
")",
"and",
"return",
"expect_string",
"."
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L865-L880 |
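Timer.x() reads self.string (the formatted elapsed time), disables the log-on-delete behaviour, and either calls a user-supplied log_func or prints a default line. A hypothetical usage sketch, assuming Timer's constructor accepts a name keyword (as Timer.watch below does) and that the remaining constructor defaults apply:

```python
import time
from torequests.utils import Timer

timer = Timer(name="download step")
time.sleep(0.2)
elapsed = timer.x()   # prints something like "Timer [0:00:00]: download step, start at 2019-01-01 00:00:00."
print(elapsed)        # the same elapsed-time string is returned
```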
ClericPy/torequests | torequests/utils.py | Timer.tick | def tick(self):
"""Return the time cost string as expect."""
string = self.passed
if self.rounding:
string = round(string)
if self.readable:
string = self.readable(string)
return string | python | def tick(self):
"""Return the time cost string as expect."""
string = self.passed
if self.rounding:
string = round(string)
if self.readable:
string = self.readable(string)
return string | [
"def",
"tick",
"(",
"self",
")",
":",
"string",
"=",
"self",
".",
"passed",
"if",
"self",
".",
"rounding",
":",
"string",
"=",
"round",
"(",
"string",
")",
"if",
"self",
".",
"readable",
":",
"string",
"=",
"self",
".",
"readable",
"(",
"string",
")",
"return",
"string"
] | Return the time cost string as expect. | [
"Return",
"the",
"time",
"cost",
"string",
"as",
"expect",
"."
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L887-L894 |
ClericPy/torequests | torequests/utils.py | Timer.watch | def watch(*timer_args, **timer_kwargs):
"""Decorator for Timer."""
def wrapper(function):
@wraps(function)
def inner(*args, **kwargs):
args1 = ", ".join(map(repr, args)) if args else ""
kwargs1 = ", ".join(
["%s=%s" % (i, repr(kwargs[i])) for i in sorted(kwargs.keys())]
)
arg = ", ".join(filter(None, [args1, kwargs1]))
name = "%s(%s)" % (function.__name__, arg)
_ = Timer(name=name, *timer_args, **timer_kwargs)
result = function(*args, **kwargs)
return result
return inner
return wrapper | python | def watch(*timer_args, **timer_kwargs):
"""Decorator for Timer."""
def wrapper(function):
@wraps(function)
def inner(*args, **kwargs):
args1 = ", ".join(map(repr, args)) if args else ""
kwargs1 = ", ".join(
["%s=%s" % (i, repr(kwargs[i])) for i in sorted(kwargs.keys())]
)
arg = ", ".join(filter(None, [args1, kwargs1]))
name = "%s(%s)" % (function.__name__, arg)
_ = Timer(name=name, *timer_args, **timer_kwargs)
result = function(*args, **kwargs)
return result
return inner
return wrapper | [
"def",
"watch",
"(",
"*",
"timer_args",
",",
"*",
"*",
"timer_kwargs",
")",
":",
"def",
"wrapper",
"(",
"function",
")",
":",
"@",
"wraps",
"(",
"function",
")",
"def",
"inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"args1",
"=",
"\", \"",
".",
"join",
"(",
"map",
"(",
"repr",
",",
"args",
")",
")",
"if",
"args",
"else",
"\"\"",
"kwargs1",
"=",
"\", \"",
".",
"join",
"(",
"[",
"\"%s=%s\"",
"%",
"(",
"i",
",",
"repr",
"(",
"kwargs",
"[",
"i",
"]",
")",
")",
"for",
"i",
"in",
"sorted",
"(",
"kwargs",
".",
"keys",
"(",
")",
")",
"]",
")",
"arg",
"=",
"\", \"",
".",
"join",
"(",
"filter",
"(",
"None",
",",
"[",
"args1",
",",
"kwargs1",
"]",
")",
")",
"name",
"=",
"\"%s(%s)\"",
"%",
"(",
"function",
".",
"__name__",
",",
"arg",
")",
"_",
"=",
"Timer",
"(",
"name",
"=",
"name",
",",
"*",
"timer_args",
",",
"*",
"*",
"timer_kwargs",
")",
"result",
"=",
"function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"result",
"return",
"inner",
"return",
"wrapper"
] | Decorator for Timer. | [
"Decorator",
"for",
"Timer",
"."
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L897-L915 |
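The watch decorator builds a readable call signature and hands it to a Timer, which reports the elapsed time once the call finishes and the Timer is discarded. For readers who only want the pattern, here is a minimal standalone sketch of a timing decorator; it is not the torequests implementation and drops Timer's rounding/readable options:

```python
import time
from functools import wraps

def timed(function):
    """Print how long each call took, tagged with a repr of its arguments."""
    @wraps(function)
    def inner(*args, **kwargs):
        start = time.monotonic()
        try:
            return function(*args, **kwargs)
        finally:
            call = ", ".join(
                [repr(a) for a in args] +
                ["%s=%r" % (k, kwargs[k]) for k in sorted(kwargs)]
            )
            print("%s(%s) took %.3fs" % (function.__name__, call, time.monotonic() - start))
    return inner

@timed
def slow_add(a, b=0):
    time.sleep(0.1)
    return a + b

slow_add(1, b=2)   # prints: slow_add(1, b=2) took 0.1xxs
```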
ClericPy/torequests | torequests/utils.py | ClipboardWatcher.default_callback | def default_callback(self, text):
"""Default clean the \\n in text."""
text = text.replace("\r\n", "\n")
text = "%s\n" % text
flush_print(text, sep="", end="")
return text | python | def default_callback(self, text):
"""Default clean the \\n in text."""
text = text.replace("\r\n", "\n")
text = "%s\n" % text
flush_print(text, sep="", end="")
return text | [
"def",
"default_callback",
"(",
"self",
",",
"text",
")",
":",
"text",
"=",
"text",
".",
"replace",
"(",
"\"\\r\\n\"",
",",
"\"\\n\"",
")",
"text",
"=",
"\"%s\\n\"",
"%",
"text",
"flush_print",
"(",
"text",
",",
"sep",
"=",
"\"\"",
",",
"end",
"=",
"\"\"",
")",
"return",
"text"
] | Default clean the \\n in text. | [
"Default",
"clean",
"the",
"\\\\",
"n",
"in",
"text",
"."
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L964-L969 |
ClericPy/torequests | torequests/utils.py | ClipboardWatcher.watch | def watch(self, limit=None, timeout=None):
"""Block method to watch the clipboard changing."""
start_time = time.time()
count = 0
while not timeout or time.time() - start_time < timeout:
new = self.read()
if new != self.temp:
count += 1
self.callback(new)
if count == limit:
break
self.temp = new
time.sleep(self.interval) | python | def watch(self, limit=None, timeout=None):
"""Block method to watch the clipboard changing."""
start_time = time.time()
count = 0
while not timeout or time.time() - start_time < timeout:
new = self.read()
if new != self.temp:
count += 1
self.callback(new)
if count == limit:
break
self.temp = new
time.sleep(self.interval) | [
"def",
"watch",
"(",
"self",
",",
"limit",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"count",
"=",
"0",
"while",
"not",
"timeout",
"or",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
"<",
"timeout",
":",
"new",
"=",
"self",
".",
"read",
"(",
")",
"if",
"new",
"!=",
"self",
".",
"temp",
":",
"count",
"+=",
"1",
"self",
".",
"callback",
"(",
"new",
")",
"if",
"count",
"==",
"limit",
":",
"break",
"self",
".",
"temp",
"=",
"new",
"time",
".",
"sleep",
"(",
"self",
".",
"interval",
")"
] | Block method to watch the clipboard changing. | [
"Block",
"method",
"to",
"watch",
"the",
"clipboard",
"changing",
"."
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L971-L983 |
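The loop above is a plain polling watcher: read, compare with the previous value, fire the callback on change, and stop after `limit` changes or `timeout` seconds. A generic standalone sketch of the same pattern, where the `read` callable is a hypothetical stand-in for the clipboard read:

```python
import time

def poll_for_changes(read, callback, interval=0.5, limit=None, timeout=None):
    """Call `callback(value)` whenever `read()` returns a new value."""
    start = time.time()
    previous = read()
    count = 0
    while timeout is None or time.time() - start < timeout:
        current = read()
        if current != previous:
            count += 1
            callback(current)
            if count == limit:
                break
        previous = current
        time.sleep(interval)
```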
ClericPy/torequests | torequests/utils.py | ClipboardWatcher.watch_async | def watch_async(self, limit=None, timeout=None):
"""Non-block method to watch the clipboard changing."""
return self.watch(limit=limit, timeout=timeout) | python | def watch_async(self, limit=None, timeout=None):
"""Non-block method to watch the clipboard changing."""
return self.watch(limit=limit, timeout=timeout) | [
"def",
"watch_async",
"(",
"self",
",",
"limit",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"return",
"self",
".",
"watch",
"(",
"limit",
"=",
"limit",
",",
"timeout",
"=",
"timeout",
")"
] | Non-block method to watch the clipboard changing. | [
"Non",
"-",
"block",
"method",
"to",
"watch",
"the",
"clipboard",
"changing",
"."
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L991-L993 |
ClericPy/torequests | torequests/utils.py | RegMatch.find_one | def find_one(cls, pattern, string, flags=0):
"""JS-like match object. Use index number to get groups, if not match or no group, will return ''.
Basic Usage::
>>> from torequests.utils import find_one
>>> string = "abcd"
>>> find_one("a.*", string)
<torequests.utils.RegMatch object at 0x0705F1D0>
>>> find_one("a.*", string)[0]
'abcd'
>>> find_one("a.*", string)[1]
''
>>> find_one("a(.)", string)[0]
'ab'
>>> find_one("a(.)", string)[1]
'b'
>>> find_one("a(.)", string)[2] or "default"
'default'
>>> import re
>>> item = find_one("a(B)(C)", string, flags=re.I | re.S)
>>> item
<torequests.utils.RegMatch object at 0x0705F1D0>
>>> item[0]
'abc'
>>> item[1]
'b'
>>> item[2]
'c'
>>> item[3]
''
>>> # import re
>>> # re.findone = find_one
>>> register_re_findone()
>>> re.findone('a(b)', 'abcd')[1] or 'default'
'b'
"""
item = re.search(pattern, string, flags=flags)
return cls(item) | python | def find_one(cls, pattern, string, flags=0):
"""JS-like match object. Use index number to get groups, if not match or no group, will return ''.
Basic Usage::
>>> from torequests.utils import find_one
>>> string = "abcd"
>>> find_one("a.*", string)
<torequests.utils.RegMatch object at 0x0705F1D0>
>>> find_one("a.*", string)[0]
'abcd'
>>> find_one("a.*", string)[1]
''
>>> find_one("a(.)", string)[0]
'ab'
>>> find_one("a(.)", string)[1]
'b'
>>> find_one("a(.)", string)[2] or "default"
'default'
>>> import re
>>> item = find_one("a(B)(C)", string, flags=re.I | re.S)
>>> item
<torequests.utils.RegMatch object at 0x0705F1D0>
>>> item[0]
'abc'
>>> item[1]
'b'
>>> item[2]
'c'
>>> item[3]
''
>>> # import re
>>> # re.findone = find_one
>>> register_re_findone()
>>> re.findone('a(b)', 'abcd')[1] or 'default'
'b'
"""
item = re.search(pattern, string, flags=flags)
return cls(item) | [
"def",
"find_one",
"(",
"cls",
",",
"pattern",
",",
"string",
",",
"flags",
"=",
"0",
")",
":",
"item",
"=",
"re",
".",
"search",
"(",
"pattern",
",",
"string",
",",
"flags",
"=",
"flags",
")",
"return",
"cls",
"(",
"item",
")"
] | JS-like match object. Use index number to get groups, if not match or no group, will return ''.
Basic Usage::
>>> from torequests.utils import find_one
>>> string = "abcd"
>>> find_one("a.*", string)
<torequests.utils.RegMatch object at 0x0705F1D0>
>>> find_one("a.*", string)[0]
'abcd'
>>> find_one("a.*", string)[1]
''
>>> find_one("a(.)", string)[0]
'ab'
>>> find_one("a(.)", string)[1]
'b'
>>> find_one("a(.)", string)[2] or "default"
'default'
>>> import re
>>> item = find_one("a(B)(C)", string, flags=re.I | re.S)
>>> item
<torequests.utils.RegMatch object at 0x0705F1D0>
>>> item[0]
'abc'
>>> item[1]
'b'
>>> item[2]
'c'
>>> item[3]
''
>>> # import re
>>> # re.findone = find_one
>>> register_re_findone()
>>> re.findone('a(b)', 'abcd')[1] or 'default'
'b' | [
"JS",
"-",
"like",
"match",
"object",
".",
"Use",
"index",
"number",
"to",
"get",
"groups",
"if",
"not",
"match",
"or",
"no",
"group",
"will",
"return",
"."
] | train | https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/utils.py#L1460-L1499 |
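The doctest above shows the behaviour: indexing never raises, and a missing match or group comes back as an empty string so `or 'default'` works. A standalone sketch of the same idea (not the torequests RegMatch class itself):

```python
import re

class SoftMatch:
    """Index a regex match like a JS result: '' for no match or a missing group."""

    def __init__(self, match):
        self._match = match

    def __getitem__(self, index):
        if self._match is None:
            return ""
        try:
            return self._match.group(index) or ""
        except IndexError:
            return ""

def find_one(pattern, string, flags=0):
    return SoftMatch(re.search(pattern, string, flags=flags))

assert find_one("a(B)(C)", "abcd", flags=re.I)[2] == "c"
assert find_one("a(.)", "abcd")[3] == ""                        # missing group
assert (find_one("z(.)", "abcd")[1] or "default") == "default"  # no match
```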
vsoch/helpme | helpme/main/github/utils.py | create_issue | def create_issue(title, body, repo, token):
'''create a Github issue, given a title, body, repo, and token.
Parameters
==========
title: the issue title
body: the issue body
repo: the full name of the repo
token: the user's personal Github token
'''
owner, name = repo.split('/')
url = 'https://api.github.com/repos/%s/%s/issues' % (owner, name)
data = {'title': title, 'body': body }
headers = { "Authorization": "token %s" % token,
"Accept": "application/vnd.github.symmetra-preview+json" }
response = requests.post(url, data=json.dumps(data), headers=headers)
if response.status_code in [201, 202]:
url = response.json()['html_url']
bot.info(url)
return url
elif response.status_code == 404:
bot.error('Cannot create issue. Does your token have scope repo?')
sys.exit(1)
else:
bot.error('Cannot create issue %s' %title)
bot.error(response.content)
sys.exit(1) | python | def create_issue(title, body, repo, token):
'''create a Github issue, given a title, body, repo, and token.
Parameters
==========
title: the issue title
body: the issue body
repo: the full name of the repo
token: the user's personal Github token
'''
owner, name = repo.split('/')
url = 'https://api.github.com/repos/%s/%s/issues' % (owner, name)
data = {'title': title, 'body': body }
headers = { "Authorization": "token %s" % token,
"Accept": "application/vnd.github.symmetra-preview+json" }
response = requests.post(url, data=json.dumps(data), headers=headers)
if response.status_code in [201, 202]:
url = response.json()['html_url']
bot.info(url)
return url
elif response.status_code == 404:
bot.error('Cannot create issue. Does your token have scope repo?')
sys.exit(1)
else:
bot.error('Cannot create issue %s' %title)
bot.error(response.content)
sys.exit(1) | [
"def",
"create_issue",
"(",
"title",
",",
"body",
",",
"repo",
",",
"token",
")",
":",
"owner",
",",
"name",
"=",
"repo",
".",
"split",
"(",
"'/'",
")",
"url",
"=",
"'https://api.github.com/repos/%s/%s/issues'",
"%",
"(",
"owner",
",",
"name",
")",
"data",
"=",
"{",
"'title'",
":",
"title",
",",
"'body'",
":",
"body",
"}",
"headers",
"=",
"{",
"\"Authorization\"",
":",
"\"token %s\"",
"%",
"token",
",",
"\"Accept\"",
":",
"\"application/vnd.github.symmetra-preview+json\"",
"}",
"response",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
",",
"headers",
"=",
"headers",
")",
"if",
"response",
".",
"status_code",
"in",
"[",
"201",
",",
"202",
"]",
":",
"url",
"=",
"response",
".",
"json",
"(",
")",
"[",
"'html_url'",
"]",
"bot",
".",
"info",
"(",
"url",
")",
"return",
"url",
"elif",
"response",
".",
"status_code",
"==",
"404",
":",
"bot",
".",
"error",
"(",
"'Cannot create issue. Does your token have scope repo?'",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"else",
":",
"bot",
".",
"error",
"(",
"'Cannot create issue %s'",
"%",
"title",
")",
"bot",
".",
"error",
"(",
"response",
".",
"content",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | create a Github issue, given a title, body, repo, and token.
Parameters
==========
title: the issue title
body: the issue body
repo: the full name of the repo
token: the user's personal Github token | [
"create",
"a",
"Github",
"issue",
"given",
"a",
"title",
"body",
"repo",
"and",
"token",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/github/utils.py#L26-L59 |
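A hypothetical call, with OWNER/REPO and the GITHUB_TOKEN environment variable as placeholders; as the 404 branch above hints, the token must be a personal access token with the repo scope:

```python
import os

issue_url = create_issue(
    title="Bug: crash on startup",
    body="Steps to reproduce:\n1. run the command\n2. observe the traceback",
    repo="OWNER/REPO",
    token=os.environ["GITHUB_TOKEN"],
)
print(issue_url)   # e.g. https://github.com/OWNER/REPO/issues/123
```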
PlaidWeb/Pushl | pushl/webmentions.py | get_target | async def get_target(config, url):
""" Given a URL, get the webmention endpoint """
previous = config.cache.get(
'target', url, schema_version=SCHEMA_VERSION) if config.cache else None
headers = previous.caching if previous else None
request = await utils.retry_get(config, url, headers=headers)
if not request or not request.success:
return previous
if request.cached:
return previous
current = Target(request)
if config.cache:
config.cache.set('target', url, current)
return current | python | async def get_target(config, url):
""" Given a URL, get the webmention endpoint """
previous = config.cache.get(
'target', url, schema_version=SCHEMA_VERSION) if config.cache else None
headers = previous.caching if previous else None
request = await utils.retry_get(config, url, headers=headers)
if not request or not request.success:
return previous
if request.cached:
return previous
current = Target(request)
if config.cache:
config.cache.set('target', url, current)
return current | [
"async",
"def",
"get_target",
"(",
"config",
",",
"url",
")",
":",
"previous",
"=",
"config",
".",
"cache",
".",
"get",
"(",
"'target'",
",",
"url",
",",
"schema_version",
"=",
"SCHEMA_VERSION",
")",
"if",
"config",
".",
"cache",
"else",
"None",
"headers",
"=",
"previous",
".",
"caching",
"if",
"previous",
"else",
"None",
"request",
"=",
"await",
"utils",
".",
"retry_get",
"(",
"config",
",",
"url",
",",
"headers",
"=",
"headers",
")",
"if",
"not",
"request",
"or",
"not",
"request",
".",
"success",
":",
"return",
"previous",
"if",
"request",
".",
"cached",
":",
"return",
"previous",
"current",
"=",
"Target",
"(",
"request",
")",
"if",
"config",
".",
"cache",
":",
"config",
".",
"cache",
".",
"set",
"(",
"'target'",
",",
"url",
",",
"current",
")",
"return",
"current"
] | Given a URL, get the webmention endpoint | [
"Given",
"a",
"URL",
"get",
"the",
"webmention",
"endpoint"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/webmentions.py#L160-L180 |
PlaidWeb/Pushl | pushl/webmentions.py | Target.send | async def send(self, config, entry):
""" Send a webmention to this target from the specified entry """
if self.endpoint:
LOGGER.debug("%s -> %s", entry.url, self.url)
try:
await self.endpoint.send(config, entry.url, self.url)
except Exception as err: # pylint:disable=broad-except
LOGGER.warning("Ping %s: got %s: %s",
self.url, err.__class__.__name__, err) | python | async def send(self, config, entry):
""" Send a webmention to this target from the specified entry """
if self.endpoint:
LOGGER.debug("%s -> %s", entry.url, self.url)
try:
await self.endpoint.send(config, entry.url, self.url)
except Exception as err: # pylint:disable=broad-except
LOGGER.warning("Ping %s: got %s: %s",
self.url, err.__class__.__name__, err) | [
"async",
"def",
"send",
"(",
"self",
",",
"config",
",",
"entry",
")",
":",
"if",
"self",
".",
"endpoint",
":",
"LOGGER",
".",
"debug",
"(",
"\"%s -> %s\"",
",",
"entry",
".",
"url",
",",
"self",
".",
"url",
")",
"try",
":",
"await",
"self",
".",
"endpoint",
".",
"send",
"(",
"config",
",",
"entry",
".",
"url",
",",
"self",
".",
"url",
")",
"except",
"Exception",
"as",
"err",
":",
"# pylint:disable=broad-except",
"LOGGER",
".",
"warning",
"(",
"\"Ping %s: got %s: %s\"",
",",
"self",
".",
"url",
",",
"err",
".",
"__class__",
".",
"__name__",
",",
"err",
")"
] | Send a webmention to this target from the specified entry | [
"Send",
"a",
"webmention",
"to",
"this",
"target",
"from",
"the",
"specified",
"entry"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/webmentions.py#L148-L156 |
vsoch/helpme | helpme/main/__init__.py | get_helper | def get_helper(name=None, quiet=True, **kwargs):
'''
get the correct helper depending on the environment variable
HELPME_CLIENT
quiet: if True, suppress most output about the client (e.g. speak)
'''
# Second priority, from environment
from helpme.defaults import HELPME_CLIENT
# First priority, from command line
if name is not None:
HELPME_CLIENT = name
# If no obvious credential provided, we can use HELPME_CLIENT
if HELPME_CLIENT == 'github': from .github import Helper;
elif HELPME_CLIENT == 'uservoice': from .uservoice import Helper
elif HELPME_CLIENT == 'discourse': from .discourse import Helper
else: from .github import Helper
Helper.name = HELPME_CLIENT
Helper.quiet = quiet
# Initialize the database
return Helper() | python | def get_helper(name=None, quiet=True, **kwargs):
'''
get the correct helper depending on the environment variable
HELPME_CLIENT
quiet: if True, suppress most output about the client (e.g. speak)
'''
# Second priority, from environment
from helpme.defaults import HELPME_CLIENT
# First priority, from command line
if name is not None:
HELPME_CLIENT = name
# If no obvious credential provided, we can use HELPME_CLIENT
if HELPME_CLIENT == 'github': from .github import Helper;
elif HELPME_CLIENT == 'uservoice': from .uservoice import Helper
elif HELPME_CLIENT == 'discourse': from .discourse import Helper
else: from .github import Helper
Helper.name = HELPME_CLIENT
Helper.quiet = quiet
# Initialize the database
return Helper() | [
"def",
"get_helper",
"(",
"name",
"=",
"None",
",",
"quiet",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"# Second priority, from environment",
"from",
"helpme",
".",
"defaults",
"import",
"HELPME_CLIENT",
"# First priority, from command line",
"if",
"name",
"is",
"not",
"None",
":",
"HELPME_CLIENT",
"=",
"name",
"# If no obvious credential provided, we can use HELPME_CLIENT",
"if",
"HELPME_CLIENT",
"==",
"'github'",
":",
"from",
".",
"github",
"import",
"Helper",
"elif",
"HELPME_CLIENT",
"==",
"'uservoice'",
":",
"from",
".",
"uservoice",
"import",
"Helper",
"elif",
"HELPME_CLIENT",
"==",
"'discourse'",
":",
"from",
".",
"discourse",
"import",
"Helper",
"else",
":",
"from",
".",
"github",
"import",
"Helper",
"Helper",
".",
"name",
"=",
"HELPME_CLIENT",
"Helper",
".",
"quiet",
"=",
"quiet",
"# Initialize the database",
"return",
"Helper",
"(",
")"
] | get the correct helper depending on the environment variable
HELPME_CLIENT
quiet: if True, suppress most output about the client (e.g. speak) | [
"get",
"the",
"correct",
"helper",
"depending",
"on",
"the",
"environment",
"variable",
"HELPME_CLIENT"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/__init__.py#L23-L48 |
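A hypothetical invocation; the helper name can also come from the HELPME_CLIENT environment variable (read via helpme.defaults), and the concrete Helper may need client credentials configured before it is useful:

```python
from helpme.main import get_helper

helper = get_helper(name="github", quiet=False)
print(helper.name)   # 'github'
```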
iotile/baBLE | interfaces/python/bable_interface/scripts/mock_bable_interface.py | mock_bable | def mock_bable(monkeypatch):
""" Mock the BaBLEInterface class with some controllers inside. """
mocked_bable = MockBaBLE()
mocked_bable.set_controllers([
Controller(0, '11:22:33:44:55:66', '#0'),
Controller(1, '22:33:44:55:66:11', '#1', settings={'powered': True, 'low_energy': True}),
Controller(2, '33:44:55:66:11:22', '#2', settings={'powered': True})
])
monkeypatch.setattr(bable_interface, 'BaBLEInterface', lambda: mocked_bable)
return mocked_bable | python | def mock_bable(monkeypatch):
""" Mock the BaBLEInterface class with some controllers inside. """
mocked_bable = MockBaBLE()
mocked_bable.set_controllers([
Controller(0, '11:22:33:44:55:66', '#0'),
Controller(1, '22:33:44:55:66:11', '#1', settings={'powered': True, 'low_energy': True}),
Controller(2, '33:44:55:66:11:22', '#2', settings={'powered': True})
])
monkeypatch.setattr(bable_interface, 'BaBLEInterface', lambda: mocked_bable)
return mocked_bable | [
"def",
"mock_bable",
"(",
"monkeypatch",
")",
":",
"mocked_bable",
"=",
"MockBaBLE",
"(",
")",
"mocked_bable",
".",
"set_controllers",
"(",
"[",
"Controller",
"(",
"0",
",",
"'11:22:33:44:55:66'",
",",
"'#0'",
")",
",",
"Controller",
"(",
"1",
",",
"'22:33:44:55:66:11'",
",",
"'#1'",
",",
"settings",
"=",
"{",
"'powered'",
":",
"True",
",",
"'low_energy'",
":",
"True",
"}",
")",
",",
"Controller",
"(",
"2",
",",
"'33:44:55:66:11:22'",
",",
"'#2'",
",",
"settings",
"=",
"{",
"'powered'",
":",
"True",
"}",
")",
"]",
")",
"monkeypatch",
".",
"setattr",
"(",
"bable_interface",
",",
"'BaBLEInterface'",
",",
"lambda",
":",
"mocked_bable",
")",
"return",
"mocked_bable"
] | Mock the BaBLEInterface class with some controllers inside. | [
"Mock",
"the",
"BaBLEInterface",
"class",
"with",
"some",
"controllers",
"inside",
"."
] | train | https://github.com/iotile/baBLE/blob/faedca2c70b7fe91ea8ae0c3d8aff6bf843bd9db/interfaces/python/bable_interface/scripts/mock_bable_interface.py#L542-L553 |
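Assuming mock_bable is registered as a pytest fixture (for example, exposed through a conftest.py), a test can request it, and any code that instantiates bable_interface.BaBLEInterface() will receive the mocked object holding the three fake controllers:

```python
import bable_interface

def test_uses_mocked_interface(mock_bable):
    bable = bable_interface.BaBLEInterface()   # patched by the fixture
    assert bable is mock_bable
```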
SetBased/py-stratum | pystratum/command/PyStratumCommand.py | PyStratumCommand.handle | def handle(self):
"""
Executes the actual Stratum program.
"""
self.output = PyStratumStyle(self.input, self.output)
command = self.get_application().find('constants')
ret = command.execute(self.input, self.output)
if ret:
return ret
command = self.get_application().find('loader')
ret = command.execute(self.input, self.output)
if ret:
return ret
command = self.get_application().find('wrapper')
ret = command.execute(self.input, self.output)
self.output.writeln('')
return ret | python | def handle(self):
"""
Executes the actual Stratum program.
"""
self.output = PyStratumStyle(self.input, self.output)
command = self.get_application().find('constants')
ret = command.execute(self.input, self.output)
if ret:
return ret
command = self.get_application().find('loader')
ret = command.execute(self.input, self.output)
if ret:
return ret
command = self.get_application().find('wrapper')
ret = command.execute(self.input, self.output)
self.output.writeln('')
return ret | [
"def",
"handle",
"(",
"self",
")",
":",
"self",
".",
"output",
"=",
"PyStratumStyle",
"(",
"self",
".",
"input",
",",
"self",
".",
"output",
")",
"command",
"=",
"self",
".",
"get_application",
"(",
")",
".",
"find",
"(",
"'constants'",
")",
"ret",
"=",
"command",
".",
"execute",
"(",
"self",
".",
"input",
",",
"self",
".",
"output",
")",
"if",
"ret",
":",
"return",
"ret",
"command",
"=",
"self",
".",
"get_application",
"(",
")",
".",
"find",
"(",
"'loader'",
")",
"ret",
"=",
"command",
".",
"execute",
"(",
"self",
".",
"input",
",",
"self",
".",
"output",
")",
"if",
"ret",
":",
"return",
"ret",
"command",
"=",
"self",
".",
"get_application",
"(",
")",
".",
"find",
"(",
"'wrapper'",
")",
"ret",
"=",
"command",
".",
"execute",
"(",
"self",
".",
"input",
",",
"self",
".",
"output",
")",
"self",
".",
"output",
".",
"writeln",
"(",
"''",
")",
"return",
"ret"
] | Executes the actual Stratum program. | [
"Executes",
"the",
"actual",
"Stratum",
"program",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/command/PyStratumCommand.py#L26-L47 |
nephila/djangocms-helper | djangocms_helper/utils.py | load_from_file | def load_from_file(module_path):
"""
Load a python module from its absolute filesystem path
Borrowed from django-cms
"""
from imp import load_module, PY_SOURCE
imported = None
if module_path:
with open(module_path, 'r') as openfile:
imported = load_module('mod', openfile, module_path, ('imported', 'r', PY_SOURCE))
return imported | python | def load_from_file(module_path):
"""
Load a python module from its absolute filesystem path
Borrowed from django-cms
"""
from imp import load_module, PY_SOURCE
imported = None
if module_path:
with open(module_path, 'r') as openfile:
imported = load_module('mod', openfile, module_path, ('imported', 'r', PY_SOURCE))
return imported | [
"def",
"load_from_file",
"(",
"module_path",
")",
":",
"from",
"imp",
"import",
"load_module",
",",
"PY_SOURCE",
"imported",
"=",
"None",
"if",
"module_path",
":",
"with",
"open",
"(",
"module_path",
",",
"'r'",
")",
"as",
"openfile",
":",
"imported",
"=",
"load_module",
"(",
"'mod'",
",",
"openfile",
",",
"module_path",
",",
"(",
"'imported'",
",",
"'r'",
",",
"PY_SOURCE",
")",
")",
"return",
"imported"
] | Load a python module from its absolute filesystem path
Borrowed from django-cms | [
"Load",
"a",
"python",
"module",
"from",
"its",
"absolute",
"filesystem",
"path"
] | train | https://github.com/nephila/djangocms-helper/blob/3fe53aee7b06922112c5e4445b74afeb86f6d836/djangocms_helper/utils.py#L58-L70 |
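The imp module used here is deprecated (and removed in Python 3.12). A sketch of an equivalent helper built on importlib, offered as an alternative, not djangocms-helper's actual code:

```python
import importlib.util

def load_from_file(module_path):
    """Load a python module from its absolute filesystem path."""
    imported = None
    if module_path:
        spec = importlib.util.spec_from_file_location("mod", module_path)
        imported = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(imported)
    return imported
```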
nephila/djangocms-helper | djangocms_helper/utils.py | work_in | def work_in(dirname=None):
"""
Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
Grabbed from cookiecutter, thanks audreyr!
"""
curdir = os.getcwd()
try:
if dirname is not None:
if dirname not in sys.path:
sys.path.insert(0, dirname)
os.chdir(dirname)
yield
finally:
os.chdir(curdir) | python | def work_in(dirname=None):
"""
Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
Grabbed from cookiecutter, thanks audreyr!
"""
curdir = os.getcwd()
try:
if dirname is not None:
if dirname not in sys.path:
sys.path.insert(0, dirname)
os.chdir(dirname)
yield
finally:
os.chdir(curdir) | [
"def",
"work_in",
"(",
"dirname",
"=",
"None",
")",
":",
"curdir",
"=",
"os",
".",
"getcwd",
"(",
")",
"try",
":",
"if",
"dirname",
"is",
"not",
"None",
":",
"if",
"dirname",
"not",
"in",
"sys",
".",
"path",
":",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"dirname",
")",
"os",
".",
"chdir",
"(",
"dirname",
")",
"yield",
"finally",
":",
"os",
".",
"chdir",
"(",
"curdir",
")"
] | Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
Grabbed from cookiecutter, thanks audreyr! | [
"Context",
"manager",
"version",
"of",
"os",
".",
"chdir",
".",
"When",
"exited",
"returns",
"to",
"the",
"working",
"directory",
"prior",
"to",
"entering",
"."
] | train | https://github.com/nephila/djangocms-helper/blob/3fe53aee7b06922112c5e4445b74afeb86f6d836/djangocms_helper/utils.py#L74-L89 |
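Hypothetical usage, assuming work_in is wrapped with contextlib.contextmanager in the original module, as its docstring and the yield statement imply:

```python
import os

with work_in("/tmp"):       # '/tmp' is a placeholder directory
    print(os.getcwd())      # /tmp, which is also inserted at sys.path[0]
print(os.getcwd())          # back to the previous working directory
```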
nephila/djangocms-helper | djangocms_helper/utils.py | _reset_django | def _reset_django(settings):
"""
Hackish way to reset the django instance settings and AppConfig
:param settings: django settings module
"""
if settings._wrapped != empty:
clear_url_caches()
from django.apps import apps
apps.clear_cache()
settings._wrapped = empty
clear_url_caches() | python | def _reset_django(settings):
"""
Hackish way to reset the django instance settings and AppConfig
:param settings: django settings module
"""
if settings._wrapped != empty:
clear_url_caches()
from django.apps import apps
apps.clear_cache()
settings._wrapped = empty
clear_url_caches() | [
"def",
"_reset_django",
"(",
"settings",
")",
":",
"if",
"settings",
".",
"_wrapped",
"!=",
"empty",
":",
"clear_url_caches",
"(",
")",
"from",
"django",
".",
"apps",
"import",
"apps",
"apps",
".",
"clear_cache",
"(",
")",
"settings",
".",
"_wrapped",
"=",
"empty",
"clear_url_caches",
"(",
")"
] | Hackish way to reset the django instance settings and AppConfig
:param settings: django settings module | [
"Hackish",
"way",
"to",
"reset",
"the",
"django",
"instance",
"settings",
"and",
"AppConfig",
":",
"param",
"settings",
":",
"django",
"settings",
"module"
] | train | https://github.com/nephila/djangocms-helper/blob/3fe53aee7b06922112c5e4445b74afeb86f6d836/djangocms_helper/utils.py#L130-L140 |
nephila/djangocms-helper | djangocms_helper/utils.py | _make_settings | def _make_settings(args, application, settings, STATIC_ROOT, MEDIA_ROOT):
"""
Setup the Django settings
:param args: docopt arguments
:param default_settings: default Django settings
:param settings: Django settings module
:param STATIC_ROOT: static root directory
:param MEDIA_ROOT: media root directory
:return:
"""
import dj_database_url
from .default_settings import get_default_settings, get_boilerplates_settings
try:
extra_settings_file = args.get('--extra-settings')
if not extra_settings_file:
extra_settings_file = HELPER_FILE
if extra_settings_file[-3:] != '.py':
filename, __ = os.path.splitext(extra_settings_file)
extra_settings_file = '{0}.py'.format(filename)
extra_settings = load_from_file(extra_settings_file).HELPER_SETTINGS
except (IOError, AttributeError):
extra_settings = None
default_name = ':memory:' if args['test'] else 'local.sqlite'
db_url = os.environ.get('DATABASE_URL', 'sqlite://localhost/%s' % default_name)
configs = {
'DATABASES': {'default': dj_database_url.parse(db_url)},
'STATIC_ROOT': STATIC_ROOT,
'MEDIA_ROOT': MEDIA_ROOT,
'USE_TZ': True,
'USE_CMS': args['--cms'],
'BASE_APPLICATION': application
}
if configs['USE_CMS'] or getattr(extra_settings, 'USE_CMS', False):
CMS_APPS = [
'cms',
'menus',
'sekizai',
]
CMS_APP_STYLE = [
'djangocms_admin_style'
]
CMS_PROCESSORS = [
'cms.context_processors.cms_settings',
'sekizai.context_processors.sekizai',
]
CMS_MIDDLEWARE = [
'cms.middleware.language.LanguageCookieMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
]
if not CMS_31 and args['server']:
CMS_MIDDLEWARE.append('cms.middleware.utils.ApphookReloadMiddleware')
URLCONF = 'djangocms_helper.urls'
else:
CMS_APPS = []
CMS_APP_STYLE = []
CMS_MIDDLEWARE = []
CMS_PROCESSORS = []
URLCONF = 'djangocms_helper.urls'
default_settings = get_default_settings(
CMS_APPS, CMS_PROCESSORS, CMS_MIDDLEWARE, CMS_APP_STYLE, URLCONF, application
)
migrate = args.get('--migrate') or not args.get('--no-migrate')
default_settings.update(configs)
if extra_settings:
apps = extra_settings.pop('INSTALLED_APPS', [])
apps_top = extra_settings.pop('TOP_INSTALLED_APPS', [])
template_processors = extra_settings.pop('TEMPLATE_CONTEXT_PROCESSORS', [])
template_loaders = extra_settings.pop('TEMPLATE_LOADERS', [])
template_dirs = extra_settings.pop('TEMPLATE_DIRS', [])
middleware = extra_settings.pop('MIDDLEWARE_CLASSES', [])
middleware_top = extra_settings.pop('TOP_MIDDLEWARE_CLASSES', [])
default_settings.update(extra_settings)
for app in apps_top:
default_settings['INSTALLED_APPS'].insert(0, app)
default_settings['INSTALLED_APPS'].extend(apps)
default_settings['TEMPLATE_CONTEXT_PROCESSORS'].extend(template_processors)
default_settings['TEMPLATE_LOADERS'].extend(template_loaders)
if 'TEMPLATE_DIRS' not in default_settings:
default_settings['TEMPLATE_DIRS'] = []
default_settings['TEMPLATE_DIRS'].extend(template_dirs)
default_settings['MIDDLEWARE_CLASSES'].extend(middleware)
for middleware in middleware_top:
default_settings['MIDDLEWARE_CLASSES'].insert(0, middleware)
if 'cms' in default_settings['INSTALLED_APPS']:
if 'treebeard' not in default_settings['INSTALLED_APPS']:
default_settings['INSTALLED_APPS'].append('treebeard')
if ('filer' in default_settings['INSTALLED_APPS'] and
'mptt' not in default_settings['INSTALLED_APPS']):
default_settings['INSTALLED_APPS'].append('mptt')
if ('filer' in default_settings['INSTALLED_APPS'] and
'easy_thumbnails' not in default_settings['INSTALLED_APPS']):
default_settings['INSTALLED_APPS'].append('easy_thumbnails')
if args['--boilerplate']:
boilerplate_settings = get_boilerplates_settings()
# Do not override helper settings with defaults.
if 'ALDRYN_BOILERPLATE_NAME' in default_settings.keys():
del boilerplate_settings['ALDRYN_BOILERPLATE_NAME']
default_settings = extend_settings(
default_settings, boilerplate_settings, 'STATICFILES_FINDERS',
'django.contrib.staticfiles.finders.AppDirectoriesFinder'
)
del boilerplate_settings['STATICFILES_FINDERS']
default_settings = extend_settings(
default_settings, boilerplate_settings, 'TEMPLATE_LOADERS',
'django.template.loaders.app_directories.Loader'
)
del boilerplate_settings['TEMPLATE_LOADERS']
for setting in ('INSTALLED_APPS', 'TEMPLATE_CONTEXT_PROCESSORS'):
default_settings[setting].extend(boilerplate_settings[setting])
del boilerplate_settings[setting]
default_settings.update(boilerplate_settings)
default_settings['TEMPLATES'] = [
{'NAME': 'django',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'context_processors': [
template_processor.replace('django.core', 'django.template')
for template_processor in default_settings.pop('TEMPLATE_CONTEXT_PROCESSORS')
],
'loaders': default_settings.pop('TEMPLATE_LOADERS')
}}
]
if 'TEMPLATE_DIRS' in default_settings:
default_settings['TEMPLATES'][0]['DIRS'] = default_settings.pop('TEMPLATE_DIRS')
# Support for custom user models
if 'AUTH_USER_MODEL' in os.environ:
custom_user_app = os.environ['AUTH_USER_MODEL'].rpartition('.')[0]
custom_user_model = '.'.join(os.environ['AUTH_USER_MODEL'].split('.')[-2:])
default_settings['INSTALLED_APPS'].insert(
default_settings['INSTALLED_APPS'].index('cms'),
custom_user_app
)
default_settings['AUTH_USER_MODEL'] = custom_user_model
if args['test']:
default_settings['SESSION_ENGINE'] = 'django.contrib.sessions.backends.cache'
if application not in default_settings['INSTALLED_APPS']:
default_settings['INSTALLED_APPS'].append(application)
if not migrate:
default_settings['MIGRATION_MODULES'] = DisableMigrations()
if 'MIDDLEWARE' not in default_settings:
default_settings['MIDDLEWARE'] = default_settings['MIDDLEWARE_CLASSES']
del default_settings['MIDDLEWARE_CLASSES']
_reset_django(settings)
settings.configure(**default_settings)
django.setup()
reload_urls(settings, cms_apps=False)
return settings | python | def _make_settings(args, application, settings, STATIC_ROOT, MEDIA_ROOT):
"""
Setup the Django settings
:param args: docopt arguments
:param default_settings: default Django settings
:param settings: Django settings module
:param STATIC_ROOT: static root directory
:param MEDIA_ROOT: media root directory
:return:
"""
import dj_database_url
from .default_settings import get_default_settings, get_boilerplates_settings
try:
extra_settings_file = args.get('--extra-settings')
if not extra_settings_file:
extra_settings_file = HELPER_FILE
if extra_settings_file[-3:] != '.py':
filename, __ = os.path.splitext(extra_settings_file)
extra_settings_file = '{0}.py'.format(filename)
extra_settings = load_from_file(extra_settings_file).HELPER_SETTINGS
except (IOError, AttributeError):
extra_settings = None
default_name = ':memory:' if args['test'] else 'local.sqlite'
db_url = os.environ.get('DATABASE_URL', 'sqlite://localhost/%s' % default_name)
configs = {
'DATABASES': {'default': dj_database_url.parse(db_url)},
'STATIC_ROOT': STATIC_ROOT,
'MEDIA_ROOT': MEDIA_ROOT,
'USE_TZ': True,
'USE_CMS': args['--cms'],
'BASE_APPLICATION': application
}
if configs['USE_CMS'] or getattr(extra_settings, 'USE_CMS', False):
CMS_APPS = [
'cms',
'menus',
'sekizai',
]
CMS_APP_STYLE = [
'djangocms_admin_style'
]
CMS_PROCESSORS = [
'cms.context_processors.cms_settings',
'sekizai.context_processors.sekizai',
]
CMS_MIDDLEWARE = [
'cms.middleware.language.LanguageCookieMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
]
if not CMS_31 and args['server']:
CMS_MIDDLEWARE.append('cms.middleware.utils.ApphookReloadMiddleware')
URLCONF = 'djangocms_helper.urls'
else:
CMS_APPS = []
CMS_APP_STYLE = []
CMS_MIDDLEWARE = []
CMS_PROCESSORS = []
URLCONF = 'djangocms_helper.urls'
default_settings = get_default_settings(
CMS_APPS, CMS_PROCESSORS, CMS_MIDDLEWARE, CMS_APP_STYLE, URLCONF, application
)
migrate = args.get('--migrate') or not args.get('--no-migrate')
default_settings.update(configs)
if extra_settings:
apps = extra_settings.pop('INSTALLED_APPS', [])
apps_top = extra_settings.pop('TOP_INSTALLED_APPS', [])
template_processors = extra_settings.pop('TEMPLATE_CONTEXT_PROCESSORS', [])
template_loaders = extra_settings.pop('TEMPLATE_LOADERS', [])
template_dirs = extra_settings.pop('TEMPLATE_DIRS', [])
middleware = extra_settings.pop('MIDDLEWARE_CLASSES', [])
middleware_top = extra_settings.pop('TOP_MIDDLEWARE_CLASSES', [])
default_settings.update(extra_settings)
for app in apps_top:
default_settings['INSTALLED_APPS'].insert(0, app)
default_settings['INSTALLED_APPS'].extend(apps)
default_settings['TEMPLATE_CONTEXT_PROCESSORS'].extend(template_processors)
default_settings['TEMPLATE_LOADERS'].extend(template_loaders)
if 'TEMPLATE_DIRS' not in default_settings:
default_settings['TEMPLATE_DIRS'] = []
default_settings['TEMPLATE_DIRS'].extend(template_dirs)
default_settings['MIDDLEWARE_CLASSES'].extend(middleware)
for middleware in middleware_top:
default_settings['MIDDLEWARE_CLASSES'].insert(0, middleware)
if 'cms' in default_settings['INSTALLED_APPS']:
if 'treebeard' not in default_settings['INSTALLED_APPS']:
default_settings['INSTALLED_APPS'].append('treebeard')
if ('filer' in default_settings['INSTALLED_APPS'] and
'mptt' not in default_settings['INSTALLED_APPS']):
default_settings['INSTALLED_APPS'].append('mptt')
if ('filer' in default_settings['INSTALLED_APPS'] and
'easy_thumbnails' not in default_settings['INSTALLED_APPS']):
default_settings['INSTALLED_APPS'].append('easy_thumbnails')
if args['--boilerplate']:
boilerplate_settings = get_boilerplates_settings()
# Do not override helper settings with defaults.
if 'ALDRYN_BOILERPLATE_NAME' in default_settings.keys():
del boilerplate_settings['ALDRYN_BOILERPLATE_NAME']
default_settings = extend_settings(
default_settings, boilerplate_settings, 'STATICFILES_FINDERS',
'django.contrib.staticfiles.finders.AppDirectoriesFinder'
)
del boilerplate_settings['STATICFILES_FINDERS']
default_settings = extend_settings(
default_settings, boilerplate_settings, 'TEMPLATE_LOADERS',
'django.template.loaders.app_directories.Loader'
)
del boilerplate_settings['TEMPLATE_LOADERS']
for setting in ('INSTALLED_APPS', 'TEMPLATE_CONTEXT_PROCESSORS'):
default_settings[setting].extend(boilerplate_settings[setting])
del boilerplate_settings[setting]
default_settings.update(boilerplate_settings)
default_settings['TEMPLATES'] = [
{'NAME': 'django',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'context_processors': [
template_processor.replace('django.core', 'django.template')
for template_processor in default_settings.pop('TEMPLATE_CONTEXT_PROCESSORS')
],
'loaders': default_settings.pop('TEMPLATE_LOADERS')
}}
]
if 'TEMPLATE_DIRS' in default_settings:
default_settings['TEMPLATES'][0]['DIRS'] = default_settings.pop('TEMPLATE_DIRS')
# Support for custom user models
if 'AUTH_USER_MODEL' in os.environ:
custom_user_app = os.environ['AUTH_USER_MODEL'].rpartition('.')[0]
custom_user_model = '.'.join(os.environ['AUTH_USER_MODEL'].split('.')[-2:])
default_settings['INSTALLED_APPS'].insert(
default_settings['INSTALLED_APPS'].index('cms'),
custom_user_app
)
default_settings['AUTH_USER_MODEL'] = custom_user_model
if args['test']:
default_settings['SESSION_ENGINE'] = 'django.contrib.sessions.backends.cache'
if application not in default_settings['INSTALLED_APPS']:
default_settings['INSTALLED_APPS'].append(application)
if not migrate:
default_settings['MIGRATION_MODULES'] = DisableMigrations()
if 'MIDDLEWARE' not in default_settings:
default_settings['MIDDLEWARE'] = default_settings['MIDDLEWARE_CLASSES']
del default_settings['MIDDLEWARE_CLASSES']
_reset_django(settings)
settings.configure(**default_settings)
django.setup()
reload_urls(settings, cms_apps=False)
return settings | [
"def",
"_make_settings",
"(",
"args",
",",
"application",
",",
"settings",
",",
"STATIC_ROOT",
",",
"MEDIA_ROOT",
")",
":",
"import",
"dj_database_url",
"from",
".",
"default_settings",
"import",
"get_default_settings",
",",
"get_boilerplates_settings",
"try",
":",
"extra_settings_file",
"=",
"args",
".",
"get",
"(",
"'--extra-settings'",
")",
"if",
"not",
"extra_settings_file",
":",
"extra_settings_file",
"=",
"HELPER_FILE",
"if",
"extra_settings_file",
"[",
"-",
"3",
":",
"]",
"!=",
"'.py'",
":",
"filename",
",",
"__",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"extra_settings_file",
")",
"extra_settings_file",
"=",
"'{0}.py'",
".",
"format",
"(",
"filename",
")",
"extra_settings",
"=",
"load_from_file",
"(",
"extra_settings_file",
")",
".",
"HELPER_SETTINGS",
"except",
"(",
"IOError",
",",
"AttributeError",
")",
":",
"extra_settings",
"=",
"None",
"default_name",
"=",
"':memory:'",
"if",
"args",
"[",
"'test'",
"]",
"else",
"'local.sqlite'",
"db_url",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'DATABASE_URL'",
",",
"'sqlite://localhost/%s'",
"%",
"default_name",
")",
"configs",
"=",
"{",
"'DATABASES'",
":",
"{",
"'default'",
":",
"dj_database_url",
".",
"parse",
"(",
"db_url",
")",
"}",
",",
"'STATIC_ROOT'",
":",
"STATIC_ROOT",
",",
"'MEDIA_ROOT'",
":",
"MEDIA_ROOT",
",",
"'USE_TZ'",
":",
"True",
",",
"'USE_CMS'",
":",
"args",
"[",
"'--cms'",
"]",
",",
"'BASE_APPLICATION'",
":",
"application",
"}",
"if",
"configs",
"[",
"'USE_CMS'",
"]",
"or",
"getattr",
"(",
"extra_settings",
",",
"'USE_CMS'",
",",
"False",
")",
":",
"CMS_APPS",
"=",
"[",
"'cms'",
",",
"'menus'",
",",
"'sekizai'",
",",
"]",
"CMS_APP_STYLE",
"=",
"[",
"'djangocms_admin_style'",
"]",
"CMS_PROCESSORS",
"=",
"[",
"'cms.context_processors.cms_settings'",
",",
"'sekizai.context_processors.sekizai'",
",",
"]",
"CMS_MIDDLEWARE",
"=",
"[",
"'cms.middleware.language.LanguageCookieMiddleware'",
",",
"'cms.middleware.user.CurrentUserMiddleware'",
",",
"'cms.middleware.page.CurrentPageMiddleware'",
",",
"'cms.middleware.toolbar.ToolbarMiddleware'",
",",
"]",
"if",
"not",
"CMS_31",
"and",
"args",
"[",
"'server'",
"]",
":",
"CMS_MIDDLEWARE",
".",
"append",
"(",
"'cms.middleware.utils.ApphookReloadMiddleware'",
")",
"URLCONF",
"=",
"'djangocms_helper.urls'",
"else",
":",
"CMS_APPS",
"=",
"[",
"]",
"CMS_APP_STYLE",
"=",
"[",
"]",
"CMS_MIDDLEWARE",
"=",
"[",
"]",
"CMS_PROCESSORS",
"=",
"[",
"]",
"URLCONF",
"=",
"'djangocms_helper.urls'",
"default_settings",
"=",
"get_default_settings",
"(",
"CMS_APPS",
",",
"CMS_PROCESSORS",
",",
"CMS_MIDDLEWARE",
",",
"CMS_APP_STYLE",
",",
"URLCONF",
",",
"application",
")",
"migrate",
"=",
"args",
".",
"get",
"(",
"'--migrate'",
")",
"or",
"not",
"args",
".",
"get",
"(",
"'--no-migrate'",
")",
"default_settings",
".",
"update",
"(",
"configs",
")",
"if",
"extra_settings",
":",
"apps",
"=",
"extra_settings",
".",
"pop",
"(",
"'INSTALLED_APPS'",
",",
"[",
"]",
")",
"apps_top",
"=",
"extra_settings",
".",
"pop",
"(",
"'TOP_INSTALLED_APPS'",
",",
"[",
"]",
")",
"template_processors",
"=",
"extra_settings",
".",
"pop",
"(",
"'TEMPLATE_CONTEXT_PROCESSORS'",
",",
"[",
"]",
")",
"template_loaders",
"=",
"extra_settings",
".",
"pop",
"(",
"'TEMPLATE_LOADERS'",
",",
"[",
"]",
")",
"template_dirs",
"=",
"extra_settings",
".",
"pop",
"(",
"'TEMPLATE_DIRS'",
",",
"[",
"]",
")",
"middleware",
"=",
"extra_settings",
".",
"pop",
"(",
"'MIDDLEWARE_CLASSES'",
",",
"[",
"]",
")",
"middleware_top",
"=",
"extra_settings",
".",
"pop",
"(",
"'TOP_MIDDLEWARE_CLASSES'",
",",
"[",
"]",
")",
"default_settings",
".",
"update",
"(",
"extra_settings",
")",
"for",
"app",
"in",
"apps_top",
":",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"insert",
"(",
"0",
",",
"app",
")",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"extend",
"(",
"apps",
")",
"default_settings",
"[",
"'TEMPLATE_CONTEXT_PROCESSORS'",
"]",
".",
"extend",
"(",
"template_processors",
")",
"default_settings",
"[",
"'TEMPLATE_LOADERS'",
"]",
".",
"extend",
"(",
"template_loaders",
")",
"if",
"'TEMPLATE_DIRS'",
"not",
"in",
"default_settings",
":",
"default_settings",
"[",
"'TEMPLATE_DIRS'",
"]",
"=",
"[",
"]",
"default_settings",
"[",
"'TEMPLATE_DIRS'",
"]",
".",
"extend",
"(",
"template_dirs",
")",
"default_settings",
"[",
"'MIDDLEWARE_CLASSES'",
"]",
".",
"extend",
"(",
"middleware",
")",
"for",
"middleware",
"in",
"middleware_top",
":",
"default_settings",
"[",
"'MIDDLEWARE_CLASSES'",
"]",
".",
"insert",
"(",
"0",
",",
"middleware",
")",
"if",
"'cms'",
"in",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
":",
"if",
"'treebeard'",
"not",
"in",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
":",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"append",
"(",
"'treebeard'",
")",
"if",
"(",
"'filer'",
"in",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
"and",
"'mptt'",
"not",
"in",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
")",
":",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"append",
"(",
"'mptt'",
")",
"if",
"(",
"'filer'",
"in",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
"and",
"'easy_thumbnails'",
"not",
"in",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
")",
":",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"append",
"(",
"'easy_thumbnails'",
")",
"if",
"args",
"[",
"'--boilerplate'",
"]",
":",
"boilerplate_settings",
"=",
"get_boilerplates_settings",
"(",
")",
"# Do not override helper settings with defaults.",
"if",
"'ALDRYN_BOILERPLATE_NAME'",
"in",
"default_settings",
".",
"keys",
"(",
")",
":",
"del",
"boilerplate_settings",
"[",
"'ALDRYN_BOILERPLATE_NAME'",
"]",
"default_settings",
"=",
"extend_settings",
"(",
"default_settings",
",",
"boilerplate_settings",
",",
"'STATICFILES_FINDERS'",
",",
"'django.contrib.staticfiles.finders.AppDirectoriesFinder'",
")",
"del",
"boilerplate_settings",
"[",
"'STATICFILES_FINDERS'",
"]",
"default_settings",
"=",
"extend_settings",
"(",
"default_settings",
",",
"boilerplate_settings",
",",
"'TEMPLATE_LOADERS'",
",",
"'django.template.loaders.app_directories.Loader'",
")",
"del",
"boilerplate_settings",
"[",
"'TEMPLATE_LOADERS'",
"]",
"for",
"setting",
"in",
"(",
"'INSTALLED_APPS'",
",",
"'TEMPLATE_CONTEXT_PROCESSORS'",
")",
":",
"default_settings",
"[",
"setting",
"]",
".",
"extend",
"(",
"boilerplate_settings",
"[",
"setting",
"]",
")",
"del",
"boilerplate_settings",
"[",
"setting",
"]",
"default_settings",
".",
"update",
"(",
"boilerplate_settings",
")",
"default_settings",
"[",
"'TEMPLATES'",
"]",
"=",
"[",
"{",
"'NAME'",
":",
"'django'",
",",
"'BACKEND'",
":",
"'django.template.backends.django.DjangoTemplates'",
",",
"'OPTIONS'",
":",
"{",
"'context_processors'",
":",
"[",
"template_processor",
".",
"replace",
"(",
"'django.core'",
",",
"'django.template'",
")",
"for",
"template_processor",
"in",
"default_settings",
".",
"pop",
"(",
"'TEMPLATE_CONTEXT_PROCESSORS'",
")",
"]",
",",
"'loaders'",
":",
"default_settings",
".",
"pop",
"(",
"'TEMPLATE_LOADERS'",
")",
"}",
"}",
"]",
"if",
"'TEMPLATE_DIRS'",
"in",
"default_settings",
":",
"default_settings",
"[",
"'TEMPLATES'",
"]",
"[",
"0",
"]",
"[",
"'DIRS'",
"]",
"=",
"default_settings",
".",
"pop",
"(",
"'TEMPLATE_DIRS'",
")",
"# Support for custom user models",
"if",
"'AUTH_USER_MODEL'",
"in",
"os",
".",
"environ",
":",
"custom_user_app",
"=",
"os",
".",
"environ",
"[",
"'AUTH_USER_MODEL'",
"]",
".",
"rpartition",
"(",
"'.'",
")",
"[",
"0",
"]",
"custom_user_model",
"=",
"'.'",
".",
"join",
"(",
"os",
".",
"environ",
"[",
"'AUTH_USER_MODEL'",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"2",
":",
"]",
")",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"insert",
"(",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"index",
"(",
"'cms'",
")",
",",
"custom_user_app",
")",
"default_settings",
"[",
"'AUTH_USER_MODEL'",
"]",
"=",
"custom_user_model",
"if",
"args",
"[",
"'test'",
"]",
":",
"default_settings",
"[",
"'SESSION_ENGINE'",
"]",
"=",
"'django.contrib.sessions.backends.cache'",
"if",
"application",
"not",
"in",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
":",
"default_settings",
"[",
"'INSTALLED_APPS'",
"]",
".",
"append",
"(",
"application",
")",
"if",
"not",
"migrate",
":",
"default_settings",
"[",
"'MIGRATION_MODULES'",
"]",
"=",
"DisableMigrations",
"(",
")",
"if",
"'MIDDLEWARE'",
"not",
"in",
"default_settings",
":",
"default_settings",
"[",
"'MIDDLEWARE'",
"]",
"=",
"default_settings",
"[",
"'MIDDLEWARE_CLASSES'",
"]",
"del",
"default_settings",
"[",
"'MIDDLEWARE_CLASSES'",
"]",
"_reset_django",
"(",
"settings",
")",
"settings",
".",
"configure",
"(",
"*",
"*",
"default_settings",
")",
"django",
".",
"setup",
"(",
")",
"reload_urls",
"(",
"settings",
",",
"cms_apps",
"=",
"False",
")",
"return",
"settings"
] | Setup the Django settings
:param args: docopt arguments
:param default_settings: default Django settings
:param settings: Django settings module
:param STATIC_ROOT: static root directory
:param MEDIA_ROOT: media root directory
:return: | [
"Setup",
"the",
"Django",
"settings",
":",
"param",
"args",
":",
"docopt",
"arguments",
":",
"param",
"default_settings",
":",
"default",
"Django",
"settings",
":",
"param",
"settings",
":",
"Django",
"settings",
"module",
":",
"param",
"STATIC_ROOT",
":",
"static",
"root",
"directory",
":",
"param",
"MEDIA_ROOT",
":",
"media",
"root",
"directory",
":",
"return",
":"
] | train | https://github.com/nephila/djangocms-helper/blob/3fe53aee7b06922112c5e4445b74afeb86f6d836/djangocms_helper/utils.py#L150-L314 |
nephila/djangocms-helper | djangocms_helper/utils.py | ensure_unicoded_and_unique | def ensure_unicoded_and_unique(args_list, application):
"""
Iterate over args_list, make it unicode if needed and ensure that there
are no duplicates.
Returns list of unicoded arguments in the same order.
"""
unicoded_args = []
for argument in args_list:
argument = (six.u(argument)
if not isinstance(argument, six.text_type) else argument)
if argument not in unicoded_args or argument == application:
unicoded_args.append(argument)
return unicoded_args | python | def ensure_unicoded_and_unique(args_list, application):
"""
Iterate over args_list, make it unicode if needed and ensure that there
are no duplicates.
Returns list of unicoded arguments in the same order.
"""
unicoded_args = []
for argument in args_list:
argument = (six.u(argument)
if not isinstance(argument, six.text_type) else argument)
if argument not in unicoded_args or argument == application:
unicoded_args.append(argument)
return unicoded_args | [
"def",
"ensure_unicoded_and_unique",
"(",
"args_list",
",",
"application",
")",
":",
"unicoded_args",
"=",
"[",
"]",
"for",
"argument",
"in",
"args_list",
":",
"argument",
"=",
"(",
"six",
".",
"u",
"(",
"argument",
")",
"if",
"not",
"isinstance",
"(",
"argument",
",",
"six",
".",
"text_type",
")",
"else",
"argument",
")",
"if",
"argument",
"not",
"in",
"unicoded_args",
"or",
"argument",
"==",
"application",
":",
"unicoded_args",
".",
"append",
"(",
"argument",
")",
"return",
"unicoded_args"
] | Iterate over args_list, make it unicode if needed and ensure that there
are no duplicates.
Returns list of unicoded arguments in the same order. | [
"Iterate",
"over",
"args_list",
"make",
"it",
"unicode",
"if",
"needed",
"and",
"ensure",
"that",
"there",
"are",
"no",
"duplicates",
".",
"Returns",
"list",
"of",
"unicoded",
"arguments",
"in",
"the",
"same",
"order",
"."
] | train | https://github.com/nephila/djangocms-helper/blob/3fe53aee7b06922112c5e4445b74afeb86f6d836/djangocms_helper/utils.py#L407-L419 |
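A worked example with a hypothetical argument list; duplicates are dropped except for the application name, which is allowed to repeat:

```python
args = ensure_unicoded_and_unique(
    ["test", "--cms", "test", "my_app", "my_app"],
    "my_app",
)
assert args == ["test", "--cms", "my_app", "my_app"]
```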
MozillaSecurity/fuzzfetch | src/fuzzfetch/path.py | rmtree | def rmtree(path):
"""shutil.rmtree() but also handle junction points and access errors on Windows.
"""
if islink(path):
unlink(path)
elif os.path.isdir(path):
for sub in os.listdir(path):
sub = os.path.join(path, sub)
if os.path.isfile(sub):
if not os.access(sub, os.W_OK):
# Is the error an access error?
os.chmod(sub, stat.S_IWUSR)
os.unlink(sub)
else:
rmtree(sub)
os.rmdir(path)
else:
raise RuntimeError("rmtree called on non-link/folder") | python | def rmtree(path):
"""shutil.rmtree() but also handle junction points and access errors on Windows.
"""
if islink(path):
unlink(path)
elif os.path.isdir(path):
for sub in os.listdir(path):
sub = os.path.join(path, sub)
if os.path.isfile(sub):
if not os.access(sub, os.W_OK):
# Is the error an access error?
os.chmod(sub, stat.S_IWUSR)
os.unlink(sub)
else:
rmtree(sub)
os.rmdir(path)
else:
raise RuntimeError("rmtree called on non-link/folder") | [
"def",
"rmtree",
"(",
"path",
")",
":",
"if",
"islink",
"(",
"path",
")",
":",
"unlink",
"(",
"path",
")",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"for",
"sub",
"in",
"os",
".",
"listdir",
"(",
"path",
")",
":",
"sub",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"sub",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"sub",
")",
":",
"if",
"not",
"os",
".",
"access",
"(",
"sub",
",",
"os",
".",
"W_OK",
")",
":",
"# Is the error an access error?",
"os",
".",
"chmod",
"(",
"sub",
",",
"stat",
".",
"S_IWUSR",
")",
"os",
".",
"unlink",
"(",
"sub",
")",
"else",
":",
"rmtree",
"(",
"sub",
")",
"os",
".",
"rmdir",
"(",
"path",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"rmtree called on non-link/folder\"",
")"
] | shutil.rmtree() but also handle junction points and access errors on Windows. | [
"shutil",
".",
"rmtree",
"()",
"but",
"also",
"handle",
"junction",
"points",
"and",
"access",
"errors",
"on",
"Windows",
"."
] | train | https://github.com/MozillaSecurity/fuzzfetch/blob/166cbfc71b679db019b9ac777dce12ccfdfc2c10/src/fuzzfetch/path.py#L21-L38 |
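A minimal sketch of rmtree above, assuming the package is importable as fuzzfetch.path (mirroring the file path in the record); the read-only file shows why the chmod fallback exists:

    import os
    import stat
    import tempfile

    from fuzzfetch.path import rmtree

    # build a small tree containing a read-only file, the case that makes
    # plain deletion fail with an access error on Windows
    root = tempfile.mkdtemp()
    os.mkdir(os.path.join(root, "sub"))
    locked = os.path.join(root, "sub", "locked.txt")
    with open(locked, "w") as fp:
        fp.write("data")
    os.chmod(locked, stat.S_IRUSR)

    rmtree(root)                 # chmods the read-only file, then removes everything
    print(os.path.exists(root))  # False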
MozillaSecurity/fuzzfetch | src/fuzzfetch/path.py | islink | def islink(path):
"""os.path.islink() but return True for junction points on Windows.
"""
if platform.system() == "Windows":
if sys.version_info[:2] < (3, 5):
try:
# pylint: disable=undefined-variable
attrs = ctypes.windll.kernel32.GetFileAttributesW(unicode(path)) # noqa
assert attrs != -1
return bool(attrs & FILE_ATTRIBUTE_REPARSE_POINT)
except (AttributeError, AssertionError):
return False
try:
st = os.lstat(path) # pylint: disable=invalid-name
except (OSError, AttributeError):
return False
# pylint: disable=no-member
return stat.S_ISLNK(st.st_mode) or st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT
return os.path.islink(path) | python | def islink(path):
"""os.path.islink() but return True for junction points on Windows.
"""
if platform.system() == "Windows":
if sys.version_info[:2] < (3, 5):
try:
# pylint: disable=undefined-variable
attrs = ctypes.windll.kernel32.GetFileAttributesW(unicode(path)) # noqa
assert attrs != -1
return bool(attrs & FILE_ATTRIBUTE_REPARSE_POINT)
except (AttributeError, AssertionError):
return False
try:
st = os.lstat(path) # pylint: disable=invalid-name
except (OSError, AttributeError):
return False
# pylint: disable=no-member
return stat.S_ISLNK(st.st_mode) or st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT
return os.path.islink(path) | [
"def",
"islink",
"(",
"path",
")",
":",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"\"Windows\"",
":",
"if",
"sys",
".",
"version_info",
"[",
":",
"2",
"]",
"<",
"(",
"3",
",",
"5",
")",
":",
"try",
":",
"# pylint: disable=undefined-variable",
"attrs",
"=",
"ctypes",
".",
"windll",
".",
"kernel32",
".",
"GetFileAttributesW",
"(",
"unicode",
"(",
"path",
")",
")",
"# noqa",
"assert",
"attrs",
"!=",
"-",
"1",
"return",
"bool",
"(",
"attrs",
"&",
"FILE_ATTRIBUTE_REPARSE_POINT",
")",
"except",
"(",
"AttributeError",
",",
"AssertionError",
")",
":",
"return",
"False",
"try",
":",
"st",
"=",
"os",
".",
"lstat",
"(",
"path",
")",
"# pylint: disable=invalid-name",
"except",
"(",
"OSError",
",",
"AttributeError",
")",
":",
"return",
"False",
"# pylint: disable=no-member",
"return",
"stat",
".",
"S_ISLNK",
"(",
"st",
".",
"st_mode",
")",
"or",
"st",
".",
"st_file_attributes",
"&",
"stat",
".",
"FILE_ATTRIBUTE_REPARSE_POINT",
"return",
"os",
".",
"path",
".",
"islink",
"(",
"path",
")"
] | os.path.islink() but return True for junction points on Windows. | [
"os",
".",
"path",
".",
"islink",
"()",
"but",
"return",
"True",
"for",
"junction",
"points",
"on",
"Windows",
"."
] | train | https://github.com/MozillaSecurity/fuzzfetch/blob/166cbfc71b679db019b9ac777dce12ccfdfc2c10/src/fuzzfetch/path.py#L41-L59 |
MozillaSecurity/fuzzfetch | src/fuzzfetch/path.py | unlink | def unlink(link):
"""os.unlink() but handle junction points on Windows.
"""
if islink(link) and platform.system() == "Windows" and sys.version_info[:2] < (3, 5):
# deleting junction points was added to os.unlink in 3.5
# https://bugs.python.org/issue18314
subprocess.check_call(["rmdir", link], shell=True)
else:
os.unlink(link) | python | def unlink(link):
"""os.unlink() but handle junction points on Windows.
"""
if islink(link) and platform.system() == "Windows" and sys.version_info[:2] < (3, 5):
# deleting junction points was added to os.unlink in 3.5
# https://bugs.python.org/issue18314
subprocess.check_call(["rmdir", link], shell=True)
else:
os.unlink(link) | [
"def",
"unlink",
"(",
"link",
")",
":",
"if",
"islink",
"(",
"link",
")",
"and",
"platform",
".",
"system",
"(",
")",
"==",
"\"Windows\"",
"and",
"sys",
".",
"version_info",
"[",
":",
"2",
"]",
"<",
"(",
"3",
",",
"5",
")",
":",
"# deleting junction points was added to os.unlink in 3.5",
"# https://bugs.python.org/issue18314",
"subprocess",
".",
"check_call",
"(",
"[",
"\"rmdir\"",
",",
"link",
"]",
",",
"shell",
"=",
"True",
")",
"else",
":",
"os",
".",
"unlink",
"(",
"link",
")"
] | os.unlink() but handle junction points on Windows. | [
"os",
".",
"unlink",
"()",
"but",
"handle",
"junction",
"points",
"on",
"Windows",
"."
] | train | https://github.com/MozillaSecurity/fuzzfetch/blob/166cbfc71b679db019b9ac777dce12ccfdfc2c10/src/fuzzfetch/path.py#L62-L70 |
MozillaSecurity/fuzzfetch | src/fuzzfetch/path.py | symlink | def symlink(target, link):
"""os.symlink() but use a junction point on Windows.
"""
if islink(link):
unlink(link)
if platform.system() == "Windows":
if sys.version_info[:2] < (3, 5):
with open(os.devnull, "w") as nul:
subprocess.check_call(["mklink", "/J", link, target], shell=True, stdout=nul)
else:
_winapi.CreateJunction(target, link)
else:
os.symlink(target, link) | python | def symlink(target, link):
"""os.symlink() but use a junction point on Windows.
"""
if islink(link):
unlink(link)
if platform.system() == "Windows":
if sys.version_info[:2] < (3, 5):
with open(os.devnull, "w") as nul:
subprocess.check_call(["mklink", "/J", link, target], shell=True, stdout=nul)
else:
_winapi.CreateJunction(target, link)
else:
os.symlink(target, link) | [
"def",
"symlink",
"(",
"target",
",",
"link",
")",
":",
"if",
"islink",
"(",
"link",
")",
":",
"unlink",
"(",
"link",
")",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"\"Windows\"",
":",
"if",
"sys",
".",
"version_info",
"[",
":",
"2",
"]",
"<",
"(",
"3",
",",
"5",
")",
":",
"with",
"open",
"(",
"os",
".",
"devnull",
",",
"\"w\"",
")",
"as",
"nul",
":",
"subprocess",
".",
"check_call",
"(",
"[",
"\"mklink\"",
",",
"\"/J\"",
",",
"link",
",",
"target",
"]",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"nul",
")",
"else",
":",
"_winapi",
".",
"CreateJunction",
"(",
"target",
",",
"link",
")",
"else",
":",
"os",
".",
"symlink",
"(",
"target",
",",
"link",
")"
] | os.symlink() but use a junction point on Windows. | [
"os",
".",
"symlink",
"()",
"but",
"use",
"a",
"junction",
"point",
"on",
"Windows",
"."
] | train | https://github.com/MozillaSecurity/fuzzfetch/blob/166cbfc71b679db019b9ac777dce12ccfdfc2c10/src/fuzzfetch/path.py#L73-L85 |
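The three link helpers above (islink, unlink, symlink) are meant to be used together; a small sketch, again assuming the fuzzfetch.path import path, looks like this:

    import os
    import tempfile

    from fuzzfetch.path import islink, symlink, unlink

    target = tempfile.mkdtemp()
    link = target + "-link"

    symlink(target, link)          # junction point on Windows, os.symlink elsewhere
    print(islink(link))            # True, even for junctions that os.path.islink can miss
    unlink(link)                   # removes only the link
    print(os.path.isdir(target))   # True, the target directory is untouched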
SetBased/py-stratum | pystratum/Util.py | Util.write_two_phases | def write_two_phases(filename, data, io):
"""
Writes a file in two phases to the filesystem.
First writes the data to a temporary file (in the same directory) and then renames the temporary file. If the
file already exists and its content is equal to the data that must be written, no action is taken. This has the
following advantages:
* In case of some write error (e.g. disk full) the original file is kept intact and no file with partial data
is written.
* Renaming a file is atomic. So, running processes will never read partially written data.
:param str filename: The name of the file where the data must be stored.
:param str data: The data that must be written.
:param pystratum.style.PyStratumStyle.PyStratumStyle io: The output decorator.
"""
write_flag = True
if os.path.exists(filename):
with open(filename, 'r') as file:
old_data = file.read()
if data == old_data:
write_flag = False
if write_flag:
tmp_filename = filename + '.tmp'
with open(tmp_filename, 'w+') as file:
file.write(data)
os.replace(tmp_filename, filename)
io.text('Wrote: <fso>{0}</fso>'.format(filename))
else:
io.text('File <fso>{0}</fso> is up to date'.format(filename)) | python | def write_two_phases(filename, data, io):
"""
Writes a file in two phases to the filesystem.
First writes the data to a temporary file (in the same directory) and then renames the temporary file. If the
file already exists and its content is equal to the data that must be written, no action is taken. This has the
following advantages:
* In case of some write error (e.g. disk full) the original file is kept intact and no file with partial data
is written.
* Renaming a file is atomic. So, running processes will never read partially written data.
:param str filename: The name of the file where the data must be stored.
:param str data: The data that must be written.
:param pystratum.style.PyStratumStyle.PyStratumStyle io: The output decorator.
"""
write_flag = True
if os.path.exists(filename):
with open(filename, 'r') as file:
old_data = file.read()
if data == old_data:
write_flag = False
if write_flag:
tmp_filename = filename + '.tmp'
with open(tmp_filename, 'w+') as file:
file.write(data)
os.replace(tmp_filename, filename)
io.text('Wrote: <fso>{0}</fso>'.format(filename))
else:
io.text('File <fso>{0}</fso> is up to date'.format(filename)) | [
"def",
"write_two_phases",
"(",
"filename",
",",
"data",
",",
"io",
")",
":",
"write_flag",
"=",
"True",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"file",
":",
"old_data",
"=",
"file",
".",
"read",
"(",
")",
"if",
"data",
"==",
"old_data",
":",
"write_flag",
"=",
"False",
"if",
"write_flag",
":",
"tmp_filename",
"=",
"filename",
"+",
"'.tmp'",
"with",
"open",
"(",
"tmp_filename",
",",
"'w+'",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"data",
")",
"os",
".",
"replace",
"(",
"tmp_filename",
",",
"filename",
")",
"io",
".",
"text",
"(",
"'Wrote: <fso>{0}</fso>'",
".",
"format",
"(",
"filename",
")",
")",
"else",
":",
"io",
".",
"text",
"(",
"'File <fso>{0}</fso> is up to date'",
".",
"format",
"(",
"filename",
")",
")"
] | Writes a file in two phases to the filesystem.
First writes the data to a temporary file (in the same directory) and then renames the temporary file. If the
file already exists and its content is equal to the data that must be written, no action is taken. This has the
following advantages:
* In case of some write error (e.g. disk full) the original file is kept intact and no file with partial data
is written.
* Renaming a file is atomic. So, running processes will never read partially written data.
:param str filename: The name of the file where the data must be stored.
:param str data: The data that must be written.
:param pystratum.style.PyStratumStyle.PyStratumStyle io: The output decorator. | [
"Writes",
"a",
"file",
"in",
"two",
"phase",
"to",
"the",
"filesystem",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/Util.py#L14-L43 |
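A usage sketch for the two-phase write above; EchoIo is a made-up stand-in for the PyStratumStyle decorator (only its text() method is used), and calling the method through the class assumes it is a static method, which its signature suggests:

    import os
    import tempfile

    from pystratum.Util import Util

    class EchoIo:
        # minimal stand-in for PyStratumStyle: write_two_phases only calls text()
        def text(self, message):
            print(message)

    path = os.path.join(tempfile.mkdtemp(), "routine.sql")
    Util.write_two_phases(path, "select 1;", EchoIo())  # writes routine.sql.tmp, then renames atomically
    Util.write_two_phases(path, "select 1;", EchoIo())  # identical content, so the file is left untouched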
PlaidWeb/Pushl | pushl/entries.py | get_entry | async def get_entry(config, url):
""" Given an entry URL, return the entry
Arguments:
config -- the configuration
url -- the URL of the entry
Returns: 3-tuple of (current, previous, updated) """
previous = config.cache.get(
'entry', url,
schema_version=SCHEMA_VERSION) if config.cache else None
headers = previous.caching if previous else None
request = await utils.retry_get(config, url, headers=headers)
if not request or not request.success:
LOGGER.error("Could not get entry %s: %d", url,
request.status if request else -1)
return None, previous, False
# cache hit
if request.cached:
return previous, previous, False
current = Entry(request)
# Content updated
if config.cache:
config.cache.set('entry', url, current)
return current, previous, (not previous
or previous.digest != current.digest
or previous.status != current.status) | python | async def get_entry(config, url):
""" Given an entry URL, return the entry
Arguments:
config -- the configuration
url -- the URL of the entry
Returns: 3-tuple of (current, previous, updated) """
previous = config.cache.get(
'entry', url,
schema_version=SCHEMA_VERSION) if config.cache else None
headers = previous.caching if previous else None
request = await utils.retry_get(config, url, headers=headers)
if not request or not request.success:
LOGGER.error("Could not get entry %s: %d", url,
request.status if request else -1)
return None, previous, False
# cache hit
if request.cached:
return previous, previous, False
current = Entry(request)
# Content updated
if config.cache:
config.cache.set('entry', url, current)
return current, previous, (not previous
or previous.digest != current.digest
or previous.status != current.status) | [
"async",
"def",
"get_entry",
"(",
"config",
",",
"url",
")",
":",
"previous",
"=",
"config",
".",
"cache",
".",
"get",
"(",
"'entry'",
",",
"url",
",",
"schema_version",
"=",
"SCHEMA_VERSION",
")",
"if",
"config",
".",
"cache",
"else",
"None",
"headers",
"=",
"previous",
".",
"caching",
"if",
"previous",
"else",
"None",
"request",
"=",
"await",
"utils",
".",
"retry_get",
"(",
"config",
",",
"url",
",",
"headers",
"=",
"headers",
")",
"if",
"not",
"request",
"or",
"not",
"request",
".",
"success",
":",
"LOGGER",
".",
"error",
"(",
"\"Could not get entry %s: %d\"",
",",
"url",
",",
"request",
".",
"status",
"if",
"request",
"else",
"-",
"1",
")",
"return",
"None",
",",
"previous",
",",
"False",
"# cache hit",
"if",
"request",
".",
"cached",
":",
"return",
"previous",
",",
"previous",
",",
"False",
"current",
"=",
"Entry",
"(",
"request",
")",
"# Content updated",
"if",
"config",
".",
"cache",
":",
"config",
".",
"cache",
".",
"set",
"(",
"'entry'",
",",
"url",
",",
"current",
")",
"return",
"current",
",",
"previous",
",",
"(",
"not",
"previous",
"or",
"previous",
".",
"digest",
"!=",
"current",
".",
"digest",
"or",
"previous",
".",
"status",
"!=",
"current",
".",
"status",
")"
] | Given an entry URL, return the entry
Arguments:
config -- the configuration
url -- the URL of the entry
Returns: 3-tuple of (current, previous, updated) | [
"Given",
"an",
"entry",
"URL",
"return",
"the",
"entry"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/entries.py#L108-L142 |
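The call contract of get_entry above (a 3-tuple of current, previous and an updated flag) can be sketched as follows; my_config stands for an already configured pushl config object, which is not shown in this record:

    import asyncio

    from pushl import entries

    async def check(config, url):
        # current is None when the fetch fails, previous is the cached entry (if any),
        # and updated is True only when the digest or the HTTP status changed
        current, previous, updated = await entries.get_entry(config, url)
        if updated:
            print("entry changed:", url)
        return current

    # asyncio.run(check(my_config, "https://example.com/post"))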
PlaidWeb/Pushl | pushl/entries.py | Entry._check_rel | def _check_rel(attrs, rel_whitelist, rel_blacklist):
""" Check a link's relations against the whitelist or blacklist.
First, this will reject based on blacklist.
Next, if there is a whitelist, there must be at least one rel that matches.
To explicitly allow links without a rel you can add None to the whitelist
(e.g. ['in-reply-to',None])
"""
rels = attrs.get('rel', [None])
if rel_blacklist:
# Never return True for a link whose rel appears in the blacklist
for rel in rels:
if rel in rel_blacklist:
return False
if rel_whitelist:
# If there is a whitelist for rels, only return true for a rel that
# appears in it
for rel in rels:
if rel in rel_whitelist:
return True
# If there is a whitelist and we don't match, then reject
return False
return True | python | def _check_rel(attrs, rel_whitelist, rel_blacklist):
""" Check a link's relations against the whitelist or blacklist.
First, this will reject based on blacklist.
Next, if there is a whitelist, there must be at least one rel that matches.
To explicitly allow links without a rel you can add None to the whitelist
(e.g. ['in-reply-to',None])
"""
rels = attrs.get('rel', [None])
if rel_blacklist:
# Never return True for a link whose rel appears in the blacklist
for rel in rels:
if rel in rel_blacklist:
return False
if rel_whitelist:
# If there is a whitelist for rels, only return true for a rel that
# appears in it
for rel in rels:
if rel in rel_whitelist:
return True
# If there is a whitelist and we don't match, then reject
return False
return True | [
"def",
"_check_rel",
"(",
"attrs",
",",
"rel_whitelist",
",",
"rel_blacklist",
")",
":",
"rels",
"=",
"attrs",
".",
"get",
"(",
"'rel'",
",",
"[",
"None",
"]",
")",
"if",
"rel_blacklist",
":",
"# Never return True for a link whose rel appears in the blacklist",
"for",
"rel",
"in",
"rels",
":",
"if",
"rel",
"in",
"rel_blacklist",
":",
"return",
"False",
"if",
"rel_whitelist",
":",
"# If there is a whitelist for rels, only return true for a rel that",
"# appears in it",
"for",
"rel",
"in",
"rels",
":",
"if",
"rel",
"in",
"rel_whitelist",
":",
"return",
"True",
"# If there is a whitelist and we don't match, then reject",
"return",
"False",
"return",
"True"
] | Check a link's relations against the whitelist or blacklist.
First, this will reject based on blacklist.
Next, if there is a whitelist, there must be at least one rel that matches.
To explicitly allow links without a rel you can add None to the whitelist
(e.g. ['in-reply-to',None]) | [
"Check",
"a",
"link",
"s",
"relations",
"against",
"the",
"whitelist",
"or",
"blacklist",
"."
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/entries.py#L61-L88 |
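A small sketch of the whitelist/blacklist rules implemented by _check_rel above, assuming the method is exposed as a static method as its signature suggests:

    from pushl.entries import Entry

    # a blacklisted rel always rejects the link
    print(Entry._check_rel({'rel': ['nofollow']}, None, ['nofollow']))        # False
    # with a whitelist, a link without any rel passes only if None is whitelisted
    print(Entry._check_rel({'href': '/x'}, ['in-reply-to', None], None))      # True
    # a whitelist with no matching rel rejects the link
    print(Entry._check_rel({'rel': ['webmention']}, ['in-reply-to'], None))   # False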
PlaidWeb/Pushl | pushl/entries.py | Entry._domain_differs | def _domain_differs(self, href):
""" Check that a link is not on the same domain as the source URL """
target = utils.get_domain(href)
if not target:
return False
origin = utils.get_domain(self.url)
return target != origin | python | def _domain_differs(self, href):
""" Check that a link is not on the same domain as the source URL """
target = utils.get_domain(href)
if not target:
return False
origin = utils.get_domain(self.url)
return target != origin | [
"def",
"_domain_differs",
"(",
"self",
",",
"href",
")",
":",
"target",
"=",
"utils",
".",
"get_domain",
"(",
"href",
")",
"if",
"not",
"target",
":",
"return",
"False",
"origin",
"=",
"utils",
".",
"get_domain",
"(",
"self",
".",
"url",
")",
"return",
"target",
"!=",
"origin"
] | Check that a link is not on the same domain as the source URL | [
"Check",
"that",
"a",
"link",
"is",
"not",
"on",
"the",
"same",
"domain",
"as",
"the",
"source",
"URL"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/entries.py#L90-L97 |
PlaidWeb/Pushl | pushl/entries.py | Entry.get_targets | def get_targets(self, config):
""" Given an Entry object, return all of the outgoing links. """
return {urllib.parse.urljoin(self.url, attrs['href'])
for attrs in self._targets
if self._check_rel(attrs, config.rel_whitelist, config.rel_blacklist)
and self._domain_differs(attrs['href'])} | python | def get_targets(self, config):
""" Given an Entry object, return all of the outgoing links. """
return {urllib.parse.urljoin(self.url, attrs['href'])
for attrs in self._targets
if self._check_rel(attrs, config.rel_whitelist, config.rel_blacklist)
and self._domain_differs(attrs['href'])} | [
"def",
"get_targets",
"(",
"self",
",",
"config",
")",
":",
"return",
"{",
"urllib",
".",
"parse",
".",
"urljoin",
"(",
"self",
".",
"url",
",",
"attrs",
"[",
"'href'",
"]",
")",
"for",
"attrs",
"in",
"self",
".",
"_targets",
"if",
"self",
".",
"_check_rel",
"(",
"attrs",
",",
"config",
".",
"rel_whitelist",
",",
"config",
".",
"rel_blacklist",
")",
"and",
"self",
".",
"_domain_differs",
"(",
"attrs",
"[",
"'href'",
"]",
")",
"}"
] | Given an Entry object, return all of the outgoing links. | [
"Given",
"an",
"Entry",
"object",
"return",
"all",
"of",
"the",
"outgoing",
"links",
"."
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/entries.py#L99-L105 |
SetBased/py-stratum | pystratum/command/LoaderCommand.py | LoaderCommand.handle | def handle(self):
"""
Executes loader command.
"""
self.output = PyStratumStyle(self.input, self.output)
config_file = self.argument('config_file')
sources = self.argument('file_names')
status = self.run_command(config_file, sources)
return status | python | def handle(self):
"""
Executes loader command.
"""
self.output = PyStratumStyle(self.input, self.output)
config_file = self.argument('config_file')
sources = self.argument('file_names')
status = self.run_command(config_file, sources)
return status | [
"def",
"handle",
"(",
"self",
")",
":",
"self",
".",
"output",
"=",
"PyStratumStyle",
"(",
"self",
".",
"input",
",",
"self",
".",
"output",
")",
"config_file",
"=",
"self",
".",
"argument",
"(",
"'config_file'",
")",
"sources",
"=",
"self",
".",
"argument",
"(",
"'file_names'",
")",
"status",
"=",
"self",
".",
"run_command",
"(",
"config_file",
",",
"sources",
")",
"return",
"status"
] | Executes loader command. | [
"Executes",
"loader",
"command",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/command/LoaderCommand.py#L29-L40 |
SetBased/py-stratum | pystratum/command/LoaderCommand.py | LoaderCommand.run_command | def run_command(self, config_file, sources):
"""
:param str config_file: The name of config file.
:param list sources: The list with source files.
"""
config = configparser.ConfigParser()
config.read(config_file)
rdbms = config.get('database', 'rdbms').lower()
loader = self.create_routine_loader(rdbms)
status = loader.main(config_file, sources)
return status | python | def run_command(self, config_file, sources):
"""
:param str config_file: The name of config file.
:param list sources: The list with source files.
"""
config = configparser.ConfigParser()
config.read(config_file)
rdbms = config.get('database', 'rdbms').lower()
loader = self.create_routine_loader(rdbms)
status = loader.main(config_file, sources)
return status | [
"def",
"run_command",
"(",
"self",
",",
"config_file",
",",
"sources",
")",
":",
"config",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"config",
".",
"read",
"(",
"config_file",
")",
"rdbms",
"=",
"config",
".",
"get",
"(",
"'database'",
",",
"'rdbms'",
")",
".",
"lower",
"(",
")",
"loader",
"=",
"self",
".",
"create_routine_loader",
"(",
"rdbms",
")",
"status",
"=",
"loader",
".",
"main",
"(",
"config_file",
",",
"sources",
")",
"return",
"status"
] | :param str config_file: The name of config file.
:param list sources: The list with source files. | [
":",
"param",
"str",
"config_file",
":",
"The",
"name",
"of",
"config",
"file",
".",
":",
"param",
"list",
"sources",
":",
"The",
"list",
"with",
"source",
"files",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/command/LoaderCommand.py#L43-L56 |
SetBased/py-stratum | pystratum/command/LoaderCommand.py | LoaderCommand.create_routine_loader | def create_routine_loader(self, rdbms):
"""
Factory for creating Routine Loader objects (i.e. objects for loading stored routines into an RDBMS instance
from (pseudo) SQL files).
:param str rdbms: The target RDBMS (i.e. mysql, mssql or pgsql).
:rtype: pystratum.RoutineLoader.RoutineLoader
"""
# Note: We load modules and classes dynamically such that on the end user's system only the required modules
# and other dependencies for the targeted RDBMS must be installed (and required modules and other
# dependencies for the other RDBMSs are not required).
if rdbms == 'mysql':
module = locate('pystratum_mysql.MySqlRoutineLoader')
return module.MySqlRoutineLoader(self.output)
if rdbms == 'mssql':
module = locate('pystratum_mssql.MsSqlRoutineLoader')
return module.MsSqlRoutineLoader(self.output)
if rdbms == 'pgsql':
module = locate('pystratum_pgsql.PgSqlRoutineLoader')
return module.PgSqlRoutineLoader(self.output)
raise Exception("Unknown RDBMS '{0!s}'.".format(rdbms)) | python | def create_routine_loader(self, rdbms):
"""
Factory for creating Routine Loader objects (i.e. objects for loading stored routines into an RDBMS instance
from (pseudo) SQL files).
:param str rdbms: The target RDBMS (i.e. mysql, mssql or pgsql).
:rtype: pystratum.RoutineLoader.RoutineLoader
"""
# Note: We load modules and classes dynamically such that on the end user's system only the required modules
# and other dependencies for the targeted RDBMS must be installed (and required modules and other
# dependencies for the other RDBMSs are not required).
if rdbms == 'mysql':
module = locate('pystratum_mysql.MySqlRoutineLoader')
return module.MySqlRoutineLoader(self.output)
if rdbms == 'mssql':
module = locate('pystratum_mssql.MsSqlRoutineLoader')
return module.MsSqlRoutineLoader(self.output)
if rdbms == 'pgsql':
module = locate('pystratum_pgsql.PgSqlRoutineLoader')
return module.PgSqlRoutineLoader(self.output)
raise Exception("Unknown RDBMS '{0!s}'.".format(rdbms)) | [
"def",
"create_routine_loader",
"(",
"self",
",",
"rdbms",
")",
":",
"# Note: We load modules and classes dynamically such that on the end user's system only the required modules",
"# and other dependencies for the targeted RDBMS must be installed (and required modules and other",
"# dependencies for the other RDBMSs are not required).",
"if",
"rdbms",
"==",
"'mysql'",
":",
"module",
"=",
"locate",
"(",
"'pystratum_mysql.MySqlRoutineLoader'",
")",
"return",
"module",
".",
"MySqlRoutineLoader",
"(",
"self",
".",
"output",
")",
"if",
"rdbms",
"==",
"'mssql'",
":",
"module",
"=",
"locate",
"(",
"'pystratum_mssql.MsSqlRoutineLoader'",
")",
"return",
"module",
".",
"MsSqlRoutineLoader",
"(",
"self",
".",
"output",
")",
"if",
"rdbms",
"==",
"'pgsql'",
":",
"module",
"=",
"locate",
"(",
"'pystratum_pgsql.PgSqlRoutineLoader'",
")",
"return",
"module",
".",
"PgSqlRoutineLoader",
"(",
"self",
".",
"output",
")",
"raise",
"Exception",
"(",
"\"Unknown RDBMS '{0!s}'.\"",
".",
"format",
"(",
"rdbms",
")",
")"
] | Factory for creating Routine Loader objects (i.e. objects for loading stored routines into an RDBMS instance
from (pseudo) SQL files).
:param str rdbms: The target RDBMS (i.e. mysql, mssql or pgsql).
:rtype: pystratum.RoutineLoader.RoutineLoader | [
"Factory",
"for",
"creating",
"a",
"Routine",
"Loader",
"objects",
"(",
"i",
".",
"e",
".",
"objects",
"for",
"loading",
"stored",
"routines",
"into",
"a",
"RDBMS",
"instance",
"from",
"(",
"pseudo",
")",
"SQL",
"files",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/command/LoaderCommand.py#L59-L84 |
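The factory above resolves the backend class lazily by name (presumably via pydoc.locate), so drivers for other databases never have to be installed; the same pattern in isolation, with the json module standing in for a backend module, looks like this:

    from pydoc import locate

    # resolve a module only when it is actually requested,
    # the same way create_routine_loader picks pystratum_mysql/mssql/pgsql
    module = locate('json')
    print(module.dumps({'ok': True}))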
DreamLab/VmShepherd | src/vmshepherd/iaas/dummy_driver.py | DummyIaasDriver.create_vm | async def create_vm(self, *, preset_name, image, flavor, security_groups=None,
userdata=None, key_name=None, availability_zone=None,
subnet=None):
"""
Dummy create_vm func.
"""
info = {
'id': next(self._id_it),
'name': preset_name,
'ip': ['127.0.0.1'],
'created': 0,
'state': VmState.RUNNING,
'flavor': flavor,
'image': image,
'metadata': {'test-meta': 'abctest'},
'timed_shutdown_at': 1522753481,
'tags': ['a-tag', 'b-tag', 'c-tag']
}
logging.debug('Prepare vm: %s', info)
vm = Vm(self, **info)
self._vms[vm.id] = vm
logging.debug('Create: %s', vm)
return None | python | async def create_vm(self, *, preset_name, image, flavor, security_groups=None,
userdata=None, key_name=None, availability_zone=None,
subnet=None):
"""
Dummy create_vm func.
"""
info = {
'id': next(self._id_it),
'name': preset_name,
'ip': ['127.0.0.1'],
'created': 0,
'state': VmState.RUNNING,
'flavor': flavor,
'image': image,
'metadata': {'test-meta': 'abctest'},
'timed_shutdown_at': 1522753481,
'tags': ['a-tag', 'b-tag', 'c-tag']
}
logging.debug('Prepare vm: %s', info)
vm = Vm(self, **info)
self._vms[vm.id] = vm
logging.debug('Create: %s', vm)
return None | [
"async",
"def",
"create_vm",
"(",
"self",
",",
"*",
",",
"preset_name",
",",
"image",
",",
"flavor",
",",
"security_groups",
"=",
"None",
",",
"userdata",
"=",
"None",
",",
"key_name",
"=",
"None",
",",
"availability_zone",
"=",
"None",
",",
"subnet",
"=",
"None",
")",
":",
"info",
"=",
"{",
"'id'",
":",
"next",
"(",
"self",
".",
"_id_it",
")",
",",
"'name'",
":",
"preset_name",
",",
"'ip'",
":",
"[",
"'127.0.0.1'",
"]",
",",
"'created'",
":",
"0",
",",
"'state'",
":",
"VmState",
".",
"RUNNING",
",",
"'flavor'",
":",
"flavor",
",",
"'image'",
":",
"image",
",",
"'metadata'",
":",
"{",
"'test-meta'",
":",
"'abctest'",
"}",
",",
"'timed_shutdown_at'",
":",
"1522753481",
",",
"'tags'",
":",
"[",
"'a-tag'",
",",
"'b-tag'",
",",
"'c-tag'",
"]",
"}",
"logging",
".",
"debug",
"(",
"'Prepare vm: %s'",
",",
"info",
")",
"vm",
"=",
"Vm",
"(",
"self",
",",
"*",
"*",
"info",
")",
"self",
".",
"_vms",
"[",
"vm",
".",
"id",
"]",
"=",
"vm",
"logging",
".",
"debug",
"(",
"'Create: %s'",
",",
"vm",
")",
"return",
"None"
] | Dummy create_vm func. | [
"Dummy",
"create_vm",
"func",
"."
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/dummy_driver.py#L34-L56 |
DreamLab/VmShepherd | src/vmshepherd/iaas/dummy_driver.py | DummyIaasDriver.list_vms | async def list_vms(self, preset_name):
"""Dummy list_vms func"""
return list(vm for vm in self._vms.values() if vm.name == preset_name) | python | async def list_vms(self, preset_name):
"""Dummy list_vms func"""
return list(vm for vm in self._vms.values() if vm.name == preset_name) | [
"async",
"def",
"list_vms",
"(",
"self",
",",
"preset_name",
")",
":",
"return",
"list",
"(",
"vm",
"for",
"vm",
"in",
"self",
".",
"_vms",
".",
"values",
"(",
")",
"if",
"vm",
".",
"name",
"==",
"preset_name",
")"
] | Dummy list_vms func | [
"Dummy",
"list_vms",
"func"
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/dummy_driver.py#L59-L61 |
DreamLab/VmShepherd | src/vmshepherd/iaas/dummy_driver.py | DummyIaasDriver.terminate_vm | async def terminate_vm(self, vm_id):
""" Dummy terminate_vm func """
if vm_id not in self._vms:
raise DummyIaasVmNotFound()
del self._vms[vm_id]
return None | python | async def terminate_vm(self, vm_id):
""" Dummy terminate_vm func """
if vm_id not in self._vms:
raise DummyIaasVmNotFound()
del self._vms[vm_id]
return None | [
"async",
"def",
"terminate_vm",
"(",
"self",
",",
"vm_id",
")",
":",
"if",
"vm_id",
"not",
"in",
"self",
".",
"_vms",
":",
"raise",
"DummyIaasVmNotFound",
"(",
")",
"del",
"self",
".",
"_vms",
"[",
"vm_id",
"]",
"return",
"None"
] | Dummy terminate_vm func | [
"Dummy",
"terminate_vm",
"func"
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/dummy_driver.py#L64-L69 |
DreamLab/VmShepherd | src/vmshepherd/iaas/dummy_driver.py | DummyIaasDriver.get_vm | async def get_vm(self, vm_id):
""" Dummy get_vm func """
if vm_id not in self._vms:
raise DummyIaasVmNotFound()
return self._vms[vm_id] | python | async def get_vm(self, vm_id):
""" Dummy get_vm func """
if vm_id not in self._vms:
raise DummyIaasVmNotFound()
return self._vms[vm_id] | [
"async",
"def",
"get_vm",
"(",
"self",
",",
"vm_id",
")",
":",
"if",
"vm_id",
"not",
"in",
"self",
".",
"_vms",
":",
"raise",
"DummyIaasVmNotFound",
"(",
")",
"return",
"self",
".",
"_vms",
"[",
"vm_id",
"]"
] | Dummy get_vm func | [
"Dummy",
"get_vm",
"func"
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/dummy_driver.py#L72-L76 |
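Taken together, the DummyIaasDriver records above describe a simple in-memory lifecycle; a sketch of it follows, with the driver constructor arguments left open because they are not part of these records:

    import asyncio

    from vmshepherd.iaas.dummy_driver import DummyIaasDriver

    async def lifecycle(driver):
        # create_vm returns None and stores the fake VM in memory,
        # so the id has to be looked up through list_vms
        await driver.create_vm(preset_name='web', image='img-1', flavor='m1.small')
        vms = await driver.list_vms('web')
        vm = await driver.get_vm(vms[0].id)
        await driver.terminate_vm(vm.id)
        return await driver.list_vms('web')   # []

    # asyncio.run(lifecycle(DummyIaasDriver()))  # constructor arguments, if any, are a guess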
mjirik/io3d | io3d/misc.py | suggest_filename | def suggest_filename(file_path, exists=None):
"""
Check if the path exists and append a number to its end if it does.
For debugging you can set as input whether the file exists or not.
"""
import os.path
import re
if not isinstance(exists, bool):
exists = os.path.exists(file_path)
if exists:
file_path, file_extension = os.path.splitext(file_path)
# print(file_path)
m = re.search(r"_\d+$", file_path)
if m is None:
# cislo = 2
new_cislo_str = "_2"
else:
cislostr = (m.group())
cislo = int(cislostr[1:]) + 1
# it is normal number
file_path = file_path[:-len(cislostr)]
new_cislo_str = "_" + str(cislo)
file_path = file_path + new_cislo_str + file_extension # .zfill(2)
# a bit of recursion
file_path = suggest_filename(file_path)
return file_path | python | def suggest_filename(file_path, exists=None):
"""
Check if the path exists and append a number to its end if it does.
For debugging you can set as input whether the file exists or not.
"""
import os.path
import re
if not isinstance(exists, bool):
exists = os.path.exists(file_path)
if exists:
file_path, file_extension = os.path.splitext(file_path)
# print(file_path)
m = re.search(r"_\d+$", file_path)
if m is None:
# cislo = 2
new_cislo_str = "_2"
else:
cislostr = (m.group())
cislo = int(cislostr[1:]) + 1
# it is normal number
file_path = file_path[:-len(cislostr)]
new_cislo_str = "_" + str(cislo)
file_path = file_path + new_cislo_str + file_extension # .zfill(2)
# a bit of recursion
file_path = suggest_filename(file_path)
return file_path | [
"def",
"suggest_filename",
"(",
"file_path",
",",
"exists",
"=",
"None",
")",
":",
"import",
"os",
".",
"path",
"import",
"re",
"if",
"not",
"isinstance",
"(",
"exists",
",",
"bool",
")",
":",
"exists",
"=",
"os",
".",
"path",
".",
"exists",
"(",
"file_path",
")",
"if",
"exists",
":",
"file_path",
",",
"file_extension",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"file_path",
")",
"# print(file_path)",
"m",
"=",
"re",
".",
"search",
"(",
"r\"_\\d+$\"",
",",
"file_path",
")",
"if",
"m",
"is",
"None",
":",
"# cislo = 2",
"new_cislo_str",
"=",
"\"_2\"",
"else",
":",
"cislostr",
"=",
"(",
"m",
".",
"group",
"(",
")",
")",
"cislo",
"=",
"int",
"(",
"cislostr",
"[",
"1",
":",
"]",
")",
"+",
"1",
"# it is normal number",
"file_path",
"=",
"file_path",
"[",
":",
"-",
"len",
"(",
"cislostr",
")",
"]",
"new_cislo_str",
"=",
"\"_\"",
"+",
"str",
"(",
"cislo",
")",
"file_path",
"=",
"file_path",
"+",
"new_cislo_str",
"+",
"file_extension",
"# .zfill(2)",
"# trorcha rekurze",
"file_path",
"=",
"suggest_filename",
"(",
"file_path",
")",
"return",
"file_path"
] | Check if the path exists and append a number to its end if it does.
For debugging you can set as input whether the file exists or not. | [
"Try",
"if",
"exist",
"path",
"and",
"append",
"number",
"to",
"its",
"end",
".",
"For",
"debug",
"you",
"can",
"set",
"as",
"input",
"if",
"file",
"exists",
"or",
"not",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L30-L58 |
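The numbering behaviour of suggest_filename above is easiest to see with the exists flag, which short-circuits the filesystem check (the recursive re-check still assumes the suggested name is free on disk):

    from io3d.misc import suggest_filename

    print(suggest_filename('report.yaml', exists=False))   # 'report.yaml', kept as-is
    print(suggest_filename('report.yaml', exists=True))    # 'report_2.yaml'
    print(suggest_filename('report_2.yaml', exists=True))  # 'report_3.yaml', the counter is bumped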
mjirik/io3d | io3d/misc.py | obj_from_file | def obj_from_file(filename='annotation.yaml', filetype='auto'):
''' Read object from file '''
if filetype == 'auto':
_, ext = os.path.splitext(filename)
filetype = ext[1:]
if filetype in ('yaml', 'yml'):
from ruamel.yaml import YAML
yaml = YAML(typ="unsafe")
with open(filename, encoding="utf-8") as f:
obj = yaml.load(f)
if obj is None:
obj = {}
# import yaml
# with open(filename, encoding="utf-8") as f:
# intext = f.read()
# obj = yaml.load(intext)
elif filetype in ('pickle', 'pkl', 'pklz', 'picklezip'):
fcontent = read_pkl_and_pklz(filename)
# import pickle
if sys.version_info[0] < 3:
import cPickle as pickle
else:
import _pickle as pickle
# import sPickle as pickle
if sys.version_info.major == 2:
obj = pickle.loads(fcontent)
else:
obj = pickle.loads(fcontent, encoding="latin1")
else:
logger.error('Unknown filetype ' + filetype)
return obj | python | def obj_from_file(filename='annotation.yaml', filetype='auto'):
''' Read object from file '''
if filetype == 'auto':
_, ext = os.path.splitext(filename)
filetype = ext[1:]
if filetype in ('yaml', 'yml'):
from ruamel.yaml import YAML
yaml = YAML(typ="unsafe")
with open(filename, encoding="utf-8") as f:
obj = yaml.load(f)
if obj is None:
obj = {}
# import yaml
# with open(filename, encoding="utf-8") as f:
# intext = f.read()
# obj = yaml.load(intext)
elif filetype in ('pickle', 'pkl', 'pklz', 'picklezip'):
fcontent = read_pkl_and_pklz(filename)
# import pickle
if sys.version_info[0] < 3:
import cPickle as pickle
else:
import _pickle as pickle
# import sPickle as pickle
if sys.version_info.major == 2:
obj = pickle.loads(fcontent)
else:
obj = pickle.loads(fcontent, encoding="latin1")
else:
logger.error('Unknown filetype ' + filetype)
return obj | [
"def",
"obj_from_file",
"(",
"filename",
"=",
"'annotation.yaml'",
",",
"filetype",
"=",
"'auto'",
")",
":",
"if",
"filetype",
"==",
"'auto'",
":",
"_",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"filetype",
"=",
"ext",
"[",
"1",
":",
"]",
"if",
"filetype",
"in",
"(",
"'yaml'",
",",
"'yml'",
")",
":",
"from",
"ruamel",
".",
"yaml",
"import",
"YAML",
"yaml",
"=",
"YAML",
"(",
"typ",
"=",
"\"unsafe\"",
")",
"with",
"open",
"(",
"filename",
",",
"encoding",
"=",
"\"utf-8\"",
")",
"as",
"f",
":",
"obj",
"=",
"yaml",
".",
"load",
"(",
"f",
")",
"if",
"obj",
"is",
"None",
":",
"obj",
"=",
"{",
"}",
"# import yaml",
"# with open(filename, encoding=\"utf-8\") as f:",
"# intext = f.read()",
"# obj = yaml.load(intext)",
"elif",
"filetype",
"in",
"(",
"'pickle'",
",",
"'pkl'",
",",
"'pklz'",
",",
"'picklezip'",
")",
":",
"fcontent",
"=",
"read_pkl_and_pklz",
"(",
"filename",
")",
"# import pickle",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"import",
"cPickle",
"as",
"pickle",
"else",
":",
"import",
"_pickle",
"as",
"pickle",
"# import sPickle as pickle",
"if",
"sys",
".",
"version_info",
".",
"major",
"==",
"2",
":",
"obj",
"=",
"pickle",
".",
"loads",
"(",
"fcontent",
")",
"else",
":",
"obj",
"=",
"pickle",
".",
"loads",
"(",
"fcontent",
",",
"encoding",
"=",
"\"latin1\"",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'Unknown filetype '",
"+",
"filetype",
")",
"return",
"obj"
] | Read object from file | [
"Read",
"object",
"from",
"file"
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L61-L93 |
mjirik/io3d | io3d/misc.py | read_pkl_and_pklz | def read_pkl_and_pklz(filename):
"""
Try to read a gzipped or plain pickle file
"""
fcontent = None
try:
import gzip
f = gzip.open(filename, 'rb')
fcontent = f.read()
f.close()
except IOError as e:
# if the problem is in not gzip file
logger.info("Input gzip exception: " + str(e))
f = open(filename, 'rb')
fcontent = f.read()
f.close()
except Exception as e:
# other problem
import traceback
logger.error("Input gzip exception: " + str(e))
logger.error(traceback.format_exc())
return fcontent | python | def read_pkl_and_pklz(filename):
"""
Try to read a gzipped or plain pickle file
"""
fcontent = None
try:
import gzip
f = gzip.open(filename, 'rb')
fcontent = f.read()
f.close()
except IOError as e:
# if the problem is in not gzip file
logger.info("Input gzip exception: " + str(e))
f = open(filename, 'rb')
fcontent = f.read()
f.close()
except Exception as e:
# other problem
import traceback
logger.error("Input gzip exception: " + str(e))
logger.error(traceback.format_exc())
return fcontent | [
"def",
"read_pkl_and_pklz",
"(",
"filename",
")",
":",
"fcontent",
"=",
"None",
"try",
":",
"import",
"gzip",
"f",
"=",
"gzip",
".",
"open",
"(",
"filename",
",",
"'rb'",
")",
"fcontent",
"=",
"f",
".",
"read",
"(",
")",
"f",
".",
"close",
"(",
")",
"except",
"IOError",
"as",
"e",
":",
"# if the problem is in not gzip file",
"logger",
".",
"info",
"(",
"\"Input gzip exception: \"",
"+",
"str",
"(",
"e",
")",
")",
"f",
"=",
"open",
"(",
"filename",
",",
"'rb'",
")",
"fcontent",
"=",
"f",
".",
"read",
"(",
")",
"f",
".",
"close",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"# other problem",
"import",
"traceback",
"logger",
".",
"error",
"(",
"\"Input gzip exception: \"",
"+",
"str",
"(",
"e",
")",
")",
"logger",
".",
"error",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"return",
"fcontent"
] | Try to read a gzipped or plain pickle file | [
"Try",
"read",
"zipped",
"or",
"not",
"zipped",
"pickle",
"file"
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L96-L118 |
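A quick round trip for read_pkl_and_pklz above; the same call also accepts a plain, non-gzipped pickle thanks to the IOError fallback:

    import gzip
    import os
    import pickle
    import tempfile

    from io3d.misc import read_pkl_and_pklz

    path = os.path.join(tempfile.mkdtemp(), 'sample.pklz')
    with gzip.open(path, 'wb') as f:
        f.write(pickle.dumps({'spacing': [1.0, 0.5, 0.5]}))

    print(pickle.loads(read_pkl_and_pklz(path)))   # {'spacing': [1.0, 0.5, 0.5]}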
mjirik/io3d | io3d/misc.py | obj_to_file | def obj_to_file(obj, filename, filetype='auto', ndarray_to_list=False, squeeze=True):
'''Writes annotation in file.
:param filetype:
auto
yaml
pkl, pickle
pklz, picklezip
:param ndarray_to_list: convert ndarrays in obj to lists
:param squeeze: squeeze ndarray
'''
# import json
# with open(filename, mode='w') as f:
# json.dump(annotation,f)
if ndarray_to_list:
obj = ndarray_to_list_in_structure(obj, squeeze=squeeze)
# write to yaml
d = os.path.dirname(os.path.abspath(filename))
if not os.path.exists(d):
os.makedirs(d)
if filetype == 'auto':
_, ext = os.path.splitext(filename)
filetype = ext[1:]
if filetype in ('yaml', 'yml'):
# import yaml
from ruamel.yaml import YAML
yaml = YAML(typ="unsafe")
with open(filename, 'wt', encoding="utf-8") as f:
yaml.dump(obj, f)
# if sys.version_info.major == 2:
# with open(filename, 'wb') as f:
# yaml.dump(obj, f, encoding="utf-8")
# else:
# with open(filename, "w", encoding="utf-8") as f:
# yaml.dump(obj, f)
elif filetype in ('pickle', 'pkl'):
f = open(filename, 'wb')
logger.info("filename " + filename)
# if sys.version_info[0] < 3: import cPickle as pickle
# else: import _pickle as pickle
import pickle
pickle.dump(obj, f, -1)
f.close
elif filetype in ('streamingpicklezip', 'spklz'):
# this is not working :-(
import gzip
import sPickle as pickle
f = gzip.open(filename, 'wb', compresslevel=1)
# f = open(filename, 'wb')
pickle.s_dump(obj, f)
f.close
elif filetype in ('picklezip', 'pklz'):
import gzip
if sys.version_info[0] < 3: import cPickle as pickle
else: import _pickle as pickle
f = gzip.open(filename, 'wb', compresslevel=1)
# f = open(filename, 'wb')
pickle.dump(obj, f)
f.close
elif filetype in('mat'):
import scipy.io as sio
sio.savemat(filename, obj)
else:
logger.error('Unknown filetype ' + filetype) | python | def obj_to_file(obj, filename, filetype='auto', ndarray_to_list=False, squeeze=True):
'''Writes annotation in file.
:param filetype:
auto
yaml
pkl, pickle
pklz, picklezip
:param ndarray_to_list: convert ndarrays in obj to lists
:param squeeze: squeeze ndarray
'''
# import json
# with open(filename, mode='w') as f:
# json.dump(annotation,f)
if ndarray_to_list:
obj = ndarray_to_list_in_structure(obj, squeeze=squeeze)
# write to yaml
d = os.path.dirname(os.path.abspath(filename))
if not os.path.exists(d):
os.makedirs(d)
if filetype == 'auto':
_, ext = os.path.splitext(filename)
filetype = ext[1:]
if filetype in ('yaml', 'yml'):
# import yaml
from ruamel.yaml import YAML
yaml = YAML(typ="unsafe")
with open(filename, 'wt', encoding="utf-8") as f:
yaml.dump(obj, f)
# if sys.version_info.major == 2:
# with open(filename, 'wb') as f:
# yaml.dump(obj, f, encoding="utf-8")
# else:
# with open(filename, "w", encoding="utf-8") as f:
# yaml.dump(obj, f)
elif filetype in ('pickle', 'pkl'):
f = open(filename, 'wb')
logger.info("filename " + filename)
# if sys.version_info[0] < 3: import cPickle as pickle
# else: import _pickle as pickle
import pickle
pickle.dump(obj, f, -1)
f.close
elif filetype in ('streamingpicklezip', 'spklz'):
# this is not working :-(
import gzip
import sPickle as pickle
f = gzip.open(filename, 'wb', compresslevel=1)
# f = open(filename, 'wb')
pickle.s_dump(obj, f)
f.close
elif filetype in ('picklezip', 'pklz'):
import gzip
if sys.version_info[0] < 3: import cPickle as pickle
else: import _pickle as pickle
f = gzip.open(filename, 'wb', compresslevel=1)
# f = open(filename, 'wb')
pickle.dump(obj, f)
f.close
elif filetype in('mat'):
import scipy.io as sio
sio.savemat(filename, obj)
else:
logger.error('Unknown filetype ' + filetype) | [
"def",
"obj_to_file",
"(",
"obj",
",",
"filename",
",",
"filetype",
"=",
"'auto'",
",",
"ndarray_to_list",
"=",
"False",
",",
"squeeze",
"=",
"True",
")",
":",
"# import json",
"# with open(filename, mode='w') as f:",
"# json.dump(annotation,f)",
"if",
"ndarray_to_list",
":",
"obj",
"=",
"ndarray_to_list_in_structure",
"(",
"obj",
",",
"squeeze",
"=",
"squeeze",
")",
"# write to yaml",
"d",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"d",
")",
":",
"os",
".",
"makedirs",
"(",
"d",
")",
"if",
"filetype",
"==",
"'auto'",
":",
"_",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"filetype",
"=",
"ext",
"[",
"1",
":",
"]",
"if",
"filetype",
"in",
"(",
"'yaml'",
",",
"'yml'",
")",
":",
"# import yaml",
"from",
"ruamel",
".",
"yaml",
"import",
"YAML",
"yaml",
"=",
"YAML",
"(",
"typ",
"=",
"\"unsafe\"",
")",
"with",
"open",
"(",
"filename",
",",
"'wt'",
",",
"encoding",
"=",
"\"utf-8\"",
")",
"as",
"f",
":",
"yaml",
".",
"dump",
"(",
"obj",
",",
"f",
")",
"# if sys.version_info.major == 2:",
"# with open(filename, 'wb') as f:",
"# yaml.dump(obj, f, encoding=\"utf-8\")",
"# else:",
"# with open(filename, \"w\", encoding=\"utf-8\") as f:",
"# yaml.dump(obj, f)",
"elif",
"filetype",
"in",
"(",
"'pickle'",
",",
"'pkl'",
")",
":",
"f",
"=",
"open",
"(",
"filename",
",",
"'wb'",
")",
"logger",
".",
"info",
"(",
"\"filename \"",
"+",
"filename",
")",
"# if sys.version_info[0] < 3: import cPickle as pickle",
"# else: import _pickle as pickle",
"import",
"pickle",
"pickle",
".",
"dump",
"(",
"obj",
",",
"f",
",",
"-",
"1",
")",
"f",
".",
"close",
"elif",
"filetype",
"in",
"(",
"'streamingpicklezip'",
",",
"'spklz'",
")",
":",
"# this is not working :-(",
"import",
"gzip",
"import",
"sPickle",
"as",
"pickle",
"f",
"=",
"gzip",
".",
"open",
"(",
"filename",
",",
"'wb'",
",",
"compresslevel",
"=",
"1",
")",
"# f = open(filename, 'wb')",
"pickle",
".",
"s_dump",
"(",
"obj",
",",
"f",
")",
"f",
".",
"close",
"elif",
"filetype",
"in",
"(",
"'picklezip'",
",",
"'pklz'",
")",
":",
"import",
"gzip",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"import",
"cPickle",
"as",
"pickle",
"else",
":",
"import",
"_pickle",
"as",
"pickle",
"f",
"=",
"gzip",
".",
"open",
"(",
"filename",
",",
"'wb'",
",",
"compresslevel",
"=",
"1",
")",
"# f = open(filename, 'wb')",
"pickle",
".",
"dump",
"(",
"obj",
",",
"f",
")",
"f",
".",
"close",
"elif",
"filetype",
"in",
"(",
"'mat'",
")",
":",
"import",
"scipy",
".",
"io",
"as",
"sio",
"sio",
".",
"savemat",
"(",
"filename",
",",
"obj",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'Unknown filetype '",
"+",
"filetype",
")"
] | Writes annotation in file.
:param filetype:
auto
yaml
pkl, pickle
pklz, picklezip
:param ndarray_to_list: convert ndarrays in obj to lists
:param squeeze: squeeze ndarray | [
"Writes",
"annotation",
"in",
"file",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L121-L189 |
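obj_to_file and the earlier obj_from_file are symmetric, with the format picked from the file extension; a round-trip sketch (file names invented) is:

    import os
    import tempfile

    from io3d.misc import obj_from_file, obj_to_file

    meta = {'voxelsize_mm': [1.0, 0.5, 0.5], 'series': 7}
    folder = tempfile.mkdtemp()

    for name in ('meta.yaml', 'meta.pkl', 'meta.pklz'):
        path = os.path.join(folder, name)
        obj_to_file(meta, path)            # format chosen from the extension ('auto')
        print(name, obj_from_file(path))   # each one reads back as the same dict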
mjirik/io3d | io3d/misc.py | resize_to_shape | def resize_to_shape(data, shape, zoom=None, mode='nearest', order=0):
"""
Function resize input data to specific shape.
:param data: input 3d array-like data
:param shape: shape of output data
:param zoom: zoom is used for back compatibility
:mode: default is 'nearest'
"""
# @TODO remove old code in except part
try:
# rint 'pred vyjimkou'
# aise Exception ('test without skimage')
# rint 'za vyjimkou'
import skimage
import skimage.transform
# Now we need reshape seeds and segmentation to original size
segm_orig_scale = skimage.transform.resize(
data, shape, order=0,
preserve_range=True,
mode="constant",
)
segmentation = segm_orig_scale
logger.debug('resize to orig with skimage')
except:
import scipy
import scipy.ndimage
dtype = data.dtype
if zoom is None:
zoom = shape / np.asarray(data.shape).astype(np.double)
segm_orig_scale = scipy.ndimage.zoom(
data,
1.0 / zoom,
mode=mode,
order=order
).astype(dtype)
logger.debug('resize to orig with scipy.ndimage')
# @TODO remove this hack for cropping to the same size
# basically it is solved, but it could be done more elegantly in zoom
# unfortunately there is probably a bug there
# rint 'd3d ', self.data3d.shape
# rint 's orig scale shape ', segm_orig_scale.shape
shp = [
np.min([segm_orig_scale.shape[0], shape[0]]),
np.min([segm_orig_scale.shape[1], shape[1]]),
np.min([segm_orig_scale.shape[2], shape[2]]),
]
# elf.data3d = self.data3d[0:shp[0], 0:shp[1], 0:shp[2]]
# mport ipdb; ipdb.set_trace() # BREAKPOINT
segmentation = np.zeros(shape, dtype=dtype)
segmentation[
0:shp[0],
0:shp[1],
0:shp[2]] = segm_orig_scale[0:shp[0], 0:shp[1], 0:shp[2]]
del segm_orig_scale
return segmentation | python | def resize_to_shape(data, shape, zoom=None, mode='nearest', order=0):
"""
Function resize input data to specific shape.
:param data: input 3d array-like data
:param shape: shape of output data
:param zoom: zoom is used for back compatibility
:mode: default is 'nearest'
"""
# @TODO remove old code in except part
try:
# rint 'pred vyjimkou'
# aise Exception ('test without skimage')
# rint 'za vyjimkou'
import skimage
import skimage.transform
# Now we need reshape seeds and segmentation to original size
segm_orig_scale = skimage.transform.resize(
data, shape, order=0,
preserve_range=True,
mode="constant",
)
segmentation = segm_orig_scale
logger.debug('resize to orig with skimage')
except:
import scipy
import scipy.ndimage
dtype = data.dtype
if zoom is None:
zoom = shape / np.asarray(data.shape).astype(np.double)
segm_orig_scale = scipy.ndimage.zoom(
data,
1.0 / zoom,
mode=mode,
order=order
).astype(dtype)
logger.debug('resize to orig with scipy.ndimage')
# @TODO remove this hack for cropping to the same size
# basically it is solved, but it could be done more elegantly in zoom
# unfortunately there is probably a bug there
# rint 'd3d ', self.data3d.shape
# rint 's orig scale shape ', segm_orig_scale.shape
shp = [
np.min([segm_orig_scale.shape[0], shape[0]]),
np.min([segm_orig_scale.shape[1], shape[1]]),
np.min([segm_orig_scale.shape[2], shape[2]]),
]
# elf.data3d = self.data3d[0:shp[0], 0:shp[1], 0:shp[2]]
# mport ipdb; ipdb.set_trace() # BREAKPOINT
segmentation = np.zeros(shape, dtype=dtype)
segmentation[
0:shp[0],
0:shp[1],
0:shp[2]] = segm_orig_scale[0:shp[0], 0:shp[1], 0:shp[2]]
del segm_orig_scale
return segmentation | [
"def",
"resize_to_shape",
"(",
"data",
",",
"shape",
",",
"zoom",
"=",
"None",
",",
"mode",
"=",
"'nearest'",
",",
"order",
"=",
"0",
")",
":",
"# @TODO remove old code in except part",
"try",
":",
"# rint 'pred vyjimkou'",
"# aise Exception ('test without skimage')",
"# rint 'za vyjimkou'",
"import",
"skimage",
"import",
"skimage",
".",
"transform",
"# Now we need reshape seeds and segmentation to original size",
"segm_orig_scale",
"=",
"skimage",
".",
"transform",
".",
"resize",
"(",
"data",
",",
"shape",
",",
"order",
"=",
"0",
",",
"preserve_range",
"=",
"True",
",",
"mode",
"=",
"\"constant\"",
",",
")",
"segmentation",
"=",
"segm_orig_scale",
"logger",
".",
"debug",
"(",
"'resize to orig with skimage'",
")",
"except",
":",
"import",
"scipy",
"import",
"scipy",
".",
"ndimage",
"dtype",
"=",
"data",
".",
"dtype",
"if",
"zoom",
"is",
"None",
":",
"zoom",
"=",
"shape",
"/",
"np",
".",
"asarray",
"(",
"data",
".",
"shape",
")",
".",
"astype",
"(",
"np",
".",
"double",
")",
"segm_orig_scale",
"=",
"scipy",
".",
"ndimage",
".",
"zoom",
"(",
"data",
",",
"1.0",
"/",
"zoom",
",",
"mode",
"=",
"mode",
",",
"order",
"=",
"order",
")",
".",
"astype",
"(",
"dtype",
")",
"logger",
".",
"debug",
"(",
"'resize to orig with scipy.ndimage'",
")",
"# @TODO odstranit hack pro oříznutí na stejnou velikost",
"# v podstatě je to vyřešeno, ale nechalo by se to dělat elegantněji v zoom",
"# tam je bohužel patrně bug",
"# rint 'd3d ', self.data3d.shape",
"# rint 's orig scale shape ', segm_orig_scale.shape",
"shp",
"=",
"[",
"np",
".",
"min",
"(",
"[",
"segm_orig_scale",
".",
"shape",
"[",
"0",
"]",
",",
"shape",
"[",
"0",
"]",
"]",
")",
",",
"np",
".",
"min",
"(",
"[",
"segm_orig_scale",
".",
"shape",
"[",
"1",
"]",
",",
"shape",
"[",
"1",
"]",
"]",
")",
",",
"np",
".",
"min",
"(",
"[",
"segm_orig_scale",
".",
"shape",
"[",
"2",
"]",
",",
"shape",
"[",
"2",
"]",
"]",
")",
",",
"]",
"# elf.data3d = self.data3d[0:shp[0], 0:shp[1], 0:shp[2]]",
"# mport ipdb; ipdb.set_trace() # BREAKPOINT",
"segmentation",
"=",
"np",
".",
"zeros",
"(",
"shape",
",",
"dtype",
"=",
"dtype",
")",
"segmentation",
"[",
"0",
":",
"shp",
"[",
"0",
"]",
",",
"0",
":",
"shp",
"[",
"1",
"]",
",",
"0",
":",
"shp",
"[",
"2",
"]",
"]",
"=",
"segm_orig_scale",
"[",
"0",
":",
"shp",
"[",
"0",
"]",
",",
"0",
":",
"shp",
"[",
"1",
"]",
",",
"0",
":",
"shp",
"[",
"2",
"]",
"]",
"del",
"segm_orig_scale",
"return",
"segmentation"
] | Function resize input data to specific shape.
:param data: input 3d array-like data
:param shape: shape of output data
:param zoom: zoom is used for back compatibility
:mode: default is 'nearest' | [
"Function",
"resize",
"input",
"data",
"to",
"specific",
"shape",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L191-L253 |
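A small sketch of resize_to_shape above on a synthetic label volume; note that the skimage branch returns floats, so labels may need to be cast back:

    import numpy as np

    from io3d.misc import resize_to_shape

    data = np.zeros([10, 12, 8], dtype=np.int16)
    data[3:7, 4:9, 2:6] = 1

    resized = resize_to_shape(data, [20, 24, 16])
    print(resized.shape)                          # (20, 24, 16) with the skimage branch
    print(np.unique(resized.astype(np.int16)))    # [0 1], nearest-neighbour keeps label values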
mjirik/io3d | io3d/misc.py | resize_to_mm | def resize_to_mm(data3d, voxelsize_mm, new_voxelsize_mm, mode='nearest'):
"""
Function can resize data3d or segmentation to specified voxelsize_mm
:new_voxelsize_mm: requested voxelsize. List of 3 numbers, also
can be a string 'orig', 'orig*2' and 'orig*4'.
:voxelsize_mm: size of voxel
:mode: default is 'nearest'
"""
import scipy
import scipy.ndimage
if np.all(list(new_voxelsize_mm) == 'orig'):
new_voxelsize_mm = np.array(voxelsize_mm)
elif np.all(list(new_voxelsize_mm) == 'orig*2'):
new_voxelsize_mm = np.array(voxelsize_mm) * 2
elif np.all(list(new_voxelsize_mm) == 'orig*4'):
new_voxelsize_mm = np.array(voxelsize_mm) * 4
# vx_size = np.array(metadata['voxelsize_mm']) * 4
zoom = voxelsize_mm / (1.0 * np.array(new_voxelsize_mm))
data3d_res = scipy.ndimage.zoom(
data3d,
zoom,
mode=mode,
order=1
).astype(data3d.dtype)
return data3d_res | python | def resize_to_mm(data3d, voxelsize_mm, new_voxelsize_mm, mode='nearest'):
"""
Function can resize data3d or segmentation to specified voxelsize_mm
:new_voxelsize_mm: requested voxelsize. List of 3 numbers, also
can be a string 'orig', 'orig*2' and 'orig*4'.
:voxelsize_mm: size of voxel
:mode: default is 'nearest'
"""
import scipy
import scipy.ndimage
if np.all(list(new_voxelsize_mm) == 'orig'):
new_voxelsize_mm = np.array(voxelsize_mm)
elif np.all(list(new_voxelsize_mm) == 'orig*2'):
new_voxelsize_mm = np.array(voxelsize_mm) * 2
elif np.all(list(new_voxelsize_mm) == 'orig*4'):
new_voxelsize_mm = np.array(voxelsize_mm) * 4
# vx_size = np.array(metadata['voxelsize_mm']) * 4
zoom = voxelsize_mm / (1.0 * np.array(new_voxelsize_mm))
data3d_res = scipy.ndimage.zoom(
data3d,
zoom,
mode=mode,
order=1
).astype(data3d.dtype)
return data3d_res | [
"def",
"resize_to_mm",
"(",
"data3d",
",",
"voxelsize_mm",
",",
"new_voxelsize_mm",
",",
"mode",
"=",
"'nearest'",
")",
":",
"import",
"scipy",
"import",
"scipy",
".",
"ndimage",
"if",
"np",
".",
"all",
"(",
"list",
"(",
"new_voxelsize_mm",
")",
"==",
"'orig'",
")",
":",
"new_voxelsize_mm",
"=",
"np",
".",
"array",
"(",
"voxelsize_mm",
")",
"elif",
"np",
".",
"all",
"(",
"list",
"(",
"new_voxelsize_mm",
")",
"==",
"'orig*2'",
")",
":",
"new_voxelsize_mm",
"=",
"np",
".",
"array",
"(",
"voxelsize_mm",
")",
"*",
"2",
"elif",
"np",
".",
"all",
"(",
"list",
"(",
"new_voxelsize_mm",
")",
"==",
"'orig*4'",
")",
":",
"new_voxelsize_mm",
"=",
"np",
".",
"array",
"(",
"voxelsize_mm",
")",
"*",
"4",
"# vx_size = np.array(metadata['voxelsize_mm']) * 4",
"zoom",
"=",
"voxelsize_mm",
"/",
"(",
"1.0",
"*",
"np",
".",
"array",
"(",
"new_voxelsize_mm",
")",
")",
"data3d_res",
"=",
"scipy",
".",
"ndimage",
".",
"zoom",
"(",
"data3d",
",",
"zoom",
",",
"mode",
"=",
"mode",
",",
"order",
"=",
"1",
")",
".",
"astype",
"(",
"data3d",
".",
"dtype",
")",
"return",
"data3d_res"
] | Function can resize data3d or segmentation to specified voxelsize_mm
:new_voxelsize_mm: requested voxelsize. List of 3 numbers, also
can be a string 'orig', 'orig*2' and 'orig*4'.
:voxelsize_mm: size of voxel
:mode: default is 'nearest' | [
"Function",
"can",
"resize",
"data3d",
"or",
"segmentation",
"to",
"specifed",
"voxelsize_mm",
":",
"new_voxelsize_mm",
":",
"requested",
"voxelsize",
".",
"List",
"of",
"3",
"numbers",
"also",
"can",
"be",
"a",
"string",
"orig",
"orgi",
"*",
"2",
"and",
"orgi",
"*",
"4",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L256-L283 |
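resize_to_mm above rescales each axis by the ratio of old to new voxel size; a sketch with invented spacings:

    import numpy as np

    from io3d.misc import resize_to_mm

    data3d = np.random.rand(40, 128, 128)
    # resample from 5 x 1 x 1 mm voxels to isotropic 2 mm voxels
    out = resize_to_mm(data3d, voxelsize_mm=[5.0, 1.0, 1.0], new_voxelsize_mm=[2.0, 2.0, 2.0])
    print(out.shape)   # about (100, 64, 64): each axis is scaled by old/new voxel size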
mjirik/io3d | io3d/misc.py | suits_with_dtype | def suits_with_dtype(mn, mx, dtype):
"""
Check whether range of values can be stored into defined data type.
:param mn: range minimum
:param mx: range maximum
:param dtype:
:return:
"""
type_info = np.iinfo(dtype)
if mx <= type_info.max and mn >= type_info.min:
return True
else:
return False | python | def suits_with_dtype(mn, mx, dtype):
"""
Check whether range of values can be stored into defined data type.
:param mn: range minimum
:param mx: range maximum
:param dtype:
:return:
"""
type_info = np.iinfo(dtype)
if mx <= type_info.max and mn >= type_info.min:
return True
else:
return False | [
"def",
"suits_with_dtype",
"(",
"mn",
",",
"mx",
",",
"dtype",
")",
":",
"type_info",
"=",
"np",
".",
"iinfo",
"(",
"dtype",
")",
"if",
"mx",
"<=",
"type_info",
".",
"max",
"and",
"mn",
">=",
"type_info",
".",
"min",
":",
"return",
"True",
"else",
":",
"return",
"False"
] | Check whether range of values can be stored into defined data type.
:param mn: range minimum
:param mx: range maximum
:param dtype:
:return: | [
"Check",
"whether",
"range",
"of",
"values",
"can",
"be",
"stored",
"into",
"defined",
"data",
"type",
".",
":",
"param",
"mn",
":",
"range",
"minimum",
":",
"param",
"mx",
":",
"range",
"maximum",
":",
"param",
"dtype",
":",
":",
"return",
":"
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L285-L297 |
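suits_with_dtype above is a plain range check against np.iinfo; for example:

    import numpy as np

    from io3d.misc import suits_with_dtype

    print(suits_with_dtype(0, 255, np.dtype(np.uint8)))    # True, 0..255 fits into uint8
    print(suits_with_dtype(-1, 255, np.dtype(np.uint8)))   # False, -1 underflows uint8
    print(suits_with_dtype(-1, 255, np.dtype(np.int16)))   # True, int16 covers -32768..32767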
mjirik/io3d | io3d/misc.py | use_economic_dtype | def use_economic_dtype(data3d, slope=1, inter=0, dtype=None):
""" Use more economic integer-like dtype if it is possible.
:param data3d:
:param dtype: if dtype is not used, the automatic is used
:return:
"""
if dtype is None:
dtype = data3d.dtype
if issubclass(dtype.type, np.integer):
mn = data3d.min() * slope + inter
mx = data3d.max() * slope + inter
if suits_with_dtype(mn, mx, dtype=np.uint8):
dtype = np.uint8
elif suits_with_dtype(mn, mx, dtype=np.int8):
dtype = np.int8
elif suits_with_dtype(mn, mx, dtype=np.uint16):
dtype = np.uint16
elif suits_with_dtype(mn, mx, dtype=np.int16):
dtype = np.int16
elif suits_with_dtype(mn, mx, dtype=np.uint32):
dtype = np.uint32
elif suits_with_dtype(mn, mx, dtype=np.int32):
dtype = np.int32
# new_data3d = ((np.float(slope) * data3d) + np.float(inter)).astype(dtype)
if slope == 1 and inter == 0:
# this can prevent out of memmory
new_data3d = data3d.astype(dtype)
else:
new_data3d = ((slope * data3d) + inter).astype(dtype)
return new_data3d | python | def use_economic_dtype(data3d, slope=1, inter=0, dtype=None):
""" Use more economic integer-like dtype if it is possible.
:param data3d:
:param dtype: if dtype is not used, the automatic is used
:return:
"""
if dtype is None:
dtype = data3d.dtype
if issubclass(dtype.type, np.integer):
mn = data3d.min() * slope + inter
mx = data3d.max() * slope + inter
if suits_with_dtype(mn, mx, dtype=np.uint8):
dtype = np.uint8
elif suits_with_dtype(mn, mx, dtype=np.int8):
dtype = np.int8
elif suits_with_dtype(mn, mx, dtype=np.uint16):
dtype = np.uint16
elif suits_with_dtype(mn, mx, dtype=np.int16):
dtype = np.int16
elif suits_with_dtype(mn, mx, dtype=np.uint32):
dtype = np.uint32
elif suits_with_dtype(mn, mx, dtype=np.int32):
dtype = np.int32
# new_data3d = ((np.float(slope) * data3d) + np.float(inter)).astype(dtype)
if slope == 1 and inter == 0:
# this can prevent out of memmory
new_data3d = data3d.astype(dtype)
else:
new_data3d = ((slope * data3d) + inter).astype(dtype)
return new_data3d | [
"def",
"use_economic_dtype",
"(",
"data3d",
",",
"slope",
"=",
"1",
",",
"inter",
"=",
"0",
",",
"dtype",
"=",
"None",
")",
":",
"if",
"dtype",
"is",
"None",
":",
"dtype",
"=",
"data3d",
".",
"dtype",
"if",
"issubclass",
"(",
"dtype",
".",
"type",
",",
"np",
".",
"integer",
")",
":",
"mn",
"=",
"data3d",
".",
"min",
"(",
")",
"*",
"slope",
"+",
"inter",
"mx",
"=",
"data3d",
".",
"max",
"(",
")",
"*",
"slope",
"+",
"inter",
"if",
"suits_with_dtype",
"(",
"mn",
",",
"mx",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
":",
"dtype",
"=",
"np",
".",
"uint8",
"elif",
"suits_with_dtype",
"(",
"mn",
",",
"mx",
",",
"dtype",
"=",
"np",
".",
"int8",
")",
":",
"dtype",
"=",
"np",
".",
"int8",
"elif",
"suits_with_dtype",
"(",
"mn",
",",
"mx",
",",
"dtype",
"=",
"np",
".",
"uint16",
")",
":",
"dtype",
"=",
"np",
".",
"uint16",
"elif",
"suits_with_dtype",
"(",
"mn",
",",
"mx",
",",
"dtype",
"=",
"np",
".",
"int16",
")",
":",
"dtype",
"=",
"np",
".",
"int16",
"elif",
"suits_with_dtype",
"(",
"mn",
",",
"mx",
",",
"dtype",
"=",
"np",
".",
"uint32",
")",
":",
"dtype",
"=",
"np",
".",
"uint32",
"elif",
"suits_with_dtype",
"(",
"mn",
",",
"mx",
",",
"dtype",
"=",
"np",
".",
"int32",
")",
":",
"dtype",
"=",
"np",
".",
"int32",
"# new_data3d = ((np.float(slope) * data3d) + np.float(inter)).astype(dtype)",
"if",
"slope",
"==",
"1",
"and",
"inter",
"==",
"0",
":",
"# this can prevent out of memmory",
"new_data3d",
"=",
"data3d",
".",
"astype",
"(",
"dtype",
")",
"else",
":",
"new_data3d",
"=",
"(",
"(",
"slope",
"*",
"data3d",
")",
"+",
"inter",
")",
".",
"astype",
"(",
"dtype",
")",
"return",
"new_data3d"
] | Use more economic integer-like dtype if it is possible.
:param data3d:
:param dtype: if dtype is not used, the automatic is used
:return: | [
"Use",
"more",
"economic",
"integer",
"-",
"like",
"dtype",
"if",
"it",
"is",
"possible",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/misc.py#L299-L331 |
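A sketch of the dtype downcast performed by use_economic_dtype above; the sample values are invented and io3d is assumed to be importable.

import numpy as np
from io3d.misc import use_economic_dtype

data3d = np.array([[[0, 120], [255, 3]]], dtype=np.int64)
small = use_economic_dtype(data3d)
print(small.dtype)    # uint8, because every value fits into 0..255

# with a DICOM-style rescale, slope * data + inter is computed first
shifted = use_economic_dtype(data3d, slope=1, inter=-1024)
print(shifted.dtype)  # int16, since the shifted range now reaches -1024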
mjirik/io3d | io3d/dcmtools.py | get_sitk_image_from_ndarray | def get_sitk_image_from_ndarray(data3d):
"""
Prepare SimpleItk Image object and rescale data to unsigned types.
Simple ITK with version higher than 1.0.0 can not write signed int16. This function check
the SimpleITK version and use work around with Rescale Intercept and Rescale Slope
:param data3d:
:return:
"""
import SimpleITK as sitk
rescale_intercept = None
if sitk.Version.MajorVersion() > 0:
if data3d.dtype == np.int8:
rescale_intercept = -2**7
data3d = (data3d - rescale_intercept).astype(np.uint8)
elif data3d.dtype == np.int16:
# simpleitk is not able to store this. It uses only 11 bites
# rescale_intercept = -2**15
rescale_intercept = -2**10
data3d = (data3d - rescale_intercept).astype(np.uint16)
elif data3d.dtype == np.int32:
rescale_intercept = -2**31
data3d = (data3d - rescale_intercept).astype(np.uint16)
dim = sitk.GetImageFromArray(data3d)
if sitk.Version.MajorVersion() > 0:
if rescale_intercept is not None:
# rescale slope (0028|1053), rescale intercept (0028|1052)
dim.SetMetaData("0028|1052", str(rescale_intercept))
dim.SetMetaData("0028|1053", "1")
return dim | python | def get_sitk_image_from_ndarray(data3d):
"""
Prepare SimpleItk Image object and rescale data to unsigned types.
Simple ITK with version higher than 1.0.0 can not write signed int16. This function check
the SimpleITK version and use work around with Rescale Intercept and Rescale Slope
:param data3d:
:return:
"""
import SimpleITK as sitk
rescale_intercept = None
if sitk.Version.MajorVersion() > 0:
if data3d.dtype == np.int8:
rescale_intercept = -2**7
data3d = (data3d - rescale_intercept).astype(np.uint8)
elif data3d.dtype == np.int16:
# simpleitk is not able to store this. It uses only 11 bites
# rescale_intercept = -2**15
rescale_intercept = -2**10
data3d = (data3d - rescale_intercept).astype(np.uint16)
elif data3d.dtype == np.int32:
rescale_intercept = -2**31
data3d = (data3d - rescale_intercept).astype(np.uint16)
dim = sitk.GetImageFromArray(data3d)
if sitk.Version.MajorVersion() > 0:
if rescale_intercept is not None:
# rescale slope (0028|1053), rescale intercept (0028|1052)
dim.SetMetaData("0028|1052", str(rescale_intercept))
dim.SetMetaData("0028|1053", "1")
return dim | [
"def",
"get_sitk_image_from_ndarray",
"(",
"data3d",
")",
":",
"import",
"SimpleITK",
"as",
"sitk",
"rescale_intercept",
"=",
"None",
"if",
"sitk",
".",
"Version",
".",
"MajorVersion",
"(",
")",
">",
"0",
":",
"if",
"data3d",
".",
"dtype",
"==",
"np",
".",
"int8",
":",
"rescale_intercept",
"=",
"-",
"2",
"**",
"7",
"data3d",
"=",
"(",
"data3d",
"-",
"rescale_intercept",
")",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"elif",
"data3d",
".",
"dtype",
"==",
"np",
".",
"int16",
":",
"# simpleitk is not able to store this. It uses only 11 bites",
"# rescale_intercept = -2**15",
"rescale_intercept",
"=",
"-",
"2",
"**",
"10",
"data3d",
"=",
"(",
"data3d",
"-",
"rescale_intercept",
")",
".",
"astype",
"(",
"np",
".",
"uint16",
")",
"elif",
"data3d",
".",
"dtype",
"==",
"np",
".",
"int32",
":",
"rescale_intercept",
"=",
"-",
"2",
"**",
"31",
"data3d",
"=",
"(",
"data3d",
"-",
"rescale_intercept",
")",
".",
"astype",
"(",
"np",
".",
"uint16",
")",
"dim",
"=",
"sitk",
".",
"GetImageFromArray",
"(",
"data3d",
")",
"if",
"sitk",
".",
"Version",
".",
"MajorVersion",
"(",
")",
">",
"0",
":",
"if",
"rescale_intercept",
"is",
"not",
"None",
":",
"# rescale slope (0028|1053), rescale intercept (0028|1052)",
"dim",
".",
"SetMetaData",
"(",
"\"0028|1052\"",
",",
"str",
"(",
"rescale_intercept",
")",
")",
"dim",
".",
"SetMetaData",
"(",
"\"0028|1053\"",
",",
"\"1\"",
")",
"return",
"dim"
] | Prepare SimpleItk Image object and rescale data to unsigned types.
Simple ITK with version higher than 1.0.0 can not write signed int16. This function check
the SimpleITK version and use work around with Rescale Intercept and Rescale Slope
:param data3d:
:return: | [
"Prepare",
"SimpleItk",
"Image",
"object",
"and",
"rescale",
"data",
"to",
"unsigned",
"types",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmtools.py#L27-L59 |
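A hedged usage sketch for the SimpleITK helper above: it builds a signed int16 volume (the case the rescale workaround exists for), converts it, and writes it out. SimpleITK (1.x or newer) must be installed; the output filename is hypothetical.

import numpy as np
import SimpleITK as sitk
from io3d.dcmtools import get_sitk_image_from_ndarray

data3d = np.zeros((5, 64, 64), dtype=np.int16)
data3d[2, 20:40, 20:40] = -100            # negative values trigger the intercept shift

image = get_sitk_image_from_ndarray(data3d)
# the shift is recorded in the standard DICOM rescale tags
print(image.GetMetaData("0028|1052"), image.GetMetaData("0028|1053"))  # -1024 1
sitk.WriteImage(image, "volume.mha")      # hypothetical output path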
mjirik/io3d | io3d/dcmtools.py | get_slope_and_intercept_from_pdcm | def get_slope_and_intercept_from_pdcm(dcmdata):
"""
Get scale and intercept from pydicom file object.
:param dcmdata:
:return:
"""
if hasattr(dcmdata, "RescaleSlope") and hasattr(dcmdata, "RescaleIntercept"):
rescale_slope = dcmdata.RescaleSlope
rescale_intercept = dcmdata.RescaleIntercept
slope, inter = get_slope_and_intercept_from_strings(rescale_slope, rescale_intercept)
else:
slope = 1
inter = 0
return slope, inter | python | def get_slope_and_intercept_from_pdcm(dcmdata):
"""
Get scale and intercept from pydicom file object.
:param dcmdata:
:return:
"""
if hasattr(dcmdata, "RescaleSlope") and hasattr(dcmdata, "RescaleIntercept"):
rescale_slope = dcmdata.RescaleSlope
rescale_intercept = dcmdata.RescaleIntercept
slope, inter = get_slope_and_intercept_from_strings(rescale_slope, rescale_intercept)
else:
slope = 1
inter = 0
return slope, inter | [
"def",
"get_slope_and_intercept_from_pdcm",
"(",
"dcmdata",
")",
":",
"if",
"hasattr",
"(",
"dcmdata",
",",
"\"RescaleSlope\"",
")",
"and",
"hasattr",
"(",
"dcmdata",
",",
"\"RescaleIntercept\"",
")",
":",
"rescale_slope",
"=",
"dcmdata",
".",
"RescaleSlope",
"rescale_intercept",
"=",
"dcmdata",
".",
"RescaleIntercept",
"slope",
",",
"inter",
"=",
"get_slope_and_intercept_from_strings",
"(",
"rescale_slope",
",",
"rescale_intercept",
")",
"else",
":",
"slope",
"=",
"1",
"inter",
"=",
"0",
"return",
"slope",
",",
"inter"
] | Get scale and intercept from pydicom file object.
:param dcmdata:
:return: | [
"Get",
"scale",
"and",
"intercept",
"from",
"pydicom",
"file",
"object",
".",
":",
"param",
"dcmdata",
":",
":",
"return",
":"
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmtools.py#L74-L89 |
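A short sketch of how the pydicom helper above is typically used; the DICOM path is hypothetical and pydicom must be installed.

import pydicom
from io3d.dcmtools import get_slope_and_intercept_from_pdcm

dcm = pydicom.dcmread("slice_0001.dcm")                # hypothetical file
slope, inter = get_slope_and_intercept_from_pdcm(dcm)
hu = dcm.pixel_array * slope + inter                   # stored values -> real-world units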
bhearsum/chunkify | chunkify/__init__.py | split_evenly | def split_evenly(n, chunks):
"""Split an integer into evenly distributed list
>>> split_evenly(7, 3)
[3, 2, 2]
>>> split_evenly(12, 3)
[4, 4, 4]
>>> split_evenly(35, 10)
[4, 4, 4, 4, 4, 3, 3, 3, 3, 3]
>>> split_evenly(1, 2)
Traceback (most recent call last):
...
ChunkingError: Number of chunks is greater than number
"""
if n < chunks:
raise ChunkingError("Number of chunks is greater than number")
if n % chunks == 0:
# Either we can evenly split or only 1 chunk left
return [n / chunks] * chunks
# otherwise the current chunk should be a bit larger
max_size = n / chunks + 1
return [max_size] + split_evenly(n - max_size, chunks - 1) | python | def split_evenly(n, chunks):
"""Split an integer into evenly distributed list
>>> split_evenly(7, 3)
[3, 2, 2]
>>> split_evenly(12, 3)
[4, 4, 4]
>>> split_evenly(35, 10)
[4, 4, 4, 4, 4, 3, 3, 3, 3, 3]
>>> split_evenly(1, 2)
Traceback (most recent call last):
...
ChunkingError: Number of chunks is greater than number
"""
if n < chunks:
raise ChunkingError("Number of chunks is greater than number")
if n % chunks == 0:
# Either we can evenly split or only 1 chunk left
return [n / chunks] * chunks
# otherwise the current chunk should be a bit larger
max_size = n / chunks + 1
return [max_size] + split_evenly(n - max_size, chunks - 1) | [
"def",
"split_evenly",
"(",
"n",
",",
"chunks",
")",
":",
"if",
"n",
"<",
"chunks",
":",
"raise",
"ChunkingError",
"(",
"\"Number of chunks is greater than number\"",
")",
"if",
"n",
"%",
"chunks",
"==",
"0",
":",
"# Either we can evenly split or only 1 chunk left",
"return",
"[",
"n",
"/",
"chunks",
"]",
"*",
"chunks",
"# otherwise the current chunk should be a bit larger",
"max_size",
"=",
"n",
"/",
"chunks",
"+",
"1",
"return",
"[",
"max_size",
"]",
"+",
"split_evenly",
"(",
"n",
"-",
"max_size",
",",
"chunks",
"-",
"1",
")"
] | Split an integer into evenly distributed list
>>> split_evenly(7, 3)
[3, 2, 2]
>>> split_evenly(12, 3)
[4, 4, 4]
>>> split_evenly(35, 10)
[4, 4, 4, 4, 4, 3, 3, 3, 3, 3]
>>> split_evenly(1, 2)
Traceback (most recent call last):
...
ChunkingError: Number of chunks is greater than number | [
"Split",
"an",
"integer",
"into",
"evenly",
"distributed",
"list"
] | train | https://github.com/bhearsum/chunkify/blob/f3a693b17c80626852523955bf3c01b4fd93439b/chunkify/__init__.py#L8-L33 |
vsoch/helpme | helpme/main/discourse/__init__.py | Helper.check_env | def check_env(self, envar, value):
'''ensure that variable envar is set to some value,
otherwise exit on error.
Parameters
==========
envar: the environment variable name
value: the setting that shouldn't be None
'''
if value is None:
bot.error('You must export %s to use Discourse' % envar)
print('https://vsoch.github.io/helpme/helper-discourse')
sys.exit(1) | python | def check_env(self, envar, value):
'''ensure that variable envar is set to some value,
otherwise exit on error.
Parameters
==========
envar: the environment variable name
value: the setting that shouldn't be None
'''
if value is None:
bot.error('You must export %s to use Discourse' % envar)
print('https://vsoch.github.io/helpme/helper-discourse')
sys.exit(1) | [
"def",
"check_env",
"(",
"self",
",",
"envar",
",",
"value",
")",
":",
"if",
"value",
"is",
"None",
":",
"bot",
".",
"error",
"(",
"'You must export %s to use Discourse'",
"%",
"envar",
")",
"print",
"(",
"'https://vsoch.github.io/helpme/helper-discourse'",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | ensure that variable envar is set to some value,
otherwise exit on error.
Parameters
==========
envar: the environment variable name
value: the setting that shouldn't be None | [
"ensure",
"that",
"variable",
"envar",
"is",
"set",
"to",
"some",
"value",
"otherwise",
"exit",
"on",
"error",
".",
"Parameters",
"==========",
"envar",
":",
"the",
"environment",
"variable",
"name",
"value",
":",
"the",
"setting",
"that",
"shouldn",
"t",
"be",
"None"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/discourse/__init__.py#L65-L77 |
vsoch/helpme | helpme/main/discourse/__init__.py | Helper._generate_keys | def _generate_keys(self):
'''the discourse API requires the interactions to be signed, so we
generate a keypair on behalf of the user
'''
from helpme.defaults import HELPME_CLIENT_SECRETS
keypair_dir = os.path.join(os.path.dirname(HELPME_CLIENT_SECRETS),
'discourse')
# Have we generated a keypair file before?
self.keypair_file = os.path.join(keypair_dir, 'private.pem')
# We likely won't have generated it on first use!
if not hasattr(self, 'key'):
self.key = generate_keypair(self.keypair_file)
# If we generated the keypair file, we will have already loaded the key
if not hasattr(self, 'public_key'):
self.public_key = load_keypair(self.keypair_file) | python | def _generate_keys(self):
'''the discourse API requires the interactions to be signed, so we
generate a keypair on behalf of the user
'''
from helpme.defaults import HELPME_CLIENT_SECRETS
keypair_dir = os.path.join(os.path.dirname(HELPME_CLIENT_SECRETS),
'discourse')
# Have we generated a keypair file before?
self.keypair_file = os.path.join(keypair_dir, 'private.pem')
# We likely won't have generated it on first use!
if not hasattr(self, 'key'):
self.key = generate_keypair(self.keypair_file)
# If we generated the keypair file, we will have already loaded the key
if not hasattr(self, 'public_key'):
self.public_key = load_keypair(self.keypair_file) | [
"def",
"_generate_keys",
"(",
"self",
")",
":",
"from",
"helpme",
".",
"defaults",
"import",
"HELPME_CLIENT_SECRETS",
"keypair_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"HELPME_CLIENT_SECRETS",
")",
",",
"'discourse'",
")",
"# Have we generated a keypair file before?",
"self",
".",
"keypair_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"keypair_dir",
",",
"'private.pem'",
")",
"# We likely won't have generated it on first use!",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'key'",
")",
":",
"self",
".",
"key",
"=",
"generate_keypair",
"(",
"self",
".",
"keypair_file",
")",
"# If we generated the keypair file, we will have already loaded the key",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'public_key'",
")",
":",
"self",
".",
"public_key",
"=",
"load_keypair",
"(",
"self",
".",
"keypair_file",
")"
] | the discourse API requires the interactions to be signed, so we
generate a keypair on behalf of the user | [
"the",
"discourse",
"API",
"requires",
"the",
"interactions",
"to",
"be",
"signed",
"so",
"we",
"generate",
"a",
"keypair",
"on",
"behalf",
"of",
"the",
"user"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/discourse/__init__.py#L80-L97 |
vsoch/helpme | helpme/main/discourse/__init__.py | Helper._submit | def _submit(self):
'''submit the question to the board. When we get here we should have
(under self.data)
{'record_environment': [('DISPLAY', ':0')],
'user_prompt_board': 'http://127.0.0.1',
'user_prompt_issue': 'I want to know why dinosaurs are so great!',
'user_prompt_title': 'Why are dinosaurs so great?'}
self.token should be propogated with the personal access token
'''
body = self.data['user_prompt_issue']
title = self.data['user_prompt_title']
board = self.data['user_prompt_board']
username = self.data['user_prompt_username']
category = self.data['user_prompt_category']
# Step 1: Token
if self.token == None:
self.token = self.request_token(board)
self._get_and_update_setting('HELPME_DISCOURSE_TOKEN', self.token)
# Step 1: Environment
envars = self.data.get('record_environment')
body = body + envars_to_markdown(envars)
# Step 2: Asciinema
asciinema = self.data.get('record_asciinema')
if asciinema not in [None, '']:
url = upload_asciinema(asciinema)
# If the upload is successful, add a link to it.
if url is not None:
body += "\n[View Asciinema Recording](%s)" % url
# Add other metadata about client
body += "\n\ngenerated by [HelpMe](https://vsoch.github.io/helpme/)"
body += "\nHelpMe Discourse Id: %s" %(self.run_id)
# Submit the issue
post = self.create_post(title, body, board, category, username)
return post | python | def _submit(self):
'''submit the question to the board. When we get here we should have
(under self.data)
{'record_environment': [('DISPLAY', ':0')],
'user_prompt_board': 'http://127.0.0.1',
'user_prompt_issue': 'I want to know why dinosaurs are so great!',
'user_prompt_title': 'Why are dinosaurs so great?'}
self.token should be propogated with the personal access token
'''
body = self.data['user_prompt_issue']
title = self.data['user_prompt_title']
board = self.data['user_prompt_board']
username = self.data['user_prompt_username']
category = self.data['user_prompt_category']
# Step 1: Token
if self.token == None:
self.token = self.request_token(board)
self._get_and_update_setting('HELPME_DISCOURSE_TOKEN', self.token)
# Step 1: Environment
envars = self.data.get('record_environment')
body = body + envars_to_markdown(envars)
# Step 2: Asciinema
asciinema = self.data.get('record_asciinema')
if asciinema not in [None, '']:
url = upload_asciinema(asciinema)
# If the upload is successful, add a link to it.
if url is not None:
body += "\n[View Asciinema Recording](%s)" % url
# Add other metadata about client
body += "\n\ngenerated by [HelpMe](https://vsoch.github.io/helpme/)"
body += "\nHelpMe Discourse Id: %s" %(self.run_id)
# Submit the issue
post = self.create_post(title, body, board, category, username)
return post | [
"def",
"_submit",
"(",
"self",
")",
":",
"body",
"=",
"self",
".",
"data",
"[",
"'user_prompt_issue'",
"]",
"title",
"=",
"self",
".",
"data",
"[",
"'user_prompt_title'",
"]",
"board",
"=",
"self",
".",
"data",
"[",
"'user_prompt_board'",
"]",
"username",
"=",
"self",
".",
"data",
"[",
"'user_prompt_username'",
"]",
"category",
"=",
"self",
".",
"data",
"[",
"'user_prompt_category'",
"]",
"# Step 1: Token",
"if",
"self",
".",
"token",
"==",
"None",
":",
"self",
".",
"token",
"=",
"self",
".",
"request_token",
"(",
"board",
")",
"self",
".",
"_get_and_update_setting",
"(",
"'HELPME_DISCOURSE_TOKEN'",
",",
"self",
".",
"token",
")",
"# Step 1: Environment",
"envars",
"=",
"self",
".",
"data",
".",
"get",
"(",
"'record_environment'",
")",
"body",
"=",
"body",
"+",
"envars_to_markdown",
"(",
"envars",
")",
"# Step 2: Asciinema",
"asciinema",
"=",
"self",
".",
"data",
".",
"get",
"(",
"'record_asciinema'",
")",
"if",
"asciinema",
"not",
"in",
"[",
"None",
",",
"''",
"]",
":",
"url",
"=",
"upload_asciinema",
"(",
"asciinema",
")",
"# If the upload is successful, add a link to it.",
"if",
"url",
"is",
"not",
"None",
":",
"body",
"+=",
"\"\\n[View Asciinema Recording](%s)\"",
"%",
"url",
"# Add other metadata about client",
"body",
"+=",
"\"\\n\\ngenerated by [HelpMe](https://vsoch.github.io/helpme/)\"",
"body",
"+=",
"\"\\nHelpMe Discourse Id: %s\"",
"%",
"(",
"self",
".",
"run_id",
")",
"# Submit the issue",
"post",
"=",
"self",
".",
"create_post",
"(",
"title",
",",
"body",
",",
"board",
",",
"category",
",",
"username",
")",
"return",
"post"
] | submit the question to the board. When we get here we should have
(under self.data)
{'record_environment': [('DISPLAY', ':0')],
'user_prompt_board': 'http://127.0.0.1',
'user_prompt_issue': 'I want to know why dinosaurs are so great!',
'user_prompt_title': 'Why are dinosaurs so great?'}
self.token should be propogated with the personal access token | [
"submit",
"the",
"question",
"to",
"the",
"board",
".",
"When",
"we",
"get",
"here",
"we",
"should",
"have",
"(",
"under",
"self",
".",
"data",
")",
"{",
"record_environment",
":",
"[",
"(",
"DISPLAY",
":",
"0",
")",
"]",
"user_prompt_board",
":",
"http",
":",
"//",
"127",
".",
"0",
".",
"0",
".",
"1",
"user_prompt_issue",
":",
"I",
"want",
"to",
"know",
"why",
"dinosaurs",
"are",
"so",
"great!",
"user_prompt_title",
":",
"Why",
"are",
"dinosaurs",
"so",
"great?",
"}"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/discourse/__init__.py#L121-L167 |
closeio/cachecow | cachecow/__init__.py | CacheCow.get | def get(self, cls, id_field, id_val):
"""
Retrieve an object which `id_field` matches `id_val`. If it exists in
the cache, it will be fetched from Redis. If not, it will be fetched
via the `fetch` method and cached in Redis (unless the cache flag got
invalidated in the meantime).
"""
cache_key, flag_key = self.get_keys(cls, id_field, id_val)
result = self.get_cached_or_set_flag(keys=(cache_key, flag_key))
# in Lua, arrays cannot hold nil values, so e.g. if [1, nil] is returned,
# we'll only get [1] here. That's why we need to append None ourselves.
if len(result) == 1:
result.append(None)
previous_flag, cached_data = result
# if cached data was found, deserialize and return it
if cached_data is not None:
deserialized = self.deserialize(cls, cached_data)
# verify that the cached object matches our expectations
# if not, return from the persistant storage instead.
if self.verify(cls, id_field, id_val, deserialized):
return deserialized
else:
# invalidate the cache if it didn't pass verification
self.invalidate(cls, id_field, id_val)
obj = self.fetch(cls, id_field, id_val)
# If the flag wasn't previously set, then we set it and we're responsible
# for putting the item in the cache. Do this unless the cache got
# invalidated and the flag was removed.
if not previous_flag:
obj_serialized = self.serialize(obj)
self.cache(keys=(cache_key, flag_key), args=(obj_serialized,))
return obj | python | def get(self, cls, id_field, id_val):
"""
Retrieve an object which `id_field` matches `id_val`. If it exists in
the cache, it will be fetched from Redis. If not, it will be fetched
via the `fetch` method and cached in Redis (unless the cache flag got
invalidated in the meantime).
"""
cache_key, flag_key = self.get_keys(cls, id_field, id_val)
result = self.get_cached_or_set_flag(keys=(cache_key, flag_key))
# in Lua, arrays cannot hold nil values, so e.g. if [1, nil] is returned,
# we'll only get [1] here. That's why we need to append None ourselves.
if len(result) == 1:
result.append(None)
previous_flag, cached_data = result
# if cached data was found, deserialize and return it
if cached_data is not None:
deserialized = self.deserialize(cls, cached_data)
# verify that the cached object matches our expectations
# if not, return from the persistant storage instead.
if self.verify(cls, id_field, id_val, deserialized):
return deserialized
else:
# invalidate the cache if it didn't pass verification
self.invalidate(cls, id_field, id_val)
obj = self.fetch(cls, id_field, id_val)
# If the flag wasn't previously set, then we set it and we're responsible
# for putting the item in the cache. Do this unless the cache got
# invalidated and the flag was removed.
if not previous_flag:
obj_serialized = self.serialize(obj)
self.cache(keys=(cache_key, flag_key), args=(obj_serialized,))
return obj | [
"def",
"get",
"(",
"self",
",",
"cls",
",",
"id_field",
",",
"id_val",
")",
":",
"cache_key",
",",
"flag_key",
"=",
"self",
".",
"get_keys",
"(",
"cls",
",",
"id_field",
",",
"id_val",
")",
"result",
"=",
"self",
".",
"get_cached_or_set_flag",
"(",
"keys",
"=",
"(",
"cache_key",
",",
"flag_key",
")",
")",
"# in Lua, arrays cannot hold nil values, so e.g. if [1, nil] is returned,",
"# we'll only get [1] here. That's why we need to append None ourselves.",
"if",
"len",
"(",
"result",
")",
"==",
"1",
":",
"result",
".",
"append",
"(",
"None",
")",
"previous_flag",
",",
"cached_data",
"=",
"result",
"# if cached data was found, deserialize and return it",
"if",
"cached_data",
"is",
"not",
"None",
":",
"deserialized",
"=",
"self",
".",
"deserialize",
"(",
"cls",
",",
"cached_data",
")",
"# verify that the cached object matches our expectations",
"# if not, return from the persistant storage instead.",
"if",
"self",
".",
"verify",
"(",
"cls",
",",
"id_field",
",",
"id_val",
",",
"deserialized",
")",
":",
"return",
"deserialized",
"else",
":",
"# invalidate the cache if it didn't pass verification",
"self",
".",
"invalidate",
"(",
"cls",
",",
"id_field",
",",
"id_val",
")",
"obj",
"=",
"self",
".",
"fetch",
"(",
"cls",
",",
"id_field",
",",
"id_val",
")",
"# If the flag wasn't previously set, then we set it and we're responsible",
"# for putting the item in the cache. Do this unless the cache got",
"# invalidated and the flag was removed.",
"if",
"not",
"previous_flag",
":",
"obj_serialized",
"=",
"self",
".",
"serialize",
"(",
"obj",
")",
"self",
".",
"cache",
"(",
"keys",
"=",
"(",
"cache_key",
",",
"flag_key",
")",
",",
"args",
"=",
"(",
"obj_serialized",
",",
")",
")",
"return",
"obj"
] | Retrieve an object which `id_field` matches `id_val`. If it exists in
the cache, it will be fetched from Redis. If not, it will be fetched
via the `fetch` method and cached in Redis (unless the cache flag got
invalidated in the meantime). | [
"Retrieve",
"an",
"object",
"which",
"id_field",
"matches",
"id_val",
".",
"If",
"it",
"exists",
"in",
"the",
"cache",
"it",
"will",
"be",
"fetched",
"from",
"Redis",
".",
"If",
"not",
"it",
"will",
"be",
"fetched",
"via",
"the",
"fetch",
"method",
"and",
"cached",
"in",
"Redis",
"(",
"unless",
"the",
"cache",
"flag",
"got",
"invalidated",
"in",
"the",
"meantime",
")",
"."
] | train | https://github.com/closeio/cachecow/blob/a0531686db40baa81b3cfa0076a23a53d2762cc6/cachecow/__init__.py#L60-L99 |
closeio/cachecow | cachecow/__init__.py | CacheCow.verify | def verify(self, cls, id_field, id_val, obj_from_cache):
"""
Verify that the object we retrieved from cache matches the requested
`id_field`/`id_val`.
"""
return getattr(obj_from_cache, id_field) == id_val | python | def verify(self, cls, id_field, id_val, obj_from_cache):
"""
Verify that the object we retrieved from cache matches the requested
`id_field`/`id_val`.
"""
return getattr(obj_from_cache, id_field) == id_val | [
"def",
"verify",
"(",
"self",
",",
"cls",
",",
"id_field",
",",
"id_val",
",",
"obj_from_cache",
")",
":",
"return",
"getattr",
"(",
"obj_from_cache",
",",
"id_field",
")",
"==",
"id_val"
] | Verify that the object we retrieved from cache matches the requested
`id_field`/`id_val`. | [
"Verify",
"that",
"the",
"object",
"we",
"retrieved",
"from",
"cache",
"matches",
"the",
"requested",
"id_field",
"/",
"id_val",
"."
] | train | https://github.com/closeio/cachecow/blob/a0531686db40baa81b3cfa0076a23a53d2762cc6/cachecow/__init__.py#L101-L106 |
closeio/cachecow | cachecow/__init__.py | CacheCow.invalidate | def invalidate(self, cls, id_field, id_val):
"""
Invalidate the cache for a given Mongo object by deleting the cached
data and the cache flag.
"""
cache_key, flag_key = self.get_keys(cls, id_field, id_val)
pipeline = self.redis.pipeline()
pipeline.delete(cache_key)
pipeline.delete(flag_key)
pipeline.execute() | python | def invalidate(self, cls, id_field, id_val):
"""
Invalidate the cache for a given Mongo object by deleting the cached
data and the cache flag.
"""
cache_key, flag_key = self.get_keys(cls, id_field, id_val)
pipeline = self.redis.pipeline()
pipeline.delete(cache_key)
pipeline.delete(flag_key)
pipeline.execute() | [
"def",
"invalidate",
"(",
"self",
",",
"cls",
",",
"id_field",
",",
"id_val",
")",
":",
"cache_key",
",",
"flag_key",
"=",
"self",
".",
"get_keys",
"(",
"cls",
",",
"id_field",
",",
"id_val",
")",
"pipeline",
"=",
"self",
".",
"redis",
".",
"pipeline",
"(",
")",
"pipeline",
".",
"delete",
"(",
"cache_key",
")",
"pipeline",
".",
"delete",
"(",
"flag_key",
")",
"pipeline",
".",
"execute",
"(",
")"
] | Invalidate the cache for a given Mongo object by deleting the cached
data and the cache flag. | [
"Invalidate",
"the",
"cache",
"for",
"a",
"given",
"Mongo",
"object",
"by",
"deleting",
"the",
"cached",
"data",
"and",
"the",
"cache",
"flag",
"."
] | train | https://github.com/closeio/cachecow/blob/a0531686db40baa81b3cfa0076a23a53d2762cc6/cachecow/__init__.py#L120-L130 |
DreamLab/VmShepherd | src/vmshepherd/presets/preset.py | Preset.manage | async def manage(self):
""" Manage function docstring"""
self._vms = await self.iaas.list_vms(self.name)
vms_stat = Counter([vm.get_state() for vm in self._vms])
missing = self.count - len(self._vms) if len(self._vms) < self.count else 0
logging.info(
'VMs Status: %s expected, %s in iaas, %s running, %s nearby shutdown, %s pending, %s after time shutdown, '
'%s terminated, %s error, %s unknown, %s missing',
self.count, len(self._vms), vms_stat[VmState.RUNNING.value], vms_stat[VmState.NEARBY_SHUTDOWN.value],
vms_stat[VmState.PENDING.value], vms_stat[VmState.AFTER_TIME_SHUTDOWN.value],
vms_stat[VmState.TERMINATED.value], vms_stat[VmState.ERROR.value], vms_stat[VmState.UNKNOWN.value], missing, extra=self._extra
)
for vm in self._vms:
if vm.is_dead():
logging.info("Terminate %s", vm, extra=self._extra)
await vm.terminate()
self.terminated += 1
to_create = self.count - (len(self._vms) - self.terminated - vms_stat[VmState.NEARBY_SHUTDOWN.value])
to_create = to_create if to_create > 0 else 0
logging.debug("Create %s Vm", to_create, extra=self._extra)
await self._create_vms(to_create)
await self._healthcheck(self._vms)
logging.info(
'VMs Status update: %s terminated, %s terminated by healthcheck, %s created, %s failed healthcheck',
self.terminated, self.healthcheck_terminated, to_create, len(self.runtime.failed_checks),
extra=self._extra
) | python | async def manage(self):
""" Manage function docstring"""
self._vms = await self.iaas.list_vms(self.name)
vms_stat = Counter([vm.get_state() for vm in self._vms])
missing = self.count - len(self._vms) if len(self._vms) < self.count else 0
logging.info(
'VMs Status: %s expected, %s in iaas, %s running, %s nearby shutdown, %s pending, %s after time shutdown, '
'%s terminated, %s error, %s unknown, %s missing',
self.count, len(self._vms), vms_stat[VmState.RUNNING.value], vms_stat[VmState.NEARBY_SHUTDOWN.value],
vms_stat[VmState.PENDING.value], vms_stat[VmState.AFTER_TIME_SHUTDOWN.value],
vms_stat[VmState.TERMINATED.value], vms_stat[VmState.ERROR.value], vms_stat[VmState.UNKNOWN.value], missing, extra=self._extra
)
for vm in self._vms:
if vm.is_dead():
logging.info("Terminate %s", vm, extra=self._extra)
await vm.terminate()
self.terminated += 1
to_create = self.count - (len(self._vms) - self.terminated - vms_stat[VmState.NEARBY_SHUTDOWN.value])
to_create = to_create if to_create > 0 else 0
logging.debug("Create %s Vm", to_create, extra=self._extra)
await self._create_vms(to_create)
await self._healthcheck(self._vms)
logging.info(
'VMs Status update: %s terminated, %s terminated by healthcheck, %s created, %s failed healthcheck',
self.terminated, self.healthcheck_terminated, to_create, len(self.runtime.failed_checks),
extra=self._extra
) | [
"async",
"def",
"manage",
"(",
"self",
")",
":",
"self",
".",
"_vms",
"=",
"await",
"self",
".",
"iaas",
".",
"list_vms",
"(",
"self",
".",
"name",
")",
"vms_stat",
"=",
"Counter",
"(",
"[",
"vm",
".",
"get_state",
"(",
")",
"for",
"vm",
"in",
"self",
".",
"_vms",
"]",
")",
"missing",
"=",
"self",
".",
"count",
"-",
"len",
"(",
"self",
".",
"_vms",
")",
"if",
"len",
"(",
"self",
".",
"_vms",
")",
"<",
"self",
".",
"count",
"else",
"0",
"logging",
".",
"info",
"(",
"'VMs Status: %s expected, %s in iaas, %s running, %s nearby shutdown, %s pending, %s after time shutdown, '",
"'%s terminated, %s error, %s unknown, %s missing'",
",",
"self",
".",
"count",
",",
"len",
"(",
"self",
".",
"_vms",
")",
",",
"vms_stat",
"[",
"VmState",
".",
"RUNNING",
".",
"value",
"]",
",",
"vms_stat",
"[",
"VmState",
".",
"NEARBY_SHUTDOWN",
".",
"value",
"]",
",",
"vms_stat",
"[",
"VmState",
".",
"PENDING",
".",
"value",
"]",
",",
"vms_stat",
"[",
"VmState",
".",
"AFTER_TIME_SHUTDOWN",
".",
"value",
"]",
",",
"vms_stat",
"[",
"VmState",
".",
"TERMINATED",
".",
"value",
"]",
",",
"vms_stat",
"[",
"VmState",
".",
"ERROR",
".",
"value",
"]",
",",
"vms_stat",
"[",
"VmState",
".",
"UNKNOWN",
".",
"value",
"]",
",",
"missing",
",",
"extra",
"=",
"self",
".",
"_extra",
")",
"for",
"vm",
"in",
"self",
".",
"_vms",
":",
"if",
"vm",
".",
"is_dead",
"(",
")",
":",
"logging",
".",
"info",
"(",
"\"Terminate %s\"",
",",
"vm",
",",
"extra",
"=",
"self",
".",
"_extra",
")",
"await",
"vm",
".",
"terminate",
"(",
")",
"self",
".",
"terminated",
"+=",
"1",
"to_create",
"=",
"self",
".",
"count",
"-",
"(",
"len",
"(",
"self",
".",
"_vms",
")",
"-",
"self",
".",
"terminated",
"-",
"vms_stat",
"[",
"VmState",
".",
"NEARBY_SHUTDOWN",
".",
"value",
"]",
")",
"to_create",
"=",
"to_create",
"if",
"to_create",
">",
"0",
"else",
"0",
"logging",
".",
"debug",
"(",
"\"Create %s Vm\"",
",",
"to_create",
",",
"extra",
"=",
"self",
".",
"_extra",
")",
"await",
"self",
".",
"_create_vms",
"(",
"to_create",
")",
"await",
"self",
".",
"_healthcheck",
"(",
"self",
".",
"_vms",
")",
"logging",
".",
"info",
"(",
"'VMs Status update: %s terminated, %s terminated by healthcheck, %s created, %s failed healthcheck'",
",",
"self",
".",
"terminated",
",",
"self",
".",
"healthcheck_terminated",
",",
"to_create",
",",
"len",
"(",
"self",
".",
"runtime",
".",
"failed_checks",
")",
",",
"extra",
"=",
"self",
".",
"_extra",
")"
] | Manage function docstring | [
"Manage",
"function",
"docstring"
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/presets/preset.py#L76-L103 |
pschmitt/pynuki | pynuki/pynuki.py | NukiLock.update | def update(self, aggressive=False):
"""
Update the state of the NukiLock
:param aggressive: Whether to aggressively poll the Bridge. If set to
True, this will actively query the Lock, thus using more battery.
:type aggressive: bool
"""
if aggressive:
data = self._bridge.lock_state(self.nuki_id)
if not data['success']:
logger.warning(
'Failed to update the state of lock {}'.format(self.nuki_id)
)
self._json.update({k: v for k, v in data.items() if k != 'success'})
else:
data = [l for l in self._bridge.locks if l.nuki_id == self.nuki_id]
assert data, (
'Failed to update data for lock. '
'Nuki ID {} volatized.'.format(self.nuki_id))
self._json.update(data[0]._json) | python | def update(self, aggressive=False):
"""
Update the state of the NukiLock
:param aggressive: Whether to aggressively poll the Bridge. If set to
True, this will actively query the Lock, thus using more battery.
:type aggressive: bool
"""
if aggressive:
data = self._bridge.lock_state(self.nuki_id)
if not data['success']:
logger.warning(
'Failed to update the state of lock {}'.format(self.nuki_id)
)
self._json.update({k: v for k, v in data.items() if k != 'success'})
else:
data = [l for l in self._bridge.locks if l.nuki_id == self.nuki_id]
assert data, (
'Failed to update data for lock. '
'Nuki ID {} volatized.'.format(self.nuki_id))
self._json.update(data[0]._json) | [
"def",
"update",
"(",
"self",
",",
"aggressive",
"=",
"False",
")",
":",
"if",
"aggressive",
":",
"data",
"=",
"self",
".",
"_bridge",
".",
"lock_state",
"(",
"self",
".",
"nuki_id",
")",
"if",
"not",
"data",
"[",
"'success'",
"]",
":",
"logger",
".",
"warning",
"(",
"'Failed to update the state of lock {}'",
".",
"format",
"(",
"self",
".",
"nuki_id",
")",
")",
"self",
".",
"_json",
".",
"update",
"(",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"data",
".",
"items",
"(",
")",
"if",
"k",
"!=",
"'success'",
"}",
")",
"else",
":",
"data",
"=",
"[",
"l",
"for",
"l",
"in",
"self",
".",
"_bridge",
".",
"locks",
"if",
"l",
".",
"nuki_id",
"==",
"self",
".",
"nuki_id",
"]",
"assert",
"data",
",",
"(",
"'Failed to update data for lock. '",
"'Nuki ID {} volatized.'",
".",
"format",
"(",
"self",
".",
"nuki_id",
")",
")",
"self",
".",
"_json",
".",
"update",
"(",
"data",
"[",
"0",
"]",
".",
"_json",
")"
] | Update the state of the NukiLock
:param aggressive: Whether to aggressively poll the Bridge. If set to
True, this will actively query the Lock, thus using more battery.
:type aggressive: bool | [
"Update",
"the",
"state",
"of",
"the",
"NukiLock",
":",
"param",
"aggressive",
":",
"Whether",
"to",
"aggressively",
"poll",
"the",
"Bridge",
".",
"If",
"set",
"to",
"True",
"this",
"will",
"actively",
"query",
"the",
"Lock",
"thus",
"using",
"more",
"battery",
".",
":",
"type",
"aggressive",
":",
"bool"
] | train | https://github.com/pschmitt/pynuki/blob/4258dec17bd333e1d475b2cb62438bd67dd017a7/pynuki/pynuki.py#L84-L103 |
CentOS/python-cicoclient | cicoclient/utils.py | get_dict_properties | def get_dict_properties(item, fields, mixed_case_fields=[], formatters={}):
"""Return a tuple containing the item properties.
:param item: a single dict resource
:param fields: tuple of strings with the desired field names
:param mixed_case_fields: tuple of field names to preserve case
:param formatters: dictionary mapping field names to callables
to format the values
"""
row = []
for field in fields:
if field in mixed_case_fields:
field_name = field.replace(' ', '_')
else:
field_name = field.lower().replace(' ', '_')
data = item[field_name] if field_name in item else ''
if field in formatters:
row.append(formatters[field](data))
else:
row.append(data)
return tuple(row) | python | def get_dict_properties(item, fields, mixed_case_fields=[], formatters={}):
"""Return a tuple containing the item properties.
:param item: a single dict resource
:param fields: tuple of strings with the desired field names
:param mixed_case_fields: tuple of field names to preserve case
:param formatters: dictionary mapping field names to callables
to format the values
"""
row = []
for field in fields:
if field in mixed_case_fields:
field_name = field.replace(' ', '_')
else:
field_name = field.lower().replace(' ', '_')
data = item[field_name] if field_name in item else ''
if field in formatters:
row.append(formatters[field](data))
else:
row.append(data)
return tuple(row) | [
"def",
"get_dict_properties",
"(",
"item",
",",
"fields",
",",
"mixed_case_fields",
"=",
"[",
"]",
",",
"formatters",
"=",
"{",
"}",
")",
":",
"row",
"=",
"[",
"]",
"for",
"field",
"in",
"fields",
":",
"if",
"field",
"in",
"mixed_case_fields",
":",
"field_name",
"=",
"field",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
"else",
":",
"field_name",
"=",
"field",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
"data",
"=",
"item",
"[",
"field_name",
"]",
"if",
"field_name",
"in",
"item",
"else",
"''",
"if",
"field",
"in",
"formatters",
":",
"row",
".",
"append",
"(",
"formatters",
"[",
"field",
"]",
"(",
"data",
")",
")",
"else",
":",
"row",
".",
"append",
"(",
"data",
")",
"return",
"tuple",
"(",
"row",
")"
] | Return a tuple containing the item properties.
:param item: a single dict resource
:param fields: tuple of strings with the desired field names
:param mixed_case_fields: tuple of field names to preserve case
:param formatters: dictionary mapping field names to callables
to format the values | [
"Return",
"a",
"tuple",
"containing",
"the",
"item",
"properties",
".",
":",
"param",
"item",
":",
"a",
"single",
"dict",
"resource",
":",
"param",
"fields",
":",
"tuple",
"of",
"strings",
"with",
"the",
"desired",
"field",
"names",
":",
"param",
"mixed_case_fields",
":",
"tuple",
"of",
"field",
"names",
"to",
"preserve",
"case",
":",
"param",
"formatters",
":",
"dictionary",
"mapping",
"field",
"names",
"to",
"callables",
"to",
"format",
"the",
"values"
] | train | https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/utils.py#L21-L41 |
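An illustrative call to get_dict_properties from the record above; the host dict is invented.

from cicoclient.utils import get_dict_properties

host = {'hostname': 'node01', 'ip_address': '10.0.0.5', 'comment': ''}
row = get_dict_properties(host, ('Hostname', 'IP Address'))
print(row)   # ('node01', '10.0.0.5') -- field names are lowercased and spaces become '_'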
CentOS/python-cicoclient | cicoclient/utils.py | log_method | def log_method(log, level=logging.DEBUG):
"""Logs a method and its arguments when entered."""
def decorator(func):
func_name = func.__name__
@six.wraps(func)
def wrapper(self, *args, **kwargs):
if log.isEnabledFor(level):
pretty_args = []
if args:
pretty_args.extend(str(a) for a in args)
if kwargs:
pretty_args.extend(
"%s=%s" % (k, v) for k, v in six.iteritems(kwargs))
log.log(level, "%s(%s)", func_name, ", ".join(pretty_args))
return func(self, *args, **kwargs)
return wrapper
return decorator | python | def log_method(log, level=logging.DEBUG):
"""Logs a method and its arguments when entered."""
def decorator(func):
func_name = func.__name__
@six.wraps(func)
def wrapper(self, *args, **kwargs):
if log.isEnabledFor(level):
pretty_args = []
if args:
pretty_args.extend(str(a) for a in args)
if kwargs:
pretty_args.extend(
"%s=%s" % (k, v) for k, v in six.iteritems(kwargs))
log.log(level, "%s(%s)", func_name, ", ".join(pretty_args))
return func(self, *args, **kwargs)
return wrapper
return decorator | [
"def",
"log_method",
"(",
"log",
",",
"level",
"=",
"logging",
".",
"DEBUG",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"func_name",
"=",
"func",
".",
"__name__",
"@",
"six",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"log",
".",
"isEnabledFor",
"(",
"level",
")",
":",
"pretty_args",
"=",
"[",
"]",
"if",
"args",
":",
"pretty_args",
".",
"extend",
"(",
"str",
"(",
"a",
")",
"for",
"a",
"in",
"args",
")",
"if",
"kwargs",
":",
"pretty_args",
".",
"extend",
"(",
"\"%s=%s\"",
"%",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"six",
".",
"iteritems",
"(",
"kwargs",
")",
")",
"log",
".",
"log",
"(",
"level",
",",
"\"%s(%s)\"",
",",
"func_name",
",",
"\", \"",
".",
"join",
"(",
"pretty_args",
")",
")",
"return",
"func",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper",
"return",
"decorator"
] | Logs a method and its arguments when entered. | [
"Logs",
"a",
"method",
"and",
"its",
"arguments",
"when",
"entered",
"."
] | train | https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/utils.py#L44-L64 |
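A small sketch showing the log_method decorator above applied to a method; the ApiClient class and its request signature are hypothetical.

import logging
from cicoclient.utils import log_method

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)

class ApiClient(object):
    @log_method(log)
    def request(self, method, url, retries=3):
        return (method, url, retries)

ApiClient().request('GET', '/hosts', retries=1)
# logs: DEBUG ... request(GET, /hosts, retries=1)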
launchdarkly/relayCommander | relay_commander/replay_builder.py | check_local | def check_local() -> None:
"""
Verify required directories exist.
This functions checks the current working directory to ensure that
the required directories exist. If they do not exist, it will create them.
"""
to_check = ['./replay', './replay/toDo', './replay/archive']
for i in to_check:
if not os.path.exists(i):
os.makedirs(i) | python | def check_local() -> None:
"""
Verify required directories exist.
This functions checks the current working directory to ensure that
the required directories exist. If they do not exist, it will create them.
"""
to_check = ['./replay', './replay/toDo', './replay/archive']
for i in to_check:
if not os.path.exists(i):
os.makedirs(i) | [
"def",
"check_local",
"(",
")",
"->",
"None",
":",
"to_check",
"=",
"[",
"'./replay'",
",",
"'./replay/toDo'",
",",
"'./replay/archive'",
"]",
"for",
"i",
"in",
"to_check",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"i",
")",
":",
"os",
".",
"makedirs",
"(",
"i",
")"
] | Verify required directories exist.
This functions checks the current working directory to ensure that
the required directories exist. If they do not exist, it will create them. | [
"Verify",
"required",
"directories",
"exist",
"."
] | train | https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/replay_builder.py#L33-L44 |
launchdarkly/relayCommander | relay_commander/replay_builder.py | create_file | def create_file(project: str, environment: str, feature: str, state: str) -> None:
"""
Create file to replay.
Create file with ``rc`` command that will be called against the
LaunchDarkly API when ``rc playback`` is called from the main CLI.
:param project: LaunchDarkly Project
:param environment: LaunchDarkly Environment
:param feature: LaunchDarkly Feature
:param state: State to update feature flag
"""
check_local()
save_path = './replay/toDo/'
filename = '{0}.txt'.format(str(uuid.uuid1()))
complete_name = os.path.join(save_path, filename)
with open(complete_name, 'w') as filename:
filename.write('rc update-ld-api -p {0} -e {1} -f {2} -s {3}'.format(
project,
environment,
feature,
state
)) | python | def create_file(project: str, environment: str, feature: str, state: str) -> None:
"""
Create file to replay.
Create file with ``rc`` command that will be called against the
LaunchDarkly API when ``rc playback`` is called from the main CLI.
:param project: LaunchDarkly Project
:param environment: LaunchDarkly Environment
:param feature: LaunchDarkly Feature
:param state: State to update feature flag
"""
check_local()
save_path = './replay/toDo/'
filename = '{0}.txt'.format(str(uuid.uuid1()))
complete_name = os.path.join(save_path, filename)
with open(complete_name, 'w') as filename:
filename.write('rc update-ld-api -p {0} -e {1} -f {2} -s {3}'.format(
project,
environment,
feature,
state
)) | [
"def",
"create_file",
"(",
"project",
":",
"str",
",",
"environment",
":",
"str",
",",
"feature",
":",
"str",
",",
"state",
":",
"str",
")",
"->",
"None",
":",
"check_local",
"(",
")",
"save_path",
"=",
"'./replay/toDo/'",
"filename",
"=",
"'{0}.txt'",
".",
"format",
"(",
"str",
"(",
"uuid",
".",
"uuid1",
"(",
")",
")",
")",
"complete_name",
"=",
"os",
".",
"path",
".",
"join",
"(",
"save_path",
",",
"filename",
")",
"with",
"open",
"(",
"complete_name",
",",
"'w'",
")",
"as",
"filename",
":",
"filename",
".",
"write",
"(",
"'rc update-ld-api -p {0} -e {1} -f {2} -s {3}'",
".",
"format",
"(",
"project",
",",
"environment",
",",
"feature",
",",
"state",
")",
")"
] | Create file to replay.
Create file with ``rc`` command that will be called against the
LaunchDarkly API when ``rc playback`` is called from the main CLI.
:param project: LaunchDarkly Project
:param environment: LaunchDarkly Environment
:param feature: LaunchDarkly Feature
:param state: State to update feature flag | [
"Create",
"file",
"to",
"replay",
"."
] | train | https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/replay_builder.py#L47-L70 |
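A sketch of the replay file produced by create_file above; the arguments are invented and the generated filename is a random UUID.

from relay_commander.replay_builder import create_file

create_file('default', 'production', 'new-dashboard', 'false')
# writes ./replay/toDo/<uuid>.txt containing the single line:
# rc update-ld-api -p default -e production -f new-dashboard -s false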
launchdarkly/relayCommander | relay_commander/replay_builder.py | execute_replay | def execute_replay() -> None:
"""
Execute all commands.
For every command that is found in replay/toDo, execute each of them
and move the file to the replay/archive directory.
"""
files = glob.glob('./replay/toDo/*')
sorted_files = sorted(files, key=os.path.getctime)
if not sorted_files: # list is not empty
LOG.debug('Found %s, beginning execution.', sorted_files)
for command_file in sorted_files:
with open(command_file, 'r') as command:
cmd = command.read()
LOG.debug('executing command: %s', cmd)
resp = run([cmd, '-v', 'DEBUG'], shell=True, check=True)
LOG.debug(resp)
LOG.debug('moving %s to archive', command.name)
move_command = 'mv {0} ./replay/archive/'.format(command.name)
run(move_command, shell=True, check=True)
LOG.info('LaunchDarkly is now up to date.')
else:
LOG.warning('No files found, nothing to replay.') | python | def execute_replay() -> None:
"""
Execute all commands.
For every command that is found in replay/toDo, execute each of them
and move the file to the replay/archive directory.
"""
files = glob.glob('./replay/toDo/*')
sorted_files = sorted(files, key=os.path.getctime)
if not sorted_files: # list is not empty
LOG.debug('Found %s, beginning execution.', sorted_files)
for command_file in sorted_files:
with open(command_file, 'r') as command:
cmd = command.read()
LOG.debug('executing command: %s', cmd)
resp = run([cmd, '-v', 'DEBUG'], shell=True, check=True)
LOG.debug(resp)
LOG.debug('moving %s to archive', command.name)
move_command = 'mv {0} ./replay/archive/'.format(command.name)
run(move_command, shell=True, check=True)
LOG.info('LaunchDarkly is now up to date.')
else:
LOG.warning('No files found, nothing to replay.') | [
"def",
"execute_replay",
"(",
")",
"->",
"None",
":",
"files",
"=",
"glob",
".",
"glob",
"(",
"'./replay/toDo/*'",
")",
"sorted_files",
"=",
"sorted",
"(",
"files",
",",
"key",
"=",
"os",
".",
"path",
".",
"getctime",
")",
"if",
"not",
"sorted_files",
":",
"# list is not empty",
"LOG",
".",
"debug",
"(",
"'Found %s, beginning execution.'",
",",
"sorted_files",
")",
"for",
"command_file",
"in",
"sorted_files",
":",
"with",
"open",
"(",
"command_file",
",",
"'r'",
")",
"as",
"command",
":",
"cmd",
"=",
"command",
".",
"read",
"(",
")",
"LOG",
".",
"debug",
"(",
"'executing command: %s'",
",",
"cmd",
")",
"resp",
"=",
"run",
"(",
"[",
"cmd",
",",
"'-v'",
",",
"'DEBUG'",
"]",
",",
"shell",
"=",
"True",
",",
"check",
"=",
"True",
")",
"LOG",
".",
"debug",
"(",
"resp",
")",
"LOG",
".",
"debug",
"(",
"'moving %s to archive'",
",",
"command",
".",
"name",
")",
"move_command",
"=",
"'mv {0} ./replay/archive/'",
".",
"format",
"(",
"command",
".",
"name",
")",
"run",
"(",
"move_command",
",",
"shell",
"=",
"True",
",",
"check",
"=",
"True",
")",
"LOG",
".",
"info",
"(",
"'LaunchDarkly is now up to date.'",
")",
"else",
":",
"LOG",
".",
"warning",
"(",
"'No files found, nothing to replay.'",
")"
] | Execute all commands.
For every command that is found in replay/toDo, execute each of them
and move the file to the replay/archive directory. | [
"Execute",
"all",
"commands",
"."
] | train | https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/replay_builder.py#L73-L96 |
vsoch/helpme | helpme/utils/terminal.py | choice_prompt | def choice_prompt(prompt, choices=None, choice=None):
'''Ask the user for a prompt, and only return when one of the requested
options is provided.
Parameters
==========
prompt: the prompt to ask the user
choices: a list of choices that are valid, defaults to [Y/N/y/n]
'''
if not choices:
choices = ["y", "n", "Y", "N"]
print(prompt)
get_input = getattr(__builtins__, 'raw_input', input)
pretty_choices = '/'.join(choices)
message = 'Please enter your choice [%s] : ' %(pretty_choices)
while choice not in choices:
choice = get_input(message).strip()
# If the option isn't valid, this is shown next
message = "Please enter a valid option in [%s]" %(pretty_choices)
return choice | python | def choice_prompt(prompt, choices=None, choice=None):
'''Ask the user for a prompt, and only return when one of the requested
options is provided.
Parameters
==========
prompt: the prompt to ask the user
choices: a list of choices that are valid, defaults to [Y/N/y/n]
'''
if not choices:
choices = ["y", "n", "Y", "N"]
print(prompt)
get_input = getattr(__builtins__, 'raw_input', input)
pretty_choices = '/'.join(choices)
message = 'Please enter your choice [%s] : ' %(pretty_choices)
while choice not in choices:
choice = get_input(message).strip()
# If the option isn't valid, this is shown next
message = "Please enter a valid option in [%s]" %(pretty_choices)
return choice | [
"def",
"choice_prompt",
"(",
"prompt",
",",
"choices",
"=",
"None",
",",
"choice",
"=",
"None",
")",
":",
"if",
"not",
"choices",
":",
"choices",
"=",
"[",
"\"y\"",
",",
"\"n\"",
",",
"\"Y\"",
",",
"\"N\"",
"]",
"print",
"(",
"prompt",
")",
"get_input",
"=",
"getattr",
"(",
"__builtins__",
",",
"'raw_input'",
",",
"input",
")",
"pretty_choices",
"=",
"'/'",
".",
"join",
"(",
"choices",
")",
"message",
"=",
"'Please enter your choice [%s] : '",
"%",
"(",
"pretty_choices",
")",
"while",
"choice",
"not",
"in",
"choices",
":",
"choice",
"=",
"get_input",
"(",
"message",
")",
".",
"strip",
"(",
")",
"# If the option isn't valid, this is shown next",
"message",
"=",
"\"Please enter a valid option in [%s]\"",
"%",
"(",
"pretty_choices",
")",
"return",
"choice"
] | Ask the user for a prompt, and only return when one of the requested
options is provided.
Parameters
==========
prompt: the prompt to ask the user
choices: a list of choices that are valid, defaults to [Y/N/y/n] | [
"Ask",
"the",
"user",
"for",
"a",
"prompt",
"and",
"only",
"return",
"when",
"one",
"of",
"the",
"requested",
"options",
"is",
"provided",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/terminal.py#L46-L68 |
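A minimal interactive sketch for choice_prompt above; the prompt text is invented.

from helpme.utils.terminal import choice_prompt

answer = choice_prompt("Submit this report?", choices=["y", "n"])
if answer == "y":
    print("submitting...")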
vsoch/helpme | helpme/utils/terminal.py | regexp_prompt | def regexp_prompt(prompt, regexp='.', answer=''):
'''Ask the user for a text entry that matches a regular expression
Parameters
==========
prompt: the prompt to ask the user
regexp: the regular expression to match. defaults to anything.
'''
get_input = getattr(__builtins__, 'raw_input', input)
while not re.search(regexp, answer):
answer = get_input(prompt + ': ').strip()
# If the option isn't valid, this is shown next
message = "Your entry must match the regular expression %s" %regexp
return answer | python | def regexp_prompt(prompt, regexp='.', answer=''):
'''Ask the user for a text entry that matches a regular expression
Parameters
==========
prompt: the prompt to ask the user
regexp: the regular expression to match. defaults to anything.
'''
get_input = getattr(__builtins__, 'raw_input', input)
while not re.search(regexp, answer):
answer = get_input(prompt + ': ').strip()
# If the option isn't valid, this is shown next
message = "Your entry must match the regular expression %s" %regexp
return answer | [
"def",
"regexp_prompt",
"(",
"prompt",
",",
"regexp",
"=",
"'.'",
",",
"answer",
"=",
"''",
")",
":",
"get_input",
"=",
"getattr",
"(",
"__builtins__",
",",
"'raw_input'",
",",
"input",
")",
"while",
"not",
"re",
".",
"search",
"(",
"regexp",
",",
"answer",
")",
":",
"answer",
"=",
"get_input",
"(",
"prompt",
"+",
"': '",
")",
".",
"strip",
"(",
")",
"# If the option isn't valid, this is shown next",
"message",
"=",
"\"Your entry must match the regular expression %s\"",
"%",
"regexp",
"return",
"answer"
] | Ask the user for a text entry that matches a regular expression
Parameters
==========
prompt: the prompt to ask the user
regexp: the regular expression to match. defaults to anything. | [
"Ask",
"the",
"user",
"for",
"a",
"text",
"entry",
"that",
"matches",
"a",
"regular",
"expression"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/terminal.py#L71-L86 |
vsoch/helpme | helpme/utils/terminal.py | which | def which(software, strip_newline=True):
'''get_install will return the path to where an executable is installed.
'''
if software is None:
software = "singularity"
cmd = ['which', software ]
try:
result = run_command(cmd)
if strip_newline is True:
result['message'] = result['message'].strip('\n')
return result
except: # FileNotFoundError
return None | python | def which(software, strip_newline=True):
'''get_install will return the path to where an executable is installed.
'''
if software is None:
software = "singularity"
cmd = ['which', software ]
try:
result = run_command(cmd)
if strip_newline is True:
result['message'] = result['message'].strip('\n')
return result
except: # FileNotFoundError
return None | [
"def",
"which",
"(",
"software",
",",
"strip_newline",
"=",
"True",
")",
":",
"if",
"software",
"is",
"None",
":",
"software",
"=",
"\"singularity\"",
"cmd",
"=",
"[",
"'which'",
",",
"software",
"]",
"try",
":",
"result",
"=",
"run_command",
"(",
"cmd",
")",
"if",
"strip_newline",
"is",
"True",
":",
"result",
"[",
"'message'",
"]",
"=",
"result",
"[",
"'message'",
"]",
".",
"strip",
"(",
"'\\n'",
")",
"return",
"result",
"except",
":",
"# FileNotFoundError",
"return",
"None"
] | get_install will return the path to where an executable is installed. | [
"get_install",
"will",
"return",
"the",
"path",
"to",
"where",
"an",
"executable",
"is",
"installed",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/terminal.py#L91-L104 |
PlaidWeb/Pushl | pushl/utils.py | guess_encoding | def guess_encoding(request):
""" Try to guess the encoding of a request without going through the slow chardet process"""
ctype = request.headers.get('content-type')
if not ctype:
# we don't have a content-type, somehow, so...
LOGGER.warning("%s: no content-type; headers are %s",
request.url, request.headers)
return 'utf-8'
# explicit declaration
match = re.search(r'charset=([^ ;]*)(;| |$)', ctype)
if match:
return match[1]
# html default
if ctype.startswith('text/html'):
return 'iso-8859-1'
# everything else's default
return 'utf-8' | python | def guess_encoding(request):
""" Try to guess the encoding of a request without going through the slow chardet process"""
ctype = request.headers.get('content-type')
if not ctype:
# we don't have a content-type, somehow, so...
LOGGER.warning("%s: no content-type; headers are %s",
request.url, request.headers)
return 'utf-8'
# explicit declaration
match = re.search(r'charset=([^ ;]*)(;| |$)', ctype)
if match:
return match[1]
# html default
if ctype.startswith('text/html'):
return 'iso-8859-1'
# everything else's default
return 'utf-8' | [
"def",
"guess_encoding",
"(",
"request",
")",
":",
"ctype",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"'content-type'",
")",
"if",
"not",
"ctype",
":",
"# we don't have a content-type, somehow, so...",
"LOGGER",
".",
"warning",
"(",
"\"%s: no content-type; headers are %s\"",
",",
"request",
".",
"url",
",",
"request",
".",
"headers",
")",
"return",
"'utf-8'",
"# explicit declaration",
"match",
"=",
"re",
".",
"search",
"(",
"r'charset=([^ ;]*)(;| |$)'",
",",
"ctype",
")",
"if",
"match",
":",
"return",
"match",
"[",
"1",
"]",
"# html default",
"if",
"ctype",
".",
"startswith",
"(",
"'text/html'",
")",
":",
"return",
"'iso-8859-1'",
"# everything else's default",
"return",
"'utf-8'"
] | Try to guess the encoding of a request without going through the slow chardet process | [
"Try",
"to",
"guess",
"the",
"encoding",
"of",
"a",
"request",
"without",
"going",
"through",
"the",
"slow",
"chardet",
"process"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/utils.py#L15-L34 |
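A minimal sketch of guess_encoding() against a stand-in object; StubResponse is hypothetical and only carries the two attributes the function reads (real callers pass an aiohttp response).

    class StubResponse:
        url = "https://example.com/"
        headers = {"content-type": "text/html; charset=UTF-8"}

    print(guess_encoding(StubResponse()))   # explicit charset wins -> "UTF-8"

    StubResponse.headers = {"content-type": "text/html"}
    print(guess_encoding(StubResponse()))   # no charset on text/html -> "iso-8859-1"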
PlaidWeb/Pushl | pushl/utils.py | _make_headers | def _make_headers(config, kwargs):
""" Replace the kwargs with one where the headers include our user-agent """
headers = kwargs.get('headers')
headers = headers.copy() if headers is not None else {}
headers['User-Agent'] = config.args.user_agent
kwargs = kwargs.copy()
kwargs['headers'] = headers
return kwargs | python | def _make_headers(config, kwargs):
""" Replace the kwargs with one where the headers include our user-agent """
headers = kwargs.get('headers')
headers = headers.copy() if headers is not None else {}
headers['User-Agent'] = config.args.user_agent
kwargs = kwargs.copy()
kwargs['headers'] = headers
return kwargs | [
"def",
"_make_headers",
"(",
"config",
",",
"kwargs",
")",
":",
"headers",
"=",
"kwargs",
".",
"get",
"(",
"'headers'",
")",
"headers",
"=",
"headers",
".",
"copy",
"(",
")",
"if",
"headers",
"is",
"not",
"None",
"else",
"{",
"}",
"headers",
"[",
"'User-Agent'",
"]",
"=",
"config",
".",
"args",
".",
"user_agent",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"kwargs",
"[",
"'headers'",
"]",
"=",
"headers",
"return",
"kwargs"
] | Replace the kwargs with one where the headers include our user-agent | [
"Replace",
"the",
"kwargs",
"with",
"one",
"where",
"the",
"headers",
"include",
"our",
"user",
"-",
"agent"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/utils.py#L97-L106 |
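A self-contained sketch of the header merge performed by _make_headers(); _Args and _Config are hypothetical stand-ins for the parsed configuration object the real caller provides.

    class _Args:
        user_agent = "Pushl/example"

    class _Config:
        args = _Args()

    merged = _make_headers(_Config(), {"headers": {"Accept": "text/html"}})
    print(merged)
    # {'headers': {'Accept': 'text/html', 'User-Agent': 'Pushl/example'}}; the caller's dict is left untouched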
PlaidWeb/Pushl | pushl/utils.py | retry_get | async def retry_get(config, url, *args, **kwargs):
""" aiohttp wrapper for GET """
return await _retry_do(config.session.get, url, *args,
**_make_headers(config, kwargs)) | python | async def retry_get(config, url, *args, **kwargs):
""" aiohttp wrapper for GET """
return await _retry_do(config.session.get, url, *args,
**_make_headers(config, kwargs)) | [
"async",
"def",
"retry_get",
"(",
"config",
",",
"url",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"await",
"_retry_do",
"(",
"config",
".",
"session",
".",
"get",
",",
"url",
",",
"*",
"args",
",",
"*",
"*",
"_make_headers",
"(",
"config",
",",
"kwargs",
")",
")"
] | aiohttp wrapper for GET | [
"aiohttp",
"wrapper",
"for",
"GET"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/utils.py#L109-L112 |
PlaidWeb/Pushl | pushl/utils.py | retry_post | async def retry_post(config, url, *args, **kwargs):
""" aiohttp wrapper for POST """
return await _retry_do(config.session.post, url, *args,
**_make_headers(config, kwargs)) | python | async def retry_post(config, url, *args, **kwargs):
""" aiohttp wrapper for POST """
return await _retry_do(config.session.post, url, *args,
**_make_headers(config, kwargs)) | [
"async",
"def",
"retry_post",
"(",
"config",
",",
"url",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"await",
"_retry_do",
"(",
"config",
".",
"session",
".",
"post",
",",
"url",
",",
"*",
"args",
",",
"*",
"*",
"_make_headers",
"(",
"config",
",",
"kwargs",
")",
")"
] | aiohttp wrapper for POST | [
"aiohttp",
"wrapper",
"for",
"POST"
] | train | https://github.com/PlaidWeb/Pushl/blob/5ea92275c37a6c1989e3d5f53e26c6e0ebfb9a8c/pushl/utils.py#L115-L118 |
vsoch/helpme | helpme/main/base/__init__.py | HelperBase.run | def run(self, positionals=None):
'''run the entire helper procedure, including:
- start: initialize the helper, collection preferences
- record: record any relevant features for the environment / session
        - interact: interact with the user for additional information
- submit: submit the completed request
Each of the above functions for a helper can determine global
collection preferences from the system helpme.cfg in the module
root. After performing global actions, each function then calls
a class specific function of the same name (e.g., start calls
_start) that is implemented by the helper class to do custom
operations for the helper.
'''
# Step 0: Each run session is given a fun name
self.run_id = RobotNamer().generate()
# Step 1: get config steps
steps = self.config._sections[self.name]
# Step 2: Start the helper (announce and run start, which is init code)
self.start(positionals)
# Step 3: Iterate through flow, check each step for known record/prompt,
# and collect outputs appropriately
for step, content in steps.items():
self.collect(step, content)
# Step 4: When data collected, pass data structures to submit
self.submit() | python | def run(self, positionals=None):
'''run the entire helper procedure, including:
- start: initialize the helper, collection preferences
- record: record any relevant features for the environment / session
        - interact: interact with the user for additional information
- submit: submit the completed request
Each of the above functions for a helper can determine global
collection preferences from the system helpme.cfg in the module
root. After performing global actions, each function then calls
a class specific function of the same name (e.g., start calls
_start) that is implemented by the helper class to do custom
operations for the helper.
'''
# Step 0: Each run session is given a fun name
self.run_id = RobotNamer().generate()
# Step 1: get config steps
steps = self.config._sections[self.name]
# Step 2: Start the helper (announce and run start, which is init code)
self.start(positionals)
# Step 3: Iterate through flow, check each step for known record/prompt,
# and collect outputs appropriately
for step, content in steps.items():
self.collect(step, content)
# Step 4: When data collected, pass data structures to submit
self.submit() | [
"def",
"run",
"(",
"self",
",",
"positionals",
"=",
"None",
")",
":",
"# Step 0: Each run session is given a fun name",
"self",
".",
"run_id",
"=",
"RobotNamer",
"(",
")",
".",
"generate",
"(",
")",
"# Step 1: get config steps",
"steps",
"=",
"self",
".",
"config",
".",
"_sections",
"[",
"self",
".",
"name",
"]",
"# Step 2: Start the helper (announce and run start, which is init code)",
"self",
".",
"start",
"(",
"positionals",
")",
"# Step 3: Iterate through flow, check each step for known record/prompt,",
"# and collect outputs appropriately",
"for",
"step",
",",
"content",
"in",
"steps",
".",
"items",
"(",
")",
":",
"self",
".",
"collect",
"(",
"step",
",",
"content",
")",
"# Step 4: When data collected, pass data structures to submit",
"self",
".",
"submit",
"(",
")"
] | run the entire helper procedure, including:
- start: initialize the helper, collection preferences
- record: record any relevant features for the environment / session
         - interact: interact with the user for additional information
- submit: submit the completed request
Each of the above functions for a helper can determine global
collection preferences from the system helpme.cfg in the module
root. After performing global actions, each function then calls
a class specific function of the same name (e.g., start calls
_start) that is implemented by the helper class to do custom
operations for the helper. | [
"run",
"the",
"entire",
"helper",
"procedure",
"including",
":"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/__init__.py#L71-L102 |
vsoch/helpme | helpme/main/base/__init__.py | HelperBase.start | def start(self, positionals=None):
'''start the helper flow. We check helper system configurations to
determine components that should be collected for the submission.
This is where the client can also pass on any extra (positional)
arguments in a list from the user.
'''
bot.info('[helpme|%s]' %(self.name))
self.speak()
self._start(positionals) | python | def start(self, positionals=None):
'''start the helper flow. We check helper system configurations to
determine components that should be collected for the submission.
This is where the client can also pass on any extra (positional)
arguments in a list from the user.
'''
bot.info('[helpme|%s]' %(self.name))
self.speak()
self._start(positionals) | [
"def",
"start",
"(",
"self",
",",
"positionals",
"=",
"None",
")",
":",
"bot",
".",
"info",
"(",
"'[helpme|%s]'",
"%",
"(",
"self",
".",
"name",
")",
")",
"self",
".",
"speak",
"(",
")",
"self",
".",
"_start",
"(",
"positionals",
")"
] | start the helper flow. We check helper system configurations to
determine components that should be collected for the submission.
This is where the client can also pass on any extra (positional)
arguments in a list from the user. | [
"start",
"the",
"helper",
"flow",
".",
"We",
"check",
"helper",
"system",
"configurations",
"to",
"determine",
"components",
"that",
"should",
"be",
"collected",
"for",
"the",
"submission",
".",
"This",
"is",
"where",
"the",
"client",
"can",
"also",
"pass",
"on",
"any",
"extra",
"(",
"positional",
")",
"arguments",
"in",
"a",
"list",
"from",
"the",
"user",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/__init__.py#L105-L113 |
vsoch/helpme | helpme/main/base/__init__.py | HelperBase.collect | def collect(self, step, content):
'''given a name of a configuration key and the provided content, collect
the required metadata from the user.
Parameters
==========
step: the key in the configuration. Can be one of:
user_message_<name>
runtime_arg_<name>
record_asciinema
record_environment
user_prompt_<name>
content: the default value or boolean to indicate doing the step.
'''
# Option 1: The step is just a message to print to the user
if step.startswith('user_message'):
print(content)
# Option 2: The step is to collect a user prompt (if not at runtime)
elif step.startswith('user_prompt'):
self.collect_argument(step, content)
# Option 3: The step is to record an asciinema!
elif step == 'record_asciinema':
self.record_asciinema()
# Option 4: Record the user environment
elif step == "record_environment":
self.record_environment()
bot.debug(self.data) | python | def collect(self, step, content):
'''given a name of a configuration key and the provided content, collect
the required metadata from the user.
Parameters
==========
step: the key in the configuration. Can be one of:
user_message_<name>
runtime_arg_<name>
record_asciinema
record_environment
user_prompt_<name>
content: the default value or boolean to indicate doing the step.
'''
# Option 1: The step is just a message to print to the user
if step.startswith('user_message'):
print(content)
# Option 2: The step is to collect a user prompt (if not at runtime)
elif step.startswith('user_prompt'):
self.collect_argument(step, content)
# Option 3: The step is to record an asciinema!
elif step == 'record_asciinema':
self.record_asciinema()
# Option 4: Record the user environment
elif step == "record_environment":
self.record_environment()
bot.debug(self.data) | [
"def",
"collect",
"(",
"self",
",",
"step",
",",
"content",
")",
":",
"# Option 1: The step is just a message to print to the user",
"if",
"step",
".",
"startswith",
"(",
"'user_message'",
")",
":",
"print",
"(",
"content",
")",
"# Option 2: The step is to collect a user prompt (if not at runtime)",
"elif",
"step",
".",
"startswith",
"(",
"'user_prompt'",
")",
":",
"self",
".",
"collect_argument",
"(",
"step",
",",
"content",
")",
"# Option 3: The step is to record an asciinema!",
"elif",
"step",
"==",
"'record_asciinema'",
":",
"self",
".",
"record_asciinema",
"(",
")",
"# Option 4: Record the user environment",
"elif",
"step",
"==",
"\"record_environment\"",
":",
"self",
".",
"record_environment",
"(",
")",
"bot",
".",
"debug",
"(",
"self",
".",
"data",
")"
] | given a name of a configuration key and the provided content, collect
the required metadata from the user.
Parameters
==========
step: the key in the configuration. Can be one of:
user_message_<name>
runtime_arg_<name>
record_asciinema
record_environment
user_prompt_<name>
content: the default value or boolean to indicate doing the step. | [
"given",
"a",
"name",
"of",
"a",
"configuration",
"key",
"and",
"the",
"provided",
"content",
"collect",
"the",
"required",
"metadata",
"from",
"the",
"user",
".",
"Parameters",
"==========",
"step",
":",
"the",
"key",
"in",
"the",
"configuration",
".",
"Can",
"be",
"one",
"of",
":",
"user_message_<name",
">",
"runtime_arg_<name",
">",
"record_asciinema",
"record_environment",
"user_prompt_<name",
">",
"content",
":",
"the",
"default",
"value",
"or",
"boolean",
"to",
"indicate",
"doing",
"the",
"step",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/__init__.py#L138-L169 |
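A stand-alone sketch of the config-driven dispatch that collect() implements: each key in a helper's config section selects one collection step. The step names follow the docstring above; the actions are stubbed out here.

    steps = {
        'user_message_intro': 'Thanks for filing a help request!',
        'user_prompt_issue': 'What issue are you having?',
        'record_environment': True,
        'record_asciinema': True,
    }

    for step, content in steps.items():
        if step.startswith('user_message'):
            print(content)
        elif step.startswith('user_prompt'):
            print('would prompt the user:', content)
        elif step == 'record_environment':
            print('would snapshot whitelisted environment variables')
        elif step == 'record_asciinema':
            print('would offer to record the terminal session')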
vsoch/helpme | helpme/main/base/__init__.py | HelperBase.collect_argument | def collect_argument(self, step, message):
'''given a key in the configuration, collect the runtime argument if
provided. Otherwise, prompt the user for the value.
Parameters
==========
step: the name of the step, should be 'runtime_arg_<name>'
message: the content of the step, the message to show the user if the
argument <name> is not found under args.
'''
if step not in self.data:
self.data[step] = regexp_prompt(message) | python | def collect_argument(self, step, message):
'''given a key in the configuration, collect the runtime argument if
provided. Otherwise, prompt the user for the value.
Parameters
==========
step: the name of the step, should be 'runtime_arg_<name>'
message: the content of the step, the message to show the user if the
argument <name> is not found under args.
'''
if step not in self.data:
self.data[step] = regexp_prompt(message) | [
"def",
"collect_argument",
"(",
"self",
",",
"step",
",",
"message",
")",
":",
"if",
"step",
"not",
"in",
"self",
".",
"data",
":",
"self",
".",
"data",
"[",
"step",
"]",
"=",
"regexp_prompt",
"(",
"message",
")"
] | given a key in the configuration, collect the runtime argument if
provided. Otherwise, prompt the user for the value.
Parameters
==========
step: the name of the step, should be 'runtime_arg_<name>'
message: the content of the step, the message to show the user if the
argument <name> is not found under args. | [
"given",
"a",
"key",
"in",
"the",
"configuration",
"collect",
"the",
"runtime",
"argument",
"if",
"provided",
".",
"Otherwise",
"prompt",
"the",
"user",
"for",
"the",
"value",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/__init__.py#L172-L184 |
vsoch/helpme | helpme/main/base/__init__.py | HelperBase.record_environment | def record_environment(self):
'''collect a limited set of environment variables based on the list
        under record_environment in the configuration file.
'''
# whitelist is a newline separated list under record_environment
envars = self._get_setting(name='whitelist',
section='record_environment',
user=False)
if envars is not None:
# User uppercase
envars = [x.upper() for x in envars.split('\n')]
# Make transparent for the user
bot.custom(prefix="Environment ",
message='|'.join(envars),
color="CYAN")
# Iterate through and collect based on name
keep = [(k,v) for k,v in os.environ.items() if k.upper() in envars]
# Ask the user for permission
if confirm_prompt('Is this list ok to share?'):
self.data['record_environment'] = keep | python | def record_environment(self):
'''collect a limited set of environment variables based on the list
        under record_environment in the configuration file.
'''
# whitelist is a newline separated list under record_environment
envars = self._get_setting(name='whitelist',
section='record_environment',
user=False)
if envars is not None:
# User uppercase
envars = [x.upper() for x in envars.split('\n')]
# Make transparent for the user
bot.custom(prefix="Environment ",
message='|'.join(envars),
color="CYAN")
# Iterate through and collect based on name
keep = [(k,v) for k,v in os.environ.items() if k.upper() in envars]
# Ask the user for permission
if confirm_prompt('Is this list ok to share?'):
self.data['record_environment'] = keep | [
"def",
"record_environment",
"(",
"self",
")",
":",
"# whitelist is a newline separated list under record_environment",
"envars",
"=",
"self",
".",
"_get_setting",
"(",
"name",
"=",
"'whitelist'",
",",
"section",
"=",
"'record_environment'",
",",
"user",
"=",
"False",
")",
"if",
"envars",
"is",
"not",
"None",
":",
"# User uppercase",
"envars",
"=",
"[",
"x",
".",
"upper",
"(",
")",
"for",
"x",
"in",
"envars",
".",
"split",
"(",
"'\\n'",
")",
"]",
"# Make transparent for the user",
"bot",
".",
"custom",
"(",
"prefix",
"=",
"\"Environment \"",
",",
"message",
"=",
"'|'",
".",
"join",
"(",
"envars",
")",
",",
"color",
"=",
"\"CYAN\"",
")",
"# Iterate through and collect based on name",
"keep",
"=",
"[",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"os",
".",
"environ",
".",
"items",
"(",
")",
"if",
"k",
".",
"upper",
"(",
")",
"in",
"envars",
"]",
"# Ask the user for permission",
"if",
"confirm_prompt",
"(",
"'Is this list ok to share?'",
")",
":",
"self",
".",
"data",
"[",
"'record_environment'",
"]",
"=",
"keep"
] | collect a limited set of environment variables based on the list
        under record_environment in the configuration file. | [
"collect",
"a",
"limited",
"set",
"of",
"environment",
"variables",
"based",
"on",
"the",
"list",
"under",
"record_envirionment",
"in",
"the",
"configuration",
"file",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/__init__.py#L189-L219 |
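The whitelist filtering used by record_environment() can be reproduced in isolation; the variable names below are only an example of what a helper's whitelist might contain.

    import os

    whitelist = 'PATH\nLANG\nSINGULARITY_CACHEDIR'
    envars = [x.upper() for x in whitelist.split('\n')]
    keep = [(k, v) for k, v in os.environ.items() if k.upper() in envars]
    print(keep)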
vsoch/helpme | helpme/main/base/__init__.py | HelperBase.record_asciinema | def record_asciinema(self):
'''record an asciinema from the user session, saving the file to
a temporary file and showing the user so if he/she needs to do it
again, the file can be provided. The flow of events below makes
the following checks:
1. The user confirms it is ok to record
2. The record_asciinema setting is present and True in the config
3. An asciinema file path has not been provided by the user
'''
# If the user already provided a file, we don't need to ask again
if 'record_asciinema' not in self.data:
if confirm_prompt("Would you like to send a terminal recording?"):
try:
record = self.config.getboolean(self.name, 'record_asciinema')
filename = record_asciinema()
self.data['record_asciinema'] = filename
message = '''If you need to run helpme again you can give
the path to this file with --asciinema %s''' % filename
bot.custom(prefix="Asciinema ", message=message, color="CYAN")
except NoOptionError:
bot.warning('Cannot record asciinema, skipping.') | python | def record_asciinema(self):
'''record an asciinema from the user session, saving the file to
a temporary file and showing the user so if he/she needs to do it
again, the file can be provided. The flow of events below makes
the following checks:
1. The user confirms it is ok to record
2. The record_asciinema setting is present and True in the config
3. An asciinema file path has not been provided by the user
'''
# If the user already provided a file, we don't need to ask again
if 'record_asciinema' not in self.data:
if confirm_prompt("Would you like to send a terminal recording?"):
try:
record = self.config.getboolean(self.name, 'record_asciinema')
filename = record_asciinema()
self.data['record_asciinema'] = filename
message = '''If you need to run helpme again you can give
the path to this file with --asciinema %s''' % filename
bot.custom(prefix="Asciinema ", message=message, color="CYAN")
except NoOptionError:
bot.warning('Cannot record asciinema, skipping.') | [
"def",
"record_asciinema",
"(",
"self",
")",
":",
"# If the user already provided a file, we don't need to ask again",
"if",
"'record_asciinema'",
"not",
"in",
"self",
".",
"data",
":",
"if",
"confirm_prompt",
"(",
"\"Would you like to send a terminal recording?\"",
")",
":",
"try",
":",
"record",
"=",
"self",
".",
"config",
".",
"getboolean",
"(",
"self",
".",
"name",
",",
"'record_asciinema'",
")",
"filename",
"=",
"record_asciinema",
"(",
")",
"self",
".",
"data",
"[",
"'record_asciinema'",
"]",
"=",
"filename",
"message",
"=",
"'''If you need to run helpme again you can give\n the path to this file with --asciinema %s'''",
"%",
"filename",
"bot",
".",
"custom",
"(",
"prefix",
"=",
"\"Asciinema \"",
",",
"message",
"=",
"message",
",",
"color",
"=",
"\"CYAN\"",
")",
"except",
"NoOptionError",
":",
"bot",
".",
"warning",
"(",
"'Cannot record asciinema, skipping.'",
")"
] | record an asciinema from the user session, saving the file to
a temporary file and showing the user so if he/she needs to do it
again, the file can be provided. The flow of events below makes
the following checks:
1. The user confirms it is ok to record
2. The record_asciinema setting is present and True in the config
3. An asciinema file path has not been provided by the user | [
"record",
"an",
"asciinema",
"from",
"the",
"user",
"session",
"saving",
"the",
"file",
"to",
"a",
"temporary",
"file",
"and",
"showing",
"the",
"user",
"so",
"if",
"he",
"/",
"she",
"needs",
"to",
"do",
"it",
"again",
"the",
"file",
"can",
"be",
"provided",
".",
"The",
"flow",
"of",
"events",
"below",
"makes",
"the",
"following",
"checks",
":"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/__init__.py#L222-L252 |
vsoch/helpme | helpme/main/base/__init__.py | HelperBase.speak | def speak(self):
'''
a function for the helper to announce him or herself, depending
on the level specified. If you want your client to have additional
announced things here, then implement the class `_speak` for your
client.
'''
if self.quiet is False:
bot.info('[helper|%s]' %(self.name))
self._speak() | python | def speak(self):
'''
a function for the helper to announce him or herself, depending
on the level specified. If you want your client to have additional
announced things here, then implement the class `_speak` for your
client.
'''
if self.quiet is False:
bot.info('[helper|%s]' %(self.name))
self._speak() | [
"def",
"speak",
"(",
"self",
")",
":",
"if",
"self",
".",
"quiet",
"is",
"False",
":",
"bot",
".",
"info",
"(",
"'[helper|%s]'",
"%",
"(",
"self",
".",
"name",
")",
")",
"self",
".",
"_speak",
"(",
")"
] | a function for the helper to announce him or herself, depending
on the level specified. If you want your client to have additional
announced things here, then implement the class `_speak` for your
client. | [
"a",
"function",
"for",
"the",
"helper",
"to",
"announce",
"him",
"or",
"herself",
"depending",
"on",
"the",
"level",
"specified",
".",
"If",
"you",
"want",
"your",
"client",
"to",
"have",
"additional",
"announced",
"things",
"here",
"then",
"implement",
"the",
"class",
"_speak",
"for",
"your",
"client",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/base/__init__.py#L257-L267 |
SetBased/py-stratum | pystratum/Constants.py | Constants.main | def main(self, config_filename, regex):
"""
:param str config_filename: The config filename.
:param str regex: The regular expression for columns which we want to use.
:rtype: int
"""
self._read_configuration_file(config_filename)
if self._constants_filename:
self._io.title('Constants')
self.connect()
self._get_old_columns()
self._get_columns()
self._enhance_columns()
self._merge_columns()
self._write_columns()
self._get_labels(regex)
self._fill_constants()
self.__write_constant_class()
self.disconnect()
self.__log_number_of_constants()
else:
self._io.log_verbose('Constants not enabled')
return 0 | python | def main(self, config_filename, regex):
"""
:param str config_filename: The config filename.
:param str regex: The regular expression for columns which we want to use.
:rtype: int
"""
self._read_configuration_file(config_filename)
if self._constants_filename:
self._io.title('Constants')
self.connect()
self._get_old_columns()
self._get_columns()
self._enhance_columns()
self._merge_columns()
self._write_columns()
self._get_labels(regex)
self._fill_constants()
self.__write_constant_class()
self.disconnect()
self.__log_number_of_constants()
else:
self._io.log_verbose('Constants not enabled')
return 0 | [
"def",
"main",
"(",
"self",
",",
"config_filename",
",",
"regex",
")",
":",
"self",
".",
"_read_configuration_file",
"(",
"config_filename",
")",
"if",
"self",
".",
"_constants_filename",
":",
"self",
".",
"_io",
".",
"title",
"(",
"'Constants'",
")",
"self",
".",
"connect",
"(",
")",
"self",
".",
"_get_old_columns",
"(",
")",
"self",
".",
"_get_columns",
"(",
")",
"self",
".",
"_enhance_columns",
"(",
")",
"self",
".",
"_merge_columns",
"(",
")",
"self",
".",
"_write_columns",
"(",
")",
"self",
".",
"_get_labels",
"(",
"regex",
")",
"self",
".",
"_fill_constants",
"(",
")",
"self",
".",
"__write_constant_class",
"(",
")",
"self",
".",
"disconnect",
"(",
")",
"self",
".",
"__log_number_of_constants",
"(",
")",
"else",
":",
"self",
".",
"_io",
".",
"log_verbose",
"(",
"'Constants not enabled'",
")",
"return",
"0"
] | :param str config_filename: The config filename.
:param str regex: The regular expression for columns which we want to use.
:rtype: int | [
":",
"param",
"str",
"config_filename",
":",
"The",
"config",
"filename",
".",
":",
"param",
"str",
"regex",
":",
"The",
"regular",
"expression",
"for",
"columns",
"which",
"we",
"want",
"to",
"use",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/Constants.py#L90-L116 |
SetBased/py-stratum | pystratum/Constants.py | Constants.__log_number_of_constants | def __log_number_of_constants(self):
"""
Logs the number of constants generated.
"""
n_id = len(self._labels)
n_widths = len(self._constants) - n_id
self._io.writeln('')
self._io.text('Number of constants based on column widths: {0}'.format(n_widths))
self._io.text('Number of constants based on database IDs : {0}'.format(n_id)) | python | def __log_number_of_constants(self):
"""
Logs the number of constants generated.
"""
n_id = len(self._labels)
n_widths = len(self._constants) - n_id
self._io.writeln('')
self._io.text('Number of constants based on column widths: {0}'.format(n_widths))
self._io.text('Number of constants based on database IDs : {0}'.format(n_id)) | [
"def",
"__log_number_of_constants",
"(",
"self",
")",
":",
"n_id",
"=",
"len",
"(",
"self",
".",
"_labels",
")",
"n_widths",
"=",
"len",
"(",
"self",
".",
"_constants",
")",
"-",
"n_id",
"self",
".",
"_io",
".",
"writeln",
"(",
"''",
")",
"self",
".",
"_io",
".",
"text",
"(",
"'Number of constants based on column widths: {0}'",
".",
"format",
"(",
"n_widths",
")",
")",
"self",
".",
"_io",
".",
"text",
"(",
"'Number of constants based on database IDs : {0}'",
".",
"format",
"(",
"n_id",
")",
")"
] | Logs the number of constants generated. | [
"Logs",
"the",
"number",
"of",
"constants",
"generated",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/Constants.py#L119-L128 |
SetBased/py-stratum | pystratum/Constants.py | Constants._read_configuration_file | def _read_configuration_file(self, config_filename):
"""
Reads parameters from the configuration file.
:param str config_filename: The name of the configuration file.
"""
config = configparser.ConfigParser()
config.read(config_filename)
self._constants_filename = config.get('constants', 'columns')
self._prefix = config.get('constants', 'prefix')
self._class_name = config.get('constants', 'class') | python | def _read_configuration_file(self, config_filename):
"""
Reads parameters from the configuration file.
:param str config_filename: The name of the configuration file.
"""
config = configparser.ConfigParser()
config.read(config_filename)
self._constants_filename = config.get('constants', 'columns')
self._prefix = config.get('constants', 'prefix')
self._class_name = config.get('constants', 'class') | [
"def",
"_read_configuration_file",
"(",
"self",
",",
"config_filename",
")",
":",
"config",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"config",
".",
"read",
"(",
"config_filename",
")",
"self",
".",
"_constants_filename",
"=",
"config",
".",
"get",
"(",
"'constants'",
",",
"'columns'",
")",
"self",
".",
"_prefix",
"=",
"config",
".",
"get",
"(",
"'constants'",
",",
"'prefix'",
")",
"self",
".",
"_class_name",
"=",
"config",
".",
"get",
"(",
"'constants'",
",",
"'class'",
")"
] | Reads parameters from the configuration file.
:param str config_filename: The name of the configuration file. | [
"Reads",
"parameters",
"from",
"the",
"configuration",
"file",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/Constants.py#L131-L142 |
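A small sketch of the [constants] section that _read_configuration_file() expects; the option values are illustrative.

    import configparser

    sample = "[constants]\ncolumns = etc/columns.txt\nprefix = CO_\nclass = my_app.Constants\n"
    config = configparser.ConfigParser()
    config.read_string(sample)
    print(config.get('constants', 'class'))   # my_app.Constants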
SetBased/py-stratum | pystratum/Constants.py | Constants.__write_constant_class | def __write_constant_class(self):
"""
Inserts new and replaces old (if any) constant declaration statements in the class that acts like a namespace
for constants.
"""
helper = ConstantClass(self._class_name, self._io)
content = helper.source_with_constants(self._constants)
Util.write_two_phases(helper.file_name(), content, self._io) | python | def __write_constant_class(self):
"""
Inserts new and replaces old (if any) constant declaration statements in the class that acts like a namespace
for constants.
"""
helper = ConstantClass(self._class_name, self._io)
content = helper.source_with_constants(self._constants)
Util.write_two_phases(helper.file_name(), content, self._io) | [
"def",
"__write_constant_class",
"(",
"self",
")",
":",
"helper",
"=",
"ConstantClass",
"(",
"self",
".",
"_class_name",
",",
"self",
".",
"_io",
")",
"content",
"=",
"helper",
".",
"source_with_constants",
"(",
"self",
".",
"_constants",
")",
"Util",
".",
"write_two_phases",
"(",
"helper",
".",
"file_name",
"(",
")",
",",
"content",
",",
"self",
".",
"_io",
")"
] | Inserts new and replaces old (if any) constant declaration statements in the class that acts like a namespace
for constants. | [
"Inserts",
"new",
"and",
"replaces",
"old",
"(",
"if",
"any",
")",
"constant",
"declaration",
"statements",
"in",
"the",
"class",
"that",
"acts",
"like",
"a",
"namespace",
"for",
"constants",
"."
] | train | https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/Constants.py#L205-L214 |
launchdarkly/relayCommander | relay_commander/validator.py | _check_env_var | def _check_env_var(envvar: str) -> bool:
"""Check Environment Variable to verify that it is set and not empty.
:param envvar: Environment Variable to Check.
:returns: True if Environment Variable is set and not empty.
:raises: KeyError if Environment Variable is not set or is empty.
.. versionadded:: 0.0.12
"""
if os.getenv(envvar) is None:
raise KeyError(
"Required ENVVAR: {0} is not set".format(envvar))
if not os.getenv(envvar): # test if env var is empty
raise KeyError(
"Required ENVVAR: {0} is empty".format(envvar))
return True | python | def _check_env_var(envvar: str) -> bool:
"""Check Environment Variable to verify that it is set and not empty.
:param envvar: Environment Variable to Check.
:returns: True if Environment Variable is set and not empty.
:raises: KeyError if Environment Variable is not set or is empty.
.. versionadded:: 0.0.12
"""
if os.getenv(envvar) is None:
raise KeyError(
"Required ENVVAR: {0} is not set".format(envvar))
if not os.getenv(envvar): # test if env var is empty
raise KeyError(
"Required ENVVAR: {0} is empty".format(envvar))
return True | [
"def",
"_check_env_var",
"(",
"envvar",
":",
"str",
")",
"->",
"bool",
":",
"if",
"os",
".",
"getenv",
"(",
"envvar",
")",
"is",
"None",
":",
"raise",
"KeyError",
"(",
"\"Required ENVVAR: {0} is not set\"",
".",
"format",
"(",
"envvar",
")",
")",
"if",
"not",
"os",
".",
"getenv",
"(",
"envvar",
")",
":",
"# test if env var is empty",
"raise",
"KeyError",
"(",
"\"Required ENVVAR: {0} is empty\"",
".",
"format",
"(",
"envvar",
")",
")",
"return",
"True"
] | Check Environment Variable to verify that it is set and not empty.
:param envvar: Environment Variable to Check.
:returns: True if Environment Variable is set and not empty.
:raises: KeyError if Environment Variable is not set or is empty.
.. versionadded:: 0.0.12 | [
"Check",
"Environment",
"Variable",
"to",
"verify",
"that",
"it",
"is",
"set",
"and",
"not",
"empty",
"."
] | train | https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/validator.py#L19-L36 |
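A quick sketch of the checks performed by _check_env_var(); EXAMPLE_TOKEN is a hypothetical variable name, not one of the module's required ones.

    import os

    os.environ['EXAMPLE_TOKEN'] = 'abc123'
    print(_check_env_var('EXAMPLE_TOKEN'))      # True

    os.environ['EXAMPLE_TOKEN'] = ''
    try:
        _check_env_var('EXAMPLE_TOKEN')
    except KeyError as err:
        print(err)                              # 'Required ENVVAR: EXAMPLE_TOKEN is empty'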
launchdarkly/relayCommander | relay_commander/validator.py | valid_state | def valid_state(state: str) -> bool:
"""Validate State Argument
Checks that either 'on' or 'off' was entered as an argument to the
CLI and make it lower case.
:param state: state to validate.
:returns: True if state is valid.
.. versionchanged:: 0.0.12
        This method was renamed from validateState to valid_state to conform
to PEP-8. Also removed "magic" text for state and instead reference the
_VALID_STATES constant.
"""
lower_case_state = state.lower()
if lower_case_state in _VALID_STATES:
return True
return False | python | def valid_state(state: str) -> bool:
"""Validate State Argument
Checks that either 'on' or 'off' was entered as an argument to the
CLI and make it lower case.
:param state: state to validate.
:returns: True if state is valid.
.. versionchanged:: 0.0.12
        This method was renamed from validateState to valid_state to conform
to PEP-8. Also removed "magic" text for state and instead reference the
_VALID_STATES constant.
"""
lower_case_state = state.lower()
if lower_case_state in _VALID_STATES:
return True
return False | [
"def",
"valid_state",
"(",
"state",
":",
"str",
")",
"->",
"bool",
":",
"lower_case_state",
"=",
"state",
".",
"lower",
"(",
")",
"if",
"lower_case_state",
"in",
"_VALID_STATES",
":",
"return",
"True",
"return",
"False"
] | Validate State Argument
Checks that either 'on' or 'off' was entered as an argument to the
CLI and make it lower case.
:param state: state to validate.
:returns: True if state is valid.
.. versionchanged:: 0.0.12
        This method was renamed from validateState to valid_state to conform
to PEP-8. Also removed "magic" text for state and instead reference the
_VALID_STATES constant. | [
"Validate",
"State",
"Argument"
] | train | https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/validator.py#L39-L58 |
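Usage sketch for valid_state(), assuming _VALID_STATES holds 'on' and 'off' as the docstring indicates; any casing is accepted because the input is lower-cased first.

    print(valid_state('ON'))      # True
    print(valid_state('off'))     # True
    print(valid_state('maybe'))   # False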
launchdarkly/relayCommander | relay_commander/validator.py | valid_env_vars | def valid_env_vars() -> bool:
"""Validate that required env vars exist.
:returns: True if required env vars exist.
.. versionadded:: 0.0.12
"""
for envvar in _REQUIRED_ENV_VARS:
try:
_check_env_var(envvar)
except KeyError as ex:
LOG.error(ex)
sys.exit(1)
return True | python | def valid_env_vars() -> bool:
"""Validate that required env vars exist.
:returns: True if required env vars exist.
.. versionadded:: 0.0.12
"""
for envvar in _REQUIRED_ENV_VARS:
try:
_check_env_var(envvar)
except KeyError as ex:
LOG.error(ex)
sys.exit(1)
return True | [
"def",
"valid_env_vars",
"(",
")",
"->",
"bool",
":",
"for",
"envvar",
"in",
"_REQUIRED_ENV_VARS",
":",
"try",
":",
"_check_env_var",
"(",
"envvar",
")",
"except",
"KeyError",
"as",
"ex",
":",
"LOG",
".",
"error",
"(",
"ex",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"return",
"True"
] | Validate that required env vars exist.
:returns: True if required env vars exist.
.. versionadded:: 0.0.12 | [
"Validate",
"that",
"required",
"env",
"vars",
"exist",
"."
] | train | https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/validator.py#L61-L74 |
DreamLab/VmShepherd | src/vmshepherd/http/__init__.py | WebServer.configure_panel | def configure_panel(self):
"""
Configure templates and routing
"""
webroot = os.path.dirname(__file__)
self.template_path = os.path.join(webroot, 'templates')
aiohttp_jinja2.setup(
self, loader=jinja2.FileSystemLoader(self.template_path),
filters={'sorted': sorted, 'int': int}
)
self['static_root_url'] = '/static'
self.router.add_view('/', Panel)
self.router.add_static(
'/static/', path=os.path.join(webroot, 'static'), name='static'
) | python | def configure_panel(self):
"""
Configure templates and routing
"""
webroot = os.path.dirname(__file__)
self.template_path = os.path.join(webroot, 'templates')
aiohttp_jinja2.setup(
self, loader=jinja2.FileSystemLoader(self.template_path),
filters={'sorted': sorted, 'int': int}
)
self['static_root_url'] = '/static'
self.router.add_view('/', Panel)
self.router.add_static(
'/static/', path=os.path.join(webroot, 'static'), name='static'
) | [
"def",
"configure_panel",
"(",
"self",
")",
":",
"webroot",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
"self",
".",
"template_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"webroot",
",",
"'templates'",
")",
"aiohttp_jinja2",
".",
"setup",
"(",
"self",
",",
"loader",
"=",
"jinja2",
".",
"FileSystemLoader",
"(",
"self",
".",
"template_path",
")",
",",
"filters",
"=",
"{",
"'sorted'",
":",
"sorted",
",",
"'int'",
":",
"int",
"}",
")",
"self",
"[",
"'static_root_url'",
"]",
"=",
"'/static'",
"self",
".",
"router",
".",
"add_view",
"(",
"'/'",
",",
"Panel",
")",
"self",
".",
"router",
".",
"add_static",
"(",
"'/static/'",
",",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"webroot",
",",
"'static'",
")",
",",
"name",
"=",
"'static'",
")"
] | Configure templates and routing | [
"Configure",
"templates",
"and",
"routing"
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/__init__.py#L20-L36 |
DreamLab/VmShepherd | src/vmshepherd/http/__init__.py | WebServer.start | async def start(self):
"""
Initialize and start WebServer
"""
logging.info('Starting server, listening on %s.', self.port)
runner = web.AppRunner(self)
await runner.setup()
site = web.TCPSite(runner, '', self.port)
await site.start() | python | async def start(self):
"""
Initialize and start WebServer
"""
logging.info('Starting server, listening on %s.', self.port)
runner = web.AppRunner(self)
await runner.setup()
site = web.TCPSite(runner, '', self.port)
await site.start() | [
"async",
"def",
"start",
"(",
"self",
")",
":",
"logging",
".",
"info",
"(",
"'Starting server, listening on %s.'",
",",
"self",
".",
"port",
")",
"runner",
"=",
"web",
".",
"AppRunner",
"(",
"self",
")",
"await",
"runner",
".",
"setup",
"(",
")",
"site",
"=",
"web",
".",
"TCPSite",
"(",
"runner",
",",
"''",
",",
"self",
".",
"port",
")",
"await",
"site",
".",
"start",
"(",
")"
] | Initialize and start WebServer | [
"Initialize",
"and",
"start",
"WebServer"
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/__init__.py#L45-L53 |
DreamLab/VmShepherd | src/vmshepherd/http/__init__.py | Panel.get | async def get(self):
"""
Inject all preset data to Panel and Render a Home Page
"""
shepherd = self.request.app.vmshepherd
data = {'presets': {}, 'config': shepherd.config}
presets = await shepherd.preset_manager.list_presets()
runtime = shepherd.runtime_manager
for name in presets:
preset = shepherd.preset_manager.get_preset(name)
data['presets'][name] = {
'preset': preset,
'vms': preset.vms,
'runtime': await runtime.get_preset_data(name),
'vmshepherd_id': shepherd.instance_id,
'now': time.time()
}
return data | python | async def get(self):
"""
Inject all preset data to Panel and Render a Home Page
"""
shepherd = self.request.app.vmshepherd
data = {'presets': {}, 'config': shepherd.config}
presets = await shepherd.preset_manager.list_presets()
runtime = shepherd.runtime_manager
for name in presets:
preset = shepherd.preset_manager.get_preset(name)
data['presets'][name] = {
'preset': preset,
'vms': preset.vms,
'runtime': await runtime.get_preset_data(name),
'vmshepherd_id': shepherd.instance_id,
'now': time.time()
}
return data | [
"async",
"def",
"get",
"(",
"self",
")",
":",
"shepherd",
"=",
"self",
".",
"request",
".",
"app",
".",
"vmshepherd",
"data",
"=",
"{",
"'presets'",
":",
"{",
"}",
",",
"'config'",
":",
"shepherd",
".",
"config",
"}",
"presets",
"=",
"await",
"shepherd",
".",
"preset_manager",
".",
"list_presets",
"(",
")",
"runtime",
"=",
"shepherd",
".",
"runtime_manager",
"for",
"name",
"in",
"presets",
":",
"preset",
"=",
"shepherd",
".",
"preset_manager",
".",
"get_preset",
"(",
"name",
")",
"data",
"[",
"'presets'",
"]",
"[",
"name",
"]",
"=",
"{",
"'preset'",
":",
"preset",
",",
"'vms'",
":",
"preset",
".",
"vms",
",",
"'runtime'",
":",
"await",
"runtime",
".",
"get_preset_data",
"(",
"name",
")",
",",
"'vmshepherd_id'",
":",
"shepherd",
".",
"instance_id",
",",
"'now'",
":",
"time",
".",
"time",
"(",
")",
"}",
"return",
"data"
] | Inject all preset data to Panel and Render a Home Page | [
"Inject",
"all",
"preset",
"data",
"to",
"Panel",
"and",
"Render",
"a",
"Home",
"Page"
] | train | https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/http/__init__.py#L59-L76 |
julot/sphinxcontrib-dd | sphinxcontrib/dd/database_diagram.py | serialize | def serialize(dictionary):
"""
Turn dictionary into argument like string.
"""
data = []
for key, value in dictionary.items():
data.append('{0}="{1}"'.format(key, value))
return ', '.join(data) | python | def serialize(dictionary):
"""
Turn dictionary into argument like string.
"""
data = []
for key, value in dictionary.items():
data.append('{0}="{1}"'.format(key, value))
return ', '.join(data) | [
"def",
"serialize",
"(",
"dictionary",
")",
":",
"data",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"dictionary",
".",
"items",
"(",
")",
":",
"data",
".",
"append",
"(",
"'{0}=\"{1}\"'",
".",
"format",
"(",
"key",
",",
"value",
")",
")",
"return",
"', '",
".",
"join",
"(",
"data",
")"
] | Turn dictionary into argument like string. | [
"Turn",
"dictionary",
"into",
"argument",
"like",
"string",
"."
] | train | https://github.com/julot/sphinxcontrib-dd/blob/18619b356508b9a99cc329eeae53cbf299a5d1de/sphinxcontrib/dd/database_diagram.py#L12-L21 |
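A one-line usage sketch of serialize(); keys keep their insertion order.

    print(serialize({'label': 'users', 'shape': 'record'}))
    # label="users", shape="record"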
bpannier/simpletr64 | simpletr64/actions/lan.py | Lan.getAmountOfHostsConnected | def getAmountOfHostsConnected(self, lanInterfaceId=1, timeout=1):
        """Execute GetHostNumberOfEntries action to get the amount of known hosts.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: the amount of known hosts.
:rtype: int
.. seealso:: :meth:`~simpletr64.actions.Lan.getHostDetailsByIndex`
"""
namespace = Lan.getServiceType("getAmountOfHostsConnected") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetHostNumberOfEntries", timeout=timeout)
        return int(results["NewHostNumberOfEntries"]) | python | def getAmountOfHostsConnected(self, lanInterfaceId=1, timeout=1):
        """Execute GetHostNumberOfEntries action to get the amount of known hosts.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: the amount of known hosts.
:rtype: int
.. seealso:: :meth:`~simpletr64.actions.Lan.getHostDetailsByIndex`
"""
namespace = Lan.getServiceType("getAmountOfHostsConnected") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetHostNumberOfEntries", timeout=timeout)
return int(results["NewHostNumberOfEntries"]) | [
"def",
"getAmountOfHostsConnected",
"(",
"self",
",",
"lanInterfaceId",
"=",
"1",
",",
"timeout",
"=",
"1",
")",
":",
"namespace",
"=",
"Lan",
".",
"getServiceType",
"(",
"\"getAmountOfHostsConnected\"",
")",
"+",
"str",
"(",
"lanInterfaceId",
")",
"uri",
"=",
"self",
".",
"getControlURL",
"(",
"namespace",
")",
"results",
"=",
"self",
".",
"execute",
"(",
"uri",
",",
"namespace",
",",
"\"GetHostNumberOfEntries\"",
",",
"timeout",
"=",
"timeout",
")",
"return",
"int",
"(",
"results",
"[",
"\"NewHostNumberOfEntries\"",
"]",
")"
] | Execute GetHostNumberOfEntries action to get the amount of known hosts.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: the amount of known hosts.
:rtype: int
.. seealso:: :meth:`~simpletr64.actions.Lan.getHostDetailsByIndex` | [
"Execute",
"NewHostNumberOfEntries",
"action",
"to",
"get",
"the",
"amount",
"of",
"known",
"hosts",
"."
] | train | https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/lan.py#L91-L106 |
bpannier/simpletr64 | simpletr64/actions/lan.py | Lan.getHostDetailsByIndex | def getHostDetailsByIndex(self, index, lanInterfaceId=1, timeout=1):
"""Execute GetGenericHostEntry action to get detailed information's of a connected host.
:param index: the index of the host
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: the detailed information's of a connected host.
:rtype: HostDetails
.. seealso:: :meth:`~simpletr64.actions.Lan.getAmountOfHostsConnected`
"""
namespace = Lan.getServiceType("getHostDetailsByIndex") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetGenericHostEntry", timeout=timeout, NewIndex=index)
return HostDetails(results) | python | def getHostDetailsByIndex(self, index, lanInterfaceId=1, timeout=1):
"""Execute GetGenericHostEntry action to get detailed information's of a connected host.
:param index: the index of the host
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: the detailed information's of a connected host.
:rtype: HostDetails
.. seealso:: :meth:`~simpletr64.actions.Lan.getAmountOfHostsConnected`
"""
namespace = Lan.getServiceType("getHostDetailsByIndex") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetGenericHostEntry", timeout=timeout, NewIndex=index)
return HostDetails(results) | [
"def",
"getHostDetailsByIndex",
"(",
"self",
",",
"index",
",",
"lanInterfaceId",
"=",
"1",
",",
"timeout",
"=",
"1",
")",
":",
"namespace",
"=",
"Lan",
".",
"getServiceType",
"(",
"\"getHostDetailsByIndex\"",
")",
"+",
"str",
"(",
"lanInterfaceId",
")",
"uri",
"=",
"self",
".",
"getControlURL",
"(",
"namespace",
")",
"results",
"=",
"self",
".",
"execute",
"(",
"uri",
",",
"namespace",
",",
"\"GetGenericHostEntry\"",
",",
"timeout",
"=",
"timeout",
",",
"NewIndex",
"=",
"index",
")",
"return",
"HostDetails",
"(",
"results",
")"
] | Execute GetGenericHostEntry action to get detailed information's of a connected host.
:param index: the index of the host
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: the detailed information's of a connected host.
:rtype: HostDetails
.. seealso:: :meth:`~simpletr64.actions.Lan.getAmountOfHostsConnected` | [
"Execute",
"GetGenericHostEntry",
"action",
"to",
"get",
"detailed",
"information",
"s",
"of",
"a",
"connected",
"host",
"."
] | train | https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/lan.py#L108-L124 |
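A usage sketch combining the two host lookups above; the hostname is illustrative, and any device setup or authentication steps the library requires are assumed and not shown in these records. Whether entries are indexed from 0 or 1 can depend on the device.

    from simpletr64.actions.lan import Lan

    lan = Lan("192.168.178.1")   # constructor arguments beyond the hostname are assumed defaults
    amount = lan.getAmountOfHostsConnected(lanInterfaceId=1)
    for index in range(amount):
        print(lan.getHostDetailsByIndex(index, lanInterfaceId=1))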
bpannier/simpletr64 | simpletr64/actions/lan.py | Lan.getHostDetailsByMACAddress | def getHostDetailsByMACAddress(self, macAddress, lanInterfaceId=1, timeout=1):
"""Get host details for a host specified by its MAC address.
:param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might
be case sensitive, depending on the router
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: return the host details if found otherwise an Exception will be raised
:rtype: HostDetails
"""
namespace = Lan.getServiceType("getHostDetailsByMACAddress") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetSpecificHostEntry", timeout=timeout, NewMACAddress=macAddress)
return HostDetails(results, macAddress=macAddress) | python | def getHostDetailsByMACAddress(self, macAddress, lanInterfaceId=1, timeout=1):
"""Get host details for a host specified by its MAC address.
:param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might
be case sensitive, depending on the router
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: return the host details if found otherwise an Exception will be raised
:rtype: HostDetails
"""
namespace = Lan.getServiceType("getHostDetailsByMACAddress") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetSpecificHostEntry", timeout=timeout, NewMACAddress=macAddress)
return HostDetails(results, macAddress=macAddress) | [
"def",
"getHostDetailsByMACAddress",
"(",
"self",
",",
"macAddress",
",",
"lanInterfaceId",
"=",
"1",
",",
"timeout",
"=",
"1",
")",
":",
"namespace",
"=",
"Lan",
".",
"getServiceType",
"(",
"\"getHostDetailsByMACAddress\"",
")",
"+",
"str",
"(",
"lanInterfaceId",
")",
"uri",
"=",
"self",
".",
"getControlURL",
"(",
"namespace",
")",
"results",
"=",
"self",
".",
"execute",
"(",
"uri",
",",
"namespace",
",",
"\"GetSpecificHostEntry\"",
",",
"timeout",
"=",
"timeout",
",",
"NewMACAddress",
"=",
"macAddress",
")",
"return",
"HostDetails",
"(",
"results",
",",
"macAddress",
"=",
"macAddress",
")"
] | Get host details for a host specified by its MAC address.
:param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``; be aware that the MAC address might
be case sensitive, depending on the router
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: return the host details if found otherwise an Exception will be raised
:rtype: HostDetails | [
"Get",
"host",
"details",
"for",
"a",
"host",
"specified",
"by",
"its",
"MAC",
"address",
"."
] | train | https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/lan.py#L126-L141 |
bpannier/simpletr64 | simpletr64/actions/lan.py | Lan.getEthernetInfo | def getEthernetInfo(self, lanInterfaceId=1, timeout=1):
"""Execute GetInfo action to get information's about the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: information's about the Ethernet interface.
:rtype: EthernetInfo
"""
namespace = Lan.getServiceType("getEthernetInfo") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetInfo", timeout=timeout)
return EthernetInfo(results) | python | def getEthernetInfo(self, lanInterfaceId=1, timeout=1):
"""Execute GetInfo action to get information's about the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: information's about the Ethernet interface.
:rtype: EthernetInfo
"""
namespace = Lan.getServiceType("getEthernetInfo") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetInfo", timeout=timeout)
return EthernetInfo(results) | [
"def",
"getEthernetInfo",
"(",
"self",
",",
"lanInterfaceId",
"=",
"1",
",",
"timeout",
"=",
"1",
")",
":",
"namespace",
"=",
"Lan",
".",
"getServiceType",
"(",
"\"getEthernetInfo\"",
")",
"+",
"str",
"(",
"lanInterfaceId",
")",
"uri",
"=",
"self",
".",
"getControlURL",
"(",
"namespace",
")",
"results",
"=",
"self",
".",
"execute",
"(",
"uri",
",",
"namespace",
",",
"\"GetInfo\"",
",",
"timeout",
"=",
"timeout",
")",
"return",
"EthernetInfo",
"(",
"results",
")"
] | Execute GetInfo action to get information's about the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
:return: information's about the Ethernet interface.
:rtype: EthernetInfo | [
"Execute",
"GetInfo",
"action",
"to",
"get",
"information",
"s",
"about",
"the",
"Ethernet",
"interface",
"."
] | train | https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/lan.py#L143-L156 |
bpannier/simpletr64 | simpletr64/actions/lan.py | Lan.getEthernetStatistic | def getEthernetStatistic(self, lanInterfaceId=1, timeout=1):
"""Execute GetStatistics action to get statistics of the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
        :return: statistics of the Ethernet interface.
:rtype: EthernetStatistic
"""
namespace = Lan.getServiceType("getEthernetStatistic") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetStatistics", timeout=timeout)
return EthernetStatistic(results) | python | def getEthernetStatistic(self, lanInterfaceId=1, timeout=1):
"""Execute GetStatistics action to get statistics of the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
        :return: statistics of the Ethernet interface.
:rtype: EthernetStatistic
"""
namespace = Lan.getServiceType("getEthernetStatistic") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
results = self.execute(uri, namespace, "GetStatistics", timeout=timeout)
return EthernetStatistic(results) | [
"def",
"getEthernetStatistic",
"(",
"self",
",",
"lanInterfaceId",
"=",
"1",
",",
"timeout",
"=",
"1",
")",
":",
"namespace",
"=",
"Lan",
".",
"getServiceType",
"(",
"\"getEthernetStatistic\"",
")",
"+",
"str",
"(",
"lanInterfaceId",
")",
"uri",
"=",
"self",
".",
"getControlURL",
"(",
"namespace",
")",
"results",
"=",
"self",
".",
"execute",
"(",
"uri",
",",
"namespace",
",",
"\"GetStatistics\"",
",",
"timeout",
"=",
"timeout",
")",
"return",
"EthernetStatistic",
"(",
"results",
")"
] | Execute GetStatistics action to get statistics of the Ethernet interface.
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
        :return: statistics of the Ethernet interface.
:rtype: EthernetStatistic | [
"Execute",
"GetStatistics",
"action",
"to",
"get",
"statistics",
"of",
"the",
"Ethernet",
"interface",
"."
] | train | https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/lan.py#L158-L171 |
bpannier/simpletr64 | simpletr64/actions/lan.py | Lan.setEnable | def setEnable(self, status, lanInterfaceId=1, timeout=1):
"""Set enable status for a LAN interface, be careful you don't cut yourself off.
:param bool status: enable or disable the interface
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
"""
namespace = Lan.getServiceType("setEnable") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
if status:
setStatus = 1
else:
setStatus = 0
self.execute(uri, namespace, "SetEnable", timeout=timeout, NewEnable=setStatus) | python | def setEnable(self, status, lanInterfaceId=1, timeout=1):
"""Set enable status for a LAN interface, be careful you don't cut yourself off.
:param bool status: enable or disable the interface
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed
"""
namespace = Lan.getServiceType("setEnable") + str(lanInterfaceId)
uri = self.getControlURL(namespace)
if status:
setStatus = 1
else:
setStatus = 0
self.execute(uri, namespace, "SetEnable", timeout=timeout, NewEnable=setStatus) | [
"def",
"setEnable",
"(",
"self",
",",
"status",
",",
"lanInterfaceId",
"=",
"1",
",",
"timeout",
"=",
"1",
")",
":",
"namespace",
"=",
"Lan",
".",
"getServiceType",
"(",
"\"setEnable\"",
")",
"+",
"str",
"(",
"lanInterfaceId",
")",
"uri",
"=",
"self",
".",
"getControlURL",
"(",
"namespace",
")",
"if",
"status",
":",
"setStatus",
"=",
"1",
"else",
":",
"setStatus",
"=",
"0",
"self",
".",
"execute",
"(",
"uri",
",",
"namespace",
",",
"\"SetEnable\"",
",",
"timeout",
"=",
"timeout",
",",
"NewEnable",
"=",
"setStatus",
")"
] | Set enable status for a LAN interface, be careful you don't cut yourself off.
:param bool status: enable or disable the interface
:param int lanInterfaceId: the id of the LAN interface
:param float timeout: the timeout to wait for the action to be executed | [
"Set",
"enable",
"status",
"for",
"a",
"LAN",
"interface",
"be",
"careful",
"you",
"don",
"t",
"cut",
"yourself",
"off",
"."
] | train | https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/lan.py#L173-L188 |
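A one-line sketch of the enable/disable action; as the docstring warns, disabling the interface that carries your own connection will cut you off, so the example only re-enables interface 1 (again assuming the `lan` object from the earlier sketches).

```python
# True is translated to NewEnable=1 by setEnable().
lan.setEnable(True, lanInterfaceId=1, timeout=2)
```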
vsoch/helpme | helpme/main/uservoice/__init__.py | Helper._submit | def _submit(self):
'''submit a uservoice ticket. When we get here we should have:
{'user_prompt_issue': 'I want to do the thing.',
'record_asciinema': '/tmp/helpme.93o__nt5.json',
'record_environment': ((1,1),(2,2)...(N,N))}
Required Client Variables
self.api_key
self.api_secret
self.subdomain
self.email
'''
# Step 0: Authenticate with uservoice API
self.authenticate()
title = "HelpMe UserVoice Ticket: %s" %(self.run_id)
body = self.data['user_prompt_issue']
# Step 1: Environment
envars = self.data.get('record_environment')
if envars not in [None, '', []]:
body += '\n\nEnvironment:\n'
for envar in envars:
body += ' - %s: %s\n' %(envar[0], envar[1])
# Step 2: Asciinema
asciinema = self.data.get('record_asciinema')
if asciinema not in [None, '']:
url = upload_asciinema(asciinema)
# If the upload is successful, add a link to it.
if url is not None:
body += "\n\nAsciinema Recording: %s" %url
# Add other metadata about client
body += "\ngenerated by HelpMe: https://vsoch.github.io/helpme/"
# Submit the ticket!
self.post_ticket(title, body) | python | def _submit(self):
'''submit a uservoice ticket. When we get here we should have:
{'user_prompt_issue': 'I want to do the thing.',
'record_asciinema': '/tmp/helpme.93o__nt5.json',
'record_environment': ((1,1),(2,2)...(N,N))}
Required Client Variables
self.api_key
self.api_secret
self.subdomain
self.email
'''
# Step 0: Authenticate with uservoice API
self.authenticate()
title = "HelpMe UserVoice Ticket: %s" %(self.run_id)
body = self.data['user_prompt_issue']
# Step 1: Environment
envars = self.data.get('record_environment')
if envars not in [None, '', []]:
body += '\n\nEnvironment:\n'
for envar in envars:
body += ' - %s: %s\n' %(envar[0], envar[1])
# Step 2: Asciinema
asciinema = self.data.get('record_asciinema')
if asciinema not in [None, '']:
url = upload_asciinema(asciinema)
# If the upload is successful, add a link to it.
if url is not None:
body += "\n\nAsciinema Recording: %s" %url
# Add other metadata about client
body += "\ngenerated by HelpMe: https://vsoch.github.io/helpme/"
# Submit the ticket!
self.post_ticket(title, body) | [
"def",
"_submit",
"(",
"self",
")",
":",
"# Step 0: Authenticate with uservoice API",
"self",
".",
"authenticate",
"(",
")",
"title",
"=",
"\"HelpMe UserVoice Ticket: %s\"",
"%",
"(",
"self",
".",
"run_id",
")",
"body",
"=",
"self",
".",
"data",
"[",
"'user_prompt_issue'",
"]",
"# Step 1: Environment",
"envars",
"=",
"self",
".",
"data",
".",
"get",
"(",
"'record_environment'",
")",
"if",
"envars",
"not",
"in",
"[",
"None",
",",
"''",
",",
"[",
"]",
"]",
":",
"body",
"+=",
"'\\n\\nEnvironment:\\n'",
"for",
"envar",
"in",
"envars",
":",
"body",
"+=",
"' - %s: %s\\n'",
"%",
"(",
"envar",
"[",
"0",
"]",
",",
"envar",
"[",
"1",
"]",
")",
"# Step 2: Asciinema",
"asciinema",
"=",
"self",
".",
"data",
".",
"get",
"(",
"'record_asciinema'",
")",
"if",
"asciinema",
"not",
"in",
"[",
"None",
",",
"''",
"]",
":",
"url",
"=",
"upload_asciinema",
"(",
"asciinema",
")",
"# If the upload is successful, add a link to it.",
"if",
"url",
"is",
"not",
"None",
":",
"body",
"+=",
"\"\\n\\nAsciinema Recording: %s\"",
"%",
"url",
"# Add other metadata about client",
"body",
"+=",
"\"\\ngenerated by HelpMe: https://vsoch.github.io/helpme/\"",
"# Submit the ticket!",
"self",
".",
"post_ticket",
"(",
"title",
",",
"body",
")"
] | submit a uservoice ticket. When we get here we should have:
{'user_prompt_issue': 'I want to do the thing.',
'record_asciinema': '/tmp/helpme.93o__nt5.json',
'record_environment': ((1,1),(2,2)...(N,N))}
Required Client Variables
self.api_key
self.api_secret
self.subdomain
self.email | [
"submit",
"a",
"uservoice",
"ticket",
".",
"When",
"we",
"get",
"here",
"we",
"should",
"have",
":",
"{",
"user_prompt_issue",
":",
"I",
"want",
"to",
"do",
"the",
"thing",
".",
"record_asciinema",
":",
"/",
"tmp",
"/",
"helpme",
".",
"93o__nt5",
".",
"json",
"record_environment",
":",
"((",
"1",
"1",
")",
"(",
"2",
"2",
")",
"...",
"(",
"N",
"N",
"))",
"}"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/uservoice/__init__.py#L53-L99 |
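The `_submit` record assembles a ticket body from the captured issue text, recorded environment variables, and an optional asciinema URL. A standalone sketch of that assembly logic, using a hypothetical helper name and no UserVoice call:

```python
def build_ticket_body(issue, envars=None, asciinema_url=None):
    """Hypothetical helper mirroring the body assembly in _submit()."""
    body = issue
    if envars:
        body += "\n\nEnvironment:\n"
        for name, value in envars:
            body += " - %s: %s\n" % (name, value)
    if asciinema_url:
        body += "\n\nAsciinema Recording: %s" % asciinema_url
    body += "\ngenerated by HelpMe: https://vsoch.github.io/helpme/"
    return body

print(build_ticket_body("I want to do the thing.",
                        envars=[("SHELL", "/bin/bash")],
                        asciinema_url="https://asciinema.org/a/example"))
```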
vsoch/helpme | helpme/main/uservoice/__init__.py | Helper.authenticate | def authenticate(self):
'''authenticate with uservoice by creating a client.'''
if not hasattr(self, 'client'):
self.client = uservoice.Client(self.subdomain,
self.api_key,
self.api_secret) | python | def authenticate(self):
'''authenticate with uservoice by creating a client.'''
if not hasattr(self, 'client'):
self.client = uservoice.Client(self.subdomain,
self.api_key,
self.api_secret) | [
"def",
"authenticate",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'client'",
")",
":",
"self",
".",
"client",
"=",
"uservoice",
".",
"Client",
"(",
"self",
".",
"subdomain",
",",
"self",
".",
"api_key",
",",
"self",
".",
"api_secret",
")"
] | authenticate with uservoice by creating a client. | [
"authenticate",
"with",
"uservoice",
"by",
"creating",
"a",
"client",
"."
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/uservoice/__init__.py#L104-L110 |
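`authenticate` is a simple lazy-initialization pattern: the client is created once on first use and cached on the instance. A generic sketch of the pattern with a stand-in client object:

```python
class LazyClientMixin:
    """Illustrative only: build an expensive client attribute on first use."""

    def authenticate(self):
        if not hasattr(self, "client"):
            self.client = object()   # stand-in for uservoice.Client(subdomain, key, secret)
        return self.client
```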
vsoch/helpme | helpme/main/uservoice/__init__.py | Helper.post_ticket | def post_ticket(self, title, body):
'''post_ticket will post a ticket to the uservoice helpdesk
Parameters
==========
title: the title (subject) of the issue
body: the message to send
'''
# Populate the ticket
ticket = {'subject': title,
'message': body }
response = self.client.post("/api/v1/tickets.json", {
'email': self.email,
'ticket': ticket })['ticket']
bot.info(response['url']) | python | def post_ticket(self, title, body):
'''post_ticket will post a ticket to the uservoice helpdesk
Parameters
==========
title: the title (subject) of the issue
body: the message to send
'''
# Populate the ticket
ticket = {'subject': title,
'message': body }
response = self.client.post("/api/v1/tickets.json", {
'email': self.email,
'ticket': ticket })['ticket']
bot.info(response['url']) | [
"def",
"post_ticket",
"(",
"self",
",",
"title",
",",
"body",
")",
":",
"# Populate the ticket",
"ticket",
"=",
"{",
"'subject'",
":",
"title",
",",
"'message'",
":",
"body",
"}",
"response",
"=",
"self",
".",
"client",
".",
"post",
"(",
"\"/api/v1/tickets.json\"",
",",
"{",
"'email'",
":",
"self",
".",
"email",
",",
"'ticket'",
":",
"ticket",
"}",
")",
"[",
"'ticket'",
"]",
"bot",
".",
"info",
"(",
"response",
"[",
"'url'",
"]",
")"
] | post_ticket will post a ticket to the uservoice helpdesk
Parameters
==========
title: the title (subject) of the issue
body: the message to send | [
"post_ticket",
"will",
"post",
"a",
"ticket",
"to",
"the",
"uservoice",
"helpdesk"
] | train | https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/main/uservoice/__init__.py#L114-L131 |
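An end-to-end sketch of opening a ticket with the uservoice client, mirroring the calls shown in the record above; the subdomain, API key/secret, and email are placeholders.

```python
import uservoice

client = uservoice.Client("mysubdomain", "API_KEY", "API_SECRET")   # placeholder credentials

ticket = {"subject": "HelpMe UserVoice Ticket: demo",
          "message": "Example body text."}
response = client.post("/api/v1/tickets.json",
                       {"email": "user@example.com", "ticket": ticket})["ticket"]
print(response["url"])
```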
mjirik/io3d | io3d/datareader.py | read | def read(datapath, qt_app=None, dataplus_format=True, gui=False, start=0, stop=None, step=1, convert_to_gray=True,
series_number=None, dicom_expected=None, **kwargs):
"""Simple read function. Internally calls DataReader.Get3DData()"""
dr = DataReader()
return dr.Get3DData(datapath=datapath, qt_app=qt_app, dataplus_format=dataplus_format, gui=gui, start=start,
stop=stop, step=step, convert_to_gray=convert_to_gray, series_number=series_number,
use_economic_dtype=True, dicom_expected=dicom_expected , **kwargs) | python | def read(datapath, qt_app=None, dataplus_format=True, gui=False, start=0, stop=None, step=1, convert_to_gray=True,
series_number=None, dicom_expected=None, **kwargs):
"""Simple read function. Internally calls DataReader.Get3DData()"""
dr = DataReader()
return dr.Get3DData(datapath=datapath, qt_app=qt_app, dataplus_format=dataplus_format, gui=gui, start=start,
stop=stop, step=step, convert_to_gray=convert_to_gray, series_number=series_number,
use_economic_dtype=True, dicom_expected=dicom_expected , **kwargs) | [
"def",
"read",
"(",
"datapath",
",",
"qt_app",
"=",
"None",
",",
"dataplus_format",
"=",
"True",
",",
"gui",
"=",
"False",
",",
"start",
"=",
"0",
",",
"stop",
"=",
"None",
",",
"step",
"=",
"1",
",",
"convert_to_gray",
"=",
"True",
",",
"series_number",
"=",
"None",
",",
"dicom_expected",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"dr",
"=",
"DataReader",
"(",
")",
"return",
"dr",
".",
"Get3DData",
"(",
"datapath",
"=",
"datapath",
",",
"qt_app",
"=",
"qt_app",
",",
"dataplus_format",
"=",
"dataplus_format",
",",
"gui",
"=",
"gui",
",",
"start",
"=",
"start",
",",
"stop",
"=",
"stop",
",",
"step",
"=",
"step",
",",
"convert_to_gray",
"=",
"convert_to_gray",
",",
"series_number",
"=",
"series_number",
",",
"use_economic_dtype",
"=",
"True",
",",
"dicom_expected",
"=",
"dicom_expected",
",",
"*",
"*",
"kwargs",
")"
] | Simple read function. Internally calls DataReader.Get3DData() | [
"Simple",
"read",
"function",
".",
"Internally",
"calls",
"DataReader",
".",
"Get3DData",
"()"
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareader.py#L34-L40 |
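A usage sketch for the module-level `read()` helper. The path is a placeholder; with the default `dataplus_format=True` the return value is a single dict holding the volume and its metadata.

```python
import io3d.datareader

datap = io3d.datareader.read("~/data/ct_series")   # placeholder path (directory or file)

data3d = datap["data3d"]                # numpy volume
voxelsize_mm = datap["voxelsize_mm"]    # spacing recorded by the reader
print(data3d.shape, voxelsize_mm)
```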
mjirik/io3d | io3d/datareader.py | _metadata | def _metadata(image, datapath):
"""Function which returns metadata dict.
:param image: image to get spacing from
:param datapath: path to data
:return: {'series_number': '', 'datadir': '', 'voxelsize_mm': ''}
"""
metadata = {'series_number': 0, 'datadir': datapath}
spacing = image.GetSpacing()
metadata['voxelsize_mm'] = [
spacing[2],
spacing[0],
spacing[1],
]
return metadata | python | def _metadata(image, datapath):
"""Function which returns metadata dict.
:param image: image to get spacing from
:param datapath: path to data
:return: {'series_number': '', 'datadir': '', 'voxelsize_mm': ''}
"""
metadata = {'series_number': 0, 'datadir': datapath}
spacing = image.GetSpacing()
metadata['voxelsize_mm'] = [
spacing[2],
spacing[0],
spacing[1],
]
return metadata | [
"def",
"_metadata",
"(",
"image",
",",
"datapath",
")",
":",
"metadata",
"=",
"{",
"'series_number'",
":",
"0",
",",
"'datadir'",
":",
"datapath",
"}",
"spacing",
"=",
"image",
".",
"GetSpacing",
"(",
")",
"metadata",
"[",
"'voxelsize_mm'",
"]",
"=",
"[",
"spacing",
"[",
"2",
"]",
",",
"spacing",
"[",
"0",
"]",
",",
"spacing",
"[",
"1",
"]",
",",
"]",
"return",
"metadata"
] | Function which returns metadata dict.
:param image: image to get spacing from
:param datapath: path to data
:return: {'series_number': '', 'datadir': '', 'voxelsize_mm': ''} | [
"Function",
"which",
"returns",
"metadata",
"dict",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareader.py#L44-L58 |
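`_metadata` reorders SimpleITK's (x, y, z) spacing so that the first `voxelsize_mm` component matches the first (slice) axis of the numpy array returned by `GetArrayFromImage`. A tiny self-contained demonstration:

```python
import SimpleITK as sitk

img = sitk.Image(16, 16, 8, sitk.sitkInt16)   # dummy volume, sized (x, y, z)
img.SetSpacing((0.7, 0.7, 2.5))               # SimpleITK spacing is (x, y, z)

spacing = img.GetSpacing()
voxelsize_mm = [spacing[2], spacing[0], spacing[1]]   # same reorder as _metadata()
print(voxelsize_mm)                           # [2.5, 0.7, 0.7]
```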
mjirik/io3d | io3d/datareader.py | DataReader.Get3DData | def Get3DData(self, datapath, qt_app=None, dataplus_format=True, gui=False, start=0, stop=None, step=1,
convert_to_gray=True, series_number=None, use_economic_dtype=True, dicom_expected=None, **kwargs):
"""Returns 3D data and its metadata.
# NOTE(:param qt_app:) If it is set to None (as default) all dialogs for series selection are performed in
terminal. If qt_app is set to QtGui.QApplication() dialogs are in Qt.
:param datapath: directory with input data
:param qt_app: Dialog destination. If None (default) -> terminal, if 'QtGui.QApplication()' -> Qt
:param dataplus_format: New data format. Metadata and data are returned in one structure.
:param gui: True if 'QtGui.QApplication()' instead of terminal should be used
:param int start: used for DicomReader, defines where 3D data reading should start
:param int stop: used for DicomReader, defines where 3D data reading should stop
:param int step: used for DicomReader, defines step for 3D data reading
:param bool convert_to_gray: if True -> RGB is converted to gray
:param int series_number: used in DicomReader, essential in metadata
:param use_economic_dtype: if True, casts 3D data array to less space consuming dtype
:param dicom_expected: set true if it is known that data is in dicom format. Set False to suppress
dicom warnings.
:return: tuple (data3d, metadata)
"""
self.orig_datapath = datapath
datapath = os.path.expanduser(datapath)
if series_number is not None and type(series_number) != int:
series_number = int(series_number)
if not os.path.exists(datapath):
logger.error("Path '" + datapath + "' does not exist")
return
if qt_app is None and gui is True:
from PyQt4.QtGui import QApplication
qt_app = QApplication(sys.argv)
if type(datapath) is not str:
datapath = str(datapath)
datapath = os.path.normpath(datapath)
self.start = start
self.stop = stop
self.step = step
self.convert_to_gray = convert_to_gray
self.series_number = series_number
self.kwargs = kwargs
self.qt_app = qt_app
self.gui = gui
if os.path.isfile(datapath):
logger.debug('file read recognized')
data3d, metadata = self.__ReadFromFile(datapath)
elif os.path.exists(datapath):
logger.debug('directory read recognized')
data3d, metadata = self.__ReadFromDirectory(datapath=datapath, dicom_expected=dicom_expected)
# datapath, start, stop, step, gui=gui, **kwargs)
else:
logger.error('Data path {} not found'.format(datapath))
if convert_to_gray:
if len(data3d.shape) > 3:
# TODO: implement better rgb2gray
data3d = data3d[:, :, :, 0]
if use_economic_dtype:
data3d = self.__use_economic_dtype(data3d)
if dataplus_format:
logger.debug('dataplus format')
# metadata = {'voxelsize_mm': [1, 1, 1]}
datap = metadata
datap['data3d'] = data3d
logger.debug('datap keys () : ' + str(datap.keys()))
return datap
else:
return data3d, metadata | python | def Get3DData(self, datapath, qt_app=None, dataplus_format=True, gui=False, start=0, stop=None, step=1,
convert_to_gray=True, series_number=None, use_economic_dtype=True, dicom_expected=None, **kwargs):
"""Returns 3D data and its metadata.
# NOTE(:param qt_app:) If it is set to None (as default) all dialogs for series selection are performed in
terminal. If qt_app is set to QtGui.QApplication() dialogs are in Qt.
:param datapath: directory with input data
:param qt_app: Dialog destination. If None (default) -> terminal, if 'QtGui.QApplication()' -> Qt
:param dataplus_format: New data format. Metadata and data are returned in one structure.
:param gui: True if 'QtGui.QApplication()' instead of terminal should be used
:param int start: used for DicomReader, defines where 3D data reading should start
:param int stop: used for DicomReader, defines where 3D data reading should stop
:param int step: used for DicomReader, defines step for 3D data reading
:param bool convert_to_gray: if True -> RGB is converted to gray
:param int series_number: used in DicomReader, essential in metadata
:param use_economic_dtype: if True, casts 3D data array to less space consuming dtype
:param dicom_expected: set true if it is known that data is in dicom format. Set False to suppress
dicom warnings.
:return: tuple (data3d, metadata)
"""
self.orig_datapath = datapath
datapath = os.path.expanduser(datapath)
if series_number is not None and type(series_number) != int:
series_number = int(series_number)
if not os.path.exists(datapath):
logger.error("Path '" + datapath + "' does not exist")
return
if qt_app is None and gui is True:
from PyQt4.QtGui import QApplication
qt_app = QApplication(sys.argv)
if type(datapath) is not str:
datapath = str(datapath)
datapath = os.path.normpath(datapath)
self.start = start
self.stop = stop
self.step = step
self.convert_to_gray = convert_to_gray
self.series_number = series_number
self.kwargs = kwargs
self.qt_app = qt_app
self.gui = gui
if os.path.isfile(datapath):
logger.debug('file read recognized')
data3d, metadata = self.__ReadFromFile(datapath)
elif os.path.exists(datapath):
logger.debug('directory read recognized')
data3d, metadata = self.__ReadFromDirectory(datapath=datapath, dicom_expected=dicom_expected)
# datapath, start, stop, step, gui=gui, **kwargs)
else:
logger.error('Data path {} not found'.format(datapath))
if convert_to_gray:
if len(data3d.shape) > 3:
# TODO: implement better rgb2gray
data3d = data3d[:, :, :, 0]
if use_economic_dtype:
data3d = self.__use_economic_dtype(data3d)
if dataplus_format:
logger.debug('dataplus format')
# metadata = {'voxelsize_mm': [1, 1, 1]}
datap = metadata
datap['data3d'] = data3d
logger.debug('datap keys () : ' + str(datap.keys()))
return datap
else:
return data3d, metadata | [
"def",
"Get3DData",
"(",
"self",
",",
"datapath",
",",
"qt_app",
"=",
"None",
",",
"dataplus_format",
"=",
"True",
",",
"gui",
"=",
"False",
",",
"start",
"=",
"0",
",",
"stop",
"=",
"None",
",",
"step",
"=",
"1",
",",
"convert_to_gray",
"=",
"True",
",",
"series_number",
"=",
"None",
",",
"use_economic_dtype",
"=",
"True",
",",
"dicom_expected",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"orig_datapath",
"=",
"datapath",
"datapath",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"datapath",
")",
"if",
"series_number",
"is",
"not",
"None",
"and",
"type",
"(",
"series_number",
")",
"!=",
"int",
":",
"series_number",
"=",
"int",
"(",
"series_number",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"datapath",
")",
":",
"logger",
".",
"error",
"(",
"\"Path '\"",
"+",
"datapath",
"+",
"\"' does not exist\"",
")",
"return",
"if",
"qt_app",
"is",
"None",
"and",
"gui",
"is",
"True",
":",
"from",
"PyQt4",
".",
"QtGui",
"import",
"QApplication",
"qt_app",
"=",
"QApplication",
"(",
"sys",
".",
"argv",
")",
"if",
"type",
"(",
"datapath",
")",
"is",
"not",
"str",
":",
"datapath",
"=",
"str",
"(",
"datapath",
")",
"datapath",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"datapath",
")",
"self",
".",
"start",
"=",
"start",
"self",
".",
"stop",
"=",
"stop",
"self",
".",
"step",
"=",
"step",
"self",
".",
"convert_to_gray",
"=",
"convert_to_gray",
"self",
".",
"series_number",
"=",
"series_number",
"self",
".",
"kwargs",
"=",
"kwargs",
"self",
".",
"qt_app",
"=",
"qt_app",
"self",
".",
"gui",
"=",
"gui",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"datapath",
")",
":",
"logger",
".",
"debug",
"(",
"'file read recognized'",
")",
"data3d",
",",
"metadata",
"=",
"self",
".",
"__ReadFromFile",
"(",
"datapath",
")",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
"datapath",
")",
":",
"logger",
".",
"debug",
"(",
"'directory read recognized'",
")",
"data3d",
",",
"metadata",
"=",
"self",
".",
"__ReadFromDirectory",
"(",
"datapath",
"=",
"datapath",
",",
"dicom_expected",
"=",
"dicom_expected",
")",
"# datapath, start, stop, step, gui=gui, **kwargs)",
"else",
":",
"logger",
".",
"error",
"(",
"'Data path {} not found'",
".",
"format",
"(",
"datapath",
")",
")",
"if",
"convert_to_gray",
":",
"if",
"len",
"(",
"data3d",
".",
"shape",
")",
">",
"3",
":",
"# TODO: implement better rgb2gray",
"data3d",
"=",
"data3d",
"[",
":",
",",
":",
",",
":",
",",
"0",
"]",
"if",
"use_economic_dtype",
":",
"data3d",
"=",
"self",
".",
"__use_economic_dtype",
"(",
"data3d",
")",
"if",
"dataplus_format",
":",
"logger",
".",
"debug",
"(",
"'dataplus format'",
")",
"# metadata = {'voxelsize_mm': [1, 1, 1]}",
"datap",
"=",
"metadata",
"datap",
"[",
"'data3d'",
"]",
"=",
"data3d",
"logger",
".",
"debug",
"(",
"'datap keys () : '",
"+",
"str",
"(",
"datap",
".",
"keys",
"(",
")",
")",
")",
"return",
"datap",
"else",
":",
"return",
"data3d",
",",
"metadata"
] | Returns 3D data and its metadata.
# NOTE(:param qt_app:) If it is set to None (as default) all dialogs for series selection are performed in
terminal. If qt_app is set to QtGui.QApplication() dialogs are in Qt.
:param datapath: directory with input data
:param qt_app: Dialog destination. If None (default) -> terminal, if 'QtGui.QApplication()' -> Qt
:param dataplus_format: New data format. Metadata and data are returned in one structure.
:param gui: True if 'QtGui.QApplication()' instead of terminal should be used
:param int start: used for DicomReader, defines where 3D data reading should start
:param int stop: used for DicomReader, defines where 3D data reading should stop
:param int step: used for DicomReader, defines step for 3D data reading
:param bool convert_to_gray: if True -> RGB is converted to gray
:param int series_number: used in DicomReader, essential in metadata
:param use_economic_dtype: if True, casts 3D data array to less space consuming dtype
:param dicom_expected: set true if it is known that data is in dicom format. Set False to suppress
dicom warnings.
:return: tuple (data3d, metadata) | [
"Returns",
"3D",
"data",
"and",
"its",
"metadata",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareader.py#L67-L140 |
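Calling the reader class directly, as the module-level `read()` wrapper does; the path is a placeholder and GUI-based series selection is left off.

```python
from io3d.datareader import DataReader

dr = DataReader()
datap = dr.Get3DData("~/data/ct_series",     # placeholder path
                     dataplus_format=True,
                     gui=False)
print(sorted(datap.keys()))                  # 'data3d', 'voxelsize_mm', 'series_number', ...
```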
mjirik/io3d | io3d/datareader.py | DataReader.__ReadFromDirectory | def __ReadFromDirectory(self, datapath, dicom_expected=None):
"""This function is actually the ONE, which reads 3D data from file
:param datapath: path to file
:return: tuple (data3d, metadata)
"""
start = self.start
stop = self.stop
step = self.step
kwargs = self.kwargs
gui = self.gui
if (dicom_expected is not False) and (dcmr.is_dicom_dir(datapath)): # reading dicom
logger.debug('Dir - DICOM')
logger.debug("dicom_expected " + str(dicom_expected))
reader = dcmr.DicomReader(datapath,
series_number=self.series_number,
gui=gui,
**kwargs) # qt_app=None, gui=True)
data3d = reader.get_3Ddata(start, stop, step)
metadata = reader.get_metaData()
metadata['series_number'] = reader.series_number
metadata['datadir'] = datapath
self.overlay_fcn = reader.get_overlay
else: # reading image sequence
logger.debug('Dir - Image sequence')
logger.debug('Getting list of readable files...')
flist = []
try:
import SimpleITK as Sitk
except ImportError as e:
logger.error("Unable to import SimpleITK. On Windows try version 1.0.1")
for f in os.listdir(datapath):
try:
Sitk.ReadImage(os.path.join(datapath, f))
except Exception as e:
logger.warning("Cant load file: " + str(f))
logger.warning(e)
continue
flist.append(os.path.join(datapath, f))
flist.sort()
logger.debug('Reading image data...')
image = Sitk.ReadImage(flist)
logger.debug('Getting numpy array from image data...')
data3d = Sitk.GetArrayFromImage(image)
metadata = _metadata(image, datapath)
return data3d, metadata | python | def __ReadFromDirectory(self, datapath, dicom_expected=None):
"""This function is actually the ONE, which reads 3D data from file
:param datapath: path to file
:return: tuple (data3d, metadata)
"""
start = self.start
stop = self.stop
step = self.step
kwargs = self.kwargs
gui = self.gui
if (dicom_expected is not False) and (dcmr.is_dicom_dir(datapath)): # reading dicom
logger.debug('Dir - DICOM')
logger.debug("dicom_expected " + str(dicom_expected))
reader = dcmr.DicomReader(datapath,
series_number=self.series_number,
gui=gui,
**kwargs) # qt_app=None, gui=True)
data3d = reader.get_3Ddata(start, stop, step)
metadata = reader.get_metaData()
metadata['series_number'] = reader.series_number
metadata['datadir'] = datapath
self.overlay_fcn = reader.get_overlay
else: # reading image sequence
logger.debug('Dir - Image sequence')
logger.debug('Getting list of readable files...')
flist = []
try:
import SimpleITK as Sitk
except ImportError as e:
logger.error("Unable to import SimpleITK. On Windows try version 1.0.1")
for f in os.listdir(datapath):
try:
Sitk.ReadImage(os.path.join(datapath, f))
except Exception as e:
logger.warning("Cant load file: " + str(f))
logger.warning(e)
continue
flist.append(os.path.join(datapath, f))
flist.sort()
logger.debug('Reading image data...')
image = Sitk.ReadImage(flist)
logger.debug('Getting numpy array from image data...')
data3d = Sitk.GetArrayFromImage(image)
metadata = _metadata(image, datapath)
return data3d, metadata | [
"def",
"__ReadFromDirectory",
"(",
"self",
",",
"datapath",
",",
"dicom_expected",
"=",
"None",
")",
":",
"start",
"=",
"self",
".",
"start",
"stop",
"=",
"self",
".",
"stop",
"step",
"=",
"self",
".",
"step",
"kwargs",
"=",
"self",
".",
"kwargs",
"gui",
"=",
"self",
".",
"gui",
"if",
"(",
"dicom_expected",
"is",
"not",
"False",
")",
"and",
"(",
"dcmr",
".",
"is_dicom_dir",
"(",
"datapath",
")",
")",
":",
"# reading dicom",
"logger",
".",
"debug",
"(",
"'Dir - DICOM'",
")",
"logger",
".",
"debug",
"(",
"\"dicom_expected \"",
"+",
"str",
"(",
"dicom_expected",
")",
")",
"reader",
"=",
"dcmr",
".",
"DicomReader",
"(",
"datapath",
",",
"series_number",
"=",
"self",
".",
"series_number",
",",
"gui",
"=",
"gui",
",",
"*",
"*",
"kwargs",
")",
"# qt_app=None, gui=True)",
"data3d",
"=",
"reader",
".",
"get_3Ddata",
"(",
"start",
",",
"stop",
",",
"step",
")",
"metadata",
"=",
"reader",
".",
"get_metaData",
"(",
")",
"metadata",
"[",
"'series_number'",
"]",
"=",
"reader",
".",
"series_number",
"metadata",
"[",
"'datadir'",
"]",
"=",
"datapath",
"self",
".",
"overlay_fcn",
"=",
"reader",
".",
"get_overlay",
"else",
":",
"# reading image sequence",
"logger",
".",
"debug",
"(",
"'Dir - Image sequence'",
")",
"logger",
".",
"debug",
"(",
"'Getting list of readable files...'",
")",
"flist",
"=",
"[",
"]",
"try",
":",
"import",
"SimpleITK",
"as",
"Sitk",
"except",
"ImportError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"Unable to import SimpleITK. On Windows try version 1.0.1\"",
")",
"for",
"f",
"in",
"os",
".",
"listdir",
"(",
"datapath",
")",
":",
"try",
":",
"Sitk",
".",
"ReadImage",
"(",
"os",
".",
"path",
".",
"join",
"(",
"datapath",
",",
"f",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"warning",
"(",
"\"Cant load file: \"",
"+",
"str",
"(",
"f",
")",
")",
"logger",
".",
"warning",
"(",
"e",
")",
"continue",
"flist",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"datapath",
",",
"f",
")",
")",
"flist",
".",
"sort",
"(",
")",
"logger",
".",
"debug",
"(",
"'Reading image data...'",
")",
"image",
"=",
"Sitk",
".",
"ReadImage",
"(",
"flist",
")",
"logger",
".",
"debug",
"(",
"'Getting numpy array from image data...'",
")",
"data3d",
"=",
"Sitk",
".",
"GetArrayFromImage",
"(",
"image",
")",
"metadata",
"=",
"_metadata",
"(",
"image",
",",
"datapath",
")",
"return",
"data3d",
",",
"metadata"
] | This function is actually the ONE, which reads 3D data from file
:param datapath: path to file
:return: tuple (data3d, metadata) | [
"This",
"function",
"is",
"actually",
"the",
"ONE",
"which",
"reads",
"3D",
"data",
"from",
"file"
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareader.py#L143-L191 |
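The non-DICOM branch above probes each file with SimpleITK, keeps the readable ones, and stacks them into one volume. A trimmed standalone sketch of that branch (the directory path is a placeholder):

```python
import os
import SimpleITK as sitk

datapath = "/tmp/image_sequence"       # placeholder directory of 2D slices
flist = []
for f in sorted(os.listdir(datapath)):
    full = os.path.join(datapath, f)
    try:
        sitk.ReadImage(full)           # probe: keep only files SimpleITK can read
    except Exception:
        continue
    flist.append(full)

image = sitk.ReadImage(flist)          # stack the slices into a single 3D image
data3d = sitk.GetArrayFromImage(image)
print(data3d.shape, image.GetSpacing())
```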
mjirik/io3d | io3d/datareader.py | DataReader.__ReadFromFile | def __ReadFromFile(self, datapath):
"""Reads file and returns containing 3D data and its metadata.
Supported formats: pklz, pkl, hdf5, idx, dcm, Dcm, dicom, bz2 and "raw files"
:param datapath: path to file to read
:return: tuple (data3d, metadata)
"""
def _create_meta(_datapath):
"""Just simply returns some dict. This functions exists in order to keep DRY"""
meta = {
'series_number': 0,
'datadir': _datapath
}
return meta
path, ext = os.path.splitext(datapath)
ext = ext[1:]
if ext in ('pklz', 'pkl'):
logger.debug('pklz format detected')
from . import misc
data = misc.obj_from_file(datapath, filetype='pkl')
data3d = data.pop('data3d')
# metadata must have series_number
metadata = _create_meta(datapath)
metadata.update(data)
elif ext in ['hdf5']:
from . import hdf5_io
datap = hdf5_io.load_dict_from_hdf5(datapath)
# datap = self.read_hdf5(datapath)
data3d = datap.pop('data3d')
# back compatibility
if 'metadata' in datap.keys():
datap = datap['metadata']
# metadata must have series_number
metadata = _create_meta(datapath)
metadata.update(datap)
elif ext in ['idx']:
from . import idxformat
idxreader = idxformat.IDXReader()
data3d, metadata = idxreader.read(datapath)
elif ext in ['dcm', 'DCM', 'dicom']:
data3d, metadata = self._read_with_sitk(datapath)
metadata = self._fix_sitk_bug(datapath, metadata)
elif ext in ["bz2"]:
new_datapath = tgz.untar(datapath)
data3d, metadata = self.__ReadFromDirectory(new_datapath)
else:
logger.debug('file format "' + str(ext) + '"')
# reading raw file
data3d, metadata = self._read_with_sitk(datapath)
return data3d, metadata | python | def __ReadFromFile(self, datapath):
"""Reads file and returns containing 3D data and its metadata.
Supported formats: pklz, pkl, hdf5, idx, dcm, Dcm, dicom, bz2 and "raw files"
:param datapath: path to file to read
:return: tuple (data3d, metadata)
"""
def _create_meta(_datapath):
"""Just simply returns some dict. This functions exists in order to keep DRY"""
meta = {
'series_number': 0,
'datadir': _datapath
}
return meta
path, ext = os.path.splitext(datapath)
ext = ext[1:]
if ext in ('pklz', 'pkl'):
logger.debug('pklz format detected')
from . import misc
data = misc.obj_from_file(datapath, filetype='pkl')
data3d = data.pop('data3d')
# metadata must have series_number
metadata = _create_meta(datapath)
metadata.update(data)
elif ext in ['hdf5']:
from . import hdf5_io
datap = hdf5_io.load_dict_from_hdf5(datapath)
# datap = self.read_hdf5(datapath)
data3d = datap.pop('data3d')
# back compatibility
if 'metadata' in datap.keys():
datap = datap['metadata']
# metadata must have series_number
metadata = _create_meta(datapath)
metadata.update(datap)
elif ext in ['idx']:
from . import idxformat
idxreader = idxformat.IDXReader()
data3d, metadata = idxreader.read(datapath)
elif ext in ['dcm', 'DCM', 'dicom']:
data3d, metadata = self._read_with_sitk(datapath)
metadata = self._fix_sitk_bug(datapath, metadata)
elif ext in ["bz2"]:
new_datapath = tgz.untar(datapath)
data3d, metadata = self.__ReadFromDirectory(new_datapath)
else:
logger.debug('file format "' + str(ext) + '"')
# reading raw file
data3d, metadata = self._read_with_sitk(datapath)
return data3d, metadata | [
"def",
"__ReadFromFile",
"(",
"self",
",",
"datapath",
")",
":",
"def",
"_create_meta",
"(",
"_datapath",
")",
":",
"\"\"\"Just simply returns some dict. This functions exists in order to keep DRY\"\"\"",
"meta",
"=",
"{",
"'series_number'",
":",
"0",
",",
"'datadir'",
":",
"_datapath",
"}",
"return",
"meta",
"path",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"datapath",
")",
"ext",
"=",
"ext",
"[",
"1",
":",
"]",
"if",
"ext",
"in",
"(",
"'pklz'",
",",
"'pkl'",
")",
":",
"logger",
".",
"debug",
"(",
"'pklz format detected'",
")",
"from",
".",
"import",
"misc",
"data",
"=",
"misc",
".",
"obj_from_file",
"(",
"datapath",
",",
"filetype",
"=",
"'pkl'",
")",
"data3d",
"=",
"data",
".",
"pop",
"(",
"'data3d'",
")",
"# metadata must have series_number",
"metadata",
"=",
"_create_meta",
"(",
"datapath",
")",
"metadata",
".",
"update",
"(",
"data",
")",
"elif",
"ext",
"in",
"[",
"'hdf5'",
"]",
":",
"from",
".",
"import",
"hdf5_io",
"datap",
"=",
"hdf5_io",
".",
"load_dict_from_hdf5",
"(",
"datapath",
")",
"# datap = self.read_hdf5(datapath)",
"data3d",
"=",
"datap",
".",
"pop",
"(",
"'data3d'",
")",
"# back compatibility",
"if",
"'metadata'",
"in",
"datap",
".",
"keys",
"(",
")",
":",
"datap",
"=",
"datap",
"[",
"'metadata'",
"]",
"# metadata must have series_number",
"metadata",
"=",
"_create_meta",
"(",
"datapath",
")",
"metadata",
".",
"update",
"(",
"datap",
")",
"elif",
"ext",
"in",
"[",
"'idx'",
"]",
":",
"from",
".",
"import",
"idxformat",
"idxreader",
"=",
"idxformat",
".",
"IDXReader",
"(",
")",
"data3d",
",",
"metadata",
"=",
"idxreader",
".",
"read",
"(",
"datapath",
")",
"elif",
"ext",
"in",
"[",
"'dcm'",
",",
"'DCM'",
",",
"'dicom'",
"]",
":",
"data3d",
",",
"metadata",
"=",
"self",
".",
"_read_with_sitk",
"(",
"datapath",
")",
"metadata",
"=",
"self",
".",
"_fix_sitk_bug",
"(",
"datapath",
",",
"metadata",
")",
"elif",
"ext",
"in",
"[",
"\"bz2\"",
"]",
":",
"new_datapath",
"=",
"tgz",
".",
"untar",
"(",
"datapath",
")",
"data3d",
",",
"metadata",
"=",
"self",
".",
"__ReadFromDirectory",
"(",
"new_datapath",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'file format \"'",
"+",
"str",
"(",
"ext",
")",
"+",
"'\"'",
")",
"# reading raw file",
"data3d",
",",
"metadata",
"=",
"self",
".",
"_read_with_sitk",
"(",
"datapath",
")",
"return",
"data3d",
",",
"metadata"
] | Reads file and returns containing 3D data and its metadata.
Supported formats: pklz, pkl, hdf5, idx, dcm, Dcm, dicom, bz2 and "raw files"
:param datapath: path to file to read
:return: tuple (data3d, metadata) | [
"Reads",
"file",
"and",
"returns",
"containing",
"3D",
"data",
"and",
"its",
"metadata",
".",
"Supported",
"formats",
":",
"pklz",
"pkl",
"hdf5",
"idx",
"dcm",
"Dcm",
"dicom",
"bz2",
"and",
"raw",
"files"
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareader.py#L203-L256 |
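`__ReadFromFile` dispatches purely on the file extension. A compact sketch of the same dispatch idea (the labels returned are illustrative, not the module's actual call graph):

```python
import os

def pick_reader(datapath):
    """Illustrative extension-based dispatch, mirroring __ReadFromFile()."""
    ext = os.path.splitext(datapath)[1].lstrip(".").lower()
    if ext in ("pklz", "pkl"):
        return "pickle reader"
    if ext == "hdf5":
        return "hdf5 reader"
    if ext == "idx":
        return "idx reader"
    if ext in ("dcm", "dicom"):
        return "SimpleITK + DICOM slice-spacing fix"
    if ext == "bz2":
        return "untar, then directory reader"
    return "raw SimpleITK reader"

print(pick_reader("volume.DCM"))   # -> SimpleITK + DICOM slice-spacing fix
```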
mjirik/io3d | io3d/datareader.py | DataReader._read_with_sitk | def _read_with_sitk(datapath):
"""Reads file using SimpleITK. Returns array of pixels (image located in datapath) and its metadata.
:param datapath: path to file (img or dicom)
:return: tuple (data3d, metadata), where data3d is array of pixels
"""
try:
import SimpleITK as Sitk
except ImportError as e:
logger.error("Unable to import SimpleITK. On Windows try version 1.0.1")
image = Sitk.ReadImage(datapath)
data3d = dcmtools.get_pixel_array_from_sitk(image)
# data3d, original_dtype = dcmreaddata.get_pixel_array_from_dcmobj(image)
metadata = _metadata(image, datapath)
return data3d, metadata | python | def _read_with_sitk(datapath):
"""Reads file using SimpleITK. Returns array of pixels (image located in datapath) and its metadata.
:param datapath: path to file (img or dicom)
:return: tuple (data3d, metadata), where data3d is array of pixels
"""
try:
import SimpleITK as Sitk
except ImportError as e:
logger.error("Unable to import SimpleITK. On Windows try version 1.0.1")
image = Sitk.ReadImage(datapath)
data3d = dcmtools.get_pixel_array_from_sitk(image)
# data3d, original_dtype = dcmreaddata.get_pixel_array_from_dcmobj(image)
metadata = _metadata(image, datapath)
return data3d, metadata | [
"def",
"_read_with_sitk",
"(",
"datapath",
")",
":",
"try",
":",
"import",
"SimpleITK",
"as",
"Sitk",
"except",
"ImportError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"Unable to import SimpleITK. On Windows try version 1.0.1\"",
")",
"image",
"=",
"Sitk",
".",
"ReadImage",
"(",
"datapath",
")",
"data3d",
"=",
"dcmtools",
".",
"get_pixel_array_from_sitk",
"(",
"image",
")",
"# data3d, original_dtype = dcmreaddata.get_pixel_array_from_dcmobj(image)",
"metadata",
"=",
"_metadata",
"(",
"image",
",",
"datapath",
")",
"return",
"data3d",
",",
"metadata"
] | Reads file using SimpleITK. Returns array of pixels (image located in datapath) and its metadata.
:param datapath: path to file (img or dicom)
:return: tuple (data3d, metadata), where data3d is array of pixels | [
"Reads",
"file",
"using",
"SimpleITK",
".",
"Returns",
"array",
"of",
"pixels",
"(",
"image",
"located",
"in",
"datapath",
")",
"and",
"its",
"metadata",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareader.py#L259-L273 |
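Reading a single volume file with SimpleITK directly; this simplified sketch uses `GetArrayFromImage` instead of the module's `dcmtools` helper, so any pixel-value handling that helper performs is skipped. The file name is a placeholder.

```python
import SimpleITK as sitk

image = sitk.ReadImage("volume.mha")          # placeholder 3D file
data3d = sitk.GetArrayFromImage(image)        # simplified; io3d uses dcmtools here
spacing = image.GetSpacing()
voxelsize_mm = [spacing[2], spacing[0], spacing[1]]
print(data3d.shape, voxelsize_mm)
```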
mjirik/io3d | io3d/datareader.py | DataReader._fix_sitk_bug | def _fix_sitk_bug(path, metadata):
"""There is a bug in simple ITK for Z axis in 3D images. This is a fix.
:param path: path to dicom file to read
:param metadata: metadata to correct
:return: corrected metadata
"""
ds = dicom.read_file(path)
try:
metadata["voxelsize_mm"][0] = ds.SpacingBetweenSlices
except Exception as e:
logger.warning("Read dicom 'SpacingBetweenSlices' failed: ", e)
return metadata | python | def _fix_sitk_bug(path, metadata):
"""There is a bug in simple ITK for Z axis in 3D images. This is a fix.
:param path: path to dicom file to read
:param metadata: metadata to correct
:return: corrected metadata
"""
ds = dicom.read_file(path)
try:
metadata["voxelsize_mm"][0] = ds.SpacingBetweenSlices
except Exception as e:
logger.warning("Read dicom 'SpacingBetweenSlices' failed: ", e)
return metadata | [
"def",
"_fix_sitk_bug",
"(",
"path",
",",
"metadata",
")",
":",
"ds",
"=",
"dicom",
".",
"read_file",
"(",
"path",
")",
"try",
":",
"metadata",
"[",
"\"voxelsize_mm\"",
"]",
"[",
"0",
"]",
"=",
"ds",
".",
"SpacingBetweenSlices",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"warning",
"(",
"\"Read dicom 'SpacingBetweenSlices' failed: \"",
",",
"e",
")",
"return",
"metadata"
] | There is a bug in simple ITK for Z axis in 3D images. This is a fix.
:param path: path to dicom file to read
:param metadata: metadata to correct
:return: corrected metadata | [
"There",
"is",
"a",
"bug",
"in",
"simple",
"ITK",
"for",
"Z",
"axis",
"in",
"3D",
"images",
".",
"This",
"is",
"a",
"fix",
"."
] | train | https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareader.py#L276-L288 |
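The spacing fix reads the slice spacing straight from the DICOM header. With current pydicom the same attribute is reached like this (the record uses the older `dicom.read_file` API); the file path is a placeholder and the tag is optional in many datasets.

```python
import pydicom

ds = pydicom.dcmread("slice_0001.dcm")                # placeholder DICOM file
spacing_z = getattr(ds, "SpacingBetweenSlices", None)
if spacing_z is None:
    print("SpacingBetweenSlices not present in this file")
else:
    print("slice spacing [mm]:", float(spacing_z))
```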