Dataset schema (one row per mined function):

- repository_name: string (7-55 chars)
- func_path_in_repository: string (4-223 chars)
- func_name: string (1-134 chars)
- whole_func_string: string (75-104k chars)
- language: 1 class (value: "python")
- func_code_string: string (75-104k chars; same text as whole_func_string)
- func_code_tokens: sequence (19-28.4k tokens)
- func_documentation_string: string (1-46.9k chars)
- func_documentation_tokens: sequence (1-1.97k tokens)
- split_name: 1 class (value: "train")
- func_code_url: string (87-315 chars)

Each row below is shown as: repository | path | function | language | split, followed by the function source and its GitHub URL.
eumis/pyviews | pyviews/core/common.py | CoreError.add_cause | python | train

    def add_cause(self, error: Exception):
        '''Adds cause error to error message'''
        self.add_info('Cause error', '{0} - {1}'.format(type(error).__name__, error))

https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/common.py#L38-L40
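A minimal usage sketch. Only this method is shown in the row, so the `CoreError` constructor arguments and the exact `add_info` rendering are assumptions:

```python
from pyviews.core.common import CoreError  # import path taken from the row above

try:
    int('not a number')
except ValueError as error:
    core_error = CoreError('view rendering failed')  # hypothetical constructor argument
    # Attaches the wrapped exception to the error's info,
    # e.g. "Cause error: ValueError - invalid literal for int() ..."
    core_error.add_cause(error)
```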
Workiva/furious | furious/handlers/__init__.py | process_async_task | python | train

    def process_async_task(headers, request_body):
        """Process an Async task and execute the requested function."""
        async_options = json.loads(request_body)
        async = async_from_options(async_options)

        _log_task_info(headers,
                       extra_task_info=async.get_options().get('_extra_task_info'))
        logging.info(async._function_path)

        with context.execution_context_from_async(async):
            run_job()

        return 200, async._function_path

https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/handlers/__init__.py#L27-L40
Workiva/furious | furious/handlers/__init__.py | _log_task_info | python | train

    def _log_task_info(headers, extra_task_info=None):
        """Processes the header from task requests to log analytical data."""
        ran_at = time.time()
        task_eta = float(headers.get('X-Appengine-Tasketa', 0.0))

        task_info = {
            'retry_count': headers.get('X-Appengine-Taskretrycount', ''),
            'execution_count': headers.get('X-Appengine-Taskexecutioncount', ''),
            'task_eta': task_eta,
            'ran': ran_at,
            'gae_latency_seconds': ran_at - task_eta
        }

        if extra_task_info:
            task_info['extra'] = extra_task_info

        logging.debug('TASK-INFO: %s', json.dumps(task_info))

https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/handlers/__init__.py#L43-L58
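A sketch of the header shape this expects, for an environment with furious installed (the module is Python 2-era). The header names come straight from the function; the values below are invented samples:

```python
import logging
logging.getLogger().setLevel(logging.DEBUG)

from furious.handlers import _log_task_info  # module path taken from the row above

# App Engine task-queue headers (sample values for illustration only)
headers = {
    'X-Appengine-Tasketa': '1500000000.0',       # scheduled ETA as a unix timestamp
    'X-Appengine-Taskretrycount': '1',
    'X-Appengine-Taskexecutioncount': '1',
}

# Logs a JSON blob that includes gae_latency_seconds = time.time() - task_eta.
_log_task_info(headers, extra_task_info={'queue': 'default'})
```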
QualiSystems/cloudshell-networking-devices | cloudshell/devices/runners/firmware_runner.py | FirmwareRunner.load_firmware | python | train

    def load_firmware(self, path, vrf_management_name=None):
        """Update firmware version on device by loading provided image, performs following steps:

        1. Copy bin file from remote tftp server.
        2. Clear in run config boot system section.
        3. Set downloaded bin file as boot file and then reboot device.
        4. Check if firmware was successfully installed.

        :param path: full path to firmware file on ftp/tftp location
        :param vrf_management_name: VRF Name
        :return: status / exception
        """
        url = UrlParser.parse_url(path)
        required_keys = [UrlParser.FILENAME, UrlParser.HOSTNAME, UrlParser.SCHEME]

        if not url or not all(key in url for key in required_keys):
            raise Exception(self.__class__.__name__, "Path is wrong or empty")

        self.load_firmware_flow.execute_flow(path, vrf_management_name, self._timeout)

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/runners/firmware_runner.py#L37-L56
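`UrlParser` is internal to the shell framework, so here is the same required-keys check expressed with the standard library; treat it as an analogy, not the actual implementation:

```python
from urllib.parse import urlparse

def has_required_parts(path):
    """Roughly mirrors the validation above: scheme, host and filename must all be present."""
    url = urlparse(path)
    filename = url.path.rsplit('/', 1)[-1]
    return all([url.scheme, url.hostname, filename])

print(has_required_parts('tftp://10.0.0.5/images/firmware.bin'))  # True
print(has_required_parts('firmware.bin'))                         # False (no scheme or host)
```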
QualiSystems/cloudshell-networking-devices | cloudshell/devices/standards/sdn/configuration_attributes_structure.py | GenericSDNResource._parse_ports | python | train

    def _parse_ports(self, ports):
        """Parse ports string into the list

        :param str ports:
        :rtype: list[tuple[str, str]]
        """
        if not ports:
            return []

        return [tuple(port_pair.split("::")) for port_pair in ports.strip(";").split(";")]

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/standards/sdn/configuration_attributes_structure.py#L24-L33
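The parsing itself is easy to check in isolation; this sketch inlines the one-liner from above as a free function:

```python
def parse_ports(ports):
    # Same logic as _parse_ports above, pulled out for a quick demonstration.
    if not ports:
        return []
    return [tuple(pair.split("::")) for pair in ports.strip(";").split(";")]

# "source::destination" pairs separated by ";", with an optional trailing ";"
print(parse_ports("Chassis1/Port1::Chassis2/Port1;Chassis1/Port2::Chassis2/Port2;"))
# -> [('Chassis1/Port1', 'Chassis2/Port1'), ('Chassis1/Port2', 'Chassis2/Port2')]
print(parse_ports(""))  # -> []
```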
QualiSystems/cloudshell-networking-devices | cloudshell/devices/standards/sdn/configuration_attributes_structure.py | GenericSDNResource.add_trunk_ports | python | train

    def add_trunk_ports(self):
        """SDN Controller enable trunk ports

        :rtype: list[tuple[str, str]]
        """
        ports = self.attributes.get("{}Enable Full Trunk Ports".format(self.namespace_prefix), None)
        return self._parse_ports(ports=ports)

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/standards/sdn/configuration_attributes_structure.py#L68-L74
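How the attribute lookup and the parser fit together, shown with a self-contained stand-in. The real `namespace_prefix` comes from the shell definition; the `"TrafficShell."` prefix and the property form are invented here for illustration:

```python
class FakeSDNResource:
    namespace_prefix = "TrafficShell."  # invented prefix, for illustration only
    attributes = {
        "TrafficShell.Enable Full Trunk Ports": "P1::P2;P3::P4",
    }

    def _parse_ports(self, ports):
        if not ports:
            return []
        return [tuple(pair.split("::")) for pair in ports.strip(";").split(";")]

    @property
    def add_trunk_ports(self):
        ports = self.attributes.get(
            "{}Enable Full Trunk Ports".format(self.namespace_prefix), None)
        return self._parse_ports(ports=ports)

print(FakeSDNResource().add_trunk_ports)  # -> [('P1', 'P2'), ('P3', 'P4')]
```

`remove_trunk_ports` below follows the same pattern against the "Disable Full Trunk Ports" attribute.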
QualiSystems/cloudshell-networking-devices | cloudshell/devices/standards/sdn/configuration_attributes_structure.py | GenericSDNResource.remove_trunk_ports | python | train

    def remove_trunk_ports(self):
        """SDN Controller disable trunk ports

        :rtype: list[tuple[str, str]]
        """
        ports = self.attributes.get("{}Disable Full Trunk Ports".format(self.namespace_prefix), None)
        return self._parse_ports(ports=ports)

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/standards/sdn/configuration_attributes_structure.py#L77-L83
Workiva/furious | example/callback.py | handle_an_error | python | train

    def handle_an_error():
        """Will be run if the async task raises an unhandled exception."""
        import os

        from furious.context import get_current_async

        async = get_current_async()
        async_exception = async.result.payload
        exc_info = async_exception.traceback
        logging.info('async job blew up, exception info: %r', exc_info)

        retries = int(os.environ['HTTP_X_APPENGINE_TASKRETRYCOUNT'])
        if retries < 2:
            raise Exception(async_exception.error)
        else:
            logging.info('Caught too many errors, giving up now.')

https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/example/callback.py#L125-L140
QualiSystems/cloudshell-networking-devices | cloudshell/devices/autoload/autoload_builder.py | AutoloadDetailsBuilder._validate_build_resource_structure | python | train

    def _validate_build_resource_structure(autoload_resource):
        """Validate resource structure

        :param dict autoload_resource:
        :return correct autoload resource structure
        :rtype: dict
        """
        result = {}

        for resource_prefix, resources in autoload_resource.iteritems():
            max_free_index = max(map(int, resources)) + 1 or 1
            for index, sub_resources in resources.iteritems():
                if not index or index == -1:
                    index = max_free_index
                    max_free_index += 1
                if len(sub_resources) > 1:
                    result["{0}{1}".format(resource_prefix, index)] = sub_resources[0]
                    for resource in sub_resources[1:]:
                        result["{0}{1}".format(resource_prefix, str(max_free_index))] = resource
                        max_free_index += 1
                else:
                    result["{0}{1}".format(resource_prefix, index)] = sub_resources[0]

        return result

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/autoload/autoload_builder.py#L23-L49
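A worked example of the index-assignment rules, re-expressed in Python 3 (`.items()` instead of the original `.iteritems()`) so it runs standalone:

```python
def validate_structure(autoload_resource):
    # Python 3 rendering of the staticmethod above, for illustration.
    result = {}
    for prefix, resources in autoload_resource.items():
        max_free_index = max(map(int, resources)) + 1 or 1
        for index, sub_resources in resources.items():
            if not index or index == -1:
                index = max_free_index
                max_free_index += 1
            if len(sub_resources) > 1:
                result["{0}{1}".format(prefix, index)] = sub_resources[0]
                for resource in sub_resources[1:]:
                    result["{0}{1}".format(prefix, str(max_free_index))] = resource
                    max_free_index += 1
            else:
                result["{0}{1}".format(prefix, index)] = sub_resources[0]
    return result

# Index 1 is kept; the unindexed (-1) entries are pushed onto the next free indices.
print(validate_structure({"Port": {1: ["eth0"], -1: ["eth1", "eth2"]}}))
# -> {'Port1': 'eth0', 'Port2': 'eth1', 'Port3': 'eth2'}
```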
QualiSystems/cloudshell-networking-devices | cloudshell/devices/autoload/autoload_builder.py | AutoloadDetailsBuilder._build_autoload_details | python | train

    def _build_autoload_details(self, autoload_data, relative_path=""):
        """ Build autoload details

        :param autoload_data: dict:
        :param relative_path: str: full relative path of current autoload resource
        """
        self._autoload_details.attributes.extend(
            [AutoLoadAttribute(relative_address=relative_path,
                               attribute_name=attribute_name,
                               attribute_value=attribute_value)
             for attribute_name, attribute_value in autoload_data.attributes.iteritems()])

        for resource_relative_path, resource in self._validate_build_resource_structure(
                autoload_data.resources).iteritems():
            full_relative_path = posixpath.join(relative_path, resource_relative_path)
            self._autoload_details.resources.append(
                AutoLoadResource(model=resource.cloudshell_model_name,
                                 name=resource.name,
                                 relative_address=full_relative_path,
                                 unique_identifier=resource.unique_identifier))
            self._build_autoload_details(autoload_data=resource, relative_path=full_relative_path)

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/autoload/autoload_builder.py#L51-L71
eumis/pyviews | pyviews/core/node.py | Node.set_attr | python | train

    def set_attr(self, key: str, value):
        """Sets node attribute. Can be customized by attr_setter property"""
        self.attr_setter(self, key, value)

https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/node.py#L38-L40
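The indirection through `attr_setter` is the extension point. A reduced stand-in for the node (the real class has more state; the default setter's behavior is assumed here):

```python
class Node:
    # Reduced stand-in for pyviews' Node, just to show the attr_setter hook.
    def __init__(self):
        self.attr_setter = lambda node, key, value: setattr(node, key, value)

    def set_attr(self, key: str, value):
        """Sets node attribute. Can be customized by attr_setter property"""
        self.attr_setter(self, key, value)

node = Node()
node.attr_setter = lambda n, key, value: setattr(n, key, str(value).upper())  # custom setter
node.set_attr('text', 'hello')
print(node.text)  # -> HELLO
```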
eumis/pyviews | pyviews/core/node.py | Node.add_binding | python | train

    def add_binding(self, binding: Binding):
        """Stores binding"""
        binding.add_error_info = lambda error: error.add_view_info(self._xml_node.view_info)
        self._bindings.append(binding)

https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/node.py#L42-L45
eumis/pyviews | pyviews/core/node.py | Node.destroy | python | train

    def destroy(self):
        """Destroys node"""
        self.destroy_children()
        self.destroy_bindings()
        if self.on_destroy:
            self.on_destroy(self)

https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/node.py#L55-L60
eumis/pyviews | pyviews/core/node.py | Property.set | python | train

    def set(self, value):
        """Sets value"""
        self._value = self._setter(self._node, value, self._value) if self._setter else value

https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/node.py#L121-L123
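The setter returns the value that actually gets stored, which allows transformation or clamping. A stand-alone sketch keeping only the `set()` semantics shown above:

```python
class Property:
    # Minimal stand-in; the real class carries more behavior.
    def __init__(self, name, setter=None, node=None):
        self.name = name
        self._setter = setter
        self._node = node
        self._value = None

    def set(self, value):
        """Sets value"""
        self._value = self._setter(self._node, value, self._value) if self._setter else value

clamped = Property('volume', setter=lambda node, new, old: max(0, min(100, new)))
clamped.set(150)
print(clamped._value)  # -> 100; whatever the setter returns is what gets stored
```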
eumis/pyviews | pyviews/core/node.py | Property.new | python | train

    def new(self, node: Node):
        """Creates property for node"""
        return Property(self.name, self._setter, node)

https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/node.py#L125-L127
kmedian/korr | korr/corr_vs_pval.py | corr_vs_pval | python | train

    def corr_vs_pval(r, pval, plim=0.01, rlim=0.4, dpi=96):
        """Histogram for correlation coefficients and its p-values (colored)

        Parameters:
        -----------
        r : np.ndarray
            Correlation coefficient matrix.
            The upper triangular elements are extracted if a NxN is provided.
            Otherwise provide a plain vector.

        pval : np.ndarray
            NxN matrix with p-values

        plim : float
            Discretionary alpha threshold to judge if a p-value is considered
            "significant" or not. (Default: 0.01 or 1%)

        rlim : float
            Descretionary threshold to judge if an absolute correlation
            coefficient is big enough. (Default: 0.4)

        dpi : int
            Set the resolution of the matplotlib graphics.

        Return:
        -------
        fig, ax
            matplotlib figure and axes for further tweaking
        """
        # reshape
        if len(r.shape) == 2:
            idx = (np.tri(N=r.shape[0], k=-1) == 1)
            r = r[idx]
            pval = pval[idx]

        # indicies for the three groups
        i1 = (pval >= plim)
        i2 = (pval < plim) & (np.abs(r) > rlim)
        i3 = (pval < plim) & (np.abs(r) <= rlim)

        # plot paramters
        absmax = np.max(np.abs(r))
        b = (np.arange(0, 21) / 10 - 1) * absmax
        c = plt.get_cmap('tab10').colors
        c = (c[1], c[8], c[7])

        # create plot
        fig, ax = plt.subplots(dpi=dpi)
        ax.hist((r[i1], r[i2], r[i3]), histtype='bar',
                stacked=True, bins=b, color=c)

        # legend, title, labels
        ax.legend(['p >= ' + str(plim),
                   'p < ' + str(plim) + ' and |r| > ' + str(rlim),
                   'p < ' + str(plim) + ' and |r| <= ' + str(rlim)],
                  loc=2, bbox_to_anchor=(1.04, 1.04))
        ax.set_ylabel('frequency')
        ax.set_xlabel('correlation coefficient')

        # design grid
        ax.grid(color='darkgray', linestyle='-.')
        for edge, spine in ax.spines.items():
            spine.set_visible(False)

        # output figure/axes onbjects
        return fig, ax

https://github.com/kmedian/korr/blob/4eb86fc14b1fc1b69204069b7753d115b327c937/korr/corr_vs_pval.py#L5-L70
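A usage sketch. korr presumably provides helpers that compute `r` and `pval` together, but here the inputs are built pairwise with SciPy so the example stands alone:

```python
import numpy as np
from scipy import stats
from korr.corr_vs_pval import corr_vs_pval  # module path taken from the row above

rng = np.random.RandomState(42)
x = rng.normal(size=(200, 5))
x[:, 1] += 0.8 * x[:, 0]            # inject one strong correlation

n = x.shape[1]
r = np.eye(n)
pval = np.zeros((n, n))
for i in range(n):
    for j in range(n):
        if i != j:
            r[i, j], pval[i, j] = stats.pearsonr(x[:, i], x[:, j])

# NxN input: the function keeps only the strict lower triangle, so the diagonal is ignored.
fig, ax = corr_vs_pval(r, pval, plim=0.01, rlim=0.4)
fig.savefig('corr_hist.png', bbox_inches='tight')
```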
QualiSystems/cloudshell-networking-devices | cloudshell/devices/standards/traffic/virtual/blade/configuration_attributes_structure.py | TrafficGeneratorVBladeResource.from_context | python | train

    def from_context(cls, context, shell_type=None, shell_name=None):
        """Create an instance of TrafficGeneratorVBladeResource from the given context

        :param cloudshell.shell.core.driver_context.ResourceCommandContext context:
        :param str shell_type: shell type
        :param str shell_name: shell name
        :rtype: TrafficGeneratorVChassisResource
        """
        return cls(address=context.resource.address,
                   family=context.resource.family,
                   shell_type=shell_type,
                   shell_name=shell_name,
                   fullname=context.resource.fullname,
                   attributes=dict(context.resource.attributes),
                   name=context.resource.name)

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/standards/traffic/virtual/blade/configuration_attributes_structure.py#L28-L42
idlesign/django-siteflags | siteflags/models.py | update_filter_dict | python | train

    def update_filter_dict(d, user, status):
        """Helper. Updates filter dict for a queryset.

        :param dict d:
        :param User|None user:
        :param int|None status:
        :return:
        """
        if user is not None:
            if not user.id:
                return None
            d['user'] = user

        if status is not None:
            d['status'] = status

https://github.com/idlesign/django-siteflags/blob/c8d1a40afb2f60d68ad6d34af3f528e76bdb7142/siteflags/models.py#L233-L246
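The helper mutates the dict in place and bails out early (returning None) for anonymous users whose `id` is unset. A quick check with the helper inlined so it runs without Django, and a `SimpleNamespace` standing in for the user:

```python
from types import SimpleNamespace

def update_filter_dict(d, user, status):
    # Copy of the helper above, inlined for a Django-free demonstration.
    if user is not None:
        if not user.id:
            return None
        d['user'] = user
    if status is not None:
        d['status'] = status

filter_kwargs = {}
update_filter_dict(filter_kwargs, None, 3)
print(filter_kwargs)                      # -> {'status': 3}

update_filter_dict(filter_kwargs, SimpleNamespace(id=7), None)
print(filter_kwargs)                      # -> {'status': 3, 'user': namespace(id=7)}
```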
idlesign/django-siteflags | siteflags/models.py | ModelWithFlag.get_flags_for_types | python | train

    def get_flags_for_types(cls, mdl_classes, user=None, status=None, allow_empty=True):
        """Returns a dictionary with flag objects associated with the given model classes (types).

        The dictionary is indexed by model classes.
        Each dict entry contains a list of associated flag objects.

        :param list mdl_classes:
        :param User user:
        :param int status:
        :param bool allow_empty: Flag. Include results for all given types, even those without associated flags.
        :rtype: dict
        """
        return get_model_class_from_string(MODEL_FLAG).get_flags_for_types(
            mdl_classes, user=user, status=status, allow_empty=allow_empty)

https://github.com/idlesign/django-siteflags/blob/c8d1a40afb2f60d68ad6d34af3f528e76bdb7142/siteflags/models.py#L139-L151
idlesign/django-siteflags | siteflags/models.py | ModelWithFlag.get_flags_for_objects | python | train

    def get_flags_for_objects(cls, objects_list, user=None, status=None):
        """Returns a dictionary with flag objects associated with the given model objects.

        The dictionary is indexed by objects IDs.
        Each dict entry contains a list of associated flag objects.

        :param list, QuerySet objects_list:
        :param User user:
        :param int status:
        :rtype: dict
        """
        return get_model_class_from_string(MODEL_FLAG).get_flags_for_objects(objects_list, user=user, status=status)

https://github.com/idlesign/django-siteflags/blob/c8d1a40afb2f60d68ad6d34af3f528e76bdb7142/siteflags/models.py#L154-L164
idlesign/django-siteflags | siteflags/models.py | ModelWithFlag.get_flags | python | train

    def get_flags(self, user=None, status=None):
        """Returns flags for the object optionally filtered by status.

        :param User user: Optional user filter
        :param int status: Optional status filter
        :rtype: QuerySet
        """
        filter_kwargs = {}
        update_filter_dict(filter_kwargs, user, status)
        return self.flags.filter(**filter_kwargs).all()

https://github.com/idlesign/django-siteflags/blob/c8d1a40afb2f60d68ad6d34af3f528e76bdb7142/siteflags/models.py#L166-L175
idlesign/django-siteflags | siteflags/models.py | ModelWithFlag.set_flag | python | train

    def set_flag(self, user, note=None, status=None):
        """Flags the object.

        :param User user:
        :param str note: User-defined note for this flag.
        :param int status: Optional status integer (the meaning is defined by a developer).
        :return:
        """
        if not user.id:
            return None

        init_kwargs = {
            'user': user,
            'linked_object': self,
        }
        if note is not None:
            init_kwargs['note'] = note

        if status is not None:
            init_kwargs['status'] = status

        flag = get_flag_model()(**init_kwargs)

        try:
            flag.save()
        except IntegrityError:  # Record already exists.
            pass

        return flag

https://github.com/idlesign/django-siteflags/blob/c8d1a40afb2f60d68ad6d34af3f528e76bdb7142/siteflags/models.py#L177-L202
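Usage requires mixing `ModelWithFlag` into a concrete model inside a configured Django project; the model and field names below are invented for the sketch:

```python
# models.py of a hypothetical app; requires a configured Django project.
from django.db import models
from siteflags.models import ModelWithFlag  # import path taken from the row above

class Article(ModelWithFlag):
    title = models.CharField(max_length=100)

# Elsewhere, e.g. in a view:
#   article.set_flag(request.user, note='spam report', status=1)
# Flagging twice for the same user is safe: the IntegrityError raised by the
# unique constraint is swallowed, so the existing flag is simply kept.
#   article.is_flagged(request.user, status=1)  # truthy count of matching flags
```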
idlesign/django-siteflags | siteflags/models.py | ModelWithFlag.remove_flag | python | train

    def remove_flag(self, user=None, status=None):
        """Removes flag(s) from the object.

        :param User user: Optional user filter
        :param int status: Optional status filter
        :return:
        """
        filter_kwargs = {
            'content_type': ContentType.objects.get_for_model(self),
            'object_id': self.id
        }
        update_filter_dict(filter_kwargs, user, status)
        get_flag_model().objects.filter(**filter_kwargs).delete()

https://github.com/idlesign/django-siteflags/blob/c8d1a40afb2f60d68ad6d34af3f528e76bdb7142/siteflags/models.py#L204-L216
idlesign/django-siteflags | siteflags/models.py | ModelWithFlag.is_flagged | python | train

    def is_flagged(self, user=None, status=None):
        """Returns boolean whether the objects is flagged by a user.

        :param User user: Optional user filter
        :param int status: Optional status filter
        :return:
        """
        filter_kwargs = {
            'content_type': ContentType.objects.get_for_model(self),
            'object_id': self.id,
        }
        update_filter_dict(filter_kwargs, user, status)
        return self.flags.filter(**filter_kwargs).count()

https://github.com/idlesign/django-siteflags/blob/c8d1a40afb2f60d68ad6d34af3f528e76bdb7142/siteflags/models.py#L218-L230
horejsek/python-sqlpuzzle | sqlpuzzle/_backends/__init__.py | set_backend | python | train

    def set_backend(database):
        """
        Configure used database, so sqlpuzzle can generate queries which are needed.
        For now there is only support of MySQL and PostgreSQL.
        """
        new_backend = BACKENDS.get(database.lower())
        if not new_backend:
            raise Exception('Backend {} is not supported.'.format(database))
        global BACKEND  # pylint: disable=global-statement
        BACKEND = new_backend

https://github.com/horejsek/python-sqlpuzzle/blob/d3a42ed1b339b8eafddb8d2c28a3a5832b3998dd/sqlpuzzle/_backends/__init__.py#L19-L28
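The lookup is case-insensitive thanks to `database.lower()`. The exact `BACKENDS` registry keys are not shown in this row; the lowercase names below are an assumption inferred from the MySQL/PostgreSQL support mentioned in the docstring:

```python
from sqlpuzzle._backends import set_backend  # module path taken from the row above

set_backend('PostgreSQL')   # normalized to 'postgresql' before the registry lookup
set_backend('mysql')

try:
    set_backend('oracle')   # presumably not in BACKENDS
except Exception as exc:
    print(exc)              # Backend oracle is not supported.
```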
QualiSystems/cloudshell-networking-devices | cloudshell/devices/runners/state_runner.py | StateRunner.health_check | python | train

    def health_check(self):
        """ Verify that device is accessible over CLI by sending ENTER for cli session """
        api_response = 'Online'
        result = 'Health check on resource {}'.format(self._resource_name)

        try:
            health_check_flow = RunCommandFlow(self.cli_handler, self._logger)
            health_check_flow.execute_flow()
            result += ' passed.'
        except Exception as e:
            self._logger.exception(e)
            api_response = 'Error'
            result += ' failed.'

        try:
            self._api.SetResourceLiveStatus(self._resource_name, api_response, result)
        except Exception:
            self._logger.error('Cannot update {} resource status on portal'.format(self._resource_name))

        return result

https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/runners/state_runner.py#L26-L46
horejsek/python-sqlpuzzle | sqlpuzzle/_backends/sql.py | SqlBackend.reference | python | train

    def reference(cls, value):
        """
        Convert as reference on column.

        table => "table"
        table.column => "table"."column"
        db.table.column => "db"."table"."column"

        table."col.umn" => "table"."col.umn"
        "table"."col.umn" => "table"."col.umn"
        """
        from sqlpuzzle._common.utils import force_text
        value = force_text(value)
        parts = re.split(r'{quote}([^{quote}]+){quote}|\.'.format(quote=cls.reference_quote), value)
        parts = ('{quote}{i}{quote}'.format(quote=cls.reference_quote, i=i) if i != '*' else i for i in parts if i)
        return '.'.join(parts)

https://github.com/horejsek/python-sqlpuzzle/blob/d3a42ed1b339b8eafddb8d2c28a3a5832b3998dd/sqlpuzzle/_backends/sql.py#L31-L44
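All the docstring's conversions fall out of a single `re.split` whose capture group treats already-quoted segments as atomic. A stand-alone replica using `"` as the quote character (each backend's actual `reference_quote` is not shown in this row):

```python
import re

def reference(value, quote='"'):
    # Same splitting trick as SqlBackend.reference above: split on either a
    # quoted segment (captured, so its content survives) or a bare dot.
    parts = re.split(r'{q}([^{q}]+){q}|\.'.format(q=quote), value)
    return '.'.join('{q}{i}{q}'.format(q=quote, i=i) if i != '*' else i
                    for i in parts if i)

print(reference('db.table.column'))    # -> "db"."table"."column"
print(reference('table."col.umn"'))    # -> "table"."col.umn"
print(reference('table.*'))            # -> "table".*  (the star stays unquoted)
```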
Workiva/furious | furious/job_utils.py | reference_to_path | def reference_to_path(reference):
"""Convert a reference to a Python object to a string path."""
# Try to pop the options off whatever they passed in.
if isinstance(reference, basestring):
# This is an object path name in str form.
import re
if not re.match(r'^[^\d\W]([a-zA-Z._]|((?<!\.)\d))+$', reference):
raise errors.BadObjectPathError(
'Invalid reference path, must meet Python\'s identifier '
'requirements, passed value was "%s".', reference)
return reference
if callable(reference):
# This is a function or a class.
# Try to figure out the path to the reference.
parts = [reference.__module__]
if hasattr(reference, 'im_class'):
parts.append(reference.im_class.__name__)
if hasattr(reference, 'func_name'):
parts.append(reference.func_name)
elif reference.__module__ == '__builtin__':
return reference.__name__
elif hasattr(reference, '__name__'):
# Probably a class
parts.append(reference.__name__)
else:
raise errors.BadObjectPathError("Invalid object type.")
return '.'.join(parts)
raise errors.BadObjectPathError("Unable to determine path to callable.")
elif hasattr(reference, '__package__'):
# This is probably a module.
return reference.__name__
raise errors.BadObjectPathError("Must provide a reference path or reference.") | python | def reference_to_path(reference):
"""Convert a reference to a Python object to a string path."""
# Try to pop the options off whatever they passed in.
if isinstance(reference, basestring):
# This is an object path name in str form.
import re
if not re.match(r'^[^\d\W]([a-zA-Z._]|((?<!\.)\d))+$', reference):
raise errors.BadObjectPathError(
'Invalid reference path, must meet Python\'s identifier '
'requirements, passed value was "%s".', reference)
return reference
if callable(reference):
# This is a function or a class.
# Try to figure out the path to the reference.
parts = [reference.__module__]
if hasattr(reference, 'im_class'):
parts.append(reference.im_class.__name__)
if hasattr(reference, 'func_name'):
parts.append(reference.func_name)
elif reference.__module__ == '__builtin__':
return reference.__name__
elif hasattr(reference, '__name__'):
# Probably a class
parts.append(reference.__name__)
else:
raise errors.BadObjectPathError("Invalid object type.")
return '.'.join(parts)
raise errors.BadObjectPathError("Unable to determine path to callable.")
elif hasattr(reference, '__package__'):
# This is probably a module.
return reference.__name__
raise errors.BadObjectPathError("Must provide a reference path or reference.") | [
"def",
"reference_to_path",
"(",
"reference",
")",
":",
"# Try to pop the options off whatever they passed in.",
"if",
"isinstance",
"(",
"reference",
",",
"basestring",
")",
":",
"# This is an object path name in str form.",
"import",
"re",
"if",
"not",
"re",
".",
"match",
"(",
"r'^[^\\d\\W]([a-zA-Z._]|((?<!\\.)\\d))+$'",
",",
"reference",
")",
":",
"raise",
"errors",
".",
"BadObjectPathError",
"(",
"'Invalid reference path, must meet Python\\'s identifier '",
"'requirements, passed value was \"%s\".'",
",",
"reference",
")",
"return",
"reference",
"if",
"callable",
"(",
"reference",
")",
":",
"# This is a function or a class.",
"# Try to figure out the path to the reference.",
"parts",
"=",
"[",
"reference",
".",
"__module__",
"]",
"if",
"hasattr",
"(",
"reference",
",",
"'im_class'",
")",
":",
"parts",
".",
"append",
"(",
"reference",
".",
"im_class",
".",
"__name__",
")",
"if",
"hasattr",
"(",
"reference",
",",
"'func_name'",
")",
":",
"parts",
".",
"append",
"(",
"reference",
".",
"func_name",
")",
"elif",
"reference",
".",
"__module__",
"==",
"'__builtin__'",
":",
"return",
"reference",
".",
"__name__",
"elif",
"hasattr",
"(",
"reference",
",",
"'__name__'",
")",
":",
"# Probably a class",
"parts",
".",
"append",
"(",
"reference",
".",
"__name__",
")",
"else",
":",
"raise",
"errors",
".",
"BadObjectPathError",
"(",
"\"Invalid object type.\"",
")",
"return",
"'.'",
".",
"join",
"(",
"parts",
")",
"raise",
"errors",
".",
"BadObjectPathError",
"(",
"\"Unable to determine path to callable.\"",
")",
"elif",
"hasattr",
"(",
"reference",
",",
"'__package__'",
")",
":",
"# This is probably a module.",
"return",
"reference",
".",
"__name__",
"raise",
"errors",
".",
"BadObjectPathError",
"(",
"\"Must provide a reference path or reference.\"",
")"
] | Convert a reference to a Python object to a string path. | [
"Convert",
"a",
"reference",
"to",
"a",
"Python",
"object",
"to",
"a",
"string",
"path",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/job_utils.py#L42-L79 |
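A minimal usage sketch for reference_to_path, based only on the code above (the my_task function is hypothetical; note this codebase targets Python 2, hence basestring and func_name):

from furious.job_utils import reference_to_path

def my_task():
    pass

# Callables resolve to a dotted path built from __module__ and func_name.
reference_to_path(my_task)         # -> '__main__.my_task' when run as a script
# String paths are validated against the identifier regex and returned as-is.
reference_to_path('os.path.join')  # -> 'os.path.join'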
Workiva/furious | furious/job_utils.py | path_to_reference | def path_to_reference(path):
"""Convert an object path reference to a reference."""
# By default JSON decodes strings as unicode. The Python __import__ does
# not like that choice. So we'll just cast all function paths to a string.
# NOTE: that there is no corresponding unit test for the classmethod
# version of this problem. It only impacts importing modules.
path = str(path)
if '.' not in path:
try:
return globals()["__builtins__"][path]
except KeyError:
try:
return getattr(globals()["__builtins__"], path)
except AttributeError:
pass
try:
return globals()[path]
except KeyError:
pass
raise errors.BadObjectPathError(
'Unable to find function "%s".' % (path,))
module_path, function_name = path.rsplit('.', 1)
try:
module = __import__(name=module_path,
fromlist=[function_name])
except ImportError:
module_path, class_name = module_path.rsplit('.', 1)
module = __import__(name=module_path, fromlist=[class_name])
module = getattr(module, class_name)
try:
return getattr(module, function_name)
except AttributeError:
raise errors.BadObjectPathError(
'Unable to find function "%s".' % (path,)) | python | def path_to_reference(path):
"""Convert an object path reference to a reference."""
# By default JSON decodes strings as unicode. The Python __import__ does
# not like that choice. So we'll just cast all function paths to a string.
# NOTE: that there is no corresponding unit test for the classmethod
# version of this problem. It only impacts importing modules.
path = str(path)
if '.' not in path:
try:
return globals()["__builtins__"][path]
except KeyError:
try:
return getattr(globals()["__builtins__"], path)
except AttributeError:
pass
try:
return globals()[path]
except KeyError:
pass
raise errors.BadObjectPathError(
'Unable to find function "%s".' % (path,))
module_path, function_name = path.rsplit('.', 1)
try:
module = __import__(name=module_path,
fromlist=[function_name])
except ImportError:
module_path, class_name = module_path.rsplit('.', 1)
module = __import__(name=module_path, fromlist=[class_name])
module = getattr(module, class_name)
try:
return getattr(module, function_name)
except AttributeError:
raise errors.BadObjectPathError(
'Unable to find function "%s".' % (path,)) | [
"def",
"path_to_reference",
"(",
"path",
")",
":",
"# By default JSON decodes strings as unicode. The Python __import__ does",
"# not like that choice. So we'll just cast all function paths to a string.",
"# NOTE: that there is no corresponding unit test for the classmethod",
"# version of this problem. It only impacts importing modules.",
"path",
"=",
"str",
"(",
"path",
")",
"if",
"'.'",
"not",
"in",
"path",
":",
"try",
":",
"return",
"globals",
"(",
")",
"[",
"\"__builtins__\"",
"]",
"[",
"path",
"]",
"except",
"KeyError",
":",
"try",
":",
"return",
"getattr",
"(",
"globals",
"(",
")",
"[",
"\"__builtins__\"",
"]",
",",
"path",
")",
"except",
"AttributeError",
":",
"pass",
"try",
":",
"return",
"globals",
"(",
")",
"[",
"path",
"]",
"except",
"KeyError",
":",
"pass",
"raise",
"errors",
".",
"BadObjectPathError",
"(",
"'Unable to find function \"%s\".'",
"%",
"(",
"path",
",",
")",
")",
"module_path",
",",
"function_name",
"=",
"path",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"try",
":",
"module",
"=",
"__import__",
"(",
"name",
"=",
"module_path",
",",
"fromlist",
"=",
"[",
"function_name",
"]",
")",
"except",
"ImportError",
":",
"module_path",
",",
"class_name",
"=",
"module_path",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"module",
"=",
"__import__",
"(",
"name",
"=",
"module_path",
",",
"fromlist",
"=",
"[",
"class_name",
"]",
")",
"module",
"=",
"getattr",
"(",
"module",
",",
"class_name",
")",
"try",
":",
"return",
"getattr",
"(",
"module",
",",
"function_name",
")",
"except",
"AttributeError",
":",
"raise",
"errors",
".",
"BadObjectPathError",
"(",
"'Unable to find function \"%s\".'",
"%",
"(",
"path",
",",
")",
")"
] | Convert an object path string to a reference. | [
"Convert",
"an",
"object",
"path",
"reference",
"to",
"a",
"reference",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/job_utils.py#L82-L123 |
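A sketch of the inverse lookup, following the branches above (logging-free; os.path and len are used so the paths really resolve):

import os.path
from furious.job_utils import path_to_reference

# Dotted path: __import__('os.path', fromlist=['join']), then getattr.
assert path_to_reference('os.path.join') is os.path.join
# Bare name: looked up among the builtins first.
assert path_to_reference('len') is len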
Workiva/furious | furious/job_utils.py | encode_callbacks | def encode_callbacks(callbacks):
"""Encode callbacks to as a dict suitable for JSON encoding."""
from furious.async import Async
if not callbacks:
return
encoded_callbacks = {}
for event, callback in callbacks.iteritems():
if callable(callback):
callback, _ = get_function_path_and_options(callback)
elif isinstance(callback, Async):
callback = callback.to_dict()
encoded_callbacks[event] = callback
return encoded_callbacks | python | def encode_callbacks(callbacks):
"""Encode callbacks to as a dict suitable for JSON encoding."""
from furious.async import Async
if not callbacks:
return
encoded_callbacks = {}
for event, callback in callbacks.iteritems():
if callable(callback):
callback, _ = get_function_path_and_options(callback)
elif isinstance(callback, Async):
callback = callback.to_dict()
encoded_callbacks[event] = callback
return encoded_callbacks | [
"def",
"encode_callbacks",
"(",
"callbacks",
")",
":",
"from",
"furious",
".",
"async",
"import",
"Async",
"if",
"not",
"callbacks",
":",
"return",
"encoded_callbacks",
"=",
"{",
"}",
"for",
"event",
",",
"callback",
"in",
"callbacks",
".",
"iteritems",
"(",
")",
":",
"if",
"callable",
"(",
"callback",
")",
":",
"callback",
",",
"_",
"=",
"get_function_path_and_options",
"(",
"callback",
")",
"elif",
"isinstance",
"(",
"callback",
",",
"Async",
")",
":",
"callback",
"=",
"callback",
".",
"to_dict",
"(",
")",
"encoded_callbacks",
"[",
"event",
"]",
"=",
"callback",
"return",
"encoded_callbacks"
] | Encode callbacks as a dict suitable for JSON encoding. | [
"Encode",
"callbacks",
"to",
"as",
"a",
"dict",
"suitable",
"for",
"JSON",
"encoding",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/job_utils.py#L126-L143 |
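A sketch of the encoder's output (the handler names are hypothetical; callables become dotted paths, plain string paths pass through, and Async instances are serialized via to_dict):

from furious.job_utils import encode_callbacks

def on_success():
    pass

encoded = encode_callbacks({'success': on_success,
                            'error': 'myapp.handlers.on_error'})
# encoded == {'success': '__main__.on_success',
#             'error': 'myapp.handlers.on_error'}
# (the module prefix depends on where on_success is defined)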
Workiva/furious | furious/job_utils.py | decode_callbacks | def decode_callbacks(encoded_callbacks):
"""Decode the callbacks to an executable form."""
from furious.async import Async
callbacks = {}
for event, callback in encoded_callbacks.iteritems():
if isinstance(callback, dict):
async_type = Async
if '_type' in callback:
async_type = path_to_reference(callback['_type'])
callback = async_type.from_dict(callback)
else:
callback = path_to_reference(callback)
callbacks[event] = callback
return callbacks | python | def decode_callbacks(encoded_callbacks):
"""Decode the callbacks to an executable form."""
from furious.async import Async
callbacks = {}
for event, callback in encoded_callbacks.iteritems():
if isinstance(callback, dict):
async_type = Async
if '_type' in callback:
async_type = path_to_reference(callback['_type'])
callback = async_type.from_dict(callback)
else:
callback = path_to_reference(callback)
callbacks[event] = callback
return callbacks | [
"def",
"decode_callbacks",
"(",
"encoded_callbacks",
")",
":",
"from",
"furious",
".",
"async",
"import",
"Async",
"callbacks",
"=",
"{",
"}",
"for",
"event",
",",
"callback",
"in",
"encoded_callbacks",
".",
"iteritems",
"(",
")",
":",
"if",
"isinstance",
"(",
"callback",
",",
"dict",
")",
":",
"async_type",
"=",
"Async",
"if",
"'_type'",
"in",
"callback",
":",
"async_type",
"=",
"path_to_reference",
"(",
"callback",
"[",
"'_type'",
"]",
")",
"callback",
"=",
"async_type",
".",
"from_dict",
"(",
"callback",
")",
"else",
":",
"callback",
"=",
"path_to_reference",
"(",
"callback",
")",
"callbacks",
"[",
"event",
"]",
"=",
"callback",
"return",
"callbacks"
] | Decode the callbacks to an executable form. | [
"Decode",
"the",
"callbacks",
"to",
"an",
"executable",
"form",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/job_utils.py#L146-L162 |
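The round trip with the encoder above; logging.info is used so the dotted path resolves without any hypothetical modules:

import logging
from furious.job_utils import encode_callbacks, decode_callbacks

encoded = encode_callbacks({'success': logging.info})  # {'success': 'logging.info'}
decoded = decode_callbacks(encoded)
assert decoded['success'] is logging.info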
upsight/doctor | doctor/schema.py | SchemaRefResolver._format_stack | def _format_stack(self, stack, current=None):
"""Prettifies a scope stack for use in error messages.
:param list(str) stack: List of scopes.
:param str current: The current scope. If specified, will be appended
onto the stack before formatting.
:returns: str
"""
if current is not None:
stack = stack + [current]
if len(stack) > 1:
prefix = os.path.commonprefix(stack)
if prefix.endswith('/'):
prefix = prefix[:-1]
stack = [scope[len(prefix):] for scope in stack]
return ' => '.join(stack) | python | def _format_stack(self, stack, current=None):
"""Prettifies a scope stack for use in error messages.
:param list(str) stack: List of scopes.
:param str current: The current scope. If specified, will be appended
onto the stack before formatting.
:returns: str
"""
if current is not None:
stack = stack + [current]
if len(stack) > 1:
prefix = os.path.commonprefix(stack)
if prefix.endswith('/'):
prefix = prefix[:-1]
stack = [scope[len(prefix):] for scope in stack]
return ' => '.join(stack) | [
"def",
"_format_stack",
"(",
"self",
",",
"stack",
",",
"current",
"=",
"None",
")",
":",
"if",
"current",
"is",
"not",
"None",
":",
"stack",
"=",
"stack",
"+",
"[",
"current",
"]",
"if",
"len",
"(",
"stack",
")",
">",
"1",
":",
"prefix",
"=",
"os",
".",
"path",
".",
"commonprefix",
"(",
"stack",
")",
"if",
"prefix",
".",
"endswith",
"(",
"'/'",
")",
":",
"prefix",
"=",
"prefix",
"[",
":",
"-",
"1",
"]",
"stack",
"=",
"[",
"scope",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"for",
"scope",
"in",
"stack",
"]",
"return",
"' => '",
".",
"join",
"(",
"stack",
")"
] | Prettifies a scope stack for use in error messages.
:param list(str) stack: List of scopes.
:param str current: The current scope. If specified, will be appended
onto the stack before formatting.
:returns: str | [
"Prettifies",
"a",
"scope",
"stack",
"for",
"use",
"in",
"error",
"messages",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L20-L35 |
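An illustration of the prefix trimming (resolver stands for any SchemaRefResolver instance; the scope URIs are made up):

stack = ['file:///schemas/base.yaml#/definitions/a',
         'file:///schemas/base.yaml#/definitions/b']
# os.path.commonprefix gives 'file:///schemas/base.yaml#/definitions/';
# the trailing '/' is stripped and only the suffixes are joined:
resolver._format_stack(stack)  # -> '/a => /b'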
upsight/doctor | doctor/schema.py | SchemaRefResolver.resolve | def resolve(self, ref, document=None):
"""Resolve a fragment within the schema.
If the resolved value contains a $ref, it will attempt to resolve that
as well, until it gets something that is not a reference. Circular
references will raise a SchemaError.
:param str ref: URI to resolve.
:param dict document: Optional schema in which to resolve the URI.
:returns: a tuple of the final, resolved URI (after any recursion) and
resolved value in the schema that the URI references.
:raises SchemaError:
"""
try:
# This logic is basically the RefResolver's resolve function, but
# updated to support fragments of dynamic documents. The jsonschema
# module supports passing documents when resolving fragments, but
# it doesn't expose that capability in the resolve function.
url = self._urljoin_cache(self.resolution_scope, ref)
if document is None:
# No document passed, so just resolve it as we normally would.
resolved = self._remote_cache(url)
else:
# Document passed, so assume it's a fragment.
_, fragment = urldefrag(url)
resolved = self.resolve_fragment(document, fragment)
except jsonschema.RefResolutionError as e:
# Failed to find a ref. Make the error a bit prettier so we can
# figure out where it came from.
message = e.args[0]
if self._scopes_stack:
message = '{} (from {})'.format(
message, self._format_stack(self._scopes_stack))
raise SchemaError(message)
if isinstance(resolved, dict) and '$ref' in resolved:
# Try to resolve the reference, so we can get the actual value we
# want, instead of a useless dict with a $ref in it.
if url in self._scopes_stack:
# We've already tried to look up this URL, so this must
# be a circular reference in the schema.
raise SchemaError(
'Circular reference in schema: {}'.format(
self._format_stack(self._scopes_stack + [url])))
try:
self.push_scope(url)
return self.resolve(resolved['$ref'])
finally:
self.pop_scope()
else:
return url, resolved | python | def resolve(self, ref, document=None):
"""Resolve a fragment within the schema.
If the resolved value contains a $ref, it will attempt to resolve that
as well, until it gets something that is not a reference. Circular
references will raise a SchemaError.
:param str ref: URI to resolve.
:param dict document: Optional schema in which to resolve the URI.
:returns: a tuple of the final, resolved URI (after any recursion) and
resolved value in the schema that the URI references.
:raises SchemaError:
"""
try:
# This logic is basically the RefResolver's resolve function, but
# updated to support fragments of dynamic documents. The jsonschema
# module supports passing documents when resolving fragments, but
# it doesn't expose that capability in the resolve function.
url = self._urljoin_cache(self.resolution_scope, ref)
if document is None:
# No document passed, so just resolve it as we normally would.
resolved = self._remote_cache(url)
else:
# Document passed, so assume it's a fragment.
_, fragment = urldefrag(url)
resolved = self.resolve_fragment(document, fragment)
except jsonschema.RefResolutionError as e:
# Failed to find a ref. Make the error a bit prettier so we can
# figure out where it came from.
message = e.args[0]
if self._scopes_stack:
message = '{} (from {})'.format(
message, self._format_stack(self._scopes_stack))
raise SchemaError(message)
if isinstance(resolved, dict) and '$ref' in resolved:
# Try to resolve the reference, so we can get the actual value we
# want, instead of a useless dict with a $ref in it.
if url in self._scopes_stack:
# We've already tried to look up this URL, so this must
# be a circular reference in the schema.
raise SchemaError(
'Circular reference in schema: {}'.format(
self._format_stack(self._scopes_stack + [url])))
try:
self.push_scope(url)
return self.resolve(resolved['$ref'])
finally:
self.pop_scope()
else:
return url, resolved | [
"def",
"resolve",
"(",
"self",
",",
"ref",
",",
"document",
"=",
"None",
")",
":",
"try",
":",
"# This logic is basically the RefResolver's resolve function, but",
"# updated to support fragments of dynamic documents. The jsonschema",
"# module supports passing documents when resolving fragments, but",
"# it doesn't expose that capability in the resolve function.",
"url",
"=",
"self",
".",
"_urljoin_cache",
"(",
"self",
".",
"resolution_scope",
",",
"ref",
")",
"if",
"document",
"is",
"None",
":",
"# No document passed, so just resolve it as we normally would.",
"resolved",
"=",
"self",
".",
"_remote_cache",
"(",
"url",
")",
"else",
":",
"# Document passed, so assume it's a fragment.",
"_",
",",
"fragment",
"=",
"urldefrag",
"(",
"url",
")",
"resolved",
"=",
"self",
".",
"resolve_fragment",
"(",
"document",
",",
"fragment",
")",
"except",
"jsonschema",
".",
"RefResolutionError",
"as",
"e",
":",
"# Failed to find a ref. Make the error a bit prettier so we can",
"# figure out where it came from.",
"message",
"=",
"e",
".",
"args",
"[",
"0",
"]",
"if",
"self",
".",
"_scopes_stack",
":",
"message",
"=",
"'{} (from {})'",
".",
"format",
"(",
"message",
",",
"self",
".",
"_format_stack",
"(",
"self",
".",
"_scopes_stack",
")",
")",
"raise",
"SchemaError",
"(",
"message",
")",
"if",
"isinstance",
"(",
"resolved",
",",
"dict",
")",
"and",
"'$ref'",
"in",
"resolved",
":",
"# Try to resolve the reference, so we can get the actual value we",
"# want, instead of a useless dict with a $ref in it.",
"if",
"url",
"in",
"self",
".",
"_scopes_stack",
":",
"# We've already tried to look up this URL, so this must",
"# be a circular reference in the schema.",
"raise",
"SchemaError",
"(",
"'Circular reference in schema: {}'",
".",
"format",
"(",
"self",
".",
"_format_stack",
"(",
"self",
".",
"_scopes_stack",
"+",
"[",
"url",
"]",
")",
")",
")",
"try",
":",
"self",
".",
"push_scope",
"(",
"url",
")",
"return",
"self",
".",
"resolve",
"(",
"resolved",
"[",
"'$ref'",
"]",
")",
"finally",
":",
"self",
".",
"pop_scope",
"(",
")",
"else",
":",
"return",
"url",
",",
"resolved"
] | Resolve a fragment within the schema.
If the resolved value contains a $ref, it will attempt to resolve that
as well, until it gets something that is not a reference. Circular
references will raise a SchemaError.
:param str ref: URI to resolve.
:param dict document: Optional schema in which to resolve the URI.
:returns: a tuple of the final, resolved URI (after any recursion) and
resolved value in the schema that the URI references.
:raises SchemaError: | [
"Resolve",
"a",
"fragment",
"within",
"the",
"schema",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L37-L87 |
upsight/doctor | doctor/schema.py | SchemaRefResolver.resolve_remote | def resolve_remote(self, uri):
"""Add support to load YAML files.
This will attempt to load a YAML file first, and then fall back to the
default behavior.
:param str uri: the URI to resolve
:returns: the retrieved document
"""
if uri.startswith('file://'):
try:
path = uri[7:]
with open(path, 'r') as schema_file:
result = yaml.load(schema_file)
if self.cache_remote:
self.store[uri] = result
return result
except yaml.parser.ParserError as e:
logging.debug('Error parsing {!r} as YAML: {}'.format(
uri, e))
return super(SchemaRefResolver, self).resolve_remote(uri) | python | def resolve_remote(self, uri):
"""Add support to load YAML files.
This will attempt to load a YAML file first, and then fall back to the
default behavior.
:param str uri: the URI to resolve
:returns: the retrieved document
"""
if uri.startswith('file://'):
try:
path = uri[7:]
with open(path, 'r') as schema_file:
result = yaml.load(schema_file)
if self.cache_remote:
self.store[uri] = result
return result
except yaml.parser.ParserError as e:
logging.debug('Error parsing {!r} as YAML: {}'.format(
uri, e))
return super(SchemaRefResolver, self).resolve_remote(uri) | [
"def",
"resolve_remote",
"(",
"self",
",",
"uri",
")",
":",
"if",
"uri",
".",
"startswith",
"(",
"'file://'",
")",
":",
"try",
":",
"path",
"=",
"uri",
"[",
"7",
":",
"]",
"with",
"open",
"(",
"path",
",",
"'r'",
")",
"as",
"schema_file",
":",
"result",
"=",
"yaml",
".",
"load",
"(",
"schema_file",
")",
"if",
"self",
".",
"cache_remote",
":",
"self",
".",
"store",
"[",
"uri",
"]",
"=",
"result",
"return",
"result",
"except",
"yaml",
".",
"parser",
".",
"ParserError",
"as",
"e",
":",
"logging",
".",
"debug",
"(",
"'Error parsing {!r} as YAML: {}'",
".",
"format",
"(",
"uri",
",",
"e",
")",
")",
"return",
"super",
"(",
"SchemaRefResolver",
",",
"self",
")",
".",
"resolve_remote",
"(",
"uri",
")"
] | Add support to load YAML files.
This will attempt to load a YAML file first, and then fall back to the
default behavior.
:param str uri: the URI to resolve
:returns: the retrieved document | [
"Add",
"support",
"to",
"load",
"YAML",
"files",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L89-L109 |
upsight/doctor | doctor/schema.py | Schema.get_validator | def get_validator(self, schema=None):
"""Get a jsonschema validator.
:param dict schema: A custom schema to validate against.
:returns: an instance of jsonschema Draft4Validator.
"""
schema = schema if schema is not None else self.schema
return jsonschema.Draft4Validator(
schema, resolver=self.resolver,
format_checker=jsonschema.draft4_format_checker) | python | def get_validator(self, schema=None):
"""Get a jsonschema validator.
:param dict schema: A custom schema to validate against.
:returns: an instance of jsonschema Draft4Validator.
"""
schema = schema if schema is not None else self.schema
return jsonschema.Draft4Validator(
schema, resolver=self.resolver,
format_checker=jsonschema.draft4_format_checker) | [
"def",
"get_validator",
"(",
"self",
",",
"schema",
"=",
"None",
")",
":",
"schema",
"=",
"schema",
"if",
"schema",
"is",
"not",
"None",
"else",
"self",
".",
"schema",
"return",
"jsonschema",
".",
"Draft4Validator",
"(",
"schema",
",",
"resolver",
"=",
"self",
".",
"resolver",
",",
"format_checker",
"=",
"jsonschema",
".",
"draft4_format_checker",
")"
] | Get a jsonschema validator.
:param dict schema: A custom schema to validate against.
:returns: an instance of jsonschema Draft4Validator. | [
"Get",
"a",
"jsonschema",
"validator",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L127-L136 |
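A usage sketch (the Schema(dict) constructor signature is inferred from from_file further down; the schema itself is illustrative):

from doctor.schema import Schema

schema = Schema({'type': 'object',
                 'properties': {'name': {'type': 'string'}}})
validator = schema.get_validator()
validator.validate({'name': 'ok'})  # passes silently
# validator.validate({'name': 1}) would raise jsonschema.ValidationError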
upsight/doctor | doctor/schema.py | Schema.resolve | def resolve(self, ref, document=None):
"""Resolve a ref within the schema.
This is just a convenience method, since RefResolver returns both a URI
and the resolved value, and we usually just need the resolved value.
:param str ref: URI to resolve.
:param dict document: Optional schema in which to resolve the URI.
:returns: the portion of the schema that the URI references.
:see: :meth:`SchemaRefResolver.resolve`
"""
_, resolved = self.resolver.resolve(ref, document=document)
return resolved | python | def resolve(self, ref, document=None):
"""Resolve a ref within the schema.
This is just a convenience method, since RefResolver returns both a URI
and the resolved value, and we usually just need the resolved value.
:param str ref: URI to resolve.
:param dict document: Optional schema in which to resolve the URI.
:returns: the portion of the schema that the URI references.
:see: :meth:`SchemaRefResolver.resolve`
"""
_, resolved = self.resolver.resolve(ref, document=document)
return resolved | [
"def",
"resolve",
"(",
"self",
",",
"ref",
",",
"document",
"=",
"None",
")",
":",
"_",
",",
"resolved",
"=",
"self",
".",
"resolver",
".",
"resolve",
"(",
"ref",
",",
"document",
"=",
"document",
")",
"return",
"resolved"
] | Resolve a ref within the schema.
This is just a convenience method, since RefResolver returns both a URI
and the resolved value, and we usually just need the resolved value.
:param str ref: URI to resolve.
:param dict document: Optional schema in which to resolve the URI.
:returns: the portion of the schema that the URI references.
:see: :meth:`SchemaRefResolver.resolve` | [
"Resolve",
"a",
"ref",
"within",
"the",
"schema",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L138-L150 |
upsight/doctor | doctor/schema.py | Schema.resolver | def resolver(self):
"""jsonschema RefResolver object for the base schema."""
if self._resolver is not None:
return self._resolver
if self._schema_path is not None:
# the documentation for ref resolving
# https://github.com/Julian/jsonschema/issues/98
# https://python-jsonschema.readthedocs.org/en/latest/references/
self._resolver = SchemaRefResolver(
'file://' + self._schema_path + '/', self.schema)
else:
self._resolver = SchemaRefResolver.from_schema(self.schema)
return self._resolver | python | def resolver(self):
"""jsonschema RefResolver object for the base schema."""
if self._resolver is not None:
return self._resolver
if self._schema_path is not None:
# the documentation for ref resolving
# https://github.com/Julian/jsonschema/issues/98
# https://python-jsonschema.readthedocs.org/en/latest/references/
self._resolver = SchemaRefResolver(
'file://' + self._schema_path + '/', self.schema)
else:
self._resolver = SchemaRefResolver.from_schema(self.schema)
return self._resolver | [
"def",
"resolver",
"(",
"self",
")",
":",
"if",
"self",
".",
"_resolver",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_resolver",
"if",
"self",
".",
"_schema_path",
"is",
"not",
"None",
":",
"# the documentation for ref resolving",
"# https://github.com/Julian/jsonschema/issues/98",
"# https://python-jsonschema.readthedocs.org/en/latest/references/",
"self",
".",
"_resolver",
"=",
"SchemaRefResolver",
"(",
"'file://'",
"+",
"self",
".",
"_schema_path",
"+",
"'/'",
",",
"self",
".",
"schema",
")",
"else",
":",
"self",
".",
"_resolver",
"=",
"SchemaRefResolver",
".",
"from_schema",
"(",
"self",
".",
"schema",
")",
"return",
"self",
".",
"_resolver"
] | jsonschema RefResolver object for the base schema. | [
"jsonschema",
"RefResolver",
"object",
"for",
"the",
"base",
"schema",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L153-L165 |
upsight/doctor | doctor/schema.py | Schema.validate | def validate(self, value, validator):
"""Validates and returns the value.
If the value does not validate against the schema, SchemaValidationError
will be raised.
:param value: A value to validate (usually a dict).
:param validator: An instance of a jsonschema validator class, as
created by Schema.get_validator().
:returns: the passed value.
:raises SchemaValidationError:
:raises Exception:
"""
try:
validator.validate(value)
except Exception as e:
logging.debug(e, exc_info=e)
if isinstance(e, DoctorError):
raise
else:
# Gather all the validation errors
validation_errors = sorted(
validator.iter_errors(value), key=lambda e: e.path)
errors = {}
for error in validation_errors:
try:
key = error.path[0]
except IndexError:
key = '_other'
errors[key] = error.args[0]
raise SchemaValidationError(e.args[0], errors=errors)
return value | python | def validate(self, value, validator):
"""Validates and returns the value.
If the value does not validate against the schema, SchemaValidationError
will be raised.
:param value: A value to validate (usually a dict).
:param validator: An instance of a jsonschema validator class, as
created by Schema.get_validator().
:returns: the passed value.
:raises SchemaValidationError:
:raises Exception:
"""
try:
validator.validate(value)
except Exception as e:
logging.debug(e, exc_info=e)
if isinstance(e, DoctorError):
raise
else:
# Gather all the validation errors
validation_errors = sorted(
validator.iter_errors(value), key=lambda e: e.path)
errors = {}
for error in validation_errors:
try:
key = error.path[0]
except IndexError:
key = '_other'
errors[key] = error.args[0]
raise SchemaValidationError(e.args[0], errors=errors)
return value | [
"def",
"validate",
"(",
"self",
",",
"value",
",",
"validator",
")",
":",
"try",
":",
"validator",
".",
"validate",
"(",
"value",
")",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"debug",
"(",
"e",
",",
"exc_info",
"=",
"e",
")",
"if",
"isinstance",
"(",
"e",
",",
"DoctorError",
")",
":",
"raise",
"else",
":",
"# Gather all the validation errors",
"validation_errors",
"=",
"sorted",
"(",
"validator",
".",
"iter_errors",
"(",
"value",
")",
",",
"key",
"=",
"lambda",
"e",
":",
"e",
".",
"path",
")",
"errors",
"=",
"{",
"}",
"for",
"error",
"in",
"validation_errors",
":",
"try",
":",
"key",
"=",
"error",
".",
"path",
"[",
"0",
"]",
"except",
"IndexError",
":",
"key",
"=",
"'_other'",
"errors",
"[",
"key",
"]",
"=",
"error",
".",
"args",
"[",
"0",
"]",
"raise",
"SchemaValidationError",
"(",
"e",
".",
"args",
"[",
"0",
"]",
",",
"errors",
"=",
"errors",
")",
"return",
"value"
] | Validates and returns the value.
If the value does not validate against the schema, SchemaValidationError
will be raised.
:param value: A value to validate (usually a dict).
:param validator: An instance of a jsonschema validator class, as
created by Schema.get_validator().
:returns: the passed value.
:raises SchemaValidationError:
:raises Exception: | [
"Validates",
"and",
"returns",
"the",
"value",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L167-L198 |
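Continuing the sketch above; invalid input surfaces as SchemaValidationError, with the errors kwarg collecting one message per failing field:

from doctor.schema import Schema

schema = Schema({'type': 'object',
                 'properties': {'name': {'type': 'string'}}})
validator = schema.get_validator()
schema.validate({'name': 'ok'}, validator)  # returns the value unchanged
# schema.validate({'name': 1}, validator) raises SchemaValidationError;
# errors would map 'name' to something like "1 is not of type 'string'".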
upsight/doctor | doctor/schema.py | Schema.validate_json | def validate_json(self, json_value, validator):
"""Validates and returns the parsed JSON string.
If the value is not valid JSON, ParseError will be raised. If it is
valid JSON, but does not validate against the schema,
SchemaValidationError will be raised.
:param str json_value: JSON value.
:param validator: An instance of a jsonschema validator class, as
created by Schema.get_validator().
:returns: the parsed JSON value.
"""
value = parse_json(json_value)
return self.validate(value, validator) | python | def validate_json(self, json_value, validator):
"""Validates and returns the parsed JSON string.
If the value is not valid JSON, ParseError will be raised. If it is
valid JSON, but does not validate against the schema,
SchemaValidationError will be raised.
:param str json_value: JSON value.
:param validator: An instance of a jsonschema validator class, as
created by Schema.get_validator().
:returns: the parsed JSON value.
"""
value = parse_json(json_value)
return self.validate(value, validator) | [
"def",
"validate_json",
"(",
"self",
",",
"json_value",
",",
"validator",
")",
":",
"value",
"=",
"parse_json",
"(",
"json_value",
")",
"return",
"self",
".",
"validate",
"(",
"value",
",",
"validator",
")"
] | Validates and returns the parsed JSON string.
If the value is not valid JSON, ParseError will be raised. If it is
valid JSON, but does not validate against the schema,
SchemaValidationError will be raised.
:param str json_value: JSON value.
:param validator: An instance of a jsonschema validator class, as
created by Schema.get_validator().
:returns: the parsed JSON value. | [
"Validates",
"and",
"returns",
"the",
"parsed",
"JSON",
"string",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L200-L213 |
upsight/doctor | doctor/schema.py | Schema.from_file | def from_file(cls, schema_filepath, *args, **kwargs):
"""Create an instance from a YAML or JSON schema file.
Any additional args or kwargs will be passed on when constructing the
new schema instance (useful for subclasses).
:param str schema_filepath: Path to the schema file.
:returns: an instance of the class.
:raises SchemaLoadingError: for invalid input files.
"""
schema_filepath = os.path.abspath(schema_filepath)
try:
with open(schema_filepath, 'r') as schema_file:
schema = yaml.load(schema_file.read())
except Exception:
msg = 'Error loading schema file {}'.format(schema_filepath)
logging.exception(msg)
raise SchemaLoadingError(msg)
return cls(schema, *args, schema_path=os.path.dirname(schema_filepath),
**kwargs) | python | def from_file(cls, schema_filepath, *args, **kwargs):
"""Create an instance from a YAML or JSON schema file.
Any additional args or kwargs will be passed on when constructing the
new schema instance (useful for subclasses).
:param str schema_filepath: Path to the schema file.
:returns: an instance of the class.
:raises SchemaLoadingError: for invalid input files.
"""
schema_filepath = os.path.abspath(schema_filepath)
try:
with open(schema_filepath, 'r') as schema_file:
schema = yaml.load(schema_file.read())
except Exception:
msg = 'Error loading schema file {}'.format(schema_filepath)
logging.exception(msg)
raise SchemaLoadingError(msg)
return cls(schema, *args, schema_path=os.path.dirname(schema_filepath),
**kwargs) | [
"def",
"from_file",
"(",
"cls",
",",
"schema_filepath",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"schema_filepath",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"schema_filepath",
")",
"try",
":",
"with",
"open",
"(",
"schema_filepath",
",",
"'r'",
")",
"as",
"schema_file",
":",
"schema",
"=",
"yaml",
".",
"load",
"(",
"schema_file",
".",
"read",
"(",
")",
")",
"except",
"Exception",
":",
"msg",
"=",
"'Error loading schema file {}'",
".",
"format",
"(",
"schema_filepath",
")",
"logging",
".",
"exception",
"(",
"msg",
")",
"raise",
"SchemaLoadingError",
"(",
"msg",
")",
"return",
"cls",
"(",
"schema",
",",
"*",
"args",
",",
"schema_path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"schema_filepath",
")",
",",
"*",
"*",
"kwargs",
")"
] | Create an instance from a YAML or JSON schema file.
Any additional args or kwargs will be passed on when constructing the
new schema instance (useful for subclasses).
:param str schema_filepath: Path to the schema file.
:returns: an instance of the class.
:raises SchemaLoadingError: for invalid input files. | [
"Create",
"an",
"instance",
"from",
"a",
"YAML",
"or",
"JSON",
"schema",
"file",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/schema.py#L216-L235 |
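A loading sketch (the file path is hypothetical; relative $refs inside the file resolve against its directory via the resolver property shown earlier):

from doctor.schema import Schema

schema = Schema.from_file('schemas/api.yaml')  # hypothetical path
validator = schema.get_validator()
# An unreadable or malformed file raises SchemaLoadingError instead.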
QualiSystems/cloudshell-networking-devices | cloudshell/devices/flows/action_flows.py | RunCommandFlow.execute_flow | def execute_flow(self, custom_command="", is_config=False):
""" Execute flow which run custom command on device
:param custom_command: the command to execute on device
:param is_config: if True then run command in configuration mode
:return: command execution output
"""
responses = []
if isinstance(custom_command, str):
commands = [custom_command]
elif isinstance(custom_command, tuple):
commands = list(custom_command)
else:
commands = custom_command
if is_config:
mode = self._cli_handler.config_mode
if not mode:
raise Exception(self.__class__.__name__,
"CliHandler configuration is missing. Config Mode has to be defined")
else:
mode = self._cli_handler.enable_mode
if not mode:
raise Exception(self.__class__.__name__,
"CliHandler configuration is missing. Enable Mode has to be defined")
with self._cli_handler.get_cli_service(mode) as session:
for cmd in commands:
responses.append(session.send_command(command=cmd))
return '\n'.join(responses) | python | def execute_flow(self, custom_command="", is_config=False):
""" Execute flow which run custom command on device
:param custom_command: the command to execute on device
:param is_config: if True then run command in configuration mode
:return: command execution output
"""
responses = []
if isinstance(custom_command, str):
commands = [custom_command]
elif isinstance(custom_command, tuple):
commands = list(custom_command)
else:
commands = custom_command
if is_config:
mode = self._cli_handler.config_mode
if not mode:
raise Exception(self.__class__.__name__,
"CliHandler configuration is missing. Config Mode has to be defined")
else:
mode = self._cli_handler.enable_mode
if not mode:
raise Exception(self.__class__.__name__,
"CliHandler configuration is missing. Enable Mode has to be defined")
with self._cli_handler.get_cli_service(mode) as session:
for cmd in commands:
responses.append(session.send_command(command=cmd))
return '\n'.join(responses) | [
"def",
"execute_flow",
"(",
"self",
",",
"custom_command",
"=",
"\"\"",
",",
"is_config",
"=",
"False",
")",
":",
"responses",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"custom_command",
",",
"str",
")",
":",
"commands",
"=",
"[",
"custom_command",
"]",
"elif",
"isinstance",
"(",
"custom_command",
",",
"tuple",
")",
":",
"commands",
"=",
"list",
"(",
"custom_command",
")",
"else",
":",
"commands",
"=",
"custom_command",
"if",
"is_config",
":",
"mode",
"=",
"self",
".",
"_cli_handler",
".",
"config_mode",
"if",
"not",
"mode",
":",
"raise",
"Exception",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"\"CliHandler configuration is missing. Config Mode has to be defined\"",
")",
"else",
":",
"mode",
"=",
"self",
".",
"_cli_handler",
".",
"enable_mode",
"if",
"not",
"mode",
":",
"raise",
"Exception",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"\"CliHandler configuration is missing. Enable Mode has to be defined\"",
")",
"with",
"self",
".",
"_cli_handler",
".",
"get_cli_service",
"(",
"mode",
")",
"as",
"session",
":",
"for",
"cmd",
"in",
"commands",
":",
"responses",
".",
"append",
"(",
"session",
".",
"send_command",
"(",
"command",
"=",
"cmd",
")",
")",
"return",
"'\\n'",
".",
"join",
"(",
"responses",
")"
] | Execute flow which runs custom command on device
:param custom_command: the command to execute on device
:param is_config: if True then run command in configuration mode
:return: command execution output | [
"Execute",
"flow",
"which",
"run",
"custom",
"command",
"on",
"device"
] | train | https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/flows/action_flows.py#L109-L139 |
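A call sketch (flow stands for a RunCommandFlow wired to a real CLI handler; the device commands are illustrative):

# A tuple or list runs several commands; outputs are joined with '\n'.
output = flow.execute_flow(custom_command=('show version', 'show clock'))
# is_config=True runs the command in the handler's config mode instead.
flow.execute_flow(custom_command='hostname sw1', is_config=True)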
QualiSystems/cloudshell-networking-devices | cloudshell/devices/runners/run_command_runner.py | RunCommandRunner.run_custom_config_command | def run_custom_config_command(self, custom_command):
""" Execute custom command in configuration mode on device
:param custom_command: command
:return: result of command execution
"""
return self.run_command_flow.execute_flow(custom_command=custom_command, is_config=True) | python | def run_custom_config_command(self, custom_command):
""" Execute custom command in configuration mode on device
:param custom_command: command
:return: result of command execution
"""
return self.run_command_flow.execute_flow(custom_command=custom_command, is_config=True) | [
"def",
"run_custom_config_command",
"(",
"self",
",",
"custom_command",
")",
":",
"return",
"self",
".",
"run_command_flow",
".",
"execute_flow",
"(",
"custom_command",
"=",
"custom_command",
",",
"is_config",
"=",
"True",
")"
] | Execute custom command in configuration mode on device
:param custom_command: command
:return: result of command execution | [
"Execute",
"custom",
"command",
"in",
"configuration",
"mode",
"on",
"device"
] | train | https://github.com/QualiSystems/cloudshell-networking-devices/blob/009aab33edb30035b52fe10dbb91db61c95ba4d9/cloudshell/devices/runners/run_command_runner.py#L41-L48 |
internetarchive/doublethink | doublethink/__init__.py | parse_rethinkdb_url | def parse_rethinkdb_url(s):
'''
Parses a url like this rethinkdb://server1:port,server2:port/database/table
Returns:
tuple `(['server1:port', 'server2:port'], database, table)`
`table` and `database` may be None
Raises:
ValueError if url cannot be parsed as a rethinkdb url
There is some precedent for this kind of url (though only with a single
host):
- https://gist.github.com/lucidfrontier45/e5881a8fca25e51ab21c3cf4b4179daa
- https://github.com/laggyluke/node-parse-rethinkdb-url
'''
result = ParsedRethinkDbUrl(None, None, None)
parsed = urllib_parse.urlparse(s)
if parsed.scheme != 'rethinkdb':
raise ValueError
hosts = parsed.netloc.split(',')
database = None
table = None
path_segments = parsed.path.split('/')[1:]
if len(path_segments) >= 3:
raise ValueError
if len(path_segments) >= 1:
database = path_segments[0]
if len(path_segments) == 2:
table = path_segments[1]
if '' in hosts or database == '' or table == '':
raise ValueError
if any('@' in host for host in hosts):
raise ValueError
return ParsedRethinkDbUrl(hosts, database, table) | python | def parse_rethinkdb_url(s):
'''
Parses a url like this rethinkdb://server1:port,server2:port/database/table
Returns:
tuple `(['server1:port', 'server2:port'], database, table)`
`table` and `database` may be None
Raises:
ValueError if url cannot be parsed as a rethinkdb url
There is some precedent for this kind of url (though only with a single
host):
- https://gist.github.com/lucidfrontier45/e5881a8fca25e51ab21c3cf4b4179daa
- https://github.com/laggyluke/node-parse-rethinkdb-url
'''
result = ParsedRethinkDbUrl(None, None, None)
parsed = urllib_parse.urlparse(s)
if parsed.scheme != 'rethinkdb':
raise ValueError
hosts = parsed.netloc.split(',')
database = None
table = None
path_segments = parsed.path.split('/')[1:]
if len(path_segments) >= 3:
raise ValueError
if len(path_segments) >= 1:
database = path_segments[0]
if len(path_segments) == 2:
table = path_segments[1]
if '' in hosts or database == '' or table == '':
raise ValueError
if any('@' in host for host in hosts):
raise ValueError
return ParsedRethinkDbUrl(hosts, database, table) | [
"def",
"parse_rethinkdb_url",
"(",
"s",
")",
":",
"result",
"=",
"ParsedRethinkDbUrl",
"(",
"None",
",",
"None",
",",
"None",
")",
"parsed",
"=",
"urllib_parse",
".",
"urlparse",
"(",
"s",
")",
"if",
"parsed",
".",
"scheme",
"!=",
"'rethinkdb'",
":",
"raise",
"ValueError",
"hosts",
"=",
"parsed",
".",
"netloc",
".",
"split",
"(",
"','",
")",
"database",
"=",
"None",
"table",
"=",
"None",
"path_segments",
"=",
"parsed",
".",
"path",
".",
"split",
"(",
"'/'",
")",
"[",
"1",
":",
"]",
"if",
"len",
"(",
"path_segments",
")",
">=",
"3",
":",
"raise",
"ValueError",
"if",
"len",
"(",
"path_segments",
")",
">=",
"1",
":",
"database",
"=",
"path_segments",
"[",
"0",
"]",
"if",
"len",
"(",
"path_segments",
")",
"==",
"2",
":",
"table",
"=",
"path_segments",
"[",
"1",
"]",
"if",
"''",
"in",
"hosts",
"or",
"database",
"==",
"''",
"or",
"table",
"==",
"''",
":",
"raise",
"ValueError",
"if",
"any",
"(",
"'@'",
"in",
"host",
"for",
"host",
"in",
"hosts",
")",
":",
"raise",
"ValueError",
"return",
"ParsedRethinkDbUrl",
"(",
"hosts",
",",
"database",
",",
"table",
")"
] | Parses a url like this rethinkdb://server1:port,server2:port/database/table
Returns:
tuple `(['server1:port', 'server2:port'], database, table)`
`table` and `database` may be None
Raises:
ValueError if url cannot be parsed as a rethinkdb url
There is some precedent for this kind of url (though only with a single
host):
- https://gist.github.com/lucidfrontier45/e5881a8fca25e51ab21c3cf4b4179daa
- https://github.com/laggyluke/node-parse-rethinkdb-url | [
"Parses",
"a",
"url",
"like",
"this",
"rethinkdb",
":",
"//",
"server1",
":",
"port",
"server2",
":",
"port",
"/",
"database",
"/",
"table"
] | train | https://github.com/internetarchive/doublethink/blob/f7fc7da725c9b572d473c717b3dad9af98a7a2b4/doublethink/__init__.py#L51-L89 |
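Worked examples that follow directly from the code above (ParsedRethinkDbUrl unpacks as a (hosts, database, table) tuple):

from doublethink import parse_rethinkdb_url

hosts, database, table = parse_rethinkdb_url(
    'rethinkdb://db0:28015,db1:28015/crawl/pages')
# hosts == ['db0:28015', 'db1:28015'], database == 'crawl', table == 'pages'

parse_rethinkdb_url('rethinkdb://db0')   # -> (['db0'], None, None)
parse_rethinkdb_url('rethinkdb://db0/')  # ValueError: empty database segment
parse_rethinkdb_url('http://db0/x')      # ValueError: wrong scheme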
Workiva/furious | furious/async.py | async_from_options | def async_from_options(options):
"""Deserialize an Async or Async subclass from an options dict."""
_type = options.pop('_type', 'furious.async.Async')
_type = path_to_reference(_type)
return _type.from_dict(options) | python | def async_from_options(options):
"""Deserialize an Async or Async subclass from an options dict."""
_type = options.pop('_type', 'furious.async.Async')
_type = path_to_reference(_type)
return _type.from_dict(options) | [
"def",
"async_from_options",
"(",
"options",
")",
":",
"_type",
"=",
"options",
".",
"pop",
"(",
"'_type'",
",",
"'furious.async.Async'",
")",
"_type",
"=",
"path_to_reference",
"(",
"_type",
")",
"return",
"_type",
".",
"from_dict",
"(",
"options",
")"
] | Deserialize an Async or Async subclass from an options dict. | [
"Deserialize",
"an",
"Async",
"or",
"Async",
"subclass",
"from",
"an",
"options",
"dict",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L590-L596 |
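A round-trip sketch (Async.to_dict is assumed from its use in encode_callbacks earlier, and the Async(target, args=...) constructor signature is an assumption; the target is real so the path resolves):

from furious.async import Async, async_from_options

original = Async('logging.info', args=('hello',))
restored = async_from_options(original.to_dict())
# '_type' is popped and resolved first, so Async subclasses
# deserialize back into their own class.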
Workiva/furious | furious/async.py | encode_async_options | def encode_async_options(async):
"""Encode Async options for JSON encoding."""
options = copy.deepcopy(async._options)
options['_type'] = reference_to_path(async.__class__)
# JSON doesn't like datetimes.
eta = options.get('task_args', {}).get('eta')
if eta:
options['task_args']['eta'] = time.mktime(eta.timetuple())
callbacks = async._options.get('callbacks')
if callbacks:
options['callbacks'] = encode_callbacks(callbacks)
if '_context_checker' in options:
_checker = options.pop('_context_checker')
options['__context_checker'] = reference_to_path(_checker)
if '_process_results' in options:
_processor = options.pop('_process_results')
options['__process_results'] = reference_to_path(_processor)
return options | python | def encode_async_options(async):
"""Encode Async options for JSON encoding."""
options = copy.deepcopy(async._options)
options['_type'] = reference_to_path(async.__class__)
# JSON doesn't like datetimes.
eta = options.get('task_args', {}).get('eta')
if eta:
options['task_args']['eta'] = time.mktime(eta.timetuple())
callbacks = async._options.get('callbacks')
if callbacks:
options['callbacks'] = encode_callbacks(callbacks)
if '_context_checker' in options:
_checker = options.pop('_context_checker')
options['__context_checker'] = reference_to_path(_checker)
if '_process_results' in options:
_processor = options.pop('_process_results')
options['__process_results'] = reference_to_path(_processor)
return options | [
"def",
"encode_async_options",
"(",
"async",
")",
":",
"options",
"=",
"copy",
".",
"deepcopy",
"(",
"async",
".",
"_options",
")",
"options",
"[",
"'_type'",
"]",
"=",
"reference_to_path",
"(",
"async",
".",
"__class__",
")",
"# JSON don't like datetimes.",
"eta",
"=",
"options",
".",
"get",
"(",
"'task_args'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'eta'",
")",
"if",
"eta",
":",
"options",
"[",
"'task_args'",
"]",
"[",
"'eta'",
"]",
"=",
"time",
".",
"mktime",
"(",
"eta",
".",
"timetuple",
"(",
")",
")",
"callbacks",
"=",
"async",
".",
"_options",
".",
"get",
"(",
"'callbacks'",
")",
"if",
"callbacks",
":",
"options",
"[",
"'callbacks'",
"]",
"=",
"encode_callbacks",
"(",
"callbacks",
")",
"if",
"'_context_checker'",
"in",
"options",
":",
"_checker",
"=",
"options",
".",
"pop",
"(",
"'_context_checker'",
")",
"options",
"[",
"'__context_checker'",
"]",
"=",
"reference_to_path",
"(",
"_checker",
")",
"if",
"'_process_results'",
"in",
"options",
":",
"_processor",
"=",
"options",
".",
"pop",
"(",
"'_process_results'",
")",
"options",
"[",
"'__process_results'",
"]",
"=",
"reference_to_path",
"(",
"_processor",
")",
"return",
"options"
] | Encode Async options for JSON encoding. | [
"Encode",
"Async",
"options",
"for",
"JSON",
"encoding",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L599-L622 |
Workiva/furious | furious/async.py | decode_async_options | def decode_async_options(options):
"""Decode Async options from JSON decoding."""
async_options = copy.deepcopy(options)
# JSON doesn't like datetimes.
eta = async_options.get('task_args', {}).get('eta')
if eta:
from datetime import datetime
async_options['task_args']['eta'] = datetime.fromtimestamp(eta)
# If there are callbacks, reconstitute them.
callbacks = async_options.get('callbacks', {})
if callbacks:
async_options['callbacks'] = decode_callbacks(callbacks)
if '__context_checker' in options:
_checker = options['__context_checker']
async_options['_context_checker'] = path_to_reference(_checker)
if '__process_results' in options:
_processor = options['__process_results']
async_options['_process_results'] = path_to_reference(_processor)
return async_options | python | def decode_async_options(options):
"""Decode Async options from JSON decoding."""
async_options = copy.deepcopy(options)
# JSON doesn't like datetimes.
eta = async_options.get('task_args', {}).get('eta')
if eta:
from datetime import datetime
async_options['task_args']['eta'] = datetime.fromtimestamp(eta)
# If there are callbacks, reconstitute them.
callbacks = async_options.get('callbacks', {})
if callbacks:
async_options['callbacks'] = decode_callbacks(callbacks)
if '__context_checker' in options:
_checker = options['__context_checker']
async_options['_context_checker'] = path_to_reference(_checker)
if '__process_results' in options:
_processor = options['__process_results']
async_options['_process_results'] = path_to_reference(_processor)
return async_options | [
"def",
"decode_async_options",
"(",
"options",
")",
":",
"async_options",
"=",
"copy",
".",
"deepcopy",
"(",
"options",
")",
"# JSON don't like datetimes.",
"eta",
"=",
"async_options",
".",
"get",
"(",
"'task_args'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'eta'",
")",
"if",
"eta",
":",
"from",
"datetime",
"import",
"datetime",
"async_options",
"[",
"'task_args'",
"]",
"[",
"'eta'",
"]",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"eta",
")",
"# If there are callbacks, reconstitute them.",
"callbacks",
"=",
"async_options",
".",
"get",
"(",
"'callbacks'",
",",
"{",
"}",
")",
"if",
"callbacks",
":",
"async_options",
"[",
"'callbacks'",
"]",
"=",
"decode_callbacks",
"(",
"callbacks",
")",
"if",
"'__context_checker'",
"in",
"options",
":",
"_checker",
"=",
"options",
"[",
"'__context_checker'",
"]",
"async_options",
"[",
"'_context_checker'",
"]",
"=",
"path_to_reference",
"(",
"_checker",
")",
"if",
"'__process_results'",
"in",
"options",
":",
"_processor",
"=",
"options",
"[",
"'__process_results'",
"]",
"async_options",
"[",
"'_process_results'",
"]",
"=",
"path_to_reference",
"(",
"_processor",
")",
"return",
"async_options"
] | Decode Async options after JSON decoding. | [
"Decode",
"Async",
"options",
"from",
"JSON",
"decoding",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L625-L648 |
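An encode/decode round trip (assumes task_args passes through the Async constructor as an option, as update_options below suggests; async_job avoids the Python 3 keyword):

from datetime import datetime
from furious.async import Async, encode_async_options, decode_async_options

async_job = Async('logging.info', task_args={'eta': datetime(2020, 1, 1)})
wire = encode_async_options(async_job)   # eta serialized via time.mktime
options = decode_async_options(wire)     # eta is a datetime again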
Workiva/furious | furious/async.py | defaults | def defaults(**options):
"""Set default Async options on the function decorated.
Note: you must pass the decorated function by reference, not as a
"path.string.to.function" for this to have any effect.
"""
_check_options(options)
def real_decorator(function):
function._async_options = options
@wraps(function)
def wrapper(*args, **kwargs):
return function(*args, **kwargs)
return wrapper
return real_decorator | python | def defaults(**options):
"""Set default Async options on the function decorated.
Note: you must pass the decorated function by reference, not as a
"path.string.to.function" for this to have any effect.
"""
_check_options(options)
def real_decorator(function):
function._async_options = options
@wraps(function)
def wrapper(*args, **kwargs):
return function(*args, **kwargs)
return wrapper
return real_decorator | [
"def",
"defaults",
"(",
"*",
"*",
"options",
")",
":",
"_check_options",
"(",
"options",
")",
"def",
"real_decorator",
"(",
"function",
")",
":",
"function",
".",
"_async_options",
"=",
"options",
"@",
"wraps",
"(",
"function",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper",
"return",
"real_decorator"
] | Set default Async options on the function decorated.
Note: you must pass the decorated function by reference, not as a
"path.string.to.function" for this to have any effect. | [
"Set",
"default",
"Async",
"options",
"on",
"the",
"function",
"decorated",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L651-L667 |
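A decorator sketch (the task_args option is illustrative; per the docstring, the defaults apply only when the function object itself, not its dotted path, is handed to Async):

from furious.async import Async, defaults

@defaults(task_args={'countdown': 60})
def process(item_id):
    pass

job = Async(process)  # presumably reads process._async_options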
Workiva/furious | furious/async.py | Async._persist_result | def _persist_result(self):
"""Store this Async's result in persistent storage."""
self._prepare_persistence_engine()
return self._persistence_engine.store_async_result(
self.id, self.result) | python | def _persist_result(self):
"""Store this Async's result in persistent storage."""
self._prepare_persistence_engine()
return self._persistence_engine.store_async_result(
self.id, self.result) | [
"def",
"_persist_result",
"(",
"self",
")",
":",
"self",
".",
"_prepare_persistence_engine",
"(",
")",
"return",
"self",
".",
"_persistence_engine",
".",
"store_async_result",
"(",
"self",
".",
"id",
",",
"self",
".",
"result",
")"
] | Store this Async's result in persistent storage. | [
"Store",
"this",
"Async",
"s",
"result",
"in",
"persistent",
"storage",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L169-L174 |
Workiva/furious | furious/async.py | Async._initialize_recursion_depth | def _initialize_recursion_depth(self):
"""Ensure recursion info is initialized, if not, initialize it."""
from furious.context import get_current_async
recursion_options = self._options.get('_recursion', {})
current_depth = recursion_options.get('current', 0)
max_depth = recursion_options.get('max', MAX_DEPTH)
try:
executing_async = get_current_async()
# If this async is within an executing async, use the depth off
# that async. Otherwise use the depth set in the async's options.
current_depth = executing_async.recursion_depth
# If max_depth does not equal MAX_DEPTH, it is custom. Otherwise
# use the max_depth from the containing async.
if max_depth == MAX_DEPTH:
executing_options = executing_async.get_options().get(
'_recursion', {})
max_depth = executing_options.get('max', max_depth)
except errors.NotInContextError:
# This Async is not being constructed inside an executing Async.
pass
# Store the recursion info.
self.update_options(_recursion={'current': current_depth,
'max': max_depth}) | python | def _initialize_recursion_depth(self):
"""Ensure recursion info is initialized, if not, initialize it."""
from furious.context import get_current_async
recursion_options = self._options.get('_recursion', {})
current_depth = recursion_options.get('current', 0)
max_depth = recursion_options.get('max', MAX_DEPTH)
try:
executing_async = get_current_async()
# If this async is within an executing async, use the depth off
# that async. Otherwise use the depth set in the async's options.
current_depth = executing_async.recursion_depth
# If max_depth does not equal MAX_DEPTH, it is custom. Otherwise
# use the max_depth from the containing async.
if max_depth == MAX_DEPTH:
executing_options = executing_async.get_options().get(
'_recursion', {})
max_depth = executing_options.get('max', max_depth)
except errors.NotInContextError:
# This Async is not being constructed inside an executing Async.
pass
# Store the recursion info.
self.update_options(_recursion={'current': current_depth,
'max': max_depth}) | [
"def",
"_initialize_recursion_depth",
"(",
"self",
")",
":",
"from",
"furious",
".",
"context",
"import",
"get_current_async",
"recursion_options",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'_recursion'",
",",
"{",
"}",
")",
"current_depth",
"=",
"recursion_options",
".",
"get",
"(",
"'current'",
",",
"0",
")",
"max_depth",
"=",
"recursion_options",
".",
"get",
"(",
"'max'",
",",
"MAX_DEPTH",
")",
"try",
":",
"executing_async",
"=",
"get_current_async",
"(",
")",
"# If this async is within an executing async, use the depth off",
"# that async. Otherwise use the depth set in the async's options.",
"current_depth",
"=",
"executing_async",
".",
"recursion_depth",
"# If max_depth does not equal MAX_DEPTH, it is custom. Otherwise",
"# use the max_depth from the containing async.",
"if",
"max_depth",
"==",
"MAX_DEPTH",
":",
"executing_options",
"=",
"executing_async",
".",
"get_options",
"(",
")",
".",
"get",
"(",
"'_recursion'",
",",
"{",
"}",
")",
"max_depth",
"=",
"executing_options",
".",
"get",
"(",
"'max'",
",",
"max_depth",
")",
"except",
"errors",
".",
"NotInContextError",
":",
"# This Async is not being constructed inside an executing Async.",
"pass",
"# Store the recursion info.",
"self",
".",
"update_options",
"(",
"_recursion",
"=",
"{",
"'current'",
":",
"current_depth",
",",
"'max'",
":",
"max_depth",
"}",
")"
] | Ensure recursion info is initialized; if not, initialize it. | [
"Ensure",
"recursion",
"info",
"is",
"initialized",
"if",
"not",
"initialize",
"it",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L208-L237 |
Workiva/furious | furious/async.py | Async.check_recursion_depth | def check_recursion_depth(self):
"""Check recursion depth, raise AsyncRecursionError if too deep."""
from furious.async import MAX_DEPTH
recursion_options = self._options.get('_recursion', {})
max_depth = recursion_options.get('max', MAX_DEPTH)
# Check if recursion check has been disabled, then check depth.
if (max_depth != DISABLE_RECURSION_CHECK and
self.recursion_depth > max_depth):
raise errors.AsyncRecursionError('Max recursion depth reached.') | python | def check_recursion_depth(self):
"""Check recursion depth, raise AsyncRecursionError if too deep."""
from furious.async import MAX_DEPTH
recursion_options = self._options.get('_recursion', {})
max_depth = recursion_options.get('max', MAX_DEPTH)
# Check if recursion check has been disabled, then check depth.
if (max_depth != DISABLE_RECURSION_CHECK and
self.recursion_depth > max_depth):
raise errors.AsyncRecursionError('Max recursion depth reached.') | [
"def",
"check_recursion_depth",
"(",
"self",
")",
":",
"from",
"furious",
".",
"async",
"import",
"MAX_DEPTH",
"recursion_options",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'_recursion'",
",",
"{",
"}",
")",
"max_depth",
"=",
"recursion_options",
".",
"get",
"(",
"'max'",
",",
"MAX_DEPTH",
")",
"# Check if recursion check has been disabled, then check depth.",
"if",
"(",
"max_depth",
"!=",
"DISABLE_RECURSION_CHECK",
"and",
"self",
".",
"recursion_depth",
">",
"max_depth",
")",
":",
"raise",
"errors",
".",
"AsyncRecursionError",
"(",
"'Max recursion depth reached.'",
")"
] | Check recursion depth, raise AsyncRecursionError if too deep. | [
"Check",
"recursion",
"depth",
"raise",
"AsyncRecursionError",
"if",
"too",
"deep",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L239-L249 |
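A sketch using only methods shown in these rows (update_options appears below; DISABLE_RECURSION_CHECK is the module constant the comparison tests):

from furious.async import Async, DISABLE_RECURSION_CHECK

a = Async('logging.info')
a.update_options(_recursion={'current': 0, 'max': DISABLE_RECURSION_CHECK})
a.check_recursion_depth()  # never raises while the check is disabled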
Workiva/furious | furious/async.py | Async._update_job | def _update_job(self, target, args, kwargs):
"""Specify the function this async job is to execute when run."""
target_path, options = get_function_path_and_options(target)
assert isinstance(args, (tuple, list)) or args is None
assert isinstance(kwargs, dict) or kwargs is None
if options:
self.update_options(**options)
self._options['job'] = (target_path, args, kwargs) | python | def _update_job(self, target, args, kwargs):
"""Specify the function this async job is to execute when run."""
target_path, options = get_function_path_and_options(target)
assert isinstance(args, (tuple, list)) or args is None
assert isinstance(kwargs, dict) or kwargs is None
if options:
self.update_options(**options)
self._options['job'] = (target_path, args, kwargs) | [
"def",
"_update_job",
"(",
"self",
",",
"target",
",",
"args",
",",
"kwargs",
")",
":",
"target_path",
",",
"options",
"=",
"get_function_path_and_options",
"(",
"target",
")",
"assert",
"isinstance",
"(",
"args",
",",
"(",
"tuple",
",",
"list",
")",
")",
"or",
"args",
"is",
"None",
"assert",
"isinstance",
"(",
"kwargs",
",",
"dict",
")",
"or",
"kwargs",
"is",
"None",
"if",
"options",
":",
"self",
".",
"update_options",
"(",
"*",
"*",
"options",
")",
"self",
".",
"_options",
"[",
"'job'",
"]",
"=",
"(",
"target_path",
",",
"args",
",",
"kwargs",
")"
] | Specify the function this async job is to execute when run. | [
"Specify",
"the",
"function",
"this",
"async",
"job",
"is",
"to",
"execute",
"when",
"run",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L251-L261 |
Workiva/furious | furious/async.py | Async.set_execution_context | def set_execution_context(self, execution_context):
"""Set the ExecutionContext this async is executing under."""
if self._execution_context:
raise errors.AlreadyInContextError
self._execution_context = execution_context | python | def set_execution_context(self, execution_context):
"""Set the ExecutionContext this async is executing under."""
if self._execution_context:
raise errors.AlreadyInContextError
self._execution_context = execution_context | [
"def",
"set_execution_context",
"(",
"self",
",",
"execution_context",
")",
":",
"if",
"self",
".",
"_execution_context",
":",
"raise",
"errors",
".",
"AlreadyInContextError",
"self",
".",
"_execution_context",
"=",
"execution_context"
] | Set the ExecutionContext this async is executing under. | [
"Set",
"the",
"ExecutionContext",
"this",
"async",
"is",
"executing",
"under",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L263-L268 |
Workiva/furious | furious/async.py | Async.update_options | def update_options(self, **options):
"""Safely update this async job's configuration options."""
_check_options(options)
if 'persistence_engine' in options:
options['persistence_engine'] = reference_to_path(
options['persistence_engine'])
if 'id' in options:
self._id = options['id']
self._options.update(options) | python | def update_options(self, **options):
"""Safely update this async job's configuration options."""
_check_options(options)
if 'persistence_engine' in options:
options['persistence_engine'] = reference_to_path(
options['persistence_engine'])
if 'id' in options:
self._id = options['id']
self._options.update(options) | [
"def",
"update_options",
"(",
"self",
",",
"*",
"*",
"options",
")",
":",
"_check_options",
"(",
"options",
")",
"if",
"'persistence_engine'",
"in",
"options",
":",
"options",
"[",
"'persistence_engine'",
"]",
"=",
"reference_to_path",
"(",
"options",
"[",
"'persistence_engine'",
"]",
")",
"if",
"'id'",
"in",
"options",
":",
"self",
".",
"_id",
"=",
"options",
"[",
"'id'",
"]",
"self",
".",
"_options",
".",
"update",
"(",
"options",
")"
] | Safely update this async job's configuration options. | [
"Safely",
"update",
"this",
"async",
"job",
"s",
"configuration",
"options",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L274-L286 |
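A short usage sketch for update_options, highlighting the two special-cased keys visible above: a 'persistence_engine' value is converted to a dotted-path string via reference_to_path, and an 'id' value is also mirrored onto the instance. The job target and the choice of engine module are illustrative, not prescribed.

    from furious.async import Async
    from furious.extras.appengine import ndb_persistence  # assumed engine

    job = Async(target='tasks.process_data')
    # 'persistence_engine' is stored internally as a dotted path;
    # 'id' is copied onto the private _id attribute as well.
    job.update_options(id='job-0001', persistence_engine=ndb_persistence)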
Workiva/furious | furious/async.py | Async.to_task | def to_task(self):
"""Return a task object representing this async job."""
from google.appengine.api.taskqueue import Task
from google.appengine.api.taskqueue import TaskRetryOptions
self._increment_recursion_level()
self.check_recursion_depth()
url = "%s/%s" % (ASYNC_ENDPOINT, self.function_path)
kwargs = {
'url': url,
'headers': self.get_headers().copy(),
'payload': json.dumps(self.to_dict())
}
kwargs.update(copy.deepcopy(self.get_task_args()))
# Set task_retry_limit
retry_options = copy.deepcopy(DEFAULT_RETRY_OPTIONS)
retry_options.update(kwargs.pop('retry_options', {}))
kwargs['retry_options'] = TaskRetryOptions(**retry_options)
return Task(**kwargs) | python | def to_task(self):
"""Return a task object representing this async job."""
from google.appengine.api.taskqueue import Task
from google.appengine.api.taskqueue import TaskRetryOptions
self._increment_recursion_level()
self.check_recursion_depth()
url = "%s/%s" % (ASYNC_ENDPOINT, self.function_path)
kwargs = {
'url': url,
'headers': self.get_headers().copy(),
'payload': json.dumps(self.to_dict())
}
kwargs.update(copy.deepcopy(self.get_task_args()))
# Set task_retry_limit
retry_options = copy.deepcopy(DEFAULT_RETRY_OPTIONS)
retry_options.update(kwargs.pop('retry_options', {}))
kwargs['retry_options'] = TaskRetryOptions(**retry_options)
return Task(**kwargs) | [
"def",
"to_task",
"(",
"self",
")",
":",
"from",
"google",
".",
"appengine",
".",
"api",
".",
"taskqueue",
"import",
"Task",
"from",
"google",
".",
"appengine",
".",
"api",
".",
"taskqueue",
"import",
"TaskRetryOptions",
"self",
".",
"_increment_recursion_level",
"(",
")",
"self",
".",
"check_recursion_depth",
"(",
")",
"url",
"=",
"\"%s/%s\"",
"%",
"(",
"ASYNC_ENDPOINT",
",",
"self",
".",
"function_path",
")",
"kwargs",
"=",
"{",
"'url'",
":",
"url",
",",
"'headers'",
":",
"self",
".",
"get_headers",
"(",
")",
".",
"copy",
"(",
")",
",",
"'payload'",
":",
"json",
".",
"dumps",
"(",
"self",
".",
"to_dict",
"(",
")",
")",
"}",
"kwargs",
".",
"update",
"(",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"get_task_args",
"(",
")",
")",
")",
"# Set task_retry_limit",
"retry_options",
"=",
"copy",
".",
"deepcopy",
"(",
"DEFAULT_RETRY_OPTIONS",
")",
"retry_options",
".",
"update",
"(",
"kwargs",
".",
"pop",
"(",
"'retry_options'",
",",
"{",
"}",
")",
")",
"kwargs",
"[",
"'retry_options'",
"]",
"=",
"TaskRetryOptions",
"(",
"*",
"*",
"retry_options",
")",
"return",
"Task",
"(",
"*",
"*",
"kwargs",
")"
] | Return a task object representing this async job. | [
"Return",
"a",
"task",
"object",
"representing",
"this",
"async",
"job",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L305-L327 |
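For orientation, a sketch of what to_task produces: a taskqueue.Task whose URL joins ASYNC_ENDPOINT with the function path and whose payload is the JSON-serialized options dict. Running it requires the App Engine SDK on the path; the target name is made up.

    import json
    from furious.async import Async

    job = Async(target='tasks.process_data', args=(42,))
    task = job.to_task()  # also bumps and checks the recursion depth

    # The payload round-trips through JSON, so it can be inspected.
    options = json.loads(task.payload)
    assert options['job'][0] == 'tasks.process_data'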
Workiva/furious | furious/async.py | Async.start | def start(self, transactional=False, async=False, rpc=None):
"""Insert the task into the requested queue, 'default' if non given.
If a TransientError is hit the task will re-insert the task. If a
TaskAlreadyExistsError or TombstonedTaskError is hit the task will
silently fail.
If the async flag is set, then the add will be done asynchronously and
the return value will be the rpc object; otherwise the return value is
the task itself. If the rpc kwarg is provided, but we're not in async
mode, then it is ignored.
"""
from google.appengine.api import taskqueue
task = self.to_task()
queue = taskqueue.Queue(name=self.get_queue())
retry_transient = self._options.get('retry_transient_errors', True)
retry_delay = self._options.get('retry_delay', RETRY_SLEEP_SECS)
add = queue.add
if async:
add = partial(queue.add_async, rpc=rpc)
try:
ret = add(task, transactional=transactional)
except taskqueue.TransientError:
# Always re-raise for transactional insert, or if specified by
# options.
if transactional or not retry_transient:
raise
time.sleep(retry_delay)
ret = add(task, transactional=transactional)
except (taskqueue.TaskAlreadyExistsError,
taskqueue.TombstonedTaskError):
return
# TODO: Return a "result" object.
return ret | python | def start(self, transactional=False, async=False, rpc=None):
"""Insert the task into the requested queue, 'default' if non given.
If a TransientError is hit the task will re-insert the task. If a
TaskAlreadyExistsError or TombstonedTaskError is hit the task will
silently fail.
If the async flag is set, then the add will be done asynchronously and
the return value will be the rpc object; otherwise the return value is
the task itself. If the rpc kwarg is provided, but we're not in async
mode, then it is ignored.
"""
from google.appengine.api import taskqueue
task = self.to_task()
queue = taskqueue.Queue(name=self.get_queue())
retry_transient = self._options.get('retry_transient_errors', True)
retry_delay = self._options.get('retry_delay', RETRY_SLEEP_SECS)
add = queue.add
if async:
add = partial(queue.add_async, rpc=rpc)
try:
ret = add(task, transactional=transactional)
except taskqueue.TransientError:
# Always re-raise for transactional insert, or if specified by
# options.
if transactional or not retry_transient:
raise
time.sleep(retry_delay)
ret = add(task, transactional=transactional)
except (taskqueue.TaskAlreadyExistsError,
taskqueue.TombstonedTaskError):
return
# TODO: Return a "result" object.
return ret | [
"def",
"start",
"(",
"self",
",",
"transactional",
"=",
"False",
",",
"async",
"=",
"False",
",",
"rpc",
"=",
"None",
")",
":",
"from",
"google",
".",
"appengine",
".",
"api",
"import",
"taskqueue",
"task",
"=",
"self",
".",
"to_task",
"(",
")",
"queue",
"=",
"taskqueue",
".",
"Queue",
"(",
"name",
"=",
"self",
".",
"get_queue",
"(",
")",
")",
"retry_transient",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'retry_transient_errors'",
",",
"True",
")",
"retry_delay",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'retry_delay'",
",",
"RETRY_SLEEP_SECS",
")",
"add",
"=",
"queue",
".",
"add",
"if",
"async",
":",
"add",
"=",
"partial",
"(",
"queue",
".",
"add_async",
",",
"rpc",
"=",
"rpc",
")",
"try",
":",
"ret",
"=",
"add",
"(",
"task",
",",
"transactional",
"=",
"transactional",
")",
"except",
"taskqueue",
".",
"TransientError",
":",
"# Always re-raise for transactional insert, or if specified by",
"# options.",
"if",
"transactional",
"or",
"not",
"retry_transient",
":",
"raise",
"time",
".",
"sleep",
"(",
"retry_delay",
")",
"ret",
"=",
"add",
"(",
"task",
",",
"transactional",
"=",
"transactional",
")",
"except",
"(",
"taskqueue",
".",
"TaskAlreadyExistsError",
",",
"taskqueue",
".",
"TombstonedTaskError",
")",
":",
"return",
"# TODO: Return a \"result\" object.",
"return",
"ret"
] | Insert the task into the requested queue, 'default' if none is given.
If a TransientError is hit, the insert is retried once. If a
TaskAlreadyExistsError or TombstonedTaskError is hit, the task
silently fails.
If the async flag is set, then the add will be done asynchronously and
the return value will be the rpc object; otherwise the return value is
the task itself. If the rpc kwarg is provided, but we're not in async
mode, then it is ignored. | [
"Insert",
"the",
"task",
"into",
"the",
"requested",
"queue",
"default",
"if",
"non",
"given",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L329-L368 |
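A sketch of the three insertion paths through start shown above; the queue name and target are illustrative, and the 'queue' option is assumed to be what get_queue() reads.

    from furious.async import Async

    job = Async(target='tasks.process_data', queue='work-queue')

    # Plain insert; a TransientError is retried once after a short sleep.
    job.start()

    # Transactional insert; transient errors are re-raised to the caller:
    #     job.start(transactional=True)
    # Asynchronous insert; the return value is the taskqueue RPC object:
    #     rpc = job.start(async=True)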
Workiva/furious | furious/async.py | Async.from_dict | def from_dict(cls, async):
"""Return an async job from a dict output by Async.to_dict."""
async_options = decode_async_options(async)
target, args, kwargs = async_options.pop('job')
return cls(target, args, kwargs, **async_options) | python | def from_dict(cls, async):
"""Return an async job from a dict output by Async.to_dict."""
async_options = decode_async_options(async)
target, args, kwargs = async_options.pop('job')
return cls(target, args, kwargs, **async_options) | [
"def",
"from_dict",
"(",
"cls",
",",
"async",
")",
":",
"async_options",
"=",
"decode_async_options",
"(",
"async",
")",
"target",
",",
"args",
",",
"kwargs",
"=",
"async_options",
".",
"pop",
"(",
"'job'",
")",
"return",
"cls",
"(",
"target",
",",
"args",
",",
"kwargs",
",",
"*",
"*",
"async_options",
")"
] | Return an async job from a dict output by Async.to_dict. | [
"Return",
"an",
"async",
"job",
"from",
"a",
"dict",
"output",
"by",
"Async",
".",
"to_dict",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L381-L387 |
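from_dict is the inverse of to_dict, which is how an Async survives serialization into a task payload. A round-trip sketch (target name illustrative; keyword use of args/kwargs is assumed from the signature from_dict calls):

    from furious.async import Async

    original = Async('tasks.process_data', args=(1, 2), kwargs={'x': 3})

    # to_dict() emits the options dict; from_dict() rebuilds the job.
    restored = Async.from_dict(original.to_dict())
    assert restored.function_path == original.function_path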
Workiva/furious | furious/async.py | Async._prepare_persistence_engine | def _prepare_persistence_engine(self):
"""Load the specified persistence engine, or the default if none is
set.
"""
if self._persistence_engine:
return
persistence_engine = self._options.get('persistence_engine')
if persistence_engine:
self._persistence_engine = path_to_reference(persistence_engine)
return
from furious.config import get_default_persistence_engine
self._persistence_engine = get_default_persistence_engine() | python | def _prepare_persistence_engine(self):
"""Load the specified persistence engine, or the default if none is
set.
"""
if self._persistence_engine:
return
persistence_engine = self._options.get('persistence_engine')
if persistence_engine:
self._persistence_engine = path_to_reference(persistence_engine)
return
from furious.config import get_default_persistence_engine
self._persistence_engine = get_default_persistence_engine() | [
"def",
"_prepare_persistence_engine",
"(",
"self",
")",
":",
"if",
"self",
".",
"_persistence_engine",
":",
"return",
"persistence_engine",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'persistence_engine'",
")",
"if",
"persistence_engine",
":",
"self",
".",
"_persistence_engine",
"=",
"path_to_reference",
"(",
"persistence_engine",
")",
"return",
"from",
"furious",
".",
"config",
"import",
"get_default_persistence_engine",
"self",
".",
"_persistence_engine",
"=",
"get_default_persistence_engine",
"(",
")"
] | Load the specified persistence engine, or the default if none is
set. | [
"Load",
"the",
"specified",
"persistence",
"engine",
"or",
"the",
"default",
"if",
"none",
"is",
"set",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L389-L403 |
Workiva/furious | furious/async.py | Async._get_context_id | def _get_context_id(self):
"""If this async is in a context set the context id."""
from furious.context import get_current_context
context_id = self._options.get('context_id')
if context_id:
return context_id
try:
context = get_current_context()
except errors.NotInContextError:
context = None
self.update_options(context_id=None)
if context:
context_id = context.id
self.update_options(context_id=context_id)
return context_id | python | def _get_context_id(self):
"""If this async is in a context set the context id."""
from furious.context import get_current_context
context_id = self._options.get('context_id')
if context_id:
return context_id
try:
context = get_current_context()
except errors.NotInContextError:
context = None
self.update_options(context_id=None)
if context:
context_id = context.id
self.update_options(context_id=context_id)
return context_id | [
"def",
"_get_context_id",
"(",
"self",
")",
":",
"from",
"furious",
".",
"context",
"import",
"get_current_context",
"context_id",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'context_id'",
")",
"if",
"context_id",
":",
"return",
"context_id",
"try",
":",
"context",
"=",
"get_current_context",
"(",
")",
"except",
"errors",
".",
"NotInContextError",
":",
"context",
"=",
"None",
"self",
".",
"update_options",
"(",
"context_id",
"=",
"None",
")",
"if",
"context",
":",
"context_id",
"=",
"context",
".",
"id",
"self",
".",
"update_options",
"(",
"context_id",
"=",
"context_id",
")",
"return",
"context_id"
] | If this async is in a context, set the context id. | [
"If",
"this",
"async",
"is",
"in",
"a",
"context",
"set",
"the",
"context",
"id",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L405-L425 |
Workiva/furious | furious/async.py | Async._get_parent_id | def _get_parent_id(self):
"""If this async is in within another async set that async id as the
parent.
"""
parent_id = self._options.get('parent_id')
if parent_id:
return parent_id
from furious.context import get_current_async
try:
async = get_current_async()
except errors.NotInContextError:
async = None
if async:
parent_id = ":".join([async.parent_id.split(":")[0], async.id])
else:
parent_id = self.request_id
self.update_options(parent_id=parent_id)
return parent_id | python | def _get_parent_id(self):
"""If this async is in within another async set that async id as the
parent.
"""
parent_id = self._options.get('parent_id')
if parent_id:
return parent_id
from furious.context import get_current_async
try:
async = get_current_async()
except errors.NotInContextError:
async = None
if async:
parent_id = ":".join([async.parent_id.split(":")[0], async.id])
else:
parent_id = self.request_id
self.update_options(parent_id=parent_id)
return parent_id | [
"def",
"_get_parent_id",
"(",
"self",
")",
":",
"parent_id",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'parent_id'",
")",
"if",
"parent_id",
":",
"return",
"parent_id",
"from",
"furious",
".",
"context",
"import",
"get_current_async",
"try",
":",
"async",
"=",
"get_current_async",
"(",
")",
"except",
"errors",
".",
"NotInContextError",
":",
"async",
"=",
"None",
"if",
"async",
":",
"parent_id",
"=",
"\":\"",
".",
"join",
"(",
"[",
"async",
".",
"parent_id",
".",
"split",
"(",
"\":\"",
")",
"[",
"0",
"]",
",",
"async",
".",
"id",
"]",
")",
"else",
":",
"parent_id",
"=",
"self",
".",
"request_id",
"self",
".",
"update_options",
"(",
"parent_id",
"=",
"parent_id",
")",
"return",
"parent_id"
] | If this async is within another async, set that async's id as the
parent. | [
"If",
"this",
"async",
"is",
"in",
"within",
"another",
"async",
"set",
"that",
"async",
"id",
"as",
"the",
"parent",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L427-L449 |
Workiva/furious | furious/async.py | Async.full_id | def full_id(self):
"""Return the full_id for this Async. Consists of the parent id, id and
context id.
"""
full_id = ""
if self.parent_id:
full_id = ":".join([self.parent_id, self.id])
else:
full_id = self.id
if self.context_id:
full_id = "|".join([full_id, self.context_id])
return full_id | python | def full_id(self):
"""Return the full_id for this Async. Consists of the parent id, id and
context id.
"""
full_id = ""
if self.parent_id:
full_id = ":".join([self.parent_id, self.id])
else:
full_id = self.id
if self.context_id:
full_id = "|".join([full_id, self.context_id])
return full_id | [
"def",
"full_id",
"(",
"self",
")",
":",
"full_id",
"=",
"\"\"",
"if",
"self",
".",
"parent_id",
":",
"full_id",
"=",
"\":\"",
".",
"join",
"(",
"[",
"self",
".",
"parent_id",
",",
"self",
".",
"id",
"]",
")",
"else",
":",
"full_id",
"=",
"self",
".",
"id",
"if",
"self",
".",
"context_id",
":",
"full_id",
"=",
"\"|\"",
".",
"join",
"(",
"[",
"full_id",
",",
"self",
".",
"context_id",
"]",
")",
"return",
"full_id"
] | Return the full_id for this Async. Consists of the parent id, id and
context id. | [
"Return",
"the",
"full_id",
"for",
"this",
"Async",
".",
"Consists",
"of",
"the",
"parent",
"id",
"id",
"and",
"context",
"id",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L477-L491 |
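Reading full_id together with _get_parent_id above: the identifier takes the shape parent_id:id, with |context_id appended when the job belongs to a context. A tiny illustration with made-up ids:

    # Illustrative only; real ids are generated by the library.
    parent_id, own_id, context_id = 'request-1:async-a', 'async-b', 'ctx-9'

    full_id = ':'.join([parent_id, own_id])    # 'request-1:async-a:async-b'
    full_id = '|'.join([full_id, context_id])  # 'request-1:async-a:async-b|ctx-9'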
Workiva/furious | furious/async.py | Async._increment_recursion_level | def _increment_recursion_level(self):
"""Increment current_depth based on either defaults or the enclosing
Async.
"""
# Update the recursion info. This is done so that if an async created
# outside an executing context, or one previously created is later
# loaded from storage, that the "current" setting is correctly set.
self._initialize_recursion_depth()
recursion_options = self._options.get('_recursion', {})
current_depth = recursion_options.get('current', 0) + 1
max_depth = recursion_options.get('max', MAX_DEPTH)
# Increment and store
self.update_options(_recursion={'current': current_depth,
'max': max_depth}) | python | def _increment_recursion_level(self):
"""Increment current_depth based on either defaults or the enclosing
Async.
"""
# Update the recursion info. This is done so that if an async created
# outside an executing context, or one previously created is later
# loaded from storage, that the "current" setting is correctly set.
self._initialize_recursion_depth()
recursion_options = self._options.get('_recursion', {})
current_depth = recursion_options.get('current', 0) + 1
max_depth = recursion_options.get('max', MAX_DEPTH)
# Increment and store
self.update_options(_recursion={'current': current_depth,
'max': max_depth}) | [
"def",
"_increment_recursion_level",
"(",
"self",
")",
":",
"# Update the recursion info. This is done so that if an async created",
"# outside an executing context, or one previously created is later",
"# loaded from storage, that the \"current\" setting is correctly set.",
"self",
".",
"_initialize_recursion_depth",
"(",
")",
"recursion_options",
"=",
"self",
".",
"_options",
".",
"get",
"(",
"'_recursion'",
",",
"{",
"}",
")",
"current_depth",
"=",
"recursion_options",
".",
"get",
"(",
"'current'",
",",
"0",
")",
"+",
"1",
"max_depth",
"=",
"recursion_options",
".",
"get",
"(",
"'max'",
",",
"MAX_DEPTH",
")",
"# Increment and store",
"self",
".",
"update_options",
"(",
"_recursion",
"=",
"{",
"'current'",
":",
"current_depth",
",",
"'max'",
":",
"max_depth",
"}",
")"
] | Increment current_depth based on either defaults or the enclosing
Async. | [
"Increment",
"current_depth",
"based",
"on",
"either",
"defaults",
"or",
"the",
"enclosing",
"Async",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L497-L512 |
Workiva/furious | furious/async.py | Async.context_id | def context_id(self):
"""Return this Async's Context Id if it exists."""
if not self._context_id:
self._context_id = self._get_context_id()
self.update_options(context_id=self._context_id)
return self._context_id | python | def context_id(self):
"""Return this Async's Context Id if it exists."""
if not self._context_id:
self._context_id = self._get_context_id()
self.update_options(context_id=self._context_id)
return self._context_id | [
"def",
"context_id",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_context_id",
":",
"self",
".",
"_context_id",
"=",
"self",
".",
"_get_context_id",
"(",
")",
"self",
".",
"update_options",
"(",
"context_id",
"=",
"self",
".",
"_context_id",
")",
"return",
"self",
".",
"_context_id"
] | Return this Async's Context Id if it exists. | [
"Return",
"this",
"Async",
"s",
"Context",
"Id",
"if",
"it",
"exists",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L515-L521 |
Workiva/furious | furious/async.py | AsyncResult._payload_to_dict | def _payload_to_dict(self):
"""When an error status the payload is holding an AsyncException that
is converted to a serializable dict.
"""
if self.status != self.ERROR or not self.payload:
return self.payload
import traceback
return {
"error": self.payload.error,
"args": self.payload.args,
"traceback": traceback.format_exception(*self.payload.traceback)
} | python | def _payload_to_dict(self):
"""When an error status the payload is holding an AsyncException that
is converted to a serializable dict.
"""
if self.status != self.ERROR or not self.payload:
return self.payload
import traceback
return {
"error": self.payload.error,
"args": self.payload.args,
"traceback": traceback.format_exception(*self.payload.traceback)
} | [
"def",
"_payload_to_dict",
"(",
"self",
")",
":",
"if",
"self",
".",
"status",
"!=",
"self",
".",
"ERROR",
"or",
"not",
"self",
".",
"payload",
":",
"return",
"self",
".",
"payload",
"import",
"traceback",
"return",
"{",
"\"error\"",
":",
"self",
".",
"payload",
".",
"error",
",",
"\"args\"",
":",
"self",
".",
"payload",
".",
"args",
",",
"\"traceback\"",
":",
"traceback",
".",
"format_exception",
"(",
"*",
"self",
".",
"payload",
".",
"traceback",
")",
"}"
] | When the status is an error, the payload holds an AsyncException that
is converted to a serializable dict. | [
"When",
"an",
"error",
"status",
"the",
"payload",
"is",
"holding",
"an",
"AsyncException",
"that",
"is",
"converted",
"to",
"a",
"serializable",
"dict",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/async.py#L574-L587 |
upsight/doctor | doctor/routing.py | put | def put(func: Callable, allowed_exceptions: List = None,
title: str = None, req_obj_type: Callable = None) -> HTTPMethod:
"""Returns a HTTPMethod instance to create a PUT route.
:see: :class:`~doctor.routing.HTTPMethod`
"""
return HTTPMethod('put', func, allowed_exceptions=allowed_exceptions,
title=title, req_obj_type=req_obj_type) | python | def put(func: Callable, allowed_exceptions: List = None,
title: str = None, req_obj_type: Callable = None) -> HTTPMethod:
"""Returns a HTTPMethod instance to create a PUT route.
:see: :class:`~doctor.routing.HTTPMethod`
"""
return HTTPMethod('put', func, allowed_exceptions=allowed_exceptions,
title=title, req_obj_type=req_obj_type) | [
"def",
"put",
"(",
"func",
":",
"Callable",
",",
"allowed_exceptions",
":",
"List",
"=",
"None",
",",
"title",
":",
"str",
"=",
"None",
",",
"req_obj_type",
":",
"Callable",
"=",
"None",
")",
"->",
"HTTPMethod",
":",
"return",
"HTTPMethod",
"(",
"'put'",
",",
"func",
",",
"allowed_exceptions",
"=",
"allowed_exceptions",
",",
"title",
"=",
"title",
",",
"req_obj_type",
"=",
"req_obj_type",
")"
] | Returns an HTTPMethod instance to create a PUT route.
:see: :class:`~doctor.routing.HTTPMethod` | [
"Returns",
"a",
"HTTPMethod",
"instance",
"to",
"create",
"a",
"PUT",
"route",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/routing.py#L78-L85 |
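A hedged sketch of how put slots into doctor's routing: wrap a logic function as an HTTPMethod and hand it to a Route. The Route signature here is inferred from the attributes create_routes reads below (route, methods, heading, and so on), not taken from doctor's docs, and the note-updating logic is made up.

    from doctor.routing import Route, put

    def update_note(note_id: int, body: str) -> dict:
        """Logic function; doctor derives request validation from it."""
        return {'note_id': note_id, 'body': body}

    routes = (
        Route('/note/<int:note_id>/', methods=(put(update_note),),
              heading='Notes'),
    )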
upsight/doctor | doctor/routing.py | create_http_method | def create_http_method(logic: Callable, http_method: str,
handle_http: Callable, before: Callable = None,
after: Callable = None) -> Callable:
"""Create a handler method to be used in a handler class.
:param callable logic: The underlying function to execute with the
parsed and validated parameters.
:param str http_method: HTTP method this will handle.
:param handle_http: The HTTP handler function that should be
used to wrap the logic functions.
:param before: A function to be called before the logic function associated
with the route.
:param after: A function to be called after the logic function associated
with the route.
:returns: A handler function.
"""
@functools.wraps(logic)
def fn(handler, *args, **kwargs):
if before is not None and callable(before):
before()
result = handle_http(handler, args, kwargs, logic)
if after is not None and callable(after):
after(result)
return result
return fn | python | def create_http_method(logic: Callable, http_method: str,
handle_http: Callable, before: Callable = None,
after: Callable = None) -> Callable:
"""Create a handler method to be used in a handler class.
:param callable logic: The underlying function to execute with the
parsed and validated parameters.
:param str http_method: HTTP method this will handle.
:param handle_http: The HTTP handler function that should be
used to wrap the logic functions.
:param before: A function to be called before the logic function associated
with the route.
:param after: A function to be called after the logic function associated
with the route.
:returns: A handler function.
"""
@functools.wraps(logic)
def fn(handler, *args, **kwargs):
if before is not None and callable(before):
before()
result = handle_http(handler, args, kwargs, logic)
if after is not None and callable(after):
after(result)
return result
return fn | [
"def",
"create_http_method",
"(",
"logic",
":",
"Callable",
",",
"http_method",
":",
"str",
",",
"handle_http",
":",
"Callable",
",",
"before",
":",
"Callable",
"=",
"None",
",",
"after",
":",
"Callable",
"=",
"None",
")",
"->",
"Callable",
":",
"@",
"functools",
".",
"wraps",
"(",
"logic",
")",
"def",
"fn",
"(",
"handler",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"before",
"is",
"not",
"None",
"and",
"callable",
"(",
"before",
")",
":",
"before",
"(",
")",
"result",
"=",
"handle_http",
"(",
"handler",
",",
"args",
",",
"kwargs",
",",
"logic",
")",
"if",
"after",
"is",
"not",
"None",
"and",
"callable",
"(",
"after",
")",
":",
"after",
"(",
"result",
")",
"return",
"result",
"return",
"fn"
] | Create a handler method to be used in a handler class.
:param callable logic: The underlying function to execute with the
parsed and validated parameters.
:param str http_method: HTTP method this will handle.
:param handle_http: The HTTP handler function that should be
used to wrap the logic functions.
:param before: A function to be called before the logic function associated
with the route.
:param after: A function to be called after the logic function associated
with the route.
:returns: A handler function. | [
"Create",
"a",
"handler",
"method",
"to",
"be",
"used",
"in",
"a",
"handler",
"class",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/routing.py#L88-L112 |
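To see the wrapper in action outside a real app, a runnable sketch with stand-ins: the handle_http function and both hooks below are made up, but the call goes through create_http_method exactly as documented above.

    from doctor.routing import create_http_method

    def handle_http(handler, args, kwargs, logic):
        # Stand-in for the framework's request/response plumbing.
        return logic(*args, **kwargs)

    wrapped = create_http_method(
        lambda x: x * 2, 'get', handle_http,
        before=lambda: print('before: request received'),
        after=lambda result: print('after: got', result))

    print(wrapped(None, 21))  # hooks fire around the logic; prints 42 last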
upsight/doctor | doctor/routing.py | get_handler_name | def get_handler_name(route: Route, logic: Callable) -> str:
"""Gets the handler name.
:param route: A Route instance.
:param logic: The logic function.
:returns: A handler class name.
"""
if route.handler_name is not None:
return route.handler_name
if any(m for m in route.methods if m.method.lower() == 'post'):
# A list endpoint
if route.heading != 'API':
return '{}ListHandler'.format(get_valid_class_name(route.heading))
return '{}ListHandler'.format(get_valid_class_name(logic.__name__))
if route.heading != 'API':
return '{}Handler'.format(get_valid_class_name(route.heading))
return '{}Handler'.format(get_valid_class_name(logic.__name__)) | python | def get_handler_name(route: Route, logic: Callable) -> str:
"""Gets the handler name.
:param route: A Route instance.
:param logic: The logic function.
:returns: A handler class name.
"""
if route.handler_name is not None:
return route.handler_name
if any(m for m in route.methods if m.method.lower() == 'post'):
# A list endpoint
if route.heading != 'API':
return '{}ListHandler'.format(get_valid_class_name(route.heading))
return '{}ListHandler'.format(get_valid_class_name(logic.__name__))
if route.heading != 'API':
return '{}Handler'.format(get_valid_class_name(route.heading))
return '{}Handler'.format(get_valid_class_name(logic.__name__)) | [
"def",
"get_handler_name",
"(",
"route",
":",
"Route",
",",
"logic",
":",
"Callable",
")",
"->",
"str",
":",
"if",
"route",
".",
"handler_name",
"is",
"not",
"None",
":",
"return",
"route",
".",
"handler_name",
"if",
"any",
"(",
"m",
"for",
"m",
"in",
"route",
".",
"methods",
"if",
"m",
".",
"method",
".",
"lower",
"(",
")",
"==",
"'post'",
")",
":",
"# A list endpoint",
"if",
"route",
".",
"heading",
"!=",
"'API'",
":",
"return",
"'{}ListHandler'",
".",
"format",
"(",
"get_valid_class_name",
"(",
"route",
".",
"heading",
")",
")",
"return",
"'{}ListHandler'",
".",
"format",
"(",
"get_valid_class_name",
"(",
"logic",
".",
"__name__",
")",
")",
"if",
"route",
".",
"heading",
"!=",
"'API'",
":",
"return",
"'{}Handler'",
".",
"format",
"(",
"get_valid_class_name",
"(",
"route",
".",
"heading",
")",
")",
"return",
"'{}Handler'",
".",
"format",
"(",
"get_valid_class_name",
"(",
"logic",
".",
"__name__",
")",
")"
] | Gets the handler name.
:param route: A Route instance.
:param logic: The logic function.
:returns: A handler class name. | [
"Gets",
"the",
"handler",
"name",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/routing.py#L143-L159 |
upsight/doctor | doctor/routing.py | create_routes | def create_routes(routes: Tuple[HTTPMethod], handle_http: Callable,
default_base_handler_class: Any) -> List[Tuple[str, Any]]:
"""Creates handler routes from the provided routes.
:param routes: A tuple containing the route and another tuple with
all http methods allowed for the route.
:param handle_http: The HTTP handler function that should be
used to wrap the logic functions.
:param default_base_handler_class: The default base handler class that
should be used.
:returns: A list of tuples containing the route and generated handler.
"""
created_routes = []
all_handler_names = []
for r in routes:
handler = None
if r.base_handler_class is not None:
base_handler_class = r.base_handler_class
else:
base_handler_class = default_base_handler_class
# Define the handler name. To prevent issues where auto-generated
# handler names conflict with existing ones, append a number to the
# end of the handler name if it already exists.
handler_name = get_handler_name(r, r.methods[0].logic)
if handler_name in all_handler_names:
handler_name = '{}{}'.format(
handler_name, len(all_handler_names))
all_handler_names.append(handler_name)
for method in r.methods:
logic = method.logic
http_method = method.method
http_func = create_http_method(logic, http_method, handle_http,
before=r.before, after=r.after)
handler_methods_and_properties = {
'__name__': handler_name,
'_doctor_heading': r.heading,
'methods': set([http_method.upper()]),
http_method: http_func,
}
if handler is None:
handler = type(
handler_name, (base_handler_class,),
handler_methods_and_properties)
else:
setattr(handler, http_method, http_func)
# This is specific to Flask. Its MethodView class
# initializes the methods attribute in __new__ so we
# need to add all the other http methods we are defining
# on the handler after it gets created by type.
if hasattr(handler, 'methods'):
handler.methods.add(http_method.upper())
created_routes.append((r.route, handler))
return created_routes | python | def create_routes(routes: Tuple[HTTPMethod], handle_http: Callable,
default_base_handler_class: Any) -> List[Tuple[str, Any]]:
"""Creates handler routes from the provided routes.
:param routes: A tuple containing the route and another tuple with
all http methods allowed for the route.
:param handle_http: The HTTP handler function that should be
used to wrap the logic functions.
:param default_base_handler_class: The default base handler class that
should be used.
:returns: A list of tuples containing the route and generated handler.
"""
created_routes = []
all_handler_names = []
for r in routes:
handler = None
if r.base_handler_class is not None:
base_handler_class = r.base_handler_class
else:
base_handler_class = default_base_handler_class
# Define the handler name. To prevent issues where auto-generated
# handler names conflict with existing ones, append a number to the
# end of the handler name if it already exists.
handler_name = get_handler_name(r, r.methods[0].logic)
if handler_name in all_handler_names:
handler_name = '{}{}'.format(
handler_name, len(all_handler_names))
all_handler_names.append(handler_name)
for method in r.methods:
logic = method.logic
http_method = method.method
http_func = create_http_method(logic, http_method, handle_http,
before=r.before, after=r.after)
handler_methods_and_properties = {
'__name__': handler_name,
'_doctor_heading': r.heading,
'methods': set([http_method.upper()]),
http_method: http_func,
}
if handler is None:
handler = type(
handler_name, (base_handler_class,),
handler_methods_and_properties)
else:
setattr(handler, http_method, http_func)
# This is specific to Flask. Its MethodView class
# initializes the methods attribute in __new__ so we
# need to add all the other http methods we are defining
# on the handler after it gets created by type.
if hasattr(handler, 'methods'):
handler.methods.add(http_method.upper())
created_routes.append((r.route, handler))
return created_routes | [
"def",
"create_routes",
"(",
"routes",
":",
"Tuple",
"[",
"HTTPMethod",
"]",
",",
"handle_http",
":",
"Callable",
",",
"default_base_handler_class",
":",
"Any",
")",
"->",
"List",
"[",
"Tuple",
"[",
"str",
",",
"Any",
"]",
"]",
":",
"created_routes",
"=",
"[",
"]",
"all_handler_names",
"=",
"[",
"]",
"for",
"r",
"in",
"routes",
":",
"handler",
"=",
"None",
"if",
"r",
".",
"base_handler_class",
"is",
"not",
"None",
":",
"base_handler_class",
"=",
"r",
".",
"base_handler_class",
"else",
":",
"base_handler_class",
"=",
"default_base_handler_class",
"# Define the handler name. To prevent issues where auto-generated",
"# handler names conflict with existing, appending a number to the",
"# end of the hanlder name if it already exists.",
"handler_name",
"=",
"get_handler_name",
"(",
"r",
",",
"r",
".",
"methods",
"[",
"0",
"]",
".",
"logic",
")",
"if",
"handler_name",
"in",
"all_handler_names",
":",
"handler_name",
"=",
"'{}{}'",
".",
"format",
"(",
"handler_name",
",",
"len",
"(",
"all_handler_names",
")",
")",
"all_handler_names",
".",
"append",
"(",
"handler_name",
")",
"for",
"method",
"in",
"r",
".",
"methods",
":",
"logic",
"=",
"method",
".",
"logic",
"http_method",
"=",
"method",
".",
"method",
"http_func",
"=",
"create_http_method",
"(",
"logic",
",",
"http_method",
",",
"handle_http",
",",
"before",
"=",
"r",
".",
"before",
",",
"after",
"=",
"r",
".",
"after",
")",
"handler_methods_and_properties",
"=",
"{",
"'__name__'",
":",
"handler_name",
",",
"'_doctor_heading'",
":",
"r",
".",
"heading",
",",
"'methods'",
":",
"set",
"(",
"[",
"http_method",
".",
"upper",
"(",
")",
"]",
")",
",",
"http_method",
":",
"http_func",
",",
"}",
"if",
"handler",
"is",
"None",
":",
"handler",
"=",
"type",
"(",
"handler_name",
",",
"(",
"base_handler_class",
",",
")",
",",
"handler_methods_and_properties",
")",
"else",
":",
"setattr",
"(",
"handler",
",",
"http_method",
",",
"http_func",
")",
"# This is specific to Flask. Its MethodView class",
"# initializes the methods attribute in __new__ so we",
"# need to add all the other http methods we are defining",
"# on the handler after it gets created by type.",
"if",
"hasattr",
"(",
"handler",
",",
"'methods'",
")",
":",
"handler",
".",
"methods",
".",
"add",
"(",
"http_method",
".",
"upper",
"(",
")",
")",
"created_routes",
".",
"append",
"(",
"(",
"r",
".",
"route",
",",
"handler",
")",
")",
"return",
"created_routes"
] | Creates handler routes from the provided routes.
:param routes: A tuple containing the route and another tuple with
all http methods allowed for the route.
:param handle_http: The HTTP handler function that should be
used to wrap the logic functions.
:param default_base_handler_class: The default base handler class that
should be used.
:returns: A list of tuples containing the route and generated handler. | [
"Creates",
"handler",
"routes",
"from",
"the",
"provided",
"routes",
"."
] | train | https://github.com/upsight/doctor/blob/2cf1d433f6f1aa1355644b449a757c0660793cdd/doctor/routing.py#L162-L217 |
horejsek/python-sqlpuzzle | sqlpuzzle/_common/argsparser.py | parse_args | def parse_args(options={}, *args, **kwds):
"""
Parser of arguments.
dict options {
int min_items: Minimum number of items required to fold one tuple. (default: 1)
int max_items: Count of items in one tuple. Last `max_items-min_items`
items are by default set to None. (default: 1)
bool allow_dict: Flag allowing a dictionary as the first (and only)
argument or a dictionary as **kwds. (default: False)
bool allow_list: Flag allowing a list as the first (and only)
argument. (default: False)
}
Examples:
calling with min_items=1, max_items=2, allow_dict=False:
arg1, arg2 => ((arg1, None), (arg2, None))
(arg1a, arg1b), arg2 => ((arg1a, arg1b), (arg2, None))
arg1=val1 => FAIL
{key1: val1} => FAIL
calling with min_items=2, max_items=3, allow_dict=True:
arg1, arg2 => ((arg1, arg2, None),)
arg1, arg2, arg3 => ((arg1, arg2, arg3),)
(arg1a, arg1b, arg1c) => ((arg1a, arg1b, arg1c),)
arg1=val1, arg2=val2 => ((arg1, val1, None), (arg2, val2, None))
{key1: val1, key2: val2} => ((key1, val1, None), (key2, val2, None))
(arg1a, arg1b), arg2a, arg2b => FAIL
"""
parser_options = ParserOptions(options)
parser_input = ParserInput(args, kwds)
parser = Parser(parser_options, parser_input)
parser.parse()
return parser.output_data | python | def parse_args(options={}, *args, **kwds):
"""
Parser of arguments.
dict options {
int min_items: Minimum number of items required to fold one tuple. (default: 1)
int max_items: Count of items in one tuple. Last `max_items-min_items`
items are by default set to None. (default: 1)
bool allow_dict: Flag allowing a dictionary as the first (and only)
argument or a dictionary as **kwds. (default: False)
bool allow_list: Flag allowing a list as the first (and only)
argument. (default: False)
}
Examples:
calling with min_items=1, max_items=2, allow_dict=False:
arg1, arg2 => ((arg1, None), (arg2, None))
(arg1a, arg1b), arg2 => ((arg1a, arg1b), (arg2, None))
arg1=val1 => FAIL
{key1: val1} => FAIL
calling with min_items=2, max_items=3, allow_dict=True:
arg1, arg2 => ((arg1, arg2, None),)
arg1, arg2, arg3 => ((arg1, arg2, arg3),)
(arg1a, arg1b, arg1c) => ((arg1a, arg1b, arg1c),)
arg1=val1, arg2=val2 => ((arg1, val1, None), (arg2, val2, None))
{key1: val1, key2: val2} => ((key1, val1, None), (key2, val2, None))
(arg1a, arg1b), arg2a, arg2b => FAIL
"""
parser_options = ParserOptions(options)
parser_input = ParserInput(args, kwds)
parser = Parser(parser_options, parser_input)
parser.parse()
return parser.output_data | [
"def",
"parse_args",
"(",
"options",
"=",
"{",
"}",
",",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"parser_options",
"=",
"ParserOptions",
"(",
"options",
")",
"parser_input",
"=",
"ParserInput",
"(",
"args",
",",
"kwds",
")",
"parser",
"=",
"Parser",
"(",
"parser_options",
",",
"parser_input",
")",
"parser",
".",
"parse",
"(",
")",
"return",
"parser",
".",
"output_data"
] | Parser of arguments.
dict options {
int min_items: Minimum number of items required to fold one tuple. (default: 1)
int max_items: Count of items in one tuple. Last `max_items-min_items`
items are by default set to None. (default: 1)
bool allow_dict: Flag allowing a dictionary as the first (and only)
argument or a dictionary as **kwds. (default: False)
bool allow_list: Flag allowing a list as the first (and only)
argument. (default: False)
}
Examples:
calling with min_items=1, max_items=2, allow_dict=False:
arg1, arg2 => ((arg1, None), (arg2, None))
(arg1a, arg1b), arg2 => ((arg1a, arg1b), (arg2, None))
arg1=val1 => FAIL
{key1: val1} => FAIL
calling with min_items=2, max_items=3, allow_dict=True:
arg1, arg2 => ((arg1, arg2, None),)
arg1, arg2, arg3 => ((arg1, arg2, arg3),)
(arg1a, arg1b, arg1c) => ((arg1a, arg1b, arg1c),)
arg1=val1, arg2=val2 => ((arg1, val1, None), (arg2, val2, None))
{key1: val1, key2: val2} => ((key1, val1, None), (key2, val2, None))
(arg1a, arg1b), arg2a, arg2b => FAIL | [
"Parser",
"of",
"arguments",
"."
] | train | https://github.com/horejsek/python-sqlpuzzle/blob/d3a42ed1b339b8eafddb8d2c28a3a5832b3998dd/sqlpuzzle/_common/argsparser.py#L7-L42 |
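A concrete run of the docstring's second family of examples, assuming parse_args is importable from sqlpuzzle._common.argsparser as the source path suggests:

    from sqlpuzzle._common.argsparser import parse_args

    options = {'min_items': 2, 'max_items': 3, 'allow_dict': True}

    # A keyword argument folds into a (key, value, None) triple.
    print(parse_args(options, arg1='val1'))
    # expected, per the docstring: (('arg1', 'val1', None),)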
Workiva/furious | example/simple_workflow.py | simple_state_machine | def simple_state_machine():
"""Pick a number, if it is more than some cuttoff continue the chain."""
from random import random
from furious.async import Async
number = random()
logging.info('Generating a number... %s', number)
if number > 0.25:
logging.info('Continuing to do stuff.')
return Async(target=simple_state_machine)
return number | python | def simple_state_machine():
"""Pick a number, if it is more than some cuttoff continue the chain."""
from random import random
from furious.async import Async
number = random()
logging.info('Generating a number... %s', number)
if number > 0.25:
logging.info('Continuing to do stuff.')
return Async(target=simple_state_machine)
return number | [
"def",
"simple_state_machine",
"(",
")",
":",
"from",
"random",
"import",
"random",
"from",
"furious",
".",
"async",
"import",
"Async",
"number",
"=",
"random",
"(",
")",
"logging",
".",
"info",
"(",
"'Generating a number... %s'",
",",
"number",
")",
"if",
"number",
">",
"0.25",
":",
"logging",
".",
"info",
"(",
"'Continuing to do stuff.'",
")",
"return",
"Async",
"(",
"target",
"=",
"simple_state_machine",
")",
"return",
"number"
] | Pick a number; if it is more than some cutoff, continue the chain. | [
"Pick",
"a",
"number",
"if",
"it",
"is",
"more",
"than",
"some",
"cuttoff",
"continue",
"the",
"chain",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/example/simple_workflow.py#L44-L57 |
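Since the chain above stops with probability 0.25 at each step, a run executes four Asyncs on average (a geometric distribution with p = 0.25 has mean 1/p). A local simulation of that termination behavior, with no taskqueue involved:

    from random import random

    def average_chain_length(trials=10000):
        """Empirical mean chain length; should hover around 4."""
        total = 0
        for _ in range(trials):
            steps = 1
            while random() > 0.25:
                steps += 1
            total += steps
        return total / float(trials)

    print(average_chain_length())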
TriOptima/tri.form | lib/tri/form/render.py | render_attrs | def render_attrs(attrs):
"""
Render HTML attributes, or return '' if no attributes need to be rendered.
"""
if attrs is not None:
def parts():
for key, value in sorted(attrs.items()):
if value is None:
continue
if value is True:
yield '%s' % (key, )
continue
if key == 'class' and isinstance(value, dict):
if not value:
continue
value = render_class(value)
if key == 'style' and isinstance(value, dict):
if not value:
continue
value = render_style(value)
yield '%s="%s"' % (key, ('%s' % value).replace('"', '"'))
return mark_safe(' %s' % ' '.join(parts()))
return '' | python | def render_attrs(attrs):
"""
Render HTML attributes, or return '' if no attributes need to be rendered.
"""
if attrs is not None:
def parts():
for key, value in sorted(attrs.items()):
if value is None:
continue
if value is True:
yield '%s' % (key, )
continue
if key == 'class' and isinstance(value, dict):
if not value:
continue
value = render_class(value)
if key == 'style' and isinstance(value, dict):
if not value:
continue
value = render_style(value)
yield '%s="%s"' % (key, ('%s' % value).replace('"', '"'))
return mark_safe(' %s' % ' '.join(parts()))
return '' | [
"def",
"render_attrs",
"(",
"attrs",
")",
":",
"if",
"attrs",
"is",
"not",
"None",
":",
"def",
"parts",
"(",
")",
":",
"for",
"key",
",",
"value",
"in",
"sorted",
"(",
"attrs",
".",
"items",
"(",
")",
")",
":",
"if",
"value",
"is",
"None",
":",
"continue",
"if",
"value",
"is",
"True",
":",
"yield",
"'%s'",
"%",
"(",
"key",
",",
")",
"continue",
"if",
"key",
"==",
"'class'",
"and",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"if",
"not",
"value",
":",
"continue",
"value",
"=",
"render_class",
"(",
"value",
")",
"if",
"key",
"==",
"'style'",
"and",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"if",
"not",
"value",
":",
"continue",
"value",
"=",
"render_style",
"(",
"value",
")",
"yield",
"'%s=\"%s\"'",
"%",
"(",
"key",
",",
"(",
"'%s'",
"%",
"value",
")",
".",
"replace",
"(",
"'\"'",
",",
"'"'",
")",
")",
"return",
"mark_safe",
"(",
"' %s'",
"%",
"' '",
".",
"join",
"(",
"parts",
"(",
")",
")",
")",
"return",
"''"
] | Render HTML attributes, or return '' if no attributes need to be rendered. | [
"Render",
"HTML",
"attributes",
"or",
"return",
"if",
"no",
"attributes",
"needs",
"to",
"be",
"rendered",
"."
] | train | https://github.com/TriOptima/tri.form/blob/0c8efaac8fe113619932def1570befe11b634927/lib/tri/form/render.py#L4-L26 |
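A usage sketch for render_attrs based only on the branches above; the output of the dict-valued class/style branches depends on render_class and render_style, which are not shown here, so the example sticks to plain values.

    from tri.form.render import render_attrs

    # None values are dropped, True renders as a bare attribute, and
    # double quotes in values are escaped as &quot;.
    print(render_attrs({'id': 'x', 'disabled': True, 'title': None}))
    # expected: ' disabled id="x"'  (keys render in sorted order)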
Workiva/furious | furious/extras/appengine/ndb_persistence.py | context_completion_checker | def context_completion_checker(async):
"""Persist async marker and async the completion check"""
store_async_marker(async.id, async.result.status if async.result else -1)
logging.debug("Async check completion for: %s", async.context_id)
current_queue = _get_current_queue()
from furious.async import Async
logging.debug("Completion Check queue:%s", current_queue)
Async(_completion_checker, queue=current_queue,
args=(async.id, async.context_id)).start()
return True | python | def context_completion_checker(async):
"""Persist async marker and async the completion check"""
store_async_marker(async.id, async.result.status if async.result else -1)
logging.debug("Async check completion for: %s", async.context_id)
current_queue = _get_current_queue()
from furious.async import Async
logging.debug("Completion Check queue:%s", current_queue)
Async(_completion_checker, queue=current_queue,
args=(async.id, async.context_id)).start()
return True | [
"def",
"context_completion_checker",
"(",
"async",
")",
":",
"store_async_marker",
"(",
"async",
".",
"id",
",",
"async",
".",
"result",
".",
"status",
"if",
"async",
".",
"result",
"else",
"-",
"1",
")",
"logging",
".",
"debug",
"(",
"\"Async check completion for: %s\"",
",",
"async",
".",
"context_id",
")",
"current_queue",
"=",
"_get_current_queue",
"(",
")",
"from",
"furious",
".",
"async",
"import",
"Async",
"logging",
".",
"debug",
"(",
"\"Completion Check queue:%s\"",
",",
"current_queue",
")",
"Async",
"(",
"_completion_checker",
",",
"queue",
"=",
"current_queue",
",",
"args",
"=",
"(",
"async",
".",
"id",
",",
"async",
".",
"context_id",
")",
")",
".",
"start",
"(",
")",
"return",
"True"
] | Persist the async marker and asynchronously run the completion check. | [
"Persist",
"async",
"marker",
"and",
"async",
"the",
"completion",
"check"
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L136-L148 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | _completion_checker | def _completion_checker(async_id, context_id):
"""Check if all Async jobs within a Context have been run."""
if not context_id:
logging.debug("Context for async %s does not exist", async_id)
return
context = FuriousContext.from_id(context_id)
marker = FuriousCompletionMarker.get_by_id(context_id)
if marker and marker.complete:
logging.info("Context %s already complete" % context_id)
return True
task_ids = context.task_ids
if async_id in task_ids:
task_ids.remove(async_id)
logging.debug("Loaded context.")
logging.debug(task_ids)
done, has_errors = _check_markers(task_ids)
if not done:
return False
_mark_context_complete(marker, context, has_errors)
return True | python | def _completion_checker(async_id, context_id):
"""Check if all Async jobs within a Context have been run."""
if not context_id:
logging.debug("Context for async %s does not exist", async_id)
return
context = FuriousContext.from_id(context_id)
marker = FuriousCompletionMarker.get_by_id(context_id)
if marker and marker.complete:
logging.info("Context %s already complete" % context_id)
return True
task_ids = context.task_ids
if async_id in task_ids:
task_ids.remove(async_id)
logging.debug("Loaded context.")
logging.debug(task_ids)
done, has_errors = _check_markers(task_ids)
if not done:
return False
_mark_context_complete(marker, context, has_errors)
return True | [
"def",
"_completion_checker",
"(",
"async_id",
",",
"context_id",
")",
":",
"if",
"not",
"context_id",
":",
"logging",
".",
"debug",
"(",
"\"Context for async %s does not exist\"",
",",
"async_id",
")",
"return",
"context",
"=",
"FuriousContext",
".",
"from_id",
"(",
"context_id",
")",
"marker",
"=",
"FuriousCompletionMarker",
".",
"get_by_id",
"(",
"context_id",
")",
"if",
"marker",
"and",
"marker",
".",
"complete",
":",
"logging",
".",
"info",
"(",
"\"Context %s already complete\"",
"%",
"context_id",
")",
"return",
"True",
"task_ids",
"=",
"context",
".",
"task_ids",
"if",
"async_id",
"in",
"task_ids",
":",
"task_ids",
".",
"remove",
"(",
"async_id",
")",
"logging",
".",
"debug",
"(",
"\"Loaded context.\"",
")",
"logging",
".",
"debug",
"(",
"task_ids",
")",
"done",
",",
"has_errors",
"=",
"_check_markers",
"(",
"task_ids",
")",
"if",
"not",
"done",
":",
"return",
"False",
"_mark_context_complete",
"(",
"marker",
",",
"context",
",",
"has_errors",
")",
"return",
"True"
] | Check if all Async jobs within a Context have been run. | [
"Check",
"if",
"all",
"Async",
"jobs",
"within",
"a",
"Context",
"have",
"been",
"run",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L156-L184 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | _check_markers | def _check_markers(task_ids, offset=10):
"""Returns a flag for markers being found for the task_ids. If all task ids
have markers True will be returned. Otherwise it will return False as soon
as a None result is hit.
"""
shuffle(task_ids)
has_errors = False
for index in xrange(0, len(task_ids), offset):
keys = [ndb.Key(FuriousAsyncMarker, id)
for id in task_ids[index:index + offset]]
markers = ndb.get_multi(keys)
if not all(markers):
logging.debug("Not all Async's complete")
return False, None
# Did any of the asyncs fail? Check the success property on the
# AsyncResult.
has_errors = not all((marker.success for marker in markers))
return True, has_errors | python | def _check_markers(task_ids, offset=10):
"""Returns a flag for markers being found for the task_ids. If all task ids
have markers True will be returned. Otherwise it will return False as soon
as a None result is hit.
"""
shuffle(task_ids)
has_errors = False
for index in xrange(0, len(task_ids), offset):
keys = [ndb.Key(FuriousAsyncMarker, id)
for id in task_ids[index:index + offset]]
markers = ndb.get_multi(keys)
if not all(markers):
logging.debug("Not all Async's complete")
return False, None
# Did any of the asyncs fail? Check the success property on the
# AsyncResult.
has_errors = not all((marker.success for marker in markers))
return True, has_errors | [
"def",
"_check_markers",
"(",
"task_ids",
",",
"offset",
"=",
"10",
")",
":",
"shuffle",
"(",
"task_ids",
")",
"has_errors",
"=",
"False",
"for",
"index",
"in",
"xrange",
"(",
"0",
",",
"len",
"(",
"task_ids",
")",
",",
"offset",
")",
":",
"keys",
"=",
"[",
"ndb",
".",
"Key",
"(",
"FuriousAsyncMarker",
",",
"id",
")",
"for",
"id",
"in",
"task_ids",
"[",
"index",
":",
"index",
"+",
"offset",
"]",
"]",
"markers",
"=",
"ndb",
".",
"get_multi",
"(",
"keys",
")",
"if",
"not",
"all",
"(",
"markers",
")",
":",
"logging",
".",
"debug",
"(",
"\"Not all Async's complete\"",
")",
"return",
"False",
",",
"None",
"# Did any of the aync's fail? Check the success property on the",
"# AsyncResult.",
"has_errors",
"=",
"not",
"all",
"(",
"(",
"marker",
".",
"success",
"for",
"marker",
"in",
"markers",
")",
")",
"return",
"True",
",",
"has_errors"
] | Return a (done, has_errors) tuple for the given task_ids. If every
task id has a marker, done is True. Otherwise (False, None) is returned
as soon as a missing marker is hit. | [
"Returns",
"a",
"flag",
"for",
"markers",
"being",
"found",
"for",
"the",
"task_ids",
".",
"If",
"all",
"task",
"ids",
"have",
"markers",
"True",
"will",
"be",
"returned",
".",
"Otherwise",
"it",
"will",
"return",
"False",
"as",
"soon",
"as",
"a",
"None",
"result",
"is",
"hit",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L187-L210 |
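The batching idiom in _check_markers, shown standalone: walk a shuffled id list in fixed-size slices so each lookup batch stays small, and bail on the first miss. The generic version below substitutes a plain callable for ndb.get_multi.

    from random import shuffle

    def all_present(ids, lookup, offset=10):
        # Return True only if lookup() yields a value for every id,
        # checking `offset` ids at a time and stopping at the first miss.
        ids = list(ids)
        shuffle(ids)  # spread load across concurrent checkers, as above
        for index in range(0, len(ids), offset):
            batch = [lookup(id_) for id_ in ids[index:index + offset]]
            if not all(batch):
                return False
        return True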
Workiva/furious | furious/extras/appengine/ndb_persistence.py | _mark_context_complete | def _mark_context_complete(marker, context, has_errors):
"""Transactionally 'complete' the context."""
current = None
if marker:
current = marker.key.get()
if not current:
return False
if current and current.complete:
return False
current.complete = True
current.has_errors = has_errors
current.put()
# Kick off completion tasks.
_insert_post_complete_tasks(context)
return True | python | def _mark_context_complete(marker, context, has_errors):
"""Transactionally 'complete' the context."""
current = None
if marker:
current = marker.key.get()
if not current:
return False
if current and current.complete:
return False
current.complete = True
current.has_errors = has_errors
current.put()
# Kick off completion tasks.
_insert_post_complete_tasks(context)
return True | [
"def",
"_mark_context_complete",
"(",
"marker",
",",
"context",
",",
"has_errors",
")",
":",
"current",
"=",
"None",
"if",
"marker",
":",
"current",
"=",
"marker",
".",
"key",
".",
"get",
"(",
")",
"if",
"not",
"current",
":",
"return",
"False",
"if",
"current",
"and",
"current",
".",
"complete",
":",
"return",
"False",
"current",
".",
"complete",
"=",
"True",
"current",
".",
"has_errors",
"=",
"has_errors",
"current",
".",
"put",
"(",
")",
"# Kick off completion tasks.",
"_insert_post_complete_tasks",
"(",
"context",
")",
"return",
"True"
] | Transactionally 'complete' the context. | [
"Transactionally",
"complete",
"the",
"context",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L214-L235 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | _insert_post_complete_tasks | def _insert_post_complete_tasks(context):
"""Insert the event's asyncs and cleanup tasks."""
logging.debug("Context %s is complete.", context.id)
# Async event handlers
context.exec_event_handler('complete', transactional=True)
# Insert cleanup tasks
try:
# TODO: If tracking results we may not want to auto cleanup and instead
# wait until the results have been accessed.
from furious.async import Async
Async(_cleanup_markers, queue=CLEAN_QUEUE,
args=[context.id, context.task_ids],
task_args={'countdown': CLEAN_DELAY}).start()
except:
pass | python | def _insert_post_complete_tasks(context):
"""Insert the event's asyncs and cleanup tasks."""
logging.debug("Context %s is complete.", context.id)
# Async event handlers
context.exec_event_handler('complete', transactional=True)
# Insert cleanup tasks
try:
# TODO: If tracking results we may not want to auto cleanup and instead
# wait until the results have been accessed.
from furious.async import Async
Async(_cleanup_markers, queue=CLEAN_QUEUE,
args=[context.id, context.task_ids],
task_args={'countdown': CLEAN_DELAY}).start()
except:
pass | [
"def",
"_insert_post_complete_tasks",
"(",
"context",
")",
":",
"logging",
".",
"debug",
"(",
"\"Context %s is complete.\"",
",",
"context",
".",
"id",
")",
"# Async event handlers",
"context",
".",
"exec_event_handler",
"(",
"'complete'",
",",
"transactional",
"=",
"True",
")",
"# Insert cleanup tasks",
"try",
":",
"# TODO: If tracking results we may not want to auto cleanup and instead",
"# wait until the results have been accessed.",
"from",
"furious",
".",
"async",
"import",
"Async",
"Async",
"(",
"_cleanup_markers",
",",
"queue",
"=",
"CLEAN_QUEUE",
",",
"args",
"=",
"[",
"context",
".",
"id",
",",
"context",
".",
"task_ids",
"]",
",",
"task_args",
"=",
"{",
"'countdown'",
":",
"CLEAN_DELAY",
"}",
")",
".",
"start",
"(",
")",
"except",
":",
"pass"
] | Insert the event's asyncs and cleanup tasks. | [
"Insert",
"the",
"event",
"s",
"asyncs",
"and",
"cleanup",
"tasks",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L238-L255 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | _cleanup_markers | def _cleanup_markers(context_id, task_ids):
"""Delete the FuriousAsyncMarker entities corresponding to ids."""
logging.debug("Cleanup %d markers for Context %s",
len(task_ids), context_id)
# TODO: Handle exceptions and retries here.
delete_entities = [ndb.Key(FuriousAsyncMarker, id) for id in task_ids]
delete_entities.append(ndb.Key(FuriousCompletionMarker, context_id))
ndb.delete_multi(delete_entities)
logging.debug("Markers cleaned.") | python | def _cleanup_markers(context_id, task_ids):
"""Delete the FuriousAsyncMarker entities corresponding to ids."""
logging.debug("Cleanup %d markers for Context %s",
len(task_ids), context_id)
# TODO: Handle exceptions and retries here.
delete_entities = [ndb.Key(FuriousAsyncMarker, id) for id in task_ids]
delete_entities.append(ndb.Key(FuriousCompletionMarker, context_id))
ndb.delete_multi(delete_entities)
logging.debug("Markers cleaned.") | [
"def",
"_cleanup_markers",
"(",
"context_id",
",",
"task_ids",
")",
":",
"logging",
".",
"debug",
"(",
"\"Cleanup %d markers for Context %s\"",
",",
"len",
"(",
"task_ids",
")",
",",
"context_id",
")",
"# TODO: Handle exceptions and retries here.",
"delete_entities",
"=",
"[",
"ndb",
".",
"Key",
"(",
"FuriousAsyncMarker",
",",
"id",
")",
"for",
"id",
"in",
"task_ids",
"]",
"delete_entities",
".",
"append",
"(",
"ndb",
".",
"Key",
"(",
"FuriousCompletionMarker",
",",
"context_id",
")",
")",
"ndb",
".",
"delete_multi",
"(",
"delete_entities",
")",
"logging",
".",
"debug",
"(",
"\"Markers cleaned.\"",
")"
] | Delete the FuriousAsyncMarker entities corresponding to ids. | [
"Delete",
"the",
"FuriousAsyncMarker",
"entities",
"corresponding",
"to",
"ids",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L258-L270 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | store_context | def store_context(context):
"""Persist a furious.context.Context object to the datastore by loading it
into a FuriousContext ndb.Model.
"""
logging.debug("Attempting to store Context %s.", context.id)
entity = FuriousContext.from_context(context)
# TODO: Handle exceptions and retries here.
marker = FuriousCompletionMarker(id=context.id)
key, _ = ndb.put_multi((entity, marker))
logging.debug("Stored Context with key: %s.", key)
return key | python | def store_context(context):
"""Persist a furious.context.Context object to the datastore by loading it
into a FuriousContext ndb.Model.
"""
logging.debug("Attempting to store Context %s.", context.id)
entity = FuriousContext.from_context(context)
# TODO: Handle exceptions and retries here.
marker = FuriousCompletionMarker(id=context.id)
key, _ = ndb.put_multi((entity, marker))
logging.debug("Stored Context with key: %s.", key)
return key | [
"def",
"store_context",
"(",
"context",
")",
":",
"logging",
".",
"debug",
"(",
"\"Attempting to store Context %s.\"",
",",
"context",
".",
"id",
")",
"entity",
"=",
"FuriousContext",
".",
"from_context",
"(",
"context",
")",
"# TODO: Handle exceptions and retries here.",
"marker",
"=",
"FuriousCompletionMarker",
"(",
"id",
"=",
"context",
".",
"id",
")",
"key",
",",
"_",
"=",
"ndb",
".",
"put_multi",
"(",
"(",
"entity",
",",
"marker",
")",
")",
"logging",
".",
"debug",
"(",
"\"Stored Context with key: %s.\"",
",",
"key",
")",
"return",
"key"
] | Persist a furious.context.Context object to the datastore by loading it
into a FuriousContext ndb.Model. | [
"Persist",
"a",
"furious",
".",
"context",
".",
"Context",
"object",
"to",
"the",
"datastore",
"by",
"loading",
"it",
"into",
"a",
"FuriousContext",
"ndb",
".",
"Model",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L279-L294 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | store_async_result | def store_async_result(async_id, async_result):
"""Persist the Async's result to the datastore."""
logging.debug("Storing result for %s", async_id)
key = FuriousAsyncMarker(
id=async_id, result=json.dumps(async_result.to_dict()),
status=async_result.status).put()
logging.debug("Setting Async result %s using marker: %s.", async_result,
key) | python | def store_async_result(async_id, async_result):
"""Persist the Async's result to the datastore."""
logging.debug("Storing result for %s", async_id)
key = FuriousAsyncMarker(
id=async_id, result=json.dumps(async_result.to_dict()),
status=async_result.status).put()
logging.debug("Setting Async result %s using marker: %s.", async_result,
key) | [
"def",
"store_async_result",
"(",
"async_id",
",",
"async_result",
")",
":",
"logging",
".",
"debug",
"(",
"\"Storing result for %s\"",
",",
"async_id",
")",
"key",
"=",
"FuriousAsyncMarker",
"(",
"id",
"=",
"async_id",
",",
"result",
"=",
"json",
".",
"dumps",
"(",
"async_result",
".",
"to_dict",
"(",
")",
")",
",",
"status",
"=",
"async_result",
".",
"status",
")",
".",
"put",
"(",
")",
"logging",
".",
"debug",
"(",
"\"Setting Async result %s using marker: %s.\"",
",",
"async_result",
",",
"key",
")"
] | Persist the Async's result to the datastore. | [
"Persist",
"the",
"Async",
"s",
"result",
"to",
"the",
"datastore",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L297-L307 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | store_async_marker | def store_async_marker(async_id, status):
"""Persist a marker indicating the Async ran to the datastore."""
logging.debug("Attempting to mark Async %s complete.", async_id)
# QUESTION: Do we trust if the marker had a flag result to just trust it?
marker = FuriousAsyncMarker.get_by_id(async_id)
if marker:
logging.debug("Marker already exists for %s.", async_id)
return
# TODO: Handle exceptions and retries here.
key = FuriousAsyncMarker(id=async_id, status=status).put()
logging.debug("Marked Async complete using marker: %s.", key) | python | def store_async_marker(async_id, status):
"""Persist a marker indicating the Async ran to the datastore."""
logging.debug("Attempting to mark Async %s complete.", async_id)
# QUESTION: Do we trust if the marker had a flag result to just trust it?
marker = FuriousAsyncMarker.get_by_id(async_id)
if marker:
logging.debug("Marker already exists for %s.", async_id)
return
# TODO: Handle exceptions and retries here.
key = FuriousAsyncMarker(id=async_id, status=status).put()
logging.debug("Marked Async complete using marker: %s.", key) | [
"def",
"store_async_marker",
"(",
"async_id",
",",
"status",
")",
":",
"logging",
".",
"debug",
"(",
"\"Attempting to mark Async %s complete.\"",
",",
"async_id",
")",
"# QUESTION: Do we trust if the marker had a flag result to just trust it?",
"marker",
"=",
"FuriousAsyncMarker",
".",
"get_by_id",
"(",
"async_id",
")",
"if",
"marker",
":",
"logging",
".",
"debug",
"(",
"\"Marker already exists for %s.\"",
",",
"async_id",
")",
"return",
"# TODO: Handle exceptions and retries here.",
"key",
"=",
"FuriousAsyncMarker",
"(",
"id",
"=",
"async_id",
",",
"status",
"=",
"status",
")",
".",
"put",
"(",
")",
"logging",
".",
"debug",
"(",
"\"Marked Async complete using marker: %s.\"",
",",
"key",
")"
] | Persist a marker indicating the Async ran to the datastore. | [
"Persist",
"a",
"marker",
"indicating",
"the",
"Async",
"ran",
"to",
"the",
"datastore",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L310-L325 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | iter_context_results | def iter_context_results(context, batch_size=10, task_cache=None):
"""Yield out the results found on the markers for the context task ids."""
for futures in iget_batches(context.task_ids, batch_size=batch_size):
for key, future in futures:
task = future.get_result()
if task_cache is not None:
task_cache[key.id()] = task
yield key.id(), task | python | def iter_context_results(context, batch_size=10, task_cache=None):
"""Yield out the results found on the markers for the context task ids."""
for futures in iget_batches(context.task_ids, batch_size=batch_size):
for key, future in futures:
task = future.get_result()
if task_cache is not None:
task_cache[key.id()] = task
yield key.id(), task | [
"def",
"iter_context_results",
"(",
"context",
",",
"batch_size",
"=",
"10",
",",
"task_cache",
"=",
"None",
")",
":",
"for",
"futures",
"in",
"iget_batches",
"(",
"context",
".",
"task_ids",
",",
"batch_size",
"=",
"batch_size",
")",
":",
"for",
"key",
",",
"future",
"in",
"futures",
":",
"task",
"=",
"future",
".",
"get_result",
"(",
")",
"if",
"task_cache",
"is",
"not",
"None",
":",
"task_cache",
"[",
"key",
".",
"id",
"(",
")",
"]",
"=",
"task",
"yield",
"key",
".",
"id",
"(",
")",
",",
"task"
] | Yield out the results found on the markers for the context task ids. | [
"Yield",
"out",
"the",
"results",
"found",
"on",
"the",
"markers",
"for",
"the",
"context",
"task",
"ids",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L328-L338 |
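
A generic sketch of the "optionally cache while yielding" generator shape used by `iter_context_results`; a plain dict stands in for the ndb futures here:

```python
def iter_results(keys, source, cache=None):
    # Yield (key, value) pairs, populating the caller-supplied cache as we
    # go, so repeated lookups can skip the backing store.
    for key in keys:
        value = source.get(key)
        if cache is not None:
            cache[key] = value
        yield key, value

source = {"a": 1, "b": 2}
cache = {}
for key, value in iter_results(["a", "b", "c"], source, cache):
    print(key, value)
print(cache)  # {'a': 1, 'b': 2, 'c': None}
```
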
Workiva/furious | furious/extras/appengine/ndb_persistence.py | iget_batches | def iget_batches(task_ids, batch_size=10):
"""Yield out a map of the keys and futures in batches of the batch size
passed in.
"""
make_key = lambda _id: ndb.Key(FuriousAsyncMarker, _id)
for keys in i_batch(imap(make_key, task_ids), batch_size):
yield izip(keys, ndb.get_multi_async(keys)) | python | def iget_batches(task_ids, batch_size=10):
"""Yield out a map of the keys and futures in batches of the batch size
passed in.
"""
make_key = lambda _id: ndb.Key(FuriousAsyncMarker, _id)
for keys in i_batch(imap(make_key, task_ids), batch_size):
yield izip(keys, ndb.get_multi_async(keys)) | [
"def",
"iget_batches",
"(",
"task_ids",
",",
"batch_size",
"=",
"10",
")",
":",
"make_key",
"=",
"lambda",
"_id",
":",
"ndb",
".",
"Key",
"(",
"FuriousAsyncMarker",
",",
"_id",
")",
"for",
"keys",
"in",
"i_batch",
"(",
"imap",
"(",
"make_key",
",",
"task_ids",
")",
",",
"batch_size",
")",
":",
"yield",
"izip",
"(",
"keys",
",",
"ndb",
".",
"get_multi_async",
"(",
"keys",
")",
")"
] | Yield out a map of the keys and futures in batches of the batch size
passed in. | [
"Yield",
"out",
"a",
"map",
"of",
"the",
"keys",
"and",
"futures",
"in",
"batches",
"of",
"the",
"batch",
"size",
"passed",
"in",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L341-L348 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | i_batch | def i_batch(items, size):
"""Generator that iteratively batches items to a max size and consumes the
items as each batch is yielded.
"""
for items_batch in iter(lambda: tuple(islice(items, size)),
tuple()):
yield items_batch | python | def i_batch(items, size):
"""Generator that iteratively batches items to a max size and consumes the
items as each batch is yielded.
"""
for items_batch in iter(lambda: tuple(islice(items, size)),
tuple()):
yield items_batch | [
"def",
"i_batch",
"(",
"items",
",",
"size",
")",
":",
"for",
"items_batch",
"in",
"iter",
"(",
"lambda",
":",
"tuple",
"(",
"islice",
"(",
"items",
",",
"size",
")",
")",
",",
"tuple",
"(",
")",
")",
":",
"yield",
"items_batch"
] | Generator that iteratively batches items to a max size and consumes the
items as each batch is yielded. | [
"Generator",
"that",
"iteratively",
"batches",
"items",
"to",
"a",
"max",
"size",
"and",
"consumes",
"the",
"items",
"as",
"each",
"batch",
"is",
"yielded",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L351-L357 |
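
Note that `i_batch` consumes its input with `islice`, so it must be fed an iterator: a list would be re-sliced from the front on every pass and loop forever. A runnable usage sketch:

```python
from itertools import islice

def i_batch(items, size):
    # items must be an iterator: islice advances it in place each pass.
    for items_batch in iter(lambda: tuple(islice(items, size)), tuple()):
        yield items_batch

print(list(i_batch(iter(range(7)), 3)))
# [(0, 1, 2), (3, 4, 5), (6,)]
```
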
Workiva/furious | furious/extras/appengine/ndb_persistence.py | FuriousContext.from_context | def from_context(cls, context):
"""Create a `cls` entity from a context."""
return cls(id=context.id, context=context.to_dict()) | python | def from_context(cls, context):
"""Create a `cls` entity from a context."""
return cls(id=context.id, context=context.to_dict()) | [
"def",
"from_context",
"(",
"cls",
",",
"context",
")",
":",
"return",
"cls",
"(",
"id",
"=",
"context",
".",
"id",
",",
"context",
"=",
"context",
".",
"to_dict",
"(",
")",
")"
] | Create a `cls` entity from a context. | [
"Create",
"a",
"cls",
"entity",
"from",
"a",
"context",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L50-L52 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | FuriousContext.from_id | def from_id(cls, id):
"""Load a `cls` entity and instantiate the Context it stores."""
from furious.context import Context
# TODO: Handle exceptions and retries here.
entity = cls.get_by_id(id)
if not entity:
raise FuriousContextNotFoundError(
"Context entity not found for: {}".format(id))
return Context.from_dict(entity.context) | python | def from_id(cls, id):
"""Load a `cls` entity and instantiate the Context it stores."""
from furious.context import Context
# TODO: Handle exceptions and retries here.
entity = cls.get_by_id(id)
if not entity:
raise FuriousContextNotFoundError(
"Context entity not found for: {}".format(id))
return Context.from_dict(entity.context) | [
"def",
"from_id",
"(",
"cls",
",",
"id",
")",
":",
"from",
"furious",
".",
"context",
"import",
"Context",
"# TODO: Handle exceptions and retries here.",
"entity",
"=",
"cls",
".",
"get_by_id",
"(",
"id",
")",
"if",
"not",
"entity",
":",
"raise",
"FuriousContextNotFoundError",
"(",
"\"Context entity not found for: {}\"",
".",
"format",
"(",
"id",
")",
")",
"return",
"Context",
".",
"from_dict",
"(",
"entity",
".",
"context",
")"
] | Load a `cls` entity and instantiate the Context it stores. | [
"Load",
"a",
"cls",
"entity",
"and",
"instantiate",
"the",
"Context",
"it",
"stores",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L55-L65 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | ContextResult.items | def items(self):
"""Yield the async reuslts for the context."""
for key, task in self._tasks:
if not (task and task.result):
yield key, None
else:
yield key, json.loads(task.result)["payload"] | python | def items(self):
"""Yield the async reuslts for the context."""
for key, task in self._tasks:
if not (task and task.result):
yield key, None
else:
yield key, json.loads(task.result)["payload"] | [
"def",
"items",
"(",
"self",
")",
":",
"for",
"key",
",",
"task",
"in",
"self",
".",
"_tasks",
":",
"if",
"not",
"(",
"task",
"and",
"task",
".",
"result",
")",
":",
"yield",
"key",
",",
"None",
"else",
":",
"yield",
"key",
",",
"json",
".",
"loads",
"(",
"task",
".",
"result",
")",
"[",
"\"payload\"",
"]"
] | Yield the async results for the context. | [
"Yield",
"the",
"async",
"reuslts",
"for",
"the",
"context",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L112-L118 |
Workiva/furious | furious/extras/appengine/ndb_persistence.py | ContextResult.values | def values(self):
"""Yield the async reuslt values for the context."""
for _, task in self._tasks:
if not (task and task.result):
yield None
else:
yield json.loads(task.result)["payload"] | python | def values(self):
"""Yield the async reuslt values for the context."""
for _, task in self._tasks:
if not (task and task.result):
yield None
else:
yield json.loads(task.result)["payload"] | [
"def",
"values",
"(",
"self",
")",
":",
"for",
"_",
",",
"task",
"in",
"self",
".",
"_tasks",
":",
"if",
"not",
"(",
"task",
"and",
"task",
".",
"result",
")",
":",
"yield",
"None",
"else",
":",
"yield",
"json",
".",
"loads",
"(",
"task",
".",
"result",
")",
"[",
"\"payload\"",
"]"
] | Yield the async result values for the context. | [
"Yield",
"the",
"async",
"reuslt",
"values",
"for",
"the",
"context",
"."
] | train | https://github.com/Workiva/furious/blob/c29823ec8b98549e7439d7273aa064d1e5830632/furious/extras/appengine/ndb_persistence.py#L120-L126 |
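
The payload extraction shared by `items` and `values` can be exercised with plain stand-ins for the marker entities; `Task` here is an illustrative namedtuple, not a furious type:

```python
import json
from collections import namedtuple

Task = namedtuple("Task", ["result"])
tasks = [
    ("t1", Task(json.dumps({"payload": 42}))),
    ("t2", None),  # marker missing or result empty -> payload is None
]

for key, task in tasks:
    if not (task and task.result):
        print(key, None)
    else:
        print(key, json.loads(task.result)["payload"])
# t1 42
# t2 None
```
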
aiidalab/aiidalab-widgets-base | aiidalab_widgets_base/display.py | aiidalab_display | def aiidalab_display(obj, downloadable=True, **kwargs):
"""Display AiiDA data types in Jupyter notebooks.
:param downloadable: If True, add link/button to download content of displayed AiiDA object.
Defers to IPython.display.display for any objects it does not recognize.
"""
from aiidalab_widgets_base import aiida_visualizers
try:
visualizer = getattr(aiida_visualizers, AIIDA_VISUALIZER_MAPPING[obj.type])
display(visualizer(obj, downloadable=downloadable), **kwargs)
except KeyError:
display(obj, **kwargs) | python | def aiidalab_display(obj, downloadable=True, **kwargs):
"""Display AiiDA data types in Jupyter notebooks.
:param downloadable: If True, add link/button to download content of displayed AiiDA object.
Defers to IPython.display.display for any objects it does not recognize.
"""
from aiidalab_widgets_base import aiida_visualizers
try:
visualizer = getattr(aiida_visualizers, AIIDA_VISUALIZER_MAPPING[obj.type])
display(visualizer(obj, downloadable=downloadable), **kwargs)
except KeyError:
display(obj, **kwargs) | [
"def",
"aiidalab_display",
"(",
"obj",
",",
"downloadable",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"aiidalab_widgets_base",
"import",
"aiida_visualizers",
"try",
":",
"visualizer",
"=",
"getattr",
"(",
"aiida_visualizers",
",",
"AIIDA_VISUALIZER_MAPPING",
"[",
"obj",
".",
"type",
"]",
")",
"display",
"(",
"visualizer",
"(",
"obj",
",",
"downloadable",
"=",
"downloadable",
")",
",",
"*",
"*",
"kwargs",
")",
"except",
"KeyError",
":",
"display",
"(",
"obj",
",",
"*",
"*",
"kwargs",
")"
] | Display AiiDA data types in Jupyter notebooks.
:param downloadable: If True, add link/button to download content of displayed AiiDA object.
Defers to IPython.display.display for any objects it does not recognize. | [
"Display",
"AiiDA",
"data",
"types",
"in",
"Jupyter",
"notebooks",
"."
] | train | https://github.com/aiidalab/aiidalab-widgets-base/blob/291a9b159eac902aee655862322670ec1b0cd5b1/aiidalab_widgets_base/display.py#L14-L26 |
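
Assuming a Jupyter notebook with AiiDA loaded, usage might look like the sketch below; the import path, `load_node` call, and pk are assumptions for illustration, not confirmed by the source:

```python
# In a notebook cell (illustrative only).
from aiidalab_widgets_base.display import aiidalab_display
from aiida.orm import load_node  # assumed AiiDA helper

node = load_node(1234)               # hypothetical pk
aiidalab_display(node)               # picks a visualizer by node.type
aiidalab_display({"plain": "dict"})  # unknown type falls back to display()
```
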
aiidalab/aiidalab-widgets-base | aiidalab_widgets_base/computers.py | SshComputerSetup.on_setup_ssh | def on_setup_ssh(self, b):
"""ATTENTION: modifying the order of operations in this function can lead to unexpected problems"""
with self._setup_ssh_out:
clear_output()
self._ssh_keygen()
#temporary passwords
password = self.__password
proxy_password = self.__proxy_password
# step 1: if hostname is not provided - do not do anything
if self.hostname is None: # check hostname
print("Please specify the computer hostname")
return
# step 2: check if password-free access was enabled earlier
if self.can_login():
print ("Password-free access is already enabled")
# it can still happen that password-free access is enabled
# but host is not present in the config file - fixing this
if not self.is_in_config():
self._write_ssh_config() # we do not use proxy here, because if computer
# can be accessed without any info in the config - proxy is not needed.
self.setup_counter += 1 # only if config file has changed - increase setup_counter
return
# step 3: if can't login already, check whether all required information is provided
if self.username is None: # check username
print("Please enter your ssh username")
return
if len(password.strip()) == 0: # check password
print("Please enter your ssh password")
return
# step 4: get the right commands to access the proxy server (if provided)
success, proxycmd = self._configure_proxy(password, proxy_password)
if not success:
return
# step 5: make host known by ssh on the proxy server
if not self.is_host_known():
self._make_host_known(self.hostname,['ssh']+[proxycmd] if proxycmd else [])
# step 6: sending public key to the main host
if not self._send_pubkey(self.hostname, self.username, password, proxycmd):
print ("Could not send public key to {}".format(self.hostname))
return
# step 7: modify the ssh config file if necessary
if not self.is_in_config():
self._write_ssh_config(proxycmd=proxycmd)
# TODO: add a check if new config is different from the current one. If so
# inform the user about it.
# step 8: final check
if self.can_login():
self.setup_counter += 1
print("Automatic ssh setup successful :-)")
return
else:
print("Automatic ssh setup failed, sorry :-(")
return | python | def on_setup_ssh(self, b):
"""ATTENTION: modifying the order of operations in this function can lead to unexpected problems"""
with self._setup_ssh_out:
clear_output()
self._ssh_keygen()
#temporary passwords
password = self.__password
proxy_password = self.__proxy_password
# step 1: if hostname is not provided - do not do anything
if self.hostname is None: # check hostname
print("Please specify the computer hostname")
return
# step 2: check if password-free access was enabled earlier
if self.can_login():
print ("Password-free access is already enabled")
# it can still happen that password-free access is enabled
# but host is not present in the config file - fixing this
if not self.is_in_config():
self._write_ssh_config() # we do not use proxy here, because if computer
# can be accessed without any info in the config - proxy is not needed.
self.setup_counter += 1 # only if config file has changed - increase setup_counter
return
# step 3: if can't login already, check whether all required information is provided
if self.username is None: # check username
print("Please enter your ssh username")
return
if len(password.strip()) == 0: # check password
print("Please enter your ssh password")
return
# step 4: get the right commands to access the proxy server (if provided)
success, proxycmd = self._configure_proxy(password, proxy_password)
if not success:
return
# step 5: make host known by ssh on the proxy server
if not self.is_host_known():
self._make_host_known(self.hostname,['ssh']+[proxycmd] if proxycmd else [])
# step 6: sending public key to the main host
if not self._send_pubkey(self.hostname, self.username, password, proxycmd):
print ("Could not send public key to {}".format(self.hostname))
return
# step 7: modify the ssh config file if necessary
if not self.is_in_config():
self._write_ssh_config(proxycmd=proxycmd)
# TODO: add a check if new config is different from the current one. If so
# inform the user about it.
# step 8: final check
if self.can_login():
self.setup_counter += 1
print("Automatic ssh setup successful :-)")
return
else:
print("Automatic ssh setup failed, sorry :-(")
return | [
"def",
"on_setup_ssh",
"(",
"self",
",",
"b",
")",
":",
"with",
"self",
".",
"_setup_ssh_out",
":",
"clear_output",
"(",
")",
"self",
".",
"_ssh_keygen",
"(",
")",
"#temporary passwords",
"password",
"=",
"self",
".",
"__password",
"proxy_password",
"=",
"self",
".",
"__proxy_password",
"# step 1: if hostname is not provided - do not do anything",
"if",
"self",
".",
"hostname",
"is",
"None",
":",
"# check hostname",
"print",
"(",
"\"Please specify the computer hostname\"",
")",
"return",
"# step 2: check if password-free access was enabled earlier",
"if",
"self",
".",
"can_login",
"(",
")",
":",
"print",
"(",
"\"Password-free access is already enabled\"",
")",
"# it can still happen that password-free access is enabled",
"# but host is not present in the config file - fixing this",
"if",
"not",
"self",
".",
"is_in_config",
"(",
")",
":",
"self",
".",
"_write_ssh_config",
"(",
")",
"# we do not use proxy here, because if computer",
"# can be accessed without any info in the config - proxy is not needed.",
"self",
".",
"setup_counter",
"+=",
"1",
"# only if config file has changed - increase setup_counter",
"return",
"# step 3: if can't login already, chek whether all required information is provided",
"if",
"self",
".",
"username",
"is",
"None",
":",
"# check username",
"print",
"(",
"\"Please enter your ssh username\"",
")",
"return",
"if",
"len",
"(",
"password",
".",
"strip",
"(",
")",
")",
"==",
"0",
":",
"# check password",
"print",
"(",
"\"Please enter your ssh password\"",
")",
"return",
"# step 4: get the right commands to access the proxy server (if provided)",
"success",
",",
"proxycmd",
"=",
"self",
".",
"_configure_proxy",
"(",
"password",
",",
"proxy_password",
")",
"if",
"not",
"success",
":",
"return",
"# step 5: make host known by ssh on the proxy server",
"if",
"not",
"self",
".",
"is_host_known",
"(",
")",
":",
"self",
".",
"_make_host_known",
"(",
"self",
".",
"hostname",
",",
"[",
"'ssh'",
"]",
"+",
"[",
"proxycmd",
"]",
"if",
"proxycmd",
"else",
"[",
"]",
")",
"# step 6: sending public key to the main host",
"if",
"not",
"self",
".",
"_send_pubkey",
"(",
"self",
".",
"hostname",
",",
"self",
".",
"username",
",",
"password",
",",
"proxycmd",
")",
":",
"print",
"(",
"\"Could not send public key to {}\"",
".",
"format",
"(",
"self",
".",
"hostname",
")",
")",
"return",
"# step 7: modify the ssh config file if necessary",
"if",
"not",
"self",
".",
"is_in_config",
"(",
")",
":",
"self",
".",
"_write_ssh_config",
"(",
"proxycmd",
"=",
"proxycmd",
")",
"# TODO: add a check if new config is different from the current one. If so",
"# infrom the user about it.",
"# step 8: final check",
"if",
"self",
".",
"can_login",
"(",
")",
":",
"self",
".",
"setup_counter",
"+=",
"1",
"print",
"(",
"\"Automatic ssh setup successful :-)\"",
")",
"return",
"else",
":",
"print",
"(",
"\"Automatic ssh setup failed, sorry :-(\"",
")",
"return"
] | ATTENTION: modifying the order of operations in this function can lead to unexpected problems | [
"ATTENTION",
":",
"modifying",
"the",
"order",
"of",
"operations",
"in",
"this",
"function",
"can",
"lead",
"to",
"unexpected",
"problems"
] | train | https://github.com/aiidalab/aiidalab-widgets-base/blob/291a9b159eac902aee655862322670ec1b0cd5b1/aiidalab_widgets_base/computers.py#L247-L308 |
aiidalab/aiidalab-widgets-base | aiidalab_widgets_base/computers.py | SshComputerSetup.__password | def __password(self):
"""Returning the password and immediately destroying it"""
passwd = copy(self._inp_password.value)
self._inp_password.value = ''
return passwd | python | def __password(self):
"""Returning the password and immediately destroying it"""
passwd = copy(self._inp_password.value)
self._inp_password.value = ''
return passwd | [
"def",
"__password",
"(",
"self",
")",
":",
"passwd",
"=",
"copy",
"(",
"self",
".",
"_inp_password",
".",
"value",
")",
"self",
".",
"_inp_password",
".",
"value",
"=",
"''",
"return",
"passwd"
] | Returning the password and immediately destroying it | [
"Returning",
"the",
"password",
"and",
"immediately",
"destroying",
"it"
] | train | https://github.com/aiidalab/aiidalab-widgets-base/blob/291a9b159eac902aee655862322670ec1b0cd5b1/aiidalab_widgets_base/computers.py#L344-L348 |
aiidalab/aiidalab-widgets-base | aiidalab_widgets_base/computers.py | SshComputerSetup.__proxy_password | def __proxy_password(self):
"""Returning the password and immediately destroying it"""
passwd = copy(self._inp_proxy_password.value)
self._inp_proxy_password.value = ''
return passwd | python | def __proxy_password(self):
"""Returning the password and immediately destroying it"""
passwd = copy(self._inp_proxy_password.value)
self._inp_proxy_password.value = ''
return passwd | [
"def",
"__proxy_password",
"(",
"self",
")",
":",
"passwd",
"=",
"copy",
"(",
"self",
".",
"_inp_proxy_password",
".",
"value",
")",
"self",
".",
"_inp_proxy_password",
".",
"value",
"=",
"''",
"return",
"passwd"
] | Returning the password and immediately destroying it | [
"Returning",
"the",
"password",
"and",
"immediately",
"destroying",
"it"
] | train | https://github.com/aiidalab/aiidalab-widgets-base/blob/291a9b159eac902aee655862322670ec1b0cd5b1/aiidalab_widgets_base/computers.py#L351-L355 |
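
The read-once-and-clear pattern behind both password properties above can be shown with a plain widget stand-in; `FakeInput` is an assumption replacing the ipywidgets text field:

```python
from copy import copy

class FakeInput:
    """Illustrative stand-in for an ipywidgets password field."""
    def __init__(self, value):
        self.value = value

class Setup:
    def __init__(self, secret):
        self._inp_password = FakeInput(secret)

    @property
    def _password(self):
        # Copy the value, then wipe the widget so the secret
        # does not linger in the UI state after the first read.
        passwd = copy(self._inp_password.value)
        self._inp_password.value = ''
        return passwd

s = Setup("hunter2")
print(s._password)            # 'hunter2'
print(s._inp_password.value)  # '' (cleared after first read)
```
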
aiidalab/aiidalab-widgets-base | aiidalab_widgets_base/computers.py | SshComputerSetup.username | def username(self):
"""Loking for username in user's input and config file"""
if len(self._inp_username.value.strip()) == 0: # if username provided by user
if not self.hostname is None:
config = parse_sshconfig(self.hostname)
if 'user' in config: # if username is present in the config file
return config['user']
else:
return None
else:
return self._inp_username.value | python | def username(self):
"""Loking for username in user's input and config file"""
if len(self._inp_username.value.strip()) == 0: # if username provided by user
if not self.hostname is None:
config = parse_sshconfig(self.hostname)
if 'user' in config: # if username is present in the config file
return config['user']
else:
return None
else:
return self._inp_username.value | [
"def",
"username",
"(",
"self",
")",
":",
"if",
"len",
"(",
"self",
".",
"_inp_username",
".",
"value",
".",
"strip",
"(",
")",
")",
"==",
"0",
":",
"# if username provided by user",
"if",
"not",
"self",
".",
"hostname",
"is",
"None",
":",
"config",
"=",
"parse_sshconfig",
"(",
"self",
".",
"hostname",
")",
"if",
"'user'",
"in",
"config",
":",
"# if username is present in the config file",
"return",
"config",
"[",
"'user'",
"]",
"else",
":",
"return",
"None",
"else",
":",
"return",
"self",
".",
"_inp_username",
".",
"value"
] | Looking for username in user's input and config file | [
"Loking",
"for",
"username",
"in",
"user",
"s",
"input",
"and",
"config",
"file"
] | train | https://github.com/aiidalab/aiidalab-widgets-base/blob/291a9b159eac902aee655862322670ec1b0cd5b1/aiidalab_widgets_base/computers.py#L369-L379 |
aiidalab/aiidalab-widgets-base | aiidalab_widgets_base/computers.py | AiidaComputerSetup._configure_computer | def _configure_computer(self):
"""create DbAuthInfo"""
print("Configuring '{}'".format(self.name))
sshcfg = parse_sshconfig(self.hostname)
authparams = {
'compress': True,
'gss_auth': False,
'gss_deleg_creds': False,
'gss_host': self.hostname,
'gss_kex': False,
'key_policy': 'WarningPolicy',
'load_system_host_keys': True,
'port': 22,
'timeout': 60,
}
if 'user' in sshcfg:
authparams['username'] = sshcfg['user']
else:
print ("SSH username is not provided, please run `verdi computer configure {}` "
"from the command line".format(self.name))
return
if 'proxycommand' in sshcfg:
authparams['proxy_command'] = sshcfg['proxycommand']
aiidauser = get_automatic_user()
authinfo = DbAuthInfo(dbcomputer=Computer.get(self.name).dbcomputer, aiidauser=aiidauser)
authinfo.set_auth_params(authparams)
authinfo.save()
print(check_output(['verdi', 'computer', 'show', self.name])) | python | def _configure_computer(self):
"""create DbAuthInfo"""
print("Configuring '{}'".format(self.name))
sshcfg = parse_sshconfig(self.hostname)
authparams = {
'compress': True,
'gss_auth': False,
'gss_deleg_creds': False,
'gss_host': self.hostname,
'gss_kex': False,
'key_policy': 'WarningPolicy',
'load_system_host_keys': True,
'port': 22,
'timeout': 60,
}
if 'user' in sshcfg:
authparams['username'] = sshcfg['user']
else:
print ("SSH username is not provided, please run `verdi computer configure {}` "
"from the command line".format(self.name))
return
if 'proxycommand' in sshcfg:
authparams['proxy_command'] = sshcfg['proxycommand']
aiidauser = get_automatic_user()
authinfo = DbAuthInfo(dbcomputer=Computer.get(self.name).dbcomputer, aiidauser=aiidauser)
authinfo.set_auth_params(authparams)
authinfo.save()
print(check_output(['verdi', 'computer', 'show', self.name])) | [
"def",
"_configure_computer",
"(",
"self",
")",
":",
"print",
"(",
"\"Configuring '{}'\"",
".",
"format",
"(",
"self",
".",
"name",
")",
")",
"sshcfg",
"=",
"parse_sshconfig",
"(",
"self",
".",
"hostname",
")",
"authparams",
"=",
"{",
"'compress'",
":",
"True",
",",
"'gss_auth'",
":",
"False",
",",
"'gss_deleg_creds'",
":",
"False",
",",
"'gss_host'",
":",
"self",
".",
"hostname",
",",
"'gss_kex'",
":",
"False",
",",
"'key_policy'",
":",
"'WarningPolicy'",
",",
"'load_system_host_keys'",
":",
"True",
",",
"'port'",
":",
"22",
",",
"'timeout'",
":",
"60",
",",
"}",
"if",
"'user'",
"in",
"sshcfg",
":",
"authparams",
"[",
"'username'",
"]",
"=",
"sshcfg",
"[",
"'user'",
"]",
"else",
":",
"print",
"(",
"\"SSH username is not provided, please run `verdi computer configure {}` \"",
"\"from the command line\"",
".",
"format",
"(",
"self",
".",
"name",
")",
")",
"return",
"if",
"'proxycommand'",
"in",
"sshcfg",
":",
"authparams",
"[",
"'proxy_command'",
"]",
"=",
"sshcfg",
"[",
"'proxycommand'",
"]",
"aiidauser",
"=",
"get_automatic_user",
"(",
")",
"authinfo",
"=",
"DbAuthInfo",
"(",
"dbcomputer",
"=",
"Computer",
".",
"get",
"(",
"self",
".",
"name",
")",
".",
"dbcomputer",
",",
"aiidauser",
"=",
"aiidauser",
")",
"authinfo",
".",
"set_auth_params",
"(",
"authparams",
")",
"authinfo",
".",
"save",
"(",
")",
"print",
"(",
"check_output",
"(",
"[",
"'verdi'",
",",
"'computer'",
",",
"'show'",
",",
"self",
".",
"name",
"]",
")",
")"
] | create DbAuthInfo | [
"create",
"DbAuthInfo"
] | train | https://github.com/aiidalab/aiidalab-widgets-base/blob/291a9b159eac902aee655862322670ec1b0cd5b1/aiidalab_widgets_base/computers.py#L501-L528 |
eumis/pyviews | setup.py | setup_package | def setup_package():
"""Package setup"""
setup(
name='pyviews',
version=_get_version(),
description='Base package for xml views',
long_description=_get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/eumis/pyviews',
author='eumis(Eugen Misievich)',
author_email='misievich@gmail.com',
license='MIT',
classifiers=[
# 2 - Pre-Alpha
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6'
],
python_requires='>=3.6',
keywords='binding pyviews python mvvm tkviews wxviews',
packages=find_packages(exclude=['*.tests'])) | python | def setup_package():
"""Package setup"""
setup(
name='pyviews',
version=_get_version(),
description='Base package for xml views',
long_description=_get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/eumis/pyviews',
author='eumis(Eugen Misievich)',
author_email='misievich@gmail.com',
license='MIT',
classifiers=[
# 2 - Pre-Alpha
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6'
],
python_requires='>=3.6',
keywords='binding pyviews python mvvm tkviews wxviews',
packages=find_packages(exclude=['*.tests'])) | [
"def",
"setup_package",
"(",
")",
":",
"setup",
"(",
"name",
"=",
"'pyviews'",
",",
"version",
"=",
"_get_version",
"(",
")",
",",
"description",
"=",
"'Base package for xml views'",
",",
"long_description",
"=",
"_get_long_description",
"(",
")",
",",
"long_description_content_type",
"=",
"'text/markdown'",
",",
"url",
"=",
"'https://github.com/eumis/pyviews'",
",",
"author",
"=",
"'eumis(Eugen Misievich)'",
",",
"author_email",
"=",
"'misievich@gmail.com'",
",",
"license",
"=",
"'MIT'",
",",
"classifiers",
"=",
"[",
"# 2 - Pre-Alpha",
"# 3 - Alpha",
"# 4 - Beta",
"# 5 - Production/Stable",
"'Development Status :: 5 - Production/Stable'",
",",
"'Intended Audience :: Developers'",
",",
"'Topic :: Software Development :: Libraries'",
",",
"'License :: OSI Approved :: MIT License'",
",",
"'Programming Language :: Python :: 3.6'",
"]",
",",
"python_requires",
"=",
"'>=3.6'",
",",
"keywords",
"=",
"'binding pyviews python mvvm tkviews wxviews'",
",",
"packages",
"=",
"find_packages",
"(",
"exclude",
"=",
"[",
"'*.tests'",
"]",
")",
")"
] | Package setup | [
"Package",
"setup"
] | train | https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/setup.py#L6-L31 |
kmedian/korr | korr/confusion.py | confusion | def confusion(a, p):
"""Confusion Matrix
- confusion matrix, error matrix, matching matrix (unsupervised)
- is a special case: 2x2 contingency table (xtab, RxC table)
- both dimensions are variables with the same classes/labels,
i.e. actual and predicted variables are binary [0,1]
Parameters:
-----------
a : ndarray
Actual values, binary [0,1]
p : ndarray
Predicted values, binary [0,1]
Returns:
--------
cm : ndarray
Confusion matrix
predicted=0 predicted=1
actual=0 tn fp
actual=1 fn tp
Example:
--------
import korr
cm = korr.confusion(a, p)
tn, fp, fn, tp = cm.ravel()
Alternatives:
-------------
import pandas as pd
cm = pd.crosstab(a, p)
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(a, p)
"""
m = a == p # matches (a=1, p=1) and (a=0, p=0)
f = np.logical_not(m) # xor (a=1, p=0) and (a=0, p=1)
tp = np.sum(np.logical_and(m, a)) # 11
tn = np.sum(np.logical_and(m, np.logical_not(a))) # 00
fn = np.sum(np.logical_and(f, a)) # 10
fp = np.sum(np.logical_and(f, p)) # 01
return np.array([[tn, fp], [fn, tp]]) | python | def confusion(a, p):
"""Confusion Matrix
- confusion matrix, error matrix, matching matrix (unsupervised)
- is a special case: 2x2 contingency table (xtab, RxC table)
- both dimensions are variables with the same classes/labels,
i.e. actual and predicted variables are binary [0,1]
Parameters:
-----------
a : ndarray
Actual values, binary [0,1]
p : ndarray
Predicted values, binary [0,1]
Returns:
--------
cm : ndarray
Confusion matrix
predicted=0 predicted=1
actual=0 tn fp
actual=1 fn tp
Example:
--------
import korr
cm = korr.confusion(a, p)
tn, fp, fn, tp = cm.ravel()
Alternatives:
-------------
import pandas as pd
cm = pd.crosstab(a, p)
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(a, p)
"""
m = a == p # matches (a=1, p=1) and (a=0, p=0)
f = np.logical_not(m) # xor (a=1, p=0) and (a=0, p=1)
tp = np.sum(np.logical_and(m, a)) # 11
tn = np.sum(np.logical_and(m, np.logical_not(a))) # 00
fn = np.sum(np.logical_and(f, a)) # 10
fp = np.sum(np.logical_and(f, p)) # 01
return np.array([[tn, fp], [fn, tp]]) | [
"def",
"confusion",
"(",
"a",
",",
"p",
")",
":",
"m",
"=",
"a",
"==",
"p",
"# matches (a=1, p=1) and (a=0, p=0)",
"f",
"=",
"np",
".",
"logical_not",
"(",
"m",
")",
"# xor (a=1, p=0) and (a=0, p=1)",
"tp",
"=",
"np",
".",
"sum",
"(",
"np",
".",
"logical_and",
"(",
"m",
",",
"a",
")",
")",
"# 11",
"tn",
"=",
"np",
".",
"sum",
"(",
"np",
".",
"logical_and",
"(",
"m",
",",
"np",
".",
"logical_not",
"(",
"a",
")",
")",
")",
"# 00",
"fn",
"=",
"np",
".",
"sum",
"(",
"np",
".",
"logical_and",
"(",
"f",
",",
"a",
")",
")",
"# 10",
"fp",
"=",
"np",
".",
"sum",
"(",
"np",
".",
"logical_and",
"(",
"f",
",",
"p",
")",
")",
"# 01",
"return",
"np",
".",
"array",
"(",
"[",
"[",
"tn",
",",
"fp",
"]",
",",
"[",
"fn",
",",
"tp",
"]",
"]",
")"
] | Confusion Matrix
- confusion matrix, error matrix, matching matrix (unsupervised)
- is a special case: 2x2 contingency table (xtab, RxC table)
- both dimensions are variables with the same classes/labels,
i.e. actual and predicted variables are binary [0,1]
Parameters:
-----------
a : ndarray
Actual values, binary [0,1]
p : ndarray
Predicted values, binary [0,1]
Returns:
--------
cm : ndarray
Confusion matrix
predicted=0 predicted=1
actual=0 tn fp
actual=1 fn tp
Example:
--------
import korr
cm = korr.confusion(a, p)
tn, fp, fn, tp = cm.ravel()
Alternatives:
-------------
import pandas as pd
cm = pd.crosstab(a, p)
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(a, p) | [
"Confusion",
"Matrix"
] | train | https://github.com/kmedian/korr/blob/4eb86fc14b1fc1b69204069b7753d115b327c937/korr/confusion.py#L4-L49 |
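
A runnable check of the confusion-matrix logic above, inlining the same numpy steps so nothing beyond numpy is required:

```python
import numpy as np

a = np.array([0, 0, 1, 1, 1, 0])  # actual
p = np.array([0, 1, 1, 0, 1, 0])  # predicted

m = a == p                 # matches (a=1, p=1) and (a=0, p=0)
f = np.logical_not(m)      # mismatches (a=1, p=0) and (a=0, p=1)
tp = np.sum(np.logical_and(m, a))                  # a=1, p=1
tn = np.sum(np.logical_and(m, np.logical_not(a)))  # a=0, p=0
fn = np.sum(np.logical_and(f, a))                  # a=1, p=0
fp = np.sum(np.logical_and(f, p))                  # a=0, p=1
cm = np.array([[tn, fp], [fn, tp]])
print(cm)
# [[2 1]
#  [1 2]]
tn, fp, fn, tp = cm.ravel()
```
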
internetarchive/doublethink | doublethink/orm.py | Document.load | def load(cls, rr, pk):
'''
Retrieves a document from the database, by primary key.
'''
if pk is None:
return None
d = rr.table(cls.table).get(pk).run()
if d is None:
return None
doc = cls(rr, d)
return doc | python | def load(cls, rr, pk):
'''
Retrieves a document from the database, by primary key.
'''
if pk is None:
return None
d = rr.table(cls.table).get(pk).run()
if d is None:
return None
doc = cls(rr, d)
return doc | [
"def",
"load",
"(",
"cls",
",",
"rr",
",",
"pk",
")",
":",
"if",
"pk",
"is",
"None",
":",
"return",
"None",
"d",
"=",
"rr",
".",
"table",
"(",
"cls",
".",
"table",
")",
".",
"get",
"(",
"pk",
")",
".",
"run",
"(",
")",
"if",
"d",
"is",
"None",
":",
"return",
"None",
"doc",
"=",
"cls",
"(",
"rr",
",",
"d",
")",
"return",
"doc"
] | Retrieves a document from the database, by primary key. | [
"Retrieves",
"a",
"document",
"from",
"the",
"database",
"by",
"primary",
"key",
"."
] | train | https://github.com/internetarchive/doublethink/blob/f7fc7da725c9b572d473c717b3dad9af98a7a2b4/doublethink/orm.py#L161-L171 |
internetarchive/doublethink | doublethink/orm.py | Document.table_ensure | def table_ensure(cls, rr):
'''
Creates the table if it doesn't exist.
'''
dbs = rr.db_list().run()
if not rr.dbname in dbs:
logging.info('creating rethinkdb database %s', repr(rr.dbname))
rr.db_create(rr.dbname).run()
tables = rr.table_list().run()
if not cls.table in tables:
logging.info(
'creating rethinkdb table %s in database %s',
repr(cls.table), repr(rr.dbname))
cls.table_create(rr) | python | def table_ensure(cls, rr):
'''
Creates the table if it doesn't exist.
'''
dbs = rr.db_list().run()
if not rr.dbname in dbs:
logging.info('creating rethinkdb database %s', repr(rr.dbname))
rr.db_create(rr.dbname).run()
tables = rr.table_list().run()
if not cls.table in tables:
logging.info(
'creating rethinkdb table %s in database %s',
repr(cls.table), repr(rr.dbname))
cls.table_create(rr) | [
"def",
"table_ensure",
"(",
"cls",
",",
"rr",
")",
":",
"dbs",
"=",
"rr",
".",
"db_list",
"(",
")",
".",
"run",
"(",
")",
"if",
"not",
"rr",
".",
"dbname",
"in",
"dbs",
":",
"logging",
".",
"info",
"(",
"'creating rethinkdb database %s'",
",",
"repr",
"(",
"rr",
".",
"dbname",
")",
")",
"rr",
".",
"db_create",
"(",
"rr",
".",
"dbname",
")",
".",
"run",
"(",
")",
"tables",
"=",
"rr",
".",
"table_list",
"(",
")",
".",
"run",
"(",
")",
"if",
"not",
"cls",
".",
"table",
"in",
"tables",
":",
"logging",
".",
"info",
"(",
"'creating rethinkdb table %s in database %s'",
",",
"repr",
"(",
"cls",
".",
"table",
")",
",",
"repr",
"(",
"rr",
".",
"dbname",
")",
")",
"cls",
".",
"table_create",
"(",
"rr",
")"
] | Creates the table if it doesn't exist. | [
"Creates",
"the",
"table",
"if",
"it",
"doesn",
"t",
"exist",
"."
] | train | https://github.com/internetarchive/doublethink/blob/f7fc7da725c9b572d473c717b3dad9af98a7a2b4/doublethink/orm.py#L182-L195 |
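
With the plain rethinkdb driver, the same ensure-exists dance looks roughly like the sketch below; the connection details and names are assumptions, and older (pre-2.4) driver versions expose the module-level `r` used here:

```python
import rethinkdb as r

conn = r.connect(host='localhost', port=28015)  # assumed local server
if 'mydb' not in r.db_list().run(conn):
    r.db_create('mydb').run(conn)
if 'docs' not in r.db('mydb').table_list().run(conn):
    r.db('mydb').table_create('docs').run(conn)
```
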
internetarchive/doublethink | doublethink/orm.py | Document.pk_field | def pk_field(self):
'''
Name of the primary key field as retrieved from rethinkdb table
metadata, 'id' by default. Should not be overridden. Override
`table_create` if you want to use a nonstandard field as the primary
key.
'''
if not self._pk:
try:
pk = self.rr.db('rethinkdb').table('table_config').filter({
'db': self.rr.dbname, 'name': self.table}).get_field(
'primary_key')[0].run()
self._pk = pk
except Exception as e:
raise Exception(
'problem determining primary key for table %s.%s: %s',
self.rr.dbname, self.table, e)
return self._pk | python | def pk_field(self):
'''
Name of the primary key field as retrieved from rethinkdb table
metadata, 'id' by default. Should not be overridden. Override
`table_create` if you want to use a nonstandard field as the primary
key.
'''
if not self._pk:
try:
pk = self.rr.db('rethinkdb').table('table_config').filter({
'db': self.rr.dbname, 'name': self.table}).get_field(
'primary_key')[0].run()
self._pk = pk
except Exception as e:
raise Exception(
'problem determining primary key for table %s.%s: %s',
self.rr.dbname, self.table, e)
return self._pk | [
"def",
"pk_field",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_pk",
":",
"try",
":",
"pk",
"=",
"self",
".",
"rr",
".",
"db",
"(",
"'rethinkdb'",
")",
".",
"table",
"(",
"'table_config'",
")",
".",
"filter",
"(",
"{",
"'db'",
":",
"self",
".",
"rr",
".",
"dbname",
",",
"'name'",
":",
"self",
".",
"table",
"}",
")",
".",
"get_field",
"(",
"'primary_key'",
")",
"[",
"0",
"]",
".",
"run",
"(",
")",
"self",
".",
"_pk",
"=",
"pk",
"except",
"Exception",
"as",
"e",
":",
"raise",
"Exception",
"(",
"'problem determining primary key for table %s.%s: %s'",
",",
"self",
".",
"rr",
".",
"dbname",
",",
"self",
".",
"table",
",",
"e",
")",
"return",
"self",
".",
"_pk"
] | Name of the primary key field as retrieved from rethinkdb table
metadata, 'id' by default. Should not be overridden. Override
`table_create` if you want to use a nonstandard field as the primary
key. | [
"Name",
"of",
"the",
"primary",
"key",
"field",
"as",
"retrieved",
"from",
"rethinkdb",
"table",
"metadata",
"id",
"by",
"default",
".",
"Should",
"not",
"be",
"overridden",
".",
"Override",
"table_create",
"if",
"you",
"want",
"to",
"use",
"a",
"nonstandard",
"field",
"as",
"the",
"primary",
"key",
"."
] | train | https://github.com/internetarchive/doublethink/blob/f7fc7da725c9b572d473c717b3dad9af98a7a2b4/doublethink/orm.py#L268-L285 |
internetarchive/doublethink | doublethink/orm.py | Document.save | def save(self):
'''
Persist changes to rethinkdb. Updates only the fields that have
changed. Performs insert rather than update if the document has no
primary key or if the primary key is absent from the database.
If there have been any changes to nested fields, updates the first
level attribute. For example, if foo['bar']['baz']['quux'] has changed,
all of foo['bar'] is replaced, but foo['something_else'] is not
touched.
'''
should_insert = False
try:
self[self.pk_field] # raises KeyError if missing
if self._updates:
# r.literal() to replace, not merge with, nested fields
updates = {field: r.literal(self._updates[field])
for field in self._updates}
query = self.rr.table(self.table).get(
self.pk_value).update(updates)
result = query.run()
if result['skipped']: # primary key not found
should_insert = True
elif result['errors'] or result['deleted']:
raise Exception(
'unexpected result %s from rethinkdb query %s' % (
result, query))
if not should_insert and self._deletes:
query = self.rr.table(self.table).get(self.pk_value).replace(
r.row.without(self._deletes))
result = query.run()
if result['errors']: # primary key not found
should_insert = True
elif result['replaced'] != 1:
raise Exception(
'unexpected result %s from rethinkdb query %s' % (
result, query))
except KeyError:
should_insert = True
if should_insert:
query = self.rr.table(self.table).insert(self)
result = query.run()
if result['inserted'] != 1:
raise Exception(
'unexpected result %s from rethinkdb query %s' % (
result, query))
if 'generated_keys' in result:
dict.__setitem__(
self, self.pk_field, result['generated_keys'][0])
self._clear_updates() | python | def save(self):
'''
Persist changes to rethinkdb. Updates only the fields that have
changed. Performs insert rather than update if the document has no
primary key or if the primary key is absent from the database.
If there have been any changes to nested fields, updates the first
level attribute. For example, if foo['bar']['baz']['quux'] has changed,
all of foo['bar'] is replaced, but foo['something_else'] is not
touched.
'''
should_insert = False
try:
self[self.pk_field] # raises KeyError if missing
if self._updates:
# r.literal() to replace, not merge with, nested fields
updates = {field: r.literal(self._updates[field])
for field in self._updates}
query = self.rr.table(self.table).get(
self.pk_value).update(updates)
result = query.run()
if result['skipped']: # primary key not found
should_insert = True
elif result['errors'] or result['deleted']:
raise Exception(
'unexpected result %s from rethinkdb query %s' % (
result, query))
if not should_insert and self._deletes:
query = self.rr.table(self.table).get(self.pk_value).replace(
r.row.without(self._deletes))
result = query.run()
if result['errors']: # primary key not found
should_insert = True
elif result['replaced'] != 1:
raise Exception(
'unexpected result %s from rethinkdb query %s' % (
result, query))
except KeyError:
should_insert = True
if should_insert:
query = self.rr.table(self.table).insert(self)
result = query.run()
if result['inserted'] != 1:
raise Exception(
'unexpected result %s from rethinkdb query %s' % (
result, query))
if 'generated_keys' in result:
dict.__setitem__(
self, self.pk_field, result['generated_keys'][0])
self._clear_updates() | [
"def",
"save",
"(",
"self",
")",
":",
"should_insert",
"=",
"False",
"try",
":",
"self",
"[",
"self",
".",
"pk_field",
"]",
"# raises KeyError if missing",
"if",
"self",
".",
"_updates",
":",
"# r.literal() to replace, not merge with, nested fields",
"updates",
"=",
"{",
"field",
":",
"r",
".",
"literal",
"(",
"self",
".",
"_updates",
"[",
"field",
"]",
")",
"for",
"field",
"in",
"self",
".",
"_updates",
"}",
"query",
"=",
"self",
".",
"rr",
".",
"table",
"(",
"self",
".",
"table",
")",
".",
"get",
"(",
"self",
".",
"pk_value",
")",
".",
"update",
"(",
"updates",
")",
"result",
"=",
"query",
".",
"run",
"(",
")",
"if",
"result",
"[",
"'skipped'",
"]",
":",
"# primary key not found",
"should_insert",
"=",
"True",
"elif",
"result",
"[",
"'errors'",
"]",
"or",
"result",
"[",
"'deleted'",
"]",
":",
"raise",
"Exception",
"(",
"'unexpected result %s from rethinkdb query %s'",
"%",
"(",
"result",
",",
"query",
")",
")",
"if",
"not",
"should_insert",
"and",
"self",
".",
"_deletes",
":",
"query",
"=",
"self",
".",
"rr",
".",
"table",
"(",
"self",
".",
"table",
")",
".",
"get",
"(",
"self",
".",
"pk_value",
")",
".",
"replace",
"(",
"r",
".",
"row",
".",
"without",
"(",
"self",
".",
"_deletes",
")",
")",
"result",
"=",
"query",
".",
"run",
"(",
")",
"if",
"result",
"[",
"'errors'",
"]",
":",
"# primary key not found",
"should_insert",
"=",
"True",
"elif",
"result",
"[",
"'replaced'",
"]",
"!=",
"1",
":",
"raise",
"Exception",
"(",
"'unexpected result %s from rethinkdb query %s'",
"%",
"(",
"result",
",",
"query",
")",
")",
"except",
"KeyError",
":",
"should_insert",
"=",
"True",
"if",
"should_insert",
":",
"query",
"=",
"self",
".",
"rr",
".",
"table",
"(",
"self",
".",
"table",
")",
".",
"insert",
"(",
"self",
")",
"result",
"=",
"query",
".",
"run",
"(",
")",
"if",
"result",
"[",
"'inserted'",
"]",
"!=",
"1",
":",
"raise",
"Exception",
"(",
"'unexpected result %s from rethinkdb query %s'",
"%",
"(",
"result",
",",
"query",
")",
")",
"if",
"'generated_keys'",
"in",
"result",
":",
"dict",
".",
"__setitem__",
"(",
"self",
",",
"self",
".",
"pk_field",
",",
"result",
"[",
"'generated_keys'",
"]",
"[",
"0",
"]",
")",
"self",
".",
"_clear_updates",
"(",
")"
] | Persist changes to rethinkdb. Updates only the fields that have
changed. Performs insert rather than update if the document has no
primary key or if the primary key is absent from the database.
If there have been any changes to nested fields, updates the first
level attribute. For example, if foo['bar']['baz']['quux'] has changed,
all of foo['bar'] is replaced, but foo['something_else'] is not
touched. | [
"Persist",
"changes",
"to",
"rethinkdb",
".",
"Updates",
"only",
"the",
"fields",
"that",
"have",
"changed",
".",
"Performs",
"insert",
"rather",
"than",
"update",
"if",
"the",
"document",
"has",
"no",
"primary",
"key",
"or",
"if",
"the",
"primary",
"key",
"is",
"absent",
"from",
"the",
"database",
"."
] | train | https://github.com/internetarchive/doublethink/blob/f7fc7da725c9b572d473c717b3dad9af98a7a2b4/doublethink/orm.py#L302-L353 |
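
A hedged end-to-end sketch of the load → mutate → save flow; the `Rethinker` setup, table name, and the assumption that item assignment is tracked as a partial update are illustrative, not confirmed beyond what the methods above show:

```python
import doublethink

rr = doublethink.Rethinker(servers=['localhost'], db='mydb')  # assumed setup

class Job(doublethink.Document):
    table = 'jobs'

Job.table_ensure(rr)              # create db/table if missing
job = Job(rr, {'status': 'new'})  # no pk yet
job.save()                        # insert; generated pk is filled in
job['status'] = 'running'         # assumed to be tracked as a partial update
job.save()                        # updates only the changed field
same = Job.load(rr, job.pk_value)
```
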
internetarchive/doublethink | doublethink/orm.py | Document.refresh | def refresh(self):
'''
Refresh the document from the database.
'''
d = self.rr.table(self.table).get(self.pk_value).run()
if d is None:
raise KeyError
for k in d:
dict.__setitem__(
self, k, watch(d[k], callback=self._updated, field=k)) | python | def refresh(self):
'''
Refresh the document from the database.
'''
d = self.rr.table(self.table).get(self.pk_value).run()
if d is None:
raise KeyError
for k in d:
dict.__setitem__(
self, k, watch(d[k], callback=self._updated, field=k)) | [
"def",
"refresh",
"(",
"self",
")",
":",
"d",
"=",
"self",
".",
"rr",
".",
"table",
"(",
"self",
".",
"table",
")",
".",
"get",
"(",
"self",
".",
"pk_value",
")",
".",
"run",
"(",
")",
"if",
"d",
"is",
"None",
":",
"raise",
"KeyError",
"for",
"k",
"in",
"d",
":",
"dict",
".",
"__setitem__",
"(",
"self",
",",
"k",
",",
"watch",
"(",
"d",
"[",
"k",
"]",
",",
"callback",
"=",
"self",
".",
"_updated",
",",
"field",
"=",
"k",
")",
")"
] | Refresh the document from the database. | [
"Refresh",
"the",
"document",
"from",
"the",
"database",
"."
] | train | https://github.com/internetarchive/doublethink/blob/f7fc7da725c9b572d473c717b3dad9af98a7a2b4/doublethink/orm.py#L355-L364 |
eumis/pyviews | pyviews/core/xml.py | Parser.parse | def parse(self, xml_file, view_name=None) -> XmlNode:
"""Parses xml file with xml_path and returns XmlNode"""
self._setup_parser()
try:
self._view_name = view_name
self._parser.ParseFile(xml_file)
except ExpatError as error:
# pylint: disable=E1101
raise XmlError(errors.messages[error.code], ViewInfo(view_name, error.lineno))
root = self._root
self._reset()
return root | python | def parse(self, xml_file, view_name=None) -> XmlNode:
"""Parses xml file with xml_path and returns XmlNode"""
self._setup_parser()
try:
self._view_name = view_name
self._parser.ParseFile(xml_file)
except ExpatError as error:
# pylint: disable=E1101
raise XmlError(errors.messages[error.code], ViewInfo(view_name, error.lineno))
root = self._root
self._reset()
return root | [
"def",
"parse",
"(",
"self",
",",
"xml_file",
",",
"view_name",
"=",
"None",
")",
"->",
"XmlNode",
":",
"self",
".",
"_setup_parser",
"(",
")",
"try",
":",
"self",
".",
"_view_name",
"=",
"view_name",
"self",
".",
"_parser",
".",
"ParseFile",
"(",
"xml_file",
")",
"except",
"ExpatError",
"as",
"error",
":",
"# pylint: disable=E1101",
"raise",
"XmlError",
"(",
"errors",
".",
"messages",
"[",
"error",
".",
"code",
"]",
",",
"ViewInfo",
"(",
"view_name",
",",
"error",
".",
"lineno",
")",
")",
"root",
"=",
"self",
".",
"_root",
"self",
".",
"_reset",
"(",
")",
"return",
"root"
] | Parses xml file and returns XmlNode | [
"Parses",
"xml",
"file",
"with",
"xml_path",
"and",
"returns",
"XmlNode"
] | train | https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/core/xml.py#L111-L123 |
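
Expat's `ParseFile` expects a binary file object, so usage would look roughly like the sketch below; the no-argument constructor, file name, and view name are assumptions:

```python
from pyviews.core.xml import Parser

parser = Parser()
with open('app.xml', 'rb') as xml_file:  # binary mode for expat
    root = parser.parse(xml_file, view_name='app')
print(root)  # XmlNode tree for the document root
```
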