Dataset schema (column name: type and observed value-length range):

repository_name: string, length 7 to 55
func_path_in_repository: string, length 4 to 223
func_name: string, length 1 to 134
whole_func_string: string, length 75 to 104k
language: string, 1 distinct value
func_code_string: string, length 75 to 104k
func_code_tokens: sequence, length 19 to 28.4k
func_documentation_string: string, length 1 to 46.9k
func_documentation_tokens: sequence, length 1 to 1.97k
split_name: string, 1 distinct value
func_code_url: string, length 87 to 315
SetBased/py-stratum
pystratum/DocBlockReflection.py
DocBlockReflection.__clean_doc_block
python

def __clean_doc_block(self):
    """
    Cleans the DocBlock from leading and trailing white space and comment tokens.
    """
    # Return immediately if the DockBlock is empty.
    if not self._comment:
        return

    for i in range(1, len(self._comment) - 1):
        self._comment[i] = re.sub(r'^\s*\*', '', self._comment[i])

    self._comment[0] = re.sub(r'^\s*/\*\*', '', self._comment[0])
    self._comment[-1] = re.sub(r'\*/\s*$', '', self._comment[-1])

    for i, line in enumerate(self._comment):
        self._comment[i] = line.strip()

    self._comment = self.__remove_leading_empty_lines(self._comment)
    self._comment = self.__remove_trailing_empty_lines(self._comment)
Cleans the DocBlock from leading and trailing white space and comment tokens.
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/DocBlockReflection.py#L125-L144
SetBased/py-stratum
pystratum/DocBlockReflection.py
DocBlockReflection.__extract_description
python

def __extract_description(self):
    """
    Extracts the description from the DocBlock. The description start at the first line and stops at the first tag
    or the end of the DocBlock.
    """
    tmp = list()
    for line in self._comment:
        if len(line) >= 1 and line[0] == '@':
            break

        tmp.append(line)

    tmp = self.__remove_trailing_empty_lines(tmp)

    self._description = os.linesep.join(tmp)
Extracts the description from the DocBlock. The description start at the first line and stops at the first tag or the end of the DocBlock.
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/DocBlockReflection.py#L147-L161
SetBased/py-stratum
pystratum/DocBlockReflection.py
DocBlockReflection.__extract_tags
python

def __extract_tags(self):
    """
    Extract tags from the DocBlock.
    """
    tags = list()
    current = None
    for line in self._comment:
        parts = re.match(r'^@(\w+)', line)
        if parts:
            current = (parts.group(1), list())
            tags.append(current)

        if current:
            if line == '':
                current = None
            else:
                current[1].append(line)

    for tag in tags:
        self._tags.append((tag[0], os.linesep.join(tag[1])))
Extract tags from the DocBlock.
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/DocBlockReflection.py#L164-L183
SetBased/py-stratum
pystratum/command/ConstantsCommand.py
ConstantsCommand.handle
python

def handle(self):
    """
    Executes constants command when PyStratumCommand is activated.
    """
    self.output = PyStratumStyle(self.input, self.output)

    config_file = self.input.get_argument('config_file')

    self.run_command(config_file)
Executes constants command when PyStratumCommand is activated.
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/command/ConstantsCommand.py#L28-L35
SetBased/py-stratum
pystratum/command/ConstantsCommand.py
ConstantsCommand.run_command
python

def run_command(self, config_file):
    """
    :param str config_file: The name of config file.
    """
    config = configparser.ConfigParser()
    config.read(config_file)

    rdbms = config.get('database', 'rdbms').lower()
    label_regex = config.get('constants', 'label_regex')

    constants = self.create_constants(rdbms)
    constants.main(config_file, label_regex)
:param str config_file: The name of config file.
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/command/ConstantsCommand.py#L38-L50
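A sketch of the kind of config file run_command reads; the file name and option values are illustrative, and only the database.rdbms and constants.label_regex options are actually consumed by the code above:

import configparser

# Hypothetical config for the constants command; values are made up.
config = configparser.ConfigParser()
config['database'] = {'rdbms': 'mysql'}
config['constants'] = {'label_regex': '[A-Z_]+'}

with open('constants.cfg', 'w') as handle:  # illustrative file name
    config.write(handle)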
SetBased/py-stratum
pystratum/command/ConstantsCommand.py
ConstantsCommand.create_constants
python

def create_constants(self, rdbms):
    """
    Factory for creating a Constants objects (i.e. objects for creating constants based on column widths, and auto
    increment columns and labels).

    :param str rdbms: The target RDBMS (i.e. mysql, mssql or pgsql).

    :rtype: pystratum.Constants.Constants
    """
    # Note: We load modules and classes dynamically such that on the end user's system only the required modules
    #       and other dependencies for the targeted RDBMS must be installed (and required modules and other
    #       dependencies for the other RDBMSs are not required).

    if rdbms == 'mysql':
        module = locate('pystratum_mysql.MySqlConstants')
        return module.MySqlConstants(self.output)

    if rdbms == 'mssql':
        module = locate('pystratum_mssql.MsSqlConstants')
        return module.MsSqlConstants(self.output)

    if rdbms == 'pgsql':
        module = locate('pystratum_pgsql.PgSqlConstants')
        return module.PgSqlConstants(self.output)

    raise Exception("Unknown RDBMS '{0!s}'.".format(rdbms))
Factory for creating a Constants objects (i.e. objects for creating constants based on column widths, and auto increment columns and labels). :param str rdbms: The target RDBMS (i.e. mysql, mssql or pgsql). :rtype: pystratum.Constants.Constants
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/command/ConstantsCommand.py#L53-L78
launchdarkly/relayCommander
relay_commander/redis_wrapper.py
RedisWrapper._format_key_name
python

def _format_key_name(self) -> str:
    """Return formatted redis key name."""
    key_name = 'ld:{0}:{1}:features'.format(
        self.project_key,
        self.environment_key
    )
    return key_name
Return formatted redis key name.
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/redis_wrapper.py#L59-L65
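For illustration, the key produced by _format_key_name with made-up project and environment keys:

# Hypothetical project_key and environment_key values.
key_name = 'ld:{0}:{1}:features'.format('my-project', 'production')
print(key_name)  # ld:my-project:production:features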
launchdarkly/relayCommander
relay_commander/redis_wrapper.py
RedisWrapper.connection_string_parser
python

def connection_string_parser(uri: str) -> list:
    """
    Parse Connection string to extract host and port.

    :param uri: full URI for redis connection in the form of host:port

    :returns: list of RedisConnection objects
    """
    redis_connections = []
    raw_connections = uri.split(',')
    connections = [
        connection for connection in raw_connections if len(connection) > 0
    ]

    for connection in connections:
        raw_connection = connection.split(':')
        if len(raw_connection) == 1:
            host = raw_connection[0].strip()
            port = _DEFAULT_REDIS_PORT
        elif len(raw_connection) == 2:
            host = raw_connection[0].strip()
            port = int(raw_connection[1])
        else:
            raise RuntimeError(
                "Unable to parse redis connection string: {0}".format(
                    raw_connection
                )
            )
        redis_connection = _RedisConnection(host, port)
        redis_connections.append(redis_connection)

    return redis_connections
Parse Connection string to extract host and port. :param uri: full URI for redis connection in the form of host:port :returns: list of RedisConnection objects
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/redis_wrapper.py#L68-L100
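A minimal usage sketch for the parser above; the URI is made up, and the static-method access as well as the host/port attribute names of the returned objects are assumptions, not confirmed by this record:

from relay_commander.redis_wrapper import RedisWrapper

# 'redis-2' has no explicit port and should fall back to _DEFAULT_REDIS_PORT.
connections = RedisWrapper.connection_string_parser('redis-1:6379,redis-2')
for conn in connections:
    print(conn.host, conn.port)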
launchdarkly/relayCommander
relay_commander/redis_wrapper.py
RedisWrapper.get_flag_record
python

def get_flag_record(self, feature_key: str) -> str:
    """Get feature flag record from redis.

    :param feature_key: key for feature flag

    :return: value of feature flag key in redis.
    :raises: KeyError if key is not found.
    """
    key_name = self._format_key_name()
    flag = self.redis.hget(key_name, feature_key)

    if flag is None:
        raise KeyError('Redis key: {0} not found.'.format(key_name))

    return flag
Get feature flag record from redis. :param feature_key: key for feature flag :return: value of feature flag key in redis. :raises: KeyError if key is not found.
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/redis_wrapper.py#L102-L117
launchdarkly/relayCommander
relay_commander/redis_wrapper.py
RedisWrapper.update_flag_record
python

def update_flag_record(self, state: str, feature_key: str) -> None:
    """Update redis record with new state.

    :param state: state for feature flag.
    :param feature_key: key for feature flag.
    """
    key_name = self._format_key_name()

    try:
        parsed_flag = json.loads(self.get_flag_record(feature_key).decode('utf-8'))
        parsed_flag['on'] = state
        parsed_flag['version'] += 1

        updated_flag = json.dumps(parsed_flag).encode('utf-8')
    except KeyError as ex:
        LOG.error(ex)
        sys.exit(1)

    LOG.info('updating %s to %s', feature_key, state)
    self.redis.hset(key_name, feature_key, updated_flag)
Update redis record with new state. :param state: state for feature flag. :param feature_key: key for feature flag.
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/redis_wrapper.py#L119-L137
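A self-contained sketch of the transformation update_flag_record applies to a stored record; the JSON shape is inferred from the fields the method touches and is otherwise hypothetical:

import json

# Hypothetical flag record as it might sit in the redis hash.
stored = b'{"key": "new-dashboard", "on": true, "version": 7}'

parsed = json.loads(stored.decode('utf-8'))
parsed['on'] = False       # the new state
parsed['version'] += 1     # version bump, as in update_flag_record
print(json.dumps(parsed))  # {"key": "new-dashboard", "on": false, "version": 8}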
ClericPy/torequests
torequests/dummy.py
Asyncme
python

def Asyncme(func, n=None, interval=0, default_callback=None, loop=None):
    """Wrap coro_function into the function return NewTask."""
    return coros(n, interval, default_callback, loop)(func)
Wrap coro_function into the function return NewTask.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L332-L334
ClericPy/torequests
torequests/dummy.py
coros
python

def coros(n=None, interval=0, default_callback=None, loop=None):
    """Decorator for wrap coro_function into the function return NewTask."""
    submitter = Loop(
        n=n, interval=interval, default_callback=default_callback, loop=loop
    ).submitter

    return submitter
Decorator for wrap coro_function into the function return NewTask.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L337-L343
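A minimal usage sketch for the coros decorator, assuming torequests is installed; the concurrency limit, sleep time and coroutine are arbitrary:

import asyncio
from torequests.dummy import coros

@coros(n=2, interval=0)  # at most 2 concurrent runs of the wrapped coroutine
async def fetch(i):
    await asyncio.sleep(0.1)
    return i

# Each call returns a NewTask; .x blocks until the task finishes.
tasks = [fetch(i) for i in range(5)]
print([task.x for task in tasks])  # [0, 1, 2, 3, 4]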
ClericPy/torequests
torequests/dummy.py
NewTask.wrap_callback
python

def wrap_callback(function):
    """Set the callback's result as self._callback_result."""

    @wraps(function)
    def wrapped(task):
        task._callback_result = function(task)
        return task._callback_result

    return wrapped
Set the callback's result as self._callback_result.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L65-L73
ClericPy/torequests
torequests/dummy.py
NewTask._schedule_callbacks
python

def _schedule_callbacks(self, clear_cb=False):
    """Recording the task_end_time and task_cost_time, and prevent
    super()._schedule_callbacks to clean self._callbacks."""
    self.task_end_time = time.time()
    self.task_cost_time = self.task_end_time - self.task_start_time
    callbacks = self._callbacks[:]
    if not callbacks:
        return
    if clear_cb:
        self._callbacks[:] = []
    for callback in callbacks:
        self._loop.call_soon(callback, self, *self.extra_args)
Recording the task_end_time and task_cost_time, and prevent super()._schedule_callbacks to clean self._callbacks.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L75-L86
ClericPy/torequests
torequests/dummy.py
NewTask.callback_result
python

def callback_result(self):
    """Blocking until the task finish and return the callback_result.until"""
    if self._state == self._PENDING:
        self._loop.run_until_complete(self)
    if self._callbacks:
        result = self._callback_result
    else:
        result = self.result()
    return result
Blocking until the task finish and return the callback_result.until
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L99-L107
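A sketch contrasting x and callback_result, assuming torequests is installed; the coroutine and callback are illustrative:

import asyncio
from torequests.dummy import Loop

loop = Loop()

async def compute():
    await asyncio.sleep(0.1)
    return 21

# The callback receives the finished task and post-processes its result.
task = loop.submit(compute(), callback=lambda t: t.result() * 2)
print(task.x)                # 21, the coroutine's own result
print(task.callback_result)  # 42, the wrapped callback's result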
ClericPy/torequests
torequests/dummy.py
NewTask.x
python

def x(self):
    """Blocking until the task finish and return the self.result()"""
    if self._state == self._PENDING:
        self._loop.run_until_complete(self)
    return self.result()
Blocking until the task finish and return the self.result()
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L110-L114
ClericPy/torequests
torequests/dummy.py
Loop._wrap_coro_function_with_sem
python

def _wrap_coro_function_with_sem(self, coro_func):
    """Decorator set the coro_function has sem/interval control."""
    sem = self.frequency.sem
    interval = self.frequency.interval

    @wraps(coro_func)
    async def new_coro_func(*args, **kwargs):
        if sem:
            async with sem:
                result = await coro_func(*args, **kwargs)
                if interval:
                    await asyncio.sleep(interval)
                return result
        else:
            result = await coro_func(*args, **kwargs)
            if interval:
                await asyncio.sleep(interval)
            return result

    return new_coro_func
Decorator set the coro_function has sem/interval control.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L155-L174
ClericPy/torequests
torequests/dummy.py
Loop.run_in_executor
python

def run_in_executor(self, executor=None, func=None, *args):
    """If `kwargs` needed, try like this: func=lambda: foo(*args, **kwargs)"""
    return self.loop.run_in_executor(executor, func, *args)
If `kwargs` needed, try like this: func=lambda: foo(*args, **kwargs)
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L176-L178
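The docstring's hint about keyword arguments, spelled out as a small sketch; the function and its arguments are made up:

from torequests.dummy import Loop

loop = Loop()

def greet(name, punctuation='!'):
    return 'hello ' + name + punctuation

# Positional args can be passed directly; kwargs need the lambda trick.
future = loop.run_in_executor(None, lambda: greet('world', punctuation='?'))
print(loop.loop.run_until_complete(future))  # hello world?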
ClericPy/torequests
torequests/dummy.py
Loop.run_in_thread_pool
python

def run_in_thread_pool(self, pool_size=None, func=None, *args):
    """If `kwargs` needed, try like this: func=lambda: foo(*args, **kwargs)"""
    executor = Pool(pool_size)
    return self.loop.run_in_executor(executor, func, *args)
If `kwargs` needed, try like this: func=lambda: foo(*args, **kwargs)
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L180-L183
ClericPy/torequests
torequests/dummy.py
Loop.run_in_process_pool
python

def run_in_process_pool(self, pool_size=None, func=None, *args):
    """If `kwargs` needed, try like this: func=lambda: foo(*args, **kwargs)"""
    executor = ProcessPool(pool_size)
    return self.loop.run_in_executor(executor, func, *args)
If `kwargs` needed, try like this: func=lambda: foo(*args, **kwargs)
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L185-L188
ClericPy/torequests
torequests/dummy.py
Loop.run_coroutine_threadsafe
python

def run_coroutine_threadsafe(self, coro, loop=None, callback=None):
    """Be used when loop running in a single non-main thread."""
    if not asyncio.iscoroutine(coro):
        raise TypeError("A await in coroutines. object is required")
    loop = loop or self.loop
    future = NewFuture(callback=callback)

    def callback_func():
        try:
            asyncio.futures._chain_future(NewTask(coro, loop=loop), future)
        except Exception as exc:
            if future.set_running_or_notify_cancel():
                future.set_exception(exc)
            raise

    loop.call_soon_threadsafe(callback_func)
    return future
Be used when loop running in a single non-main thread.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L190-L206
ClericPy/torequests
torequests/dummy.py
Loop.apply
python

def apply(self, coro_function, args=None, kwargs=None, callback=None):
    """Submit a coro_function(*args, **kwargs) as NewTask to self.loop with loop.frequncy control.

    ::

        from torequests.dummy import Loop
        import asyncio
        loop = Loop()

        async def test(i):
            result = await asyncio.sleep(1)
            return (loop.frequency, i)

        task = loop.apply(test, [1])
        print(task)
        # loop.x can be ignore
        loop.x
        print(task.x)

        # <NewTask pending coro=<new_coro_func() running at torequests/torequests/dummy.py:154>>
        # (Frequency(sem=<0/0>, interval=0, name=loop_sem), 1)
    """
    args = args or ()
    kwargs = kwargs or {}
    coro = self._wrap_coro_function_with_sem(coro_function)(*args, **kwargs)
    return self.submit(coro, callback=callback)
Submit a coro_function(*args, **kwargs) as NewTask to self.loop with loop.frequncy control. :: from torequests.dummy import Loop import asyncio loop = Loop() async def test(i): result = await asyncio.sleep(1) return (loop.frequency, i) task = loop.apply(test, [1]) print(task) # loop.x can be ignore loop.x print(task.x) # <NewTask pending coro=<new_coro_func() running at torequests/torequests/dummy.py:154>> # (Frequency(sem=<0/0>, interval=0, name=loop_sem), 1)
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L208-L233
ClericPy/torequests
torequests/dummy.py
Loop.submit
python

def submit(self, coro, callback=None):
    """Submit a coro as NewTask to self.loop without loop.frequncy control.

    ::

        from torequests.dummy import Loop
        import asyncio
        loop = Loop()

        async def test(i):
            result = await asyncio.sleep(1)
            return (loop.frequency, i)

        coro = test(0)
        task = loop.submit(coro)
        print(task)
        # loop.x can be ignore
        loop.x
        print(task.x)

        # <NewTask pending coro=<test() running at torequests/temp_code.py:58>>
        # (Frequency(sem=<0/0>, interval=0, name=loop_sem), 0)
    """
    callback = callback or self.default_callback
    if self.async_running:
        return self.run_coroutine_threadsafe(coro, callback=callback)
    else:
        return NewTask(coro, loop=self.loop, callback=callback)
Submit a coro as NewTask to self.loop without loop.frequncy control. :: from torequests.dummy import Loop import asyncio loop = Loop() async def test(i): result = await asyncio.sleep(1) return (loop.frequency, i) coro = test(0) task = loop.submit(coro) print(task) # loop.x can be ignore loop.x print(task.x) # <NewTask pending coro=<test() running at torequests/temp_code.py:58>> # (Frequency(sem=<0/0>, interval=0, name=loop_sem), 0)
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L235-L262
ClericPy/torequests
torequests/dummy.py
Loop.submitter
python

def submitter(self, f):
    """Decorator to submit a coro-function as NewTask to self.loop with sem control.
    Use default_callback frequency of loop."""
    f = self._wrap_coro_function_with_sem(f)

    @wraps(f)
    def wrapped(*args, **kwargs):
        return self.submit(f(*args, **kwargs))

    return wrapped
Decorator to submit a coro-function as NewTask to self.loop with sem control. Use default_callback frequency of loop.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L264-L273
ClericPy/torequests
torequests/dummy.py
Loop.todo_tasks
python

def todo_tasks(self):
    """Return tasks in loop which its state is pending."""
    tasks = [task for task in self.all_tasks if task._state == NewTask._PENDING]
    return tasks
Return tasks in loop which its state is pending.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L281-L284
ClericPy/torequests
torequests/dummy.py
Loop.done_tasks
python

def done_tasks(self):
    """Return tasks in loop which its state is not pending."""
    tasks = [task for task in self.all_tasks if task._state != NewTask._PENDING]
    return tasks
Return tasks in loop which its state is not pending.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L287-L290
ClericPy/torequests
torequests/dummy.py
Loop.run
python

def run(self, tasks=None, timeout=None):
    """Block, run loop until all tasks completed."""
    timeout = self._timeout if timeout is None else timeout
    if self.async_running or self.loop.is_running():
        return self.wait_all_tasks_done(timeout)
    else:
        tasks = tasks or self.todo_tasks
        return self.loop.run_until_complete(asyncio.gather(*tasks, loop=self.loop))
Block, run loop until all tasks completed.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L292-L299
ClericPy/torequests
torequests/dummy.py
Loop.wait_all_tasks_done
python

def wait_all_tasks_done(self, timeout=None, delay=0.5, interval=0.1):
    """Block, only be used while loop running in a single non-main thread."""
    timeout = self._timeout if timeout is None else timeout
    timeout = timeout or float("inf")
    start_time = time.time()
    time.sleep(delay)
    while 1:
        if not self.todo_tasks:
            return self.all_tasks
        if time.time() - start_time > timeout:
            return self.done_tasks
        time.sleep(interval)
Block, only be used while loop running in a single non-main thread.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L301-L312
ClericPy/torequests
torequests/dummy.py
Loop.pendings
python

async def pendings(self, tasks=None):
    """Used for await in coroutines.
    `await loop.pendings()`
    `await loop.pendings(tasks)`
    """
    tasks = tasks or self.todo_tasks
    await asyncio.gather(*tasks, loop=self.loop)
Used for await in coroutines. `await loop.pendings()` `await loop.pendings(tasks)`
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L323-L329
ClericPy/torequests
torequests/dummy.py
Requests.ensure_frequencies
python

def ensure_frequencies(self, frequencies):
    """Ensure frequencies is dict of host-frequencies."""
    if not frequencies:
        return {}
    if not isinstance(frequencies, dict):
        raise ValueError("frequencies should be dict")
    frequencies = {
        host: Frequency.ensure_frequency(frequencies[host]) for host in frequencies
    }
    return frequencies
Ensure frequencies is dict of host-frequencies.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L493-L502
ClericPy/torequests
torequests/dummy.py
Requests.set_frequency
python

def set_frequency(self, host, sem=None, interval=None):
    """Set frequency for host with sem and interval."""
    # single sem or global sem
    sem = sem or self.sem
    interval = self.interval if interval is None else interval
    frequency = Frequency(sem, interval, host)
    frequencies = {host: frequency}
    self.update_frequency(frequencies)
    return frequency
Set frequency for host with sem and interval.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L504-L512
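A usage sketch for set_frequency; it assumes Requests() can be constructed with its defaults and that an integer is accepted for sem, neither of which is confirmed by this record:

from torequests.dummy import Requests

req = Requests()  # assumed to work with default arguments

# Limit example.com to 2 concurrent requests, 1 second apart.
req.set_frequency('example.com', sem=2, interval=1)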
ClericPy/torequests
torequests/dummy.py
Requests.close
python

def close(self):
    """Should be closed[explicit] while using external session or connector,
    instead of close by self.__del__."""
    try:
        if not self.session.closed:
            if self.session._connector_owner:
                self.session._connector.close()
            self.session._connector = None
    except Exception as e:
        Config.dummy_logger.error("can not close session for: %s" % e)
Should be closed[explicit] while using external session or connector, instead of close by self.__del__.
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/dummy.py#L615-L624
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver.openstack_exception
python

def openstack_exception(func):
    ''' Openstack exceptions decorator
    '''
    async def wrap(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except Exception as e:
            logging.error(e)
            raise IaasException
    return wrap
Openstack exceptions decorator
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L18-L28
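The same error-wrapping pattern as openstack_exception, restated as a self-contained sketch with generic names (this is not the VmShepherd code itself):

import asyncio
import logging


class IaasException(Exception):
    """Stand-in for the vmshepherd.iaas exception type."""


def log_and_convert(func):
    # Same shape as openstack_exception: log the original error, raise a domain error.
    async def wrap(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except Exception as e:
            logging.error(e)
            raise IaasException
    return wrap


@log_and_convert
async def flaky():
    raise RuntimeError('boom')


try:
    asyncio.run(flaky())
except IaasException:
    print('converted to IaasException')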
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver.initialize_openstack
python

def initialize_openstack(func):
    ''' Initialize and refresh openstack connection
    '''
    async def wrap(self, *args, **kwargs):
        if not hasattr(self, 'auth') or not self.auth.is_token_valid():
            self.auth = AuthPassword(auth_url=self.config['auth_url'],
                                     username=self.config['username'],
                                     password=self.config['password'],
                                     project_name=self.config['project_name'],
                                     user_domain_name=self.config['user_domain_name'],
                                     project_domain_name=self.config['project_domain_name'])
            self.nova = NovaClient(session=self.auth)
            self.glance = GlanceClient(session=self.auth)
            await self.nova.init_api(timeout=self.config.get('http_timeout', 10))
            await self.glance.init_api(timeout=self.config.get('http_timeout', 10))
        if not hasattr(self, 'last_init') or self.last_init < (time.time() - 60):
            await self.initialize()
            self.last_init = time.time()
        return await func(self, *args, **kwargs)
    return wrap
Initialize and refresh openstack connection
[ "Initialize", "and", "refresh", "openstack", "connection" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L30-L52
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver.initialize
async def initialize(self): ''' Initialize static data like images and flavores and set it as object property ''' flavors = await self._list_flavors() images = await self._list_images() self.flavors_map = bidict() self.images_map = bidict() self.images_details = {} for flavor in flavors: self.flavors_map.put(flavor['id'], flavor['name'], on_dup_key='OVERWRITE', on_dup_val='OVERWRITE') for image in images: # @TODO filetes : # @TODO filtering by owner # if hasattr(image, 'owner_id') and image.owner_id in self.config['image_owner_ids']: # @TODO enable filtering by tag # if 'lastest' in image.tags: self.images_details[image['id']] = { 'name': image['name'], 'created_at': image['created_at'], 'latest': 'latest' in image['tags'] } self.images_map.put(image['id'], image['name'], on_dup_key='OVERWRITE', on_dup_val='OVERWRITE')
python
async def initialize(self): ''' Initialize static data like images and flavores and set it as object property ''' flavors = await self._list_flavors() images = await self._list_images() self.flavors_map = bidict() self.images_map = bidict() self.images_details = {} for flavor in flavors: self.flavors_map.put(flavor['id'], flavor['name'], on_dup_key='OVERWRITE', on_dup_val='OVERWRITE') for image in images: # @TODO filetes : # @TODO filtering by owner # if hasattr(image, 'owner_id') and image.owner_id in self.config['image_owner_ids']: # @TODO enable filtering by tag # if 'lastest' in image.tags: self.images_details[image['id']] = { 'name': image['name'], 'created_at': image['created_at'], 'latest': 'latest' in image['tags'] } self.images_map.put(image['id'], image['name'], on_dup_key='OVERWRITE', on_dup_val='OVERWRITE')
[ "async", "def", "initialize", "(", "self", ")", ":", "flavors", "=", "await", "self", ".", "_list_flavors", "(", ")", "images", "=", "await", "self", ".", "_list_images", "(", ")", "self", ".", "flavors_map", "=", "bidict", "(", ")", "self", ".", "images_map", "=", "bidict", "(", ")", "self", ".", "images_details", "=", "{", "}", "for", "flavor", "in", "flavors", ":", "self", ".", "flavors_map", ".", "put", "(", "flavor", "[", "'id'", "]", ",", "flavor", "[", "'name'", "]", ",", "on_dup_key", "=", "'OVERWRITE'", ",", "on_dup_val", "=", "'OVERWRITE'", ")", "for", "image", "in", "images", ":", "# @TODO filetes :", "# @TODO filtering by owner", "# if hasattr(image, 'owner_id') and image.owner_id in self.config['image_owner_ids']:", "# @TODO enable filtering by tag", "# if 'lastest' in image.tags:", "self", ".", "images_details", "[", "image", "[", "'id'", "]", "]", "=", "{", "'name'", ":", "image", "[", "'name'", "]", ",", "'created_at'", ":", "image", "[", "'created_at'", "]", ",", "'latest'", ":", "'latest'", "in", "image", "[", "'tags'", "]", "}", "self", ".", "images_map", ".", "put", "(", "image", "[", "'id'", "]", ",", "image", "[", "'name'", "]", ",", "on_dup_key", "=", "'OVERWRITE'", ",", "on_dup_val", "=", "'OVERWRITE'", ")" ]
Initialize static data like images and flavors and set it as an object property
[ "Initialize", "static", "data", "like", "images", "and", "flavores", "and", "set", "it", "as", "object", "property" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L54-L79
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver.create_vm
async def create_vm(self, preset_name, image, flavor, security_groups=None, userdata=None, key_name=None, availability_zone=None, subnets=None): ''' Create VM :arg preset_name: string :arg image: string image id :arg flavor: string flavor id :arg security_groups: list :arg userdata: string :arg key_name: string :arg availability_zone: string :arg subnets: list :returns list Vm objects @TODO 1. returns image id ''' image_id = self.images_map.inv.get(image) flavor_id = self.flavors_map.inv.get(flavor) spec = { "name": preset_name, "flavorRef": flavor_id, "imageRef": image_id, "security_groups": [{"name": group} for group in security_groups], "user_data": userdata } if availability_zone is not None: spec.update({"availability_zone": availability_zone}) if subnets is not None: spec.update({"networks": [{'uuid': subnet['net-id']} for subnet in subnets]}) if userdata is not None: userdata = userdata.encode('utf-8') userdata = base64.b64encode(userdata).decode('utf-8') spec.update({"user_data": userdata}) result = await self.nova.servers.create(server=spec) return result["server"]
python
async def create_vm(self, preset_name, image, flavor, security_groups=None, userdata=None, key_name=None, availability_zone=None, subnets=None): ''' Create VM :arg preset_name: string :arg image: string image id :arg flavor: string flavor id :arg security_groups: list :arg userdata: string :arg key_name: string :arg availability_zone: string :arg subnets: list :returns list Vm objects @TODO 1. returns image id ''' image_id = self.images_map.inv.get(image) flavor_id = self.flavors_map.inv.get(flavor) spec = { "name": preset_name, "flavorRef": flavor_id, "imageRef": image_id, "security_groups": [{"name": group} for group in security_groups], "user_data": userdata } if availability_zone is not None: spec.update({"availability_zone": availability_zone}) if subnets is not None: spec.update({"networks": [{'uuid': subnet['net-id']} for subnet in subnets]}) if userdata is not None: userdata = userdata.encode('utf-8') userdata = base64.b64encode(userdata).decode('utf-8') spec.update({"user_data": userdata}) result = await self.nova.servers.create(server=spec) return result["server"]
[ "async", "def", "create_vm", "(", "self", ",", "preset_name", ",", "image", ",", "flavor", ",", "security_groups", "=", "None", ",", "userdata", "=", "None", ",", "key_name", "=", "None", ",", "availability_zone", "=", "None", ",", "subnets", "=", "None", ")", ":", "image_id", "=", "self", ".", "images_map", ".", "inv", ".", "get", "(", "image", ")", "flavor_id", "=", "self", ".", "flavors_map", ".", "inv", ".", "get", "(", "flavor", ")", "spec", "=", "{", "\"name\"", ":", "preset_name", ",", "\"flavorRef\"", ":", "flavor_id", ",", "\"imageRef\"", ":", "image_id", ",", "\"security_groups\"", ":", "[", "{", "\"name\"", ":", "group", "}", "for", "group", "in", "security_groups", "]", ",", "\"user_data\"", ":", "userdata", "}", "if", "availability_zone", "is", "not", "None", ":", "spec", ".", "update", "(", "{", "\"availability_zone\"", ":", "availability_zone", "}", ")", "if", "subnets", "is", "not", "None", ":", "spec", ".", "update", "(", "{", "\"networks\"", ":", "[", "{", "'uuid'", ":", "subnet", "[", "'net-id'", "]", "}", "for", "subnet", "in", "subnets", "]", "}", ")", "if", "userdata", "is", "not", "None", ":", "userdata", "=", "userdata", ".", "encode", "(", "'utf-8'", ")", "userdata", "=", "base64", ".", "b64encode", "(", "userdata", ")", ".", "decode", "(", "'utf-8'", ")", "spec", ".", "update", "(", "{", "\"user_data\"", ":", "userdata", "}", ")", "result", "=", "await", "self", ".", "nova", ".", "servers", ".", "create", "(", "server", "=", "spec", ")", "return", "result", "[", "\"server\"", "]" ]
Create VM :arg preset_name: string :arg image: string image id :arg flavor: string flavor id :arg security_groups: list :arg userdata: string :arg key_name: string :arg availability_zone: string :arg subnets: list :returns list Vm objects @TODO 1. returns image id
[ "Create", "VM", ":", "arg", "preset_name", ":", "string", ":", "arg", "image", ":", "string", "image", "id", ":", "arg", "flavor", ":", "string", "flavor", "id", ":", "arg", "security_groups", ":", "list", ":", "arg", "userdata", ":", "string", ":", "arg", "key_name", ":", "string", ":", "arg", "availability_zone", ":", "string", ":", "arg", "subnets", ":", "list", ":", "returns", "list", "Vm", "objects" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L83-L118
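A minimal usage sketch for the create_vm call documented above; the driver instance and the concrete image/flavor/subnet values are hypothetical, only the keyword names come from the signature shown::

    import asyncio

    async def boot_one(driver):
        # 'driver' is assumed to be an already-initialized OpenStackDriver
        server = await driver.create_vm(
            preset_name='frontend-test',        # hypothetical preset name
            image='ubuntu-18.04',               # translated to an id via images_map
            flavor='m1.small',                  # translated to an id via flavors_map
            security_groups=['default'],
            userdata='#cloud-config\n',         # create_vm base64-encodes this
            availability_zone='zone-1',         # optional, added to the spec only if set
            subnets=[{'net-id': 'aaaa-bbbb'}],  # hypothetical network uuid
        )
        return server['id']                     # the method returns result['server']

    # asyncio.get_event_loop().run_until_complete(boot_one(driver))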
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver.list_vms
async def list_vms(self, preset_name): ''' List VMs by preset name :arg present_name: string ''' response = await self.nova.servers.list(name=f'^{preset_name}$') result = [] for server in response['servers']: result.append(self._map_vm_structure(server)) return result
python
async def list_vms(self, preset_name): ''' List VMs by preset name :arg present_name: string ''' response = await self.nova.servers.list(name=f'^{preset_name}$') result = [] for server in response['servers']: result.append(self._map_vm_structure(server)) return result
[ "async", "def", "list_vms", "(", "self", ",", "preset_name", ")", ":", "response", "=", "await", "self", ".", "nova", ".", "servers", ".", "list", "(", "name", "=", "f'^{preset_name}$'", ")", "result", "=", "[", "]", "for", "server", "in", "response", "[", "'servers'", "]", ":", "result", ".", "append", "(", "self", ".", "_map_vm_structure", "(", "server", ")", ")", "return", "result" ]
List VMs by preset name :arg preset_name: string
[ "List", "VMs", "by", "preset", "name", ":", "arg", "present_name", ":", "string" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L122-L132
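A usage sketch for list_vms; note that the underlying Nova query anchors the preset name with ^ and $, so only exact name matches come back (the driver setup here is assumed)::

    import asyncio

    async def show_preset(driver, preset_name='frontend-test'):
        vms = await driver.list_vms(preset_name)
        for vm in vms:
            print(vm)   # Vm objects built by _map_vm_structure

    # asyncio.get_event_loop().run_until_complete(show_preset(driver))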
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver.terminate_vm
async def terminate_vm(self, vm_id): ''' Terminate VM :arg vm_id: string ''' try: await self.nova.servers.force_delete(vm_id) except JSONDecodeError as exc: logging.info("nova sent 'content-type: application/json' but no content appeared, whatever") pass except Exception: raise
python
async def terminate_vm(self, vm_id): ''' Terminate VM :arg vm_id: string ''' try: await self.nova.servers.force_delete(vm_id) except JSONDecodeError as exc: logging.info("nova sent 'content-type: application/json' but no content appeared, whatever") pass except Exception: raise
[ "async", "def", "terminate_vm", "(", "self", ",", "vm_id", ")", ":", "try", ":", "await", "self", ".", "nova", ".", "servers", ".", "force_delete", "(", "vm_id", ")", "except", "JSONDecodeError", "as", "exc", ":", "logging", ".", "info", "(", "\"nova sent 'content-type: application/json' but no content appeared, whatever\"", ")", "pass", "except", "Exception", ":", "raise" ]
Terminate VM :arg vm_id: string
[ "Terminate", "VM", ":", "arg", "vm_id", ":", "string" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L135-L146
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver.get_vm
async def get_vm(self, vm_id): ''' Get VM :arg vm_id: string :returns vm: object ''' result = await self.nova.servers.get(vm_id) return self._map_vm_structure(result["server"])
python
async def get_vm(self, vm_id): ''' Get VM :arg vm_id: string :returns vm: object ''' result = await self.nova.servers.get(vm_id) return self._map_vm_structure(result["server"])
[ "async", "def", "get_vm", "(", "self", ",", "vm_id", ")", ":", "result", "=", "await", "self", ".", "nova", ".", "servers", ".", "get", "(", "vm_id", ")", "return", "self", ".", "_map_vm_structure", "(", "result", "[", "\"server\"", "]", ")" ]
Get VM :arg vm_id: string :returns vm: object
[ "Get", "VM", ":", "arg", "vm_id", ":", "string", ":", "returns", "vm", ":", "object" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L150-L157
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver._map_vm_structure
def _map_vm_structure(self, vm): ''' Vm unification :arg vm: object :returns object ''' ip = self._extract_ips(vm['addresses']) created = datetime.strptime(vm['created'], '%Y-%m-%dT%H:%M:%SZ') flavor = self.flavors_map.get(vm['flavor'].get('id')) image = self.images_map.get(vm['image'].get('id')) timed_shutdown_at = vm.get('metadata', {}).get('iaas_timed_shutdown') timed_shutdown_at = int(timed_shutdown_at) if timed_shutdown_at else None state = self._map_vm_status(vm['status'], timed_shutdown_at) iaasvm = Vm(self, vm['id'], vm['name'], ip, created, state=state, metadata=vm['metadata'], tags=vm.get('tags', []), flavor=flavor, image=image, timed_shutdown_at=timed_shutdown_at) return iaasvm
python
def _map_vm_structure(self, vm): ''' Vm unification :arg vm: object :returns object ''' ip = self._extract_ips(vm['addresses']) created = datetime.strptime(vm['created'], '%Y-%m-%dT%H:%M:%SZ') flavor = self.flavors_map.get(vm['flavor'].get('id')) image = self.images_map.get(vm['image'].get('id')) timed_shutdown_at = vm.get('metadata', {}).get('iaas_timed_shutdown') timed_shutdown_at = int(timed_shutdown_at) if timed_shutdown_at else None state = self._map_vm_status(vm['status'], timed_shutdown_at) iaasvm = Vm(self, vm['id'], vm['name'], ip, created, state=state, metadata=vm['metadata'], tags=vm.get('tags', []), flavor=flavor, image=image, timed_shutdown_at=timed_shutdown_at) return iaasvm
[ "def", "_map_vm_structure", "(", "self", ",", "vm", ")", ":", "ip", "=", "self", ".", "_extract_ips", "(", "vm", "[", "'addresses'", "]", ")", "created", "=", "datetime", ".", "strptime", "(", "vm", "[", "'created'", "]", ",", "'%Y-%m-%dT%H:%M:%SZ'", ")", "flavor", "=", "self", ".", "flavors_map", ".", "get", "(", "vm", "[", "'flavor'", "]", ".", "get", "(", "'id'", ")", ")", "image", "=", "self", ".", "images_map", ".", "get", "(", "vm", "[", "'image'", "]", ".", "get", "(", "'id'", ")", ")", "timed_shutdown_at", "=", "vm", ".", "get", "(", "'metadata'", ",", "{", "}", ")", ".", "get", "(", "'iaas_timed_shutdown'", ")", "timed_shutdown_at", "=", "int", "(", "timed_shutdown_at", ")", "if", "timed_shutdown_at", "else", "None", "state", "=", "self", ".", "_map_vm_status", "(", "vm", "[", "'status'", "]", ",", "timed_shutdown_at", ")", "iaasvm", "=", "Vm", "(", "self", ",", "vm", "[", "'id'", "]", ",", "vm", "[", "'name'", "]", ",", "ip", ",", "created", ",", "state", "=", "state", ",", "metadata", "=", "vm", "[", "'metadata'", "]", ",", "tags", "=", "vm", ".", "get", "(", "'tags'", ",", "[", "]", ")", ",", "flavor", "=", "flavor", ",", "image", "=", "image", ",", "timed_shutdown_at", "=", "timed_shutdown_at", ")", "return", "iaasvm" ]
Vm unification :arg vm: object :returns object
[ "Vm", "unification", ":", "arg", "vm", ":", "object", ":", "returns", "object" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L175-L190
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver._map_vm_status
def _map_vm_status(self, openstack_status, timed_shutdown_at=None): ''' Map openstack vm statuses to vmshepherd vm statuses openstack vm statuses: ACTIVE, BUILD, DELETED, ERROR, HARD_REBOOT, MIGRATING, PASSWORD, PAUSED, REBOOT, REBUILD, RESCUE, RESIZE, REVERT_RESIZE, SHELVED, SHELVED_OFFLOADED, SHUTOFF, SOFT_DELETED, SUSPENDED, UNKNOWN, VERIFY_RESIZE :arg string openstack_status :returns string ''' statuses = { VmState.TERMINATED: [ 'ERROR', 'DELETED', 'SHUTOFF', 'SOFT_DELETED', 'SUSPENDED' ], VmState.PENDING: [ 'BUILD', 'REBUILD' ], VmState.RUNNING: ['ACTIVE'] } state = VmState.UNKNOWN for vmstate, value in statuses.items(): if openstack_status in value: state = vmstate break if timed_shutdown_at: now = time.time() if timed_shutdown_at < now: state = VmState.AFTER_TIME_SHUTDOWN elif (timed_shutdown_at - now) < self.config.get('shutdown_grace_period', 900): state = VmState.NEARBY_SHUTDOWN return state
python
def _map_vm_status(self, openstack_status, timed_shutdown_at=None): ''' Map openstack vm statuses to vmshepherd vm statuses openstack vm statuses: ACTIVE, BUILD, DELETED, ERROR, HARD_REBOOT, MIGRATING, PASSWORD, PAUSED, REBOOT, REBUILD, RESCUE, RESIZE, REVERT_RESIZE, SHELVED, SHELVED_OFFLOADED, SHUTOFF, SOFT_DELETED, SUSPENDED, UNKNOWN, VERIFY_RESIZE :arg string openstack_status :returns string ''' statuses = { VmState.TERMINATED: [ 'ERROR', 'DELETED', 'SHUTOFF', 'SOFT_DELETED', 'SUSPENDED' ], VmState.PENDING: [ 'BUILD', 'REBUILD' ], VmState.RUNNING: ['ACTIVE'] } state = VmState.UNKNOWN for vmstate, value in statuses.items(): if openstack_status in value: state = vmstate break if timed_shutdown_at: now = time.time() if timed_shutdown_at < now: state = VmState.AFTER_TIME_SHUTDOWN elif (timed_shutdown_at - now) < self.config.get('shutdown_grace_period', 900): state = VmState.NEARBY_SHUTDOWN return state
[ "def", "_map_vm_status", "(", "self", ",", "openstack_status", ",", "timed_shutdown_at", "=", "None", ")", ":", "statuses", "=", "{", "VmState", ".", "TERMINATED", ":", "[", "'ERROR'", ",", "'DELETED'", ",", "'SHUTOFF'", ",", "'SOFT_DELETED'", ",", "'SUSPENDED'", "]", ",", "VmState", ".", "PENDING", ":", "[", "'BUILD'", ",", "'REBUILD'", "]", ",", "VmState", ".", "RUNNING", ":", "[", "'ACTIVE'", "]", "}", "state", "=", "VmState", ".", "UNKNOWN", "for", "vmstate", ",", "value", "in", "statuses", ".", "items", "(", ")", ":", "if", "openstack_status", "in", "value", ":", "state", "=", "vmstate", "break", "if", "timed_shutdown_at", ":", "now", "=", "time", ".", "time", "(", ")", "if", "timed_shutdown_at", "<", "now", ":", "state", "=", "VmState", ".", "AFTER_TIME_SHUTDOWN", "elif", "(", "timed_shutdown_at", "-", "now", ")", "<", "self", ".", "config", ".", "get", "(", "'shutdown_grace_period'", ",", "900", ")", ":", "state", "=", "VmState", ".", "NEARBY_SHUTDOWN", "return", "state" ]
Map openstack vm statuses to vmshepherd vm statuses openstack vm statuses: ACTIVE, BUILD, DELETED, ERROR, HARD_REBOOT, MIGRATING, PASSWORD, PAUSED, REBOOT, REBUILD, RESCUE, RESIZE, REVERT_RESIZE, SHELVED, SHELVED_OFFLOADED, SHUTOFF, SOFT_DELETED, SUSPENDED, UNKNOWN, VERIFY_RESIZE :arg string openstack_status :returns string
[ "Map", "openstack", "vm", "statuses", "to", "vmshepherd", "vm", "statuses", "openstack", "vm", "statuses", ":", "ACTIVE", "BUILD", "DELETED", "ERROR", "HARD_REBOOT", "MIGRATING", "PASSWORD", "PAUSED", "REBOOT", "REBUILD", "RESCUE", "RESIZE", "REVERT_RESIZE", "SHELVED", "SHELVED_OFFLOADED", "SHUTOFF", "SOFT_DELETED", "SUSPENDED", "UNKNOWN", "VERIFY_RESIZE" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L192-L229
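The status buckets in _map_vm_status are easy to exercise standalone; the sketch below uses plain strings in place of the VmState enum members (which are not shown in this record), so the state names are only illustrative::

    import time

    STATUS_MAP = {
        'TERMINATED': ['ERROR', 'DELETED', 'SHUTOFF', 'SOFT_DELETED', 'SUSPENDED'],
        'PENDING': ['BUILD', 'REBUILD'],
        'RUNNING': ['ACTIVE'],
    }

    def map_vm_status(openstack_status, timed_shutdown_at=None, grace=900):
        # default to UNKNOWN, then look the OpenStack status up in the buckets
        state = 'UNKNOWN'
        for vmstate, names in STATUS_MAP.items():
            if openstack_status in names:
                state = vmstate
                break
        # a timed shutdown overrides the plain status, as in the method above
        if timed_shutdown_at:
            now = time.time()
            if timed_shutdown_at < now:
                state = 'AFTER_TIME_SHUTDOWN'
            elif (timed_shutdown_at - now) < grace:
                state = 'NEARBY_SHUTDOWN'
        return state

    print(map_vm_status('ACTIVE'))   # RUNNING
    print(map_vm_status('REBUILD'))  # PENDING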
DreamLab/VmShepherd
src/vmshepherd/iaas/openstack_driver.py
OpenStackDriver._extract_ips
def _extract_ips(self, data): ''' Extract ip addressess from openstack structure { 'pl-krk-2-int-301-c2-int-1': [ { 'OS-EXT-IPS-MAC:mac_addr': 'fa:16:3e:29:f1:bb', 'version': 4, 'addr': '10.185.138.36', 'OS-EXT-IPS:type': 'fixed' } ] } :arg data: dict :returns list ''' result = [] for region in data.items(): for interface in region[1]: result.append(interface['addr']) return result
python
def _extract_ips(self, data): ''' Extract ip addressess from openstack structure { 'pl-krk-2-int-301-c2-int-1': [ { 'OS-EXT-IPS-MAC:mac_addr': 'fa:16:3e:29:f1:bb', 'version': 4, 'addr': '10.185.138.36', 'OS-EXT-IPS:type': 'fixed' } ] } :arg data: dict :returns list ''' result = [] for region in data.items(): for interface in region[1]: result.append(interface['addr']) return result
[ "def", "_extract_ips", "(", "self", ",", "data", ")", ":", "result", "=", "[", "]", "for", "region", "in", "data", ".", "items", "(", ")", ":", "for", "interface", "in", "region", "[", "1", "]", ":", "result", ".", "append", "(", "interface", "[", "'addr'", "]", ")", "return", "result" ]
Extract ip addresses from openstack structure { 'pl-krk-2-int-301-c2-int-1': [ { 'OS-EXT-IPS-MAC:mac_addr': 'fa:16:3e:29:f1:bb', 'version': 4, 'addr': '10.185.138.36', 'OS-EXT-IPS:type': 'fixed' } ] } :arg data: dict :returns list
[ "Extract", "ip", "addressess", "from", "openstack", "structure", "{", "pl", "-", "krk", "-", "2", "-", "int", "-", "301", "-", "c2", "-", "int", "-", "1", ":", "[", "{", "OS", "-", "EXT", "-", "IPS", "-", "MAC", ":", "mac_addr", ":", "fa", ":", "16", ":", "3e", ":", "29", ":", "f1", ":", "bb", "version", ":", "4", "addr", ":", "10", ".", "185", ".", "138", ".", "36", "OS", "-", "EXT", "-", "IPS", ":", "type", ":", "fixed", "}", "]", "}", ":", "arg", "data", ":", "dict", ":", "returns", "list" ]
train
https://github.com/DreamLab/VmShepherd/blob/709a412c372b897d53808039c5c64a8b69c12c8d/src/vmshepherd/iaas/openstack_driver.py#L231-L251
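The docstring above already carries a concrete input example; a self-contained sketch of the same extraction shows the expected output::

    def extract_ips(addresses):
        # walk every network in the OpenStack 'addresses' structure and collect addr fields
        result = []
        for _network, interfaces in addresses.items():
            for interface in interfaces:
                result.append(interface['addr'])
        return result

    addresses = {
        'pl-krk-2-int-301-c2-int-1': [
            {'OS-EXT-IPS-MAC:mac_addr': 'fa:16:3e:29:f1:bb',
             'version': 4,
             'addr': '10.185.138.36',
             'OS-EXT-IPS:type': 'fixed'}
        ]
    }
    print(extract_ips(addresses))  # ['10.185.138.36']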
mjirik/io3d
io3d/datareaderqt.py
_set_label_text
def _set_label_text(obj, text, tooltip=None, replace_all=False): """ Keep text before first colon and replace the rest with new text. If there is no colon in the :param obj: :param text: :param tooltip: :param replace_all: No colon is searched and whole text is replaced :return: """ dlab = str(obj.text()) index_of_colon = dlab.find(': ') if index_of_colon == -1: index_of_colon = 0 else: index_of_colon += 2 if replace_all: index_of_colon = 0 obj.setText(dlab[:index_of_colon] + '%s' % text) if tooltip is not None: obj.setToolTip(tooltip)
python
def _set_label_text(obj, text, tooltip=None, replace_all=False): """ Keep text before first colon and replace the rest with new text. If there is no colon in the :param obj: :param text: :param tooltip: :param replace_all: No colon is searched and whole text is replaced :return: """ dlab = str(obj.text()) index_of_colon = dlab.find(': ') if index_of_colon == -1: index_of_colon = 0 else: index_of_colon += 2 if replace_all: index_of_colon = 0 obj.setText(dlab[:index_of_colon] + '%s' % text) if tooltip is not None: obj.setToolTip(tooltip)
[ "def", "_set_label_text", "(", "obj", ",", "text", ",", "tooltip", "=", "None", ",", "replace_all", "=", "False", ")", ":", "dlab", "=", "str", "(", "obj", ".", "text", "(", ")", ")", "index_of_colon", "=", "dlab", ".", "find", "(", "': '", ")", "if", "index_of_colon", "==", "-", "1", ":", "index_of_colon", "=", "0", "else", ":", "index_of_colon", "+=", "2", "if", "replace_all", ":", "index_of_colon", "=", "0", "obj", ".", "setText", "(", "dlab", "[", ":", "index_of_colon", "]", "+", "'%s'", "%", "text", ")", "if", "tooltip", "is", "not", "None", ":", "obj", ".", "setToolTip", "(", "tooltip", ")" ]
Keep text before the first colon and replace the rest with new text. If there is no colon in the text, the whole text is replaced. :param obj: :param text: :param tooltip: :param replace_all: No colon is searched and whole text is replaced :return:
[ "Keep", "text", "before", "first", "colon", "and", "replace", "the", "rest", "with", "new", "text", "." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareaderqt.py#L271-L292
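The colon handling in _set_label_text is easiest to see without Qt; the FakeLabel below is purely illustrative and only mimics the two QLabel methods the helper touches::

    class FakeLabel:
        # minimal stand-in for a QLabel: only text() and setText() are needed
        def __init__(self, text):
            self._text = text

        def text(self):
            return self._text

        def setText(self, text):
            self._text = text

    label = FakeLabel('Data file: old/path.dcm')
    dlab = label.text()
    index_of_colon = dlab.find(': ')
    index_of_colon = 0 if index_of_colon == -1 else index_of_colon + 2
    label.setText(dlab[:index_of_colon] + 'new/path.dcm')
    print(label.text())  # Data file: new/path.dcm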
mjirik/io3d
io3d/datareaderqt.py
DataReaderWidget.__get_datafile
def __get_datafile(self, app=False): """ Draw a dialog for directory selection. """ if self.cache is not None: cache_loadfiledir = self.cache.get_or_none('loadfiledir') self.loadfiledir = str(cache_loadfiledir) if self.loadfiledir is None: self.loadfiledir = '' directory = str(self.loadfiledir) from PyQt4.QtGui import QFileDialog if not app: inner_app = QApplication(sys.argv) if self._skip_get_path_dialog_for_tests: dcmdir = self.datapath else: dcmdir = QFileDialog.getOpenFileName( caption='Select Data File', directory=directory # ptions=QFileDialog.ShowDirsOnly, ) # dcmdir = QFileDialog.getOpenFileName( # caption='Select Data file', # # ptions=QFileDialog.ShowDirsOnly, # directory=directory # ) # pp.exec_() if not app: inner_app.exit(0) dcmdir = get_str(dcmdir) if len(dcmdir) > 0: # # dcmdir = "%s" % (dcmdir) # dcmdir = dcmdir.encode("utf8") pass else: dcmdir = None head, teil = os.path.split(dcmdir) if self.cache is not None: self.cache.update('loadfiledir', head) return dcmdir
python
def __get_datafile(self, app=False): """ Draw a dialog for directory selection. """ if self.cache is not None: cache_loadfiledir = self.cache.get_or_none('loadfiledir') self.loadfiledir = str(cache_loadfiledir) if self.loadfiledir is None: self.loadfiledir = '' directory = str(self.loadfiledir) from PyQt4.QtGui import QFileDialog if not app: inner_app = QApplication(sys.argv) if self._skip_get_path_dialog_for_tests: dcmdir = self.datapath else: dcmdir = QFileDialog.getOpenFileName( caption='Select Data File', directory=directory # ptions=QFileDialog.ShowDirsOnly, ) # dcmdir = QFileDialog.getOpenFileName( # caption='Select Data file', # # ptions=QFileDialog.ShowDirsOnly, # directory=directory # ) # pp.exec_() if not app: inner_app.exit(0) dcmdir = get_str(dcmdir) if len(dcmdir) > 0: # # dcmdir = "%s" % (dcmdir) # dcmdir = dcmdir.encode("utf8") pass else: dcmdir = None head, teil = os.path.split(dcmdir) if self.cache is not None: self.cache.update('loadfiledir', head) return dcmdir
[ "def", "__get_datafile", "(", "self", ",", "app", "=", "False", ")", ":", "if", "self", ".", "cache", "is", "not", "None", ":", "cache_loadfiledir", "=", "self", ".", "cache", ".", "get_or_none", "(", "'loadfiledir'", ")", "self", ".", "loadfiledir", "=", "str", "(", "cache_loadfiledir", ")", "if", "self", ".", "loadfiledir", "is", "None", ":", "self", ".", "loadfiledir", "=", "''", "directory", "=", "str", "(", "self", ".", "loadfiledir", ")", "from", "PyQt4", ".", "QtGui", "import", "QFileDialog", "if", "not", "app", ":", "inner_app", "=", "QApplication", "(", "sys", ".", "argv", ")", "if", "self", ".", "_skip_get_path_dialog_for_tests", ":", "dcmdir", "=", "self", ".", "datapath", "else", ":", "dcmdir", "=", "QFileDialog", ".", "getOpenFileName", "(", "caption", "=", "'Select Data File'", ",", "directory", "=", "directory", "# ptions=QFileDialog.ShowDirsOnly,", ")", "# dcmdir = QFileDialog.getOpenFileName(", "# caption='Select Data file',", "# # ptions=QFileDialog.ShowDirsOnly,", "# directory=directory", "# )", "# pp.exec_()", "if", "not", "app", ":", "inner_app", ".", "exit", "(", "0", ")", "dcmdir", "=", "get_str", "(", "dcmdir", ")", "if", "len", "(", "dcmdir", ")", ">", "0", ":", "#", "# dcmdir = \"%s\" % (dcmdir)", "# dcmdir = dcmdir.encode(\"utf8\")", "pass", "else", ":", "dcmdir", "=", "None", "head", ",", "teil", "=", "os", ".", "path", ".", "split", "(", "dcmdir", ")", "if", "self", ".", "cache", "is", "not", "None", ":", "self", ".", "cache", ".", "update", "(", "'loadfiledir'", ",", "head", ")", "return", "dcmdir" ]
Draw a dialog for data file selection.
[ "Draw", "a", "dialog", "for", "directory", "selection", "." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareaderqt.py#L95-L141
mjirik/io3d
io3d/datareaderqt.py
DataReaderWidget.__get_datadir
def __get_datadir(self, app=False): """ Draw a dialog for directory selection. """ # if 'datadir' in self.oseg.cache.data.keys(): # if : # directory = self.oseg.input_datapath_start if self.cache is not None: cache_loaddir = self.cache.get_or_none('loaddir') self.loaddir = str(cache_loaddir) # self.loaddir = str(self.cache.get_or_none('loaddir')) if self.loaddir is None: self.loaddir = '' directory = self.loaddir from PyQt4.QtGui import QFileDialog if not app: app_inner = QApplication(sys.argv) if self._skip_get_path_dialog_for_tests: dcmdir = self.datapath else: dcmdir = QFileDialog.getExistingDirectory( caption='Select DICOM Folder', options=QFileDialog.ShowDirsOnly, directory=directory ) # pp.exec_() if not app: app_inner.exit(0) dcmdir = get_str(dcmdir) if len(dcmdir) > 0: # dcmdir = "%s" % (dcmdir) # dcmdir = dcmdir.encode("utf8") pass else: dcmdir = None if self.cache is not None: self.cache.update('loaddir', dcmdir) return str(dcmdir)
python
def __get_datadir(self, app=False): """ Draw a dialog for directory selection. """ # if 'datadir' in self.oseg.cache.data.keys(): # if : # directory = self.oseg.input_datapath_start if self.cache is not None: cache_loaddir = self.cache.get_or_none('loaddir') self.loaddir = str(cache_loaddir) # self.loaddir = str(self.cache.get_or_none('loaddir')) if self.loaddir is None: self.loaddir = '' directory = self.loaddir from PyQt4.QtGui import QFileDialog if not app: app_inner = QApplication(sys.argv) if self._skip_get_path_dialog_for_tests: dcmdir = self.datapath else: dcmdir = QFileDialog.getExistingDirectory( caption='Select DICOM Folder', options=QFileDialog.ShowDirsOnly, directory=directory ) # pp.exec_() if not app: app_inner.exit(0) dcmdir = get_str(dcmdir) if len(dcmdir) > 0: # dcmdir = "%s" % (dcmdir) # dcmdir = dcmdir.encode("utf8") pass else: dcmdir = None if self.cache is not None: self.cache.update('loaddir', dcmdir) return str(dcmdir)
[ "def", "__get_datadir", "(", "self", ",", "app", "=", "False", ")", ":", "# if 'datadir' in self.oseg.cache.data.keys():", "# if :", "# directory = self.oseg.input_datapath_start", "if", "self", ".", "cache", "is", "not", "None", ":", "cache_loaddir", "=", "self", ".", "cache", ".", "get_or_none", "(", "'loaddir'", ")", "self", ".", "loaddir", "=", "str", "(", "cache_loaddir", ")", "# self.loaddir = str(self.cache.get_or_none('loaddir'))", "if", "self", ".", "loaddir", "is", "None", ":", "self", ".", "loaddir", "=", "''", "directory", "=", "self", ".", "loaddir", "from", "PyQt4", ".", "QtGui", "import", "QFileDialog", "if", "not", "app", ":", "app_inner", "=", "QApplication", "(", "sys", ".", "argv", ")", "if", "self", ".", "_skip_get_path_dialog_for_tests", ":", "dcmdir", "=", "self", ".", "datapath", "else", ":", "dcmdir", "=", "QFileDialog", ".", "getExistingDirectory", "(", "caption", "=", "'Select DICOM Folder'", ",", "options", "=", "QFileDialog", ".", "ShowDirsOnly", ",", "directory", "=", "directory", ")", "# pp.exec_()", "if", "not", "app", ":", "app_inner", ".", "exit", "(", "0", ")", "dcmdir", "=", "get_str", "(", "dcmdir", ")", "if", "len", "(", "dcmdir", ")", ">", "0", ":", "# dcmdir = \"%s\" % (dcmdir)", "# dcmdir = dcmdir.encode(\"utf8\")", "pass", "else", ":", "dcmdir", "=", "None", "if", "self", ".", "cache", "is", "not", "None", ":", "self", ".", "cache", ".", "update", "(", "'loaddir'", ",", "dcmdir", ")", "return", "str", "(", "dcmdir", ")" ]
Draw a dialog for directory selection.
[ "Draw", "a", "dialog", "for", "directory", "selection", "." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareaderqt.py#L143-L188
mjirik/io3d
io3d/datareaderqt.py
DataReaderWidget.read_data_from_prepared_datapath
def read_data_from_prepared_datapath(self): """ Function is called in the end of process :return: """ reader = datareader.DataReader() self.datap = reader.Get3DData(self.datapath, dataplus_format=True, gui=True, qt_app=self.qt_app) _set_label_text(self.text_dcm_dir, _make_text_short(self.datapath), self.datapath) _set_label_text(self.text_dcm_data, self.get_data_info(), replace_all=True) if self.after_function is not None: self.after_function(self) self.__show_message('Data read finished')
python
def read_data_from_prepared_datapath(self): """ Function is called in the end of process :return: """ reader = datareader.DataReader() self.datap = reader.Get3DData(self.datapath, dataplus_format=True, gui=True, qt_app=self.qt_app) _set_label_text(self.text_dcm_dir, _make_text_short(self.datapath), self.datapath) _set_label_text(self.text_dcm_data, self.get_data_info(), replace_all=True) if self.after_function is not None: self.after_function(self) self.__show_message('Data read finished')
[ "def", "read_data_from_prepared_datapath", "(", "self", ")", ":", "reader", "=", "datareader", ".", "DataReader", "(", ")", "self", ".", "datap", "=", "reader", ".", "Get3DData", "(", "self", ".", "datapath", ",", "dataplus_format", "=", "True", ",", "gui", "=", "True", ",", "qt_app", "=", "self", ".", "qt_app", ")", "_set_label_text", "(", "self", ".", "text_dcm_dir", ",", "_make_text_short", "(", "self", ".", "datapath", ")", ",", "self", ".", "datapath", ")", "_set_label_text", "(", "self", ".", "text_dcm_data", ",", "self", ".", "get_data_info", "(", ")", ",", "replace_all", "=", "True", ")", "if", "self", ".", "after_function", "is", "not", "None", ":", "self", ".", "after_function", "(", "self", ")", "self", ".", "__show_message", "(", "'Data read finished'", ")" ]
Function is called at the end of the process. :return:
[ "Function", "is", "called", "in", "the", "end", "of", "process", ":", "return", ":" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/datareaderqt.py#L232-L246
SetBased/py-stratum
pystratum/RoutineWrapperGenerator.py
RoutineWrapperGenerator.main
def main(self, config_filename): """ The "main" of the wrapper generator. Returns 0 on success, 1 if one or more errors occurred. :param str config_filename: The name of the configuration file. :rtype: int """ self._read_configuration_file(config_filename) if self._wrapper_class_name: self._io.title('Wrapper') self.__generate_wrapper_class() else: self._io.log_verbose('Wrapper not enabled') return 0
python
def main(self, config_filename): """ The "main" of the wrapper generator. Returns 0 on success, 1 if one or more errors occurred. :param str config_filename: The name of the configuration file. :rtype: int """ self._read_configuration_file(config_filename) if self._wrapper_class_name: self._io.title('Wrapper') self.__generate_wrapper_class() else: self._io.log_verbose('Wrapper not enabled') return 0
[ "def", "main", "(", "self", ",", "config_filename", ")", ":", "self", ".", "_read_configuration_file", "(", "config_filename", ")", "if", "self", ".", "_wrapper_class_name", ":", "self", ".", "_io", ".", "title", "(", "'Wrapper'", ")", "self", ".", "__generate_wrapper_class", "(", ")", "else", ":", "self", ".", "_io", ".", "log_verbose", "(", "'Wrapper not enabled'", ")", "return", "0" ]
The "main" of the wrapper generator. Returns 0 on success, 1 if one or more errors occurred. :param str config_filename: The name of the configuration file. :rtype: int
[ "The", "main", "of", "the", "wrapper", "generator", ".", "Returns", "0", "on", "success", "1", "if", "one", "or", "more", "errors", "occurred", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineWrapperGenerator.py#L81-L98
SetBased/py-stratum
pystratum/RoutineWrapperGenerator.py
RoutineWrapperGenerator.__generate_wrapper_class
def __generate_wrapper_class(self): """ Generates the wrapper class. """ routines = self._read_routine_metadata() self._write_class_header() if routines: for routine_name in sorted(routines): if routines[routine_name]['designation'] != 'hidden': self._write_routine_function(routines[routine_name]) else: self._io.error('No files with stored routines found') self._write_class_trailer() Util.write_two_phases(self._wrapper_filename, self._code, self._io)
python
def __generate_wrapper_class(self): """ Generates the wrapper class. """ routines = self._read_routine_metadata() self._write_class_header() if routines: for routine_name in sorted(routines): if routines[routine_name]['designation'] != 'hidden': self._write_routine_function(routines[routine_name]) else: self._io.error('No files with stored routines found') self._write_class_trailer() Util.write_two_phases(self._wrapper_filename, self._code, self._io)
[ "def", "__generate_wrapper_class", "(", "self", ")", ":", "routines", "=", "self", ".", "_read_routine_metadata", "(", ")", "self", ".", "_write_class_header", "(", ")", "if", "routines", ":", "for", "routine_name", "in", "sorted", "(", "routines", ")", ":", "if", "routines", "[", "routine_name", "]", "[", "'designation'", "]", "!=", "'hidden'", ":", "self", ".", "_write_routine_function", "(", "routines", "[", "routine_name", "]", ")", "else", ":", "self", ".", "_io", ".", "error", "(", "'No files with stored routines found'", ")", "self", ".", "_write_class_trailer", "(", ")", "Util", ".", "write_two_phases", "(", "self", ".", "_wrapper_filename", ",", "self", ".", "_code", ",", "self", ".", "_io", ")" ]
Generates the wrapper class.
[ "Generates", "the", "wrapper", "class", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineWrapperGenerator.py#L101-L118
SetBased/py-stratum
pystratum/RoutineWrapperGenerator.py
RoutineWrapperGenerator._read_configuration_file
def _read_configuration_file(self, config_filename): """ Reads parameters from the configuration file. :param str config_filename: The name of the configuration file. """ config = configparser.ConfigParser() config.read(config_filename) self._parent_class_name = config.get('wrapper', 'parent_class') self._parent_class_namespace = config.get('wrapper', 'parent_class_namespace') self._wrapper_class_name = config.get('wrapper', 'wrapper_class') self._wrapper_filename = config.get('wrapper', 'wrapper_file') self._metadata_filename = config.get('wrapper', 'metadata') self._lob_as_string_flag = config.get('wrapper', 'lob_as_string')
python
def _read_configuration_file(self, config_filename): """ Reads parameters from the configuration file. :param str config_filename: The name of the configuration file. """ config = configparser.ConfigParser() config.read(config_filename) self._parent_class_name = config.get('wrapper', 'parent_class') self._parent_class_namespace = config.get('wrapper', 'parent_class_namespace') self._wrapper_class_name = config.get('wrapper', 'wrapper_class') self._wrapper_filename = config.get('wrapper', 'wrapper_file') self._metadata_filename = config.get('wrapper', 'metadata') self._lob_as_string_flag = config.get('wrapper', 'lob_as_string')
[ "def", "_read_configuration_file", "(", "self", ",", "config_filename", ")", ":", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "config", ".", "read", "(", "config_filename", ")", "self", ".", "_parent_class_name", "=", "config", ".", "get", "(", "'wrapper'", ",", "'parent_class'", ")", "self", ".", "_parent_class_namespace", "=", "config", ".", "get", "(", "'wrapper'", ",", "'parent_class_namespace'", ")", "self", ".", "_wrapper_class_name", "=", "config", ".", "get", "(", "'wrapper'", ",", "'wrapper_class'", ")", "self", ".", "_wrapper_filename", "=", "config", ".", "get", "(", "'wrapper'", ",", "'wrapper_file'", ")", "self", ".", "_metadata_filename", "=", "config", ".", "get", "(", "'wrapper'", ",", "'metadata'", ")", "self", ".", "_lob_as_string_flag", "=", "config", ".", "get", "(", "'wrapper'", ",", "'lob_as_string'", ")" ]
Reads parameters from the configuration file. :param str config_filename: The name of the configuration file.
[ "Reads", "parameters", "from", "the", "configuration", "file", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineWrapperGenerator.py#L121-L135
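A hedged sketch of the configuration section the method above reads; the key names come from the config.get() calls shown, while the values are invented::

    import configparser

    sample = '''
    [wrapper]
    parent_class = MyStratumBase
    parent_class_namespace = my_app.MyStratumBase
    wrapper_class = MyRoutineWrapper
    wrapper_file = my_app/MyRoutineWrapper.py
    metadata = etc/routine_metadata.json
    lob_as_string = True
    '''

    config = configparser.ConfigParser()
    config.read_string(sample)
    print(config.get('wrapper', 'wrapper_class'))  # MyRoutineWrapper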
SetBased/py-stratum
pystratum/RoutineWrapperGenerator.py
RoutineWrapperGenerator._read_routine_metadata
def _read_routine_metadata(self): """ Returns the metadata of stored routines. :rtype: dict """ metadata = {} if os.path.isfile(self._metadata_filename): with open(self._metadata_filename, 'r') as file: metadata = json.load(file) return metadata
python
def _read_routine_metadata(self): """ Returns the metadata of stored routines. :rtype: dict """ metadata = {} if os.path.isfile(self._metadata_filename): with open(self._metadata_filename, 'r') as file: metadata = json.load(file) return metadata
[ "def", "_read_routine_metadata", "(", "self", ")", ":", "metadata", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "self", ".", "_metadata_filename", ")", ":", "with", "open", "(", "self", ".", "_metadata_filename", ",", "'r'", ")", "as", "file", ":", "metadata", "=", "json", ".", "load", "(", "file", ")", "return", "metadata" ]
Returns the metadata of stored routines. :rtype: dict
[ "Returns", "the", "metadata", "of", "stored", "routines", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineWrapperGenerator.py#L138-L149
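A sketch of how the metadata produced here is consumed; only the 'designation' key is known from the neighbouring __generate_wrapper_class record, the file name and the rest of the structure are assumptions::

    import json
    import os

    metadata_filename = 'etc/routine_metadata.json'   # hypothetical path
    metadata = {}
    if os.path.isfile(metadata_filename):
        with open(metadata_filename, 'r') as file:
            metadata = json.load(file)

    for routine_name in sorted(metadata):
        if metadata[routine_name]['designation'] != 'hidden':
            print('would generate a wrapper for', routine_name)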
SetBased/py-stratum
pystratum/RoutineWrapperGenerator.py
RoutineWrapperGenerator._write_class_header
def _write_class_header(self): """ Generate a class header for stored routine wrapper. """ self._write_line('from {0!s} import {1!s}'.format(self._parent_class_namespace, self._parent_class_name)) self._write_line() self._write_line() self._write_line('# ' + ('-' * 118)) self._write_line('class {0!s}({1!s}):'.format(self._wrapper_class_name, self._parent_class_name)) self._write_line(' """') self._write_line(' The stored routines wrappers.') self._write_line(' """')
python
def _write_class_header(self): """ Generate a class header for stored routine wrapper. """ self._write_line('from {0!s} import {1!s}'.format(self._parent_class_namespace, self._parent_class_name)) self._write_line() self._write_line() self._write_line('# ' + ('-' * 118)) self._write_line('class {0!s}({1!s}):'.format(self._wrapper_class_name, self._parent_class_name)) self._write_line(' """') self._write_line(' The stored routines wrappers.') self._write_line(' """')
[ "def", "_write_class_header", "(", "self", ")", ":", "self", ".", "_write_line", "(", "'from {0!s} import {1!s}'", ".", "format", "(", "self", ".", "_parent_class_namespace", ",", "self", ".", "_parent_class_name", ")", ")", "self", ".", "_write_line", "(", ")", "self", ".", "_write_line", "(", ")", "self", ".", "_write_line", "(", "'# '", "+", "(", "'-'", "*", "118", ")", ")", "self", ".", "_write_line", "(", "'class {0!s}({1!s}):'", ".", "format", "(", "self", ".", "_wrapper_class_name", ",", "self", ".", "_parent_class_name", ")", ")", "self", ".", "_write_line", "(", "' \"\"\"'", ")", "self", ".", "_write_line", "(", "' The stored routines wrappers.'", ")", "self", ".", "_write_line", "(", "' \"\"\"'", ")" ]
Generate a class header for stored routine wrapper.
[ "Generate", "a", "class", "header", "for", "stored", "routine", "wrapper", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineWrapperGenerator.py#L152-L163
SetBased/py-stratum
pystratum/RoutineWrapperGenerator.py
RoutineWrapperGenerator._write_line
def _write_line(self, text=''): """ Writes a line with Python code to the generate code buffer. :param str text: The line with Python code. """ if text: self._code += str(text) + "\n" else: self._code += "\n"
python
def _write_line(self, text=''): """ Writes a line with Python code to the generate code buffer. :param str text: The line with Python code. """ if text: self._code += str(text) + "\n" else: self._code += "\n"
[ "def", "_write_line", "(", "self", ",", "text", "=", "''", ")", ":", "if", "text", ":", "self", ".", "_code", "+=", "str", "(", "text", ")", "+", "\"\\n\"", "else", ":", "self", ".", "_code", "+=", "\"\\n\"" ]
Writes a line with Python code to the generated code buffer. :param str text: The line with Python code.
[ "Writes", "a", "line", "with", "Python", "code", "to", "the", "generate", "code", "buffer", "." ]
train
https://github.com/SetBased/py-stratum/blob/7c5ffaa2fdd03f865832a5190b5897ff2c0e3155/pystratum/RoutineWrapperGenerator.py#L166-L175
ClericPy/torequests
torequests/main.py
Async
def Async(f, n=None, timeout=None): """Concise usage for pool.submit. Basic Usage Asnyc & threads :: from torequests.main import Async, threads import time def use_submit(i): time.sleep(i) result = 'use_submit: %s' % i print(result) return result @threads() def use_decorator(i): time.sleep(i) result = 'use_decorator: %s' % i print(result) return result new_use_submit = Async(use_submit) tasks = [new_use_submit(i) for i in (2, 1, 0) ] + [use_decorator(i) for i in (2, 1, 0)] print([type(i) for i in tasks]) results = [i.x for i in tasks] print(results) # use_submit: 0 # use_decorator: 0 # [<class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>] # use_submit: 1 # use_decorator: 1 # use_submit: 2 # use_decorator: 2 # ['use_submit: 2', 'use_submit: 1', 'use_submit: 0', 'use_decorator: 2', 'use_decorator: 1', 'use_decorator: 0'] """ return threads(n=n, timeout=timeout)(f)
python
def Async(f, n=None, timeout=None): """Concise usage for pool.submit. Basic Usage Asnyc & threads :: from torequests.main import Async, threads import time def use_submit(i): time.sleep(i) result = 'use_submit: %s' % i print(result) return result @threads() def use_decorator(i): time.sleep(i) result = 'use_decorator: %s' % i print(result) return result new_use_submit = Async(use_submit) tasks = [new_use_submit(i) for i in (2, 1, 0) ] + [use_decorator(i) for i in (2, 1, 0)] print([type(i) for i in tasks]) results = [i.x for i in tasks] print(results) # use_submit: 0 # use_decorator: 0 # [<class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>] # use_submit: 1 # use_decorator: 1 # use_submit: 2 # use_decorator: 2 # ['use_submit: 2', 'use_submit: 1', 'use_submit: 0', 'use_decorator: 2', 'use_decorator: 1', 'use_decorator: 0'] """ return threads(n=n, timeout=timeout)(f)
[ "def", "Async", "(", "f", ",", "n", "=", "None", ",", "timeout", "=", "None", ")", ":", "return", "threads", "(", "n", "=", "n", ",", "timeout", "=", "timeout", ")", "(", "f", ")" ]
Concise usage for pool.submit. Basic Usage Async & threads :: from torequests.main import Async, threads import time def use_submit(i): time.sleep(i) result = 'use_submit: %s' % i print(result) return result @threads() def use_decorator(i): time.sleep(i) result = 'use_decorator: %s' % i print(result) return result new_use_submit = Async(use_submit) tasks = [new_use_submit(i) for i in (2, 1, 0) ] + [use_decorator(i) for i in (2, 1, 0)] print([type(i) for i in tasks]) results = [i.x for i in tasks] print(results) # use_submit: 0 # use_decorator: 0 # [<class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>, <class 'torequests.main.NewFuture'>] # use_submit: 1 # use_decorator: 1 # use_submit: 2 # use_decorator: 2 # ['use_submit: 2', 'use_submit: 1', 'use_submit: 0', 'use_decorator: 2', 'use_decorator: 1', 'use_decorator: 0']
[ "Concise", "usage", "for", "pool", ".", "submit", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L394-L434
ClericPy/torequests
torequests/main.py
get_results_generator
def get_results_generator(future_list, timeout=None, sort_by_completed=False): """Return as a generator of tasks order by completed sequence.""" try: # python2 not support yield from if sort_by_completed: for future in as_completed(future_list, timeout=timeout): yield future.x else: for future in future_list: yield future.x except TimeoutError: return
python
def get_results_generator(future_list, timeout=None, sort_by_completed=False): """Return as a generator of tasks order by completed sequence.""" try: # python2 not support yield from if sort_by_completed: for future in as_completed(future_list, timeout=timeout): yield future.x else: for future in future_list: yield future.x except TimeoutError: return
[ "def", "get_results_generator", "(", "future_list", ",", "timeout", "=", "None", ",", "sort_by_completed", "=", "False", ")", ":", "try", ":", "# python2 not support yield from", "if", "sort_by_completed", ":", "for", "future", "in", "as_completed", "(", "future_list", ",", "timeout", "=", "timeout", ")", ":", "yield", "future", ".", "x", "else", ":", "for", "future", "in", "future_list", ":", "yield", "future", ".", "x", "except", "TimeoutError", ":", "return" ]
Return a generator of task results, ordered by completion sequence when sort_by_completed is set.
[ "Return", "as", "a", "generator", "of", "tasks", "order", "by", "completed", "sequence", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L442-L453
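A usage sketch for get_results_generator, assuming a torequests Pool whose default constructor arguments are acceptable and whose futures expose .x (the worker and sleep times are made up)::

    import time
    from torequests.main import Pool, get_results_generator

    def work(i):
        time.sleep(i)
        return i

    pool = Pool()
    futures = [pool.submit(work, i) for i in (2, 1, 0)]
    # with sort_by_completed=True results arrive as 0, 1, 2; otherwise in submission order
    for result in get_results_generator(futures, timeout=5, sort_by_completed=True):
        print(result)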
ClericPy/torequests
torequests/main.py
run_after_async
def run_after_async(seconds, func, *args, **kwargs): """Run the function after seconds asynchronously.""" t = Timer(seconds, func, args, kwargs) t.daemon = True t.start() return t
python
def run_after_async(seconds, func, *args, **kwargs): """Run the function after seconds asynchronously.""" t = Timer(seconds, func, args, kwargs) t.daemon = True t.start() return t
[ "def", "run_after_async", "(", "seconds", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "t", "=", "Timer", "(", "seconds", ",", "func", ",", "args", ",", "kwargs", ")", "t", ".", "daemon", "=", "True", "t", ".", "start", "(", ")", "return", "t" ]
Run the function after seconds asynchronously.
[ "Run", "the", "function", "after", "seconds", "asynchronously", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L456-L461
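A small usage sketch for run_after_async; it returns the started daemon Timer, so the caller may cancel it before it fires::

    import time
    from torequests.main import run_after_async

    timer = run_after_async(1, print, 'fired after one second')
    # timer.cancel() would stop it before it fires
    time.sleep(2)  # keep the main thread alive long enough to see the output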
ClericPy/torequests
torequests/main.py
NewExecutorPoolMixin.async_func
def async_func(self, function): """Decorator for let a normal function return the NewFuture""" @wraps(function) def wrapped(*args, **kwargs): return self.submit(function, *args, **kwargs) return wrapped
python
def async_func(self, function): """Decorator for let a normal function return the NewFuture""" @wraps(function) def wrapped(*args, **kwargs): return self.submit(function, *args, **kwargs) return wrapped
[ "def", "async_func", "(", "self", ",", "function", ")", ":", "@", "wraps", "(", "function", ")", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "submit", "(", "function", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapped" ]
Decorator to let a normal function return a NewFuture
[ "Decorator", "for", "let", "a", "normal", "function", "return", "the", "NewFuture" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L73-L80
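A sketch of async_func used as a decorator; it assumes Pool inherits this mixin (as the surrounding module suggests) and that the default Pool constructor is fine::

    from torequests.main import Pool

    pool = Pool()

    @pool.async_func
    def add(a, b):
        return a + b

    future = add(1, 2)   # returns a NewFuture instead of running inline
    print(future.x)      # 3, blocks until the pool has executed the task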
ClericPy/torequests
torequests/main.py
NewExecutorPoolMixin._get_cpu_count
def _get_cpu_count(self): """Get the cpu count.""" try: from multiprocessing import cpu_count return cpu_count() except Exception as e: Config.main_logger.error("_get_cpu_count failed for %s" % e)
python
def _get_cpu_count(self): """Get the cpu count.""" try: from multiprocessing import cpu_count return cpu_count() except Exception as e: Config.main_logger.error("_get_cpu_count failed for %s" % e)
[ "def", "_get_cpu_count", "(", "self", ")", ":", "try", ":", "from", "multiprocessing", "import", "cpu_count", "return", "cpu_count", "(", ")", "except", "Exception", "as", "e", ":", "Config", ".", "main_logger", ".", "error", "(", "\"_get_cpu_count failed for %s\"", "%", "e", ")" ]
Get the cpu count.
[ "Get", "the", "cpu", "count", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L86-L93
ClericPy/torequests
torequests/main.py
Pool.submit
def submit(self, func, *args, **kwargs): """Submit a function to the pool, `self.submit(function,arg1,arg2,arg3=3)`""" with self._shutdown_lock: if self._shutdown: raise RuntimeError("cannot schedule new futures after shutdown") callback = kwargs.pop("callback", self.default_callback) future = NewFuture( self._timeout, args, kwargs, callback=callback, catch_exception=self.catch_exception, ) w = _WorkItem(future, func, args, kwargs) self._work_queue.put(w) self._adjust_thread_count() self._all_futures.add(future) return future
python
def submit(self, func, *args, **kwargs): """Submit a function to the pool, `self.submit(function,arg1,arg2,arg3=3)`""" with self._shutdown_lock: if self._shutdown: raise RuntimeError("cannot schedule new futures after shutdown") callback = kwargs.pop("callback", self.default_callback) future = NewFuture( self._timeout, args, kwargs, callback=callback, catch_exception=self.catch_exception, ) w = _WorkItem(future, func, args, kwargs) self._work_queue.put(w) self._adjust_thread_count() self._all_futures.add(future) return future
[ "def", "submit", "(", "self", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "_shutdown_lock", ":", "if", "self", ".", "_shutdown", ":", "raise", "RuntimeError", "(", "\"cannot schedule new futures after shutdown\"", ")", "callback", "=", "kwargs", ".", "pop", "(", "\"callback\"", ",", "self", ".", "default_callback", ")", "future", "=", "NewFuture", "(", "self", ".", "_timeout", ",", "args", ",", "kwargs", ",", "callback", "=", "callback", ",", "catch_exception", "=", "self", ".", "catch_exception", ",", ")", "w", "=", "_WorkItem", "(", "future", ",", "func", ",", "args", ",", "kwargs", ")", "self", ".", "_work_queue", ".", "put", "(", "w", ")", "self", ".", "_adjust_thread_count", "(", ")", "self", ".", "_all_futures", ".", "add", "(", "future", ")", "return", "future" ]
Submit a function to the pool, `self.submit(function,arg1,arg2,arg3=3)`
[ "Submit", "a", "function", "to", "the", "pool", "self", ".", "submit", "(", "function", "arg1", "arg2", "arg3", "=", "3", ")" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L182-L200
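A usage sketch for Pool.submit with the callback keyword it pops from kwargs; the callback receives the finished future, matching the _invoke_callbacks record further down, and callback_result is assumed to be a property::

    from torequests.main import Pool

    def work(i):
        return i * 10

    def on_done(future):
        # the return value of a user callback becomes the future's callback_result
        return 'done: %s' % future.result()

    pool = Pool()
    future = pool.submit(work, 3, callback=on_done)
    print(future.x)                # 30
    print(future.callback_result)  # done: 30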
ClericPy/torequests
torequests/main.py
ProcessPool.submit
def submit(self, func, *args, **kwargs): """Submit a function to the pool, `self.submit(function,arg1,arg2,arg3=3)`""" with self._shutdown_lock: if PY3 and self._broken: raise BrokenProcessPool( "A child process terminated " "abruptly, the process pool is not usable anymore" ) if self._shutdown_thread: raise RuntimeError("cannot schedule new futures after shutdown") callback = kwargs.pop("callback", self.default_callback) future = NewFuture( self._timeout, args, kwargs, callback=callback, catch_exception=self.catch_exception, ) w = _WorkItem(future, func, args, kwargs) self._pending_work_items[self._queue_count] = w self._work_ids.put(self._queue_count) self._queue_count += 1 self._result_queue.put(None) self._start_queue_management_thread() if PY2: self._adjust_process_count() self._all_futures.add(future) return future
python
def submit(self, func, *args, **kwargs): """Submit a function to the pool, `self.submit(function,arg1,arg2,arg3=3)`""" with self._shutdown_lock: if PY3 and self._broken: raise BrokenProcessPool( "A child process terminated " "abruptly, the process pool is not usable anymore" ) if self._shutdown_thread: raise RuntimeError("cannot schedule new futures after shutdown") callback = kwargs.pop("callback", self.default_callback) future = NewFuture( self._timeout, args, kwargs, callback=callback, catch_exception=self.catch_exception, ) w = _WorkItem(future, func, args, kwargs) self._pending_work_items[self._queue_count] = w self._work_ids.put(self._queue_count) self._queue_count += 1 self._result_queue.put(None) self._start_queue_management_thread() if PY2: self._adjust_process_count() self._all_futures.add(future) return future
[ "def", "submit", "(", "self", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "_shutdown_lock", ":", "if", "PY3", "and", "self", ".", "_broken", ":", "raise", "BrokenProcessPool", "(", "\"A child process terminated \"", "\"abruptly, the process pool is not usable anymore\"", ")", "if", "self", ".", "_shutdown_thread", ":", "raise", "RuntimeError", "(", "\"cannot schedule new futures after shutdown\"", ")", "callback", "=", "kwargs", ".", "pop", "(", "\"callback\"", ",", "self", ".", "default_callback", ")", "future", "=", "NewFuture", "(", "self", ".", "_timeout", ",", "args", ",", "kwargs", ",", "callback", "=", "callback", ",", "catch_exception", "=", "self", ".", "catch_exception", ",", ")", "w", "=", "_WorkItem", "(", "future", ",", "func", ",", "args", ",", "kwargs", ")", "self", ".", "_pending_work_items", "[", "self", ".", "_queue_count", "]", "=", "w", "self", ".", "_work_ids", ".", "put", "(", "self", ".", "_queue_count", ")", "self", ".", "_queue_count", "+=", "1", "self", ".", "_result_queue", ".", "put", "(", "None", ")", "self", ".", "_start_queue_management_thread", "(", ")", "if", "PY2", ":", "self", ".", "_adjust_process_count", "(", ")", "self", ".", "_all_futures", ".", "add", "(", "future", ")", "return", "future" ]
Submit a function to the pool, `self.submit(function,arg1,arg2,arg3=3)`
[ "Submit", "a", "function", "to", "the", "pool", "self", ".", "submit", "(", "function", "arg1", "arg2", "arg3", "=", "3", ")" ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L256-L284
ClericPy/torequests
torequests/main.py
NewFuture._invoke_callbacks
def _invoke_callbacks(self): """Record the task_end_time & task_cost_time, set result for self._callback_result.""" self.task_end_time = time.time() self.task_cost_time = self.task_end_time - self.task_start_time with self._condition: for callback in self._done_callbacks: try: result = callback(self) if callback in self._user_callbacks: self._callback_result = result except Exception as e: Config.main_logger.error("exception calling callback for %s" % e) self._condition.notify_all()
python
def _invoke_callbacks(self): """Record the task_end_time & task_cost_time, set result for self._callback_result.""" self.task_end_time = time.time() self.task_cost_time = self.task_end_time - self.task_start_time with self._condition: for callback in self._done_callbacks: try: result = callback(self) if callback in self._user_callbacks: self._callback_result = result except Exception as e: Config.main_logger.error("exception calling callback for %s" % e) self._condition.notify_all()
[ "def", "_invoke_callbacks", "(", "self", ")", ":", "self", ".", "task_end_time", "=", "time", ".", "time", "(", ")", "self", ".", "task_cost_time", "=", "self", ".", "task_end_time", "-", "self", ".", "task_start_time", "with", "self", ".", "_condition", ":", "for", "callback", "in", "self", ".", "_done_callbacks", ":", "try", ":", "result", "=", "callback", "(", "self", ")", "if", "callback", "in", "self", ".", "_user_callbacks", ":", "self", ".", "_callback_result", "=", "result", "except", "Exception", "as", "e", ":", "Config", ".", "main_logger", ".", "error", "(", "\"exception calling callback for %s\"", "%", "e", ")", "self", ".", "_condition", ".", "notify_all", "(", ")" ]
Record the task_end_time & task_cost_time, set result for self._callback_result.
[ "Record", "the", "task_end_time", "&", "task_cost_time", "set", "result", "for", "self", ".", "_callback_result", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L332-L344
ClericPy/torequests
torequests/main.py
NewFuture.callback_result
def callback_result(self): """Block the main thead until future finish, return the future.callback_result.""" if self._state in [PENDING, RUNNING]: self.x if self._user_callbacks: return self._callback_result else: return self.x
python
def callback_result(self): """Block the main thead until future finish, return the future.callback_result.""" if self._state in [PENDING, RUNNING]: self.x if self._user_callbacks: return self._callback_result else: return self.x
[ "def", "callback_result", "(", "self", ")", ":", "if", "self", ".", "_state", "in", "[", "PENDING", ",", "RUNNING", "]", ":", "self", ".", "x", "if", "self", ".", "_user_callbacks", ":", "return", "self", ".", "_callback_result", "else", ":", "return", "self", ".", "x" ]
Block the main thead until future finish, return the future.callback_result.
[ "Block", "the", "main", "thead", "until", "future", "finish", "return", "the", "future", ".", "callback_result", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L357-L364
ClericPy/torequests
torequests/main.py
NewFuture.x
def x(self): """Block the main thead until future finish, return the future.result().""" with self._condition: result = None if not self.done(): self._condition.wait(self._timeout) if not self.done(): # timeout self.set_exception(TimeoutError()) if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: # cancelled result = CancelledError() elif self._state == FINISHED: # finished if self._exception: result = self._exception else: result = self._result if isinstance(result, Exception): if self.catch_exception: result = FailureException(result) return result else: raise result return result
python
def x(self): """Block the main thead until future finish, return the future.result().""" with self._condition: result = None if not self.done(): self._condition.wait(self._timeout) if not self.done(): # timeout self.set_exception(TimeoutError()) if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: # cancelled result = CancelledError() elif self._state == FINISHED: # finished if self._exception: result = self._exception else: result = self._result if isinstance(result, Exception): if self.catch_exception: result = FailureException(result) return result else: raise result return result
[ "def", "x", "(", "self", ")", ":", "with", "self", ".", "_condition", ":", "result", "=", "None", "if", "not", "self", ".", "done", "(", ")", ":", "self", ".", "_condition", ".", "wait", "(", "self", ".", "_timeout", ")", "if", "not", "self", ".", "done", "(", ")", ":", "# timeout", "self", ".", "set_exception", "(", "TimeoutError", "(", ")", ")", "if", "self", ".", "_state", "in", "[", "CANCELLED", ",", "CANCELLED_AND_NOTIFIED", "]", ":", "# cancelled", "result", "=", "CancelledError", "(", ")", "elif", "self", ".", "_state", "==", "FINISHED", ":", "# finished", "if", "self", ".", "_exception", ":", "result", "=", "self", ".", "_exception", "else", ":", "result", "=", "self", ".", "_result", "if", "isinstance", "(", "result", ",", "Exception", ")", ":", "if", "self", ".", "catch_exception", ":", "result", "=", "FailureException", "(", "result", ")", "return", "result", "else", ":", "raise", "result", "return", "result" ]
Block the main thead until future finish, return the future.result().
[ "Block", "the", "main", "thead", "until", "future", "finish", "return", "the", "future", ".", "result", "()", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L367-L391
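A hedged sketch of the timeout path of x described above. The constructor keywords (timeout, catch_exception) are assumptions inferred from the attributes the shown code reads; they are not confirmed by these records.

# Illustrative only: the Pool constructor keywords below are assumptions.
import time
from torequests.main import Pool

pool = Pool(timeout=0.05, catch_exception=True)
future = pool.submit(time.sleep, 1)
result = future.x   # waits at most ~0.05s, then a TimeoutError is set on the future
print(result)       # with catch_exception enabled, a FailureException wrapping the TimeoutError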
ClericPy/torequests
torequests/main.py
tPool.close
def close(self, wait=False): """Close session, shutdown pool.""" self.session.close() self.pool.shutdown(wait=wait)
python
def close(self, wait=False): """Close session, shutdown pool.""" self.session.close() self.pool.shutdown(wait=wait)
[ "def", "close", "(", "self", ",", "wait", "=", "False", ")", ":", "self", ".", "session", ".", "close", "(", ")", "self", ".", "pool", ".", "shutdown", "(", "wait", "=", "wait", ")" ]
Close session, shutdown pool.
[ "Close", "session", "shutdown", "pool", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L553-L556
ClericPy/torequests
torequests/main.py
tPool.request
def request(self, method, url, callback=None, retry=0, **kwargs): """Similar to `requests.request`, but return as NewFuture.""" return self.pool.submit( self._request, method=method, url=url, retry=retry, callback=callback or self.default_callback, **kwargs )
python
def request(self, method, url, callback=None, retry=0, **kwargs): """Similar to `requests.request`, but return as NewFuture.""" return self.pool.submit( self._request, method=method, url=url, retry=retry, callback=callback or self.default_callback, **kwargs )
[ "def", "request", "(", "self", ",", "method", ",", "url", ",", "callback", "=", "None", ",", "retry", "=", "0", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "pool", ".", "submit", "(", "self", ".", "_request", ",", "method", "=", "method", ",", "url", "=", "url", ",", "retry", "=", "retry", ",", "callback", "=", "callback", "or", "self", ".", "default_callback", ",", "*", "*", "kwargs", ")" ]
Similar to `requests.request`, but return as NewFuture.
[ "Similar", "to", "requests", ".", "request", "but", "return", "as", "NewFuture", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L604-L613
ClericPy/torequests
torequests/main.py
tPool.get
def get(self, url, params=None, callback=None, retry=0, **kwargs): """Similar to `requests.get`, but return as NewFuture.""" kwargs.setdefault("allow_redirects", True) return self.request( "get", url=url, params=params, callback=callback, retry=retry, **kwargs )
python
def get(self, url, params=None, callback=None, retry=0, **kwargs): """Similar to `requests.get`, but return as NewFuture.""" kwargs.setdefault("allow_redirects", True) return self.request( "get", url=url, params=params, callback=callback, retry=retry, **kwargs )
[ "def", "get", "(", "self", ",", "url", ",", "params", "=", "None", ",", "callback", "=", "None", ",", "retry", "=", "0", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "\"allow_redirects\"", ",", "True", ")", "return", "self", ".", "request", "(", "\"get\"", ",", "url", "=", "url", ",", "params", "=", "params", ",", "callback", "=", "callback", ",", "retry", "=", "retry", ",", "*", "*", "kwargs", ")" ]
Similar to `requests.get`, but return as NewFuture.
[ "Similar", "to", "requests", ".", "get", "but", "return", "as", "NewFuture", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L615-L620
ClericPy/torequests
torequests/main.py
tPool.post
def post(self, url, data=None, json=None, callback=None, retry=0, **kwargs): """Similar to `requests.post`, but return as NewFuture.""" return self.request( "post", url=url, data=data, json=json, callback=callback, retry=retry, **kwargs )
python
def post(self, url, data=None, json=None, callback=None, retry=0, **kwargs): """Similar to `requests.post`, but return as NewFuture.""" return self.request( "post", url=url, data=data, json=json, callback=callback, retry=retry, **kwargs )
[ "def", "post", "(", "self", ",", "url", ",", "data", "=", "None", ",", "json", "=", "None", ",", "callback", "=", "None", ",", "retry", "=", "0", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "request", "(", "\"post\"", ",", "url", "=", "url", ",", "data", "=", "data", ",", "json", "=", "json", ",", "callback", "=", "callback", ",", "retry", "=", "retry", ",", "*", "*", "kwargs", ")" ]
Similar to `requests.post`, but return as NewFuture.
[ "Similar", "to", "requests", ".", "post", "but", "return", "as", "NewFuture", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L622-L632
ClericPy/torequests
torequests/main.py
tPool.head
def head(self, url, callback=None, retry=0, **kwargs): """Similar to `requests.head`, but return as NewFuture.""" kwargs.setdefault("allow_redirects", False) return self.request("head", url=url, callback=callback, retry=retry, **kwargs)
python
def head(self, url, callback=None, retry=0, **kwargs): """Similar to `requests.head`, but return as NewFuture.""" kwargs.setdefault("allow_redirects", False) return self.request("head", url=url, callback=callback, retry=retry, **kwargs)
[ "def", "head", "(", "self", ",", "url", ",", "callback", "=", "None", ",", "retry", "=", "0", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "\"allow_redirects\"", ",", "False", ")", "return", "self", ".", "request", "(", "\"head\"", ",", "url", "=", "url", ",", "callback", "=", "callback", ",", "retry", "=", "retry", ",", "*", "*", "kwargs", ")" ]
Similar to `requests.head`, but return as NewFuture.
[ "Similar", "to", "requests", ".", "head", "but", "return", "as", "NewFuture", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L644-L647
ClericPy/torequests
torequests/main.py
tPool.options
def options(self, url, callback=None, retry=0, **kwargs): """Similar to `requests.options`, but return as NewFuture.""" kwargs.setdefault("allow_redirects", True) return self.request( "options", url=url, callback=callback, retry=retry, **kwargs )
python
def options(self, url, callback=None, retry=0, **kwargs): """Similar to `requests.options`, but return as NewFuture.""" kwargs.setdefault("allow_redirects", True) return self.request( "options", url=url, callback=callback, retry=retry, **kwargs )
[ "def", "options", "(", "self", ",", "url", ",", "callback", "=", "None", ",", "retry", "=", "0", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "\"allow_redirects\"", ",", "True", ")", "return", "self", ".", "request", "(", "\"options\"", ",", "url", "=", "url", ",", "callback", "=", "callback", ",", "retry", "=", "retry", ",", "*", "*", "kwargs", ")" ]
Similar to `requests.options`, but return as NewFuture.
[ "Similar", "to", "requests", ".", "options", "but", "return", "as", "NewFuture", "." ]
train
https://github.com/ClericPy/torequests/blob/1793261688d7a47e1c3a0830d83f8552f5e3e5d9/torequests/main.py#L649-L654
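A combined usage sketch for the tPool request helpers above (get, post, head, options). The top-level import path and the bare tPool() constructor are assumptions.

# Illustrative only: the `from torequests import tPool` path is an assumption.
from torequests import tPool

req = tPool()
# Every verb helper returns a NewFuture immediately; .x blocks for the response.
pages = [req.get("https://httpbin.org/get", retry=1) for _ in range(3)]
bodies = [p.x.text for p in pages]                        # assuming the requests succeed
status = req.head("https://httpbin.org").x.status_code    # head: allow_redirects defaults to False
req.close(wait=True)                                      # shut the session and the pool down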
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.getLinkInfo
def getLinkInfo(self, wanInterfaceId=1, timeout=1): """Execute GetInfo action to get basic WAN link information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: basic WAN link information's :rtype: WanLinkInfo """ namespace = Wan.getServiceType("getLinkInfo") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return WanLinkInfo(results)
python
def getLinkInfo(self, wanInterfaceId=1, timeout=1): """Execute GetInfo action to get basic WAN link information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: basic WAN link information's :rtype: WanLinkInfo """ namespace = Wan.getServiceType("getLinkInfo") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return WanLinkInfo(results)
[ "def", "getLinkInfo", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"getLinkInfo\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetInfo\"", ",", "timeout", "=", "timeout", ")", "return", "WanLinkInfo", "(", "results", ")" ]
Execute GetInfo action to get basic WAN link information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: basic WAN link information's :rtype: WanLinkInfo
[ "Execute", "GetInfo", "action", "to", "get", "basic", "WAN", "link", "information", "s", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L93-L106
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.getLinkProperties
def getLinkProperties(self, wanInterfaceId=1, timeout=1): """Execute GetCommonLinkProperties action to get WAN link properties. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: WAN link properties :rtype: WanLinkProperties """ namespace = Wan.getServiceType("getLinkProperties") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetCommonLinkProperties", timeout=timeout) return WanLinkProperties(results)
python
def getLinkProperties(self, wanInterfaceId=1, timeout=1): """Execute GetCommonLinkProperties action to get WAN link properties. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: WAN link properties :rtype: WanLinkProperties """ namespace = Wan.getServiceType("getLinkProperties") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetCommonLinkProperties", timeout=timeout) return WanLinkProperties(results)
[ "def", "getLinkProperties", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"getLinkProperties\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetCommonLinkProperties\"", ",", "timeout", "=", "timeout", ")", "return", "WanLinkProperties", "(", "results", ")" ]
Execute GetCommonLinkProperties action to get WAN link properties. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: WAN link properties :rtype: WanLinkProperties
[ "Execute", "GetCommonLinkProperties", "action", "to", "get", "WAN", "link", "properties", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L108-L121
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.getADSLInfo
def getADSLInfo(self, wanInterfaceId=1, timeout=1): """Execute GetInfo action to get basic ADSL information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: ADSL informations. :rtype: ADSLInfo """ namespace = Wan.getServiceType("getADSLInfo") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return ADSLInfo(results)
python
def getADSLInfo(self, wanInterfaceId=1, timeout=1): """Execute GetInfo action to get basic ADSL information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: ADSL informations. :rtype: ADSLInfo """ namespace = Wan.getServiceType("getADSLInfo") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return ADSLInfo(results)
[ "def", "getADSLInfo", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"getADSLInfo\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetInfo\"", ",", "timeout", "=", "timeout", ")", "return", "ADSLInfo", "(", "results", ")" ]
Execute GetInfo action to get basic ADSL information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: ADSL informations. :rtype: ADSLInfo
[ "Execute", "GetInfo", "action", "to", "get", "basic", "ADSL", "information", "s", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L123-L136
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.getEthernetLinkStatus
def getEthernetLinkStatus(self, wanInterfaceId=1, timeout=1): """Execute GetEthernetLinkStatus action to get the status of the ethernet link. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: status of the ethernet link :rtype: str """ namespace = Wan.getServiceType("getEthernetLinkStatus") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetEthernetLinkStatus", timeout=timeout) return results["NewEthernetLinkStatus"]
python
def getEthernetLinkStatus(self, wanInterfaceId=1, timeout=1): """Execute GetEthernetLinkStatus action to get the status of the ethernet link. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: status of the ethernet link :rtype: str """ namespace = Wan.getServiceType("getEthernetLinkStatus") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetEthernetLinkStatus", timeout=timeout) return results["NewEthernetLinkStatus"]
[ "def", "getEthernetLinkStatus", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"getEthernetLinkStatus\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetEthernetLinkStatus\"", ",", "timeout", "=", "timeout", ")", "return", "results", "[", "\"NewEthernetLinkStatus\"", "]" ]
Execute GetEthernetLinkStatus action to get the status of the ethernet link. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: status of the ethernet link :rtype: str
[ "Execute", "GetEthernetLinkStatus", "action", "to", "get", "the", "status", "of", "the", "ethernet", "link", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L138-L151
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.getByteStatistic
def getByteStatistic(self, wanInterfaceId=1, timeout=1): """Execute GetTotalBytesSent&GetTotalBytesReceived actions to get WAN statistics. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: a tuple of two values, total bytes sent and total bytes received :rtype: list[int] """ namespace = Wan.getServiceType("getByteStatistic") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetTotalBytesSent", timeout=timeout) results2 = self.execute(uri, namespace, "GetTotalBytesReceived", timeout=timeout) return [int(results["NewTotalBytesSent"]), int(results2["NewTotalBytesReceived"])]
python
def getByteStatistic(self, wanInterfaceId=1, timeout=1): """Execute GetTotalBytesSent&GetTotalBytesReceived actions to get WAN statistics. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: a tuple of two values, total bytes sent and total bytes received :rtype: list[int] """ namespace = Wan.getServiceType("getByteStatistic") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetTotalBytesSent", timeout=timeout) results2 = self.execute(uri, namespace, "GetTotalBytesReceived", timeout=timeout) return [int(results["NewTotalBytesSent"]), int(results2["NewTotalBytesReceived"])]
[ "def", "getByteStatistic", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"getByteStatistic\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetTotalBytesSent\"", ",", "timeout", "=", "timeout", ")", "results2", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetTotalBytesReceived\"", ",", "timeout", "=", "timeout", ")", "return", "[", "int", "(", "results", "[", "\"NewTotalBytesSent\"", "]", ")", ",", "int", "(", "results2", "[", "\"NewTotalBytesReceived\"", "]", ")", "]" ]
Execute GetTotalBytesSent&GetTotalBytesReceived actions to get WAN statistics. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: a tuple of two values, total bytes sent and total bytes received :rtype: list[int]
[ "Execute", "GetTotalBytesSent&GetTotalBytesReceived", "actions", "to", "get", "WAN", "statistics", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L153-L168
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.getConnectionInfo
def getConnectionInfo(self, wanInterfaceId=1, timeout=1): """Execute GetInfo action to get WAN connection information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: WAN connection information's. :rtype: ConnectionInfo """ namespace = Wan.getServiceType("getConnectionInfo") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return ConnectionInfo(results)
python
def getConnectionInfo(self, wanInterfaceId=1, timeout=1): """Execute GetInfo action to get WAN connection information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: WAN connection information's. :rtype: ConnectionInfo """ namespace = Wan.getServiceType("getConnectionInfo") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return ConnectionInfo(results)
[ "def", "getConnectionInfo", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"getConnectionInfo\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetInfo\"", ",", "timeout", "=", "timeout", ")", "return", "ConnectionInfo", "(", "results", ")" ]
Execute GetInfo action to get WAN connection information's. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: WAN connection information's. :rtype: ConnectionInfo
[ "Execute", "GetInfo", "action", "to", "get", "WAN", "connection", "information", "s", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L187-L200
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.setEnable
def setEnable(self, status, wanInterfaceId=1, timeout=1): """Set enable status for a WAN interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int wanInterfaceId: the id of the WAN interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wan.getServiceType("setEnable") + str(wanInterfaceId) uri = self.getControlURL(namespace) if status: setStatus = 1 else: setStatus = 0 self.execute(uri, namespace, "SetEnable", timeout=timeout, NewEnable=setStatus)
python
def setEnable(self, status, wanInterfaceId=1, timeout=1): """Set enable status for a WAN interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int wanInterfaceId: the id of the WAN interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wan.getServiceType("setEnable") + str(wanInterfaceId) uri = self.getControlURL(namespace) if status: setStatus = 1 else: setStatus = 0 self.execute(uri, namespace, "SetEnable", timeout=timeout, NewEnable=setStatus)
[ "def", "setEnable", "(", "self", ",", "status", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"setEnable\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "if", "status", ":", "setStatus", "=", "1", "else", ":", "setStatus", "=", "0", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"SetEnable\"", ",", "timeout", "=", "timeout", ",", "NewEnable", "=", "setStatus", ")" ]
Set enable status for a WAN interface, be careful you don't cut yourself off. :param bool status: enable or disable the interface :param int wanInterfaceId: the id of the WAN interface :param float timeout: the timeout to wait for the action to be executed
[ "Set", "enable", "status", "for", "a", "WAN", "interface", "be", "careful", "you", "don", "t", "cut", "yourself", "off", "." ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L202-L217
bpannier/simpletr64
simpletr64/actions/wan.py
Wan.requestConnection
def requestConnection(self, wanInterfaceId=1, timeout=1): """Request the connection to be established :param int wanInterfaceId: the id of the WAN interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wan.getServiceType("requestConnection") + str(wanInterfaceId) uri = self.getControlURL(namespace) self.execute(uri, namespace, "RequestConnection", timeout=timeout)
python
def requestConnection(self, wanInterfaceId=1, timeout=1): """Request the connection to be established :param int wanInterfaceId: the id of the WAN interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wan.getServiceType("requestConnection") + str(wanInterfaceId) uri = self.getControlURL(namespace) self.execute(uri, namespace, "RequestConnection", timeout=timeout)
[ "def", "requestConnection", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"requestConnection\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"RequestConnection\"", ",", "timeout", "=", "timeout", ")" ]
Request the connection to be established :param int wanInterfaceId: the id of the WAN interface :param float timeout: the timeout to wait for the action to be executed
[ "Request", "the", "connection", "to", "be", "established" ]
train
https://github.com/bpannier/simpletr64/blob/31081139f4e6c85084a56de1617df73927135466/simpletr64/actions/wan.py#L219-L228
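A usage sketch covering several of the Wan actions above. The Wan constructor arguments and any device-definition setup that may be required before executing actions are assumptions — only the action methods appear in these records.

# Illustrative only: hostname/port and the setup steps are assumptions.
from simpletr64.actions.wan import Wan

wan = Wan(hostname="192.168.178.1", port=49000)
# Depending on the device, service/control URLs may need to be discovered first
# (e.g. via simpletr64's device-definition helpers) before calling actions.
link = wan.getLinkInfo(wanInterfaceId=1, timeout=2)
sent, received = wan.getByteStatistic(wanInterfaceId=1)
print(wan.getEthernetLinkStatus(), sent, received)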
vsoch/helpme
helpme/utils/fileio.py
write_config
def write_config(filename, config, mode="w"): '''use configparser to write a config object to filename ''' with open(filename, mode) as filey: config.write(filey) return filename
python
def write_config(filename, config, mode="w"): '''use configparser to write a config object to filename ''' with open(filename, mode) as filey: config.write(filey) return filename
[ "def", "write_config", "(", "filename", ",", "config", ",", "mode", "=", "\"w\"", ")", ":", "with", "open", "(", "filename", ",", "mode", ")", "as", "filey", ":", "config", ".", "write", "(", "filey", ")", "return", "filename" ]
use configparser to write a config object to filename
[ "use", "configparser", "to", "write", "a", "config", "object", "to", "filename" ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/fileio.py#L55-L60
vsoch/helpme
helpme/utils/fileio.py
generate_temporary_file
def generate_temporary_file(folder='/tmp', prefix='helpme', ext='json'): '''write a temporary file, in base directory with a particular extension. Parameters ========== folder: the base directory to write in. prefix: the prefix to use ext: the extension to use. ''' tmp = next(tempfile._get_candidate_names()) return '%s/%s.%s.%s' %(folder, prefix, tmp, ext)
python
def generate_temporary_file(folder='/tmp', prefix='helpme', ext='json'): '''write a temporary file, in base directory with a particular extension. Parameters ========== folder: the base directory to write in. prefix: the prefix to use ext: the extension to use. ''' tmp = next(tempfile._get_candidate_names()) return '%s/%s.%s.%s' %(folder, prefix, tmp, ext)
[ "def", "generate_temporary_file", "(", "folder", "=", "'/tmp'", ",", "prefix", "=", "'helpme'", ",", "ext", "=", "'json'", ")", ":", "tmp", "=", "next", "(", "tempfile", ".", "_get_candidate_names", "(", ")", ")", "return", "'%s/%s.%s.%s'", "%", "(", "folder", ",", "prefix", ",", "tmp", ",", "ext", ")" ]
write a temporary file, in base directory with a particular extension. Parameters ========== folder: the base directory to write in. prefix: the prefix to use ext: the extension to use.
[ "write", "a", "temporary", "file", "in", "base", "directory", "with", "a", "particular", "extension", ".", "Parameters", "==========", "folder", ":", "the", "base", "directory", "to", "write", "in", ".", "prefix", ":", "the", "prefix", "to", "use", "ext", ":", "the", "extension", "to", "use", "." ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/fileio.py#L73-L84
vsoch/helpme
helpme/utils/fileio.py
copyfile
def copyfile(source, destination, force=True): '''copy a file from a source to its destination. ''' if os.path.exists(destination) and force is True: os.remove(destination) shutil.copyfile(source, destination) return destination
python
def copyfile(source, destination, force=True): '''copy a file from a source to its destination. ''' if os.path.exists(destination) and force is True: os.remove(destination) shutil.copyfile(source, destination) return destination
[ "def", "copyfile", "(", "source", ",", "destination", ",", "force", "=", "True", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "destination", ")", "and", "force", "is", "True", ":", "os", ".", "remove", "(", "destination", ")", "shutil", ".", "copyfile", "(", "source", ",", "destination", ")", "return", "destination" ]
copy a file from a source to its destination.
[ "copy", "a", "file", "from", "a", "source", "to", "its", "destination", "." ]
train
https://github.com/vsoch/helpme/blob/e609172260b10cddadb2d2023ab26da8082a9feb/helpme/utils/fileio.py#L87-L93
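A small round trip with the three file helpers above; the import path is an assumption taken from the file path in the records.

# Illustrative only: the import path below is an assumption.
import configparser
from helpme.utils.fileio import write_config, generate_temporary_file, copyfile

config = configparser.ConfigParser()
config["github"] = {"personal_access_token": "xxxx"}

path = generate_temporary_file(prefix="helpme", ext="cfg")  # only builds a name under /tmp
write_config(path, config)                                   # actually creates the file
backup = copyfile(path, path + ".bak")                       # force=True overwrites an existing copy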
CentOS/python-cicoclient
cicoclient/wrapper.py
CicoWrapper.full_inventory
def full_inventory(self): """ Returns a full inventory Some additional work required to provide consistent and consumable output. Inventory output only contains values, no keys - Add the keys to the output so that it can be consumed more easily. """ if self._full_inventory: return self._full_inventory resp, inventory = self.get('Inventory') keys = ['host_id', 'hostname', 'ip_address', 'chassis', 'used_count', 'current_state', 'comment', 'distro', 'rel', 'centos_version', 'architecture', 'node_pool', 'console_port', 'flavor'] real_inventory = dict() for host in inventory: real_inventory[host[1]] = dict() for key in keys: real_inventory[host[1]][key] = host[keys.index(key)] self._full_inventory = real_inventory return self._full_inventory
python
def full_inventory(self): """ Returns a full inventory Some additional work required to provide consistent and consumable output. Inventory output only contains values, no keys - Add the keys to the output so that it can be consumed more easily. """ if self._full_inventory: return self._full_inventory resp, inventory = self.get('Inventory') keys = ['host_id', 'hostname', 'ip_address', 'chassis', 'used_count', 'current_state', 'comment', 'distro', 'rel', 'centos_version', 'architecture', 'node_pool', 'console_port', 'flavor'] real_inventory = dict() for host in inventory: real_inventory[host[1]] = dict() for key in keys: real_inventory[host[1]][key] = host[keys.index(key)] self._full_inventory = real_inventory return self._full_inventory
[ "def", "full_inventory", "(", "self", ")", ":", "if", "self", ".", "_full_inventory", ":", "return", "self", ".", "_full_inventory", "resp", ",", "inventory", "=", "self", ".", "get", "(", "'Inventory'", ")", "keys", "=", "[", "'host_id'", ",", "'hostname'", ",", "'ip_address'", ",", "'chassis'", ",", "'used_count'", ",", "'current_state'", ",", "'comment'", ",", "'distro'", ",", "'rel'", ",", "'centos_version'", ",", "'architecture'", ",", "'node_pool'", ",", "'console_port'", ",", "'flavor'", "]", "real_inventory", "=", "dict", "(", ")", "for", "host", "in", "inventory", ":", "real_inventory", "[", "host", "[", "1", "]", "]", "=", "dict", "(", ")", "for", "key", "in", "keys", ":", "real_inventory", "[", "host", "[", "1", "]", "]", "[", "key", "]", "=", "host", "[", "keys", ".", "index", "(", "key", ")", "]", "self", ".", "_full_inventory", "=", "real_inventory", "return", "self", ".", "_full_inventory" ]
Returns a full inventory Some additional work required to provide consistent and consumable output. Inventory output only contains values, no keys - Add the keys to the output so that it can be consumed more easily.
[ "Returns", "a", "full", "inventory", "Some", "additional", "work", "required", "to", "provide", "consistent", "and", "consumable", "output", ".", "Inventory", "output", "only", "contains", "values", "no", "keys", "-", "Add", "the", "keys", "to", "the", "output", "so", "that", "it", "can", "be", "consumed", "more", "easily", "." ]
train
https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/wrapper.py#L55-L81
CentOS/python-cicoclient
cicoclient/wrapper.py
CicoWrapper.self_inventory
def self_inventory(self): """ Inventory output will only contain the server name and the session ID when a key is provided. Provide the same format as with the full inventory instead for consistency. """ if self.api_key is None: return {} if self._self_inventory: return self._self_inventory resp, self_inventory = self.get('Inventory?key=%s' % self.api_key) real_self_inventory = dict() for host in self_inventory: real_self_inventory[host[0]] = self.full_inventory[host[0]] self._self_inventory = real_self_inventory return self._self_inventory
python
def self_inventory(self): """ Inventory output will only contain the server name and the session ID when a key is provided. Provide the same format as with the full inventory instead for consistency. """ if self.api_key is None: return {} if self._self_inventory: return self._self_inventory resp, self_inventory = self.get('Inventory?key=%s' % self.api_key) real_self_inventory = dict() for host in self_inventory: real_self_inventory[host[0]] = self.full_inventory[host[0]] self._self_inventory = real_self_inventory return self._self_inventory
[ "def", "self_inventory", "(", "self", ")", ":", "if", "self", ".", "api_key", "is", "None", ":", "return", "{", "}", "if", "self", ".", "_self_inventory", ":", "return", "self", ".", "_self_inventory", "resp", ",", "self_inventory", "=", "self", ".", "get", "(", "'Inventory?key=%s'", "%", "self", ".", "api_key", ")", "real_self_inventory", "=", "dict", "(", ")", "for", "host", "in", "self_inventory", ":", "real_self_inventory", "[", "host", "[", "0", "]", "]", "=", "self", ".", "full_inventory", "[", "host", "[", "0", "]", "]", "self", ".", "_self_inventory", "=", "real_self_inventory", "return", "self", ".", "_self_inventory" ]
Inventory output will only contain the server name and the session ID when a key is provided. Provide the same format as with the full inventory instead for consistency.
[ "Inventory", "output", "will", "only", "contain", "the", "server", "name", "and", "the", "session", "ID", "when", "a", "key", "is", "provided", ".", "Provide", "the", "same", "format", "as", "with", "the", "full", "inventory", "instead", "for", "consistency", "." ]
train
https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/wrapper.py#L84-L104
CentOS/python-cicoclient
cicoclient/wrapper.py
CicoWrapper._ssid_inventory
def _ssid_inventory(self, inventory, ssid): """ Filters an inventory to only return servers matching ssid """ matching_hosts = {} for host in inventory: if inventory[host]['comment'] == ssid: matching_hosts[host] = inventory[host] return matching_hosts
python
def _ssid_inventory(self, inventory, ssid): """ Filters an inventory to only return servers matching ssid """ matching_hosts = {} for host in inventory: if inventory[host]['comment'] == ssid: matching_hosts[host] = inventory[host] return matching_hosts
[ "def", "_ssid_inventory", "(", "self", ",", "inventory", ",", "ssid", ")", ":", "matching_hosts", "=", "{", "}", "for", "host", "in", "inventory", ":", "if", "inventory", "[", "host", "]", "[", "'comment'", "]", "==", "ssid", ":", "matching_hosts", "[", "host", "]", "=", "inventory", "[", "host", "]", "return", "matching_hosts" ]
Filters an inventory to only return servers matching ssid
[ "Filters", "an", "inventory", "to", "only", "return", "servers", "matching", "ssid" ]
train
https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/wrapper.py#L106-L115
CentOS/python-cicoclient
cicoclient/wrapper.py
CicoWrapper.inventory
def inventory(self, all=False, ssid=None): """ Returns a node inventory. If an API key is specified, only the nodes provisioned by this key will be returned. :return: { inventory } """ if all or self.api_key is None: if ssid is not None: return self._ssid_inventory(self.full_inventory, ssid) else: return self.full_inventory else: if ssid is not None: return self._ssid_inventory(self.self_inventory, ssid) else: return self.self_inventory
python
def inventory(self, all=False, ssid=None): """ Returns a node inventory. If an API key is specified, only the nodes provisioned by this key will be returned. :return: { inventory } """ if all or self.api_key is None: if ssid is not None: return self._ssid_inventory(self.full_inventory, ssid) else: return self.full_inventory else: if ssid is not None: return self._ssid_inventory(self.self_inventory, ssid) else: return self.self_inventory
[ "def", "inventory", "(", "self", ",", "all", "=", "False", ",", "ssid", "=", "None", ")", ":", "if", "all", "or", "self", ".", "api_key", "is", "None", ":", "if", "ssid", "is", "not", "None", ":", "return", "self", ".", "_ssid_inventory", "(", "self", ".", "full_inventory", ",", "ssid", ")", "else", ":", "return", "self", ".", "full_inventory", "else", ":", "if", "ssid", "is", "not", "None", ":", "return", "self", ".", "_ssid_inventory", "(", "self", ".", "self_inventory", ",", "ssid", ")", "else", ":", "return", "self", ".", "self_inventory" ]
Returns a node inventory. If an API key is specified, only the nodes provisioned by this key will be returned. :return: { inventory }
[ "Returns", "a", "node", "inventory", ".", "If", "an", "API", "key", "is", "specified", "only", "the", "nodes", "provisioned", "by", "this", "key", "will", "be", "returned", "." ]
train
https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/wrapper.py#L117-L133
CentOS/python-cicoclient
cicoclient/wrapper.py
CicoWrapper.node_get
def node_get(self, arch=None, ver=None, flavor=None, count=1, retry_count=1, retry_interval=10): """ Requests specified number of nodes with the provided parameters. :param arch: Server architecture (ex: x86_64) :param ver: CentOS version (ex: 7) :param count: Number of servers (ex: 2) :parma flavor: The flavor of machine to use (multi-arch only) :param retry_count: Number of times to retry in case of failure (ex: 5) :param retry_interval: Wait in seconds between each retry (ex: 30) :return: [ [ requested_hosts ], ssid ] """ if self.api_key is None: raise exceptions.ApiKeyRequired args = "key=%s" % self.api_key if arch is not None: args += "&arch=%s" % arch if ver is not None: args += "&ver=%s" % ver if flavor is not None: args += "&flavor=%s" % flavor args += "&count=%s" % count resp, body = self.get('Node/get?%s' % args) if not body: for _ in range(retry_count): time.sleep(retry_interval) resp, body = self.get('Node/get?%s' % args) if body: break if not body: raise exceptions.NoInventory # Get the hosts that were requested. # Note: We have to iterate over full inventory instead of just the # hosts we got back from the response because the reply contains the # fqdn of the host while the full inventory only contains a short name. requested_hosts = dict() for host in self.full_inventory: for full_host in body['hosts']: if host in full_host: requested_hosts[host] = self.full_inventory[host] return requested_hosts, body['ssid']
python
def node_get(self, arch=None, ver=None, flavor=None, count=1, retry_count=1, retry_interval=10): """ Requests specified number of nodes with the provided parameters. :param arch: Server architecture (ex: x86_64) :param ver: CentOS version (ex: 7) :param count: Number of servers (ex: 2) :parma flavor: The flavor of machine to use (multi-arch only) :param retry_count: Number of times to retry in case of failure (ex: 5) :param retry_interval: Wait in seconds between each retry (ex: 30) :return: [ [ requested_hosts ], ssid ] """ if self.api_key is None: raise exceptions.ApiKeyRequired args = "key=%s" % self.api_key if arch is not None: args += "&arch=%s" % arch if ver is not None: args += "&ver=%s" % ver if flavor is not None: args += "&flavor=%s" % flavor args += "&count=%s" % count resp, body = self.get('Node/get?%s' % args) if not body: for _ in range(retry_count): time.sleep(retry_interval) resp, body = self.get('Node/get?%s' % args) if body: break if not body: raise exceptions.NoInventory # Get the hosts that were requested. # Note: We have to iterate over full inventory instead of just the # hosts we got back from the response because the reply contains the # fqdn of the host while the full inventory only contains a short name. requested_hosts = dict() for host in self.full_inventory: for full_host in body['hosts']: if host in full_host: requested_hosts[host] = self.full_inventory[host] return requested_hosts, body['ssid']
[ "def", "node_get", "(", "self", ",", "arch", "=", "None", ",", "ver", "=", "None", ",", "flavor", "=", "None", ",", "count", "=", "1", ",", "retry_count", "=", "1", ",", "retry_interval", "=", "10", ")", ":", "if", "self", ".", "api_key", "is", "None", ":", "raise", "exceptions", ".", "ApiKeyRequired", "args", "=", "\"key=%s\"", "%", "self", ".", "api_key", "if", "arch", "is", "not", "None", ":", "args", "+=", "\"&arch=%s\"", "%", "arch", "if", "ver", "is", "not", "None", ":", "args", "+=", "\"&ver=%s\"", "%", "ver", "if", "flavor", "is", "not", "None", ":", "args", "+=", "\"&flavor=%s\"", "%", "flavor", "args", "+=", "\"&count=%s\"", "%", "count", "resp", ",", "body", "=", "self", ".", "get", "(", "'Node/get?%s'", "%", "args", ")", "if", "not", "body", ":", "for", "_", "in", "range", "(", "retry_count", ")", ":", "time", ".", "sleep", "(", "retry_interval", ")", "resp", ",", "body", "=", "self", ".", "get", "(", "'Node/get?%s'", "%", "args", ")", "if", "body", ":", "break", "if", "not", "body", ":", "raise", "exceptions", ".", "NoInventory", "# Get the hosts that were requested.", "# Note: We have to iterate over full inventory instead of just the", "# hosts we got back from the response because the reply contains the", "# fqdn of the host while the full inventory only contains a short name.", "requested_hosts", "=", "dict", "(", ")", "for", "host", "in", "self", ".", "full_inventory", ":", "for", "full_host", "in", "body", "[", "'hosts'", "]", ":", "if", "host", "in", "full_host", ":", "requested_hosts", "[", "host", "]", "=", "self", ".", "full_inventory", "[", "host", "]", "return", "requested_hosts", ",", "body", "[", "'ssid'", "]" ]
Requests specified number of nodes with the provided parameters. :param arch: Server architecture (ex: x86_64) :param ver: CentOS version (ex: 7) :param count: Number of servers (ex: 2) :parma flavor: The flavor of machine to use (multi-arch only) :param retry_count: Number of times to retry in case of failure (ex: 5) :param retry_interval: Wait in seconds between each retry (ex: 30) :return: [ [ requested_hosts ], ssid ]
[ "Requests", "specified", "number", "of", "nodes", "with", "the", "provided", "parameters", "." ]
train
https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/wrapper.py#L135-L181
CentOS/python-cicoclient
cicoclient/wrapper.py
CicoWrapper.node_done
def node_done(self, ssid=None): """ Release the servers for the specified ssid. The API doesn't provide any kind of output, try to be helpful by providing the list of servers to be released. :param ssid: ssid of the server pool :return: [ requested_hosts ] """ if self.api_key is None: raise exceptions.ApiKeyRequired if ssid is None: raise exceptions.SsidRequired # There is no body replied in this call so at least get the hosts for # the specified ssid to return them. requested_hosts = dict() for host in self.self_inventory: if ssid == self.self_inventory[host]['comment']: requested_hosts[host] = self.full_inventory[host] args = "key={key}&ssid={ssid}".format(key=self.api_key, ssid=ssid) resp, body = self.get('Node/done?%s' % args) return requested_hosts
python
def node_done(self, ssid=None): """ Release the servers for the specified ssid. The API doesn't provide any kind of output, try to be helpful by providing the list of servers to be released. :param ssid: ssid of the server pool :return: [ requested_hosts ] """ if self.api_key is None: raise exceptions.ApiKeyRequired if ssid is None: raise exceptions.SsidRequired # There is no body replied in this call so at least get the hosts for # the specified ssid to return them. requested_hosts = dict() for host in self.self_inventory: if ssid == self.self_inventory[host]['comment']: requested_hosts[host] = self.full_inventory[host] args = "key={key}&ssid={ssid}".format(key=self.api_key, ssid=ssid) resp, body = self.get('Node/done?%s' % args) return requested_hosts
[ "def", "node_done", "(", "self", ",", "ssid", "=", "None", ")", ":", "if", "self", ".", "api_key", "is", "None", ":", "raise", "exceptions", ".", "ApiKeyRequired", "if", "ssid", "is", "None", ":", "raise", "exceptions", ".", "SsidRequired", "# There is no body replied in this call so at least get the hosts for", "# the specified ssid to return them.", "requested_hosts", "=", "dict", "(", ")", "for", "host", "in", "self", ".", "self_inventory", ":", "if", "ssid", "==", "self", ".", "self_inventory", "[", "host", "]", "[", "'comment'", "]", ":", "requested_hosts", "[", "host", "]", "=", "self", ".", "full_inventory", "[", "host", "]", "args", "=", "\"key={key}&ssid={ssid}\"", ".", "format", "(", "key", "=", "self", ".", "api_key", ",", "ssid", "=", "ssid", ")", "resp", ",", "body", "=", "self", ".", "get", "(", "'Node/done?%s'", "%", "args", ")", "return", "requested_hosts" ]
Release the servers for the specified ssid. The API doesn't provide any kind of output, try to be helpful by providing the list of servers to be released. :param ssid: ssid of the server pool :return: [ requested_hosts ]
[ "Release", "the", "servers", "for", "the", "specified", "ssid", ".", "The", "API", "doesn", "t", "provide", "any", "kind", "of", "output", "try", "to", "be", "helpful", "by", "providing", "the", "list", "of", "servers", "to", "be", "released", "." ]
train
https://github.com/CentOS/python-cicoclient/blob/ffee34f446ceb25348b13a500d5c545df202c182/cicoclient/wrapper.py#L183-L209
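A hypothetical request-and-release cycle for the CicoWrapper methods above; the constructor keyword names (endpoint, api_key) and their values are assumptions.

# Illustrative only: endpoint/api_key keyword names are assumptions.
from cicoclient.wrapper import CicoWrapper

api = CicoWrapper(endpoint="http://admin.ci.centos.org:8080/", api_key="secret")
hosts, ssid = api.node_get(arch="x86_64", ver="7", count=2,
                           retry_count=3, retry_interval=30)
try:
    for name, data in hosts.items():
        print(name, data["ip_address"], data["current_state"])
finally:
    api.node_done(ssid=ssid)   # always hand the borrowed nodes back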
launchdarkly/relayCommander
relay_commander/rc.py
update_redis
def update_redis(project: str, environment: str, feature: str, state: str) \ -> None: """ Update redis state for a feature flag. :param project: LaunchDarkly project key. :param environment: LaunchDarkly environment key. :param feature: LaunchDarkly feature key. :param state: State for a feature flag. """ try: hosts = RedisWrapper.connection_string_parser( os.environ.get('REDIS_HOSTS')) except RuntimeError as ex: LOG.error(ex) sys.exit(1) for host in hosts: LOG.info("connecting to %s:%s", host.host, host.port) try: if valid_state(state): new_state = state.lower() redis = RedisWrapper( host.host, host.port, project, environment ) redis.update_flag_record(new_state, feature) create_file(project, environment, feature, new_state) LOG.info("%s was successfully updated.", feature) else: raise Exception('Invalid state: {0}, -s needs \ to be either on or off.'.format(state)) except KeyError as ex: LOG.error("unable to update %s. Exception: %s", host.host, ex) sys.exit(1)
python
def update_redis(project: str, environment: str, feature: str, state: str) \ -> None: """ Update redis state for a feature flag. :param project: LaunchDarkly project key. :param environment: LaunchDarkly environment key. :param feature: LaunchDarkly feature key. :param state: State for a feature flag. """ try: hosts = RedisWrapper.connection_string_parser( os.environ.get('REDIS_HOSTS')) except RuntimeError as ex: LOG.error(ex) sys.exit(1) for host in hosts: LOG.info("connecting to %s:%s", host.host, host.port) try: if valid_state(state): new_state = state.lower() redis = RedisWrapper( host.host, host.port, project, environment ) redis.update_flag_record(new_state, feature) create_file(project, environment, feature, new_state) LOG.info("%s was successfully updated.", feature) else: raise Exception('Invalid state: {0}, -s needs \ to be either on or off.'.format(state)) except KeyError as ex: LOG.error("unable to update %s. Exception: %s", host.host, ex) sys.exit(1)
[ "def", "update_redis", "(", "project", ":", "str", ",", "environment", ":", "str", ",", "feature", ":", "str", ",", "state", ":", "str", ")", "->", "None", ":", "try", ":", "hosts", "=", "RedisWrapper", ".", "connection_string_parser", "(", "os", ".", "environ", ".", "get", "(", "'REDIS_HOSTS'", ")", ")", "except", "RuntimeError", "as", "ex", ":", "LOG", ".", "error", "(", "ex", ")", "sys", ".", "exit", "(", "1", ")", "for", "host", "in", "hosts", ":", "LOG", ".", "info", "(", "\"connecting to %s:%s\"", ",", "host", ".", "host", ",", "host", ".", "port", ")", "try", ":", "if", "valid_state", "(", "state", ")", ":", "new_state", "=", "state", ".", "lower", "(", ")", "redis", "=", "RedisWrapper", "(", "host", ".", "host", ",", "host", ".", "port", ",", "project", ",", "environment", ")", "redis", ".", "update_flag_record", "(", "new_state", ",", "feature", ")", "create_file", "(", "project", ",", "environment", ",", "feature", ",", "new_state", ")", "LOG", ".", "info", "(", "\"%s was successfully updated.\"", ",", "feature", ")", "else", ":", "raise", "Exception", "(", "'Invalid state: {0}, -s needs \\\n to be either on or off.'", ".", "format", "(", "state", ")", ")", "except", "KeyError", "as", "ex", ":", "LOG", ".", "error", "(", "\"unable to update %s. Exception: %s\"", ",", "host", ".", "host", ",", "ex", ")", "sys", ".", "exit", "(", "1", ")" ]
Update redis state for a feature flag. :param project: LaunchDarkly project key. :param environment: LaunchDarkly environment key. :param feature: LaunchDarkly feature key. :param state: State for a feature flag.
[ "Update", "redis", "state", "for", "a", "feature", "flag", "." ]
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/rc.py#L42-L80
launchdarkly/relayCommander
relay_commander/rc.py
update_ld_api
def update_ld_api(project: str, environment: str, feature: str, state: str): """ Execute command against the LaunchDarkly API. This command is generally not used directly, instead it is called as a part of running the ``playback()`` function. :param project: LaunchDarkly project key. :param environment: LaunchDarkly environment key. :param feature: LaunchDarkly feature key. :param state: State for a feature flag. """ ld_api = LaunchDarklyApi( os.environ.get('LD_API_KEY'), project, environment ) if valid_state(state): if state.lower() == 'off': new_state = False else: new_state = True ld_api.update_flag(new_state, feature) else: raise ValueError('Invalid state: {0}, -s needs to be either \ on or off.'.format(state))
python
def update_ld_api(project: str, environment: str, feature: str, state: str): """ Execute command against the LaunchDarkly API. This command is generally not used directly, instead it is called as a part of running the ``playback()`` function. :param project: LaunchDarkly project key. :param environment: LaunchDarkly environment key. :param feature: LaunchDarkly feature key. :param state: State for a feature flag. """ ld_api = LaunchDarklyApi( os.environ.get('LD_API_KEY'), project, environment ) if valid_state(state): if state.lower() == 'off': new_state = False else: new_state = True ld_api.update_flag(new_state, feature) else: raise ValueError('Invalid state: {0}, -s needs to be either \ on or off.'.format(state))
[ "def", "update_ld_api", "(", "project", ":", "str", ",", "environment", ":", "str", ",", "feature", ":", "str", ",", "state", ":", "str", ")", ":", "ld_api", "=", "LaunchDarklyApi", "(", "os", ".", "environ", ".", "get", "(", "'LD_API_KEY'", ")", ",", "project", ",", "environment", ")", "if", "valid_state", "(", "state", ")", ":", "if", "state", ".", "lower", "(", ")", "==", "'off'", ":", "new_state", "=", "False", "else", ":", "new_state", "=", "True", "ld_api", ".", "update_flag", "(", "new_state", ",", "feature", ")", "else", ":", "raise", "ValueError", "(", "'Invalid state: {0}, -s needs to be either \\\n on or off.'", ".", "format", "(", "state", ")", ")" ]
Execute command against the LaunchDarkly API. This command is generally not used directly, instead it is called as a part of running the ``playback()`` function. :param project: LaunchDarkly project key. :param environment: LaunchDarkly environment key. :param feature: LaunchDarkly feature key. :param state: State for a feature flag.
[ "Execute", "command", "against", "the", "LaunchDarkly", "API", "." ]
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/rc.py#L100-L126
launchdarkly/relayCommander
relay_commander/rc.py
generate_relay_config
def generate_relay_config(project): """ Generate Relay Proxy Configuration. Generate a ld-relay.conf file to quickly spin up a relay proxy. Right now this is mostly used for integration testing. :param project: LaunchDarkly project key """ ld_api = LaunchDarklyApi( os.environ.get('LD_API_KEY'), project_key=project ) config = ConfigGenerator() envs = ld_api.get_environments(project) config.generate_relay_config(envs)
python
def generate_relay_config(project): """ Generate Relay Proxy Configuration. Generate a ld-relay.conf file to quickly spin up a relay proxy. Right now this is mostly used for integration testing. :param project: LaunchDarkly project key """ ld_api = LaunchDarklyApi( os.environ.get('LD_API_KEY'), project_key=project ) config = ConfigGenerator() envs = ld_api.get_environments(project) config.generate_relay_config(envs)
[ "def", "generate_relay_config", "(", "project", ")", ":", "ld_api", "=", "LaunchDarklyApi", "(", "os", ".", "environ", ".", "get", "(", "'LD_API_KEY'", ")", ",", "project_key", "=", "project", ")", "config", "=", "ConfigGenerator", "(", ")", "envs", "=", "ld_api", ".", "get_environments", "(", "project", ")", "config", ".", "generate_relay_config", "(", "envs", ")" ]
Generate Relay Proxy Configuration. Generate a ld-relay.conf file to quickly spin up a relay proxy. Right now this is mostly used for integration testing. :param project: LaunchDarkly project key
[ "Generate", "Relay", "Proxy", "Configuration", "." ]
train
https://github.com/launchdarkly/relayCommander/blob/eee7fa22f04edc3854dd53c3ec2db8c599ad1e89/relay_commander/rc.py#L131-L147
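A hedged sketch of driving the helper above; it assumes relay_commander is installed, that generate_relay_config is importable from relay_commander.rc and callable as a plain function (it may instead be exposed only as a CLI command), and that a real LaunchDarkly API key is available. The project key and the key value below are placeholders.

# Illustrative only: requires a valid LaunchDarkly API key and network access.
import os

os.environ["LD_API_KEY"] = "api-xxxxxxxx"   # placeholder, not a real key

from relay_commander.rc import generate_relay_config

generate_relay_config("default")   # writes an ld-relay.conf for the project's environments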
mjirik/io3d
io3d/dcmreaddata.py
dicomdir_info
def dicomdir_info(dirpath, *args, **kwargs): """ Get information about series in dir""" dr = DicomReader(dirpath=dirpath, *args, **kwargs) info = dr.dicomdirectory.get_stats_of_series_in_dir() return info
python
def dicomdir_info(dirpath, *args, **kwargs): """ Get information about series in dir""" dr = DicomReader(dirpath=dirpath, *args, **kwargs) info = dr.dicomdirectory.get_stats_of_series_in_dir() return info
[ "def", "dicomdir_info", "(", "dirpath", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "dr", "=", "DicomReader", "(", "dirpath", "=", "dirpath", ",", "*", "args", ",", "*", "*", "kwargs", ")", "info", "=", "dr", ".", "dicomdirectory", ".", "get_stats_of_series_in_dir", "(", ")", "return", "info" ]
Get information about series in dir
[ "Get", "information", "about", "series", "in", "dir" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L45-L49
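A minimal usage sketch for dicomdir_info, assuming the io3d package is installed and the function is importable from io3d.dcmreaddata; the directory path is a placeholder for a folder of DICOM slices.

from io3d.dcmreaddata import dicomdir_info

info = dicomdir_info("/home/user/data/medical/jatra-kiv")   # placeholder path
for series_number, series in info.items():
    print(series_number, series.get("Count"), series.get("SeriesDescription"))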
mjirik/io3d
io3d/dcmreaddata.py
is_dicom_dir
def is_dicom_dir(datapath): """ Check if in dir is one or more dicom file. We use two methods. First is based on dcm extension detection. """ # Second tries open files # with dicom module. retval = False datapath = op.expanduser(datapath) for f in os.listdir(datapath): if f.endswith((".dcm", ".DCM")): retval = True return True # @todo not working and I dont know why try: pydicom.read_file(os.path.join(datapath, f)) retval = True # except pydicom.errors.InvalidDicomError: # logger.debug("Invalid Dicom while reading file " + str(f)) except Exception as e: logger.warning("Unable to read dicom file " + str(f)) logger.warning(e) # import traceback # traceback.print_exc() if retval: return True return False
python
def is_dicom_dir(datapath): """ Check if in dir is one or more dicom file. We use two methods. First is based on dcm extension detection. """ # Second tries open files # with dicom module. retval = False datapath = op.expanduser(datapath) for f in os.listdir(datapath): if f.endswith((".dcm", ".DCM")): retval = True return True # @todo not working and I dont know why try: pydicom.read_file(os.path.join(datapath, f)) retval = True # except pydicom.errors.InvalidDicomError: # logger.debug("Invalid Dicom while reading file " + str(f)) except Exception as e: logger.warning("Unable to read dicom file " + str(f)) logger.warning(e) # import traceback # traceback.print_exc() if retval: return True return False
[ "def", "is_dicom_dir", "(", "datapath", ")", ":", "# Second tries open files", "# with dicom module.", "retval", "=", "False", "datapath", "=", "op", ".", "expanduser", "(", "datapath", ")", "for", "f", "in", "os", ".", "listdir", "(", "datapath", ")", ":", "if", "f", ".", "endswith", "(", "(", "\".dcm\"", ",", "\".DCM\"", ")", ")", ":", "retval", "=", "True", "return", "True", "# @todo not working and I dont know why", "try", ":", "pydicom", ".", "read_file", "(", "os", ".", "path", ".", "join", "(", "datapath", ",", "f", ")", ")", "retval", "=", "True", "# except pydicom.errors.InvalidDicomError:", "# logger.debug(\"Invalid Dicom while reading file \" + str(f))", "except", "Exception", "as", "e", ":", "logger", ".", "warning", "(", "\"Unable to read dicom file \"", "+", "str", "(", "f", ")", ")", "logger", ".", "warning", "(", "e", ")", "# import traceback", "# traceback.print_exc()", "if", "retval", ":", "return", "True", "return", "False" ]
Check whether the directory contains one or more DICOM files. Two methods are used; the first is based on detection of the .dcm extension.
[ "Check", "if", "in", "dir", "is", "one", "or", "more", "dicom", "file", ".", "We", "use", "two", "methods", ".", "First", "is", "based", "on", "dcm", "extension", "detection", "." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L52-L81
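A short usage sketch, again assuming io3d is installed; the path is a placeholder, and since the function expands the user directory itself, a ~-prefixed path would also work.

from io3d.dcmreaddata import is_dicom_dir

if is_dicom_dir("/home/user/data/medical/jatra-kiv"):   # placeholder path
    print("directory contains DICOM files")
else:
    print("no DICOM files found")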
mjirik/io3d
io3d/dcmreaddata.py
files_in_dir
def files_in_dir(dirpath, wildcard="*", startpath=None): """ Function generates list of files from specific dir files_in_dir(dirpath, wildcard="*.*", startpath=None) dirpath: required directory wilcard: mask for files startpath: start for relative path Example files_in_dir('medical/jatra-kiv','*.dcm', '~/data/') """ import glob filelist = [] if startpath is not None: completedirpath = os.path.join(startpath, dirpath) else: completedirpath = dirpath if os.path.exists(completedirpath): logger.info('completedirpath = ' + completedirpath) else: logger.error('Wrong path: ' + completedirpath) raise Exception('Wrong path : ' + completedirpath) for infile in glob.glob(os.path.join(completedirpath, wildcard)): filelist.append(infile) if len(filelist) == 0: logger.error('No required files in path: ' + completedirpath) raise Exception('No required file in path: ' + completedirpath) return filelist
python
def files_in_dir(dirpath, wildcard="*", startpath=None): """ Function generates list of files from specific dir files_in_dir(dirpath, wildcard="*.*", startpath=None) dirpath: required directory wilcard: mask for files startpath: start for relative path Example files_in_dir('medical/jatra-kiv','*.dcm', '~/data/') """ import glob filelist = [] if startpath is not None: completedirpath = os.path.join(startpath, dirpath) else: completedirpath = dirpath if os.path.exists(completedirpath): logger.info('completedirpath = ' + completedirpath) else: logger.error('Wrong path: ' + completedirpath) raise Exception('Wrong path : ' + completedirpath) for infile in glob.glob(os.path.join(completedirpath, wildcard)): filelist.append(infile) if len(filelist) == 0: logger.error('No required files in path: ' + completedirpath) raise Exception('No required file in path: ' + completedirpath) return filelist
[ "def", "files_in_dir", "(", "dirpath", ",", "wildcard", "=", "\"*\"", ",", "startpath", "=", "None", ")", ":", "import", "glob", "filelist", "=", "[", "]", "if", "startpath", "is", "not", "None", ":", "completedirpath", "=", "os", ".", "path", ".", "join", "(", "startpath", ",", "dirpath", ")", "else", ":", "completedirpath", "=", "dirpath", "if", "os", ".", "path", ".", "exists", "(", "completedirpath", ")", ":", "logger", ".", "info", "(", "'completedirpath = '", "+", "completedirpath", ")", "else", ":", "logger", ".", "error", "(", "'Wrong path: '", "+", "completedirpath", ")", "raise", "Exception", "(", "'Wrong path : '", "+", "completedirpath", ")", "for", "infile", "in", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "completedirpath", ",", "wildcard", ")", ")", ":", "filelist", ".", "append", "(", "infile", ")", "if", "len", "(", "filelist", ")", "==", "0", ":", "logger", ".", "error", "(", "'No required files in path: '", "+", "completedirpath", ")", "raise", "Exception", "(", "'No required file in path: '", "+", "completedirpath", ")", "return", "filelist" ]
Generates a list of files from a specific directory. files_in_dir(dirpath, wildcard="*.*", startpath=None) dirpath: required directory wildcard: mask for files startpath: start for relative path Example: files_in_dir('medical/jatra-kiv', '*.dcm', '~/data/')
[ "Function", "generates", "list", "of", "files", "from", "specific", "dir" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L357-L394
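A runnable variant of the example from the docstring, with an absolute placeholder startpath; the function joins startpath and dirpath and raises if the resulting directory does not exist or matches no files.

from io3d.dcmreaddata import files_in_dir

dcm_files = files_in_dir("medical/jatra-kiv", wildcard="*.dcm",
                         startpath="/home/user/data")   # placeholder paths
print(len(dcm_files), "files found")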
mjirik/io3d
io3d/dcmreaddata.py
get_slice_location
def get_slice_location(dcmdata, teil=None): """ get location of the slice :param dcmdata: dicom data structure :param teil: filename. Used when slice location doesnt exist :return: """ slice_location = None if hasattr(dcmdata, 'SliceLocation'): # print(dcmdata.SliceLocation) # print(type(dcmdata.SliceLocation)) try: slice_location = float(dcmdata.SliceLocation) except Exception as exc: logger.info("It is not possible to use SliceLocation") logger.debug(traceback.format_exc()) if slice_location is None and hasattr(dcmdata, "SliceThickness") and teil is not None: logger.debug( "Estimating SliceLocation wiht image number and SliceThickness" ) # from builtins import map i = list(map(int, re.findall('\d+', teil))) i = i[-1] try: slice_location = float(i * float(dcmdata.SliceThickness)) except ValueError as e: print(type(dcmdata.SliceThickness)) print(dcmdata.SliceThickness) logger.debug(traceback.format_exc()) logger.debug("SliceThickness problem") if slice_location is None and hasattr(dcmdata, "ImagePositionPatient") and hasattr(dcmdata, "ImageOrientationPatient"): if dcmdata.ImageOrientationPatient == [1, 0, 0, 0, 1, 0]: slice_location = dcmdata.ImagePositionPatient[2] else: logger.warning("Unknown ImageOrientationPatient") if slice_location is None: logger.warning("Problem with slice location") return slice_location
python
def get_slice_location(dcmdata, teil=None): """ get location of the slice :param dcmdata: dicom data structure :param teil: filename. Used when slice location doesnt exist :return: """ slice_location = None if hasattr(dcmdata, 'SliceLocation'): # print(dcmdata.SliceLocation) # print(type(dcmdata.SliceLocation)) try: slice_location = float(dcmdata.SliceLocation) except Exception as exc: logger.info("It is not possible to use SliceLocation") logger.debug(traceback.format_exc()) if slice_location is None and hasattr(dcmdata, "SliceThickness") and teil is not None: logger.debug( "Estimating SliceLocation wiht image number and SliceThickness" ) # from builtins import map i = list(map(int, re.findall('\d+', teil))) i = i[-1] try: slice_location = float(i * float(dcmdata.SliceThickness)) except ValueError as e: print(type(dcmdata.SliceThickness)) print(dcmdata.SliceThickness) logger.debug(traceback.format_exc()) logger.debug("SliceThickness problem") if slice_location is None and hasattr(dcmdata, "ImagePositionPatient") and hasattr(dcmdata, "ImageOrientationPatient"): if dcmdata.ImageOrientationPatient == [1, 0, 0, 0, 1, 0]: slice_location = dcmdata.ImagePositionPatient[2] else: logger.warning("Unknown ImageOrientationPatient") if slice_location is None: logger.warning("Problem with slice location") return slice_location
[ "def", "get_slice_location", "(", "dcmdata", ",", "teil", "=", "None", ")", ":", "slice_location", "=", "None", "if", "hasattr", "(", "dcmdata", ",", "'SliceLocation'", ")", ":", "# print(dcmdata.SliceLocation)", "# print(type(dcmdata.SliceLocation))", "try", ":", "slice_location", "=", "float", "(", "dcmdata", ".", "SliceLocation", ")", "except", "Exception", "as", "exc", ":", "logger", ".", "info", "(", "\"It is not possible to use SliceLocation\"", ")", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "if", "slice_location", "is", "None", "and", "hasattr", "(", "dcmdata", ",", "\"SliceThickness\"", ")", "and", "teil", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"Estimating SliceLocation wiht image number and SliceThickness\"", ")", "# from builtins import map", "i", "=", "list", "(", "map", "(", "int", ",", "re", ".", "findall", "(", "'\\d+'", ",", "teil", ")", ")", ")", "i", "=", "i", "[", "-", "1", "]", "try", ":", "slice_location", "=", "float", "(", "i", "*", "float", "(", "dcmdata", ".", "SliceThickness", ")", ")", "except", "ValueError", "as", "e", ":", "print", "(", "type", "(", "dcmdata", ".", "SliceThickness", ")", ")", "print", "(", "dcmdata", ".", "SliceThickness", ")", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "logger", ".", "debug", "(", "\"SliceThickness problem\"", ")", "if", "slice_location", "is", "None", "and", "hasattr", "(", "dcmdata", ",", "\"ImagePositionPatient\"", ")", "and", "hasattr", "(", "dcmdata", ",", "\"ImageOrientationPatient\"", ")", ":", "if", "dcmdata", ".", "ImageOrientationPatient", "==", "[", "1", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", "]", ":", "slice_location", "=", "dcmdata", ".", "ImagePositionPatient", "[", "2", "]", "else", ":", "logger", ".", "warning", "(", "\"Unknown ImageOrientationPatient\"", ")", "if", "slice_location", "is", "None", ":", "logger", ".", "warning", "(", "\"Problem with slice location\"", ")", "return", "slice_location" ]
Get the location of the slice. :param dcmdata: DICOM data structure :param teil: filename, used when the slice location does not exist :return:
[ "get", "location", "of", "the", "slice" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L877-L918
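A sketch of applying the helper to a single slice read with pydicom; the file path is a placeholder and the import of get_slice_location from io3d.dcmreaddata is an assumption based on the file path of this record.

import pydicom
from io3d.dcmreaddata import get_slice_location

path = "/home/user/data/medical/jatra-kiv/slice_0001.dcm"   # placeholder file
ds = pydicom.dcmread(path)
# teil is the file name, used as a fallback when SliceLocation is missing
print(get_slice_location(ds, teil="slice_0001.dcm"))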
mjirik/io3d
io3d/dcmreaddata.py
attr_to_dict
def attr_to_dict(obj, attr, dct): """ Add attribute to dict if it exists. :param dct: :param obj: object :param attr: object attribute name :return: dict """ if hasattr(obj, attr): dct[attr] = getattr(obj, attr) return dct
python
def attr_to_dict(obj, attr, dct): """ Add attribute to dict if it exists. :param dct: :param obj: object :param attr: object attribute name :return: dict """ if hasattr(obj, attr): dct[attr] = getattr(obj, attr) return dct
[ "def", "attr_to_dict", "(", "obj", ",", "attr", ",", "dct", ")", ":", "if", "hasattr", "(", "obj", ",", "attr", ")", ":", "dct", "[", "attr", "]", "=", "getattr", "(", "obj", ",", "attr", ")", "return", "dct" ]
Add attribute to dict if it exists. :param dct: :param obj: object :param attr: object attribute name :return: dict
[ "Add", "attribute", "to", "dict", "if", "it", "exists", ".", ":", "param", "dct", ":", ":", "param", "obj", ":", "object", ":", "param", "attr", ":", "object", "attribute", "name", ":", "return", ":", "dict" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L928-L938
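Because attr_to_dict only needs an object with attributes, it can be demonstrated without any DICOM data; the FakeDataset class below is purely illustrative.

from io3d.dcmreaddata import attr_to_dict

class FakeDataset:                     # illustrative stand-in for a pydicom dataset
    Modality = "CT"

meta = {}
meta = attr_to_dict(FakeDataset(), "Modality", meta)     # attribute exists, copied
meta = attr_to_dict(FakeDataset(), "PatientAge", meta)   # attribute missing, dict unchanged
print(meta)                                              # {'Modality': 'CT'}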
mjirik/io3d
io3d/dcmreaddata.py
get_series_number_by_guess_for_liver
def get_series_number_by_guess_for_liver(dcmreader, counts, bins, qt_app=None): """ Select the venous series from CT with around 200 images :param dcmreader: :param counts: :param bins: :param qt_app: :return: """ series_info = dcmreader.dicomdirectory.get_stats_of_series_in_dir() print(dcmreader.print_series_info(series_info)) import pandas as pd df = pd.DataFrame(list(series_info.values())) #select CT df = df[df["Modality"].str.lower().str.contains("ct") == True] # select just venous df = df[df["SeriesDescription"].str.lower().str.contains("ven") == True] # remove saggittal df = df[df["SeriesDescription"].str.lower().str.contains("sag") == False] # remove cor df = df[df["SeriesDescription"].str.lower().str.contains("cor") == False] df["dst_to_200"] = np.abs(200 - df.Count) dfs = df.sort_values(by="dst_to_200", ascending=True) sn = list(dfs.SeriesNumber)[0] return sn
python
def get_series_number_by_guess_for_liver(dcmreader, counts, bins, qt_app=None): """ Select the venous series from CT with around 200 images :param dcmreader: :param counts: :param bins: :param qt_app: :return: """ series_info = dcmreader.dicomdirectory.get_stats_of_series_in_dir() print(dcmreader.print_series_info(series_info)) import pandas as pd df = pd.DataFrame(list(series_info.values())) #select CT df = df[df["Modality"].str.lower().str.contains("ct") == True] # select just venous df = df[df["SeriesDescription"].str.lower().str.contains("ven") == True] # remove saggittal df = df[df["SeriesDescription"].str.lower().str.contains("sag") == False] # remove cor df = df[df["SeriesDescription"].str.lower().str.contains("cor") == False] df["dst_to_200"] = np.abs(200 - df.Count) dfs = df.sort_values(by="dst_to_200", ascending=True) sn = list(dfs.SeriesNumber)[0] return sn
[ "def", "get_series_number_by_guess_for_liver", "(", "dcmreader", ",", "counts", ",", "bins", ",", "qt_app", "=", "None", ")", ":", "series_info", "=", "dcmreader", ".", "dicomdirectory", ".", "get_stats_of_series_in_dir", "(", ")", "print", "(", "dcmreader", ".", "print_series_info", "(", "series_info", ")", ")", "import", "pandas", "as", "pd", "df", "=", "pd", ".", "DataFrame", "(", "list", "(", "series_info", ".", "values", "(", ")", ")", ")", "#select CT", "df", "=", "df", "[", "df", "[", "\"Modality\"", "]", ".", "str", ".", "lower", "(", ")", ".", "str", ".", "contains", "(", "\"ct\"", ")", "==", "True", "]", "# select just venous", "df", "=", "df", "[", "df", "[", "\"SeriesDescription\"", "]", ".", "str", ".", "lower", "(", ")", ".", "str", ".", "contains", "(", "\"ven\"", ")", "==", "True", "]", "# remove saggittal", "df", "=", "df", "[", "df", "[", "\"SeriesDescription\"", "]", ".", "str", ".", "lower", "(", ")", ".", "str", ".", "contains", "(", "\"sag\"", ")", "==", "False", "]", "# remove cor", "df", "=", "df", "[", "df", "[", "\"SeriesDescription\"", "]", ".", "str", ".", "lower", "(", ")", ".", "str", ".", "contains", "(", "\"cor\"", ")", "==", "False", "]", "df", "[", "\"dst_to_200\"", "]", "=", "np", ".", "abs", "(", "200", "-", "df", ".", "Count", ")", "dfs", "=", "df", ".", "sort_values", "(", "by", "=", "\"dst_to_200\"", ",", "ascending", "=", "True", ")", "sn", "=", "list", "(", "dfs", ".", "SeriesNumber", ")", "[", "0", "]", "return", "sn" ]
Select the venous series from CT with around 200 images :param dcmreader: :param counts: :param bins: :param qt_app: :return:
[ "Select", "the", "venous", "series", "from", "CT", "with", "around", "200", "images", ":", "param", "dcmreader", ":", ":", "param", "counts", ":", ":", "param", "bins", ":", ":", "param", "qt_app", ":", ":", "return", ":" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L940-L966
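The selection logic above (CT only, venous, not sagittal or coronal, slice count closest to 200) can be illustrated on a small hand-made table; the rows below are invented for the example and do not come from any real study.

import numpy as np
import pandas as pd

series = pd.DataFrame([                       # invented example rows
    {"SeriesNumber": 2, "Modality": "CT", "SeriesDescription": "venous sag", "Count": 190},
    {"SeriesNumber": 3, "Modality": "CT", "SeriesDescription": "venous ax",  "Count": 230},
    {"SeriesNumber": 4, "Modality": "MR", "SeriesDescription": "venous ax",  "Count": 200},
])
df = series[series["Modality"].str.lower().str.contains("ct")]
df = df[df["SeriesDescription"].str.lower().str.contains("ven")]
df = df[~df["SeriesDescription"].str.lower().str.contains("sag")]
df = df[~df["SeriesDescription"].str.lower().str.contains("cor")]
df = df.assign(dst_to_200=np.abs(200 - df["Count"]))
print(df.sort_values("dst_to_200")["SeriesNumber"].iloc[0])   # -> 3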
mjirik/io3d
io3d/dcmreaddata.py
DicomReader.get_overlay
def get_overlay(self): """ Function make 3D data from dicom file slices. There are usualy more overlays in the data. """ overlay = {} dcmlist = self.files_in_serie for i in range(len(dcmlist)): onefile = dcmlist[i] logger.info("reading '%s'" % onefile) data = self._read_file(onefile) if len(overlay) == 0: # first there is created dictionary with # avalible overlay indexes for i_overlay in range(0, 50): try: # overlay index data2d = decode_overlay_slice(data, i_overlay) # mport pdb; pdb.set_trace() shp2 = data2d.shape overlay[i_overlay] = np.zeros([len(dcmlist), shp2[0], shp2[1]], dtype=np.int8) overlay[i_overlay][-i - 1, :, :] = data2d except Exception: # exception is exceptetd. We are trying numbers 0-50 # logger.exception('Problem with overlay image number ' + # str(i_overlay)) pass else: for i_overlay in overlay.keys(): try: data2d = decode_overlay_slice(data, i_overlay) overlay[i_overlay][-i - 1, :, :] = data2d except Exception: logger.warning('Problem with overlay number ' + str(i_overlay)) return overlay
python
def get_overlay(self): """ Function make 3D data from dicom file slices. There are usualy more overlays in the data. """ overlay = {} dcmlist = self.files_in_serie for i in range(len(dcmlist)): onefile = dcmlist[i] logger.info("reading '%s'" % onefile) data = self._read_file(onefile) if len(overlay) == 0: # first there is created dictionary with # avalible overlay indexes for i_overlay in range(0, 50): try: # overlay index data2d = decode_overlay_slice(data, i_overlay) # mport pdb; pdb.set_trace() shp2 = data2d.shape overlay[i_overlay] = np.zeros([len(dcmlist), shp2[0], shp2[1]], dtype=np.int8) overlay[i_overlay][-i - 1, :, :] = data2d except Exception: # exception is exceptetd. We are trying numbers 0-50 # logger.exception('Problem with overlay image number ' + # str(i_overlay)) pass else: for i_overlay in overlay.keys(): try: data2d = decode_overlay_slice(data, i_overlay) overlay[i_overlay][-i - 1, :, :] = data2d except Exception: logger.warning('Problem with overlay number ' + str(i_overlay)) return overlay
[ "def", "get_overlay", "(", "self", ")", ":", "overlay", "=", "{", "}", "dcmlist", "=", "self", ".", "files_in_serie", "for", "i", "in", "range", "(", "len", "(", "dcmlist", ")", ")", ":", "onefile", "=", "dcmlist", "[", "i", "]", "logger", ".", "info", "(", "\"reading '%s'\"", "%", "onefile", ")", "data", "=", "self", ".", "_read_file", "(", "onefile", ")", "if", "len", "(", "overlay", ")", "==", "0", ":", "# first there is created dictionary with", "# avalible overlay indexes", "for", "i_overlay", "in", "range", "(", "0", ",", "50", ")", ":", "try", ":", "# overlay index", "data2d", "=", "decode_overlay_slice", "(", "data", ",", "i_overlay", ")", "# mport pdb; pdb.set_trace()", "shp2", "=", "data2d", ".", "shape", "overlay", "[", "i_overlay", "]", "=", "np", ".", "zeros", "(", "[", "len", "(", "dcmlist", ")", ",", "shp2", "[", "0", "]", ",", "shp2", "[", "1", "]", "]", ",", "dtype", "=", "np", ".", "int8", ")", "overlay", "[", "i_overlay", "]", "[", "-", "i", "-", "1", ",", ":", ",", ":", "]", "=", "data2d", "except", "Exception", ":", "# exception is exceptetd. We are trying numbers 0-50", "# logger.exception('Problem with overlay image number ' +", "# str(i_overlay))", "pass", "else", ":", "for", "i_overlay", "in", "overlay", ".", "keys", "(", ")", ":", "try", ":", "data2d", "=", "decode_overlay_slice", "(", "data", ",", "i_overlay", ")", "overlay", "[", "i_overlay", "]", "[", "-", "i", "-", "1", ",", ":", ",", ":", "]", "=", "data2d", "except", "Exception", ":", "logger", ".", "warning", "(", "'Problem with overlay number '", "+", "str", "(", "i_overlay", ")", ")", "return", "overlay" ]
Makes 3D data from DICOM file slices. There are usually multiple overlays in the data.
[ "Function", "make", "3D", "data", "from", "dicom", "file", "slices", ".", "There", "are", "usualy", "more", "overlays", "in", "the", "data", "." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L202-L243
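A hedged usage sketch: DicomReader is constructed with a dirpath (as in dicomdir_info earlier in this file) and get_overlay returns a dict of 3D int8 arrays keyed by overlay index; the path is a placeholder and a single-series directory is assumed.

from io3d.dcmreaddata import DicomReader

dr = DicomReader(dirpath="/home/user/data/medical/jatra-kiv")   # placeholder path
overlays = dr.get_overlay()
for index, volume in overlays.items():
    print("overlay", index, volume.shape)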
mjirik/io3d
io3d/dcmreaddata.py
DicomReader.get_3Ddata
def get_3Ddata(self, start=0, stop=None, step=1): """ Function make 3D data from dicom file slices """ data3d = [] dcmlist = self.files_in_serie # print('stsp ', start, stop, step) # raw_max = None # raw_min = None # slope = None # inter = None # get shape 2d # sometimes there is render in series if len(self.files_in_serie) > 1: data = self._read_file(dcmlist[0]) data2d1 = data.pixel_array data = self._read_file(dcmlist[1]) data2d2 = data.pixel_array if (data2d1.shape[0] == data2d2.shape[0]) and (data2d1.shape[1] == data2d2.shape[1]): pass else: dcmlist.pop(0) if stop is None: stop = len(dcmlist) # printRescaleWarning = False for i in xrange(start, stop, step): onefile = dcmlist[i] data = self._read_file(onefile) new_data2d = data.pixel_array # new_data2d, slope, inter = dcmtools.get_pixel_array_from_pdcm(data) # mport pdb; pdb.set_trace() if len(data3d) == 0: shp2 = new_data2d.shape data3d = np.zeros([len(dcmlist), shp2[0], shp2[1]], dtype=new_data2d.dtype) slope, inter = dcmtools.get_slope_and_intercept_from_pdcm(data) # first readed slide is at the end if (data3d.shape[1] == new_data2d.shape[0]) and (data3d.shape[2] == new_data2d.shape[1]): data3d[-i - 1, :, :] = new_data2d else: msg = "Problem with shape " + \ "Data size: " + str(data3d.nbytes) + \ ', shape: ' + str(shp2) + 'x' + str(len(dcmlist)) + \ ' file ' + onefile logger.warning(msg) print(msg) logger.debug("Data size: " + str(data3d.nbytes) + ', shape: ' + str(shp2) + 'x' + str(len(dcmlist)) + ' file ' + onefile) data3d = misc.use_economic_dtype(data3d, slope=slope, inter=inter) # if original_dtype == np.uint16 and data3d.dtype == np.int16: # data3d = data3d.astype(np.int32) # or just force set slope=0.5, inter = 0 # new_data2d = rescale_pixel_array(data2d, slope, inter) # if printRescaleWarning: # print("Automatic Rescale with slope 0.5") # logger.warning("Automatic Rescale with slope 0.5") # data3d = dcmtools.rescale_pixel_array(data3d, slope=slope, inter=inter) return data3d
python
def get_3Ddata(self, start=0, stop=None, step=1): """ Function make 3D data from dicom file slices """ data3d = [] dcmlist = self.files_in_serie # print('stsp ', start, stop, step) # raw_max = None # raw_min = None # slope = None # inter = None # get shape 2d # sometimes there is render in series if len(self.files_in_serie) > 1: data = self._read_file(dcmlist[0]) data2d1 = data.pixel_array data = self._read_file(dcmlist[1]) data2d2 = data.pixel_array if (data2d1.shape[0] == data2d2.shape[0]) and (data2d1.shape[1] == data2d2.shape[1]): pass else: dcmlist.pop(0) if stop is None: stop = len(dcmlist) # printRescaleWarning = False for i in xrange(start, stop, step): onefile = dcmlist[i] data = self._read_file(onefile) new_data2d = data.pixel_array # new_data2d, slope, inter = dcmtools.get_pixel_array_from_pdcm(data) # mport pdb; pdb.set_trace() if len(data3d) == 0: shp2 = new_data2d.shape data3d = np.zeros([len(dcmlist), shp2[0], shp2[1]], dtype=new_data2d.dtype) slope, inter = dcmtools.get_slope_and_intercept_from_pdcm(data) # first readed slide is at the end if (data3d.shape[1] == new_data2d.shape[0]) and (data3d.shape[2] == new_data2d.shape[1]): data3d[-i - 1, :, :] = new_data2d else: msg = "Problem with shape " + \ "Data size: " + str(data3d.nbytes) + \ ', shape: ' + str(shp2) + 'x' + str(len(dcmlist)) + \ ' file ' + onefile logger.warning(msg) print(msg) logger.debug("Data size: " + str(data3d.nbytes) + ', shape: ' + str(shp2) + 'x' + str(len(dcmlist)) + ' file ' + onefile) data3d = misc.use_economic_dtype(data3d, slope=slope, inter=inter) # if original_dtype == np.uint16 and data3d.dtype == np.int16: # data3d = data3d.astype(np.int32) # or just force set slope=0.5, inter = 0 # new_data2d = rescale_pixel_array(data2d, slope, inter) # if printRescaleWarning: # print("Automatic Rescale with slope 0.5") # logger.warning("Automatic Rescale with slope 0.5") # data3d = dcmtools.rescale_pixel_array(data3d, slope=slope, inter=inter) return data3d
[ "def", "get_3Ddata", "(", "self", ",", "start", "=", "0", ",", "stop", "=", "None", ",", "step", "=", "1", ")", ":", "data3d", "=", "[", "]", "dcmlist", "=", "self", ".", "files_in_serie", "# print('stsp ', start, stop, step)", "# raw_max = None", "# raw_min = None", "# slope = None", "# inter = None", "# get shape 2d", "# sometimes there is render in series", "if", "len", "(", "self", ".", "files_in_serie", ")", ">", "1", ":", "data", "=", "self", ".", "_read_file", "(", "dcmlist", "[", "0", "]", ")", "data2d1", "=", "data", ".", "pixel_array", "data", "=", "self", ".", "_read_file", "(", "dcmlist", "[", "1", "]", ")", "data2d2", "=", "data", ".", "pixel_array", "if", "(", "data2d1", ".", "shape", "[", "0", "]", "==", "data2d2", ".", "shape", "[", "0", "]", ")", "and", "(", "data2d1", ".", "shape", "[", "1", "]", "==", "data2d2", ".", "shape", "[", "1", "]", ")", ":", "pass", "else", ":", "dcmlist", ".", "pop", "(", "0", ")", "if", "stop", "is", "None", ":", "stop", "=", "len", "(", "dcmlist", ")", "# printRescaleWarning = False", "for", "i", "in", "xrange", "(", "start", ",", "stop", ",", "step", ")", ":", "onefile", "=", "dcmlist", "[", "i", "]", "data", "=", "self", ".", "_read_file", "(", "onefile", ")", "new_data2d", "=", "data", ".", "pixel_array", "# new_data2d, slope, inter = dcmtools.get_pixel_array_from_pdcm(data)", "# mport pdb; pdb.set_trace()", "if", "len", "(", "data3d", ")", "==", "0", ":", "shp2", "=", "new_data2d", ".", "shape", "data3d", "=", "np", ".", "zeros", "(", "[", "len", "(", "dcmlist", ")", ",", "shp2", "[", "0", "]", ",", "shp2", "[", "1", "]", "]", ",", "dtype", "=", "new_data2d", ".", "dtype", ")", "slope", ",", "inter", "=", "dcmtools", ".", "get_slope_and_intercept_from_pdcm", "(", "data", ")", "# first readed slide is at the end", "if", "(", "data3d", ".", "shape", "[", "1", "]", "==", "new_data2d", ".", "shape", "[", "0", "]", ")", "and", "(", "data3d", ".", "shape", "[", "2", "]", "==", "new_data2d", ".", "shape", "[", "1", "]", ")", ":", "data3d", "[", "-", "i", "-", "1", ",", ":", ",", ":", "]", "=", "new_data2d", "else", ":", "msg", "=", "\"Problem with shape \"", "+", "\"Data size: \"", "+", "str", "(", "data3d", ".", "nbytes", ")", "+", "', shape: '", "+", "str", "(", "shp2", ")", "+", "'x'", "+", "str", "(", "len", "(", "dcmlist", ")", ")", "+", "' file '", "+", "onefile", "logger", ".", "warning", "(", "msg", ")", "print", "(", "msg", ")", "logger", ".", "debug", "(", "\"Data size: \"", "+", "str", "(", "data3d", ".", "nbytes", ")", "+", "', shape: '", "+", "str", "(", "shp2", ")", "+", "'x'", "+", "str", "(", "len", "(", "dcmlist", ")", ")", "+", "' file '", "+", "onefile", ")", "data3d", "=", "misc", ".", "use_economic_dtype", "(", "data3d", ",", "slope", "=", "slope", ",", "inter", "=", "inter", ")", "# if original_dtype == np.uint16 and data3d.dtype == np.int16:", "# data3d = data3d.astype(np.int32)", "# or just force set slope=0.5, inter = 0", "# new_data2d = rescale_pixel_array(data2d, slope, inter)", "# if printRescaleWarning:", "# print(\"Automatic Rescale with slope 0.5\")", "# logger.warning(\"Automatic Rescale with slope 0.5\")", "# data3d = dcmtools.rescale_pixel_array(data3d, slope=slope, inter=inter)", "return", "data3d" ]
Makes 3D data from DICOM file slices.
[ "Function", "make", "3D", "data", "from", "dicom", "file", "slices" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L249-L319
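A usage sketch for reading the whole volume through the same reader object; as above, the path is a placeholder and a single-series directory is assumed (otherwise a series may have to be selected first).

from io3d.dcmreaddata import DicomReader

dr = DicomReader(dirpath="/home/user/data/medical/jatra-kiv")   # placeholder path
data3d = dr.get_3Ddata()            # full series, ordered by slice location
print(data3d.shape, data3d.dtype)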
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.get_standard_dicomdir_info
def get_standard_dicomdir_info(self): """ Read DICOMDIR, crate if necessary. :return: """ dicomdir_filepath = os.path.join(self.dirpath, self.standard_dicomdir_filename) if not os.path.exists(dicomdir_filepath): self.create_standard_dicomdir() return self.read_standard_dicomdir_info()
python
def get_standard_dicomdir_info(self): """ Read DICOMDIR, crate if necessary. :return: """ dicomdir_filepath = os.path.join(self.dirpath, self.standard_dicomdir_filename) if not os.path.exists(dicomdir_filepath): self.create_standard_dicomdir() return self.read_standard_dicomdir_info()
[ "def", "get_standard_dicomdir_info", "(", "self", ")", ":", "dicomdir_filepath", "=", "os", ".", "path", ".", "join", "(", "self", ".", "dirpath", ",", "self", ".", "standard_dicomdir_filename", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dicomdir_filepath", ")", ":", "self", ".", "create_standard_dicomdir", "(", ")", "return", "self", ".", "read_standard_dicomdir_info", "(", ")" ]
Read DICOMDIR, creating it if necessary. :return:
[ "Read", "DICOMDIR", "crate", "if", "necessary", ".", ":", "return", ":" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L444-L452
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.get_metadata_new
def get_metadata_new(self, series_number): """ Return series metadata. Output condatin information about voxelsize_mm, series_number and modality. If it is possible, the ImageComment, AcquisitionDate and few other dicom tags are also in output dict. :param series_number: :return: metadata dict with voxelsize_mm, SeriesNumber and other dicom tags """ # TODO implement simplier metadata function # automatic test is prepared files, files_with_info = self.get_sorted_series_files(series_number=series_number, return_files_with_info=True) metadata = { # 'voxelsize_mm': voxelsize_mm, # 'Modality': data1.Modality, # 'SeriesNumber': series_number, # 'SeriesDescription' = data1.SeriesDescription, # 'ImageComments' : data1.ImageComments, # "AcquisitionDate": metadata, # "StudyDate": metadata, # "StudyDescription": metadata, # "RequestedProcedureDescription", metadata } return metadata
python
def get_metadata_new(self, series_number): """ Return series metadata. Output condatin information about voxelsize_mm, series_number and modality. If it is possible, the ImageComment, AcquisitionDate and few other dicom tags are also in output dict. :param series_number: :return: metadata dict with voxelsize_mm, SeriesNumber and other dicom tags """ # TODO implement simplier metadata function # automatic test is prepared files, files_with_info = self.get_sorted_series_files(series_number=series_number, return_files_with_info=True) metadata = { # 'voxelsize_mm': voxelsize_mm, # 'Modality': data1.Modality, # 'SeriesNumber': series_number, # 'SeriesDescription' = data1.SeriesDescription, # 'ImageComments' : data1.ImageComments, # "AcquisitionDate": metadata, # "StudyDate": metadata, # "StudyDescription": metadata, # "RequestedProcedureDescription", metadata } return metadata
[ "def", "get_metadata_new", "(", "self", ",", "series_number", ")", ":", "# TODO implement simplier metadata function", "# automatic test is prepared", "files", ",", "files_with_info", "=", "self", ".", "get_sorted_series_files", "(", "series_number", "=", "series_number", ",", "return_files_with_info", "=", "True", ")", "metadata", "=", "{", "# 'voxelsize_mm': voxelsize_mm,", "# 'Modality': data1.Modality,", "# 'SeriesNumber': series_number,", "# 'SeriesDescription' = data1.SeriesDescription,", "# 'ImageComments' : data1.ImageComments,", "# \"AcquisitionDate\": metadata,", "# \"StudyDate\": metadata,", "# \"StudyDescription\": metadata,", "# \"RequestedProcedureDescription\", metadata", "}", "return", "metadata" ]
Return series metadata. The output contains information about voxelsize_mm, series_number and modality. If possible, the ImageComments, AcquisitionDate and a few other DICOM tags are also in the output dict. :param series_number: :return: metadata dict with voxelsize_mm, SeriesNumber and other DICOM tags
[ "Return", "series", "metadata", ".", "Output", "condatin", "information", "about", "voxelsize_mm", "series_number", "and", "modality", ".", "If", "it", "is", "possible", "the", "ImageComment", "AcquisitionDate", "and", "few", "other", "dicom", "tags", "are", "also", "in", "output", "dict", ".", ":", "param", "series_number", ":", ":", "return", ":", "metadata", "dict", "with", "voxelsize_mm", "SeriesNumber", "and", "other", "dicom", "tags" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L455-L478
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.get_metaData
def get_metaData(self, dcmlist, series_number): """ Get metadata. Voxel size is obtained from PixelSpacing and difference of SliceLocation of two neighboorhoding slices (first have index ifile). Files in are used. """ # if dcmlist is None: # dcmlist = self.files_in_serie # number of slice where to extract metadata inforamtion ifile = 0 if len(dcmlist) == 0: return {} logger.debug("Filename: " + dcmlist[ifile]) data1 = self._read_file(dcmlist[ifile]) try: # try to get difference from the beginning and also from the end voxeldepth = self._get_slice_location_difference(dcmlist, ifile) voxeldepth_end = self._get_slice_location_difference(dcmlist, -2) if voxeldepth != voxeldepth_end: logger.warning("Depth of slices is not the same in beginning and end of the sequence") voxeldepth_1 = self._get_slice_location_difference(dcmlist, 1) voxeldepth = np.median([voxeldepth, voxeldepth_end, voxeldepth_1]) # head1, teil1 = os.path.split(dcmlist[ifile]) # head2, teil2 = os.path.split(dcmlist[ifile + 1]) # # data2 = self._read_file(dcmlist[ifile + 1]) # loc1 = get_slice_location(data1, teil1) # loc2 = get_slice_location(data2, teil2) # voxeldepth = float(np.abs(loc1 - loc2)) except Exception: logger.warning('Problem with voxel depth. Using SliceThickness') logger.debug(traceback.format_exc()) # + ' SeriesNumber: ' + str(data1.SeriesNumber)) try: voxeldepth = float(data1.SliceThickness) except Exception: logger.warning('Probem with SliceThicknes, setting zero. ' + traceback.format_exc()) voxeldepth = 0 try: pixelsize_mm = data1.PixelSpacing except: logger.warning('Problem with PixelSpacing. Using [1,1]') pixelsize_mm = [1, 1] voxelsize_mm = [ voxeldepth, float(pixelsize_mm[0]), float(pixelsize_mm[1]), ] metadata = {'voxelsize_mm': voxelsize_mm, 'Modality': data1.Modality, 'SeriesNumber': series_number } try: metadata['SeriesDescription'] = data1.SeriesDescription except: logger.info( 'Problem with tag SeriesDescription, SeriesNumber: ' + str(data1.SeriesNumber)) try: metadata['ImageComments'] = data1.ImageComments except: logger.info( 'Problem with tag ImageComments') # , SeriesNumber: ' + # str(data1.SeriesNumber)) try: metadata['Modality'] = data1.Modality except: logger.info( 'Problem with tag Modality') # SeriesNumber: ' + # str(data1.SeriesNumber)) metadata = attr_to_dict(data1, "AcquisitionDate", metadata) metadata = attr_to_dict(data1, "StudyDate", metadata) metadata = attr_to_dict(data1, "StudyID", metadata) metadata = attr_to_dict(data1, "StudyDescription", metadata) metadata = attr_to_dict(data1, "RequestedProcedureDescription", metadata) metadata = attr_to_dict(data1, "PatientSex", metadata) metadata = attr_to_dict(data1, "PatientAge", metadata) metadata = attr_to_dict(data1, "PatientID", metadata) metadata = attr_to_dict(data1, "PatientName", metadata) # metadata = attr_to_dict(data1, "AcquisitionTime", metadata) metadata['dcmfilelist'] = dcmlist return metadata
python
def get_metaData(self, dcmlist, series_number): """ Get metadata. Voxel size is obtained from PixelSpacing and difference of SliceLocation of two neighboorhoding slices (first have index ifile). Files in are used. """ # if dcmlist is None: # dcmlist = self.files_in_serie # number of slice where to extract metadata inforamtion ifile = 0 if len(dcmlist) == 0: return {} logger.debug("Filename: " + dcmlist[ifile]) data1 = self._read_file(dcmlist[ifile]) try: # try to get difference from the beginning and also from the end voxeldepth = self._get_slice_location_difference(dcmlist, ifile) voxeldepth_end = self._get_slice_location_difference(dcmlist, -2) if voxeldepth != voxeldepth_end: logger.warning("Depth of slices is not the same in beginning and end of the sequence") voxeldepth_1 = self._get_slice_location_difference(dcmlist, 1) voxeldepth = np.median([voxeldepth, voxeldepth_end, voxeldepth_1]) # head1, teil1 = os.path.split(dcmlist[ifile]) # head2, teil2 = os.path.split(dcmlist[ifile + 1]) # # data2 = self._read_file(dcmlist[ifile + 1]) # loc1 = get_slice_location(data1, teil1) # loc2 = get_slice_location(data2, teil2) # voxeldepth = float(np.abs(loc1 - loc2)) except Exception: logger.warning('Problem with voxel depth. Using SliceThickness') logger.debug(traceback.format_exc()) # + ' SeriesNumber: ' + str(data1.SeriesNumber)) try: voxeldepth = float(data1.SliceThickness) except Exception: logger.warning('Probem with SliceThicknes, setting zero. ' + traceback.format_exc()) voxeldepth = 0 try: pixelsize_mm = data1.PixelSpacing except: logger.warning('Problem with PixelSpacing. Using [1,1]') pixelsize_mm = [1, 1] voxelsize_mm = [ voxeldepth, float(pixelsize_mm[0]), float(pixelsize_mm[1]), ] metadata = {'voxelsize_mm': voxelsize_mm, 'Modality': data1.Modality, 'SeriesNumber': series_number } try: metadata['SeriesDescription'] = data1.SeriesDescription except: logger.info( 'Problem with tag SeriesDescription, SeriesNumber: ' + str(data1.SeriesNumber)) try: metadata['ImageComments'] = data1.ImageComments except: logger.info( 'Problem with tag ImageComments') # , SeriesNumber: ' + # str(data1.SeriesNumber)) try: metadata['Modality'] = data1.Modality except: logger.info( 'Problem with tag Modality') # SeriesNumber: ' + # str(data1.SeriesNumber)) metadata = attr_to_dict(data1, "AcquisitionDate", metadata) metadata = attr_to_dict(data1, "StudyDate", metadata) metadata = attr_to_dict(data1, "StudyID", metadata) metadata = attr_to_dict(data1, "StudyDescription", metadata) metadata = attr_to_dict(data1, "RequestedProcedureDescription", metadata) metadata = attr_to_dict(data1, "PatientSex", metadata) metadata = attr_to_dict(data1, "PatientAge", metadata) metadata = attr_to_dict(data1, "PatientID", metadata) metadata = attr_to_dict(data1, "PatientName", metadata) # metadata = attr_to_dict(data1, "AcquisitionTime", metadata) metadata['dcmfilelist'] = dcmlist return metadata
[ "def", "get_metaData", "(", "self", ",", "dcmlist", ",", "series_number", ")", ":", "# if dcmlist is None:", "# dcmlist = self.files_in_serie", "# number of slice where to extract metadata inforamtion", "ifile", "=", "0", "if", "len", "(", "dcmlist", ")", "==", "0", ":", "return", "{", "}", "logger", ".", "debug", "(", "\"Filename: \"", "+", "dcmlist", "[", "ifile", "]", ")", "data1", "=", "self", ".", "_read_file", "(", "dcmlist", "[", "ifile", "]", ")", "try", ":", "# try to get difference from the beginning and also from the end", "voxeldepth", "=", "self", ".", "_get_slice_location_difference", "(", "dcmlist", ",", "ifile", ")", "voxeldepth_end", "=", "self", ".", "_get_slice_location_difference", "(", "dcmlist", ",", "-", "2", ")", "if", "voxeldepth", "!=", "voxeldepth_end", ":", "logger", ".", "warning", "(", "\"Depth of slices is not the same in beginning and end of the sequence\"", ")", "voxeldepth_1", "=", "self", ".", "_get_slice_location_difference", "(", "dcmlist", ",", "1", ")", "voxeldepth", "=", "np", ".", "median", "(", "[", "voxeldepth", ",", "voxeldepth_end", ",", "voxeldepth_1", "]", ")", "# head1, teil1 = os.path.split(dcmlist[ifile])", "# head2, teil2 = os.path.split(dcmlist[ifile + 1])", "#", "# data2 = self._read_file(dcmlist[ifile + 1])", "# loc1 = get_slice_location(data1, teil1)", "# loc2 = get_slice_location(data2, teil2)", "# voxeldepth = float(np.abs(loc1 - loc2))", "except", "Exception", ":", "logger", ".", "warning", "(", "'Problem with voxel depth. Using SliceThickness'", ")", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "# + ' SeriesNumber: ' + str(data1.SeriesNumber))", "try", ":", "voxeldepth", "=", "float", "(", "data1", ".", "SliceThickness", ")", "except", "Exception", ":", "logger", ".", "warning", "(", "'Probem with SliceThicknes, setting zero. '", "+", "traceback", ".", "format_exc", "(", ")", ")", "voxeldepth", "=", "0", "try", ":", "pixelsize_mm", "=", "data1", ".", "PixelSpacing", "except", ":", "logger", ".", "warning", "(", "'Problem with PixelSpacing. 
Using [1,1]'", ")", "pixelsize_mm", "=", "[", "1", ",", "1", "]", "voxelsize_mm", "=", "[", "voxeldepth", ",", "float", "(", "pixelsize_mm", "[", "0", "]", ")", ",", "float", "(", "pixelsize_mm", "[", "1", "]", ")", ",", "]", "metadata", "=", "{", "'voxelsize_mm'", ":", "voxelsize_mm", ",", "'Modality'", ":", "data1", ".", "Modality", ",", "'SeriesNumber'", ":", "series_number", "}", "try", ":", "metadata", "[", "'SeriesDescription'", "]", "=", "data1", ".", "SeriesDescription", "except", ":", "logger", ".", "info", "(", "'Problem with tag SeriesDescription, SeriesNumber: '", "+", "str", "(", "data1", ".", "SeriesNumber", ")", ")", "try", ":", "metadata", "[", "'ImageComments'", "]", "=", "data1", ".", "ImageComments", "except", ":", "logger", ".", "info", "(", "'Problem with tag ImageComments'", ")", "# , SeriesNumber: ' +", "# str(data1.SeriesNumber))", "try", ":", "metadata", "[", "'Modality'", "]", "=", "data1", ".", "Modality", "except", ":", "logger", ".", "info", "(", "'Problem with tag Modality'", ")", "# SeriesNumber: ' +", "# str(data1.SeriesNumber))", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"AcquisitionDate\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"StudyDate\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"StudyID\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"StudyDescription\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"RequestedProcedureDescription\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"PatientSex\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"PatientAge\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"PatientID\"", ",", "metadata", ")", "metadata", "=", "attr_to_dict", "(", "data1", ",", "\"PatientName\"", ",", "metadata", ")", "# metadata = attr_to_dict(data1, \"AcquisitionTime\", metadata)", "metadata", "[", "'dcmfilelist'", "]", "=", "dcmlist", "return", "metadata" ]
Get metadata. Voxel size is obtained from PixelSpacing and the difference of SliceLocation between two neighbouring slices (the first has index ifile). The files in dcmlist are used.
[ "Get", "metadata", ".", "Voxel", "size", "is", "obtained", "from", "PixelSpacing", "and", "difference", "of", "SliceLocation", "of", "two", "neighboorhoding", "slices", "(", "first", "have", "index", "ifile", ")", ".", "Files", "in", "are", "used", "." ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L491-L586
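A hedged sketch of pulling per-series metadata through the directory object exposed by DicomReader (the dicomdirectory attribute is used the same way in dicomdir_info above); the series number 7 and the path are placeholders.

from io3d.dcmreaddata import DicomReader

dr = DicomReader(dirpath="/home/user/data/medical/jatra-kiv")    # placeholder path
dcmdir = dr.dicomdirectory
files = dcmdir.get_sorted_series_files(series_number=7)          # placeholder series number
meta = dcmdir.get_metaData(dcmlist=files, series_number=7)
print(meta["voxelsize_mm"], meta.get("SeriesDescription"))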
mjirik/io3d
io3d/dcmreaddata.py
DicomDirectory.get_stats_of_series_in_dir
def get_stats_of_series_in_dir(self, study_id=None): """ Dicom series staticstics, input is dcmdir, not dirpath Information is generated from dicomdir.pkl and first files of series """ if study_id is not None: logger.error("study_id tag is not implemented yet") return import numpy as np dcmdir = self.files_with_info # get series number # vytvoření slovníku, kde je klíčem číslo série a hodnotou jsou všechny # informace z dicomdir series_info = {line['SeriesNumber']: line for line in dcmdir} # počítání velikosti série try: dcmdirseries = [line['SeriesNumber'] for line in dcmdir] except: logger.debug('Dicom tag SeriesNumber not found') series_info = {0: {'Count': 0}} return series_info # eturn [0],[0] bins, counts = np.unique(dcmdirseries, return_counts=True) # sestavení informace o velikosti série a slovníku for i in range(0, len(bins)): series_info[bins[i]]['Count'] = counts[i] # adding information from files lst = self.get_sorted_series_files(series_number=bins[i]) metadata = self.get_metaData(dcmlist=lst, series_number=bins[i]) # adding dictionary metadata to series_info dictionary series_info[bins[i]] = dict( list(series_info[bins[i]].items()) + list(metadata.items()) ) return series_info
python
def get_stats_of_series_in_dir(self, study_id=None): """ Dicom series staticstics, input is dcmdir, not dirpath Information is generated from dicomdir.pkl and first files of series """ if study_id is not None: logger.error("study_id tag is not implemented yet") return import numpy as np dcmdir = self.files_with_info # get series number # vytvoření slovníku, kde je klíčem číslo série a hodnotou jsou všechny # informace z dicomdir series_info = {line['SeriesNumber']: line for line in dcmdir} # počítání velikosti série try: dcmdirseries = [line['SeriesNumber'] for line in dcmdir] except: logger.debug('Dicom tag SeriesNumber not found') series_info = {0: {'Count': 0}} return series_info # eturn [0],[0] bins, counts = np.unique(dcmdirseries, return_counts=True) # sestavení informace o velikosti série a slovníku for i in range(0, len(bins)): series_info[bins[i]]['Count'] = counts[i] # adding information from files lst = self.get_sorted_series_files(series_number=bins[i]) metadata = self.get_metaData(dcmlist=lst, series_number=bins[i]) # adding dictionary metadata to series_info dictionary series_info[bins[i]] = dict( list(series_info[bins[i]].items()) + list(metadata.items()) ) return series_info
[ "def", "get_stats_of_series_in_dir", "(", "self", ",", "study_id", "=", "None", ")", ":", "if", "study_id", "is", "not", "None", ":", "logger", ".", "error", "(", "\"study_id tag is not implemented yet\"", ")", "return", "import", "numpy", "as", "np", "dcmdir", "=", "self", ".", "files_with_info", "# get series number", "# vytvoření slovníku, kde je klíčem číslo série a hodnotou jsou všechny", "# informace z dicomdir", "series_info", "=", "{", "line", "[", "'SeriesNumber'", "]", ":", "line", "for", "line", "in", "dcmdir", "}", "# počítání velikosti série", "try", ":", "dcmdirseries", "=", "[", "line", "[", "'SeriesNumber'", "]", "for", "line", "in", "dcmdir", "]", "except", ":", "logger", ".", "debug", "(", "'Dicom tag SeriesNumber not found'", ")", "series_info", "=", "{", "0", ":", "{", "'Count'", ":", "0", "}", "}", "return", "series_info", "# eturn [0],[0]", "bins", ",", "counts", "=", "np", ".", "unique", "(", "dcmdirseries", ",", "return_counts", "=", "True", ")", "# sestavení informace o velikosti série a slovníku", "for", "i", "in", "range", "(", "0", ",", "len", "(", "bins", ")", ")", ":", "series_info", "[", "bins", "[", "i", "]", "]", "[", "'Count'", "]", "=", "counts", "[", "i", "]", "# adding information from files", "lst", "=", "self", ".", "get_sorted_series_files", "(", "series_number", "=", "bins", "[", "i", "]", ")", "metadata", "=", "self", ".", "get_metaData", "(", "dcmlist", "=", "lst", ",", "series_number", "=", "bins", "[", "i", "]", ")", "# adding dictionary metadata to series_info dictionary", "series_info", "[", "bins", "[", "i", "]", "]", "=", "dict", "(", "list", "(", "series_info", "[", "bins", "[", "i", "]", "]", ".", "items", "(", ")", ")", "+", "list", "(", "metadata", ".", "items", "(", ")", ")", ")", "return", "series_info" ]
DICOM series statistics; input is dcmdir, not dirpath. Information is generated from dicomdir.pkl and the first files of each series.
[ "Dicom", "series", "staticstics", "input", "is", "dcmdir", "not", "dirpath", "Information", "is", "generated", "from", "dicomdir", ".", "pkl", "and", "first", "files", "of", "series" ]
train
https://github.com/mjirik/io3d/blob/ccaf3e378dcc967f2565d477fc27583fd0f61fcc/io3d/dcmreaddata.py#L603-L643
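A closing usage sketch tying the pieces together; this is essentially what dicomdir_info at the top of this group does, with the same placeholder directory assumption.

from io3d.dcmreaddata import DicomReader

dr = DicomReader(dirpath="/home/user/data/medical/jatra-kiv")    # placeholder path
stats = dr.dicomdirectory.get_stats_of_series_in_dir()
for series_number, info in stats.items():
    print(series_number, info["Count"], info.get("Modality"))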