Dataset columns (ranges are the observed minimum and maximum):

Unnamed: 0                   int64    0 to 10k
repository_name              string   lengths 7 to 54
func_path_in_repository      string   lengths 5 to 223
func_name                    string   lengths 1 to 134
whole_func_string            string   lengths 100 to 30.3k
language                     string   1 distinct value
func_code_string             string   lengths 100 to 30.3k
func_code_tokens             string   lengths 138 to 33.2k
func_documentation_string    string   lengths 1 to 15k
func_documentation_tokens    string   lengths 5 to 5.14k
split_name                   string   1 distinct value
func_code_url                string   lengths 91 to 315
9,900
ggaughan/pipe2py
pipe2py/lib/utils.py
get_input
python
def get_input(context, conf):
    """Gets a user parameter, either from the console or from an outer
    submodule/system

    Assumes conf has name, default, prompt and debug
    """
    name = conf['name']['value']
    prompt = conf['prompt']['value']
    default = conf['default']['value'] or conf['debug']['value']

    if context.submodule or context.inputs:
        value = context.inputs.get(name, default)
    elif not context.test:
        # we skip user interaction during tests
        raw = raw_input("%s (default=%s) " % (encode(prompt), encode(default)))
        value = raw or default
    else:
        value = default

    return value
Gets a user parameter, either from the console or from an outer submodule/system Assumes conf has name, default, prompt and debug
train
https://github.com/ggaughan/pipe2py/blob/4767d6d1fd354d2a35e6528594b8deb8a033eed4/pipe2py/lib/utils.py#L295-L314
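The conf mapping used by get_input above is nested one level deep, with each field's content stored under a 'value' key. A minimal, hypothetical illustration of that shape (the field values here are invented for illustration only):

conf = {
    'name': {'value': 'feedcount'},
    'prompt': {'value': 'How many feeds?'},
    'default': {'value': '5'},
    'debug': {'value': ''},
}
# With context.submodule or context.inputs set, the value is looked up from
# context.inputs; otherwise the user is prompted on the console.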
9,901
asweigart/pyautogui
pyautogui/_pyautogui_win.py
_scroll
python
def _scroll(clicks, x=None, y=None):
    """Send the mouse vertical scroll event to Windows by calling the
    mouse_event() win32 function.

    Args:
      clicks (int): The amount of scrolling to do. A positive value is the mouse
        wheel moving forward (scrolling up), a negative value is backwards (down).
      x (int): The x position of the mouse event.
      y (int): The y position of the mouse event.

    Returns:
      None
    """
    startx, starty = _position()
    width, height = _size()

    if x is None:
        x = startx
    else:
        if x < 0:
            x = 0
        elif x >= width:
            x = width - 1
    if y is None:
        y = starty
    else:
        if y < 0:
            y = 0
        elif y >= height:
            y = height - 1

    try:
        _sendMouseEvent(MOUSEEVENTF_WHEEL, x, y, dwData=clicks)
    except (PermissionError, OSError):
        # TODO: We need to figure out how to prevent these errors, see https://github.com/asweigart/pyautogui/issues/60
        pass
Send the mouse vertical scroll event to Windows by calling the mouse_event() win32 function. Args: clicks (int): The amount of scrolling to do. A positive value is the mouse wheel moving forward (scrolling up), a negative value is backwards (down). x (int): The x position of the mouse event. y (int): The y position of the mouse event. Returns: None
train
https://github.com/asweigart/pyautogui/blob/77524bd47334a89024013fd48e05151c3ac9289a/pyautogui/_pyautogui_win.py#L520-L554
9,902
deepmind/pysc2
pysc2/env/lan_sc2_env.py
tcp_client
python
def tcp_client(tcp_addr):
    """Connect to the tcp server, and return the settings."""
    family = socket.AF_INET6 if ":" in tcp_addr.ip else socket.AF_INET
    sock = socket.socket(family, socket.SOCK_STREAM, socket.IPPROTO_TCP)
    for i in range(300):
        logging.info("Connecting to: %s, attempt %d", tcp_addr, i)
        try:
            sock.connect(tcp_addr)
            break
        except socket.error:
            time.sleep(1)
    else:
        sock.connect(tcp_addr)  # One last try, but don't catch this error.
    logging.info("Connected.")

    map_data = read_tcp(sock)
    settings_str = read_tcp(sock)
    if not settings_str:
        raise socket.error("Failed to read")
    settings = json.loads(settings_str.decode())
    logging.info("Got settings. map_name: %s.", settings["map_name"])
    logging.debug("settings: %s", settings)
    settings["map_data"] = map_data
    return sock, settings
Connect to the tcp server, and return the settings.
train
https://github.com/deepmind/pysc2/blob/df4cc4b00f07a2242be9ba153d4a7f4ad2017897/pysc2/env/lan_sc2_env.py#L83-L106
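The connection loop in tcp_client above relies on Python's for/else: the else branch runs only when the loop finishes without a break, i.e. when every attempt failed, so the final unguarded connect lets the real error surface. A standalone sketch of the same retry pattern, with generic names that are not part of pysc2:

import time

def connect_with_retry(sock, addr, attempts=300, delay=1):
    """Keep trying sock.connect(addr); if every attempt fails, raise the real error."""
    for _ in range(attempts):
        try:
            sock.connect(addr)
            break                # success, so the for-else below is skipped
        except OSError:          # socket.error is an alias of OSError on Python 3
            time.sleep(delay)
    else:
        sock.connect(addr)       # one last try, not caught, so it raises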
9,903
f3at/feat
src/feat/extern/log/log.py
init
python
def init(envVarName, enableColorOutput=False):
    """
    Initialize the logging system and parse the environment variable
    of the given name. Needs to be called before starting the actual
    application.
    """
    global _initialized

    if _initialized:
        return

    global _ENV_VAR_NAME
    _ENV_VAR_NAME = envVarName

    if enableColorOutput:
        _preformatLevels(envVarName + "_NO_COLOR")
    else:
        _preformatLevels(None)

    if envVarName in os.environ:
        # install a log handler that uses the value of the environment var
        setDebug(os.environ[envVarName])
    addLimitedLogHandler(stderrHandler)

    _initialized = True
Initialize the logging system and parse the environment variable of the given name. Needs to be called before starting the actual application.
train
https://github.com/f3at/feat/blob/15da93fc9d6ec8154f52a9172824e25821195ef8/src/feat/extern/log/log.py#L484-L508
9,904
totalgood/twip
twip/nlp.py
make_named_stemmer
python
def make_named_stemmer(stem=None, min_len=3):
    """Construct a callable object and a string sufficient to reconstruct it later (unpickling)

    >>> make_named_stemmer('str_lower')
    ('str_lower', <function str_lower at ...>)
    >>> make_named_stemmer('Lancaster')
    ('lancaster', <Stemmer object at ...>)
    """
    name, stem = stringify(stem), make_stemmer(stem=stem, min_len=min_len)
    if hasattr(stem, '__name__'):
        return stem.__name__, stem
    if name.strip().lower() in STEMMER_TYPES:
        return name.strip().lower(), stem
    if hasattr(stem, 'pattern'):
        return stem.pattern, stem
    return stringify(stem), stem
Construct a callable object and a string sufficient to reconstruct it later (unpickling) >>> make_named_stemmer('str_lower') ('str_lower', <function str_lower at ...>) >>> make_named_stemmer('Lancaster') ('lancaster', <Stemmer object at ...>)
train
https://github.com/totalgood/twip/blob/5c0411d2acfbe5b421841072814c9152591c03f7/twip/nlp.py#L297-L312
9,905
ThreatConnect-Inc/tcex
tcex/tcex_ti_indicator.py
Indicator.data
python
def data(self):
    """Return Indicator data."""
    # add attributes
    if self._attributes:
        self._indicator_data['attribute'] = []
        for attr in self._attributes:
            if attr.valid:
                self._indicator_data['attribute'].append(attr.data)
    # add file actions
    if self._file_actions:
        self._indicator_data.setdefault('fileAction', {})
        self._indicator_data['fileAction'].setdefault('children', [])
        for action in self._file_actions:
            self._indicator_data['fileAction']['children'].append(action.data)
    # add file occurrences
    if self._occurrences:
        self._indicator_data.setdefault('fileOccurrence', [])
        for occurrence in self._occurrences:
            self._indicator_data['fileOccurrence'].append(occurrence.data)
    # add security labels
    if self._labels:
        self._indicator_data['securityLabel'] = []
        for label in self._labels:
            self._indicator_data['securityLabel'].append(label.data)
    # add tags
    if self._tags:
        self._indicator_data['tag'] = []
        for tag in self._tags:
            if tag.valid:
                self._indicator_data['tag'].append(tag.data)
    return self._indicator_data
Return Indicator data.
train
https://github.com/ThreatConnect-Inc/tcex/blob/dd4d7a1ef723af1561687120191886b9a2fd4b47/tcex/tcex_ti_indicator.py#L218-L248
9,906
welbornprod/colr
colr/__main__.py
print_err
python
def print_err(*args, **kwargs):
    """ A wrapper for print() that uses stderr by default. """
    if kwargs.get('file', None) is None:
        kwargs['file'] = sys.stderr

    color = dict_pop_or(kwargs, 'color', True)
    # Use color if asked, but only if the file is a tty.
    if color and kwargs['file'].isatty():
        # Keep any Colr args passed, convert strs into Colrs.
        msg = kwargs.get('sep', ' ').join(
            str(a) if isinstance(a, C) else str(C(a, 'red'))
            for a in args
        )
    else:
        # The file is not a tty anyway, no escape codes.
        msg = kwargs.get('sep', ' ').join(
            str(a.stripped() if isinstance(a, C) else a)
            for a in args
        )

    newline = dict_pop_or(kwargs, 'newline', False)
    if newline:
        msg = '\n{}'.format(msg)
    print(msg, **kwargs)
A wrapper for print() that uses stderr by default.
train
https://github.com/welbornprod/colr/blob/417117fdbddbc53142096685ac2af006b2bd0220/colr/__main__.py#L425-L447
9,907
bskinn/opan
opan/xyz.py
OpanXYZ.angle_single
python
def angle_single(self, g_num, at_1, at_2, at_3):
    """ Spanning angle among three atoms.

    The indices `at_1` and `at_3` can be the same (yielding a trivial
    zero angle), but `at_2` must be different from both `at_1` and `at_3`.

    Parameters
    ----------
    g_num
        |int| -- Index of the desired geometry
    at_1
        |int| -- Index of the first atom
    at_2
        |int| -- Index of the second atom
    at_3
        |int| -- Index of the third atom

    Returns
    -------
    angle
        |npfloat_| -- Spanning angle in degrees between `at_1`-`at_2`-`at_3`,
        from geometry `g_num`

    Raises
    ------
    ~exceptions.IndexError
        If an invalid (out-of-range) `g_num` or `at_#` is provided
    ~exceptions.ValueError
        If `at_2` is equal to either `at_1` or `at_3`
    """

    # Imports
    import numpy as np
    from .utils import safe_cast as scast
    from .utils.vector import vec_angle

    # The below errors are explicitly thrown since they are multiplied by
    # three when they are used as an index and thus give non-intuitive
    # errors in later code.

    # Complain if at_1 is invalid
    if not(-self.num_atoms <= at_1 < self.num_atoms):
        raise IndexError("Invalid index for 'at_1' ({0})".format(at_1))

    # Complain if at_2 is invalid
    if not(-self.num_atoms <= at_2 < self.num_atoms):
        raise IndexError("Invalid index for 'at_2' ({0})".format(at_2))

    # Complain if at_3 is invalid
    if not(-self.num_atoms <= at_3 < self.num_atoms):
        raise IndexError("Invalid index for 'at_3' ({0})".format(at_3))

    # Should never be necessary (save for badly erroneous calling code),
    # but coerce the at_x to their floor() values. This is again
    # needed since they are multiplied by three in the index expresssions
    # below, and can cause funny behavior when truncated by the indexing
    at_1 = scast(np.floor(at_1), np.int_)
    at_2 = scast(np.floor(at_2), np.int_)
    at_3 = scast(np.floor(at_3), np.int_)

    # Complain if at_2 is equal to either at_1 or at_3. Must factor in
    # the possibility of negative indexing via modulo arithmetic.
    if (at_2 % self.num_atoms) == (at_1 % self.num_atoms):
        raise ValueError("'at_1' and 'at_2' must be different")
    if (at_2 % self.num_atoms) == (at_3 % self.num_atoms):
        raise ValueError("'at_2' and 'at_3' must be different")

    # Trivial return if at_1 and at_3 are the same
    if (at_1 % self.num_atoms) == (at_3 % self.num_atoms):
        # Angle is identically zero in this case
        return 0.0
    ## end if

    # Store the displacement vectors from at_2 to at_1 and to at_3
    # The np.float64 type should be retained through the displ_single call.
    vec_2_1 = self.displ_single(g_num, at_2, at_1)
    vec_2_3 = self.displ_single(g_num, at_2, at_3)

    # Compute and return the calculated angle, in degrees
    # v1 {dot} v2 == |v1||v2| * cos(theta)
    angle = vec_angle(vec_2_1, vec_2_3)
    return angle
Spanning angle among three atoms. The indices `at_1` and `at_3` can be the same (yielding a trivial zero angle), but `at_2` must be different from both `at_1` and `at_3`. Parameters ---------- g_num |int| -- Index of the desired geometry at_1 |int| -- Index of the first atom at_2 |int| -- Index of the second atom at_3 |int| -- Index of the third atom Returns ------- angle |npfloat_| -- Spanning angle in degrees between `at_1`-`at_2`-`at_3`, from geometry `g_num` Raises ------ ~exceptions.IndexError If an invalid (out-of-range) `g_num` or `at_#` is provided ~exceptions.ValueError If `at_2` is equal to either `at_1` or `at_3`
train
https://github.com/bskinn/opan/blob/0b1b21662df6abc971407a9386db21a8796fbfe5/opan/xyz.py#L919-L1010
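The closing comment in angle_single above alludes to the dot-product identity v1 . v2 = |v1| |v2| cos(theta). vec_angle itself lives in opan.utils.vector and is not shown in this record; the following is only a sketch of the standard formula it refers to, not the package's implementation:

import numpy as np

def angle_deg(v1, v2):
    """Angle in degrees between two vectors via the dot-product identity."""
    cos_theta = np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
    cos_theta = np.clip(cos_theta, -1.0, 1.0)  # guard against rounding drift
    return np.degrees(np.arccos(cos_theta))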
9,908
awslabs/sockeye
sockeye/data_io.py
ParallelDataSet.load
python
def load(fname: str) -> 'ParallelDataSet':
    """
    Loads a dataset from a binary .npy file.
    """
    data = mx.nd.load(fname)
    n = len(data) // 3
    source = data[:n]
    target = data[n:2 * n]
    label = data[2 * n:]
    assert len(source) == len(target) == len(label)
    return ParallelDataSet(source, target, label)
Loads a dataset from a binary .npy file.
train
https://github.com/awslabs/sockeye/blob/5d64a1ee1ef3cbba17c6d1d94bc061020c43f6ab/sockeye/data_io.py#L1373-L1383
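load above assumes the file holds 3*n arrays laid out as n source arrays, then n target arrays, then n label arrays. A hypothetical writer that would produce that layout (the actual save routine is not shown in this record, so this is only an assumption about the on-disk convention):

import mxnet as mx

def save_parallel(fname, source, target, label):
    # Concatenate the three lists so load() can split them back into thirds.
    assert len(source) == len(target) == len(label)
    mx.nd.save(fname, source + target + label)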
9,909
Microsoft/azure-devops-python-api
azure-devops/azure/devops/v5_1/work_item_tracking/work_item_tracking_client.py
WorkItemTrackingClient.delete_field
python
def delete_field(self, field_name_or_ref_name, project=None):
    """DeleteField.
    [Preview API] Deletes the field.
    :param str field_name_or_ref_name: Field simple name or reference name
    :param str project: Project ID or project name
    """
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if field_name_or_ref_name is not None:
        route_values['fieldNameOrRefName'] = self._serialize.url('field_name_or_ref_name', field_name_or_ref_name, 'str')
    self._send(http_method='DELETE',
               location_id='b51fd764-e5c2-4b9b-aaf7-3395cf4bdd94',
               version='5.1-preview.2',
               route_values=route_values)
DeleteField. [Preview API] Deletes the field. :param str field_name_or_ref_name: Field simple name or reference name :param str project: Project ID or project name
train
https://github.com/Microsoft/azure-devops-python-api/blob/4777ffda2f5052fabbaddb2abe9cb434e0cf1aa8/azure-devops/azure/devops/v5_1/work_item_tracking/work_item_tracking_client.py#L626-L640
9,910
brndnmtthws/dragon-rest
dragon_rest/dragons.py
DragonAPI.auth
python
def auth(self):
    """Authenticate with the miner and obtain a JSON web token (JWT)."""
    response = requests.post(
        parse.urljoin(self.base_url, '/api/auth'),
        timeout=self.timeout,
        data={'username': self.username,
              'password': self.password})
    response.raise_for_status()
    json = response.json()
    if 'jwt' not in json:
        raise ValueError("Not authorized: didn't receive token, check username or password.")
    self.jwt = json['jwt']
    return json
Authenticate with the miner and obtain a JSON web token (JWT).
train
https://github.com/brndnmtthws/dragon-rest/blob/10ea09a6203c0cbfeeeb854702764bd778769887/dragon_rest/dragons.py#L118-L129
9,911
peo3/cgroup-utils
cgutils/cgroup.py
walk_cgroups
python
def walk_cgroups(cgroup, action, opaque):
    """
    The function applies the action function with the opaque object
    to each control group under the cgroup recursively.
    """
    action(cgroup, opaque)
    for child in cgroup.childs:
        walk_cgroups(child, action, opaque)
The function applies the action function with the opaque object to each control group under the cgroup recursively.
train
https://github.com/peo3/cgroup-utils/blob/fd7e99f438ce334bac5669fba0d08a6502fd7a82/cgutils/cgroup.py#L938-L945
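Because walk_cgroups above simply applies action(cgroup, opaque) depth-first, any two-argument callable can serve as the action. A small hypothetical example that collects names into a list passed as the opaque object (it assumes the cgroup objects expose a name attribute, which this record does not show):

def collect_names(cgroup, names):
    names.append(cgroup.name)  # assumed attribute, for illustration only

names = []
walk_cgroups(root_cgroup, collect_names, names)  # root_cgroup obtained elsewhere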
9,912
glue-viz/glue-vispy-viewers
glue_vispy_viewers/extern/vispy/scene/widgets/console.py
ConsoleVisual.write
python
def write(self, text='', wrap=True):
    """Write text and scroll

    Parameters
    ----------
    text : str
        Text to write. ``''`` can be used for a blank line, as a newline
        is automatically added to the end of each line.
    wrap : str
        If True, long messages will be wrapped to span multiple lines.
    """
    # Clear line
    if not isinstance(text, string_types):
        raise TypeError('text must be a string')
    # ensure we only have ASCII chars
    text = text.encode('utf-8').decode('ascii', errors='replace')
    self._pending_writes.append((text, wrap))
    self.update()
Write text and scroll Parameters ---------- text : str Text to write. ``''`` can be used for a blank line, as a newline is automatically added to the end of each line. wrap : str If True, long messages will be wrapped to span multiple lines.
train
https://github.com/glue-viz/glue-vispy-viewers/blob/54a4351d98c1f90dfb1a557d1b447c1f57470eea/glue_vispy_viewers/extern/vispy/scene/widgets/console.py#L300-L317
9,913
ThreatConnect-Inc/tcex
tcex/tcex.py
TcEx._logger_api
python
def _logger_api(self):
    """Add API logging handler."""
    from .tcex_logger import TcExLogHandler, TcExLogFormatter

    api = TcExLogHandler(self.session)
    api.set_name('api')
    api.setLevel(logging.DEBUG)
    api.setFormatter(TcExLogFormatter())
    self.log.addHandler(api)
Add API logging handler.
train
https://github.com/ThreatConnect-Inc/tcex/blob/dd4d7a1ef723af1561687120191886b9a2fd4b47/tcex/tcex.py#L186-L194
9,914
linuxsoftware/ls.joyous
ls/joyous/models/events.py
RecurringEventPage._getFromTime
python
def _getFromTime(self, atDate=None):
    """
    What was the time of this event? Due to time zones that depends what
    day we are talking about. If no day is given, assume today.
    """
    if atDate is None:
        atDate = timezone.localdate(timezone=self.tz)
    return getLocalTime(atDate, self.time_from, self.tz)
What was the time of this event? Due to time zones that depends what day we are talking about. If no day is given, assume today.
train
https://github.com/linuxsoftware/ls.joyous/blob/316283140ca5171a68ad3170a5964fdc89be0b56/ls/joyous/models/events.py#L1074-L1081
9,915
ssato/python-anyconfig
src/anyconfig/processors.py
find_by_type_or_id
python
def find_by_type_or_id(type_or_id, prs):
    """
    :param type_or_id: Type of the data to process or ID of the processor class
    :param prs: A list of :class:`anyconfig.models.processor.Processor` classes
    :return:
        A list of processor classes to process files of given data type or
        processor 'type_or_id' found by its ID
    :raises: UnknownProcessorTypeError
    """
    def pred(pcls):
        """Predicate"""
        return pcls.cid() == type_or_id or pcls.type() == type_or_id

    pclss = findall_with_pred(pred, prs)
    if not pclss:
        raise UnknownProcessorTypeError(type_or_id)

    return pclss
:param type_or_id: Type of the data to process or ID of the processor class :param prs: A list of :class:`anyconfig.models.processor.Processor` classes :return: A list of processor classes to process files of given data type or processor 'type_or_id' found by its ID :raises: UnknownProcessorTypeError
train
https://github.com/ssato/python-anyconfig/blob/f2f4fb8d8e232aadea866c202e1dd7a5967e2877/src/anyconfig/processors.py#L122-L139
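findall_with_pred is called by find_by_type_or_id above but not shown; from the usage it filters the processor classes with the predicate. A minimal sketch of what such a helper could look like, offered as an assumption rather than the library's actual code:

def findall_with_pred(pred, prs):
    """Return the processor classes for which pred(pcls) is true."""
    return [pcls for pcls in prs if pred(pcls)]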
9,916
trailofbits/protofuzz
protofuzz/gen.py
Permuter._resolve_child
python
def _resolve_child(self, path):
    'Return a member generator by a dot-delimited path'
    obj = self

    for component in path.split('.'):
        ptr = obj
        if not isinstance(ptr, Permuter):
            raise self.MessageNotFound("Bad element path [wrong type]")

        # pylint: disable=protected-access
        found_gen = (_ for _ in ptr._generators if _.name() == component)
        obj = next(found_gen, None)

        if not obj:
            raise self.MessageNotFound("Path '{}' unresolved to member."
                                       .format(path))
    return ptr, obj
Return a member generator by a dot-delimited path
train
https://github.com/trailofbits/protofuzz/blob/589492d34de9a0da6cc5554094e2588b893b2fd8/protofuzz/gen.py#L111-L128
9,917
elehcimd/pynb
pynb/notebook.py
Notebook.process
python
def process(self, uid, add_footer=False, no_exec=False, disable_cache=False, ignore_cache=False):
    """
    Execute notebook
    :return: self
    """

    self.exec_begin = time.perf_counter()
    self.exec_begin_dt = datetime.datetime.now()

    ep = CachedExecutePreprocessor(timeout=None, kernel_name='python3')
    ep.disable_cache = disable_cache
    ep.ignore_cache = ignore_cache
    ep.uid = uid

    # Execute the notebook
    if not no_exec:
        with warnings.catch_warnings():
            # On MacOS, annoying warning "RuntimeWarning: Failed to set sticky bit on"
            # Let's suppress it.
            warnings.simplefilter("ignore")
            ep.preprocess(self.nb, {'metadata': {'path': '.'}})

    self.exec_time = time.perf_counter() - self.exec_begin

    if add_footer:
        self.add_cell_footer()

    if not no_exec:
        logging.info('Execution time: {0:.2f}s'.format(self.exec_time))

    return self
Execute notebook :return: self
train
https://github.com/elehcimd/pynb/blob/a32af1f0e574f880eccda4a46aede6d65151f8c9/pynb/notebook.py#L340-L371
9,918
dswah/pyGAM
pygam/pygam.py
LogisticGAM.accuracy
python
def accuracy(self, X=None, y=None, mu=None):
    """
    computes the accuracy of the LogisticGAM

    Parameters
    ----------
    note: X or mu must be defined. defaults to mu

    X : array-like of shape (n_samples, m_features), optional (default=None)
        containing input data
    y : array-like of shape (n,)
        containing target data
    mu : array-like of shape (n_samples,), optional (default=None
        expected value of the targets given the model and inputs

    Returns
    -------
    float in [0, 1]
    """
    if not self._is_fitted:
        raise AttributeError('GAM has not been fitted. Call fit first.')

    y = check_y(y, self.link, self.distribution, verbose=self.verbose)
    if X is not None:
        X = check_X(X, n_feats=self.statistics_['m_features'],
                    edge_knots=self.edge_knots_, dtypes=self.dtype,
                    features=self.feature, verbose=self.verbose)

    if mu is None:
        mu = self.predict_mu(X)

    check_X_y(mu, y)
    return ((mu > 0.5).astype(int) == y).mean()
computes the accuracy of the LogisticGAM Parameters ---------- note: X or mu must be defined. defaults to mu X : array-like of shape (n_samples, m_features), optional (default=None) containing input data y : array-like of shape (n,) containing target data mu : array-like of shape (n_samples,), optional (default=None expected value of the targets given the model and inputs Returns ------- float in [0, 1]
train
https://github.com/dswah/pyGAM/blob/b3e5c3cd580f0a3ad69f9372861624f67760c325/pygam/pygam.py#L2395-L2426
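The return line of accuracy above thresholds the predicted probabilities at 0.5 and compares the result against the labels. A tiny worked example of that expression on its own:

import numpy as np

mu = np.array([0.9, 0.2, 0.7, 0.4])  # predicted probabilities
y = np.array([1, 0, 0, 0])           # true labels
acc = ((mu > 0.5).astype(int) == y).mean()
print(acc)  # 0.75: three of the four thresholded predictions match y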
9,919
glue-viz/glue-vispy-viewers
glue_vispy_viewers/extern/vispy/visuals/isocurve.py
IsocurveVisual._compute_iso_line
python
def _compute_iso_line(self):
    """ compute LineVisual vertices, connects and color-index
    """
    level_index = []
    connects = []
    verts = []

    # calculate which level are within data range
    # this works for now and the existing examples, but should be tested
    # thoroughly also with the data-sanity check in set_data-function
    choice = np.nonzero((self.levels > self._data.min()) &
                        (self._levels < self._data.max()))
    levels_to_calc = np.array(self.levels)[choice]

    # save minimum level index
    self._level_min = choice[0][0]

    for level in levels_to_calc:
        # if we use matplotlib isoline algorithm we need to add half a
        # pixel in both (x,y) dimensions because isolines are aligned to
        # pixel centers
        if _HAS_MPL:
            nlist = self._iso.trace(level, level, 0)
            paths = nlist[:len(nlist)//2]
            v, c = self._get_verts_and_connect(paths)
            v += np.array([0.5, 0.5])
        else:
            paths = isocurve(self._data.astype(float).T, level,
                             extend_to_edge=True, connected=True)
            v, c = self._get_verts_and_connect(paths)

        level_index.append(v.shape[0])
        connects.append(np.hstack((c, [False])))
        verts.append(v)

    self._li = np.hstack(level_index)
    self._connect = np.hstack(connects)
    self._verts = np.vstack(verts)
compute LineVisual vertices, connects and color-index
['compute', 'LineVisual', 'vertices', 'connects', 'and', 'color', '-', 'index']
train
https://github.com/glue-viz/glue-vispy-viewers/blob/54a4351d98c1f90dfb1a557d1b447c1f57470eea/glue_vispy_viewers/extern/vispy/visuals/isocurve.py#L138-L175
9,920
tanghaibao/goatools
goatools/grouper/aart_geneproducts_one.py
AArtGeneProductSetsOne.prt_report_grp1
def prt_report_grp1(self, prt=sys.stdout, **kws_grp): """Print full GO/gene report with grouping.""" summaryline = self.str_summaryline() # Print grouped GO IDs prt.write("{SUMMARY}\n".format(SUMMARY=summaryline)) self.prt_gos_grouped(prt, **kws_grp) # genes genes = sorted(self.gene2gos.keys()) prt.write("\n\n{SUMMARY}\n\n".format(SUMMARY=summaryline)) self.prt_section_key(prt) self.prt_gene_aart(genes, prt) # Sort genes prt.write("\n\n{SUMMARY}\n\n".format(SUMMARY=summaryline)) self.prt_gene_aart_details(genes, prt) return (self.name, self.get_section_marks())
python
def prt_report_grp1(self, prt=sys.stdout, **kws_grp): """Print full GO/gene report with grouping.""" summaryline = self.str_summaryline() # Print grouped GO IDs prt.write("{SUMMARY}\n".format(SUMMARY=summaryline)) self.prt_gos_grouped(prt, **kws_grp) # genes genes = sorted(self.gene2gos.keys()) prt.write("\n\n{SUMMARY}\n\n".format(SUMMARY=summaryline)) self.prt_section_key(prt) self.prt_gene_aart(genes, prt) # Sort genes prt.write("\n\n{SUMMARY}\n\n".format(SUMMARY=summaryline)) self.prt_gene_aart_details(genes, prt) return (self.name, self.get_section_marks())
['def', 'prt_report_grp1', '(', 'self', ',', 'prt', '=', 'sys', '.', 'stdout', ',', '*', '*', 'kws_grp', ')', ':', 'summaryline', '=', 'self', '.', 'str_summaryline', '(', ')', '# Print grouped GO IDs', 'prt', '.', 'write', '(', '"{SUMMARY}\\n"', '.', 'format', '(', 'SUMMARY', '=', 'summaryline', ')', ')', 'self', '.', 'prt_gos_grouped', '(', 'prt', ',', '*', '*', 'kws_grp', ')', '# genes', 'genes', '=', 'sorted', '(', 'self', '.', 'gene2gos', '.', 'keys', '(', ')', ')', 'prt', '.', 'write', '(', '"\\n\\n{SUMMARY}\\n\\n"', '.', 'format', '(', 'SUMMARY', '=', 'summaryline', ')', ')', 'self', '.', 'prt_section_key', '(', 'prt', ')', 'self', '.', 'prt_gene_aart', '(', 'genes', ',', 'prt', ')', '# Sort genes', 'prt', '.', 'write', '(', '"\\n\\n{SUMMARY}\\n\\n"', '.', 'format', '(', 'SUMMARY', '=', 'summaryline', ')', ')', 'self', '.', 'prt_gene_aart_details', '(', 'genes', ',', 'prt', ')', 'return', '(', 'self', '.', 'name', ',', 'self', '.', 'get_section_marks', '(', ')', ')']
Print full GO/gene report with grouping.
['Print', 'full', 'GO', '/', 'gene', 'report', 'with', 'grouping', '.']
train
https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/grouper/aart_geneproducts_one.py#L52-L66
9,921
synw/dataswim
dataswim/data/clean.py
Clean.fill_nan
def fill_nan(self, val: str, *cols): """ Fill NaN values with new values in the main dataframe :param val: new value :type val: str :param \*cols: names of the colums :type \*cols: str, at least one :example: ``ds.fill_nan("new value", "mycol1", "mycol2")`` """ df = self._fill_nan(val, *cols) if df is not None: self.df = df else: self.err("Can not fill nan values")
python
def fill_nan(self, val: str, *cols): """ Fill NaN values with new values in the main dataframe :param val: new value :type val: str :param \*cols: names of the colums :type \*cols: str, at least one :example: ``ds.fill_nan("new value", "mycol1", "mycol2")`` """ df = self._fill_nan(val, *cols) if df is not None: self.df = df else: self.err("Can not fill nan values")
['def', 'fill_nan', '(', 'self', ',', 'val', ':', 'str', ',', '*', 'cols', ')', ':', 'df', '=', 'self', '.', '_fill_nan', '(', 'val', ',', '*', 'cols', ')', 'if', 'df', 'is', 'not', 'None', ':', 'self', '.', 'df', '=', 'df', 'else', ':', 'self', '.', 'err', '(', '"Can not fill nan values"', ')']
Fill NaN values with new values in the main dataframe :param val: new value :type val: str :param \*cols: names of the colums :type \*cols: str, at least one :example: ``ds.fill_nan("new value", "mycol1", "mycol2")``
['Fill', 'NaN', 'values', 'with', 'new', 'values', 'in', 'the', 'main', 'dataframe']
train
https://github.com/synw/dataswim/blob/4a4a53f80daa7cd8e8409d76a19ce07296269da2/dataswim/data/clean.py#L88-L103
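A minimal usage sketch for the `fill_nan` record above, added for illustration only and not part of the dataset row. The `from dataswim import ds` import and the direct `ds.df` assignment are assumptions about the project's conventions, not facts taken from this record; the column names follow the docstring's own example.

```python
# Hypothetical call pattern mirroring the docstring's example:
# ds.fill_nan("new value", "mycol1", "mycol2")
import pandas as pd
from dataswim import ds  # assumed: dataswim exposes a ready-made instance named `ds`

ds.df = pd.DataFrame({"mycol1": [1.0, None], "mycol2": [None, "x"]})  # assumed way to load a dataframe
ds.fill_nan("new value", "mycol1", "mycol2")  # replaces NaN cells in both named columns
print(ds.df)
```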
9,922
nornir-automation/nornir
nornir/core/inventory.py
Host.get_connection_state
def get_connection_state(self, connection: str) -> Dict[str, Any]: """ For an already established connection return its state. """ if connection not in self.connections: raise ConnectionNotOpen(connection) return self.connections[connection].state
python
def get_connection_state(self, connection: str) -> Dict[str, Any]: """ For an already established connection return its state. """ if connection not in self.connections: raise ConnectionNotOpen(connection) return self.connections[connection].state
['def', 'get_connection_state', '(', 'self', ',', 'connection', ':', 'str', ')', '->', 'Dict', '[', 'str', ',', 'Any', ']', ':', 'if', 'connection', 'not', 'in', 'self', '.', 'connections', ':', 'raise', 'ConnectionNotOpen', '(', 'connection', ')', 'return', 'self', '.', 'connections', '[', 'connection', ']', '.', 'state']
For an already established connection return its state.
['For', 'an', 'already', 'established', 'connection', 'return', 'its', 'state', '.']
train
https://github.com/nornir-automation/nornir/blob/3425c47fd870db896cb80f619bae23bd98d50c74/nornir/core/inventory.py#L306-L313
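For illustration only (not part of the dataset row): a small sketch of how `Host.get_connection_state` behaves per the record above, returning the state dict for an open connection and raising `ConnectionNotOpen` otherwise. The exception's import path and the plugin name are assumptions.

```python
# Hypothetical helper around Host.get_connection_state; "napalm" is a placeholder plugin name.
from nornir.core.exceptions import ConnectionNotOpen  # assumed import path

def describe_connection(host, plugin="napalm"):
    try:
        state = host.get_connection_state(plugin)  # dict of state for an already-open connection
        print(f"{plugin} state on {host.name}: {state}")
    except ConnectionNotOpen:
        print(f"{plugin} is not open on {host.name}")
```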
9,923
geophysics-ubonn/crtomo_tools
lib/crtomo/tdManager.py
tdMan.invert
def invert(self, output_directory=None, catch_output=True, **kwargs): """Invert this instance, and import the result files No directories/files will be overwritten. Raise an IOError if the output directory exists. Parameters ---------- output_directory: string, optional use this directory as output directory for the generated tomodir. If None, then a temporary directory will be used that is deleted after import. catch_output: bool, optional Do not show CRTomo output cores: int, optional how many cores to use for CRTomo Returns ------- return_code: bool Return 0 if the inversion completed successfully. Return 1 if no measurements are present. """ self._check_state() if self.can_invert: if output_directory is not None: if not os.path.isdir(output_directory): os.makedirs(output_directory) tempdir = output_directory self._invert(tempdir, catch_output, **kwargs) else: raise IOError( 'output directory already exists: {0}'.format( output_directory ) ) else: with tempfile.TemporaryDirectory(dir=self.tempdir) as tempdir: self._invert(tempdir, catch_output, **kwargs) return 0 else: print( 'Sorry, no measurements present, cannot model yet' ) return 1
python
def invert(self, output_directory=None, catch_output=True, **kwargs): """Invert this instance, and import the result files No directories/files will be overwritten. Raise an IOError if the output directory exists. Parameters ---------- output_directory: string, optional use this directory as output directory for the generated tomodir. If None, then a temporary directory will be used that is deleted after import. catch_output: bool, optional Do not show CRTomo output cores: int, optional how many cores to use for CRTomo Returns ------- return_code: bool Return 0 if the inversion completed successfully. Return 1 if no measurements are present. """ self._check_state() if self.can_invert: if output_directory is not None: if not os.path.isdir(output_directory): os.makedirs(output_directory) tempdir = output_directory self._invert(tempdir, catch_output, **kwargs) else: raise IOError( 'output directory already exists: {0}'.format( output_directory ) ) else: with tempfile.TemporaryDirectory(dir=self.tempdir) as tempdir: self._invert(tempdir, catch_output, **kwargs) return 0 else: print( 'Sorry, no measurements present, cannot model yet' ) return 1
['def', 'invert', '(', 'self', ',', 'output_directory', '=', 'None', ',', 'catch_output', '=', 'True', ',', '*', '*', 'kwargs', ')', ':', 'self', '.', '_check_state', '(', ')', 'if', 'self', '.', 'can_invert', ':', 'if', 'output_directory', 'is', 'not', 'None', ':', 'if', 'not', 'os', '.', 'path', '.', 'isdir', '(', 'output_directory', ')', ':', 'os', '.', 'makedirs', '(', 'output_directory', ')', 'tempdir', '=', 'output_directory', 'self', '.', '_invert', '(', 'tempdir', ',', 'catch_output', ',', '*', '*', 'kwargs', ')', 'else', ':', 'raise', 'IOError', '(', "'output directory already exists: {0}'", '.', 'format', '(', 'output_directory', ')', ')', 'else', ':', 'with', 'tempfile', '.', 'TemporaryDirectory', '(', 'dir', '=', 'self', '.', 'tempdir', ')', 'as', 'tempdir', ':', 'self', '.', '_invert', '(', 'tempdir', ',', 'catch_output', ',', '*', '*', 'kwargs', ')', 'return', '0', 'else', ':', 'print', '(', "'Sorry, no measurements present, cannot model yet'", ')', 'return', '1']
Invert this instance, and import the result files No directories/files will be overwritten. Raise an IOError if the output directory exists. Parameters ---------- output_directory: string, optional use this directory as output directory for the generated tomodir. If None, then a temporary directory will be used that is deleted after import. catch_output: bool, optional Do not show CRTomo output cores: int, optional how many cores to use for CRTomo Returns ------- return_code: bool Return 0 if the inversion completed successfully. Return 1 if no measurements are present.
['Invert', 'this', 'instance', 'and', 'import', 'the', 'result', 'files']
train
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L956-L1001
9,924
google/grr
grr/core/grr_response_core/lib/config_lib.py
ParseConfigCommandLine
def ParseConfigCommandLine(): """Parse all the command line options which control the config system.""" # The user may specify the primary config file on the command line. if flags.FLAGS.config: _CONFIG.Initialize(filename=flags.FLAGS.config, must_exist=True) else: raise RuntimeError("A config file is not specified.") # Allow secondary configuration files to be specified. if flags.FLAGS.secondary_configs: for config_file in flags.FLAGS.secondary_configs: _CONFIG.LoadSecondaryConfig(config_file) # Allow individual options to be specified as global overrides. for statement in flags.FLAGS.parameter: if "=" not in statement: raise RuntimeError("statement %s on command line not valid." % statement) name, value = statement.split("=", 1) _CONFIG.global_override[name] = value # Load additional contexts from the command line. for context in flags.FLAGS.context: if context: _CONFIG.AddContext(context) if _CONFIG["Config.writeback"]: _CONFIG.SetWriteBack(_CONFIG["Config.writeback"]) # Does the user want to dump help? We do this after the config system is # initialized so the user can examine what we think the value of all the # parameters are. if flags.FLAGS.config_help: print("Configuration overview.") _CONFIG.PrintHelp() sys.exit(0)
python
def ParseConfigCommandLine(): """Parse all the command line options which control the config system.""" # The user may specify the primary config file on the command line. if flags.FLAGS.config: _CONFIG.Initialize(filename=flags.FLAGS.config, must_exist=True) else: raise RuntimeError("A config file is not specified.") # Allow secondary configuration files to be specified. if flags.FLAGS.secondary_configs: for config_file in flags.FLAGS.secondary_configs: _CONFIG.LoadSecondaryConfig(config_file) # Allow individual options to be specified as global overrides. for statement in flags.FLAGS.parameter: if "=" not in statement: raise RuntimeError("statement %s on command line not valid." % statement) name, value = statement.split("=", 1) _CONFIG.global_override[name] = value # Load additional contexts from the command line. for context in flags.FLAGS.context: if context: _CONFIG.AddContext(context) if _CONFIG["Config.writeback"]: _CONFIG.SetWriteBack(_CONFIG["Config.writeback"]) # Does the user want to dump help? We do this after the config system is # initialized so the user can examine what we think the value of all the # parameters are. if flags.FLAGS.config_help: print("Configuration overview.") _CONFIG.PrintHelp() sys.exit(0)
['def', 'ParseConfigCommandLine', '(', ')', ':', '# The user may specify the primary config file on the command line.', 'if', 'flags', '.', 'FLAGS', '.', 'config', ':', '_CONFIG', '.', 'Initialize', '(', 'filename', '=', 'flags', '.', 'FLAGS', '.', 'config', ',', 'must_exist', '=', 'True', ')', 'else', ':', 'raise', 'RuntimeError', '(', '"A config file is not specified."', ')', '# Allow secondary configuration files to be specified.', 'if', 'flags', '.', 'FLAGS', '.', 'secondary_configs', ':', 'for', 'config_file', 'in', 'flags', '.', 'FLAGS', '.', 'secondary_configs', ':', '_CONFIG', '.', 'LoadSecondaryConfig', '(', 'config_file', ')', '# Allow individual options to be specified as global overrides.', 'for', 'statement', 'in', 'flags', '.', 'FLAGS', '.', 'parameter', ':', 'if', '"="', 'not', 'in', 'statement', ':', 'raise', 'RuntimeError', '(', '"statement %s on command line not valid."', '%', 'statement', ')', 'name', ',', 'value', '=', 'statement', '.', 'split', '(', '"="', ',', '1', ')', '_CONFIG', '.', 'global_override', '[', 'name', ']', '=', 'value', '# Load additional contexts from the command line.', 'for', 'context', 'in', 'flags', '.', 'FLAGS', '.', 'context', ':', 'if', 'context', ':', '_CONFIG', '.', 'AddContext', '(', 'context', ')', 'if', '_CONFIG', '[', '"Config.writeback"', ']', ':', '_CONFIG', '.', 'SetWriteBack', '(', '_CONFIG', '[', '"Config.writeback"', ']', ')', '# Does the user want to dump help? We do this after the config system is', '# initialized so the user can examine what we think the value of all the', '# parameters are.', 'if', 'flags', '.', 'FLAGS', '.', 'config_help', ':', 'print', '(', '"Configuration overview."', ')', '_CONFIG', '.', 'PrintHelp', '(', ')', 'sys', '.', 'exit', '(', '0', ')']
Parse all the command line options which control the config system.
['Parse', 'all', 'the', 'command', 'line', 'options', 'which', 'control', 'the', 'config', 'system', '.']
train
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/core/grr_response_core/lib/config_lib.py#L1704-L1740
9,925
senaite/senaite.core
bika/lims/exportimport/instruments/shimadzu/nexera/LC2040C.py
TSVParser.parse_TSVline
def parse_TSVline(self, line): """ Parses result lines """ split_row = [token.strip() for token in line.split('\t')] _results = {'DefaultResult': 'Conc.'} # ID# 1 if split_row[0] == 'ID#': return 0 # Name CBDV - cannabidivarin elif split_row[0] == 'Name': if split_row[1]: self._currentanalysiskw = split_row[1] return 0 else: self.warn("Analysis Keyword not found or empty", numline=self._numline, line=line) # Data Filename Sample Name Sample ID Sample Type Level# elif 'Sample ID' in split_row: split_row.insert(0, '#') self._currentresultsheader = split_row return 0 # 1 QC PREP A_QC PREP A_009.lcd QC PREP elif split_row[0].isdigit(): _results.update(dict(zip(self._currentresultsheader, split_row))) # 10/17/2016 7:55:06 PM try: da = datetime.strptime( _results['Date Acquired'], "%m/%d/%Y %I:%M:%S %p") self._header['Output Date'] = da self._header['Output Time'] = da except ValueError: self.err("Invalid Output Time format", numline=self._numline, line=line) result = _results[_results['DefaultResult']] column_name = _results['DefaultResult'] result = self.zeroValueDefaultInstrumentResults( column_name, result, line) _results[_results['DefaultResult']] = result self._addRawResult(_results['Sample ID'], values={self._currentanalysiskw: _results}, override=False)
python
def parse_TSVline(self, line): """ Parses result lines """ split_row = [token.strip() for token in line.split('\t')] _results = {'DefaultResult': 'Conc.'} # ID# 1 if split_row[0] == 'ID#': return 0 # Name CBDV - cannabidivarin elif split_row[0] == 'Name': if split_row[1]: self._currentanalysiskw = split_row[1] return 0 else: self.warn("Analysis Keyword not found or empty", numline=self._numline, line=line) # Data Filename Sample Name Sample ID Sample Type Level# elif 'Sample ID' in split_row: split_row.insert(0, '#') self._currentresultsheader = split_row return 0 # 1 QC PREP A_QC PREP A_009.lcd QC PREP elif split_row[0].isdigit(): _results.update(dict(zip(self._currentresultsheader, split_row))) # 10/17/2016 7:55:06 PM try: da = datetime.strptime( _results['Date Acquired'], "%m/%d/%Y %I:%M:%S %p") self._header['Output Date'] = da self._header['Output Time'] = da except ValueError: self.err("Invalid Output Time format", numline=self._numline, line=line) result = _results[_results['DefaultResult']] column_name = _results['DefaultResult'] result = self.zeroValueDefaultInstrumentResults( column_name, result, line) _results[_results['DefaultResult']] = result self._addRawResult(_results['Sample ID'], values={self._currentanalysiskw: _results}, override=False)
['def', 'parse_TSVline', '(', 'self', ',', 'line', ')', ':', 'split_row', '=', '[', 'token', '.', 'strip', '(', ')', 'for', 'token', 'in', 'line', '.', 'split', '(', "'\\t'", ')', ']', '_results', '=', '{', "'DefaultResult'", ':', "'Conc.'", '}', '# ID# 1', 'if', 'split_row', '[', '0', ']', '==', "'ID#'", ':', 'return', '0', '# Name\tCBDV - cannabidivarin', 'elif', 'split_row', '[', '0', ']', '==', "'Name'", ':', 'if', 'split_row', '[', '1', ']', ':', 'self', '.', '_currentanalysiskw', '=', 'split_row', '[', '1', ']', 'return', '0', 'else', ':', 'self', '.', 'warn', '(', '"Analysis Keyword not found or empty"', ',', 'numline', '=', 'self', '.', '_numline', ',', 'line', '=', 'line', ')', '# Data Filename\tSample Name\tSample ID\tSample Type\tLevel#', 'elif', "'Sample ID'", 'in', 'split_row', ':', 'split_row', '.', 'insert', '(', '0', ',', "'#'", ')', 'self', '.', '_currentresultsheader', '=', 'split_row', 'return', '0', '# 1\tQC PREP A_QC PREP A_009.lcd\tQC PREP', 'elif', 'split_row', '[', '0', ']', '.', 'isdigit', '(', ')', ':', '_results', '.', 'update', '(', 'dict', '(', 'zip', '(', 'self', '.', '_currentresultsheader', ',', 'split_row', ')', ')', ')', '# 10/17/2016 7:55:06 PM', 'try', ':', 'da', '=', 'datetime', '.', 'strptime', '(', '_results', '[', "'Date Acquired'", ']', ',', '"%m/%d/%Y %I:%M:%S %p"', ')', 'self', '.', '_header', '[', "'Output Date'", ']', '=', 'da', 'self', '.', '_header', '[', "'Output Time'", ']', '=', 'da', 'except', 'ValueError', ':', 'self', '.', 'err', '(', '"Invalid Output Time format"', ',', 'numline', '=', 'self', '.', '_numline', ',', 'line', '=', 'line', ')', 'result', '=', '_results', '[', '_results', '[', "'DefaultResult'", ']', ']', 'column_name', '=', '_results', '[', "'DefaultResult'", ']', 'result', '=', 'self', '.', 'zeroValueDefaultInstrumentResults', '(', 'column_name', ',', 'result', ',', 'line', ')', '_results', '[', '_results', '[', "'DefaultResult'", ']', ']', '=', 'result', 'self', '.', '_addRawResult', '(', '_results', '[', "'Sample ID'", ']', ',', 'values', '=', '{', 'self', '.', '_currentanalysiskw', ':', '_results', '}', ',', 'override', '=', 'False', ')']
Parses result lines
['Parses', 'result', 'lines']
train
https://github.com/senaite/senaite.core/blob/7602ce2ea2f9e81eb34e20ce17b98a3e70713f85/bika/lims/exportimport/instruments/shimadzu/nexera/LC2040C.py#L101-L146
9,926
evhub/coconut
coconut/compiler/grammar.py
subscriptgroup_handle
def subscriptgroup_handle(tokens): """Process subscriptgroups.""" internal_assert(0 < len(tokens) <= 3, "invalid slice args", tokens) args = [] for arg in tokens: if not arg: arg = "None" args.append(arg) if len(args) == 1: return args[0] else: return "_coconut.slice(" + ", ".join(args) + ")"
python
def subscriptgroup_handle(tokens): """Process subscriptgroups.""" internal_assert(0 < len(tokens) <= 3, "invalid slice args", tokens) args = [] for arg in tokens: if not arg: arg = "None" args.append(arg) if len(args) == 1: return args[0] else: return "_coconut.slice(" + ", ".join(args) + ")"
['def', 'subscriptgroup_handle', '(', 'tokens', ')', ':', 'internal_assert', '(', '0', '<', 'len', '(', 'tokens', ')', '<=', '3', ',', '"invalid slice args"', ',', 'tokens', ')', 'args', '=', '[', ']', 'for', 'arg', 'in', 'tokens', ':', 'if', 'not', 'arg', ':', 'arg', '=', '"None"', 'args', '.', 'append', '(', 'arg', ')', 'if', 'len', '(', 'args', ')', '==', '1', ':', 'return', 'args', '[', '0', ']', 'else', ':', 'return', '"_coconut.slice("', '+', '", "', '.', 'join', '(', 'args', ')', '+', '")"']
Process subscriptgroups.
['Process', 'subscriptgroups', '.']
train
https://github.com/evhub/coconut/blob/ff97177344e7604e89a0a98a977a87ed2a56fc6d/coconut/compiler/grammar.py#L563-L574
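For illustration only (not part of the dataset row), a standalone sketch that mirrors the slice-translation logic of `subscriptgroup_handle` on plain lists of strings, showing the output it would emit for one-element and multi-element subscript groups.

```python
# Self-contained mirror of subscriptgroup_handle's logic; empty tokens become "None".
def slice_from_tokens(tokens):
    assert 0 < len(tokens) <= 3, "invalid slice args"
    args = [arg if arg else "None" for arg in tokens]
    return args[0] if len(args) == 1 else "_coconut.slice(" + ", ".join(args) + ")"

print(slice_from_tokens(["1"]))           # -> 1
print(slice_from_tokens(["1", "", "2"]))  # -> _coconut.slice(1, None, 2)
```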
9,927
ANTsX/ANTsPy
ants/utils/invariant_image_similarity.py
invariant_image_similarity
def invariant_image_similarity(image1, image2, local_search_iterations=0, metric='MI', thetas=np.linspace(0,360,5), thetas2=np.linspace(0,360,5), thetas3=np.linspace(0,360,5), scale_image=1, do_reflection=False, txfn=None, transform='Affine'): """ Similarity metrics between two images as a function of geometry Compute similarity metric between two images as image is rotated about its center w/ or w/o optimization ANTsR function: `invariantImageSimilarity` Arguments --------- image1 : ANTsImage reference image image2 : ANTsImage moving image local_search_iterations : integer integer controlling local search in multistart metric : string which metric to use MI GC thetas : 1D-ndarray/list/tuple numeric vector of search angles in degrees thetas2 : 1D-ndarray/list/tuple numeric vector of search angles in degrees around principal axis 2 (3D) thetas3 : 1D-ndarray/list/tuple numeric vector of search angles in degrees around principal axis 3 (3D) scale_image : scalar global scale do_reflection : boolean whether to reflect image about principal axis txfn : string (optional) if present, write optimal tx to .mat file transform : string type of transform to use Rigid Similarity Affine Returns ------- pd.DataFrame dataframe with metric values and transformation parameters Example ------- >>> import ants >>> img1 = ants.image_read(ants.get_ants_data('r16')) >>> img2 = ants.image_read(ants.get_ants_data('r64')) >>> metric = ants.invariant_image_similarity(img1,img2) """ if transform not in {'Rigid', 'Similarity', 'Affine'}: raise ValueError('transform must be one of Rigid/Similarity/Affine') if image1.pixeltype != 'float': image1 = image1.clone('float') if image2.pixeltype != 'float': image2 = image2.clone('float') if txfn is None: txfn = mktemp(suffix='.mat') # convert thetas to radians thetain = (thetas * math.pi) / 180. thetain2 = (thetas2 * math.pi) / 180. thetain3 = (thetas3 * math.pi) / 180. image1 = utils.iMath(image1, 'Normalize') image2 = utils.iMath(image2, 'Normalize') idim = image1.dimension fpname = ['FixedParam%i'%i for i in range(1,idim+1)] if not do_reflection: libfn = utils.get_lib_fn('invariantImageSimilarity_%s%iD' % (transform, idim)) r1 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, int(do_reflection), txfn) r1 = np.asarray(r1) pnames = ['Param%i'%i for i in range(1,r1.shape[1])] pnames[(len(pnames)-idim):len(pnames)] = fpname r1 = pd.DataFrame(r1, columns=['MetricValue']+pnames) return r1, txfn else: txfn1 = mktemp(suffix='.mat') txfn2 = mktemp(suffix='.mat') txfn3 = mktemp(suffix='.mat') txfn4 = mktemp(suffix='.mat') libfn = utils.get_lib_fn('invariantImageSimilarity_%s%iD' % (transform, idim)) ## R1 ## r1 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 0, txfn1) r1 = np.asarray(r1) pnames = ['Param%i'%i for i in range(1,r1.shape[1])] pnames[(len(pnames)-idim):len(pnames)] = fpname r1 = pd.DataFrame(r1, columns=['MetricValue']+pnames) ## R2 ## r2 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 1, txfn2) r2 = np.asarray(r2) r2 = pd.DataFrame(r2, columns=['MetricValue']+pnames) ## R3 ## r3 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 2, txfn3) r3 = np.asarray(r3) r3 = pd.DataFrame(r3, columns=['MetricValue']+pnames) ## R4 ## r4 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 3, txfn4) r4 = np.asarray(r4) r4 = pd.DataFrame(r4, columns=['MetricValue']+pnames) rmins = [np.min(r1.iloc[:,0]), np.min(r2.iloc[:,0]), np.min(r3.iloc[:,0]), np.min(r4.iloc[:,0])] ww = np.argmin(rmins) if ww == 0: return r1, txfn1 elif ww == 1: return r2, txfn2 elif ww == 2: return r3, txfn3 elif ww == 3: return r4, txfn4
python
def invariant_image_similarity(image1, image2, local_search_iterations=0, metric='MI', thetas=np.linspace(0,360,5), thetas2=np.linspace(0,360,5), thetas3=np.linspace(0,360,5), scale_image=1, do_reflection=False, txfn=None, transform='Affine'): """ Similarity metrics between two images as a function of geometry Compute similarity metric between two images as image is rotated about its center w/ or w/o optimization ANTsR function: `invariantImageSimilarity` Arguments --------- image1 : ANTsImage reference image image2 : ANTsImage moving image local_search_iterations : integer integer controlling local search in multistart metric : string which metric to use MI GC thetas : 1D-ndarray/list/tuple numeric vector of search angles in degrees thetas2 : 1D-ndarray/list/tuple numeric vector of search angles in degrees around principal axis 2 (3D) thetas3 : 1D-ndarray/list/tuple numeric vector of search angles in degrees around principal axis 3 (3D) scale_image : scalar global scale do_reflection : boolean whether to reflect image about principal axis txfn : string (optional) if present, write optimal tx to .mat file transform : string type of transform to use Rigid Similarity Affine Returns ------- pd.DataFrame dataframe with metric values and transformation parameters Example ------- >>> import ants >>> img1 = ants.image_read(ants.get_ants_data('r16')) >>> img2 = ants.image_read(ants.get_ants_data('r64')) >>> metric = ants.invariant_image_similarity(img1,img2) """ if transform not in {'Rigid', 'Similarity', 'Affine'}: raise ValueError('transform must be one of Rigid/Similarity/Affine') if image1.pixeltype != 'float': image1 = image1.clone('float') if image2.pixeltype != 'float': image2 = image2.clone('float') if txfn is None: txfn = mktemp(suffix='.mat') # convert thetas to radians thetain = (thetas * math.pi) / 180. thetain2 = (thetas2 * math.pi) / 180. thetain3 = (thetas3 * math.pi) / 180. image1 = utils.iMath(image1, 'Normalize') image2 = utils.iMath(image2, 'Normalize') idim = image1.dimension fpname = ['FixedParam%i'%i for i in range(1,idim+1)] if not do_reflection: libfn = utils.get_lib_fn('invariantImageSimilarity_%s%iD' % (transform, idim)) r1 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, int(do_reflection), txfn) r1 = np.asarray(r1) pnames = ['Param%i'%i for i in range(1,r1.shape[1])] pnames[(len(pnames)-idim):len(pnames)] = fpname r1 = pd.DataFrame(r1, columns=['MetricValue']+pnames) return r1, txfn else: txfn1 = mktemp(suffix='.mat') txfn2 = mktemp(suffix='.mat') txfn3 = mktemp(suffix='.mat') txfn4 = mktemp(suffix='.mat') libfn = utils.get_lib_fn('invariantImageSimilarity_%s%iD' % (transform, idim)) ## R1 ## r1 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 0, txfn1) r1 = np.asarray(r1) pnames = ['Param%i'%i for i in range(1,r1.shape[1])] pnames[(len(pnames)-idim):len(pnames)] = fpname r1 = pd.DataFrame(r1, columns=['MetricValue']+pnames) ## R2 ## r2 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 1, txfn2) r2 = np.asarray(r2) r2 = pd.DataFrame(r2, columns=['MetricValue']+pnames) ## R3 ## r3 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 2, txfn3) r3 = np.asarray(r3) r3 = pd.DataFrame(r3, columns=['MetricValue']+pnames) ## R4 ## r4 = libfn(image1.pointer, image2.pointer, list(thetain), list(thetain2), list(thetain3), local_search_iterations, metric, scale_image, 3, txfn4) r4 = np.asarray(r4) r4 = pd.DataFrame(r4, columns=['MetricValue']+pnames) rmins = [np.min(r1.iloc[:,0]), np.min(r2.iloc[:,0]), np.min(r3.iloc[:,0]), np.min(r4.iloc[:,0])] ww = np.argmin(rmins) if ww == 0: return r1, txfn1 elif ww == 1: return r2, txfn2 elif ww == 2: return r3, txfn3 elif ww == 3: return r4, txfn4
['def', 'invariant_image_similarity', '(', 'image1', ',', 'image2', ',', 'local_search_iterations', '=', '0', ',', 'metric', '=', "'MI'", ',', 'thetas', '=', 'np', '.', 'linspace', '(', '0', ',', '360', ',', '5', ')', ',', 'thetas2', '=', 'np', '.', 'linspace', '(', '0', ',', '360', ',', '5', ')', ',', 'thetas3', '=', 'np', '.', 'linspace', '(', '0', ',', '360', ',', '5', ')', ',', 'scale_image', '=', '1', ',', 'do_reflection', '=', 'False', ',', 'txfn', '=', 'None', ',', 'transform', '=', "'Affine'", ')', ':', 'if', 'transform', 'not', 'in', '{', "'Rigid'", ',', "'Similarity'", ',', "'Affine'", '}', ':', 'raise', 'ValueError', '(', "'transform must be one of Rigid/Similarity/Affine'", ')', 'if', 'image1', '.', 'pixeltype', '!=', "'float'", ':', 'image1', '=', 'image1', '.', 'clone', '(', "'float'", ')', 'if', 'image2', '.', 'pixeltype', '!=', "'float'", ':', 'image2', '=', 'image2', '.', 'clone', '(', "'float'", ')', 'if', 'txfn', 'is', 'None', ':', 'txfn', '=', 'mktemp', '(', 'suffix', '=', "'.mat'", ')', '# convert thetas to radians', 'thetain', '=', '(', 'thetas', '*', 'math', '.', 'pi', ')', '/', '180.', 'thetain2', '=', '(', 'thetas2', '*', 'math', '.', 'pi', ')', '/', '180.', 'thetain3', '=', '(', 'thetas3', '*', 'math', '.', 'pi', ')', '/', '180.', 'image1', '=', 'utils', '.', 'iMath', '(', 'image1', ',', "'Normalize'", ')', 'image2', '=', 'utils', '.', 'iMath', '(', 'image2', ',', "'Normalize'", ')', 'idim', '=', 'image1', '.', 'dimension', 'fpname', '=', '[', "'FixedParam%i'", '%', 'i', 'for', 'i', 'in', 'range', '(', '1', ',', 'idim', '+', '1', ')', ']', 'if', 'not', 'do_reflection', ':', 'libfn', '=', 'utils', '.', 'get_lib_fn', '(', "'invariantImageSimilarity_%s%iD'", '%', '(', 'transform', ',', 'idim', ')', ')', 'r1', '=', 'libfn', '(', 'image1', '.', 'pointer', ',', 'image2', '.', 'pointer', ',', 'list', '(', 'thetain', ')', ',', 'list', '(', 'thetain2', ')', ',', 'list', '(', 'thetain3', ')', ',', 'local_search_iterations', ',', 'metric', ',', 'scale_image', ',', 'int', '(', 'do_reflection', ')', ',', 'txfn', ')', 'r1', '=', 'np', '.', 'asarray', '(', 'r1', ')', 'pnames', '=', '[', "'Param%i'", '%', 'i', 'for', 'i', 'in', 'range', '(', '1', ',', 'r1', '.', 'shape', '[', '1', ']', ')', ']', 'pnames', '[', '(', 'len', '(', 'pnames', ')', '-', 'idim', ')', ':', 'len', '(', 'pnames', ')', ']', '=', 'fpname', 'r1', '=', 'pd', '.', 'DataFrame', '(', 'r1', ',', 'columns', '=', '[', "'MetricValue'", ']', '+', 'pnames', ')', 'return', 'r1', ',', 'txfn', 'else', ':', 'txfn1', '=', 'mktemp', '(', 'suffix', '=', "'.mat'", ')', 'txfn2', '=', 'mktemp', '(', 'suffix', '=', "'.mat'", ')', 'txfn3', '=', 'mktemp', '(', 'suffix', '=', "'.mat'", ')', 'txfn4', '=', 'mktemp', '(', 'suffix', '=', "'.mat'", ')', 'libfn', '=', 'utils', '.', 'get_lib_fn', '(', "'invariantImageSimilarity_%s%iD'", '%', '(', 'transform', ',', 'idim', ')', ')', '## R1 ##', 'r1', '=', 'libfn', '(', 'image1', '.', 'pointer', ',', 'image2', '.', 'pointer', ',', 'list', '(', 'thetain', ')', ',', 'list', '(', 'thetain2', ')', ',', 'list', '(', 'thetain3', ')', ',', 'local_search_iterations', ',', 'metric', ',', 'scale_image', ',', '0', ',', 'txfn1', ')', 'r1', '=', 'np', '.', 'asarray', '(', 'r1', ')', 'pnames', '=', '[', "'Param%i'", '%', 'i', 'for', 'i', 'in', 'range', '(', '1', ',', 'r1', '.', 'shape', '[', '1', ']', ')', ']', 'pnames', '[', '(', 'len', '(', 'pnames', ')', '-', 'idim', ')', ':', 'len', '(', 'pnames', ')', ']', '=', 'fpname', 'r1', '=', 'pd', '.', 'DataFrame', '(', 'r1', ',', 'columns', '=', '[', "'MetricValue'", ']', '+', 'pnames', ')', '## R2 ##', 'r2', '=', 'libfn', '(', 'image1', '.', 'pointer', ',', 'image2', '.', 'pointer', ',', 'list', '(', 'thetain', ')', ',', 'list', '(', 'thetain2', ')', ',', 'list', '(', 'thetain3', ')', ',', 'local_search_iterations', ',', 'metric', ',', 'scale_image', ',', '1', ',', 'txfn2', ')', 'r2', '=', 'np', '.', 'asarray', '(', 'r2', ')', 'r2', '=', 'pd', '.', 'DataFrame', '(', 'r2', ',', 'columns', '=', '[', "'MetricValue'", ']', '+', 'pnames', ')', '## R3 ##', 'r3', '=', 'libfn', '(', 'image1', '.', 'pointer', ',', 'image2', '.', 'pointer', ',', 'list', '(', 'thetain', ')', ',', 'list', '(', 'thetain2', ')', ',', 'list', '(', 'thetain3', ')', ',', 'local_search_iterations', ',', 'metric', ',', 'scale_image', ',', '2', ',', 'txfn3', ')', 'r3', '=', 'np', '.', 'asarray', '(', 'r3', ')', 'r3', '=', 'pd', '.', 'DataFrame', '(', 'r3', ',', 'columns', '=', '[', "'MetricValue'", ']', '+', 'pnames', ')', '## R4 ##', 'r4', '=', 'libfn', '(', 'image1', '.', 'pointer', ',', 'image2', '.', 'pointer', ',', 'list', '(', 'thetain', ')', ',', 'list', '(', 'thetain2', ')', ',', 'list', '(', 'thetain3', ')', ',', 'local_search_iterations', ',', 'metric', ',', 'scale_image', ',', '3', ',', 'txfn4', ')', 'r4', '=', 'np', '.', 'asarray', '(', 'r4', ')', 'r4', '=', 'pd', '.', 'DataFrame', '(', 'r4', ',', 'columns', '=', '[', "'MetricValue'", ']', '+', 'pnames', ')', 'rmins', '=', '[', 'np', '.', 'min', '(', 'r1', '.', 'iloc', '[', ':', ',', '0', ']', ')', ',', 'np', '.', 'min', '(', 'r2', '.', 'iloc', '[', ':', ',', '0', ']', ')', ',', 'np', '.', 'min', '(', 'r3', '.', 'iloc', '[', ':', ',', '0', ']', ')', ',', 'np', '.', 'min', '(', 'r4', '.', 'iloc', '[', ':', ',', '0', ']', ')', ']', 'ww', '=', 'np', '.', 'argmin', '(', 'rmins', ')', 'if', 'ww', '==', '0', ':', 'return', 'r1', ',', 'txfn1', 'elif', 'ww', '==', '1', ':', 'return', 'r2', ',', 'txfn2', 'elif', 'ww', '==', '2', ':', 'return', 'r3', ',', 'txfn3', 'elif', 'ww', '==', '3', ':', 'return', 'r4', ',', 'txfn4']
Similarity metrics between two images as a function of geometry Compute similarity metric between two images as image is rotated about its center w/ or w/o optimization ANTsR function: `invariantImageSimilarity` Arguments --------- image1 : ANTsImage reference image image2 : ANTsImage moving image local_search_iterations : integer integer controlling local search in multistart metric : string which metric to use MI GC thetas : 1D-ndarray/list/tuple numeric vector of search angles in degrees thetas2 : 1D-ndarray/list/tuple numeric vector of search angles in degrees around principal axis 2 (3D) thetas3 : 1D-ndarray/list/tuple numeric vector of search angles in degrees around principal axis 3 (3D) scale_image : scalar global scale do_reflection : boolean whether to reflect image about principal axis txfn : string (optional) if present, write optimal tx to .mat file transform : string type of transform to use Rigid Similarity Affine Returns ------- pd.DataFrame dataframe with metric values and transformation parameters Example ------- >>> import ants >>> img1 = ants.image_read(ants.get_ants_data('r16')) >>> img2 = ants.image_read(ants.get_ants_data('r64')) >>> metric = ants.invariant_image_similarity(img1,img2)
['Similarity', 'metrics', 'between', 'two', 'images', 'as', 'a', 'function', 'of', 'geometry']
train
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/invariant_image_similarity.py#L15-L197
9,928
bigchaindb/bigchaindb-driver
bigchaindb_driver/driver.py
TransactionsEndpoint.prepare
def prepare(*, operation='CREATE', signers=None, recipients=None, asset=None, metadata=None, inputs=None): """Prepares a transaction payload, ready to be fulfilled. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored. """ return prepare_transaction( operation=operation, signers=signers, recipients=recipients, asset=asset, metadata=metadata, inputs=inputs, )
python
def prepare(*, operation='CREATE', signers=None, recipients=None, asset=None, metadata=None, inputs=None): """Prepares a transaction payload, ready to be fulfilled. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored. """ return prepare_transaction( operation=operation, signers=signers, recipients=recipients, asset=asset, metadata=metadata, inputs=inputs, )
['def', 'prepare', '(', '*', ',', 'operation', '=', "'CREATE'", ',', 'signers', '=', 'None', ',', 'recipients', '=', 'None', ',', 'asset', '=', 'None', ',', 'metadata', '=', 'None', ',', 'inputs', '=', 'None', ')', ':', 'return', 'prepare_transaction', '(', 'operation', '=', 'operation', ',', 'signers', '=', 'signers', ',', 'recipients', '=', 'recipients', ',', 'asset', '=', 'asset', ',', 'metadata', '=', 'metadata', ',', 'inputs', '=', 'inputs', ',', ')']
Prepares a transaction payload, ready to be fulfilled. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored.
['Prepares', 'a', 'transaction', 'payload', 'ready', 'to', 'be', 'fulfilled', '.']
train
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/driver.py#L181-L255
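An illustrative sketch (not part of the dataset row) of a CREATE-transaction call following the docstring above; the node URL, keypair helper, and asset payload are placeholders or assumptions rather than values from the record.

```python
# Hypothetical CREATE preparation per the docstring: signers set, asset in {'data': ...} form.
from bigchaindb_driver import BigchainDB
from bigchaindb_driver.crypto import generate_keypair

bdb = BigchainDB("https://example.com:9984")  # placeholder node URL
alice = generate_keypair()

prepared = bdb.transactions.prepare(
    operation="CREATE",
    signers=alice.public_key,
    asset={"data": {"serial": "abc123"}},     # CREATE assets use the {'data': ...} form
    metadata={"note": "illustrative only"},
)
```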
9,929
mishbahr/django-usersettings2
usersettings/context_processors.py
usersettings
def usersettings(request): """ Returns the current ``UserSettings`` based on the SITE_ID in the project's settings as context variables If there is no 'usersettings' attribute in the request, fetches the current UserSettings (from usersettings.shortcuts.get_current_usersettings). """ if hasattr(request, 'usersettings'): usersettings = request.usersettings else: from .shortcuts import get_current_usersettings usersettings = get_current_usersettings() return { 'usersettings': usersettings }
python
def usersettings(request): """ Returns the current ``UserSettings`` based on the SITE_ID in the project's settings as context variables If there is no 'usersettings' attribute in the request, fetches the current UserSettings (from usersettings.shortcuts.get_current_usersettings). """ if hasattr(request, 'usersettings'): usersettings = request.usersettings else: from .shortcuts import get_current_usersettings usersettings = get_current_usersettings() return { 'usersettings': usersettings }
['def', 'usersettings', '(', 'request', ')', ':', 'if', 'hasattr', '(', 'request', ',', "'usersettings'", ')', ':', 'usersettings', '=', 'request', '.', 'usersettings', 'else', ':', 'from', '.', 'shortcuts', 'import', 'get_current_usersettings', 'usersettings', '=', 'get_current_usersettings', '(', ')', 'return', '{', "'usersettings'", ':', 'usersettings', '}']
Returns the current ``UserSettings`` based on the SITE_ID in the project's settings as context variables If there is no 'usersettings' attribute in the request, fetches the current UserSettings (from usersettings.shortcuts.get_current_usersettings).
['Returns', 'the', 'current', 'UserSettings', 'based', 'on', 'the', 'SITE_ID', 'in', 'the', 'project', 's', 'settings', 'as', 'context', 'variables']
train
https://github.com/mishbahr/django-usersettings2/blob/cbc2f4b2e01d5401bec8a3fa39151730cd2dcd2a/usersettings/context_processors.py#L2-L18
9,930
shaunduncan/helga-facts
helga_facts.py
term_regex
def term_regex(term): """ Returns a case-insensitive regex for searching terms """ return re.compile(r'^{0}$'.format(re.escape(term)), re.IGNORECASE)
python
def term_regex(term): """ Returns a case-insensitive regex for searching terms """ return re.compile(r'^{0}$'.format(re.escape(term)), re.IGNORECASE)
['def', 'term_regex', '(', 'term', ')', ':', 'return', 're', '.', 'compile', '(', "r'^{0}$'", '.', 'format', '(', 're', '.', 'escape', '(', 'term', ')', ')', ',', 're', '.', 'IGNORECASE', ')']
Returns a case-insensitive regex for searching terms
['Returns', 'a', 'case', '-', 'insensitive', 'regex', 'for', 'searching', 'terms']
train
https://github.com/shaunduncan/helga-facts/blob/956b1d93abccdaaf318d7cac4451edc7e73bf5e9/helga_facts.py#L23-L27
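For illustration only (not part of the dataset row), a self-contained check of the case-insensitive, fully anchored pattern that `term_regex` builds.

```python
import re

def term_regex(term):  # copied from the record above
    return re.compile(r'^{0}$'.format(re.escape(term)), re.IGNORECASE)

assert term_regex("python 3.x").match("PYTHON 3.x")  # case-insensitive full-string match
assert term_regex("a.b").match("a-b") is None        # '.' is escaped, so it is not a wildcard
```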
9,931
mbj4668/pyang
pyang/plugins/sample-xml-skeleton.py
SampleXMLSkeletonPlugin.leaf
def leaf(self, node, elem, module, path): """Create a sample leaf element.""" if node.i_default is None: nel, newm, path = self.sample_element(node, elem, module, path) if path is None: return if self.annots: nel.append(etree.Comment( " type: %s " % node.search_one("type").arg)) elif self.defaults: nel, newm, path = self.sample_element(node, elem, module, path) if path is None: return nel.text = str(node.i_default_str)
python
def leaf(self, node, elem, module, path): """Create a sample leaf element.""" if node.i_default is None: nel, newm, path = self.sample_element(node, elem, module, path) if path is None: return if self.annots: nel.append(etree.Comment( " type: %s " % node.search_one("type").arg)) elif self.defaults: nel, newm, path = self.sample_element(node, elem, module, path) if path is None: return nel.text = str(node.i_default_str)
['def', 'leaf', '(', 'self', ',', 'node', ',', 'elem', ',', 'module', ',', 'path', ')', ':', 'if', 'node', '.', 'i_default', 'is', 'None', ':', 'nel', ',', 'newm', ',', 'path', '=', 'self', '.', 'sample_element', '(', 'node', ',', 'elem', ',', 'module', ',', 'path', ')', 'if', 'path', 'is', 'None', ':', 'return', 'if', 'self', '.', 'annots', ':', 'nel', '.', 'append', '(', 'etree', '.', 'Comment', '(', '" type: %s "', '%', 'node', '.', 'search_one', '(', '"type"', ')', '.', 'arg', ')', ')', 'elif', 'self', '.', 'defaults', ':', 'nel', ',', 'newm', ',', 'path', '=', 'self', '.', 'sample_element', '(', 'node', ',', 'elem', ',', 'module', ',', 'path', ')', 'if', 'path', 'is', 'None', ':', 'return', 'nel', '.', 'text', '=', 'str', '(', 'node', '.', 'i_default_str', ')']
Create a sample leaf element.
['Create', 'a', 'sample', 'leaf', 'element', '.']
train
https://github.com/mbj4668/pyang/blob/f2a5cc3142162e5b9ee4e18d154568d939ff63dd/pyang/plugins/sample-xml-skeleton.py#L153-L166
9,932
senaite/senaite.core
bika/lims/browser/samplinground/printform.py
PrintForm.pdfFromPOST
def pdfFromPOST(self): """ It returns the pdf for the sampling rounds printed """ html = self.request.form.get('html') style = self.request.form.get('style') reporthtml = "<html><head>%s</head><body><div id='report'>%s</body></html>" % (style, html) return self.printFromHTML(safe_unicode(reporthtml).encode('utf-8'))
python
def pdfFromPOST(self): """ It returns the pdf for the sampling rounds printed """ html = self.request.form.get('html') style = self.request.form.get('style') reporthtml = "<html><head>%s</head><body><div id='report'>%s</body></html>" % (style, html) return self.printFromHTML(safe_unicode(reporthtml).encode('utf-8'))
['def', 'pdfFromPOST', '(', 'self', ')', ':', 'html', '=', 'self', '.', 'request', '.', 'form', '.', 'get', '(', "'html'", ')', 'style', '=', 'self', '.', 'request', '.', 'form', '.', 'get', '(', "'style'", ')', 'reporthtml', '=', '"<html><head>%s</head><body><div id=\'report\'>%s</body></html>"', '%', '(', 'style', ',', 'html', ')', 'return', 'self', '.', 'printFromHTML', '(', 'safe_unicode', '(', 'reporthtml', ')', '.', 'encode', '(', "'utf-8'", ')', ')']
It returns the pdf for the sampling rounds printed
['It', 'returns', 'the', 'pdf', 'for', 'the', 'sampling', 'rounds', 'printed']
train
https://github.com/senaite/senaite.core/blob/7602ce2ea2f9e81eb34e20ce17b98a3e70713f85/bika/lims/browser/samplinground/printform.py#L241-L248
9,933
restran/mountains
mountains/encoding/converter.py
bin2str
def bin2str(b): """ Binary to string. """ ret = [] for pos in range(0, len(b), 8): ret.append(chr(int(b[pos:pos + 8], 2))) return ''.join(ret)
python
def bin2str(b): """ Binary to string. """ ret = [] for pos in range(0, len(b), 8): ret.append(chr(int(b[pos:pos + 8], 2))) return ''.join(ret)
['def', 'bin2str', '(', 'b', ')', ':', 'ret', '=', '[', ']', 'for', 'pos', 'in', 'range', '(', '0', ',', 'len', '(', 'b', ')', ',', '8', ')', ':', 'ret', '.', 'append', '(', 'chr', '(', 'int', '(', 'b', '[', 'pos', ':', 'pos', '+', '8', ']', ',', '2', ')', ')', ')', 'return', "''", '.', 'join', '(', 'ret', ')']
Binary to string.
['Binary', 'to', 'string', '.']
train
https://github.com/restran/mountains/blob/a97fee568b112f4e10d878f815d0db3dd0a98d74/mountains/encoding/converter.py#L183-L190
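For illustration only (not part of the dataset row): `bin2str` consumes the bit string eight characters at a time, so two bytes decode to two characters.

```python
def bin2str(b):  # copied from the record above
    ret = []
    for pos in range(0, len(b), 8):
        ret.append(chr(int(b[pos:pos + 8], 2)))
    return ''.join(ret)

assert bin2str('0100000101000010') == 'AB'  # 01000001 -> 65 -> 'A', 01000010 -> 66 -> 'B'
```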
9,934
roboogle/gtkmvc3
gtkmvco/examples/converter/src/models/currencies.py
CurrenciesModel.add
def add(self, model): """raises an exception if the model cannot be added""" def foo(m, p, i): if m[i][0].name == model.name: raise ValueError("Model already exists") return # checks if already existing self.foreach(foo) self.append((model,)) return
python
def add(self, model): """raises an exception if the model cannot be added""" def foo(m, p, i): if m[i][0].name == model.name: raise ValueError("Model already exists") return # checks if already existing self.foreach(foo) self.append((model,)) return
['def', 'add', '(', 'self', ',', 'model', ')', ':', 'def', 'foo', '(', 'm', ',', 'p', ',', 'i', ')', ':', 'if', 'm', '[', 'i', ']', '[', '0', ']', '.', 'name', '==', 'model', '.', 'name', ':', 'raise', 'ValueError', '(', '"Model already exists"', ')', 'return', '# checks if already existing', 'self', '.', 'foreach', '(', 'foo', ')', 'self', '.', 'append', '(', '(', 'model', ',', ')', ')', 'return']
raises an exception if the model cannot be added
['raises', 'an', 'exception', 'if', 'the', 'model', 'cannot', 'be', 'added']
train
https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/examples/converter/src/models/currencies.py#L56-L66
9,935
pantsbuild/pants
src/python/pants/base/deprecated.py
validate_deprecation_semver
def validate_deprecation_semver(version_string, version_description): """Validates that version_string is a valid semver. If so, returns that semver. Raises an error otherwise. :param str version_string: A pantsbuild.pants version which affects some deprecated entity. :param str version_description: A string used in exception messages to describe what the `version_string` represents. :rtype: `packaging.version.Version` :raises DeprecationApplicationError: if the version_string parameter is invalid. """ if version_string is None: raise MissingSemanticVersionError('The {} must be provided.'.format(version_description)) if not isinstance(version_string, six.string_types): raise BadSemanticVersionError('The {} must be a version string.'.format(version_description)) try: # NB: packaging will see versions like 1.a.0 as 1a0, and are "valid" # We explicitly want our versions to be of the form x.y.z. v = Version(version_string) if len(v.base_version.split('.')) != 3: raise BadSemanticVersionError('The given {} is not a valid version: ' '{}'.format(version_description, version_string)) if not v.is_prerelease: raise NonDevSemanticVersionError('The given {} is not a dev version: {}\n' 'Features should generally be removed in the first `dev` release ' 'of a release cycle.'.format(version_description, version_string)) return v except InvalidVersion as e: raise BadSemanticVersionError('The given {} {} is not a valid version: ' '{}'.format(version_description, version_string, e))
python
def validate_deprecation_semver(version_string, version_description): """Validates that version_string is a valid semver. If so, returns that semver. Raises an error otherwise. :param str version_string: A pantsbuild.pants version which affects some deprecated entity. :param str version_description: A string used in exception messages to describe what the `version_string` represents. :rtype: `packaging.version.Version` :raises DeprecationApplicationError: if the version_string parameter is invalid. """ if version_string is None: raise MissingSemanticVersionError('The {} must be provided.'.format(version_description)) if not isinstance(version_string, six.string_types): raise BadSemanticVersionError('The {} must be a version string.'.format(version_description)) try: # NB: packaging will see versions like 1.a.0 as 1a0, and are "valid" # We explicitly want our versions to be of the form x.y.z. v = Version(version_string) if len(v.base_version.split('.')) != 3: raise BadSemanticVersionError('The given {} is not a valid version: ' '{}'.format(version_description, version_string)) if not v.is_prerelease: raise NonDevSemanticVersionError('The given {} is not a dev version: {}\n' 'Features should generally be removed in the first `dev` release ' 'of a release cycle.'.format(version_description, version_string)) return v except InvalidVersion as e: raise BadSemanticVersionError('The given {} {} is not a valid version: ' '{}'.format(version_description, version_string, e))
['def', 'validate_deprecation_semver', '(', 'version_string', ',', 'version_description', ')', ':', 'if', 'version_string', 'is', 'None', ':', 'raise', 'MissingSemanticVersionError', '(', "'The {} must be provided.'", '.', 'format', '(', 'version_description', ')', ')', 'if', 'not', 'isinstance', '(', 'version_string', ',', 'six', '.', 'string_types', ')', ':', 'raise', 'BadSemanticVersionError', '(', "'The {} must be a version string.'", '.', 'format', '(', 'version_description', ')', ')', 'try', ':', '# NB: packaging will see versions like 1.a.0 as 1a0, and are "valid"', '# We explicitly want our versions to be of the form x.y.z.', 'v', '=', 'Version', '(', 'version_string', ')', 'if', 'len', '(', 'v', '.', 'base_version', '.', 'split', '(', "'.'", ')', ')', '!=', '3', ':', 'raise', 'BadSemanticVersionError', '(', "'The given {} is not a valid version: '", "'{}'", '.', 'format', '(', 'version_description', ',', 'version_string', ')', ')', 'if', 'not', 'v', '.', 'is_prerelease', ':', 'raise', 'NonDevSemanticVersionError', '(', "'The given {} is not a dev version: {}\\n'", "'Features should generally be removed in the first `dev` release '", "'of a release cycle.'", '.', 'format', '(', 'version_description', ',', 'version_string', ')', ')', 'return', 'v', 'except', 'InvalidVersion', 'as', 'e', ':', 'raise', 'BadSemanticVersionError', '(', "'The given {} {} is not a valid version: '", "'{}'", '.', 'format', '(', 'version_description', ',', 'version_string', ',', 'e', ')', ')']
Validates that version_string is a valid semver. If so, returns that semver. Raises an error otherwise. :param str version_string: A pantsbuild.pants version which affects some deprecated entity. :param str version_description: A string used in exception messages to describe what the `version_string` represents. :rtype: `packaging.version.Version` :raises DeprecationApplicationError: if the version_string parameter is invalid.
['Validates', 'that', 'version_string', 'is', 'a', 'valid', 'semver', '.']
train
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/deprecated.py#L61-L90
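An illustrative sketch (not part of the dataset row) of the accept/reject behaviour described in the record above: only three-component prerelease versions pass. The import path and the availability of the exception name in that module are assumptions based on the record's file path.

```python
# Hypothetical usage; module path assumed from src/python/pants/base/deprecated.py.
from pants.base.deprecated import validate_deprecation_semver, BadSemanticVersionError

v = validate_deprecation_semver('1.15.0.dev1', 'removal version')  # x.y.z dev release -> ok
try:
    validate_deprecation_semver('1.15', 'removal version')          # not x.y.z -> rejected
except BadSemanticVersionError as exc:
    print(exc)
```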
9,936
crocs-muni/roca
roca/detect.py
RocaFingerprinter.process_ssh
def process_ssh(self, data, name): """ Processes SSH keys :param data: :param name: :return: """ if data is None or len(data) == 0: return ret = [] try: lines = [x.strip() for x in data.split(b'\n')] for idx, line in enumerate(lines): ret.append(self.process_ssh_line(line, name, idx)) except Exception as e: logger.debug('Exception in processing SSH public key %s : %s' % (name, e)) self.trace_logger.log(e) return ret
python
def process_ssh(self, data, name): """ Processes SSH keys :param data: :param name: :return: """ if data is None or len(data) == 0: return ret = [] try: lines = [x.strip() for x in data.split(b'\n')] for idx, line in enumerate(lines): ret.append(self.process_ssh_line(line, name, idx)) except Exception as e: logger.debug('Exception in processing SSH public key %s : %s' % (name, e)) self.trace_logger.log(e) return ret
['def', 'process_ssh', '(', 'self', ',', 'data', ',', 'name', ')', ':', 'if', 'data', 'is', 'None', 'or', 'len', '(', 'data', ')', '==', '0', ':', 'return', 'ret', '=', '[', ']', 'try', ':', 'lines', '=', '[', 'x', '.', 'strip', '(', ')', 'for', 'x', 'in', 'data', '.', 'split', '(', "b'\\n'", ')', ']', 'for', 'idx', ',', 'line', 'in', 'enumerate', '(', 'lines', ')', ':', 'ret', '.', 'append', '(', 'self', '.', 'process_ssh_line', '(', 'line', ',', 'name', ',', 'idx', ')', ')', 'except', 'Exception', 'as', 'e', ':', 'logger', '.', 'debug', '(', "'Exception in processing SSH public key %s : %s'", '%', '(', 'name', ',', 'e', ')', ')', 'self', '.', 'trace_logger', '.', 'log', '(', 'e', ')', 'return', 'ret']
Processes SSH keys :param data: :param name: :return:
['Processes', 'SSH', 'keys', ':', 'param', 'data', ':', ':', 'param', 'name', ':', ':', 'return', ':']
train
https://github.com/crocs-muni/roca/blob/74ad6ce63c428d83dcffce9c5e26ef7b9e30faa5/roca/detect.py#L1605-L1624
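A hedged usage sketch for RocaFingerprinter.process_ssh; it assumes the fingerprinter can be constructed with default arguments, which is not shown in this record.

    from roca.detect import RocaFingerprinter

    fingerprinter = RocaFingerprinter()  # default construction assumed
    with open('authorized_keys', 'rb') as fh:
        data = fh.read()

    # One entry is appended per line of the file; empty or None input returns None.
    results = fingerprinter.process_ssh(data, 'authorized_keys')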
9,937
openvax/varcode
varcode/common.py
groupby_field
def groupby_field(records, field_name, skip_none=True): """ Given a list of objects, group them into a dictionary by the unique values of a given field name. """ return apply_groupby( records, lambda obj: getattr(obj, field_name), skip_none=skip_none)
python
def groupby_field(records, field_name, skip_none=True): """ Given a list of objects, group them into a dictionary by the unique values of a given field name. """ return apply_groupby( records, lambda obj: getattr(obj, field_name), skip_none=skip_none)
['def', 'groupby_field', '(', 'records', ',', 'field_name', ',', 'skip_none', '=', 'True', ')', ':', 'return', 'apply_groupby', '(', 'records', ',', 'lambda', 'obj', ':', 'getattr', '(', 'obj', ',', 'field_name', ')', ',', 'skip_none', '=', 'skip_none', ')']
Given a list of objects, group them into a dictionary by the unique values of a given field name.
['Given', 'a', 'list', 'of', 'objects', 'group', 'them', 'into', 'a', 'dictionary', 'by', 'the', 'unique', 'values', 'of', 'a', 'given', 'field', 'name', '.']
train
https://github.com/openvax/varcode/blob/981633db45ca2b31f76c06894a7360ea5d70a9b8/varcode/common.py#L49-L57
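A short, hedged example of groupby_field; the Record namedtuple is a stand-in for illustration, and apply_groupby is assumed to return a dict mapping each key to the list of matching records.

    from collections import namedtuple
    from varcode.common import groupby_field

    Record = namedtuple('Record', ['gene', 'effect'])
    records = [
        Record('TP53', 'missense'),
        Record('KRAS', 'missense'),
        Record('TP53', 'silent'),
    ]

    by_gene = groupby_field(records, 'gene')
    # Expected shape: {'TP53': [<2 records>], 'KRAS': [<1 record>]}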
9,938
Hypex/hyppy
hyppy/hapi.py
HAPIResponse.parse
def parse(response): """Parse a postdata-style response format from the API into usable data""" """Split a a=1b=2c=3 string into a dictionary of pairs""" tokens = {r[0]: r[1] for r in [r.split('=') for r in response.split("&")]} # The odd dummy parameter is of no use to us if 'dummy' in tokens: del tokens['dummy'] """ If we have key names that end in digits, these indicate the result set contains multiple sets For example, planet0=Hoth&x=1&y=-10&planet1=Naboo&x=9&y=13 is actually data for two planets Elements that end in digits (like tag0, tag1 for planets) are formatted like (tag0_1, tag1_1), so we rstrip underscores afterwards. """ if re.match('\D\d+$', tokens.keys()[0]): # Produce a list of dictionaries set_tokens = [] for key, value in tokens: key = re.match('^(.+\D)(\d+)$', key) # If the key isn't in the format (i.e. a failsafe), skip it if key is not None: if key.group(1) not in set_tokens: set_tokens[key.group(1)] = {} set_tokens[key.group(1)][key.group(0).rstrip('_')] = value tokens = set_tokens return tokens
python
def parse(response): """Parse a postdata-style response format from the API into usable data""" """Split a a=1b=2c=3 string into a dictionary of pairs""" tokens = {r[0]: r[1] for r in [r.split('=') for r in response.split("&")]} # The odd dummy parameter is of no use to us if 'dummy' in tokens: del tokens['dummy'] """ If we have key names that end in digits, these indicate the result set contains multiple sets For example, planet0=Hoth&x=1&y=-10&planet1=Naboo&x=9&y=13 is actually data for two planets Elements that end in digits (like tag0, tag1 for planets) are formatted like (tag0_1, tag1_1), so we rstrip underscores afterwards. """ if re.match('\D\d+$', tokens.keys()[0]): # Produce a list of dictionaries set_tokens = [] for key, value in tokens: key = re.match('^(.+\D)(\d+)$', key) # If the key isn't in the format (i.e. a failsafe), skip it if key is not None: if key.group(1) not in set_tokens: set_tokens[key.group(1)] = {} set_tokens[key.group(1)][key.group(0).rstrip('_')] = value tokens = set_tokens return tokens
['def', 'parse', '(', 'response', ')', ':', '"""Split a a=1b=2c=3 string into a dictionary of pairs"""', 'tokens', '=', '{', 'r', '[', '0', ']', ':', 'r', '[', '1', ']', 'for', 'r', 'in', '[', 'r', '.', 'split', '(', "'='", ')', 'for', 'r', 'in', 'response', '.', 'split', '(', '"&"', ')', ']', '}', '# The odd dummy parameter is of no use to us', 'if', "'dummy'", 'in', 'tokens', ':', 'del', 'tokens', '[', "'dummy'", ']', '"""\n If we have key names that end in digits, these indicate the result set contains multiple sets\n For example, planet0=Hoth&x=1&y=-10&planet1=Naboo&x=9&y=13 is actually data for two planets\n\n Elements that end in digits (like tag0, tag1 for planets) are formatted like (tag0_1, tag1_1), so we rstrip\n underscores afterwards.\n """', 'if', 're', '.', 'match', '(', "'\\D\\d+$'", ',', 'tokens', '.', 'keys', '(', ')', '[', '0', ']', ')', ':', '# Produce a list of dictionaries', 'set_tokens', '=', '[', ']', 'for', 'key', ',', 'value', 'in', 'tokens', ':', 'key', '=', 're', '.', 'match', '(', "'^(.+\\D)(\\d+)$'", ',', 'key', ')', "# If the key isn't in the format (i.e. a failsafe), skip it", 'if', 'key', 'is', 'not', 'None', ':', 'if', 'key', '.', 'group', '(', '1', ')', 'not', 'in', 'set_tokens', ':', 'set_tokens', '[', 'key', '.', 'group', '(', '1', ')', ']', '=', '{', '}', 'set_tokens', '[', 'key', '.', 'group', '(', '1', ')', ']', '[', 'key', '.', 'group', '(', '0', ')', '.', 'rstrip', '(', "'_'", ')', ']', '=', 'value', 'tokens', '=', 'set_tokens', 'return', 'tokens']
Parse a postdata-style response format from the API into usable data
['Parse', 'a', 'postdata', '-', 'style', 'response', 'format', 'from', 'the', 'API', 'into', 'usable', 'data']
train
https://github.com/Hypex/hyppy/blob/a425619c2a102b0e598fd6cac8aa0f6b766f542d/hyppy/hapi.py#L204-L234
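A hedged sketch of HAPIResponse.parse for the simple (non-indexed) case. The body indexes dict.keys(), so this is Python 2-era code; parse is also assumed to be exposed as a static method, and the multi-record branch is not exercised here.

    from hyppy.hapi import HAPIResponse

    # Python 2 code path: tokens.keys()[0] assumes a subscriptable keys() result.
    tokens = HAPIResponse.parse("planet=Hoth&x=1&y=-10&dummy=0")
    # Expected: {'planet': 'Hoth', 'x': '1', 'y': '-10'}  (the 'dummy' key is dropped)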
9,939
ryanvarley/ExoData
exodata/astroclasses.py
_createMagConversionDict
def _createMagConversionDict(): """ loads magnitude_conversion.dat which is table A% 1995ApJS..101..117K """ magnitude_conversion_filepath = resource_stream(__name__, 'data/magnitude_conversion.dat') raw_table = np.loadtxt(magnitude_conversion_filepath, '|S5') magDict = {} for row in raw_table: if sys.hexversion >= 0x03000000: starClass = row[1].decode("utf-8") # otherwise we get byte ints or b' caused by 2to3 tableData = [x.decode("utf-8") for x in row[3:]] else: starClass = row[1] tableData = row[3:] magDict[starClass] = tableData return magDict
python
def _createMagConversionDict(): """ loads magnitude_conversion.dat which is table A% 1995ApJS..101..117K """ magnitude_conversion_filepath = resource_stream(__name__, 'data/magnitude_conversion.dat') raw_table = np.loadtxt(magnitude_conversion_filepath, '|S5') magDict = {} for row in raw_table: if sys.hexversion >= 0x03000000: starClass = row[1].decode("utf-8") # otherwise we get byte ints or b' caused by 2to3 tableData = [x.decode("utf-8") for x in row[3:]] else: starClass = row[1] tableData = row[3:] magDict[starClass] = tableData return magDict
['def', '_createMagConversionDict', '(', ')', ':', 'magnitude_conversion_filepath', '=', 'resource_stream', '(', '__name__', ',', "'data/magnitude_conversion.dat'", ')', 'raw_table', '=', 'np', '.', 'loadtxt', '(', 'magnitude_conversion_filepath', ',', "'|S5'", ')', 'magDict', '=', '{', '}', 'for', 'row', 'in', 'raw_table', ':', 'if', 'sys', '.', 'hexversion', '>=', '0x03000000', ':', 'starClass', '=', 'row', '[', '1', ']', '.', 'decode', '(', '"utf-8"', ')', "# otherwise we get byte ints or b' caused by 2to3", 'tableData', '=', '[', 'x', '.', 'decode', '(', '"utf-8"', ')', 'for', 'x', 'in', 'row', '[', '3', ':', ']', ']', 'else', ':', 'starClass', '=', 'row', '[', '1', ']', 'tableData', '=', 'row', '[', '3', ':', ']', 'magDict', '[', 'starClass', ']', '=', 'tableData', 'return', 'magDict']
loads magnitude_conversion.dat which is table A% 1995ApJS..101..117K
['loads', 'magnitude_conversion', '.', 'dat', 'which', 'is', 'table', 'A%', '1995ApJS', '..', '101', '..', '117K']
train
https://github.com/ryanvarley/ExoData/blob/e0d3652117214d2377a707d6778f93b7eb201a41/exodata/astroclasses.py#L1253-L1269
9,940
spyder-ide/spyder
spyder/plugins/ipythonconsole/widgets/shell.py
ShellWidget.set_backend_for_mayavi
def set_backend_for_mayavi(self, command): """ Mayavi plots require the Qt backend, so we try to detect if one is generated to change backends """ calling_mayavi = False lines = command.splitlines() for l in lines: if not l.startswith('#'): if 'import mayavi' in l or 'from mayavi' in l: calling_mayavi = True break if calling_mayavi: message = _("Changing backend to Qt for Mayavi") self._append_plain_text(message + '\n') self.silent_execute("%gui inline\n%gui qt")
python
def set_backend_for_mayavi(self, command): """ Mayavi plots require the Qt backend, so we try to detect if one is generated to change backends """ calling_mayavi = False lines = command.splitlines() for l in lines: if not l.startswith('#'): if 'import mayavi' in l or 'from mayavi' in l: calling_mayavi = True break if calling_mayavi: message = _("Changing backend to Qt for Mayavi") self._append_plain_text(message + '\n') self.silent_execute("%gui inline\n%gui qt")
['def', 'set_backend_for_mayavi', '(', 'self', ',', 'command', ')', ':', 'calling_mayavi', '=', 'False', 'lines', '=', 'command', '.', 'splitlines', '(', ')', 'for', 'l', 'in', 'lines', ':', 'if', 'not', 'l', '.', 'startswith', '(', "'#'", ')', ':', 'if', "'import mayavi'", 'in', 'l', 'or', "'from mayavi'", 'in', 'l', ':', 'calling_mayavi', '=', 'True', 'break', 'if', 'calling_mayavi', ':', 'message', '=', '_', '(', '"Changing backend to Qt for Mayavi"', ')', 'self', '.', '_append_plain_text', '(', 'message', '+', "'\\n'", ')', 'self', '.', 'silent_execute', '(', '"%gui inline\\n%gui qt"', ')']
Mayavi plots require the Qt backend, so we try to detect if one is generated to change backends
['Mayavi', 'plots', 'require', 'the', 'Qt', 'backend', 'so', 'we', 'try', 'to', 'detect', 'if', 'one', 'is', 'generated', 'to', 'change', 'backends']
train
https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/ipythonconsole/widgets/shell.py#L435-L450
9,941
biosustain/optlang
optlang/gurobi_interface.py
_constraint_lb_and_ub_to_gurobi_sense_rhs_and_range_value
def _constraint_lb_and_ub_to_gurobi_sense_rhs_and_range_value(lb, ub): """Helper function used by Constraint and Model""" if lb is None and ub is None: raise Exception("Free constraint ...") elif lb is None: sense = '<' rhs = float(ub) range_value = 0. elif ub is None: sense = '>' rhs = float(lb) range_value = 0. elif lb == ub: sense = '=' rhs = float(lb) range_value = 0. elif lb > ub: raise ValueError("Lower bound is larger than upper bound.") else: sense = '=' rhs = float(lb) range_value = float(ub - lb) return sense, rhs, range_value
python
def _constraint_lb_and_ub_to_gurobi_sense_rhs_and_range_value(lb, ub): """Helper function used by Constraint and Model""" if lb is None and ub is None: raise Exception("Free constraint ...") elif lb is None: sense = '<' rhs = float(ub) range_value = 0. elif ub is None: sense = '>' rhs = float(lb) range_value = 0. elif lb == ub: sense = '=' rhs = float(lb) range_value = 0. elif lb > ub: raise ValueError("Lower bound is larger than upper bound.") else: sense = '=' rhs = float(lb) range_value = float(ub - lb) return sense, rhs, range_value
['def', '_constraint_lb_and_ub_to_gurobi_sense_rhs_and_range_value', '(', 'lb', ',', 'ub', ')', ':', 'if', 'lb', 'is', 'None', 'and', 'ub', 'is', 'None', ':', 'raise', 'Exception', '(', '"Free constraint ..."', ')', 'elif', 'lb', 'is', 'None', ':', 'sense', '=', "'<'", 'rhs', '=', 'float', '(', 'ub', ')', 'range_value', '=', '0.', 'elif', 'ub', 'is', 'None', ':', 'sense', '=', "'>'", 'rhs', '=', 'float', '(', 'lb', ')', 'range_value', '=', '0.', 'elif', 'lb', '==', 'ub', ':', 'sense', '=', "'='", 'rhs', '=', 'float', '(', 'lb', ')', 'range_value', '=', '0.', 'elif', 'lb', '>', 'ub', ':', 'raise', 'ValueError', '(', '"Lower bound is larger than upper bound."', ')', 'else', ':', 'sense', '=', "'='", 'rhs', '=', 'float', '(', 'lb', ')', 'range_value', '=', 'float', '(', 'ub', '-', 'lb', ')', 'return', 'sense', ',', 'rhs', ',', 'range_value']
Helper function used by Constraint and Model
['Helper', 'function', 'used', 'by', 'Constraint', 'and', 'Model']
train
https://github.com/biosustain/optlang/blob/13673ac26f6b3ba37a2ef392489722c52e3c5ff1/optlang/gurobi_interface.py#L65-L87
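A sketch of this helper's return convention, derived from the branches above; importing optlang.gurobi_interface requires a working Gurobi installation, so treat the import itself as an assumption.

    from optlang.gurobi_interface import (
        _constraint_lb_and_ub_to_gurobi_sense_rhs_and_range_value as to_sense_rhs_range,
    )

    to_sense_rhs_range(None, 10)   # ('<', 10.0, 0.0)  upper bound only
    to_sense_rhs_range(5, None)    # ('>', 5.0, 0.0)   lower bound only
    to_sense_rhs_range(3, 3)       # ('=', 3.0, 0.0)   equality
    to_sense_rhs_range(0, 10)      # ('=', 0.0, 10.0)  ranged constraint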
9,942
eumis/pyviews
pyviews/rendering/modifiers.py
call
def call(node: Node, key: str, value: Any): """Calls node or node instance method""" value = _to_list(value) if not value or not isinstance(value[-1], dict): value.append({}) args = value[0:-1] kwargs = value[-1] node.__dict__[key](*args, **kwargs)
python
def call(node: Node, key: str, value: Any): """Calls node or node instance method""" value = _to_list(value) if not value or not isinstance(value[-1], dict): value.append({}) args = value[0:-1] kwargs = value[-1] node.__dict__[key](*args, **kwargs)
['def', 'call', '(', 'node', ':', 'Node', ',', 'key', ':', 'str', ',', 'value', ':', 'Any', ')', ':', 'value', '=', '_to_list', '(', 'value', ')', 'if', 'not', 'value', 'or', 'not', 'isinstance', '(', 'value', '[', '-', '1', ']', ',', 'dict', ')', ':', 'value', '.', 'append', '(', '{', '}', ')', 'args', '=', 'value', '[', '0', ':', '-', '1', ']', 'kwargs', '=', 'value', '[', '-', '1', ']', 'node', '.', '__dict__', '[', 'key', ']', '(', '*', 'args', ',', '*', '*', 'kwargs', ')']
Calls node or node instance method
['Calls', 'node', 'or', 'node', 'instance', 'method']
train
https://github.com/eumis/pyviews/blob/80a868242ee9cdc6f4ded594b3e0544cc238ed55/pyviews/rendering/modifiers.py#L24-L31
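A standalone illustration of the argument convention this modifier expects; the node object here is a stand-in rather than a real pyviews Node, and _to_list is assumed to pass lists through unchanged.

    from pyviews.rendering.modifiers import call  # import path assumed from the URL above

    class FakeNode:
        def __init__(self):
            # The modifier looks the callable up in the instance __dict__.
            self.greet = lambda *args, **kwargs: print(args, kwargs)

    node = FakeNode()
    # Last list element is a dict -> keyword arguments; the rest are positional.
    call(node, 'greet', ['hello', {'loud': True}])   # prints ('hello',) {'loud': True}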
9,943
RJT1990/pyflux
pyflux/families/t.py
t.second_order_score
def second_order_score(y, mean, scale, shape, skewness): """ GAS t Update term potentially using second-order information - native Python function Parameters ---------- y : float datapoint for the time series mean : float location parameter for the t distribution scale : float scale parameter for the t distribution shape : float tail thickness parameter for the t distribution skewness : float skewness parameter for the t distribution Returns ---------- - Adjusted score of the t family """ return ((shape+1)/shape)*(y-mean)/(np.power(scale,2) + (np.power(y-mean,2)/shape))/((shape+1)*((np.power(scale,2)*shape) - np.power(y-mean,2))/np.power((np.power(scale,2)*shape) + np.power(y-mean,2),2))
python
def second_order_score(y, mean, scale, shape, skewness): """ GAS t Update term potentially using second-order information - native Python function Parameters ---------- y : float datapoint for the time series mean : float location parameter for the t distribution scale : float scale parameter for the t distribution shape : float tail thickness parameter for the t distribution skewness : float skewness parameter for the t distribution Returns ---------- - Adjusted score of the t family """ return ((shape+1)/shape)*(y-mean)/(np.power(scale,2) + (np.power(y-mean,2)/shape))/((shape+1)*((np.power(scale,2)*shape) - np.power(y-mean,2))/np.power((np.power(scale,2)*shape) + np.power(y-mean,2),2))
['def', 'second_order_score', '(', 'y', ',', 'mean', ',', 'scale', ',', 'shape', ',', 'skewness', ')', ':', 'return', '(', '(', 'shape', '+', '1', ')', '/', 'shape', ')', '*', '(', 'y', '-', 'mean', ')', '/', '(', 'np', '.', 'power', '(', 'scale', ',', '2', ')', '+', '(', 'np', '.', 'power', '(', 'y', '-', 'mean', ',', '2', ')', '/', 'shape', ')', ')', '/', '(', '(', 'shape', '+', '1', ')', '*', '(', '(', 'np', '.', 'power', '(', 'scale', ',', '2', ')', '*', 'shape', ')', '-', 'np', '.', 'power', '(', 'y', '-', 'mean', ',', '2', ')', ')', '/', 'np', '.', 'power', '(', '(', 'np', '.', 'power', '(', 'scale', ',', '2', ')', '*', 'shape', ')', '+', 'np', '.', 'power', '(', 'y', '-', 'mean', ',', '2', ')', ',', '2', ')', ')']
GAS t Update term potentially using second-order information - native Python function Parameters ---------- y : float datapoint for the time series mean : float location parameter for the t distribution scale : float scale parameter for the t distribution shape : float tail thickness parameter for the t distribution skewness : float skewness parameter for the t distribution Returns ---------- - Adjusted score of the t family
['GAS', 't', 'Update', 'term', 'potentially', 'using', 'second', '-', 'order', 'information', '-', 'native', 'Python', 'function']
train
https://github.com/RJT1990/pyflux/blob/297f2afc2095acd97c12e827dd500e8ea5da0c0f/pyflux/families/t.py#L330-L354
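A small numeric sketch of the adjusted score (pyflux and numpy assumed installed; the call goes through the class, which assumes a static method or Python 3). Note that skewness is accepted by the signature but does not enter this particular score.

    from pyflux.families.t import t

    # Adjusted (second-order) GAS update term for one observation.
    adj = t.second_order_score(y=1.5, mean=1.0, scale=0.8, shape=4.0, skewness=1.0)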
9,944
ecordell/pymacaroons
pymacaroons/serializers/json_serializer.py
JsonSerializer._deserialize_v1
def _deserialize_v1(self, deserialized): '''Deserialize a JSON macaroon in v1 format. @param serialized the macaroon in v1 JSON format. @return the macaroon object. ''' from pymacaroons.macaroon import Macaroon, MACAROON_V1 from pymacaroons.caveat import Caveat caveats = [] for c in deserialized.get('caveats', []): caveat = Caveat( caveat_id=c['cid'], verification_key_id=( utils.raw_b64decode(c['vid']) if c.get('vid') else None ), location=( c['cl'] if c.get('cl') else None ), version=MACAROON_V1 ) caveats.append(caveat) return Macaroon( location=deserialized.get('location'), identifier=deserialized['identifier'], caveats=caveats, signature=deserialized['signature'], version=MACAROON_V1 )
python
def _deserialize_v1(self, deserialized): '''Deserialize a JSON macaroon in v1 format. @param serialized the macaroon in v1 JSON format. @return the macaroon object. ''' from pymacaroons.macaroon import Macaroon, MACAROON_V1 from pymacaroons.caveat import Caveat caveats = [] for c in deserialized.get('caveats', []): caveat = Caveat( caveat_id=c['cid'], verification_key_id=( utils.raw_b64decode(c['vid']) if c.get('vid') else None ), location=( c['cl'] if c.get('cl') else None ), version=MACAROON_V1 ) caveats.append(caveat) return Macaroon( location=deserialized.get('location'), identifier=deserialized['identifier'], caveats=caveats, signature=deserialized['signature'], version=MACAROON_V1 )
['def', '_deserialize_v1', '(', 'self', ',', 'deserialized', ')', ':', 'from', 'pymacaroons', '.', 'macaroon', 'import', 'Macaroon', ',', 'MACAROON_V1', 'from', 'pymacaroons', '.', 'caveat', 'import', 'Caveat', 'caveats', '=', '[', ']', 'for', 'c', 'in', 'deserialized', '.', 'get', '(', "'caveats'", ',', '[', ']', ')', ':', 'caveat', '=', 'Caveat', '(', 'caveat_id', '=', 'c', '[', "'cid'", ']', ',', 'verification_key_id', '=', '(', 'utils', '.', 'raw_b64decode', '(', 'c', '[', "'vid'", ']', ')', 'if', 'c', '.', 'get', '(', "'vid'", ')', 'else', 'None', ')', ',', 'location', '=', '(', 'c', '[', "'cl'", ']', 'if', 'c', '.', 'get', '(', "'cl'", ')', 'else', 'None', ')', ',', 'version', '=', 'MACAROON_V1', ')', 'caveats', '.', 'append', '(', 'caveat', ')', 'return', 'Macaroon', '(', 'location', '=', 'deserialized', '.', 'get', '(', "'location'", ')', ',', 'identifier', '=', 'deserialized', '[', "'identifier'", ']', ',', 'caveats', '=', 'caveats', ',', 'signature', '=', 'deserialized', '[', "'signature'", ']', ',', 'version', '=', 'MACAROON_V1', ')']
Deserialize a JSON macaroon in v1 format. @param serialized the macaroon in v1 JSON format. @return the macaroon object.
['Deserialize', 'a', 'JSON', 'macaroon', 'in', 'v1', 'format', '.']
train
https://github.com/ecordell/pymacaroons/blob/c941614df15fe732ea432a62788e45410bcb868d/pymacaroons/serializers/json_serializer.py#L69-L99
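A hedged sketch of the v1 JSON shape this private helper expects; the field values are placeholders, not a cryptographically valid macaroon.

    from pymacaroons.serializers.json_serializer import JsonSerializer

    serialized = {
        'location': 'http://mybank/',
        'identifier': 'we used our secret key',
        'caveats': [{'cid': 'account = 3735928559'}],   # first-party caveat (no vid/cl)
        'signature': 'e3d9e02908526c4c0039ae15114115d97fdd68bf2ba379b342aaf0f617d0552f',
    }
    macaroon = JsonSerializer()._deserialize_v1(serialized)
    print(macaroon.identifier, len(macaroon.caveats))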
9,945
ladybug-tools/ladybug
ladybug/euclid.py
Vector2.angle
def angle(self, other): """Return the angle to the vector other""" return math.acos(self.dot(other) / (self.magnitude() * other.magnitude()))
python
def angle(self, other): """Return the angle to the vector other""" return math.acos(self.dot(other) / (self.magnitude() * other.magnitude()))
['def', 'angle', '(', 'self', ',', 'other', ')', ':', 'return', 'math', '.', 'acos', '(', 'self', '.', 'dot', '(', 'other', ')', '/', '(', 'self', '.', 'magnitude', '(', ')', '*', 'other', '.', 'magnitude', '(', ')', ')', ')']
Return the angle to the vector other
['Return', 'the', 'angle', 'to', 'the', 'vector', 'other']
train
https://github.com/ladybug-tools/ladybug/blob/c08b7308077a48d5612f644943f92d5b5dade583/ladybug/euclid.py#L298-L300
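A sketch of the angle computation (ladybug assumed installed); the result is in radians.

    import math
    from ladybug.euclid import Vector2

    right_angle = Vector2(1, 0).angle(Vector2(0, 1))
    assert math.isclose(right_angle, math.pi / 2)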
9,946
xgfs/NetLSD
netlsd/util.py
mat_to_laplacian
def mat_to_laplacian(mat, normalized): """ Converts a sparse or dence adjacency matrix to Laplacian. Parameters ---------- mat : obj Input adjacency matrix. If it is a Laplacian matrix already, return it. normalized : bool Whether to use normalized Laplacian. Normalized and unnormalized Laplacians capture different properties of graphs, e.g. normalized Laplacian spectrum can determine whether a graph is bipartite, but not the number of its edges. We recommend using normalized Laplacian. Returns ------- obj Laplacian of the input adjacency matrix Examples -------- >>> mat_to_laplacian(numpy.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]]), False) [[ 2, -1, -1], [-1, 2, -1], [-1, -1, 2]] """ if sps.issparse(mat): if np.all(mat.diagonal()>=0): # Check diagonal if np.all((mat-sps.diags(mat.diagonal())).data <= 0): # Check off-diagonal elements return mat else: if np.all(np.diag(mat)>=0): # Check diagonal if np.all(mat - np.diag(mat) <= 0): # Check off-diagonal elements return mat deg = np.squeeze(np.asarray(mat.sum(axis=1))) if sps.issparse(mat): L = sps.diags(deg) - mat else: L = np.diag(deg) - mat if not normalized: return L with np.errstate(divide='ignore'): sqrt_deg = 1.0 / np.sqrt(deg) sqrt_deg[sqrt_deg==np.inf] = 0 if sps.issparse(mat): sqrt_deg_mat = sps.diags(sqrt_deg) else: sqrt_deg_mat = np.diag(sqrt_deg) return sqrt_deg_mat.dot(L).dot(sqrt_deg_mat)
python
def mat_to_laplacian(mat, normalized): """ Converts a sparse or dence adjacency matrix to Laplacian. Parameters ---------- mat : obj Input adjacency matrix. If it is a Laplacian matrix already, return it. normalized : bool Whether to use normalized Laplacian. Normalized and unnormalized Laplacians capture different properties of graphs, e.g. normalized Laplacian spectrum can determine whether a graph is bipartite, but not the number of its edges. We recommend using normalized Laplacian. Returns ------- obj Laplacian of the input adjacency matrix Examples -------- >>> mat_to_laplacian(numpy.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]]), False) [[ 2, -1, -1], [-1, 2, -1], [-1, -1, 2]] """ if sps.issparse(mat): if np.all(mat.diagonal()>=0): # Check diagonal if np.all((mat-sps.diags(mat.diagonal())).data <= 0): # Check off-diagonal elements return mat else: if np.all(np.diag(mat)>=0): # Check diagonal if np.all(mat - np.diag(mat) <= 0): # Check off-diagonal elements return mat deg = np.squeeze(np.asarray(mat.sum(axis=1))) if sps.issparse(mat): L = sps.diags(deg) - mat else: L = np.diag(deg) - mat if not normalized: return L with np.errstate(divide='ignore'): sqrt_deg = 1.0 / np.sqrt(deg) sqrt_deg[sqrt_deg==np.inf] = 0 if sps.issparse(mat): sqrt_deg_mat = sps.diags(sqrt_deg) else: sqrt_deg_mat = np.diag(sqrt_deg) return sqrt_deg_mat.dot(L).dot(sqrt_deg_mat)
['def', 'mat_to_laplacian', '(', 'mat', ',', 'normalized', ')', ':', 'if', 'sps', '.', 'issparse', '(', 'mat', ')', ':', 'if', 'np', '.', 'all', '(', 'mat', '.', 'diagonal', '(', ')', '>=', '0', ')', ':', '# Check diagonal', 'if', 'np', '.', 'all', '(', '(', 'mat', '-', 'sps', '.', 'diags', '(', 'mat', '.', 'diagonal', '(', ')', ')', ')', '.', 'data', '<=', '0', ')', ':', '# Check off-diagonal elements', 'return', 'mat', 'else', ':', 'if', 'np', '.', 'all', '(', 'np', '.', 'diag', '(', 'mat', ')', '>=', '0', ')', ':', '# Check diagonal', 'if', 'np', '.', 'all', '(', 'mat', '-', 'np', '.', 'diag', '(', 'mat', ')', '<=', '0', ')', ':', '# Check off-diagonal elements', 'return', 'mat', 'deg', '=', 'np', '.', 'squeeze', '(', 'np', '.', 'asarray', '(', 'mat', '.', 'sum', '(', 'axis', '=', '1', ')', ')', ')', 'if', 'sps', '.', 'issparse', '(', 'mat', ')', ':', 'L', '=', 'sps', '.', 'diags', '(', 'deg', ')', '-', 'mat', 'else', ':', 'L', '=', 'np', '.', 'diag', '(', 'deg', ')', '-', 'mat', 'if', 'not', 'normalized', ':', 'return', 'L', 'with', 'np', '.', 'errstate', '(', 'divide', '=', "'ignore'", ')', ':', 'sqrt_deg', '=', '1.0', '/', 'np', '.', 'sqrt', '(', 'deg', ')', 'sqrt_deg', '[', 'sqrt_deg', '==', 'np', '.', 'inf', ']', '=', '0', 'if', 'sps', '.', 'issparse', '(', 'mat', ')', ':', 'sqrt_deg_mat', '=', 'sps', '.', 'diags', '(', 'sqrt_deg', ')', 'else', ':', 'sqrt_deg_mat', '=', 'np', '.', 'diag', '(', 'sqrt_deg', ')', 'return', 'sqrt_deg_mat', '.', 'dot', '(', 'L', ')', '.', 'dot', '(', 'sqrt_deg_mat', ')']
Converts a sparse or dence adjacency matrix to Laplacian. Parameters ---------- mat : obj Input adjacency matrix. If it is a Laplacian matrix already, return it. normalized : bool Whether to use normalized Laplacian. Normalized and unnormalized Laplacians capture different properties of graphs, e.g. normalized Laplacian spectrum can determine whether a graph is bipartite, but not the number of its edges. We recommend using normalized Laplacian. Returns ------- obj Laplacian of the input adjacency matrix Examples -------- >>> mat_to_laplacian(numpy.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]]), False) [[ 2, -1, -1], [-1, 2, -1], [-1, -1, 2]]
['Converts', 'a', 'sparse', 'or', 'dence', 'adjacency', 'matrix', 'to', 'Laplacian', '.', 'Parameters', '----------', 'mat', ':', 'obj', 'Input', 'adjacency', 'matrix', '.', 'If', 'it', 'is', 'a', 'Laplacian', 'matrix', 'already', 'return', 'it', '.', 'normalized', ':', 'bool', 'Whether', 'to', 'use', 'normalized', 'Laplacian', '.', 'Normalized', 'and', 'unnormalized', 'Laplacians', 'capture', 'different', 'properties', 'of', 'graphs', 'e', '.', 'g', '.', 'normalized', 'Laplacian', 'spectrum', 'can', 'determine', 'whether', 'a', 'graph', 'is', 'bipartite', 'but', 'not', 'the', 'number', 'of', 'its', 'edges', '.', 'We', 'recommend', 'using', 'normalized', 'Laplacian', '.']
train
https://github.com/xgfs/NetLSD/blob/54820b3669a94852bd9653be23b09e126e901ab3/netlsd/util.py#L129-L174
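A sketch mirroring the docstring example, plus the normalized variant (numpy and netlsd assumed installed).

    import numpy as np
    from netlsd.util import mat_to_laplacian

    adjacency = np.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]])
    laplacian = mat_to_laplacian(adjacency, normalized=False)
    # [[ 2, -1, -1], [-1, 2, -1], [-1, -1, 2]]
    norm_laplacian = mat_to_laplacian(adjacency, normalized=True)  # D^-1/2 (D - A) D^-1/2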
9,947
scanny/python-pptx
pptx/text/layout.py
TextFitter._break_line
def _break_line(self, line_source, point_size): """ Return a (line, remainder) pair where *line* is the longest line in *line_source* that will fit in this fitter's width and *remainder* is a |_LineSource| object containing the text following the break point. """ lines = _BinarySearchTree.from_ordered_sequence(line_source) predicate = self._fits_in_width_predicate(point_size) return lines.find_max(predicate)
python
def _break_line(self, line_source, point_size): """ Return a (line, remainder) pair where *line* is the longest line in *line_source* that will fit in this fitter's width and *remainder* is a |_LineSource| object containing the text following the break point. """ lines = _BinarySearchTree.from_ordered_sequence(line_source) predicate = self._fits_in_width_predicate(point_size) return lines.find_max(predicate)
['def', '_break_line', '(', 'self', ',', 'line_source', ',', 'point_size', ')', ':', 'lines', '=', '_BinarySearchTree', '.', 'from_ordered_sequence', '(', 'line_source', ')', 'predicate', '=', 'self', '.', '_fits_in_width_predicate', '(', 'point_size', ')', 'return', 'lines', '.', 'find_max', '(', 'predicate', ')']
Return a (line, remainder) pair where *line* is the longest line in *line_source* that will fit in this fitter's width and *remainder* is a |_LineSource| object containing the text following the break point.
['Return', 'a', '(', 'line', 'remainder', ')', 'pair', 'where', '*', 'line', '*', 'is', 'the', 'longest', 'line', 'in', '*', 'line_source', '*', 'that', 'will', 'fit', 'in', 'this', 'fitter', 's', 'width', 'and', '*', 'remainder', '*', 'is', 'a', '|_LineSource|', 'object', 'containing', 'the', 'text', 'following', 'the', 'break', 'point', '.']
train
https://github.com/scanny/python-pptx/blob/d6ab8234f8b03953d2f831ff9394b1852db34130/pptx/text/layout.py#L42-L50
9,948
Stranger6667/postmarker
postmarker/models/stats.py
StatsManager.emailclients
def emailclients(self, tag=None, fromdate=None, todate=None): """ Gets an overview of the email clients used to open your emails. This is only recorded when open tracking is enabled for that email. """ return self.call("GET", "/stats/outbound/opens/emailclients", tag=tag, fromdate=fromdate, todate=todate)
python
def emailclients(self, tag=None, fromdate=None, todate=None): """ Gets an overview of the email clients used to open your emails. This is only recorded when open tracking is enabled for that email. """ return self.call("GET", "/stats/outbound/opens/emailclients", tag=tag, fromdate=fromdate, todate=todate)
['def', 'emailclients', '(', 'self', ',', 'tag', '=', 'None', ',', 'fromdate', '=', 'None', ',', 'todate', '=', 'None', ')', ':', 'return', 'self', '.', 'call', '(', '"GET"', ',', '"/stats/outbound/opens/emailclients"', ',', 'tag', '=', 'tag', ',', 'fromdate', '=', 'fromdate', ',', 'todate', '=', 'todate', ')']
Gets an overview of the email clients used to open your emails. This is only recorded when open tracking is enabled for that email.
['Gets', 'an', 'overview', 'of', 'the', 'email', 'clients', 'used', 'to', 'open', 'your', 'emails', '.', 'This', 'is', 'only', 'recorded', 'when', 'open', 'tracking', 'is', 'enabled', 'for', 'that', 'email', '.']
train
https://github.com/Stranger6667/postmarker/blob/013224ab1761e95c488c7d2701e6fa83f3108d94/postmarker/models/stats.py#L52-L57
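A usage sketch via the top-level Postmark client, which is the usual way to reach the stats manager; the server token and date range are placeholders.

    from postmarker.core import PostmarkClient

    client = PostmarkClient(server_token='SERVER_API_TOKEN')
    overview = client.stats.emailclients(
        tag='welcome-email',
        fromdate='2019-01-01',
        todate='2019-01-31',
    )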
9,949
jciskey/pygraph
pygraph/functions/planarity/kocay_algorithm.py
__insert_frond_LF
def __insert_frond_LF(d_w, d_u, dfs_data): """Encapsulates the process of inserting a frond uw into the left side frond group.""" # --Add the frond to the left side dfs_data['LF'].append( (d_w, d_u) ) dfs_data['FG']['l'] += 1 dfs_data['last_inserted_side'] = 'LF'
python
def __insert_frond_LF(d_w, d_u, dfs_data): """Encapsulates the process of inserting a frond uw into the left side frond group.""" # --Add the frond to the left side dfs_data['LF'].append( (d_w, d_u) ) dfs_data['FG']['l'] += 1 dfs_data['last_inserted_side'] = 'LF'
['def', '__insert_frond_LF', '(', 'd_w', ',', 'd_u', ',', 'dfs_data', ')', ':', '# --Add the frond to the left side', 'dfs_data', '[', "'LF'", ']', '.', 'append', '(', '(', 'd_w', ',', 'd_u', ')', ')', 'dfs_data', '[', "'FG'", ']', '[', "'l'", ']', '+=', '1', 'dfs_data', '[', "'last_inserted_side'", ']', '=', "'LF'"]
Encapsulates the process of inserting a frond uw into the left side frond group.
['Encapsulates', 'the', 'process', 'of', 'inserting', 'a', 'frond', 'uw', 'into', 'the', 'left', 'side', 'frond', 'group', '.']
train
https://github.com/jciskey/pygraph/blob/037bb2f32503fecb60d62921f9766d54109f15e2/pygraph/functions/planarity/kocay_algorithm.py#L481-L487
9,950
quantmind/pulsar
pulsar/apps/http/stream.py
HttpStream.read
async def read(self, n=None): """Read all content """ if self._streamed: return b'' buffer = [] async for body in self: buffer.append(body) return b''.join(buffer)
python
async def read(self, n=None): """Read all content """ if self._streamed: return b'' buffer = [] async for body in self: buffer.append(body) return b''.join(buffer)
['async', 'def', 'read', '(', 'self', ',', 'n', '=', 'None', ')', ':', 'if', 'self', '.', '_streamed', ':', 'return', "b''", 'buffer', '=', '[', ']', 'async', 'for', 'body', 'in', 'self', ':', 'buffer', '.', 'append', '(', 'body', ')', 'return', "b''", '.', 'join', '(', 'buffer', ')']
Read all content
['Read', 'all', 'content']
train
https://github.com/quantmind/pulsar/blob/fee44e871954aa6ca36d00bb5a3739abfdb89b26/pulsar/apps/http/stream.py#L27-L35
9,951
pkgw/pwkit
pwkit/environments/casa/dftphotom.py
dftphotom
def dftphotom(cfg): """Run the discrete-Fourier-transform photometry algorithm. See the module-level documentation and the output of ``casatask dftphotom --help`` for help. All of the algorithm configuration is specified in the *cfg* argument, which is an instance of :class:`Config`. """ tb = util.tools.table() ms = util.tools.ms() me = util.tools.measures() # Read stuff in. Even if the weight values don't have their # absolute scale set correctly, we can still use them to set the # relative weighting of the data points. # # datacol is (ncorr, nchan, nchunk) # flag is (ncorr, nchan, nchunk) # weight is (ncorr, nchunk) # uvw is (3, nchunk) # time is (nchunk) # axis_info.corr_axis is (ncorr) # axis_info.freq_axis.chan_freq is (nchan, 1) [for now?] # # Note that we apply msselect() again when reading the data because # selectinit() is broken, but the invocation here is good because it # affects the results from ms.range() and friends. if ':' in (cfg.spw or ''): warn('it looks like you are attempting to select channels within one or more spws') warn('this is NOT IMPLEMENTED; I will average over the whole spw instead') ms.open(b(cfg.vis)) totrows = ms.nrow() ms_sels = dict((n, cfg.get(n)) for n in util.msselect_keys if cfg.get(n) is not None) ms.msselect(b(ms_sels)) rangeinfo = ms.range(b'data_desc_id field_id'.split()) ddids = rangeinfo['data_desc_id'] fields = rangeinfo['field_id'] colnames = [cfg.datacol] + 'flag weight time axis_info'.split() rephase = (cfg.rephase is not None) if fields.size != 1: # I feel comfortable making this a fatal error, even if we're # not rephasing. die('selected data should contain precisely one field; got %d', fields.size) if rephase: fieldid = fields[0] tb.open(b(os.path.join(cfg.vis, 'FIELD'))) phdirinfo = tb.getcell(b'PHASE_DIR', fieldid) tb.close() if phdirinfo.shape[1] != 1: die('trying to rephase but target field (#%d) has a ' 'time-variable phase center, which I can\'t handle', fieldid) ra0, dec0 = phdirinfo[:,0] # in radians. # based on intflib/pwflux.py, which was copied from # hex/hex-lib-calcgainerr: dra = cfg.rephase[0] - ra0 dec = cfg.rephase[1] l = np.sin(dra) * np.cos(dec) m = np.sin(dec) * np.cos(dec0) - np.cos(dra) * np.cos(dec) * np.sin(dec0) n = np.sin(dec) * np.sin(dec0) + np.cos(dra) * np.cos(dec) * np.cos(dec0) n -= 1 # makes the work below easier lmn = np.asarray([l, m, n]) colnames.append('uvw') # Also need this although 99% of the time `ddid` and `spwid` are the same tb.open(b(os.path.join(cfg.vis, 'DATA_DESCRIPTION'))) ddspws = np.asarray(tb.getcol(b'SPECTRAL_WINDOW_ID')) tb.close() tbins = {} colnames = b(colnames) for ddindex, ddid in enumerate(ddids): # Starting in CASA 4.6, selectinit(ddid) stopped actually filtering # your data to match the specified DDID! What garbage. Work around # with our own filtering. ms_sels['taql'] = 'DATA_DESC_ID == %d' % ddid ms.msselect(b(ms_sels)) ms.selectinit(ddid) if cfg.polarization is not None: ms.selectpolarization(b(cfg.polarization.split(','))) ms.iterinit(maxrows=4096) ms.iterorigin() while True: cols = ms.getdata(items=colnames) if rephase: # With appropriate spw/DDID selection, `freqs` has shape # (nchan, 1). Convert to m^-1 so we can multiply against UVW # directly. freqs = cols['axis_info']['freq_axis']['chan_freq'] assert freqs.shape[1] == 1, 'internal inconsistency, chan_freq??' freqs = freqs[:,0] * util.INVERSE_C_MS for i in range(cols['time'].size): # all records time = cols['time'][i] # get out of UTC as fast as we can! 
For some reason # giving 'unit=s' below doesn't do what one might hope it would. # CASA can convert to a variety of timescales; TAI is probably # the safest conversion in terms of being helpful while remaining # close to the fundamental data, but TT is possible and should # be perfectly precise for standard applications. mq = me.epoch(b'utc', b({'value': time / 86400., 'unit': 'd'})) mjdtt = me.measure(b(mq), b'tt')['m0']['value'] tdata = tbins.get(mjdtt, None) if tdata is None: tdata = tbins[mjdtt] = [0., 0., 0., 0., 0] if rephase: uvw = cols['uvw'][:,i] ph = np.exp((0-2j) * np.pi * np.dot(lmn, uvw) * freqs) for j in range(cols['flag'].shape[0]): # all polns # We just average together all polarizations right now! # (Not actively, but passively by just iterating over them.) data = cols[cfg.datacol][j,:,i] flags = cols['flag'][j,:,i] # XXXXX casacore is currently (ca. 2012) broken and # returns the raw weights from the dataset rather than # applying the polarization selection. Fortunately all of # our weights are the same, and you can never fetch more # pol types than the dataset has, so this bit works # despite the bug. w = np.where(~flags)[0] if not w.size: continue # all flagged if rephase: data *= ph d = data[w].mean() # account for flagged parts. 90% sure this is the # right thing to do: wt = cols['weight'][j,i] * w.size / data.size wd = wt * d # note a little bit of a hack here to encode real^2 and # imag^2 separately: wd2 = wt * (d.real**2 + (1j) * d.imag**2) tdata[0] += wd tdata[1] += wd2 tdata[2] += wt tdata[3] += wt**2 tdata[4] += 1 if not ms.iternext(): break ms.reset() # reset selection filter so we can get next DDID ms.close() # Could gain some efficiency by using a better data structure than a dict(). smjd = sorted(six.iterkeys(tbins)) cfg.format.header(cfg) for mjd in smjd: wd, wd2, wt, wt2, n = tbins[mjd] if n < 3: # not enough data for meaningful statistics continue dtmin = 1440 * (mjd - smjd[0]) r_sc = wd.real / wt * cfg.datascale i_sc = wd.imag / wt * cfg.datascale r2_sc = wd2.real / wt * cfg.datascale**2 i2_sc = wd2.imag / wt * cfg.datascale**2 if cfg.believeweights: ru_sc = wt**-0.5 * cfg.datascale iu_sc = wt**-0.5 * cfg.datascale else: rv_sc = r2_sc - r_sc**2 # variance among real/imag msmts iv_sc = i2_sc - i_sc**2 ru_sc = np.sqrt(rv_sc * wt2) / wt # uncert in mean real/img values iu_sc = np.sqrt(iv_sc * wt2) / wt mag = np.sqrt(r_sc**2 + i_sc**2) umag = np.sqrt(r_sc**2 * ru_sc**2 + i_sc**2 * iu_sc**2) / mag cfg.format.row(cfg, mjd, dtmin, r_sc, ru_sc, i_sc, iu_sc, mag, umag, n)
python
def dftphotom(cfg): """Run the discrete-Fourier-transform photometry algorithm. See the module-level documentation and the output of ``casatask dftphotom --help`` for help. All of the algorithm configuration is specified in the *cfg* argument, which is an instance of :class:`Config`. """ tb = util.tools.table() ms = util.tools.ms() me = util.tools.measures() # Read stuff in. Even if the weight values don't have their # absolute scale set correctly, we can still use them to set the # relative weighting of the data points. # # datacol is (ncorr, nchan, nchunk) # flag is (ncorr, nchan, nchunk) # weight is (ncorr, nchunk) # uvw is (3, nchunk) # time is (nchunk) # axis_info.corr_axis is (ncorr) # axis_info.freq_axis.chan_freq is (nchan, 1) [for now?] # # Note that we apply msselect() again when reading the data because # selectinit() is broken, but the invocation here is good because it # affects the results from ms.range() and friends. if ':' in (cfg.spw or ''): warn('it looks like you are attempting to select channels within one or more spws') warn('this is NOT IMPLEMENTED; I will average over the whole spw instead') ms.open(b(cfg.vis)) totrows = ms.nrow() ms_sels = dict((n, cfg.get(n)) for n in util.msselect_keys if cfg.get(n) is not None) ms.msselect(b(ms_sels)) rangeinfo = ms.range(b'data_desc_id field_id'.split()) ddids = rangeinfo['data_desc_id'] fields = rangeinfo['field_id'] colnames = [cfg.datacol] + 'flag weight time axis_info'.split() rephase = (cfg.rephase is not None) if fields.size != 1: # I feel comfortable making this a fatal error, even if we're # not rephasing. die('selected data should contain precisely one field; got %d', fields.size) if rephase: fieldid = fields[0] tb.open(b(os.path.join(cfg.vis, 'FIELD'))) phdirinfo = tb.getcell(b'PHASE_DIR', fieldid) tb.close() if phdirinfo.shape[1] != 1: die('trying to rephase but target field (#%d) has a ' 'time-variable phase center, which I can\'t handle', fieldid) ra0, dec0 = phdirinfo[:,0] # in radians. # based on intflib/pwflux.py, which was copied from # hex/hex-lib-calcgainerr: dra = cfg.rephase[0] - ra0 dec = cfg.rephase[1] l = np.sin(dra) * np.cos(dec) m = np.sin(dec) * np.cos(dec0) - np.cos(dra) * np.cos(dec) * np.sin(dec0) n = np.sin(dec) * np.sin(dec0) + np.cos(dra) * np.cos(dec) * np.cos(dec0) n -= 1 # makes the work below easier lmn = np.asarray([l, m, n]) colnames.append('uvw') # Also need this although 99% of the time `ddid` and `spwid` are the same tb.open(b(os.path.join(cfg.vis, 'DATA_DESCRIPTION'))) ddspws = np.asarray(tb.getcol(b'SPECTRAL_WINDOW_ID')) tb.close() tbins = {} colnames = b(colnames) for ddindex, ddid in enumerate(ddids): # Starting in CASA 4.6, selectinit(ddid) stopped actually filtering # your data to match the specified DDID! What garbage. Work around # with our own filtering. ms_sels['taql'] = 'DATA_DESC_ID == %d' % ddid ms.msselect(b(ms_sels)) ms.selectinit(ddid) if cfg.polarization is not None: ms.selectpolarization(b(cfg.polarization.split(','))) ms.iterinit(maxrows=4096) ms.iterorigin() while True: cols = ms.getdata(items=colnames) if rephase: # With appropriate spw/DDID selection, `freqs` has shape # (nchan, 1). Convert to m^-1 so we can multiply against UVW # directly. freqs = cols['axis_info']['freq_axis']['chan_freq'] assert freqs.shape[1] == 1, 'internal inconsistency, chan_freq??' freqs = freqs[:,0] * util.INVERSE_C_MS for i in range(cols['time'].size): # all records time = cols['time'][i] # get out of UTC as fast as we can! 
For some reason # giving 'unit=s' below doesn't do what one might hope it would. # CASA can convert to a variety of timescales; TAI is probably # the safest conversion in terms of being helpful while remaining # close to the fundamental data, but TT is possible and should # be perfectly precise for standard applications. mq = me.epoch(b'utc', b({'value': time / 86400., 'unit': 'd'})) mjdtt = me.measure(b(mq), b'tt')['m0']['value'] tdata = tbins.get(mjdtt, None) if tdata is None: tdata = tbins[mjdtt] = [0., 0., 0., 0., 0] if rephase: uvw = cols['uvw'][:,i] ph = np.exp((0-2j) * np.pi * np.dot(lmn, uvw) * freqs) for j in range(cols['flag'].shape[0]): # all polns # We just average together all polarizations right now! # (Not actively, but passively by just iterating over them.) data = cols[cfg.datacol][j,:,i] flags = cols['flag'][j,:,i] # XXXXX casacore is currently (ca. 2012) broken and # returns the raw weights from the dataset rather than # applying the polarization selection. Fortunately all of # our weights are the same, and you can never fetch more # pol types than the dataset has, so this bit works # despite the bug. w = np.where(~flags)[0] if not w.size: continue # all flagged if rephase: data *= ph d = data[w].mean() # account for flagged parts. 90% sure this is the # right thing to do: wt = cols['weight'][j,i] * w.size / data.size wd = wt * d # note a little bit of a hack here to encode real^2 and # imag^2 separately: wd2 = wt * (d.real**2 + (1j) * d.imag**2) tdata[0] += wd tdata[1] += wd2 tdata[2] += wt tdata[3] += wt**2 tdata[4] += 1 if not ms.iternext(): break ms.reset() # reset selection filter so we can get next DDID ms.close() # Could gain some efficiency by using a better data structure than a dict(). smjd = sorted(six.iterkeys(tbins)) cfg.format.header(cfg) for mjd in smjd: wd, wd2, wt, wt2, n = tbins[mjd] if n < 3: # not enough data for meaningful statistics continue dtmin = 1440 * (mjd - smjd[0]) r_sc = wd.real / wt * cfg.datascale i_sc = wd.imag / wt * cfg.datascale r2_sc = wd2.real / wt * cfg.datascale**2 i2_sc = wd2.imag / wt * cfg.datascale**2 if cfg.believeweights: ru_sc = wt**-0.5 * cfg.datascale iu_sc = wt**-0.5 * cfg.datascale else: rv_sc = r2_sc - r_sc**2 # variance among real/imag msmts iv_sc = i2_sc - i_sc**2 ru_sc = np.sqrt(rv_sc * wt2) / wt # uncert in mean real/img values iu_sc = np.sqrt(iv_sc * wt2) / wt mag = np.sqrt(r_sc**2 + i_sc**2) umag = np.sqrt(r_sc**2 * ru_sc**2 + i_sc**2 * iu_sc**2) / mag cfg.format.row(cfg, mjd, dtmin, r_sc, ru_sc, i_sc, iu_sc, mag, umag, n)
['def', 'dftphotom', '(', 'cfg', ')', ':', 'tb', '=', 'util', '.', 'tools', '.', 'table', '(', ')', 'ms', '=', 'util', '.', 'tools', '.', 'ms', '(', ')', 'me', '=', 'util', '.', 'tools', '.', 'measures', '(', ')', "# Read stuff in. Even if the weight values don't have their", '# absolute scale set correctly, we can still use them to set the', '# relative weighting of the data points.', '#', '# datacol is (ncorr, nchan, nchunk)', '# flag is (ncorr, nchan, nchunk)', '# weight is (ncorr, nchunk)', '# uvw is (3, nchunk)', '# time is (nchunk)', '# axis_info.corr_axis is (ncorr)', '# axis_info.freq_axis.chan_freq is (nchan, 1) [for now?]', '#', '# Note that we apply msselect() again when reading the data because', '# selectinit() is broken, but the invocation here is good because it', '# affects the results from ms.range() and friends.', 'if', "':'", 'in', '(', 'cfg', '.', 'spw', 'or', "''", ')', ':', 'warn', '(', "'it looks like you are attempting to select channels within one or more spws'", ')', 'warn', '(', "'this is NOT IMPLEMENTED; I will average over the whole spw instead'", ')', 'ms', '.', 'open', '(', 'b', '(', 'cfg', '.', 'vis', ')', ')', 'totrows', '=', 'ms', '.', 'nrow', '(', ')', 'ms_sels', '=', 'dict', '(', '(', 'n', ',', 'cfg', '.', 'get', '(', 'n', ')', ')', 'for', 'n', 'in', 'util', '.', 'msselect_keys', 'if', 'cfg', '.', 'get', '(', 'n', ')', 'is', 'not', 'None', ')', 'ms', '.', 'msselect', '(', 'b', '(', 'ms_sels', ')', ')', 'rangeinfo', '=', 'ms', '.', 'range', '(', "b'data_desc_id field_id'", '.', 'split', '(', ')', ')', 'ddids', '=', 'rangeinfo', '[', "'data_desc_id'", ']', 'fields', '=', 'rangeinfo', '[', "'field_id'", ']', 'colnames', '=', '[', 'cfg', '.', 'datacol', ']', '+', "'flag weight time axis_info'", '.', 'split', '(', ')', 'rephase', '=', '(', 'cfg', '.', 'rephase', 'is', 'not', 'None', ')', 'if', 'fields', '.', 'size', '!=', '1', ':', "# I feel comfortable making this a fatal error, even if we're", '# not rephasing.', 'die', '(', "'selected data should contain precisely one field; got %d'", ',', 'fields', '.', 'size', ')', 'if', 'rephase', ':', 'fieldid', '=', 'fields', '[', '0', ']', 'tb', '.', 'open', '(', 'b', '(', 'os', '.', 'path', '.', 'join', '(', 'cfg', '.', 'vis', ',', "'FIELD'", ')', ')', ')', 'phdirinfo', '=', 'tb', '.', 'getcell', '(', "b'PHASE_DIR'", ',', 'fieldid', ')', 'tb', '.', 'close', '(', ')', 'if', 'phdirinfo', '.', 'shape', '[', '1', ']', '!=', '1', ':', 'die', '(', "'trying to rephase but target field (#%d) has a '", "'time-variable phase center, which I can\\'t handle'", ',', 'fieldid', ')', 'ra0', ',', 'dec0', '=', 'phdirinfo', '[', ':', ',', '0', ']', '# in radians.', '# based on intflib/pwflux.py, which was copied from', '# hex/hex-lib-calcgainerr:', 'dra', '=', 'cfg', '.', 'rephase', '[', '0', ']', '-', 'ra0', 'dec', '=', 'cfg', '.', 'rephase', '[', '1', ']', 'l', '=', 'np', '.', 'sin', '(', 'dra', ')', '*', 'np', '.', 'cos', '(', 'dec', ')', 'm', '=', 'np', '.', 'sin', '(', 'dec', ')', '*', 'np', '.', 'cos', '(', 'dec0', ')', '-', 'np', '.', 'cos', '(', 'dra', ')', '*', 'np', '.', 'cos', '(', 'dec', ')', '*', 'np', '.', 'sin', '(', 'dec0', ')', 'n', '=', 'np', '.', 'sin', '(', 'dec', ')', '*', 'np', '.', 'sin', '(', 'dec0', ')', '+', 'np', '.', 'cos', '(', 'dra', ')', '*', 'np', '.', 'cos', '(', 'dec', ')', '*', 'np', '.', 'cos', '(', 'dec0', ')', 'n', '-=', '1', '# makes the work below easier', 'lmn', '=', 'np', '.', 'asarray', '(', '[', 'l', ',', 'm', ',', 'n', ']', ')', 'colnames', '.', 'append', '(', "'uvw'", ')', '# Also need 
this although 99% of the time `ddid` and `spwid` are the same', 'tb', '.', 'open', '(', 'b', '(', 'os', '.', 'path', '.', 'join', '(', 'cfg', '.', 'vis', ',', "'DATA_DESCRIPTION'", ')', ')', ')', 'ddspws', '=', 'np', '.', 'asarray', '(', 'tb', '.', 'getcol', '(', "b'SPECTRAL_WINDOW_ID'", ')', ')', 'tb', '.', 'close', '(', ')', 'tbins', '=', '{', '}', 'colnames', '=', 'b', '(', 'colnames', ')', 'for', 'ddindex', ',', 'ddid', 'in', 'enumerate', '(', 'ddids', ')', ':', '# Starting in CASA 4.6, selectinit(ddid) stopped actually filtering', '# your data to match the specified DDID! What garbage. Work around', '# with our own filtering.', 'ms_sels', '[', "'taql'", ']', '=', "'DATA_DESC_ID == %d'", '%', 'ddid', 'ms', '.', 'msselect', '(', 'b', '(', 'ms_sels', ')', ')', 'ms', '.', 'selectinit', '(', 'ddid', ')', 'if', 'cfg', '.', 'polarization', 'is', 'not', 'None', ':', 'ms', '.', 'selectpolarization', '(', 'b', '(', 'cfg', '.', 'polarization', '.', 'split', '(', "','", ')', ')', ')', 'ms', '.', 'iterinit', '(', 'maxrows', '=', '4096', ')', 'ms', '.', 'iterorigin', '(', ')', 'while', 'True', ':', 'cols', '=', 'ms', '.', 'getdata', '(', 'items', '=', 'colnames', ')', 'if', 'rephase', ':', '# With appropriate spw/DDID selection, `freqs` has shape', '# (nchan, 1). Convert to m^-1 so we can multiply against UVW', '# directly.', 'freqs', '=', 'cols', '[', "'axis_info'", ']', '[', "'freq_axis'", ']', '[', "'chan_freq'", ']', 'assert', 'freqs', '.', 'shape', '[', '1', ']', '==', '1', ',', "'internal inconsistency, chan_freq??'", 'freqs', '=', 'freqs', '[', ':', ',', '0', ']', '*', 'util', '.', 'INVERSE_C_MS', 'for', 'i', 'in', 'range', '(', 'cols', '[', "'time'", ']', '.', 'size', ')', ':', '# all records', 'time', '=', 'cols', '[', "'time'", ']', '[', 'i', ']', '# get out of UTC as fast as we can! For some reason', "# giving 'unit=s' below doesn't do what one might hope it would.", '# CASA can convert to a variety of timescales; TAI is probably', '# the safest conversion in terms of being helpful while remaining', '# close to the fundamental data, but TT is possible and should', '# be perfectly precise for standard applications.', 'mq', '=', 'me', '.', 'epoch', '(', "b'utc'", ',', 'b', '(', '{', "'value'", ':', 'time', '/', '86400.', ',', "'unit'", ':', "'d'", '}', ')', ')', 'mjdtt', '=', 'me', '.', 'measure', '(', 'b', '(', 'mq', ')', ',', "b'tt'", ')', '[', "'m0'", ']', '[', "'value'", ']', 'tdata', '=', 'tbins', '.', 'get', '(', 'mjdtt', ',', 'None', ')', 'if', 'tdata', 'is', 'None', ':', 'tdata', '=', 'tbins', '[', 'mjdtt', ']', '=', '[', '0.', ',', '0.', ',', '0.', ',', '0.', ',', '0', ']', 'if', 'rephase', ':', 'uvw', '=', 'cols', '[', "'uvw'", ']', '[', ':', ',', 'i', ']', 'ph', '=', 'np', '.', 'exp', '(', '(', '0', '-', '2j', ')', '*', 'np', '.', 'pi', '*', 'np', '.', 'dot', '(', 'lmn', ',', 'uvw', ')', '*', 'freqs', ')', 'for', 'j', 'in', 'range', '(', 'cols', '[', "'flag'", ']', '.', 'shape', '[', '0', ']', ')', ':', '# all polns', '# We just average together all polarizations right now!', '# (Not actively, but passively by just iterating over them.)', 'data', '=', 'cols', '[', 'cfg', '.', 'datacol', ']', '[', 'j', ',', ':', ',', 'i', ']', 'flags', '=', 'cols', '[', "'flag'", ']', '[', 'j', ',', ':', ',', 'i', ']', '# XXXXX casacore is currently (ca. 2012) broken and', '# returns the raw weights from the dataset rather than', '# applying the polarization selection. 
Fortunately all of', '# our weights are the same, and you can never fetch more', '# pol types than the dataset has, so this bit works', '# despite the bug.', 'w', '=', 'np', '.', 'where', '(', '~', 'flags', ')', '[', '0', ']', 'if', 'not', 'w', '.', 'size', ':', 'continue', '# all flagged', 'if', 'rephase', ':', 'data', '*=', 'ph', 'd', '=', 'data', '[', 'w', ']', '.', 'mean', '(', ')', '# account for flagged parts. 90% sure this is the', '# right thing to do:', 'wt', '=', 'cols', '[', "'weight'", ']', '[', 'j', ',', 'i', ']', '*', 'w', '.', 'size', '/', 'data', '.', 'size', 'wd', '=', 'wt', '*', 'd', '# note a little bit of a hack here to encode real^2 and', '# imag^2 separately:', 'wd2', '=', 'wt', '*', '(', 'd', '.', 'real', '**', '2', '+', '(', '1j', ')', '*', 'd', '.', 'imag', '**', '2', ')', 'tdata', '[', '0', ']', '+=', 'wd', 'tdata', '[', '1', ']', '+=', 'wd2', 'tdata', '[', '2', ']', '+=', 'wt', 'tdata', '[', '3', ']', '+=', 'wt', '**', '2', 'tdata', '[', '4', ']', '+=', '1', 'if', 'not', 'ms', '.', 'iternext', '(', ')', ':', 'break', 'ms', '.', 'reset', '(', ')', '# reset selection filter so we can get next DDID', 'ms', '.', 'close', '(', ')', '# Could gain some efficiency by using a better data structure than a dict().', 'smjd', '=', 'sorted', '(', 'six', '.', 'iterkeys', '(', 'tbins', ')', ')', 'cfg', '.', 'format', '.', 'header', '(', 'cfg', ')', 'for', 'mjd', 'in', 'smjd', ':', 'wd', ',', 'wd2', ',', 'wt', ',', 'wt2', ',', 'n', '=', 'tbins', '[', 'mjd', ']', 'if', 'n', '<', '3', ':', '# not enough data for meaningful statistics', 'continue', 'dtmin', '=', '1440', '*', '(', 'mjd', '-', 'smjd', '[', '0', ']', ')', 'r_sc', '=', 'wd', '.', 'real', '/', 'wt', '*', 'cfg', '.', 'datascale', 'i_sc', '=', 'wd', '.', 'imag', '/', 'wt', '*', 'cfg', '.', 'datascale', 'r2_sc', '=', 'wd2', '.', 'real', '/', 'wt', '*', 'cfg', '.', 'datascale', '**', '2', 'i2_sc', '=', 'wd2', '.', 'imag', '/', 'wt', '*', 'cfg', '.', 'datascale', '**', '2', 'if', 'cfg', '.', 'believeweights', ':', 'ru_sc', '=', 'wt', '**', '-', '0.5', '*', 'cfg', '.', 'datascale', 'iu_sc', '=', 'wt', '**', '-', '0.5', '*', 'cfg', '.', 'datascale', 'else', ':', 'rv_sc', '=', 'r2_sc', '-', 'r_sc', '**', '2', '# variance among real/imag msmts', 'iv_sc', '=', 'i2_sc', '-', 'i_sc', '**', '2', 'ru_sc', '=', 'np', '.', 'sqrt', '(', 'rv_sc', '*', 'wt2', ')', '/', 'wt', '# uncert in mean real/img values', 'iu_sc', '=', 'np', '.', 'sqrt', '(', 'iv_sc', '*', 'wt2', ')', '/', 'wt', 'mag', '=', 'np', '.', 'sqrt', '(', 'r_sc', '**', '2', '+', 'i_sc', '**', '2', ')', 'umag', '=', 'np', '.', 'sqrt', '(', 'r_sc', '**', '2', '*', 'ru_sc', '**', '2', '+', 'i_sc', '**', '2', '*', 'iu_sc', '**', '2', ')', '/', 'mag', 'cfg', '.', 'format', '.', 'row', '(', 'cfg', ',', 'mjd', ',', 'dtmin', ',', 'r_sc', ',', 'ru_sc', ',', 'i_sc', ',', 'iu_sc', ',', 'mag', ',', 'umag', ',', 'n', ')']
Run the discrete-Fourier-transform photometry algorithm. See the module-level documentation and the output of ``casatask dftphotom --help`` for help. All of the algorithm configuration is specified in the *cfg* argument, which is an instance of :class:`Config`.
['Run', 'the', 'discrete', '-', 'Fourier', '-', 'transform', 'photometry', 'algorithm', '.']
train
https://github.com/pkgw/pwkit/blob/d40957a1c3d2ea34e7ceac2267ee9635135f2793/pwkit/environments/casa/dftphotom.py#L190-L381
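A hedged invocation sketch; the Config attribute names (vis, rephase, datacol, ...) are taken from the function body above, but constructing Config with no arguments and running this requires a CASA-enabled pwkit environment, so treat the setup as an assumption.

    import numpy as np
    from pwkit.environments.casa.dftphotom import Config, dftphotom

    cfg = Config()                                        # no-arg construction assumed
    cfg.vis = 'target_field.ms'                           # measurement set to photometer
    cfg.rephase = (np.deg2rad(83.63), np.deg2rad(22.01))  # optional new phase center, radians
    dftphotom(cfg)                                        # emits one row per integration via cfg.format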
9,952
cuihantao/andes
andes/models/base.py
ModelBase.on_bus
def on_bus(self, bus_idx): """ Return the indices of elements on the given buses for shunt-connected elements :param bus_idx: idx of the buses to which the elements are connected :return: idx of elements connected to bus_idx """ assert hasattr(self, 'bus') ret = [] if isinstance(bus_idx, (int, float, str)): bus_idx = [bus_idx] for item in bus_idx: idx = [] for e, b in enumerate(self.bus): if b == item: idx.append(self.idx[e]) if len(idx) == 1: idx = idx[0] elif len(idx) == 0: idx = None ret.append(idx) if len(ret) == 1: ret = ret[0] return ret
python
def on_bus(self, bus_idx): """ Return the indices of elements on the given buses for shunt-connected elements :param bus_idx: idx of the buses to which the elements are connected :return: idx of elements connected to bus_idx """ assert hasattr(self, 'bus') ret = [] if isinstance(bus_idx, (int, float, str)): bus_idx = [bus_idx] for item in bus_idx: idx = [] for e, b in enumerate(self.bus): if b == item: idx.append(self.idx[e]) if len(idx) == 1: idx = idx[0] elif len(idx) == 0: idx = None ret.append(idx) if len(ret) == 1: ret = ret[0] return ret
['def', 'on_bus', '(', 'self', ',', 'bus_idx', ')', ':', 'assert', 'hasattr', '(', 'self', ',', "'bus'", ')', 'ret', '=', '[', ']', 'if', 'isinstance', '(', 'bus_idx', ',', '(', 'int', ',', 'float', ',', 'str', ')', ')', ':', 'bus_idx', '=', '[', 'bus_idx', ']', 'for', 'item', 'in', 'bus_idx', ':', 'idx', '=', '[', ']', 'for', 'e', ',', 'b', 'in', 'enumerate', '(', 'self', '.', 'bus', ')', ':', 'if', 'b', '==', 'item', ':', 'idx', '.', 'append', '(', 'self', '.', 'idx', '[', 'e', ']', ')', 'if', 'len', '(', 'idx', ')', '==', '1', ':', 'idx', '=', 'idx', '[', '0', ']', 'elif', 'len', '(', 'idx', ')', '==', '0', ':', 'idx', '=', 'None', 'ret', '.', 'append', '(', 'idx', ')', 'if', 'len', '(', 'ret', ')', '==', '1', ':', 'ret', '=', 'ret', '[', '0', ']', 'return', 'ret']
Return the indices of elements on the given buses for shunt-connected elements :param bus_idx: idx of the buses to which the elements are connected :return: idx of elements connected to bus_idx
['Return', 'the', 'indices', 'of', 'elements', 'on', 'the', 'given', 'buses', 'for', 'shunt', '-', 'connected', 'elements']
train
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/models/base.py#L1366-L1396
9,953
nkavaldj/myhdl_lib
myhdl_lib/fifo.py
fifo
def fifo(rst, clk, full, we, din, empty, re, dout, afull=None, aempty=None, afull_th=None, aempty_th=None, ovf=None, udf=None, count=None, count_max=None, depth=None, width=None): """ Synchronous FIFO Input interface: full, we, din Output interface: empty, re, dout It s possible to set din and dout to None. Then the fifo width will be 0 and the fifo will contain no storage. Extra interface: afull (o) - almost full flag, asserted when the number of empty cells <= afull_th aempty (o) - almost empty flag, asserted when the number of full cells <= aempty_th afull_th (i) - almost full threshold, in terms of fifo cells; signal or constant; Optional, default depth/2 aempty_th (i) - almost empty threshold, in terms of fifo cells; signal or constant; Optional, default depth/2 count (o) - number of occupied fifo cells count_max (o) - max number of occupied fifo cells reached since the last reset ovf (o) - overflow flag, set at the first write in a full fifo, cleared at reset udf (o) - underflow flag, set at the first read from an empty fifo, cleared at reset Parameters: depth - fifo depth, must be >= 1; if not set or set to `None` default value 2 is used width - data width in bits, must be >= 0; if not set or set to `None` the `din` width is used """ if (width == None): width = 0 if din is not None: width = len(din) if (depth == None): depth = 2 full_flg = Signal(bool(1)) empty_flg = Signal(bool(1)) we_safe = Signal(bool(0)) re_safe = Signal(bool(0)) rd_ptr = Signal(intbv(0, min=0, max=depth)) rd_ptr_new = Signal(intbv(0, min=0, max=depth)) wr_ptr = Signal(intbv(0, min=0, max=depth)) wr_ptr_new = Signal(intbv(0, min=0, max=depth)) @always_comb def safe_read_write(): full.next = full_flg empty.next = empty_flg we_safe.next = we and not full_flg re_safe.next = re and not empty_flg #=========================================================================== # Write, Read, Full, Empty #=========================================================================== @always_comb def ptrs_new(): rd_ptr_new.next = ((rd_ptr + 1) % depth) wr_ptr_new.next = ((wr_ptr + 1) % depth) @always(clk.posedge) def state_main(): if (rst): wr_ptr.next = 0 rd_ptr.next = 0 full_flg.next = 0 empty_flg.next = 1 else: # Write pointer if (we_safe): wr_ptr.next = wr_ptr_new # Read pointer if (re_safe): rd_ptr.next = rd_ptr_new # Empty flag if (we_safe): empty_flg.next = 0 elif (re_safe and (rd_ptr_new == wr_ptr)): empty_flg.next = 1 # Full flag if (re_safe): full_flg.next = 0 elif (we_safe and (wr_ptr_new == rd_ptr)): full_flg.next = 1 #=========================================================================== # Count, CountMax #=========================================================================== ''' Count ''' if (count != None) or (count_max != None) or (afull != None) or (aempty != None): count_r = Signal(intbv(0, min=0, max=depth+1)) count_new = Signal(intbv(0, min=-1, max=depth+2)) if (count != None): assert count.max > depth @always_comb def count_out(): count.next = count_r @always_comb def count_comb(): if (we_safe and not re_safe): count_new.next = count_r + 1 elif (not we_safe and re_safe): count_new.next = count_r - 1 else: count_new.next = count_r @always(clk.posedge) def count_proc(): if (rst): count_r.next = 0 else: count_r.next = count_new ''' Count max ''' if (count_max != None): assert count_max.max > depth count_max_r = Signal(intbv(0, min=0,max=count_max.max)) @always(clk.posedge) def count_max_proc(): if (rst): count_max_r.next = 0 else: if (count_max_r < count_new): count_max_r.next = count_new 
@always_comb def count_max_out(): count_max.next = count_max_r #=========================================================================== # AlmostFull, AlmostEmpty #=========================================================================== ''' AlmostFull flag ''' if (afull != None): if (afull_th == None): afull_th = depth//2 @always(clk.posedge) def afull_proc(): if (rst): afull.next = 0 else: afull.next = (count_new >= depth-afull_th) ''' AlmostEmpty flag ''' if (aempty != None): if (aempty_th == None): aempty_th = depth//2 @always(clk.posedge) def aempty_proc(): if (rst): aempty.next = 1 else: aempty.next = (count_new <= aempty_th) #=========================================================================== # Overflow, Underflow #=========================================================================== ''' Overflow flag ''' if (ovf != None): @always(clk.posedge) def ovf_proc(): if (rst): ovf.next = 0 else: if (we and full_flg ): ovf.next = 1 ''' Underflow flag ''' if (udf != None): @always(clk.posedge) def udf_proc(): if (rst): udf.next = 0 else: if (re and empty_flg): udf.next = 1 if width>0: #=========================================================================== # Memory instance #=========================================================================== mem_we = Signal(bool(0)) mem_addrw = Signal(intbv(0, min=0, max=depth)) mem_addrr = Signal(intbv(0, min=0, max=depth)) mem_di = Signal(intbv(0)[width:0]) mem_do = Signal(intbv(0)[width:0]) # RAM: Simple-Dual-Port, Asynchronous read mem = ram_sdp_ar( clk = clk, we = mem_we, addrw = mem_addrw, addrr = mem_addrr, di = mem_di, do = mem_do ) @always_comb def mem_connect(): mem_we.next = we_safe mem_addrw.next = wr_ptr mem_addrr.next = rd_ptr mem_di.next = din dout.next = mem_do return instances()
python
def fifo(rst, clk, full, we, din, empty, re, dout, afull=None, aempty=None, afull_th=None, aempty_th=None, ovf=None, udf=None, count=None, count_max=None, depth=None, width=None): """ Synchronous FIFO Input interface: full, we, din Output interface: empty, re, dout It s possible to set din and dout to None. Then the fifo width will be 0 and the fifo will contain no storage. Extra interface: afull (o) - almost full flag, asserted when the number of empty cells <= afull_th aempty (o) - almost empty flag, asserted when the number of full cells <= aempty_th afull_th (i) - almost full threshold, in terms of fifo cells; signal or constant; Optional, default depth/2 aempty_th (i) - almost empty threshold, in terms of fifo cells; signal or constant; Optional, default depth/2 count (o) - number of occupied fifo cells count_max (o) - max number of occupied fifo cells reached since the last reset ovf (o) - overflow flag, set at the first write in a full fifo, cleared at reset udf (o) - underflow flag, set at the first read from an empty fifo, cleared at reset Parameters: depth - fifo depth, must be >= 1; if not set or set to `None` default value 2 is used width - data width in bits, must be >= 0; if not set or set to `None` the `din` width is used """ if (width == None): width = 0 if din is not None: width = len(din) if (depth == None): depth = 2 full_flg = Signal(bool(1)) empty_flg = Signal(bool(1)) we_safe = Signal(bool(0)) re_safe = Signal(bool(0)) rd_ptr = Signal(intbv(0, min=0, max=depth)) rd_ptr_new = Signal(intbv(0, min=0, max=depth)) wr_ptr = Signal(intbv(0, min=0, max=depth)) wr_ptr_new = Signal(intbv(0, min=0, max=depth)) @always_comb def safe_read_write(): full.next = full_flg empty.next = empty_flg we_safe.next = we and not full_flg re_safe.next = re and not empty_flg #=========================================================================== # Write, Read, Full, Empty #=========================================================================== @always_comb def ptrs_new(): rd_ptr_new.next = ((rd_ptr + 1) % depth) wr_ptr_new.next = ((wr_ptr + 1) % depth) @always(clk.posedge) def state_main(): if (rst): wr_ptr.next = 0 rd_ptr.next = 0 full_flg.next = 0 empty_flg.next = 1 else: # Write pointer if (we_safe): wr_ptr.next = wr_ptr_new # Read pointer if (re_safe): rd_ptr.next = rd_ptr_new # Empty flag if (we_safe): empty_flg.next = 0 elif (re_safe and (rd_ptr_new == wr_ptr)): empty_flg.next = 1 # Full flag if (re_safe): full_flg.next = 0 elif (we_safe and (wr_ptr_new == rd_ptr)): full_flg.next = 1 #=========================================================================== # Count, CountMax #=========================================================================== ''' Count ''' if (count != None) or (count_max != None) or (afull != None) or (aempty != None): count_r = Signal(intbv(0, min=0, max=depth+1)) count_new = Signal(intbv(0, min=-1, max=depth+2)) if (count != None): assert count.max > depth @always_comb def count_out(): count.next = count_r @always_comb def count_comb(): if (we_safe and not re_safe): count_new.next = count_r + 1 elif (not we_safe and re_safe): count_new.next = count_r - 1 else: count_new.next = count_r @always(clk.posedge) def count_proc(): if (rst): count_r.next = 0 else: count_r.next = count_new ''' Count max ''' if (count_max != None): assert count_max.max > depth count_max_r = Signal(intbv(0, min=0,max=count_max.max)) @always(clk.posedge) def count_max_proc(): if (rst): count_max_r.next = 0 else: if (count_max_r < count_new): count_max_r.next = count_new 
@always_comb def count_max_out(): count_max.next = count_max_r #=========================================================================== # AlmostFull, AlmostEmpty #=========================================================================== ''' AlmostFull flag ''' if (afull != None): if (afull_th == None): afull_th = depth//2 @always(clk.posedge) def afull_proc(): if (rst): afull.next = 0 else: afull.next = (count_new >= depth-afull_th) ''' AlmostEmpty flag ''' if (aempty != None): if (aempty_th == None): aempty_th = depth//2 @always(clk.posedge) def aempty_proc(): if (rst): aempty.next = 1 else: aempty.next = (count_new <= aempty_th) #=========================================================================== # Overflow, Underflow #=========================================================================== ''' Overflow flag ''' if (ovf != None): @always(clk.posedge) def ovf_proc(): if (rst): ovf.next = 0 else: if (we and full_flg ): ovf.next = 1 ''' Underflow flag ''' if (udf != None): @always(clk.posedge) def udf_proc(): if (rst): udf.next = 0 else: if (re and empty_flg): udf.next = 1 if width>0: #=========================================================================== # Memory instance #=========================================================================== mem_we = Signal(bool(0)) mem_addrw = Signal(intbv(0, min=0, max=depth)) mem_addrr = Signal(intbv(0, min=0, max=depth)) mem_di = Signal(intbv(0)[width:0]) mem_do = Signal(intbv(0)[width:0]) # RAM: Simple-Dual-Port, Asynchronous read mem = ram_sdp_ar( clk = clk, we = mem_we, addrw = mem_addrw, addrr = mem_addrr, di = mem_di, do = mem_do ) @always_comb def mem_connect(): mem_we.next = we_safe mem_addrw.next = wr_ptr mem_addrr.next = rd_ptr mem_di.next = din dout.next = mem_do return instances()
['def', 'fifo', '(', 'rst', ',', 'clk', ',', 'full', ',', 'we', ',', 'din', ',', 'empty', ',', 're', ',', 'dout', ',', 'afull', '=', 'None', ',', 'aempty', '=', 'None', ',', 'afull_th', '=', 'None', ',', 'aempty_th', '=', 'None', ',', 'ovf', '=', 'None', ',', 'udf', '=', 'None', ',', 'count', '=', 'None', ',', 'count_max', '=', 'None', ',', 'depth', '=', 'None', ',', 'width', '=', 'None', ')', ':', 'if', '(', 'width', '==', 'None', ')', ':', 'width', '=', '0', 'if', 'din', 'is', 'not', 'None', ':', 'width', '=', 'len', '(', 'din', ')', 'if', '(', 'depth', '==', 'None', ')', ':', 'depth', '=', '2', 'full_flg', '=', 'Signal', '(', 'bool', '(', '1', ')', ')', 'empty_flg', '=', 'Signal', '(', 'bool', '(', '1', ')', ')', 'we_safe', '=', 'Signal', '(', 'bool', '(', '0', ')', ')', 're_safe', '=', 'Signal', '(', 'bool', '(', '0', ')', ')', 'rd_ptr', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'depth', ')', ')', 'rd_ptr_new', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'depth', ')', ')', 'wr_ptr', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'depth', ')', ')', 'wr_ptr_new', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'depth', ')', ')', '@', 'always_comb', 'def', 'safe_read_write', '(', ')', ':', 'full', '.', 'next', '=', 'full_flg', 'empty', '.', 'next', '=', 'empty_flg', 'we_safe', '.', 'next', '=', 'we', 'and', 'not', 'full_flg', 're_safe', '.', 'next', '=', 're', 'and', 'not', 'empty_flg', '#===========================================================================', '# Write, Read, Full, Empty', '#===========================================================================', '@', 'always_comb', 'def', 'ptrs_new', '(', ')', ':', 'rd_ptr_new', '.', 'next', '=', '(', '(', 'rd_ptr', '+', '1', ')', '%', 'depth', ')', 'wr_ptr_new', '.', 'next', '=', '(', '(', 'wr_ptr', '+', '1', ')', '%', 'depth', ')', '@', 'always', '(', 'clk', '.', 'posedge', ')', 'def', 'state_main', '(', ')', ':', 'if', '(', 'rst', ')', ':', 'wr_ptr', '.', 'next', '=', '0', 'rd_ptr', '.', 'next', '=', '0', 'full_flg', '.', 'next', '=', '0', 'empty_flg', '.', 'next', '=', '1', 'else', ':', '# Write pointer', 'if', '(', 'we_safe', ')', ':', 'wr_ptr', '.', 'next', '=', 'wr_ptr_new', '# Read pointer', 'if', '(', 're_safe', ')', ':', 'rd_ptr', '.', 'next', '=', 'rd_ptr_new', '# Empty flag', 'if', '(', 'we_safe', ')', ':', 'empty_flg', '.', 'next', '=', '0', 'elif', '(', 're_safe', 'and', '(', 'rd_ptr_new', '==', 'wr_ptr', ')', ')', ':', 'empty_flg', '.', 'next', '=', '1', '# Full flag', 'if', '(', 're_safe', ')', ':', 'full_flg', '.', 'next', '=', '0', 'elif', '(', 'we_safe', 'and', '(', 'wr_ptr_new', '==', 'rd_ptr', ')', ')', ':', 'full_flg', '.', 'next', '=', '1', '#===========================================================================', '# Count, CountMax', '#===========================================================================', "''' Count '''", 'if', '(', 'count', '!=', 'None', ')', 'or', '(', 'count_max', '!=', 'None', ')', 'or', '(', 'afull', '!=', 'None', ')', 'or', '(', 'aempty', '!=', 'None', ')', ':', 'count_r', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'depth', '+', '1', ')', ')', 'count_new', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '-', '1', ',', 'max', '=', 'depth', '+', '2', ')', ')', 'if', '(', 'count', '!=', 'None', ')', ':', 'assert', 'count', '.', 'max', '>', 'depth', '@', 'always_comb', 'def', 'count_out', '(', ')', ':', 
'count', '.', 'next', '=', 'count_r', '@', 'always_comb', 'def', 'count_comb', '(', ')', ':', 'if', '(', 'we_safe', 'and', 'not', 're_safe', ')', ':', 'count_new', '.', 'next', '=', 'count_r', '+', '1', 'elif', '(', 'not', 'we_safe', 'and', 're_safe', ')', ':', 'count_new', '.', 'next', '=', 'count_r', '-', '1', 'else', ':', 'count_new', '.', 'next', '=', 'count_r', '@', 'always', '(', 'clk', '.', 'posedge', ')', 'def', 'count_proc', '(', ')', ':', 'if', '(', 'rst', ')', ':', 'count_r', '.', 'next', '=', '0', 'else', ':', 'count_r', '.', 'next', '=', 'count_new', "''' Count max '''", 'if', '(', 'count_max', '!=', 'None', ')', ':', 'assert', 'count_max', '.', 'max', '>', 'depth', 'count_max_r', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'count_max', '.', 'max', ')', ')', '@', 'always', '(', 'clk', '.', 'posedge', ')', 'def', 'count_max_proc', '(', ')', ':', 'if', '(', 'rst', ')', ':', 'count_max_r', '.', 'next', '=', '0', 'else', ':', 'if', '(', 'count_max_r', '<', 'count_new', ')', ':', 'count_max_r', '.', 'next', '=', 'count_new', '@', 'always_comb', 'def', 'count_max_out', '(', ')', ':', 'count_max', '.', 'next', '=', 'count_max_r', '#===========================================================================', '# AlmostFull, AlmostEmpty', '#===========================================================================', "''' AlmostFull flag '''", 'if', '(', 'afull', '!=', 'None', ')', ':', 'if', '(', 'afull_th', '==', 'None', ')', ':', 'afull_th', '=', 'depth', '//', '2', '@', 'always', '(', 'clk', '.', 'posedge', ')', 'def', 'afull_proc', '(', ')', ':', 'if', '(', 'rst', ')', ':', 'afull', '.', 'next', '=', '0', 'else', ':', 'afull', '.', 'next', '=', '(', 'count_new', '>=', 'depth', '-', 'afull_th', ')', "''' AlmostEmpty flag '''", 'if', '(', 'aempty', '!=', 'None', ')', ':', 'if', '(', 'aempty_th', '==', 'None', ')', ':', 'aempty_th', '=', 'depth', '//', '2', '@', 'always', '(', 'clk', '.', 'posedge', ')', 'def', 'aempty_proc', '(', ')', ':', 'if', '(', 'rst', ')', ':', 'aempty', '.', 'next', '=', '1', 'else', ':', 'aempty', '.', 'next', '=', '(', 'count_new', '<=', 'aempty_th', ')', '#===========================================================================', '# Overflow, Underflow', '#===========================================================================', "''' Overflow flag '''", 'if', '(', 'ovf', '!=', 'None', ')', ':', '@', 'always', '(', 'clk', '.', 'posedge', ')', 'def', 'ovf_proc', '(', ')', ':', 'if', '(', 'rst', ')', ':', 'ovf', '.', 'next', '=', '0', 'else', ':', 'if', '(', 'we', 'and', 'full_flg', ')', ':', 'ovf', '.', 'next', '=', '1', "''' Underflow flag '''", 'if', '(', 'udf', '!=', 'None', ')', ':', '@', 'always', '(', 'clk', '.', 'posedge', ')', 'def', 'udf_proc', '(', ')', ':', 'if', '(', 'rst', ')', ':', 'udf', '.', 'next', '=', '0', 'else', ':', 'if', '(', 're', 'and', 'empty_flg', ')', ':', 'udf', '.', 'next', '=', '1', 'if', 'width', '>', '0', ':', '#===========================================================================', '# Memory instance', '#===========================================================================', 'mem_we', '=', 'Signal', '(', 'bool', '(', '0', ')', ')', 'mem_addrw', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'depth', ')', ')', 'mem_addrr', '=', 'Signal', '(', 'intbv', '(', '0', ',', 'min', '=', '0', ',', 'max', '=', 'depth', ')', ')', 'mem_di', '=', 'Signal', '(', 'intbv', '(', '0', ')', '[', 'width', ':', '0', ']', ')', 'mem_do', '=', 'Signal', '(', 'intbv', 
'(', '0', ')', '[', 'width', ':', '0', ']', ')', '# RAM: Simple-Dual-Port, Asynchronous read', 'mem', '=', 'ram_sdp_ar', '(', 'clk', '=', 'clk', ',', 'we', '=', 'mem_we', ',', 'addrw', '=', 'mem_addrw', ',', 'addrr', '=', 'mem_addrr', ',', 'di', '=', 'mem_di', ',', 'do', '=', 'mem_do', ')', '@', 'always_comb', 'def', 'mem_connect', '(', ')', ':', 'mem_we', '.', 'next', '=', 'we_safe', 'mem_addrw', '.', 'next', '=', 'wr_ptr', 'mem_addrr', '.', 'next', '=', 'rd_ptr', 'mem_di', '.', 'next', '=', 'din', 'dout', '.', 'next', '=', 'mem_do', 'return', 'instances', '(', ')']
Synchronous FIFO Input interface: full, we, din Output interface: empty, re, dout It s possible to set din and dout to None. Then the fifo width will be 0 and the fifo will contain no storage. Extra interface: afull (o) - almost full flag, asserted when the number of empty cells <= afull_th aempty (o) - almost empty flag, asserted when the number of full cells <= aempty_th afull_th (i) - almost full threshold, in terms of fifo cells; signal or constant; Optional, default depth/2 aempty_th (i) - almost empty threshold, in terms of fifo cells; signal or constant; Optional, default depth/2 count (o) - number of occupied fifo cells count_max (o) - max number of occupied fifo cells reached since the last reset ovf (o) - overflow flag, set at the first write in a full fifo, cleared at reset udf (o) - underflow flag, set at the first read from an empty fifo, cleared at reset Parameters: depth - fifo depth, must be >= 1; if not set or set to `None` default value 2 is used width - data width in bits, must be >= 0; if not set or set to `None` the `din` width is used
['Synchronous', 'FIFO']
train
https://github.com/nkavaldj/myhdl_lib/blob/9902afd2031e7847373f692821b2135fd0810aa8/myhdl_lib/fifo.py#L6-L210
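A minimal instantiation sketch for the fifo generator in the record above; the import path follows func_path_in_repository, while the 8-bit width and depth of 16 are illustrative choices, and the clock/reset drivers and testbench are elided:

    from myhdl import Signal, intbv
    from myhdl_lib.fifo import fifo

    rst, clk = Signal(bool(0)), Signal(bool(0))
    full, we, empty, re = (Signal(bool(0)) for _ in range(4))
    din, dout = Signal(intbv(0)[8:]), Signal(intbv(0)[8:])

    # 16-deep, 8-bit wide FIFO; the optional flags (afull, aempty, ovf, udf, ...) are left unconnected
    fifo_inst = fifo(rst, clk, full, we, din, empty, re, dout, depth=16)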
9,954
chemlab/chemlab
chemlab/mviewer/api/appeareance.py
change_color
def change_color(color): """Change the color of the currently selected objects. *color* is represented as a string. Otherwise color can be passed as an rgba tuple of values between 0, 255 Reset the color by passing *color=None*. You can call this function interactively by using:: change_color.interactive() A new dialog will popup with a color chooser. """ rep = current_representation() # Let's parse the color first if isinstance(color, str): # The color should be a string col = color_from_string(color) if isinstance(color, tuple): col = color if color is None: col = None # Color array rep.change_color(rep.selection_state, col)
python
def change_color(color): """Change the color of the currently selected objects. *color* is represented as a string. Otherwise color can be passed as an rgba tuple of values between 0, 255 Reset the color by passing *color=None*. You can call this function interactively by using:: change_color.interactive() A new dialog will popup with a color chooser. """ rep = current_representation() # Let's parse the color first if isinstance(color, str): # The color should be a string col = color_from_string(color) if isinstance(color, tuple): col = color if color is None: col = None # Color array rep.change_color(rep.selection_state, col)
['def', 'change_color', '(', 'color', ')', ':', 'rep', '=', 'current_representation', '(', ')', "# Let's parse the color first", 'if', 'isinstance', '(', 'color', ',', 'str', ')', ':', '# The color should be a string', 'col', '=', 'color_from_string', '(', 'color', ')', 'if', 'isinstance', '(', 'color', ',', 'tuple', ')', ':', 'col', '=', 'color', 'if', 'color', 'is', 'None', ':', 'col', '=', 'None', '# Color array', 'rep', '.', 'change_color', '(', 'rep', '.', 'selection_state', ',', 'col', ')']
Change the color of the currently selected objects. *color* is represented as a string. Otherwise color can be passed as an rgba tuple of values between 0, 255 Reset the color by passing *color=None*. You can call this function interactively by using:: change_color.interactive() A new dialog will popup with a color chooser.
['Change', 'the', 'color', 'of', 'the', 'currently', 'selected', 'objects', '.', '*', 'color', '*', 'is', 'represented', 'as', 'a', 'string', '.', 'Otherwise', 'color', 'can', 'be', 'passed', 'as', 'an', 'rgba', 'tuple', 'of', 'values', 'between', '0', '255']
train
https://github.com/chemlab/chemlab/blob/c8730966316d101e24f39ac3b96b51282aba0abe/chemlab/mviewer/api/appeareance.py#L85-L113
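Illustrative calls based on the docstring in the record above; they assume an active chemlab viewer session with a current selection:

    change_color('red')              # named color, parsed via color_from_string
    change_color((0, 255, 0, 255))   # rgba tuple with components in the 0-255 range
    change_color(None)               # reset to the default color array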
9,955
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.stop_gradient
def stop_gradient(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a copy of the input fluent with stop_gradient at tensor level. Args: x: The input fluent. Returns: A TensorFluent that stops backpropagation of gradient computations. ''' scope = x.scope.as_list() batch = x.batch return TensorFluent(tf.stop_gradient(x.tensor), scope, batch)
python
def stop_gradient(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a copy of the input fluent with stop_gradient at tensor level. Args: x: The input fluent. Returns: A TensorFluent that stops backpropagation of gradient computations. ''' scope = x.scope.as_list() batch = x.batch return TensorFluent(tf.stop_gradient(x.tensor), scope, batch)
['def', 'stop_gradient', '(', 'cls', ',', 'x', ':', "'TensorFluent'", ')', '->', "'TensorFluent'", ':', 'scope', '=', 'x', '.', 'scope', '.', 'as_list', '(', ')', 'batch', '=', 'x', '.', 'batch', 'return', 'TensorFluent', '(', 'tf', '.', 'stop_gradient', '(', 'x', '.', 'tensor', ')', ',', 'scope', ',', 'batch', ')']
Returns a copy of the input fluent with stop_gradient at tensor level. Args: x: The input fluent. Returns: A TensorFluent that stops backpropagation of gradient computations.
['Returns', 'a', 'copy', 'of', 'the', 'input', 'fluent', 'with', 'stop_gradient', 'at', 'tensor', 'level', '.']
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L256-L267
9,956
priestc/moneywagon
moneywagon/tx.py
Transaction.select_inputs
def select_inputs(self, amount): '''Maximize transaction priority. Select the oldest inputs, that are sufficient to cover the spent amount. Then, remove any unneeded inputs, starting with the smallest in value. Returns sum of amounts of inputs selected''' sorted_txin = sorted(self.ins, key=lambda x:-x['input']['confirmations']) total_amount = 0 for (idx, tx_in) in enumerate(sorted_txin): total_amount += tx_in['input']['amount'] if (total_amount >= amount): break sorted_txin = sorted(sorted_txin[:idx+1], key=lambda x:x['input']['amount']) for (idx, tx_in) in enumerate(sorted_txin): value = tx_in['input']['amount'] if (total_amount - value < amount): break else: total_amount -= value self.ins = sorted_txin[idx:] return total_amount
python
def select_inputs(self, amount): '''Maximize transaction priority. Select the oldest inputs, that are sufficient to cover the spent amount. Then, remove any unneeded inputs, starting with the smallest in value. Returns sum of amounts of inputs selected''' sorted_txin = sorted(self.ins, key=lambda x:-x['input']['confirmations']) total_amount = 0 for (idx, tx_in) in enumerate(sorted_txin): total_amount += tx_in['input']['amount'] if (total_amount >= amount): break sorted_txin = sorted(sorted_txin[:idx+1], key=lambda x:x['input']['amount']) for (idx, tx_in) in enumerate(sorted_txin): value = tx_in['input']['amount'] if (total_amount - value < amount): break else: total_amount -= value self.ins = sorted_txin[idx:] return total_amount
['def', 'select_inputs', '(', 'self', ',', 'amount', ')', ':', 'sorted_txin', '=', 'sorted', '(', 'self', '.', 'ins', ',', 'key', '=', 'lambda', 'x', ':', '-', 'x', '[', "'input'", ']', '[', "'confirmations'", ']', ')', 'total_amount', '=', '0', 'for', '(', 'idx', ',', 'tx_in', ')', 'in', 'enumerate', '(', 'sorted_txin', ')', ':', 'total_amount', '+=', 'tx_in', '[', "'input'", ']', '[', "'amount'", ']', 'if', '(', 'total_amount', '>=', 'amount', ')', ':', 'break', 'sorted_txin', '=', 'sorted', '(', 'sorted_txin', '[', ':', 'idx', '+', '1', ']', ',', 'key', '=', 'lambda', 'x', ':', 'x', '[', "'input'", ']', '[', "'amount'", ']', ')', 'for', '(', 'idx', ',', 'tx_in', ')', 'in', 'enumerate', '(', 'sorted_txin', ')', ':', 'value', '=', 'tx_in', '[', "'input'", ']', '[', "'amount'", ']', 'if', '(', 'total_amount', '-', 'value', '<', 'amount', ')', ':', 'break', 'else', ':', 'total_amount', '-=', 'value', 'self', '.', 'ins', '=', 'sorted_txin', '[', 'idx', ':', ']', 'return', 'total_amount']
Maximize transaction priority. Select the oldest inputs, that are sufficient to cover the spent amount. Then, remove any unneeded inputs, starting with the smallest in value. Returns sum of amounts of inputs selected
['Maximize', 'transaction', 'priority', '.', 'Select', 'the', 'oldest', 'inputs', 'that', 'are', 'sufficient', 'to', 'cover', 'the', 'spent', 'amount', '.', 'Then', 'remove', 'any', 'unneeded', 'inputs', 'starting', 'with', 'the', 'smallest', 'in', 'value', '.', 'Returns', 'sum', 'of', 'amounts', 'of', 'inputs', 'selected']
train
https://github.com/priestc/moneywagon/blob/00518f1f557dcca8b3031f46d3564c2baa0227a3/moneywagon/tx.py#L123-L143
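A rough worked example of the two-pass selection described above, using hypothetical UTXO data on an already-constructed Transaction object tx:

    tx.ins = [
        {'input': {'amount': 1, 'confirmations': 200}},
        {'input': {'amount': 5, 'confirmations': 100}},
        {'input': {'amount': 2, 'confirmations': 50}},
    ]
    total = tx.select_inputs(6)
    # pass 1 (oldest first): 1 + 5 >= 6, so the 50-confirmation input is never selected
    # pass 2 (smallest first): dropping the 1-unit input would leave 5 < 6, so both stay
    # total == 6 and tx.ins now holds the two selected inputs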
9,957
Bogdanp/dramatiq
dramatiq/rate_limits/rate_limiter.py
RateLimiter.acquire
def acquire(self, *, raise_on_failure=True): """Attempt to acquire a slot under this rate limiter. Parameters: raise_on_failure(bool): Whether or not failures should raise an exception. If this is false, the context manager will instead return a boolean value representing whether or not the rate limit slot was acquired. Returns: bool: Whether or not the slot could be acquired. """ acquired = False try: acquired = self._acquire() if raise_on_failure and not acquired: raise RateLimitExceeded("rate limit exceeded for key %(key)r" % vars(self)) yield acquired finally: if acquired: self._release()
python
def acquire(self, *, raise_on_failure=True): """Attempt to acquire a slot under this rate limiter. Parameters: raise_on_failure(bool): Whether or not failures should raise an exception. If this is false, the context manager will instead return a boolean value representing whether or not the rate limit slot was acquired. Returns: bool: Whether or not the slot could be acquired. """ acquired = False try: acquired = self._acquire() if raise_on_failure and not acquired: raise RateLimitExceeded("rate limit exceeded for key %(key)r" % vars(self)) yield acquired finally: if acquired: self._release()
['def', 'acquire', '(', 'self', ',', '*', ',', 'raise_on_failure', '=', 'True', ')', ':', 'acquired', '=', 'False', 'try', ':', 'acquired', '=', 'self', '.', '_acquire', '(', ')', 'if', 'raise_on_failure', 'and', 'not', 'acquired', ':', 'raise', 'RateLimitExceeded', '(', '"rate limit exceeded for key %(key)r"', '%', 'vars', '(', 'self', ')', ')', 'yield', 'acquired', 'finally', ':', 'if', 'acquired', ':', 'self', '.', '_release', '(', ')']
Attempt to acquire a slot under this rate limiter. Parameters: raise_on_failure(bool): Whether or not failures should raise an exception. If this is false, the context manager will instead return a boolean value representing whether or not the rate limit slot was acquired. Returns: bool: Whether or not the slot could be acquired.
['Attempt', 'to', 'acquire', 'a', 'slot', 'under', 'this', 'rate', 'limiter', '.']
train
https://github.com/Bogdanp/dramatiq/blob/a8cc2728478e794952a5a50c3fb19ec455fe91b6/dramatiq/rate_limits/rate_limiter.py#L56-L78
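A usage sketch for the context manager above; the backend and limiter class named here are assumptions about dramatiq's shipped implementations, and do_work is a placeholder:

    from dramatiq.rate_limits import ConcurrentRateLimiter
    from dramatiq.rate_limits.backends import RedisBackend

    backend = RedisBackend()
    mutex = ConcurrentRateLimiter(backend, "distributed-mutex", limit=1)

    with mutex.acquire(raise_on_failure=False) as acquired:
        if acquired:
            do_work()  # placeholder for the rate-limited section
        # otherwise the slot was unavailable; nothing is raised because raise_on_failure=False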
9,958
HDI-Project/ballet
ballet/project.py
Project.pr_num
def pr_num(self): """Return the PR number or None if not on a PR""" result = get_pr_num(repo=self.repo) if result is None: result = get_travis_pr_num() return result
python
def pr_num(self): """Return the PR number or None if not on a PR""" result = get_pr_num(repo=self.repo) if result is None: result = get_travis_pr_num() return result
['def', 'pr_num', '(', 'self', ')', ':', 'result', '=', 'get_pr_num', '(', 'repo', '=', 'self', '.', 'repo', ')', 'if', 'result', 'is', 'None', ':', 'result', '=', 'get_travis_pr_num', '(', ')', 'return', 'result']
Return the PR number or None if not on a PR
['Return', 'the', 'PR', 'number', 'or', 'None', 'if', 'not', 'on', 'a', 'PR']
train
https://github.com/HDI-Project/ballet/blob/6f4d4b87b8234cb6bb38b9e9484a58ef8fe8fdb2/ballet/project.py#L165-L170
9,959
JdeRobot/base
src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py
MAVLink.data64_send
def data64_send(self, type, len, data, force_mavlink1=False): ''' Data packet, size 64 type : data type (uint8_t) len : data length (uint8_t) data : raw data (uint8_t) ''' return self.send(self.data64_encode(type, len, data), force_mavlink1=force_mavlink1)
python
def data64_send(self, type, len, data, force_mavlink1=False): ''' Data packet, size 64 type : data type (uint8_t) len : data length (uint8_t) data : raw data (uint8_t) ''' return self.send(self.data64_encode(type, len, data), force_mavlink1=force_mavlink1)
['def', 'data64_send', '(', 'self', ',', 'type', ',', 'len', ',', 'data', ',', 'force_mavlink1', '=', 'False', ')', ':', 'return', 'self', '.', 'send', '(', 'self', '.', 'data64_encode', '(', 'type', ',', 'len', ',', 'data', ')', ',', 'force_mavlink1', '=', 'force_mavlink1', ')']
Data packet, size 64 type : data type (uint8_t) len : data length (uint8_t) data : raw data (uint8_t)
['Data', 'packet', 'size', '64']
train
https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py#L10273-L10282
9,960
titusjan/argos
argos/widgets/argostreeview.py
ArgosTreeView.expandPath
def expandPath(self, path): """ Follows the path and expand all nodes along the way. Returns (item, index) tuple of the last node in the path (the leaf node). This can be reused e.g. to select it. """ iiPath = self.model().findItemAndIndexPath(path) for (item, index) in iiPath[1:]: # skip invisible root assert index.isValid(), "Sanity check: invalid index in path for item: {}".format(item) self.expand(index) leaf = iiPath[-1] return leaf
python
def expandPath(self, path): """ Follows the path and expand all nodes along the way. Returns (item, index) tuple of the last node in the path (the leaf node). This can be reused e.g. to select it. """ iiPath = self.model().findItemAndIndexPath(path) for (item, index) in iiPath[1:]: # skip invisible root assert index.isValid(), "Sanity check: invalid index in path for item: {}".format(item) self.expand(index) leaf = iiPath[-1] return leaf
['def', 'expandPath', '(', 'self', ',', 'path', ')', ':', 'iiPath', '=', 'self', '.', 'model', '(', ')', '.', 'findItemAndIndexPath', '(', 'path', ')', 'for', '(', 'item', ',', 'index', ')', 'in', 'iiPath', '[', '1', ':', ']', ':', '# skip invisible root', 'assert', 'index', '.', 'isValid', '(', ')', ',', '"Sanity check: invalid index in path for item: {}"', '.', 'format', '(', 'item', ')', 'self', '.', 'expand', '(', 'index', ')', 'leaf', '=', 'iiPath', '[', '-', '1', ']', 'return', 'leaf']
Follows the path and expand all nodes along the way. Returns (item, index) tuple of the last node in the path (the leaf node). This can be reused e.g. to select it.
['Follows', 'the', 'path', 'and', 'expand', 'all', 'nodes', 'along', 'the', 'way', '.', 'Returns', '(', 'item', 'index', ')', 'tuple', 'of', 'the', 'last', 'node', 'in', 'the', 'path', '(', 'the', 'leaf', 'node', ')', '.', 'This', 'can', 'be', 'reused', 'e', '.', 'g', '.', 'to', 'select', 'it', '.']
train
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/widgets/argostreeview.py#L126-L137
9,961
allenai/allennlp
allennlp/models/semantic_parsing/wikitables/wikitables_semantic_parser.py
WikiTablesSemanticParser.decode
def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]: """ This method overrides ``Model.decode``, which gets called after ``Model.forward``, at test time, to finalize predictions. This is (confusingly) a separate notion from the "decoder" in "encoder/decoder", where that decoder logic lives in the ``TransitionFunction``. This method trims the output predictions to the first end symbol, replaces indices with corresponding tokens, and adds a field called ``predicted_tokens`` to the ``output_dict``. """ action_mapping = output_dict['action_mapping'] best_actions = output_dict["best_action_sequence"] debug_infos = output_dict['debug_info'] batch_action_info = [] for batch_index, (predicted_actions, debug_info) in enumerate(zip(best_actions, debug_infos)): instance_action_info = [] for predicted_action, action_debug_info in zip(predicted_actions, debug_info): action_info = {} action_info['predicted_action'] = predicted_action considered_actions = action_debug_info['considered_actions'] probabilities = action_debug_info['probabilities'] actions = [] for action, probability in zip(considered_actions, probabilities): if action != -1: actions.append((action_mapping[(batch_index, action)], probability)) actions.sort() considered_actions, probabilities = zip(*actions) action_info['considered_actions'] = considered_actions action_info['action_probabilities'] = probabilities action_info['question_attention'] = action_debug_info.get('question_attention', []) instance_action_info.append(action_info) batch_action_info.append(instance_action_info) output_dict["predicted_actions"] = batch_action_info return output_dict
python
def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]: """ This method overrides ``Model.decode``, which gets called after ``Model.forward``, at test time, to finalize predictions. This is (confusingly) a separate notion from the "decoder" in "encoder/decoder", where that decoder logic lives in the ``TransitionFunction``. This method trims the output predictions to the first end symbol, replaces indices with corresponding tokens, and adds a field called ``predicted_tokens`` to the ``output_dict``. """ action_mapping = output_dict['action_mapping'] best_actions = output_dict["best_action_sequence"] debug_infos = output_dict['debug_info'] batch_action_info = [] for batch_index, (predicted_actions, debug_info) in enumerate(zip(best_actions, debug_infos)): instance_action_info = [] for predicted_action, action_debug_info in zip(predicted_actions, debug_info): action_info = {} action_info['predicted_action'] = predicted_action considered_actions = action_debug_info['considered_actions'] probabilities = action_debug_info['probabilities'] actions = [] for action, probability in zip(considered_actions, probabilities): if action != -1: actions.append((action_mapping[(batch_index, action)], probability)) actions.sort() considered_actions, probabilities = zip(*actions) action_info['considered_actions'] = considered_actions action_info['action_probabilities'] = probabilities action_info['question_attention'] = action_debug_info.get('question_attention', []) instance_action_info.append(action_info) batch_action_info.append(instance_action_info) output_dict["predicted_actions"] = batch_action_info return output_dict
['def', 'decode', '(', 'self', ',', 'output_dict', ':', 'Dict', '[', 'str', ',', 'torch', '.', 'Tensor', ']', ')', '->', 'Dict', '[', 'str', ',', 'torch', '.', 'Tensor', ']', ':', 'action_mapping', '=', 'output_dict', '[', "'action_mapping'", ']', 'best_actions', '=', 'output_dict', '[', '"best_action_sequence"', ']', 'debug_infos', '=', 'output_dict', '[', "'debug_info'", ']', 'batch_action_info', '=', '[', ']', 'for', 'batch_index', ',', '(', 'predicted_actions', ',', 'debug_info', ')', 'in', 'enumerate', '(', 'zip', '(', 'best_actions', ',', 'debug_infos', ')', ')', ':', 'instance_action_info', '=', '[', ']', 'for', 'predicted_action', ',', 'action_debug_info', 'in', 'zip', '(', 'predicted_actions', ',', 'debug_info', ')', ':', 'action_info', '=', '{', '}', 'action_info', '[', "'predicted_action'", ']', '=', 'predicted_action', 'considered_actions', '=', 'action_debug_info', '[', "'considered_actions'", ']', 'probabilities', '=', 'action_debug_info', '[', "'probabilities'", ']', 'actions', '=', '[', ']', 'for', 'action', ',', 'probability', 'in', 'zip', '(', 'considered_actions', ',', 'probabilities', ')', ':', 'if', 'action', '!=', '-', '1', ':', 'actions', '.', 'append', '(', '(', 'action_mapping', '[', '(', 'batch_index', ',', 'action', ')', ']', ',', 'probability', ')', ')', 'actions', '.', 'sort', '(', ')', 'considered_actions', ',', 'probabilities', '=', 'zip', '(', '*', 'actions', ')', 'action_info', '[', "'considered_actions'", ']', '=', 'considered_actions', 'action_info', '[', "'action_probabilities'", ']', '=', 'probabilities', 'action_info', '[', "'question_attention'", ']', '=', 'action_debug_info', '.', 'get', '(', "'question_attention'", ',', '[', ']', ')', 'instance_action_info', '.', 'append', '(', 'action_info', ')', 'batch_action_info', '.', 'append', '(', 'instance_action_info', ')', 'output_dict', '[', '"predicted_actions"', ']', '=', 'batch_action_info', 'return', 'output_dict']
This method overrides ``Model.decode``, which gets called after ``Model.forward``, at test time, to finalize predictions. This is (confusingly) a separate notion from the "decoder" in "encoder/decoder", where that decoder logic lives in the ``TransitionFunction``. This method trims the output predictions to the first end symbol, replaces indices with corresponding tokens, and adds a field called ``predicted_tokens`` to the ``output_dict``.
['This', 'method', 'overrides', 'Model', '.', 'decode', 'which', 'gets', 'called', 'after', 'Model', '.', 'forward', 'at', 'test', 'time', 'to', 'finalize', 'predictions', '.', 'This', 'is', '(', 'confusingly', ')', 'a', 'separate', 'notion', 'from', 'the', 'decoder', 'in', 'encoder', '/', 'decoder', 'where', 'that', 'decoder', 'logic', 'lives', 'in', 'the', 'TransitionFunction', '.']
train
https://github.com/allenai/allennlp/blob/648a36f77db7e45784c047176074f98534c76636/allennlp/models/semantic_parsing/wikitables/wikitables_semantic_parser.py#L676-L708
9,962
flowersteam/explauto
explauto/sensorimotor_model/inverse/cma.py
CMADataLogger.register
def register(self, es, append=None, modulo=None): """register a `CMAEvolutionStrategy` instance for logging, ``append=True`` appends to previous data logged under the same name, by default previous data are overwritten. """ if not isinstance(es, CMAEvolutionStrategy): raise TypeError("only class CMAEvolutionStrategy can be " + "registered for logging") self.es = es if append is not None: self.append = append if modulo is not None: self.modulo = modulo self.registered = True return self
python
def register(self, es, append=None, modulo=None): """register a `CMAEvolutionStrategy` instance for logging, ``append=True`` appends to previous data logged under the same name, by default previous data are overwritten. """ if not isinstance(es, CMAEvolutionStrategy): raise TypeError("only class CMAEvolutionStrategy can be " + "registered for logging") self.es = es if append is not None: self.append = append if modulo is not None: self.modulo = modulo self.registered = True return self
['def', 'register', '(', 'self', ',', 'es', ',', 'append', '=', 'None', ',', 'modulo', '=', 'None', ')', ':', 'if', 'not', 'isinstance', '(', 'es', ',', 'CMAEvolutionStrategy', ')', ':', 'raise', 'TypeError', '(', '"only class CMAEvolutionStrategy can be "', '+', '"registered for logging"', ')', 'self', '.', 'es', '=', 'es', 'if', 'append', 'is', 'not', 'None', ':', 'self', '.', 'append', '=', 'append', 'if', 'modulo', 'is', 'not', 'None', ':', 'self', '.', 'modulo', '=', 'modulo', 'self', '.', 'registered', '=', 'True', 'return', 'self']
register a `CMAEvolutionStrategy` instance for logging, ``append=True`` appends to previous data logged under the same name, by default previous data are overwritten.
['register', 'a', 'CMAEvolutionStrategy', 'instance', 'for', 'logging', 'append', '=', 'True', 'appends', 'to', 'previous', 'data', 'logged', 'under', 'the', 'same', 'name', 'by', 'default', 'previous', 'data', 'are', 'overwritten', '.']
train
https://github.com/flowersteam/explauto/blob/cf0f81ecb9f6412f7276a95bd27359000e1e26b6/explauto/sensorimotor_model/inverse/cma.py#L5722-L5737
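A short sketch of registering a strategy with the logger above; the CMAEvolutionStrategy arguments (initial point and initial sigma) are illustrative:

    es = CMAEvolutionStrategy(8 * [0.5], 0.3)
    logger = CMADataLogger().register(es)   # register returns self, so calls can be chained
    # ... run the optimization loop, calling logger.add() after each iteration ...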
9,963
glomex/gcdt
gcdt/ramuda_core.py
cleanup_bundle
def cleanup_bundle(): """Deletes files used for creating bundle. * vendored/* * bundle.zip """ paths = ['./vendored', './bundle.zip'] for path in paths: if os.path.exists(path): log.debug("Deleting %s..." % path) if os.path.isdir(path): shutil.rmtree(path) else: os.remove(path)
python
def cleanup_bundle(): """Deletes files used for creating bundle. * vendored/* * bundle.zip """ paths = ['./vendored', './bundle.zip'] for path in paths: if os.path.exists(path): log.debug("Deleting %s..." % path) if os.path.isdir(path): shutil.rmtree(path) else: os.remove(path)
['def', 'cleanup_bundle', '(', ')', ':', 'paths', '=', '[', "'./vendored'", ',', "'./bundle.zip'", ']', 'for', 'path', 'in', 'paths', ':', 'if', 'os', '.', 'path', '.', 'exists', '(', 'path', ')', ':', 'log', '.', 'debug', '(', '"Deleting %s..."', '%', 'path', ')', 'if', 'os', '.', 'path', '.', 'isdir', '(', 'path', ')', ':', 'shutil', '.', 'rmtree', '(', 'path', ')', 'else', ':', 'os', '.', 'remove', '(', 'path', ')']
Deletes files used for creating bundle. * vendored/* * bundle.zip
['Deletes', 'files', 'used', 'for', 'creating', 'bundle', '.', '*', 'vendored', '/', '*', '*', 'bundle', '.', 'zip']
train
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L623-L635
9,964
datastax/python-driver
cassandra/metadata.py
NetworkTopologyStrategy.export_for_schema
def export_for_schema(self): """ Returns a string version of these replication options which are suitable for use in a CREATE KEYSPACE statement. """ ret = "{'class': 'NetworkTopologyStrategy'" for dc, repl_factor in sorted(self.dc_replication_factors.items()): ret += ", '%s': '%d'" % (dc, repl_factor) return ret + "}"
python
def export_for_schema(self): """ Returns a string version of these replication options which are suitable for use in a CREATE KEYSPACE statement. """ ret = "{'class': 'NetworkTopologyStrategy'" for dc, repl_factor in sorted(self.dc_replication_factors.items()): ret += ", '%s': '%d'" % (dc, repl_factor) return ret + "}"
['def', 'export_for_schema', '(', 'self', ')', ':', 'ret', '=', '"{\'class\': \'NetworkTopologyStrategy\'"', 'for', 'dc', ',', 'repl_factor', 'in', 'sorted', '(', 'self', '.', 'dc_replication_factors', '.', 'items', '(', ')', ')', ':', 'ret', '+=', '", \'%s\': \'%d\'"', '%', '(', 'dc', ',', 'repl_factor', ')', 'return', 'ret', '+', '"}"']
Returns a string version of these replication options which are suitable for use in a CREATE KEYSPACE statement.
['Returns', 'a', 'string', 'version', 'of', 'these', 'replication', 'options', 'which', 'are', 'suitable', 'for', 'use', 'in', 'a', 'CREATE', 'KEYSPACE', 'statement', '.']
train
https://github.com/datastax/python-driver/blob/30a80d0b798b1f45f8cb77163b1fa791f3e3ca29/cassandra/metadata.py#L565-L573
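An illustrative call; passing the per-datacenter replication factors as a dict to the constructor is an assumption about NetworkTopologyStrategy, while the output format follows directly from the method above:

    strategy = NetworkTopologyStrategy({'dc1': 3, 'dc2': 2})
    strategy.export_for_schema()
    # "{'class': 'NetworkTopologyStrategy', 'dc1': '3', 'dc2': '2'}"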
9,965
Laufire/ec
ec/modules/config.py
task
def task(__decorated__=None, **Config): r"""A decorator to make tasks out of functions. Config: * name (str): The name of the task. Defaults to __decorated__.__name__. * desc (str): The description of the task (optional). * alias (str): The alias for the task (optional). """ if isinstance(__decorated__, tuple): # the task has some args _Task = Task(__decorated__[0], __decorated__[1], Config=Config) else: _Task = Task(__decorated__, [], Config) state.ActiveModuleMemberQ.insert(0, _Task) return _Task.Underlying
python
def task(__decorated__=None, **Config): r"""A decorator to make tasks out of functions. Config: * name (str): The name of the task. Defaults to __decorated__.__name__. * desc (str): The description of the task (optional). * alias (str): The alias for the task (optional). """ if isinstance(__decorated__, tuple): # the task has some args _Task = Task(__decorated__[0], __decorated__[1], Config=Config) else: _Task = Task(__decorated__, [], Config) state.ActiveModuleMemberQ.insert(0, _Task) return _Task.Underlying
['def', 'task', '(', '__decorated__', '=', 'None', ',', '*', '*', 'Config', ')', ':', 'if', 'isinstance', '(', '__decorated__', ',', 'tuple', ')', ':', '# the task has some args', '_Task', '=', 'Task', '(', '__decorated__', '[', '0', ']', ',', '__decorated__', '[', '1', ']', ',', 'Config', '=', 'Config', ')', 'else', ':', '_Task', '=', 'Task', '(', '__decorated__', ',', '[', ']', ',', 'Config', ')', 'state', '.', 'ActiveModuleMemberQ', '.', 'insert', '(', '0', ',', '_Task', ')', 'return', '_Task', '.', 'Underlying']
r"""A decorator to make tasks out of functions. Config: * name (str): The name of the task. Defaults to __decorated__.__name__. * desc (str): The description of the task (optional). * alias (str): The alias for the task (optional).
['r', 'A', 'decorator', 'to', 'make', 'tasks', 'out', 'of', 'functions', '.']
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/config.py#L37-L53

9,966
theonion/django-bulbs
bulbs/content/serializers.py
ContentTypeField.to_internal_value
def to_internal_value(self, value): """Convert to integer id.""" natural_key = value.split("_") content_type = ContentType.objects.get_by_natural_key(*natural_key) return content_type.id
python
def to_internal_value(self, value): """Convert to integer id.""" natural_key = value.split("_") content_type = ContentType.objects.get_by_natural_key(*natural_key) return content_type.id
['def', 'to_internal_value', '(', 'self', ',', 'value', ')', ':', 'natural_key', '=', 'value', '.', 'split', '(', '"_"', ')', 'content_type', '=', 'ContentType', '.', 'objects', '.', 'get_by_natural_key', '(', '*', 'natural_key', ')', 'return', 'content_type', '.', 'id']
Convert to integer id.
['Convert', 'to', 'integer', 'id', '.']
train
https://github.com/theonion/django-bulbs/blob/0c0e6e3127a7dc487b96677fab95cacd2b3806da/bulbs/content/serializers.py#L24-L28
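An illustrative call, assuming a configured Django project with the auth app installed and that ContentTypeField can be constructed without arguments; the value encodes the natural key as app label and model joined by an underscore:

    ContentTypeField().to_internal_value('auth_user')
    # splits into ('auth', 'user') and returns the id of the matching ContentType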
9,967
TriOptima/tri.table
lib/tri/table/__init__.py
prepare_headers
def prepare_headers(table, bound_columns): """ :type bound_columns: list of BoundColumn """ if table.request is None: return for column in bound_columns: if column.sortable: params = table.request.GET.copy() param_path = _with_path_prefix(table, 'order') order = table.request.GET.get(param_path, None) start_sort_desc = column.sort_default_desc params[param_path] = column.name if not start_sort_desc else '-' + column.name column.is_sorting = False if order is not None: is_desc = order.startswith('-') order_field = order if not is_desc else order[1:] if order_field == column.name: new_order = order_field if is_desc else ('-' + order_field) params[param_path] = new_order column.sort_direction = DESCENDING if is_desc else ASCENDING column.is_sorting = True column.url = "?" + params.urlencode() else: column.is_sorting = False
python
def prepare_headers(table, bound_columns): """ :type bound_columns: list of BoundColumn """ if table.request is None: return for column in bound_columns: if column.sortable: params = table.request.GET.copy() param_path = _with_path_prefix(table, 'order') order = table.request.GET.get(param_path, None) start_sort_desc = column.sort_default_desc params[param_path] = column.name if not start_sort_desc else '-' + column.name column.is_sorting = False if order is not None: is_desc = order.startswith('-') order_field = order if not is_desc else order[1:] if order_field == column.name: new_order = order_field if is_desc else ('-' + order_field) params[param_path] = new_order column.sort_direction = DESCENDING if is_desc else ASCENDING column.is_sorting = True column.url = "?" + params.urlencode() else: column.is_sorting = False
['def', 'prepare_headers', '(', 'table', ',', 'bound_columns', ')', ':', 'if', 'table', '.', 'request', 'is', 'None', ':', 'return', 'for', 'column', 'in', 'bound_columns', ':', 'if', 'column', '.', 'sortable', ':', 'params', '=', 'table', '.', 'request', '.', 'GET', '.', 'copy', '(', ')', 'param_path', '=', '_with_path_prefix', '(', 'table', ',', "'order'", ')', 'order', '=', 'table', '.', 'request', '.', 'GET', '.', 'get', '(', 'param_path', ',', 'None', ')', 'start_sort_desc', '=', 'column', '.', 'sort_default_desc', 'params', '[', 'param_path', ']', '=', 'column', '.', 'name', 'if', 'not', 'start_sort_desc', 'else', "'-'", '+', 'column', '.', 'name', 'column', '.', 'is_sorting', '=', 'False', 'if', 'order', 'is', 'not', 'None', ':', 'is_desc', '=', 'order', '.', 'startswith', '(', "'-'", ')', 'order_field', '=', 'order', 'if', 'not', 'is_desc', 'else', 'order', '[', '1', ':', ']', 'if', 'order_field', '==', 'column', '.', 'name', ':', 'new_order', '=', 'order_field', 'if', 'is_desc', 'else', '(', "'-'", '+', 'order_field', ')', 'params', '[', 'param_path', ']', '=', 'new_order', 'column', '.', 'sort_direction', '=', 'DESCENDING', 'if', 'is_desc', 'else', 'ASCENDING', 'column', '.', 'is_sorting', '=', 'True', 'column', '.', 'url', '=', '"?"', '+', 'params', '.', 'urlencode', '(', ')', 'else', ':', 'column', '.', 'is_sorting', '=', 'False']
:type bound_columns: list of BoundColumn
[':', 'type', 'bound_columns', ':', 'list', 'of', 'BoundColumn']
train
https://github.com/TriOptima/tri.table/blob/fc38c02098a80a3fb336ac4cf502954d74e31484/lib/tri/table/__init__.py#L112-L138
9,968
glenfant/openxmllib
openxmllib/document.py
Document.mimeType
def mimeType(self): """The official MIME type for this document, guessed from the extensions of the :py:attr:`openxmllib.document.Document.filename` attribute, as opposed to the :py:attr:`openxmllib.document.Document.mime_type` attribute. :return: ``application/xxx`` for this file """ if self.mime_type: # Supposed validated by the factory return self.mime_type for pattern, mime_type in self._extpattern_to_mime.items(): if fnmatch.fnmatch(self.filename, pattern): return mime_type
python
def mimeType(self): """The official MIME type for this document, guessed from the extensions of the :py:attr:`openxmllib.document.Document.filename` attribute, as opposed to the :py:attr:`openxmllib.document.Document.mime_type` attribute. :return: ``application/xxx`` for this file """ if self.mime_type: # Supposed validated by the factory return self.mime_type for pattern, mime_type in self._extpattern_to_mime.items(): if fnmatch.fnmatch(self.filename, pattern): return mime_type
['def', 'mimeType', '(', 'self', ')', ':', 'if', 'self', '.', 'mime_type', ':', '# Supposed validated by the factory', 'return', 'self', '.', 'mime_type', 'for', 'pattern', ',', 'mime_type', 'in', 'self', '.', '_extpattern_to_mime', '.', 'items', '(', ')', ':', 'if', 'fnmatch', '.', 'fnmatch', '(', 'self', '.', 'filename', ',', 'pattern', ')', ':', 'return', 'mime_type']
The official MIME type for this document, guessed from the extensions of the :py:attr:`openxmllib.document.Document.filename` attribute, as opposed to the :py:attr:`openxmllib.document.Document.mime_type` attribute. :return: ``application/xxx`` for this file
['The', 'official', 'MIME', 'type', 'for', 'this', 'document', 'guessed', 'from', 'the', 'extensions', 'of', 'the', ':', 'py', ':', 'attr', ':', 'openxmllib', '.', 'document', '.', 'Document', '.', 'filename', 'attribute', 'as', 'opposed', 'to', 'the', ':', 'py', ':', 'attr', ':', 'openxmllib', '.', 'document', '.', 'Document', '.', 'mime_type', 'attribute', '.']
train
https://github.com/glenfant/openxmllib/blob/c8208f8ecd9fc3ef1e73c1db68081a65361afb3f/openxmllib/document.py#L89-L102
9,969
Accelize/pycosio
pycosio/storage/azure_file.py
_AzureFileSystem._make_dir
def _make_dir(self, client_kwargs): """ Make a directory. args: client_kwargs (dict): Client arguments. """ with _handle_azure_exception(): # Directory if 'directory_name' in client_kwargs: return self.client.create_directory( share_name=client_kwargs['share_name'], directory_name=client_kwargs['directory_name']) # Share return self.client.create_share(**client_kwargs)
python
def _make_dir(self, client_kwargs): """ Make a directory. args: client_kwargs (dict): Client arguments. """ with _handle_azure_exception(): # Directory if 'directory_name' in client_kwargs: return self.client.create_directory( share_name=client_kwargs['share_name'], directory_name=client_kwargs['directory_name']) # Share return self.client.create_share(**client_kwargs)
['def', '_make_dir', '(', 'self', ',', 'client_kwargs', ')', ':', 'with', '_handle_azure_exception', '(', ')', ':', '# Directory', 'if', "'directory_name'", 'in', 'client_kwargs', ':', 'return', 'self', '.', 'client', '.', 'create_directory', '(', 'share_name', '=', 'client_kwargs', '[', "'share_name'", ']', ',', 'directory_name', '=', 'client_kwargs', '[', "'directory_name'", ']', ')', '# Share', 'return', 'self', '.', 'client', '.', 'create_share', '(', '*', '*', 'client_kwargs', ')']
Make a directory. args: client_kwargs (dict): Client arguments.
['Make', 'a', 'directory', '.']
train
https://github.com/Accelize/pycosio/blob/1cc1f8fdf5394d92918b7bae2bfa682169ccc48c/pycosio/storage/azure_file.py#L169-L184
9,970
dmlc/gluon-nlp
scripts/machine_translation/bleu.py
_bpe_to_words
def _bpe_to_words(sentence, delimiter='@@'): """Convert a sequence of bpe words into sentence.""" words = [] word = '' delimiter_len = len(delimiter) for subwords in sentence: if len(subwords) >= delimiter_len and subwords[-delimiter_len:] == delimiter: word += subwords[:-delimiter_len] else: word += subwords words.append(word) word = '' return words
python
def _bpe_to_words(sentence, delimiter='@@'): """Convert a sequence of bpe words into sentence.""" words = [] word = '' delimiter_len = len(delimiter) for subwords in sentence: if len(subwords) >= delimiter_len and subwords[-delimiter_len:] == delimiter: word += subwords[:-delimiter_len] else: word += subwords words.append(word) word = '' return words
['def', '_bpe_to_words', '(', 'sentence', ',', 'delimiter', '=', "'@@'", ')', ':', 'words', '=', '[', ']', 'word', '=', "''", 'delimiter_len', '=', 'len', '(', 'delimiter', ')', 'for', 'subwords', 'in', 'sentence', ':', 'if', 'len', '(', 'subwords', ')', '>=', 'delimiter_len', 'and', 'subwords', '[', '-', 'delimiter_len', ':', ']', '==', 'delimiter', ':', 'word', '+=', 'subwords', '[', ':', '-', 'delimiter_len', ']', 'else', ':', 'word', '+=', 'subwords', 'words', '.', 'append', '(', 'word', ')', 'word', '=', "''", 'return', 'words']
Convert a sequence of bpe words into sentence.
['Convert', 'a', 'sequence', 'of', 'bpe', 'words', 'into', 'sentence', '.']
train
https://github.com/dmlc/gluon-nlp/blob/4b83eb6bcc8881e5f1081a3675adaa19fac5c0ba/scripts/machine_translation/bleu.py#L61-L73
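An illustrative call showing how the default '@@' delimiter is stripped and adjacent subwords are merged back into words:

    _bpe_to_words(['The', 'base@@', 'line', 'trans@@', 'lation'])
    # -> ['The', 'baseline', 'translation']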
9,971
StackStorm/pybind
pybind/nos/v6_0_2f/brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/__init__.py
spanning_tree_info._set_stp
def _set_stp(self, v, load=False): """ Setter method for stp, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp (container) If this variable is read-only (config: false) in the source YANG file, then _set_stp is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_stp() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=stp.stp, is_container='container', presence=False, yang_name="stp", rest_name="stp", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """stp must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=stp.stp, is_container='container', presence=False, yang_name="stp", rest_name="stp", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)""", }) self.__stp = t if hasattr(self, '_set'): self._set()
python
def _set_stp(self, v, load=False): """ Setter method for stp, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp (container) If this variable is read-only (config: false) in the source YANG file, then _set_stp is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_stp() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=stp.stp, is_container='container', presence=False, yang_name="stp", rest_name="stp", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """stp must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=stp.stp, is_container='container', presence=False, yang_name="stp", rest_name="stp", parent=self, choice=(u'spanning-tree-mode', u'stp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='container', is_config=True)""", }) self.__stp = t if hasattr(self, '_set'): self._set()
['def', '_set_stp', '(', 'self', ',', 'v', ',', 'load', '=', 'False', ')', ':', 'if', 'hasattr', '(', 'v', ',', '"_utype"', ')', ':', 'v', '=', 'v', '.', '_utype', '(', 'v', ')', 'try', ':', 't', '=', 'YANGDynClass', '(', 'v', ',', 'base', '=', 'stp', '.', 'stp', ',', 'is_container', '=', "'container'", ',', 'presence', '=', 'False', ',', 'yang_name', '=', '"stp"', ',', 'rest_name', '=', '"stp"', ',', 'parent', '=', 'self', ',', 'choice', '=', '(', "u'spanning-tree-mode'", ',', "u'stp'", ')', ',', 'path_helper', '=', 'self', '.', '_path_helper', ',', 'extmethods', '=', 'self', '.', '_extmethods', ',', 'register_paths', '=', 'False', ',', 'extensions', '=', 'None', ',', 'namespace', '=', "'urn:brocade.com:mgmt:brocade-xstp-ext'", ',', 'defining_module', '=', "'brocade-xstp-ext'", ',', 'yang_type', '=', "'container'", ',', 'is_config', '=', 'True', ')', 'except', '(', 'TypeError', ',', 'ValueError', ')', ':', 'raise', 'ValueError', '(', '{', "'error-string'", ':', '"""stp must be of a type compatible with container"""', ',', "'defined-type'", ':', '"container"', ',', "'generated-type'", ':', '"""YANGDynClass(base=stp.stp, is_container=\'container\', presence=False, yang_name="stp", rest_name="stp", parent=self, choice=(u\'spanning-tree-mode\', u\'stp\'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace=\'urn:brocade.com:mgmt:brocade-xstp-ext\', defining_module=\'brocade-xstp-ext\', yang_type=\'container\', is_config=True)"""', ',', '}', ')', 'self', '.', '__stp', '=', 't', 'if', 'hasattr', '(', 'self', ',', "'_set'", ')', ':', 'self', '.', '_set', '(', ')']
Setter method for stp, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/stp (container) If this variable is read-only (config: false) in the source YANG file, then _set_stp is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_stp() directly.
['Setter', 'method', 'for', 'stp', 'mapped', 'from', 'YANG', 'variable', '/', 'brocade_xstp_ext_rpc', '/', 'get_stp_brief_info', '/', 'output', '/', 'spanning_tree_info', '/', 'stp', '(', 'container', ')', 'If', 'this', 'variable', 'is', 'read', '-', 'only', '(', 'config', ':', 'false', ')', 'in', 'the', 'source', 'YANG', 'file', 'then', '_set_stp', 'is', 'considered', 'as', 'a', 'private', 'method', '.', 'Backends', 'looking', 'to', 'populate', 'this', 'variable', 'should', 'do', 'so', 'via', 'calling', 'thisObj', '.', '_set_stp', '()', 'directly', '.']
train
https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/nos/v6_0_2f/brocade_xstp_ext_rpc/get_stp_brief_info/output/spanning_tree_info/__init__.py#L140-L161
9,972
Cog-Creators/Red-Lavalink
lavalink/player_manager.py
Player.handle_player_update
async def handle_player_update(self, state: "node.PlayerState"):
    """
    Handles player updates from lavalink.

    Parameters
    ----------
    state : websocket.PlayerState
    """
    if state.position > self.position:
        self._is_playing = True
    self.position = state.position
python
async def handle_player_update(self, state: "node.PlayerState"): """ Handles player updates from lavalink. Parameters ---------- state : websocket.PlayerState """ if state.position > self.position: self._is_playing = True self.position = state.position
['async', 'def', 'handle_player_update', '(', 'self', ',', 'state', ':', '"node.PlayerState"', ')', ':', 'if', 'state', '.', 'position', '>', 'self', '.', 'position', ':', 'self', '.', '_is_playing', '=', 'True', 'self', '.', 'position', '=', 'state', '.', 'position']
Handles player updates from lavalink. Parameters ---------- state : websocket.PlayerState
['Handles', 'player', 'updates', 'from', 'lavalink', '.']
train
https://github.com/Cog-Creators/Red-Lavalink/blob/5b3fc6eb31ee5db8bd2b633a523cf69749957111/lavalink/player_manager.py#L214-L224
9,973
bruth/django-preserialize
preserialize/utils.py
ModelFieldResolver._get_local_fields
def _get_local_fields(self, model):
    "Return the names of all locally defined fields on the model class."
    local = [f for f in model._meta.fields]
    m2m = [f for f in model._meta.many_to_many]

    fields = local + m2m
    names = tuple([x.name for x in fields])

    return {
        ':local': dict(list(zip(names, fields))),
    }
python
def _get_local_fields(self, model): "Return the names of all locally defined fields on the model class." local = [f for f in model._meta.fields] m2m = [f for f in model._meta.many_to_many] fields = local + m2m names = tuple([x.name for x in fields]) return { ':local': dict(list(zip(names, fields))), }
['def', '_get_local_fields', '(', 'self', ',', 'model', ')', ':', 'local', '=', '[', 'f', 'for', 'f', 'in', 'model', '.', '_meta', '.', 'fields', ']', 'm2m', '=', '[', 'f', 'for', 'f', 'in', 'model', '.', '_meta', '.', 'many_to_many', ']', 'fields', '=', 'local', '+', 'm2m', 'names', '=', 'tuple', '(', '[', 'x', '.', 'name', 'for', 'x', 'in', 'fields', ']', ')', 'return', '{', "':local'", ':', 'dict', '(', 'list', '(', 'zip', '(', 'names', ',', 'fields', ')', ')', ')', ',', '}']
Return the names of all locally defined fields on the model class.
['Return', 'the', 'names', 'of', 'all', 'locally', 'defined', 'fields', 'on', 'the', 'model', 'class', '.']
train
https://github.com/bruth/django-preserialize/blob/d772c224bd8c2c9e9ff997d82c54fe6ebb9444b6/preserialize/utils.py#L42-L51
9,974
totalgood/nlpia
src/nlpia/book/examples/ch04_sklearn_pca_source.py
_fit_full
def _fit_full(self=self, X=X, n_components=6):
    """Fit the model by computing full SVD on X"""
    n_samples, n_features = X.shape

    # Center data
    self.mean_ = np.mean(X, axis=0)
    print(self.mean_)
    X -= self.mean_
    print(X.round(2))

    U, S, V = linalg.svd(X, full_matrices=False)
    print(V.round(2))
    # flip eigenvectors' sign to enforce deterministic output
    U, V = svd_flip(U, V)

    components_ = V
    print(components_.round(2))

    # Get variance explained by singular values
    explained_variance_ = (S ** 2) / (n_samples - 1)
    total_var = explained_variance_.sum()
    explained_variance_ratio_ = explained_variance_ / total_var
    singular_values_ = S.copy()  # Store the singular values.

    # Postprocess the number of components required
    if n_components == 'mle':
        n_components = \
            _infer_dimension_(explained_variance_, n_samples, n_features)
    elif 0 < n_components < 1.0:
        # number of components for which the cumulated explained
        # variance percentage is superior to the desired threshold
        ratio_cumsum = stable_cumsum(explained_variance_ratio_)
        n_components = np.searchsorted(ratio_cumsum, n_components) + 1

    # Compute noise covariance using Probabilistic PCA model
    # The sigma2 maximum likelihood (cf. eq. 12.46)
    if n_components < min(n_features, n_samples):
        self.noise_variance_ = explained_variance_[n_components:].mean()
    else:
        self.noise_variance_ = 0.

    self.n_samples_, self.n_features_ = n_samples, n_features
    self.components_ = components_[:n_components]
    print(self.components_.round(2))
    self.n_components_ = n_components
    self.explained_variance_ = explained_variance_[:n_components]
    self.explained_variance_ratio_ = \
        explained_variance_ratio_[:n_components]
    self.singular_values_ = singular_values_[:n_components]

    return U, S, V
python
def _fit_full(self=self, X=X, n_components=6): """Fit the model by computing full SVD on X""" n_samples, n_features = X.shape # Center data self.mean_ = np.mean(X, axis=0) print(self.mean_) X -= self.mean_ print(X.round(2)) U, S, V = linalg.svd(X, full_matrices=False) print(V.round(2)) # flip eigenvectors' sign to enforce deterministic output U, V = svd_flip(U, V) components_ = V print(components_.round(2)) # Get variance explained by singular values explained_variance_ = (S ** 2) / (n_samples - 1) total_var = explained_variance_.sum() explained_variance_ratio_ = explained_variance_ / total_var singular_values_ = S.copy() # Store the singular values. # Postprocess the number of components required if n_components == 'mle': n_components = \ _infer_dimension_(explained_variance_, n_samples, n_features) elif 0 < n_components < 1.0: # number of components for which the cumulated explained # variance percentage is superior to the desired threshold ratio_cumsum = stable_cumsum(explained_variance_ratio_) n_components = np.searchsorted(ratio_cumsum, n_components) + 1 # Compute noise covariance using Probabilistic PCA model # The sigma2 maximum likelihood (cf. eq. 12.46) if n_components < min(n_features, n_samples): self.noise_variance_ = explained_variance_[n_components:].mean() else: self.noise_variance_ = 0. self.n_samples_, self.n_features_ = n_samples, n_features self.components_ = components_[:n_components] print(self.components_.round(2)) self.n_components_ = n_components self.explained_variance_ = explained_variance_[:n_components] self.explained_variance_ratio_ = \ explained_variance_ratio_[:n_components] self.singular_values_ = singular_values_[:n_components] return U, S, V
['def', '_fit_full', '(', 'self', '=', 'self', ',', 'X', '=', 'X', ',', 'n_components', '=', '6', ')', ':', 'n_samples', ',', 'n_features', '=', 'X', '.', 'shape', '# Center data', 'self', '.', 'mean_', '=', 'np', '.', 'mean', '(', 'X', ',', 'axis', '=', '0', ')', 'print', '(', 'self', '.', 'mean_', ')', 'X', '-=', 'self', '.', 'mean_', 'print', '(', 'X', '.', 'round', '(', '2', ')', ')', 'U', ',', 'S', ',', 'V', '=', 'linalg', '.', 'svd', '(', 'X', ',', 'full_matrices', '=', 'False', ')', 'print', '(', 'V', '.', 'round', '(', '2', ')', ')', "# flip eigenvectors' sign to enforce deterministic output", 'U', ',', 'V', '=', 'svd_flip', '(', 'U', ',', 'V', ')', 'components_', '=', 'V', 'print', '(', 'components_', '.', 'round', '(', '2', ')', ')', '# Get variance explained by singular values', 'explained_variance_', '=', '(', 'S', '**', '2', ')', '/', '(', 'n_samples', '-', '1', ')', 'total_var', '=', 'explained_variance_', '.', 'sum', '(', ')', 'explained_variance_ratio_', '=', 'explained_variance_', '/', 'total_var', 'singular_values_', '=', 'S', '.', 'copy', '(', ')', '# Store the singular values.', '# Postprocess the number of components required', 'if', 'n_components', '==', "'mle'", ':', 'n_components', '=', '_infer_dimension_', '(', 'explained_variance_', ',', 'n_samples', ',', 'n_features', ')', 'elif', '0', '<', 'n_components', '<', '1.0', ':', '# number of components for which the cumulated explained', '# variance percentage is superior to the desired threshold', 'ratio_cumsum', '=', 'stable_cumsum', '(', 'explained_variance_ratio_', ')', 'n_components', '=', 'np', '.', 'searchsorted', '(', 'ratio_cumsum', ',', 'n_components', ')', '+', '1', '# Compute noise covariance using Probabilistic PCA model', '# The sigma2 maximum likelihood (cf. eq. 12.46)', 'if', 'n_components', '<', 'min', '(', 'n_features', ',', 'n_samples', ')', ':', 'self', '.', 'noise_variance_', '=', 'explained_variance_', '[', 'n_components', ':', ']', '.', 'mean', '(', ')', 'else', ':', 'self', '.', 'noise_variance_', '=', '0.', 'self', '.', 'n_samples_', ',', 'self', '.', 'n_features_', '=', 'n_samples', ',', 'n_features', 'self', '.', 'components_', '=', 'components_', '[', ':', 'n_components', ']', 'print', '(', 'self', '.', 'components_', '.', 'round', '(', '2', ')', ')', 'self', '.', 'n_components_', '=', 'n_components', 'self', '.', 'explained_variance_', '=', 'explained_variance_', '[', ':', 'n_components', ']', 'self', '.', 'explained_variance_ratio_', '=', 'explained_variance_ratio_', '[', ':', 'n_components', ']', 'self', '.', 'singular_values_', '=', 'singular_values_', '[', ':', 'n_components', ']', 'return', 'U', ',', 'S', ',', 'V']
Fit the model by computing full SVD on X
['Fit', 'the', 'model', 'by', 'computing', 'full', 'SVD', 'on', 'X']
train
https://github.com/totalgood/nlpia/blob/efa01126275e9cd3c3a5151a644f1c798a9ec53f/src/nlpia/book/examples/ch04_sklearn_pca_source.py#L136-L186
9,975
pypa/setuptools
setuptools/command/easy_install.py
ScriptWriter.best
def best(cls):
    """
    Select the best ScriptWriter for this environment.
    """
    if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
        return WindowsScriptWriter.best()
    else:
        return cls
python
def best(cls): """ Select the best ScriptWriter for this environment. """ if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'): return WindowsScriptWriter.best() else: return cls
['def', 'best', '(', 'cls', ')', ':', 'if', 'sys', '.', 'platform', '==', "'win32'", 'or', '(', 'os', '.', 'name', '==', "'java'", 'and', 'os', '.', '_name', '==', "'nt'", ')', ':', 'return', 'WindowsScriptWriter', '.', 'best', '(', ')', 'else', ':', 'return', 'cls']
Select the best ScriptWriter for this environment.
['Select', 'the', 'best', 'ScriptWriter', 'for', 'this', 'environment', '.']
train
https://github.com/pypa/setuptools/blob/83c667e0b2a98193851c07115d1af65011ed0fb6/setuptools/command/easy_install.py#L2130-L2137
9,976
saltstack/salt
salt/modules/macdefaults.py
write
def write(domain, key, value, type='string', user=None):
    '''
    Write a default to the system

    CLI Example:

    .. code-block:: bash

        salt '*' macdefaults.write com.apple.CrashReporter DialogType Server

        salt '*' macdefaults.write NSGlobalDomain ApplePersistence True type=bool

    domain
        The name of the domain to write to

    key
        The key of the given domain to write to

    value
        The value to write to the given key

    type
        The type of value to be written, valid types are string, data, int[eger],
        float, bool[ean], date, array, array-add, dict, dict-add

    user
        The user to write the defaults to
    '''
    if type == 'bool' or type == 'boolean':
        if value is True:
            value = 'TRUE'
        elif value is False:
            value = 'FALSE'

    cmd = 'defaults write "{0}" "{1}" -{2} "{3}"'.format(domain, key, type, value)

    return __salt__['cmd.run_all'](cmd, runas=user)
python
def write(domain, key, value, type='string', user=None): ''' Write a default to the system CLI Example: .. code-block:: bash salt '*' macdefaults.write com.apple.CrashReporter DialogType Server salt '*' macdefaults.write NSGlobalDomain ApplePersistence True type=bool domain The name of the domain to write to key The key of the given domain to write to value The value to write to the given key type The type of value to be written, valid types are string, data, int[eger], float, bool[ean], date, array, array-add, dict, dict-add user The user to write the defaults to ''' if type == 'bool' or type == 'boolean': if value is True: value = 'TRUE' elif value is False: value = 'FALSE' cmd = 'defaults write "{0}" "{1}" -{2} "{3}"'.format(domain, key, type, value) return __salt__['cmd.run_all'](cmd, runas=user)
['def', 'write', '(', 'domain', ',', 'key', ',', 'value', ',', 'type', '=', "'string'", ',', 'user', '=', 'None', ')', ':', 'if', 'type', '==', "'bool'", 'or', 'type', '==', "'boolean'", ':', 'if', 'value', 'is', 'True', ':', 'value', '=', "'TRUE'", 'elif', 'value', 'is', 'False', ':', 'value', '=', "'FALSE'", 'cmd', '=', '\'defaults write "{0}" "{1}" -{2} "{3}"\'', '.', 'format', '(', 'domain', ',', 'key', ',', 'type', ',', 'value', ')', 'return', '__salt__', '[', "'cmd.run_all'", ']', '(', 'cmd', ',', 'runas', '=', 'user', ')']
Write a default to the system CLI Example: .. code-block:: bash salt '*' macdefaults.write com.apple.CrashReporter DialogType Server salt '*' macdefaults.write NSGlobalDomain ApplePersistence True type=bool domain The name of the domain to write to key The key of the given domain to write to value The value to write to the given key type The type of value to be written, valid types are string, data, int[eger], float, bool[ean], date, array, array-add, dict, dict-add user The user to write the defaults to
['Write', 'a', 'default', 'to', 'the', 'system']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/macdefaults.py#L27-L64
9,977
askedrelic/libgreader
libgreader/auth.py
ClientAuthMethod.post
def post(self, url, postParameters=None, urlParameters=None):
    """
    Convenience method for requesting to google with proper cookies/params.
    """
    if urlParameters:
        url = url + "?" + self.getParameters(urlParameters)

    headers = {'Authorization':'GoogleLogin auth=%s' % self.auth_token,
               'Content-Type': 'application/x-www-form-urlencoded'
               }
    postString = self.postParameters(postParameters)

    req = requests.post(url, data=postString, headers=headers)
    return req.text
python
def post(self, url, postParameters=None, urlParameters=None): """ Convenience method for requesting to google with proper cookies/params. """ if urlParameters: url = url + "?" + self.getParameters(urlParameters) headers = {'Authorization':'GoogleLogin auth=%s' % self.auth_token, 'Content-Type': 'application/x-www-form-urlencoded' } postString = self.postParameters(postParameters) req = requests.post(url, data=postString, headers=headers) return req.text
['def', 'post', '(', 'self', ',', 'url', ',', 'postParameters', '=', 'None', ',', 'urlParameters', '=', 'None', ')', ':', 'if', 'urlParameters', ':', 'url', '=', 'url', '+', '"?"', '+', 'self', '.', 'getParameters', '(', 'urlParameters', ')', 'headers', '=', '{', "'Authorization'", ':', "'GoogleLogin auth=%s'", '%', 'self', '.', 'auth_token', ',', "'Content-Type'", ':', "'application/x-www-form-urlencoded'", '}', 'postString', '=', 'self', '.', 'postParameters', '(', 'postParameters', ')', 'req', '=', 'requests', '.', 'post', '(', 'url', ',', 'data', '=', 'postString', ',', 'headers', '=', 'headers', ')', 'return', 'req', '.', 'text']
Convenience method for requesting to google with proper cookies/params.
['Convenience', 'method', 'for', 'requesting', 'to', 'google', 'with', 'proper', 'cookies', '/', 'params', '.']
train
https://github.com/askedrelic/libgreader/blob/7b668ee291c2464ea172ef44393100c369efa970/libgreader/auth.py#L83-L94
9,978
phoebe-project/phoebe2
phoebe/frontend/bundle.py
Bundle._add_history
def _add_history(self, redo_func, redo_kwargs, undo_func, undo_kwargs, **kwargs):
    """
    Add a new log (undo/redoable) to this history context

    :parameter str redo_func: function to redo the action, must be a
        method of :class:`Bundle`
    :parameter dict redo_kwargs: kwargs to pass to the redo_func.  Each
        item must be serializable (float or str, not objects)
    :parameter str undo_func: function to undo the action, must be a
        method of :class:`Bundle`
    :parameter dict undo_kwargs: kwargs to pass to the undo_func.  Each
        item must be serializable (float or str, not objects)
    :parameter str history: label of the history parameter

    :raises ValueError: if the label for this history item is forbidden
        or already exists
    """
    if not self.history_enabled:
        return

    param = HistoryParameter(self, redo_func, redo_kwargs,
                             undo_func, undo_kwargs)

    metawargs = {'context': 'history',
                 'history': kwargs.get('history', self._default_label('hist', **{'context': 'history'}))}

    self._check_label(metawargs['history'])

    self._attach_params([param], **metawargs)
python
def _add_history(self, redo_func, redo_kwargs, undo_func, undo_kwargs, **kwargs): """ Add a new log (undo/redoable) to this history context :parameter str redo_func: function to redo the action, must be a method of :class:`Bundle` :parameter dict redo_kwargs: kwargs to pass to the redo_func. Each item must be serializable (float or str, not objects) :parameter str undo_func: function to undo the action, must be a method of :class:`Bundle` :parameter dict undo_kwargs: kwargs to pass to the undo_func. Each item must be serializable (float or str, not objects) :parameter str history: label of the history parameter :raises ValueError: if the label for this history item is forbidden or already exists """ if not self.history_enabled: return param = HistoryParameter(self, redo_func, redo_kwargs, undo_func, undo_kwargs) metawargs = {'context': 'history', 'history': kwargs.get('history', self._default_label('hist', **{'context': 'history'}))} self._check_label(metawargs['history']) self._attach_params([param], **metawargs)
['def', '_add_history', '(', 'self', ',', 'redo_func', ',', 'redo_kwargs', ',', 'undo_func', ',', 'undo_kwargs', ',', '*', '*', 'kwargs', ')', ':', 'if', 'not', 'self', '.', 'history_enabled', ':', 'return', 'param', '=', 'HistoryParameter', '(', 'self', ',', 'redo_func', ',', 'redo_kwargs', ',', 'undo_func', ',', 'undo_kwargs', ')', 'metawargs', '=', '{', "'context'", ':', "'history'", ',', "'history'", ':', 'kwargs', '.', 'get', '(', "'history'", ',', 'self', '.', '_default_label', '(', "'hist'", ',', '*', '*', '{', "'context'", ':', "'history'", '}', ')', ')', '}', 'self', '.', '_check_label', '(', 'metawargs', '[', "'history'", ']', ')', 'self', '.', '_attach_params', '(', '[', 'param', ']', ',', '*', '*', 'metawargs', ')']
Add a new log (undo/redoable) to this history context :parameter str redo_func: function to redo the action, must be a method of :class:`Bundle` :parameter dict redo_kwargs: kwargs to pass to the redo_func. Each item must be serializable (float or str, not objects) :parameter str undo_func: function to undo the action, must be a method of :class:`Bundle` :parameter dict undo_kwargs: kwargs to pass to the undo_func. Each item must be serializable (float or str, not objects) :parameter str history: label of the history parameter :raises ValueError: if the label for this history item is forbidden or already exists
['Add', 'a', 'new', 'log', '(', 'undo', '/', 'redoable', ')', 'to', 'this', 'history', 'context']
train
https://github.com/phoebe-project/phoebe2/blob/e64b8be683977064e2d55dd1b3ac400f64c3e379/phoebe/frontend/bundle.py#L811-L839
9,979
openstack/hacking
hacking/checks/docstrings.py
is_docstring
def is_docstring(tokens, previous_logical):
    """Return found docstring

    'A docstring is a string literal that occurs as the first statement in a
    module, function, class,'

    http://www.python.org/dev/peps/pep-0257/#what-is-a-docstring
    """
    for token_type, text, start, _, _ in tokens:
        if token_type == tokenize.STRING:
            break
        elif token_type != tokenize.INDENT:
            return False
    else:
        return False

    line = text.lstrip()
    start, start_triple = _find_first_of(line, START_DOCSTRING_TRIPLE)
    if (previous_logical.startswith("def ") or
            previous_logical.startswith("class ")):
        if start == 0:
            return text
python
def is_docstring(tokens, previous_logical): """Return found docstring 'A docstring is a string literal that occurs as the first statement in a module, function, class,' http://www.python.org/dev/peps/pep-0257/#what-is-a-docstring """ for token_type, text, start, _, _ in tokens: if token_type == tokenize.STRING: break elif token_type != tokenize.INDENT: return False else: return False line = text.lstrip() start, start_triple = _find_first_of(line, START_DOCSTRING_TRIPLE) if (previous_logical.startswith("def ") or previous_logical.startswith("class ")): if start == 0: return text
['def', 'is_docstring', '(', 'tokens', ',', 'previous_logical', ')', ':', 'for', 'token_type', ',', 'text', ',', 'start', ',', '_', ',', '_', 'in', 'tokens', ':', 'if', 'token_type', '==', 'tokenize', '.', 'STRING', ':', 'break', 'elif', 'token_type', '!=', 'tokenize', '.', 'INDENT', ':', 'return', 'False', 'else', ':', 'return', 'False', 'line', '=', 'text', '.', 'lstrip', '(', ')', 'start', ',', 'start_triple', '=', '_find_first_of', '(', 'line', ',', 'START_DOCSTRING_TRIPLE', ')', 'if', '(', 'previous_logical', '.', 'startswith', '(', '"def "', ')', 'or', 'previous_logical', '.', 'startswith', '(', '"class "', ')', ')', ':', 'if', 'start', '==', '0', ':', 'return', 'text']
Return found docstring 'A docstring is a string literal that occurs as the first statement in a module, function, class,' http://www.python.org/dev/peps/pep-0257/#what-is-a-docstring
['Return', 'found', 'docstring']
train
https://github.com/openstack/hacking/blob/10e58f907181cac91d3b2af422c2458b04a1ec79/hacking/checks/docstrings.py#L128-L147
9,980
jantman/pypi-download-stats
pypi_download_stats/dataquery.py
DataQuery._query_by_installer
def _query_by_installer(self, table_name):
    """
    Query for download data broken down by installer, for one day.

    :param table_name: table name to query against
    :type table_name: str
    :return: dict of download information by installer; keys are project
      name, values are a dict of installer names to dicts of installer
      version to download count.
    :rtype: dict
    """
    logger.info('Querying for downloads by installer in table %s',
                table_name)
    q = "SELECT file.project, details.installer.name, " \
        "details.installer.version, COUNT(*) as dl_count " \
        "%s " \
        "%s " \
        "GROUP BY file.project, details.installer.name, " \
        "details.installer.version;" % (
            self._from_for_table(table_name),
            self._where_for_projects
        )
    res = self._run_query(q)
    result = self._dict_for_projects()
    # iterate through results
    for row in res:
        # pointer to the per-project result dict
        proj = result[row['file_project']]
        # grab the name and version; change None to 'unknown'
        iname = row['details_installer_name']
        iver = row['details_installer_version']
        if iname not in proj:
            proj[iname] = {}
        if iver not in proj[iname]:
            proj[iname][iver] = 0
        proj[iname][iver] += int(row['dl_count'])
    return result
python
def _query_by_installer(self, table_name): """ Query for download data broken down by installer, for one day. :param table_name: table name to query against :type table_name: str :return: dict of download information by installer; keys are project name, values are a dict of installer names to dicts of installer version to download count. :rtype: dict """ logger.info('Querying for downloads by installer in table %s', table_name) q = "SELECT file.project, details.installer.name, " \ "details.installer.version, COUNT(*) as dl_count " \ "%s " \ "%s " \ "GROUP BY file.project, details.installer.name, " \ "details.installer.version;" % ( self._from_for_table(table_name), self._where_for_projects ) res = self._run_query(q) result = self._dict_for_projects() # iterate through results for row in res: # pointer to the per-project result dict proj = result[row['file_project']] # grab the name and version; change None to 'unknown' iname = row['details_installer_name'] iver = row['details_installer_version'] if iname not in proj: proj[iname] = {} if iver not in proj[iname]: proj[iname][iver] = 0 proj[iname][iver] += int(row['dl_count']) return result
['def', '_query_by_installer', '(', 'self', ',', 'table_name', ')', ':', 'logger', '.', 'info', '(', "'Querying for downloads by installer in table %s'", ',', 'table_name', ')', 'q', '=', '"SELECT file.project, details.installer.name, "', '"details.installer.version, COUNT(*) as dl_count "', '"%s "', '"%s "', '"GROUP BY file.project, details.installer.name, "', '"details.installer.version;"', '%', '(', 'self', '.', '_from_for_table', '(', 'table_name', ')', ',', 'self', '.', '_where_for_projects', ')', 'res', '=', 'self', '.', '_run_query', '(', 'q', ')', 'result', '=', 'self', '.', '_dict_for_projects', '(', ')', '# iterate through results', 'for', 'row', 'in', 'res', ':', '# pointer to the per-project result dict', 'proj', '=', 'result', '[', 'row', '[', "'file_project'", ']', ']', "# grab the name and version; change None to 'unknown'", 'iname', '=', 'row', '[', "'details_installer_name'", ']', 'iver', '=', 'row', '[', "'details_installer_version'", ']', 'if', 'iname', 'not', 'in', 'proj', ':', 'proj', '[', 'iname', ']', '=', '{', '}', 'if', 'iver', 'not', 'in', 'proj', '[', 'iname', ']', ':', 'proj', '[', 'iname', ']', '[', 'iver', ']', '=', '0', 'proj', '[', 'iname', ']', '[', 'iver', ']', '+=', 'int', '(', 'row', '[', "'dl_count'", ']', ')', 'return', 'result']
Query for download data broken down by installer, for one day. :param table_name: table name to query against :type table_name: str :return: dict of download information by installer; keys are project name, values are a dict of installer names to dicts of installer version to download count. :rtype: dict
['Query', 'for', 'download', 'data', 'broken', 'down', 'by', 'installer', 'for', 'one', 'day', '.']
train
https://github.com/jantman/pypi-download-stats/blob/44a7a6bbcd61a9e7f02bd02c52584a98183f80c5/pypi_download_stats/dataquery.py#L320-L356
9,981
axltxl/m2bk
m2bk/config.py
set_entry
def set_entry(key, value):
    """
    Set a configuration entry

    :param key: key name
    :param value: value for this key
    :raises KeyError: if key is not str
    """
    if type(key) != str:
        raise KeyError('key must be str')
    _config[key] = value
python
def set_entry(key, value): """ Set a configuration entry :param key: key name :param value: value for this key :raises KeyError: if key is not str """ if type(key) != str: raise KeyError('key must be str') _config[key] = value
['def', 'set_entry', '(', 'key', ',', 'value', ')', ':', 'if', 'type', '(', 'key', ')', '!=', 'str', ':', 'raise', 'KeyError', '(', "'key must be str'", ')', '_config', '[', 'key', ']', '=', 'value']
Set a configuration entry :param key: key name :param value: value for this key :raises KeyError: if key is not str
['Set', 'a', 'configuration', 'entry']
train
https://github.com/axltxl/m2bk/blob/980083dfd17e6e783753a946e9aa809714551141/m2bk/config.py#L62-L72
9,982
dmlc/gluon-nlp
src/gluonnlp/data/transforms.py
BERTBasicTokenizer._is_control
def _is_control(self, char):
    """Checks whether `chars` is a control character."""
    # These are technically control characters but we count them as whitespace
    # characters.
    if char in ['\t', '\n', '\r']:
        return False
    cat = unicodedata.category(char)
    if cat.startswith('C'):
        return True
    return False
python
def _is_control(self, char): """Checks whether `chars` is a control character.""" # These are technically control characters but we count them as whitespace # characters. if char in ['\t', '\n', '\r']: return False cat = unicodedata.category(char) if cat.startswith('C'): return True return False
['def', '_is_control', '(', 'self', ',', 'char', ')', ':', '# These are technically control characters but we count them as whitespace', '# characters.', 'if', 'char', 'in', '[', "'\\t'", ',', "'\\n'", ',', "'\\r'", ']', ':', 'return', 'False', 'cat', '=', 'unicodedata', '.', 'category', '(', 'char', ')', 'if', 'cat', '.', 'startswith', '(', "'C'", ')', ':', 'return', 'True', 'return', 'False']
Checks whether `chars` is a control character.
['Checks', 'whether', 'chars', 'is', 'a', 'control', 'character', '.']
train
https://github.com/dmlc/gluon-nlp/blob/4b83eb6bcc8881e5f1081a3675adaa19fac5c0ba/src/gluonnlp/data/transforms.py#L793-L802
9,983
johnnoone/aioconsul
aioconsul/client/query_endpoint.py
QueryEndpoint.items
async def items(self, *, dc=None, watch=None, consistency=None):
    """Provides a listing of all prepared queries

    Parameters:
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
        watch (Blocking): Do a blocking query
        consistency (Consistency): Force consistency
    Returns:
        Collection: List of prepared queries

    This returns a list of prepared queries, which looks like::

        [
            {
                "ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05",
                "Name": "my-query",
                "Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
                "Token": "<hidden>",
                "Service": {
                    "Service": "redis",
                    "Failover": {
                        "NearestN": 3,
                        "Datacenters": ["dc1", "dc2"]
                    },
                    "OnlyPassing": False,
                    "Tags": ["master", "!experimental"]
                },
                "DNS": {
                    "TTL": timedelta(seconds=10)
                },
                "RaftIndex": {
                    "CreateIndex": 23,
                    "ModifyIndex": 42
                }
            }
        ]
    """
    response = await self._api.get("/v1/query", params={"dc": dc})
    return response.body
python
async def items(self, *, dc=None, watch=None, consistency=None): """Provides a listing of all prepared queries Parameters: dc (str): Specify datacenter that will be used. Defaults to the agent's local datacenter. watch (Blocking): Do a blocking query consistency (Consistency): Force consistency Returns: Collection: List of prepared queries This returns a list of prepared queries, which looks like:: [ { "ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05", "Name": "my-query", "Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e", "Token": "<hidden>", "Service": { "Service": "redis", "Failover": { "NearestN": 3, "Datacenters": ["dc1", "dc2"] }, "OnlyPassing": False, "Tags": ["master", "!experimental"] }, "DNS": { "TTL": timedelta(seconds=10) }, "RaftIndex": { "CreateIndex": 23, "ModifyIndex": 42 } } ] """ response = await self._api.get("/v1/query", params={"dc": dc}) return response.body
['async', 'def', 'items', '(', 'self', ',', '*', ',', 'dc', '=', 'None', ',', 'watch', '=', 'None', ',', 'consistency', '=', 'None', ')', ':', 'response', '=', 'await', 'self', '.', '_api', '.', 'get', '(', '"/v1/query"', ',', 'params', '=', '{', '"dc"', ':', 'dc', '}', ')', 'return', 'response', '.', 'body']
Provides a listing of all prepared queries Parameters: dc (str): Specify datacenter that will be used. Defaults to the agent's local datacenter. watch (Blocking): Do a blocking query consistency (Consistency): Force consistency Returns: Collection: List of prepared queries This returns a list of prepared queries, which looks like:: [ { "ID": "8f246b77-f3e1-ff88-5b48-8ec93abf3e05", "Name": "my-query", "Session": "adf4238a-882b-9ddc-4a9d-5b6758e4159e", "Token": "<hidden>", "Service": { "Service": "redis", "Failover": { "NearestN": 3, "Datacenters": ["dc1", "dc2"] }, "OnlyPassing": False, "Tags": ["master", "!experimental"] }, "DNS": { "TTL": timedelta(seconds=10) }, "RaftIndex": { "CreateIndex": 23, "ModifyIndex": 42 } } ]
['Provides', 'a', 'listing', 'of', 'all', 'prepared', 'queries']
train
https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/query_endpoint.py#L15-L54
9,984
saltstack/salt
salt/modules/boto_kms.py
decrypt
def decrypt(ciphertext_blob, encryption_context=None, grant_tokens=None,
            region=None, key=None, keyid=None, profile=None):
    '''
    Decrypt ciphertext.

    CLI example::

        salt myminion boto_kms.decrypt encrypted_ciphertext
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)

    r = {}
    try:
        plaintext = conn.decrypt(
            ciphertext_blob,
            encryption_context=encryption_context,
            grant_tokens=grant_tokens
        )
        r['plaintext'] = plaintext['Plaintext']
    except boto.exception.BotoServerError as e:
        r['error'] = __utils__['boto.get_error'](e)
    return r
python
def decrypt(ciphertext_blob, encryption_context=None, grant_tokens=None, region=None, key=None, keyid=None, profile=None): ''' Decrypt ciphertext. CLI example:: salt myminion boto_kms.decrypt encrypted_ciphertext ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) r = {} try: plaintext = conn.decrypt( ciphertext_blob, encryption_context=encryption_context, grant_tokens=grant_tokens ) r['plaintext'] = plaintext['Plaintext'] except boto.exception.BotoServerError as e: r['error'] = __utils__['boto.get_error'](e) return r
['def', 'decrypt', '(', 'ciphertext_blob', ',', 'encryption_context', '=', 'None', ',', 'grant_tokens', '=', 'None', ',', 'region', '=', 'None', ',', 'key', '=', 'None', ',', 'keyid', '=', 'None', ',', 'profile', '=', 'None', ')', ':', 'conn', '=', '_get_conn', '(', 'region', '=', 'region', ',', 'key', '=', 'key', ',', 'keyid', '=', 'keyid', ',', 'profile', '=', 'profile', ')', 'r', '=', '{', '}', 'try', ':', 'plaintext', '=', 'conn', '.', 'decrypt', '(', 'ciphertext_blob', ',', 'encryption_context', '=', 'encryption_context', ',', 'grant_tokens', '=', 'grant_tokens', ')', 'r', '[', "'plaintext'", ']', '=', 'plaintext', '[', "'Plaintext'", ']', 'except', 'boto', '.', 'exception', '.', 'BotoServerError', 'as', 'e', ':', 'r', '[', "'error'", ']', '=', '__utils__', '[', "'boto.get_error'", ']', '(', 'e', ')', 'return', 'r']
Decrypt ciphertext. CLI example:: salt myminion boto_kms.decrypt encrypted_ciphertext
['Decrypt', 'ciphertext', '.']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/boto_kms.py#L156-L177
9,985
kallimachos/chios
chios/remoteinclude/__init__.py
setup
def setup(app):
    """
    Setup for Sphinx extension.

    :param app: Sphinx application context.
    """
    app.info('adding remote-include directive...', nonl=True)
    app.add_directive('remote-include', RemoteInclude)
    app.info(' done')
    return {
        'version': __version__,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
python
def setup(app): """ Setup for Sphinx extension. :param app: Sphinx application context. """ app.info('adding remote-include directive...', nonl=True) app.add_directive('remote-include', RemoteInclude) app.info(' done') return { 'version': __version__, 'parallel_read_safe': True, 'parallel_write_safe': True, }
['def', 'setup', '(', 'app', ')', ':', 'app', '.', 'info', '(', "'adding remote-include directive...'", ',', 'nonl', '=', 'True', ')', 'app', '.', 'add_directive', '(', "'remote-include'", ',', 'RemoteInclude', ')', 'app', '.', 'info', '(', "' done'", ')', 'return', '{', "'version'", ':', '__version__', ',', "'parallel_read_safe'", ':', 'True', ',', "'parallel_write_safe'", ':', 'True', ',', '}']
Setup for Sphinx extension. :param app: Sphinx application context.
['Setup', 'for', 'Sphinx', 'extension', '.']
train
https://github.com/kallimachos/chios/blob/e14044e4019d57089c625d4ad2f73ccb66b8b7b8/chios/remoteinclude/__init__.py#L60-L73
9,986
HDI-Project/BTB
btb/selection/pure.py
PureBestKVelocity.compute_rewards
def compute_rewards(self, scores):
    """
    Compute the "velocity" of (average distance between) the k+1 best
    scores. Return a list with those k velocities padded out with zeros
    so that the count remains the same.
    """
    # get the k + 1 best scores in descending order
    best_scores = sorted(scores, reverse=True)[:self.k + 1]
    velocities = [best_scores[i] - best_scores[i + 1]
                  for i in range(len(best_scores) - 1)]

    # pad the list out with zeros to maintain the length of the list
    zeros = (len(scores) - self.k) * [0]
    return velocities + zeros
python
def compute_rewards(self, scores): """ Compute the "velocity" of (average distance between) the k+1 best scores. Return a list with those k velocities padded out with zeros so that the count remains the same. """ # get the k + 1 best scores in descending order best_scores = sorted(scores, reverse=True)[:self.k + 1] velocities = [best_scores[i] - best_scores[i + 1] for i in range(len(best_scores) - 1)] # pad the list out with zeros to maintain the length of the list zeros = (len(scores) - self.k) * [0] return velocities + zeros
['def', 'compute_rewards', '(', 'self', ',', 'scores', ')', ':', '# get the k + 1 best scores in descending order', 'best_scores', '=', 'sorted', '(', 'scores', ',', 'reverse', '=', 'True', ')', '[', ':', 'self', '.', 'k', '+', '1', ']', 'velocities', '=', '[', 'best_scores', '[', 'i', ']', '-', 'best_scores', '[', 'i', '+', '1', ']', 'for', 'i', 'in', 'range', '(', 'len', '(', 'best_scores', ')', '-', '1', ')', ']', '# pad the list out with zeros to maintain the length of the list', 'zeros', '=', '(', 'len', '(', 'scores', ')', '-', 'self', '.', 'k', ')', '*', '[', '0', ']', 'return', 'velocities', '+', 'zeros']
Compute the "velocity" of (average distance between) the k+1 best scores. Return a list with those k velocities padded out with zeros so that the count remains the same.
['Compute', 'the', 'velocity', 'of', '(', 'average', 'distance', 'between', ')', 'the', 'k', '+', '1', 'best', 'scores', '.', 'Return', 'a', 'list', 'with', 'those', 'k', 'velocities', 'padded', 'out', 'with', 'zeros', 'so', 'that', 'the', 'count', 'remains', 'the', 'same', '.']
train
https://github.com/HDI-Project/BTB/blob/7f489ebc5591bd0886652ef743098c022d7f7460/btb/selection/pure.py#L23-L36
9,987
python-gitlab/python-gitlab
gitlab/mixins.py
UpdateMixin.update
def update(self, id=None, new_data={}, **kwargs):
    """Update an object on the server.

    Args:
        id: ID of the object to update (can be None if not required)
        new_data: the update data for the object
        **kwargs: Extra options to send to the server (e.g. sudo)

    Returns:
        dict: The new object data (*not* a RESTObject)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabUpdateError: If the server cannot perform the request
    """

    if id is None:
        path = self.path
    else:
        path = '%s/%s' % (self.path, id)

    self._check_missing_update_attrs(new_data)
    files = {}

    # We get the attributes that need some special transformation
    types = getattr(self, '_types', {})
    if types:
        # Duplicate data to avoid messing with what the user sent us
        new_data = new_data.copy()
        for attr_name, type_cls in types.items():
            if attr_name in new_data.keys():
                type_obj = type_cls(new_data[attr_name])

                # if the type if FileAttribute we need to pass the data as
                # file
                if issubclass(type_cls, g_types.FileAttribute):
                    k = type_obj.get_file_name(attr_name)
                    files[attr_name] = (k, new_data.pop(attr_name))
                else:
                    new_data[attr_name] = type_obj.get_for_api()

    http_method = self._get_update_method()
    return http_method(path, post_data=new_data, files=files, **kwargs)
python
def update(self, id=None, new_data={}, **kwargs): """Update an object on the server. Args: id: ID of the object to update (can be None if not required) new_data: the update data for the object **kwargs: Extra options to send to the server (e.g. sudo) Returns: dict: The new object data (*not* a RESTObject) Raises: GitlabAuthenticationError: If authentication is not correct GitlabUpdateError: If the server cannot perform the request """ if id is None: path = self.path else: path = '%s/%s' % (self.path, id) self._check_missing_update_attrs(new_data) files = {} # We get the attributes that need some special transformation types = getattr(self, '_types', {}) if types: # Duplicate data to avoid messing with what the user sent us new_data = new_data.copy() for attr_name, type_cls in types.items(): if attr_name in new_data.keys(): type_obj = type_cls(new_data[attr_name]) # if the type if FileAttribute we need to pass the data as # file if issubclass(type_cls, g_types.FileAttribute): k = type_obj.get_file_name(attr_name) files[attr_name] = (k, new_data.pop(attr_name)) else: new_data[attr_name] = type_obj.get_for_api() http_method = self._get_update_method() return http_method(path, post_data=new_data, files=files, **kwargs)
['def', 'update', '(', 'self', ',', 'id', '=', 'None', ',', 'new_data', '=', '{', '}', ',', '*', '*', 'kwargs', ')', ':', 'if', 'id', 'is', 'None', ':', 'path', '=', 'self', '.', 'path', 'else', ':', 'path', '=', "'%s/%s'", '%', '(', 'self', '.', 'path', ',', 'id', ')', 'self', '.', '_check_missing_update_attrs', '(', 'new_data', ')', 'files', '=', '{', '}', '# We get the attributes that need some special transformation', 'types', '=', 'getattr', '(', 'self', ',', "'_types'", ',', '{', '}', ')', 'if', 'types', ':', '# Duplicate data to avoid messing with what the user sent us', 'new_data', '=', 'new_data', '.', 'copy', '(', ')', 'for', 'attr_name', ',', 'type_cls', 'in', 'types', '.', 'items', '(', ')', ':', 'if', 'attr_name', 'in', 'new_data', '.', 'keys', '(', ')', ':', 'type_obj', '=', 'type_cls', '(', 'new_data', '[', 'attr_name', ']', ')', '# if the type if FileAttribute we need to pass the data as', '# file', 'if', 'issubclass', '(', 'type_cls', ',', 'g_types', '.', 'FileAttribute', ')', ':', 'k', '=', 'type_obj', '.', 'get_file_name', '(', 'attr_name', ')', 'files', '[', 'attr_name', ']', '=', '(', 'k', ',', 'new_data', '.', 'pop', '(', 'attr_name', ')', ')', 'else', ':', 'new_data', '[', 'attr_name', ']', '=', 'type_obj', '.', 'get_for_api', '(', ')', 'http_method', '=', 'self', '.', '_get_update_method', '(', ')', 'return', 'http_method', '(', 'path', ',', 'post_data', '=', 'new_data', ',', 'files', '=', 'files', ',', '*', '*', 'kwargs', ')']
Update an object on the server. Args: id: ID of the object to update (can be None if not required) new_data: the update data for the object **kwargs: Extra options to send to the server (e.g. sudo) Returns: dict: The new object data (*not* a RESTObject) Raises: GitlabAuthenticationError: If authentication is not correct GitlabUpdateError: If the server cannot perform the request
['Update', 'an', 'object', 'on', 'the', 'server', '.']
train
https://github.com/python-gitlab/python-gitlab/blob/16de1b03fde3dbbe8f851614dd1d8c09de102fe5/gitlab/mixins.py#L241-L283
9,988
ml4ai/delphi
delphi/GrFN/networks.py
GroundedFunctionNetwork.to_FIB
def to_FIB(self, other):
    """ Creates a ForwardInfluenceBlanket object representing the
    intersection of this model with the other input model.

    Args:
        other: The GroundedFunctionNetwork object to compare this model to.

    Returns:
        A ForwardInfluenceBlanket object to use for model comparison.
    """

    if not isinstance(other, GroundedFunctionNetwork):
        raise TypeError(
            f"Expected GroundedFunctionNetwork, but got {type(other)}"
        )

    def shortname(var):
        return var[var.find("::") + 2 : var.rfind("_")]

    def shortname_vars(graph, shortname):
        return [v for v in graph.nodes() if shortname in v]

    this_var_nodes = [
        shortname(n)
        for (n, d) in self.nodes(data=True)
        if d["type"] == "variable"
    ]
    other_var_nodes = [
        shortname(n)
        for (n, d) in other.nodes(data=True)
        if d["type"] == "variable"
    ]

    shared_vars = set(this_var_nodes).intersection(set(other_var_nodes))
    full_shared_vars = {
        full_var
        for shared_var in shared_vars
        for full_var in shortname_vars(self, shared_var)
    }

    return ForwardInfluenceBlanket(self, full_shared_vars)
python
def to_FIB(self, other): """ Creates a ForwardInfluenceBlanket object representing the intersection of this model with the other input model. Args: other: The GroundedFunctionNetwork object to compare this model to. Returns: A ForwardInfluenceBlanket object to use for model comparison. """ if not isinstance(other, GroundedFunctionNetwork): raise TypeError( f"Expected GroundedFunctionNetwork, but got {type(other)}" ) def shortname(var): return var[var.find("::") + 2 : var.rfind("_")] def shortname_vars(graph, shortname): return [v for v in graph.nodes() if shortname in v] this_var_nodes = [ shortname(n) for (n, d) in self.nodes(data=True) if d["type"] == "variable" ] other_var_nodes = [ shortname(n) for (n, d) in other.nodes(data=True) if d["type"] == "variable" ] shared_vars = set(this_var_nodes).intersection(set(other_var_nodes)) full_shared_vars = { full_var for shared_var in shared_vars for full_var in shortname_vars(self, shared_var) } return ForwardInfluenceBlanket(self, full_shared_vars)
['def', 'to_FIB', '(', 'self', ',', 'other', ')', ':', 'if', 'not', 'isinstance', '(', 'other', ',', 'GroundedFunctionNetwork', ')', ':', 'raise', 'TypeError', '(', 'f"Expected GroundedFunctionNetwork, but got {type(other)}"', ')', 'def', 'shortname', '(', 'var', ')', ':', 'return', 'var', '[', 'var', '.', 'find', '(', '"::"', ')', '+', '2', ':', 'var', '.', 'rfind', '(', '"_"', ')', ']', 'def', 'shortname_vars', '(', 'graph', ',', 'shortname', ')', ':', 'return', '[', 'v', 'for', 'v', 'in', 'graph', '.', 'nodes', '(', ')', 'if', 'shortname', 'in', 'v', ']', 'this_var_nodes', '=', '[', 'shortname', '(', 'n', ')', 'for', '(', 'n', ',', 'd', ')', 'in', 'self', '.', 'nodes', '(', 'data', '=', 'True', ')', 'if', 'd', '[', '"type"', ']', '==', '"variable"', ']', 'other_var_nodes', '=', '[', 'shortname', '(', 'n', ')', 'for', '(', 'n', ',', 'd', ')', 'in', 'other', '.', 'nodes', '(', 'data', '=', 'True', ')', 'if', 'd', '[', '"type"', ']', '==', '"variable"', ']', 'shared_vars', '=', 'set', '(', 'this_var_nodes', ')', '.', 'intersection', '(', 'set', '(', 'other_var_nodes', ')', ')', 'full_shared_vars', '=', '{', 'full_var', 'for', 'shared_var', 'in', 'shared_vars', 'for', 'full_var', 'in', 'shortname_vars', '(', 'self', ',', 'shared_var', ')', '}', 'return', 'ForwardInfluenceBlanket', '(', 'self', ',', 'full_shared_vars', ')']
Creates a ForwardInfluenceBlanket object representing the intersection of this model with the other input model. Args: other: The GroundedFunctionNetwork object to compare this model to. Returns: A ForwardInfluenceBlanket object to use for model comparison.
['Creates', 'a', 'ForwardInfluenceBlanket', 'object', 'representing', 'the', 'intersection', 'of', 'this', 'model', 'with', 'the', 'other', 'input', 'model', '.']
train
https://github.com/ml4ai/delphi/blob/6d03d8aafeab99610387c51b89c99738ff2abbe3/delphi/GrFN/networks.py#L550-L590
9,989
TaurusOlson/fntools
fntools/fntools.py
valueof
def valueof(records, key):
    """Extract the value corresponding to the given key in all the dictionaries

    >>> bands = [{'name': 'Led Zeppelin', 'singer': 'Robert Plant', 'guitarist': 'Jimmy Page'},
    ...          {'name': 'Metallica', 'singer': 'James Hetfield', 'guitarist': 'Kirk Hammet'}]
    >>> valueof(bands, 'singer')
    ['Robert Plant', 'James Hetfield']

    """
    if isinstance(records, dict):
        records = [records]
    return map(operator.itemgetter(key), records)
python
def valueof(records, key): """Extract the value corresponding to the given key in all the dictionaries >>> bands = [{'name': 'Led Zeppelin', 'singer': 'Robert Plant', 'guitarist': 'Jimmy Page'}, ... {'name': 'Metallica', 'singer': 'James Hetfield', 'guitarist': 'Kirk Hammet'}] >>> valueof(bands, 'singer') ['Robert Plant', 'James Hetfield'] """ if isinstance(records, dict): records = [records] return map(operator.itemgetter(key), records)
['def', 'valueof', '(', 'records', ',', 'key', ')', ':', 'if', 'isinstance', '(', 'records', ',', 'dict', ')', ':', 'records', '=', '[', 'records', ']', 'return', 'map', '(', 'operator', '.', 'itemgetter', '(', 'key', ')', ',', 'records', ')']
Extract the value corresponding to the given key in all the dictionaries >>> bands = [{'name': 'Led Zeppelin', 'singer': 'Robert Plant', 'guitarist': 'Jimmy Page'}, ... {'name': 'Metallica', 'singer': 'James Hetfield', 'guitarist': 'Kirk Hammet'}] >>> valueof(bands, 'singer') ['Robert Plant', 'James Hetfield']
['Extract', 'the', 'value', 'corresponding', 'to', 'the', 'given', 'key', 'in', 'all', 'the', 'dictionaries']
train
https://github.com/TaurusOlson/fntools/blob/316080c7b5bfdd88c9f3fac4a67deb5be3c319e5/fntools/fntools.py#L553-L564
9,990
log2timeline/plaso
plaso/multi_processing/base_process.py
MultiProcessBaseProcess._StartProfiling
def _StartProfiling(self, configuration):
  """Starts profiling.

  Args:
    configuration (ProfilingConfiguration): profiling configuration.
  """
  if not configuration:
    return

  if configuration.HaveProfileMemoryGuppy():
    self._guppy_memory_profiler = profilers.GuppyMemoryProfiler(
        self._name, configuration)
    self._guppy_memory_profiler.Start()

  if configuration.HaveProfileMemory():
    self._memory_profiler = profilers.MemoryProfiler(
        self._name, configuration)
    self._memory_profiler.Start()

  if configuration.HaveProfileProcessing():
    identifier = '{0:s}-processing'.format(self._name)
    self._processing_profiler = profilers.ProcessingProfiler(
        identifier, configuration)
    self._processing_profiler.Start()

  if configuration.HaveProfileSerializers():
    identifier = '{0:s}-serializers'.format(self._name)
    self._serializers_profiler = profilers.SerializersProfiler(
        identifier, configuration)
    self._serializers_profiler.Start()

  if configuration.HaveProfileStorage():
    self._storage_profiler = profilers.StorageProfiler(
        self._name, configuration)
    self._storage_profiler.Start()

  if configuration.HaveProfileTasks():
    self._tasks_profiler = profilers.TasksProfiler(self._name, configuration)
    self._tasks_profiler.Start()
python
def _StartProfiling(self, configuration): """Starts profiling. Args: configuration (ProfilingConfiguration): profiling configuration. """ if not configuration: return if configuration.HaveProfileMemoryGuppy(): self._guppy_memory_profiler = profilers.GuppyMemoryProfiler( self._name, configuration) self._guppy_memory_profiler.Start() if configuration.HaveProfileMemory(): self._memory_profiler = profilers.MemoryProfiler( self._name, configuration) self._memory_profiler.Start() if configuration.HaveProfileProcessing(): identifier = '{0:s}-processing'.format(self._name) self._processing_profiler = profilers.ProcessingProfiler( identifier, configuration) self._processing_profiler.Start() if configuration.HaveProfileSerializers(): identifier = '{0:s}-serializers'.format(self._name) self._serializers_profiler = profilers.SerializersProfiler( identifier, configuration) self._serializers_profiler.Start() if configuration.HaveProfileStorage(): self._storage_profiler = profilers.StorageProfiler( self._name, configuration) self._storage_profiler.Start() if configuration.HaveProfileTasks(): self._tasks_profiler = profilers.TasksProfiler(self._name, configuration) self._tasks_profiler.Start()
['def', '_StartProfiling', '(', 'self', ',', 'configuration', ')', ':', 'if', 'not', 'configuration', ':', 'return', 'if', 'configuration', '.', 'HaveProfileMemoryGuppy', '(', ')', ':', 'self', '.', '_guppy_memory_profiler', '=', 'profilers', '.', 'GuppyMemoryProfiler', '(', 'self', '.', '_name', ',', 'configuration', ')', 'self', '.', '_guppy_memory_profiler', '.', 'Start', '(', ')', 'if', 'configuration', '.', 'HaveProfileMemory', '(', ')', ':', 'self', '.', '_memory_profiler', '=', 'profilers', '.', 'MemoryProfiler', '(', 'self', '.', '_name', ',', 'configuration', ')', 'self', '.', '_memory_profiler', '.', 'Start', '(', ')', 'if', 'configuration', '.', 'HaveProfileProcessing', '(', ')', ':', 'identifier', '=', "'{0:s}-processing'", '.', 'format', '(', 'self', '.', '_name', ')', 'self', '.', '_processing_profiler', '=', 'profilers', '.', 'ProcessingProfiler', '(', 'identifier', ',', 'configuration', ')', 'self', '.', '_processing_profiler', '.', 'Start', '(', ')', 'if', 'configuration', '.', 'HaveProfileSerializers', '(', ')', ':', 'identifier', '=', "'{0:s}-serializers'", '.', 'format', '(', 'self', '.', '_name', ')', 'self', '.', '_serializers_profiler', '=', 'profilers', '.', 'SerializersProfiler', '(', 'identifier', ',', 'configuration', ')', 'self', '.', '_serializers_profiler', '.', 'Start', '(', ')', 'if', 'configuration', '.', 'HaveProfileStorage', '(', ')', ':', 'self', '.', '_storage_profiler', '=', 'profilers', '.', 'StorageProfiler', '(', 'self', '.', '_name', ',', 'configuration', ')', 'self', '.', '_storage_profiler', '.', 'Start', '(', ')', 'if', 'configuration', '.', 'HaveProfileTasks', '(', ')', ':', 'self', '.', '_tasks_profiler', '=', 'profilers', '.', 'TasksProfiler', '(', 'self', '.', '_name', ',', 'configuration', ')', 'self', '.', '_tasks_profiler', '.', 'Start', '(', ')']
Starts profiling. Args: configuration (ProfilingConfiguration): profiling configuration.
['Starts', 'profiling', '.']
train
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/multi_processing/base_process.py#L175-L213
9,991
dariusbakunas/rawdisk
rawdisk/plugins/filesystems/ntfs/ntfs.py
Ntfs.detect
def detect(self, filename, offset, standalone=False):
    """Verifies NTFS filesystem signature.

    Returns:
        bool: True if filesystem signature at offset 0x03 \
        matches 'NTFS ', False otherwise.
    """
    r = RawStruct(
        filename=filename,
        offset=offset + SIG_OFFSET,
        length=SIG_SIZE)

    oem_id = r.data

    if oem_id == b"NTFS ":
        return True

    return False
python
def detect(self, filename, offset, standalone=False): """Verifies NTFS filesystem signature. Returns: bool: True if filesystem signature at offset 0x03 \ matches 'NTFS ', False otherwise. """ r = RawStruct( filename=filename, offset=offset + SIG_OFFSET, length=SIG_SIZE) oem_id = r.data if oem_id == b"NTFS ": return True return False
['def', 'detect', '(', 'self', ',', 'filename', ',', 'offset', ',', 'standalone', '=', 'False', ')', ':', 'r', '=', 'RawStruct', '(', 'filename', '=', 'filename', ',', 'offset', '=', 'offset', '+', 'SIG_OFFSET', ',', 'length', '=', 'SIG_SIZE', ')', 'oem_id', '=', 'r', '.', 'data', 'if', 'oem_id', '==', 'b"NTFS "', ':', 'return', 'True', 'return', 'False']
Verifies NTFS filesystem signature. Returns: bool: True if filesystem signature at offset 0x03 \ matches 'NTFS ', False otherwise.
['Verifies', 'NTFS', 'filesystem', 'signature', '.']
train
https://github.com/dariusbakunas/rawdisk/blob/1dc9d0b377fe5da3c406ccec4abc238c54167403/rawdisk/plugins/filesystems/ntfs/ntfs.py#L27-L44
9,992
Azure/azure-python-devtools
src/azure_devtools/ci_tools/bot_framework.py
build_from_issue_comment
def build_from_issue_comment(gh_token, body):
    """Create a WebhookMetadata from a comment added to an issue.
    """
    if body["action"] in ["created", "edited"]:
        github_con = Github(gh_token)
        repo = github_con.get_repo(body['repository']['full_name'])
        issue = repo.get_issue(body['issue']['number'])
        text = body['comment']['body']
        try:
            comment = issue.get_comment(body['comment']['id'])
        except UnknownObjectException:
            # If the comment has already disapeared, skip the command
            return None
        return WebhookMetadata(repo, issue, text, comment)
    return None
python
def build_from_issue_comment(gh_token, body): """Create a WebhookMetadata from a comment added to an issue. """ if body["action"] in ["created", "edited"]: github_con = Github(gh_token) repo = github_con.get_repo(body['repository']['full_name']) issue = repo.get_issue(body['issue']['number']) text = body['comment']['body'] try: comment = issue.get_comment(body['comment']['id']) except UnknownObjectException: # If the comment has already disapeared, skip the command return None return WebhookMetadata(repo, issue, text, comment) return None
['def', 'build_from_issue_comment', '(', 'gh_token', ',', 'body', ')', ':', 'if', 'body', '[', '"action"', ']', 'in', '[', '"created"', ',', '"edited"', ']', ':', 'github_con', '=', 'Github', '(', 'gh_token', ')', 'repo', '=', 'github_con', '.', 'get_repo', '(', 'body', '[', "'repository'", ']', '[', "'full_name'", ']', ')', 'issue', '=', 'repo', '.', 'get_issue', '(', 'body', '[', "'issue'", ']', '[', "'number'", ']', ')', 'text', '=', 'body', '[', "'comment'", ']', '[', "'body'", ']', 'try', ':', 'comment', '=', 'issue', '.', 'get_comment', '(', 'body', '[', "'comment'", ']', '[', "'id'", ']', ')', 'except', 'UnknownObjectException', ':', '# If the comment has already disapeared, skip the command', 'return', 'None', 'return', 'WebhookMetadata', '(', 'repo', ',', 'issue', ',', 'text', ',', 'comment', ')', 'return', 'None']
Create a WebhookMetadata from a comment added to an issue.
['Create', 'a', 'WebhookMetadata', 'from', 'a', 'comment', 'added', 'to', 'an', 'issue', '.']
train
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/bot_framework.py#L25-L39
9,993
xmunoz/sodapy
sodapy/__init__.py
Socrata.upsert
def upsert(self, dataset_identifier, payload, content_type="json"):
    '''
    Insert, update or delete data to/from an existing dataset. Currently
    supports json and csv file objects. See here for the upsert
    documentation:
        http://dev.socrata.com/publishers/upsert.html
    '''
    resource = _format_new_api_request(dataid=dataset_identifier,
                                       content_type=content_type)

    return self._perform_update("post", resource, payload)
python
def upsert(self, dataset_identifier, payload, content_type="json"): ''' Insert, update or delete data to/from an existing dataset. Currently supports json and csv file objects. See here for the upsert documentation: http://dev.socrata.com/publishers/upsert.html ''' resource = _format_new_api_request(dataid=dataset_identifier, content_type=content_type) return self._perform_update("post", resource, payload)
['def', 'upsert', '(', 'self', ',', 'dataset_identifier', ',', 'payload', ',', 'content_type', '=', '"json"', ')', ':', 'resource', '=', '_format_new_api_request', '(', 'dataid', '=', 'dataset_identifier', ',', 'content_type', '=', 'content_type', ')', 'return', 'self', '.', '_perform_update', '(', '"post"', ',', 'resource', ',', 'payload', ')']
Insert, update or delete data to/from an existing dataset. Currently supports json and csv file objects. See here for the upsert documentation: http://dev.socrata.com/publishers/upsert.html
['Insert', 'update', 'or', 'delete', 'data', 'to', '/', 'from', 'an', 'existing', 'dataset', '.', 'Currently', 'supports', 'json', 'and', 'csv', 'file', 'objects', '.', 'See', 'here', 'for', 'the', 'upsert', 'documentation', ':', 'http', ':', '//', 'dev', '.', 'socrata', '.', 'com', '/', 'publishers', '/', 'upsert', '.', 'html']
train
https://github.com/xmunoz/sodapy/blob/dad2ca9560cde0acb03bdb4423260e891ca40d7b/sodapy/__init__.py#L365-L374
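A hedged usage sketch for the upsert method above; the domain, credentials, and dataset identifier are placeholders, and the rows are assumed to already match the target dataset's schema:

from sodapy import Socrata

client = Socrata("data.example.gov", "PLACEHOLDER_APP_TOKEN",
                 username="user@example.com", password="example-password")
rows = [
    {"id": 1, "name": "first row"},
    {"id": 2, "name": "second row"},
]
result = client.upsert("abcd-1234", rows)   # content_type defaults to "json"
print(result)                               # Socrata reports rows created/updated/deleted
client.close()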
9,994
androguard/androguard
androguard/decompiler/dad/control_flow.py
derived_sequence
def derived_sequence(graph): """ Compute the derived sequence of the graph G The intervals of G are collapsed into nodes, intervals of these nodes are built, and the process is repeated iteratively until we obtain a single node (if the graph is not irreducible) """ deriv_seq = [graph] deriv_interv = [] single_node = False while not single_node: interv_graph, interv_heads = intervals(graph) deriv_interv.append(interv_heads) single_node = len(interv_graph) == 1 if not single_node: deriv_seq.append(interv_graph) graph = interv_graph graph.compute_rpo() return deriv_seq, deriv_interv
python
def derived_sequence(graph): """ Compute the derived sequence of the graph G The intervals of G are collapsed into nodes, intervals of these nodes are built, and the process is repeated iteratively until we obtain a single node (if the graph is not irreducible) """ deriv_seq = [graph] deriv_interv = [] single_node = False while not single_node: interv_graph, interv_heads = intervals(graph) deriv_interv.append(interv_heads) single_node = len(interv_graph) == 1 if not single_node: deriv_seq.append(interv_graph) graph = interv_graph graph.compute_rpo() return deriv_seq, deriv_interv
['def', 'derived_sequence', '(', 'graph', ')', ':', 'deriv_seq', '=', '[', 'graph', ']', 'deriv_interv', '=', '[', ']', 'single_node', '=', 'False', 'while', 'not', 'single_node', ':', 'interv_graph', ',', 'interv_heads', '=', 'intervals', '(', 'graph', ')', 'deriv_interv', '.', 'append', '(', 'interv_heads', ')', 'single_node', '=', 'len', '(', 'interv_graph', ')', '==', '1', 'if', 'not', 'single_node', ':', 'deriv_seq', '.', 'append', '(', 'interv_graph', ')', 'graph', '=', 'interv_graph', 'graph', '.', 'compute_rpo', '(', ')', 'return', 'deriv_seq', ',', 'deriv_interv']
Compute the derived sequence of the graph G The intervals of G are collapsed into nodes, intervals of these nodes are built, and the process is repeated iteratively until we obtain a single node (if the graph is not irreducible)
['Compute', 'the', 'derived', 'sequence', 'of', 'the', 'graph', 'G', 'The', 'intervals', 'of', 'G', 'are', 'collapsed', 'into', 'nodes', 'intervals', 'of', 'these', 'nodes', 'are', 'built', 'and', 'the', 'process', 'is', 'repeated', 'iteratively', 'until', 'we', 'obtain', 'a', 'single', 'node', '(', 'if', 'the', 'graph', 'is', 'not', 'irreducible', ')']
train
https://github.com/androguard/androguard/blob/984c0d981be2950cf0451e484f7b0d4d53bc4911/androguard/decompiler/dad/control_flow.py#L86-L109
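A hedged sketch of how derived_sequence above is typically driven; constructing the control-flow graph requires a decompiled method, so that step is represented by a hypothetical helper rather than real androguard calls:

graph = build_cfg_for_method(method)   # hypothetical helper; yields a dad Graph supporting compute_rpo()
deriv_seq, deriv_interv = derived_sequence(graph)

# deriv_seq[i] is the graph after i rounds of interval collapsing;
# deriv_interv[i] lists the interval head nodes found in that round.
for step, heads in enumerate(deriv_interv):
    print("round %d: %d interval heads" % (step, len(heads)))
print("reduced to a single node after %d graphs" % len(deriv_seq))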
9,995
croscon/fleaker
fleaker/config.py
MultiStageConfigurableApp.configure
def configure(self, *args, **kwargs): """Configure the Application through a varied number of sources of different types. This function chains multiple possible configuration methods together in order to just "make it work". You can pass multiple configuration sources in to the method and each one will be tried in a sane fashion. Later sources will override earlier sources if keys collide. For example: .. code:: python from application import default_config app.configure(default_config, os.environ, '.secrets') In the above example, values stored in ``default_config`` will be loaded first, then overwritten by those in ``os.environ``, and so on. An endless number of configuration sources may be passed. Configuration sources are type checked and processed according to the following rules: * ``string`` - if the source is a ``str``, we will assume it is a file or module that should be loaded. If the file ends in ``.json``, then :meth:`flask.Config.from_json` is used; if the file ends in ``.py`` or ``.cfg``, then :meth:`flask.Config.from_pyfile` is used; if the module has any other extension we assume it is an import path, import the module and pass that to :meth:`flask.Config.from_object`. See below for a few more semantics on module loading. * ``dict-like`` - if the source is ``dict-like``, then :meth:`flask.Config.from_mapping` will be used. ``dict-like`` is defined as anything implementing an ``items`` method that returns a tuple of ``key``, ``val``. * ``class`` or ``module`` - if the source is an uninstantiated ``class`` or ``module``, then :meth:`flask.Config.from_object` will be used. Just like Flask's standard configuration, only uppercased keys will be loaded into the config. If the item we are passed is a ``string`` and it is determined to be a possible Python module, then a leading ``.`` is relevant. If a leading ``.`` is provided, we assume that the module to import is located in the current package and operate as such; if it begins with anything else we assume the import path provided is absolute. This allows you to source configuration stored in a module in your package, or in another package. Args: *args (object): Any object you want us to try to configure from. Keyword Args: whitelist_keys_from_mappings (bool): Should we whitelist the keys we pull from mappings? Very useful if you're passing in an entire OS ``environ`` and you want to omit things like ``LESSPIPE``. If no whitelist is provided, we use the pre-existing config keys as a whitelist. whitelist (list[str]): An explicit list of keys that should be allowed. If provided and ``whitelist_keys`` is ``True``, we will use that as our whitelist instead of pre-existing app config keys. """ whitelist_keys_from_mappings = kwargs.get( 'whitelist_keys_from_mappings', False ) whitelist = kwargs.get('whitelist') for item in args: if isinstance(item, string_types): _, ext = splitext(item) if ext == '.json': self._configure_from_json(item) elif ext in ('.cfg', '.py'): self._configure_from_pyfile(item) else: self._configure_from_module(item) elif isinstance(item, (types.ModuleType, type)): self._configure_from_object(item) elif hasattr(item, 'items'): # assume everything else is a mapping like object; ``.items()`` # is what Flask uses under the hood for this method # @TODO: This doesn't handle the edge case of using a tuple of # two element tuples to config; but Flask does that. IMO, if # you do that, you're a monster. 
self._configure_from_mapping( item, whitelist_keys=whitelist_keys_from_mappings, whitelist=whitelist ) else: raise TypeError("Could not determine a valid type for this" " configuration object: `{}`!".format(item)) # we just finished here, run the post configure callbacks self._run_post_configure_callbacks(args)
python
def configure(self, *args, **kwargs): """Configure the Application through a varied number of sources of different types. This function chains multiple possible configuration methods together in order to just "make it work". You can pass multiple configuration sources in to the method and each one will be tried in a sane fashion. Later sources will override earlier sources if keys collide. For example: .. code:: python from application import default_config app.configure(default_config, os.environ, '.secrets') In the above example, values stored in ``default_config`` will be loaded first, then overwritten by those in ``os.environ``, and so on. An endless number of configuration sources may be passed. Configuration sources are type checked and processed according to the following rules: * ``string`` - if the source is a ``str``, we will assume it is a file or module that should be loaded. If the file ends in ``.json``, then :meth:`flask.Config.from_json` is used; if the file ends in ``.py`` or ``.cfg``, then :meth:`flask.Config.from_pyfile` is used; if the module has any other extension we assume it is an import path, import the module and pass that to :meth:`flask.Config.from_object`. See below for a few more semantics on module loading. * ``dict-like`` - if the source is ``dict-like``, then :meth:`flask.Config.from_mapping` will be used. ``dict-like`` is defined as anything implementing an ``items`` method that returns a tuple of ``key``, ``val``. * ``class`` or ``module`` - if the source is an uninstantiated ``class`` or ``module``, then :meth:`flask.Config.from_object` will be used. Just like Flask's standard configuration, only uppercased keys will be loaded into the config. If the item we are passed is a ``string`` and it is determined to be a possible Python module, then a leading ``.`` is relevant. If a leading ``.`` is provided, we assume that the module to import is located in the current package and operate as such; if it begins with anything else we assume the import path provided is absolute. This allows you to source configuration stored in a module in your package, or in another package. Args: *args (object): Any object you want us to try to configure from. Keyword Args: whitelist_keys_from_mappings (bool): Should we whitelist the keys we pull from mappings? Very useful if you're passing in an entire OS ``environ`` and you want to omit things like ``LESSPIPE``. If no whitelist is provided, we use the pre-existing config keys as a whitelist. whitelist (list[str]): An explicit list of keys that should be allowed. If provided and ``whitelist_keys`` is ``True``, we will use that as our whitelist instead of pre-existing app config keys. """ whitelist_keys_from_mappings = kwargs.get( 'whitelist_keys_from_mappings', False ) whitelist = kwargs.get('whitelist') for item in args: if isinstance(item, string_types): _, ext = splitext(item) if ext == '.json': self._configure_from_json(item) elif ext in ('.cfg', '.py'): self._configure_from_pyfile(item) else: self._configure_from_module(item) elif isinstance(item, (types.ModuleType, type)): self._configure_from_object(item) elif hasattr(item, 'items'): # assume everything else is a mapping like object; ``.items()`` # is what Flask uses under the hood for this method # @TODO: This doesn't handle the edge case of using a tuple of # two element tuples to config; but Flask does that. IMO, if # you do that, you're a monster. 
self._configure_from_mapping( item, whitelist_keys=whitelist_keys_from_mappings, whitelist=whitelist ) else: raise TypeError("Could not determine a valid type for this" " configuration object: `{}`!".format(item)) # we just finished here, run the post configure callbacks self._run_post_configure_callbacks(args)
['def', 'configure', '(', 'self', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'whitelist_keys_from_mappings', '=', 'kwargs', '.', 'get', '(', "'whitelist_keys_from_mappings'", ',', 'False', ')', 'whitelist', '=', 'kwargs', '.', 'get', '(', "'whitelist'", ')', 'for', 'item', 'in', 'args', ':', 'if', 'isinstance', '(', 'item', ',', 'string_types', ')', ':', '_', ',', 'ext', '=', 'splitext', '(', 'item', ')', 'if', 'ext', '==', "'.json'", ':', 'self', '.', '_configure_from_json', '(', 'item', ')', 'elif', 'ext', 'in', '(', "'.cfg'", ',', "'.py'", ')', ':', 'self', '.', '_configure_from_pyfile', '(', 'item', ')', 'else', ':', 'self', '.', '_configure_from_module', '(', 'item', ')', 'elif', 'isinstance', '(', 'item', ',', '(', 'types', '.', 'ModuleType', ',', 'type', ')', ')', ':', 'self', '.', '_configure_from_object', '(', 'item', ')', 'elif', 'hasattr', '(', 'item', ',', "'items'", ')', ':', '# assume everything else is a mapping like object; ``.items()``', '# is what Flask uses under the hood for this method', "# @TODO: This doesn't handle the edge case of using a tuple of", '# two element tuples to config; but Flask does that. IMO, if', "# you do that, you're a monster.", 'self', '.', '_configure_from_mapping', '(', 'item', ',', 'whitelist_keys', '=', 'whitelist_keys_from_mappings', ',', 'whitelist', '=', 'whitelist', ')', 'else', ':', 'raise', 'TypeError', '(', '"Could not determine a valid type for this"', '" configuration object: `{}`!"', '.', 'format', '(', 'item', ')', ')', '# we just finished here, run the post configure callbacks', 'self', '.', '_run_post_configure_callbacks', '(', 'args', ')']
Configure the Application through a varied number of sources of different types. This function chains multiple possible configuration methods together in order to just "make it work". You can pass multiple configuration sources in to the method and each one will be tried in a sane fashion. Later sources will override earlier sources if keys collide. For example: .. code:: python from application import default_config app.configure(default_config, os.environ, '.secrets') In the above example, values stored in ``default_config`` will be loaded first, then overwritten by those in ``os.environ``, and so on. An endless number of configuration sources may be passed. Configuration sources are type checked and processed according to the following rules: * ``string`` - if the source is a ``str``, we will assume it is a file or module that should be loaded. If the file ends in ``.json``, then :meth:`flask.Config.from_json` is used; if the file ends in ``.py`` or ``.cfg``, then :meth:`flask.Config.from_pyfile` is used; if the module has any other extension we assume it is an import path, import the module and pass that to :meth:`flask.Config.from_object`. See below for a few more semantics on module loading. * ``dict-like`` - if the source is ``dict-like``, then :meth:`flask.Config.from_mapping` will be used. ``dict-like`` is defined as anything implementing an ``items`` method that returns a tuple of ``key``, ``val``. * ``class`` or ``module`` - if the source is an uninstantiated ``class`` or ``module``, then :meth:`flask.Config.from_object` will be used. Just like Flask's standard configuration, only uppercased keys will be loaded into the config. If the item we are passed is a ``string`` and it is determined to be a possible Python module, then a leading ``.`` is relevant. If a leading ``.`` is provided, we assume that the module to import is located in the current package and operate as such; if it begins with anything else we assume the import path provided is absolute. This allows you to source configuration stored in a module in your package, or in another package. Args: *args (object): Any object you want us to try to configure from. Keyword Args: whitelist_keys_from_mappings (bool): Should we whitelist the keys we pull from mappings? Very useful if you're passing in an entire OS ``environ`` and you want to omit things like ``LESSPIPE``. If no whitelist is provided, we use the pre-existing config keys as a whitelist. whitelist (list[str]): An explicit list of keys that should be allowed. If provided and ``whitelist_keys`` is ``True``, we will use that as our whitelist instead of pre-existing app config keys.
['Configure', 'the', 'Application', 'through', 'a', 'varied', 'number', 'of', 'sources', 'of', 'different', 'types', '.']
train
https://github.com/croscon/fleaker/blob/046b026b79c9912bceebb17114bc0c5d2d02e3c7/fleaker/config.py#L56-L156
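A hedged usage sketch for the configure() method above; the settings module and file names are placeholders, and `app` is assumed to be a Fleaker-style application exposing this method:

import os
from myapp import settings as default_config   # hypothetical settings module

app.configure(
    default_config,                     # class/module -> from_object
    "instance/settings.json",           # .json path   -> from_json
    os.environ,                         # mapping      -> from_mapping
    whitelist_keys_from_mappings=True,  # keep only keys the app config already defines
)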
9,996
rapidpro/expressions
python/temba_expressions/functions/excel.py
datedif
def datedif(ctx, start_date, end_date, unit): """ Calculates the number of days, months, or years between two dates. """ start_date = conversions.to_date(start_date, ctx) end_date = conversions.to_date(end_date, ctx) unit = conversions.to_string(unit, ctx).lower() if start_date > end_date: raise ValueError("Start date cannot be after end date") if unit == 'y': return relativedelta(end_date, start_date).years elif unit == 'm': delta = relativedelta(end_date, start_date) return 12 * delta.years + delta.months elif unit == 'd': return (end_date - start_date).days elif unit == 'md': return relativedelta(end_date, start_date).days elif unit == 'ym': return relativedelta(end_date, start_date).months elif unit == 'yd': return (end_date - start_date.replace(year=end_date.year)).days raise ValueError("Invalid unit value: %s" % unit)
python
def datedif(ctx, start_date, end_date, unit): """ Calculates the number of days, months, or years between two dates. """ start_date = conversions.to_date(start_date, ctx) end_date = conversions.to_date(end_date, ctx) unit = conversions.to_string(unit, ctx).lower() if start_date > end_date: raise ValueError("Start date cannot be after end date") if unit == 'y': return relativedelta(end_date, start_date).years elif unit == 'm': delta = relativedelta(end_date, start_date) return 12 * delta.years + delta.months elif unit == 'd': return (end_date - start_date).days elif unit == 'md': return relativedelta(end_date, start_date).days elif unit == 'ym': return relativedelta(end_date, start_date).months elif unit == 'yd': return (end_date - start_date.replace(year=end_date.year)).days raise ValueError("Invalid unit value: %s" % unit)
['def', 'datedif', '(', 'ctx', ',', 'start_date', ',', 'end_date', ',', 'unit', ')', ':', 'start_date', '=', 'conversions', '.', 'to_date', '(', 'start_date', ',', 'ctx', ')', 'end_date', '=', 'conversions', '.', 'to_date', '(', 'end_date', ',', 'ctx', ')', 'unit', '=', 'conversions', '.', 'to_string', '(', 'unit', ',', 'ctx', ')', '.', 'lower', '(', ')', 'if', 'start_date', '>', 'end_date', ':', 'raise', 'ValueError', '(', '"Start date cannot be after end date"', ')', 'if', 'unit', '==', "'y'", ':', 'return', 'relativedelta', '(', 'end_date', ',', 'start_date', ')', '.', 'years', 'elif', 'unit', '==', "'m'", ':', 'delta', '=', 'relativedelta', '(', 'end_date', ',', 'start_date', ')', 'return', '12', '*', 'delta', '.', 'years', '+', 'delta', '.', 'months', 'elif', 'unit', '==', "'d'", ':', 'return', '(', 'end_date', '-', 'start_date', ')', '.', 'days', 'elif', 'unit', '==', "'md'", ':', 'return', 'relativedelta', '(', 'end_date', ',', 'start_date', ')', '.', 'days', 'elif', 'unit', '==', "'ym'", ':', 'return', 'relativedelta', '(', 'end_date', ',', 'start_date', ')', '.', 'months', 'elif', 'unit', '==', "'yd'", ':', 'return', '(', 'end_date', '-', 'start_date', '.', 'replace', '(', 'year', '=', 'end_date', '.', 'year', ')', ')', '.', 'days', 'raise', 'ValueError', '(', '"Invalid unit value: %s"', '%', 'unit', ')']
Calculates the number of days, months, or years between two dates.
['Calculates', 'the', 'number', 'of', 'days', 'months', 'or', 'years', 'between', 'two', 'dates', '.']
train
https://github.com/rapidpro/expressions/blob/b03d91ec58fc328960bce90ecb5fa49dcf467627/python/temba_expressions/functions/excel.py#L164-L189
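A hedged illustration of the unit semantics datedif implements above, computed with dateutil directly so no expression context (ctx) is needed; the dates are arbitrary examples:

from datetime import date
from dateutil.relativedelta import relativedelta

start, end = date(2020, 1, 31), date(2023, 3, 15)
delta = relativedelta(end, start)

years = delta.years                        # unit 'y' -> 3
months = 12 * delta.years + delta.months   # unit 'm' -> 37
days = (end - start).days                  # unit 'd' -> 1139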
9,997
gagneurlab/concise
concise/utils/plot.py
add_letter_to_axis
def add_letter_to_axis(ax, let, col, x, y, height): """Add 'let' with position x,y and height height to matplotlib axis 'ax'. """ if len(let) == 2: colors = [col, "white"] elif len(let) == 1: colors = [col] else: raise ValueError("3 or more Polygons are not supported") for polygon, color in zip(let, colors): new_polygon = affinity.scale( polygon, yfact=height, origin=(0, 0, 0)) new_polygon = affinity.translate( new_polygon, xoff=x, yoff=y) patch = PolygonPatch( new_polygon, edgecolor=color, facecolor=color) ax.add_patch(patch) return
python
def add_letter_to_axis(ax, let, col, x, y, height): """Add 'let' with position x,y and height height to matplotlib axis 'ax'. """ if len(let) == 2: colors = [col, "white"] elif len(let) == 1: colors = [col] else: raise ValueError("3 or more Polygons are not supported") for polygon, color in zip(let, colors): new_polygon = affinity.scale( polygon, yfact=height, origin=(0, 0, 0)) new_polygon = affinity.translate( new_polygon, xoff=x, yoff=y) patch = PolygonPatch( new_polygon, edgecolor=color, facecolor=color) ax.add_patch(patch) return
['def', 'add_letter_to_axis', '(', 'ax', ',', 'let', ',', 'col', ',', 'x', ',', 'y', ',', 'height', ')', ':', 'if', 'len', '(', 'let', ')', '==', '2', ':', 'colors', '=', '[', 'col', ',', '"white"', ']', 'elif', 'len', '(', 'let', ')', '==', '1', ':', 'colors', '=', '[', 'col', ']', 'else', ':', 'raise', 'ValueError', '(', '"3 or more Polygons are not supported"', ')', 'for', 'polygon', ',', 'color', 'in', 'zip', '(', 'let', ',', 'colors', ')', ':', 'new_polygon', '=', 'affinity', '.', 'scale', '(', 'polygon', ',', 'yfact', '=', 'height', ',', 'origin', '=', '(', '0', ',', '0', ',', '0', ')', ')', 'new_polygon', '=', 'affinity', '.', 'translate', '(', 'new_polygon', ',', 'xoff', '=', 'x', ',', 'yoff', '=', 'y', ')', 'patch', '=', 'PolygonPatch', '(', 'new_polygon', ',', 'edgecolor', '=', 'color', ',', 'facecolor', '=', 'color', ')', 'ax', '.', 'add_patch', '(', 'patch', ')', 'return']
Add 'let' with position x,y and height height to matplotlib axis 'ax'.
['Add', 'let', 'with', 'position', 'x', 'y', 'and', 'height', 'height', 'to', 'matplotlib', 'axis', 'ax', '.']
train
https://github.com/gagneurlab/concise/blob/d15262eb1e590008bc96ba31e93bfbdbfa1a9fd4/concise/utils/plot.py#L174-L192
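A hedged usage sketch for add_letter_to_axis above; real letter shapes come from the glyph polygons defined elsewhere in concise.utils.plot, so a plain unit square stands in for a one-polygon letter here:

import matplotlib.pyplot as plt
from shapely.geometry import box
from concise.utils.plot import add_letter_to_axis

fig, ax = plt.subplots()
letter = (box(0.0, 0.0, 1.0, 1.0),)   # single-polygon "letter" occupying a unit square
add_letter_to_axis(ax, letter, "green", x=0.0, y=0.0, height=2.0)
ax.set_xlim(-0.5, 1.5)
ax.set_ylim(-0.5, 2.5)
plt.show()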
9,998
jtwhite79/pyemu
pyemu/plot/plot_utils.py
res_phi_pie
def res_phi_pie(pst,logger=None, **kwargs): """plot current phi components as a pie chart. Parameters ---------- pst : pyemu.Pst logger : pyemu.Logger kwargs : dict accepts 'include_zero' as a flag to include phi groups with only zero-weight obs (not sure why anyone would do this, but whatevs). Returns ------- ax : matplotlib.Axis """ if logger is None: logger=Logger('Default_Loggger.log',echo=False) logger.log("plot res_phi_pie") if "ensemble" in kwargs: try: res=pst_utils.res_from_en(pst,kwargs['ensemble']) except: logger.statement("res_1to1: could not find ensemble file {0}".format(kwargs['ensemble'])) else: try: res = pst.res except: logger.lraise("res_phi_pie: pst.res is None, couldn't find residuals file") obs = pst.observation_data phi = pst.phi phi_comps = pst.phi_components norm_phi_comps = pst.phi_components_normalized keys = list(phi_comps.keys()) if "include_zero" not in kwargs or kwargs["include_zero"] is True: phi_comps = {k:phi_comps[k] for k in keys if phi_comps[k] > 0.0} keys = list(phi_comps.keys()) norm_phi_comps = {k:norm_phi_comps[k] for k in keys} if "ax" in kwargs: ax = kwargs["ax"] else: fig = plt.figure(figsize=figsize) ax = plt.subplot(1,1,1,aspect="equal") labels = ["{0}\n{1:4G}\n({2:3.1f}%)".format(k,phi_comps[k],100. * (phi_comps[k] / phi)) for k in keys] ax.pie([float(norm_phi_comps[k]) for k in keys],labels=labels) logger.log("plot res_phi_pie") if "filename" in kwargs: plt.savefig(kwargs["filename"]) return ax
python
def res_phi_pie(pst,logger=None, **kwargs): """plot current phi components as a pie chart. Parameters ---------- pst : pyemu.Pst logger : pyemu.Logger kwargs : dict accepts 'include_zero' as a flag to include phi groups with only zero-weight obs (not sure why anyone would do this, but whatevs). Returns ------- ax : matplotlib.Axis """ if logger is None: logger=Logger('Default_Loggger.log',echo=False) logger.log("plot res_phi_pie") if "ensemble" in kwargs: try: res=pst_utils.res_from_en(pst,kwargs['ensemble']) except: logger.statement("res_1to1: could not find ensemble file {0}".format(kwargs['ensemble'])) else: try: res = pst.res except: logger.lraise("res_phi_pie: pst.res is None, couldn't find residuals file") obs = pst.observation_data phi = pst.phi phi_comps = pst.phi_components norm_phi_comps = pst.phi_components_normalized keys = list(phi_comps.keys()) if "include_zero" not in kwargs or kwargs["include_zero"] is True: phi_comps = {k:phi_comps[k] for k in keys if phi_comps[k] > 0.0} keys = list(phi_comps.keys()) norm_phi_comps = {k:norm_phi_comps[k] for k in keys} if "ax" in kwargs: ax = kwargs["ax"] else: fig = plt.figure(figsize=figsize) ax = plt.subplot(1,1,1,aspect="equal") labels = ["{0}\n{1:4G}\n({2:3.1f}%)".format(k,phi_comps[k],100. * (phi_comps[k] / phi)) for k in keys] ax.pie([float(norm_phi_comps[k]) for k in keys],labels=labels) logger.log("plot res_phi_pie") if "filename" in kwargs: plt.savefig(kwargs["filename"]) return ax
['def', 'res_phi_pie', '(', 'pst', ',', 'logger', '=', 'None', ',', '*', '*', 'kwargs', ')', ':', 'if', 'logger', 'is', 'None', ':', 'logger', '=', 'Logger', '(', "'Default_Loggger.log'", ',', 'echo', '=', 'False', ')', 'logger', '.', 'log', '(', '"plot res_phi_pie"', ')', 'if', '"ensemble"', 'in', 'kwargs', ':', 'try', ':', 'res', '=', 'pst_utils', '.', 'res_from_en', '(', 'pst', ',', 'kwargs', '[', "'ensemble'", ']', ')', 'except', ':', 'logger', '.', 'statement', '(', '"res_1to1: could not find ensemble file {0}"', '.', 'format', '(', 'kwargs', '[', "'ensemble'", ']', ')', ')', 'else', ':', 'try', ':', 'res', '=', 'pst', '.', 'res', 'except', ':', 'logger', '.', 'lraise', '(', '"res_phi_pie: pst.res is None, couldn\'t find residuals file"', ')', 'obs', '=', 'pst', '.', 'observation_data', 'phi', '=', 'pst', '.', 'phi', 'phi_comps', '=', 'pst', '.', 'phi_components', 'norm_phi_comps', '=', 'pst', '.', 'phi_components_normalized', 'keys', '=', 'list', '(', 'phi_comps', '.', 'keys', '(', ')', ')', 'if', '"include_zero"', 'not', 'in', 'kwargs', 'or', 'kwargs', '[', '"include_zero"', ']', 'is', 'True', ':', 'phi_comps', '=', '{', 'k', ':', 'phi_comps', '[', 'k', ']', 'for', 'k', 'in', 'keys', 'if', 'phi_comps', '[', 'k', ']', '>', '0.0', '}', 'keys', '=', 'list', '(', 'phi_comps', '.', 'keys', '(', ')', ')', 'norm_phi_comps', '=', '{', 'k', ':', 'norm_phi_comps', '[', 'k', ']', 'for', 'k', 'in', 'keys', '}', 'if', '"ax"', 'in', 'kwargs', ':', 'ax', '=', 'kwargs', '[', '"ax"', ']', 'else', ':', 'fig', '=', 'plt', '.', 'figure', '(', 'figsize', '=', 'figsize', ')', 'ax', '=', 'plt', '.', 'subplot', '(', '1', ',', '1', ',', '1', ',', 'aspect', '=', '"equal"', ')', 'labels', '=', '[', '"{0}\\n{1:4G}\\n({2:3.1f}%)"', '.', 'format', '(', 'k', ',', 'phi_comps', '[', 'k', ']', ',', '100.', '*', '(', 'phi_comps', '[', 'k', ']', '/', 'phi', ')', ')', 'for', 'k', 'in', 'keys', ']', 'ax', '.', 'pie', '(', '[', 'float', '(', 'norm_phi_comps', '[', 'k', ']', ')', 'for', 'k', 'in', 'keys', ']', ',', 'labels', '=', 'labels', ')', 'logger', '.', 'log', '(', '"plot res_phi_pie"', ')', 'if', '"filename"', 'in', 'kwargs', ':', 'plt', '.', 'savefig', '(', 'kwargs', '[', '"filename"', ']', ')', 'return', 'ax']
plot current phi components as a pie chart. Parameters ---------- pst : pyemu.Pst logger : pyemu.Logger kwargs : dict accepts 'include_zero' as a flag to include phi groups with only zero-weight obs (not sure why anyone would do this, but whatevs). Returns ------- ax : matplotlib.Axis
['plot', 'current', 'phi', 'components', 'as', 'a', 'pie', 'chart', '.']
train
https://github.com/jtwhite79/pyemu/blob/c504d8e7a4097cec07655a6318d275739bd8148a/pyemu/plot/plot_utils.py#L609-L660
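A hedged usage sketch for res_phi_pie above; the control-file name is a placeholder and a completed PEST(++) run is assumed so that the residuals file referenced by the .pst exists:

import matplotlib.pyplot as plt
import pyemu
from pyemu.plot.plot_utils import res_phi_pie

pst = pyemu.Pst("pest_case.pst")                       # placeholder control file
ax = res_phi_pie(pst, filename="phi_components.png")   # also writes the figure to disk
plt.show()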
9,999
inasafe/inasafe
safe/report/processors/default.py
atlas_renderer
def atlas_renderer(layout, coverage_layer, output_path, file_format): """Extract composition using atlas generation. :param layout: QGIS Print Layout object used for producing the report. :type layout: qgis.core.QgsPrintLayout :param coverage_layer: Coverage Layer used for atlas map. :type coverage_layer: QgsMapLayer :param output_path: The output path of the product. :type output_path: str :param file_format: File format of map output, 'pdf' or 'png'. :type file_format: str :return: Generated output path(s). :rtype: str, list """ # set the composer map to be atlas driven composer_map = layout_item( layout, 'impact-map', QgsLayoutItemMap) composer_map.setAtlasDriven(True) composer_map.setAtlasScalingMode(QgsLayoutItemMap.Auto) # setup the atlas composition and composition atlas mode atlas_composition = layout.atlas() atlas_composition.setCoverageLayer(coverage_layer) atlas_on_single_file = layout.customProperty('singleFile', True) if file_format == QgisComposerComponentsMetadata.OutputFormat.PDF: if not atlas_composition.filenameExpression(): atlas_composition.setFilenameExpression( "'output_'||@atlas_featurenumber") output_directory = os.path.dirname(output_path) # we need to set the predefined scales for atlas project_scales = [] scales = QgsProject.instance().readListEntry( "Scales", "/ScalesList")[0] has_project_scales = QgsProject.instance().readBoolEntry( "Scales", "/useProjectScales")[0] if not has_project_scales or not scales: scales_string = str(general_setting("Map/scales", PROJECT_SCALES)) scales = scales_string.split(',') for scale in scales: parts = scale.split(':') if len(parts) == 2: project_scales.append(float(parts[1])) layout.reportContext().setPredefinedScales(project_scales) settings = QgsLayoutExporter.PdfExportSettings() LOGGER.info('Exporting Atlas') atlas_output = [] if atlas_on_single_file: res, error = QgsLayoutExporter.exportToPdf( atlas_composition, output_path, settings) atlas_output.append(output_path) else: res, error = QgsLayoutExporter.exportToPdfs( atlas_composition, output_directory, settings) if res != QgsLayoutExporter.Success: LOGGER.error(error) return atlas_output
python
def atlas_renderer(layout, coverage_layer, output_path, file_format): """Extract composition using atlas generation. :param layout: QGIS Print Layout object used for producing the report. :type layout: qgis.core.QgsPrintLayout :param coverage_layer: Coverage Layer used for atlas map. :type coverage_layer: QgsMapLayer :param output_path: The output path of the product. :type output_path: str :param file_format: File format of map output, 'pdf' or 'png'. :type file_format: str :return: Generated output path(s). :rtype: str, list """ # set the composer map to be atlas driven composer_map = layout_item( layout, 'impact-map', QgsLayoutItemMap) composer_map.setAtlasDriven(True) composer_map.setAtlasScalingMode(QgsLayoutItemMap.Auto) # setup the atlas composition and composition atlas mode atlas_composition = layout.atlas() atlas_composition.setCoverageLayer(coverage_layer) atlas_on_single_file = layout.customProperty('singleFile', True) if file_format == QgisComposerComponentsMetadata.OutputFormat.PDF: if not atlas_composition.filenameExpression(): atlas_composition.setFilenameExpression( "'output_'||@atlas_featurenumber") output_directory = os.path.dirname(output_path) # we need to set the predefined scales for atlas project_scales = [] scales = QgsProject.instance().readListEntry( "Scales", "/ScalesList")[0] has_project_scales = QgsProject.instance().readBoolEntry( "Scales", "/useProjectScales")[0] if not has_project_scales or not scales: scales_string = str(general_setting("Map/scales", PROJECT_SCALES)) scales = scales_string.split(',') for scale in scales: parts = scale.split(':') if len(parts) == 2: project_scales.append(float(parts[1])) layout.reportContext().setPredefinedScales(project_scales) settings = QgsLayoutExporter.PdfExportSettings() LOGGER.info('Exporting Atlas') atlas_output = [] if atlas_on_single_file: res, error = QgsLayoutExporter.exportToPdf( atlas_composition, output_path, settings) atlas_output.append(output_path) else: res, error = QgsLayoutExporter.exportToPdfs( atlas_composition, output_directory, settings) if res != QgsLayoutExporter.Success: LOGGER.error(error) return atlas_output
['def', 'atlas_renderer', '(', 'layout', ',', 'coverage_layer', ',', 'output_path', ',', 'file_format', ')', ':', '# set the composer map to be atlas driven', 'composer_map', '=', 'layout_item', '(', 'layout', ',', "'impact-map'", ',', 'QgsLayoutItemMap', ')', 'composer_map', '.', 'setAtlasDriven', '(', 'True', ')', 'composer_map', '.', 'setAtlasScalingMode', '(', 'QgsLayoutItemMap', '.', 'Auto', ')', '# setup the atlas composition and composition atlas mode', 'atlas_composition', '=', 'layout', '.', 'atlas', '(', ')', 'atlas_composition', '.', 'setCoverageLayer', '(', 'coverage_layer', ')', 'atlas_on_single_file', '=', 'layout', '.', 'customProperty', '(', "'singleFile'", ',', 'True', ')', 'if', 'file_format', '==', 'QgisComposerComponentsMetadata', '.', 'OutputFormat', '.', 'PDF', ':', 'if', 'not', 'atlas_composition', '.', 'filenameExpression', '(', ')', ':', 'atlas_composition', '.', 'setFilenameExpression', '(', '"\'output_\'||@atlas_featurenumber"', ')', 'output_directory', '=', 'os', '.', 'path', '.', 'dirname', '(', 'output_path', ')', '# we need to set the predefined scales for atlas', 'project_scales', '=', '[', ']', 'scales', '=', 'QgsProject', '.', 'instance', '(', ')', '.', 'readListEntry', '(', '"Scales"', ',', '"/ScalesList"', ')', '[', '0', ']', 'has_project_scales', '=', 'QgsProject', '.', 'instance', '(', ')', '.', 'readBoolEntry', '(', '"Scales"', ',', '"/useProjectScales"', ')', '[', '0', ']', 'if', 'not', 'has_project_scales', 'or', 'not', 'scales', ':', 'scales_string', '=', 'str', '(', 'general_setting', '(', '"Map/scales"', ',', 'PROJECT_SCALES', ')', ')', 'scales', '=', 'scales_string', '.', 'split', '(', "','", ')', 'for', 'scale', 'in', 'scales', ':', 'parts', '=', 'scale', '.', 'split', '(', "':'", ')', 'if', 'len', '(', 'parts', ')', '==', '2', ':', 'project_scales', '.', 'append', '(', 'float', '(', 'parts', '[', '1', ']', ')', ')', 'layout', '.', 'reportContext', '(', ')', '.', 'setPredefinedScales', '(', 'project_scales', ')', 'settings', '=', 'QgsLayoutExporter', '.', 'PdfExportSettings', '(', ')', 'LOGGER', '.', 'info', '(', "'Exporting Atlas'", ')', 'atlas_output', '=', '[', ']', 'if', 'atlas_on_single_file', ':', 'res', ',', 'error', '=', 'QgsLayoutExporter', '.', 'exportToPdf', '(', 'atlas_composition', ',', 'output_path', ',', 'settings', ')', 'atlas_output', '.', 'append', '(', 'output_path', ')', 'else', ':', 'res', ',', 'error', '=', 'QgsLayoutExporter', '.', 'exportToPdfs', '(', 'atlas_composition', ',', 'output_directory', ',', 'settings', ')', 'if', 'res', '!=', 'QgsLayoutExporter', '.', 'Success', ':', 'LOGGER', '.', 'error', '(', 'error', ')', 'return', 'atlas_output']
Extract composition using atlas generation. :param layout: QGIS Print Layout object used for producing the report. :type layout: qgis.core.QgsPrintLayout :param coverage_layer: Coverage Layer used for atlas map. :type coverage_layer: QgsMapLayer :param output_path: The output path of the product. :type output_path: str :param file_format: File format of map output, 'pdf' or 'png'. :type file_format: str :return: Generated output path(s). :rtype: str, list
['Extract', 'composition', 'using', 'atlas', 'generation', '.']
train
https://github.com/inasafe/inasafe/blob/831d60abba919f6d481dc94a8d988cc205130724/safe/report/processors/default.py#L716-L781
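A hedged usage sketch for atlas_renderer above; it only runs inside a QGIS Python environment with an InaSAFE-style project loaded, the layout name, layer name, and output path are placeholders, and the 'pdf' literal assumes the OutputFormat constant compared against is the plain string:

from qgis.core import QgsProject

project = QgsProject.instance()
layout = project.layoutManager().layoutByName("impact-report")   # placeholder layout name
coverage = project.mapLayersByName("aggregation_areas")[0]       # placeholder coverage layer

outputs = atlas_renderer(layout, coverage, "/tmp/impact_atlas.pdf", "pdf")
print(outputs)   # list of generated PDF path(s)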