Columns: text (string, lengths 75 to 104k) · code_tokens (sequence) · avg_line_len (float64, 7.91 to 980) · score (float64, 0 to 0.18)
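Each record below pairs a raw function source (text) with its token sequence (code_tokens) and two numeric summaries. The sketch that follows shows one way such records could be iterated and checked; the "rows.jsonl" filename, the JSONL layout, and the exact definition of avg_line_len are assumptions made for illustration, not something stated by this dump.

```python
# Minimal sketch, assuming the rows are stored one-JSON-object-per-line in "rows.jsonl"
# with the columns listed above. Both the filename and the avg_line_len formula are guesses.
import json


def iter_rows(path="rows.jsonl"):
    """Yield one record (dict with text, code_tokens, avg_line_len, score) per line."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            yield json.loads(line)


def avg_line_len(text):
    """Mean character count per line of the text field (assumed column definition)."""
    lines = text.splitlines() or [text]
    return sum(len(l) for l in lines) / len(lines)


if __name__ == "__main__":
    for row in iter_rows():
        # Print token count, recomputed average line length, and the stored score.
        print(len(row["code_tokens"]), round(avg_line_len(row["text"]), 2), row["score"])
```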
def get_notes(self, folderid="", offset=0, limit=10): """Fetch notes :param folderid: The UUID of the folder to fetch notes from :param offset: the pagination offset :param limit: the pagination limit """ if self.standard_grant_type is not "authorization_code": raise DeviantartError("Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.") response = self._req('/notes', { 'folderid' : folderid, 'offset' : offset, 'limit' : limit }) notes = [] for item in response['results']: n = {} n['noteid'] = item['noteid'] n['ts'] = item['ts'] n['unread'] = item['unread'] n['starred'] = item['starred'] n['sent'] = item['sent'] n['subject'] = item['subject'] n['preview'] = item['preview'] n['body'] = item['body'] n['user'] = User() n['user'].from_dict(item['user']) n['recipients'] = [] for recipient_item in item['recipients']: u = User() u.from_dict(recipient_item) n['recipients'].append(u) notes.append(n) return { "results" : notes, "has_more" : response['has_more'], "next_offset" : response['next_offset'] }
[ "def", "get_notes", "(", "self", ",", "folderid", "=", "\"\"", ",", "offset", "=", "0", ",", "limit", "=", "10", ")", ":", "if", "self", ".", "standard_grant_type", "is", "not", "\"authorization_code\"", ":", "raise", "DeviantartError", "(", "\"Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.\"", ")", "response", "=", "self", ".", "_req", "(", "'/notes'", ",", "{", "'folderid'", ":", "folderid", ",", "'offset'", ":", "offset", ",", "'limit'", ":", "limit", "}", ")", "notes", "=", "[", "]", "for", "item", "in", "response", "[", "'results'", "]", ":", "n", "=", "{", "}", "n", "[", "'noteid'", "]", "=", "item", "[", "'noteid'", "]", "n", "[", "'ts'", "]", "=", "item", "[", "'ts'", "]", "n", "[", "'unread'", "]", "=", "item", "[", "'unread'", "]", "n", "[", "'starred'", "]", "=", "item", "[", "'starred'", "]", "n", "[", "'sent'", "]", "=", "item", "[", "'sent'", "]", "n", "[", "'subject'", "]", "=", "item", "[", "'subject'", "]", "n", "[", "'preview'", "]", "=", "item", "[", "'preview'", "]", "n", "[", "'body'", "]", "=", "item", "[", "'body'", "]", "n", "[", "'user'", "]", "=", "User", "(", ")", "n", "[", "'user'", "]", ".", "from_dict", "(", "item", "[", "'user'", "]", ")", "n", "[", "'recipients'", "]", "=", "[", "]", "for", "recipient_item", "in", "item", "[", "'recipients'", "]", ":", "u", "=", "User", "(", ")", "u", ".", "from_dict", "(", "recipient_item", ")", "n", "[", "'recipients'", "]", ".", "append", "(", "u", ")", "notes", ".", "append", "(", "n", ")", "return", "{", "\"results\"", ":", "notes", ",", "\"has_more\"", ":", "response", "[", "'has_more'", "]", ",", "\"next_offset\"", ":", "response", "[", "'next_offset'", "]", "}" ]
30.212766
0.006139
def _replace(_self, **kwds): 'Return a new SplitResult object replacing specified fields with new values' result = _self._make(map(kwds.pop, ('scheme', 'netloc', 'path', 'query', 'fragment'), _self)) if kwds: raise ValueError('Got unexpected field names: %r' % kwds.keys()) return result
[ "def", "_replace", "(", "_self", ",", "*", "*", "kwds", ")", ":", "result", "=", "_self", ".", "_make", "(", "map", "(", "kwds", ".", "pop", ",", "(", "'scheme'", ",", "'netloc'", ",", "'path'", ",", "'query'", ",", "'fragment'", ")", ",", "_self", ")", ")", "if", "kwds", ":", "raise", "ValueError", "(", "'Got unexpected field names: %r'", "%", "kwds", ".", "keys", "(", ")", ")", "return", "result" ]
54.333333
0.012085
def plot_coupling_matrix(self, lmax, nwin=None, weights=None, mode='full', axes_labelsize=None, tick_labelsize=None, show=True, ax=None, fname=None): """ Plot the multitaper coupling matrix. This matrix relates the global power spectrum to the expectation of the localized multitaper spectrum. Usage ----- x.plot_coupling_matrix(lmax, [nwin, weights, mode, axes_labelsize, tick_labelsize, show, ax, fname]) Parameters ---------- lmax : int Spherical harmonic bandwidth of the global power spectrum. nwin : int, optional, default = x.nwin Number of tapers used in the mutlitaper spectral analysis. weights : ndarray, optional, default = x.weights Taper weights used with the multitaper spectral analyses. mode : str, opitonal, default = 'full' 'full' returns a biased output spectrum of size lmax+lwin+1. The input spectrum is assumed to be zero for degrees l>lmax. 'same' returns a biased output spectrum with the same size (lmax+1) as the input spectrum. The input spectrum is assumed to be zero for degrees l>lmax. 'valid' returns a biased spectrum with size lmax-lwin+1. This returns only that part of the biased spectrum that is not influenced by the input spectrum beyond degree lmax. axes_labelsize : int, optional, default = None The font size for the x and y axes labels. tick_labelsize : int, optional, default = None The font size for the x and y tick labels. show : bool, optional, default = True If True, plot the image to the screen. ax : matplotlib axes object, optional, default = None An array of matplotlib axes objects where the plots will appear. fname : str, optional, default = None If present, save the image to the specified file. """ figsize = (_mpl.rcParams['figure.figsize'][0], _mpl.rcParams['figure.figsize'][0]) if axes_labelsize is None: axes_labelsize = _mpl.rcParams['axes.labelsize'] if tick_labelsize is None: tick_labelsize = _mpl.rcParams['xtick.labelsize'] if ax is None: fig = _plt.figure(figsize=figsize) axes = fig.add_subplot(111) else: axes = ax axes.imshow(self.coupling_matrix(lmax, nwin=nwin, weights=weights, mode=mode), aspect='auto') axes.set_xlabel('Input power', fontsize=axes_labelsize) axes.set_ylabel('Output power', fontsize=axes_labelsize) axes.tick_params(labelsize=tick_labelsize) axes.minorticks_on() if ax is None: fig.tight_layout(pad=0.5) if show: fig.show() if fname is not None: fig.savefig(fname) return fig, axes
[ "def", "plot_coupling_matrix", "(", "self", ",", "lmax", ",", "nwin", "=", "None", ",", "weights", "=", "None", ",", "mode", "=", "'full'", ",", "axes_labelsize", "=", "None", ",", "tick_labelsize", "=", "None", ",", "show", "=", "True", ",", "ax", "=", "None", ",", "fname", "=", "None", ")", ":", "figsize", "=", "(", "_mpl", ".", "rcParams", "[", "'figure.figsize'", "]", "[", "0", "]", ",", "_mpl", ".", "rcParams", "[", "'figure.figsize'", "]", "[", "0", "]", ")", "if", "axes_labelsize", "is", "None", ":", "axes_labelsize", "=", "_mpl", ".", "rcParams", "[", "'axes.labelsize'", "]", "if", "tick_labelsize", "is", "None", ":", "tick_labelsize", "=", "_mpl", ".", "rcParams", "[", "'xtick.labelsize'", "]", "if", "ax", "is", "None", ":", "fig", "=", "_plt", ".", "figure", "(", "figsize", "=", "figsize", ")", "axes", "=", "fig", ".", "add_subplot", "(", "111", ")", "else", ":", "axes", "=", "ax", "axes", ".", "imshow", "(", "self", ".", "coupling_matrix", "(", "lmax", ",", "nwin", "=", "nwin", ",", "weights", "=", "weights", ",", "mode", "=", "mode", ")", ",", "aspect", "=", "'auto'", ")", "axes", ".", "set_xlabel", "(", "'Input power'", ",", "fontsize", "=", "axes_labelsize", ")", "axes", ".", "set_ylabel", "(", "'Output power'", ",", "fontsize", "=", "axes_labelsize", ")", "axes", ".", "tick_params", "(", "labelsize", "=", "tick_labelsize", ")", "axes", ".", "minorticks_on", "(", ")", "if", "ax", "is", "None", ":", "fig", ".", "tight_layout", "(", "pad", "=", "0.5", ")", "if", "show", ":", "fig", ".", "show", "(", ")", "if", "fname", "is", "not", "None", ":", "fig", ".", "savefig", "(", "fname", ")", "return", "fig", ",", "axes" ]
43.471429
0.001285
def create_index(self, attr, unique=False, accept_none=False): """Create a new index on a given attribute. If C{unique} is True and records are found in the table with duplicate attribute values, the index is deleted and C{KeyError} is raised. If the table already has an index on the given attribute, then ValueError is raised. @param attr: the attribute to be used for indexed access and joins @type attr: string @param unique: flag indicating whether the indexed field values are expected to be unique across table entries @type unique: boolean @param accept_none: flag indicating whether None is an acceptable unique key value for this attribute (always True for non-unique indexes, default=False for unique indexes) @type accept_none: boolean """ if attr in self._indexes: raise ValueError('index %r already defined for table' % attr) if unique: self._indexes[attr] = _UniqueObjIndex(attr, accept_none) self._uniqueIndexes = [ind for ind in self._indexes.values() if ind.is_unique] else: self._indexes[attr] = _ObjIndex(attr) accept_none = True ind = self._indexes[attr] try: for obj in self.obs: obval = getattr(obj, attr, None) if obval is not None or accept_none: ind[obval] = obj else: raise KeyError("None is not an allowed key") return self except KeyError: del self._indexes[attr] self._uniqueIndexes = [ind for ind in self._indexes.values() if ind.is_unique] raise
[ "def", "create_index", "(", "self", ",", "attr", ",", "unique", "=", "False", ",", "accept_none", "=", "False", ")", ":", "if", "attr", "in", "self", ".", "_indexes", ":", "raise", "ValueError", "(", "'index %r already defined for table'", "%", "attr", ")", "if", "unique", ":", "self", ".", "_indexes", "[", "attr", "]", "=", "_UniqueObjIndex", "(", "attr", ",", "accept_none", ")", "self", ".", "_uniqueIndexes", "=", "[", "ind", "for", "ind", "in", "self", ".", "_indexes", ".", "values", "(", ")", "if", "ind", ".", "is_unique", "]", "else", ":", "self", ".", "_indexes", "[", "attr", "]", "=", "_ObjIndex", "(", "attr", ")", "accept_none", "=", "True", "ind", "=", "self", ".", "_indexes", "[", "attr", "]", "try", ":", "for", "obj", "in", "self", ".", "obs", ":", "obval", "=", "getattr", "(", "obj", ",", "attr", ",", "None", ")", "if", "obval", "is", "not", "None", "or", "accept_none", ":", "ind", "[", "obval", "]", "=", "obj", "else", ":", "raise", "KeyError", "(", "\"None is not an allowed key\"", ")", "return", "self", "except", "KeyError", ":", "del", "self", ".", "_indexes", "[", "attr", "]", "self", ".", "_uniqueIndexes", "=", "[", "ind", "for", "ind", "in", "self", ".", "_indexes", ".", "values", "(", ")", "if", "ind", ".", "is_unique", "]", "raise" ]
44.725
0.003829
def _get_transport(self): """ Return the SSH transport to the remote gateway """ if self.ssh_proxy: if isinstance(self.ssh_proxy, paramiko.proxy.ProxyCommand): proxy_repr = repr(self.ssh_proxy.cmd[1]) else: proxy_repr = repr(self.ssh_proxy) self.logger.debug('Connecting via proxy: {0}'.format(proxy_repr)) _socket = self.ssh_proxy else: _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if isinstance(_socket, socket.socket): _socket.settimeout(SSH_TIMEOUT) _socket.connect((self.ssh_host, self.ssh_port)) transport = paramiko.Transport(_socket) transport.set_keepalive(self.set_keepalive) transport.use_compression(compress=self.compression) transport.daemon = self.daemon_transport return transport
[ "def", "_get_transport", "(", "self", ")", ":", "if", "self", ".", "ssh_proxy", ":", "if", "isinstance", "(", "self", ".", "ssh_proxy", ",", "paramiko", ".", "proxy", ".", "ProxyCommand", ")", ":", "proxy_repr", "=", "repr", "(", "self", ".", "ssh_proxy", ".", "cmd", "[", "1", "]", ")", "else", ":", "proxy_repr", "=", "repr", "(", "self", ".", "ssh_proxy", ")", "self", ".", "logger", ".", "debug", "(", "'Connecting via proxy: {0}'", ".", "format", "(", "proxy_repr", ")", ")", "_socket", "=", "self", ".", "ssh_proxy", "else", ":", "_socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "if", "isinstance", "(", "_socket", ",", "socket", ".", "socket", ")", ":", "_socket", ".", "settimeout", "(", "SSH_TIMEOUT", ")", "_socket", ".", "connect", "(", "(", "self", ".", "ssh_host", ",", "self", ".", "ssh_port", ")", ")", "transport", "=", "paramiko", ".", "Transport", "(", "_socket", ")", "transport", ".", "set_keepalive", "(", "self", ".", "set_keepalive", ")", "transport", ".", "use_compression", "(", "compress", "=", "self", ".", "compression", ")", "transport", ".", "daemon", "=", "self", ".", "daemon_transport", "return", "transport" ]
44.05
0.002222
def _prepare_reserved_tokens(reserved_tokens): """Prepare reserved tokens and a regex for splitting them out of strings.""" reserved_tokens = [tf.compat.as_text(tok) for tok in reserved_tokens or []] dups = _find_duplicates(reserved_tokens) if dups: raise ValueError("Duplicates found in tokens: %s" % dups) reserved_tokens_re = _make_reserved_tokens_re(reserved_tokens) return reserved_tokens, reserved_tokens_re
[ "def", "_prepare_reserved_tokens", "(", "reserved_tokens", ")", ":", "reserved_tokens", "=", "[", "tf", ".", "compat", ".", "as_text", "(", "tok", ")", "for", "tok", "in", "reserved_tokens", "or", "[", "]", "]", "dups", "=", "_find_duplicates", "(", "reserved_tokens", ")", "if", "dups", ":", "raise", "ValueError", "(", "\"Duplicates found in tokens: %s\"", "%", "dups", ")", "reserved_tokens_re", "=", "_make_reserved_tokens_re", "(", "reserved_tokens", ")", "return", "reserved_tokens", ",", "reserved_tokens_re" ]
52.75
0.016317
def _set_router_isis_config(self, v, load=False): """ Setter method for router_isis_config, mapped from YANG variable /isis_state/router_isis_config (container) If this variable is read-only (config: false) in the source YANG file, then _set_router_isis_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_router_isis_config() directly. YANG Description: ISIS Global configuration summary """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=router_isis_config.router_isis_config, is_container='container', presence=False, yang_name="router-isis-config", rest_name="router-isis-config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-router-isis-info', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """router_isis_config must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=router_isis_config.router_isis_config, is_container='container', presence=False, yang_name="router-isis-config", rest_name="router-isis-config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-router-isis-info', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)""", }) self.__router_isis_config = t if hasattr(self, '_set'): self._set()
[ "def", "_set_router_isis_config", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "router_isis_config", ".", "router_isis_config", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"router-isis-config\"", ",", "rest_name", "=", "\"router-isis-config\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'callpoint'", ":", "u'isis-router-isis-info'", ",", "u'cli-suppress-show-path'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-isis-operational'", ",", "defining_module", "=", "'brocade-isis-operational'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "False", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"router_isis_config must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=router_isis_config.router_isis_config, is_container='container', presence=False, yang_name=\"router-isis-config\", rest_name=\"router-isis-config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'isis-router-isis-info', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-isis-operational', defining_module='brocade-isis-operational', yang_type='container', is_config=False)\"\"\"", ",", "}", ")", "self", ".", "__router_isis_config", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
78.333333
0.005255
def cluster_setup(nodes, pcsclustername='pcscluster', extra_args=None): ''' Setup pacemaker cluster via pcs command nodes a list of nodes which should be set up pcsclustername Name of the Pacemaker cluster (default: pcscluster) extra_args list of extra option for the \'pcs cluster setup\' command CLI Example: .. code-block:: bash salt '*' pcs.cluster_setup nodes='[ node1.example.org node2.example.org ]' pcsclustername=pcscluster ''' cmd = ['pcs', 'cluster', 'setup'] cmd += ['--name', pcsclustername] cmd += nodes if isinstance(extra_args, (list, tuple)): cmd += extra_args return __salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False)
[ "def", "cluster_setup", "(", "nodes", ",", "pcsclustername", "=", "'pcscluster'", ",", "extra_args", "=", "None", ")", ":", "cmd", "=", "[", "'pcs'", ",", "'cluster'", ",", "'setup'", "]", "cmd", "+=", "[", "'--name'", ",", "pcsclustername", "]", "cmd", "+=", "nodes", "if", "isinstance", "(", "extra_args", ",", "(", "list", ",", "tuple", ")", ")", ":", "cmd", "+=", "extra_args", "return", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "output_loglevel", "=", "'trace'", ",", "python_shell", "=", "False", ")" ]
28.153846
0.003963
def is_reserved(self): """Test if the address is otherwise IETF reserved. Returns: A boolean, True if the address is within one of the reserved IPv6 Network ranges. """ reserved_nets = [IPv6Network(u'::/8'), IPv6Network(u'100::/8'), IPv6Network(u'200::/7'), IPv6Network(u'400::/6'), IPv6Network(u'800::/5'), IPv6Network(u'1000::/4'), IPv6Network(u'4000::/3'), IPv6Network(u'6000::/3'), IPv6Network(u'8000::/3'), IPv6Network(u'A000::/3'), IPv6Network(u'C000::/3'), IPv6Network(u'E000::/4'), IPv6Network(u'F000::/5'), IPv6Network(u'F800::/6'), IPv6Network(u'FE00::/9')] return any(self in x for x in reserved_nets)
[ "def", "is_reserved", "(", "self", ")", ":", "reserved_nets", "=", "[", "IPv6Network", "(", "u'::/8'", ")", ",", "IPv6Network", "(", "u'100::/8'", ")", ",", "IPv6Network", "(", "u'200::/7'", ")", ",", "IPv6Network", "(", "u'400::/6'", ")", ",", "IPv6Network", "(", "u'800::/5'", ")", ",", "IPv6Network", "(", "u'1000::/4'", ")", ",", "IPv6Network", "(", "u'4000::/3'", ")", ",", "IPv6Network", "(", "u'6000::/3'", ")", ",", "IPv6Network", "(", "u'8000::/3'", ")", ",", "IPv6Network", "(", "u'A000::/3'", ")", ",", "IPv6Network", "(", "u'C000::/3'", ")", ",", "IPv6Network", "(", "u'E000::/4'", ")", ",", "IPv6Network", "(", "u'F000::/5'", ")", ",", "IPv6Network", "(", "u'F800::/6'", ")", ",", "IPv6Network", "(", "u'FE00::/9'", ")", "]", "return", "any", "(", "self", "in", "x", "for", "x", "in", "reserved_nets", ")" ]
46.5
0.002342
def isCollapsed( self ): """ Returns whether or not this group box is collapsed. :return <bool> """ if not self.isCollapsible(): return False if self._inverted: return self.isChecked() return not self.isChecked()
[ "def", "isCollapsed", "(", "self", ")", ":", "if", "not", "self", ".", "isCollapsible", "(", ")", ":", "return", "False", "if", "self", ".", "_inverted", ":", "return", "self", ".", "isChecked", "(", ")", "return", "not", "self", ".", "isChecked", "(", ")" ]
26.166667
0.018462
def RLS(anchors, W, r, print_out=False, grid=None, num_points=10): """ Range least squares (RLS) using grid search. Algorithm written by A.Beck, P.Stoica in "Approximate and Exact solutions of Source Localization Problems". :param anchors: anchor points :param r2: squared distances from anchors to point x. :param grid: where to search for solution. (min, max) where min and max are lists of d elements, d being the dimension of the setup. If not given, the search is conducted within the space covered by the anchors. :param num_points: number of grid points per direction. :return: estimated position of point x. """ def cost_function(arr): X = np.c_[arr] r_measured = np.linalg.norm(anchors - X) mse = np.linalg.norm(r_measured - r)**2 return mse if grid is None: grid = [np.min(anchors, axis=0), np.max(anchors, axis=0)] d = anchors.shape[1] x = np.linspace(grid[0][0], grid[1][0], num_points) y = np.linspace(grid[0][1], grid[1][1], num_points) if d == 2: errors_test = np.zeros((num_points, num_points)) for i, xs in enumerate(x): for j, ys in enumerate(y): errors_test[i, j] = cost_function((xs, ys)) min_idx = errors_test.argmin() min_idx_multi = np.unravel_index(min_idx, errors_test.shape) xhat = np.c_[x[min_idx_multi[0]], y[min_idx_multi[1]]] elif d == 3: z = np.linspace(grid[0][2], grid[1][2], num_points) errors_test = np.zeros((num_points, num_points, num_points)) # TODO: make this more efficient. #xx, yy, zz= np.meshgrid(x, y, z) for i, xs in enumerate(x): for j, ys in enumerate(y): for k, zs in enumerate(z): errors_test[i, j, k] = cost_function((xs, ys, zs)) min_idx = errors_test.argmin() min_idx_multi = np.unravel_index(min_idx, errors_test.shape) xhat = np.c_[x[min_idx_multi[0]], y[min_idx_multi[1]], z[min_idx_multi[2]]] else: raise ValueError('Non-supported number of dimensions.') return xhat[0]
[ "def", "RLS", "(", "anchors", ",", "W", ",", "r", ",", "print_out", "=", "False", ",", "grid", "=", "None", ",", "num_points", "=", "10", ")", ":", "def", "cost_function", "(", "arr", ")", ":", "X", "=", "np", ".", "c_", "[", "arr", "]", "r_measured", "=", "np", ".", "linalg", ".", "norm", "(", "anchors", "-", "X", ")", "mse", "=", "np", ".", "linalg", ".", "norm", "(", "r_measured", "-", "r", ")", "**", "2", "return", "mse", "if", "grid", "is", "None", ":", "grid", "=", "[", "np", ".", "min", "(", "anchors", ",", "axis", "=", "0", ")", ",", "np", ".", "max", "(", "anchors", ",", "axis", "=", "0", ")", "]", "d", "=", "anchors", ".", "shape", "[", "1", "]", "x", "=", "np", ".", "linspace", "(", "grid", "[", "0", "]", "[", "0", "]", ",", "grid", "[", "1", "]", "[", "0", "]", ",", "num_points", ")", "y", "=", "np", ".", "linspace", "(", "grid", "[", "0", "]", "[", "1", "]", ",", "grid", "[", "1", "]", "[", "1", "]", ",", "num_points", ")", "if", "d", "==", "2", ":", "errors_test", "=", "np", ".", "zeros", "(", "(", "num_points", ",", "num_points", ")", ")", "for", "i", ",", "xs", "in", "enumerate", "(", "x", ")", ":", "for", "j", ",", "ys", "in", "enumerate", "(", "y", ")", ":", "errors_test", "[", "i", ",", "j", "]", "=", "cost_function", "(", "(", "xs", ",", "ys", ")", ")", "min_idx", "=", "errors_test", ".", "argmin", "(", ")", "min_idx_multi", "=", "np", ".", "unravel_index", "(", "min_idx", ",", "errors_test", ".", "shape", ")", "xhat", "=", "np", ".", "c_", "[", "x", "[", "min_idx_multi", "[", "0", "]", "]", ",", "y", "[", "min_idx_multi", "[", "1", "]", "]", "]", "elif", "d", "==", "3", ":", "z", "=", "np", ".", "linspace", "(", "grid", "[", "0", "]", "[", "2", "]", ",", "grid", "[", "1", "]", "[", "2", "]", ",", "num_points", ")", "errors_test", "=", "np", ".", "zeros", "(", "(", "num_points", ",", "num_points", ",", "num_points", ")", ")", "# TODO: make this more efficient.", "#xx, yy, zz= np.meshgrid(x, y, z)", "for", "i", ",", "xs", "in", "enumerate", "(", "x", ")", ":", "for", "j", ",", "ys", "in", "enumerate", "(", "y", ")", ":", "for", "k", ",", "zs", "in", "enumerate", "(", "z", ")", ":", "errors_test", "[", "i", ",", "j", ",", "k", "]", "=", "cost_function", "(", "(", "xs", ",", "ys", ",", "zs", ")", ")", "min_idx", "=", "errors_test", ".", "argmin", "(", ")", "min_idx_multi", "=", "np", ".", "unravel_index", "(", "min_idx", ",", "errors_test", ".", "shape", ")", "xhat", "=", "np", ".", "c_", "[", "x", "[", "min_idx_multi", "[", "0", "]", "]", ",", "y", "[", "min_idx_multi", "[", "1", "]", "]", ",", "z", "[", "min_idx_multi", "[", "2", "]", "]", "]", "else", ":", "raise", "ValueError", "(", "'Non-supported number of dimensions.'", ")", "return", "xhat", "[", "0", "]" ]
41.392157
0.002776
def change_node_subscriptions(self, jid, node, subscriptions_to_set): """ Update the subscriptions at a node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the node to modify :type node: :class:`str` :param subscriptions_to_set: The subscriptions to set at the node. :type subscriptions_to_set: :class:`~collections.abc.Iterable` of tuples consisting of the JID to (un)subscribe and the subscription level to use. :raises aioxmpp.errors.XMPPError: as returned by the service `subscriptions_to_set` must be an iterable of pairs (`jid`, `subscription`), where the `jid` indicates the JID for which the `subscription` is to be set. """ iq = aioxmpp.stanza.IQ( type_=aioxmpp.structs.IQType.SET, to=jid, payload=pubsub_xso.OwnerRequest( pubsub_xso.OwnerSubscriptions( node, subscriptions=[ pubsub_xso.OwnerSubscription( jid, subscription ) for jid, subscription in subscriptions_to_set ] ) ) ) yield from self.client.send(iq)
[ "def", "change_node_subscriptions", "(", "self", ",", "jid", ",", "node", ",", "subscriptions_to_set", ")", ":", "iq", "=", "aioxmpp", ".", "stanza", ".", "IQ", "(", "type_", "=", "aioxmpp", ".", "structs", ".", "IQType", ".", "SET", ",", "to", "=", "jid", ",", "payload", "=", "pubsub_xso", ".", "OwnerRequest", "(", "pubsub_xso", ".", "OwnerSubscriptions", "(", "node", ",", "subscriptions", "=", "[", "pubsub_xso", ".", "OwnerSubscription", "(", "jid", ",", "subscription", ")", "for", "jid", ",", "subscription", "in", "subscriptions_to_set", "]", ")", ")", ")", "yield", "from", "self", ".", "client", ".", "send", "(", "iq", ")" ]
37.722222
0.001436
def quad_tree(self): """Gets the tile in the Microsoft QuadTree format, converted from TMS""" value = '' tms_x, tms_y = self.tms tms_y = (2 ** self.zoom - 1) - tms_y for i in range(self.zoom, 0, -1): digit = 0 mask = 1 << (i - 1) if (tms_x & mask) != 0: digit += 1 if (tms_y & mask) != 0: digit += 2 value += str(digit) return value
[ "def", "quad_tree", "(", "self", ")", ":", "value", "=", "''", "tms_x", ",", "tms_y", "=", "self", ".", "tms", "tms_y", "=", "(", "2", "**", "self", ".", "zoom", "-", "1", ")", "-", "tms_y", "for", "i", "in", "range", "(", "self", ".", "zoom", ",", "0", ",", "-", "1", ")", ":", "digit", "=", "0", "mask", "=", "1", "<<", "(", "i", "-", "1", ")", "if", "(", "tms_x", "&", "mask", ")", "!=", "0", ":", "digit", "+=", "1", "if", "(", "tms_y", "&", "mask", ")", "!=", "0", ":", "digit", "+=", "2", "value", "+=", "str", "(", "digit", ")", "return", "value" ]
32.785714
0.006356
def process_firmware_image(compact_firmware_file, ilo_object): """Processes the firmware file. Processing the firmware file entails extracting the firmware file from its compact format. Along with the raw (extracted) firmware file, this method also sends out information of whether or not the extracted firmware file a) needs to be uploaded to http store b) is extracted in reality or the file was already in raw format :param compact_firmware_file: firmware file to extract from :param ilo_object: ilo client object (ribcl/ris object) :raises: InvalidInputError, for unsupported file types or raw firmware file not found from compact format. :raises: ImageExtractionFailed, for extraction related issues :returns: core(raw) firmware file :returns: to_upload, boolean to indicate whether to upload or not :returns: is_extracted, boolean to indicate firmware image is actually extracted or not. """ fw_img_extractor = firmware_controller.get_fw_extractor( compact_firmware_file) LOG.debug('Extracting firmware file: %s ...', compact_firmware_file) raw_fw_file_path, is_extracted = fw_img_extractor.extract() # Note(deray): Need to check if this processing is for RIS or RIBCL # based systems. For Gen9 machines (RIS based) the firmware file needs # to be on a http store, and hence requires the upload to happen for the # firmware file. to_upload = False m = re.search('Gen(\d+)', ilo_object.model) if int(m.group(1)) > 8: to_upload = True LOG.debug('Extracting firmware file: %s ... done', compact_firmware_file) msg = ('Firmware file %(fw_file)s is %(msg)s. Need hosting (on an http ' 'store): %(yes_or_no)s' % {'fw_file': compact_firmware_file, 'msg': ('extracted. Extracted file: %s' % raw_fw_file_path if is_extracted else 'already in raw format'), 'yes_or_no': 'Yes' if to_upload else 'No'}) LOG.info(msg) return raw_fw_file_path, to_upload, is_extracted
[ "def", "process_firmware_image", "(", "compact_firmware_file", ",", "ilo_object", ")", ":", "fw_img_extractor", "=", "firmware_controller", ".", "get_fw_extractor", "(", "compact_firmware_file", ")", "LOG", ".", "debug", "(", "'Extracting firmware file: %s ...'", ",", "compact_firmware_file", ")", "raw_fw_file_path", ",", "is_extracted", "=", "fw_img_extractor", ".", "extract", "(", ")", "# Note(deray): Need to check if this processing is for RIS or RIBCL", "# based systems. For Gen9 machines (RIS based) the firmware file needs", "# to be on a http store, and hence requires the upload to happen for the", "# firmware file.", "to_upload", "=", "False", "m", "=", "re", ".", "search", "(", "'Gen(\\d+)'", ",", "ilo_object", ".", "model", ")", "if", "int", "(", "m", ".", "group", "(", "1", ")", ")", ">", "8", ":", "to_upload", "=", "True", "LOG", ".", "debug", "(", "'Extracting firmware file: %s ... done'", ",", "compact_firmware_file", ")", "msg", "=", "(", "'Firmware file %(fw_file)s is %(msg)s. Need hosting (on an http '", "'store): %(yes_or_no)s'", "%", "{", "'fw_file'", ":", "compact_firmware_file", ",", "'msg'", ":", "(", "'extracted. Extracted file: %s'", "%", "raw_fw_file_path", "if", "is_extracted", "else", "'already in raw format'", ")", ",", "'yes_or_no'", ":", "'Yes'", "if", "to_upload", "else", "'No'", "}", ")", "LOG", ".", "info", "(", "msg", ")", "return", "raw_fw_file_path", ",", "to_upload", ",", "is_extracted" ]
50.02439
0.000956
def extend(base: Dict[Any, Any], extension: Dict[Any, Any]) -> Dict[Any, Any]: '''Extend base by updating with the extension. **Arguments** :``base``: dictionary to have keys updated or added :``extension``: dictionary to update base with **Return Value(s)** Resulting dictionary from updating base with extension. ''' _ = copy.deepcopy(base) _.update(extension) return _
[ "def", "extend", "(", "base", ":", "Dict", "[", "Any", ",", "Any", "]", ",", "extension", ":", "Dict", "[", "Any", ",", "Any", "]", ")", "->", "Dict", "[", "Any", ",", "Any", "]", ":", "_", "=", "copy", ".", "deepcopy", "(", "base", ")", "_", ".", "update", "(", "extension", ")", "return", "_" ]
22.555556
0.002364
def managed(name, template_name=None, template_source=None, template_hash=None, template_hash_name=None, saltenv='base', template_engine='jinja', skip_verify=False, context=None, defaults=None, test=False, commit=True, debug=False, replace=False, commit_in=None, commit_at=None, revert_in=None, revert_at=None, **template_vars): ''' Manages the configuration on network devices. By default this state will commit the changes on the device. If there are no changes required, it does not commit and the field ``already_configured`` from the output dictionary will be set as ``True`` to notify that. To avoid committing the configuration, set the argument ``test`` to ``True`` (or via the CLI argument ``test=True``) and will discard (dry run). To preserve the changes, set ``commit`` to ``False`` (either as CLI argument, either as state parameter). However, this is recommended to be used only in exceptional cases when there are applied few consecutive states and/or configuration changes. Otherwise the user might forget that the config DB is locked and the candidate config buffer is not cleared/merged in the running config. To replace the config, set ``replace`` to ``True``. This option is recommended to be used with caution! template_name Identifies path to the template source. The template can be either stored on the local machine, either remotely. The recommended location is under the ``file_roots`` as specified in the master config file. For example, let's suppose the ``file_roots`` is configured as: .. code-block:: yaml file_roots: base: - /etc/salt/states Placing the template under ``/etc/salt/states/templates/example.jinja``, it can be used as ``salt://templates/example.jinja``. Alternatively, for local files, the user can specify the absolute path. If remotely, the source can be retrieved via ``http``, ``https`` or ``ftp``. Examples: - ``salt://my_template.jinja`` - ``/absolute/path/to/my_template.jinja`` - ``http://example.com/template.cheetah`` - ``https:/example.com/template.mako`` - ``ftp://example.com/template.py`` .. versionchanged:: 2019.2.0 This argument can now support a list of templates to be rendered. The resulting configuration text is loaded at once, as a single configuration chunk. template_source: None Inline config template to be rendered and loaded on the device. template_hash: None Hash of the template file. Format: ``{hash_type: 'md5', 'hsum': <md5sum>}`` template_hash_name: None When ``template_hash`` refers to a remote file, this specifies the filename to look for in that file. saltenv: base Specifies the template environment. This will influence the relative imports inside the templates. template_engine: jinja The following templates engines are supported: - :mod:`cheetah<salt.renderers.cheetah>` - :mod:`genshi<salt.renderers.genshi>` - :mod:`jinja<salt.renderers.jinja>` - :mod:`mako<salt.renderers.mako>` - :mod:`py<salt.renderers.py>` - :mod:`wempy<salt.renderers.wempy>` skip_verify: False If ``True``, hash verification of remote file sources (``http://``, ``https://``, ``ftp://``) will be skipped, and the ``source_hash`` argument will be ignored. .. versionchanged:: 2017.7.1 test: False Dry run? If set to ``True``, will apply the config, discard and return the changes. Default: ``False`` (will commit the changes on the device). commit: True Commit? Default: ``True``. debug: False Debug mode. Will insert a new key under the output dictionary, as ``loaded_config`` containing the raw result after the template was rendered. .. 
note:: This argument cannot be used directly on the command line. Instead, it can be passed through the ``pillar`` variable when executing either of the :py:func:`state.sls <salt.modules.state.sls>` or :py:func:`state.apply <salt.modules.state.apply>` (see below for an example). commit_in: ``None`` Commit the changes in a specific number of minutes / hours. Example of accepted formats: ``5`` (commit in 5 minutes), ``2m`` (commit in 2 minutes), ``1h`` (commit the changes in 1 hour)`, ``5h30m`` (commit the changes in 5 hours and 30 minutes). .. note:: This feature works on any platforms, as it does not rely on the native features of the network operating system. .. note:: After the command is executed and the ``diff`` is not satisfactory, or for any other reasons you have to discard the commit, you are able to do so using the :py:func:`net.cancel_commit <salt.modules.napalm_network.cancel_commit>` execution function, using the commit ID returned by this function. .. warning:: Using this feature, Salt will load the exact configuration you expect, however the diff may change in time (i.e., if an user applies a manual configuration change, or a different process or command changes the configuration in the meanwhile). .. versionadded:: 2019.2.0 commit_at: ``None`` Commit the changes at a specific time. Example of accepted formats: ``1am`` (will commit the changes at the next 1AM), ``13:20`` (will commit at 13:20), ``1:20am``, etc. .. note:: This feature works on any platforms, as it does not rely on the native features of the network operating system. .. note:: After the command is executed and the ``diff`` is not satisfactory, or for any other reasons you have to discard the commit, you are able to do so using the :py:func:`net.cancel_commit <salt.modules.napalm_network.cancel_commit>` execution function, using the commit ID returned by this function. .. warning:: Using this feature, Salt will load the exact configuration you expect, however the diff may change in time (i.e., if an user applies a manual configuration change, or a different process or command changes the configuration in the meanwhile). .. versionadded:: 2019.2.0 revert_in: ``None`` Commit and revert the changes in a specific number of minutes / hours. Example of accepted formats: ``5`` (revert in 5 minutes), ``2m`` (revert in 2 minutes), ``1h`` (revert the changes in 1 hour)`, ``5h30m`` (revert the changes in 5 hours and 30 minutes). .. note:: To confirm the commit, and prevent reverting the changes, you will have to execute the :mod:`net.confirm_commit <salt.modules.napalm_network.confirm_commit>` function, using the commit ID returned by this function. .. warning:: This works on any platform, regardless if they have or don't have native capabilities to confirming a commit. However, please be *very* cautious when using this feature: on Junos (as it is the only NAPALM core platform supporting this natively) it executes a commit confirmed as you would do from the command line. All the other platforms don't have this capability natively, therefore the revert is done via Salt. That means, your device needs to be reachable at the moment when Salt will attempt to revert your changes. Be cautious when pushing configuration changes that would prevent you reach the device. Similarly, if an user or a different process apply other configuration changes in the meanwhile (between the moment you commit and till the changes are reverted), these changes would be equally reverted, as Salt cannot be aware of them. .. 
versionadded:: 2019.2.0 revert_at: ``None`` Commit and revert the changes at a specific time. Example of accepted formats: ``1am`` (will commit and revert the changes at the next 1AM), ``13:20`` (will commit and revert at 13:20), ``1:20am``, etc. .. note:: To confirm the commit, and prevent reverting the changes, you will have to execute the :mod:`net.confirm_commit <salt.modules.napalm_network.confirm_commit>` function, using the commit ID returned by this function. .. warning:: This works on any platform, regardless if they have or don't have native capabilities to confirming a commit. However, please be *very* cautious when using this feature: on Junos (as it is the only NAPALM core platform supporting this natively) it executes a commit confirmed as you would do from the command line. All the other platforms don't have this capability natively, therefore the revert is done via Salt. That means, your device needs to be reachable at the moment when Salt will attempt to revert your changes. Be cautious when pushing configuration changes that would prevent you reach the device. Similarly, if an user or a different process apply other configuration changes in the meanwhile (between the moment you commit and till the changes are reverted), these changes would be equally reverted, as Salt cannot be aware of them. .. versionadded:: 2019.2.0 replace: False Load and replace the configuration. Default: ``False`` (will apply load merge). context: None Overrides default context variables passed to the template. .. versionadded:: 2019.2.0 defaults: None Default variables/context passed to the template. template_vars Dictionary with the arguments/context to be used when the template is rendered. Do not explicitly specify this argument. This represents any other variable that will be sent to the template rendering system. Please see an example below! In both ``ntp_peers_example_using_pillar`` and ``ntp_peers_example``, ``peers`` is sent as template variable. .. note:: It is more recommended to use the ``context`` argument instead, to avoid any conflicts with other arguments. SLS Example (e.g.: under salt://router/config.sls) : .. code-block:: yaml whole_config_example: netconfig.managed: - template_name: salt://path/to/complete_config.jinja - debug: True - replace: True bgp_config_example: netconfig.managed: - template_name: /absolute/path/to/bgp_neighbors.mako - template_engine: mako prefix_lists_example: netconfig.managed: - template_name: prefix_lists.cheetah - debug: True - template_engine: cheetah ntp_peers_example: netconfig.managed: - template_name: http://bit.ly/2gKOj20 - skip_verify: False - debug: True - peers: - 192.168.0.1 - 192.168.0.1 ntp_peers_example_using_pillar: netconfig.managed: - template_name: http://bit.ly/2gKOj20 - peers: {{ pillar.get('ntp.peers', []) }} Multi template example: .. code-block:: yaml hostname_and_ntp: netconfig.managed: - template_name: - https://bit.ly/2OhSgqP - https://bit.ly/2M6C4Lx - https://bit.ly/2OIWVTs - debug: true - context: hostname: {{ opts.id }} servers: - 172.17.17.1 - 172.17.17.2 peers: - 192.168.0.1 - 192.168.0.2 Usage examples: .. code-block:: bash $ sudo salt 'juniper.device' state.sls router.config test=True $ sudo salt -N all-routers state.sls router.config pillar="{'debug': True}" ``router.config`` depends on the location of the SLS file (see above). Running this command, will be executed all five steps from above. These examples above are not meant to be used in a production environment, their sole purpose is to provide usage examples. 
Output example: .. code-block:: bash $ sudo salt 'juniper.device' state.sls router.config test=True juniper.device: ---------- ID: ntp_peers_example_using_pillar Function: netconfig.managed Result: None Comment: Testing mode: Configuration discarded. Started: 12:01:40.744535 Duration: 8755.788 ms Changes: ---------- diff: [edit system ntp] peer 192.168.0.1 { ... } + peer 172.17.17.1; + peer 172.17.17.3; Summary for juniper.device ------------ Succeeded: 1 (unchanged=1, changed=1) Failed: 0 ------------ Total states run: 1 Total run time: 8.756 s Raw output example (useful when the output is reused in other states/execution modules): .. code-block:: bash $ sudo salt --out=pprint 'juniper.device' state.sls router.config test=True debug=True .. code-block:: python { 'juniper.device': { 'netconfig_|-ntp_peers_example_using_pillar_|-ntp_peers_example_using_pillar_|-managed': { '__id__': 'ntp_peers_example_using_pillar', '__run_num__': 0, 'already_configured': False, 'changes': { 'diff': '[edit system ntp] peer 192.168.0.1 { ... }+ peer 172.17.17.1;+ peer 172.17.17.3;' }, 'comment': 'Testing mode: Configuration discarded.', 'duration': 7400.759, 'loaded_config': 'system { ntp { peer 172.17.17.1; peer 172.17.17.3; } }', 'name': 'ntp_peers_example_using_pillar', 'result': None, 'start_time': '12:09:09.811445' } } } ''' ret = salt.utils.napalm.default_ret(name) # the user can override the flags the equivalent CLI args # which have higher precedence test = __salt__['config.merge']('test', test) debug = __salt__['config.merge']('debug', debug) commit = __salt__['config.merge']('commit', commit) replace = __salt__['config.merge']('replace', replace) # this might be a bit risky skip_verify = __salt__['config.merge']('skip_verify', skip_verify) commit_in = __salt__['config.merge']('commit_in', commit_in) commit_at = __salt__['config.merge']('commit_at', commit_at) revert_in = __salt__['config.merge']('revert_in', revert_in) revert_at = __salt__['config.merge']('revert_at', revert_at) config_update_ret = _update_config(template_name=template_name, template_source=template_source, template_hash=template_hash, template_hash_name=template_hash_name, saltenv=saltenv, template_engine=template_engine, skip_verify=skip_verify, context=context, defaults=defaults, test=test, commit=commit, commit_in=commit_in, commit_at=commit_at, revert_in=revert_in, revert_at=revert_at, debug=debug, replace=replace, **template_vars) return salt.utils.napalm.loaded_ret(ret, config_update_ret, test, debug)
[ "def", "managed", "(", "name", ",", "template_name", "=", "None", ",", "template_source", "=", "None", ",", "template_hash", "=", "None", ",", "template_hash_name", "=", "None", ",", "saltenv", "=", "'base'", ",", "template_engine", "=", "'jinja'", ",", "skip_verify", "=", "False", ",", "context", "=", "None", ",", "defaults", "=", "None", ",", "test", "=", "False", ",", "commit", "=", "True", ",", "debug", "=", "False", ",", "replace", "=", "False", ",", "commit_in", "=", "None", ",", "commit_at", "=", "None", ",", "revert_in", "=", "None", ",", "revert_at", "=", "None", ",", "*", "*", "template_vars", ")", ":", "ret", "=", "salt", ".", "utils", ".", "napalm", ".", "default_ret", "(", "name", ")", "# the user can override the flags the equivalent CLI args", "# which have higher precedence", "test", "=", "__salt__", "[", "'config.merge'", "]", "(", "'test'", ",", "test", ")", "debug", "=", "__salt__", "[", "'config.merge'", "]", "(", "'debug'", ",", "debug", ")", "commit", "=", "__salt__", "[", "'config.merge'", "]", "(", "'commit'", ",", "commit", ")", "replace", "=", "__salt__", "[", "'config.merge'", "]", "(", "'replace'", ",", "replace", ")", "# this might be a bit risky", "skip_verify", "=", "__salt__", "[", "'config.merge'", "]", "(", "'skip_verify'", ",", "skip_verify", ")", "commit_in", "=", "__salt__", "[", "'config.merge'", "]", "(", "'commit_in'", ",", "commit_in", ")", "commit_at", "=", "__salt__", "[", "'config.merge'", "]", "(", "'commit_at'", ",", "commit_at", ")", "revert_in", "=", "__salt__", "[", "'config.merge'", "]", "(", "'revert_in'", ",", "revert_in", ")", "revert_at", "=", "__salt__", "[", "'config.merge'", "]", "(", "'revert_at'", ",", "revert_at", ")", "config_update_ret", "=", "_update_config", "(", "template_name", "=", "template_name", ",", "template_source", "=", "template_source", ",", "template_hash", "=", "template_hash", ",", "template_hash_name", "=", "template_hash_name", ",", "saltenv", "=", "saltenv", ",", "template_engine", "=", "template_engine", ",", "skip_verify", "=", "skip_verify", ",", "context", "=", "context", ",", "defaults", "=", "defaults", ",", "test", "=", "test", ",", "commit", "=", "commit", ",", "commit_in", "=", "commit_in", ",", "commit_at", "=", "commit_at", ",", "revert_in", "=", "revert_in", ",", "revert_at", "=", "revert_at", ",", "debug", "=", "debug", ",", "replace", "=", "replace", ",", "*", "*", "template_vars", ")", "return", "salt", ".", "utils", ".", "napalm", ".", "loaded_ret", "(", "ret", ",", "config_update_ret", ",", "test", ",", "debug", ")" ]
42.035443
0.002412
def module_function(string): """ Load a function from a python module using a file name, function name specification of format: /path/to/x.py:function_name[:parameter] """ parts = string.split(':', 2) if len(parts) < 2: raise ValueError( "Illegal specification. Should be module:function[:parameter]") module_path, function_name = parts[:2] # Import the module module_vars = {} exec(compile(open(module_path).read(), module_path, 'exec'), module_vars) try: function = module_vars[function_name] except KeyError: raise argparse.ArgumentTypeError("{0} has no attribute '{1}'".format( module_path, function_name)) if len(parts) == 3: old_function = function function = lambda r: old_function(r, parts[2]) return function
[ "def", "module_function", "(", "string", ")", ":", "parts", "=", "string", ".", "split", "(", "':'", ",", "2", ")", "if", "len", "(", "parts", ")", "<", "2", ":", "raise", "ValueError", "(", "\"Illegal specification. Should be module:function[:parameter]\"", ")", "module_path", ",", "function_name", "=", "parts", "[", ":", "2", "]", "# Import the module", "module_vars", "=", "{", "}", "exec", "(", "compile", "(", "open", "(", "module_path", ")", ".", "read", "(", ")", ",", "module_path", ",", "'exec'", ")", ",", "module_vars", ")", "try", ":", "function", "=", "module_vars", "[", "function_name", "]", "except", "KeyError", ":", "raise", "argparse", ".", "ArgumentTypeError", "(", "\"{0} has no attribute '{1}'\"", ".", "format", "(", "module_path", ",", "function_name", ")", ")", "if", "len", "(", "parts", ")", "==", "3", ":", "old_function", "=", "function", "function", "=", "lambda", "r", ":", "old_function", "(", "r", ",", "parts", "[", "2", "]", ")", "return", "function" ]
30.481481
0.002356
def list_worksheets(self): """ List what worksheet keys exist Returns a list of tuples of the form: (WORKSHEET_ID, WORKSHEET_NAME) You can then retrieve the specific WORKSHEET_ID in the future by constructing a new GSpreadsheet(worksheet=WORKSHEET_ID, ...) """ worksheets = self.get_worksheets() return [(x.link[3].href.split('/')[-1], x.title.text) for x in worksheets.entry]
[ "def", "list_worksheets", "(", "self", ")", ":", "worksheets", "=", "self", ".", "get_worksheets", "(", ")", "return", "[", "(", "x", ".", "link", "[", "3", "]", ".", "href", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ",", "x", ".", "title", ".", "text", ")", "for", "x", "in", "worksheets", ".", "entry", "]" ]
37.833333
0.006452
def offset(self, offset_value): """Return a copy of self, shifted a constant offset. Parameters ---------- offset_value : float Number of pixels to shift the CCDLine. """ new_instance = deepcopy(self) new_instance.poly_funct.coef[0] += offset_value return new_instance
[ "def", "offset", "(", "self", ",", "offset_value", ")", ":", "new_instance", "=", "deepcopy", "(", "self", ")", "new_instance", ".", "poly_funct", ".", "coef", "[", "0", "]", "+=", "offset_value", "return", "new_instance" ]
25.769231
0.005764
def constant_tuple_dict(self): """ Returns ------- constant_tuple_dict: {Constant: ConstantTuple} The set of all constants associated with this mapper """ return {constant_tuple.constant: constant_tuple for name, prior_model in self.prior_model_tuples for constant_tuple in prior_model.constant_tuples}.values()
[ "def", "constant_tuple_dict", "(", "self", ")", ":", "return", "{", "constant_tuple", ".", "constant", ":", "constant_tuple", "for", "name", ",", "prior_model", "in", "self", ".", "prior_model_tuples", "for", "constant_tuple", "in", "prior_model", ".", "constant_tuples", "}", ".", "values", "(", ")" ]
39.4
0.004963
def unpack(self, buff, offset=0): """Unpack a binary message into this object's attributes. Unpack the binary value *buff* and update this object attributes based on the results. Args: buff (bytes): Binary data package to be unpacked. offset (int): Where to begin unpacking. Raises: Exception: If there is a struct unpacking error. """ def _int2hex(number): return "{0:0{1}x}".format(number, 2) try: unpacked_data = struct.unpack('!6B', buff[offset:offset+6]) except struct.error as exception: raise exceptions.UnpackException('%s; %s: %s' % (exception, offset, buff)) transformed_data = ':'.join([_int2hex(x) for x in unpacked_data]) self._value = transformed_data
[ "def", "unpack", "(", "self", ",", "buff", ",", "offset", "=", "0", ")", ":", "def", "_int2hex", "(", "number", ")", ":", "return", "\"{0:0{1}x}\"", ".", "format", "(", "number", ",", "2", ")", "try", ":", "unpacked_data", "=", "struct", ".", "unpack", "(", "'!6B'", ",", "buff", "[", "offset", ":", "offset", "+", "6", "]", ")", "except", "struct", ".", "error", "as", "exception", ":", "raise", "exceptions", ".", "UnpackException", "(", "'%s; %s: %s'", "%", "(", "exception", ",", "offset", ",", "buff", ")", ")", "transformed_data", "=", "':'", ".", "join", "(", "[", "_int2hex", "(", "x", ")", "for", "x", "in", "unpacked_data", "]", ")", "self", ".", "_value", "=", "transformed_data" ]
34.72
0.002242
def add_license( self, url=None, license=None, material=None, imposing=None ): """Add license. :param url: url for the description of the license :type url: string :param license: license type :type license: string :param material: material type :type material: string :param imposing: imposing type :type imposing: string """ hep_license = {} try: license_from_url = get_license_from_url(url) if license_from_url is not None: license = license_from_url except ValueError: pass for key in ('url', 'license', 'material', 'imposing'): if locals()[key] is not None: hep_license[key] = locals()[key] self._append_to('license', hep_license)
[ "def", "add_license", "(", "self", ",", "url", "=", "None", ",", "license", "=", "None", ",", "material", "=", "None", ",", "imposing", "=", "None", ")", ":", "hep_license", "=", "{", "}", "try", ":", "license_from_url", "=", "get_license_from_url", "(", "url", ")", "if", "license_from_url", "is", "not", "None", ":", "license", "=", "license_from_url", "except", "ValueError", ":", "pass", "for", "key", "in", "(", "'url'", ",", "'license'", ",", "'material'", ",", "'imposing'", ")", ":", "if", "locals", "(", ")", "[", "key", "]", "is", "not", "None", ":", "hep_license", "[", "key", "]", "=", "locals", "(", ")", "[", "key", "]", "self", ".", "_append_to", "(", "'license'", ",", "hep_license", ")" ]
24.342857
0.003386
def _coord2offset(self, coord): """Convert a normalized coordinate to an item offset.""" size = self.size offset = 0 for dim, index in enumerate(coord): size //= self._normshape[dim] offset += size * index return offset
[ "def", "_coord2offset", "(", "self", ",", "coord", ")", ":", "size", "=", "self", ".", "size", "offset", "=", "0", "for", "dim", ",", "index", "in", "enumerate", "(", "coord", ")", ":", "size", "//=", "self", ".", "_normshape", "[", "dim", "]", "offset", "+=", "size", "*", "index", "return", "offset" ]
34.5
0.007067
def dispatch_queue(self): """ Dispatch any queued requests. Called by the debugger when it stops. """ self.queue_lock.acquire() q = list(self.queue) self.queue = [] self.queue_lock.release() log.debug("Dispatching requests: {}".format(q)) for req in q: req.response = self.dispatch_request(req) for req in q: req.signal()
[ "def", "dispatch_queue", "(", "self", ")", ":", "self", ".", "queue_lock", ".", "acquire", "(", ")", "q", "=", "list", "(", "self", ".", "queue", ")", "self", ".", "queue", "=", "[", "]", "self", ".", "queue_lock", ".", "release", "(", ")", "log", ".", "debug", "(", "\"Dispatching requests: {}\"", ".", "format", "(", "q", ")", ")", "for", "req", "in", "q", ":", "req", ".", "response", "=", "self", ".", "dispatch_request", "(", "req", ")", "for", "req", "in", "q", ":", "req", ".", "signal", "(", ")" ]
28
0.004608
def K(self, X, X2=None): """Compute the covariance matrix between X and X2.""" if X2 is None: X2 = X base = np.pi * (X[:, None, :] - X2[None, :, :]) / self.period exp_dist = np.exp( -0.5* np.sum( np.square( np.sin( base ) / self.lengthscale ), axis = -1 ) ) return self.variance * exp_dist
[ "def", "K", "(", "self", ",", "X", ",", "X2", "=", "None", ")", ":", "if", "X2", "is", "None", ":", "X2", "=", "X", "base", "=", "np", ".", "pi", "*", "(", "X", "[", ":", ",", "None", ",", ":", "]", "-", "X2", "[", "None", ",", ":", ",", ":", "]", ")", "/", "self", ".", "period", "exp_dist", "=", "np", ".", "exp", "(", "-", "0.5", "*", "np", ".", "sum", "(", "np", ".", "square", "(", "np", ".", "sin", "(", "base", ")", "/", "self", ".", "lengthscale", ")", ",", "axis", "=", "-", "1", ")", ")", "return", "self", ".", "variance", "*", "exp_dist" ]
37.333333
0.040698
def _construct_opf_model(self, case): """ Returns an OPF model. """ # Zero the case result attributes. self.case.reset() base_mva = case.base_mva # Check for one reference bus. oneref, refs = self._ref_check(case) if not oneref: #return {"status": "error"} None # Remove isolated components. bs, ln, gn = self._remove_isolated(case) # Update bus indexes. self.case.index_buses(bs) # Convert single-block piecewise-linear costs into linear polynomial. gn = self._pwl1_to_poly(gn) # Set-up initial problem variables. Va = self._get_voltage_angle_var(refs, bs) Pg = self._get_pgen_var(gn, base_mva) if self.dc: # DC model. # Get the susceptance matrices and phase shift injection vectors. B, Bf, Pbusinj, Pfinj = self.case.makeBdc(bs, ln) # Power mismatch constraints (B*Va + Pg = Pd). Pmis = self._power_mismatch_dc(bs, gn, B, Pbusinj, base_mva) # Branch flow limit constraints. Pf, Pt = self._branch_flow_dc(ln, Bf, Pfinj, base_mva) else: # Set-up additional AC-OPF problem variables. Vm = self._get_voltage_magnitude_var(bs, gn) Qg = self._get_qgen_var(gn, base_mva) Pmis, Qmis, Sf, St = self._nln_constraints(len(bs), len(ln)) vl = self._const_pf_constraints(gn, base_mva) # TODO: Generator PQ capability curve constraints. # PQh, PQl = self._pq_capability_curve_constraints(gn) # Branch voltage angle difference limits. ang = self._voltage_angle_diff_limit(bs, ln) if self.dc: vars = [Va, Pg] constraints = [Pmis, Pf, Pt, ang] else: vars = [Va, Vm, Pg, Qg] constraints = [Pmis, Qmis, Sf, St, #PQh, PQL, vl, ang] # Piece-wise linear generator cost constraints. y, ycon = self._pwl_gen_costs(gn, base_mva) if ycon is not None: vars.append(y) constraints.append(ycon) # Add variables and constraints to the OPF model object. opf = OPFModel(case) opf.add_vars(vars) opf.add_constraints(constraints) if self.dc: # user data opf._Bf = Bf opf._Pfinj = Pfinj return opf
[ "def", "_construct_opf_model", "(", "self", ",", "case", ")", ":", "# Zero the case result attributes.", "self", ".", "case", ".", "reset", "(", ")", "base_mva", "=", "case", ".", "base_mva", "# Check for one reference bus.", "oneref", ",", "refs", "=", "self", ".", "_ref_check", "(", "case", ")", "if", "not", "oneref", ":", "#return {\"status\": \"error\"}", "None", "# Remove isolated components.", "bs", ",", "ln", ",", "gn", "=", "self", ".", "_remove_isolated", "(", "case", ")", "# Update bus indexes.", "self", ".", "case", ".", "index_buses", "(", "bs", ")", "# Convert single-block piecewise-linear costs into linear polynomial.", "gn", "=", "self", ".", "_pwl1_to_poly", "(", "gn", ")", "# Set-up initial problem variables.", "Va", "=", "self", ".", "_get_voltage_angle_var", "(", "refs", ",", "bs", ")", "Pg", "=", "self", ".", "_get_pgen_var", "(", "gn", ",", "base_mva", ")", "if", "self", ".", "dc", ":", "# DC model.", "# Get the susceptance matrices and phase shift injection vectors.", "B", ",", "Bf", ",", "Pbusinj", ",", "Pfinj", "=", "self", ".", "case", ".", "makeBdc", "(", "bs", ",", "ln", ")", "# Power mismatch constraints (B*Va + Pg = Pd).", "Pmis", "=", "self", ".", "_power_mismatch_dc", "(", "bs", ",", "gn", ",", "B", ",", "Pbusinj", ",", "base_mva", ")", "# Branch flow limit constraints.", "Pf", ",", "Pt", "=", "self", ".", "_branch_flow_dc", "(", "ln", ",", "Bf", ",", "Pfinj", ",", "base_mva", ")", "else", ":", "# Set-up additional AC-OPF problem variables.", "Vm", "=", "self", ".", "_get_voltage_magnitude_var", "(", "bs", ",", "gn", ")", "Qg", "=", "self", ".", "_get_qgen_var", "(", "gn", ",", "base_mva", ")", "Pmis", ",", "Qmis", ",", "Sf", ",", "St", "=", "self", ".", "_nln_constraints", "(", "len", "(", "bs", ")", ",", "len", "(", "ln", ")", ")", "vl", "=", "self", ".", "_const_pf_constraints", "(", "gn", ",", "base_mva", ")", "# TODO: Generator PQ capability curve constraints.", "# PQh, PQl = self._pq_capability_curve_constraints(gn)", "# Branch voltage angle difference limits.", "ang", "=", "self", ".", "_voltage_angle_diff_limit", "(", "bs", ",", "ln", ")", "if", "self", ".", "dc", ":", "vars", "=", "[", "Va", ",", "Pg", "]", "constraints", "=", "[", "Pmis", ",", "Pf", ",", "Pt", ",", "ang", "]", "else", ":", "vars", "=", "[", "Va", ",", "Vm", ",", "Pg", ",", "Qg", "]", "constraints", "=", "[", "Pmis", ",", "Qmis", ",", "Sf", ",", "St", ",", "#PQh, PQL,", "vl", ",", "ang", "]", "# Piece-wise linear generator cost constraints.", "y", ",", "ycon", "=", "self", ".", "_pwl_gen_costs", "(", "gn", ",", "base_mva", ")", "if", "ycon", "is", "not", "None", ":", "vars", ".", "append", "(", "y", ")", "constraints", ".", "append", "(", "ycon", ")", "# Add variables and constraints to the OPF model object.", "opf", "=", "OPFModel", "(", "case", ")", "opf", ".", "add_vars", "(", "vars", ")", "opf", ".", "add_constraints", "(", "constraints", ")", "if", "self", ".", "dc", ":", "# user data", "opf", ".", "_Bf", "=", "Bf", "opf", ".", "_Pfinj", "=", "Pfinj", "return", "opf" ]
31.386667
0.003295
def model(method): """Use this to decorate methods that expect a model.""" def wrapper(self, *args, **kwargs): if self.__model__ is None: raise ValidationError( 'You cannot perform CRUD operations without selecting a ' 'model first.', ) return method(self, *args, **kwargs) return wrapper
[ "def", "model", "(", "method", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "__model__", "is", "None", ":", "raise", "ValidationError", "(", "'You cannot perform CRUD operations without selecting a '", "'model first.'", ",", ")", "return", "method", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
39.9
0.004902
def add_headers(self, **headers): """packing dicts into typecode pyclass, may fail if typecodes are used in the body (when asdict=True) """ class _holder: pass def _remap(pyobj, **d): pyobj.__dict__ = d for k,v in pyobj.__dict__.items(): if type(v) is not dict: continue pyobj.__dict__[k] = p = _holder() _remap(p, **v) for k,v in headers.items(): h = filter(lambda i: k in i.type, self.callinfo.inheaders)[0] if h.element_type != 1: raise RuntimeError, 'not implemented' typecode = GED(*h.type) if typecode is None: raise RuntimeError, 'no matching element for %s' %str(h.type) pyclass = typecode.pyclass if pyclass is None: raise RuntimeError, 'no pyclass for typecode %s' %str(h.type) if type(v) is not dict: pyobj = pyclass(v) else: pyobj = pyclass() _remap(pyobj, **v) self.soapheaders.append(pyobj)
[ "def", "add_headers", "(", "self", ",", "*", "*", "headers", ")", ":", "class", "_holder", ":", "pass", "def", "_remap", "(", "pyobj", ",", "*", "*", "d", ")", ":", "pyobj", ".", "__dict__", "=", "d", "for", "k", ",", "v", "in", "pyobj", ".", "__dict__", ".", "items", "(", ")", ":", "if", "type", "(", "v", ")", "is", "not", "dict", ":", "continue", "pyobj", ".", "__dict__", "[", "k", "]", "=", "p", "=", "_holder", "(", ")", "_remap", "(", "p", ",", "*", "*", "v", ")", "for", "k", ",", "v", "in", "headers", ".", "items", "(", ")", ":", "h", "=", "filter", "(", "lambda", "i", ":", "k", "in", "i", ".", "type", ",", "self", ".", "callinfo", ".", "inheaders", ")", "[", "0", "]", "if", "h", ".", "element_type", "!=", "1", ":", "raise", "RuntimeError", ",", "'not implemented'", "typecode", "=", "GED", "(", "*", "h", ".", "type", ")", "if", "typecode", "is", "None", ":", "raise", "RuntimeError", ",", "'no matching element for %s'", "%", "str", "(", "h", ".", "type", ")", "pyclass", "=", "typecode", ".", "pyclass", "if", "pyclass", "is", "None", ":", "raise", "RuntimeError", ",", "'no pyclass for typecode %s'", "%", "str", "(", "h", ".", "type", ")", "if", "type", "(", "v", ")", "is", "not", "dict", ":", "pyobj", "=", "pyclass", "(", "v", ")", "else", ":", "pyobj", "=", "pyclass", "(", ")", "_remap", "(", "pyobj", ",", "*", "*", "v", ")", "self", ".", "soapheaders", ".", "append", "(", "pyobj", ")" ]
34.65625
0.014035
def generate_base_grid(self, vtk_filename=None): """ Run first step of algorithm. Next step is split_voxels :param vtk_filename: :return: """ nd, ed, ed_dir = self.gen_grid_fcn(self.data.shape, self.voxelsize) self.add_nodes(nd) self.add_edges(ed, ed_dir, edge_low_or_high=0) if vtk_filename is not None: self.write_vtk(vtk_filename)
[ "def", "generate_base_grid", "(", "self", ",", "vtk_filename", "=", "None", ")", ":", "nd", ",", "ed", ",", "ed_dir", "=", "self", ".", "gen_grid_fcn", "(", "self", ".", "data", ".", "shape", ",", "self", ".", "voxelsize", ")", "self", ".", "add_nodes", "(", "nd", ")", "self", ".", "add_edges", "(", "ed", ",", "ed_dir", ",", "edge_low_or_high", "=", "0", ")", "if", "vtk_filename", "is", "not", "None", ":", "self", ".", "write_vtk", "(", "vtk_filename", ")" ]
33.916667
0.004785
def check_class(self, id_, class_, lineno, scope=None, show_error=True): """ Check the id is either undefined or defined with the given class. - If the identifier (e.g. variable) does not exists means it's undeclared, and returns True (OK). - If the identifier exists, but its class_ attribute is unknown yet (None), returns also True. This means the identifier has been referenced in advanced and it's undeclared. Otherwise fails returning False. """ assert CLASS.is_valid(class_) entry = self.get_entry(id_, scope) if entry is None or entry.class_ == CLASS.unknown: # Undeclared yet return True if entry.class_ != class_: if show_error: if entry.class_ == CLASS.array: a1 = 'n' else: a1 = '' if class_ == CLASS.array: a2 = 'n' else: a2 = '' syntax_error(lineno, "identifier '%s' is a%s %s, not a%s %s" % (id_, a1, entry.class_, a2, class_)) return False return True
[ "def", "check_class", "(", "self", ",", "id_", ",", "class_", ",", "lineno", ",", "scope", "=", "None", ",", "show_error", "=", "True", ")", ":", "assert", "CLASS", ".", "is_valid", "(", "class_", ")", "entry", "=", "self", ".", "get_entry", "(", "id_", ",", "scope", ")", "if", "entry", "is", "None", "or", "entry", ".", "class_", "==", "CLASS", ".", "unknown", ":", "# Undeclared yet", "return", "True", "if", "entry", ".", "class_", "!=", "class_", ":", "if", "show_error", ":", "if", "entry", ".", "class_", "==", "CLASS", ".", "array", ":", "a1", "=", "'n'", "else", ":", "a1", "=", "''", "if", "class_", "==", "CLASS", ".", "array", ":", "a2", "=", "'n'", "else", ":", "a2", "=", "''", "syntax_error", "(", "lineno", ",", "\"identifier '%s' is a%s %s, not a%s %s\"", "%", "(", "id_", ",", "a1", ",", "entry", ".", "class_", ",", "a2", ",", "class_", ")", ")", "return", "False", "return", "True" ]
36.8125
0.001654
def ip_to_host(ip): ''' Returns the hostname of a given IP ''' try: hostname, aliaslist, ipaddrlist = socket.gethostbyaddr(ip) except Exception as exc: log.debug('salt.utils.network.ip_to_host(%r) failed: %s', ip, exc) hostname = None return hostname
[ "def", "ip_to_host", "(", "ip", ")", ":", "try", ":", "hostname", ",", "aliaslist", ",", "ipaddrlist", "=", "socket", ".", "gethostbyaddr", "(", "ip", ")", "except", "Exception", "as", "exc", ":", "log", ".", "debug", "(", "'salt.utils.network.ip_to_host(%r) failed: %s'", ",", "ip", ",", "exc", ")", "hostname", "=", "None", "return", "hostname" ]
28.9
0.003356
def notification( self, topic_name, topic_project=None, custom_attributes=None, event_types=None, blob_name_prefix=None, payload_format=NONE_PAYLOAD_FORMAT, ): """Factory: create a notification resource for the bucket. See: :class:`.BucketNotification` for parameters. :rtype: :class:`.BucketNotification` """ return BucketNotification( self, topic_name, topic_project=topic_project, custom_attributes=custom_attributes, event_types=event_types, blob_name_prefix=blob_name_prefix, payload_format=payload_format, )
[ "def", "notification", "(", "self", ",", "topic_name", ",", "topic_project", "=", "None", ",", "custom_attributes", "=", "None", ",", "event_types", "=", "None", ",", "blob_name_prefix", "=", "None", ",", "payload_format", "=", "NONE_PAYLOAD_FORMAT", ",", ")", ":", "return", "BucketNotification", "(", "self", ",", "topic_name", ",", "topic_project", "=", "topic_project", ",", "custom_attributes", "=", "custom_attributes", ",", "event_types", "=", "event_types", ",", "blob_name_prefix", "=", "blob_name_prefix", ",", "payload_format", "=", "payload_format", ",", ")" ]
28.541667
0.004237
def default_storable(python_type, exposes=None, version=None, storable_type=None, peek=default_peek): """ Default mechanics for building the storable instance for a type. Arguments: python_type (type): type. exposes (iterable): attributes exposed by the type. version (tuple): version number. storable_type (str): universal string identifier for the type. peek (callable): peeking routine. Returns: Storable: storable instance. """ if not exposes: for extension in expose_extensions: try: exposes = extension(python_type) except (SystemExit, KeyboardInterrupt): raise except: pass else: if exposes: break if not exposes: raise AttributeError('`exposes` required for type: {!r}'.format(python_type)) return Storable(python_type, key=storable_type, \ handlers=StorableHandler(version=version, exposes=exposes, \ poke=poke(exposes), peek=peek(python_type, exposes)))
[ "def", "default_storable", "(", "python_type", ",", "exposes", "=", "None", ",", "version", "=", "None", ",", "storable_type", "=", "None", ",", "peek", "=", "default_peek", ")", ":", "if", "not", "exposes", ":", "for", "extension", "in", "expose_extensions", ":", "try", ":", "exposes", "=", "extension", "(", "python_type", ")", "except", "(", "SystemExit", ",", "KeyboardInterrupt", ")", ":", "raise", "except", ":", "pass", "else", ":", "if", "exposes", ":", "break", "if", "not", "exposes", ":", "raise", "AttributeError", "(", "'`exposes` required for type: {!r}'", ".", "format", "(", "python_type", ")", ")", "return", "Storable", "(", "python_type", ",", "key", "=", "storable_type", ",", "handlers", "=", "StorableHandler", "(", "version", "=", "version", ",", "exposes", "=", "exposes", ",", "poke", "=", "poke", "(", "exposes", ")", ",", "peek", "=", "peek", "(", "python_type", ",", "exposes", ")", ")", ")" ]
29.405405
0.007117
def _norm(self, x): """Compute the safe norm.""" return tf.sqrt(tf.reduce_sum(tf.square(x), keepdims=True, axis=-1) + 1e-7)
[ "def", "_norm", "(", "self", ",", "x", ")", ":", "return", "tf", ".", "sqrt", "(", "tf", ".", "reduce_sum", "(", "tf", ".", "square", "(", "x", ")", ",", "keepdims", "=", "True", ",", "axis", "=", "-", "1", ")", "+", "1e-7", ")" ]
43
0.007634
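The small constant inside the square root keeps the value and its gradient finite when the input vector is all zeros. A sketch of the same computation outside the original class, assuming TensorFlow 2.x with eager execution:

import tensorflow as tf

def safe_norm(x, axis=-1, epsilon=1e-7):
    # Epsilon inside the sqrt avoids a zero norm and the resulting infinite gradient.
    return tf.sqrt(tf.reduce_sum(tf.square(x), axis=axis, keepdims=True) + epsilon)

v = tf.constant([[3.0, 4.0], [0.0, 0.0]])
print(safe_norm(v))  # approximately [[5.0], [0.00032]]; the second row never hits exactly zero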
def recommend(self, limit=10): ''' Listup arms and expected value. Args: limit: Length of the list. Returns: [Tuple(`Arms master id`, `expected value`)] ''' expected_list = [(arm_id, beta_dist.expected_value()) for arm_id, beta_dist in self.__beta_dist_dict.items()] expected_list = sorted(expected_list, key=lambda x: x[1], reverse=True) return expected_list[:limit]
[ "def", "recommend", "(", "self", ",", "limit", "=", "10", ")", ":", "expected_list", "=", "[", "(", "arm_id", ",", "beta_dist", ".", "expected_value", "(", ")", ")", "for", "arm_id", ",", "beta_dist", "in", "self", ".", "__beta_dist_dict", ".", "items", "(", ")", "]", "expected_list", "=", "sorted", "(", "expected_list", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ",", "reverse", "=", "True", ")", "return", "expected_list", "[", ":", "limit", "]" ]
34.615385
0.006494
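The ranking step in recommend() is a plain sort over (arm id, expected value) pairs. A self-contained sketch using a stand-in beta distribution whose expected value is its mean alpha / (alpha + beta); the original beta_dist objects are assumed to expose an equivalent expected_value() method:

from collections import namedtuple

BetaDist = namedtuple("BetaDist", ["alpha", "beta"])

def expected_value(dist):
    # Mean of Beta(alpha, beta).
    return dist.alpha / (dist.alpha + dist.beta)

beta_dist_dict = {"arm_a": BetaDist(8, 2), "arm_b": BetaDist(3, 3), "arm_c": BetaDist(1, 9)}
expected_list = sorted(((arm_id, expected_value(d)) for arm_id, d in beta_dist_dict.items()),
                       key=lambda x: x[1], reverse=True)
print(expected_list[:2])  # [('arm_a', 0.8), ('arm_b', 0.5)]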
def copy_style(shapefile_path): """Copy style from the OSM resource directory to the output path. .. versionadded: 3.3 :param shapefile_path: Path to the shapefile that should get the path added. :type shapefile_path: basestring """ source_qml_path = resources_path('petabencana', 'flood-style.qml') output_qml_path = shapefile_path.replace('shp', 'qml') LOGGER.info('Copying qml to: %s' % output_qml_path) copy(source_qml_path, output_qml_path)
[ "def", "copy_style", "(", "shapefile_path", ")", ":", "source_qml_path", "=", "resources_path", "(", "'petabencana'", ",", "'flood-style.qml'", ")", "output_qml_path", "=", "shapefile_path", ".", "replace", "(", "'shp'", ",", "'qml'", ")", "LOGGER", ".", "info", "(", "'Copying qml to: %s'", "%", "output_qml_path", ")", "copy", "(", "source_qml_path", ",", "output_qml_path", ")" ]
39.923077
0.003766
def optimize(self, to_buy, to_sell): ''' Buy sid * parameters['buy_amount'] * parameters['scale'][sid] Sell sid * parameters['sell_amount'] * parameters['scale'][sid] ''' allocations = {} # Process every stock the same way for s in to_buy: quantity = self.properties.get('buy_amount', 100) if s in self.properties.get('scale', {}): quantity *= self.properties['scale'][s] # Allocate defined amount to buy allocations[s] = int(quantity) # NOTE You must provide sell_amount if you want to short for s in to_sell: quantity = self.properties.get( 'sell_amount', self.portfolio.positions[s].amount) if s in self.properties.get('scale', {}): quantity *= self.properties['scale'][s] # Allocate defined amount to buy allocations[s] = -int(quantity) # Defaults values e_ret = 0 e_risk = 1 return allocations, e_ret, e_risk
[ "def", "optimize", "(", "self", ",", "to_buy", ",", "to_sell", ")", ":", "allocations", "=", "{", "}", "# Process every stock the same way", "for", "s", "in", "to_buy", ":", "quantity", "=", "self", ".", "properties", ".", "get", "(", "'buy_amount'", ",", "100", ")", "if", "s", "in", "self", ".", "properties", ".", "get", "(", "'scale'", ",", "{", "}", ")", ":", "quantity", "*=", "self", ".", "properties", "[", "'scale'", "]", "[", "s", "]", "# Allocate defined amount to buy", "allocations", "[", "s", "]", "=", "int", "(", "quantity", ")", "# NOTE You must provide sell_amount if you want to short", "for", "s", "in", "to_sell", ":", "quantity", "=", "self", ".", "properties", ".", "get", "(", "'sell_amount'", ",", "self", ".", "portfolio", ".", "positions", "[", "s", "]", ".", "amount", ")", "if", "s", "in", "self", ".", "properties", ".", "get", "(", "'scale'", ",", "{", "}", ")", ":", "quantity", "*=", "self", ".", "properties", "[", "'scale'", "]", "[", "s", "]", "# Allocate defined amount to buy", "allocations", "[", "s", "]", "=", "-", "int", "(", "quantity", ")", "# Defaults values", "e_ret", "=", "0", "e_risk", "=", "1", "return", "allocations", ",", "e_ret", ",", "e_risk" ]
37.035714
0.00188
def handle_job_exception(self, exception, variables=None): """ Makes and returns a last-ditch error response. :param exception: The exception that happened :type exception: Exception :param variables: A dictionary of context-relevant variables to include in the error response :type variables: dict :return: A `JobResponse` object :rtype: JobResponse """ # Get the error and traceback if we can # noinspection PyBroadException try: error_str, traceback_str = six.text_type(exception), traceback.format_exc() except Exception: self.metrics.counter('server.error.error_formatting_failure').increment() error_str, traceback_str = 'Error formatting error', traceback.format_exc() # Log what happened self.logger.exception(exception) if not isinstance(traceback_str, six.text_type): try: # Try to traceback_str = traceback_str.decode('utf-8') except UnicodeDecodeError: traceback_str = 'UnicodeDecodeError: Traceback could not be decoded' # Make a bare bones job response error_dict = { 'code': ERROR_CODE_SERVER_ERROR, 'message': 'Internal server error: %s' % error_str, 'traceback': traceback_str, } if variables is not None: # noinspection PyBroadException try: error_dict['variables'] = {key: repr(value) for key, value in variables.items()} except Exception: self.metrics.counter('server.error.variable_formatting_failure').increment() error_dict['variables'] = 'Error formatting variables' return JobResponse(errors=[error_dict])
[ "def", "handle_job_exception", "(", "self", ",", "exception", ",", "variables", "=", "None", ")", ":", "# Get the error and traceback if we can", "# noinspection PyBroadException", "try", ":", "error_str", ",", "traceback_str", "=", "six", ".", "text_type", "(", "exception", ")", ",", "traceback", ".", "format_exc", "(", ")", "except", "Exception", ":", "self", ".", "metrics", ".", "counter", "(", "'server.error.error_formatting_failure'", ")", ".", "increment", "(", ")", "error_str", ",", "traceback_str", "=", "'Error formatting error'", ",", "traceback", ".", "format_exc", "(", ")", "# Log what happened", "self", ".", "logger", ".", "exception", "(", "exception", ")", "if", "not", "isinstance", "(", "traceback_str", ",", "six", ".", "text_type", ")", ":", "try", ":", "# Try to", "traceback_str", "=", "traceback_str", ".", "decode", "(", "'utf-8'", ")", "except", "UnicodeDecodeError", ":", "traceback_str", "=", "'UnicodeDecodeError: Traceback could not be decoded'", "# Make a bare bones job response", "error_dict", "=", "{", "'code'", ":", "ERROR_CODE_SERVER_ERROR", ",", "'message'", ":", "'Internal server error: %s'", "%", "error_str", ",", "'traceback'", ":", "traceback_str", ",", "}", "if", "variables", "is", "not", "None", ":", "# noinspection PyBroadException", "try", ":", "error_dict", "[", "'variables'", "]", "=", "{", "key", ":", "repr", "(", "value", ")", "for", "key", ",", "value", "in", "variables", ".", "items", "(", ")", "}", "except", "Exception", ":", "self", ".", "metrics", ".", "counter", "(", "'server.error.variable_formatting_failure'", ")", ".", "increment", "(", ")", "error_dict", "[", "'variables'", "]", "=", "'Error formatting variables'", "return", "JobResponse", "(", "errors", "=", "[", "error_dict", "]", ")" ]
41.581395
0.004918
def local(self): """ Access the local :returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalList :rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalList """ if self._local is None: self._local = LocalList( self._version, account_sid=self._solution['account_sid'], country_code=self._solution['country_code'], ) return self._local
[ "def", "local", "(", "self", ")", ":", "if", "self", ".", "_local", "is", "None", ":", "self", ".", "_local", "=", "LocalList", "(", "self", ".", "_version", ",", "account_sid", "=", "self", ".", "_solution", "[", "'account_sid'", "]", ",", "country_code", "=", "self", ".", "_solution", "[", "'country_code'", "]", ",", ")", "return", "self", ".", "_local" ]
34.714286
0.008016
def process_species(self, limit): """ Loop through the xml file and process the species. We add elements to the graph, and store the id-to-label in the label_hash dict. :param limit: :return: """ myfile = '/'.join((self.rawdir, self.files['data']['file'])) fh = gzip.open(myfile, 'rb') filereader = io.TextIOWrapper(fh, newline="") filereader.readline() # remove the xml declaration line for event, elem in ET.iterparse(filereader): # iterparse is not deprecated # Species ids are == NCBITaxon ids self.process_xml_table( elem, 'Species_gb', self._process_species_table_row, limit) fh.close() return
[ "def", "process_species", "(", "self", ",", "limit", ")", ":", "myfile", "=", "'/'", ".", "join", "(", "(", "self", ".", "rawdir", ",", "self", ".", "files", "[", "'data'", "]", "[", "'file'", "]", ")", ")", "fh", "=", "gzip", ".", "open", "(", "myfile", ",", "'rb'", ")", "filereader", "=", "io", ".", "TextIOWrapper", "(", "fh", ",", "newline", "=", "\"\"", ")", "filereader", ".", "readline", "(", ")", "# remove the xml declaration line", "for", "event", ",", "elem", "in", "ET", ".", "iterparse", "(", "filereader", ")", ":", "# iterparse is not deprecated", "# Species ids are == NCBITaxon ids", "self", ".", "process_xml_table", "(", "elem", ",", "'Species_gb'", ",", "self", ".", "_process_species_table_row", ",", "limit", ")", "fh", ".", "close", "(", ")", "return" ]
40.888889
0.003984
async def delete(self, *names): """ "Delete one or more keys specified by ``names``" Cluster impl: Iterate all keys and send DELETE for each key. This will go a lot slower than a normal delete call in StrictRedis. Operation is no longer atomic. """ count = 0 for arg in names: count += await self.execute_command('DEL', arg) return count
[ "async", "def", "delete", "(", "self", ",", "*", "names", ")", ":", "count", "=", "0", "for", "arg", "in", "names", ":", "count", "+=", "await", "self", ".", "execute_command", "(", "'DEL'", ",", "arg", ")", "return", "count" ]
26.875
0.004494
def convert_cifar100(directory, output_directory, output_filename='cifar100.hdf5'): """Converts the CIFAR-100 dataset to HDF5. Converts the CIFAR-100 dataset to an HDF5 dataset compatible with :class:`fuel.datasets.CIFAR100`. The converted dataset is saved as 'cifar100.hdf5'. This method assumes the existence of the following file: `cifar-100-python.tar.gz` Parameters ---------- directory : str Directory in which the required input files reside. output_directory : str Directory in which to save the converted dataset. output_filename : str, optional Name of the saved dataset. Defaults to 'cifar100.hdf5'. Returns ------- output_paths : tuple of str Single-element tuple containing the path to the converted dataset. """ output_path = os.path.join(output_directory, output_filename) h5file = h5py.File(output_path, mode="w") input_file = os.path.join(directory, 'cifar-100-python.tar.gz') tar_file = tarfile.open(input_file, 'r:gz') file = tar_file.extractfile('cifar-100-python/train') try: if six.PY3: train = cPickle.load(file, encoding='latin1') else: train = cPickle.load(file) finally: file.close() train_features = train['data'].reshape(train['data'].shape[0], 3, 32, 32) train_coarse_labels = numpy.array(train['coarse_labels'], dtype=numpy.uint8) train_fine_labels = numpy.array(train['fine_labels'], dtype=numpy.uint8) file = tar_file.extractfile('cifar-100-python/test') try: if six.PY3: test = cPickle.load(file, encoding='latin1') else: test = cPickle.load(file) finally: file.close() test_features = test['data'].reshape(test['data'].shape[0], 3, 32, 32) test_coarse_labels = numpy.array(test['coarse_labels'], dtype=numpy.uint8) test_fine_labels = numpy.array(test['fine_labels'], dtype=numpy.uint8) data = (('train', 'features', train_features), ('train', 'coarse_labels', train_coarse_labels.reshape((-1, 1))), ('train', 'fine_labels', train_fine_labels.reshape((-1, 1))), ('test', 'features', test_features), ('test', 'coarse_labels', test_coarse_labels.reshape((-1, 1))), ('test', 'fine_labels', test_fine_labels.reshape((-1, 1)))) fill_hdf5_file(h5file, data) h5file['features'].dims[0].label = 'batch' h5file['features'].dims[1].label = 'channel' h5file['features'].dims[2].label = 'height' h5file['features'].dims[3].label = 'width' h5file['coarse_labels'].dims[0].label = 'batch' h5file['coarse_labels'].dims[1].label = 'index' h5file['fine_labels'].dims[0].label = 'batch' h5file['fine_labels'].dims[1].label = 'index' h5file.flush() h5file.close() return (output_path,)
[ "def", "convert_cifar100", "(", "directory", ",", "output_directory", ",", "output_filename", "=", "'cifar100.hdf5'", ")", ":", "output_path", "=", "os", ".", "path", ".", "join", "(", "output_directory", ",", "output_filename", ")", "h5file", "=", "h5py", ".", "File", "(", "output_path", ",", "mode", "=", "\"w\"", ")", "input_file", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "'cifar-100-python.tar.gz'", ")", "tar_file", "=", "tarfile", ".", "open", "(", "input_file", ",", "'r:gz'", ")", "file", "=", "tar_file", ".", "extractfile", "(", "'cifar-100-python/train'", ")", "try", ":", "if", "six", ".", "PY3", ":", "train", "=", "cPickle", ".", "load", "(", "file", ",", "encoding", "=", "'latin1'", ")", "else", ":", "train", "=", "cPickle", ".", "load", "(", "file", ")", "finally", ":", "file", ".", "close", "(", ")", "train_features", "=", "train", "[", "'data'", "]", ".", "reshape", "(", "train", "[", "'data'", "]", ".", "shape", "[", "0", "]", ",", "3", ",", "32", ",", "32", ")", "train_coarse_labels", "=", "numpy", ".", "array", "(", "train", "[", "'coarse_labels'", "]", ",", "dtype", "=", "numpy", ".", "uint8", ")", "train_fine_labels", "=", "numpy", ".", "array", "(", "train", "[", "'fine_labels'", "]", ",", "dtype", "=", "numpy", ".", "uint8", ")", "file", "=", "tar_file", ".", "extractfile", "(", "'cifar-100-python/test'", ")", "try", ":", "if", "six", ".", "PY3", ":", "test", "=", "cPickle", ".", "load", "(", "file", ",", "encoding", "=", "'latin1'", ")", "else", ":", "test", "=", "cPickle", ".", "load", "(", "file", ")", "finally", ":", "file", ".", "close", "(", ")", "test_features", "=", "test", "[", "'data'", "]", ".", "reshape", "(", "test", "[", "'data'", "]", ".", "shape", "[", "0", "]", ",", "3", ",", "32", ",", "32", ")", "test_coarse_labels", "=", "numpy", ".", "array", "(", "test", "[", "'coarse_labels'", "]", ",", "dtype", "=", "numpy", ".", "uint8", ")", "test_fine_labels", "=", "numpy", ".", "array", "(", "test", "[", "'fine_labels'", "]", ",", "dtype", "=", "numpy", ".", "uint8", ")", "data", "=", "(", "(", "'train'", ",", "'features'", ",", "train_features", ")", ",", "(", "'train'", ",", "'coarse_labels'", ",", "train_coarse_labels", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", ")", ",", "(", "'train'", ",", "'fine_labels'", ",", "train_fine_labels", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", ")", ",", "(", "'test'", ",", "'features'", ",", "test_features", ")", ",", "(", "'test'", ",", "'coarse_labels'", ",", "test_coarse_labels", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", ")", ",", "(", "'test'", ",", "'fine_labels'", ",", "test_fine_labels", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", ")", ")", "fill_hdf5_file", "(", "h5file", ",", "data", ")", "h5file", "[", "'features'", "]", ".", "dims", "[", "0", "]", ".", "label", "=", "'batch'", "h5file", "[", "'features'", "]", ".", "dims", "[", "1", "]", ".", "label", "=", "'channel'", "h5file", "[", "'features'", "]", ".", "dims", "[", "2", "]", ".", "label", "=", "'height'", "h5file", "[", "'features'", "]", ".", "dims", "[", "3", "]", ".", "label", "=", "'width'", "h5file", "[", "'coarse_labels'", "]", ".", "dims", "[", "0", "]", ".", "label", "=", "'batch'", "h5file", "[", "'coarse_labels'", "]", ".", "dims", "[", "1", "]", ".", "label", "=", "'index'", "h5file", "[", "'fine_labels'", "]", ".", "dims", "[", "0", "]", ".", "label", "=", "'batch'", "h5file", "[", "'fine_labels'", "]", ".", "dims", "[", "1", "]", ".", "label", "=", "'index'", "h5file", ".", 
"flush", "(", ")", "h5file", ".", "close", "(", ")", "return", "(", "output_path", ",", ")" ]
36.716049
0.000327
def tfpdef(self, ident_tok, annotation_opt): """(3.0-) tfpdef: NAME [':' test]""" if annotation_opt: colon_loc, annotation = annotation_opt return self._arg(ident_tok, colon_loc, annotation) return self._arg(ident_tok)
[ "def", "tfpdef", "(", "self", ",", "ident_tok", ",", "annotation_opt", ")", ":", "if", "annotation_opt", ":", "colon_loc", ",", "annotation", "=", "annotation_opt", "return", "self", ".", "_arg", "(", "ident_tok", ",", "colon_loc", ",", "annotation", ")", "return", "self", ".", "_arg", "(", "ident_tok", ")" ]
43.5
0.007519
def get_root_object(models): """ Read list of models and returns a Root object with the proper models added. """ root = napalm_yang.base.Root() for model in models: current = napalm_yang for p in model.split("."): current = getattr(current, p) root.add_model(current) return root
[ "def", "get_root_object", "(", "models", ")", ":", "root", "=", "napalm_yang", ".", "base", ".", "Root", "(", ")", "for", "model", "in", "models", ":", "current", "=", "napalm_yang", "for", "p", "in", "model", ".", "split", "(", "\".\"", ")", ":", "current", "=", "getattr", "(", "current", ",", "p", ")", "root", ".", "add_model", "(", "current", ")", "return", "root" ]
25.307692
0.002933
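The loop in get_root_object() is a generic getattr walk over a dotted model path. The same pattern in isolation, shown against the standard library instead of napalm_yang (module and attribute names here are only illustrative):

import importlib

def resolve_dotted(module_name, dotted_path):
    # Import the base module, then follow each attribute named in the dotted path.
    obj = importlib.import_module(module_name)
    for part in dotted_path.split("."):
        obj = getattr(obj, part)
    return obj

dumps = resolve_dotted("json", "dumps")
assert dumps({"ok": True}) == '{"ok": true}'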
def verify_sauce_connect_is_running(self, options): """ Start Sauce Connect, if it isn't already running. Returns a tuple of two elements: * A boolean which is True if Sauce Connect is now running * The Popen object representing the process so it can be terminated later; if it was already running, this value is "None" """ sc_path = settings.SELENIUM_SAUCE_CONNECT_PATH if len(sc_path) < 2: self.stdout.write('You need to configure SELENIUM_SAUCE_CONNECT_PATH') return False, None username = settings.SELENIUM_SAUCE_USERNAME if not username: self.stdout.write('You need to configure SELENIUM_SAUCE_USERNAME') return False, None key = settings.SELENIUM_SAUCE_API_KEY if not key: self.stdout.write('You need to configure SELENIUM_SAUCE_API_KEY') return False, None # Is it already running? process = Popen(['ps -e | grep "%s"' % key], shell=True, stdout=PIPE) (grep_output, _grep_error) = process.communicate() grep_command = 'grep {}'.format(key) lines = grep_output.split('\n') for line in lines: if 'sc' in line and username in line and grep_command not in line: self.stdout.write('Sauce Connect is already running') return True, None self.stdout.write('Starting Sauce Connect') output = OutputMonitor() command = [sc_path, '-u', username, '-k', key] tunnel_id = options['tunnel_id'] if tunnel_id: command.extend(['-i', tunnel_id]) sc_process = Popen(command, stdout=output.stream.input, stderr=open(os.devnull, 'w'), universal_newlines=True) ready_log_line = 'Connection established.' if not output.wait_for(ready_log_line, 60): self.stdout.write('Timeout starting Sauce Connect:\n') self.stdout.write('\n'.join(output.lines)) return False, None return True, sc_process
[ "def", "verify_sauce_connect_is_running", "(", "self", ",", "options", ")", ":", "sc_path", "=", "settings", ".", "SELENIUM_SAUCE_CONNECT_PATH", "if", "len", "(", "sc_path", ")", "<", "2", ":", "self", ".", "stdout", ".", "write", "(", "'You need to configure SELENIUM_SAUCE_CONNECT_PATH'", ")", "return", "False", ",", "None", "username", "=", "settings", ".", "SELENIUM_SAUCE_USERNAME", "if", "not", "username", ":", "self", ".", "stdout", ".", "write", "(", "'You need to configure SELENIUM_SAUCE_USERNAME'", ")", "return", "False", ",", "None", "key", "=", "settings", ".", "SELENIUM_SAUCE_API_KEY", "if", "not", "key", ":", "self", ".", "stdout", ".", "write", "(", "'You need to configure SELENIUM_SAUCE_API_KEY'", ")", "return", "False", ",", "None", "# Is it already running?", "process", "=", "Popen", "(", "[", "'ps -e | grep \"%s\"'", "%", "key", "]", ",", "shell", "=", "True", ",", "stdout", "=", "PIPE", ")", "(", "grep_output", ",", "_grep_error", ")", "=", "process", ".", "communicate", "(", ")", "grep_command", "=", "'grep {}'", ".", "format", "(", "key", ")", "lines", "=", "grep_output", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "'sc'", "in", "line", "and", "username", "in", "line", "and", "grep_command", "not", "in", "line", ":", "self", ".", "stdout", ".", "write", "(", "'Sauce Connect is already running'", ")", "return", "True", ",", "None", "self", ".", "stdout", ".", "write", "(", "'Starting Sauce Connect'", ")", "output", "=", "OutputMonitor", "(", ")", "command", "=", "[", "sc_path", ",", "'-u'", ",", "username", ",", "'-k'", ",", "key", "]", "tunnel_id", "=", "options", "[", "'tunnel_id'", "]", "if", "tunnel_id", ":", "command", ".", "extend", "(", "[", "'-i'", ",", "tunnel_id", "]", ")", "sc_process", "=", "Popen", "(", "command", ",", "stdout", "=", "output", ".", "stream", ".", "input", ",", "stderr", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", ",", "universal_newlines", "=", "True", ")", "ready_log_line", "=", "'Connection established.'", "if", "not", "output", ".", "wait_for", "(", "ready_log_line", ",", "60", ")", ":", "self", ".", "stdout", ".", "write", "(", "'Timeout starting Sauce Connect:\\n'", ")", "self", ".", "stdout", ".", "write", "(", "'\\n'", ".", "join", "(", "output", ".", "lines", ")", ")", "return", "False", ",", "None", "return", "True", ",", "sc_process" ]
45.170213
0.001383
def move_pos(line=1, column=1, file=sys.stdout): """ Move the cursor to a new position. Values are 1-based, and default to 1. Esc[<line>;<column>H or Esc[<line>;<column>f """ move.pos(line=line, col=column).write(file=file)
[ "def", "move_pos", "(", "line", "=", "1", ",", "column", "=", "1", ",", "file", "=", "sys", ".", "stdout", ")", ":", "move", ".", "pos", "(", "line", "=", "line", ",", "col", "=", "column", ")", ".", "write", "(", "file", "=", "file", ")" ]
28.888889
0.003731
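move_pos() delegates to a helper object, but the escape sequence named in its docstring can be emitted directly. A sketch that writes the raw ANSI cursor-position (CUP) sequence without the original move helper:

import sys

def move_cursor(line=1, column=1, stream=sys.stdout):
    # ESC [ <line> ; <column> H  -- cursor position, 1-based coordinates.
    stream.write("\x1b[{};{}H".format(line, column))
    stream.flush()

move_cursor(5, 10)  # place the cursor at row 5, column 10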
async def deaths(self, root): """Causes of death in the nation, as percentages. Returns ------- an :class:`ApiQuery` of dict with keys of str and values of float """ return { elem.get('type'): float(elem.text) for elem in root.find('DEATHS') }
[ "async", "def", "deaths", "(", "self", ",", "root", ")", ":", "return", "{", "elem", ".", "get", "(", "'type'", ")", ":", "float", "(", "elem", ".", "text", ")", "for", "elem", "in", "root", ".", "find", "(", "'DEATHS'", ")", "}" ]
28.545455
0.006173
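The deaths coroutine is a dict comprehension over the children of a DEATHS node. The same parsing run synchronously on a small hand-written document; the type attribute and float text follow the record, while the CAUSE element name and the numbers are made up for illustration:

import xml.etree.ElementTree as ET

xml = """
<NATION>
  <DEATHS>
    <CAUSE type="Old Age">65.2</CAUSE>
    <CAUSE type="Accidents">12.1</CAUSE>
  </DEATHS>
</NATION>
"""
root = ET.fromstring(xml)
deaths = {elem.get("type"): float(elem.text) for elem in root.find("DEATHS")}
print(deaths)  # {'Old Age': 65.2, 'Accidents': 12.1}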
def func_globals_inject(func, **overrides): ''' Override specific variables within a function's global context. ''' # recognize methods if hasattr(func, 'im_func'): func = func.__func__ # Get a reference to the function globals dictionary func_globals = func.__globals__ # Save the current function globals dictionary state values for the # overridden objects injected_func_globals = [] overridden_func_globals = {} for override in overrides: if override in func_globals: overridden_func_globals[override] = func_globals[override] else: injected_func_globals.append(override) # Override the function globals with what's passed in the above overrides func_globals.update(overrides) # The context is now ready to be used yield # We're now done with the context # Restore the overwritten function globals func_globals.update(overridden_func_globals) # Remove any entry injected in the function globals for injected in injected_func_globals: del func_globals[injected]
[ "def", "func_globals_inject", "(", "func", ",", "*", "*", "overrides", ")", ":", "# recognize methods", "if", "hasattr", "(", "func", ",", "'im_func'", ")", ":", "func", "=", "func", ".", "__func__", "# Get a reference to the function globals dictionary", "func_globals", "=", "func", ".", "__globals__", "# Save the current function globals dictionary state values for the", "# overridden objects", "injected_func_globals", "=", "[", "]", "overridden_func_globals", "=", "{", "}", "for", "override", "in", "overrides", ":", "if", "override", "in", "func_globals", ":", "overridden_func_globals", "[", "override", "]", "=", "func_globals", "[", "override", "]", "else", ":", "injected_func_globals", ".", "append", "(", "override", ")", "# Override the function globals with what's passed in the above overrides", "func_globals", ".", "update", "(", "overrides", ")", "# The context is now ready to be used", "yield", "# We're now done with the context", "# Restore the overwritten function globals", "func_globals", ".", "update", "(", "overridden_func_globals", ")", "# Remove any entry injected in the function globals", "for", "injected", "in", "injected_func_globals", ":", "del", "func_globals", "[", "injected", "]" ]
31.676471
0.000901
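func_globals_inject is written as a generator meant to be driven as a context manager: the yield separates the setup (saving and overriding globals) from the teardown (restoring them). A standalone sketch of the same idea with contextlib.contextmanager applied explicitly; it assumes nothing else about the original module:

import contextlib

@contextlib.contextmanager
def globals_inject(func, **overrides):
    func = getattr(func, "__func__", func)          # unwrap bound methods
    func_globals = func.__globals__
    injected, overridden = [], {}
    for name in overrides:
        if name in func_globals:
            overridden[name] = func_globals[name]   # remember the old value
        else:
            injected.append(name)                   # remember what to delete afterwards
    func_globals.update(overrides)
    try:
        yield
    finally:
        func_globals.update(overridden)             # restore overwritten globals
        for name in injected:
            del func_globals[name]                  # drop purely injected names

GREETING = "hello"

def greet():
    return GREETING

with globals_inject(greet, GREETING="hi"):
    assert greet() == "hi"
assert greet() == "hello"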
def smart_convert(original, colorkey, pixelalpha): """ this method does several tests on a surface to determine the optimal flags and pixel format for each tile surface. this is done for the best rendering speeds and removes the need to convert() the images on your own """ tile_size = original.get_size() threshold = 127 # the default try: # count the number of pixels in the tile that are not transparent px = pygame.mask.from_surface(original, threshold).count() except: # pygame_sdl2 will fail because the mask module is not included # in this case, just convert_alpha and return it return original.convert_alpha() # there are no transparent pixels in the image if px == tile_size[0] * tile_size[1]: tile = original.convert() # there are transparent pixels, and tiled set a colorkey elif colorkey: tile = original.convert() tile.set_colorkey(colorkey, pygame.RLEACCEL) # there are transparent pixels, and set for perpixel alpha elif pixelalpha: tile = original.convert_alpha() # there are transparent pixels, and we won't handle them else: tile = original.convert() return tile
[ "def", "smart_convert", "(", "original", ",", "colorkey", ",", "pixelalpha", ")", ":", "tile_size", "=", "original", ".", "get_size", "(", ")", "threshold", "=", "127", "# the default", "try", ":", "# count the number of pixels in the tile that are not transparent", "px", "=", "pygame", ".", "mask", ".", "from_surface", "(", "original", ",", "threshold", ")", ".", "count", "(", ")", "except", ":", "# pygame_sdl2 will fail because the mask module is not included", "# in this case, just convert_alpha and return it", "return", "original", ".", "convert_alpha", "(", ")", "# there are no transparent pixels in the image", "if", "px", "==", "tile_size", "[", "0", "]", "*", "tile_size", "[", "1", "]", ":", "tile", "=", "original", ".", "convert", "(", ")", "# there are transparent pixels, and tiled set a colorkey", "elif", "colorkey", ":", "tile", "=", "original", ".", "convert", "(", ")", "tile", ".", "set_colorkey", "(", "colorkey", ",", "pygame", ".", "RLEACCEL", ")", "# there are transparent pixels, and set for perpixel alpha", "elif", "pixelalpha", ":", "tile", "=", "original", ".", "convert_alpha", "(", ")", "# there are transparent pixels, and we won't handle them", "else", ":", "tile", "=", "original", ".", "convert", "(", ")", "return", "tile" ]
32.675676
0.001606
async def get_current_directory(self): """ :py:func:`asyncio.coroutine` Getting current working directory. :rtype: :py:class:`pathlib.PurePosixPath` """ code, info = await self.command("PWD", "257") directory = self.parse_directory_response(info[-1]) return directory
[ "async", "def", "get_current_directory", "(", "self", ")", ":", "code", ",", "info", "=", "await", "self", ".", "command", "(", "\"PWD\"", ",", "\"257\"", ")", "directory", "=", "self", ".", "parse_directory_response", "(", "info", "[", "-", "1", "]", ")", "return", "directory" ]
29.363636
0.006006
def get_format_implementation(ext, format_name=None): """Return the implementation for the desired format""" # remove pre-extension if any ext = '.' + ext.split('.')[-1] formats_for_extension = [] for fmt in JUPYTEXT_FORMATS: if fmt.extension == ext: if fmt.format_name == format_name or not format_name: return fmt formats_for_extension.append(fmt.format_name) if formats_for_extension: if ext == '.md' and format_name == 'pandoc': raise JupytextFormatError('Please install pandoc>=2.7.2') raise JupytextFormatError("Format '{}' is not associated to extension '{}'. " "Please choose one of: {}.".format(format_name, ext, ', '.join(formats_for_extension))) raise JupytextFormatError("No format associated to extension '{}'".format(ext))
[ "def", "get_format_implementation", "(", "ext", ",", "format_name", "=", "None", ")", ":", "# remove pre-extension if any", "ext", "=", "'.'", "+", "ext", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "formats_for_extension", "=", "[", "]", "for", "fmt", "in", "JUPYTEXT_FORMATS", ":", "if", "fmt", ".", "extension", "==", "ext", ":", "if", "fmt", ".", "format_name", "==", "format_name", "or", "not", "format_name", ":", "return", "fmt", "formats_for_extension", ".", "append", "(", "fmt", ".", "format_name", ")", "if", "formats_for_extension", ":", "if", "ext", "==", "'.md'", "and", "format_name", "==", "'pandoc'", ":", "raise", "JupytextFormatError", "(", "'Please install pandoc>=2.7.2'", ")", "raise", "JupytextFormatError", "(", "\"Format '{}' is not associated to extension '{}'. \"", "\"Please choose one of: {}.\"", ".", "format", "(", "format_name", ",", "ext", ",", "', '", ".", "join", "(", "formats_for_extension", ")", ")", ")", "raise", "JupytextFormatError", "(", "\"No format associated to extension '{}'\"", ".", "format", "(", "ext", ")", ")" ]
46.35
0.005285
def register_data(self, typename, value): """Registers a data product, raising if a product was already registered. :API: public :param typename: The type of product to register a value for. :param value: The data product to register under `typename`. :returns: The registered `value`. :raises: :class:`ProductError` if a value for the given product `typename` is already registered. """ if typename in self.data_products: raise ProductError('Already have a product registered for {}, cannot over-write with {}' .format(typename, value)) return self.safe_create_data(typename, lambda: value)
[ "def", "register_data", "(", "self", ",", "typename", ",", "value", ")", ":", "if", "typename", "in", "self", ".", "data_products", ":", "raise", "ProductError", "(", "'Already have a product registered for {}, cannot over-write with {}'", ".", "format", "(", "typename", ",", "value", ")", ")", "return", "self", ".", "safe_create_data", "(", "typename", ",", "lambda", ":", "value", ")" ]
43.933333
0.005944
def getcolor(spec): """ Turn optional color string spec into an array. """ if isinstance(spec, str): from matplotlib import colors return asarray(colors.hex2color(colors.cnames[spec])) else: return spec
[ "def", "getcolor", "(", "spec", ")", ":", "if", "isinstance", "(", "spec", ",", "str", ")", ":", "from", "matplotlib", "import", "colors", "return", "asarray", "(", "colors", ".", "hex2color", "(", "colors", ".", "cnames", "[", "spec", "]", ")", ")", "else", ":", "return", "spec" ]
26.444444
0.004065
def propagateFrom(self, startLayer, **args): """ Propagates activation through the network. Optionally, takes input layer names as keywords, and their associated activations. If input layer(s) are given, then propagate() will return the output layer's activation. If there is more than one output layer, then a dictionary is returned. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagate(input = [1, .5])) 1 """ for layerName in args: self[layerName].copyActivations(args[layerName]) # initialize netinput: started = 0 for layer in self.layers: if layer.name == startLayer: started = 1 continue # don't set this one if not started: continue if layer.type != 'Input' and layer.active: layer.netinput = (layer.weight).copy() # for each connection, in order: started = 0 for layer in self.layers: if layer.name == startLayer: started = 1 continue # don't get inputs into this one if not started: continue if layer.active: for connection in self.connections: if connection.active and connection.toLayer.name == layer.name and connection.fromLayer.active: connection.toLayer.netinput = connection.toLayer.netinput + \ Numeric.matrixmultiply(connection.fromLayer.activation,\ connection.weight) # propagate! if layer.type != 'Input': layer.activation = self.activationFunction(layer.netinput) for layer in self.layers: if layer.log and layer.active: layer.writeLog(self) self.count += 1 # counts number of times propagate() is called if len(args) != 0: dict = {} for layer in self.layers: if layer.type == "Output": dict[layer.name] = layer.activation.copy() if len(dict) == 1: return dict[list(dict.keys())[0]] else: return dict
[ "def", "propagateFrom", "(", "self", ",", "startLayer", ",", "*", "*", "args", ")", ":", "for", "layerName", "in", "args", ":", "self", "[", "layerName", "]", ".", "copyActivations", "(", "args", "[", "layerName", "]", ")", "# initialize netinput:", "started", "=", "0", "for", "layer", "in", "self", ".", "layers", ":", "if", "layer", ".", "name", "==", "startLayer", ":", "started", "=", "1", "continue", "# don't set this one", "if", "not", "started", ":", "continue", "if", "layer", ".", "type", "!=", "'Input'", "and", "layer", ".", "active", ":", "layer", ".", "netinput", "=", "(", "layer", ".", "weight", ")", ".", "copy", "(", ")", "# for each connection, in order:", "started", "=", "0", "for", "layer", "in", "self", ".", "layers", ":", "if", "layer", ".", "name", "==", "startLayer", ":", "started", "=", "1", "continue", "# don't get inputs into this one", "if", "not", "started", ":", "continue", "if", "layer", ".", "active", ":", "for", "connection", "in", "self", ".", "connections", ":", "if", "connection", ".", "active", "and", "connection", ".", "toLayer", ".", "name", "==", "layer", ".", "name", "and", "connection", ".", "fromLayer", ".", "active", ":", "connection", ".", "toLayer", ".", "netinput", "=", "connection", ".", "toLayer", ".", "netinput", "+", "Numeric", ".", "matrixmultiply", "(", "connection", ".", "fromLayer", ".", "activation", ",", "connection", ".", "weight", ")", "# propagate!", "if", "layer", ".", "type", "!=", "'Input'", ":", "layer", ".", "activation", "=", "self", ".", "activationFunction", "(", "layer", ".", "netinput", ")", "for", "layer", "in", "self", ".", "layers", ":", "if", "layer", ".", "log", "and", "layer", ".", "active", ":", "layer", ".", "writeLog", "(", "self", ")", "self", ".", "count", "+=", "1", "# counts number of times propagate() is called", "if", "len", "(", "args", ")", "!=", "0", ":", "dict", "=", "{", "}", "for", "layer", "in", "self", ".", "layers", ":", "if", "layer", ".", "type", "==", "\"Output\"", ":", "dict", "[", "layer", ".", "name", "]", "=", "layer", ".", "activation", ".", "copy", "(", ")", "if", "len", "(", "dict", ")", "==", "1", ":", "return", "dict", "[", "list", "(", "dict", ".", "keys", "(", ")", ")", "[", "0", "]", "]", "else", ":", "return", "dict" ]
43.555556
0.007069
def apply_same_chip_constraints(vertices_resources, nets, constraints): """Modify a set of vertices_resources, nets and constraints to account for all SameChipConstraints. To allow placement algorithms to handle SameChipConstraints without any special cases, Vertices identified in a SameChipConstraint are merged into a new vertex whose vertices_resources are the sum total of their parts which may be placed as if a single vertex. Once placed, the placement can be expanded into a full placement of all the original vertices using :py:func:`finalise_same_chip_constraints`. A typical use pattern might look like:: def my_placer(vertices_resources, nets, machine, constraints): # Should be done first thing since this may redefine # vertices_resources, nets and constraints. vertices_resources, nets, constraints, substitutions = \\ apply_same_chip_constraints(vertices_resources, nets, constraints) # ...deal with other types of constraint... # ...perform placement... finalise_same_chip_constraints(substitutions, placements) return placements Note that this function does not modify its arguments but rather returns new copies of the structures supplied. Parameters ---------- vertices_resources : {vertex: {resource: quantity, ...}, ...} nets : [:py:class:`~rig.netlist.Net`, ...] constraints : [constraint, ...] Returns ------- (vertices_resources, nets, constraints, substitutions) The vertices_resources, nets and constraints values contain modified copies of the supplied data structures modified to contain a single vertex in place of the individual constrained vertices. substitutions is a list of :py:class:`MergedVertex` objects which resulted from the combining of the constrained vertices. The order of the list is the order the substitutions were carried out. The :py:func:`finalise_same_chip_constraints` function can be used to expand a set of substitutions. """ # Make a copy of the basic structures to be modified by this function vertices_resources = vertices_resources.copy() nets = nets[:] constraints = constraints[:] substitutions = [] for same_chip_constraint in constraints: if not isinstance(same_chip_constraint, SameChipConstraint): continue # Skip constraints which don't actually merge anything... if len(same_chip_constraint.vertices) <= 1: continue # The new (merged) vertex with which to replace the constrained # vertices merged_vertex = MergedVertex(same_chip_constraint.vertices) substitutions.append(merged_vertex) # A set containing the set of vertices to be merged (to remove # duplicates) merged_vertices = set(same_chip_constraint.vertices) # Remove the merged vertices from the set of vertices resources and # accumulate the total resources consumed. Note add_resources is not # used since we don't know if the resources consumed by each vertex are # overlapping. 
total_resources = {} for vertex in merged_vertices: resources = vertices_resources.pop(vertex) for resource, value in iteritems(resources): total_resources[resource] = (total_resources.get(resource, 0) + value) vertices_resources[merged_vertex] = total_resources # Update any nets which pointed to a merged vertex for net_num, net in enumerate(nets): net_changed = False # Change net sources if net.source in merged_vertices: net_changed = True net = Net(merged_vertex, net.sinks, net.weight) # Change net sinks for sink_num, sink in enumerate(net.sinks): if sink in merged_vertices: if not net_changed: net = Net(net.source, net.sinks, net.weight) net_changed = True net.sinks[sink_num] = merged_vertex if net_changed: nets[net_num] = net # Update any constraints which refer to a merged vertex for constraint_num, constraint in enumerate(constraints): if isinstance(constraint, LocationConstraint): if constraint.vertex in merged_vertices: constraints[constraint_num] = LocationConstraint( merged_vertex, constraint.location) elif isinstance(constraint, SameChipConstraint): if not set(constraint.vertices).isdisjoint(merged_vertices): constraints[constraint_num] = SameChipConstraint([ merged_vertex if v in merged_vertices else v for v in constraint.vertices ]) elif isinstance(constraint, RouteEndpointConstraint): if constraint.vertex in merged_vertices: constraints[constraint_num] = RouteEndpointConstraint( merged_vertex, constraint.route) return (vertices_resources, nets, constraints, substitutions)
[ "def", "apply_same_chip_constraints", "(", "vertices_resources", ",", "nets", ",", "constraints", ")", ":", "# Make a copy of the basic structures to be modified by this function", "vertices_resources", "=", "vertices_resources", ".", "copy", "(", ")", "nets", "=", "nets", "[", ":", "]", "constraints", "=", "constraints", "[", ":", "]", "substitutions", "=", "[", "]", "for", "same_chip_constraint", "in", "constraints", ":", "if", "not", "isinstance", "(", "same_chip_constraint", ",", "SameChipConstraint", ")", ":", "continue", "# Skip constraints which don't actually merge anything...", "if", "len", "(", "same_chip_constraint", ".", "vertices", ")", "<=", "1", ":", "continue", "# The new (merged) vertex with which to replace the constrained", "# vertices", "merged_vertex", "=", "MergedVertex", "(", "same_chip_constraint", ".", "vertices", ")", "substitutions", ".", "append", "(", "merged_vertex", ")", "# A set containing the set of vertices to be merged (to remove", "# duplicates)", "merged_vertices", "=", "set", "(", "same_chip_constraint", ".", "vertices", ")", "# Remove the merged vertices from the set of vertices resources and", "# accumulate the total resources consumed. Note add_resources is not", "# used since we don't know if the resources consumed by each vertex are", "# overlapping.", "total_resources", "=", "{", "}", "for", "vertex", "in", "merged_vertices", ":", "resources", "=", "vertices_resources", ".", "pop", "(", "vertex", ")", "for", "resource", ",", "value", "in", "iteritems", "(", "resources", ")", ":", "total_resources", "[", "resource", "]", "=", "(", "total_resources", ".", "get", "(", "resource", ",", "0", ")", "+", "value", ")", "vertices_resources", "[", "merged_vertex", "]", "=", "total_resources", "# Update any nets which pointed to a merged vertex", "for", "net_num", ",", "net", "in", "enumerate", "(", "nets", ")", ":", "net_changed", "=", "False", "# Change net sources", "if", "net", ".", "source", "in", "merged_vertices", ":", "net_changed", "=", "True", "net", "=", "Net", "(", "merged_vertex", ",", "net", ".", "sinks", ",", "net", ".", "weight", ")", "# Change net sinks", "for", "sink_num", ",", "sink", "in", "enumerate", "(", "net", ".", "sinks", ")", ":", "if", "sink", "in", "merged_vertices", ":", "if", "not", "net_changed", ":", "net", "=", "Net", "(", "net", ".", "source", ",", "net", ".", "sinks", ",", "net", ".", "weight", ")", "net_changed", "=", "True", "net", ".", "sinks", "[", "sink_num", "]", "=", "merged_vertex", "if", "net_changed", ":", "nets", "[", "net_num", "]", "=", "net", "# Update any constraints which refer to a merged vertex", "for", "constraint_num", ",", "constraint", "in", "enumerate", "(", "constraints", ")", ":", "if", "isinstance", "(", "constraint", ",", "LocationConstraint", ")", ":", "if", "constraint", ".", "vertex", "in", "merged_vertices", ":", "constraints", "[", "constraint_num", "]", "=", "LocationConstraint", "(", "merged_vertex", ",", "constraint", ".", "location", ")", "elif", "isinstance", "(", "constraint", ",", "SameChipConstraint", ")", ":", "if", "not", "set", "(", "constraint", ".", "vertices", ")", ".", "isdisjoint", "(", "merged_vertices", ")", ":", "constraints", "[", "constraint_num", "]", "=", "SameChipConstraint", "(", "[", "merged_vertex", "if", "v", "in", "merged_vertices", "else", "v", "for", "v", "in", "constraint", ".", "vertices", "]", ")", "elif", "isinstance", "(", "constraint", ",", "RouteEndpointConstraint", ")", ":", "if", "constraint", ".", "vertex", "in", "merged_vertices", ":", 
"constraints", "[", "constraint_num", "]", "=", "RouteEndpointConstraint", "(", "merged_vertex", ",", "constraint", ".", "route", ")", "return", "(", "vertices_resources", ",", "nets", ",", "constraints", ",", "substitutions", ")" ]
43.081301
0.000184
def _RunAction(self, rule, client_id): """Run all the actions specified in the rule. Args: rule: Rule which actions are to be executed. client_id: Id of a client where rule's actions are to be executed. Returns: Number of actions started. """ actions_count = 0 try: if self._CheckIfHuntTaskWasAssigned(client_id, rule.hunt_id): logging.info( "Foreman: ignoring hunt %s on client %s: was started " "here before", client_id, rule.hunt_id) else: logging.info("Foreman: Starting hunt %s on client %s.", rule.hunt_id, client_id) # hunt_name is only used for legacy hunts. if rule.hunt_name: flow_cls = registry.AFF4FlowRegistry.FlowClassByName(rule.hunt_name) hunt_urn = rdfvalue.RDFURN("aff4:/hunts/%s" % rule.hunt_id) flow_cls.StartClients(hunt_urn, [client_id]) else: hunt.StartHuntFlowOnClient(client_id, rule.hunt_id) actions_count += 1 # There could be all kinds of errors we don't know about when starting the # hunt so we catch everything here. except Exception as e: # pylint: disable=broad-except logging.exception("Failure running foreman action on client %s: %s", rule.hunt_id, e) return actions_count
[ "def", "_RunAction", "(", "self", ",", "rule", ",", "client_id", ")", ":", "actions_count", "=", "0", "try", ":", "if", "self", ".", "_CheckIfHuntTaskWasAssigned", "(", "client_id", ",", "rule", ".", "hunt_id", ")", ":", "logging", ".", "info", "(", "\"Foreman: ignoring hunt %s on client %s: was started \"", "\"here before\"", ",", "client_id", ",", "rule", ".", "hunt_id", ")", "else", ":", "logging", ".", "info", "(", "\"Foreman: Starting hunt %s on client %s.\"", ",", "rule", ".", "hunt_id", ",", "client_id", ")", "# hunt_name is only used for legacy hunts.", "if", "rule", ".", "hunt_name", ":", "flow_cls", "=", "registry", ".", "AFF4FlowRegistry", ".", "FlowClassByName", "(", "rule", ".", "hunt_name", ")", "hunt_urn", "=", "rdfvalue", ".", "RDFURN", "(", "\"aff4:/hunts/%s\"", "%", "rule", ".", "hunt_id", ")", "flow_cls", ".", "StartClients", "(", "hunt_urn", ",", "[", "client_id", "]", ")", "else", ":", "hunt", ".", "StartHuntFlowOnClient", "(", "client_id", ",", "rule", ".", "hunt_id", ")", "actions_count", "+=", "1", "# There could be all kinds of errors we don't know about when starting the", "# hunt so we catch everything here.", "except", "Exception", "as", "e", ":", "# pylint: disable=broad-except", "logging", ".", "exception", "(", "\"Failure running foreman action on client %s: %s\"", ",", "rule", ".", "hunt_id", ",", "e", ")", "return", "actions_count" ]
34.526316
0.00593
def is_newer_than(pth1, pth2): """ Return True if either file pth1 or file pth2 does not exist, or if pth1 has been modified more recently than pth2 """ return not os.path.exists(pth1) or not os.path.exists(pth2) or \ os.stat(pth1).st_mtime > os.stat(pth2).st_mtime
[ "def", "is_newer_than", "(", "pth1", ",", "pth2", ")", ":", "return", "not", "os", ".", "path", ".", "exists", "(", "pth1", ")", "or", "not", "os", ".", "path", ".", "exists", "(", "pth2", ")", "or", "os", ".", "stat", "(", "pth1", ")", ".", "st_mtime", ">", "os", ".", "stat", "(", "pth2", ")", ".", "st_mtime" ]
35.375
0.006897
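A minimal usage sketch for is_newer_than above, e.g. as a make-style rebuild check. The function body is copied from the entry so the snippet runs on its own; the two file paths are hypothetical.

import os

def is_newer_than(pth1, pth2):
    # Copied from the entry above: a missing file on either side counts
    # as "needs rebuilding", otherwise compare modification times.
    return not os.path.exists(pth1) or not os.path.exists(pth2) or \
        os.stat(pth1).st_mtime > os.stat(pth2).st_mtime

# Hypothetical example: only regenerate the report if the source changed.
if is_newer_than("data/source.csv", "build/report.html"):
    print("source is newer (or a file is missing) - regenerate the report")
else:
    print("report is up to date")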
def app(session_id, app, bind, port): """ Run a local proxy to a service provided by Backend.AI compute sessions. The type of proxy depends on the app definition: plain TCP or HTTP. \b SESSID: The compute session ID. APP: The name of service provided by the given session. """ api_session = None runner = None async def app_setup(): nonlocal api_session, runner loop = current_loop() api_session = AsyncSession() # TODO: generalize protocol using service ports metadata protocol = 'http' runner = ProxyRunner(api_session, session_id, app, protocol, bind, port, loop=loop) await runner.ready() print_info( "A local proxy to the application \"{0}\" ".format(app) + "provided by the session \"{0}\" ".format(session_id) + "is available at: {0}://{1}:{2}" .format(protocol, bind, port) ) async def app_shutdown(): nonlocal api_session, runner print_info("Shutting down....") await runner.close() await api_session.close() print_info("The local proxy to \"{}\" has terminated." .format(app)) asyncio_run_forever(app_setup(), app_shutdown(), stop_signals={signal.SIGINT, signal.SIGTERM})
[ "def", "app", "(", "session_id", ",", "app", ",", "bind", ",", "port", ")", ":", "api_session", "=", "None", "runner", "=", "None", "async", "def", "app_setup", "(", ")", ":", "nonlocal", "api_session", ",", "runner", "loop", "=", "current_loop", "(", ")", "api_session", "=", "AsyncSession", "(", ")", "# TODO: generalize protocol using service ports metadata", "protocol", "=", "'http'", "runner", "=", "ProxyRunner", "(", "api_session", ",", "session_id", ",", "app", ",", "protocol", ",", "bind", ",", "port", ",", "loop", "=", "loop", ")", "await", "runner", ".", "ready", "(", ")", "print_info", "(", "\"A local proxy to the application \\\"{0}\\\" \"", ".", "format", "(", "app", ")", "+", "\"provided by the session \\\"{0}\\\" \"", ".", "format", "(", "session_id", ")", "+", "\"is available at: {0}://{1}:{2}\"", ".", "format", "(", "protocol", ",", "bind", ",", "port", ")", ")", "async", "def", "app_shutdown", "(", ")", ":", "nonlocal", "api_session", ",", "runner", "print_info", "(", "\"Shutting down....\"", ")", "await", "runner", ".", "close", "(", ")", "await", "api_session", ".", "close", "(", ")", "print_info", "(", "\"The local proxy to \\\"{}\\\" has terminated.\"", ".", "format", "(", "app", ")", ")", "asyncio_run_forever", "(", "app_setup", "(", ")", ",", "app_shutdown", "(", ")", ",", "stop_signals", "=", "{", "signal", ".", "SIGINT", ",", "signal", ".", "SIGTERM", "}", ")" ]
33.9
0.000717
def date(self) -> Optional[DateHeader]: """The ``Date`` header.""" try: return cast(DateHeader, self[b'date'][0]) except (KeyError, IndexError): return None
[ "def", "date", "(", "self", ")", "->", "Optional", "[", "DateHeader", "]", ":", "try", ":", "return", "cast", "(", "DateHeader", ",", "self", "[", "b'date'", "]", "[", "0", "]", ")", "except", "(", "KeyError", ",", "IndexError", ")", ":", "return", "None" ]
33.166667
0.009804
def on(self): """Send an ON message to device group.""" on_command = ExtendedSend(self._address, COMMAND_LIGHT_ON_0X11_NONE, self._udata, cmd2=0xff) on_command.set_checksum() self._send_method(on_command, self._on_message_received)
[ "def", "on", "(", "self", ")", ":", "on_command", "=", "ExtendedSend", "(", "self", ".", "_address", ",", "COMMAND_LIGHT_ON_0X11_NONE", ",", "self", ".", "_udata", ",", "cmd2", "=", "0xff", ")", "on_command", ".", "set_checksum", "(", ")", "self", ".", "_send_method", "(", "on_command", ",", "self", ".", "_on_message_received", ")" ]
44.75
0.005479
def get_compression_type(self, file_name): """ Determine compression type for a given file using its extension. :param file_name: a given file name :type file_name: str """ ext = os.path.splitext(file_name)[1] if ext == '.gz': self.ctype = 'gzip' elif ext == '.bz2': self.ctype = 'bzip2' elif ext in ('.xz', '.lzma'): self.ctype = 'lzma' else: self.ctype = None
[ "def", "get_compression_type", "(", "self", ",", "file_name", ")", ":", "ext", "=", "os", ".", "path", ".", "splitext", "(", "file_name", ")", "[", "1", "]", "if", "ext", "==", "'.gz'", ":", "self", ".", "ctype", "=", "'gzip'", "elif", "ext", "==", "'.bz2'", ":", "self", ".", "ctype", "=", "'bzip2'", "elif", "ext", "in", "(", "'.xz'", ",", "'.lzma'", ")", ":", "self", ".", "ctype", "=", "'lzma'", "else", ":", "self", ".", "ctype", "=", "None" ]
27.055556
0.003968
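get_compression_type above stores its result on self.ctype; the sketch below is a hedged, standalone adaptation of the same extension checks as a plain function, so the mapping can be exercised without the surrounding class.

import os

def guess_compression(file_name):
    # Same extension checks as get_compression_type, returned instead of
    # being assigned to self.ctype.
    ext = os.path.splitext(file_name)[1]
    if ext == '.gz':
        return 'gzip'
    elif ext == '.bz2':
        return 'bzip2'
    elif ext in ('.xz', '.lzma'):
        return 'lzma'
    return None

print(guess_compression("backup.tar.gz"))   # gzip
print(guess_compression("archive.tar.xz"))  # lzma
print(guess_compression("notes.txt"))       # None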
def restricted_cover(l, succsOf): """ Returns a restricted <succsOf> which only takes and yields values from <l> """ fzl = frozenset(l) lut = dict() for i in l: lut[i] = fzl.intersection(succsOf(i)) return lambda x: lut[x]
[ "def", "restricted_cover", "(", "l", ",", "succsOf", ")", ":", "fzl", "=", "frozenset", "(", "l", ")", "lut", "=", "dict", "(", ")", "for", "i", "in", "l", ":", "lut", "[", "i", "]", "=", "fzl", ".", "intersection", "(", "succsOf", "(", "i", ")", ")", "return", "lambda", "x", ":", "lut", "[", "x", "]" ]
31.375
0.007752
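A worked example for restricted_cover above, using a hypothetical successor function on integers; it shows how successors falling outside the restricted set are dropped from the memoised lookup.

def restricted_cover(l, succsOf):
    # Copied from the entry above: precompute, for every element of l,
    # the successors that are themselves members of l.
    fzl = frozenset(l)
    lut = dict()
    for i in l:
        lut[i] = fzl.intersection(succsOf(i))
    return lambda x: lut[x]

# Hypothetical graph: each node i has successors i+1 and i+2.
succs = lambda i: {i + 1, i + 2}
restricted = restricted_cover([1, 2, 3, 5], succs)
print(restricted(1))  # frozenset({2, 3})
print(restricted(3))  # frozenset({5})   -- 4 is not in the restricted set
print(restricted(5))  # frozenset()      -- 6 and 7 are outside the set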
def _spelling_pipeline(self, sources, options, personal_dict): """Check spelling pipeline.""" for source in self._pipeline_step(sources, options, personal_dict): # Don't waste time on empty strings if source._has_error(): yield Results([], source.context, source.category, source.error) elif not source.text or source.text.isspace(): continue else: encoding = source.encoding if source._is_bytes(): text = source.text else: # UTF-16 and UTF-32 don't work well with Aspell and Hunspell, # so encode with the compatible UTF-8 instead. if encoding.startswith(('utf-16', 'utf-32')): encoding = 'utf-8' text = source.text.encode(encoding) self.log('', 3) self.log(text, 3) cmd = self.setup_command(encoding, options, personal_dict) self.log("Command: " + str(cmd), 4) try: wordlist = util.call_spellchecker(cmd, input_text=text, encoding=encoding) yield Results( [w for w in sorted(set(wordlist.replace('\r', '').split('\n'))) if w], source.context, source.category ) except Exception as e: # pragma: no cover err = self.get_error(e) yield Results([], source.context, source.category, err)
[ "def", "_spelling_pipeline", "(", "self", ",", "sources", ",", "options", ",", "personal_dict", ")", ":", "for", "source", "in", "self", ".", "_pipeline_step", "(", "sources", ",", "options", ",", "personal_dict", ")", ":", "# Don't waste time on empty strings", "if", "source", ".", "_has_error", "(", ")", ":", "yield", "Results", "(", "[", "]", ",", "source", ".", "context", ",", "source", ".", "category", ",", "source", ".", "error", ")", "elif", "not", "source", ".", "text", "or", "source", ".", "text", ".", "isspace", "(", ")", ":", "continue", "else", ":", "encoding", "=", "source", ".", "encoding", "if", "source", ".", "_is_bytes", "(", ")", ":", "text", "=", "source", ".", "text", "else", ":", "# UTF-16 and UTF-32 don't work well with Aspell and Hunspell,", "# so encode with the compatible UTF-8 instead.", "if", "encoding", ".", "startswith", "(", "(", "'utf-16'", ",", "'utf-32'", ")", ")", ":", "encoding", "=", "'utf-8'", "text", "=", "source", ".", "text", ".", "encode", "(", "encoding", ")", "self", ".", "log", "(", "''", ",", "3", ")", "self", ".", "log", "(", "text", ",", "3", ")", "cmd", "=", "self", ".", "setup_command", "(", "encoding", ",", "options", ",", "personal_dict", ")", "self", ".", "log", "(", "\"Command: \"", "+", "str", "(", "cmd", ")", ",", "4", ")", "try", ":", "wordlist", "=", "util", ".", "call_spellchecker", "(", "cmd", ",", "input_text", "=", "text", ",", "encoding", "=", "encoding", ")", "yield", "Results", "(", "[", "w", "for", "w", "in", "sorted", "(", "set", "(", "wordlist", ".", "replace", "(", "'\\r'", ",", "''", ")", ".", "split", "(", "'\\n'", ")", ")", ")", "if", "w", "]", ",", "source", ".", "context", ",", "source", ".", "category", ")", "except", "Exception", "as", "e", ":", "# pragma: no cover", "err", "=", "self", ".", "get_error", "(", "e", ")", "yield", "Results", "(", "[", "]", ",", "source", ".", "context", ",", "source", ".", "category", ",", "err", ")" ]
46.764706
0.003697
def dict_fields(obj, parent=[]): """ reads a dictionary and returns a dict of fields joined with dot notation args: obj: the dictionary to parse parent: name for a parent key. used with a recursive call """ rtn_obj = {} for key, value in obj.items(): new_key = parent + [key] new_key = ".".join(new_key) if isinstance(value, list): if value: value = value[0] if isinstance(value, dict): rtn_obj.update(dict_fields(value, [new_key])) else: rtn_obj.update({new_key: value}) return rtn_obj
[ "def", "dict_fields", "(", "obj", ",", "parent", "=", "[", "]", ")", ":", "rtn_obj", "=", "{", "}", "for", "key", ",", "value", "in", "obj", ".", "items", "(", ")", ":", "new_key", "=", "parent", "+", "[", "key", "]", "new_key", "=", "\".\"", ".", "join", "(", "new_key", ")", "if", "isinstance", "(", "value", ",", "list", ")", ":", "if", "value", ":", "value", "=", "value", "[", "0", "]", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "rtn_obj", ".", "update", "(", "dict_fields", "(", "value", ",", "[", "new_key", "]", ")", ")", "else", ":", "rtn_obj", ".", "update", "(", "{", "new_key", ":", "value", "}", ")", "return", "rtn_obj" ]
30.047619
0.001536
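A short worked example for dict_fields above, showing the dot-notation flattening and how a list value is reduced to its first element before recursing. The input record is made up.

def dict_fields(obj, parent=[]):
    # Copied from the entry above.
    rtn_obj = {}
    for key, value in obj.items():
        new_key = parent + [key]
        new_key = ".".join(new_key)
        if isinstance(value, list):
            if value:
                value = value[0]
        if isinstance(value, dict):
            rtn_obj.update(dict_fields(value, [new_key]))
        else:
            rtn_obj.update({new_key: value})
    return rtn_obj

record = {"name": "ada", "address": {"city": "London", "geo": [{"lat": 51.5}]}}
print(dict_fields(record))
# {'name': 'ada', 'address.city': 'London', 'address.geo.lat': 51.5}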
def binary_cross_entropy_loss_with_logits(x, target, name=None): """Calculates the binary cross entropy between sigmoid(x) and target. Expects unscaled logits. Do not pass in results of sigmoid operation. Args: x: the calculated pre-sigmoid values target: the desired values. name: the name for this op, defaults to binary_cross_entropy_with_logits Returns: -(target * -softplus(-x) + (1-target) * (-x - softplus(-x))) Raises: ValueError: If shapes are incompatible. """ with tf.name_scope(name, 'binary_cross_entropy_with_logits', [x, target]) as scope: x.get_shape().assert_is_compatible_with(target.get_shape()) neg_softplus = -tf.nn.softplus(-x) return -tf.add(tf.multiply(target, neg_softplus), tf.multiply(1 - target, -x + neg_softplus), name=scope)
[ "def", "binary_cross_entropy_loss_with_logits", "(", "x", ",", "target", ",", "name", "=", "None", ")", ":", "with", "tf", ".", "name_scope", "(", "name", ",", "'binary_cross_entropy_with_logits'", ",", "[", "x", ",", "target", "]", ")", "as", "scope", ":", "x", ".", "get_shape", "(", ")", ".", "assert_is_compatible_with", "(", "target", ".", "get_shape", "(", ")", ")", "neg_softplus", "=", "-", "tf", ".", "nn", ".", "softplus", "(", "-", "x", ")", "return", "-", "tf", ".", "add", "(", "tf", ".", "multiply", "(", "target", ",", "neg_softplus", ")", ",", "tf", ".", "multiply", "(", "1", "-", "target", ",", "-", "x", "+", "neg_softplus", ")", ",", "name", "=", "scope", ")" ]
40.095238
0.00348
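The logits form in the entry above relies on the identities log sigmoid(x) = -softplus(-x) and log(1 - sigmoid(x)) = -x - softplus(-x). Below is a small NumPy check of that equivalence, with made-up logits and targets (no TensorFlow required).

import numpy as np

def softplus(x):
    return np.log1p(np.exp(x))

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

x = np.array([-3.0, -0.5, 0.0, 2.0])   # pre-sigmoid logits
t = np.array([0.0, 1.0, 1.0, 0.0])     # targets

# Textbook binary cross entropy computed on sigmoid(x).
bce_direct = -(t * np.log(sigmoid(x)) + (1 - t) * np.log(1 - sigmoid(x)))

# The unscaled-logits form used by the entry above.
bce_logits = -(t * -softplus(-x) + (1 - t) * (-x - softplus(-x)))

print(np.allclose(bce_direct, bce_logits))  # True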
def depends_on_helper(obj): """ Handles using .title if the given object is a troposphere resource. If the given object is a troposphere resource, use the `.title` attribute of that resource. If it's a string, just use the string. This should allow more pythonic use of DependsOn. """ if isinstance(obj, AWSObject): return obj.title elif isinstance(obj, list): return list(map(depends_on_helper, obj)) return obj
[ "def", "depends_on_helper", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "AWSObject", ")", ":", "return", "obj", ".", "title", "elif", "isinstance", "(", "obj", ",", "list", ")", ":", "return", "list", "(", "map", "(", "depends_on_helper", ",", "obj", ")", ")", "return", "obj" ]
37.5
0.002169
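depends_on_helper above dispatches on troposphere's AWSObject; the sketch below uses a hypothetical stand-in class (FakeAWSObject) purely to demonstrate the same resolution of resources, lists of resources, and plain strings without requiring troposphere to be installed.

class FakeAWSObject:
    # Hypothetical stand-in with just the .title attribute the helper reads.
    def __init__(self, title):
        self.title = title

def depends_on_helper(obj):
    # Same structure as the entry above, with the stand-in class swapped in.
    if isinstance(obj, FakeAWSObject):
        return obj.title
    elif isinstance(obj, list):
        return list(map(depends_on_helper, obj))
    return obj

bucket = FakeAWSObject("LogBucket")
print(depends_on_helper(bucket))                  # 'LogBucket'
print(depends_on_helper([bucket, "ManualName"]))  # ['LogBucket', 'ManualName']
print(depends_on_helper("JustAString"))           # 'JustAString'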
def from_spec(cls, spec): """ Load a Custodian instance where the jobs are specified from a structure and a spec dict. This allows simple custom job sequences to be constructed quickly via a YAML file. Args: spec (dict): A dict specifying job. A sample of the dict in YAML format for the usual MP workflow is given as follows ``` jobs: - jb: custodian.vasp.jobs.VaspJob params: final: False suffix: .relax1 - jb: custodian.vasp.jobs.VaspJob params: final: True suffix: .relax2 settings_override: {"file": "CONTCAR", "action": {"_file_copy": {"dest": "POSCAR"}} jobs_common_params: vasp_cmd: /opt/vasp handlers: - hdlr: custodian.vasp.handlers.VaspErrorHandler - hdlr: custodian.vasp.handlers.AliasingErrorHandler - hdlr: custodian.vasp.handlers.MeshSymmetryHandler validators: - vldr: custodian.vasp.validators.VasprunXMLValidator custodian_params: scratch_dir: /tmp ``` The `jobs` key is a list of jobs. Each job is specified via "job": <explicit path>, and all parameters are specified via `params` which is a dict. `common_params` specify a common set of parameters that are passed to all jobs, e.g., vasp_cmd. Returns: Custodian instance. """ dec = MontyDecoder() def load_class(dotpath): modname, classname = dotpath.rsplit(".", 1) mod = __import__(modname, globals(), locals(), [classname], 0) return getattr(mod, classname) def process_params(d): decoded = {} for k, v in d.items(): if k.startswith("$"): if isinstance(v, list): v = [os.path.expandvars(i) for i in v] elif isinstance(v, dict): v = {k2: os.path.expandvars(v2) for k2, v2 in v.items()} else: v = os.path.expandvars(v) decoded[k.strip("$")] = dec.process_decoded(v) return decoded jobs = [] common_params = process_params(spec.get("jobs_common_params", {})) for d in spec["jobs"]: cls_ = load_class(d["jb"]) params = process_params(d.get("params", {})) params.update(common_params) jobs.append(cls_(**params)) handlers = [] for d in spec.get("handlers", []): cls_ = load_class(d["hdlr"]) params = process_params(d.get("params", {})) handlers.append(cls_(**params)) validators = [] for d in spec.get("validators", []): cls_ = load_class(d["vldr"]) params = process_params(d.get("params", {})) validators.append(cls_(**params)) custodian_params = process_params(spec.get("custodian_params", {})) return cls(jobs=jobs, handlers=handlers, validators=validators, **custodian_params)
[ "def", "from_spec", "(", "cls", ",", "spec", ")", ":", "dec", "=", "MontyDecoder", "(", ")", "def", "load_class", "(", "dotpath", ")", ":", "modname", ",", "classname", "=", "dotpath", ".", "rsplit", "(", "\".\"", ",", "1", ")", "mod", "=", "__import__", "(", "modname", ",", "globals", "(", ")", ",", "locals", "(", ")", ",", "[", "classname", "]", ",", "0", ")", "return", "getattr", "(", "mod", ",", "classname", ")", "def", "process_params", "(", "d", ")", ":", "decoded", "=", "{", "}", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", ":", "if", "k", ".", "startswith", "(", "\"$\"", ")", ":", "if", "isinstance", "(", "v", ",", "list", ")", ":", "v", "=", "[", "os", ".", "path", ".", "expandvars", "(", "i", ")", "for", "i", "in", "v", "]", "elif", "isinstance", "(", "v", ",", "dict", ")", ":", "v", "=", "{", "k2", ":", "os", ".", "path", ".", "expandvars", "(", "v2", ")", "for", "k2", ",", "v2", "in", "v", ".", "items", "(", ")", "}", "else", ":", "v", "=", "os", ".", "path", ".", "expandvars", "(", "v", ")", "decoded", "[", "k", ".", "strip", "(", "\"$\"", ")", "]", "=", "dec", ".", "process_decoded", "(", "v", ")", "return", "decoded", "jobs", "=", "[", "]", "common_params", "=", "process_params", "(", "spec", ".", "get", "(", "\"jobs_common_params\"", ",", "{", "}", ")", ")", "for", "d", "in", "spec", "[", "\"jobs\"", "]", ":", "cls_", "=", "load_class", "(", "d", "[", "\"jb\"", "]", ")", "params", "=", "process_params", "(", "d", ".", "get", "(", "\"params\"", ",", "{", "}", ")", ")", "params", ".", "update", "(", "common_params", ")", "jobs", ".", "append", "(", "cls_", "(", "*", "*", "params", ")", ")", "handlers", "=", "[", "]", "for", "d", "in", "spec", ".", "get", "(", "\"handlers\"", ",", "[", "]", ")", ":", "cls_", "=", "load_class", "(", "d", "[", "\"hdlr\"", "]", ")", "params", "=", "process_params", "(", "d", ".", "get", "(", "\"params\"", ",", "{", "}", ")", ")", "handlers", ".", "append", "(", "cls_", "(", "*", "*", "params", ")", ")", "validators", "=", "[", "]", "for", "d", "in", "spec", ".", "get", "(", "\"validators\"", ",", "[", "]", ")", ":", "cls_", "=", "load_class", "(", "d", "[", "\"vldr\"", "]", ")", "params", "=", "process_params", "(", "d", ".", "get", "(", "\"params\"", ",", "{", "}", ")", ")", "validators", ".", "append", "(", "cls_", "(", "*", "*", "params", ")", ")", "custodian_params", "=", "process_params", "(", "spec", ".", "get", "(", "\"custodian_params\"", ",", "{", "}", ")", ")", "return", "cls", "(", "jobs", "=", "jobs", ",", "handlers", "=", "handlers", ",", "validators", "=", "validators", ",", "*", "*", "custodian_params", ")" ]
37.089888
0.00118
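A hedged usage sketch for Custodian.from_spec above: load a YAML spec and run the resulting Custodian instance. It assumes the custodian and pyyaml packages are installed and that spec.yaml (a hypothetical file name) holds a document shaped like the example in the docstring.

import yaml
from custodian.custodian import Custodian

# spec.yaml is assumed to contain jobs / handlers / validators /
# custodian_params sections as illustrated in the docstring above.
with open("spec.yaml") as f:
    spec = yaml.safe_load(f)

c = Custodian.from_spec(spec)
c.run()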
def download_files(fapi, file_name, conf, use_cache, cache_dir=None): """ Downloads translated versions of the files """ retrieval_type = conf.get('retrieval-type', 'published') include_original_strings = 'true' if conf.get('include-original-strings', False) else 'false' save_pattern = conf.get('save-pattern') if not save_pattern: raise SmarterlingError("File %s doesn't have a save-pattern" % file_name) if cache_dir and not os.path.exists(cache_dir): print("Creating cache dir: %s" % (cache_dir, )) os.makedirs(cache_dir) uri = file_uri(file_name, conf) translated_items = get_translated_items(fapi, uri, use_cache, cache_dir=cache_dir) for item in translated_items: item = AttributeDict(item) locale = item.locale locale_underscore = locale.replace("-", "_") locale_android_res = locale.replace("-", "-r") locale_parts = locale.split("-") language = locale_parts[0] region = locale_parts[1] if len(locale_parts)>1 else "" file_response = get_translated_file( fapi, file_uri(file_name, conf), locale, retrieval_type, include_original_strings, use_cache, cache_dir=cache_dir) if not file_response: print("%s translation not found for %s" % (item.locale, file_name)) continue print("Processing %s translation for %s" % (item.locale, file_name)) (fd, work_file) = tempfile.mkstemp() try: with open(work_file, 'w') as f: f.write(file_response) finally: os.close(fd) for filter_cmd in conf.get('filters', []): (fd, tmp_file) = tempfile.mkstemp() try : filter_cmd = filter_cmd.replace("{input_file}", work_file) filter_cmd = filter_cmd.replace("{output_file}", tmp_file) filter_cmd = filter_cmd.replace("{locale}", locale) filter_cmd = filter_cmd.replace("{locale_underscore}", locale_underscore) filter_cmd = filter_cmd.replace("{locale_android_res}", locale_android_res) filter_cmd = filter_cmd.replace("{language}", language) filter_cmd = filter_cmd.replace("{region}", region) print(" running filter: %s " % filter_cmd) if os.system(filter_cmd) != 0: raise SmarterlingError("Non 0 exit code from filter: %s" % filter_cmd) shutil.move(tmp_file, work_file) finally: os.close(fd) if conf.has_key('save-cmd'): save_command = conf.get('save-cmd') save_command = save_command.replace("{input_file}", work_file) save_command = save_command.replace("{locale}", locale) save_command = save_command.replace("{locale_underscore}", locale_underscore) save_command = save_command.replace("{locale_android_res}", locale_android_res) save_command = save_command.replace("{language}", language) save_command = save_command.replace("{region}", region) print(" running save command: %s " % save_command) if os.system(save_command) != 0: raise SmarterlingError("Non 0 exit code from save command: %s" % save_command) elif conf.has_key('save-pattern'): save_file = conf.get('save-pattern') save_file = save_file.replace("{locale}", locale) save_file = save_file.replace("{locale_underscore}", locale_underscore) save_file = save_file.replace("{locale_android_res}", locale_android_res) save_file = save_file.replace("{language}", language) save_file = save_file.replace("{region}", region) save_dir = os.path.dirname(save_file) if not os.path.exists(save_dir): os.makedirs(save_dir) elif not os.path.isdir(save_dir): raise SmarterlingError("Expected %s to be a directory, but it's an existing file" % save_dir) print(" saving output to: %s " % save_file) shutil.move(work_file, save_file) else: raise SmarterlingError("no save-cmd or save-pattern for: %s" % file_name)
[ "def", "download_files", "(", "fapi", ",", "file_name", ",", "conf", ",", "use_cache", ",", "cache_dir", "=", "None", ")", ":", "retrieval_type", "=", "conf", ".", "get", "(", "'retrieval-type'", ",", "'published'", ")", "include_original_strings", "=", "'true'", "if", "conf", ".", "get", "(", "'include-original-strings'", ",", "False", ")", "else", "'false'", "save_pattern", "=", "conf", ".", "get", "(", "'save-pattern'", ")", "if", "not", "save_pattern", ":", "raise", "SmarterlingError", "(", "\"File %s doesn't have a save-pattern\"", "%", "file_name", ")", "if", "cache_dir", "and", "not", "os", ".", "path", ".", "exists", "(", "cache_dir", ")", ":", "print", "(", "\"Creating cache dir: %s\"", "%", "(", "cache_dir", ",", ")", ")", "os", ".", "makedirs", "(", "cache_dir", ")", "uri", "=", "file_uri", "(", "file_name", ",", "conf", ")", "translated_items", "=", "get_translated_items", "(", "fapi", ",", "uri", ",", "use_cache", ",", "cache_dir", "=", "cache_dir", ")", "for", "item", "in", "translated_items", ":", "item", "=", "AttributeDict", "(", "item", ")", "locale", "=", "item", ".", "locale", "locale_underscore", "=", "locale", ".", "replace", "(", "\"-\"", ",", "\"_\"", ")", "locale_android_res", "=", "locale", ".", "replace", "(", "\"-\"", ",", "\"-r\"", ")", "locale_parts", "=", "locale", ".", "split", "(", "\"-\"", ")", "language", "=", "locale_parts", "[", "0", "]", "region", "=", "locale_parts", "[", "1", "]", "if", "len", "(", "locale_parts", ")", ">", "1", "else", "\"\"", "file_response", "=", "get_translated_file", "(", "fapi", ",", "file_uri", "(", "file_name", ",", "conf", ")", ",", "locale", ",", "retrieval_type", ",", "include_original_strings", ",", "use_cache", ",", "cache_dir", "=", "cache_dir", ")", "if", "not", "file_response", ":", "print", "(", "\"%s translation not found for %s\"", "%", "(", "item", ".", "locale", ",", "file_name", ")", ")", "continue", "print", "(", "\"Processing %s translation for %s\"", "%", "(", "item", ".", "locale", ",", "file_name", ")", ")", "(", "fd", ",", "work_file", ")", "=", "tempfile", ".", "mkstemp", "(", ")", "try", ":", "with", "open", "(", "work_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "file_response", ")", "finally", ":", "os", ".", "close", "(", "fd", ")", "for", "filter_cmd", "in", "conf", ".", "get", "(", "'filters'", ",", "[", "]", ")", ":", "(", "fd", ",", "tmp_file", ")", "=", "tempfile", ".", "mkstemp", "(", ")", "try", ":", "filter_cmd", "=", "filter_cmd", ".", "replace", "(", "\"{input_file}\"", ",", "work_file", ")", "filter_cmd", "=", "filter_cmd", ".", "replace", "(", "\"{output_file}\"", ",", "tmp_file", ")", "filter_cmd", "=", "filter_cmd", ".", "replace", "(", "\"{locale}\"", ",", "locale", ")", "filter_cmd", "=", "filter_cmd", ".", "replace", "(", "\"{locale_underscore}\"", ",", "locale_underscore", ")", "filter_cmd", "=", "filter_cmd", ".", "replace", "(", "\"{locale_android_res}\"", ",", "locale_android_res", ")", "filter_cmd", "=", "filter_cmd", ".", "replace", "(", "\"{language}\"", ",", "language", ")", "filter_cmd", "=", "filter_cmd", ".", "replace", "(", "\"{region}\"", ",", "region", ")", "print", "(", "\" running filter: %s \"", "%", "filter_cmd", ")", "if", "os", ".", "system", "(", "filter_cmd", ")", "!=", "0", ":", "raise", "SmarterlingError", "(", "\"Non 0 exit code from filter: %s\"", "%", "filter_cmd", ")", "shutil", ".", "move", "(", "tmp_file", ",", "work_file", ")", "finally", ":", "os", ".", "close", "(", "fd", ")", "if", "conf", ".", "has_key", "(", "'save-cmd'", ")", 
":", "save_command", "=", "conf", ".", "get", "(", "'save-cmd'", ")", "save_command", "=", "save_command", ".", "replace", "(", "\"{input_file}\"", ",", "work_file", ")", "save_command", "=", "save_command", ".", "replace", "(", "\"{locale}\"", ",", "locale", ")", "save_command", "=", "save_command", ".", "replace", "(", "\"{locale_underscore}\"", ",", "locale_underscore", ")", "save_command", "=", "save_command", ".", "replace", "(", "\"{locale_android_res}\"", ",", "locale_android_res", ")", "save_command", "=", "save_command", ".", "replace", "(", "\"{language}\"", ",", "language", ")", "save_command", "=", "save_command", ".", "replace", "(", "\"{region}\"", ",", "region", ")", "print", "(", "\" running save command: %s \"", "%", "save_command", ")", "if", "os", ".", "system", "(", "save_command", ")", "!=", "0", ":", "raise", "SmarterlingError", "(", "\"Non 0 exit code from save command: %s\"", "%", "save_command", ")", "elif", "conf", ".", "has_key", "(", "'save-pattern'", ")", ":", "save_file", "=", "conf", ".", "get", "(", "'save-pattern'", ")", "save_file", "=", "save_file", ".", "replace", "(", "\"{locale}\"", ",", "locale", ")", "save_file", "=", "save_file", ".", "replace", "(", "\"{locale_underscore}\"", ",", "locale_underscore", ")", "save_file", "=", "save_file", ".", "replace", "(", "\"{locale_android_res}\"", ",", "locale_android_res", ")", "save_file", "=", "save_file", ".", "replace", "(", "\"{language}\"", ",", "language", ")", "save_file", "=", "save_file", ".", "replace", "(", "\"{region}\"", ",", "region", ")", "save_dir", "=", "os", ".", "path", ".", "dirname", "(", "save_file", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "save_dir", ")", ":", "os", ".", "makedirs", "(", "save_dir", ")", "elif", "not", "os", ".", "path", ".", "isdir", "(", "save_dir", ")", ":", "raise", "SmarterlingError", "(", "\"Expected %s to be a directory, but it's an existing file\"", "%", "save_dir", ")", "print", "(", "\" saving output to: %s \"", "%", "save_file", ")", "shutil", ".", "move", "(", "work_file", ",", "save_file", ")", "else", ":", "raise", "SmarterlingError", "(", "\"no save-cmd or save-pattern for: %s\"", "%", "file_name", ")" ]
48.715909
0.006401
def insert(self, start_time: int, schedule: ScheduleComponent) -> 'ScheduleComponent': """Return a new schedule with `schedule` inserted within `self` at `start_time`. Args: start_time: time to be inserted schedule: schedule to be inserted """ return ops.insert(self, start_time, schedule)
[ "def", "insert", "(", "self", ",", "start_time", ":", "int", ",", "schedule", ":", "ScheduleComponent", ")", "->", "'ScheduleComponent'", ":", "return", "ops", ".", "insert", "(", "self", ",", "start_time", ",", "schedule", ")" ]
42.375
0.011561
def _gather_topk_beams(nested, score_or_log_prob, batch_size, beam_size): """Gather top beams from nested structure.""" _, topk_indexes = tf.nn.top_k(score_or_log_prob, k=beam_size) return _gather_beams(nested, topk_indexes, batch_size, beam_size)
[ "def", "_gather_topk_beams", "(", "nested", ",", "score_or_log_prob", ",", "batch_size", ",", "beam_size", ")", ":", "_", ",", "topk_indexes", "=", "tf", ".", "nn", ".", "top_k", "(", "score_or_log_prob", ",", "k", "=", "beam_size", ")", "return", "_gather_beams", "(", "nested", ",", "topk_indexes", ",", "batch_size", ",", "beam_size", ")" ]
62.5
0.01581
def slice_plot(netin, ax, nodelabels=None, timelabels=None, communities=None, plotedgeweights=False, edgeweightscalar=1, timeunit='', linestyle='k-', cmap=None, nodesize=100, nodekwargs=None, edgekwargs=None): r''' Fuction draws "slice graph" and exports axis handles Parameters ---------- netin : array, dict temporal network input (graphlet or contact) ax : matplotlib figure handles. nodelabels : list nodes labels. List of strings. timelabels : list labels of dimension Graph is expressed across. List of strings. communities : array array of size: (time) or (node,time). Nodes will be coloured accordingly. plotedgeweights : bool if True, edges will vary in size (default False) edgeweightscalar : int scalar to multiply all edges if tweaking is needed. timeunit : string unit time axis is in. linestyle : string line style of Bezier curves. nodesize : int size of nodes nodekwargs : dict any additional kwargs for matplotlib.plt.scatter for the nodes edgekwargs : dict any additional kwargs for matplotlib.plt.plots for the edges Returns --------- ax : axis handle of slice graph Examples --------- Create a network with some metadata >>> import numpy as np >>> import teneto >>> import matplotlib.pyplot as plt >>> np.random.seed(2017) # For reproduceability >>> N = 5 # Number of nodes >>> T = 10 # Number of timepoints >>> # Probability of edge activation >>> birth_rate = 0.2 >>> death_rate = .9 >>> # Add node names into the network and say time units are years, go 1 year per graphlet and startyear is 2007 >>> cfg={} >>> cfg['Fs'] = 1 >>> cfg['timeunit'] = 'Years' >>> cfg['t0'] = 2007 #First year in network >>> cfg['nodelabels'] = ['Ashley','Blake','Casey','Dylan','Elliot'] # Node names >>> #Generate network >>> C = teneto.generatenetwork.rand_binomial([N,T],[birth_rate, death_rate],'contact','bu',netinfo=cfg) Now this network can be plotted >>> fig,ax = plt.subplots(figsize=(10,3)) >>> ax = teneto.plot.slice_plot(C, ax, cmap='Pastel2') >>> plt.tight_layout() >>> fig.show() .. 
plot:: import numpy as np import teneto import matplotlib.pyplot as plt np.random.seed(2017) # For reproduceability N = 5 # Number of nodes T = 10 # Number of timepoints # Probability of edge activation birth_rate = 0.2 death_rate = .9 # Add node names into the network and say time units are years, go 1 year per graphlet and startyear is 2007 cfg={} cfg['Fs'] = 1 cfg['timeunit'] = 'Years' cfg['t0'] = 2007 #First year in network cfg['nodelabels'] = ['Ashley','Blake','Casey','Dylan','Elliot'] #Generate network C = teneto.generatenetwork.rand_binomial([N,T],[birth_rate, death_rate],'contact','bu',netinfo=cfg) fig,ax = plt.subplots(figsize=(10,3)) cmap = 'Pastel2' ax = teneto.plot.slice_plot(C,ax,cmap=cmap) plt.tight_layout() fig.show() ''' # Get input type (C or G) inputType = checkInput(netin) # Convert C representation to G if inputType == 'G': netin = graphlet2contact(netin) inputType = 'C' edgelist = [tuple(np.array(e[0:2]) + e[2] * netin['netshape'][0]) for e in netin['contacts']] if nodelabels is not None and len(nodelabels) == netin['netshape'][0]: pass elif nodelabels is not None and len(nodelabels) != netin['netshape'][0]: raise ValueError('specified node label length does not match netshape') elif nodelabels is None and netin['nodelabels'] == '': nodelabels = np.arange(1, netin['netshape'][0] + 1) else: nodelabels = netin['nodelabels'] if timelabels is not None and len(timelabels) == netin['netshape'][-1]: pass elif timelabels is not None and len(timelabels) != netin['netshape'][-1]: raise ValueError('specified time label length does not match netshape') elif timelabels is None and str(netin['t0']) == '': timelabels = np.arange(1, netin['netshape'][-1] + 1) else: timelabels = np.arange(netin['t0'], netin['Fs'] * netin['netshape'][-1] + netin['t0'], netin['Fs']) if timeunit is None: timeunit = netin['timeunit'] timeNum = len(timelabels) nodeNum = len(nodelabels) posy = np.tile(list(range(0, nodeNum)), timeNum) posx = np.repeat(list(range(0, timeNum)), nodeNum) if nodekwargs is None: nodekwargs = {} if edgekwargs is None: edgekwargs = {} if cmap: nodekwargs['cmap'] = cmap if 'c' not in nodekwargs: nodekwargs['c'] = posy if communities is not None: # check if temporal or static if len(communities.shape) == 1: nodekwargs['c'] = np.tile(communities, timeNum) else: nodekwargs['c'] = communities.flatten(order='F') # plt.plot(points) # Draw Bezier vectors around egde positions for ei, edge in enumerate(edgelist): if plotedgeweights == True and netin['nettype'][0] == 'w': edgekwargs['linewidth'] = netin['values'][ei] * edgeweightscalar bvx, bvy = bezier_points( (posx[edge[0]], posy[edge[0]]), (posx[edge[1]], posy[edge[1]]), nodeNum, 20) ax.plot(bvx, bvy, linestyle, **edgekwargs) ax.set_yticks(range(0, len(nodelabels))) ax.set_xticks(range(0, len(timelabels))) ax.set_yticklabels(nodelabels) ax.set_xticklabels(timelabels) ax.grid() ax.set_frame_on(False) ax.spines['top'].set_visible(False) ax.spines['right'].set_visible(False) ax.get_xaxis().tick_bottom() ax.get_yaxis().tick_left() ax.set_xlim([min(posx) - 1, max(posx) + 1]) ax.set_ylim([min(posy) - 1, max(posy) + 1]) ax.scatter(posx, posy, s=nodesize, zorder=10, **nodekwargs) if timeunit != '': timeunit = ' (' + timeunit + ')' ax.set_xlabel('Time' + timeunit) return ax
[ "def", "slice_plot", "(", "netin", ",", "ax", ",", "nodelabels", "=", "None", ",", "timelabels", "=", "None", ",", "communities", "=", "None", ",", "plotedgeweights", "=", "False", ",", "edgeweightscalar", "=", "1", ",", "timeunit", "=", "''", ",", "linestyle", "=", "'k-'", ",", "cmap", "=", "None", ",", "nodesize", "=", "100", ",", "nodekwargs", "=", "None", ",", "edgekwargs", "=", "None", ")", ":", "# Get input type (C or G)", "inputType", "=", "checkInput", "(", "netin", ")", "# Convert C representation to G", "if", "inputType", "==", "'G'", ":", "netin", "=", "graphlet2contact", "(", "netin", ")", "inputType", "=", "'C'", "edgelist", "=", "[", "tuple", "(", "np", ".", "array", "(", "e", "[", "0", ":", "2", "]", ")", "+", "e", "[", "2", "]", "*", "netin", "[", "'netshape'", "]", "[", "0", "]", ")", "for", "e", "in", "netin", "[", "'contacts'", "]", "]", "if", "nodelabels", "is", "not", "None", "and", "len", "(", "nodelabels", ")", "==", "netin", "[", "'netshape'", "]", "[", "0", "]", ":", "pass", "elif", "nodelabels", "is", "not", "None", "and", "len", "(", "nodelabels", ")", "!=", "netin", "[", "'netshape'", "]", "[", "0", "]", ":", "raise", "ValueError", "(", "'specified node label length does not match netshape'", ")", "elif", "nodelabels", "is", "None", "and", "netin", "[", "'nodelabels'", "]", "==", "''", ":", "nodelabels", "=", "np", ".", "arange", "(", "1", ",", "netin", "[", "'netshape'", "]", "[", "0", "]", "+", "1", ")", "else", ":", "nodelabels", "=", "netin", "[", "'nodelabels'", "]", "if", "timelabels", "is", "not", "None", "and", "len", "(", "timelabels", ")", "==", "netin", "[", "'netshape'", "]", "[", "-", "1", "]", ":", "pass", "elif", "timelabels", "is", "not", "None", "and", "len", "(", "timelabels", ")", "!=", "netin", "[", "'netshape'", "]", "[", "-", "1", "]", ":", "raise", "ValueError", "(", "'specified time label length does not match netshape'", ")", "elif", "timelabels", "is", "None", "and", "str", "(", "netin", "[", "'t0'", "]", ")", "==", "''", ":", "timelabels", "=", "np", ".", "arange", "(", "1", ",", "netin", "[", "'netshape'", "]", "[", "-", "1", "]", "+", "1", ")", "else", ":", "timelabels", "=", "np", ".", "arange", "(", "netin", "[", "'t0'", "]", ",", "netin", "[", "'Fs'", "]", "*", "netin", "[", "'netshape'", "]", "[", "-", "1", "]", "+", "netin", "[", "'t0'", "]", ",", "netin", "[", "'Fs'", "]", ")", "if", "timeunit", "is", "None", ":", "timeunit", "=", "netin", "[", "'timeunit'", "]", "timeNum", "=", "len", "(", "timelabels", ")", "nodeNum", "=", "len", "(", "nodelabels", ")", "posy", "=", "np", ".", "tile", "(", "list", "(", "range", "(", "0", ",", "nodeNum", ")", ")", ",", "timeNum", ")", "posx", "=", "np", ".", "repeat", "(", "list", "(", "range", "(", "0", ",", "timeNum", ")", ")", ",", "nodeNum", ")", "if", "nodekwargs", "is", "None", ":", "nodekwargs", "=", "{", "}", "if", "edgekwargs", "is", "None", ":", "edgekwargs", "=", "{", "}", "if", "cmap", ":", "nodekwargs", "[", "'cmap'", "]", "=", "cmap", "if", "'c'", "not", "in", "nodekwargs", ":", "nodekwargs", "[", "'c'", "]", "=", "posy", "if", "communities", "is", "not", "None", ":", "# check if temporal or static", "if", "len", "(", "communities", ".", "shape", ")", "==", "1", ":", "nodekwargs", "[", "'c'", "]", "=", "np", ".", "tile", "(", "communities", ",", "timeNum", ")", "else", ":", "nodekwargs", "[", "'c'", "]", "=", "communities", ".", "flatten", "(", "order", "=", "'F'", ")", "# plt.plot(points)", "# Draw Bezier vectors around egde positions", "for", "ei", ",", "edge", "in", 
"enumerate", "(", "edgelist", ")", ":", "if", "plotedgeweights", "==", "True", "and", "netin", "[", "'nettype'", "]", "[", "0", "]", "==", "'w'", ":", "edgekwargs", "[", "'linewidth'", "]", "=", "netin", "[", "'values'", "]", "[", "ei", "]", "*", "edgeweightscalar", "bvx", ",", "bvy", "=", "bezier_points", "(", "(", "posx", "[", "edge", "[", "0", "]", "]", ",", "posy", "[", "edge", "[", "0", "]", "]", ")", ",", "(", "posx", "[", "edge", "[", "1", "]", "]", ",", "posy", "[", "edge", "[", "1", "]", "]", ")", ",", "nodeNum", ",", "20", ")", "ax", ".", "plot", "(", "bvx", ",", "bvy", ",", "linestyle", ",", "*", "*", "edgekwargs", ")", "ax", ".", "set_yticks", "(", "range", "(", "0", ",", "len", "(", "nodelabels", ")", ")", ")", "ax", ".", "set_xticks", "(", "range", "(", "0", ",", "len", "(", "timelabels", ")", ")", ")", "ax", ".", "set_yticklabels", "(", "nodelabels", ")", "ax", ".", "set_xticklabels", "(", "timelabels", ")", "ax", ".", "grid", "(", ")", "ax", ".", "set_frame_on", "(", "False", ")", "ax", ".", "spines", "[", "'top'", "]", ".", "set_visible", "(", "False", ")", "ax", ".", "spines", "[", "'right'", "]", ".", "set_visible", "(", "False", ")", "ax", ".", "get_xaxis", "(", ")", ".", "tick_bottom", "(", ")", "ax", ".", "get_yaxis", "(", ")", ".", "tick_left", "(", ")", "ax", ".", "set_xlim", "(", "[", "min", "(", "posx", ")", "-", "1", ",", "max", "(", "posx", ")", "+", "1", "]", ")", "ax", ".", "set_ylim", "(", "[", "min", "(", "posy", ")", "-", "1", ",", "max", "(", "posy", ")", "+", "1", "]", ")", "ax", ".", "scatter", "(", "posx", ",", "posy", ",", "s", "=", "nodesize", ",", "zorder", "=", "10", ",", "*", "*", "nodekwargs", ")", "if", "timeunit", "!=", "''", ":", "timeunit", "=", "' ('", "+", "timeunit", "+", "')'", "ax", ".", "set_xlabel", "(", "'Time'", "+", "timeunit", ")", "return", "ax" ]
34.159091
0.00194
def PreparePairedSequenceBatch(source, target_in, pad=0): """Build masks for this batch. Args: source: (batch, source_len) array of integer-coded symbols for inputs target_in: (batch, batch_len) array of integer-coded symbols for targets pad: int: the padding symbol used to pad the above Returns: Prepared batch of tuple of arrays: source, input-target, shifted-target, source mask, target mask, source-target "memory" mask, minibatch token count """ target = target_in[:, :-1] target_y = target_in[:, 1:] source_mask = np.reshape(source != pad, (source.shape[0], 1, 1, source.shape[-1])) target_mask = MakeTargetMask(target, pad) memory_mask = ( np.reshape(np.arange(target.shape[-1]) < source.shape[-1], [-1, 1])) ntokens = np.sum(target_y != pad) return (source, target, target_y, source_mask, target_mask, memory_mask, ntokens)
[ "def", "PreparePairedSequenceBatch", "(", "source", ",", "target_in", ",", "pad", "=", "0", ")", ":", "target", "=", "target_in", "[", ":", ",", ":", "-", "1", "]", "target_y", "=", "target_in", "[", ":", ",", "1", ":", "]", "source_mask", "=", "np", ".", "reshape", "(", "source", "!=", "pad", ",", "(", "source", ".", "shape", "[", "0", "]", ",", "1", ",", "1", ",", "source", ".", "shape", "[", "-", "1", "]", ")", ")", "target_mask", "=", "MakeTargetMask", "(", "target", ",", "pad", ")", "memory_mask", "=", "(", "np", ".", "reshape", "(", "np", ".", "arange", "(", "target", ".", "shape", "[", "-", "1", "]", ")", "<", "source", ".", "shape", "[", "-", "1", "]", ",", "[", "-", "1", ",", "1", "]", ")", ")", "ntokens", "=", "np", ".", "sum", "(", "target_y", "!=", "pad", ")", "return", "(", "source", ",", "target", ",", "target_y", ",", "source_mask", ",", "target_mask", ",", "memory_mask", ",", "ntokens", ")" ]
40.818182
0.010881
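A small NumPy illustration of the quantities built by PreparePairedSequenceBatch above, using a made-up padded batch; MakeTargetMask belongs to the original module and is not reproduced here, so only the source mask and token count are shown.

import numpy as np

pad = 0
source = np.array([[5, 7, 2, 0], [3, 0, 0, 0]])      # (batch, source_len)
target_in = np.array([[9, 4, 1, 0], [6, 2, 0, 0]])   # (batch, target_len)

# Shifted targets: the decoder consumes `target` and must predict `target_y`.
target = target_in[:, :-1]
target_y = target_in[:, 1:]

# Source mask broadcastable over attention heads and query positions,
# exactly as in the entry above.
source_mask = np.reshape(source != pad, (source.shape[0], 1, 1, source.shape[-1]))
print(source_mask.shape)             # (2, 1, 1, 4)
print(int(np.sum(target_y != pad)))  # 3 non-padding target tokens to score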
def remove(name=None, index=None): """ remove the specified configuration """ removed = False count = 1 for configuration in _CONFIG.sections(): if index != None: if count == index: _CONFIG.remove_section(configuration) removed = True break if name != None: if configuration == name: _CONFIG.remove_section(configuration) removed = True break count += 1 if not removed: raise JutException('Unable to find %s configuration' % name) with open(_CONFIG_FILEPATH, 'w') as configfile: _CONFIG.write(configfile)
[ "def", "remove", "(", "name", "=", "None", ",", "index", "=", "None", ")", ":", "removed", "=", "False", "count", "=", "1", "for", "configuration", "in", "_CONFIG", ".", "sections", "(", ")", ":", "if", "index", "!=", "None", ":", "if", "count", "==", "index", ":", "_CONFIG", ".", "remove_section", "(", "configuration", ")", "removed", "=", "True", "break", "if", "name", "!=", "None", ":", "if", "configuration", "==", "name", ":", "_CONFIG", ".", "remove_section", "(", "configuration", ")", "removed", "=", "True", "break", "count", "+=", "1", "if", "not", "removed", ":", "raise", "JutException", "(", "'Unable to find %s configuration'", "%", "name", ")", "with", "open", "(", "_CONFIG_FILEPATH", ",", "'w'", ")", "as", "configfile", ":", "_CONFIG", ".", "write", "(", "configfile", ")" ]
24.107143
0.004274
def _dataset_concat(datasets, dim, data_vars, coords, compat, positions): """ Concatenate a sequence of datasets along a new or existing dimension """ from .dataset import Dataset if compat not in ['equals', 'identical']: raise ValueError("compat=%r invalid: must be 'equals' " "or 'identical'" % compat) dim, coord = _calc_concat_dim_coord(dim) # Make sure we're working on a copy (we'll be loading variables) datasets = [ds.copy() for ds in datasets] datasets = align(*datasets, join='outer', copy=False, exclude=[dim]) concat_over, equals = _calc_concat_over(datasets, dim, data_vars, coords) def insert_result_variable(k, v): assert isinstance(v, Variable) if k in datasets[0].coords: result_coord_names.add(k) result_vars[k] = v # create the new dataset and add constant variables result_vars = OrderedDict() result_coord_names = set(datasets[0].coords) result_attrs = datasets[0].attrs result_encoding = datasets[0].encoding for k, v in datasets[0].variables.items(): if k not in concat_over: insert_result_variable(k, v) # check that global attributes and non-concatenated variables are fixed # across all datasets for ds in datasets[1:]: if (compat == 'identical' and not utils.dict_equiv(ds.attrs, result_attrs)): raise ValueError('dataset global attributes not equal') for k, v in ds.variables.items(): if k not in result_vars and k not in concat_over: raise ValueError('encountered unexpected variable %r' % k) elif (k in result_coord_names) != (k in ds.coords): raise ValueError('%r is a coordinate in some datasets but not ' 'others' % k) elif k in result_vars and k != dim: # Don't use Variable.identical as it internally invokes # Variable.equals, and we may already know the answer if compat == 'identical' and not utils.dict_equiv( v.attrs, result_vars[k].attrs): raise ValueError( 'variable %s not identical across datasets' % k) # Proceed with equals() try: # May be populated when using the "different" method is_equal = equals[k] except KeyError: result_vars[k].load() is_equal = v.equals(result_vars[k]) if not is_equal: raise ValueError( 'variable %s not equal across datasets' % k) # we've already verified everything is consistent; now, calculate # shared dimension sizes so we can expand the necessary variables dim_lengths = [ds.dims.get(dim, 1) for ds in datasets] non_concat_dims = {} for ds in datasets: non_concat_dims.update(ds.dims) non_concat_dims.pop(dim, None) def ensure_common_dims(vars): # ensure each variable with the given name shares the same # dimensions and the same shape for all of them except along the # concat dimension common_dims = tuple(pd.unique([d for v in vars for d in v.dims])) if dim not in common_dims: common_dims = (dim,) + common_dims for var, dim_len in zip(vars, dim_lengths): if var.dims != common_dims: common_shape = tuple(non_concat_dims.get(d, dim_len) for d in common_dims) var = var.set_dims(common_dims, common_shape) yield var # stack up each variable to fill-out the dataset (in order) for k in datasets[0].variables: if k in concat_over: vars = ensure_common_dims([ds.variables[k] for ds in datasets]) combined = concat_vars(vars, dim, positions) insert_result_variable(k, combined) result = Dataset(result_vars, attrs=result_attrs) result = result.set_coords(result_coord_names) result.encoding = result_encoding if coord is not None: # add concat dimension last to ensure that its in the final Dataset result[coord.name] = coord return result
[ "def", "_dataset_concat", "(", "datasets", ",", "dim", ",", "data_vars", ",", "coords", ",", "compat", ",", "positions", ")", ":", "from", ".", "dataset", "import", "Dataset", "if", "compat", "not", "in", "[", "'equals'", ",", "'identical'", "]", ":", "raise", "ValueError", "(", "\"compat=%r invalid: must be 'equals' \"", "\"or 'identical'\"", "%", "compat", ")", "dim", ",", "coord", "=", "_calc_concat_dim_coord", "(", "dim", ")", "# Make sure we're working on a copy (we'll be loading variables)", "datasets", "=", "[", "ds", ".", "copy", "(", ")", "for", "ds", "in", "datasets", "]", "datasets", "=", "align", "(", "*", "datasets", ",", "join", "=", "'outer'", ",", "copy", "=", "False", ",", "exclude", "=", "[", "dim", "]", ")", "concat_over", ",", "equals", "=", "_calc_concat_over", "(", "datasets", ",", "dim", ",", "data_vars", ",", "coords", ")", "def", "insert_result_variable", "(", "k", ",", "v", ")", ":", "assert", "isinstance", "(", "v", ",", "Variable", ")", "if", "k", "in", "datasets", "[", "0", "]", ".", "coords", ":", "result_coord_names", ".", "add", "(", "k", ")", "result_vars", "[", "k", "]", "=", "v", "# create the new dataset and add constant variables", "result_vars", "=", "OrderedDict", "(", ")", "result_coord_names", "=", "set", "(", "datasets", "[", "0", "]", ".", "coords", ")", "result_attrs", "=", "datasets", "[", "0", "]", ".", "attrs", "result_encoding", "=", "datasets", "[", "0", "]", ".", "encoding", "for", "k", ",", "v", "in", "datasets", "[", "0", "]", ".", "variables", ".", "items", "(", ")", ":", "if", "k", "not", "in", "concat_over", ":", "insert_result_variable", "(", "k", ",", "v", ")", "# check that global attributes and non-concatenated variables are fixed", "# across all datasets", "for", "ds", "in", "datasets", "[", "1", ":", "]", ":", "if", "(", "compat", "==", "'identical'", "and", "not", "utils", ".", "dict_equiv", "(", "ds", ".", "attrs", ",", "result_attrs", ")", ")", ":", "raise", "ValueError", "(", "'dataset global attributes not equal'", ")", "for", "k", ",", "v", "in", "ds", ".", "variables", ".", "items", "(", ")", ":", "if", "k", "not", "in", "result_vars", "and", "k", "not", "in", "concat_over", ":", "raise", "ValueError", "(", "'encountered unexpected variable %r'", "%", "k", ")", "elif", "(", "k", "in", "result_coord_names", ")", "!=", "(", "k", "in", "ds", ".", "coords", ")", ":", "raise", "ValueError", "(", "'%r is a coordinate in some datasets but not '", "'others'", "%", "k", ")", "elif", "k", "in", "result_vars", "and", "k", "!=", "dim", ":", "# Don't use Variable.identical as it internally invokes", "# Variable.equals, and we may already know the answer", "if", "compat", "==", "'identical'", "and", "not", "utils", ".", "dict_equiv", "(", "v", ".", "attrs", ",", "result_vars", "[", "k", "]", ".", "attrs", ")", ":", "raise", "ValueError", "(", "'variable %s not identical across datasets'", "%", "k", ")", "# Proceed with equals()", "try", ":", "# May be populated when using the \"different\" method", "is_equal", "=", "equals", "[", "k", "]", "except", "KeyError", ":", "result_vars", "[", "k", "]", ".", "load", "(", ")", "is_equal", "=", "v", ".", "equals", "(", "result_vars", "[", "k", "]", ")", "if", "not", "is_equal", ":", "raise", "ValueError", "(", "'variable %s not equal across datasets'", "%", "k", ")", "# we've already verified everything is consistent; now, calculate", "# shared dimension sizes so we can expand the necessary variables", "dim_lengths", "=", "[", "ds", ".", "dims", ".", "get", "(", "dim", ",", "1", ")", "for", 
"ds", "in", "datasets", "]", "non_concat_dims", "=", "{", "}", "for", "ds", "in", "datasets", ":", "non_concat_dims", ".", "update", "(", "ds", ".", "dims", ")", "non_concat_dims", ".", "pop", "(", "dim", ",", "None", ")", "def", "ensure_common_dims", "(", "vars", ")", ":", "# ensure each variable with the given name shares the same", "# dimensions and the same shape for all of them except along the", "# concat dimension", "common_dims", "=", "tuple", "(", "pd", ".", "unique", "(", "[", "d", "for", "v", "in", "vars", "for", "d", "in", "v", ".", "dims", "]", ")", ")", "if", "dim", "not", "in", "common_dims", ":", "common_dims", "=", "(", "dim", ",", ")", "+", "common_dims", "for", "var", ",", "dim_len", "in", "zip", "(", "vars", ",", "dim_lengths", ")", ":", "if", "var", ".", "dims", "!=", "common_dims", ":", "common_shape", "=", "tuple", "(", "non_concat_dims", ".", "get", "(", "d", ",", "dim_len", ")", "for", "d", "in", "common_dims", ")", "var", "=", "var", ".", "set_dims", "(", "common_dims", ",", "common_shape", ")", "yield", "var", "# stack up each variable to fill-out the dataset (in order)", "for", "k", "in", "datasets", "[", "0", "]", ".", "variables", ":", "if", "k", "in", "concat_over", ":", "vars", "=", "ensure_common_dims", "(", "[", "ds", ".", "variables", "[", "k", "]", "for", "ds", "in", "datasets", "]", ")", "combined", "=", "concat_vars", "(", "vars", ",", "dim", ",", "positions", ")", "insert_result_variable", "(", "k", ",", "combined", ")", "result", "=", "Dataset", "(", "result_vars", ",", "attrs", "=", "result_attrs", ")", "result", "=", "result", ".", "set_coords", "(", "result_coord_names", ")", "result", ".", "encoding", "=", "result_encoding", "if", "coord", "is", "not", "None", ":", "# add concat dimension last to ensure that its in the final Dataset", "result", "[", "coord", ".", "name", "]", "=", "coord", "return", "result" ]
41.45098
0.000231
def begin_subsegment(self, name, namespace='local'): """ Begin a new subsegment. If there is an open subsegment, the newly created subsegment will be the child of the latest opened subsegment. If not, it will be the child of the current open segment. :param str name: the name of the subsegment. :param str namespace: currently can only be 'local', 'remote', 'aws'. """ segment = self.current_segment() if not segment: log.warning("No segment found, cannot begin subsegment %s." % name) return None if not segment.sampled: subsegment = DummySubsegment(segment, name) else: subsegment = Subsegment(name, namespace, segment) self.context.put_subsegment(subsegment) return subsegment
[ "def", "begin_subsegment", "(", "self", ",", "name", ",", "namespace", "=", "'local'", ")", ":", "segment", "=", "self", ".", "current_segment", "(", ")", "if", "not", "segment", ":", "log", ".", "warning", "(", "\"No segment found, cannot begin subsegment %s.\"", "%", "name", ")", "return", "None", "if", "not", "segment", ".", "sampled", ":", "subsegment", "=", "DummySubsegment", "(", "segment", ",", "name", ")", "else", ":", "subsegment", "=", "Subsegment", "(", "name", ",", "namespace", ",", "segment", ")", "self", ".", "context", ".", "put_subsegment", "(", "subsegment", ")", "return", "subsegment" ]
33.958333
0.002387
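A hedged usage sketch around begin_subsegment above via the aws_xray_sdk recorder; it assumes the package is installed and that a running X-Ray daemon is available to receive the emitted segments (otherwise the calls still execute locally but nothing is reported).

from aws_xray_sdk.core import xray_recorder

xray_recorder.begin_segment('checkout')
subsegment = xray_recorder.begin_subsegment('charge-card')
try:
    pass  # call the downstream service here
finally:
    # Close the subsegment before the enclosing segment.
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()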
def relabel(self, mapping): """ Rename ConfusionMatrix classes. :param mapping: mapping dictionary :type mapping : dict :return: None """ if not isinstance(mapping, dict): raise pycmMatrixError(MAPPING_FORMAT_ERROR) if self.classes != list(mapping.keys()): raise pycmMatrixError(MAPPING_CLASS_NAME_ERROR) for row in self.classes: temp_dict = {} temp_dict_normalized = {} for col in self.classes: temp_dict[mapping[col]] = self.table[row][col] temp_dict_normalized[mapping[col] ] = self.normalized_table[row][col] del self.table[row] self.table[mapping[row]] = temp_dict del self.normalized_table[row] self.normalized_table[mapping[row]] = temp_dict_normalized self.matrix = self.table self.normalized_matrix = self.normalized_table for param in self.class_stat.keys(): temp_dict = {} for classname in self.classes: temp_dict[mapping[classname] ] = self.class_stat[param][classname] self.class_stat[param] = temp_dict self.classes = list(mapping.values()) self.TP = self.class_stat["TP"] self.TN = self.class_stat["TN"] self.FP = self.class_stat["FP"] self.FN = self.class_stat["FN"] __class_stat_init__(self)
[ "def", "relabel", "(", "self", ",", "mapping", ")", ":", "if", "not", "isinstance", "(", "mapping", ",", "dict", ")", ":", "raise", "pycmMatrixError", "(", "MAPPING_FORMAT_ERROR", ")", "if", "self", ".", "classes", "!=", "list", "(", "mapping", ".", "keys", "(", ")", ")", ":", "raise", "pycmMatrixError", "(", "MAPPING_CLASS_NAME_ERROR", ")", "for", "row", "in", "self", ".", "classes", ":", "temp_dict", "=", "{", "}", "temp_dict_normalized", "=", "{", "}", "for", "col", "in", "self", ".", "classes", ":", "temp_dict", "[", "mapping", "[", "col", "]", "]", "=", "self", ".", "table", "[", "row", "]", "[", "col", "]", "temp_dict_normalized", "[", "mapping", "[", "col", "]", "]", "=", "self", ".", "normalized_table", "[", "row", "]", "[", "col", "]", "del", "self", ".", "table", "[", "row", "]", "self", ".", "table", "[", "mapping", "[", "row", "]", "]", "=", "temp_dict", "del", "self", ".", "normalized_table", "[", "row", "]", "self", ".", "normalized_table", "[", "mapping", "[", "row", "]", "]", "=", "temp_dict_normalized", "self", ".", "matrix", "=", "self", ".", "table", "self", ".", "normalized_matrix", "=", "self", ".", "normalized_table", "for", "param", "in", "self", ".", "class_stat", ".", "keys", "(", ")", ":", "temp_dict", "=", "{", "}", "for", "classname", "in", "self", ".", "classes", ":", "temp_dict", "[", "mapping", "[", "classname", "]", "]", "=", "self", ".", "class_stat", "[", "param", "]", "[", "classname", "]", "self", ".", "class_stat", "[", "param", "]", "=", "temp_dict", "self", ".", "classes", "=", "list", "(", "mapping", ".", "values", "(", ")", ")", "self", ".", "TP", "=", "self", ".", "class_stat", "[", "\"TP\"", "]", "self", ".", "TN", "=", "self", ".", "class_stat", "[", "\"TN\"", "]", "self", ".", "FP", "=", "self", ".", "class_stat", "[", "\"FP\"", "]", "self", ".", "FN", "=", "self", ".", "class_stat", "[", "\"FN\"", "]", "__class_stat_init__", "(", "self", ")" ]
39.783784
0.001326
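A short usage sketch for the relabel method above, assuming the pycm package is installed; it renames integer class labels to readable names and shows that per-class statistics are re-keyed accordingly.

from pycm import ConfusionMatrix

cm = ConfusionMatrix(actual_vector=[0, 1, 1, 0, 1], predict_vector=[0, 1, 0, 0, 1])
cm.relabel(mapping={0: "negative", 1: "positive"})
print(cm.classes)           # ['negative', 'positive']
print(cm.class_stat["TP"])  # true positives keyed by the new class names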
def input_size(self): '''Size of layer input (for layers with one input).''' shape = self.input_shape if shape is None: raise util.ConfigurationError( 'undefined input size for layer "{}"'.format(self.name)) return shape[-1]
[ "def", "input_size", "(", "self", ")", ":", "shape", "=", "self", ".", "input_shape", "if", "shape", "is", "None", ":", "raise", "util", ".", "ConfigurationError", "(", "'undefined input size for layer \"{}\"'", ".", "format", "(", "self", ".", "name", ")", ")", "return", "shape", "[", "-", "1", "]" ]
39.714286
0.007042
def check_pending_target(self, scan_id, multiscan_proc): """ Check if a scan process is still alive. If the process has finished or been stopped, remove it from the multiscan process list. Dead processes with progress < 100% are considered stopped or failed, and in that case the other running (target) scans owned by the same task are stopped as well. @input scan_id Scan_id of the whole scan. @input multiscan_proc A list with the scan processes which may still be alive. @return Updated list of currently running scan processes. """ for running_target_proc, running_target_id in multiscan_proc: if not running_target_proc.is_alive(): target_prog = self.get_scan_target_progress( scan_id, running_target_id) if target_prog < 100: self.stop_scan(scan_id) running_target = (running_target_proc, running_target_id) multiscan_proc.remove(running_target) return multiscan_proc
[ "def", "check_pending_target", "(", "self", ",", "scan_id", ",", "multiscan_proc", ")", ":", "for", "running_target_proc", ",", "running_target_id", "in", "multiscan_proc", ":", "if", "not", "running_target_proc", ".", "is_alive", "(", ")", ":", "target_prog", "=", "self", ".", "get_scan_target_progress", "(", "scan_id", ",", "running_target_id", ")", "if", "target_prog", "<", "100", ":", "self", ".", "stop_scan", "(", "scan_id", ")", "running_target", "=", "(", "running_target_proc", ",", "running_target_id", ")", "multiscan_proc", ".", "remove", "(", "running_target", ")", "return", "multiscan_proc" ]
48.304348
0.001765
def ImportFile(store, filename, start): """Import hashes from 'filename' into 'store'.""" with io.open(filename, "r") as fp: reader = csv.Reader(fp.read()) i = 0 current_row = None product_code_list = [] op_system_code_list = [] for row in reader: # Skip first row. i += 1 if i and i % 5000 == 0: data_store.DB.Flush() print("Imported %d hashes" % i) if i > 1: if len(row) != 8: continue try: if i < start: continue if current_row: if current_row[0] == row[0]: # Same hash, add product/system product_code_list.append(int(row[5])) op_system_code_list.append(row[6]) continue # Fall through and add current row. else: # First row. current_row = row product_code_list = [int(row[5])] op_system_code_list = [row[6]] continue _ImportRow(store, current_row, product_code_list, op_system_code_list) # Set new hash. current_row = row product_code_list = [int(row[5])] op_system_code_list = [row[6]] except Exception as e: # pylint: disable=broad-except print("Failed at %d with %s" % (i, str(e))) return i - 1 if current_row: _ImportRow(store, current_row, product_code_list, op_system_code_list) return i
[ "def", "ImportFile", "(", "store", ",", "filename", ",", "start", ")", ":", "with", "io", ".", "open", "(", "filename", ",", "\"r\"", ")", "as", "fp", ":", "reader", "=", "csv", ".", "Reader", "(", "fp", ".", "read", "(", ")", ")", "i", "=", "0", "current_row", "=", "None", "product_code_list", "=", "[", "]", "op_system_code_list", "=", "[", "]", "for", "row", "in", "reader", ":", "# Skip first row.", "i", "+=", "1", "if", "i", "and", "i", "%", "5000", "==", "0", ":", "data_store", ".", "DB", ".", "Flush", "(", ")", "print", "(", "\"Imported %d hashes\"", "%", "i", ")", "if", "i", ">", "1", ":", "if", "len", "(", "row", ")", "!=", "8", ":", "continue", "try", ":", "if", "i", "<", "start", ":", "continue", "if", "current_row", ":", "if", "current_row", "[", "0", "]", "==", "row", "[", "0", "]", ":", "# Same hash, add product/system", "product_code_list", ".", "append", "(", "int", "(", "row", "[", "5", "]", ")", ")", "op_system_code_list", ".", "append", "(", "row", "[", "6", "]", ")", "continue", "# Fall through and add current row.", "else", ":", "# First row.", "current_row", "=", "row", "product_code_list", "=", "[", "int", "(", "row", "[", "5", "]", ")", "]", "op_system_code_list", "=", "[", "row", "[", "6", "]", "]", "continue", "_ImportRow", "(", "store", ",", "current_row", ",", "product_code_list", ",", "op_system_code_list", ")", "# Set new hash.", "current_row", "=", "row", "product_code_list", "=", "[", "int", "(", "row", "[", "5", "]", ")", "]", "op_system_code_list", "=", "[", "row", "[", "6", "]", "]", "except", "Exception", "as", "e", ":", "# pylint: disable=broad-except", "print", "(", "\"Failed at %d with %s\"", "%", "(", "i", ",", "str", "(", "e", ")", ")", ")", "return", "i", "-", "1", "if", "current_row", ":", "_ImportRow", "(", "store", ",", "current_row", ",", "product_code_list", ",", "op_system_code_list", ")", "return", "i" ]
32.363636
0.01636
def _pkl_periodogram(lspinfo, plotdpi=100, override_pfmethod=None): '''This returns the periodogram plot PNG as base64, plus info as a dict. Parameters ---------- lspinfo : dict This is an lspinfo dict containing results from a period-finding function. If it's from an astrobase period-finding function in periodbase, this will already be in the correct format. To use external period-finder results with this function, the `lspinfo` dict must be of the following form, with at least the keys listed below:: {'periods': np.array of all periods searched by the period-finder, 'lspvals': np.array of periodogram power value for each period, 'bestperiod': a float value that is the period with the highest peak in the periodogram, i.e. the most-likely actual period, 'method': a three-letter code naming the period-finder used; must be one of the keys in the `astrobase.periodbase.METHODLABELS` dict, 'nbestperiods': a list of the periods corresponding to periodogram peaks (`nbestlspvals` below) to annotate on the periodogram plot so they can be called out visually, 'nbestlspvals': a list of the power values associated with periodogram peaks to annotate on the periodogram plot so they can be called out visually; should be the same length as `nbestperiods` above} `nbestperiods` and `nbestlspvals` must have at least 5 elements each, e.g. describing the five 'best' (highest power) peaks in the periodogram. plotdpi : int The resolution in DPI of the output periodogram plot to make. override_pfmethod : str or None This is used to set a custom label for this periodogram method. Normally, this is taken from the 'method' key in the input `lspinfo` dict, but if you want to override the output method name, provide this as a string here. This can be useful if you have multiple results you want to incorporate into a checkplotdict from a single period-finder (e.g. if you ran BLS over several period ranges separately). Returns ------- dict Returns a dict that contains the following items:: {methodname: {'periods':the period array from lspinfo, 'lspval': the periodogram power array from lspinfo, 'bestperiod': the best period from lspinfo, 'nbestperiods': the 'nbestperiods' list from lspinfo, 'nbestlspvals': the 'nbestlspvals' list from lspinfo, 'periodogram': base64 encoded string representation of the periodogram plot}} The dict is returned in this format so it can be directly incorporated under the period-finder's label `methodname` in a checkplotdict, using Python's dict `update()` method. ''' # get the appropriate plot ylabel pgramylabel = PLOTYLABELS[lspinfo['method']] # get the periods and lspvals from lspinfo periods = lspinfo['periods'] lspvals = lspinfo['lspvals'] bestperiod = lspinfo['bestperiod'] nbestperiods = lspinfo['nbestperiods'] nbestlspvals = lspinfo['nbestlspvals'] # open the figure instance pgramfig = plt.figure(figsize=(7.5,4.8),dpi=plotdpi) # make the plot plt.plot(periods,lspvals) plt.xscale('log',basex=10) plt.xlabel('Period [days]') plt.ylabel(pgramylabel) plottitle = '%s - %.6f d' % (METHODLABELS[lspinfo['method']], bestperiod) plt.title(plottitle) # show the best five peaks on the plot for xbestperiod, xbestpeak in zip(nbestperiods, nbestlspvals): plt.annotate('%.6f' % xbestperiod, xy=(xbestperiod, xbestpeak), xycoords='data', xytext=(0.0,25.0), textcoords='offset points', arrowprops=dict(arrowstyle="->"),fontsize='14.0') # make a grid plt.grid(color='#a9a9a9', alpha=0.9, zorder=0, linewidth=1.0, linestyle=':') # this is the output instance pgrampng = StrIO() pgramfig.savefig(pgrampng, # bbox_inches='tight', pad_inches=0.0, format='png') plt.close() # encode the finderpng instance to base64 pgrampng.seek(0) pgramb64 = base64.b64encode(pgrampng.read()) # close the stringio buffer pgrampng.close() if not override_pfmethod: # this is the dict to return checkplotdict = { lspinfo['method']:{ 'periods':periods, 'lspvals':lspvals, 'bestperiod':bestperiod, 'nbestperiods':nbestperiods, 'nbestlspvals':nbestlspvals, 'periodogram':pgramb64, } } else: # this is the dict to return checkplotdict = { override_pfmethod:{ 'periods':periods, 'lspvals':lspvals, 'bestperiod':bestperiod, 'nbestperiods':nbestperiods, 'nbestlspvals':nbestlspvals, 'periodogram':pgramb64, } } return checkplotdict
[ "def", "_pkl_periodogram", "(", "lspinfo", ",", "plotdpi", "=", "100", ",", "override_pfmethod", "=", "None", ")", ":", "# get the appropriate plot ylabel", "pgramylabel", "=", "PLOTYLABELS", "[", "lspinfo", "[", "'method'", "]", "]", "# get the periods and lspvals from lspinfo", "periods", "=", "lspinfo", "[", "'periods'", "]", "lspvals", "=", "lspinfo", "[", "'lspvals'", "]", "bestperiod", "=", "lspinfo", "[", "'bestperiod'", "]", "nbestperiods", "=", "lspinfo", "[", "'nbestperiods'", "]", "nbestlspvals", "=", "lspinfo", "[", "'nbestlspvals'", "]", "# open the figure instance", "pgramfig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "7.5", ",", "4.8", ")", ",", "dpi", "=", "plotdpi", ")", "# make the plot", "plt", ".", "plot", "(", "periods", ",", "lspvals", ")", "plt", ".", "xscale", "(", "'log'", ",", "basex", "=", "10", ")", "plt", ".", "xlabel", "(", "'Period [days]'", ")", "plt", ".", "ylabel", "(", "pgramylabel", ")", "plottitle", "=", "'%s - %.6f d'", "%", "(", "METHODLABELS", "[", "lspinfo", "[", "'method'", "]", "]", ",", "bestperiod", ")", "plt", ".", "title", "(", "plottitle", ")", "# show the best five peaks on the plot", "for", "xbestperiod", ",", "xbestpeak", "in", "zip", "(", "nbestperiods", ",", "nbestlspvals", ")", ":", "plt", ".", "annotate", "(", "'%.6f'", "%", "xbestperiod", ",", "xy", "=", "(", "xbestperiod", ",", "xbestpeak", ")", ",", "xycoords", "=", "'data'", ",", "xytext", "=", "(", "0.0", ",", "25.0", ")", ",", "textcoords", "=", "'offset points'", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "\"->\"", ")", ",", "fontsize", "=", "'14.0'", ")", "# make a grid", "plt", ".", "grid", "(", "color", "=", "'#a9a9a9'", ",", "alpha", "=", "0.9", ",", "zorder", "=", "0", ",", "linewidth", "=", "1.0", ",", "linestyle", "=", "':'", ")", "# this is the output instance", "pgrampng", "=", "StrIO", "(", ")", "pgramfig", ".", "savefig", "(", "pgrampng", ",", "# bbox_inches='tight',", "pad_inches", "=", "0.0", ",", "format", "=", "'png'", ")", "plt", ".", "close", "(", ")", "# encode the finderpng instance to base64", "pgrampng", ".", "seek", "(", "0", ")", "pgramb64", "=", "base64", ".", "b64encode", "(", "pgrampng", ".", "read", "(", ")", ")", "# close the stringio buffer", "pgrampng", ".", "close", "(", ")", "if", "not", "override_pfmethod", ":", "# this is the dict to return", "checkplotdict", "=", "{", "lspinfo", "[", "'method'", "]", ":", "{", "'periods'", ":", "periods", ",", "'lspvals'", ":", "lspvals", ",", "'bestperiod'", ":", "bestperiod", ",", "'nbestperiods'", ":", "nbestperiods", ",", "'nbestlspvals'", ":", "nbestlspvals", ",", "'periodogram'", ":", "pgramb64", ",", "}", "}", "else", ":", "# this is the dict to return", "checkplotdict", "=", "{", "override_pfmethod", ":", "{", "'periods'", ":", "periods", ",", "'lspvals'", ":", "lspvals", ",", "'bestperiod'", ":", "bestperiod", ",", "'nbestperiods'", ":", "nbestperiods", ",", "'nbestlspvals'", ":", "nbestlspvals", ",", "'periodogram'", ":", "pgramb64", ",", "}", "}", "return", "checkplotdict" ]
37.275168
0.003858
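Because the lspinfo layout required above is quite specific, a short, hedged sketch of a conforming dict may help; the period grid, the random power values, and the 'gls' method code are illustrative assumptions rather than real period-finder output:
import numpy as np

periods = np.linspace(0.5, 100.0, 5000)     # hypothetical period grid
lspvals = np.random.uniform(size=5000)      # stand-in periodogram power values
best = np.argsort(lspvals)[::-1][:5]        # indices of the five highest peaks

lspinfo = {
    'periods': periods,
    'lspvals': lspvals,
    'bestperiod': float(periods[best[0]]),
    'method': 'gls',                        # assumed to be a METHODLABELS key
    'nbestperiods': periods[best].tolist(),
    'nbestlspvals': lspvals[best].tolist(),
}
# checkplot_update = _pkl_periodogram(lspinfo, plotdpi=100)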
def create(vm_): ''' get the system build going ''' creds = get_creds() clc.v1.SetCredentials(creds["token"], creds["token_pass"]) cloud_profile = config.is_provider_configured( __opts__, __active_provider_name__ or __virtualname__, ('token',) ) group = config.get_cloud_config_value( 'group', vm_, __opts__, search_global=False, default=None, ) name = vm_['name'] description = config.get_cloud_config_value( 'description', vm_, __opts__, search_global=False, default=None, ) ram = config.get_cloud_config_value( 'ram', vm_, __opts__, search_global=False, default=None, ) backup_level = config.get_cloud_config_value( 'backup_level', vm_, __opts__, search_global=False, default=None, ) template = config.get_cloud_config_value( 'template', vm_, __opts__, search_global=False, default=None, ) password = config.get_cloud_config_value( 'password', vm_, __opts__, search_global=False, default=None, ) cpu = config.get_cloud_config_value( 'cpu', vm_, __opts__, search_global=False, default=None, ) network = config.get_cloud_config_value( 'network', vm_, __opts__, search_global=False, default=None, ) location = config.get_cloud_config_value( 'location', vm_, __opts__, search_global=False, default=None, ) if len(name) > 6: name = name[0:6] if len(password) < 9: password = '' clc_return = clc.v1.Server.Create(alias=None, location=(location), name=(name), template=(template), cpu=(cpu), ram=(ram), backup_level=(backup_level), group=(group), network=(network), description=(description), password=(password)) req_id = clc_return["RequestID"] vm_['ssh_host'] = get_build_status(req_id, name) __utils__['cloud.fire_event']( 'event', 'waiting for ssh', 'salt/cloud/{0}/waiting_for_ssh'.format(name), sock_dir=__opts__['sock_dir'], args={'ip_address': vm_['ssh_host']}, transport=__opts__['transport'] ) # Bootstrap! ret = __utils__['cloud.bootstrap'](vm_, __opts__) return_message = {"Server Name": name, "IP Address": vm_['ssh_host']} ret.update(return_message) return return_message
[ "def", "create", "(", "vm_", ")", ":", "creds", "=", "get_creds", "(", ")", "clc", ".", "v1", ".", "SetCredentials", "(", "creds", "[", "\"token\"", "]", ",", "creds", "[", "\"token_pass\"", "]", ")", "cloud_profile", "=", "config", ".", "is_provider_configured", "(", "__opts__", ",", "__active_provider_name__", "or", "__virtualname__", ",", "(", "'token'", ",", ")", ")", "group", "=", "config", ".", "get_cloud_config_value", "(", "'group'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "name", "=", "vm_", "[", "'name'", "]", "description", "=", "config", ".", "get_cloud_config_value", "(", "'description'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "ram", "=", "config", ".", "get_cloud_config_value", "(", "'ram'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "backup_level", "=", "config", ".", "get_cloud_config_value", "(", "'backup_level'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "template", "=", "config", ".", "get_cloud_config_value", "(", "'template'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "password", "=", "config", ".", "get_cloud_config_value", "(", "'password'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "cpu", "=", "config", ".", "get_cloud_config_value", "(", "'cpu'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "network", "=", "config", ".", "get_cloud_config_value", "(", "'network'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "location", "=", "config", ".", "get_cloud_config_value", "(", "'location'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ",", ")", "if", "len", "(", "name", ")", ">", "6", ":", "name", "=", "name", "[", "0", ":", "6", "]", "if", "len", "(", "password", ")", "<", "9", ":", "password", "=", "''", "clc_return", "=", "clc", ".", "v1", ".", "Server", ".", "Create", "(", "alias", "=", "None", ",", "location", "=", "(", "location", ")", ",", "name", "=", "(", "name", ")", ",", "template", "=", "(", "template", ")", ",", "cpu", "=", "(", "cpu", ")", ",", "ram", "=", "(", "ram", ")", ",", "backup_level", "=", "(", "backup_level", ")", ",", "group", "=", "(", "group", ")", ",", "network", "=", "(", "network", ")", ",", "description", "=", "(", "description", ")", ",", "password", "=", "(", "password", ")", ")", "req_id", "=", "clc_return", "[", "\"RequestID\"", "]", "vm_", "[", "'ssh_host'", "]", "=", "get_build_status", "(", "req_id", ",", "name", ")", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'waiting for ssh'", ",", "'salt/cloud/{0}/waiting_for_ssh'", ".", "format", "(", "name", ")", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "args", "=", "{", "'ip_address'", ":", "vm_", "[", "'ssh_host'", "]", "}", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "# Bootstrap!", "ret", "=", "__utils__", "[", "'cloud.bootstrap'", "]", "(", "vm_", ",", "__opts__", ")", "return_message", "=", "{", "\"Server Name\"", ":", "name", ",", "\"IP Address\"", ":", "vm_", "[", "'ssh_host'", "]", "}", "ret", ".", "update", "(", "return_message", ")", "return", "return_message" ]
37.266667
0.000871
async def _handle_conversation_delta(self, conversation): """Receive Conversation delta and create or update the conversation. Args: conversation: hangouts_pb2.Conversation instance Raises: NetworkError: A request to fetch the complete conversation failed. """ conv_id = conversation.conversation_id.id conv = self._conv_dict.get(conv_id, None) if conv is None: # Ignore the delta and fetch the complete conversation. await self._get_or_fetch_conversation(conv_id) else: # Update conversation using the delta. conv.update_conversation(conversation)
[ "async", "def", "_handle_conversation_delta", "(", "self", ",", "conversation", ")", ":", "conv_id", "=", "conversation", ".", "conversation_id", ".", "id", "conv", "=", "self", ".", "_conv_dict", ".", "get", "(", "conv_id", ",", "None", ")", "if", "conv", "is", "None", ":", "# Ignore the delta and fetch the complete conversation.", "await", "self", ".", "_get_or_fetch_conversation", "(", "conv_id", ")", "else", ":", "# Update conversation using the delta.", "conv", ".", "update_conversation", "(", "conversation", ")" ]
39.411765
0.002915
def validate_manifest_against_schema(manifest: Dict[str, Any]) -> None: """ Load and validate manifest against schema located at MANIFEST_SCHEMA_PATH. """ schema_data = _load_schema_data() try: validate(manifest, schema_data) except jsonValidationError as e: raise ValidationError( f"Manifest invalid for schema version {schema_data['version']}. " f"Reason: {e.message}" )
[ "def", "validate_manifest_against_schema", "(", "manifest", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "None", ":", "schema_data", "=", "_load_schema_data", "(", ")", "try", ":", "validate", "(", "manifest", ",", "schema_data", ")", "except", "jsonValidationError", "as", "e", ":", "raise", "ValidationError", "(", "f\"Manifest invalid for schema version {schema_data['version']}. \"", "f\"Reason: {e.message}\"", ")" ]
33.538462
0.002232
def decode(self, encoded_payload): """Decode a transmitted payload.""" self.packets = [] while encoded_payload: if six.byte2int(encoded_payload[0:1]) <= 1: packet_len = 0 i = 1 while six.byte2int(encoded_payload[i:i + 1]) != 255: packet_len = packet_len * 10 + six.byte2int( encoded_payload[i:i + 1]) i += 1 self.packets.append(packet.Packet( encoded_packet=encoded_payload[i + 1:i + 1 + packet_len])) else: i = encoded_payload.find(b':') if i == -1: raise ValueError('invalid payload') # extracting the packet out of the payload is extremely # inefficient, because the payload needs to be treated as # binary, but the non-binary packets have to be parsed as # unicode. Luckily this complication only applies to long # polling, as the websocket transport sends packets # individually wrapped. packet_len = int(encoded_payload[0:i]) pkt = encoded_payload.decode('utf-8', errors='ignore')[ i + 1: i + 1 + packet_len].encode('utf-8') self.packets.append(packet.Packet(encoded_packet=pkt)) # the engine.io protocol sends the packet length in # utf-8 characters, but we need it in bytes to be able to # jump to the next packet in the payload packet_len = len(pkt) encoded_payload = encoded_payload[i + 1 + packet_len:]
[ "def", "decode", "(", "self", ",", "encoded_payload", ")", ":", "self", ".", "packets", "=", "[", "]", "while", "encoded_payload", ":", "if", "six", ".", "byte2int", "(", "encoded_payload", "[", "0", ":", "1", "]", ")", "<=", "1", ":", "packet_len", "=", "0", "i", "=", "1", "while", "six", ".", "byte2int", "(", "encoded_payload", "[", "i", ":", "i", "+", "1", "]", ")", "!=", "255", ":", "packet_len", "=", "packet_len", "*", "10", "+", "six", ".", "byte2int", "(", "encoded_payload", "[", "i", ":", "i", "+", "1", "]", ")", "i", "+=", "1", "self", ".", "packets", ".", "append", "(", "packet", ".", "Packet", "(", "encoded_packet", "=", "encoded_payload", "[", "i", "+", "1", ":", "i", "+", "1", "+", "packet_len", "]", ")", ")", "else", ":", "i", "=", "encoded_payload", ".", "find", "(", "b':'", ")", "if", "i", "==", "-", "1", ":", "raise", "ValueError", "(", "'invalid payload'", ")", "# extracting the packet out of the payload is extremely", "# inefficient, because the payload needs to be treated as", "# binary, but the non-binary packets have to be parsed as", "# unicode. Luckily this complication only applies to long", "# polling, as the websocket transport sends packets", "# individually wrapped.", "packet_len", "=", "int", "(", "encoded_payload", "[", "0", ":", "i", "]", ")", "pkt", "=", "encoded_payload", ".", "decode", "(", "'utf-8'", ",", "errors", "=", "'ignore'", ")", "[", "i", "+", "1", ":", "i", "+", "1", "+", "packet_len", "]", ".", "encode", "(", "'utf-8'", ")", "self", ".", "packets", ".", "append", "(", "packet", ".", "Packet", "(", "encoded_packet", "=", "pkt", ")", ")", "# the engine.io protocol sends the packet length in", "# utf-8 characters, but we need it in bytes to be able to", "# jump to the next packet in the payload", "packet_len", "=", "len", "(", "pkt", ")", "encoded_payload", "=", "encoded_payload", "[", "i", "+", "1", "+", "packet_len", ":", "]" ]
49.117647
0.001174
def reset_actions(self): """ Clears actions that are already stored locally and on the remote end """ if self._driver.w3c: self.w3c_actions.clear_actions() self._actions = []
[ "def", "reset_actions", "(", "self", ")", ":", "if", "self", ".", "_driver", ".", "w3c", ":", "self", ".", "w3c_actions", ".", "clear_actions", "(", ")", "self", ".", "_actions", "=", "[", "]" ]
32
0.013043
def from_dict(self, document): """Create a prediction image set resource from a dictionary serialization. Parameters ---------- document : dict Dictionary serialization of the resource Returns ------- PredictionImageSetHandle Handle for prediction image sets """ return PredictionImageSetHandle( str(document['_id']), document['properties'], [PredictionImageSet.from_dict(img) for img in document['images']], timestamp=datetime.datetime.strptime( document['timestamp'], '%Y-%m-%dT%H:%M:%S.%f' ), is_active=document['active'] )
[ "def", "from_dict", "(", "self", ",", "document", ")", ":", "return", "PredictionImageSetHandle", "(", "str", "(", "document", "[", "'_id'", "]", ")", ",", "document", "[", "'properties'", "]", ",", "[", "PredictionImageSet", ".", "from_dict", "(", "img", ")", "for", "img", "in", "document", "[", "'images'", "]", "]", ",", "timestamp", "=", "datetime", ".", "datetime", ".", "strptime", "(", "document", "[", "'timestamp'", "]", ",", "'%Y-%m-%dT%H:%M:%S.%f'", ")", ",", "is_active", "=", "document", "[", "'active'", "]", ")" ]
30.041667
0.002688
def update_single_grading_period(self, id, course_id, grading_periods_end_date, grading_periods_start_date, grading_periods_weight=None): """ Update a single grading period. Update an existing grading period. """ path = {} data = {} params = {} # REQUIRED - PATH - course_id """ID""" path["course_id"] = course_id # REQUIRED - PATH - id """ID""" path["id"] = id # REQUIRED - grading_periods[start_date] """The date the grading period starts.""" data["grading_periods[start_date]"] = grading_periods_start_date # REQUIRED - grading_periods[end_date] """no description""" data["grading_periods[end_date]"] = grading_periods_end_date # OPTIONAL - grading_periods[weight] """A weight value that contributes to the overall weight of a grading period set which is used to calculate how much assignments in this period contribute to the total grade""" if grading_periods_weight is not None: data["grading_periods[weight]"] = grading_periods_weight self.logger.debug("PUT /api/v1/courses/{course_id}/grading_periods/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("PUT", "/api/v1/courses/{course_id}/grading_periods/{id}".format(**path), data=data, params=params, no_data=True)
[ "def", "update_single_grading_period", "(", "self", ",", "id", ",", "course_id", ",", "grading_periods_end_date", ",", "grading_periods_start_date", ",", "grading_periods_weight", "=", "None", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - course_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"course_id\"", "]", "=", "course_id", "# REQUIRED - PATH - id\r", "\"\"\"ID\"\"\"", "path", "[", "\"id\"", "]", "=", "id", "# REQUIRED - grading_periods[start_date]\r", "\"\"\"The date the grading period starts.\"\"\"", "data", "[", "\"grading_periods[start_date]\"", "]", "=", "grading_periods_start_date", "# REQUIRED - grading_periods[end_date]\r", "\"\"\"no description\"\"\"", "data", "[", "\"grading_periods[end_date]\"", "]", "=", "grading_periods_end_date", "# OPTIONAL - grading_periods[weight]\r", "\"\"\"A weight value that contributes to the overall weight of a grading period set which is used to calculate how much assignments in this period contribute to the total grade\"\"\"", "if", "grading_periods_weight", "is", "not", "None", ":", "data", "[", "\"grading_periods[weight]\"", "]", "=", "grading_periods_weight", "self", ".", "logger", ".", "debug", "(", "\"PUT /api/v1/courses/{course_id}/grading_periods/{id} with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"PUT\"", ",", "\"/api/v1/courses/{course_id}/grading_periods/{id}\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "no_data", "=", "True", ")" ]
44.181818
0.004027
def finish(): """Print warning about interrupt and empty the job queue.""" out.warn("Interrupted!") for t in threads: t.stop() jobs.clear() out.warn("Waiting for download threads to finish.")
[ "def", "finish", "(", ")", ":", "out", ".", "warn", "(", "\"Interrupted!\"", ")", "for", "t", "in", "threads", ":", "t", ".", "stop", "(", ")", "jobs", ".", "clear", "(", ")", "out", ".", "warn", "(", "\"Waiting for download threads to finish.\"", ")" ]
30.428571
0.004566
def update_or_create(cls, external_gateway, name, with_status=False, **kw): """ Update or create external endpoints for the specified external gateway. An ExternalEndpoint is considered unique based on the IP address for the endpoint (you cannot add two external endpoints with the same IP). If the external endpoint is dynamic, then the name is the unique identifier. :param ExternalGateway external_gateway: external gateway reference :param str name: name of the ExternalEndpoint. This is only used as a direct match if the endpoint is dynamic. Otherwise the address field in the keyword arguments will be used as you cannot add multiple external endpoints with the same IP address. :param bool with_status: If set to True, returns a 3-tuple of (ExternalEndpoint, modified, created), where modified and created is the boolean status for operations performed. :param dict kw: keyword arguments to satisfy ExternalEndpoint.create constructor :raises CreateElementFailed: Failed to create external endpoint with reason :raises ElementNotFound: If specified ExternalGateway is not valid :return: if with_status=True, return tuple(ExternalEndpoint, created). Otherwise return only ExternalEndpoint. """ if 'address' in kw: external_endpoint = external_gateway.external_endpoint.get_contains( '({})'.format(kw['address'])) else: external_endpoint = external_gateway.external_endpoint.get_contains(name) updated = False created = False if external_endpoint: # Check for changes for name, value in kw.items(): # Check for differences before updating if getattr(external_endpoint, name, None) != value: external_endpoint.data[name] = value updated = True if updated: external_endpoint.update() else: external_endpoint = external_gateway.external_endpoint.create( name, **kw) created = True if with_status: return external_endpoint, updated, created return external_endpoint
[ "def", "update_or_create", "(", "cls", ",", "external_gateway", ",", "name", ",", "with_status", "=", "False", ",", "*", "*", "kw", ")", ":", "if", "'address'", "in", "kw", ":", "external_endpoint", "=", "external_gateway", ".", "external_endpoint", ".", "get_contains", "(", "'({})'", ".", "format", "(", "kw", "[", "'address'", "]", ")", ")", "else", ":", "external_endpoint", "=", "external_gateway", ".", "external_endpoint", ".", "get_contains", "(", "name", ")", "updated", "=", "False", "created", "=", "False", "if", "external_endpoint", ":", "# Check for changes", "for", "name", ",", "value", "in", "kw", ".", "items", "(", ")", ":", "# Check for differences before updating", "if", "getattr", "(", "external_endpoint", ",", "name", ",", "None", ")", "!=", "value", ":", "external_endpoint", ".", "data", "[", "name", "]", "=", "value", "updated", "=", "True", "if", "updated", ":", "external_endpoint", ".", "update", "(", ")", "else", ":", "external_endpoint", "=", "external_gateway", ".", "external_endpoint", ".", "create", "(", "name", ",", "*", "*", "kw", ")", "created", "=", "True", "if", "with_status", ":", "return", "external_endpoint", ",", "updated", ",", "created", "return", "external_endpoint" ]
50.911111
0.005567
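A hedged usage sketch for the update_or_create classmethod above; the import path, gateway name, endpoint name, address, and enabled flag are all assumptions made for illustration, not values taken from the record:
# Module path is assumed; adjust to wherever ExternalGateway/ExternalEndpoint live.
from smc.vpn.elements import ExternalGateway, ExternalEndpoint

gateway = ExternalGateway('corporate-vpn-gw')   # hypothetical gateway element
endpoint, modified, created = ExternalEndpoint.update_or_create(
    gateway,
    'branch-office-1',
    with_status=True,
    address='203.0.113.10',
    enabled=True)
if created:
    print('Added new external endpoint:', endpoint)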
def not_empty(message=None) -> Filter_T: """ Validate any object to ensure it's not empty (is None or has no elements). """ def validate(value): if value is None: _raise_failure(message) if hasattr(value, '__len__') and value.__len__() == 0: _raise_failure(message) return value return validate
[ "def", "not_empty", "(", "message", "=", "None", ")", "->", "Filter_T", ":", "def", "validate", "(", "value", ")", ":", "if", "value", "is", "None", ":", "_raise_failure", "(", "message", ")", "if", "hasattr", "(", "value", ",", "'__len__'", ")", "and", "value", ".", "__len__", "(", ")", "==", "0", ":", "_raise_failure", "(", "message", ")", "return", "value", "return", "validate" ]
27.076923
0.002747
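The not_empty factory above simply returns a closure, so its behaviour can be shown in a few lines; the failure message is arbitrary and _raise_failure is assumed to raise the package's own validation error:
validator = not_empty('this argument must not be empty')
validator('hello')    # returns 'hello' unchanged
validator([1, 2])     # returns [1, 2] unchanged
validator([])         # triggers _raise_failure('this argument must not be empty')
validator(None)       # also triggers the failure path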
def _compute(self): """ Compute parameters necessary for later steps within the rendering process """ for serie in self.series: serie.points, serie.outliers = \ self._box_points(serie.values, self.box_mode) self._x_pos = [(i + .5) / self._order for i in range(self._order)] if self._min: self._box.ymin = min(self._min, self.zero) if self._max: self._box.ymax = max(self._max, self.zero)
[ "def", "_compute", "(", "self", ")", ":", "for", "serie", "in", "self", ".", "series", ":", "serie", ".", "points", ",", "serie", ".", "outliers", "=", "self", ".", "_box_points", "(", "serie", ".", "values", ",", "self", ".", "box_mode", ")", "self", ".", "_x_pos", "=", "[", "(", "i", "+", ".5", ")", "/", "self", ".", "_order", "for", "i", "in", "range", "(", "self", ".", "_order", ")", "]", "if", "self", ".", "_min", ":", "self", ".", "_box", ".", "ymin", "=", "min", "(", "self", ".", "_min", ",", "self", ".", "zero", ")", "if", "self", ".", "_max", ":", "self", ".", "_box", ".", "ymax", "=", "max", "(", "self", ".", "_max", ",", "self", ".", "zero", ")" ]
32.733333
0.00396
def _maybe_convert_usecols(usecols): """ Convert `usecols` into a compatible format for parsing in `parsers.py`. Parameters ---------- usecols : object The use-columns object to potentially convert. Returns ------- converted : object The compatible format of `usecols`. """ if usecols is None: return usecols if is_integer(usecols): warnings.warn(("Passing in an integer for `usecols` has been " "deprecated. Please pass in a list of int from " "0 to `usecols` inclusive instead."), FutureWarning, stacklevel=2) return lrange(usecols + 1) if isinstance(usecols, str): return _range2cols(usecols) return usecols
[ "def", "_maybe_convert_usecols", "(", "usecols", ")", ":", "if", "usecols", "is", "None", ":", "return", "usecols", "if", "is_integer", "(", "usecols", ")", ":", "warnings", ".", "warn", "(", "(", "\"Passing in an integer for `usecols` has been \"", "\"deprecated. Please pass in a list of int from \"", "\"0 to `usecols` inclusive instead.\"", ")", ",", "FutureWarning", ",", "stacklevel", "=", "2", ")", "return", "lrange", "(", "usecols", "+", "1", ")", "if", "isinstance", "(", "usecols", ",", "str", ")", ":", "return", "_range2cols", "(", "usecols", ")", "return", "usecols" ]
27
0.001277
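The three branches of _maybe_convert_usecols above are easiest to see side by side; the expected outputs assume pandas' _range2cols maps Excel-style column letters to zero-based integer indices:
_maybe_convert_usecols(None)            # -> None, passed straight through
_maybe_convert_usecols(3)               # -> [0, 1, 2, 3], with a FutureWarning
_maybe_convert_usecols('A:C,F')         # -> [0, 1, 2, 5] via _range2cols
_maybe_convert_usecols(['foo', 'bar'])  # -> returned unchanged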
def load(self, reload=False, require_load=False): # type: (bool, bool) -> None """ Searches for an appropriate config file. If found, loads the file into the current instance. This method can also be used to reload a configuration. Note that you may want to set ``reload`` to ``True`` to clear the configuration before loading in that case. Without doing that, values will remain available even if they have been removed from the config files. :param reload: if set to ``True``, the existing values are cleared before reloading. :param require_load: If set to ``True`` this will raise a :py:exc:`IOError` if no config file has been found to load. """ if reload: # pragma: no cover self.config = None # only load the config if necessary (or explicitly requested) if self.config: # pragma: no cover self._log.debug('Returning cached config instance. Use ' '``reload=True`` to avoid caching!') return path = self._effective_path() config_filename = self._effective_filename() # Next, use the resolved path to find the filenames. Keep track of # which files we loaded in order to inform the user. self._active_path = [join(_, config_filename) for _ in path] for dirname in path: conf_name = join(dirname, config_filename) readable = self.check_file(conf_name) if readable: action = 'Updating' if self._loaded_files else 'Loading initial' self._log.info('%s config from %s', action, conf_name) self.read(conf_name) if conf_name == expanduser("~/.%s/%s/%s" % ( self.group_name, self.app_name, self.filename)): self._log.warning( "DEPRECATION WARNING: The file " "'%s/.%s/%s/app.ini' was loaded. The XDG " "Basedir standard requires this file to be in " "'%s/.config/%s/%s/app.ini'! This location " "will no longer be parsed in a future version of " "config_resolver! You can already (and should) move " "the file!", expanduser("~"), self.group_name, self.app_name, expanduser("~"), self.group_name, self.app_name) self._loaded_files.append(conf_name) if not self._loaded_files and not require_load: self._log.warning( "No config file named %s found! Search path was %r", config_filename, path) elif not self._loaded_files and require_load: raise IOError("No config file named %s found! Search path " "was %r" % (config_filename, path))
[ "def", "load", "(", "self", ",", "reload", "=", "False", ",", "require_load", "=", "False", ")", ":", "# type: (bool, bool) -> None", "if", "reload", ":", "# pragma: no cover", "self", ".", "config", "=", "None", "# only load the config if necessary (or explicitly requested)", "if", "self", ".", "config", ":", "# pragma: no cover", "self", ".", "_log", ".", "debug", "(", "'Returning cached config instance. Use '", "'``reload=True`` to avoid caching!'", ")", "return", "path", "=", "self", ".", "_effective_path", "(", ")", "config_filename", "=", "self", ".", "_effective_filename", "(", ")", "# Next, use the resolved path to find the filenames. Keep track of", "# which files we loaded in order to inform the user.", "self", ".", "_active_path", "=", "[", "join", "(", "_", ",", "config_filename", ")", "for", "_", "in", "path", "]", "for", "dirname", "in", "path", ":", "conf_name", "=", "join", "(", "dirname", ",", "config_filename", ")", "readable", "=", "self", ".", "check_file", "(", "conf_name", ")", "if", "readable", ":", "action", "=", "'Updating'", "if", "self", ".", "_loaded_files", "else", "'Loading initial'", "self", ".", "_log", ".", "info", "(", "'%s config from %s'", ",", "action", ",", "conf_name", ")", "self", ".", "read", "(", "conf_name", ")", "if", "conf_name", "==", "expanduser", "(", "\"~/.%s/%s/%s\"", "%", "(", "self", ".", "group_name", ",", "self", ".", "app_name", ",", "self", ".", "filename", ")", ")", ":", "self", ".", "_log", ".", "warning", "(", "\"DEPRECATION WARNING: The file \"", "\"'%s/.%s/%s/app.ini' was loaded. The XDG \"", "\"Basedir standard requires this file to be in \"", "\"'%s/.config/%s/%s/app.ini'! This location \"", "\"will no longer be parsed in a future version of \"", "\"config_resolver! You can already (and should) move \"", "\"the file!\"", ",", "expanduser", "(", "\"~\"", ")", ",", "self", ".", "group_name", ",", "self", ".", "app_name", ",", "expanduser", "(", "\"~\"", ")", ",", "self", ".", "group_name", ",", "self", ".", "app_name", ")", "self", ".", "_loaded_files", ".", "append", "(", "conf_name", ")", "if", "not", "self", ".", "_loaded_files", "and", "not", "require_load", ":", "self", ".", "_log", ".", "warning", "(", "\"No config file named %s found! Search path was %r\"", ",", "config_filename", ",", "path", ")", "elif", "not", "self", ".", "_loaded_files", "and", "require_load", ":", "raise", "IOError", "(", "\"No config file named %s found! Search path \"", "\"was %r\"", "%", "(", "config_filename", ",", "path", ")", ")" ]
48.737705
0.001319
def parse_sargasso_logs(self, f): """ Parse the sargasso log file. """ species_name = list() items = list() header = list() is_first_line = True for l in f['f'].splitlines(): s = l.split(",") # Check that this actually is a Sargasso file if is_first_line and s[0]!='Sample': return None if len(s) < 7: continue if is_first_line: #prepare header is_first_line = False header = s for i in header[1:]: #find out what species included sname = i.split('-')[-1] if sname not in species_name: species_name.append(sname) #find out what is being counted kname = ("-".join(i.split('-')[-3:-1])) if kname not in items: items.append(kname) else: #start sample lines. sample_name = s.pop(0) chunk_by_species = [s[i:i + len(items)] for i in range(0, len(s), len(items))]; for idx,v in enumerate(chunk_by_species): #adding species name to the sample name for easy interpretation new_sample_name = '_'.join([sample_name,species_name[idx]]) # Clean up sample name new_sample_name = self.clean_s_name(new_sample_name, f['root']) if new_sample_name in self.sargasso_data.keys(): log.debug("Duplicate sample name found! Overwriting: {}".format(new_sample_name)) try: self.sargasso_data[new_sample_name] = dict(zip(items,map(int, v))) except ValueError: pass self.sargasso_keys = items for idx, f_name in enumerate(self.sargasso_data.keys()): # Reorganised parsed data for this sample # Collect total READ count number self.sargasso_data[f_name]['Total'] = 0; for key, value in list(self.sargasso_data[f_name].items()): # iter on both keys and values if key.endswith("Reads"): self.sargasso_data[f_name]['Total'] += value # Calculate the percent aligned if we can try: self.sargasso_data[f_name]['sargasso_percent_assigned'] = (float(self.sargasso_data[f_name]['Assigned-Reads'])/float(self.sargasso_data[f_name]['Total'])) * 100.0 except (KeyError, ZeroDivisionError): pass
[ "def", "parse_sargasso_logs", "(", "self", ",", "f", ")", ":", "species_name", "=", "list", "(", ")", "items", "=", "list", "(", ")", "header", "=", "list", "(", ")", "is_first_line", "=", "True", "for", "l", "in", "f", "[", "'f'", "]", ".", "splitlines", "(", ")", ":", "s", "=", "l", ".", "split", "(", "\",\"", ")", "# Check that this actually is a Sargasso file", "if", "is_first_line", "and", "s", "[", "0", "]", "!=", "'Sample'", ":", "return", "None", "if", "len", "(", "s", ")", "<", "7", ":", "continue", "if", "is_first_line", ":", "#prepare header", "is_first_line", "=", "False", "header", "=", "s", "for", "i", "in", "header", "[", "1", ":", "]", ":", "#find out what species included", "sname", "=", "i", ".", "split", "(", "'-'", ")", "[", "-", "1", "]", "if", "sname", "not", "in", "species_name", ":", "species_name", ".", "append", "(", "sname", ")", "#find out what is being counted", "kname", "=", "(", "\"-\"", ".", "join", "(", "i", ".", "split", "(", "'-'", ")", "[", "-", "3", ":", "-", "1", "]", ")", ")", "if", "kname", "not", "in", "items", ":", "items", ".", "append", "(", "kname", ")", "else", ":", "#start sample lines.", "sample_name", "=", "s", ".", "pop", "(", "0", ")", "chunk_by_species", "=", "[", "s", "[", "i", ":", "i", "+", "len", "(", "items", ")", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "s", ")", ",", "len", "(", "items", ")", ")", "]", "for", "idx", ",", "v", "in", "enumerate", "(", "chunk_by_species", ")", ":", "#adding species name to the sample name for easy interpretation", "new_sample_name", "=", "'_'", ".", "join", "(", "[", "sample_name", ",", "species_name", "[", "idx", "]", "]", ")", "# Clean up sample name", "new_sample_name", "=", "self", ".", "clean_s_name", "(", "new_sample_name", ",", "f", "[", "'root'", "]", ")", "if", "new_sample_name", "in", "self", ".", "sargasso_data", ".", "keys", "(", ")", ":", "log", ".", "debug", "(", "\"Duplicate sample name found! Overwriting: {}\"", ".", "format", "(", "new_sample_name", ")", ")", "try", ":", "self", ".", "sargasso_data", "[", "new_sample_name", "]", "=", "dict", "(", "zip", "(", "items", ",", "map", "(", "int", ",", "v", ")", ")", ")", "except", "ValueError", ":", "pass", "self", ".", "sargasso_keys", "=", "items", "for", "idx", ",", "f_name", "in", "enumerate", "(", "self", ".", "sargasso_data", ".", "keys", "(", ")", ")", ":", "# Reorganised parsed data for this sample", "# Collect total READ count number", "self", ".", "sargasso_data", "[", "f_name", "]", "[", "'Total'", "]", "=", "0", "for", "key", ",", "value", "in", "list", "(", "self", ".", "sargasso_data", "[", "f_name", "]", ".", "items", "(", ")", ")", ":", "# iter on both keys and values", "if", "key", ".", "endswith", "(", "\"Reads\"", ")", ":", "self", ".", "sargasso_data", "[", "f_name", "]", "[", "'Total'", "]", "+=", "value", "# Calculate the percent aligned if we can", "try", ":", "self", ".", "sargasso_data", "[", "f_name", "]", "[", "'sargasso_percent_assigned'", "]", "=", "(", "float", "(", "self", ".", "sargasso_data", "[", "f_name", "]", "[", "'Assigned-Reads'", "]", ")", "/", "float", "(", "self", ".", "sargasso_data", "[", "f_name", "]", "[", "'Total'", "]", ")", ")", "*", "100.0", "except", "(", "KeyError", ",", "ZeroDivisionError", ")", ":", "pass" ]
39.253731
0.008531
def _make_record(self, parent, gline): """Process next record. This method creates a new record from the line read from the file if needed and/or updates its parent record. If the parent record tag is ``BLOB`` and the new record tag is ``CONT`` then the record is skipped entirely and None is returned. Otherwise if the new record tag is ``CONT`` or ``CONC`` its value is added to the parent's value. For all other tags a new record is made and added to the parent's sub_records attribute. Parameters ---------- parent : `model.Record` Parent record of the new record gline : `gedcom_line` Current parsed line Returns ------- `model.Record` or None """ if parent and gline.tag in ("CONT", "CONC"): # concatenate, only for non-BLOBs if parent.tag != "BLOB": # have to be careful concatenating empty/None values value = gline.value if gline.tag == "CONT": value = b"\n" + (value or b"") if value is not None: parent.value = (parent.value or b"") + value return None # avoid infinite cycle dialect = model.DIALECT_DEFAULT if not (gline.level == 0 and gline.tag == "HEAD") and self._header: dialect = self.dialect rec = model.make_record(level=gline.level, xref_id=gline.xref_id, tag=gline.tag, value=gline.value, sub_records=[], offset=gline.offset, dialect=dialect, parser=self) # add to parent's sub-records list if parent: parent.sub_records.append(rec) return rec
[ "def", "_make_record", "(", "self", ",", "parent", ",", "gline", ")", ":", "if", "parent", "and", "gline", ".", "tag", "in", "(", "\"CONT\"", ",", "\"CONC\"", ")", ":", "# concatenate, only for non-BLOBs", "if", "parent", ".", "tag", "!=", "\"BLOB\"", ":", "# have to be careful concatenating empty/None values", "value", "=", "gline", ".", "value", "if", "gline", ".", "tag", "==", "\"CONT\"", ":", "value", "=", "b\"\\n\"", "+", "(", "value", "or", "b\"\"", ")", "if", "value", "is", "not", "None", ":", "parent", ".", "value", "=", "(", "parent", ".", "value", "or", "b\"\"", ")", "+", "value", "return", "None", "# avoid infinite cycle", "dialect", "=", "model", ".", "DIALECT_DEFAULT", "if", "not", "(", "gline", ".", "level", "==", "0", "and", "gline", ".", "tag", "==", "\"HEAD\"", ")", "and", "self", ".", "_header", ":", "dialect", "=", "self", ".", "dialect", "rec", "=", "model", ".", "make_record", "(", "level", "=", "gline", ".", "level", ",", "xref_id", "=", "gline", ".", "xref_id", ",", "tag", "=", "gline", ".", "tag", ",", "value", "=", "gline", ".", "value", ",", "sub_records", "=", "[", "]", ",", "offset", "=", "gline", ".", "offset", ",", "dialect", "=", "dialect", ",", "parser", "=", "self", ")", "# add to parent's sub-records list", "if", "parent", ":", "parent", ".", "sub_records", ".", "append", "(", "rec", ")", "return", "rec" ]
37.595745
0.001103
def fetch_by_refresh_token(self, refresh_token): """ Retrieves an access token by its refresh token. :param refresh_token: The refresh token of an access token as a `str`. :return: An instance of :class:`oauth2.datatype.AccessToken`. :raises: :class:`oauth2.error.AccessTokenNotFound` if no access token could be retrieved. """ row = self.fetchone(self.fetch_by_refresh_token_query, refresh_token) if row is None: raise AccessTokenNotFound scopes = self._fetch_scopes(access_token_id=row[0]) data = self._fetch_data(access_token_id=row[0]) return self._row_to_token(data=data, scopes=scopes, row=row)
[ "def", "fetch_by_refresh_token", "(", "self", ",", "refresh_token", ")", ":", "row", "=", "self", ".", "fetchone", "(", "self", ".", "fetch_by_refresh_token_query", ",", "refresh_token", ")", "if", "row", "is", "None", ":", "raise", "AccessTokenNotFound", "scopes", "=", "self", ".", "_fetch_scopes", "(", "access_token_id", "=", "row", "[", "0", "]", ")", "data", "=", "self", ".", "_fetch_data", "(", "access_token_id", "=", "row", "[", "0", "]", ")", "return", "self", ".", "_row_to_token", "(", "data", "=", "data", ",", "scopes", "=", "scopes", ",", "row", "=", "row", ")" ]
33.571429
0.002759
def do_class(self, element, decl, pseudo): """Implement class declaration - pre-match.""" step = self.state[self.state['current_step']] actions = step['actions'] strval = self.eval_string_value(element, decl.value) actions.append(('attrib', ('class', strval)))
[ "def", "do_class", "(", "self", ",", "element", ",", "decl", ",", "pseudo", ")", ":", "step", "=", "self", ".", "state", "[", "self", ".", "state", "[", "'current_step'", "]", "]", "actions", "=", "step", "[", "'actions'", "]", "strval", "=", "self", ".", "eval_string_value", "(", "element", ",", "decl", ".", "value", ")", "actions", ".", "append", "(", "(", "'attrib'", ",", "(", "'class'", ",", "strval", ")", ")", ")" ]
49.166667
0.006667
def beagle(args): """ %prog beagle input.vcf 1 Use BEAGLE4.1 to impute vcf on chromosome 1. """ p = OptionParser(beagle.__doc__) p.set_home("beagle") p.set_ref() p.set_cpus() opts, args = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) vcffile, chr = args pf = vcffile.rsplit(".", 1)[0] outpf = pf + ".beagle" outfile = outpf + ".vcf.gz" mm = MakeManager() beagle_cmd = opts.beagle_home kg = op.join(opts.ref, "1000GP_Phase3") cmd = beagle_cmd + " gt={0}".format(vcffile) cmd += " ref={0}/chr{1}.1kg.phase3.v5a.bref".format(kg, chr) cmd += " map={0}/plink.chr{1}.GRCh37.map".format(kg, chr) cmd += " out={0}".format(outpf) cmd += " nthreads=16 gprobs=true" mm.add(vcffile, outfile, cmd) mm.write()
[ "def", "beagle", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "beagle", ".", "__doc__", ")", "p", ".", "set_home", "(", "\"beagle\"", ")", "p", ".", "set_ref", "(", ")", "p", ".", "set_cpus", "(", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "2", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "vcffile", ",", "chr", "=", "args", "pf", "=", "vcffile", ".", "rsplit", "(", "\".\"", ",", "1", ")", "[", "0", "]", "outpf", "=", "pf", "+", "\".beagle\"", "outfile", "=", "outpf", "+", "\".vcf.gz\"", "mm", "=", "MakeManager", "(", ")", "beagle_cmd", "=", "opts", ".", "beagle_home", "kg", "=", "op", ".", "join", "(", "opts", ".", "ref", ",", "\"1000GP_Phase3\"", ")", "cmd", "=", "beagle_cmd", "+", "\" gt={0}\"", ".", "format", "(", "vcffile", ")", "cmd", "+=", "\" ref={0}/chr{1}.1kg.phase3.v5a.bref\"", ".", "format", "(", "kg", ",", "chr", ")", "cmd", "+=", "\" map={0}/plink.chr{1}.GRCh37.map\"", ".", "format", "(", "kg", ",", "chr", ")", "cmd", "+=", "\" out={0}\"", ".", "format", "(", "outpf", ")", "cmd", "+=", "\" nthreads=16 gprobs=true\"", "mm", ".", "add", "(", "vcffile", ",", "outfile", ",", "cmd", ")", "mm", ".", "write", "(", ")" ]
25.645161
0.001212
def handle(self, type: str, *, kwargs: dict = None) -> Callable: """ Register an event handler with the :obj:`Layabout` instance. Args: type: The name of a Slack RTM API event to be handled. As a special case, although it is not a proper RTM event, ``*`` may be provided to handle all events. For more information about available events see the `Slack RTM API <https://api.slack.com/rtm>`_. kwargs: Optional arbitrary keyword arguments passed to the event handler when the event is triggered. Returns: A decorator that validates and registers a Layabout event handler. Raises: TypeError: If the decorated :obj:`Callable`'s signature does not accept at least 2 parameters. """ def decorator(fn: Callable) -> Callable: # Validate that the wrapped callable is a suitable event handler. sig = signature(fn) num_params = len(sig.parameters) if num_params < 2: raise TypeError(_format_parameter_error_message( fn.__name__, sig, num_params)) # Register a tuple of the callable and its kwargs, if any. self._handlers[type].append((fn, kwargs or {})) return fn return decorator
[ "def", "handle", "(", "self", ",", "type", ":", "str", ",", "*", ",", "kwargs", ":", "dict", "=", "None", ")", "->", "Callable", ":", "def", "decorator", "(", "fn", ":", "Callable", ")", "->", "Callable", ":", "# Validate that the wrapped callable is a suitable event handler.", "sig", "=", "signature", "(", "fn", ")", "num_params", "=", "len", "(", "sig", ".", "parameters", ")", "if", "num_params", "<", "2", ":", "raise", "TypeError", "(", "_format_parameter_error_message", "(", "fn", ".", "__name__", ",", "sig", ",", "num_params", ")", ")", "# Register a tuple of the callable and its kwargs, if any.", "self", ".", "_handlers", "[", "type", "]", ".", "append", "(", "(", "fn", ",", "kwargs", "or", "{", "}", ")", ")", "return", "fn", "return", "decorator" ]
41.484848
0.001428
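A hedged sketch of how the handle decorator above is typically wired up; the event type, handler body, greeting kwarg, and the rtm_send_message call are invented for illustration and assume a Slack client is injected as the first handler argument:
layabout = Layabout()

@layabout.handle('message', kwargs={'greeting': 'hello'})
def greet(slack, event, greeting):
    # slack and event are supplied when the event fires; greeting comes from kwargs.
    channel = event.get('channel')
    slack.rtm_send_message(channel, greeting)

# layabout.run() would then start dispatching registered handlers.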
def _sconnect(host=None, port=None, password=None): ''' Returns an instance of the redis client ''' if host is None: host = __salt__['config.option']('redis_sentinel.host', 'localhost') if port is None: port = __salt__['config.option']('redis_sentinel.port', 26379) if password is None: password = __salt__['config.option']('redis_sentinel.password') return redis.StrictRedis(host, port, password=password, decode_responses=True)
[ "def", "_sconnect", "(", "host", "=", "None", ",", "port", "=", "None", ",", "password", "=", "None", ")", ":", "if", "host", "is", "None", ":", "host", "=", "__salt__", "[", "'config.option'", "]", "(", "'redis_sentinel.host'", ",", "'localhost'", ")", "if", "port", "is", "None", ":", "port", "=", "__salt__", "[", "'config.option'", "]", "(", "'redis_sentinel.port'", ",", "26379", ")", "if", "password", "is", "None", ":", "password", "=", "__salt__", "[", "'config.option'", "]", "(", "'redis_sentinel.password'", ")", "return", "redis", ".", "StrictRedis", "(", "host", ",", "port", ",", "password", "=", "password", ",", "decode_responses", "=", "True", ")" ]
39.25
0.004149